From 28e6264b90e22e15ca7a7164c6fe35e4223ef14c Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Mon, 4 Dec 2023 14:39:08 -0800 Subject: [PATCH 01/68] fixed typos and warnings --- colors/colors.go | 31 +- go.sum | 27 +- .../add-build-pipeline-macro.go | 878 ++++---- macros/built-in/add-vault/add-vault-macro.go | 250 +-- .../pretty-print/pretty-print-macro.go | 6 +- .../remove-unused-tags-macro.go | 8 +- .../seed-risk-tracking-macro.go | 12 +- macros/built-in/seed-tags/seed-tags-macro.go | 8 +- main.go | 1924 +++++++++-------- model/types.go | 429 ++-- raa/dummy/dummy.go | 5 + raa/raa/raa.go | 11 +- report/excel.go | 6 +- report/json.go | 9 +- report/report.go | 70 +- .../code-backdooring/code-backdooring-rule.go | 2 +- ...ssing-authentication-second-factor-rule.go | 6 +- .../missing-authentication-rule.go | 2 +- .../missing-build-infrastructure-rule.go | 2 +- .../missing-cloud-hardening-rule.go | 60 +- .../missing-file-validation-rule.go | 2 +- .../missing-hardening-rule.go | 2 +- .../missing-identity-propagation-rule.go | 18 +- ...issing-identity-provider-isolation-rule.go | 10 +- .../missing-identity-store-rule.go | 12 +- .../missing-network-segmentation-rule.go | 8 +- .../missing-vault-isolation-rule.go | 8 +- .../missing-vault/missing-vault-rule.go | 2 +- .../mixed-targets-on-shared-runtime-rule.go | 2 +- .../search-query-injection-rule.go | 2 +- .../server-side-request-forgery-rule.go | 2 +- .../unchecked-deployment-rule.go | 2 +- .../unencrypted-asset-rule.go | 18 +- .../unencrypted-communication-rule.go | 5 +- .../unguarded-direct-datastore-access-rule.go | 7 +- .../untrusted-deserialization-rule.go | 4 +- risks/custom/demo/demo-rule.go | 1 + 37 files changed, 1962 insertions(+), 1889 deletions(-) diff --git a/colors/colors.go b/colors/colors.go index 506e97a6..cac70f6d 100644 --- a/colors/colors.go +++ b/colors/colors.go @@ -5,10 +5,33 @@ import ( "github.com/jung-kurt/gofpdf" ) -const Red, Amber, Green, Blue, DarkBlue, Black, Gray, LightGray, 
MiddleLightGray, MoreLightGray, VeryLightGray, ExtremeLightGray, Pink, LightPink = "#CC0000", "#AF780E", "#008000", "#000080", "#000060", "#000000", "#444444", "#666666", "#999999", "#D2D2D2", "#E5E5E5", "#F6F6F6", "#F987C5", "#FFE7EF" -const ExtremeLightBlue, OutOfScopeFancy, CustomDevelopedParts = "#DDFFFF", "#D5D7FF", "#FFFC97" -const LightBlue = "#77FFFF" -const Brown = "#8C4C17" +const ( + Amber = "#AF780E" + Green = "#008000" + Blue = "#000080" + DarkBlue = "#000060" + Black = "#000000" + Gray = "#444444" + LightGray = "#666666" + MiddleLightGray = "#999999" + MoreLightGray = "#D2D2D2" + VeryLightGray = "#E5E5E5" + ExtremeLightGray = "#F6F6F6" + Pink = "#F987C5" + LightPink = "#FFE7EF" + Red = "#CC0000" + OutOfScopeFancy = "#D5D7FF" + CustomDevelopedParts = "#FFFC97" + ExtremeLightBlue = "#DDFFFF" + LightBlue = "#77FFFF" + Brown = "#8C4C17" +) + +var ( + _ = Green + Blue + MoreLightGray + ExtremeLightGray + LightBlue + _ = ColorOutOfScope + _ = RgbHexColorModelFailure +) func DarkenHexColor(hexString string) string { colorBytes, _ := hex.DecodeString(hexString[1:]) diff --git a/go.sum b/go.sum index 1211ed1e..827e8d8f 100644 --- a/go.sum +++ b/go.sum @@ -2,13 +2,10 @@ github.com/blend/go-sdk v1.20220411.3 h1:GFV4/FQX5UzXLPwWV03gP811pj7B8J2sbuq+GJQ github.com/blend/go-sdk v1.20220411.3/go.mod h1:7lnH8fTi6U4i1fArEXRyOIY2E1X4MALg09qsQqY1+ak= github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= -github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s= -github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U= github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM= github.com/bytedance/sonic v1.10.2 h1:GQebETVBxYB7JGWJtLBi07OVzWwt+8dWA00gEVW2ZFE= github.com/bytedance/sonic v1.10.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= 
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= -github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams= github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0= github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA= @@ -18,8 +15,6 @@ github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLI github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= -github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= @@ -32,8 +27,6 @@ github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/o github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= -github.com/go-playground/validator/v10 v10.14.0 h1:vgvQWe3XCz3gIeFDm/HnTIbj6UGmg/+t63MyGU2n5js= -github.com/go-playground/validator/v10 v10.14.0/go.mod 
h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= github.com/go-playground/validator/v10 v10.15.5 h1:LEBecTWb/1j5TNY1YYG2RcOUN3R7NLylN+x8TTueE24= github.com/go-playground/validator/v10 v10.15.5/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= @@ -52,15 +45,11 @@ github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+ github.com/jung-kurt/gofpdf v1.16.2 h1:jgbatWHfRlPYiK85qgevsZTHviWXKwB1TTiKdz5PtRc= github.com/jung-kurt/gofpdf v1.16.2/go.mod h1:1hl7y57EsiPAkLbOwzpzqgx1A30nQCk/YmFV8S2vmK0= github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= -github.com/klauspost/cpuid/v2 v2.2.4 h1:acbojRNwl3o09bUq+yDCtZFc1aiwaAAxtcn8YkZXnvk= -github.com/klauspost/cpuid/v2 v2.2.4/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY= github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= -github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= -github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -70,15 +59,11 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/mohae/deepcopy 
v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= -github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ= -github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4= github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4= github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= -github.com/phpdave11/gofpdi v1.0.7 h1:k2oy4yhkQopCK+qW8KjCla0iU2RpDow+QUDmH9DDt44= github.com/phpdave11/gofpdi v1.0.7/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= github.com/phpdave11/gofpdi v1.0.13 h1:o61duiW8M9sMlkVXWlvP92sZJtGKENvW3VExs6dZukQ= github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= -github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -100,8 +85,7 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.3 h1:RP3t2pwF7cMEbC1dqtB6poj3niw/9gnV4Cjg5oW5gtY= -github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/twitchyliquid64/golang-asm v0.15.1 
h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= @@ -109,20 +93,16 @@ github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4d github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= github.com/wcharczuk/go-chart v2.0.1+incompatible h1:0pz39ZAycJFF7ju/1mepnk26RLVLBCWz1STcD3doU0A= github.com/wcharczuk/go-chart v2.0.1+incompatible/go.mod h1:PF5tmL4EIx/7Wf+hEkpCqYi5He4u90sw+0+6FhrryuE= -github.com/xuri/efp v0.0.0-20230802181842-ad255f2331ca h1:uvPMDVyP7PXMMioYdyPH+0O+Ta/UO1WFfNYMO3Wz0eg= github.com/xuri/efp v0.0.0-20230802181842-ad255f2331ca/go.mod h1:ybY/Jr0T0GTCnYjKqmdwxyxn2BQf2RcQIIvex5QldPI= github.com/xuri/efp v0.0.0-20231025114914-d1ff6096ae53 h1:Chd9DkqERQQuHpXjR/HSV1jLZA6uaoiwwH3vSuF3IW0= github.com/xuri/efp v0.0.0-20231025114914-d1ff6096ae53/go.mod h1:ybY/Jr0T0GTCnYjKqmdwxyxn2BQf2RcQIIvex5QldPI= github.com/xuri/excelize/v2 v2.8.0 h1:Vd4Qy809fupgp1v7X+nCS/MioeQmYVVzi495UCTqB7U= github.com/xuri/excelize/v2 v2.8.0/go.mod h1:6iA2edBTKxKbZAa7X5bDhcCg51xdOn1Ar5sfoXRGrQg= -github.com/xuri/nfp v0.0.0-20230819163627-dc951e3ffe1a h1:Mw2VNrNNNjDtw68VsEj2+st+oCSn4Uz7vZw6TbhcV1o= github.com/xuri/nfp v0.0.0-20230819163627-dc951e3ffe1a/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ= github.com/xuri/nfp v0.0.0-20230919160717-d98342af3f05 h1:qhbILQo1K3mphbwKh1vNm4oGezE1eF9fQWmNiIpSfI4= github.com/xuri/nfp v0.0.0-20230919160717-d98342af3f05/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= -golang.org/x/arch v0.3.0 h1:02VY4/ZcO/gBOH6PUaoiptASxtXU10jazRCP865E97k= -golang.org/x/arch v0.3.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= golang.org/x/arch v0.5.0 h1:jpGode6huXQxcskEIpOCvrU+tzo81b6+oFLUYXWtH/Y= 
golang.org/x/arch v0.5.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= @@ -131,7 +111,6 @@ golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98y golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/image v0.11.0 h1:ds2RoQvBvYTiJkwpSFDwCcDFNX7DqjL2WsUgTNk0Ooo= golang.org/x/image v0.11.0/go.mod h1:bglhjqbqVuEb9e9+eNR45Jfu7D+T4Qan+NhQk8Ck2P8= golang.org/x/image v0.13.0 h1:3cge/F/QTkNLauhf2QoE9zp+7sr+ZcL4HnoZmdwg9sg= golang.org/x/image v0.13.0/go.mod h1:6mmbMOeV28HuMTgA6OSRkdXKYw/t5W9Uwn2Yv1r3Yxk= @@ -142,7 +121,6 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= -golang.org/x/net v0.14.0 h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14= golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= @@ -153,7 +131,6 @@ golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys 
v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -183,8 +160,6 @@ golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= -google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= diff --git a/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go b/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go index dc68cea1..b6ec0514 100644 --- a/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go +++ b/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go @@ -145,7 +145,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { }, nil case 11: possibleAnswers := make([]string, 0) - for id, _ := range model.ParsedModelRoot.TechnicalAssets { + for id := range model.ParsedModelRoot.TechnicalAssets { possibleAnswers = append(possibleAnswers, id) } sort.Strings(possibleAnswers) @@ -299,12 +299,12 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry Confidentiality: 
model.Confidential.String(), Integrity: model.Critical.String(), Availability: model.Important.String(), - Justification_cia_rating: "Sourcecode is at least rated as 'critical' in terms of integrity, because any " + + JustificationCiaRating: "Sourcecode is at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", } *changeLogCollector = append(*changeLogCollector, "adding data asset: sourcecode") if !dryRun { - modelInput.Data_assets["Sourcecode"] = dataAsset + modelInput.DataAssets["Sourcecode"] = dataAsset } } @@ -321,12 +321,12 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry Confidentiality: model.Confidential.String(), Integrity: model.Critical.String(), Availability: model.Important.String(), - Justification_cia_rating: "Deployment units are at least rated as 'critical' in terms of integrity, because any " + + JustificationCiaRating: "Deployment units are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", } *changeLogCollector = append(*changeLogCollector, "adding data asset: deployment") if !dryRun { - modelInput.Data_assets["Deployment"] = dataAsset + modelInput.DataAssets["Deployment"] = dataAsset } } @@ -340,137 +340,137 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry commLinks := make(map[string]model.InputCommunicationLink) commLinks["Sourcecode Repository Traffic"] = model.InputCommunicationLink{ - Target: sourceRepoID, - Description: "Sourcecode Repository Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EnduserIdentityPropagation.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - Usage: model.DevOps.String(), - Data_assets_sent: []string{"sourcecode"}, - Data_assets_received: []string{"sourcecode"}, - 
Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: sourceRepoID, + Description: "Sourcecode Repository Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.EndUserIdentityPropagation.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"sourcecode"}, + DataAssetsReceived: []string{"sourcecode"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } commLinks["Build Pipeline Traffic"] = model.InputCommunicationLink{ - Target: buildPipelineID, - Description: "Build Pipeline Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EnduserIdentityPropagation.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: true, - Usage: model.DevOps.String(), - Data_assets_sent: nil, - Data_assets_received: []string{"deployment"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: buildPipelineID, + Description: "Build Pipeline Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.EndUserIdentityPropagation.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: true, + Usage: model.DevOps.String(), + DataAssetsSent: nil, + DataAssetsReceived: []string{"deployment"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } commLinks["Artifact Registry Traffic"] = model.InputCommunicationLink{ - Target: artifactRegistryID, - Description: "Artifact Registry Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EnduserIdentityPropagation.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: true, - Usage: model.DevOps.String(), - Data_assets_sent: nil, - Data_assets_received: []string{"deployment"}, - Diagram_tweak_weight: 0, - 
Diagram_tweak_constraint: false, + Target: artifactRegistryID, + Description: "Artifact Registry Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.EndUserIdentityPropagation.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: true, + Usage: model.DevOps.String(), + DataAssetsSent: nil, + DataAssetsReceived: []string{"deployment"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } if containerTechUsed { commLinks["Container Registry Traffic"] = model.InputCommunicationLink{ - Target: containerRepoID, - Description: "Container Registry Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EnduserIdentityPropagation.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - Usage: model.DevOps.String(), - Data_assets_sent: []string{"deployment"}, - Data_assets_received: []string{"deployment"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: containerRepoID, + Description: "Container Registry Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.EndUserIdentityPropagation.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"deployment"}, + DataAssetsReceived: []string{"deployment"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } commLinks["Container Platform Traffic"] = model.InputCommunicationLink{ - Target: containerPlatformID, - Description: "Container Platform Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EnduserIdentityPropagation.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - Usage: model.DevOps.String(), - Data_assets_sent: []string{"deployment"}, - Data_assets_received: 
[]string{"deployment"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: containerPlatformID, + Description: "Container Platform Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.EndUserIdentityPropagation.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"deployment"}, + DataAssetsReceived: []string{"deployment"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } } if codeInspectionUsed { commLinks["Code Inspection Platform Traffic"] = model.InputCommunicationLink{ - Target: codeInspectionPlatformID, - Description: "Code Inspection Platform Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EnduserIdentityPropagation.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: true, - Usage: model.DevOps.String(), - Data_assets_sent: nil, - Data_assets_received: []string{"sourcecode"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: codeInspectionPlatformID, + Description: "Code Inspection Platform Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.EndUserIdentityPropagation.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: true, + Usage: model.DevOps.String(), + DataAssetsSent: nil, + DataAssetsReceived: []string{"sourcecode"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } } techAsset := model.InputTechnicalAsset{ - ID: id, - Description: "Development Client", - Type: model.ExternalEntity.String(), - Usage: model.DevOps.String(), - Used_as_client_by_human: true, - Out_of_scope: true, - Justification_out_of_scope: "Development client is not directly in-scope of the application.", - Size: model.System.String(), - Technology: model.DevOpsClient.String(), - Tags: []string{}, - 
Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Physical.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Important.String(), - Justification_cia_rating: "Sourcecode processing components are at least rated as 'critical' in terms of integrity, because any " + + ID: id, + Description: "Development Client", + Type: model.ExternalEntity.String(), + Usage: model.DevOps.String(), + UsedAsClientByHuman: true, + OutOfScope: true, + JustificationOutOfScope: "Development client is not directly in-scope of the application.", + Size: model.System.String(), + Technology: model.DevOpsClient.String(), + Tags: []string{}, + Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Physical.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.Critical.String(), + Availability: model.Important.String(), + JustificationCiaRating: "Sourcecode processing components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", - Multi_tenant: false, - Redundant: false, - Custom_developed_parts: false, - Data_assets_processed: []string{"sourcecode", "deployment"}, - Data_assets_stored: []string{"sourcecode", "deployment"}, - Data_formats_accepted: []string{"file"}, - Communication_links: commLinks, + MultiTenant: false, + Redundant: false, + CustomDevelopedParts: false, + DataAssetsProcessed: []string{"sourcecode", "deployment"}, + DataAssetsStored: []string{"sourcecode", "deployment"}, + DataFormatsAccepted: []string{"file"}, + CommunicationLinks: commLinks, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.Technical_assets["Development Client"] = techAsset + 
modelInput.TechnicalAssets["Development Client"] = techAsset } } @@ -483,36 +483,36 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry encryption = model.Transparent.String() } techAsset := model.InputTechnicalAsset{ - ID: id, - Description: macroState["source-repository"][0] + " Sourcecode Repository", - Type: model.Process.String(), - Usage: model.DevOps.String(), - Used_as_client_by_human: false, - Out_of_scope: false, - Justification_out_of_scope: "", - Size: model.Service.String(), - Technology: model.SourcecodeRepository.String(), - Tags: []string{model.NormalizeTag(macroState["source-repository"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Important.String(), - Justification_cia_rating: "Sourcecode processing components are at least rated as 'critical' in terms of integrity, because any " + + ID: id, + Description: macroState["source-repository"][0] + " Sourcecode Repository", + Type: model.Process.String(), + Usage: model.DevOps.String(), + UsedAsClientByHuman: false, + OutOfScope: false, + JustificationOutOfScope: "", + Size: model.Service.String(), + Technology: model.SourcecodeRepository.String(), + Tags: []string{model.NormalizeTag(macroState["source-repository"][0])}, + Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Virtual.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.Critical.String(), + Availability: model.Important.String(), + JustificationCiaRating: "Sourcecode processing components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", - Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - 
Redundant: false, - Custom_developed_parts: false, - Data_assets_processed: []string{"sourcecode"}, - Data_assets_stored: []string{"sourcecode"}, - Data_formats_accepted: []string{"file"}, - Communication_links: nil, + MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + CustomDevelopedParts: false, + DataAssetsProcessed: []string{"sourcecode"}, + DataAssetsStored: []string{"sourcecode"}, + DataFormatsAccepted: []string{"file"}, + CommunicationLinks: nil, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.Technical_assets[macroState["source-repository"][0]+" Sourcecode Repository"] = techAsset + modelInput.TechnicalAssets[macroState["source-repository"][0]+" Sourcecode Repository"] = techAsset } } @@ -526,36 +526,36 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry encryption = model.Transparent.String() } techAsset := model.InputTechnicalAsset{ - ID: id, - Description: macroState["container-registry"][0] + " Container Registry", - Type: model.Process.String(), - Usage: model.DevOps.String(), - Used_as_client_by_human: false, - Out_of_scope: false, - Justification_out_of_scope: "", - Size: model.Service.String(), - Technology: model.ArtifactRegistry.String(), - Tags: []string{model.NormalizeTag(macroState["container-registry"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Important.String(), - Justification_cia_rating: "Container registry components are at least rated as 'critical' in terms of integrity, because any " + + ID: id, + Description: macroState["container-registry"][0] + " Container Registry", + Type: model.Process.String(), + Usage: model.DevOps.String(), + UsedAsClientByHuman: false, + 
OutOfScope: false, + JustificationOutOfScope: "", + Size: model.Service.String(), + Technology: model.ArtifactRegistry.String(), + Tags: []string{model.NormalizeTag(macroState["container-registry"][0])}, + Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Virtual.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.Critical.String(), + Availability: model.Important.String(), + JustificationCiaRating: "Container registry components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", - Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - Custom_developed_parts: false, - Data_assets_processed: []string{"deployment"}, - Data_assets_stored: []string{"deployment"}, - Data_formats_accepted: []string{"file"}, - Communication_links: nil, + MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + CustomDevelopedParts: false, + DataAssetsProcessed: []string{"deployment"}, + DataAssetsStored: []string{"deployment"}, + DataFormatsAccepted: []string{"file"}, + CommunicationLinks: nil, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.Technical_assets[macroState["container-registry"][0]+" Container Registry"] = techAsset + modelInput.TechnicalAssets[macroState["container-registry"][0]+" Container Registry"] = techAsset } } @@ -568,36 +568,36 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry encryption = model.Transparent.String() } techAsset := model.InputTechnicalAsset{ - ID: id, - Description: macroState["container-platform"][0] + " Container Platform", - Type: model.Process.String(), - Usage: model.DevOps.String(), - Used_as_client_by_human: false, - Out_of_scope: false, - 
Justification_out_of_scope: "", - Size: model.System.String(), - Technology: model.ContainerPlatform.String(), - Tags: []string{model.NormalizeTag(macroState["container-platform"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.MissionCritical.String(), - Availability: model.MissionCritical.String(), - Justification_cia_rating: "Container platform components are rated as 'mission-critical' in terms of integrity and availability, because any " + + ID: id, + Description: macroState["container-platform"][0] + " Container Platform", + Type: model.Process.String(), + Usage: model.DevOps.String(), + UsedAsClientByHuman: false, + OutOfScope: false, + JustificationOutOfScope: "", + Size: model.System.String(), + Technology: model.ContainerPlatform.String(), + Tags: []string{model.NormalizeTag(macroState["container-platform"][0])}, + Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Virtual.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.MissionCritical.String(), + Availability: model.MissionCritical.String(), + JustificationCiaRating: "Container platform components are rated as 'mission-critical' in terms of integrity and availability, because any " + "malicious modification of it might lead to a backdoored production system.", - Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - Custom_developed_parts: false, - Data_assets_processed: []string{"deployment"}, - Data_assets_stored: []string{"deployment"}, - Data_formats_accepted: []string{"file"}, - Communication_links: nil, + MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + CustomDevelopedParts: false, + DataAssetsProcessed: []string{"deployment"}, + DataAssetsStored: 
[]string{"deployment"}, + DataFormatsAccepted: []string{"file"}, + CommunicationLinks: nil, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.Technical_assets[macroState["container-platform"][0]+" Container Platform"] = techAsset + modelInput.TechnicalAssets[macroState["container-platform"][0]+" Container Platform"] = techAsset } } } @@ -613,115 +613,115 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry commLinks := make(map[string]model.InputCommunicationLink) commLinks["Sourcecode Repository Traffic"] = model.InputCommunicationLink{ - Target: sourceRepoID, - Description: "Sourcecode Repository Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: true, - Usage: model.DevOps.String(), - Data_assets_sent: nil, - Data_assets_received: []string{"sourcecode"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: sourceRepoID, + Description: "Sourcecode Repository Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: true, + Usage: model.DevOps.String(), + DataAssetsSent: nil, + DataAssetsReceived: []string{"sourcecode"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } commLinks["Artifact Registry Traffic"] = model.InputCommunicationLink{ - Target: artifactRegistryID, - Description: "Artifact Registry Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - Usage: model.DevOps.String(), - Data_assets_sent: []string{"deployment"}, - Data_assets_received: 
[]string{"deployment"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: artifactRegistryID, + Description: "Artifact Registry Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"deployment"}, + DataAssetsReceived: []string{"deployment"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } if containerTechUsed { commLinks["Container Registry Traffic"] = model.InputCommunicationLink{ - Target: containerRepoID, - Description: "Container Registry Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - Usage: model.DevOps.String(), - Data_assets_sent: []string{"deployment"}, - Data_assets_received: []string{"deployment"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: containerRepoID, + Description: "Container Registry Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"deployment"}, + DataAssetsReceived: []string{"deployment"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } if macroState["push-or-pull"][0] == pushOrPull[0] { // Push commLinks["Container Platform Push"] = model.InputCommunicationLink{ - Target: containerPlatformID, - Description: "Container Platform Push", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - Usage: model.DevOps.String(), - 
Data_assets_sent: []string{"deployment"}, - Data_assets_received: []string{"deployment"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: containerPlatformID, + Description: "Container Platform Push", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"deployment"}, + DataAssetsReceived: []string{"deployment"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } } else { // Pull commLinkPull := model.InputCommunicationLink{ - Target: containerRepoID, - Description: "Container Platform Pull", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: true, - Usage: model.DevOps.String(), - Data_assets_sent: nil, - Data_assets_received: []string{"deployment"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: containerRepoID, + Description: "Container Platform Pull", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: true, + Usage: model.DevOps.String(), + DataAssetsSent: nil, + DataAssetsReceived: []string{"deployment"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } if !dryRun { titleOfTargetAsset := macroState["container-platform"][0] + " Container Platform" - containerPlatform := modelInput.Technical_assets[titleOfTargetAsset] - if containerPlatform.Communication_links == nil { - containerPlatform.Communication_links = make(map[string]model.InputCommunicationLink, 0) + containerPlatform := modelInput.TechnicalAssets[titleOfTargetAsset] + if containerPlatform.CommunicationLinks == nil { + 
containerPlatform.CommunicationLinks = make(map[string]model.InputCommunicationLink) } - containerPlatform.Communication_links["Container Platform Pull"] = commLinkPull - modelInput.Technical_assets[titleOfTargetAsset] = containerPlatform + containerPlatform.CommunicationLinks["Container Platform Pull"] = commLinkPull + modelInput.TechnicalAssets[titleOfTargetAsset] = containerPlatform } } } if codeInspectionUsed { commLinks["Code Inspection Platform Traffic"] = model.InputCommunicationLink{ - Target: codeInspectionPlatformID, - Description: "Code Inspection Platform Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - Usage: model.DevOps.String(), - Data_assets_sent: []string{"sourcecode"}, - Data_assets_received: []string{}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: codeInspectionPlatformID, + Description: "Code Inspection Platform Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"sourcecode"}, + DataAssetsReceived: []string{}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } } // The individual deployments @@ -729,73 +729,73 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry //fmt.Println("Adding deployment flow to:", deployTargetID) if containerTechUsed { if !dryRun { - containerPlatform := modelInput.Technical_assets[macroState["container-platform"][0]+" Container Platform"] - if containerPlatform.Communication_links == nil { - containerPlatform.Communication_links = make(map[string]model.InputCommunicationLink, 0) + containerPlatform := modelInput.TechnicalAssets[macroState["container-platform"][0]+" Container 
Platform"] + if containerPlatform.CommunicationLinks == nil { + containerPlatform.CommunicationLinks = make(map[string]model.InputCommunicationLink) } - containerPlatform.Communication_links["Container Spawning ("+deployTargetID+")"] = model.InputCommunicationLink{ - Target: deployTargetID, - Description: "Container Spawning " + deployTargetID, - Protocol: model.ContainerSpawning.String(), - Authentication: model.NoneAuthentication.String(), - Authorization: model.NoneAuthorization.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - Usage: model.DevOps.String(), - Data_assets_sent: []string{"deployment"}, - Data_assets_received: nil, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + containerPlatform.CommunicationLinks["Container Spawning ("+deployTargetID+")"] = model.InputCommunicationLink{ + Target: deployTargetID, + Description: "Container Spawning " + deployTargetID, + Protocol: model.ContainerSpawning.String(), + Authentication: model.NoneAuthentication.String(), + Authorization: model.NoneAuthorization.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"deployment"}, + DataAssetsReceived: nil, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } - modelInput.Technical_assets[macroState["container-platform"][0]+" Container Platform"] = containerPlatform + modelInput.TechnicalAssets[macroState["container-platform"][0]+" Container Platform"] = containerPlatform } } else { // No Containers used if macroState["push-or-pull"][0] == pushOrPull[0] { // Push commLinks["Deployment Push ("+deployTargetID+")"] = model.InputCommunicationLink{ - Target: deployTargetID, - Description: "Deployment Push to " + deployTargetID, - Protocol: model.SSH.String(), - Authentication: model.ClientCertificate.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - 
Usage: model.DevOps.String(), - Data_assets_sent: []string{"deployment"}, - Data_assets_received: nil, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: deployTargetID, + Description: "Deployment Push to " + deployTargetID, + Protocol: model.SSH.String(), + Authentication: model.ClientCertificate.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"deployment"}, + DataAssetsReceived: nil, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } } else { // Pull pullFromWhere := artifactRegistryID commLinkPull := model.InputCommunicationLink{ - Target: pullFromWhere, - Description: "Deployment Pull from " + deployTargetID, - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: true, - Usage: model.DevOps.String(), - Data_assets_sent: nil, - Data_assets_received: []string{"deployment"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: pullFromWhere, + Description: "Deployment Pull from " + deployTargetID, + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: true, + Usage: model.DevOps.String(), + DataAssetsSent: nil, + DataAssetsReceived: []string{"deployment"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } if !dryRun { // take care to lookup by title (as keyed in input YAML by title and only in parsed model representation by ID) titleOfTargetAsset := model.ParsedModelRoot.TechnicalAssets[deployTargetID].Title - x := modelInput.Technical_assets[titleOfTargetAsset] - if x.Communication_links == nil { - x.Communication_links = make(map[string]model.InputCommunicationLink, 0) + x := 
modelInput.TechnicalAssets[titleOfTargetAsset] + if x.CommunicationLinks == nil { + x.CommunicationLinks = make(map[string]model.InputCommunicationLink) } - x.Communication_links["Deployment Pull ("+deployTargetID+")"] = commLinkPull - modelInput.Technical_assets[titleOfTargetAsset] = x + x.CommunicationLinks["Deployment Pull ("+deployTargetID+")"] = commLinkPull + modelInput.TechnicalAssets[titleOfTargetAsset] = x } } @@ -804,8 +804,8 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry // don't forget to also add the "deployment" data asset as stored on the target targetAssetTitle := model.ParsedModelRoot.TechnicalAssets[deployTargetID].Title assetsStored := make([]string, 0) - if modelInput.Technical_assets[targetAssetTitle].Data_assets_stored != nil { - for _, val := range modelInput.Technical_assets[targetAssetTitle].Data_assets_stored { + if modelInput.TechnicalAssets[targetAssetTitle].DataAssetsStored != nil { + for _, val := range modelInput.TechnicalAssets[targetAssetTitle].DataAssetsStored { assetsStored = append(assetsStored, fmt.Sprintf("%v", val)) } } @@ -815,43 +815,43 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } mergedArrays = append(mergedArrays, "deployment") if !dryRun { - x := modelInput.Technical_assets[targetAssetTitle] - x.Data_assets_stored = mergedArrays - modelInput.Technical_assets[targetAssetTitle] = x + x := modelInput.TechnicalAssets[targetAssetTitle] + x.DataAssetsStored = mergedArrays + modelInput.TechnicalAssets[targetAssetTitle] = x } } techAsset := model.InputTechnicalAsset{ - ID: id, - Description: macroState["build-pipeline"][0] + " Build Pipeline", - Type: model.Process.String(), - Usage: model.DevOps.String(), - Used_as_client_by_human: false, - Out_of_scope: false, - Justification_out_of_scope: "", - Size: model.Service.String(), - Technology: model.BuildPipeline.String(), - Tags: []string{model.NormalizeTag(macroState["build-pipeline"][0])}, - Internet: 
strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Important.String(), - Justification_cia_rating: "Build pipeline components are at least rated as 'critical' in terms of integrity, because any " + + ID: id, + Description: macroState["build-pipeline"][0] + " Build Pipeline", + Type: model.Process.String(), + Usage: model.DevOps.String(), + UsedAsClientByHuman: false, + OutOfScope: false, + JustificationOutOfScope: "", + Size: model.Service.String(), + Technology: model.BuildPipeline.String(), + Tags: []string{model.NormalizeTag(macroState["build-pipeline"][0])}, + Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Virtual.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.Critical.String(), + Availability: model.Important.String(), + JustificationCiaRating: "Build pipeline components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", - Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - Custom_developed_parts: false, - Data_assets_processed: []string{"sourcecode", "deployment"}, - Data_assets_stored: []string{"sourcecode", "deployment"}, - Data_formats_accepted: []string{"file"}, - Communication_links: commLinks, + MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + CustomDevelopedParts: false, + DataAssetsProcessed: []string{"sourcecode", "deployment"}, + DataAssetsStored: []string{"sourcecode", "deployment"}, + DataFormatsAccepted: []string{"file"}, + CommunicationLinks: commLinks, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { 
- modelInput.Technical_assets[macroState["build-pipeline"][0]+" Build Pipeline"] = techAsset + modelInput.TechnicalAssets[macroState["build-pipeline"][0]+" Build Pipeline"] = techAsset } } @@ -864,36 +864,36 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry encryption = model.Transparent.String() } techAsset := model.InputTechnicalAsset{ - ID: id, - Description: macroState["artifact-registry"][0] + " Artifact Registry", - Type: model.Process.String(), - Usage: model.DevOps.String(), - Used_as_client_by_human: false, - Out_of_scope: false, - Justification_out_of_scope: "", - Size: model.Service.String(), - Technology: model.ArtifactRegistry.String(), - Tags: []string{model.NormalizeTag(macroState["artifact-registry"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Important.String(), - Justification_cia_rating: "Artifact registry components are at least rated as 'critical' in terms of integrity, because any " + + ID: id, + Description: macroState["artifact-registry"][0] + " Artifact Registry", + Type: model.Process.String(), + Usage: model.DevOps.String(), + UsedAsClientByHuman: false, + OutOfScope: false, + JustificationOutOfScope: "", + Size: model.Service.String(), + Technology: model.ArtifactRegistry.String(), + Tags: []string{model.NormalizeTag(macroState["artifact-registry"][0])}, + Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Virtual.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.Critical.String(), + Availability: model.Important.String(), + JustificationCiaRating: "Artifact registry components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a 
backdoored production system.", - Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - Custom_developed_parts: false, - Data_assets_processed: []string{"sourcecode", "deployment"}, - Data_assets_stored: []string{"sourcecode", "deployment"}, - Data_formats_accepted: []string{"file"}, - Communication_links: nil, + MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + CustomDevelopedParts: false, + DataAssetsProcessed: []string{"sourcecode", "deployment"}, + DataAssetsStored: []string{"sourcecode", "deployment"}, + DataFormatsAccepted: []string{"file"}, + CommunicationLinks: nil, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.Technical_assets[macroState["artifact-registry"][0]+" Artifact Registry"] = techAsset + modelInput.TechnicalAssets[macroState["artifact-registry"][0]+" Artifact Registry"] = techAsset } } @@ -907,36 +907,36 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry encryption = model.Transparent.String() } techAsset := model.InputTechnicalAsset{ - ID: id, - Description: macroState["code-inspection-platform"][0] + " Code Inspection Platform", - Type: model.Process.String(), - Usage: model.DevOps.String(), - Used_as_client_by_human: false, - Out_of_scope: false, - Justification_out_of_scope: "", - Size: model.Service.String(), - Technology: model.CodeInspectionPlatform.String(), - Tags: []string{model.NormalizeTag(macroState["code-inspection-platform"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Important.String(), - Availability: model.Operational.String(), - Justification_cia_rating: "Sourcecode inspection platforms are rated at least 'important' in terms of integrity, because any " + 
+ ID: id, + Description: macroState["code-inspection-platform"][0] + " Code Inspection Platform", + Type: model.Process.String(), + Usage: model.DevOps.String(), + UsedAsClientByHuman: false, + OutOfScope: false, + JustificationOutOfScope: "", + Size: model.Service.String(), + Technology: model.CodeInspectionPlatform.String(), + Tags: []string{model.NormalizeTag(macroState["code-inspection-platform"][0])}, + Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Virtual.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.Important.String(), + Availability: model.Operational.String(), + JustificationCiaRating: "Sourcecode inspection platforms are rated at least 'important' in terms of integrity, because any " + "malicious modification of it might lead to vulnerabilities found by the scanner engine not being shown.", - Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - Custom_developed_parts: false, - Data_assets_processed: []string{"sourcecode"}, - Data_assets_stored: []string{"sourcecode"}, - Data_formats_accepted: []string{"file"}, - Communication_links: nil, + MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + CustomDevelopedParts: false, + DataAssetsProcessed: []string{"sourcecode"}, + DataAssetsStored: []string{"sourcecode"}, + DataFormatsAccepted: []string{"file"}, + CommunicationLinks: nil, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.Technical_assets[macroState["code-inspection-platform"][0]+" Code Inspection Platform"] = techAsset + modelInput.TechnicalAssets[macroState["code-inspection-platform"][0]+" Code Inspection Platform"] = techAsset } } } @@ -947,25 +947,25 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry //fmt.Println("Adding new trust boundary of 
type:", trustBoundaryType) title := "DevOps Network" trustBoundary := model.InputTrustBoundary{ - ID: "devops-network", - Description: "DevOps Network", - Type: trustBoundaryType, - Tags: []string{}, - Technical_assets_inside: serverSideTechAssets, - Trust_boundaries_nested: nil, + ID: "devops-network", + Description: "DevOps Network", + Type: trustBoundaryType, + Tags: []string{}, + TechnicalAssetsInside: serverSideTechAssets, + TrustBoundariesNested: nil, } *changeLogCollector = append(*changeLogCollector, "adding trust boundary: devops-network") if !dryRun { - modelInput.Trust_boundaries[title] = trustBoundary + modelInput.TrustBoundaries[title] = trustBoundary } } else { existingTrustBoundaryToAddTo := macroState["selected-trust-boundary"][0] //fmt.Println("Adding to existing trust boundary:", existingTrustBoundaryToAddTo) title := model.ParsedModelRoot.TrustBoundaries[existingTrustBoundaryToAddTo].Title assetsInside := make([]string, 0) - if modelInput.Trust_boundaries[title].Technical_assets_inside != nil { - vals := modelInput.Trust_boundaries[title].Technical_assets_inside - for _, val := range vals { + if modelInput.TrustBoundaries[title].TechnicalAssetsInside != nil { + values := modelInput.TrustBoundaries[title].TechnicalAssetsInside + for _, val := range values { assetsInside = append(assetsInside, fmt.Sprintf("%v", val)) } } @@ -976,12 +976,12 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry mergedArrays = append(mergedArrays, serverSideTechAssets...) 
*changeLogCollector = append(*changeLogCollector, "filling existing trust boundary: "+existingTrustBoundaryToAddTo) if !dryRun { - if modelInput.Trust_boundaries == nil { - modelInput.Trust_boundaries = make(map[string]model.InputTrustBoundary, 0) + if modelInput.TrustBoundaries == nil { + modelInput.TrustBoundaries = make(map[string]model.InputTrustBoundary) } - tb := modelInput.Trust_boundaries[title] - tb.Technical_assets_inside = mergedArrays - modelInput.Trust_boundaries[title] = tb + tb := modelInput.TrustBoundaries[title] + tb.TechnicalAssetsInside = mergedArrays + modelInput.TrustBoundaries[title] = tb } } } @@ -994,17 +994,17 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } title := macroState["container-platform"][0] + " Runtime" sharedRuntime := model.InputSharedRuntime{ - ID: containerSharedRuntimeID, - Description: title, - Tags: []string{model.NormalizeTag(macroState["container-platform"][0])}, - Technical_assets_running: assetsRunning, + ID: containerSharedRuntimeID, + Description: title, + Tags: []string{model.NormalizeTag(macroState["container-platform"][0])}, + TechnicalAssetsRunning: assetsRunning, } *changeLogCollector = append(*changeLogCollector, "adding shared runtime: "+containerSharedRuntimeID) if !dryRun { - if modelInput.Shared_runtimes == nil { - modelInput.Shared_runtimes = make(map[string]model.InputSharedRuntime, 0) + if modelInput.SharedRuntimes == nil { + modelInput.SharedRuntimes = make(map[string]model.InputSharedRuntime) } - modelInput.Shared_runtimes[title] = sharedRuntime + modelInput.SharedRuntimes[title] = sharedRuntime } } diff --git a/macros/built-in/add-vault/add-vault-macro.go b/macros/built-in/add-vault/add-vault-macro.go index 03ec5f57..06fc5065 100644 --- a/macros/built-in/add-vault/add-vault-macro.go +++ b/macros/built-in/add-vault/add-vault-macro.go @@ -84,7 +84,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { }, nil case 4: possibleAnswers := 
make([]string, 0) - for id, _ := range model.ParsedModelRoot.TechnicalAssets { + for id := range model.ParsedModelRoot.TechnicalAssets { possibleAnswers = append(possibleAnswers, id) } sort.Strings(possibleAnswers) @@ -181,21 +181,21 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry if _, exists := model.ParsedModelRoot.DataAssets["Configuration Secrets"]; !exists { dataAsset := model.InputDataAsset{ - ID: "configuration-secrets", - Description: "Configuration secrets (like credentials, keys, certificates, etc.) secured and managed by a vault", - Usage: model.DevOps.String(), - Tags: []string{}, - Origin: "", - Owner: "", - Quantity: model.VeryFew.String(), - Confidentiality: model.StrictlyConfidential.String(), - Integrity: model.Critical.String(), - Availability: model.Critical.String(), - Justification_cia_rating: "Configuration secrets are rated as being 'strictly-confidential'.", + ID: "configuration-secrets", + Description: "Configuration secrets (like credentials, keys, certificates, etc.) 
secured and managed by a vault", + Usage: model.DevOps.String(), + Tags: []string{}, + Origin: "", + Owner: "", + Quantity: model.VeryFew.String(), + Confidentiality: model.StrictlyConfidential.String(), + Integrity: model.Critical.String(), + Availability: model.Critical.String(), + JustificationCiaRating: "Configuration secrets are rated as being 'strictly-confidential'.", } *changeLogCollector = append(*changeLogCollector, "adding data asset: configuration-secrets") if !dryRun { - modelInput.Data_assets["Configuration Secrets"] = dataAsset + modelInput.DataAssets["Configuration Secrets"] = dataAsset } } @@ -213,35 +213,35 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry if _, exists := model.ParsedModelRoot.TechnicalAssets[storageID]; !exists { serverSideTechAssets = append(serverSideTechAssets, storageID) techAsset := model.InputTechnicalAsset{ - ID: storageID, - Description: "Vault Storage", - Type: model.Datastore.String(), - Usage: model.DevOps.String(), - Used_as_client_by_human: false, - Out_of_scope: false, - Justification_out_of_scope: "", - Size: model.Component.String(), - Technology: tech, - Tags: []string{}, // TODO: let user enter or too detailed for a wizard? - Internet: false, - Machine: model.Virtual.String(), // TODO: let user enter or too detailed for a wizard? 
- Encryption: model.DataWithSymmetricSharedKey.String(), // can be assumed for a vault product as at least having some good encryption - Owner: "", - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Critical.String(), - Justification_cia_rating: "Vault components are only rated as 'confidential' as vaults usually apply a trust barrier to encrypt all data-at-rest with a vault key.", - Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - Custom_developed_parts: false, - Data_assets_processed: nil, - Data_assets_stored: []string{"configuration-secrets"}, - Data_formats_accepted: nil, - Communication_links: nil, + ID: storageID, + Description: "Vault Storage", + Type: model.Datastore.String(), + Usage: model.DevOps.String(), + UsedAsClientByHuman: false, + OutOfScope: false, + JustificationOutOfScope: "", + Size: model.Component.String(), + Technology: tech, + Tags: []string{}, // TODO: let user enter or too detailed for a wizard? + Internet: false, + Machine: model.Virtual.String(), // TODO: let user enter or too detailed for a wizard? 
+ Encryption: model.DataWithSymmetricSharedKey.String(), // can be assumed for a vault product as at least having some good encryption + Owner: "", + Confidentiality: model.Confidential.String(), + Integrity: model.Critical.String(), + Availability: model.Critical.String(), + JustificationCiaRating: "Vault components are only rated as 'confidential' as vaults usually apply a trust barrier to encrypt all data-at-rest with a vault key.", + MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + CustomDevelopedParts: false, + DataAssetsProcessed: nil, + DataAssetsStored: []string{"configuration-secrets"}, + DataFormatsAccepted: nil, + CommunicationLinks: nil, } *changeLogCollector = append(*changeLogCollector, "adding technical asset: "+storageID) if !dryRun { - modelInput.Technical_assets["Vault Storage"] = techAsset + modelInput.TechnicalAssets["Vault Storage"] = techAsset } } } @@ -254,23 +254,23 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry if databaseUsed || filesystemUsed { accessLink := model.InputCommunicationLink{ - Target: storageID, - Description: "Vault Storage Access", - Protocol: model.LocalFileAccess.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - Usage: model.DevOps.String(), - Data_assets_sent: []string{"configuration-secrets"}, - Data_assets_received: []string{"configuration-secrets"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: storageID, + Description: "Vault Storage Access", + Protocol: model.LocalFileAccess.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"configuration-secrets"}, + DataAssetsReceived: 
[]string{"configuration-secrets"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } if databaseUsed { - accessLink.Protocol = model.SQL_access_protocol.String() // TODO ask if encrypted and ask if NoSQL? or to detailed for a wizard? + accessLink.Protocol = model.SqlAccessProtocol.String() // TODO ask if encrypted and ask if NoSQL? or to detailed for a wizard? } commLinks["Vault Storage Access"] = accessLink } @@ -287,31 +287,31 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } for _, clientID := range macroState["clients"] { // add a connection from each client clientAccessCommLink := model.InputCommunicationLink{ - Target: vaultID, - Description: "Vault Access Traffic (by " + clientID + ")", - Protocol: model.HTTPS.String(), - Authentication: authentication, - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: true, - Usage: model.DevOps.String(), - Data_assets_sent: nil, - Data_assets_received: []string{"configuration-secrets"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: vaultID, + Description: "Vault Access Traffic (by " + clientID + ")", + Protocol: model.HTTPS.String(), + Authentication: authentication, + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: true, + Usage: model.DevOps.String(), + DataAssetsSent: nil, + DataAssetsReceived: []string{"configuration-secrets"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } clientAssetTitle := model.ParsedModelRoot.TechnicalAssets[clientID].Title if !dryRun { - client := modelInput.Technical_assets[clientAssetTitle] - client.Communication_links["Vault Access ("+clientID+")"] = clientAccessCommLink - modelInput.Technical_assets[clientAssetTitle] = client + client := modelInput.TechnicalAssets[clientAssetTitle] + client.CommunicationLinks["Vault Access ("+clientID+")"] = clientAccessCommLink + 
modelInput.TechnicalAssets[clientAssetTitle] = client } // don't forget to also add the "configuration-secrets" data asset as processed on the client assetsProcessed := make([]string, 0) - if modelInput.Technical_assets[clientAssetTitle].Data_assets_processed != nil { - for _, val := range modelInput.Technical_assets[clientAssetTitle].Data_assets_processed { + if modelInput.TechnicalAssets[clientAssetTitle].DataAssetsProcessed != nil { + for _, val := range modelInput.TechnicalAssets[clientAssetTitle].DataAssetsProcessed { assetsProcessed = append(assetsProcessed, fmt.Sprintf("%v", val)) } } @@ -321,45 +321,45 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } mergedArrays = append(mergedArrays, "configuration-secrets") if !dryRun { - x := modelInput.Technical_assets[clientAssetTitle] - x.Data_assets_processed = mergedArrays - modelInput.Technical_assets[clientAssetTitle] = x + x := modelInput.TechnicalAssets[clientAssetTitle] + x.DataAssetsProcessed = mergedArrays + modelInput.TechnicalAssets[clientAssetTitle] = x } } techAsset := model.InputTechnicalAsset{ - ID: vaultID, - Description: macroState["vault-name"][0] + " Vault", - Type: model.Process.String(), - Usage: model.DevOps.String(), - Used_as_client_by_human: false, - Out_of_scope: false, - Justification_out_of_scope: "", - Size: model.Service.String(), - Technology: model.Vault.String(), - Tags: []string{model.NormalizeTag(macroState["vault-name"][0])}, - Internet: false, - Machine: model.Virtual.String(), - Encryption: model.Transparent.String(), - Owner: "", - Confidentiality: model.StrictlyConfidential.String(), - Integrity: model.Critical.String(), - Availability: model.Critical.String(), - Justification_cia_rating: "Vault components are rated as 'strictly-confidential'.", - Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - Custom_developed_parts: false, - Data_assets_processed: []string{"configuration-secrets"}, - 
Data_assets_stored: nil, - Data_formats_accepted: nil, - Communication_links: commLinks, + ID: vaultID, + Description: macroState["vault-name"][0] + " Vault", + Type: model.Process.String(), + Usage: model.DevOps.String(), + UsedAsClientByHuman: false, + OutOfScope: false, + JustificationOutOfScope: "", + Size: model.Service.String(), + Technology: model.Vault.String(), + Tags: []string{model.NormalizeTag(macroState["vault-name"][0])}, + Internet: false, + Machine: model.Virtual.String(), + Encryption: model.Transparent.String(), + Owner: "", + Confidentiality: model.StrictlyConfidential.String(), + Integrity: model.Critical.String(), + Availability: model.Critical.String(), + JustificationCiaRating: "Vault components are rated as 'strictly-confidential'.", + MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + CustomDevelopedParts: false, + DataAssetsProcessed: []string{"configuration-secrets"}, + DataAssetsStored: nil, + DataFormatsAccepted: nil, + CommunicationLinks: commLinks, } if inMemoryUsed { - techAsset.Data_assets_stored = []string{"configuration-secrets"} + techAsset.DataAssetsStored = []string{"configuration-secrets"} } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+vaultID) if !dryRun { - modelInput.Technical_assets[macroState["vault-name"][0]+" Vault"] = techAsset + modelInput.TechnicalAssets[macroState["vault-name"][0]+" Vault"] = techAsset } } @@ -367,16 +367,16 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry if filesystemUsed { title := "Vault Environment" trustBoundary := model.InputTrustBoundary{ - ID: vaultEnvID, - Description: "Vault Environment", - Type: model.ExecutionEnvironment.String(), - Tags: []string{}, - Technical_assets_inside: []string{vaultID, storageID}, - Trust_boundaries_nested: nil, + ID: vaultEnvID, + Description: "Vault Environment", + Type: model.ExecutionEnvironment.String(), + Tags: 
[]string{}, + TechnicalAssetsInside: []string{vaultID, storageID}, + TrustBoundariesNested: nil, } *changeLogCollector = append(*changeLogCollector, "adding trust boundary: "+vaultEnvID) if !dryRun { - modelInput.Trust_boundaries[title] = trustBoundary + modelInput.TrustBoundaries[title] = trustBoundary } } @@ -391,13 +391,13 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry Tags: []string{}, } if filesystemUsed { - trustBoundary.Trust_boundaries_nested = []string{vaultEnvID} + trustBoundary.TrustBoundariesNested = []string{vaultEnvID} } else { - trustBoundary.Technical_assets_inside = serverSideTechAssets + trustBoundary.TechnicalAssetsInside = serverSideTechAssets } *changeLogCollector = append(*changeLogCollector, "adding trust boundary: vault-network") if !dryRun { - modelInput.Trust_boundaries[title] = trustBoundary + modelInput.TrustBoundaries[title] = trustBoundary } } else { // adding to existing trust boundary existingTrustBoundaryToAddTo := macroState["selected-trust-boundary"][0] @@ -405,9 +405,9 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry if filesystemUsed { // ---------------------- nest as execution-environment trust boundary ---------------------- boundariesNested := make([]string, 0) - if modelInput.Trust_boundaries[title].Trust_boundaries_nested != nil { - vals := modelInput.Trust_boundaries[title].Trust_boundaries_nested - for _, val := range vals { + if modelInput.TrustBoundaries[title].TrustBoundariesNested != nil { + values := modelInput.TrustBoundaries[title].TrustBoundariesNested + for _, val := range values { boundariesNested = append(boundariesNested, fmt.Sprintf("%v", val)) } } @@ -418,15 +418,15 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry mergedArrays = append(mergedArrays, vaultEnvID) *changeLogCollector = append(*changeLogCollector, "filling existing trust boundary: "+existingTrustBoundaryToAddTo) if !dryRun { - tb := 
modelInput.Trust_boundaries[title] - tb.Trust_boundaries_nested = mergedArrays - modelInput.Trust_boundaries[title] = tb + tb := modelInput.TrustBoundaries[title] + tb.TrustBoundariesNested = mergedArrays + modelInput.TrustBoundaries[title] = tb } } else { // ---------------------- place assets inside directly ---------------------- assetsInside := make([]string, 0) - if modelInput.Trust_boundaries[title].Technical_assets_inside != nil { - vals := modelInput.Trust_boundaries[title].Technical_assets_inside - for _, val := range vals { + if modelInput.TrustBoundaries[title].TechnicalAssetsInside != nil { + values := modelInput.TrustBoundaries[title].TechnicalAssetsInside + for _, val := range values { assetsInside = append(assetsInside, fmt.Sprintf("%v", val)) } } @@ -437,9 +437,9 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry mergedArrays = append(mergedArrays, serverSideTechAssets...) *changeLogCollector = append(*changeLogCollector, "filling existing trust boundary: "+existingTrustBoundaryToAddTo) if !dryRun { - tb := modelInput.Trust_boundaries[title] - tb.Technical_assets_inside = mergedArrays - modelInput.Trust_boundaries[title] = tb + tb := modelInput.TrustBoundaries[title] + tb.TechnicalAssetsInside = mergedArrays + modelInput.TrustBoundaries[title] = tb } } } diff --git a/macros/built-in/pretty-print/pretty-print-macro.go b/macros/built-in/pretty-print/pretty-print-macro.go index 64149c57..51c05c05 100644 --- a/macros/built-in/pretty-print/pretty-print-macro.go +++ b/macros/built-in/pretty-print/pretty-print-macro.go @@ -14,7 +14,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { return model.NoMoreQuestions(), nil } -func ApplyAnswer(questionID string, answer ...string) (message string, validResult bool, err error) { +func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { return "Answer processed", true, nil } @@ -22,10 +22,10 @@ func GoBack() (message string, 
validResult bool, err error) { return "Cannot go back further", false, nil } -func GetFinalChangeImpact(modelInput *model.ModelInput) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(_ *model.ModelInput) (changes []string, message string, validResult bool, err error) { return []string{"pretty-printing the model file"}, "Changeset valid", true, err } -func Execute(modelInput *model.ModelInput) (message string, validResult bool, err error) { +func Execute(_ *model.ModelInput) (message string, validResult bool, err error) { return "Model pretty printing successful", true, nil } diff --git a/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go b/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go index f0deaec9..478ce5a2 100644 --- a/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go +++ b/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go @@ -18,7 +18,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { return model.NoMoreQuestions(), nil } -func ApplyAnswer(questionID string, answer ...string) (message string, validResult bool, err error) { +func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { return "Answer processed", true, nil } @@ -26,12 +26,12 @@ func GoBack() (message string, validResult bool, err error) { return "Cannot go back further", false, nil } -func GetFinalChangeImpact(modelInput *model.ModelInput) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(_ *model.ModelInput) (changes []string, message string, validResult bool, err error) { return []string{"remove unused tags from the model file"}, "Changeset valid", true, err } func Execute(modelInput *model.ModelInput) (message string, validResult bool, err error) { - tagUsageMap := make(map[string]bool, 0) + tagUsageMap := make(map[string]bool) for _, tag := range model.ParsedModelRoot.TagsAvailable { tagUsageMap[tag] = 
false // false = tag is not used } @@ -70,6 +70,6 @@ func Execute(modelInput *model.ModelInput) (message string, validResult bool, er } } sort.Strings(tagsSorted) - modelInput.Tags_available = tagsSorted + modelInput.TagsAvailable = tagsSorted return "Model file removal of " + strconv.Itoa(counter) + " unused tags successful", true, nil } diff --git a/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go b/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go index 9a64557a..8635c07b 100644 --- a/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go +++ b/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go @@ -18,7 +18,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { return model.NoMoreQuestions(), nil } -func ApplyAnswer(questionID string, answer ...string) (message string, validResult bool, err error) { +func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { return "Answer processed", true, nil } @@ -26,7 +26,7 @@ func GoBack() (message string, validResult bool, err error) { return "Cannot go back further", false, nil } -func GetFinalChangeImpact(modelInput *model.ModelInput) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(_ *model.ModelInput) (changes []string, message string, validResult bool, err error) { return []string{"seed the model file with with initial risk tracking entries for all untracked risks"}, "Changeset valid", true, err } @@ -38,16 +38,16 @@ func Execute(modelInput *model.ModelInput) (message string, validResult bool, er } } sort.Strings(syntheticRiskIDsToCreateTrackingFor) - if modelInput.Risk_tracking == nil { - modelInput.Risk_tracking = make(map[string]model.InputRiskTracking, 0) + if modelInput.RiskTracking == nil { + modelInput.RiskTracking = make(map[string]model.InputRiskTracking) } for _, id := range syntheticRiskIDsToCreateTrackingFor { - modelInput.Risk_tracking[id] = model.InputRiskTracking{ 
+ modelInput.RiskTracking[id] = model.InputRiskTracking{ Status: model.Unchecked.String(), Justification: "", Ticket: "", Date: "", - Checked_by: "", + CheckedBy: "", } } return "Model file seeding with " + strconv.Itoa(len(syntheticRiskIDsToCreateTrackingFor)) + " initial risk tracking successful", true, nil diff --git a/macros/built-in/seed-tags/seed-tags-macro.go b/macros/built-in/seed-tags/seed-tags-macro.go index fc65c414..427a5281 100644 --- a/macros/built-in/seed-tags/seed-tags-macro.go +++ b/macros/built-in/seed-tags/seed-tags-macro.go @@ -18,7 +18,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { return model.NoMoreQuestions(), nil } -func ApplyAnswer(questionID string, answer ...string) (message string, validResult bool, err error) { +func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { return "Answer processed", true, nil } @@ -26,12 +26,12 @@ func GoBack() (message string, validResult bool, err error) { return "Cannot go back further", false, nil } -func GetFinalChangeImpact(modelInput *model.ModelInput) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(_ *model.ModelInput) (changes []string, message string, validResult bool, err error) { return []string{"seed the model file with supported tags from all risk rules"}, "Changeset valid", true, err } func Execute(modelInput *model.ModelInput) (message string, validResult bool, err error) { - tagMap := make(map[string]bool, 0) + tagMap := make(map[string]bool) for k, v := range model.AllSupportedTags { tagMap[k] = v } @@ -43,6 +43,6 @@ func Execute(modelInput *model.ModelInput) (message string, validResult bool, er tagsSorted = append(tagsSorted, tag) } sort.Strings(tagsSorted) - modelInput.Tags_available = tagsSorted + modelInput.TagsAvailable = tagsSorted return "Model file seeding with " + strconv.Itoa(len(model.AllSupportedTags)) + " tags successful", true, nil } diff --git a/main.go b/main.go index 
938047aa..4ff9f5fe 100644 --- a/main.go +++ b/main.go @@ -17,7 +17,6 @@ import ( "fmt" "hash/fnv" "io" - "io/ioutil" "log" "net/http" "os" @@ -34,56 +33,56 @@ import ( "github.com/gin-gonic/gin" "github.com/google/uuid" "github.com/threagile/threagile/colors" - add_build_pipeline "github.com/threagile/threagile/macros/built-in/add-build-pipeline" - add_vault "github.com/threagile/threagile/macros/built-in/add-vault" - pretty_print "github.com/threagile/threagile/macros/built-in/pretty-print" - remove_unused_tags "github.com/threagile/threagile/macros/built-in/remove-unused-tags" - seed_risk_tracking "github.com/threagile/threagile/macros/built-in/seed-risk-tracking" - seed_tags "github.com/threagile/threagile/macros/built-in/seed-tags" + addbuildpipeline "github.com/threagile/threagile/macros/built-in/add-build-pipeline" + addvault "github.com/threagile/threagile/macros/built-in/add-vault" + prettyprint "github.com/threagile/threagile/macros/built-in/pretty-print" + removeunusedtags "github.com/threagile/threagile/macros/built-in/remove-unused-tags" + seedrisktracking "github.com/threagile/threagile/macros/built-in/seed-risk-tracking" + seedtags "github.com/threagile/threagile/macros/built-in/seed-tags" "github.com/threagile/threagile/model" "github.com/threagile/threagile/report" - accidental_secret_leak "github.com/threagile/threagile/risks/built-in/accidental-secret-leak" - code_backdooring "github.com/threagile/threagile/risks/built-in/code-backdooring" - container_baseimage_backdooring "github.com/threagile/threagile/risks/built-in/container-baseimage-backdooring" - container_platform_escape "github.com/threagile/threagile/risks/built-in/container-platform-escape" - cross_site_request_forgery "github.com/threagile/threagile/risks/built-in/cross-site-request-forgery" - cross_site_scripting "github.com/threagile/threagile/risks/built-in/cross-site-scripting" - dos_risky_access_across_trust_boundary 
"github.com/threagile/threagile/risks/built-in/dos-risky-access-across-trust-boundary" - incomplete_model "github.com/threagile/threagile/risks/built-in/incomplete-model" - ldap_injection "github.com/threagile/threagile/risks/built-in/ldap-injection" - missing_authentication "github.com/threagile/threagile/risks/built-in/missing-authentication" - missing_authentication_second_factor "github.com/threagile/threagile/risks/built-in/missing-authentication-second-factor" - missing_build_infrastructure "github.com/threagile/threagile/risks/built-in/missing-build-infrastructure" - missing_cloud_hardening "github.com/threagile/threagile/risks/built-in/missing-cloud-hardening" - missing_file_validation "github.com/threagile/threagile/risks/built-in/missing-file-validation" - missing_hardening "github.com/threagile/threagile/risks/built-in/missing-hardening" - missing_identity_propagation "github.com/threagile/threagile/risks/built-in/missing-identity-propagation" - missing_identity_provider_isolation "github.com/threagile/threagile/risks/built-in/missing-identity-provider-isolation" - missing_identity_store "github.com/threagile/threagile/risks/built-in/missing-identity-store" - missing_network_segmentation "github.com/threagile/threagile/risks/built-in/missing-network-segmentation" - missing_vault "github.com/threagile/threagile/risks/built-in/missing-vault" - missing_vault_isolation "github.com/threagile/threagile/risks/built-in/missing-vault-isolation" - missing_waf "github.com/threagile/threagile/risks/built-in/missing-waf" - mixed_targets_on_shared_runtime "github.com/threagile/threagile/risks/built-in/mixed-targets-on-shared-runtime" - path_traversal "github.com/threagile/threagile/risks/built-in/path-traversal" - push_instead_of_pull_deployment "github.com/threagile/threagile/risks/built-in/push-instead-of-pull-deployment" - search_query_injection "github.com/threagile/threagile/risks/built-in/search-query-injection" - server_side_request_forgery 
"github.com/threagile/threagile/risks/built-in/server-side-request-forgery" - service_registry_poisoning "github.com/threagile/threagile/risks/built-in/service-registry-poisoning" - sql_nosql_injection "github.com/threagile/threagile/risks/built-in/sql-nosql-injection" - unchecked_deployment "github.com/threagile/threagile/risks/built-in/unchecked-deployment" - unencrypted_asset "github.com/threagile/threagile/risks/built-in/unencrypted-asset" - unencrypted_communication "github.com/threagile/threagile/risks/built-in/unencrypted-communication" - unguarded_access_from_internet "github.com/threagile/threagile/risks/built-in/unguarded-access-from-internet" - unguarded_direct_datastore_access "github.com/threagile/threagile/risks/built-in/unguarded-direct-datastore-access" - unnecessary_communication_link "github.com/threagile/threagile/risks/built-in/unnecessary-communication-link" - unnecessary_data_asset "github.com/threagile/threagile/risks/built-in/unnecessary-data-asset" - unnecessary_data_transfer "github.com/threagile/threagile/risks/built-in/unnecessary-data-transfer" - unnecessary_technical_asset "github.com/threagile/threagile/risks/built-in/unnecessary-technical-asset" - untrusted_deserialization "github.com/threagile/threagile/risks/built-in/untrusted-deserialization" - wrong_communication_link_content "github.com/threagile/threagile/risks/built-in/wrong-communication-link-content" - wrong_trust_boundary_content "github.com/threagile/threagile/risks/built-in/wrong-trust-boundary-content" - xml_external_entity "github.com/threagile/threagile/risks/built-in/xml-external-entity" + accidentalsecretleak "github.com/threagile/threagile/risks/built-in/accidental-secret-leak" + codebackdooring "github.com/threagile/threagile/risks/built-in/code-backdooring" + containerbaseimagebackdooring "github.com/threagile/threagile/risks/built-in/container-baseimage-backdooring" + containerplatformescape 
"github.com/threagile/threagile/risks/built-in/container-platform-escape" + crosssiterequestforgery "github.com/threagile/threagile/risks/built-in/cross-site-request-forgery" + crosssitescripting "github.com/threagile/threagile/risks/built-in/cross-site-scripting" + dosriskyaccessacrosstrustboundary "github.com/threagile/threagile/risks/built-in/dos-risky-access-across-trust-boundary" + incompletemodel "github.com/threagile/threagile/risks/built-in/incomplete-model" + ldapinjection "github.com/threagile/threagile/risks/built-in/ldap-injection" + missingauthentication "github.com/threagile/threagile/risks/built-in/missing-authentication" + missingauthenticationsecondfactor "github.com/threagile/threagile/risks/built-in/missing-authentication-second-factor" + missingbuildinfrastructure "github.com/threagile/threagile/risks/built-in/missing-build-infrastructure" + missingcloudhardening "github.com/threagile/threagile/risks/built-in/missing-cloud-hardening" + missingfilevalidation "github.com/threagile/threagile/risks/built-in/missing-file-validation" + missinghardening "github.com/threagile/threagile/risks/built-in/missing-hardening" + missingidentitypropagation "github.com/threagile/threagile/risks/built-in/missing-identity-propagation" + missingidentityproviderisolation "github.com/threagile/threagile/risks/built-in/missing-identity-provider-isolation" + missingidentitystore "github.com/threagile/threagile/risks/built-in/missing-identity-store" + missingnetworksegmentation "github.com/threagile/threagile/risks/built-in/missing-network-segmentation" + missingvault "github.com/threagile/threagile/risks/built-in/missing-vault" + missingvaultisolation "github.com/threagile/threagile/risks/built-in/missing-vault-isolation" + missingwaf "github.com/threagile/threagile/risks/built-in/missing-waf" + mixedtargetsonsharedruntime "github.com/threagile/threagile/risks/built-in/mixed-targets-on-shared-runtime" + pathtraversal 
"github.com/threagile/threagile/risks/built-in/path-traversal" + pushinsteadofpulldeployment "github.com/threagile/threagile/risks/built-in/push-instead-of-pull-deployment" + searchqueryinjection "github.com/threagile/threagile/risks/built-in/search-query-injection" + serversiderequestforgery "github.com/threagile/threagile/risks/built-in/server-side-request-forgery" + serviceregistrypoisoning "github.com/threagile/threagile/risks/built-in/service-registry-poisoning" + sqlnosqlinjection "github.com/threagile/threagile/risks/built-in/sql-nosql-injection" + uncheckeddeployment "github.com/threagile/threagile/risks/built-in/unchecked-deployment" + unencryptedasset "github.com/threagile/threagile/risks/built-in/unencrypted-asset" + unencryptedcommunication "github.com/threagile/threagile/risks/built-in/unencrypted-communication" + unguardedaccessfrominternet "github.com/threagile/threagile/risks/built-in/unguarded-access-from-internet" + unguardeddirectdatastoreaccess "github.com/threagile/threagile/risks/built-in/unguarded-direct-datastore-access" + unnecessarycommunicationlink "github.com/threagile/threagile/risks/built-in/unnecessary-communication-link" + unnecessarydataasset "github.com/threagile/threagile/risks/built-in/unnecessary-data-asset" + unnecessarydatatransfer "github.com/threagile/threagile/risks/built-in/unnecessary-data-transfer" + unnecessarytechnicalasset "github.com/threagile/threagile/risks/built-in/unnecessary-technical-asset" + untrusteddeserialization "github.com/threagile/threagile/risks/built-in/untrusted-deserialization" + wrongcommunicationlinkcontent "github.com/threagile/threagile/risks/built-in/wrong-communication-link-content" + wrongtrustboundarycontent "github.com/threagile/threagile/risks/built-in/wrong-trust-boundary-content" + xmlexternalentity "github.com/threagile/threagile/risks/built-in/xml-external-entity" "golang.org/x/crypto/argon2" "gopkg.in/yaml.v3" ) @@ -123,465 +122,465 @@ func applyRiskGeneration() { } } - if _, ok := 
skippedRules[unencrypted_asset.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unencrypted_asset.Category().Id) - delete(skippedRules, unencrypted_asset.Category().Id) + if _, ok := skippedRules[unencryptedasset.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unencryptedasset.Category().Id) + delete(skippedRules, unencryptedasset.Category().Id) } else { - model.AddToListOfSupportedTags(unencrypted_asset.SupportedTags()) - risks := unencrypted_asset.GenerateRisks() + model.AddToListOfSupportedTags(unencryptedasset.SupportedTags()) + risks := unencryptedasset.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unencrypted_asset.Category()] = risks + model.GeneratedRisksByCategory[unencryptedasset.Category()] = risks } } - if _, ok := skippedRules[unencrypted_communication.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unencrypted_communication.Category().Id) - delete(skippedRules, unencrypted_communication.Category().Id) + if _, ok := skippedRules[unencryptedcommunication.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unencryptedcommunication.Category().Id) + delete(skippedRules, unencryptedcommunication.Category().Id) } else { - model.AddToListOfSupportedTags(unencrypted_communication.SupportedTags()) - risks := unencrypted_communication.GenerateRisks() + model.AddToListOfSupportedTags(unencryptedcommunication.SupportedTags()) + risks := unencryptedcommunication.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unencrypted_communication.Category()] = risks + model.GeneratedRisksByCategory[unencryptedcommunication.Category()] = risks } } - if _, ok := skippedRules[unguarded_direct_datastore_access.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unguarded_direct_datastore_access.Category().Id) - delete(skippedRules, unguarded_direct_datastore_access.Category().Id) + if _, ok := skippedRules[unguardeddirectdatastoreaccess.Category().Id]; ok { + fmt.Println("Skipping risk rule:", 
unguardeddirectdatastoreaccess.Category().Id) + delete(skippedRules, unguardeddirectdatastoreaccess.Category().Id) } else { - model.AddToListOfSupportedTags(unguarded_direct_datastore_access.SupportedTags()) - risks := unguarded_direct_datastore_access.GenerateRisks() + model.AddToListOfSupportedTags(unguardeddirectdatastoreaccess.SupportedTags()) + risks := unguardeddirectdatastoreaccess.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unguarded_direct_datastore_access.Category()] = risks + model.GeneratedRisksByCategory[unguardeddirectdatastoreaccess.Category()] = risks } } - if _, ok := skippedRules[unguarded_access_from_internet.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unguarded_access_from_internet.Category().Id) - delete(skippedRules, unguarded_access_from_internet.Category().Id) + if _, ok := skippedRules[unguardedaccessfrominternet.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unguardedaccessfrominternet.Category().Id) + delete(skippedRules, unguardedaccessfrominternet.Category().Id) } else { - model.AddToListOfSupportedTags(unguarded_access_from_internet.SupportedTags()) - risks := unguarded_access_from_internet.GenerateRisks() + model.AddToListOfSupportedTags(unguardedaccessfrominternet.SupportedTags()) + risks := unguardedaccessfrominternet.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unguarded_access_from_internet.Category()] = risks + model.GeneratedRisksByCategory[unguardedaccessfrominternet.Category()] = risks } } - if _, ok := skippedRules[dos_risky_access_across_trust_boundary.Category().Id]; ok { - fmt.Println("Skipping risk rule:", dos_risky_access_across_trust_boundary.Category().Id) - delete(skippedRules, dos_risky_access_across_trust_boundary.Category().Id) + if _, ok := skippedRules[dosriskyaccessacrosstrustboundary.Category().Id]; ok { + fmt.Println("Skipping risk rule:", dosriskyaccessacrosstrustboundary.Category().Id) + delete(skippedRules, 
dosriskyaccessacrosstrustboundary.Category().Id) } else { - model.AddToListOfSupportedTags(dos_risky_access_across_trust_boundary.SupportedTags()) - risks := dos_risky_access_across_trust_boundary.GenerateRisks() + model.AddToListOfSupportedTags(dosriskyaccessacrosstrustboundary.SupportedTags()) + risks := dosriskyaccessacrosstrustboundary.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[dos_risky_access_across_trust_boundary.Category()] = risks + model.GeneratedRisksByCategory[dosriskyaccessacrosstrustboundary.Category()] = risks } } - if _, ok := skippedRules[missing_network_segmentation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_network_segmentation.Category().Id) - delete(skippedRules, missing_network_segmentation.Category().Id) + if _, ok := skippedRules[missingnetworksegmentation.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingnetworksegmentation.Category().Id) + delete(skippedRules, missingnetworksegmentation.Category().Id) } else { - model.AddToListOfSupportedTags(missing_network_segmentation.SupportedTags()) - risks := missing_network_segmentation.GenerateRisks() + model.AddToListOfSupportedTags(missingnetworksegmentation.SupportedTags()) + risks := missingnetworksegmentation.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_network_segmentation.Category()] = risks + model.GeneratedRisksByCategory[missingnetworksegmentation.Category()] = risks } } - if _, ok := skippedRules[mixed_targets_on_shared_runtime.Category().Id]; ok { - fmt.Println("Skipping risk rule:", mixed_targets_on_shared_runtime.Category().Id) - delete(skippedRules, mixed_targets_on_shared_runtime.Category().Id) + if _, ok := skippedRules[mixedtargetsonsharedruntime.Category().Id]; ok { + fmt.Println("Skipping risk rule:", mixedtargetsonsharedruntime.Category().Id) + delete(skippedRules, mixedtargetsonsharedruntime.Category().Id) } else { - 
model.AddToListOfSupportedTags(mixed_targets_on_shared_runtime.SupportedTags()) - risks := mixed_targets_on_shared_runtime.GenerateRisks() + model.AddToListOfSupportedTags(mixedtargetsonsharedruntime.SupportedTags()) + risks := mixedtargetsonsharedruntime.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[mixed_targets_on_shared_runtime.Category()] = risks + model.GeneratedRisksByCategory[mixedtargetsonsharedruntime.Category()] = risks } } - if _, ok := skippedRules[missing_identity_propagation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_identity_propagation.Category().Id) - delete(skippedRules, missing_identity_propagation.Category().Id) + if _, ok := skippedRules[missingidentitypropagation.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingidentitypropagation.Category().Id) + delete(skippedRules, missingidentitypropagation.Category().Id) } else { - model.AddToListOfSupportedTags(missing_identity_propagation.SupportedTags()) - risks := missing_identity_propagation.GenerateRisks() + model.AddToListOfSupportedTags(missingidentitypropagation.SupportedTags()) + risks := missingidentitypropagation.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_identity_propagation.Category()] = risks + model.GeneratedRisksByCategory[missingidentitypropagation.Category()] = risks } } - if _, ok := skippedRules[missing_identity_store.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_identity_store.Category().Id) - delete(skippedRules, missing_identity_store.Category().Id) + if _, ok := skippedRules[missingidentitystore.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingidentitystore.Category().Id) + delete(skippedRules, missingidentitystore.Category().Id) } else { - model.AddToListOfSupportedTags(missing_identity_store.SupportedTags()) - risks := missing_identity_store.GenerateRisks() + model.AddToListOfSupportedTags(missingidentitystore.SupportedTags()) + risks := 
missingidentitystore.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_identity_store.Category()] = risks + model.GeneratedRisksByCategory[missingidentitystore.Category()] = risks } } - if _, ok := skippedRules[missing_authentication.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_authentication.Category().Id) - delete(skippedRules, missing_authentication.Category().Id) + if _, ok := skippedRules[missingauthentication.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingauthentication.Category().Id) + delete(skippedRules, missingauthentication.Category().Id) } else { - model.AddToListOfSupportedTags(missing_authentication.SupportedTags()) - risks := missing_authentication.GenerateRisks() + model.AddToListOfSupportedTags(missingauthentication.SupportedTags()) + risks := missingauthentication.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_authentication.Category()] = risks + model.GeneratedRisksByCategory[missingauthentication.Category()] = risks } } - if _, ok := skippedRules[missing_authentication_second_factor.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_authentication_second_factor.Category().Id) - delete(skippedRules, missing_authentication_second_factor.Category().Id) + if _, ok := skippedRules[missingauthenticationsecondfactor.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingauthenticationsecondfactor.Category().Id) + delete(skippedRules, missingauthenticationsecondfactor.Category().Id) } else { - model.AddToListOfSupportedTags(missing_authentication_second_factor.SupportedTags()) - risks := missing_authentication_second_factor.GenerateRisks() + model.AddToListOfSupportedTags(missingauthenticationsecondfactor.SupportedTags()) + risks := missingauthenticationsecondfactor.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_authentication_second_factor.Category()] = risks + 
model.GeneratedRisksByCategory[missingauthenticationsecondfactor.Category()] = risks } } - if _, ok := skippedRules[unnecessary_data_transfer.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unnecessary_data_transfer.Category().Id) - delete(skippedRules, unnecessary_data_transfer.Category().Id) + if _, ok := skippedRules[unnecessarydatatransfer.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unnecessarydatatransfer.Category().Id) + delete(skippedRules, unnecessarydatatransfer.Category().Id) } else { - model.AddToListOfSupportedTags(unnecessary_data_transfer.SupportedTags()) - risks := unnecessary_data_transfer.GenerateRisks() + model.AddToListOfSupportedTags(unnecessarydatatransfer.SupportedTags()) + risks := unnecessarydatatransfer.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unnecessary_data_transfer.Category()] = risks + model.GeneratedRisksByCategory[unnecessarydatatransfer.Category()] = risks } } - if _, ok := skippedRules[unnecessary_communication_link.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unnecessary_communication_link.Category().Id) - delete(skippedRules, unnecessary_communication_link.Category().Id) + if _, ok := skippedRules[unnecessarycommunicationlink.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unnecessarycommunicationlink.Category().Id) + delete(skippedRules, unnecessarycommunicationlink.Category().Id) } else { - model.AddToListOfSupportedTags(unnecessary_communication_link.SupportedTags()) - risks := unnecessary_communication_link.GenerateRisks() + model.AddToListOfSupportedTags(unnecessarycommunicationlink.SupportedTags()) + risks := unnecessarycommunicationlink.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unnecessary_communication_link.Category()] = risks + model.GeneratedRisksByCategory[unnecessarycommunicationlink.Category()] = risks } } - if _, ok := skippedRules[unnecessary_technical_asset.Category().Id]; ok { - fmt.Println("Skipping risk rule:", 
unnecessary_technical_asset.Category().Id) - delete(skippedRules, unnecessary_technical_asset.Category().Id) + if _, ok := skippedRules[unnecessarytechnicalasset.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unnecessarytechnicalasset.Category().Id) + delete(skippedRules, unnecessarytechnicalasset.Category().Id) } else { - model.AddToListOfSupportedTags(unnecessary_technical_asset.SupportedTags()) - risks := unnecessary_technical_asset.GenerateRisks() + model.AddToListOfSupportedTags(unnecessarytechnicalasset.SupportedTags()) + risks := unnecessarytechnicalasset.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unnecessary_technical_asset.Category()] = risks + model.GeneratedRisksByCategory[unnecessarytechnicalasset.Category()] = risks } } - if _, ok := skippedRules[unnecessary_data_asset.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unnecessary_data_asset.Category().Id) - delete(skippedRules, unnecessary_data_asset.Category().Id) + if _, ok := skippedRules[unnecessarydataasset.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unnecessarydataasset.Category().Id) + delete(skippedRules, unnecessarydataasset.Category().Id) } else { - model.AddToListOfSupportedTags(unnecessary_data_asset.SupportedTags()) - risks := unnecessary_data_asset.GenerateRisks() + model.AddToListOfSupportedTags(unnecessarydataasset.SupportedTags()) + risks := unnecessarydataasset.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unnecessary_data_asset.Category()] = risks + model.GeneratedRisksByCategory[unnecessarydataasset.Category()] = risks } } - if _, ok := skippedRules[sql_nosql_injection.Category().Id]; ok { - fmt.Println("Skipping risk rule:", sql_nosql_injection.Category().Id) - delete(skippedRules, sql_nosql_injection.Category().Id) + if _, ok := skippedRules[sqlnosqlinjection.Category().Id]; ok { + fmt.Println("Skipping risk rule:", sqlnosqlinjection.Category().Id) + delete(skippedRules, 
sqlnosqlinjection.Category().Id) } else { - model.AddToListOfSupportedTags(sql_nosql_injection.SupportedTags()) - risks := sql_nosql_injection.GenerateRisks() + model.AddToListOfSupportedTags(sqlnosqlinjection.SupportedTags()) + risks := sqlnosqlinjection.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[sql_nosql_injection.Category()] = risks + model.GeneratedRisksByCategory[sqlnosqlinjection.Category()] = risks } } - if _, ok := skippedRules[ldap_injection.Category().Id]; ok { - fmt.Println("Skipping risk rule:", ldap_injection.Category().Id) - delete(skippedRules, ldap_injection.Category().Id) + if _, ok := skippedRules[ldapinjection.Category().Id]; ok { + fmt.Println("Skipping risk rule:", ldapinjection.Category().Id) + delete(skippedRules, ldapinjection.Category().Id) } else { - model.AddToListOfSupportedTags(ldap_injection.SupportedTags()) - risks := ldap_injection.GenerateRisks() + model.AddToListOfSupportedTags(ldapinjection.SupportedTags()) + risks := ldapinjection.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[ldap_injection.Category()] = risks + model.GeneratedRisksByCategory[ldapinjection.Category()] = risks } } - if _, ok := skippedRules[cross_site_scripting.Category().Id]; ok { - fmt.Println("Skipping risk rule:", cross_site_scripting.Category().Id) - delete(skippedRules, cross_site_scripting.Category().Id) + if _, ok := skippedRules[crosssitescripting.Category().Id]; ok { + fmt.Println("Skipping risk rule:", crosssitescripting.Category().Id) + delete(skippedRules, crosssitescripting.Category().Id) } else { - model.AddToListOfSupportedTags(cross_site_scripting.SupportedTags()) - risks := cross_site_scripting.GenerateRisks() + model.AddToListOfSupportedTags(crosssitescripting.SupportedTags()) + risks := crosssitescripting.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[cross_site_scripting.Category()] = risks + model.GeneratedRisksByCategory[crosssitescripting.Category()] = risks } } - if _, 
ok := skippedRules[cross_site_request_forgery.Category().Id]; ok { - fmt.Println("Skipping risk rule:", cross_site_request_forgery.Category().Id) - delete(skippedRules, cross_site_request_forgery.Category().Id) + if _, ok := skippedRules[crosssiterequestforgery.Category().Id]; ok { + fmt.Println("Skipping risk rule:", crosssiterequestforgery.Category().Id) + delete(skippedRules, crosssiterequestforgery.Category().Id) } else { - model.AddToListOfSupportedTags(cross_site_request_forgery.SupportedTags()) - risks := cross_site_request_forgery.GenerateRisks() + model.AddToListOfSupportedTags(crosssiterequestforgery.SupportedTags()) + risks := crosssiterequestforgery.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[cross_site_request_forgery.Category()] = risks + model.GeneratedRisksByCategory[crosssiterequestforgery.Category()] = risks } } - if _, ok := skippedRules[server_side_request_forgery.Category().Id]; ok { - fmt.Println("Skipping risk rule:", server_side_request_forgery.Category().Id) - delete(skippedRules, server_side_request_forgery.Category().Id) + if _, ok := skippedRules[serversiderequestforgery.Category().Id]; ok { + fmt.Println("Skipping risk rule:", serversiderequestforgery.Category().Id) + delete(skippedRules, serversiderequestforgery.Category().Id) } else { - model.AddToListOfSupportedTags(server_side_request_forgery.SupportedTags()) - risks := server_side_request_forgery.GenerateRisks() + model.AddToListOfSupportedTags(serversiderequestforgery.SupportedTags()) + risks := serversiderequestforgery.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[server_side_request_forgery.Category()] = risks + model.GeneratedRisksByCategory[serversiderequestforgery.Category()] = risks } } - if _, ok := skippedRules[path_traversal.Category().Id]; ok { - fmt.Println("Skipping risk rule:", path_traversal.Category().Id) - delete(skippedRules, path_traversal.Category().Id) + if _, ok := skippedRules[pathtraversal.Category().Id]; ok { 
+ fmt.Println("Skipping risk rule:", pathtraversal.Category().Id) + delete(skippedRules, pathtraversal.Category().Id) } else { - model.AddToListOfSupportedTags(path_traversal.SupportedTags()) - risks := path_traversal.GenerateRisks() + model.AddToListOfSupportedTags(pathtraversal.SupportedTags()) + risks := pathtraversal.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[path_traversal.Category()] = risks + model.GeneratedRisksByCategory[pathtraversal.Category()] = risks } } - if _, ok := skippedRules[push_instead_of_pull_deployment.Category().Id]; ok { - fmt.Println("Skipping risk rule:", push_instead_of_pull_deployment.Category().Id) - delete(skippedRules, push_instead_of_pull_deployment.Category().Id) + if _, ok := skippedRules[pushinsteadofpulldeployment.Category().Id]; ok { + fmt.Println("Skipping risk rule:", pushinsteadofpulldeployment.Category().Id) + delete(skippedRules, pushinsteadofpulldeployment.Category().Id) } else { - model.AddToListOfSupportedTags(push_instead_of_pull_deployment.SupportedTags()) - risks := push_instead_of_pull_deployment.GenerateRisks() + model.AddToListOfSupportedTags(pushinsteadofpulldeployment.SupportedTags()) + risks := pushinsteadofpulldeployment.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[push_instead_of_pull_deployment.Category()] = risks + model.GeneratedRisksByCategory[pushinsteadofpulldeployment.Category()] = risks } } - if _, ok := skippedRules[search_query_injection.Category().Id]; ok { - fmt.Println("Skipping risk rule:", search_query_injection.Category().Id) - delete(skippedRules, search_query_injection.Category().Id) + if _, ok := skippedRules[searchqueryinjection.Category().Id]; ok { + fmt.Println("Skipping risk rule:", searchqueryinjection.Category().Id) + delete(skippedRules, searchqueryinjection.Category().Id) } else { - model.AddToListOfSupportedTags(search_query_injection.SupportedTags()) - risks := search_query_injection.GenerateRisks() + 
model.AddToListOfSupportedTags(searchqueryinjection.SupportedTags()) + risks := searchqueryinjection.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[search_query_injection.Category()] = risks + model.GeneratedRisksByCategory[searchqueryinjection.Category()] = risks } } - if _, ok := skippedRules[service_registry_poisoning.Category().Id]; ok { - fmt.Println("Skipping risk rule:", service_registry_poisoning.Category().Id) - delete(skippedRules, service_registry_poisoning.Category().Id) + if _, ok := skippedRules[serviceregistrypoisoning.Category().Id]; ok { + fmt.Println("Skipping risk rule:", serviceregistrypoisoning.Category().Id) + delete(skippedRules, serviceregistrypoisoning.Category().Id) } else { - model.AddToListOfSupportedTags(service_registry_poisoning.SupportedTags()) - risks := service_registry_poisoning.GenerateRisks() + model.AddToListOfSupportedTags(serviceregistrypoisoning.SupportedTags()) + risks := serviceregistrypoisoning.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[service_registry_poisoning.Category()] = risks + model.GeneratedRisksByCategory[serviceregistrypoisoning.Category()] = risks } } - if _, ok := skippedRules[untrusted_deserialization.Category().Id]; ok { - fmt.Println("Skipping risk rule:", untrusted_deserialization.Category().Id) - delete(skippedRules, untrusted_deserialization.Category().Id) + if _, ok := skippedRules[untrusteddeserialization.Category().Id]; ok { + fmt.Println("Skipping risk rule:", untrusteddeserialization.Category().Id) + delete(skippedRules, untrusteddeserialization.Category().Id) } else { - model.AddToListOfSupportedTags(untrusted_deserialization.SupportedTags()) - risks := untrusted_deserialization.GenerateRisks() + model.AddToListOfSupportedTags(untrusteddeserialization.SupportedTags()) + risks := untrusteddeserialization.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[untrusted_deserialization.Category()] = risks + 
model.GeneratedRisksByCategory[untrusteddeserialization.Category()] = risks } } - if _, ok := skippedRules[xml_external_entity.Category().Id]; ok { - fmt.Println("Skipping risk rule:", xml_external_entity.Category().Id) - delete(skippedRules, xml_external_entity.Category().Id) + if _, ok := skippedRules[xmlexternalentity.Category().Id]; ok { + fmt.Println("Skipping risk rule:", xmlexternalentity.Category().Id) + delete(skippedRules, xmlexternalentity.Category().Id) } else { - model.AddToListOfSupportedTags(xml_external_entity.SupportedTags()) - risks := xml_external_entity.GenerateRisks() + model.AddToListOfSupportedTags(xmlexternalentity.SupportedTags()) + risks := xmlexternalentity.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[xml_external_entity.Category()] = risks + model.GeneratedRisksByCategory[xmlexternalentity.Category()] = risks } } - if _, ok := skippedRules[missing_cloud_hardening.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_cloud_hardening.Category().Id) - delete(skippedRules, missing_cloud_hardening.Category().Id) + if _, ok := skippedRules[missingcloudhardening.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingcloudhardening.Category().Id) + delete(skippedRules, missingcloudhardening.Category().Id) } else { - model.AddToListOfSupportedTags(missing_cloud_hardening.SupportedTags()) - risks := missing_cloud_hardening.GenerateRisks() + model.AddToListOfSupportedTags(missingcloudhardening.SupportedTags()) + risks := missingcloudhardening.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_cloud_hardening.Category()] = risks + model.GeneratedRisksByCategory[missingcloudhardening.Category()] = risks } } - if _, ok := skippedRules[missing_file_validation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_file_validation.Category().Id) - delete(skippedRules, missing_file_validation.Category().Id) + if _, ok := skippedRules[missingfilevalidation.Category().Id]; ok 
{ + fmt.Println("Skipping risk rule:", missingfilevalidation.Category().Id) + delete(skippedRules, missingfilevalidation.Category().Id) } else { - model.AddToListOfSupportedTags(missing_file_validation.SupportedTags()) - risks := missing_file_validation.GenerateRisks() + model.AddToListOfSupportedTags(missingfilevalidation.SupportedTags()) + risks := missingfilevalidation.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_file_validation.Category()] = risks + model.GeneratedRisksByCategory[missingfilevalidation.Category()] = risks } } - if _, ok := skippedRules[missing_hardening.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_hardening.Category().Id) - delete(skippedRules, missing_hardening.Category().Id) + if _, ok := skippedRules[missinghardening.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missinghardening.Category().Id) + delete(skippedRules, missinghardening.Category().Id) } else { - model.AddToListOfSupportedTags(missing_hardening.SupportedTags()) - risks := missing_hardening.GenerateRisks() + model.AddToListOfSupportedTags(missinghardening.SupportedTags()) + risks := missinghardening.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_hardening.Category()] = risks + model.GeneratedRisksByCategory[missinghardening.Category()] = risks } } - if _, ok := skippedRules[accidental_secret_leak.Category().Id]; ok { - fmt.Println("Skipping risk rule:", accidental_secret_leak.Category().Id) - delete(skippedRules, accidental_secret_leak.Category().Id) + if _, ok := skippedRules[accidentalsecretleak.Category().Id]; ok { + fmt.Println("Skipping risk rule:", accidentalsecretleak.Category().Id) + delete(skippedRules, accidentalsecretleak.Category().Id) } else { - model.AddToListOfSupportedTags(accidental_secret_leak.SupportedTags()) - risks := accidental_secret_leak.GenerateRisks() + model.AddToListOfSupportedTags(accidentalsecretleak.SupportedTags()) + risks := 
accidentalsecretleak.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[accidental_secret_leak.Category()] = risks + model.GeneratedRisksByCategory[accidentalsecretleak.Category()] = risks } } - if _, ok := skippedRules[code_backdooring.Category().Id]; ok { - fmt.Println("Skipping risk rule:", code_backdooring.Category().Id) - delete(skippedRules, code_backdooring.Category().Id) + if _, ok := skippedRules[codebackdooring.Category().Id]; ok { + fmt.Println("Skipping risk rule:", codebackdooring.Category().Id) + delete(skippedRules, codebackdooring.Category().Id) } else { - model.AddToListOfSupportedTags(code_backdooring.SupportedTags()) - risks := code_backdooring.GenerateRisks() + model.AddToListOfSupportedTags(codebackdooring.SupportedTags()) + risks := codebackdooring.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[code_backdooring.Category()] = risks + model.GeneratedRisksByCategory[codebackdooring.Category()] = risks } } - if _, ok := skippedRules[container_baseimage_backdooring.Category().Id]; ok { - fmt.Println("Skipping risk rule:", container_baseimage_backdooring.Category().Id) - delete(skippedRules, container_baseimage_backdooring.Category().Id) + if _, ok := skippedRules[containerbaseimagebackdooring.Category().Id]; ok { + fmt.Println("Skipping risk rule:", containerbaseimagebackdooring.Category().Id) + delete(skippedRules, containerbaseimagebackdooring.Category().Id) } else { - model.AddToListOfSupportedTags(container_baseimage_backdooring.SupportedTags()) - risks := container_baseimage_backdooring.GenerateRisks() + model.AddToListOfSupportedTags(containerbaseimagebackdooring.SupportedTags()) + risks := containerbaseimagebackdooring.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[container_baseimage_backdooring.Category()] = risks + model.GeneratedRisksByCategory[containerbaseimagebackdooring.Category()] = risks } } - if _, ok := skippedRules[container_platform_escape.Category().Id]; ok { - 
fmt.Println("Skipping risk rule:", container_platform_escape.Category().Id) - delete(skippedRules, container_platform_escape.Category().Id) + if _, ok := skippedRules[containerplatformescape.Category().Id]; ok { + fmt.Println("Skipping risk rule:", containerplatformescape.Category().Id) + delete(skippedRules, containerplatformescape.Category().Id) } else { - model.AddToListOfSupportedTags(container_platform_escape.SupportedTags()) - risks := container_platform_escape.GenerateRisks() + model.AddToListOfSupportedTags(containerplatformescape.SupportedTags()) + risks := containerplatformescape.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[container_platform_escape.Category()] = risks + model.GeneratedRisksByCategory[containerplatformescape.Category()] = risks } } - if _, ok := skippedRules[incomplete_model.Category().Id]; ok { - fmt.Println("Skipping risk rule:", incomplete_model.Category().Id) - delete(skippedRules, incomplete_model.Category().Id) + if _, ok := skippedRules[incompletemodel.Category().Id]; ok { + fmt.Println("Skipping risk rule:", incompletemodel.Category().Id) + delete(skippedRules, incompletemodel.Category().Id) } else { - model.AddToListOfSupportedTags(incomplete_model.SupportedTags()) - risks := incomplete_model.GenerateRisks() + model.AddToListOfSupportedTags(incompletemodel.SupportedTags()) + risks := incompletemodel.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[incomplete_model.Category()] = risks + model.GeneratedRisksByCategory[incompletemodel.Category()] = risks } } - if _, ok := skippedRules[unchecked_deployment.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unchecked_deployment.Category().Id) - delete(skippedRules, unchecked_deployment.Category().Id) + if _, ok := skippedRules[uncheckeddeployment.Category().Id]; ok { + fmt.Println("Skipping risk rule:", uncheckeddeployment.Category().Id) + delete(skippedRules, uncheckeddeployment.Category().Id) } else { - 
model.AddToListOfSupportedTags(unchecked_deployment.SupportedTags()) - risks := unchecked_deployment.GenerateRisks() + model.AddToListOfSupportedTags(uncheckeddeployment.SupportedTags()) + risks := uncheckeddeployment.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unchecked_deployment.Category()] = risks + model.GeneratedRisksByCategory[uncheckeddeployment.Category()] = risks } } - if _, ok := skippedRules[missing_build_infrastructure.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_build_infrastructure.Category().Id) - delete(skippedRules, missing_build_infrastructure.Category().Id) + if _, ok := skippedRules[missingbuildinfrastructure.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingbuildinfrastructure.Category().Id) + delete(skippedRules, missingbuildinfrastructure.Category().Id) } else { - model.AddToListOfSupportedTags(missing_build_infrastructure.SupportedTags()) - risks := missing_build_infrastructure.GenerateRisks() + model.AddToListOfSupportedTags(missingbuildinfrastructure.SupportedTags()) + risks := missingbuildinfrastructure.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_build_infrastructure.Category()] = risks + model.GeneratedRisksByCategory[missingbuildinfrastructure.Category()] = risks } } - if _, ok := skippedRules[missing_identity_provider_isolation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_identity_provider_isolation.Category().Id) - delete(skippedRules, missing_identity_provider_isolation.Category().Id) + if _, ok := skippedRules[missingidentityproviderisolation.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingidentityproviderisolation.Category().Id) + delete(skippedRules, missingidentityproviderisolation.Category().Id) } else { - model.AddToListOfSupportedTags(missing_identity_provider_isolation.SupportedTags()) - risks := missing_identity_provider_isolation.GenerateRisks() + 
model.AddToListOfSupportedTags(missingidentityproviderisolation.SupportedTags()) + risks := missingidentityproviderisolation.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_identity_provider_isolation.Category()] = risks + model.GeneratedRisksByCategory[missingidentityproviderisolation.Category()] = risks } } - if _, ok := skippedRules[missing_vault.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_vault.Category().Id) - delete(skippedRules, missing_vault.Category().Id) + if _, ok := skippedRules[missingvault.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingvault.Category().Id) + delete(skippedRules, missingvault.Category().Id) } else { - model.AddToListOfSupportedTags(missing_vault.SupportedTags()) - risks := missing_vault.GenerateRisks() + model.AddToListOfSupportedTags(missingvault.SupportedTags()) + risks := missingvault.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_vault.Category()] = risks + model.GeneratedRisksByCategory[missingvault.Category()] = risks } } - if _, ok := skippedRules[missing_vault_isolation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_vault_isolation.Category().Id) - delete(skippedRules, missing_vault_isolation.Category().Id) + if _, ok := skippedRules[missingvaultisolation.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingvaultisolation.Category().Id) + delete(skippedRules, missingvaultisolation.Category().Id) } else { - model.AddToListOfSupportedTags(missing_vault_isolation.SupportedTags()) - risks := missing_vault_isolation.GenerateRisks() + model.AddToListOfSupportedTags(missingvaultisolation.SupportedTags()) + risks := missingvaultisolation.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_vault_isolation.Category()] = risks + model.GeneratedRisksByCategory[missingvaultisolation.Category()] = risks } } - if _, ok := skippedRules[missing_waf.Category().Id]; ok { - fmt.Println("Skipping 
risk rule:", missing_waf.Category().Id) - delete(skippedRules, missing_waf.Category().Id) + if _, ok := skippedRules[missingwaf.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingwaf.Category().Id) + delete(skippedRules, missingwaf.Category().Id) } else { - model.AddToListOfSupportedTags(missing_waf.SupportedTags()) - risks := missing_waf.GenerateRisks() + model.AddToListOfSupportedTags(missingwaf.SupportedTags()) + risks := missingwaf.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_waf.Category()] = risks + model.GeneratedRisksByCategory[missingwaf.Category()] = risks } } - if _, ok := skippedRules[wrong_communication_link_content.Category().Id]; ok { - fmt.Println("Skipping risk rule:", wrong_communication_link_content.Category().Id) - delete(skippedRules, wrong_communication_link_content.Category().Id) + if _, ok := skippedRules[wrongcommunicationlinkcontent.Category().Id]; ok { + fmt.Println("Skipping risk rule:", wrongcommunicationlinkcontent.Category().Id) + delete(skippedRules, wrongcommunicationlinkcontent.Category().Id) } else { - model.AddToListOfSupportedTags(wrong_communication_link_content.SupportedTags()) - risks := wrong_communication_link_content.GenerateRisks() + model.AddToListOfSupportedTags(wrongcommunicationlinkcontent.SupportedTags()) + risks := wrongcommunicationlinkcontent.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[wrong_communication_link_content.Category()] = risks + model.GeneratedRisksByCategory[wrongcommunicationlinkcontent.Category()] = risks } } - if _, ok := skippedRules[wrong_trust_boundary_content.Category().Id]; ok { - fmt.Println("Skipping risk rule:", wrong_trust_boundary_content.Category().Id) - delete(skippedRules, wrong_trust_boundary_content.Category().Id) + if _, ok := skippedRules[wrongtrustboundarycontent.Category().Id]; ok { + fmt.Println("Skipping risk rule:", wrongtrustboundarycontent.Category().Id) + delete(skippedRules, 
wrongtrustboundarycontent.Category().Id) } else { - model.AddToListOfSupportedTags(wrong_trust_boundary_content.SupportedTags()) - risks := wrong_trust_boundary_content.GenerateRisks() + model.AddToListOfSupportedTags(wrongtrustboundarycontent.SupportedTags()) + risks := wrongtrustboundarycontent.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[wrong_trust_boundary_content.Category()] = risks + model.GeneratedRisksByCategory[wrongtrustboundarycontent.Category()] = risks } } @@ -649,8 +648,8 @@ func checkRiskTracking() { } // save also the risk-category-id and risk-status directly in the risk for better JSON marshalling - for category, _ := range model.GeneratedRisksByCategory { - for i, _ := range model.GeneratedRisksByCategory[category] { + for category := range model.GeneratedRisksByCategory { + for i := range model.GeneratedRisksByCategory[category] { model.GeneratedRisksByCategory[category][i].CategoryId = category.Id model.GeneratedRisksByCategory[category][i].RiskStatus = model.GeneratedRisksByCategory[category][i].GetRiskTrackingStatusDefaultingUnchecked() } @@ -683,26 +682,26 @@ func unzip(src string, dest string) ([]string, error) { if err != nil { return filenames, err } - defer r.Close() + defer func() { _ = r.Close() }() for _, f := range r.File { // Store filename/path for returning and using later on - fpath := filepath.Join(dest, f.Name) + path := filepath.Join(dest, f.Name) // Check for ZipSlip. 
More Info: http://bit.ly/2MsjAWE - if !strings.HasPrefix(fpath, filepath.Clean(dest)+string(os.PathSeparator)) { - return filenames, fmt.Errorf("%s: illegal file path", fpath) + if !strings.HasPrefix(path, filepath.Clean(dest)+string(os.PathSeparator)) { + return filenames, fmt.Errorf("%s: illegal file path", path) } - filenames = append(filenames, fpath) + filenames = append(filenames, path) if f.FileInfo().IsDir() { // Make Folder - os.MkdirAll(fpath, os.ModePerm) + _ = os.MkdirAll(path, os.ModePerm) continue } // Make File - if err = os.MkdirAll(filepath.Dir(fpath), os.ModePerm); err != nil { + if err = os.MkdirAll(filepath.Dir(path), os.ModePerm); err != nil { return filenames, err } - outFile, err := os.OpenFile(fpath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode()) + outFile, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode()) if err != nil { return filenames, err } @@ -712,8 +711,8 @@ func unzip(src string, dest string) ([]string, error) { } _, err = io.Copy(outFile, rc) // Close the file without defer to close before next iteration of loop - outFile.Close() - rc.Close() + _ = outFile.Close() + _ = rc.Close() if err != nil { return filenames, err } @@ -729,10 +728,10 @@ func zipFiles(filename string, files []string) error { if err != nil { return err } - defer newZipFile.Close() + defer func() { _ = newZipFile.Close() }() zipWriter := zip.NewWriter(newZipFile) - defer zipWriter.Close() + defer func() { _ = zipWriter.Close() }() // Add files to zip for _, file := range files { @@ -748,7 +747,7 @@ func addFileToZip(zipWriter *zip.Writer, filename string) error { if err != nil { return err } - defer fileToZip.Close() + defer func() { _ = fileToZip.Close() }() // Get the file information info, err := fileToZip.Stat() @@ -785,7 +784,7 @@ func doIt(inputFilename string, outputDirectory string) { if *verbose { log.Println(err) } - os.Stderr.WriteString(err.Error() + "\n") + _, _ = os.Stderr.WriteString(err.Error() + "\n") os.Exit(2) } }() @@ 
-808,18 +807,18 @@ func doIt(inputFilename string, outputDirectory string) { if len(*executeModelMacro) > 0 { var macroDetails model.MacroDetails switch *executeModelMacro { - case add_build_pipeline.GetMacroDetails().ID: - macroDetails = add_build_pipeline.GetMacroDetails() - case add_vault.GetMacroDetails().ID: - macroDetails = add_vault.GetMacroDetails() - case pretty_print.GetMacroDetails().ID: - macroDetails = pretty_print.GetMacroDetails() - case remove_unused_tags.GetMacroDetails().ID: - macroDetails = remove_unused_tags.GetMacroDetails() - case seed_risk_tracking.GetMacroDetails().ID: - macroDetails = seed_risk_tracking.GetMacroDetails() - case seed_tags.GetMacroDetails().ID: - macroDetails = seed_tags.GetMacroDetails() + case addbuildpipeline.GetMacroDetails().ID: + macroDetails = addbuildpipeline.GetMacroDetails() + case addvault.GetMacroDetails().ID: + macroDetails = addvault.GetMacroDetails() + case prettyprint.GetMacroDetails().ID: + macroDetails = prettyprint.GetMacroDetails() + case removeunusedtags.GetMacroDetails().ID: + macroDetails = removeunusedtags.GetMacroDetails() + case seedrisktracking.GetMacroDetails().ID: + macroDetails = seedrisktracking.GetMacroDetails() + case seedtags.GetMacroDetails().ID: + macroDetails = seedtags.GetMacroDetails() default: log.Fatal("Unknown model macro: ", *executeModelMacro) } @@ -838,18 +837,18 @@ func doIt(inputFilename string, outputDirectory string) { var nextQuestion model.MacroQuestion for { switch macroDetails.ID { - case add_build_pipeline.GetMacroDetails().ID: - nextQuestion, err = add_build_pipeline.GetNextQuestion() - case add_vault.GetMacroDetails().ID: - nextQuestion, err = add_vault.GetNextQuestion() - case pretty_print.GetMacroDetails().ID: - nextQuestion, err = pretty_print.GetNextQuestion() - case remove_unused_tags.GetMacroDetails().ID: - nextQuestion, err = remove_unused_tags.GetNextQuestion() - case seed_risk_tracking.GetMacroDetails().ID: - nextQuestion, err = 
seed_risk_tracking.GetNextQuestion() - case seed_tags.GetMacroDetails().ID: - nextQuestion, err = seed_tags.GetNextQuestion() + case addbuildpipeline.GetMacroDetails().ID: + nextQuestion, err = addbuildpipeline.GetNextQuestion() + case addvault.GetMacroDetails().ID: + nextQuestion, err = addvault.GetNextQuestion() + case prettyprint.GetMacroDetails().ID: + nextQuestion, err = prettyprint.GetNextQuestion() + case removeunusedtags.GetMacroDetails().ID: + nextQuestion, err = removeunusedtags.GetNextQuestion() + case seedrisktracking.GetMacroDetails().ID: + nextQuestion, err = seedrisktracking.GetNextQuestion() + case seedtags.GetMacroDetails().ID: + nextQuestion, err = seedtags.GetNextQuestion() } checkErr(err) if nextQuestion.NoMoreQuestions() { @@ -865,7 +864,7 @@ func doIt(inputFilename string, outputDirectory string) { resultingMultiValueSelection := make([]string, 0) if nextQuestion.IsValueConstrained() { if nextQuestion.MultiSelect { - selectedValues := make(map[string]bool, 0) + selectedValues := make(map[string]bool) for { fmt.Println("Please select (multiple executions possible) from the following values (use number to select/deselect):") fmt.Println(" 0:", "SELECTION PROCESS FINISHED: CONTINUE TO NEXT QUESTION") @@ -939,18 +938,18 @@ func doIt(inputFilename string, outputDirectory string) { return } else if strings.ToLower(answer) == "back" { switch macroDetails.ID { - case add_build_pipeline.GetMacroDetails().ID: - message, validResult, err = add_build_pipeline.GoBack() - case add_vault.GetMacroDetails().ID: - message, validResult, err = add_vault.GoBack() - case pretty_print.GetMacroDetails().ID: - message, validResult, err = pretty_print.GoBack() - case remove_unused_tags.GetMacroDetails().ID: - message, validResult, err = remove_unused_tags.GoBack() - case seed_risk_tracking.GetMacroDetails().ID: - message, validResult, err = seed_risk_tracking.GoBack() - case seed_tags.GetMacroDetails().ID: - message, validResult, err = seed_tags.GoBack() + case 
addbuildpipeline.GetMacroDetails().ID: + message, validResult, err = addbuildpipeline.GoBack() + case addvault.GetMacroDetails().ID: + message, validResult, err = addvault.GoBack() + case prettyprint.GetMacroDetails().ID: + message, validResult, err = prettyprint.GoBack() + case removeunusedtags.GetMacroDetails().ID: + message, validResult, err = removeunusedtags.GoBack() + case seedrisktracking.GetMacroDetails().ID: + message, validResult, err = seedrisktracking.GoBack() + case seedtags.GetMacroDetails().ID: + message, validResult, err = seedtags.GoBack() } } else if len(answer) > 0 { // individual answer if nextQuestion.IsValueConstrained() { @@ -962,34 +961,34 @@ func doIt(inputFilename string, outputDirectory string) { } } switch macroDetails.ID { - case add_build_pipeline.GetMacroDetails().ID: - message, validResult, err = add_build_pipeline.ApplyAnswer(nextQuestion.ID, answer) - case add_vault.GetMacroDetails().ID: - message, validResult, err = add_vault.ApplyAnswer(nextQuestion.ID, answer) - case pretty_print.GetMacroDetails().ID: - message, validResult, err = pretty_print.ApplyAnswer(nextQuestion.ID, answer) - case remove_unused_tags.GetMacroDetails().ID: - message, validResult, err = remove_unused_tags.ApplyAnswer(nextQuestion.ID, answer) - case seed_risk_tracking.GetMacroDetails().ID: - message, validResult, err = seed_risk_tracking.ApplyAnswer(nextQuestion.ID, answer) - case seed_tags.GetMacroDetails().ID: - message, validResult, err = seed_tags.ApplyAnswer(nextQuestion.ID, answer) + case addbuildpipeline.GetMacroDetails().ID: + message, validResult, err = addbuildpipeline.ApplyAnswer(nextQuestion.ID, answer) + case addvault.GetMacroDetails().ID: + message, validResult, err = addvault.ApplyAnswer(nextQuestion.ID, answer) + case prettyprint.GetMacroDetails().ID: + message, validResult, err = prettyprint.ApplyAnswer(nextQuestion.ID, answer) + case removeunusedtags.GetMacroDetails().ID: + message, validResult, err = 
removeunusedtags.ApplyAnswer(nextQuestion.ID, answer) + case seedrisktracking.GetMacroDetails().ID: + message, validResult, err = seedrisktracking.ApplyAnswer(nextQuestion.ID, answer) + case seedtags.GetMacroDetails().ID: + message, validResult, err = seedtags.ApplyAnswer(nextQuestion.ID, answer) } } } else { switch macroDetails.ID { - case add_build_pipeline.GetMacroDetails().ID: - message, validResult, err = add_build_pipeline.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case add_vault.GetMacroDetails().ID: - message, validResult, err = add_vault.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case pretty_print.GetMacroDetails().ID: - message, validResult, err = pretty_print.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case remove_unused_tags.GetMacroDetails().ID: - message, validResult, err = remove_unused_tags.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case seed_risk_tracking.GetMacroDetails().ID: - message, validResult, err = seed_risk_tracking.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case seed_tags.GetMacroDetails().ID: - message, validResult, err = seed_tags.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case addbuildpipeline.GetMacroDetails().ID: + message, validResult, err = addbuildpipeline.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case addvault.GetMacroDetails().ID: + message, validResult, err = addvault.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case prettyprint.GetMacroDetails().ID: + message, validResult, err = prettyprint.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case removeunusedtags.GetMacroDetails().ID: + message, validResult, err = removeunusedtags.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case seedrisktracking.GetMacroDetails().ID: + message, validResult, err = seedrisktracking.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) 
+ case seedtags.GetMacroDetails().ID: + message, validResult, err = seedtags.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) } } checkErr(err) @@ -1013,18 +1012,18 @@ func doIt(inputFilename string, outputDirectory string) { validResult := true var err error switch macroDetails.ID { - case add_build_pipeline.GetMacroDetails().ID: - changes, message, validResult, err = add_build_pipeline.GetFinalChangeImpact(&modelInput) - case add_vault.GetMacroDetails().ID: - changes, message, validResult, err = add_vault.GetFinalChangeImpact(&modelInput) - case pretty_print.GetMacroDetails().ID: - changes, message, validResult, err = pretty_print.GetFinalChangeImpact(&modelInput) - case remove_unused_tags.GetMacroDetails().ID: - changes, message, validResult, err = remove_unused_tags.GetFinalChangeImpact(&modelInput) - case seed_risk_tracking.GetMacroDetails().ID: - changes, message, validResult, err = seed_risk_tracking.GetFinalChangeImpact(&modelInput) - case seed_tags.GetMacroDetails().ID: - changes, message, validResult, err = seed_tags.GetFinalChangeImpact(&modelInput) + case addbuildpipeline.GetMacroDetails().ID: + changes, message, validResult, err = addbuildpipeline.GetFinalChangeImpact(&modelInput) + case addvault.GetMacroDetails().ID: + changes, message, validResult, err = addvault.GetFinalChangeImpact(&modelInput) + case prettyprint.GetMacroDetails().ID: + changes, message, validResult, err = prettyprint.GetFinalChangeImpact(&modelInput) + case removeunusedtags.GetMacroDetails().ID: + changes, message, validResult, err = removeunusedtags.GetFinalChangeImpact(&modelInput) + case seedrisktracking.GetMacroDetails().ID: + changes, message, validResult, err = seedrisktracking.GetFinalChangeImpact(&modelInput) + case seedtags.GetMacroDetails().ID: + changes, message, validResult, err = seedtags.GetFinalChangeImpact(&modelInput) } checkErr(err) for _, change := range changes { @@ -1049,18 +1048,18 @@ func doIt(inputFilename string, outputDirectory string) { 
validResult := true var err error switch macroDetails.ID { - case add_build_pipeline.GetMacroDetails().ID: - message, validResult, err = add_build_pipeline.Execute(&modelInput) - case add_vault.GetMacroDetails().ID: - message, validResult, err = add_vault.Execute(&modelInput) - case pretty_print.GetMacroDetails().ID: - message, validResult, err = pretty_print.Execute(&modelInput) - case remove_unused_tags.GetMacroDetails().ID: - message, validResult, err = remove_unused_tags.Execute(&modelInput) - case seed_risk_tracking.GetMacroDetails().ID: - message, validResult, err = seed_risk_tracking.Execute(&modelInput) - case seed_tags.GetMacroDetails().ID: - message, validResult, err = seed_tags.Execute(&modelInput) + case addbuildpipeline.GetMacroDetails().ID: + message, validResult, err = addbuildpipeline.Execute(&modelInput) + case addvault.GetMacroDetails().ID: + message, validResult, err = addvault.Execute(&modelInput) + case prettyprint.GetMacroDetails().ID: + message, validResult, err = prettyprint.Execute(&modelInput) + case removeunusedtags.GetMacroDetails().ID: + message, validResult, err = removeunusedtags.Execute(&modelInput) + case seedrisktracking.GetMacroDetails().ID: + message, validResult, err = seedrisktracking.Execute(&modelInput) + case seedtags.GetMacroDetails().ID: + message, validResult, err = seedtags.Execute(&modelInput) } checkErr(err) if !validResult { @@ -1080,7 +1079,7 @@ func doIt(inputFilename string, outputDirectory string) { yamlBytes = model.ReformatYAML(yamlBytes) */ fmt.Println("Writing model file:", inputFilename) - err = ioutil.WriteFile(inputFilename, yamlBytes, 0400) + err = os.WriteFile(inputFilename, yamlBytes, 0400) checkErr(err) fmt.Println("Model file successfully updated") return @@ -1089,7 +1088,6 @@ func doIt(inputFilename string, outputDirectory string) { return } } - fmt.Println() return } @@ -1102,10 +1100,10 @@ func doIt(inputFilename string, outputDirectory string) { if renderDataFlowDiagram { gvFile := outputDirectory 
+ "/" + dataFlowDiagramFilenameDOT if !keepDiagramSourceFiles { - tmpFileGV, err := ioutil.TempFile(model.TempFolder, dataFlowDiagramFilenameDOT) + tmpFileGV, err := os.CreateTemp(model.TempFolder, dataFlowDiagramFilenameDOT) checkErr(err) gvFile = tmpFileGV.Name() - defer os.Remove(gvFile) + defer func() { _ = os.Remove(gvFile) }() } dotFile := writeDataFlowDiagramGraphvizDOT(gvFile, *diagramDPI) renderDataFlowDiagramGraphvizImage(dotFile, outputDirectory) @@ -1114,10 +1112,10 @@ func doIt(inputFilename string, outputDirectory string) { if renderDataAssetDiagram { gvFile := outputDirectory + "/" + dataAssetDiagramFilenameDOT if !keepDiagramSourceFiles { - tmpFile, err := ioutil.TempFile(model.TempFolder, dataAssetDiagramFilenameDOT) + tmpFile, err := os.CreateTemp(model.TempFolder, dataAssetDiagramFilenameDOT) checkErr(err) gvFile = tmpFile.Name() - defer os.Remove(gvFile) + defer func() { _ = os.Remove(gvFile) }() } dotFile := writeDataAssetDiagramGraphvizDOT(gvFile, *diagramDPI) renderDataAssetDiagramGraphvizImage(dotFile, outputDirectory) @@ -1167,7 +1165,7 @@ func doIt(inputFilename string, outputDirectory string) { // hash the YAML input file f, err := os.Open(inputFilename) checkErr(err) - defer f.Close() + defer func() { _ = f.Close() }() hasher := sha256.New() if _, err := io.Copy(hasher, f); err != nil { panic(err) @@ -1205,7 +1203,7 @@ func applyRAA() string { if *verbose { fmt.Println("Applying RAA calculation:", *raaPlugin) } - // determine plugin to load + // determine plugin to load. 
// load plugin: open the ".so" file to load the symbols plug, err := plugin.Open(*raaPlugin) checkErr(err) @@ -1222,7 +1220,7 @@ func applyRAA() string { } func loadCustomRiskRules() { - customRiskRules = make(map[string]model.CustomRiskRule, 0) + customRiskRules = make(map[string]model.CustomRiskRule) if len(*riskRulesPlugins) > 0 { if *verbose { fmt.Println("Loading custom risk rules:", *riskRulesPlugins) @@ -1310,13 +1308,13 @@ func execute(context *gin.Context, dryRun bool) (yamlContent []byte, ok bool) { filenameUploaded := strings.TrimSpace(header.Filename) - tmpInputDir, err := ioutil.TempDir(model.TempFolder, "threagile-input-") + tmpInputDir, err := os.MkdirTemp(model.TempFolder, "threagile-input-") checkErr(err) - defer os.RemoveAll(tmpInputDir) + defer func() { _ = os.RemoveAll(tmpInputDir) }() - tmpModelFile, err := ioutil.TempFile(tmpInputDir, "threagile-model-*") + tmpModelFile, err := os.CreateTemp(tmpInputDir, "threagile-model-*") checkErr(err) - defer os.Remove(tmpModelFile.Name()) + defer func() { _ = os.Remove(tmpModelFile.Name()) }() _, err = io.Copy(tmpModelFile, fileUploaded) checkErr(err) @@ -1342,13 +1340,13 @@ func execute(context *gin.Context, dryRun bool) (yamlContent []byte, ok bool) { } } - tmpOutputDir, err := ioutil.TempDir(model.TempFolder, "threagile-output-") + tmpOutputDir, err := os.MkdirTemp(model.TempFolder, "threagile-output-") checkErr(err) - defer os.RemoveAll(tmpOutputDir) + defer func() { _ = os.RemoveAll(tmpOutputDir) }() - tmpResultFile, err := ioutil.TempFile(model.TempFolder, "threagile-result-*.zip") + tmpResultFile, err := os.CreateTemp(model.TempFolder, "threagile-result-*.zip") checkErr(err) - defer os.Remove(tmpResultFile.Name()) + defer func() { _ = os.Remove(tmpResultFile.Name()) }() if dryRun { doItViaRuntimeCall(yamlFile, tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, false, false, false, true, true, true, 40) @@ -1357,9 +1355,9 @@ 
func execute(context *gin.Context, dryRun bool) (yamlContent []byte, ok bool) { } checkErr(err) - yamlContent, err = ioutil.ReadFile(yamlFile) + yamlContent, err = os.ReadFile(yamlFile) checkErr(err) - err = ioutil.WriteFile(tmpOutputDir+"/threagile.yaml", yamlContent, 0400) + err = os.WriteFile(tmpOutputDir+"/threagile.yaml", yamlContent, 0400) checkErr(err) if !dryRun { @@ -1570,153 +1568,153 @@ func startServer() { router.DELETE("/models/:model-id/shared-runtimes/:shared-runtime-id", deleteSharedRuntime) fmt.Println("Threagile server running...") - router.Run(":" + strconv.Itoa(*serverPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified + _ = router.Run(":" + strconv.Itoa(*serverPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified } func exampleFile(context *gin.Context) { - example, err := ioutil.ReadFile("/app/threagile-example-model.yaml") + example, err := os.ReadFile("/app/threagile-example-model.yaml") checkErr(err) context.Data(http.StatusOK, gin.MIMEYAML, example) } func stubFile(context *gin.Context) { - stub, err := ioutil.ReadFile("/app/threagile-stub-model.yaml") + stub, err := os.ReadFile("/app/threagile-stub-model.yaml") checkErr(err) context.Data(http.StatusOK, gin.MIMEYAML, addSupportedTags(stub)) // TODO use also the MIMEYAML way of serving YAML in model export? 
} func addSupportedTags(input []byte) []byte { // add distinct tags as "tags_available" - supportedTags := make(map[string]bool, 0) + supportedTags := make(map[string]bool) for _, customRule := range customRiskRules { for _, tag := range customRule.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } } - for _, tag := range accidental_secret_leak.SupportedTags() { + for _, tag := range accidentalsecretleak.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range code_backdooring.SupportedTags() { + for _, tag := range codebackdooring.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range container_baseimage_backdooring.SupportedTags() { + for _, tag := range containerbaseimagebackdooring.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range container_platform_escape.SupportedTags() { + for _, tag := range containerplatformescape.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range cross_site_request_forgery.SupportedTags() { + for _, tag := range crosssiterequestforgery.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range cross_site_scripting.SupportedTags() { + for _, tag := range crosssitescripting.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range dos_risky_access_across_trust_boundary.SupportedTags() { + for _, tag := range dosriskyaccessacrosstrustboundary.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range incomplete_model.SupportedTags() { + for _, tag := range incompletemodel.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range ldap_injection.SupportedTags() { + for _, tag := range ldapinjection.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_authentication.SupportedTags() { + for _, tag := range missingauthentication.SupportedTags() { 
supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_authentication_second_factor.SupportedTags() { + for _, tag := range missingauthenticationsecondfactor.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_build_infrastructure.SupportedTags() { + for _, tag := range missingbuildinfrastructure.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_cloud_hardening.SupportedTags() { + for _, tag := range missingcloudhardening.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_file_validation.SupportedTags() { + for _, tag := range missingfilevalidation.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_hardening.SupportedTags() { + for _, tag := range missinghardening.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_identity_propagation.SupportedTags() { + for _, tag := range missingidentitypropagation.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_identity_provider_isolation.SupportedTags() { + for _, tag := range missingidentityproviderisolation.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_identity_store.SupportedTags() { + for _, tag := range missingidentitystore.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_network_segmentation.SupportedTags() { + for _, tag := range missingnetworksegmentation.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_vault.SupportedTags() { + for _, tag := range missingvault.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_vault_isolation.SupportedTags() { + for _, tag := range missingvaultisolation.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range 
missing_waf.SupportedTags() { + for _, tag := range missingwaf.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range mixed_targets_on_shared_runtime.SupportedTags() { + for _, tag := range mixedtargetsonsharedruntime.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range path_traversal.SupportedTags() { + for _, tag := range pathtraversal.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range push_instead_of_pull_deployment.SupportedTags() { + for _, tag := range pushinsteadofpulldeployment.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range search_query_injection.SupportedTags() { + for _, tag := range searchqueryinjection.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range server_side_request_forgery.SupportedTags() { + for _, tag := range serversiderequestforgery.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range service_registry_poisoning.SupportedTags() { + for _, tag := range serviceregistrypoisoning.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range sql_nosql_injection.SupportedTags() { + for _, tag := range sqlnosqlinjection.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unchecked_deployment.SupportedTags() { + for _, tag := range uncheckeddeployment.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unencrypted_asset.SupportedTags() { + for _, tag := range unencryptedasset.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unencrypted_communication.SupportedTags() { + for _, tag := range unencryptedcommunication.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unguarded_access_from_internet.SupportedTags() { + for _, tag := range unguardedaccessfrominternet.SupportedTags() { 
supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unguarded_direct_datastore_access.SupportedTags() { + for _, tag := range unguardeddirectdatastoreaccess.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unnecessary_communication_link.SupportedTags() { + for _, tag := range unnecessarycommunicationlink.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unnecessary_data_asset.SupportedTags() { + for _, tag := range unnecessarydataasset.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unnecessary_data_transfer.SupportedTags() { + for _, tag := range unnecessarydatatransfer.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unnecessary_technical_asset.SupportedTags() { + for _, tag := range unnecessarytechnicalasset.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range untrusted_deserialization.SupportedTags() { + for _, tag := range untrusteddeserialization.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range wrong_communication_link_content.SupportedTags() { + for _, tag := range wrongcommunicationlinkcontent.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range wrong_trust_boundary_content.SupportedTags() { + for _, tag := range wrongtrustboundarycontent.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range xml_external_entity.SupportedTags() { + for _, tag := range xmlexternalentity.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } tags := make([]string, 0, len(supportedTags)) @@ -1747,8 +1745,8 @@ func addSupportedTags(input []byte) []byte { const keySize = 32 type timeoutStruct struct { - xorRand []byte - createdNanotime, lastAcessedNanotime int64 + xorRand []byte + createdNanoTime, lastAccessedNanoTime int64 } var mapTokenHashToTimeoutStruct = 
make(map[string]timeoutStruct) @@ -1780,9 +1778,9 @@ func createToken(context *gin.Context) { tokenHash := hashSHA256(token) housekeepingTokenMaps() mapTokenHashToTimeoutStruct[tokenHash] = timeoutStruct{ - xorRand: xorBytesArr, - createdNanotime: now, - lastAcessedNanotime: now, + xorRand: xorBytesArr, + createdNanoTime: now, + lastAccessedNanoTime: now, } mapFolderNameToTokenHash[folderName] = tokenHash context.JSON(http.StatusCreated, gin.H{ @@ -1824,14 +1822,14 @@ func housekeepingTokenMaps() { if extremeShortTimeoutsForTesting { // remove all elements older than 1 minute (= 60000000000 ns) soft // and all elements older than 3 minutes (= 180000000000 ns) hard - if now-val.lastAcessedNanotime > 60000000000 || now-val.createdNanotime > 180000000000 { + if now-val.lastAccessedNanoTime > 60000000000 || now-val.createdNanoTime > 180000000000 { fmt.Println("About to remove a token hash from maps") deleteTokenHashFromMaps(tokenHash) } } else { // remove all elements older than 30 minutes (= 1800000000000 ns) soft // and all elements older than 10 hours (= 36000000000000 ns) hard - if now-val.lastAcessedNanotime > 1800000000000 || now-val.createdNanotime > 36000000000000 { + if now-val.lastAccessedNanoTime > 1800000000000 || now-val.createdNanoTime > 36000000000000 { deleteTokenHashFromMaps(tokenHash) } } @@ -1891,30 +1889,30 @@ func analyzeModelOnServerDirectly(context *gin.Context) { if !ok { return } - tmpModelFile, err := ioutil.TempFile(model.TempFolder, "threagile-direct-analyze-*") + tmpModelFile, err := os.CreateTemp(model.TempFolder, "threagile-direct-analyze-*") if err != nil { handleErrorInServiceCall(err, context) return } - defer os.Remove(tmpModelFile.Name()) - tmpOutputDir, err := ioutil.TempDir(model.TempFolder, "threagile-direct-analyze-") + defer func() { _ = os.Remove(tmpModelFile.Name()) }() + tmpOutputDir, err := os.MkdirTemp(model.TempFolder, "threagile-direct-analyze-") if err != nil { handleErrorInServiceCall(err, context) return } - defer 
os.RemoveAll(tmpOutputDir) - tmpResultFile, err := ioutil.TempFile(model.TempFolder, "threagile-result-*.zip") + defer func() { _ = os.RemoveAll(tmpOutputDir) }() + tmpResultFile, err := os.CreateTemp(model.TempFolder, "threagile-result-*.zip") checkErr(err) - defer os.Remove(tmpResultFile.Name()) + defer func() { _ = os.Remove(tmpResultFile.Name()) }() - err = ioutil.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) + err = os.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, true, true, true, true, true, true, true, true, dpi) if err != nil { handleErrorInServiceCall(err, context) return } - err = ioutil.WriteFile(tmpOutputDir+"/threagile.yaml", []byte(yamlText), 0400) + err = os.WriteFile(tmpOutputDir+"/threagile.yaml", []byte(yamlText), 0400) if err != nil { handleErrorInServiceCall(err, context) return @@ -2010,19 +2008,19 @@ func streamResponse(context *gin.Context, responseType responseType) { if !ok { return } - tmpModelFile, err := ioutil.TempFile(model.TempFolder, "threagile-render-*") + tmpModelFile, err := os.CreateTemp(model.TempFolder, "threagile-render-*") if err != nil { handleErrorInServiceCall(err, context) return } - defer os.Remove(tmpModelFile.Name()) - tmpOutputDir, err := ioutil.TempDir(model.TempFolder, "threagile-render-") + defer func() { _ = os.Remove(tmpModelFile.Name()) }() + tmpOutputDir, err := os.MkdirTemp(model.TempFolder, "threagile-render-") if err != nil { handleErrorInServiceCall(err, context) return } - defer os.RemoveAll(tmpOutputDir) - err = ioutil.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) + defer func() { _ = os.RemoveAll(tmpOutputDir) }() + err = os.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) if responseType == dataFlowDiagram { doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, 
*skipRiskRules, *ignoreOrphanedRiskTracking, true, false, false, false, false, false, false, false, dpi) if err != nil { @@ -2064,7 +2062,7 @@ func streamResponse(context *gin.Context, responseType responseType) { handleErrorInServiceCall(err, context) return } - json, err := ioutil.ReadFile(tmpOutputDir + "/" + jsonRisksFilename) + json, err := os.ReadFile(tmpOutputDir + "/" + jsonRisksFilename) if err != nil { handleErrorInServiceCall(err, context) return @@ -2076,7 +2074,7 @@ func streamResponse(context *gin.Context, responseType responseType) { handleErrorInServiceCall(err, context) return } - json, err := ioutil.ReadFile(tmpOutputDir + "/" + jsonTechnicalAssetsFilename) + json, err := os.ReadFile(tmpOutputDir + "/" + jsonTechnicalAssetsFilename) if err != nil { handleErrorInServiceCall(err, context) return @@ -2088,7 +2086,7 @@ func streamResponse(context *gin.Context, responseType responseType) { handleErrorInServiceCall(err, context) return } - json, err := ioutil.ReadFile(tmpOutputDir + "/" + jsonStatsFilename) + json, err := os.ReadFile(tmpOutputDir + "/" + jsonStatsFilename) if err != nil { handleErrorInServiceCall(err, context) return @@ -2106,14 +2104,14 @@ func importModel(context *gin.Context) { lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - uuid := context.Param("model-id") // UUID is syntactically validated in readModel+checkModelFolder (next line) via uuid.Parse(modelUUID) - _, _, ok = readModel(context, uuid, key, folderNameOfKey) + aUuid := context.Param("model-id") // UUID is syntactically validated in readModel+checkModelFolder (next line) via uuid.Parse(modelUUID) + _, _, ok = readModel(context, aUuid, key, folderNameOfKey) if ok { // first analyze it simply by executing the full risk process (just discard the result) to ensure that everything would work yamlContent, ok := execute(context, true) if ok { // if we're here, then no problem was raised, so ok to proceed - ok = writeModelYAML(context, string(yamlContent), key, 
folderNameForModel(folderNameOfKey, uuid), "Model Import", false) + ok = writeModelYAML(context, string(yamlContent), key, folderNameForModel(folderNameOfKey, aUuid), "Model Import", false) if ok { context.JSON(http.StatusCreated, gin.H{ "message": "model imported", @@ -2125,7 +2123,7 @@ func importModel(context *gin.Context) { func stats(context *gin.Context) { keyCount, modelCount := 0, 0 - keyFolders, err := ioutil.ReadDir(baseFolder) + keyFolders, err := os.ReadDir(baseFolder) if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -2136,7 +2134,7 @@ func stats(context *gin.Context) { for _, keyFolder := range keyFolders { if len(keyFolder.Name()) == 128 { // it's a sha512 token hash probably, so count it as token folder for the stats keyCount++ - modelFolders, err := ioutil.ReadDir(baseFolder + "/" + keyFolder.Name()) + modelFolders, err := os.ReadDir(baseFolder + "/" + keyFolder.Name()) if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -2170,7 +2168,7 @@ func getDataAsset(context *gin.Context) { modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, dataAsset := range modelInput.Data_assets { + for title, dataAsset := range modelInput.DataAssets { if dataAsset.ID == context.Param("data-asset-id") { context.JSON(http.StatusOK, gin.H{ title: dataAsset, @@ -2195,81 +2193,81 @@ func deleteDataAsset(context *gin.Context) { if ok { referencesDeleted := false // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, dataAsset := range modelInput.Data_assets { + for title, dataAsset := range modelInput.DataAssets { if dataAsset.ID == context.Param("data-asset-id") { // also remove all usages of this data asset !! 
- for _, techAsset := range modelInput.Technical_assets { - if techAsset.Data_assets_processed != nil { - for i, parsedChangeCandidateAsset := range techAsset.Data_assets_processed { + for _, techAsset := range modelInput.TechnicalAssets { + if techAsset.DataAssetsProcessed != nil { + for i, parsedChangeCandidateAsset := range techAsset.DataAssetsProcessed { referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) if referencedAsset == dataAsset.ID { // apply the removal referencesDeleted = true // Remove the element at index i // TODO needs more testing - copy(techAsset.Data_assets_processed[i:], techAsset.Data_assets_processed[i+1:]) // Shift a[i+1:] left one index. - techAsset.Data_assets_processed[len(techAsset.Data_assets_processed)-1] = "" // Erase last element (write zero value). - techAsset.Data_assets_processed = techAsset.Data_assets_processed[:len(techAsset.Data_assets_processed)-1] // Truncate slice. + copy(techAsset.DataAssetsProcessed[i:], techAsset.DataAssetsProcessed[i+1:]) // Shift a[i+1:] left one index. + techAsset.DataAssetsProcessed[len(techAsset.DataAssetsProcessed)-1] = "" // Erase last element (write zero value). + techAsset.DataAssetsProcessed = techAsset.DataAssetsProcessed[:len(techAsset.DataAssetsProcessed)-1] // Truncate slice. } } } - if techAsset.Data_assets_stored != nil { - for i, parsedChangeCandidateAsset := range techAsset.Data_assets_stored { + if techAsset.DataAssetsStored != nil { + for i, parsedChangeCandidateAsset := range techAsset.DataAssetsStored { referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) if referencedAsset == dataAsset.ID { // apply the removal referencesDeleted = true // Remove the element at index i // TODO needs more testing - copy(techAsset.Data_assets_stored[i:], techAsset.Data_assets_stored[i+1:]) // Shift a[i+1:] left one index. - techAsset.Data_assets_stored[len(techAsset.Data_assets_stored)-1] = "" // Erase last element (write zero value). 
- techAsset.Data_assets_stored = techAsset.Data_assets_stored[:len(techAsset.Data_assets_stored)-1] // Truncate slice. + copy(techAsset.DataAssetsStored[i:], techAsset.DataAssetsStored[i+1:]) // Shift a[i+1:] left one index. + techAsset.DataAssetsStored[len(techAsset.DataAssetsStored)-1] = "" // Erase last element (write zero value). + techAsset.DataAssetsStored = techAsset.DataAssetsStored[:len(techAsset.DataAssetsStored)-1] // Truncate slice. } } } - if techAsset.Communication_links != nil { - for title, commLink := range techAsset.Communication_links { - for i, dataAssetSent := range commLink.Data_assets_sent { + if techAsset.CommunicationLinks != nil { + for title, commLink := range techAsset.CommunicationLinks { + for i, dataAssetSent := range commLink.DataAssetsSent { referencedAsset := fmt.Sprintf("%v", dataAssetSent) if referencedAsset == dataAsset.ID { // apply the removal referencesDeleted = true // Remove the element at index i // TODO needs more testing - copy(techAsset.Communication_links[title].Data_assets_sent[i:], techAsset.Communication_links[title].Data_assets_sent[i+1:]) // Shift a[i+1:] left one index. - techAsset.Communication_links[title].Data_assets_sent[len(techAsset.Communication_links[title].Data_assets_sent)-1] = "" // Erase last element (write zero value). - x := techAsset.Communication_links[title] - x.Data_assets_sent = techAsset.Communication_links[title].Data_assets_sent[:len(techAsset.Communication_links[title].Data_assets_sent)-1] // Truncate slice. - techAsset.Communication_links[title] = x + copy(techAsset.CommunicationLinks[title].DataAssetsSent[i:], techAsset.CommunicationLinks[title].DataAssetsSent[i+1:]) // Shift a[i+1:] left one index. + techAsset.CommunicationLinks[title].DataAssetsSent[len(techAsset.CommunicationLinks[title].DataAssetsSent)-1] = "" // Erase last element (write zero value). 
+ x := techAsset.CommunicationLinks[title] + x.DataAssetsSent = techAsset.CommunicationLinks[title].DataAssetsSent[:len(techAsset.CommunicationLinks[title].DataAssetsSent)-1] // Truncate slice. + techAsset.CommunicationLinks[title] = x } } - for i, dataAssetReceived := range commLink.Data_assets_received { + for i, dataAssetReceived := range commLink.DataAssetsReceived { referencedAsset := fmt.Sprintf("%v", dataAssetReceived) if referencedAsset == dataAsset.ID { // apply the removal referencesDeleted = true // Remove the element at index i // TODO needs more testing - copy(techAsset.Communication_links[title].Data_assets_received[i:], techAsset.Communication_links[title].Data_assets_received[i+1:]) // Shift a[i+1:] left one index. - techAsset.Communication_links[title].Data_assets_received[len(techAsset.Communication_links[title].Data_assets_received)-1] = "" // Erase last element (write zero value). - x := techAsset.Communication_links[title] - x.Data_assets_received = techAsset.Communication_links[title].Data_assets_received[:len(techAsset.Communication_links[title].Data_assets_received)-1] // Truncate slice. - techAsset.Communication_links[title] = x + copy(techAsset.CommunicationLinks[title].DataAssetsReceived[i:], techAsset.CommunicationLinks[title].DataAssetsReceived[i+1:]) // Shift a[i+1:] left one index. + techAsset.CommunicationLinks[title].DataAssetsReceived[len(techAsset.CommunicationLinks[title].DataAssetsReceived)-1] = "" // Erase last element (write zero value). + x := techAsset.CommunicationLinks[title] + x.DataAssetsReceived = techAsset.CommunicationLinks[title].DataAssetsReceived[:len(techAsset.CommunicationLinks[title].DataAssetsReceived)-1] // Truncate slice. 
+ techAsset.CommunicationLinks[title] = x } } } } } - for indivRiskCatTitle, indivRiskCat := range modelInput.Individual_risk_categories { - if indivRiskCat.Risks_identified != nil { - for indivRiskInstanceTitle, indivRiskInstance := range indivRiskCat.Risks_identified { - if indivRiskInstance.Most_relevant_data_asset == dataAsset.ID { // apply the removal + for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { + if individualRiskCat.RisksIdentified != nil { + for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { + if individualRiskInstance.MostRelevantDataAsset == dataAsset.ID { // apply the removal referencesDeleted = true - x := modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] - x.Most_relevant_data_asset = "" // TODO needs more testing - modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] = x + x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] + x.MostRelevantDataAsset = "" // TODO needs more testing + modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x } } } } // remove it itself - delete(modelInput.Data_assets, title) + delete(modelInput.DataAssets, title) ok = writeModel(context, key, folderNameOfKey, &modelInput, "Data Asset Deletion") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2297,7 +2295,7 @@ func setSharedRuntime(context *gin.Context) { modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, sharedRuntime := range modelInput.Shared_runtimes { + for title, sharedRuntime := range modelInput.SharedRuntimes { if sharedRuntime.ID == context.Param("shared-runtime-id") { payload := payloadSharedRuntime{} err := 
context.BindJSON(&payload) @@ -2313,17 +2311,17 @@ func setSharedRuntime(context *gin.Context) { return } // in order to also update the title, remove the shared runtime from the map and re-insert it (with new key) - delete(modelInput.Shared_runtimes, title) - modelInput.Shared_runtimes[payload.Title] = sharedRuntimeInput + delete(modelInput.SharedRuntimes, title) + modelInput.SharedRuntimes[payload.Title] = sharedRuntimeInput idChanged := sharedRuntimeInput.ID != sharedRuntime.ID if idChanged { // ID-CHANGE-PROPAGATION - for indivRiskCatTitle, indivRiskCat := range modelInput.Individual_risk_categories { - if indivRiskCat.Risks_identified != nil { - for indivRiskInstanceTitle, indivRiskInstance := range indivRiskCat.Risks_identified { - if indivRiskInstance.Most_relevant_shared_runtime == sharedRuntime.ID { // apply the ID change - x := modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] - x.Most_relevant_shared_runtime = sharedRuntimeInput.ID // TODO needs more testing - modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] = x + for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { + if individualRiskCat.RisksIdentified != nil { + for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { + if individualRiskInstance.MostRelevantSharedRuntime == sharedRuntime.ID { // apply the ID change + x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] + x.MostRelevantSharedRuntime = sharedRuntimeInput.ID // TODO needs more testing + modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x } } } @@ -2356,7 +2354,7 @@ func setDataAsset(context *gin.Context) { modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { // yes, here keyed by title in YAML for better 
readability in the YAML file itself - for title, dataAsset := range modelInput.Data_assets { + for title, dataAsset := range modelInput.DataAssets { if dataAsset.ID == context.Param("data-asset-id") { payload := payloadDataAsset{} err := context.BindJSON(&payload) @@ -2372,52 +2370,52 @@ func setDataAsset(context *gin.Context) { return } // in order to also update the title, remove the asset from the map and re-insert it (with new key) - delete(modelInput.Data_assets, title) - modelInput.Data_assets[payload.Title] = dataAssetInput + delete(modelInput.DataAssets, title) + modelInput.DataAssets[payload.Title] = dataAssetInput idChanged := dataAssetInput.ID != dataAsset.ID if idChanged { // ID-CHANGE-PROPAGATION // also update all usages to point to the new (changed) ID !! - for techAssetTitle, techAsset := range modelInput.Technical_assets { - if techAsset.Data_assets_processed != nil { - for i, parsedChangeCandidateAsset := range techAsset.Data_assets_processed { + for techAssetTitle, techAsset := range modelInput.TechnicalAssets { + if techAsset.DataAssetsProcessed != nil { + for i, parsedChangeCandidateAsset := range techAsset.DataAssetsProcessed { referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.Technical_assets[techAssetTitle].Data_assets_processed[i] = dataAssetInput.ID + modelInput.TechnicalAssets[techAssetTitle].DataAssetsProcessed[i] = dataAssetInput.ID } } } - if techAsset.Data_assets_stored != nil { - for i, parsedChangeCandidateAsset := range techAsset.Data_assets_stored { + if techAsset.DataAssetsStored != nil { + for i, parsedChangeCandidateAsset := range techAsset.DataAssetsStored { referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.Technical_assets[techAssetTitle].Data_assets_stored[i] = dataAssetInput.ID + modelInput.TechnicalAssets[techAssetTitle].DataAssetsStored[i] = 
dataAssetInput.ID } } } - if techAsset.Communication_links != nil { - for title, commLink := range techAsset.Communication_links { - for i, dataAssetSent := range commLink.Data_assets_sent { + if techAsset.CommunicationLinks != nil { + for title, commLink := range techAsset.CommunicationLinks { + for i, dataAssetSent := range commLink.DataAssetsSent { referencedAsset := fmt.Sprintf("%v", dataAssetSent) if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.Technical_assets[techAssetTitle].Communication_links[title].Data_assets_sent[i] = dataAssetInput.ID + modelInput.TechnicalAssets[techAssetTitle].CommunicationLinks[title].DataAssetsSent[i] = dataAssetInput.ID } } - for i, dataAssetReceived := range commLink.Data_assets_received { + for i, dataAssetReceived := range commLink.DataAssetsReceived { referencedAsset := fmt.Sprintf("%v", dataAssetReceived) if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.Technical_assets[techAssetTitle].Communication_links[title].Data_assets_received[i] = dataAssetInput.ID + modelInput.TechnicalAssets[techAssetTitle].CommunicationLinks[title].DataAssetsReceived[i] = dataAssetInput.ID } } } } } - for indivRiskCatTitle, indivRiskCat := range modelInput.Individual_risk_categories { - if indivRiskCat.Risks_identified != nil { - for indivRiskInstanceTitle, indivRiskInstance := range indivRiskCat.Risks_identified { - if indivRiskInstance.Most_relevant_data_asset == dataAsset.ID { // apply the ID change - x := modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] - x.Most_relevant_data_asset = dataAssetInput.ID // TODO needs more testing - modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] = x + for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { + if individualRiskCat.RisksIdentified != nil { + for individualRiskInstanceTitle, individualRiskInstance := range 
individualRiskCat.RisksIdentified { + if individualRiskInstance.MostRelevantDataAsset == dataAsset.ID { // apply the ID change + x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] + x.MostRelevantDataAsset = dataAssetInput.ID // TODO needs more testing + modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x } } } @@ -2450,7 +2448,7 @@ func getSharedRuntime(context *gin.Context) { modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, sharedRuntime := range modelInput.Shared_runtimes { + for title, sharedRuntime := range modelInput.SharedRuntimes { if sharedRuntime.ID == context.Param("shared-runtime-id") { context.JSON(http.StatusOK, gin.H{ title: sharedRuntime, @@ -2483,14 +2481,14 @@ func createNewSharedRuntime(context *gin.Context) { return } // yes, here keyed by title in YAML for better readability in the YAML file itself - if _, exists := modelInput.Shared_runtimes[payload.Title]; exists { + if _, exists := modelInput.SharedRuntimes[payload.Title]; exists { context.JSON(http.StatusConflict, gin.H{ "error": "shared runtime with this title already exists", }) return } - // but later it will in memory keyed by it's "id", so do this uniqueness check also - for _, runtime := range modelInput.Shared_runtimes { + // but later it will in memory keyed by its "id", so do this uniqueness check also + for _, runtime := range modelInput.SharedRuntimes { if runtime.ID == payload.Id { context.JSON(http.StatusConflict, gin.H{ "error": "shared runtime with this id already exists", @@ -2498,7 +2496,7 @@ func createNewSharedRuntime(context *gin.Context) { return } } - if !checkTechnicalAssetsExisting(modelInput, payload.Technical_assets_running) { + if !checkTechnicalAssetsExisting(modelInput, payload.TechnicalAssetsRunning) { 
context.JSON(http.StatusBadRequest, gin.H{ "error": "referenced technical asset does not exist", }) @@ -2508,10 +2506,10 @@ func createNewSharedRuntime(context *gin.Context) { if !ok { return } - if modelInput.Shared_runtimes == nil { - modelInput.Shared_runtimes = make(map[string]model.InputSharedRuntime) + if modelInput.SharedRuntimes == nil { + modelInput.SharedRuntimes = make(map[string]model.InputSharedRuntime) } - modelInput.Shared_runtimes[payload.Title] = sharedRuntimeInput + modelInput.SharedRuntimes[payload.Title] = sharedRuntimeInput ok = writeModel(context, key, folderNameOfKey, &modelInput, "Shared Runtime Creation") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2525,7 +2523,7 @@ func createNewSharedRuntime(context *gin.Context) { func checkTechnicalAssetsExisting(modelInput model.ModelInput, techAssetIDs []string) (ok bool) { for _, techAssetID := range techAssetIDs { exists := false - for _, val := range modelInput.Technical_assets { + for _, val := range modelInput.TechnicalAssets { if val.ID == techAssetID { exists = true break @@ -2538,12 +2536,12 @@ func checkTechnicalAssetsExisting(modelInput model.ModelInput, techAssetIDs []st return true } -func populateSharedRuntime(context *gin.Context, payload payloadSharedRuntime) (sharedRuntimeInput model.InputSharedRuntime, ok bool) { +func populateSharedRuntime(_ *gin.Context, payload payloadSharedRuntime) (sharedRuntimeInput model.InputSharedRuntime, ok bool) { sharedRuntimeInput = model.InputSharedRuntime{ - ID: payload.Id, - Description: payload.Description, - Tags: lowerCaseAndTrim(payload.Tags), - Technical_assets_running: payload.Technical_assets_running, + ID: payload.Id, + Description: payload.Description, + Tags: lowerCaseAndTrim(payload.Tags), + TechnicalAssetsRunning: payload.TechnicalAssetsRunning, } return sharedRuntimeInput, true } @@ -2559,23 +2557,23 @@ func deleteSharedRuntime(context *gin.Context) { if ok { referencesDeleted := false // yes, here keyed by title in YAML for better 
readability in the YAML file itself - for title, sharedRuntime := range modelInput.Shared_runtimes { + for title, sharedRuntime := range modelInput.SharedRuntimes { if sharedRuntime.ID == context.Param("shared-runtime-id") { // also remove all usages of this shared runtime !! - for indivRiskCatTitle, indivRiskCat := range modelInput.Individual_risk_categories { - if indivRiskCat.Risks_identified != nil { - for indivRiskInstanceTitle, indivRiskInstance := range indivRiskCat.Risks_identified { - if indivRiskInstance.Most_relevant_shared_runtime == sharedRuntime.ID { // apply the removal + for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { + if individualRiskCat.RisksIdentified != nil { + for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { + if individualRiskInstance.MostRelevantSharedRuntime == sharedRuntime.ID { // apply the removal referencesDeleted = true - x := modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] - x.Most_relevant_shared_runtime = "" // TODO needs more testing - modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] = x + x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] + x.MostRelevantSharedRuntime = "" // TODO needs more testing + modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x } } } } // remove it itself - delete(modelInput.Shared_runtimes, title) + delete(modelInput.SharedRuntimes, title) ok = writeModel(context, key, folderNameOfKey, &modelInput, "Shared Runtime Deletion") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2612,14 +2610,14 @@ func createNewDataAsset(context *gin.Context) { return } // yes, here keyed by title in YAML for better readability in the YAML file itself - if _, exists := modelInput.Data_assets[payload.Title]; exists { + if 
_, exists := modelInput.DataAssets[payload.Title]; exists { context.JSON(http.StatusConflict, gin.H{ "error": "data asset with this title already exists", }) return } - // but later it will in memory keyed by it's "id", so do this uniqueness check also - for _, asset := range modelInput.Data_assets { + // but later it will in memory keyed by its "id", so do this uniqueness check also + for _, asset := range modelInput.DataAssets { if asset.ID == payload.Id { context.JSON(http.StatusConflict, gin.H{ "error": "data asset with this id already exists", @@ -2631,10 +2629,10 @@ func createNewDataAsset(context *gin.Context) { if !ok { return } - if modelInput.Data_assets == nil { - modelInput.Data_assets = make(map[string]model.InputDataAsset) + if modelInput.DataAssets == nil { + modelInput.DataAssets = make(map[string]model.InputDataAsset) } - modelInput.Data_assets[payload.Title] = dataAssetInput + modelInput.DataAssets[payload.Title] = dataAssetInput ok = writeModel(context, key, folderNameOfKey, &modelInput, "Data Asset Creation") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2672,17 +2670,17 @@ func populateDataAsset(context *gin.Context, payload payloadDataAsset) (dataAsse return dataAssetInput, false } dataAssetInput = model.InputDataAsset{ - ID: payload.Id, - Description: payload.Description, - Usage: usage.String(), - Tags: lowerCaseAndTrim(payload.Tags), - Origin: payload.Origin, - Owner: payload.Owner, - Quantity: quantity.String(), - Confidentiality: confidentiality.String(), - Integrity: integrity.String(), - Availability: availability.String(), - Justification_cia_rating: payload.Justification_cia_rating, + ID: payload.Id, + Description: payload.Description, + Usage: usage.String(), + Tags: lowerCaseAndTrim(payload.Tags), + Origin: payload.Origin, + Owner: payload.Owner, + Quantity: quantity.String(), + Confidentiality: confidentiality.String(), + Integrity: integrity.String(), + Availability: availability.String(), + JustificationCiaRating: 
payload.JustificationCiaRating, } return dataAssetInput, true } @@ -2694,9 +2692,9 @@ func getDataAssets(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, model.Data_assets) + context.JSON(http.StatusOK, aModel.DataAssets) } } @@ -2707,9 +2705,9 @@ func getTrustBoundaries(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, model.Trust_boundaries) + context.JSON(http.StatusOK, aModel.TrustBoundaries) } } @@ -2720,9 +2718,9 @@ func getSharedRuntimes(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, model.Shared_runtimes) + context.JSON(http.StatusOK, aModel.SharedRuntimes) } } @@ -2743,9 +2741,9 @@ func getModel(context *gin.Context) { defer unlockFolder(folderNameOfKey) _, yamlText, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { - tmpResultFile, err := ioutil.TempFile(model.TempFolder, "threagile-*.yaml") + tmpResultFile, err := os.CreateTemp(model.TempFolder, "threagile-*.yaml") checkErr(err) - err = ioutil.WriteFile(tmpResultFile.Name(), []byte(yamlText), 0400) + err = os.WriteFile(tmpResultFile.Name(), []byte(yamlText), 0400) if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -2753,16 +2751,16 @@ func getModel(context *gin.Context) { }) return } - defer 
os.Remove(tmpResultFile.Name()) + defer func() { _ = os.Remove(tmpResultFile.Name()) }() context.FileAttachment(tmpResultFile.Name(), "threagile.yaml") } } type payloadModels struct { - ID string `json:"id"` - Title string `json:"title"` - Timestamp_created time.Time `json:"timestamp_created"` - Timestamp_modified time.Time `json:"timestamp_modified"` + ID string `json:"id"` + Title string `json:"title"` + TimestampCreated time.Time `json:"timestamp_created"` + TimestampModified time.Time `json:"timestamp_modified"` } type payloadCover struct { @@ -2772,10 +2770,10 @@ type payloadCover struct { } type payloadOverview struct { - Management_summary_comment string `json:"management_summary_comment"` - Business_criticality string `json:"business_criticality"` - Business_overview model.Overview `json:"business_overview"` - Technical_overview model.Overview `json:"technical_overview"` + ManagementSummaryComment string `json:"management_summary_comment"` + BusinessCriticality string `json:"business_criticality"` + BusinessOverview model.Overview `json:"business_overview"` + TechnicalOverview model.Overview `json:"technical_overview"` } type payloadAbuseCases map[string]string @@ -2783,26 +2781,26 @@ type payloadAbuseCases map[string]string type payloadSecurityRequirements map[string]string type payloadDataAsset struct { - Title string `json:"title"` - Id string `json:"id"` - Description string `json:"description"` - Usage string `json:"usage"` - Tags []string `json:"tags"` - Origin string `json:"origin"` - Owner string `json:"owner"` - Quantity string `json:"quantity"` - Confidentiality string `json:"confidentiality"` - Integrity string `json:"integrity"` - Availability string `json:"availability"` - Justification_cia_rating string `json:"justification_cia_rating"` + Title string `json:"title"` + Id string `json:"id"` + Description string `json:"description"` + Usage string `json:"usage"` + Tags []string `json:"tags"` + Origin string `json:"origin"` + Owner string 
`json:"owner"` + Quantity string `json:"quantity"` + Confidentiality string `json:"confidentiality"` + Integrity string `json:"integrity"` + Availability string `json:"availability"` + JustificationCiaRating string `json:"justification_cia_rating"` } type payloadSharedRuntime struct { - Title string `json:"title"` - Id string `json:"id"` - Description string `json:"description"` - Tags []string `json:"tags"` - Technical_assets_running []string `json:"technical_assets_running"` + Title string `json:"title"` + Id string `json:"id"` + Description string `json:"description"` + Tags []string `json:"tags"` + TechnicalAssetsRunning []string `json:"technical_assets_running"` } func setSecurityRequirements(context *gin.Context) { @@ -2823,7 +2821,7 @@ func setSecurityRequirements(context *gin.Context) { }) return } - modelInput.Security_requirements = payload + modelInput.SecurityRequirements = payload ok = writeModel(context, key, folderNameOfKey, &modelInput, "Security Requirements Update") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2840,9 +2838,9 @@ func getSecurityRequirements(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, model.Security_requirements) + context.JSON(http.StatusOK, aModel.SecurityRequirements) } } @@ -2864,7 +2862,7 @@ func setAbuseCases(context *gin.Context) { }) return } - modelInput.Abuse_cases = payload + modelInput.AbuseCases = payload ok = writeModel(context, key, folderNameOfKey, &modelInput, "Abuse Cases Update") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2881,9 +2879,9 @@ func getAbuseCases(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + aModel, _, ok := 
readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, model.Abuse_cases) + context.JSON(http.StatusOK, aModel.AbuseCases) } } @@ -2905,17 +2903,17 @@ func setOverview(context *gin.Context) { }) return } - criticality, err := model.ParseCriticality(payload.Business_criticality) + criticality, err := model.ParseCriticality(payload.BusinessCriticality) if err != nil { handleErrorInServiceCall(err, context) return } - modelInput.Management_summary_comment = payload.Management_summary_comment - modelInput.Business_criticality = criticality.String() - modelInput.Business_overview.Description = payload.Business_overview.Description - modelInput.Business_overview.Images = payload.Business_overview.Images - modelInput.Technical_overview.Description = payload.Technical_overview.Description - modelInput.Technical_overview.Images = payload.Technical_overview.Images + modelInput.ManagementSummaryComment = payload.ManagementSummaryComment + modelInput.BusinessCriticality = criticality.String() + modelInput.BusinessOverview.Description = payload.BusinessOverview.Description + modelInput.BusinessOverview.Images = payload.BusinessOverview.Images + modelInput.TechnicalOverview.Description = payload.TechnicalOverview.Description + modelInput.TechnicalOverview.Images = payload.TechnicalOverview.Images ok = writeModel(context, key, folderNameOfKey, &modelInput, "Overview Update") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2939,13 +2937,13 @@ func getOverview(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { context.JSON(http.StatusOK, gin.H{ - "management_summary_comment": model.Management_summary_comment, - "business_criticality": model.Business_criticality, - "business_overview": model.Business_overview, - 
"technical_overview": model.Technical_overview, + "management_summary_comment": aModel.ManagementSummaryComment, + "business_criticality": aModel.BusinessCriticality, + "business_overview": aModel.BusinessOverview, + "technical_overview": aModel.TechnicalOverview, }) } } @@ -2989,12 +2987,12 @@ func getCover(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { context.JSON(http.StatusOK, gin.H{ - "title": model.Title, - "date": model.Date, - "author": model.Author, + "title": aModel.Title, + "date": aModel.Date, + "author": aModel.Author, }) } } @@ -3012,8 +3010,8 @@ func createNewModel(context *gin.Context) { lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - uuid := uuid.New().String() - err := os.Mkdir(folderNameForModel(folderNameOfKey, uuid), 0700) + aUuid := uuid.New().String() + err := os.Mkdir(folderNameForModel(folderNameOfKey, aUuid), 0700) if err != nil { context.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to create model", @@ -3021,7 +3019,7 @@ func createNewModel(context *gin.Context) { return } - yaml := `title: New Threat Model + aYaml := `title: New Threat Model threagile_version: ` + model.ThreagileVersion + ` author: name: "" @@ -3052,11 +3050,11 @@ diagram_tweak_suppress_edge_labels: false diagram_tweak_invisible_connections_between_assets: [] diagram_tweak_same_rank_assets: []` - ok = writeModelYAML(context, yaml, key, folderNameForModel(folderNameOfKey, uuid), "New Model Creation", true) + ok = writeModelYAML(context, aYaml, key, folderNameForModel(folderNameOfKey, aUuid), "New Model Creation", true) if ok { context.JSON(http.StatusCreated, gin.H{ "message": "model created", - "id": uuid, + "id": aUuid, }) } } @@ -3070,7 +3068,7 @@ func listModels(context *gin.Context) { // TODO currently returns 
error when any defer unlockFolder(folderNameOfKey) result := make([]payloadModels, 0) - modelFolders, err := ioutil.ReadDir(folderNameOfKey) + modelFolders, err := os.ReadDir(folderNameOfKey) if err != nil { log.Println(err) context.JSON(http.StatusNotFound, gin.H{ @@ -3078,9 +3076,9 @@ func listModels(context *gin.Context) { // TODO currently returns error when any }) return } - for _, fileInfo := range modelFolders { - if fileInfo.IsDir() { - modelStat, err := os.Stat(folderNameOfKey + "/" + fileInfo.Name() + "/threagile.yaml") + for _, dirEntry := range modelFolders { + if dirEntry.IsDir() { + modelStat, err := os.Stat(folderNameOfKey + "/" + dirEntry.Name() + "/threagile.yaml") if err != nil { log.Println(err) context.JSON(http.StatusNotFound, gin.H{ @@ -3088,15 +3086,23 @@ func listModels(context *gin.Context) { // TODO currently returns error when any }) return } - model, _, ok := readModel(context, fileInfo.Name(), key, folderNameOfKey) + aModel, _, ok := readModel(context, dirEntry.Name(), key, folderNameOfKey) if !ok { return } + fileInfo, err := dirEntry.Info() + if err != nil { + log.Println(err) + context.JSON(http.StatusNotFound, gin.H{ + "error": "unable to get file info", + }) + return + } result = append(result, payloadModels{ - ID: fileInfo.Name(), - Title: model.Title, - Timestamp_created: fileInfo.ModTime(), - Timestamp_modified: modelStat.ModTime(), + ID: dirEntry.Name(), + Title: aModel.Title, + TimestampCreated: fileInfo.ModTime(), + TimestampModified: modelStat.ModTime(), }) } } @@ -3156,7 +3162,7 @@ func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfK }) return modelInputResult, yamlText, false } - aesgcm, err := cipher.NewGCM(block) + aesGcm, err := cipher.NewGCM(block) if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -3165,7 +3171,7 @@ func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfK return modelInputResult, yamlText, false } - fileBytes, err 
:= ioutil.ReadFile(modelFolder + "/threagile.yaml") + fileBytes, err := os.ReadFile(modelFolder + "/threagile.yaml") if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -3176,7 +3182,7 @@ func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfK nonce := fileBytes[0:12] ciphertext := fileBytes[12:] - plaintext, err := aesgcm.Open(nil, nonce, ciphertext, nil) + plaintext, err := aesGcm.Open(nil, nonce, ciphertext, nil) if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -3194,7 +3200,7 @@ func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfK return modelInputResult, yamlText, false } buf := new(bytes.Buffer) - buf.ReadFrom(r) + _, _ = buf.ReadFrom(r) modelInput := model.ModelInput{} yamlBytes := buf.Bytes() err = yaml.Unmarshal(yamlBytes, &modelInput) @@ -3211,7 +3217,7 @@ func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfK func writeModel(context *gin.Context, key []byte, folderNameOfKey string, modelInput *model.ModelInput, changeReasonForHistory string) (ok bool) { modelFolder, ok := checkModelFolder(context, context.Param("model-id"), folderNameOfKey) if ok { - modelInput.Threagile_version = model.ThreagileVersion + modelInput.ThreagileVersion = model.ThreagileVersion yamlBytes, err := yaml.Marshal(modelInput) if err != nil { log.Println(err) @@ -3234,8 +3240,8 @@ func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder s } var b bytes.Buffer w := gzip.NewWriter(&b) - w.Write([]byte(yaml)) - w.Close() + _, _ = w.Write([]byte(yaml)) + _ = w.Close() plaintext := b.Bytes() cryptoKey := generateKeyFromAlreadyStrongRandomInput(key) block, err := aes.NewCipher(cryptoKey) @@ -3255,7 +3261,7 @@ func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder s }) return false } - aesgcm, err := cipher.NewGCM(block) + aesGcm, err := cipher.NewGCM(block) if err != nil { log.Println(err) 
context.JSON(http.StatusInternalServerError, gin.H{ @@ -3263,7 +3269,7 @@ func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder s }) return false } - ciphertext := aesgcm.Seal(nil, nonce, plaintext, nil) + ciphertext := aesGcm.Seal(nil, nonce, plaintext, nil) if !skipBackup { err = backupModelToHistory(modelFolder, changeReasonForHistory) if err != nil { @@ -3282,9 +3288,9 @@ func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder s }) return false } - f.Write(nonce) - f.Write(ciphertext) - f.Close() + _, _ = f.Write(nonce) + _, _ = f.Write(ciphertext) + _ = f.Close() return true } @@ -3296,17 +3302,17 @@ func backupModelToHistory(modelFolder string, changeReasonForHistory string) (er return err } } - input, err := ioutil.ReadFile(modelFolder + "/threagile.yaml") + input, err := os.ReadFile(modelFolder + "/threagile.yaml") if err != nil { return err } historyFile := historyFolder + "/" + time.Now().Format("2006-01-02 15:04:05") + " " + changeReasonForHistory + ".backup" - err = ioutil.WriteFile(historyFile, input, 0400) + err = os.WriteFile(historyFile, input, 0400) if err != nil { return err } // now delete any old files if over limit to keep - files, err := ioutil.ReadDir(historyFolder) + files, err := os.ReadDir(historyFolder) if err != nil { return err } @@ -3366,7 +3372,7 @@ func checkObjectCreationThrottler(context *gin.Context, typeName string) bool { // remove all elements older than 3 minutes (= 180000000000 ns) now := time.Now().UnixNano() cutoff := now - 180000000000 - for keyCheck, _ := range createdObjectsThrottler { + for keyCheck := range createdObjectsThrottler { for i := 0; i < len(createdObjectsThrottler[keyCheck]); i++ { if createdObjectsThrottler[keyCheck][i] < cutoff { // Remove the element at index i from slice (safe while looping using i as iterator) @@ -3503,7 +3509,7 @@ func checkTokenToFolderName(context *gin.Context) (folderNameOfKey string, key [ }) return folderNameOfKey, key, false } - 
timeoutStruct.lastAcessedNanotime = time.Now().UnixNano() + timeoutStruct.lastAccessedNanoTime = time.Now().UnixNano() return folderNameOfKey, key, true } else { context.JSON(http.StatusNotFound, gin.H{ @@ -3597,7 +3603,7 @@ func parseCommandlineArgs() { license := flag.Bool("print-license", false, "print license information") flag.Usage = func() { printLogo() - fmt.Fprintf(os.Stderr, "Usage: threagile [options]") + _, _ = fmt.Fprintf(os.Stderr, "Usage: threagile [options]") fmt.Println() fmt.Println() fmt.Println() @@ -3697,12 +3703,12 @@ func parseCommandlineArgs() { fmt.Println("----------------------") fmt.Println("Built-in model macros:") fmt.Println("----------------------") - fmt.Println(add_build_pipeline.GetMacroDetails().ID, "-->", add_build_pipeline.GetMacroDetails().Title) - fmt.Println(add_vault.GetMacroDetails().ID, "-->", add_vault.GetMacroDetails().Title) - fmt.Println(pretty_print.GetMacroDetails().ID, "-->", pretty_print.GetMacroDetails().Title) - fmt.Println(remove_unused_tags.GetMacroDetails().ID, "-->", remove_unused_tags.GetMacroDetails().Title) - fmt.Println(seed_risk_tracking.GetMacroDetails().ID, "-->", seed_risk_tracking.GetMacroDetails().Title) - fmt.Println(seed_tags.GetMacroDetails().ID, "-->", seed_tags.GetMacroDetails().Title) + fmt.Println(addbuildpipeline.GetMacroDetails().ID, "-->", addbuildpipeline.GetMacroDetails().Title) + fmt.Println(addvault.GetMacroDetails().ID, "-->", addvault.GetMacroDetails().Title) + fmt.Println(prettyprint.GetMacroDetails().ID, "-->", prettyprint.GetMacroDetails().Title) + fmt.Println(removeunusedtags.GetMacroDetails().ID, "-->", removeunusedtags.GetMacroDetails().Title) + fmt.Println(seedrisktracking.GetMacroDetails().ID, "-->", seedrisktracking.GetMacroDetails().Title) + fmt.Println(seedtags.GetMacroDetails().ID, "-->", seedtags.GetMacroDetails().Title) fmt.Println() os.Exit(0) } @@ -3721,48 +3727,48 @@ func parseCommandlineArgs() { fmt.Println("--------------------") fmt.Println("Built-in risk 
rules:") fmt.Println("--------------------") - fmt.Println(accidental_secret_leak.Category().Id, "-->", accidental_secret_leak.Category().Title, "--> with tags:", accidental_secret_leak.SupportedTags()) - fmt.Println(code_backdooring.Category().Id, "-->", code_backdooring.Category().Title, "--> with tags:", code_backdooring.SupportedTags()) - fmt.Println(container_baseimage_backdooring.Category().Id, "-->", container_baseimage_backdooring.Category().Title, "--> with tags:", container_baseimage_backdooring.SupportedTags()) - fmt.Println(container_platform_escape.Category().Id, "-->", container_platform_escape.Category().Title, "--> with tags:", container_platform_escape.SupportedTags()) - fmt.Println(cross_site_request_forgery.Category().Id, "-->", cross_site_request_forgery.Category().Title, "--> with tags:", cross_site_request_forgery.SupportedTags()) - fmt.Println(cross_site_scripting.Category().Id, "-->", cross_site_scripting.Category().Title, "--> with tags:", cross_site_scripting.SupportedTags()) - fmt.Println(dos_risky_access_across_trust_boundary.Category().Id, "-->", dos_risky_access_across_trust_boundary.Category().Title, "--> with tags:", dos_risky_access_across_trust_boundary.SupportedTags()) - fmt.Println(incomplete_model.Category().Id, "-->", incomplete_model.Category().Title, "--> with tags:", incomplete_model.SupportedTags()) - fmt.Println(ldap_injection.Category().Id, "-->", ldap_injection.Category().Title, "--> with tags:", ldap_injection.SupportedTags()) - fmt.Println(missing_authentication.Category().Id, "-->", missing_authentication.Category().Title, "--> with tags:", missing_authentication.SupportedTags()) - fmt.Println(missing_authentication_second_factor.Category().Id, "-->", missing_authentication_second_factor.Category().Title, "--> with tags:", missing_authentication_second_factor.SupportedTags()) - fmt.Println(missing_build_infrastructure.Category().Id, "-->", missing_build_infrastructure.Category().Title, "--> with tags:", 
missing_build_infrastructure.SupportedTags()) - fmt.Println(missing_cloud_hardening.Category().Id, "-->", missing_cloud_hardening.Category().Title, "--> with tags:", missing_cloud_hardening.SupportedTags()) - fmt.Println(missing_file_validation.Category().Id, "-->", missing_file_validation.Category().Title, "--> with tags:", missing_file_validation.SupportedTags()) - fmt.Println(missing_hardening.Category().Id, "-->", missing_hardening.Category().Title, "--> with tags:", missing_hardening.SupportedTags()) - fmt.Println(missing_identity_propagation.Category().Id, "-->", missing_identity_propagation.Category().Title, "--> with tags:", missing_identity_propagation.SupportedTags()) - fmt.Println(missing_identity_provider_isolation.Category().Id, "-->", missing_identity_provider_isolation.Category().Title, "--> with tags:", missing_identity_provider_isolation.SupportedTags()) - fmt.Println(missing_identity_store.Category().Id, "-->", missing_identity_store.Category().Title, "--> with tags:", missing_identity_store.SupportedTags()) - fmt.Println(missing_network_segmentation.Category().Id, "-->", missing_network_segmentation.Category().Title, "--> with tags:", missing_network_segmentation.SupportedTags()) - fmt.Println(missing_vault.Category().Id, "-->", missing_vault.Category().Title, "--> with tags:", missing_vault.SupportedTags()) - fmt.Println(missing_vault_isolation.Category().Id, "-->", missing_vault_isolation.Category().Title, "--> with tags:", missing_vault_isolation.SupportedTags()) - fmt.Println(missing_waf.Category().Id, "-->", missing_waf.Category().Title, "--> with tags:", missing_waf.SupportedTags()) - fmt.Println(mixed_targets_on_shared_runtime.Category().Id, "-->", mixed_targets_on_shared_runtime.Category().Title, "--> with tags:", mixed_targets_on_shared_runtime.SupportedTags()) - fmt.Println(path_traversal.Category().Id, "-->", path_traversal.Category().Title, "--> with tags:", path_traversal.SupportedTags()) - 
fmt.Println(push_instead_of_pull_deployment.Category().Id, "-->", push_instead_of_pull_deployment.Category().Title, "--> with tags:", push_instead_of_pull_deployment.SupportedTags()) - fmt.Println(search_query_injection.Category().Id, "-->", search_query_injection.Category().Title, "--> with tags:", search_query_injection.SupportedTags()) - fmt.Println(server_side_request_forgery.Category().Id, "-->", server_side_request_forgery.Category().Title, "--> with tags:", server_side_request_forgery.SupportedTags()) - fmt.Println(service_registry_poisoning.Category().Id, "-->", service_registry_poisoning.Category().Title, "--> with tags:", service_registry_poisoning.SupportedTags()) - fmt.Println(sql_nosql_injection.Category().Id, "-->", sql_nosql_injection.Category().Title, "--> with tags:", sql_nosql_injection.SupportedTags()) - fmt.Println(unchecked_deployment.Category().Id, "-->", unchecked_deployment.Category().Title, "--> with tags:", unchecked_deployment.SupportedTags()) - fmt.Println(unencrypted_asset.Category().Id, "-->", unencrypted_asset.Category().Title, "--> with tags:", unencrypted_asset.SupportedTags()) - fmt.Println(unencrypted_communication.Category().Id, "-->", unencrypted_communication.Category().Title, "--> with tags:", unencrypted_communication.SupportedTags()) - fmt.Println(unguarded_access_from_internet.Category().Id, "-->", unguarded_access_from_internet.Category().Title, "--> with tags:", unguarded_access_from_internet.SupportedTags()) - fmt.Println(unguarded_direct_datastore_access.Category().Id, "-->", unguarded_direct_datastore_access.Category().Title, "--> with tags:", unguarded_direct_datastore_access.SupportedTags()) - fmt.Println(unnecessary_communication_link.Category().Id, "-->", unnecessary_communication_link.Category().Title, "--> with tags:", unnecessary_communication_link.SupportedTags()) - fmt.Println(unnecessary_data_asset.Category().Id, "-->", unnecessary_data_asset.Category().Title, "--> with tags:", 
unnecessary_data_asset.SupportedTags()) - fmt.Println(unnecessary_data_transfer.Category().Id, "-->", unnecessary_data_transfer.Category().Title, "--> with tags:", unnecessary_data_transfer.SupportedTags()) - fmt.Println(unnecessary_technical_asset.Category().Id, "-->", unnecessary_technical_asset.Category().Title, "--> with tags:", unnecessary_technical_asset.SupportedTags()) - fmt.Println(untrusted_deserialization.Category().Id, "-->", untrusted_deserialization.Category().Title, "--> with tags:", untrusted_deserialization.SupportedTags()) - fmt.Println(wrong_communication_link_content.Category().Id, "-->", wrong_communication_link_content.Category().Title, "--> with tags:", wrong_communication_link_content.SupportedTags()) - fmt.Println(wrong_trust_boundary_content.Category().Id, "-->", wrong_trust_boundary_content.Category().Title, "--> with tags:", wrong_trust_boundary_content.SupportedTags()) - fmt.Println(xml_external_entity.Category().Id, "-->", xml_external_entity.Category().Title, "--> with tags:", xml_external_entity.SupportedTags()) + fmt.Println(accidentalsecretleak.Category().Id, "-->", accidentalsecretleak.Category().Title, "--> with tags:", accidentalsecretleak.SupportedTags()) + fmt.Println(codebackdooring.Category().Id, "-->", codebackdooring.Category().Title, "--> with tags:", codebackdooring.SupportedTags()) + fmt.Println(containerbaseimagebackdooring.Category().Id, "-->", containerbaseimagebackdooring.Category().Title, "--> with tags:", containerbaseimagebackdooring.SupportedTags()) + fmt.Println(containerplatformescape.Category().Id, "-->", containerplatformescape.Category().Title, "--> with tags:", containerplatformescape.SupportedTags()) + fmt.Println(crosssiterequestforgery.Category().Id, "-->", crosssiterequestforgery.Category().Title, "--> with tags:", crosssiterequestforgery.SupportedTags()) + fmt.Println(crosssitescripting.Category().Id, "-->", crosssitescripting.Category().Title, "--> with tags:", crosssitescripting.SupportedTags()) + 
fmt.Println(dosriskyaccessacrosstrustboundary.Category().Id, "-->", dosriskyaccessacrosstrustboundary.Category().Title, "--> with tags:", dosriskyaccessacrosstrustboundary.SupportedTags()) + fmt.Println(incompletemodel.Category().Id, "-->", incompletemodel.Category().Title, "--> with tags:", incompletemodel.SupportedTags()) + fmt.Println(ldapinjection.Category().Id, "-->", ldapinjection.Category().Title, "--> with tags:", ldapinjection.SupportedTags()) + fmt.Println(missingauthentication.Category().Id, "-->", missingauthentication.Category().Title, "--> with tags:", missingauthentication.SupportedTags()) + fmt.Println(missingauthenticationsecondfactor.Category().Id, "-->", missingauthenticationsecondfactor.Category().Title, "--> with tags:", missingauthenticationsecondfactor.SupportedTags()) + fmt.Println(missingbuildinfrastructure.Category().Id, "-->", missingbuildinfrastructure.Category().Title, "--> with tags:", missingbuildinfrastructure.SupportedTags()) + fmt.Println(missingcloudhardening.Category().Id, "-->", missingcloudhardening.Category().Title, "--> with tags:", missingcloudhardening.SupportedTags()) + fmt.Println(missingfilevalidation.Category().Id, "-->", missingfilevalidation.Category().Title, "--> with tags:", missingfilevalidation.SupportedTags()) + fmt.Println(missinghardening.Category().Id, "-->", missinghardening.Category().Title, "--> with tags:", missinghardening.SupportedTags()) + fmt.Println(missingidentitypropagation.Category().Id, "-->", missingidentitypropagation.Category().Title, "--> with tags:", missingidentitypropagation.SupportedTags()) + fmt.Println(missingidentityproviderisolation.Category().Id, "-->", missingidentityproviderisolation.Category().Title, "--> with tags:", missingidentityproviderisolation.SupportedTags()) + fmt.Println(missingidentitystore.Category().Id, "-->", missingidentitystore.Category().Title, "--> with tags:", missingidentitystore.SupportedTags()) + fmt.Println(missingnetworksegmentation.Category().Id, "-->", 
missingnetworksegmentation.Category().Title, "--> with tags:", missingnetworksegmentation.SupportedTags()) + fmt.Println(missingvault.Category().Id, "-->", missingvault.Category().Title, "--> with tags:", missingvault.SupportedTags()) + fmt.Println(missingvaultisolation.Category().Id, "-->", missingvaultisolation.Category().Title, "--> with tags:", missingvaultisolation.SupportedTags()) + fmt.Println(missingwaf.Category().Id, "-->", missingwaf.Category().Title, "--> with tags:", missingwaf.SupportedTags()) + fmt.Println(mixedtargetsonsharedruntime.Category().Id, "-->", mixedtargetsonsharedruntime.Category().Title, "--> with tags:", mixedtargetsonsharedruntime.SupportedTags()) + fmt.Println(pathtraversal.Category().Id, "-->", pathtraversal.Category().Title, "--> with tags:", pathtraversal.SupportedTags()) + fmt.Println(pushinsteadofpulldeployment.Category().Id, "-->", pushinsteadofpulldeployment.Category().Title, "--> with tags:", pushinsteadofpulldeployment.SupportedTags()) + fmt.Println(searchqueryinjection.Category().Id, "-->", searchqueryinjection.Category().Title, "--> with tags:", searchqueryinjection.SupportedTags()) + fmt.Println(serversiderequestforgery.Category().Id, "-->", serversiderequestforgery.Category().Title, "--> with tags:", serversiderequestforgery.SupportedTags()) + fmt.Println(serviceregistrypoisoning.Category().Id, "-->", serviceregistrypoisoning.Category().Title, "--> with tags:", serviceregistrypoisoning.SupportedTags()) + fmt.Println(sqlnosqlinjection.Category().Id, "-->", sqlnosqlinjection.Category().Title, "--> with tags:", sqlnosqlinjection.SupportedTags()) + fmt.Println(uncheckeddeployment.Category().Id, "-->", uncheckeddeployment.Category().Title, "--> with tags:", uncheckeddeployment.SupportedTags()) + fmt.Println(unencryptedasset.Category().Id, "-->", unencryptedasset.Category().Title, "--> with tags:", unencryptedasset.SupportedTags()) + fmt.Println(unencryptedcommunication.Category().Id, "-->", 
unencryptedcommunication.Category().Title, "--> with tags:", unencryptedcommunication.SupportedTags()) + fmt.Println(unguardedaccessfrominternet.Category().Id, "-->", unguardedaccessfrominternet.Category().Title, "--> with tags:", unguardedaccessfrominternet.SupportedTags()) + fmt.Println(unguardeddirectdatastoreaccess.Category().Id, "-->", unguardeddirectdatastoreaccess.Category().Title, "--> with tags:", unguardeddirectdatastoreaccess.SupportedTags()) + fmt.Println(unnecessarycommunicationlink.Category().Id, "-->", unnecessarycommunicationlink.Category().Title, "--> with tags:", unnecessarycommunicationlink.SupportedTags()) + fmt.Println(unnecessarydataasset.Category().Id, "-->", unnecessarydataasset.Category().Title, "--> with tags:", unnecessarydataasset.SupportedTags()) + fmt.Println(unnecessarydatatransfer.Category().Id, "-->", unnecessarydatatransfer.Category().Title, "--> with tags:", unnecessarydatatransfer.SupportedTags()) + fmt.Println(unnecessarytechnicalasset.Category().Id, "-->", unnecessarytechnicalasset.Category().Title, "--> with tags:", unnecessarytechnicalasset.SupportedTags()) + fmt.Println(untrusteddeserialization.Category().Id, "-->", untrusteddeserialization.Category().Title, "--> with tags:", untrusteddeserialization.SupportedTags()) + fmt.Println(wrongcommunicationlinkcontent.Category().Id, "-->", wrongcommunicationlinkcontent.Category().Title, "--> with tags:", wrongcommunicationlinkcontent.SupportedTags()) + fmt.Println(wrongtrustboundarycontent.Category().Id, "-->", wrongtrustboundarycontent.Category().Title, "--> with tags:", wrongtrustboundarycontent.SupportedTags()) + fmt.Println(xmlexternalentity.Category().Id, "-->", xmlexternalentity.Category().Title, "--> with tags:", xmlexternalentity.SupportedTags()) fmt.Println() os.Exit(0) } @@ -3798,12 +3804,12 @@ func parseCommandlineArgs() { printLogo() fmt.Println("Explanation for the model macros:") fmt.Println() - fmt.Printf("%v: %v\n", add_build_pipeline.GetMacroDetails().ID, 
add_build_pipeline.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", add_vault.GetMacroDetails().ID, add_vault.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", pretty_print.GetMacroDetails().ID, pretty_print.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", remove_unused_tags.GetMacroDetails().ID, remove_unused_tags.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", seed_risk_tracking.GetMacroDetails().ID, seed_risk_tracking.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", seed_tags.GetMacroDetails().ID, seed_tags.GetMacroDetails().Description) + fmt.Printf("%v: %v\n", addbuildpipeline.GetMacroDetails().ID, addbuildpipeline.GetMacroDetails().Description) + fmt.Printf("%v: %v\n", addvault.GetMacroDetails().ID, addvault.GetMacroDetails().Description) + fmt.Printf("%v: %v\n", prettyprint.GetMacroDetails().ID, prettyprint.GetMacroDetails().Description) + fmt.Printf("%v: %v\n", removeunusedtags.GetMacroDetails().ID, removeunusedtags.GetMacroDetails().Description) + fmt.Printf("%v: %v\n", seedrisktracking.GetMacroDetails().ID, seedrisktracking.GetMacroDetails().Description) + fmt.Printf("%v: %v\n", seedtags.GetMacroDetails().ID, seedtags.GetMacroDetails().Description) fmt.Println() os.Exit(0) @@ -3812,48 +3818,48 @@ func parseCommandlineArgs() { printLogo() fmt.Println("Explanation for risk rules:") fmt.Println() - fmt.Printf("%v: %v\n", accidental_secret_leak.Category().Id, accidental_secret_leak.Category().Description) - fmt.Printf("%v: %v\n", code_backdooring.Category().Id, code_backdooring.Category().Description) - fmt.Printf("%v: %v\n", container_baseimage_backdooring.Category().Id, container_baseimage_backdooring.Category().Description) - fmt.Printf("%v: %v\n", container_platform_escape.Category().Id, container_platform_escape.Category().Description) - fmt.Printf("%v: %v\n", cross_site_request_forgery.Category().Id, cross_site_request_forgery.Category().Description) - fmt.Printf("%v: %v\n", cross_site_scripting.Category().Id, 
cross_site_scripting.Category().Description) - fmt.Printf("%v: %v\n", dos_risky_access_across_trust_boundary.Category().Id, dos_risky_access_across_trust_boundary.Category().Description) - fmt.Printf("%v: %v\n", incomplete_model.Category().Id, incomplete_model.Category().Description) - fmt.Printf("%v: %v\n", ldap_injection.Category().Id, ldap_injection.Category().Description) - fmt.Printf("%v: %v\n", missing_authentication.Category().Id, missing_authentication.Category().Description) - fmt.Printf("%v: %v\n", missing_authentication_second_factor.Category().Id, missing_authentication_second_factor.Category().Description) - fmt.Printf("%v: %v\n", missing_build_infrastructure.Category().Id, missing_build_infrastructure.Category().Description) - fmt.Printf("%v: %v\n", missing_cloud_hardening.Category().Id, missing_cloud_hardening.Category().Description) - fmt.Printf("%v: %v\n", missing_file_validation.Category().Id, missing_file_validation.Category().Description) - fmt.Printf("%v: %v\n", missing_hardening.Category().Id, missing_hardening.Category().Description) - fmt.Printf("%v: %v\n", missing_identity_propagation.Category().Id, missing_identity_propagation.Category().Description) - fmt.Printf("%v: %v\n", missing_identity_provider_isolation.Category().Id, missing_identity_provider_isolation.Category().Description) - fmt.Printf("%v: %v\n", missing_identity_store.Category().Id, missing_identity_store.Category().Description) - fmt.Printf("%v: %v\n", missing_network_segmentation.Category().Id, missing_network_segmentation.Category().Description) - fmt.Printf("%v: %v\n", missing_vault.Category().Id, missing_vault.Category().Description) - fmt.Printf("%v: %v\n", missing_vault_isolation.Category().Id, missing_vault_isolation.Category().Description) - fmt.Printf("%v: %v\n", missing_waf.Category().Id, missing_waf.Category().Description) - fmt.Printf("%v: %v\n", mixed_targets_on_shared_runtime.Category().Id, mixed_targets_on_shared_runtime.Category().Description) - 
fmt.Printf("%v: %v\n", path_traversal.Category().Id, path_traversal.Category().Description) - fmt.Printf("%v: %v\n", push_instead_of_pull_deployment.Category().Id, push_instead_of_pull_deployment.Category().Description) - fmt.Printf("%v: %v\n", search_query_injection.Category().Id, search_query_injection.Category().Description) - fmt.Printf("%v: %v\n", server_side_request_forgery.Category().Id, server_side_request_forgery.Category().Description) - fmt.Printf("%v: %v\n", service_registry_poisoning.Category().Id, service_registry_poisoning.Category().Description) - fmt.Printf("%v: %v\n", sql_nosql_injection.Category().Id, sql_nosql_injection.Category().Description) - fmt.Printf("%v: %v\n", unchecked_deployment.Category().Id, unchecked_deployment.Category().Description) - fmt.Printf("%v: %v\n", unencrypted_asset.Category().Id, unencrypted_asset.Category().Description) - fmt.Printf("%v: %v\n", unencrypted_communication.Category().Id, unencrypted_communication.Category().Description) - fmt.Printf("%v: %v\n", unguarded_access_from_internet.Category().Id, unguarded_access_from_internet.Category().Description) - fmt.Printf("%v: %v\n", unguarded_direct_datastore_access.Category().Id, unguarded_direct_datastore_access.Category().Description) - fmt.Printf("%v: %v\n", unnecessary_communication_link.Category().Id, unnecessary_communication_link.Category().Description) - fmt.Printf("%v: %v\n", unnecessary_data_asset.Category().Id, unnecessary_data_asset.Category().Description) - fmt.Printf("%v: %v\n", unnecessary_data_transfer.Category().Id, unnecessary_data_transfer.Category().Description) - fmt.Printf("%v: %v\n", unnecessary_technical_asset.Category().Id, unnecessary_technical_asset.Category().Description) - fmt.Printf("%v: %v\n", untrusted_deserialization.Category().Id, untrusted_deserialization.Category().Description) - fmt.Printf("%v: %v\n", wrong_communication_link_content.Category().Id, wrong_communication_link_content.Category().Description) - fmt.Printf("%v: %v\n", 
wrong_trust_boundary_content.Category().Id, wrong_trust_boundary_content.Category().Description) - fmt.Printf("%v: %v\n", xml_external_entity.Category().Id, xml_external_entity.Category().Description) + fmt.Printf("%v: %v\n", accidentalsecretleak.Category().Id, accidentalsecretleak.Category().Description) + fmt.Printf("%v: %v\n", codebackdooring.Category().Id, codebackdooring.Category().Description) + fmt.Printf("%v: %v\n", containerbaseimagebackdooring.Category().Id, containerbaseimagebackdooring.Category().Description) + fmt.Printf("%v: %v\n", containerplatformescape.Category().Id, containerplatformescape.Category().Description) + fmt.Printf("%v: %v\n", crosssiterequestforgery.Category().Id, crosssiterequestforgery.Category().Description) + fmt.Printf("%v: %v\n", crosssitescripting.Category().Id, crosssitescripting.Category().Description) + fmt.Printf("%v: %v\n", dosriskyaccessacrosstrustboundary.Category().Id, dosriskyaccessacrosstrustboundary.Category().Description) + fmt.Printf("%v: %v\n", incompletemodel.Category().Id, incompletemodel.Category().Description) + fmt.Printf("%v: %v\n", ldapinjection.Category().Id, ldapinjection.Category().Description) + fmt.Printf("%v: %v\n", missingauthentication.Category().Id, missingauthentication.Category().Description) + fmt.Printf("%v: %v\n", missingauthenticationsecondfactor.Category().Id, missingauthenticationsecondfactor.Category().Description) + fmt.Printf("%v: %v\n", missingbuildinfrastructure.Category().Id, missingbuildinfrastructure.Category().Description) + fmt.Printf("%v: %v\n", missingcloudhardening.Category().Id, missingcloudhardening.Category().Description) + fmt.Printf("%v: %v\n", missingfilevalidation.Category().Id, missingfilevalidation.Category().Description) + fmt.Printf("%v: %v\n", missinghardening.Category().Id, missinghardening.Category().Description) + fmt.Printf("%v: %v\n", missingidentitypropagation.Category().Id, missingidentitypropagation.Category().Description) + fmt.Printf("%v: %v\n", 
missingidentityproviderisolation.Category().Id, missingidentityproviderisolation.Category().Description) + fmt.Printf("%v: %v\n", missingidentitystore.Category().Id, missingidentitystore.Category().Description) + fmt.Printf("%v: %v\n", missingnetworksegmentation.Category().Id, missingnetworksegmentation.Category().Description) + fmt.Printf("%v: %v\n", missingvault.Category().Id, missingvault.Category().Description) + fmt.Printf("%v: %v\n", missingvaultisolation.Category().Id, missingvaultisolation.Category().Description) + fmt.Printf("%v: %v\n", missingwaf.Category().Id, missingwaf.Category().Description) + fmt.Printf("%v: %v\n", mixedtargetsonsharedruntime.Category().Id, mixedtargetsonsharedruntime.Category().Description) + fmt.Printf("%v: %v\n", pathtraversal.Category().Id, pathtraversal.Category().Description) + fmt.Printf("%v: %v\n", pushinsteadofpulldeployment.Category().Id, pushinsteadofpulldeployment.Category().Description) + fmt.Printf("%v: %v\n", searchqueryinjection.Category().Id, searchqueryinjection.Category().Description) + fmt.Printf("%v: %v\n", serversiderequestforgery.Category().Id, serversiderequestforgery.Category().Description) + fmt.Printf("%v: %v\n", serviceregistrypoisoning.Category().Id, serviceregistrypoisoning.Category().Description) + fmt.Printf("%v: %v\n", sqlnosqlinjection.Category().Id, sqlnosqlinjection.Category().Description) + fmt.Printf("%v: %v\n", uncheckeddeployment.Category().Id, uncheckeddeployment.Category().Description) + fmt.Printf("%v: %v\n", unencryptedasset.Category().Id, unencryptedasset.Category().Description) + fmt.Printf("%v: %v\n", unencryptedcommunication.Category().Id, unencryptedcommunication.Category().Description) + fmt.Printf("%v: %v\n", unguardedaccessfrominternet.Category().Id, unguardedaccessfrominternet.Category().Description) + fmt.Printf("%v: %v\n", unguardeddirectdatastoreaccess.Category().Id, unguardeddirectdatastoreaccess.Category().Description) + fmt.Printf("%v: %v\n", 
unnecessarycommunicationlink.Category().Id, unnecessarycommunicationlink.Category().Description) + fmt.Printf("%v: %v\n", unnecessarydataasset.Category().Id, unnecessarydataasset.Category().Description) + fmt.Printf("%v: %v\n", unnecessarydatatransfer.Category().Id, unnecessarydatatransfer.Category().Description) + fmt.Printf("%v: %v\n", unnecessarytechnicalasset.Category().Id, unnecessarytechnicalasset.Category().Description) + fmt.Printf("%v: %v\n", untrusteddeserialization.Category().Id, untrusteddeserialization.Category().Description) + fmt.Printf("%v: %v\n", wrongcommunicationlinkcontent.Category().Id, wrongcommunicationlinkcontent.Category().Description) + fmt.Printf("%v: %v\n", wrongtrustboundarycontent.Category().Id, wrongtrustboundarycontent.Category().Description) + fmt.Printf("%v: %v\n", xmlexternalentity.Category().Id, xmlexternalentity.Category().Description) fmt.Println() os.Exit(0) } @@ -3875,7 +3881,7 @@ func parseCommandlineArgs() { } if *license { printLogo() - content, err := ioutil.ReadFile("/app/LICENSE.txt") + content, err := os.ReadFile("/app/LICENSE.txt") checkErr(err) fmt.Print(string(content)) fmt.Println() @@ -3935,20 +3941,20 @@ func printVersion() { } func createExampleModelFile() { - copyFile("/app/threagile-example-model.yaml", *outputDir+"/threagile-example-model.yaml") + _, _ = copyFile("/app/threagile-example-model.yaml", *outputDir+"/threagile-example-model.yaml") } func createStubModelFile() { loadCustomRiskRules() - stub, err := ioutil.ReadFile("/app/threagile-stub-model.yaml") + stub, err := os.ReadFile("/app/threagile-stub-model.yaml") checkErr(err) - err = ioutil.WriteFile(*outputDir+"/threagile-stub-model.yaml", addSupportedTags(stub), 0644) + err = os.WriteFile(*outputDir+"/threagile-stub-model.yaml", addSupportedTags(stub), 0644) checkErr(err) } func createEditingSupportFiles() { - copyFile("/app/schema.json", *outputDir+"/schema.json") - copyFile("/app/live-templates.txt", *outputDir+"/live-templates.txt") + _, _ = 
copyFile("/app/schema.json", *outputDir+"/schema.json") + _, _ = copyFile("/app/live-templates.txt", *outputDir+"/live-templates.txt") } func printExamples() { @@ -4007,13 +4013,13 @@ func copyFile(src, dst string) (int64, error) { if err != nil { return 0, err } - defer source.Close() + defer func() { _ = source.Close() }() destination, err := os.Create(dst) if err != nil { return 0, err } - defer destination.Close() + defer func() { _ = destination.Close() }() nBytes, err := io.Copy(destination, source) return nBytes, err } @@ -4022,7 +4028,7 @@ func parseModel(inputFilename string) { if *verbose { fmt.Println("Parsing model:", inputFilename) } - modelYaml, err := ioutil.ReadFile(inputFilename) + modelYaml, err := os.ReadFile(inputFilename) if err == nil { modelInput = model.ModelInput{} err = yaml.Unmarshal(modelYaml, &modelInput) @@ -4030,7 +4036,7 @@ func parseModel(inputFilename string) { //fmt.Println(modelInput) var businessCriticality model.Criticality - switch modelInput.Business_criticality { + switch modelInput.BusinessCriticality { case model.Archive.String(): businessCriticality = model.Archive case model.Operational.String(): @@ -4042,7 +4048,7 @@ func parseModel(inputFilename string) { case model.MissionCritical.String(): businessCriticality = model.MissionCritical default: - panic(errors.New("unknown 'business_criticality' value of application: " + modelInput.Business_criticality)) + panic(errors.New("unknown 'business_criticality' value of application: " + modelInput.BusinessCriticality)) } reportDate := time.Now() @@ -4057,21 +4063,21 @@ func parseModel(inputFilename string) { Author: modelInput.Author, Title: modelInput.Title, Date: reportDate, - ManagementSummaryComment: modelInput.Management_summary_comment, + ManagementSummaryComment: modelInput.ManagementSummaryComment, BusinessCriticality: businessCriticality, - BusinessOverview: removePathElementsFromImageFiles(modelInput.Business_overview), - TechnicalOverview: 
removePathElementsFromImageFiles(modelInput.Technical_overview), + BusinessOverview: removePathElementsFromImageFiles(modelInput.BusinessOverview), + TechnicalOverview: removePathElementsFromImageFiles(modelInput.TechnicalOverview), Questions: modelInput.Questions, - AbuseCases: modelInput.Abuse_cases, - SecurityRequirements: modelInput.Security_requirements, - TagsAvailable: lowerCaseAndTrim(modelInput.Tags_available), - DiagramTweakNodesep: modelInput.Diagram_tweak_nodesep, - DiagramTweakRanksep: modelInput.Diagram_tweak_ranksep, - DiagramTweakEdgeLayout: modelInput.Diagram_tweak_edge_layout, - DiagramTweakSuppressEdgeLabels: modelInput.Diagram_tweak_suppress_edge_labels, - DiagramTweakLayoutLeftToRight: modelInput.Diagram_tweak_layout_left_to_right, - DiagramTweakInvisibleConnectionsBetweenAssets: modelInput.Diagram_tweak_invisible_connections_between_assets, - DiagramTweakSameRankAssets: modelInput.Diagram_tweak_same_rank_assets, + AbuseCases: modelInput.AbuseCases, + SecurityRequirements: modelInput.SecurityRequirements, + TagsAvailable: lowerCaseAndTrim(modelInput.TagsAvailable), + DiagramTweakNodesep: modelInput.DiagramTweakNodesep, + DiagramTweakRanksep: modelInput.DiagramTweakRanksep, + DiagramTweakEdgeLayout: modelInput.DiagramTweakEdgeLayout, + DiagramTweakSuppressEdgeLabels: modelInput.DiagramTweakSuppressEdgeLabels, + DiagramTweakLayoutLeftToRight: modelInput.DiagramTweakLayoutLeftToRight, + DiagramTweakInvisibleConnectionsBetweenAssets: modelInput.DiagramTweakInvisibleConnectionsBetweenAssets, + DiagramTweakSameRankAssets: modelInput.DiagramTweakSameRankAssets, } if model.ParsedModelRoot.DiagramTweakNodesep == 0 { model.ParsedModelRoot.DiagramTweakNodesep = 2 @@ -4082,7 +4088,7 @@ func parseModel(inputFilename string) { // Data Assets =============================================================================== model.ParsedModelRoot.DataAssets = make(map[string]model.DataAsset) - for title, asset := range modelInput.Data_assets { + for title, asset 
:= range modelInput.DataAssets { id := fmt.Sprintf("%v", asset.ID) var usage model.Usage @@ -4173,13 +4179,13 @@ func parseModel(inputFilename string) { Confidentiality: confidentiality, Integrity: integrity, Availability: availability, - JustificationCiaRating: fmt.Sprintf("%v", asset.Justification_cia_rating), + JustificationCiaRating: fmt.Sprintf("%v", asset.JustificationCiaRating), } } // Technical Assets =============================================================================== model.ParsedModelRoot.TechnicalAssets = make(map[string]model.TechnicalAsset) - for title, asset := range modelInput.Technical_assets { + for title, asset := range modelInput.TechnicalAssets { id := fmt.Sprintf("%v", asset.ID) var usage model.Usage @@ -4193,9 +4199,9 @@ func parseModel(inputFilename string) { } var dataAssetsProcessed = make([]string, 0) - if asset.Data_assets_processed != nil { - dataAssetsProcessed = make([]string, len(asset.Data_assets_processed)) - for i, parsedProcessedAsset := range asset.Data_assets_processed { + if asset.DataAssetsProcessed != nil { + dataAssetsProcessed = make([]string, len(asset.DataAssetsProcessed)) + for i, parsedProcessedAsset := range asset.DataAssetsProcessed { referencedAsset := fmt.Sprintf("%v", parsedProcessedAsset) checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") dataAssetsProcessed[i] = referencedAsset @@ -4203,9 +4209,9 @@ func parseModel(inputFilename string) { } var dataAssetsStored = make([]string, 0) - if asset.Data_assets_stored != nil { - dataAssetsStored = make([]string, len(asset.Data_assets_stored)) - for i, parsedStoredAssets := range asset.Data_assets_stored { + if asset.DataAssetsStored != nil { + dataAssetsStored = make([]string, len(asset.DataAssetsStored)) + for i, parsedStoredAssets := range asset.DataAssetsStored { referencedAsset := fmt.Sprintf("%v", parsedStoredAssets) checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") dataAssetsStored[i] = referencedAsset @@ 
-4368,8 +4374,8 @@ func parseModel(inputFilename string) { encryption = model.DataWithSymmetricSharedKey case model.DataWithAsymmetricSharedKey.String(): encryption = model.DataWithAsymmetricSharedKey - case model.DataWithEnduserIndividualKey.String(): - encryption = model.DataWithEnduserIndividualKey + case model.DataWithEndUserIndividualKey.String(): + encryption = model.DataWithEndUserIndividualKey default: panic(errors.New("unknown 'encryption' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Encryption))) } @@ -4437,8 +4443,8 @@ func parseModel(inputFilename string) { } dataFormatsAccepted := make([]model.DataFormat, 0) - if asset.Data_formats_accepted != nil { - for _, dataFormatName := range asset.Data_formats_accepted { + if asset.DataFormatsAccepted != nil { + for _, dataFormatName := range asset.DataFormatsAccepted { switch dataFormatName { case model.JSON.String(): dataFormatsAccepted = append(dataFormatsAccepted, model.JSON) @@ -4457,8 +4463,8 @@ func parseModel(inputFilename string) { } communicationLinks := make([]model.CommunicationLink, 0) - if asset.Communication_links != nil { - for commLinkTitle, commLink := range asset.Communication_links { + if asset.CommunicationLinks != nil { + for commLinkTitle, commLink := range asset.CommunicationLinks { constraint := true weight := 1 var protocol model.Protocol @@ -4492,8 +4498,8 @@ func parseModel(inputFilename string) { authorization = model.NoneAuthorization case model.TechnicalUser.String(): authorization = model.TechnicalUser - case model.EnduserIdentityPropagation.String(): - authorization = model.EnduserIdentityPropagation + case model.EndUserIdentityPropagation.String(): + authorization = model.EndUserIdentityPropagation default: panic(errors.New("unknown 'authorization' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Authorization))) } @@ -4522,44 +4528,44 @@ func parseModel(inputFilename string) { protocol = 
model.MQTT case model.JDBC.String(): protocol = model.JDBC - case model.JDBC_encrypted.String(): - protocol = model.JDBC_encrypted + case model.JdbcEncrypted.String(): + protocol = model.JdbcEncrypted case model.ODBC.String(): protocol = model.ODBC - case model.ODBC_encrypted.String(): - protocol = model.ODBC_encrypted - case model.SQL_access_protocol.String(): - protocol = model.SQL_access_protocol - case model.SQL_access_protocol_encrypted.String(): - protocol = model.SQL_access_protocol_encrypted - case model.NoSQL_access_protocol.String(): - protocol = model.NoSQL_access_protocol - case model.NoSQL_access_protocol_encrypted.String(): - protocol = model.NoSQL_access_protocol_encrypted + case model.OdbcEncrypted.String(): + protocol = model.OdbcEncrypted + case model.SqlAccessProtocol.String(): + protocol = model.SqlAccessProtocol + case model.SqlAccessProtocolEncrypted.String(): + protocol = model.SqlAccessProtocolEncrypted + case model.NosqlAccessProtocol.String(): + protocol = model.NosqlAccessProtocol + case model.NosqlAccessProtocolEncrypted.String(): + protocol = model.NosqlAccessProtocolEncrypted case model.TEXT.String(): protocol = model.TEXT - case model.TEXT_encrypted.String(): - protocol = model.TEXT_encrypted + case model.TextEncrypted.String(): + protocol = model.TextEncrypted case model.BINARY.String(): protocol = model.BINARY - case model.BINARY_encrypted.String(): - protocol = model.BINARY_encrypted + case model.BinaryEncrypted.String(): + protocol = model.BinaryEncrypted case model.SSH.String(): protocol = model.SSH - case model.SSH_tunnel.String(): - protocol = model.SSH_tunnel + case model.SshTunnel.String(): + protocol = model.SshTunnel case model.SMTP.String(): protocol = model.SMTP - case model.SMTP_encrypted.String(): - protocol = model.SMTP_encrypted + case model.SmtpEncrypted.String(): + protocol = model.SmtpEncrypted case model.POP3.String(): protocol = model.POP3 - case model.POP3_encrypted.String(): - protocol = model.POP3_encrypted + 
case model.Pop3Encrypted.String(): + protocol = model.Pop3Encrypted case model.IMAP.String(): protocol = model.IMAP - case model.IMAP_encrypted.String(): - protocol = model.IMAP_encrypted + case model.ImapEncrypted.String(): + protocol = model.ImapEncrypted case model.FTP.String(): protocol = model.FTP case model.FTPS.String(): @@ -4578,8 +4584,8 @@ func parseModel(inputFilename string) { protocol = model.NFS case model.SMB.String(): protocol = model.SMB - case model.SMB_encrypted.String(): - protocol = model.SMB_encrypted + case model.SmbEncrypted.String(): + protocol = model.SmbEncrypted case model.LocalFileAccess.String(): protocol = model.LocalFileAccess case model.NRPE.String(): @@ -4588,12 +4594,12 @@ func parseModel(inputFilename string) { protocol = model.XMPP case model.IIOP.String(): protocol = model.IIOP - case model.IIOP_encrypted.String(): - protocol = model.IIOP_encrypted + case model.IiopEncrypted.String(): + protocol = model.IiopEncrypted case model.JRMP.String(): protocol = model.JRMP - case model.JRMP_encrypted.String(): - protocol = model.JRMP_encrypted + case model.JrmpEncrypted.String(): + protocol = model.JrmpEncrypted case model.InProcessLibraryCall.String(): protocol = model.InProcessLibraryCall case model.ContainerSpawning.String(): @@ -4602,27 +4608,27 @@ func parseModel(inputFilename string) { panic(errors.New("unknown 'protocol' of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Protocol))) } - if commLink.Data_assets_sent != nil { - for _, dataAssetSent := range commLink.Data_assets_sent { + if commLink.DataAssetsSent != nil { + for _, dataAssetSent := range commLink.DataAssetsSent { referencedAsset := fmt.Sprintf("%v", dataAssetSent) checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") dataAssetsSent = append(dataAssetsSent, referencedAsset) } } - if commLink.Data_assets_received != nil { - for _, dataAssetReceived 
:= range commLink.Data_assets_received { + if commLink.DataAssetsReceived != nil { + for _, dataAssetReceived := range commLink.DataAssetsReceived { referencedAsset := fmt.Sprintf("%v", dataAssetReceived) checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") dataAssetsReceived = append(dataAssetsReceived, referencedAsset) } } - if commLink.Diagram_tweak_weight > 0 { - weight = commLink.Diagram_tweak_weight + if commLink.DiagramTweakWeight > 0 { + weight = commLink.DiagramTweakWeight } - constraint = !commLink.Diagram_tweak_constraint + constraint = !commLink.DiagramTweakConstraint checkErr(err) @@ -4639,7 +4645,7 @@ func parseModel(inputFilename string) { Usage: usage, Tags: checkTags(lowerCaseAndTrim(commLink.Tags), "communication link '"+commLinkTitle+"' of technical asset '"+title+"'"), VPN: commLink.VPN, - IpFiltered: commLink.IP_filtered, + IpFiltered: commLink.IpFiltered, Readonly: commLink.Readonly, DataAssetsSent: dataAssetsSent, DataAssetsReceived: dataAssetsReceived, @@ -4649,7 +4655,7 @@ func parseModel(inputFilename string) { communicationLinks = append(communicationLinks, commLink) // track all comm links model.CommunicationLinks[commLink.Id] = commLink - // keep track of map of *all* comm links mapped by target-id (to be able to lookup "who is calling me" kind of things) + // keep track of map of *all* comm links mapped by target-id (to be able to look up "who is calling me" kind of things) model.IncomingTechnicalCommunicationLinksMappedByTargetId[commLink.TargetId] = append( model.IncomingTechnicalCommunicationLinksMappedByTargetId[commLink.TargetId], commLink) } @@ -4671,34 +4677,34 @@ func parseModel(inputFilename string) { Machine: technicalAssetMachine, Internet: asset.Internet, Encryption: encryption, - MultiTenant: asset.Multi_tenant, + MultiTenant: asset.MultiTenant, Redundant: asset.Redundant, - CustomDevelopedParts: asset.Custom_developed_parts, - UsedAsClientByHuman: 
asset.Used_as_client_by_human, - OutOfScope: asset.Out_of_scope, - JustificationOutOfScope: fmt.Sprintf("%v", asset.Justification_out_of_scope), + CustomDevelopedParts: asset.CustomDevelopedParts, + UsedAsClientByHuman: asset.UsedAsClientByHuman, + OutOfScope: asset.OutOfScope, + JustificationOutOfScope: fmt.Sprintf("%v", asset.JustificationOutOfScope), Owner: fmt.Sprintf("%v", asset.Owner), Confidentiality: confidentiality, Integrity: integrity, Availability: availability, - JustificationCiaRating: fmt.Sprintf("%v", asset.Justification_cia_rating), + JustificationCiaRating: fmt.Sprintf("%v", asset.JustificationCiaRating), DataAssetsProcessed: dataAssetsProcessed, DataAssetsStored: dataAssetsStored, DataFormatsAccepted: dataFormatsAccepted, CommunicationLinks: communicationLinks, - DiagramTweakOrder: asset.Diagram_tweak_order, + DiagramTweakOrder: asset.DiagramTweakOrder, } } // Trust Boundaries =============================================================================== checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries := make(map[string]bool) model.ParsedModelRoot.TrustBoundaries = make(map[string]model.TrustBoundary) - for title, boundary := range modelInput.Trust_boundaries { + for title, boundary := range modelInput.TrustBoundaries { id := fmt.Sprintf("%v", boundary.ID) var technicalAssetsInside = make([]string, 0) - if boundary.Technical_assets_inside != nil { - parsedInsideAssets := boundary.Technical_assets_inside + if boundary.TechnicalAssetsInside != nil { + parsedInsideAssets := boundary.TechnicalAssetsInside technicalAssetsInside = make([]string, len(parsedInsideAssets)) for i, parsedInsideAsset := range parsedInsideAssets { technicalAssetsInside[i] = fmt.Sprintf("%v", parsedInsideAsset) @@ -4715,8 +4721,8 @@ func parseModel(inputFilename string) { } var trustBoundariesNested = make([]string, 0) - if boundary.Trust_boundaries_nested != nil { - parsedNestedBoundaries := boundary.Trust_boundaries_nested + if boundary.TrustBoundariesNested != 
nil { + parsedNestedBoundaries := boundary.TrustBoundariesNested trustBoundariesNested = make([]string, len(parsedNestedBoundaries)) for i, parsedNestedBoundary := range parsedNestedBoundaries { trustBoundariesNested[i] = fmt.Sprintf("%v", parsedNestedBoundary) @@ -4766,12 +4772,12 @@ func parseModel(inputFilename string) { // Shared Runtime =============================================================================== model.ParsedModelRoot.SharedRuntimes = make(map[string]model.SharedRuntime) - for title, runtime := range modelInput.Shared_runtimes { + for title, runtime := range modelInput.SharedRuntimes { id := fmt.Sprintf("%v", runtime.ID) var technicalAssetsRunning = make([]string, 0) - if runtime.Technical_assets_running != nil { - parsedRunningAssets := runtime.Technical_assets_running + if runtime.TechnicalAssetsRunning != nil { + parsedRunningAssets := runtime.TechnicalAssetsRunning technicalAssetsRunning = make([]string, len(parsedRunningAssets)) for i, parsedRunningAsset := range parsedRunningAssets { assetId := fmt.Sprintf("%v", parsedRunningAsset) @@ -4784,7 +4790,7 @@ func parseModel(inputFilename string) { Id: id, Title: title, //fmt.Sprintf("%v", boundary["title"]), Description: withDefault(fmt.Sprintf("%v", runtime.Description), title), - Tags: checkTags((runtime.Tags), "shared runtime '"+title+"'"), + Tags: checkTags(runtime.Tags, "shared runtime '"+title+"'"), TechnicalAssetsRunning: technicalAssetsRunning, } checkIdSyntax(id) @@ -4799,11 +4805,11 @@ func parseModel(inputFilename string) { // Individual Risk Categories (just used as regular risk categories) =============================================================================== model.ParsedModelRoot.IndividualRiskCategories = make(map[string]model.RiskCategory) - for title, indivCat := range modelInput.Individual_risk_categories { - id := fmt.Sprintf("%v", indivCat.ID) + for title, individualCategory := range modelInput.IndividualRiskCategories { + id := fmt.Sprintf("%v", 
individualCategory.ID) var function model.RiskFunction - switch indivCat.Function { + switch individualCategory.Function { case model.BusinessSide.String(): function = model.BusinessSide case model.Architecture.String(): @@ -4813,11 +4819,11 @@ func parseModel(inputFilename string) { case model.Operations.String(): function = model.Operations default: - panic(errors.New("unknown 'function' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", indivCat.Function))) + panic(errors.New("unknown 'function' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.Function))) } var stride model.STRIDE - switch indivCat.STRIDE { + switch individualCategory.STRIDE { case model.Spoofing.String(): stride = model.Spoofing case model.Tampering.String(): @@ -4831,26 +4837,26 @@ func parseModel(inputFilename string) { case model.ElevationOfPrivilege.String(): stride = model.ElevationOfPrivilege default: - panic(errors.New("unknown 'stride' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", indivCat.STRIDE))) + panic(errors.New("unknown 'stride' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.STRIDE))) } cat := model.RiskCategory{ Id: id, Title: title, - Description: withDefault(fmt.Sprintf("%v", indivCat.Description), title), - Impact: fmt.Sprintf("%v", indivCat.Impact), - ASVS: fmt.Sprintf("%v", indivCat.ASVS), - CheatSheet: fmt.Sprintf("%v", indivCat.Cheat_sheet), - Action: fmt.Sprintf("%v", indivCat.Action), - Mitigation: fmt.Sprintf("%v", indivCat.Mitigation), - Check: fmt.Sprintf("%v", indivCat.Check), - DetectionLogic: fmt.Sprintf("%v", indivCat.Detection_logic), - RiskAssessment: fmt.Sprintf("%v", indivCat.Risk_assessment), - FalsePositives: fmt.Sprintf("%v", indivCat.False_positives), + Description: withDefault(fmt.Sprintf("%v", individualCategory.Description), title), + Impact: fmt.Sprintf("%v", individualCategory.Impact), + ASVS: fmt.Sprintf("%v", 
individualCategory.ASVS), + CheatSheet: fmt.Sprintf("%v", individualCategory.CheatSheet), + Action: fmt.Sprintf("%v", individualCategory.Action), + Mitigation: fmt.Sprintf("%v", individualCategory.Mitigation), + Check: fmt.Sprintf("%v", individualCategory.Check), + DetectionLogic: fmt.Sprintf("%v", individualCategory.DetectionLogic), + RiskAssessment: fmt.Sprintf("%v", individualCategory.RiskAssessment), + FalsePositives: fmt.Sprintf("%v", individualCategory.FalsePositives), Function: function, STRIDE: stride, - ModelFailurePossibleReason: indivCat.Model_failure_possible_reason, - CWE: indivCat.CWE, + ModelFailurePossibleReason: individualCategory.ModelFailurePossibleReason, + CWE: individualCategory.CWE, } checkIdSyntax(id) if _, exists := model.ParsedModelRoot.IndividualRiskCategories[id]; exists { @@ -4860,8 +4866,8 @@ func parseModel(inputFilename string) { // NOW THE INDIVIDUAL RISK INSTANCES: //individualRiskInstances := make([]model.Risk, 0) - if indivCat.Risks_identified != nil { // TODO: also add syntax checks of input YAML when linked asset is not found or when syntehtic-id is already used... - for title, indivRiskInstance := range indivCat.Risks_identified { + if individualCategory.RisksIdentified != nil { // TODO: also add syntax checks of input YAML when linked asset is not found or when synthetic-id is already used... 
+ for title, individualRiskInstance := range individualCategory.RisksIdentified { var severity model.RiskSeverity var exploitationLikelihood model.RiskExploitationLikelihood var exploitationImpact model.RiskExploitationImpact @@ -4869,7 +4875,7 @@ func parseModel(inputFilename string) { var dataBreachProbability model.DataBreachProbability var dataBreachTechnicalAssetIDs []string - switch indivRiskInstance.Severity { + switch individualRiskInstance.Severity { case model.LowSeverity.String(): severity = model.LowSeverity case model.MediumSeverity.String(): @@ -4883,10 +4889,10 @@ func parseModel(inputFilename string) { case "": // added default severity = model.MediumSeverity default: - panic(errors.New("unknown 'severity' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", indivRiskInstance.Severity))) + panic(errors.New("unknown 'severity' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.Severity))) } - switch indivRiskInstance.Exploitation_likelihood { + switch individualRiskInstance.ExploitationLikelihood { case model.Unlikely.String(): exploitationLikelihood = model.Unlikely case model.Likely.String(): @@ -4898,10 +4904,10 @@ func parseModel(inputFilename string) { case "": // added default exploitationLikelihood = model.Likely default: - panic(errors.New("unknown 'exploitation_likelihood' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", indivRiskInstance.Exploitation_likelihood))) + panic(errors.New("unknown 'exploitation_likelihood' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationLikelihood))) } - switch indivRiskInstance.Exploitation_impact { + switch individualRiskInstance.ExploitationImpact { case model.LowImpact.String(): exploitationImpact = model.LowImpact case model.MediumImpact.String(): @@ -4913,35 +4919,35 @@ func parseModel(inputFilename string) { case "": // added default exploitationImpact = 
model.MediumImpact default: - panic(errors.New("unknown 'exploitation_impact' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", indivRiskInstance.Exploitation_impact))) + panic(errors.New("unknown 'exploitation_impact' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationImpact))) } - if len(indivRiskInstance.Most_relevant_data_asset) > 0 { - mostRelevantDataAssetId = fmt.Sprintf("%v", indivRiskInstance.Most_relevant_data_asset) + if len(individualRiskInstance.MostRelevantDataAsset) > 0 { + mostRelevantDataAssetId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantDataAsset) checkDataAssetTargetExists(mostRelevantDataAssetId, "individual risk '"+title+"'") } - if len(indivRiskInstance.Most_relevant_technical_asset) > 0 { - mostRelevantTechnicalAssetId = fmt.Sprintf("%v", indivRiskInstance.Most_relevant_technical_asset) + if len(individualRiskInstance.MostRelevantTechnicalAsset) > 0 { + mostRelevantTechnicalAssetId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantTechnicalAsset) checkTechnicalAssetExists(mostRelevantTechnicalAssetId, "individual risk '"+title+"'", false) } - if len(indivRiskInstance.Most_relevant_communication_link) > 0 { - mostRelevantCommunicationLinkId = fmt.Sprintf("%v", indivRiskInstance.Most_relevant_communication_link) + if len(individualRiskInstance.MostRelevantCommunicationLink) > 0 { + mostRelevantCommunicationLinkId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantCommunicationLink) checkCommunicationLinkExists(mostRelevantCommunicationLinkId, "individual risk '"+title+"'") } - if len(indivRiskInstance.Most_relevant_trust_boundary) > 0 { - mostRelevantTrustBoundaryId = fmt.Sprintf("%v", indivRiskInstance.Most_relevant_trust_boundary) + if len(individualRiskInstance.MostRelevantTrustBoundary) > 0 { + mostRelevantTrustBoundaryId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantTrustBoundary) 
checkTrustBoundaryExists(mostRelevantTrustBoundaryId, "individual risk '"+title+"'") } - if len(indivRiskInstance.Most_relevant_shared_runtime) > 0 { - mostRelevantSharedRuntimeId = fmt.Sprintf("%v", indivRiskInstance.Most_relevant_shared_runtime) + if len(individualRiskInstance.MostRelevantSharedRuntime) > 0 { + mostRelevantSharedRuntimeId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantSharedRuntime) checkSharedRuntimeExists(mostRelevantSharedRuntimeId, "individual risk '"+title+"'") } - switch indivRiskInstance.Data_breach_probability { + switch individualRiskInstance.DataBreachProbability { case model.Improbable.String(): dataBreachProbability = model.Improbable case model.Possible.String(): @@ -4951,12 +4957,12 @@ func parseModel(inputFilename string) { case "": // added default dataBreachProbability = model.Possible default: - panic(errors.New("unknown 'data_breach_probability' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", indivRiskInstance.Data_breach_probability))) + panic(errors.New("unknown 'data_breach_probability' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.DataBreachProbability))) } - if indivRiskInstance.Data_breach_technical_assets != nil { - dataBreachTechnicalAssetIDs = make([]string, len(indivRiskInstance.Data_breach_technical_assets)) - for i, parsedReferencedAsset := range indivRiskInstance.Data_breach_technical_assets { + if individualRiskInstance.DataBreachTechnicalAssets != nil { + dataBreachTechnicalAssetIDs = make([]string, len(individualRiskInstance.DataBreachTechnicalAssets)) + for i, parsedReferencedAsset := range individualRiskInstance.DataBreachTechnicalAssets { assetId := fmt.Sprintf("%v", parsedReferencedAsset) checkTechnicalAssetExists(assetId, "data breach technical assets of individual risk '"+title+"'", false) dataBreachTechnicalAssetIDs[i] = assetId @@ -4965,7 +4971,7 @@ func parseModel(inputFilename string) { checkErr(err) - indivRiskInstance 
:= model.Risk{ + individualRiskInstance := model.Risk{ SyntheticId: createSyntheticId(cat.Id, mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId), Title: fmt.Sprintf("%v", title), Category: cat, @@ -4980,16 +4986,16 @@ func parseModel(inputFilename string) { DataBreachProbability: dataBreachProbability, DataBreachTechnicalAssetIDs: dataBreachTechnicalAssetIDs, } - model.GeneratedRisksByCategory[cat] = append(model.GeneratedRisksByCategory[cat], indivRiskInstance) + model.GeneratedRisksByCategory[cat] = append(model.GeneratedRisksByCategory[cat], individualRiskInstance) } } } // Risk Tracking =============================================================================== model.ParsedModelRoot.RiskTracking = make(map[string]model.RiskTracking) - for syntheticRiskId, riskTracking := range modelInput.Risk_tracking { + for syntheticRiskId, riskTracking := range modelInput.RiskTracking { justification := fmt.Sprintf("%v", riskTracking.Justification) - checkedBy := fmt.Sprintf("%v", riskTracking.Checked_by) + checkedBy := fmt.Sprintf("%v", riskTracking.CheckedBy) ticket := fmt.Sprintf("%v", riskTracking.Ticket) var date time.Time if len(riskTracking.Date) > 0 { @@ -5065,7 +5071,7 @@ func checkTags(tags []string, where string) []string { // in order to prevent Path-Traversal like stuff... 
func removePathElementsFromImageFiles(overview model.Overview) model.Overview { - for i, _ := range overview.Images { + for i := range overview.Images { newValue := make(map[string]string) for file, desc := range overview.Images[i] { newValue[filepath.Base(file)] = desc @@ -5082,8 +5088,8 @@ func applyWildcardRiskTrackingEvaluation() { for syntheticRiskIdPattern, riskTracking := range deferredRiskTrackingDueToWildcardMatching { foundSome := false var matchingRiskIdExpression = regexp.MustCompile(strings.ReplaceAll(regexp.QuoteMeta(syntheticRiskIdPattern), `\*`, `[^@]+`)) - for syntheticRiskId, _ := range model.GeneratedRisksBySyntheticId { - if matchingRiskIdExpression.Match([]byte(syntheticRiskId)) && hasNotYetAnyDirectNonWildcardRiskTrackings(syntheticRiskId) { + for syntheticRiskId := range model.GeneratedRisksBySyntheticId { + if matchingRiskIdExpression.Match([]byte(syntheticRiskId)) && hasNotYetAnyDirectNonWildcardRiskTracking(syntheticRiskId) { foundSome = true model.ParsedModelRoot.RiskTracking[syntheticRiskId] = model.RiskTracking{ SyntheticRiskId: strings.TrimSpace(syntheticRiskId), @@ -5105,7 +5111,7 @@ func applyWildcardRiskTrackingEvaluation() { } } -func hasNotYetAnyDirectNonWildcardRiskTrackings(syntheticRiskId string) bool { +func hasNotYetAnyDirectNonWildcardRiskTracking(syntheticRiskId string) bool { if _, ok := model.ParsedModelRoot.RiskTracking[syntheticRiskId]; ok { return false } @@ -5199,7 +5205,7 @@ func checkNestedTrustBoundariesExisting() { func hash(s string) string { h := fnv.New32a() - h.Write([]byte(s)) + _, _ = h.Write([]byte(s)) return fmt.Sprintf("%v", h.Sum32()) } @@ -5283,7 +5289,7 @@ func writeDataAssetDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fi // Write the DOT file file, err := os.Create(diagramFilenameDOT) checkErr(err) - defer file.Close() + defer func() { _ = file.Close() }() _, err = fmt.Fprintln(file, dotContent.String()) checkErr(err) return file @@ -5363,7 +5369,7 @@ func 
writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: keys := make([]string, 0) - for k, _ := range model.ParsedModelRoot.TrustBoundaries { + for k := range model.ParsedModelRoot.TrustBoundaries { keys = append(keys, k) } sort.Strings(keys) @@ -5388,10 +5394,10 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil } snippet.WriteString("\n subgraph cluster_" + hash(trustBoundary.Id) + " {\n") color, fontColor, bgColor, style, fontname := colors.RgbHexColorTwilight(), colors.RgbHexColorTwilight() /*"#550E0C"*/, "#FAFAFA", "dashed", "Verdana" - penwidth := 4.5 + penWidth := 4.5 if len(trustBoundary.TrustBoundariesNested) > 0 { //color, fontColor, style, fontname = colors.Blue, colors.Blue, "dashed", "Verdana" - penwidth = 5.5 + penWidth = 5.5 } if len(trustBoundary.ParentTrustBoundaryID()) > 0 { bgColor = "#F1F1F1" @@ -5411,7 +5417,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil bgcolor="` + bgColor + `" fontcolor="` + fontColor + `" fontname="` + fontname + `" - penwidth="` + fmt.Sprintf("%f", penwidth) + `" + penwidth="` + fmt.Sprintf("%f", penWidth) + `" forcelabels=true outputorder="nodesfirst" margin="50.0" @@ -5441,7 +5447,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil subgraphSnippetsById[hash(trustBoundary.Id)] = snippet.String() } // here replace links and remove from map after replacement (i.e. 
move snippet into nested) - for i, _ := range subgraphSnippetsById { + for i := range subgraphSnippetsById { re := regexp.MustCompile(`LINK-NEEDS-REPLACED-BY-cluster_([0-9]*);`) for { matches := re.FindStringSubmatch(subgraphSnippetsById[i]) @@ -5456,7 +5462,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil } // now write them all keys = make([]string, 0) - for k, _ := range subgraphSnippetsById { + for k := range subgraphSnippetsById { keys = append(keys, k) } sort.Strings(keys) @@ -5469,7 +5475,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: // Convert map to slice of values: - techAssets := []model.TechnicalAsset{} + var techAssets []model.TechnicalAsset for _, techAsset := range model.ParsedModelRoot.TechnicalAssets { techAssets = append(techAssets, techAsset) } @@ -5495,7 +5501,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil } dir := "forward" if dataFlow.IsBidirectional() { - if !suppressBidirectionalArrows { // as it does not work as bug in grahviz with ortho: https://gitlab.com/graphviz/graphviz/issues/144 + if !suppressBidirectionalArrows { // as it does not work as bug in graphviz with ortho: https://gitlab.com/graphviz/graphviz/issues/144 dir = "both" } } @@ -5526,7 +5532,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil // Write the DOT file file, err := os.Create(diagramFilenameDOT) checkErr(err) - defer file.Close() + defer func() { _ = file.Close() }() _, err = fmt.Fprintln(file, dotContent.String()) checkErr(err) return file @@ -5668,21 +5674,21 @@ func renderDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string) { fmt.Println("Rendering data flow diagram input") } // tmp files - tmpFileDOT, err := 
ioutil.TempFile(model.TempFolder, "diagram-*-.gv") + tmpFileDOT, err := os.CreateTemp(model.TempFolder, "diagram-*-.gv") checkErr(err) - defer os.Remove(tmpFileDOT.Name()) + defer func() { _ = os.Remove(tmpFileDOT.Name()) }() - tmpFilePNG, err := ioutil.TempFile(model.TempFolder, "diagram-*-.png") + tmpFilePNG, err := os.CreateTemp(model.TempFolder, "diagram-*-.png") checkErr(err) - defer os.Remove(tmpFilePNG.Name()) + defer func() { _ = os.Remove(tmpFilePNG.Name()) }() // copy into tmp file as input - input, err := ioutil.ReadFile(dotFile.Name()) + input, err := os.ReadFile(dotFile.Name()) if err != nil { fmt.Println(err) return } - err = ioutil.WriteFile(tmpFileDOT.Name(), input, 0644) + err = os.WriteFile(tmpFileDOT.Name(), input, 0644) if err != nil { fmt.Println("Error creating", tmpFileDOT.Name()) fmt.Println(err) @@ -5698,12 +5704,12 @@ func renderDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string) { panic(errors.New("graph rendering call failed with error:" + err.Error())) } // copy into resulting file - input, err = ioutil.ReadFile(tmpFilePNG.Name()) + input, err = os.ReadFile(tmpFilePNG.Name()) if err != nil { fmt.Println(err) return } - err = ioutil.WriteFile(targetDir+"/"+dataFlowDiagramFilenamePNG, input, 0644) + err = os.WriteFile(targetDir+"/"+dataFlowDiagramFilenamePNG, input, 0644) if err != nil { fmt.Println("Error creating", dataFlowDiagramFilenamePNG) fmt.Println(err) @@ -5716,21 +5722,21 @@ func renderDataAssetDiagramGraphvizImage(dotFile *os.File, targetDir string) { / fmt.Println("Rendering data asset diagram input") } // tmp files - tmpFileDOT, err := ioutil.TempFile(model.TempFolder, "diagram-*-.gv") + tmpFileDOT, err := os.CreateTemp(model.TempFolder, "diagram-*-.gv") checkErr(err) - defer os.Remove(tmpFileDOT.Name()) + defer func() { _ = os.Remove(tmpFileDOT.Name()) }() - tmpFilePNG, err := ioutil.TempFile(model.TempFolder, "diagram-*-.png") + tmpFilePNG, err := os.CreateTemp(model.TempFolder, "diagram-*-.png") checkErr(err) 
- defer os.Remove(tmpFilePNG.Name()) + defer func() { _ = os.Remove(tmpFilePNG.Name()) }() // copy into tmp file as input - input, err := ioutil.ReadFile(dotFile.Name()) + input, err := os.ReadFile(dotFile.Name()) if err != nil { fmt.Println(err) return } - err = ioutil.WriteFile(tmpFileDOT.Name(), input, 0644) + err = os.WriteFile(tmpFileDOT.Name(), input, 0644) if err != nil { fmt.Println("Error creating", tmpFileDOT.Name()) fmt.Println(err) @@ -5746,12 +5752,12 @@ func renderDataAssetDiagramGraphvizImage(dotFile *os.File, targetDir string) { / panic(errors.New("graph rendering call failed with error: " + err.Error())) } // copy into resulting file - input, err = ioutil.ReadFile(tmpFilePNG.Name()) + input, err = os.ReadFile(tmpFilePNG.Name()) if err != nil { fmt.Println(err) return } - err = ioutil.WriteFile(targetDir+"/"+dataAssetDiagramFilenamePNG, input, 0644) + err = os.WriteFile(targetDir+"/"+dataAssetDiagramFilenamePNG, input, 0644) if err != nil { fmt.Println("Error creating", dataAssetDiagramFilenamePNG) fmt.Println(err) diff --git a/model/types.go b/model/types.go index d22c16db..e904b59d 100644 --- a/model/types.go +++ b/model/types.go @@ -26,14 +26,24 @@ var GeneratedRisksBySyntheticId map[string]Risk var AllSupportedTags map[string]bool +var ( + _ = ParseEncryptionStyle + _ = SortedKeysOfDataAssets + _ = SortedKeysOfTechnicalAssets + _ = SortedDataAssetsByDataBreachProbabilityAndTitleStillAtRisk + _ = ReduceToOnlyHighRisk + _ = ReduceToOnlyMediumRisk + _ = ReduceToOnlyLowRisk +) + func Init() { - CommunicationLinks = make(map[string]CommunicationLink, 0) - IncomingTechnicalCommunicationLinksMappedByTargetId = make(map[string][]CommunicationLink, 0) - DirectContainingTrustBoundaryMappedByTechnicalAssetId = make(map[string]TrustBoundary, 0) - DirectContainingSharedRuntimeMappedByTechnicalAssetId = make(map[string]SharedRuntime, 0) - GeneratedRisksByCategory = make(map[RiskCategory][]Risk, 0) - GeneratedRisksBySyntheticId = make(map[string]Risk, 0) - 
AllSupportedTags = make(map[string]bool, 0) + CommunicationLinks = make(map[string]CommunicationLink) + IncomingTechnicalCommunicationLinksMappedByTargetId = make(map[string][]CommunicationLink) + DirectContainingTrustBoundaryMappedByTechnicalAssetId = make(map[string]TrustBoundary) + DirectContainingSharedRuntimeMappedByTechnicalAssetId = make(map[string]SharedRuntime) + GeneratedRisksByCategory = make(map[RiskCategory][]Risk) + GeneratedRisksBySyntheticId = make(map[string]Risk) + AllSupportedTags = make(map[string]bool) } func AddToListOfSupportedTags(tags []string) { @@ -52,10 +62,10 @@ type CustomRiskRule interface { func AddTagToModelInput(modelInput *ModelInput, tag string, dryRun bool, changes *[]string) { tag = NormalizeTag(tag) - if !Contains(modelInput.Tags_available, tag) { + if !Contains(modelInput.TagsAvailable, tag) { *changes = append(*changes, "adding tag: "+tag) if !dryRun { - modelInput.Tags_available = append(modelInput.Tags_available, tag) + modelInput.TagsAvailable = append(modelInput.TagsAvailable, tag) } } } @@ -72,138 +82,138 @@ func MakeID(val string) string { // === Model Type Stuff ====================================== type ModelInput struct { // TODO: Eventually remove this and directly use ParsedModelRoot? But then the error messages for model errors are not quite as good anymore... 
- Threagile_version string - Title string - Author Author - Date string - Business_overview Overview - Technical_overview Overview - Business_criticality string - Management_summary_comment string - Questions map[string]string - Abuse_cases map[string]string - Security_requirements map[string]string - Tags_available []string - Data_assets map[string]InputDataAsset - Technical_assets map[string]InputTechnicalAsset - Trust_boundaries map[string]InputTrustBoundary - Shared_runtimes map[string]InputSharedRuntime - Individual_risk_categories map[string]InputIndividualRiskCategory - Risk_tracking map[string]InputRiskTracking - Diagram_tweak_nodesep, Diagram_tweak_ranksep int - Diagram_tweak_edge_layout string - Diagram_tweak_suppress_edge_labels bool - Diagram_tweak_layout_left_to_right bool - Diagram_tweak_invisible_connections_between_assets []string - Diagram_tweak_same_rank_assets []string + ThreagileVersion string + Title string + Author Author + Date string + BusinessOverview Overview + TechnicalOverview Overview + BusinessCriticality string + ManagementSummaryComment string + Questions map[string]string + AbuseCases map[string]string + SecurityRequirements map[string]string + TagsAvailable []string + DataAssets map[string]InputDataAsset + TechnicalAssets map[string]InputTechnicalAsset + TrustBoundaries map[string]InputTrustBoundary + SharedRuntimes map[string]InputSharedRuntime + IndividualRiskCategories map[string]InputIndividualRiskCategory + RiskTracking map[string]InputRiskTracking + DiagramTweakNodesep, DiagramTweakRanksep int + DiagramTweakEdgeLayout string + DiagramTweakSuppressEdgeLabels bool + DiagramTweakLayoutLeftToRight bool + DiagramTweakInvisibleConnectionsBetweenAssets []string + DiagramTweakSameRankAssets []string } type InputDataAsset struct { - ID string `json:"id"` - Description string `json:"description"` - Usage string `json:"usage"` - Tags []string `json:"tags"` - Origin string `json:"origin"` - Owner string `json:"owner"` - Quantity string 
`json:"quantity"` - Confidentiality string `json:"confidentiality"` - Integrity string `json:"integrity"` - Availability string `json:"availability"` - Justification_cia_rating string `json:"justification_cia_rating"` + ID string `json:"id"` + Description string `json:"description"` + Usage string `json:"usage"` + Tags []string `json:"tags"` + Origin string `json:"origin"` + Owner string `json:"owner"` + Quantity string `json:"quantity"` + Confidentiality string `json:"confidentiality"` + Integrity string `json:"integrity"` + Availability string `json:"availability"` + JustificationCiaRating string `json:"justification_cia_rating"` } type InputTechnicalAsset struct { - ID string `json:"id"` - Description string `json:"description"` - Type string `json:"type"` - Usage string `json:"usage"` - Used_as_client_by_human bool `json:"used_as_client_by_human"` - Out_of_scope bool `json:"out_of_scope"` - Justification_out_of_scope string `json:"justification_out_of_scope"` - Size string `json:"size"` - Technology string `json:"technology"` - Tags []string `json:"tags"` - Internet bool `json:"internet"` - Machine string `json:"machine"` - Encryption string `json:"encryption"` - Owner string `json:"owner"` - Confidentiality string `json:"confidentiality"` - Integrity string `json:"integrity"` - Availability string `json:"availability"` - Justification_cia_rating string `json:"justification_cia_rating"` - Multi_tenant bool `json:"multi_tenant"` - Redundant bool `json:"redundant"` - Custom_developed_parts bool `json:"custom_developed_parts"` - Data_assets_processed []string `json:"data_assets_processed"` - Data_assets_stored []string `json:"data_assets_stored"` - Data_formats_accepted []string `json:"data_formats_accepted"` - Diagram_tweak_order int `json:"diagram_tweak_order"` - Communication_links map[string]InputCommunicationLink `json:"communication_links"` + ID string `json:"id"` + Description string `json:"description"` + Type string `json:"type"` + Usage string 
`json:"usage"` + UsedAsClientByHuman bool `json:"used_as_client_by_human"` + OutOfScope bool `json:"out_of_scope"` + JustificationOutOfScope string `json:"justification_out_of_scope"` + Size string `json:"size"` + Technology string `json:"technology"` + Tags []string `json:"tags"` + Internet bool `json:"internet"` + Machine string `json:"machine"` + Encryption string `json:"encryption"` + Owner string `json:"owner"` + Confidentiality string `json:"confidentiality"` + Integrity string `json:"integrity"` + Availability string `json:"availability"` + JustificationCiaRating string `json:"justification_cia_rating"` + MultiTenant bool `json:"multi_tenant"` + Redundant bool `json:"redundant"` + CustomDevelopedParts bool `json:"custom_developed_parts"` + DataAssetsProcessed []string `json:"data_assets_processed"` + DataAssetsStored []string `json:"data_assets_stored"` + DataFormatsAccepted []string `json:"data_formats_accepted"` + DiagramTweakOrder int `json:"diagram_tweak_order"` + CommunicationLinks map[string]InputCommunicationLink `json:"communication_links"` } type InputCommunicationLink struct { - Target string `json:"target"` - Description string `json:"description"` - Protocol string `json:"protocol"` - Authentication string `json:"authentication"` - Authorization string `json:"authorization"` - Tags []string `json:"tags"` - VPN bool `json:"vpn"` - IP_filtered bool `json:"ip_filtered"` - Readonly bool `json:"readonly"` - Usage string `json:"usage"` - Data_assets_sent []string `json:"data_assets_sent"` - Data_assets_received []string `json:"data_assets_received"` - Diagram_tweak_weight int `json:"diagram_tweak_weight"` - Diagram_tweak_constraint bool `json:"diagram_tweak_constraint"` + Target string `json:"target"` + Description string `json:"description"` + Protocol string `json:"protocol"` + Authentication string `json:"authentication"` + Authorization string `json:"authorization"` + Tags []string `json:"tags"` + VPN bool `json:"vpn"` + IpFiltered bool 
`json:"ip_filtered"` + Readonly bool `json:"readonly"` + Usage string `json:"usage"` + DataAssetsSent []string `json:"data_assets_sent"` + DataAssetsReceived []string `json:"data_assets_received"` + DiagramTweakWeight int `json:"diagram_tweak_weight"` + DiagramTweakConstraint bool `json:"diagram_tweak_constraint"` } type InputSharedRuntime struct { - ID string `json:"id"` - Description string `json:"description"` - Tags []string `json:"tags"` - Technical_assets_running []string `json:"technical_assets_running"` + ID string `json:"id"` + Description string `json:"description"` + Tags []string `json:"tags"` + TechnicalAssetsRunning []string `json:"technical_assets_running"` } type InputTrustBoundary struct { - ID string `json:"id"` - Description string `json:"description"` - Type string `json:"type"` - Tags []string `json:"tags"` - Technical_assets_inside []string `json:"technical_assets_inside"` - Trust_boundaries_nested []string `json:"trust_boundaries_nested"` + ID string `json:"id"` + Description string `json:"description"` + Type string `json:"type"` + Tags []string `json:"tags"` + TechnicalAssetsInside []string `json:"technical_assets_inside"` + TrustBoundariesNested []string `json:"trust_boundaries_nested"` } type InputIndividualRiskCategory struct { - ID string `json:"id"` - Description string `json:"description"` - Impact string `json:"impact"` - ASVS string `json:"asvs"` - Cheat_sheet string `json:"cheat_sheet"` - Action string `json:"action"` - Mitigation string `json:"mitigation"` - Check string `json:"check"` - Function string `json:"function"` - STRIDE string `json:"stride"` - Detection_logic string `json:"detection_logic"` - Risk_assessment string `json:"risk_assessment"` - False_positives string `json:"false_positives"` - Model_failure_possible_reason bool `json:"model_failure_possible_reason"` - CWE int `json:"cwe"` - Risks_identified map[string]InputRiskIdentified `json:"risks_identified"` + ID string `json:"id"` + Description string 
`json:"description"` + Impact string `json:"impact"` + ASVS string `json:"asvs"` + CheatSheet string `json:"cheat_sheet"` + Action string `json:"action"` + Mitigation string `json:"mitigation"` + Check string `json:"check"` + Function string `json:"function"` + STRIDE string `json:"stride"` + DetectionLogic string `json:"detection_logic"` + RiskAssessment string `json:"risk_assessment"` + FalsePositives string `json:"false_positives"` + ModelFailurePossibleReason bool `json:"model_failure_possible_reason"` + CWE int `json:"cwe"` + RisksIdentified map[string]InputRiskIdentified `json:"risks_identified"` } type InputRiskIdentified struct { - Severity string `json:"severity"` - Exploitation_likelihood string `json:"exploitation_likelihood"` - Exploitation_impact string `json:"exploitation_impact"` - Data_breach_probability string `json:"data_breach_probability"` - Data_breach_technical_assets []string `json:"data_breach_technical_assets"` - Most_relevant_data_asset string `json:"most_relevant_data_asset"` - Most_relevant_technical_asset string `json:"most_relevant_technical_asset"` - Most_relevant_communication_link string `json:"most_relevant_communication_link"` - Most_relevant_trust_boundary string `json:"most_relevant_trust_boundary"` - Most_relevant_shared_runtime string `json:"most_relevant_shared_runtime"` + Severity string `json:"severity"` + ExploitationLikelihood string `json:"exploitation_likelihood"` + ExploitationImpact string `json:"exploitation_impact"` + DataBreachProbability string `json:"data_breach_probability"` + DataBreachTechnicalAssets []string `json:"data_breach_technical_assets"` + MostRelevantDataAsset string `json:"most_relevant_data_asset"` + MostRelevantTechnicalAsset string `json:"most_relevant_technical_asset"` + MostRelevantCommunicationLink string `json:"most_relevant_communication_link"` + MostRelevantTrustBoundary string `json:"most_relevant_trust_boundary"` + MostRelevantSharedRuntime string `json:"most_relevant_shared_runtime"` } 
type InputRiskTracking struct { @@ -211,7 +221,7 @@ type InputRiskTracking struct { Justification string `json:"justification"` Ticket string `json:"ticket"` Date string `json:"date"` - Checked_by string `json:"checked_by"` + CheckedBy string `json:"checked_by"` } // TypeDescription contains a name for a type and its description @@ -510,14 +520,14 @@ type Authorization int const ( NoneAuthorization Authorization = iota TechnicalUser - EnduserIdentityPropagation + EndUserIdentityPropagation ) func AuthorizationValues() []TypeEnum { return []TypeEnum{ NoneAuthorization, TechnicalUser, - EnduserIdentityPropagation, + EndUserIdentityPropagation, } } @@ -630,7 +640,7 @@ const ( Transparent DataWithSymmetricSharedKey DataWithAsymmetricSharedKey - DataWithEnduserIndividualKey + DataWithEndUserIndividualKey ) func EncryptionStyleValues() []TypeEnum { @@ -639,7 +649,7 @@ func EncryptionStyleValues() []TypeEnum { Transparent, DataWithSymmetricSharedKey, DataWithAsymmetricSharedKey, - DataWithEnduserIndividualKey, + DataWithEndUserIndividualKey, } } @@ -671,7 +681,7 @@ func (what EncryptionStyle) Explain() string { } func (what EncryptionStyle) Title() string { - return [...]string{"None", "Transparent", "Data with Symmetric Shared Key", "Data with Asymmetric Shared Key", "Data with Enduser Individual Key"}[what] + return [...]string{"None", "Transparent", "Data with Symmetric Shared Key", "Data with Asymmetric Shared Key", "Data with End-User Individual Key"}[what] } type DataFormat int @@ -728,29 +738,29 @@ const ( HTTPS WS WSS - Reverse_proxy_web_protocol - Reverse_proxy_web_protocol_encrypted + ReverseProxyWebProtocol + ReverseProxyWebProtocolEncrypted MQTT JDBC - JDBC_encrypted + JdbcEncrypted ODBC - ODBC_encrypted - SQL_access_protocol - SQL_access_protocol_encrypted - NoSQL_access_protocol - NoSQL_access_protocol_encrypted + OdbcEncrypted + SqlAccessProtocol + SqlAccessProtocolEncrypted + NosqlAccessProtocol + NosqlAccessProtocolEncrypted BINARY - BINARY_encrypted + 
BinaryEncrypted TEXT - TEXT_encrypted + TextEncrypted SSH - SSH_tunnel + SshTunnel SMTP - SMTP_encrypted + SmtpEncrypted POP3 - POP3_encrypted + Pop3Encrypted IMAP - IMAP_encrypted + ImapEncrypted FTP FTPS SFTP @@ -760,14 +770,14 @@ const ( JMS NFS SMB - SMB_encrypted + SmbEncrypted LocalFileAccess NRPE XMPP IIOP - IIOP_encrypted + IiopEncrypted JRMP - JRMP_encrypted + JrmpEncrypted InProcessLibraryCall ContainerSpawning ) @@ -779,29 +789,29 @@ func ProtocolValues() []TypeEnum { HTTPS, WS, WSS, - Reverse_proxy_web_protocol, - Reverse_proxy_web_protocol_encrypted, + ReverseProxyWebProtocol, + ReverseProxyWebProtocolEncrypted, MQTT, JDBC, - JDBC_encrypted, + JdbcEncrypted, ODBC, - ODBC_encrypted, - SQL_access_protocol, - SQL_access_protocol_encrypted, - NoSQL_access_protocol, - NoSQL_access_protocol_encrypted, + OdbcEncrypted, + SqlAccessProtocol, + SqlAccessProtocolEncrypted, + NosqlAccessProtocol, + NosqlAccessProtocolEncrypted, BINARY, - BINARY_encrypted, + BinaryEncrypted, TEXT, - TEXT_encrypted, + TextEncrypted, SSH, - SSH_tunnel, + SshTunnel, SMTP, - SMTP_encrypted, + SmtpEncrypted, POP3, - POP3_encrypted, + Pop3Encrypted, IMAP, - IMAP_encrypted, + ImapEncrypted, FTP, FTPS, SFTP, @@ -811,14 +821,14 @@ func ProtocolValues() []TypeEnum { JMS, NFS, SMB, - SMB_encrypted, + SmbEncrypted, LocalFileAccess, NRPE, XMPP, IIOP, - IIOP_encrypted, + IiopEncrypted, JRMP, - JRMP_encrypted, + JrmpEncrypted, InProcessLibraryCall, ContainerSpawning, } @@ -888,24 +898,24 @@ func (what Protocol) IsProcessLocal() bool { } func (what Protocol) IsEncrypted() bool { - return what == HTTPS || what == WSS || what == JDBC_encrypted || what == ODBC_encrypted || - what == NoSQL_access_protocol_encrypted || what == SQL_access_protocol_encrypted || what == BINARY_encrypted || what == TEXT_encrypted || what == SSH || what == SSH_tunnel || - what == FTPS || what == SFTP || what == SCP || what == LDAPS || what == Reverse_proxy_web_protocol_encrypted || - what == IIOP_encrypted || what == 
JRMP_encrypted || what == SMB_encrypted || what == SMTP_encrypted || what == POP3_encrypted || what == IMAP_encrypted + return what == HTTPS || what == WSS || what == JdbcEncrypted || what == OdbcEncrypted || + what == NosqlAccessProtocolEncrypted || what == SqlAccessProtocolEncrypted || what == BinaryEncrypted || what == TextEncrypted || what == SSH || what == SshTunnel || + what == FTPS || what == SFTP || what == SCP || what == LDAPS || what == ReverseProxyWebProtocolEncrypted || + what == IiopEncrypted || what == JrmpEncrypted || what == SmbEncrypted || what == SmtpEncrypted || what == Pop3Encrypted || what == ImapEncrypted } func (what Protocol) IsPotentialDatabaseAccessProtocol(includingLaxDatabaseProtocols bool) bool { - strictlyDatabaseOnlyProtocol := what == JDBC_encrypted || what == ODBC_encrypted || - what == NoSQL_access_protocol_encrypted || what == SQL_access_protocol_encrypted || what == JDBC || what == ODBC || what == NoSQL_access_protocol || what == SQL_access_protocol + strictlyDatabaseOnlyProtocol := what == JdbcEncrypted || what == OdbcEncrypted || + what == NosqlAccessProtocolEncrypted || what == SqlAccessProtocolEncrypted || what == JDBC || what == ODBC || what == NosqlAccessProtocol || what == SqlAccessProtocol if includingLaxDatabaseProtocols { // include HTTP for REST-based NoSQL-DBs as well as unknown binary - return strictlyDatabaseOnlyProtocol || what == HTTPS || what == HTTP || what == BINARY || what == BINARY_encrypted + return strictlyDatabaseOnlyProtocol || what == HTTPS || what == HTTP || what == BINARY || what == BinaryEncrypted } return strictlyDatabaseOnlyProtocol } func (what Protocol) IsPotentialWebAccessProtocol() bool { - return what == HTTP || what == HTTPS || what == WS || what == WSS || what == Reverse_proxy_web_protocol || what == Reverse_proxy_web_protocol_encrypted + return what == HTTP || what == HTTPS || what == WS || what == WSS || what == ReverseProxyWebProtocol || what == ReverseProxyWebProtocolEncrypted } type 
TechnicalAssetTechnology int @@ -1117,7 +1127,7 @@ func (what TechnicalAssetTechnology) IsSecurityControlRelated() bool { return what == Vault || what == HSM || what == WAF || what == IDS || what == IPS } -func (what TechnicalAssetTechnology) IsUnprotectedCommsTolerated() bool { +func (what TechnicalAssetTechnology) IsUnprotectedCommunicationsTolerated() bool { return what == Monitoring || what == IDS || what == IPS } @@ -1150,11 +1160,11 @@ func (what TechnicalAssetTechnology) IsLessProtectedType() bool { what == Mainframe } -func (what TechnicalAssetTechnology) IsUsuallyProcessingEnduserRequests() bool { +func (what TechnicalAssetTechnology) IsUsuallyProcessingEndUserRequests() bool { return what == WebServer || what == WebApplication || what == ApplicationServer || what == ERP || what == WebServiceREST || what == WebServiceSOAP || what == EJB || what == ReportEngine } -func (what TechnicalAssetTechnology) IsUsuallyStoringEnduserData() bool { +func (what TechnicalAssetTechnology) IsUsuallyStoringEndUserData() bool { return what == Database || what == ERP || what == FileServer || what == LocalFileSystem || what == BlockStorage || what == MailServer || what == StreamProcessing || what == MessageQueue } @@ -1335,8 +1345,8 @@ func (what DataAsset) IsTaggedWithAny(tags ...string) bool { return ContainsCaseInsensitiveAny(what.Tags, tags...) 
} -func (what DataAsset) IsTaggedWithBaseTag(basetag string) bool { - return IsTaggedWithBaseTag(what.Tags, basetag) +func (what DataAsset) IsTaggedWithBaseTag(baseTag string) bool { + return IsTaggedWithBaseTag(what.Tags, baseTag) } /* @@ -1386,7 +1396,7 @@ func (what DataAsset) IdentifiedRisksByResponsibleTechnicalAssetId() map[string] } result := make(map[string][]Risk) - for techAssetId, _ := range uniqueTechAssetIDsResponsibleForThisDataAsset { + for techAssetId := range uniqueTechAssetIDsResponsibleForThisDataAsset { result[techAssetId] = append(result[techAssetId], ParsedModelRoot.TechnicalAssets[techAssetId].GeneratedRisks()...) } return result @@ -1538,11 +1548,11 @@ func (what DataAsset) ReceivedViaCommLinksSorted() []CommunicationLink { return result } -func IsTaggedWithBaseTag(tags []string, basetag string) bool { // basetags are before the colon ":" like in "aws:ec2" it's "aws". The subtag is after the colon. Also a pure "aws" tag matches the basetag "aws" - basetag = strings.ToLower(strings.TrimSpace(basetag)) +func IsTaggedWithBaseTag(tags []string, baseTag string) bool { // base tags are before the colon ":" like in "aws:ec2" it's "aws". The subtag is after the colon. Also, a pure "aws" tag matches the base tag "aws" + baseTag = strings.ToLower(strings.TrimSpace(baseTag)) for _, tag := range tags { tag = strings.ToLower(strings.TrimSpace(tag)) - if tag == basetag || strings.HasPrefix(tag, basetag+":") { + if tag == baseTag || strings.HasPrefix(tag, baseTag+":") { return true } } @@ -1575,11 +1585,12 @@ func (what TechnicalAsset) IsTaggedWithAny(tags ...string) bool { return ContainsCaseInsensitiveAny(what.Tags, tags...) 
} -func (what TechnicalAsset) IsTaggedWithBaseTag(basetag string) bool { - return IsTaggedWithBaseTag(what.Tags, basetag) +func (what TechnicalAsset) IsTaggedWithBaseTag(baseTag string) bool { + return IsTaggedWithBaseTag(what.Tags, baseTag) } // first use the tag(s) of the asset itself, then their trust boundaries (recursively up) and then their shared runtime + func (what TechnicalAsset) IsTaggedWithAnyTraversingUp(tags ...string) bool { if ContainsCaseInsensitiveAny(what.Tags, tags...) { return true @@ -1929,8 +1940,8 @@ func (what CommunicationLink) IsTaggedWithAny(tags ...string) bool { return ContainsCaseInsensitiveAny(what.Tags, tags...) } -func (what CommunicationLink) IsTaggedWithBaseTag(basetag string) bool { - return IsTaggedWithBaseTag(what.Tags, basetag) +func (what CommunicationLink) IsTaggedWithBaseTag(baseTag string) bool { + return IsTaggedWithBaseTag(what.Tags, baseTag) } type ByTechnicalCommunicationLinkIdSort []CommunicationLink @@ -1961,8 +1972,8 @@ func (what TrustBoundary) IsTaggedWithAny(tags ...string) bool { return ContainsCaseInsensitiveAny(what.Tags, tags...) } -func (what TrustBoundary) IsTaggedWithBaseTag(basetag string) bool { - return IsTaggedWithBaseTag(what.Tags, basetag) +func (what TrustBoundary) IsTaggedWithBaseTag(baseTag string) bool { + return IsTaggedWithBaseTag(what.Tags, baseTag) } func (what TrustBoundary) IsTaggedWithAnyTraversingUp(tags ...string) bool { @@ -2030,8 +2041,8 @@ func (what SharedRuntime) IsTaggedWithAny(tags ...string) bool { return ContainsCaseInsensitiveAny(what.Tags, tags...) 
} -func (what SharedRuntime) IsTaggedWithBaseTag(basetag string) bool { - return IsTaggedWithBaseTag(what.Tags, basetag) +func (what SharedRuntime) IsTaggedWithBaseTag(baseTag string) bool { + return IsTaggedWithBaseTag(what.Tags, baseTag) } func (what SharedRuntime) HighestConfidentiality() Confidentiality { @@ -2203,7 +2214,7 @@ type ParsedModel struct { func SortedTechnicalAssetIDs() []string { res := make([]string, 0) - for id, _ := range ParsedModelRoot.TechnicalAssets { + for id := range ParsedModelRoot.TechnicalAssets { res = append(res, id) } sort.Strings(res) @@ -2227,9 +2238,10 @@ func TagsActuallyUsed() []string { // === Sorting stuff ===================================== // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedKeysOfIndividualRiskCategories() []string { keys := make([]string, 0) - for k, _ := range ParsedModelRoot.IndividualRiskCategories { + for k := range ParsedModelRoot.IndividualRiskCategories { keys = append(keys, k) } sort.Strings(keys) @@ -2237,9 +2249,10 @@ func SortedKeysOfIndividualRiskCategories() []string { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedKeysOfSecurityRequirements() []string { keys := make([]string, 0) - for k, _ := range ParsedModelRoot.SecurityRequirements { + for k := range ParsedModelRoot.SecurityRequirements { keys = append(keys, k) } sort.Strings(keys) @@ -2247,9 +2260,10 @@ func SortedKeysOfSecurityRequirements() []string { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedKeysOfAbuseCases() []string { keys := make([]string, 0) - for k, _ := range ParsedModelRoot.AbuseCases { + for k := range ParsedModelRoot.AbuseCases { keys = append(keys, k) } sort.Strings(keys) @@ -2257,9 +2271,10 @@ func SortedKeysOfAbuseCases() []string { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) 
way: + func SortedKeysOfQuestions() []string { keys := make([]string, 0) - for k, _ := range ParsedModelRoot.Questions { + for k := range ParsedModelRoot.Questions { keys = append(keys, k) } sort.Strings(keys) @@ -2267,9 +2282,10 @@ func SortedKeysOfQuestions() []string { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedKeysOfDataAssets() []string { keys := make([]string, 0) - for k, _ := range ParsedModelRoot.DataAssets { + for k := range ParsedModelRoot.DataAssets { keys = append(keys, k) } sort.Strings(keys) @@ -2277,9 +2293,10 @@ func SortedKeysOfDataAssets() []string { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedKeysOfTechnicalAssets() []string { keys := make([]string, 0) - for k, _ := range ParsedModelRoot.TechnicalAssets { + for k := range ParsedModelRoot.TechnicalAssets { keys = append(keys, k) } sort.Strings(keys) @@ -2339,6 +2356,7 @@ func SharedRuntimesTaggedWithAny(tags ...string) []SharedRuntime { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedTechnicalAssetsByTitle() []TechnicalAsset { assets := make([]TechnicalAsset, 0) for _, asset := range ParsedModelRoot.TechnicalAssets { @@ -2349,6 +2367,7 @@ func SortedTechnicalAssetsByTitle() []TechnicalAsset { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedDataAssetsByTitle() []DataAsset { assets := make([]DataAsset, 0) for _, asset := range ParsedModelRoot.DataAssets { @@ -2359,6 +2378,7 @@ func SortedDataAssetsByTitle() []DataAsset { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedDataAssetsByDataBreachProbabilityAndTitleStillAtRisk() []DataAsset { assets := make([]DataAsset, 0) for _, asset := range ParsedModelRoot.DataAssets { @@ -2369,6 +2389,7 @@ func 
SortedDataAssetsByDataBreachProbabilityAndTitleStillAtRisk() []DataAsset { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedDataAssetsByDataBreachProbabilityAndTitle() []DataAsset { assets := make([]DataAsset, 0) for _, asset := range ParsedModelRoot.DataAssets { @@ -2379,6 +2400,7 @@ func SortedDataAssetsByDataBreachProbabilityAndTitle() []DataAsset { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedTechnicalAssetsByRiskSeverityAndTitle() []TechnicalAsset { assets := make([]TechnicalAsset, 0) for _, asset := range ParsedModelRoot.TechnicalAssets { @@ -2389,6 +2411,7 @@ func SortedTechnicalAssetsByRiskSeverityAndTitle() []TechnicalAsset { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedTechnicalAssetsByRAAAndTitle() []TechnicalAsset { assets := make([]TechnicalAsset, 0) for _, asset := range ParsedModelRoot.TechnicalAssets { @@ -2424,9 +2447,10 @@ func OutOfScopeTechnicalAssets() []TechnicalAsset { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedKeysOfTrustBoundaries() []string { keys := make([]string, 0) - for k, _ := range ParsedModelRoot.TrustBoundaries { + for k := range ParsedModelRoot.TrustBoundaries { keys = append(keys, k) } sort.Strings(keys) @@ -2443,9 +2467,10 @@ func SortedTrustBoundariesByTitle() []TrustBoundary { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedKeysOfSharedRuntime() []string { keys := make([]string, 0) - for k, _ := range ParsedModelRoot.SharedRuntimes { + for k := range ParsedModelRoot.SharedRuntimes { keys = append(keys, k) } sort.Strings(keys) @@ -2476,6 +2501,7 @@ func QuestionsUnanswered() int { // Line Styles: // dotted when model forgery attempt (i.e. 
nothing being sent and received) + func (what CommunicationLink) DetermineArrowLineStyle() string { if len(what.DataAssetsSent) == 0 && len(what.DataAssetsReceived) == 0 { return "dotted" // dotted, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... @@ -2487,6 +2513,7 @@ func (what CommunicationLink) DetermineArrowLineStyle() string { } // dotted when model forgery attempt (i.e. nothing being processed or stored) + func (what TechnicalAsset) DetermineShapeBorderLineStyle() string { if len(what.DataAssetsProcessed) == 0 && len(what.DataAssetsStored) == 0 || what.OutOfScope { return "dotted" // dotted, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... @@ -2495,6 +2522,7 @@ func (what TechnicalAsset) DetermineShapeBorderLineStyle() string { } // 3 when redundant + func (what TechnicalAsset) DetermineShapePeripheries() int { if what.Redundant { return 2 @@ -2620,6 +2648,7 @@ func (what TechnicalAsset) ProcessesOrStoresDataAsset(dataAssetId string) bool { } // red when >= confidential data stored in unencrypted technical asset + func (what TechnicalAsset) DetermineLabelColor() string { // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here // Check for red @@ -2679,6 +2708,7 @@ func (what TechnicalAsset) DetermineLabelColor() string { // red when mission-critical integrity, but still unauthenticated (non-readonly) channels access it // amber when critical integrity, but still unauthenticated (non-readonly) channels access it // pink when model forgery attempt (i.e. 
nothing being processed or stored) + func (what TechnicalAsset) DetermineShapeBorderColor() string { // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here // Check for red @@ -2728,7 +2758,7 @@ func (what TechnicalAsset) DetermineShapeBorderColor() string { } if len(what.DataAssetsProcessed) == 0 && len(what.DataAssetsStored) == 0 { - return colors.Pink // pink, because it's strange when too many technical assets process no data... some ok, but many in a diagram ist a sign of model forgery... + return colors.Pink // pink, because it's strange when too many technical assets process no data... some are ok, but many in a diagram is a sign of model forgery... } return colors.Black @@ -2769,6 +2799,7 @@ func (what CommunicationLink) DetermineLabelColor() string { } // pink when model forgery attempt (i.e. nothing being sent and received) + func (what CommunicationLink) DetermineArrowColor() string { // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here if len(what.DataAssetsSent) == 0 && len(what.DataAssetsReceived) == 0 || @@ -2858,6 +2889,7 @@ func (what TechnicalAsset) DetermineShapeFillColor() string { fillColor = colors.BrightenHexColor(fillColor) case Serverless: fillColor = colors.BrightenHexColor(colors.BrightenHexColor(fillColor)) + case Virtual: } return fillColor } @@ -3268,7 +3300,7 @@ type Risk struct { // TODO: refactor all "Id" here to "ID"? } -func (what Risk) GetRiskTracking() RiskTracking { // TODO: Unify function naming reagrding Get etc. +func (what Risk) GetRiskTracking() RiskTracking { // TODO: Unify function naming regarding Get etc. 
var result RiskTracking if riskTracking, ok := ParsedModelRoot.RiskTracking[what.SyntheticId]; ok { result = riskTracking @@ -3404,9 +3436,10 @@ type RiskRule interface { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedRiskCategories() []RiskCategory { categories := make([]RiskCategory, 0) - for k, _ := range GeneratedRisksByCategory { + for k := range GeneratedRisksByCategory { categories = append(categories, k) } sort.Sort(ByRiskCategoryHighestContainingRiskSeveritySortStillAtRisk(categories)) @@ -3793,7 +3826,7 @@ func FilteredByOnlyLowRisks() []Risk { } func FilterByModelFailures(risksByCat map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk, 0) + result := make(map[RiskCategory][]Risk) for riskCat, risks := range risksByCat { if riskCat.ModelFailurePossibleReason { result[riskCat] = risks diff --git a/raa/dummy/dummy.go b/raa/dummy/dummy.go index febfaf9a..1793be00 100644 --- a/raa/dummy/dummy.go +++ b/raa/dummy/dummy.go @@ -8,7 +8,12 @@ import ( // JUST A DUMMY TO HAVE AN ALTERNATIVE PLUGIN TO USE/TEST +var ( + _ = CalculateRAA +) + // used from plugin caller: + func CalculateRAA() string { for techAssetID, techAsset := range model.ParsedModelRoot.TechnicalAssets { techAsset.RAA = float64(rand.Intn(100)) diff --git a/raa/raa/raa.go b/raa/raa/raa.go index a0b98aa3..a6babc82 100644 --- a/raa/raa/raa.go +++ b/raa/raa/raa.go @@ -5,7 +5,12 @@ import ( "sort" ) +var ( + _ = CalculateRAA +) + // used from plugin caller: + func CalculateRAA() string { for techAssetID, techAsset := range model.ParsedModelRoot.TechnicalAssets { aa := calculateAttackerAttractiveness(techAsset) @@ -33,7 +38,7 @@ func calculateRelativeAttackerAttractiveness(attractiveness float64) float64 { // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: 
keys := make([]string, 0) - for k, _ := range model.ParsedModelRoot.TechnicalAssets { + for k := range model.ParsedModelRoot.TechnicalAssets { keys = append(keys, k) } sort.Strings(keys) @@ -53,7 +58,7 @@ func calculateRelativeAttackerAttractiveness(attractiveness float64) float64 { } // calculate the percent value of the value within the defined min/max range value := attractiveness - attackerAttractivenessMinimum - percent := float64(value) / float64(spread) * 100 + percent := value / spread * 100 if percent <= 0 { percent = 1 // since 0 suggests no attacks at all } @@ -72,7 +77,7 @@ func calculatePivotingNeighbourEffectAdjustment(techAsset model.TechnicalAsset) delta := calculateRelativeAttackerAttractiveness(calculateAttackerAttractiveness(outgoingNeighbour)) - calculateRelativeAttackerAttractiveness(calculateAttackerAttractiveness(techAsset)) if delta > 0 { potentialIncrease := delta / 3 - //fmt.Println("Positive delta from", techAsset.Id, "to", outgoingNeighbour.Id, "is", delta, "yields to pivoting eighbour effect of an incrase of", potentialIncrease) + //fmt.Println("Positive delta from", techAsset.Id, "to", outgoingNeighbour.Id, "is", delta, "yields to pivoting neighbour effect of an increase of", potentialIncrease) if potentialIncrease > adjustment { adjustment = potentialIncrease } diff --git a/report/excel.go b/report/excel.go index 3159b04b..1c1e414f 100644 --- a/report/excel.go +++ b/report/excel.go @@ -454,7 +454,7 @@ func WriteRisksExcelToFile(filename string) { checkErr(err) } -func WriteTagsExcelToFile(filename string) { // TODO: eventually when len(sortedTagsAvailable) == 0 is: write a hint in the execel that no tags are used +func WriteTagsExcelToFile(filename string) { // TODO: eventually when len(sortedTagsAvailable) == 0 is: write a hint in the Excel that no tags are used excelRow = 0 excel := excelize.NewFile() sheetName := model.ParsedModelRoot.Title @@ -492,7 +492,7 @@ func WriteTagsExcelToFile(filename string) { // TODO: eventually when 
len(sorted }) checkErr(err) - err = excel.SetCellValue(sheetName, "A1", "Element") // TODO is "Element" the correct generic name when referencing assets, links, trust boudaries etc.? Eventually add separate column "type of element" like "technical asset" or "data asset"? + err = excel.SetCellValue(sheetName, "A1", "Element") // TODO is "Element" the correct generic name when referencing assets, links, trust boundaries etc.? Eventually add separate column "type of element" like "technical asset" or "data asset"? sortedTagsAvailable := model.TagsActuallyUsed() sort.Strings(sortedTagsAvailable) axis := "" @@ -615,7 +615,7 @@ func writeRow(excel *excelize.File, sheetName string, axis string, styleBlackLef var alphabet = []string{"A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"} func determineColumnLetter(i int) string { - // can only have 700 columns in excel that way, but that should be more than usable anyway ;)... otherwise think about your model... + // can only have 700 columns in Excel that way, but that should be more than usable anyway ;)... otherwise think about your model... i++ if i < 26 { return alphabet[i] diff --git a/report/json.go b/report/json.go index bd4d8009..a1456dab 100644 --- a/report/json.go +++ b/report/json.go @@ -3,7 +3,7 @@ package report import ( "encoding/json" "github.com/threagile/threagile/model" - "io/ioutil" + "os" ) func WriteRisksJSON(filename string) { @@ -20,19 +20,20 @@ func WriteRisksJSON(filename string) { if err != nil { panic(err) } - err = ioutil.WriteFile(filename, jsonBytes, 0644) + err = os.WriteFile(filename, jsonBytes, 0644) if err != nil { panic(err) } } // TODO: also a "data assets" json? 
+ func WriteTechnicalAssetsJSON(filename string) { jsonBytes, err := json.Marshal(model.ParsedModelRoot.TechnicalAssets) if err != nil { panic(err) } - err = ioutil.WriteFile(filename, jsonBytes, 0644) + err = os.WriteFile(filename, jsonBytes, 0644) if err != nil { panic(err) } @@ -43,7 +44,7 @@ func WriteStatsJSON(filename string) { if err != nil { panic(err) } - err = ioutil.WriteFile(filename, jsonBytes, 0644) + err = os.WriteFile(filename, jsonBytes, 0644) if err != nil { panic(err) } diff --git a/report/report.go b/report/report.go index 39d3bdff..8bcfbe05 100644 --- a/report/report.go +++ b/report/report.go @@ -52,7 +52,6 @@ import ( "github.com/wcharczuk/go-chart" "github.com/wcharczuk/go-chart/drawing" "image" - "io/ioutil" "log" "os" "path/filepath" @@ -70,7 +69,8 @@ const /*dataFlowDiagramFullscreen,*/ allowedPdfLandscapePages, embedDiagramLegen var isLandscapePage bool var pdf *gofpdf.Fpdf -var alreadyTemplateImported = false + +// var alreadyTemplateImported = false var coverTemplateId, contentTemplateId, diagramLegendTemplateId int var pageNo int var linkCounter int @@ -79,6 +79,25 @@ var homeLink int var currentChapterTitleBreadcrumb string var firstParagraphRegEx = regexp.MustCompile(`(.*?)((
)|(

))`) +var ( + _ = pdfColorDataAssets + _ = rgbHexColorDataAssets + _ = pdfColorTechnicalAssets + _ = rgbHexColorTechnicalAssets + _ = pdfColorTrustBoundaries + _ = pdfColorSharedRuntime + _ = rgbHexColorTrustBoundaries + _ = rgbHexColorSharedRuntime + _ = pdfColorRiskFindings + _ = rgbHexColorRiskFindings + _ = rgbHexColorDisclaimer + _ = rgbHexColorGray + _ = rgbHexColorLightGray + _ = rgbHexColorOutOfScope + _ = rgbHexColorBlack + _ = pdfColorRed + _ = rgbHexColorRed +) func initReport() { pdf = nil @@ -187,11 +206,11 @@ func parseBackgroundTemplate(templateFilename string) { /* imageBox, err := rice.FindBox("template") checkErr(err) - file, err := ioutil.TempFile("", "background-*-.pdf") + file, err := os.CreateTemp("", "background-*-.pdf") checkErr(err) defer os.Remove(file.Name()) backgroundBytes := imageBox.MustBytes("background.pdf") - err = ioutil.WriteFile(file.Name(), backgroundBytes, 0644) + err = os.WriteFile(file.Name(), backgroundBytes, 0644) checkErr(err) */ coverTemplateId = gofpdi.ImportPage(pdf, templateFilename, 1, "/MediaBox") @@ -713,7 +732,7 @@ func createDisclaimer() { "is obligated to ensure the highly confidential contents are kept secret. The recipient assumes responsibility " + "for further distribution of this document." + "

" + - "In this particular project, a timebox approach was used to define the analysis effort. This means that the " + + "In this particular project, a time box approach was used to define the analysis effort. This means that the " + "author allotted a prearranged amount of time to identify and document threats. Because of this, there " + "is no guarantee that all possible threats and risks are discovered. Furthermore, the analysis " + "applies to a snapshot of the current state of the modeled architecture (based on the architecture information provided " + @@ -1224,11 +1243,11 @@ func createRiskMitigationStatus() { // CAUTION: Long labels might cause endless loop, then remove labels and render them manually later inside the PDF func embedStackedBarChart(sbcChart chart.StackedBarChart, x float64, y float64) { - tmpFilePNG, err := ioutil.TempFile(model.TempFolder, "chart-*-.png") + tmpFilePNG, err := os.CreateTemp(model.TempFolder, "chart-*-.png") checkErr(err) - defer os.Remove(tmpFilePNG.Name()) + defer func() { _ = os.Remove(tmpFilePNG.Name()) }() file, _ := os.Create(tmpFilePNG.Name()) - defer file.Close() + defer func() { _ = file.Close() }() err = sbcChart.Render(chart.PNG, file) checkErr(err) var options gofpdf.ImageOptions @@ -1238,12 +1257,12 @@ func embedStackedBarChart(sbcChart chart.StackedBarChart, x float64, y float64) } func embedPieChart(pieChart chart.PieChart, x float64, y float64) { - tmpFilePNG, err := ioutil.TempFile(model.TempFolder, "chart-*-.png") + tmpFilePNG, err := os.CreateTemp(model.TempFolder, "chart-*-.png") checkErr(err) - defer os.Remove(tmpFilePNG.Name()) + defer func() { _ = os.Remove(tmpFilePNG.Name()) }() file, err := os.Create(tmpFilePNG.Name()) checkErr(err) - defer file.Close() + defer func() { _ = file.Close() }() err = pieChart.Render(chart.PNG, file) checkErr(err) var options gofpdf.ImageOptions @@ -3993,13 +4012,13 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim } for _, key := range 
model.SortedKeysOfIndividualRiskCategories() { - indivRiskCat := model.ParsedModelRoot.IndividualRiskCategories[key] + individualRiskCategory := model.ParsedModelRoot.IndividualRiskCategories[key] pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - pdf.CellFormat(190, 3, indivRiskCat.Title, "0", 0, "", false, 0, "") + pdf.CellFormat(190, 3, individualRiskCategory.Title, "0", 0, "", false, 0, "") pdf.Ln(-1) pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, indivRiskCat.Id, "0", 0, "", false, 0, "") + pdf.CellFormat(190, 6, individualRiskCategory.Id, "0", 0, "", false, 0, "") pdf.Ln(-1) pdf.SetFont("Helvetica", "I", fontSizeBody) pdf.CellFormat(190, 6, "Individual Risk Category", "0", 0, "", false, 0, "") @@ -4009,22 +4028,22 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") pdfColorBlack() - pdf.MultiCell(160, 6, indivRiskCat.STRIDE.Title(), "0", "0", false) + pdf.MultiCell(160, 6, individualRiskCategory.STRIDE.Title(), "0", "0", false) pdfColorGray() pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(indivRiskCat.Description), "0", "0", false) + pdf.MultiCell(160, 6, firstParagraph(individualRiskCategory.Description), "0", "0", false) pdfColorGray() pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") pdfColorBlack() - pdf.MultiCell(160, 6, indivRiskCat.DetectionLogic, "0", "0", false) + pdf.MultiCell(160, 6, individualRiskCategory.DetectionLogic, "0", "0", false) pdfColorGray() pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") pdfColorBlack() - pdf.MultiCell(160, 6, indivRiskCat.RiskAssessment, "0", "0", false) + pdf.MultiCell(160, 6, 
individualRiskCategory.RiskAssessment, "0", "0", false) } pdf.Ln(-1) @@ -5597,11 +5616,11 @@ func getHeightWhenWidthIsFix(imageFullFilename string, width float64) float64 { } /* #nosec imageFullFilename is not tainted (see caller restricting it to image files of model folder only) */ file, err := os.Open(imageFullFilename) - defer file.Close() + defer func() { _ = file.Close() }() checkErr(err) - image, _, err := image.DecodeConfig(file) + img, _, err := image.DecodeConfig(file) checkErr(err) - return float64(image.Height) / (float64(image.Width) / width) + return float64(img.Height) / (float64(img.Width) / width) } func embedDataFlowDiagram(diagramFilenamePNG string) { @@ -5623,7 +5642,7 @@ func embedDataFlowDiagram(diagramFilenamePNG string) { // check to rotate the image if it is wider than high /* #nosec diagramFilenamePNG is not tainted */ imagePath, _ := os.Open(diagramFilenamePNG) - defer imagePath.Close() + defer func() { _ = imagePath.Close() }() srcImage, _, _ := image.Decode(imagePath) srcDimensions := srcImage.Bounds() // wider than high? @@ -5643,7 +5662,7 @@ func embedDataFlowDiagram(diagramFilenamePNG string) { // so rotate the image left by 90 degrees // ok, use temp PNG then // now rotate left by 90 degrees - rotatedFile, err := ioutil.TempFile(model.TempFolder, "diagram-*-.png") + rotatedFile, err := os.CreateTemp(model.TempFolder, "diagram-*-.png") checkErr(err) defer os.Remove(rotatedFile.Name()) dstImage := image.NewRGBA(image.Rect(0, 0, srcDimensions.Dy(), srcDimensions.Dx())) @@ -5713,7 +5732,7 @@ func embedDataRiskMapping(diagramFilenamePNG string) { // check to rotate the image if it is wider than high /* #nosec diagramFilenamePNG is not tainted */ imagePath, _ := os.Open(diagramFilenamePNG) - defer imagePath.Close() + defer func() { _ = imagePath.Close() }() srcImage, _, _ := image.Decode(imagePath) srcDimensions := srcImage.Bounds() // wider than high? 
@@ -5733,7 +5752,7 @@ func embedDataRiskMapping(diagramFilenamePNG string) { // so rotate the image left by 90 degrees // ok, use temp PNG then // now rotate left by 90 degrees - rotatedFile, err := ioutil.TempFile(model.TempFolder, "diagram-*-.png") + rotatedFile, err := os.CreateTemp(model.TempFolder, "diagram-*-.png") checkErr(err) defer os.Remove(rotatedFile.Name()) dstImage := image.NewRGBA(image.Rect(0, 0, srcDimensions.Dy(), srcDimensions.Dx())) @@ -5831,6 +5850,7 @@ func rgbHexColorSharedRuntime() string { func pdfColorRiskFindings() { pdf.SetTextColor(160, 40, 30) } + func rgbHexColorRiskFindings() string { return "#A0281E" } diff --git a/risks/built-in/code-backdooring/code-backdooring-rule.go b/risks/built-in/code-backdooring/code-backdooring-rule.go index 1f6e518e..22d8093d 100644 --- a/risks/built-in/code-backdooring/code-backdooring-rule.go +++ b/risks/built-in/code-backdooring/code-backdooring-rule.go @@ -96,7 +96,7 @@ func createRisk(technicalAsset model.TechnicalAsset, elevatedRisk bool) model.Ri } } dataBreachTechnicalAssetIDs := make([]string, 0) - for key, _ := range uniqueDataBreachTechnicalAssetIDs { + for key := range uniqueDataBreachTechnicalAssetIDs { dataBreachTechnicalAssetIDs = append(dataBreachTechnicalAssetIDs, key) } // create risk diff --git a/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go b/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go index e491655d..db2014c7 100644 --- a/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go +++ b/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go @@ -40,7 +40,7 @@ func GenerateRisks() []model.Risk { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] if technicalAsset.OutOfScope || technicalAsset.Technology.IsTrafficForwarding() || - technicalAsset.Technology.IsUnprotectedCommsTolerated() { + 
technicalAsset.Technology.IsUnprotectedCommunicationsTolerated() { continue } if technicalAsset.HighestConfidentiality() >= model.Confidential || @@ -51,7 +51,7 @@ func GenerateRisks() []model.Risk { commLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, commLink := range commLinks { caller := model.ParsedModelRoot.TechnicalAssets[commLink.SourceId] - if caller.Technology.IsUnprotectedCommsTolerated() || caller.Type == model.Datastore { + if caller.Technology.IsUnprotectedCommunicationsTolerated() || caller.Type == model.Datastore { continue } if caller.UsedAsClientByHuman { @@ -65,7 +65,7 @@ func GenerateRisks() []model.Risk { callersCommLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[caller.Id] for _, callersCommLink := range callersCommLinks { callersCaller := model.ParsedModelRoot.TechnicalAssets[callersCommLink.SourceId] - if callersCaller.Technology.IsUnprotectedCommsTolerated() || callersCaller.Type == model.Datastore { + if callersCaller.Technology.IsUnprotectedCommunicationsTolerated() || callersCaller.Type == model.Datastore { continue } if callersCaller.UsedAsClientByHuman { diff --git a/risks/built-in/missing-authentication/missing-authentication-rule.go b/risks/built-in/missing-authentication/missing-authentication-rule.go index 9d002242..82934af2 100644 --- a/risks/built-in/missing-authentication/missing-authentication-rule.go +++ b/risks/built-in/missing-authentication/missing-authentication-rule.go @@ -49,7 +49,7 @@ func GenerateRisks() []model.Risk { commLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, commLink := range commLinks { caller := model.ParsedModelRoot.TechnicalAssets[commLink.SourceId] - if caller.Technology.IsUnprotectedCommsTolerated() || caller.Type == model.Datastore { + if caller.Technology.IsUnprotectedCommunicationsTolerated() || caller.Type == model.Datastore { continue } highRisk := commLink.HighestConfidentiality() == 
model.StrictlyConfidential || diff --git a/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go b/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go index 1eb1662a..8941dc5b 100644 --- a/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go +++ b/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go @@ -40,7 +40,7 @@ func GenerateRisks() []model.Risk { hasCustomDevelopedParts, hasBuildPipeline, hasSourcecodeRepo, hasDevOpsClient := false, false, false, false impact := model.LowImpact var mostRelevantAsset model.TechnicalAsset - for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with highest sensitivity as example asset + for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] if technicalAsset.CustomDevelopedParts && !technicalAsset.OutOfScope { hasCustomDevelopedParts = true diff --git a/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go b/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go index e7dddb3a..77539d81 100644 --- a/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go +++ b/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go @@ -36,7 +36,7 @@ func Category() model.RiskCategory { } } -var specificSubtagsAWS = []string{"aws:vpc", "aws:ec2", "aws:s3", "aws:ebs", "aws:apigateway", "aws:lambda", "aws:dynamodb", "aws:rds", "aws:sqs", "aws:iam"} +var specificSubTagsAWS = []string{"aws:vpc", "aws:ec2", "aws:s3", "aws:ebs", "aws:apigateway", "aws:lambda", "aws:dynamodb", "aws:rds", "aws:sqs", "aws:iam"} func SupportedTags() []string { res := []string{ @@ -45,50 +45,50 @@ func SupportedTags() []string { "gcp", // Google Cloud Platform "ocp", // Oracle Cloud 
Platform } - res = append(res, specificSubtagsAWS...) + res = append(res, specificSubTagsAWS...) return res } func GenerateRisks() []model.Risk { risks := make([]model.Risk, 0) - sharedRuntimesWithUnspecificCloudRisks := make(map[string]bool, 0) - trustBoundariesWithUnspecificCloudRisks := make(map[string]bool, 0) - techAssetsWithUnspecificCloudRisks := make(map[string]bool, 0) + sharedRuntimesWithUnspecificCloudRisks := make(map[string]bool) + trustBoundariesWithUnspecificCloudRisks := make(map[string]bool) + techAssetsWithUnspecificCloudRisks := make(map[string]bool) - sharedRuntimeIDsAWS := make(map[string]bool, 0) - trustBoundaryIDsAWS := make(map[string]bool, 0) - techAssetIDsAWS := make(map[string]bool, 0) + sharedRuntimeIDsAWS := make(map[string]bool) + trustBoundaryIDsAWS := make(map[string]bool) + techAssetIDsAWS := make(map[string]bool) - sharedRuntimeIDsAzure := make(map[string]bool, 0) - trustBoundaryIDsAzure := make(map[string]bool, 0) - techAssetIDsAzure := make(map[string]bool, 0) + sharedRuntimeIDsAzure := make(map[string]bool) + trustBoundaryIDsAzure := make(map[string]bool) + techAssetIDsAzure := make(map[string]bool) - sharedRuntimeIDsGCP := make(map[string]bool, 0) - trustBoundaryIDsGCP := make(map[string]bool, 0) - techAssetIDsGCP := make(map[string]bool, 0) + sharedRuntimeIDsGCP := make(map[string]bool) + trustBoundaryIDsGCP := make(map[string]bool) + techAssetIDsGCP := make(map[string]bool) - sharedRuntimeIDsOCP := make(map[string]bool, 0) - trustBoundaryIDsOCP := make(map[string]bool, 0) - techAssetIDsOCP := make(map[string]bool, 0) + sharedRuntimeIDsOCP := make(map[string]bool) + trustBoundaryIDsOCP := make(map[string]bool) + techAssetIDsOCP := make(map[string]bool) - techAssetIDsWithSubtagSpecificCloudRisks := make(map[string]bool, 0) + techAssetIDsWithSubtagSpecificCloudRisks := make(map[string]bool) for _, trustBoundary := range model.ParsedModelRoot.TrustBoundaries { taggedOuterTB := trustBoundary.IsTaggedWithAny(SupportedTags()...) 
// false = generic cloud risks only // true = cloud-individual risks if taggedOuterTB || trustBoundary.Type.IsWithinCloud() { - addTrustBoundaryAccordingToBasetag(trustBoundary, trustBoundariesWithUnspecificCloudRisks, + addTrustBoundaryAccordingToBaseTag(trustBoundary, trustBoundariesWithUnspecificCloudRisks, trustBoundaryIDsAWS, trustBoundaryIDsAzure, trustBoundaryIDsGCP, trustBoundaryIDsOCP) for _, techAssetID := range trustBoundary.RecursivelyAllTechnicalAssetIDsInside() { added := false tA := model.ParsedModelRoot.TechnicalAssets[techAssetID] if tA.IsTaggedWithAny(SupportedTags()...) { - addAccordingToBasetag(tA, tA.Tags, + addAccordingToBaseTag(tA, tA.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) added = true } else if taggedOuterTB { - addAccordingToBasetag(tA, trustBoundary.Tags, + addAccordingToBaseTag(tA, trustBoundary.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) added = true @@ -102,7 +102,7 @@ func GenerateRisks() []model.Risk { // now loop over all technical assets, trust boundaries, and shared runtimes model-wide by tag for _, tA := range model.TechnicalAssetsTaggedWithAny(SupportedTags()...) { - addAccordingToBasetag(tA, tA.Tags, + addAccordingToBaseTag(tA, tA.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) } @@ -110,22 +110,22 @@ func GenerateRisks() []model.Risk { for _, candidateID := range tB.RecursivelyAllTechnicalAssetIDsInside() { tA := model.ParsedModelRoot.TechnicalAssets[candidateID] if tA.IsTaggedWithAny(SupportedTags()...) 
{ - addAccordingToBasetag(tA, tA.Tags, + addAccordingToBaseTag(tA, tA.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) } else { - addAccordingToBasetag(tA, tB.Tags, + addAccordingToBaseTag(tA, tB.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) } } } for _, sR := range model.SharedRuntimesTaggedWithAny(SupportedTags()...) { - addSharedRuntimeAccordingToBasetag(sR, sharedRuntimesWithUnspecificCloudRisks, + addSharedRuntimeAccordingToBaseTag(sR, sharedRuntimesWithUnspecificCloudRisks, sharedRuntimeIDsAWS, sharedRuntimeIDsAzure, sharedRuntimeIDsGCP, sharedRuntimeIDsOCP) for _, candidateID := range sR.TechnicalAssetsRunning { tA := model.ParsedModelRoot.TechnicalAssets[candidateID] - addAccordingToBasetag(tA, sR.Tags, + addAccordingToBaseTag(tA, sR.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) } @@ -269,7 +269,7 @@ func GenerateRisks() []model.Risk { return risks } -func addTrustBoundaryAccordingToBasetag(trustBoundary model.TrustBoundary, +func addTrustBoundaryAccordingToBaseTag(trustBoundary model.TrustBoundary, trustBoundariesWithUnspecificCloudRisks map[string]bool, trustBoundaryIDsAWS map[string]bool, trustBoundaryIDsAzure map[string]bool, @@ -293,7 +293,7 @@ func addTrustBoundaryAccordingToBasetag(trustBoundary model.TrustBoundary, } } -func addSharedRuntimeAccordingToBasetag(sharedRuntime model.SharedRuntime, +func addSharedRuntimeAccordingToBaseTag(sharedRuntime model.SharedRuntime, sharedRuntimesWithUnspecificCloudRisks map[string]bool, sharedRuntimeIDsAWS map[string]bool, sharedRuntimeIDsAzure map[string]bool, @@ -317,13 +317,13 @@ func addSharedRuntimeAccordingToBasetag(sharedRuntime model.SharedRuntime, } } -func addAccordingToBasetag(techAsset model.TechnicalAsset, tags []string, +func addAccordingToBaseTag(techAsset model.TechnicalAsset, tags 
[]string, techAssetIDsWithTagSpecificCloudRisks map[string]bool, techAssetIDsAWS map[string]bool, techAssetIDsAzure map[string]bool, techAssetIDsGCP map[string]bool, techAssetIDsOCP map[string]bool) { - if techAsset.IsTaggedWithAny(specificSubtagsAWS...) { + if techAsset.IsTaggedWithAny(specificSubTagsAWS...) { techAssetIDsWithTagSpecificCloudRisks[techAsset.Id] = true } if model.IsTaggedWithBaseTag(tags, "aws") { diff --git a/risks/built-in/missing-file-validation/missing-file-validation-rule.go b/risks/built-in/missing-file-validation/missing-file-validation-rule.go index c8633038..bc0b5d67 100644 --- a/risks/built-in/missing-file-validation/missing-file-validation-rule.go +++ b/risks/built-in/missing-file-validation/missing-file-validation-rule.go @@ -15,7 +15,7 @@ func Category() model.RiskCategory { Action: "File Validation", Mitigation: "Filter by file extension and discard (if feasible) the name provided. Whitelist the accepted file types " + "and determine the mime-type on the server-side (for example via \"Apache Tika\" or similar checks). If the file is retrievable by " + - "endusers and/or backoffice employees, consider performing scans for popular malware (if the files can be retrieved much later than they " + + "end users and/or backoffice employees, consider performing scans for popular malware (if the files can be retrieved much later than they " + "were uploaded, also apply a fresh malware scan during retrieval to scan with newer signatures of popular malware). 
Also enforce " + "limits on maximum file size to avoid denial-of-service like scenarios.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", diff --git a/risks/built-in/missing-hardening/missing-hardening-rule.go b/risks/built-in/missing-hardening/missing-hardening-rule.go index b4795740..d9061cfc 100644 --- a/risks/built-in/missing-hardening/missing-hardening-rule.go +++ b/risks/built-in/missing-hardening/missing-hardening-rule.go @@ -24,7 +24,7 @@ func Category() model.RiskCategory { Function: model.Operations, STRIDE: model.Tampering, DetectionLogic: "In-scope technical assets with RAA values of " + strconv.Itoa(raaLimit) + " % or higher. " + - "Generally for high-value targets like datastores, application servers, identity providers and ERP systems this limit is reduced to " + strconv.Itoa(raaLimitReduced) + " %", + "Generally for high-value targets like data stores, application servers, identity providers and ERP systems this limit is reduced to " + strconv.Itoa(raaLimitReduced) + " %", RiskAssessment: "The risk rating depends on the sensitivity of the data processed or stored in the technical asset.", FalsePositives: "Usually no false positives.", ModelFailurePossibleReason: false, diff --git a/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go b/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go index 31f10700..01e2ba38 100644 --- a/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go +++ b/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go @@ -8,22 +8,22 @@ func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-identity-propagation", Title: "Missing Identity Propagation", - Description: "Technical assets (especially multi-tenant systems), which usually process data for endusers should " + - "authorize every request based on the identity of the enduser when the data flow is 
authenticated (i.e. non-public). " + + Description: "Technical assets (especially multi-tenant systems), which usually process data for end users should " + + "authorize every request based on the identity of the end user when the data flow is authenticated (i.e. non-public). " + "For DevOps usages at least a technical-user authorization is required.", Impact: "If this risk is unmitigated, attackers might be able to access or modify foreign data after a successful compromise of a component within " + "the system due to missing resource-based authorization checks.", ASVS: "V4 - Access Control Verification Requirements", CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Access_Control_Cheat_Sheet.html", Action: "Identity Propagation and Resource-based Authorization", - Mitigation: "When processing requests for endusers if possible authorize in the backend against the propagated " + - "identity of the enduser. This can be achieved in passing JWTs or similar tokens and checking them in the backend " + + Mitigation: "When processing requests for end users if possible authorize in the backend against the propagated " + + "identity of the end user. This can be achieved in passing JWTs or similar tokens and checking them in the backend " + "services. For DevOps usages apply at least a technical-user authorization.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", Function: model.Architecture, STRIDE: model.ElevationOfPrivilege, - DetectionLogic: "In-scope service-like technical assets which usually process data based on enduser requests, if authenticated " + - "(i.e. non-public), should authorize incoming requests based on the propagated enduser identity when their rating is sensitive. " + + DetectionLogic: "In-scope service-like technical assets which usually process data based on end user requests, if authenticated " + + "(i.e. 
non-public), should authorize incoming requests based on the propagated end user identity when their rating is sensitive. " + "This is especially the case for all multi-tenant assets (there even less-sensitive rated ones). " + "DevOps usages are exempted from this risk.", RiskAssessment: "The risk rating (medium or high) " + @@ -46,7 +46,7 @@ func GenerateRisks() []model.Risk { if technicalAsset.OutOfScope { continue } - if technicalAsset.Technology.IsUsuallyProcessingEnduserRequests() && + if technicalAsset.Technology.IsUsuallyProcessingEndUserRequests() && (technicalAsset.Confidentiality >= model.Confidential || technicalAsset.Integrity >= model.Critical || technicalAsset.Availability >= model.Critical || @@ -62,7 +62,7 @@ func GenerateRisks() []model.Risk { continue } if commLink.Authentication != model.NoneAuthentication && - commLink.Authorization != model.EnduserIdentityPropagation { + commLink.Authorization != model.EndUserIdentityPropagation { if commLink.Usage == model.DevOps && commLink.Authorization != model.NoneAuthorization { continue } @@ -87,7 +87,7 @@ func createRisk(technicalAsset model.TechnicalAsset, incomingAccess model.Commun Severity: model.CalculateSeverity(model.Unlikely, impact), ExploitationLikelihood: model.Unlikely, ExploitationImpact: impact, - Title: "Missing Enduser Identity Propagation over communication link " + incomingAccess.Title + " " + + Title: "Missing End User Identity Propagation over communication link " + incomingAccess.Title + " " + "from " + model.ParsedModelRoot.TechnicalAssets[incomingAccess.SourceId].Title + " " + "to " + technicalAsset.Title + "", MostRelevantTechnicalAssetId: technicalAsset.Id, diff --git a/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go b/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go index acd44afd..287cd751 100644 --- 
a/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go +++ b/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go @@ -8,18 +8,18 @@ func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-identity-provider-isolation", Title: "Missing Identity Provider Isolation", - Description: "Highly sensitive identity provider assets and their identity datastores should be isolated from other assets " + + Description: "Highly sensitive identity provider assets and their identity data stores should be isolated from other assets " + "by their own network segmentation trust-boundary (" + model.ExecutionEnvironment.String() + " boundaries do not count as network isolation).", Impact: "If this risk is unmitigated, attackers successfully attacking other components of the system might have an easy path towards " + - "highly sensitive identity provider assets and their identity datastores, as they are not separated by network segmentation.", + "highly sensitive identity provider assets and their identity data stores, as they are not separated by network segmentation.", ASVS: "V1 - Architecture, Design and Threat Modeling Requirements", CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", Action: "Network Segmentation", - Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive identity provider assets and their identity datastores.", + Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive identity provider assets and their identity data stores.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", Function: model.Operations, STRIDE: model.ElevationOfPrivilege, - DetectionLogic: "In-scope identity provider assets and their identity datastores " + + DetectionLogic: "In-scope identity provider assets and their identity data stores " + 
"when surrounded by other (not identity-related) assets (without a network trust-boundary in-between). " + "This risk is especially prevalent when other non-identity related assets are within the same execution environment (i.e. same database or same application server).", RiskAssessment: "Default is " + model.HighImpact.String() + " impact. The impact is increased to " + model.VeryHighImpact.String() + " when the asset missing the " + @@ -45,7 +45,7 @@ func GenerateRisks() []model.Risk { sameExecutionEnv := false createRiskEntry := false // now check for any other same-network assets of non-identity-related types - for sparringAssetCandidateId, _ := range model.ParsedModelRoot.TechnicalAssets { // so inner loop again over all assets + for sparringAssetCandidateId := range model.ParsedModelRoot.TechnicalAssets { // so inner loop again over all assets if technicalAsset.Id != sparringAssetCandidateId { sparringAssetCandidate := model.ParsedModelRoot.TechnicalAssets[sparringAssetCandidateId] if !sparringAssetCandidate.Technology.IsIdentityRelated() && !sparringAssetCandidate.Technology.IsCloseToHighValueTargetsTolerated() { diff --git a/risks/built-in/missing-identity-store/missing-identity-store-rule.go b/risks/built-in/missing-identity-store/missing-identity-store-rule.go index 9096e320..c985a39e 100644 --- a/risks/built-in/missing-identity-store/missing-identity-store-rule.go +++ b/risks/built-in/missing-identity-store/missing-identity-store-rule.go @@ -19,8 +19,8 @@ func Category() model.RiskCategory { Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", Function: model.Architecture, STRIDE: model.Spoofing, - DetectionLogic: "Models with authenticated data-flows authorized via enduser-identity missing an in-scope identity store.", - RiskAssessment: "The risk rating depends on the sensitivity of the enduser-identity authorized technical assets and " + + DetectionLogic: "Models with authenticated data-flows authorized via 
end user identity missing an in-scope identity store.", + RiskAssessment: "The risk rating depends on the sensitivity of the end user-identity authorized technical assets and " + "their data assets processed and stored.", FalsePositives: "Models only offering data/services without any real authentication need " + "can be considered as false positives after individual review.", @@ -42,14 +42,14 @@ func GenerateRisks() []model.Risk { return risks } } - // now check if we have enduser-identity authorized communication links, then it's a risk + // now check if we have end user identity authorized communication links, then it's a risk riskIdentified := false var mostRelevantAsset model.TechnicalAsset impact := model.LowImpact - for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with highest sensitivity as example asset + for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] - for _, commLink := range technicalAsset.CommunicationLinksSorted() { // use the sorted one to always get the same tech asset with highest sensitivity as example asset - if commLink.Authorization == model.EnduserIdentityPropagation { + for _, commLink := range technicalAsset.CommunicationLinksSorted() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset + if commLink.Authorization == model.EndUserIdentityPropagation { riskIdentified = true targetAsset := model.ParsedModelRoot.TechnicalAssets[commLink.TargetId] if impact == model.LowImpact { diff --git a/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go b/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go index 495a3215..2b29af70 100644 --- a/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go +++ 
b/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go @@ -11,7 +11,7 @@ func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-network-segmentation", Title: "Missing Network Segmentation", - Description: "Highly sensitive assets and/or datastores residing in the same network segment than other " + + Description: "Highly sensitive assets and/or data stores residing in the same network segment than other " + "lower sensitive assets (like webservers or content management systems etc.) should be better protected " + "by a network segmentation trust-boundary.", Impact: "If this risk is unmitigated, attackers successfully attacking other components of the system might have an easy path towards " + @@ -19,11 +19,11 @@ func Category() model.RiskCategory { ASVS: "V1 - Architecture, Design and Threat Modeling Requirements", CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", Action: "Network Segmentation", - Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive assets and/or datastores.", + Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive assets and/or data stores.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", Function: model.Operations, STRIDE: model.ElevationOfPrivilege, - DetectionLogic: "In-scope technical assets with high sensitivity and RAA values as well as datastores " + + DetectionLogic: "In-scope technical assets with high sensitivity and RAA values as well as data stores " + "when surrounded by assets (without a network trust-boundary in-between) which are of type " + model.ClientSystem.String() + ", " + model.WebServer.String() + ", " + model.WebApplication.String() + ", " + model.CMS.String() + ", " + model.WebServiceREST.String() + ", " + model.WebServiceSOAP.String() + ", " + model.BuildPipeline.String() + ", " + 
model.SourcecodeRepository.String() + ", " + model.Monitoring.String() + ", or similar and there is no direct connection between these " + @@ -46,7 +46,7 @@ func GenerateRisks() []model.Risk { // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: keys := make([]string, 0) - for k, _ := range model.ParsedModelRoot.TechnicalAssets { + for k := range model.ParsedModelRoot.TechnicalAssets { keys = append(keys, k) } sort.Strings(keys) diff --git a/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go b/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go index 8ef10cdf..0c8919b4 100644 --- a/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go +++ b/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go @@ -8,14 +8,14 @@ func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-vault-isolation", Title: "Missing Vault Isolation", - Description: "Highly sensitive vault assets and their datastores should be isolated from other assets " + + Description: "Highly sensitive vault assets and their data stores should be isolated from other assets " + "by their own network segmentation trust-boundary (" + model.ExecutionEnvironment.String() + " boundaries do not count as network isolation).", Impact: "If this risk is unmitigated, attackers successfully attacking other components of the system might have an easy path towards " + - "highly sensitive vault assets and their datastores, as they are not separated by network segmentation.", + "highly sensitive vault assets and their data stores, as they are not separated by network segmentation.", ASVS: "V1 - Architecture, Design and Threat Modeling Requirements", CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", Action: "Network Segmentation", - 
Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive vault assets and their datastores.", + Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive vault assets and their data stores.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", Function: model.Operations, STRIDE: model.ElevationOfPrivilege, @@ -45,7 +45,7 @@ func GenerateRisks() []model.Risk { sameExecutionEnv := false createRiskEntry := false // now check for any other same-network assets of non-vault-related types - for sparringAssetCandidateId, _ := range model.ParsedModelRoot.TechnicalAssets { // so inner loop again over all assets + for sparringAssetCandidateId := range model.ParsedModelRoot.TechnicalAssets { // so inner loop again over all assets if technicalAsset.Id != sparringAssetCandidateId { sparringAssetCandidate := model.ParsedModelRoot.TechnicalAssets[sparringAssetCandidateId] if sparringAssetCandidate.Technology != model.Vault && !isVaultStorage(technicalAsset, sparringAssetCandidate) { diff --git a/risks/built-in/missing-vault/missing-vault-rule.go b/risks/built-in/missing-vault/missing-vault-rule.go index a046131b..ac3e4590 100644 --- a/risks/built-in/missing-vault/missing-vault-rule.go +++ b/risks/built-in/missing-vault/missing-vault-rule.go @@ -39,7 +39,7 @@ func GenerateRisks() []model.Risk { hasVault := false var mostRelevantAsset model.TechnicalAsset impact := model.LowImpact - for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with highest sensitivity as example asset + for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset techAsset := model.ParsedModelRoot.TechnicalAssets[id] if techAsset.Technology == model.Vault { hasVault = true diff --git 
a/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go b/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go index 3c9ab06b..435a6ab9 100644 --- a/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go +++ b/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go @@ -41,7 +41,7 @@ func GenerateRisks() []model.Risk { risks := make([]model.Risk, 0) // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: keys := make([]string, 0) - for k, _ := range model.ParsedModelRoot.SharedRuntimes { + for k := range model.ParsedModelRoot.SharedRuntimes { keys = append(keys, k) } sort.Strings(keys) diff --git a/risks/built-in/search-query-injection/search-query-injection-rule.go b/risks/built-in/search-query-injection/search-query-injection-rule.go index 936ab6e6..1f250b28 100644 --- a/risks/built-in/search-query-injection/search-query-injection-rule.go +++ b/risks/built-in/search-query-injection/search-query-injection-rule.go @@ -43,7 +43,7 @@ func GenerateRisks() []model.Risk { continue } if incomingFlow.Protocol == model.HTTP || incomingFlow.Protocol == model.HTTPS || - incomingFlow.Protocol == model.BINARY || incomingFlow.Protocol == model.BINARY_encrypted { + incomingFlow.Protocol == model.BINARY || incomingFlow.Protocol == model.BinaryEncrypted { likelihood := model.VeryLikely if incomingFlow.Usage == model.DevOps { likelihood = model.Likely diff --git a/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go b/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go index 901c5fcf..a1844ae0 100644 --- a/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go +++ b/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go @@ -80,7 +80,7 @@ func createRisk(technicalAsset model.TechnicalAsset, outgoingFlow 
model.Communic impact = model.MediumImpact } dataBreachTechnicalAssetIDs := make([]string, 0) - for key, _ := range uniqueDataBreachTechnicalAssetIDs { + for key := range uniqueDataBreachTechnicalAssetIDs { dataBreachTechnicalAssetIDs = append(dataBreachTechnicalAssetIDs, key) } likelihood := model.Likely diff --git a/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go b/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go index 34baf45f..80cac869 100644 --- a/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go +++ b/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go @@ -71,7 +71,7 @@ func createRisk(technicalAsset model.TechnicalAsset) model.Risk { } } dataBreachTechnicalAssetIDs := make([]string, 0) - for key, _ := range uniqueDataBreachTechnicalAssetIDs { + for key := range uniqueDataBreachTechnicalAssetIDs { dataBreachTechnicalAssetIDs = append(dataBreachTechnicalAssetIDs, key) } // create risk diff --git a/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go b/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go index cdf23c6b..67c42992 100644 --- a/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go +++ b/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go @@ -23,7 +23,7 @@ func Category() model.RiskCategory { ", " + model.IPS.String() + " and embedded components like " + model.Library.String() + ") " + "storing data assets rated at least as " + model.Confidential.String() + " or " + model.Critical.String() + ". 
" + "For technical assets storing data assets rated as " + model.StrictlyConfidential.String() + " or " + model.MissionCritical.String() + " the " + - "encryption must be of type " + model.DataWithEnduserIndividualKey.String() + ".", + "encryption must be of type " + model.DataWithEndUserIndividualKey.String() + ".", RiskAssessment: "Depending on the confidentiality rating of the stored data-assets either medium or high risk.", FalsePositives: "When all sensitive data stored within the asset is already fully encrypted on document or data level.", ModelFailurePossibleReason: false, @@ -36,6 +36,7 @@ func SupportedTags() []string { } // check for technical assets that should be encrypted due to their confidentiality + func GenerateRisks() []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { @@ -45,16 +46,16 @@ func GenerateRisks() []model.Risk { technicalAsset.HighestIntegrity() >= model.Critical) { verySensitive := technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || technicalAsset.HighestIntegrity() == model.MissionCritical - requiresEnduserKey := verySensitive && technicalAsset.Technology.IsUsuallyStoringEnduserData() + requiresEndUserKey := verySensitive && technicalAsset.Technology.IsUsuallyStoringEndUserData() if technicalAsset.Encryption == model.NoneEncryption { impact := model.MediumImpact if verySensitive { impact = model.HighImpact } - risks = append(risks, createRisk(technicalAsset, impact, requiresEnduserKey)) - } else if requiresEnduserKey && + risks = append(risks, createRisk(technicalAsset, impact, requiresEndUserKey)) + } else if requiresEndUserKey && (technicalAsset.Encryption == model.Transparent || technicalAsset.Encryption == model.DataWithSymmetricSharedKey || technicalAsset.Encryption == model.DataWithAsymmetricSharedKey) { - risks = append(risks, createRisk(technicalAsset, model.MediumImpact, requiresEnduserKey)) + risks = append(risks, createRisk(technicalAsset, 
model.MediumImpact, requiresEndUserKey)) } } } @@ -63,16 +64,17 @@ func GenerateRisks() []model.Risk { // Simple routing assets like 'Reverse Proxy' or 'Load Balancer' usually don't have their own storage and thus have no // encryption requirement for the asset itself (though for the communication, but that's a different rule) + func IsEncryptionWaiver(asset model.TechnicalAsset) bool { return asset.Technology == model.ReverseProxy || asset.Technology == model.LoadBalancer || asset.Technology == model.WAF || asset.Technology == model.IDS || asset.Technology == model.IPS || asset.Technology.IsEmbeddedComponent() } -func createRisk(technicalAsset model.TechnicalAsset, impact model.RiskExploitationImpact, requiresEnduserKey bool) model.Risk { +func createRisk(technicalAsset model.TechnicalAsset, impact model.RiskExploitationImpact, requiresEndUserKey bool) model.Risk { title := "Unencrypted Technical Asset named " + technicalAsset.Title + "" - if requiresEnduserKey { - title += " missing enduser-individual encryption with " + model.DataWithEnduserIndividualKey.String() + if requiresEndUserKey { + title += " missing end user individual encryption with " + model.DataWithEndUserIndividualKey.String() } risk := model.Risk{ Category: Category(), diff --git a/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go b/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go index 0718540b..ed5a7eb3 100644 --- a/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go +++ b/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go @@ -33,6 +33,7 @@ func SupportedTags() []string { } // check for communication links that should be encrypted due to their confidentiality and/or integrity + func GenerateRisks() []model.Risk { risks := make([]model.Risk, 0) for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { @@ -42,8 +43,8 @@ func GenerateRisks() []model.Risk { targetAsset := 
model.ParsedModelRoot.TechnicalAssets[dataFlow.TargetId] if !technicalAsset.OutOfScope || !sourceAsset.OutOfScope { if !dataFlow.Protocol.IsEncrypted() && !dataFlow.Protocol.IsProcessLocal() && - !sourceAsset.Technology.IsUnprotectedCommsTolerated() && - !targetAsset.Technology.IsUnprotectedCommsTolerated() { + !sourceAsset.Technology.IsUnprotectedCommunicationsTolerated() && + !targetAsset.Technology.IsUnprotectedCommunicationsTolerated() { addedOne := false for _, sentDataAsset := range dataFlow.DataAssetsSent { dataAsset := model.ParsedModelRoot.DataAssets[sentDataAsset] diff --git a/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go b/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go index 5fa8f9e2..e845f955 100644 --- a/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go +++ b/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go @@ -8,8 +8,8 @@ func Category() model.RiskCategory { return model.RiskCategory{ Id: "unguarded-direct-datastore-access", Title: "Unguarded Direct Datastore Access", - Description: "Datastores accessed across trust boundaries must be guarded by some protecting service or application.", - Impact: "If this risk is unmitigated, attackers might be able to directly attack sensitive datastores without any protecting components in-between.", + Description: "Data stores accessed across trust boundaries must be guarded by some protecting service or application.", + Impact: "If this risk is unmitigated, attackers might be able to directly attack sensitive data stores without any protecting components in-between.", ASVS: "V1 - Architecture, Design and Threat Modeling Requirements", CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", Action: "Encapsulation of Datastore", @@ -34,7 +34,8 @@ func SupportedTags() []string { return []string{} } -// 
check for datastores that should not be accessed directly across trust boundaries +// check for data stores that should not be accessed directly across trust boundaries + func GenerateRisks() []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { diff --git a/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go b/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go index 5da7f2db..f4e5c8d6 100644 --- a/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go +++ b/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go @@ -55,8 +55,8 @@ func GenerateRisks() []model.Risk { } // check for any incoming IIOP and JRMP protocols for _, commLink := range model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] { - if commLink.Protocol == model.IIOP || commLink.Protocol == model.IIOP_encrypted || - commLink.Protocol == model.JRMP || commLink.Protocol == model.JRMP_encrypted { + if commLink.Protocol == model.IIOP || commLink.Protocol == model.IiopEncrypted || + commLink.Protocol == model.JRMP || commLink.Protocol == model.JrmpEncrypted { hasOne = true if commLink.IsAcrossTrustBoundaryNetworkOnly() { acrossTrustBoundary = true diff --git a/risks/custom/demo/demo-rule.go b/risks/custom/demo/demo-rule.go index 5eb8d672..2a2daacf 100644 --- a/risks/custom/demo/demo-rule.go +++ b/risks/custom/demo/demo-rule.go @@ -7,6 +7,7 @@ import ( type customRiskRule string // exported as symbol (here simply as variable to interface to bundle many functions under one symbol) named "CustomRiskRule" + var CustomRiskRule customRiskRule func (r customRiskRule) Category() model.RiskCategory { From 98882e0c33277624d6fa904d46c153df1413d40e Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Mon, 4 Dec 2023 16:39:47 -0800 Subject: [PATCH 02/68] fixed deprecaed and other warnings; fixed spelling --- colors/colors.go | 31 +- go.sum | 27 +- 
.../add-build-pipeline-macro.go | 878 ++++---- macros/built-in/add-vault/add-vault-macro.go | 250 +-- .../pretty-print/pretty-print-macro.go | 6 +- .../remove-unused-tags-macro.go | 8 +- .../seed-risk-tracking-macro.go | 12 +- macros/built-in/seed-tags/seed-tags-macro.go | 8 +- main.go | 1924 ++++++++--------- model/types.go | 429 ++-- raa/dummy/dummy.go | 5 - raa/raa/raa.go | 11 +- report/excel.go | 6 +- report/json.go | 9 +- report/report.go | 70 +- .../code-backdooring/code-backdooring-rule.go | 2 +- ...ssing-authentication-second-factor-rule.go | 6 +- .../missing-authentication-rule.go | 2 +- .../missing-build-infrastructure-rule.go | 2 +- .../missing-cloud-hardening-rule.go | 60 +- .../missing-file-validation-rule.go | 2 +- .../missing-hardening-rule.go | 2 +- .../missing-identity-propagation-rule.go | 18 +- ...issing-identity-provider-isolation-rule.go | 10 +- .../missing-identity-store-rule.go | 12 +- .../missing-network-segmentation-rule.go | 8 +- .../missing-vault-isolation-rule.go | 8 +- .../missing-vault/missing-vault-rule.go | 2 +- .../mixed-targets-on-shared-runtime-rule.go | 2 +- .../search-query-injection-rule.go | 2 +- .../server-side-request-forgery-rule.go | 2 +- .../unchecked-deployment-rule.go | 2 +- .../unencrypted-asset-rule.go | 18 +- .../unencrypted-communication-rule.go | 5 +- .../unguarded-direct-datastore-access-rule.go | 7 +- .../untrusted-deserialization-rule.go | 4 +- risks/custom/demo/demo-rule.go | 1 - 37 files changed, 1889 insertions(+), 1962 deletions(-) diff --git a/colors/colors.go b/colors/colors.go index cac70f6d..506e97a6 100644 --- a/colors/colors.go +++ b/colors/colors.go @@ -5,33 +5,10 @@ import ( "github.com/jung-kurt/gofpdf" ) -const ( - Amber = "#AF780E" - Green = "#008000" - Blue = "#000080" - DarkBlue = "#000060" - Black = "#000000" - Gray = "#444444" - LightGray = "#666666" - MiddleLightGray = "#999999" - MoreLightGray = "#D2D2D2" - VeryLightGray = "#E5E5E5" - ExtremeLightGray = "#F6F6F6" - Pink = "#F987C5" - 
LightPink = "#FFE7EF" - Red = "#CC0000" - OutOfScopeFancy = "#D5D7FF" - CustomDevelopedParts = "#FFFC97" - ExtremeLightBlue = "#DDFFFF" - LightBlue = "#77FFFF" - Brown = "#8C4C17" -) - -var ( - _ = Green + Blue + MoreLightGray + ExtremeLightGray + LightBlue - _ = ColorOutOfScope - _ = RgbHexColorModelFailure -) +const Red, Amber, Green, Blue, DarkBlue, Black, Gray, LightGray, MiddleLightGray, MoreLightGray, VeryLightGray, ExtremeLightGray, Pink, LightPink = "#CC0000", "#AF780E", "#008000", "#000080", "#000060", "#000000", "#444444", "#666666", "#999999", "#D2D2D2", "#E5E5E5", "#F6F6F6", "#F987C5", "#FFE7EF" +const ExtremeLightBlue, OutOfScopeFancy, CustomDevelopedParts = "#DDFFFF", "#D5D7FF", "#FFFC97" +const LightBlue = "#77FFFF" +const Brown = "#8C4C17" func DarkenHexColor(hexString string) string { colorBytes, _ := hex.DecodeString(hexString[1:]) diff --git a/go.sum b/go.sum index 827e8d8f..1211ed1e 100644 --- a/go.sum +++ b/go.sum @@ -2,10 +2,13 @@ github.com/blend/go-sdk v1.20220411.3 h1:GFV4/FQX5UzXLPwWV03gP811pj7B8J2sbuq+GJQ github.com/blend/go-sdk v1.20220411.3/go.mod h1:7lnH8fTi6U4i1fArEXRyOIY2E1X4MALg09qsQqY1+ak= github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= +github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s= +github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U= github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM= github.com/bytedance/sonic v1.10.2 h1:GQebETVBxYB7JGWJtLBi07OVzWwt+8dWA00gEVW2ZFE= github.com/bytedance/sonic v1.10.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= +github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams= 
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0= github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA= @@ -15,6 +18,8 @@ github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLI github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= +github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= @@ -27,6 +32,8 @@ github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/o github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.14.0 h1:vgvQWe3XCz3gIeFDm/HnTIbj6UGmg/+t63MyGU2n5js= +github.com/go-playground/validator/v10 v10.14.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= github.com/go-playground/validator/v10 v10.15.5 h1:LEBecTWb/1j5TNY1YYG2RcOUN3R7NLylN+x8TTueE24= github.com/go-playground/validator/v10 v10.15.5/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= 
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= @@ -45,11 +52,15 @@ github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+ github.com/jung-kurt/gofpdf v1.16.2 h1:jgbatWHfRlPYiK85qgevsZTHviWXKwB1TTiKdz5PtRc= github.com/jung-kurt/gofpdf v1.16.2/go.mod h1:1hl7y57EsiPAkLbOwzpzqgx1A30nQCk/YmFV8S2vmK0= github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.2.4 h1:acbojRNwl3o09bUq+yDCtZFc1aiwaAAxtcn8YkZXnvk= +github.com/klauspost/cpuid/v2 v2.2.4/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY= github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= +github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -59,11 +70,15 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= +github.com/pelletier/go-toml/v2 v2.0.8 
h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ= +github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4= github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4= github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= +github.com/phpdave11/gofpdi v1.0.7 h1:k2oy4yhkQopCK+qW8KjCla0iU2RpDow+QUDmH9DDt44= github.com/phpdave11/gofpdi v1.0.7/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= github.com/phpdave11/gofpdi v1.0.13 h1:o61duiW8M9sMlkVXWlvP92sZJtGKENvW3VExs6dZukQ= github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= +github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -85,7 +100,8 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify v1.8.3 h1:RP3t2pwF7cMEbC1dqtB6poj3niw/9gnV4Cjg5oW5gtY= +github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= @@ -93,16 +109,20 @@ github.com/ugorji/go/codec v1.2.11 
h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4d github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= github.com/wcharczuk/go-chart v2.0.1+incompatible h1:0pz39ZAycJFF7ju/1mepnk26RLVLBCWz1STcD3doU0A= github.com/wcharczuk/go-chart v2.0.1+incompatible/go.mod h1:PF5tmL4EIx/7Wf+hEkpCqYi5He4u90sw+0+6FhrryuE= +github.com/xuri/efp v0.0.0-20230802181842-ad255f2331ca h1:uvPMDVyP7PXMMioYdyPH+0O+Ta/UO1WFfNYMO3Wz0eg= github.com/xuri/efp v0.0.0-20230802181842-ad255f2331ca/go.mod h1:ybY/Jr0T0GTCnYjKqmdwxyxn2BQf2RcQIIvex5QldPI= github.com/xuri/efp v0.0.0-20231025114914-d1ff6096ae53 h1:Chd9DkqERQQuHpXjR/HSV1jLZA6uaoiwwH3vSuF3IW0= github.com/xuri/efp v0.0.0-20231025114914-d1ff6096ae53/go.mod h1:ybY/Jr0T0GTCnYjKqmdwxyxn2BQf2RcQIIvex5QldPI= github.com/xuri/excelize/v2 v2.8.0 h1:Vd4Qy809fupgp1v7X+nCS/MioeQmYVVzi495UCTqB7U= github.com/xuri/excelize/v2 v2.8.0/go.mod h1:6iA2edBTKxKbZAa7X5bDhcCg51xdOn1Ar5sfoXRGrQg= +github.com/xuri/nfp v0.0.0-20230819163627-dc951e3ffe1a h1:Mw2VNrNNNjDtw68VsEj2+st+oCSn4Uz7vZw6TbhcV1o= github.com/xuri/nfp v0.0.0-20230819163627-dc951e3ffe1a/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ= github.com/xuri/nfp v0.0.0-20230919160717-d98342af3f05 h1:qhbILQo1K3mphbwKh1vNm4oGezE1eF9fQWmNiIpSfI4= github.com/xuri/nfp v0.0.0-20230919160717-d98342af3f05/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= +golang.org/x/arch v0.3.0 h1:02VY4/ZcO/gBOH6PUaoiptASxtXU10jazRCP865E97k= +golang.org/x/arch v0.3.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= golang.org/x/arch v0.5.0 h1:jpGode6huXQxcskEIpOCvrU+tzo81b6+oFLUYXWtH/Y= golang.org/x/arch v0.5.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= @@ -111,6 +131,7 @@ 
golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98y golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/image v0.11.0 h1:ds2RoQvBvYTiJkwpSFDwCcDFNX7DqjL2WsUgTNk0Ooo= golang.org/x/image v0.11.0/go.mod h1:bglhjqbqVuEb9e9+eNR45Jfu7D+T4Qan+NhQk8Ck2P8= golang.org/x/image v0.13.0 h1:3cge/F/QTkNLauhf2QoE9zp+7sr+ZcL4HnoZmdwg9sg= golang.org/x/image v0.13.0/go.mod h1:6mmbMOeV28HuMTgA6OSRkdXKYw/t5W9Uwn2Yv1r3Yxk= @@ -121,6 +142,7 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.14.0 h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14= golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= @@ -131,6 +153,7 @@ golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys 
v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -160,6 +183,8 @@ golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= +google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= diff --git a/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go b/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go index b6ec0514..dc68cea1 100644 --- a/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go +++ b/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go @@ -145,7 +145,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { }, nil case 11: possibleAnswers := make([]string, 0) - for id := range model.ParsedModelRoot.TechnicalAssets { + for id, _ := range model.ParsedModelRoot.TechnicalAssets { possibleAnswers = append(possibleAnswers, id) } sort.Strings(possibleAnswers) @@ -299,12 +299,12 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry Confidentiality: model.Confidential.String(), Integrity: model.Critical.String(), Availability: model.Important.String(), - JustificationCiaRating: "Sourcecode is at least rated as 'critical' in terms of integrity, because any " + + 
Justification_cia_rating: "Sourcecode is at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", } *changeLogCollector = append(*changeLogCollector, "adding data asset: sourcecode") if !dryRun { - modelInput.DataAssets["Sourcecode"] = dataAsset + modelInput.Data_assets["Sourcecode"] = dataAsset } } @@ -321,12 +321,12 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry Confidentiality: model.Confidential.String(), Integrity: model.Critical.String(), Availability: model.Important.String(), - JustificationCiaRating: "Deployment units are at least rated as 'critical' in terms of integrity, because any " + + Justification_cia_rating: "Deployment units are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", } *changeLogCollector = append(*changeLogCollector, "adding data asset: deployment") if !dryRun { - modelInput.DataAssets["Deployment"] = dataAsset + modelInput.Data_assets["Deployment"] = dataAsset } } @@ -340,137 +340,137 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry commLinks := make(map[string]model.InputCommunicationLink) commLinks["Sourcecode Repository Traffic"] = model.InputCommunicationLink{ - Target: sourceRepoID, - Description: "Sourcecode Repository Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EndUserIdentityPropagation.String(), - Tags: []string{}, - VPN: false, - IpFiltered: false, - Readonly: false, - Usage: model.DevOps.String(), - DataAssetsSent: []string{"sourcecode"}, - DataAssetsReceived: []string{"sourcecode"}, - DiagramTweakWeight: 0, - DiagramTweakConstraint: false, + Target: sourceRepoID, + Description: "Sourcecode Repository Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + 
Authorization: model.EnduserIdentityPropagation.String(), + Tags: []string{}, + VPN: false, + IP_filtered: false, + Readonly: false, + Usage: model.DevOps.String(), + Data_assets_sent: []string{"sourcecode"}, + Data_assets_received: []string{"sourcecode"}, + Diagram_tweak_weight: 0, + Diagram_tweak_constraint: false, } commLinks["Build Pipeline Traffic"] = model.InputCommunicationLink{ - Target: buildPipelineID, - Description: "Build Pipeline Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EndUserIdentityPropagation.String(), - Tags: []string{}, - VPN: false, - IpFiltered: false, - Readonly: true, - Usage: model.DevOps.String(), - DataAssetsSent: nil, - DataAssetsReceived: []string{"deployment"}, - DiagramTweakWeight: 0, - DiagramTweakConstraint: false, + Target: buildPipelineID, + Description: "Build Pipeline Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.EnduserIdentityPropagation.String(), + Tags: []string{}, + VPN: false, + IP_filtered: false, + Readonly: true, + Usage: model.DevOps.String(), + Data_assets_sent: nil, + Data_assets_received: []string{"deployment"}, + Diagram_tweak_weight: 0, + Diagram_tweak_constraint: false, } commLinks["Artifact Registry Traffic"] = model.InputCommunicationLink{ - Target: artifactRegistryID, - Description: "Artifact Registry Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EndUserIdentityPropagation.String(), - Tags: []string{}, - VPN: false, - IpFiltered: false, - Readonly: true, - Usage: model.DevOps.String(), - DataAssetsSent: nil, - DataAssetsReceived: []string{"deployment"}, - DiagramTweakWeight: 0, - DiagramTweakConstraint: false, + Target: artifactRegistryID, + Description: "Artifact Registry Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: 
model.EnduserIdentityPropagation.String(), + Tags: []string{}, + VPN: false, + IP_filtered: false, + Readonly: true, + Usage: model.DevOps.String(), + Data_assets_sent: nil, + Data_assets_received: []string{"deployment"}, + Diagram_tweak_weight: 0, + Diagram_tweak_constraint: false, } if containerTechUsed { commLinks["Container Registry Traffic"] = model.InputCommunicationLink{ - Target: containerRepoID, - Description: "Container Registry Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EndUserIdentityPropagation.String(), - Tags: []string{}, - VPN: false, - IpFiltered: false, - Readonly: false, - Usage: model.DevOps.String(), - DataAssetsSent: []string{"deployment"}, - DataAssetsReceived: []string{"deployment"}, - DiagramTweakWeight: 0, - DiagramTweakConstraint: false, + Target: containerRepoID, + Description: "Container Registry Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.EnduserIdentityPropagation.String(), + Tags: []string{}, + VPN: false, + IP_filtered: false, + Readonly: false, + Usage: model.DevOps.String(), + Data_assets_sent: []string{"deployment"}, + Data_assets_received: []string{"deployment"}, + Diagram_tweak_weight: 0, + Diagram_tweak_constraint: false, } commLinks["Container Platform Traffic"] = model.InputCommunicationLink{ - Target: containerPlatformID, - Description: "Container Platform Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EndUserIdentityPropagation.String(), - Tags: []string{}, - VPN: false, - IpFiltered: false, - Readonly: false, - Usage: model.DevOps.String(), - DataAssetsSent: []string{"deployment"}, - DataAssetsReceived: []string{"deployment"}, - DiagramTweakWeight: 0, - DiagramTweakConstraint: false, + Target: containerPlatformID, + Description: "Container Platform Traffic", + Protocol: model.HTTPS.String(), + Authentication: 
model.Credentials.String(), + Authorization: model.EnduserIdentityPropagation.String(), + Tags: []string{}, + VPN: false, + IP_filtered: false, + Readonly: false, + Usage: model.DevOps.String(), + Data_assets_sent: []string{"deployment"}, + Data_assets_received: []string{"deployment"}, + Diagram_tweak_weight: 0, + Diagram_tweak_constraint: false, } } if codeInspectionUsed { commLinks["Code Inspection Platform Traffic"] = model.InputCommunicationLink{ - Target: codeInspectionPlatformID, - Description: "Code Inspection Platform Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EndUserIdentityPropagation.String(), - Tags: []string{}, - VPN: false, - IpFiltered: false, - Readonly: true, - Usage: model.DevOps.String(), - DataAssetsSent: nil, - DataAssetsReceived: []string{"sourcecode"}, - DiagramTweakWeight: 0, - DiagramTweakConstraint: false, + Target: codeInspectionPlatformID, + Description: "Code Inspection Platform Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.EnduserIdentityPropagation.String(), + Tags: []string{}, + VPN: false, + IP_filtered: false, + Readonly: true, + Usage: model.DevOps.String(), + Data_assets_sent: nil, + Data_assets_received: []string{"sourcecode"}, + Diagram_tweak_weight: 0, + Diagram_tweak_constraint: false, } } techAsset := model.InputTechnicalAsset{ - ID: id, - Description: "Development Client", - Type: model.ExternalEntity.String(), - Usage: model.DevOps.String(), - UsedAsClientByHuman: true, - OutOfScope: true, - JustificationOutOfScope: "Development client is not directly in-scope of the application.", - Size: model.System.String(), - Technology: model.DevOpsClient.String(), - Tags: []string{}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Physical.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: 
model.Critical.String(), - Availability: model.Important.String(), - JustificationCiaRating: "Sourcecode processing components are at least rated as 'critical' in terms of integrity, because any " + + ID: id, + Description: "Development Client", + Type: model.ExternalEntity.String(), + Usage: model.DevOps.String(), + Used_as_client_by_human: true, + Out_of_scope: true, + Justification_out_of_scope: "Development client is not directly in-scope of the application.", + Size: model.System.String(), + Technology: model.DevOpsClient.String(), + Tags: []string{}, + Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Physical.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.Critical.String(), + Availability: model.Important.String(), + Justification_cia_rating: "Sourcecode processing components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", - MultiTenant: false, - Redundant: false, - CustomDevelopedParts: false, - DataAssetsProcessed: []string{"sourcecode", "deployment"}, - DataAssetsStored: []string{"sourcecode", "deployment"}, - DataFormatsAccepted: []string{"file"}, - CommunicationLinks: commLinks, + Multi_tenant: false, + Redundant: false, + Custom_developed_parts: false, + Data_assets_processed: []string{"sourcecode", "deployment"}, + Data_assets_stored: []string{"sourcecode", "deployment"}, + Data_formats_accepted: []string{"file"}, + Communication_links: commLinks, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.TechnicalAssets["Development Client"] = techAsset + modelInput.Technical_assets["Development Client"] = techAsset } } @@ -483,36 +483,36 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry encryption = model.Transparent.String() } 
techAsset := model.InputTechnicalAsset{ - ID: id, - Description: macroState["source-repository"][0] + " Sourcecode Repository", - Type: model.Process.String(), - Usage: model.DevOps.String(), - UsedAsClientByHuman: false, - OutOfScope: false, - JustificationOutOfScope: "", - Size: model.Service.String(), - Technology: model.SourcecodeRepository.String(), - Tags: []string{model.NormalizeTag(macroState["source-repository"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Important.String(), - JustificationCiaRating: "Sourcecode processing components are at least rated as 'critical' in terms of integrity, because any " + + ID: id, + Description: macroState["source-repository"][0] + " Sourcecode Repository", + Type: model.Process.String(), + Usage: model.DevOps.String(), + Used_as_client_by_human: false, + Out_of_scope: false, + Justification_out_of_scope: "", + Size: model.Service.String(), + Technology: model.SourcecodeRepository.String(), + Tags: []string{model.NormalizeTag(macroState["source-repository"][0])}, + Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Virtual.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.Critical.String(), + Availability: model.Important.String(), + Justification_cia_rating: "Sourcecode processing components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", - MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - CustomDevelopedParts: false, - DataAssetsProcessed: []string{"sourcecode"}, - DataAssetsStored: []string{"sourcecode"}, - DataFormatsAccepted: []string{"file"}, - CommunicationLinks: nil, + 
Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + Custom_developed_parts: false, + Data_assets_processed: []string{"sourcecode"}, + Data_assets_stored: []string{"sourcecode"}, + Data_formats_accepted: []string{"file"}, + Communication_links: nil, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.TechnicalAssets[macroState["source-repository"][0]+" Sourcecode Repository"] = techAsset + modelInput.Technical_assets[macroState["source-repository"][0]+" Sourcecode Repository"] = techAsset } } @@ -526,36 +526,36 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry encryption = model.Transparent.String() } techAsset := model.InputTechnicalAsset{ - ID: id, - Description: macroState["container-registry"][0] + " Container Registry", - Type: model.Process.String(), - Usage: model.DevOps.String(), - UsedAsClientByHuman: false, - OutOfScope: false, - JustificationOutOfScope: "", - Size: model.Service.String(), - Technology: model.ArtifactRegistry.String(), - Tags: []string{model.NormalizeTag(macroState["container-registry"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Important.String(), - JustificationCiaRating: "Container registry components are at least rated as 'critical' in terms of integrity, because any " + + ID: id, + Description: macroState["container-registry"][0] + " Container Registry", + Type: model.Process.String(), + Usage: model.DevOps.String(), + Used_as_client_by_human: false, + Out_of_scope: false, + Justification_out_of_scope: "", + Size: model.Service.String(), + Technology: model.ArtifactRegistry.String(), + Tags: []string{model.NormalizeTag(macroState["container-registry"][0])}, + 
Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Virtual.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.Critical.String(), + Availability: model.Important.String(), + Justification_cia_rating: "Container registry components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", - MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - CustomDevelopedParts: false, - DataAssetsProcessed: []string{"deployment"}, - DataAssetsStored: []string{"deployment"}, - DataFormatsAccepted: []string{"file"}, - CommunicationLinks: nil, + Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + Custom_developed_parts: false, + Data_assets_processed: []string{"deployment"}, + Data_assets_stored: []string{"deployment"}, + Data_formats_accepted: []string{"file"}, + Communication_links: nil, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.TechnicalAssets[macroState["container-registry"][0]+" Container Registry"] = techAsset + modelInput.Technical_assets[macroState["container-registry"][0]+" Container Registry"] = techAsset } } @@ -568,36 +568,36 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry encryption = model.Transparent.String() } techAsset := model.InputTechnicalAsset{ - ID: id, - Description: macroState["container-platform"][0] + " Container Platform", - Type: model.Process.String(), - Usage: model.DevOps.String(), - UsedAsClientByHuman: false, - OutOfScope: false, - JustificationOutOfScope: "", - Size: model.System.String(), - Technology: model.ContainerPlatform.String(), - Tags: []string{model.NormalizeTag(macroState["container-platform"][0])}, - Internet: 
strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.MissionCritical.String(), - Availability: model.MissionCritical.String(), - JustificationCiaRating: "Container platform components are rated as 'mission-critical' in terms of integrity and availability, because any " + + ID: id, + Description: macroState["container-platform"][0] + " Container Platform", + Type: model.Process.String(), + Usage: model.DevOps.String(), + Used_as_client_by_human: false, + Out_of_scope: false, + Justification_out_of_scope: "", + Size: model.System.String(), + Technology: model.ContainerPlatform.String(), + Tags: []string{model.NormalizeTag(macroState["container-platform"][0])}, + Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Virtual.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.MissionCritical.String(), + Availability: model.MissionCritical.String(), + Justification_cia_rating: "Container platform components are rated as 'mission-critical' in terms of integrity and availability, because any " + "malicious modification of it might lead to a backdoored production system.", - MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - CustomDevelopedParts: false, - DataAssetsProcessed: []string{"deployment"}, - DataAssetsStored: []string{"deployment"}, - DataFormatsAccepted: []string{"file"}, - CommunicationLinks: nil, + Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + Custom_developed_parts: false, + Data_assets_processed: []string{"deployment"}, + Data_assets_stored: []string{"deployment"}, + Data_formats_accepted: []string{"file"}, + Communication_links: nil, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication 
links): "+id) if !dryRun { - modelInput.TechnicalAssets[macroState["container-platform"][0]+" Container Platform"] = techAsset + modelInput.Technical_assets[macroState["container-platform"][0]+" Container Platform"] = techAsset } } } @@ -613,115 +613,115 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry commLinks := make(map[string]model.InputCommunicationLink) commLinks["Sourcecode Repository Traffic"] = model.InputCommunicationLink{ - Target: sourceRepoID, - Description: "Sourcecode Repository Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IpFiltered: false, - Readonly: true, - Usage: model.DevOps.String(), - DataAssetsSent: nil, - DataAssetsReceived: []string{"sourcecode"}, - DiagramTweakWeight: 0, - DiagramTweakConstraint: false, + Target: sourceRepoID, + Description: "Sourcecode Repository Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IP_filtered: false, + Readonly: true, + Usage: model.DevOps.String(), + Data_assets_sent: nil, + Data_assets_received: []string{"sourcecode"}, + Diagram_tweak_weight: 0, + Diagram_tweak_constraint: false, } commLinks["Artifact Registry Traffic"] = model.InputCommunicationLink{ - Target: artifactRegistryID, - Description: "Artifact Registry Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IpFiltered: false, - Readonly: false, - Usage: model.DevOps.String(), - DataAssetsSent: []string{"deployment"}, - DataAssetsReceived: []string{"deployment"}, - DiagramTweakWeight: 0, - DiagramTweakConstraint: false, + Target: artifactRegistryID, + Description: "Artifact Registry Traffic", + Protocol: model.HTTPS.String(), + Authentication: 
model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IP_filtered: false, + Readonly: false, + Usage: model.DevOps.String(), + Data_assets_sent: []string{"deployment"}, + Data_assets_received: []string{"deployment"}, + Diagram_tweak_weight: 0, + Diagram_tweak_constraint: false, } if containerTechUsed { commLinks["Container Registry Traffic"] = model.InputCommunicationLink{ - Target: containerRepoID, - Description: "Container Registry Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IpFiltered: false, - Readonly: false, - Usage: model.DevOps.String(), - DataAssetsSent: []string{"deployment"}, - DataAssetsReceived: []string{"deployment"}, - DiagramTweakWeight: 0, - DiagramTweakConstraint: false, + Target: containerRepoID, + Description: "Container Registry Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IP_filtered: false, + Readonly: false, + Usage: model.DevOps.String(), + Data_assets_sent: []string{"deployment"}, + Data_assets_received: []string{"deployment"}, + Diagram_tweak_weight: 0, + Diagram_tweak_constraint: false, } if macroState["push-or-pull"][0] == pushOrPull[0] { // Push commLinks["Container Platform Push"] = model.InputCommunicationLink{ - Target: containerPlatformID, - Description: "Container Platform Push", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IpFiltered: false, - Readonly: false, - Usage: model.DevOps.String(), - DataAssetsSent: []string{"deployment"}, - DataAssetsReceived: []string{"deployment"}, - DiagramTweakWeight: 0, - DiagramTweakConstraint: false, + Target: containerPlatformID, + Description: "Container Platform Push", 
+ Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IP_filtered: false, + Readonly: false, + Usage: model.DevOps.String(), + Data_assets_sent: []string{"deployment"}, + Data_assets_received: []string{"deployment"}, + Diagram_tweak_weight: 0, + Diagram_tweak_constraint: false, } } else { // Pull commLinkPull := model.InputCommunicationLink{ - Target: containerRepoID, - Description: "Container Platform Pull", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IpFiltered: false, - Readonly: true, - Usage: model.DevOps.String(), - DataAssetsSent: nil, - DataAssetsReceived: []string{"deployment"}, - DiagramTweakWeight: 0, - DiagramTweakConstraint: false, + Target: containerRepoID, + Description: "Container Platform Pull", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IP_filtered: false, + Readonly: true, + Usage: model.DevOps.String(), + Data_assets_sent: nil, + Data_assets_received: []string{"deployment"}, + Diagram_tweak_weight: 0, + Diagram_tweak_constraint: false, } if !dryRun { titleOfTargetAsset := macroState["container-platform"][0] + " Container Platform" - containerPlatform := modelInput.TechnicalAssets[titleOfTargetAsset] - if containerPlatform.CommunicationLinks == nil { - containerPlatform.CommunicationLinks = make(map[string]model.InputCommunicationLink) + containerPlatform := modelInput.Technical_assets[titleOfTargetAsset] + if containerPlatform.Communication_links == nil { + containerPlatform.Communication_links = make(map[string]model.InputCommunicationLink, 0) } - containerPlatform.CommunicationLinks["Container Platform Pull"] = commLinkPull - modelInput.TechnicalAssets[titleOfTargetAsset] = containerPlatform + 
containerPlatform.Communication_links["Container Platform Pull"] = commLinkPull + modelInput.Technical_assets[titleOfTargetAsset] = containerPlatform } } } if codeInspectionUsed { commLinks["Code Inspection Platform Traffic"] = model.InputCommunicationLink{ - Target: codeInspectionPlatformID, - Description: "Code Inspection Platform Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IpFiltered: false, - Readonly: false, - Usage: model.DevOps.String(), - DataAssetsSent: []string{"sourcecode"}, - DataAssetsReceived: []string{}, - DiagramTweakWeight: 0, - DiagramTweakConstraint: false, + Target: codeInspectionPlatformID, + Description: "Code Inspection Platform Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IP_filtered: false, + Readonly: false, + Usage: model.DevOps.String(), + Data_assets_sent: []string{"sourcecode"}, + Data_assets_received: []string{}, + Diagram_tweak_weight: 0, + Diagram_tweak_constraint: false, } } // The individual deployments @@ -729,73 +729,73 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry //fmt.Println("Adding deployment flow to:", deployTargetID) if containerTechUsed { if !dryRun { - containerPlatform := modelInput.TechnicalAssets[macroState["container-platform"][0]+" Container Platform"] - if containerPlatform.CommunicationLinks == nil { - containerPlatform.CommunicationLinks = make(map[string]model.InputCommunicationLink) + containerPlatform := modelInput.Technical_assets[macroState["container-platform"][0]+" Container Platform"] + if containerPlatform.Communication_links == nil { + containerPlatform.Communication_links = make(map[string]model.InputCommunicationLink, 0) } - containerPlatform.CommunicationLinks["Container Spawning ("+deployTargetID+")"] = 
model.InputCommunicationLink{ - Target: deployTargetID, - Description: "Container Spawning " + deployTargetID, - Protocol: model.ContainerSpawning.String(), - Authentication: model.NoneAuthentication.String(), - Authorization: model.NoneAuthorization.String(), - Tags: []string{}, - VPN: false, - IpFiltered: false, - Readonly: false, - Usage: model.DevOps.String(), - DataAssetsSent: []string{"deployment"}, - DataAssetsReceived: nil, - DiagramTweakWeight: 0, - DiagramTweakConstraint: false, + containerPlatform.Communication_links["Container Spawning ("+deployTargetID+")"] = model.InputCommunicationLink{ + Target: deployTargetID, + Description: "Container Spawning " + deployTargetID, + Protocol: model.ContainerSpawning.String(), + Authentication: model.NoneAuthentication.String(), + Authorization: model.NoneAuthorization.String(), + Tags: []string{}, + VPN: false, + IP_filtered: false, + Readonly: false, + Usage: model.DevOps.String(), + Data_assets_sent: []string{"deployment"}, + Data_assets_received: nil, + Diagram_tweak_weight: 0, + Diagram_tweak_constraint: false, } - modelInput.TechnicalAssets[macroState["container-platform"][0]+" Container Platform"] = containerPlatform + modelInput.Technical_assets[macroState["container-platform"][0]+" Container Platform"] = containerPlatform } } else { // No Containers used if macroState["push-or-pull"][0] == pushOrPull[0] { // Push commLinks["Deployment Push ("+deployTargetID+")"] = model.InputCommunicationLink{ - Target: deployTargetID, - Description: "Deployment Push to " + deployTargetID, - Protocol: model.SSH.String(), - Authentication: model.ClientCertificate.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IpFiltered: false, - Readonly: false, - Usage: model.DevOps.String(), - DataAssetsSent: []string{"deployment"}, - DataAssetsReceived: nil, - DiagramTweakWeight: 0, - DiagramTweakConstraint: false, + Target: deployTargetID, + Description: "Deployment Push to " + 
deployTargetID, + Protocol: model.SSH.String(), + Authentication: model.ClientCertificate.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IP_filtered: false, + Readonly: false, + Usage: model.DevOps.String(), + Data_assets_sent: []string{"deployment"}, + Data_assets_received: nil, + Diagram_tweak_weight: 0, + Diagram_tweak_constraint: false, } } else { // Pull pullFromWhere := artifactRegistryID commLinkPull := model.InputCommunicationLink{ - Target: pullFromWhere, - Description: "Deployment Pull from " + deployTargetID, - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IpFiltered: false, - Readonly: true, - Usage: model.DevOps.String(), - DataAssetsSent: nil, - DataAssetsReceived: []string{"deployment"}, - DiagramTweakWeight: 0, - DiagramTweakConstraint: false, + Target: pullFromWhere, + Description: "Deployment Pull from " + deployTargetID, + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IP_filtered: false, + Readonly: true, + Usage: model.DevOps.String(), + Data_assets_sent: nil, + Data_assets_received: []string{"deployment"}, + Diagram_tweak_weight: 0, + Diagram_tweak_constraint: false, } if !dryRun { // take care to lookup by title (as keyed in input YAML by title and only in parsed model representation by ID) titleOfTargetAsset := model.ParsedModelRoot.TechnicalAssets[deployTargetID].Title - x := modelInput.TechnicalAssets[titleOfTargetAsset] - if x.CommunicationLinks == nil { - x.CommunicationLinks = make(map[string]model.InputCommunicationLink) + x := modelInput.Technical_assets[titleOfTargetAsset] + if x.Communication_links == nil { + x.Communication_links = make(map[string]model.InputCommunicationLink, 0) } - x.CommunicationLinks["Deployment Pull ("+deployTargetID+")"] = commLinkPull - 
modelInput.TechnicalAssets[titleOfTargetAsset] = x + x.Communication_links["Deployment Pull ("+deployTargetID+")"] = commLinkPull + modelInput.Technical_assets[titleOfTargetAsset] = x } } @@ -804,8 +804,8 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry // don't forget to also add the "deployment" data asset as stored on the target targetAssetTitle := model.ParsedModelRoot.TechnicalAssets[deployTargetID].Title assetsStored := make([]string, 0) - if modelInput.TechnicalAssets[targetAssetTitle].DataAssetsStored != nil { - for _, val := range modelInput.TechnicalAssets[targetAssetTitle].DataAssetsStored { + if modelInput.Technical_assets[targetAssetTitle].Data_assets_stored != nil { + for _, val := range modelInput.Technical_assets[targetAssetTitle].Data_assets_stored { assetsStored = append(assetsStored, fmt.Sprintf("%v", val)) } } @@ -815,43 +815,43 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } mergedArrays = append(mergedArrays, "deployment") if !dryRun { - x := modelInput.TechnicalAssets[targetAssetTitle] - x.DataAssetsStored = mergedArrays - modelInput.TechnicalAssets[targetAssetTitle] = x + x := modelInput.Technical_assets[targetAssetTitle] + x.Data_assets_stored = mergedArrays + modelInput.Technical_assets[targetAssetTitle] = x } } techAsset := model.InputTechnicalAsset{ - ID: id, - Description: macroState["build-pipeline"][0] + " Build Pipeline", - Type: model.Process.String(), - Usage: model.DevOps.String(), - UsedAsClientByHuman: false, - OutOfScope: false, - JustificationOutOfScope: "", - Size: model.Service.String(), - Technology: model.BuildPipeline.String(), - Tags: []string{model.NormalizeTag(macroState["build-pipeline"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: 
model.Important.String(), - JustificationCiaRating: "Build pipeline components are at least rated as 'critical' in terms of integrity, because any " + + ID: id, + Description: macroState["build-pipeline"][0] + " Build Pipeline", + Type: model.Process.String(), + Usage: model.DevOps.String(), + Used_as_client_by_human: false, + Out_of_scope: false, + Justification_out_of_scope: "", + Size: model.Service.String(), + Technology: model.BuildPipeline.String(), + Tags: []string{model.NormalizeTag(macroState["build-pipeline"][0])}, + Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Virtual.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.Critical.String(), + Availability: model.Important.String(), + Justification_cia_rating: "Build pipeline components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", - MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - CustomDevelopedParts: false, - DataAssetsProcessed: []string{"sourcecode", "deployment"}, - DataAssetsStored: []string{"sourcecode", "deployment"}, - DataFormatsAccepted: []string{"file"}, - CommunicationLinks: commLinks, + Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + Custom_developed_parts: false, + Data_assets_processed: []string{"sourcecode", "deployment"}, + Data_assets_stored: []string{"sourcecode", "deployment"}, + Data_formats_accepted: []string{"file"}, + Communication_links: commLinks, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.TechnicalAssets[macroState["build-pipeline"][0]+" Build Pipeline"] = techAsset + modelInput.Technical_assets[macroState["build-pipeline"][0]+" Build Pipeline"] = techAsset } } @@ -864,36 +864,36 @@ func 
applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry encryption = model.Transparent.String() } techAsset := model.InputTechnicalAsset{ - ID: id, - Description: macroState["artifact-registry"][0] + " Artifact Registry", - Type: model.Process.String(), - Usage: model.DevOps.String(), - UsedAsClientByHuman: false, - OutOfScope: false, - JustificationOutOfScope: "", - Size: model.Service.String(), - Technology: model.ArtifactRegistry.String(), - Tags: []string{model.NormalizeTag(macroState["artifact-registry"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Important.String(), - JustificationCiaRating: "Artifact registry components are at least rated as 'critical' in terms of integrity, because any " + + ID: id, + Description: macroState["artifact-registry"][0] + " Artifact Registry", + Type: model.Process.String(), + Usage: model.DevOps.String(), + Used_as_client_by_human: false, + Out_of_scope: false, + Justification_out_of_scope: "", + Size: model.Service.String(), + Technology: model.ArtifactRegistry.String(), + Tags: []string{model.NormalizeTag(macroState["artifact-registry"][0])}, + Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Virtual.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.Critical.String(), + Availability: model.Important.String(), + Justification_cia_rating: "Artifact registry components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", - MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - CustomDevelopedParts: false, - DataAssetsProcessed: []string{"sourcecode", "deployment"}, - 
DataAssetsStored: []string{"sourcecode", "deployment"}, - DataFormatsAccepted: []string{"file"}, - CommunicationLinks: nil, + Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + Custom_developed_parts: false, + Data_assets_processed: []string{"sourcecode", "deployment"}, + Data_assets_stored: []string{"sourcecode", "deployment"}, + Data_formats_accepted: []string{"file"}, + Communication_links: nil, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.TechnicalAssets[macroState["artifact-registry"][0]+" Artifact Registry"] = techAsset + modelInput.Technical_assets[macroState["artifact-registry"][0]+" Artifact Registry"] = techAsset } } @@ -907,36 +907,36 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry encryption = model.Transparent.String() } techAsset := model.InputTechnicalAsset{ - ID: id, - Description: macroState["code-inspection-platform"][0] + " Code Inspection Platform", - Type: model.Process.String(), - Usage: model.DevOps.String(), - UsedAsClientByHuman: false, - OutOfScope: false, - JustificationOutOfScope: "", - Size: model.Service.String(), - Technology: model.CodeInspectionPlatform.String(), - Tags: []string{model.NormalizeTag(macroState["code-inspection-platform"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Important.String(), - Availability: model.Operational.String(), - JustificationCiaRating: "Sourcecode inspection platforms are rated at least 'important' in terms of integrity, because any " + + ID: id, + Description: macroState["code-inspection-platform"][0] + " Code Inspection Platform", + Type: model.Process.String(), + Usage: model.DevOps.String(), + Used_as_client_by_human: false, + Out_of_scope: false, + 
Justification_out_of_scope: "", + Size: model.Service.String(), + Technology: model.CodeInspectionPlatform.String(), + Tags: []string{model.NormalizeTag(macroState["code-inspection-platform"][0])}, + Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Virtual.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.Important.String(), + Availability: model.Operational.String(), + Justification_cia_rating: "Sourcecode inspection platforms are rated at least 'important' in terms of integrity, because any " + "malicious modification of it might lead to vulnerabilities found by the scanner engine not being shown.", - MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - CustomDevelopedParts: false, - DataAssetsProcessed: []string{"sourcecode"}, - DataAssetsStored: []string{"sourcecode"}, - DataFormatsAccepted: []string{"file"}, - CommunicationLinks: nil, + Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + Custom_developed_parts: false, + Data_assets_processed: []string{"sourcecode"}, + Data_assets_stored: []string{"sourcecode"}, + Data_formats_accepted: []string{"file"}, + Communication_links: nil, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.TechnicalAssets[macroState["code-inspection-platform"][0]+" Code Inspection Platform"] = techAsset + modelInput.Technical_assets[macroState["code-inspection-platform"][0]+" Code Inspection Platform"] = techAsset } } } @@ -947,25 +947,25 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry //fmt.Println("Adding new trust boundary of type:", trustBoundaryType) title := "DevOps Network" trustBoundary := model.InputTrustBoundary{ - ID: "devops-network", - Description: "DevOps Network", - Type: trustBoundaryType, - Tags: []string{}, - 
TechnicalAssetsInside: serverSideTechAssets, - TrustBoundariesNested: nil, + ID: "devops-network", + Description: "DevOps Network", + Type: trustBoundaryType, + Tags: []string{}, + Technical_assets_inside: serverSideTechAssets, + Trust_boundaries_nested: nil, } *changeLogCollector = append(*changeLogCollector, "adding trust boundary: devops-network") if !dryRun { - modelInput.TrustBoundaries[title] = trustBoundary + modelInput.Trust_boundaries[title] = trustBoundary } } else { existingTrustBoundaryToAddTo := macroState["selected-trust-boundary"][0] //fmt.Println("Adding to existing trust boundary:", existingTrustBoundaryToAddTo) title := model.ParsedModelRoot.TrustBoundaries[existingTrustBoundaryToAddTo].Title assetsInside := make([]string, 0) - if modelInput.TrustBoundaries[title].TechnicalAssetsInside != nil { - values := modelInput.TrustBoundaries[title].TechnicalAssetsInside - for _, val := range values { + if modelInput.Trust_boundaries[title].Technical_assets_inside != nil { + vals := modelInput.Trust_boundaries[title].Technical_assets_inside + for _, val := range vals { assetsInside = append(assetsInside, fmt.Sprintf("%v", val)) } } @@ -976,12 +976,12 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry mergedArrays = append(mergedArrays, serverSideTechAssets...) 
*changeLogCollector = append(*changeLogCollector, "filling existing trust boundary: "+existingTrustBoundaryToAddTo) if !dryRun { - if modelInput.TrustBoundaries == nil { - modelInput.TrustBoundaries = make(map[string]model.InputTrustBoundary) + if modelInput.Trust_boundaries == nil { + modelInput.Trust_boundaries = make(map[string]model.InputTrustBoundary, 0) } - tb := modelInput.TrustBoundaries[title] - tb.TechnicalAssetsInside = mergedArrays - modelInput.TrustBoundaries[title] = tb + tb := modelInput.Trust_boundaries[title] + tb.Technical_assets_inside = mergedArrays + modelInput.Trust_boundaries[title] = tb } } } @@ -994,17 +994,17 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } title := macroState["container-platform"][0] + " Runtime" sharedRuntime := model.InputSharedRuntime{ - ID: containerSharedRuntimeID, - Description: title, - Tags: []string{model.NormalizeTag(macroState["container-platform"][0])}, - TechnicalAssetsRunning: assetsRunning, + ID: containerSharedRuntimeID, + Description: title, + Tags: []string{model.NormalizeTag(macroState["container-platform"][0])}, + Technical_assets_running: assetsRunning, } *changeLogCollector = append(*changeLogCollector, "adding shared runtime: "+containerSharedRuntimeID) if !dryRun { - if modelInput.SharedRuntimes == nil { - modelInput.SharedRuntimes = make(map[string]model.InputSharedRuntime) + if modelInput.Shared_runtimes == nil { + modelInput.Shared_runtimes = make(map[string]model.InputSharedRuntime, 0) } - modelInput.SharedRuntimes[title] = sharedRuntime + modelInput.Shared_runtimes[title] = sharedRuntime } } diff --git a/macros/built-in/add-vault/add-vault-macro.go b/macros/built-in/add-vault/add-vault-macro.go index 06fc5065..03ec5f57 100644 --- a/macros/built-in/add-vault/add-vault-macro.go +++ b/macros/built-in/add-vault/add-vault-macro.go @@ -84,7 +84,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { }, nil case 4: possibleAnswers := 
make([]string, 0) - for id := range model.ParsedModelRoot.TechnicalAssets { + for id, _ := range model.ParsedModelRoot.TechnicalAssets { possibleAnswers = append(possibleAnswers, id) } sort.Strings(possibleAnswers) @@ -181,21 +181,21 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry if _, exists := model.ParsedModelRoot.DataAssets["Configuration Secrets"]; !exists { dataAsset := model.InputDataAsset{ - ID: "configuration-secrets", - Description: "Configuration secrets (like credentials, keys, certificates, etc.) secured and managed by a vault", - Usage: model.DevOps.String(), - Tags: []string{}, - Origin: "", - Owner: "", - Quantity: model.VeryFew.String(), - Confidentiality: model.StrictlyConfidential.String(), - Integrity: model.Critical.String(), - Availability: model.Critical.String(), - JustificationCiaRating: "Configuration secrets are rated as being 'strictly-confidential'.", + ID: "configuration-secrets", + Description: "Configuration secrets (like credentials, keys, certificates, etc.) 
secured and managed by a vault", + Usage: model.DevOps.String(), + Tags: []string{}, + Origin: "", + Owner: "", + Quantity: model.VeryFew.String(), + Confidentiality: model.StrictlyConfidential.String(), + Integrity: model.Critical.String(), + Availability: model.Critical.String(), + Justification_cia_rating: "Configuration secrets are rated as being 'strictly-confidential'.", } *changeLogCollector = append(*changeLogCollector, "adding data asset: configuration-secrets") if !dryRun { - modelInput.DataAssets["Configuration Secrets"] = dataAsset + modelInput.Data_assets["Configuration Secrets"] = dataAsset } } @@ -213,35 +213,35 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry if _, exists := model.ParsedModelRoot.TechnicalAssets[storageID]; !exists { serverSideTechAssets = append(serverSideTechAssets, storageID) techAsset := model.InputTechnicalAsset{ - ID: storageID, - Description: "Vault Storage", - Type: model.Datastore.String(), - Usage: model.DevOps.String(), - UsedAsClientByHuman: false, - OutOfScope: false, - JustificationOutOfScope: "", - Size: model.Component.String(), - Technology: tech, - Tags: []string{}, // TODO: let user enter or too detailed for a wizard? - Internet: false, - Machine: model.Virtual.String(), // TODO: let user enter or too detailed for a wizard? 
- Encryption: model.DataWithSymmetricSharedKey.String(), // can be assumed for a vault product as at least having some good encryption - Owner: "", - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Critical.String(), - JustificationCiaRating: "Vault components are only rated as 'confidential' as vaults usually apply a trust barrier to encrypt all data-at-rest with a vault key.", - MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - CustomDevelopedParts: false, - DataAssetsProcessed: nil, - DataAssetsStored: []string{"configuration-secrets"}, - DataFormatsAccepted: nil, - CommunicationLinks: nil, + ID: storageID, + Description: "Vault Storage", + Type: model.Datastore.String(), + Usage: model.DevOps.String(), + Used_as_client_by_human: false, + Out_of_scope: false, + Justification_out_of_scope: "", + Size: model.Component.String(), + Technology: tech, + Tags: []string{}, // TODO: let user enter or too detailed for a wizard? + Internet: false, + Machine: model.Virtual.String(), // TODO: let user enter or too detailed for a wizard? 
+ Encryption: model.DataWithSymmetricSharedKey.String(), // can be assumed for a vault product as at least having some good encryption + Owner: "", + Confidentiality: model.Confidential.String(), + Integrity: model.Critical.String(), + Availability: model.Critical.String(), + Justification_cia_rating: "Vault components are only rated as 'confidential' as vaults usually apply a trust barrier to encrypt all data-at-rest with a vault key.", + Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + Custom_developed_parts: false, + Data_assets_processed: nil, + Data_assets_stored: []string{"configuration-secrets"}, + Data_formats_accepted: nil, + Communication_links: nil, } *changeLogCollector = append(*changeLogCollector, "adding technical asset: "+storageID) if !dryRun { - modelInput.TechnicalAssets["Vault Storage"] = techAsset + modelInput.Technical_assets["Vault Storage"] = techAsset } } } @@ -254,23 +254,23 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry if databaseUsed || filesystemUsed { accessLink := model.InputCommunicationLink{ - Target: storageID, - Description: "Vault Storage Access", - Protocol: model.LocalFileAccess.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IpFiltered: false, - Readonly: false, - Usage: model.DevOps.String(), - DataAssetsSent: []string{"configuration-secrets"}, - DataAssetsReceived: []string{"configuration-secrets"}, - DiagramTweakWeight: 0, - DiagramTweakConstraint: false, + Target: storageID, + Description: "Vault Storage Access", + Protocol: model.LocalFileAccess.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IP_filtered: false, + Readonly: false, + Usage: model.DevOps.String(), + Data_assets_sent: []string{"configuration-secrets"}, + Data_assets_received: 
[]string{"configuration-secrets"}, + Diagram_tweak_weight: 0, + Diagram_tweak_constraint: false, } if databaseUsed { - accessLink.Protocol = model.SqlAccessProtocol.String() // TODO ask if encrypted and ask if NoSQL? or to detailed for a wizard? + accessLink.Protocol = model.SQL_access_protocol.String() // TODO ask if encrypted and ask if NoSQL? or to detailed for a wizard? } commLinks["Vault Storage Access"] = accessLink } @@ -287,31 +287,31 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } for _, clientID := range macroState["clients"] { // add a connection from each client clientAccessCommLink := model.InputCommunicationLink{ - Target: vaultID, - Description: "Vault Access Traffic (by " + clientID + ")", - Protocol: model.HTTPS.String(), - Authentication: authentication, - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IpFiltered: false, - Readonly: true, - Usage: model.DevOps.String(), - DataAssetsSent: nil, - DataAssetsReceived: []string{"configuration-secrets"}, - DiagramTweakWeight: 0, - DiagramTweakConstraint: false, + Target: vaultID, + Description: "Vault Access Traffic (by " + clientID + ")", + Protocol: model.HTTPS.String(), + Authentication: authentication, + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IP_filtered: false, + Readonly: true, + Usage: model.DevOps.String(), + Data_assets_sent: nil, + Data_assets_received: []string{"configuration-secrets"}, + Diagram_tweak_weight: 0, + Diagram_tweak_constraint: false, } clientAssetTitle := model.ParsedModelRoot.TechnicalAssets[clientID].Title if !dryRun { - client := modelInput.TechnicalAssets[clientAssetTitle] - client.CommunicationLinks["Vault Access ("+clientID+")"] = clientAccessCommLink - modelInput.TechnicalAssets[clientAssetTitle] = client + client := modelInput.Technical_assets[clientAssetTitle] + client.Communication_links["Vault Access ("+clientID+")"] = clientAccessCommLink + 
modelInput.Technical_assets[clientAssetTitle] = client } // don't forget to also add the "configuration-secrets" data asset as processed on the client assetsProcessed := make([]string, 0) - if modelInput.TechnicalAssets[clientAssetTitle].DataAssetsProcessed != nil { - for _, val := range modelInput.TechnicalAssets[clientAssetTitle].DataAssetsProcessed { + if modelInput.Technical_assets[clientAssetTitle].Data_assets_processed != nil { + for _, val := range modelInput.Technical_assets[clientAssetTitle].Data_assets_processed { assetsProcessed = append(assetsProcessed, fmt.Sprintf("%v", val)) } } @@ -321,45 +321,45 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } mergedArrays = append(mergedArrays, "configuration-secrets") if !dryRun { - x := modelInput.TechnicalAssets[clientAssetTitle] - x.DataAssetsProcessed = mergedArrays - modelInput.TechnicalAssets[clientAssetTitle] = x + x := modelInput.Technical_assets[clientAssetTitle] + x.Data_assets_processed = mergedArrays + modelInput.Technical_assets[clientAssetTitle] = x } } techAsset := model.InputTechnicalAsset{ - ID: vaultID, - Description: macroState["vault-name"][0] + " Vault", - Type: model.Process.String(), - Usage: model.DevOps.String(), - UsedAsClientByHuman: false, - OutOfScope: false, - JustificationOutOfScope: "", - Size: model.Service.String(), - Technology: model.Vault.String(), - Tags: []string{model.NormalizeTag(macroState["vault-name"][0])}, - Internet: false, - Machine: model.Virtual.String(), - Encryption: model.Transparent.String(), - Owner: "", - Confidentiality: model.StrictlyConfidential.String(), - Integrity: model.Critical.String(), - Availability: model.Critical.String(), - JustificationCiaRating: "Vault components are rated as 'strictly-confidential'.", - MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - CustomDevelopedParts: false, - DataAssetsProcessed: []string{"configuration-secrets"}, - DataAssetsStored: nil, - 
DataFormatsAccepted: nil, - CommunicationLinks: commLinks, + ID: vaultID, + Description: macroState["vault-name"][0] + " Vault", + Type: model.Process.String(), + Usage: model.DevOps.String(), + Used_as_client_by_human: false, + Out_of_scope: false, + Justification_out_of_scope: "", + Size: model.Service.String(), + Technology: model.Vault.String(), + Tags: []string{model.NormalizeTag(macroState["vault-name"][0])}, + Internet: false, + Machine: model.Virtual.String(), + Encryption: model.Transparent.String(), + Owner: "", + Confidentiality: model.StrictlyConfidential.String(), + Integrity: model.Critical.String(), + Availability: model.Critical.String(), + Justification_cia_rating: "Vault components are rated as 'strictly-confidential'.", + Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + Custom_developed_parts: false, + Data_assets_processed: []string{"configuration-secrets"}, + Data_assets_stored: nil, + Data_formats_accepted: nil, + Communication_links: commLinks, } if inMemoryUsed { - techAsset.DataAssetsStored = []string{"configuration-secrets"} + techAsset.Data_assets_stored = []string{"configuration-secrets"} } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+vaultID) if !dryRun { - modelInput.TechnicalAssets[macroState["vault-name"][0]+" Vault"] = techAsset + modelInput.Technical_assets[macroState["vault-name"][0]+" Vault"] = techAsset } } @@ -367,16 +367,16 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry if filesystemUsed { title := "Vault Environment" trustBoundary := model.InputTrustBoundary{ - ID: vaultEnvID, - Description: "Vault Environment", - Type: model.ExecutionEnvironment.String(), - Tags: []string{}, - TechnicalAssetsInside: []string{vaultID, storageID}, - TrustBoundariesNested: nil, + ID: vaultEnvID, + Description: "Vault Environment", + Type: model.ExecutionEnvironment.String(), + Tags: []string{}, + 
Technical_assets_inside: []string{vaultID, storageID}, + Trust_boundaries_nested: nil, } *changeLogCollector = append(*changeLogCollector, "adding trust boundary: "+vaultEnvID) if !dryRun { - modelInput.TrustBoundaries[title] = trustBoundary + modelInput.Trust_boundaries[title] = trustBoundary } } @@ -391,13 +391,13 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry Tags: []string{}, } if filesystemUsed { - trustBoundary.TrustBoundariesNested = []string{vaultEnvID} + trustBoundary.Trust_boundaries_nested = []string{vaultEnvID} } else { - trustBoundary.TechnicalAssetsInside = serverSideTechAssets + trustBoundary.Technical_assets_inside = serverSideTechAssets } *changeLogCollector = append(*changeLogCollector, "adding trust boundary: vault-network") if !dryRun { - modelInput.TrustBoundaries[title] = trustBoundary + modelInput.Trust_boundaries[title] = trustBoundary } } else { // adding to existing trust boundary existingTrustBoundaryToAddTo := macroState["selected-trust-boundary"][0] @@ -405,9 +405,9 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry if filesystemUsed { // ---------------------- nest as execution-environment trust boundary ---------------------- boundariesNested := make([]string, 0) - if modelInput.TrustBoundaries[title].TrustBoundariesNested != nil { - values := modelInput.TrustBoundaries[title].TrustBoundariesNested - for _, val := range values { + if modelInput.Trust_boundaries[title].Trust_boundaries_nested != nil { + vals := modelInput.Trust_boundaries[title].Trust_boundaries_nested + for _, val := range vals { boundariesNested = append(boundariesNested, fmt.Sprintf("%v", val)) } } @@ -418,15 +418,15 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry mergedArrays = append(mergedArrays, vaultEnvID) *changeLogCollector = append(*changeLogCollector, "filling existing trust boundary: "+existingTrustBoundaryToAddTo) if !dryRun { - tb := 
modelInput.TrustBoundaries[title] - tb.TrustBoundariesNested = mergedArrays - modelInput.TrustBoundaries[title] = tb + tb := modelInput.Trust_boundaries[title] + tb.Trust_boundaries_nested = mergedArrays + modelInput.Trust_boundaries[title] = tb } } else { // ---------------------- place assets inside directly ---------------------- assetsInside := make([]string, 0) - if modelInput.TrustBoundaries[title].TechnicalAssetsInside != nil { - values := modelInput.TrustBoundaries[title].TechnicalAssetsInside - for _, val := range values { + if modelInput.Trust_boundaries[title].Technical_assets_inside != nil { + vals := modelInput.Trust_boundaries[title].Technical_assets_inside + for _, val := range vals { assetsInside = append(assetsInside, fmt.Sprintf("%v", val)) } } @@ -437,9 +437,9 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry mergedArrays = append(mergedArrays, serverSideTechAssets...) *changeLogCollector = append(*changeLogCollector, "filling existing trust boundary: "+existingTrustBoundaryToAddTo) if !dryRun { - tb := modelInput.TrustBoundaries[title] - tb.TechnicalAssetsInside = mergedArrays - modelInput.TrustBoundaries[title] = tb + tb := modelInput.Trust_boundaries[title] + tb.Technical_assets_inside = mergedArrays + modelInput.Trust_boundaries[title] = tb } } } diff --git a/macros/built-in/pretty-print/pretty-print-macro.go b/macros/built-in/pretty-print/pretty-print-macro.go index 51c05c05..64149c57 100644 --- a/macros/built-in/pretty-print/pretty-print-macro.go +++ b/macros/built-in/pretty-print/pretty-print-macro.go @@ -14,7 +14,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { return model.NoMoreQuestions(), nil } -func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { +func ApplyAnswer(questionID string, answer ...string) (message string, validResult bool, err error) { return "Answer processed", true, nil } @@ -22,10 +22,10 @@ func GoBack() (message string, 
validResult bool, err error) { return "Cannot go back further", false, nil } -func GetFinalChangeImpact(_ *model.ModelInput) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(modelInput *model.ModelInput) (changes []string, message string, validResult bool, err error) { return []string{"pretty-printing the model file"}, "Changeset valid", true, err } -func Execute(_ *model.ModelInput) (message string, validResult bool, err error) { +func Execute(modelInput *model.ModelInput) (message string, validResult bool, err error) { return "Model pretty printing successful", true, nil } diff --git a/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go b/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go index 478ce5a2..f0deaec9 100644 --- a/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go +++ b/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go @@ -18,7 +18,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { return model.NoMoreQuestions(), nil } -func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { +func ApplyAnswer(questionID string, answer ...string) (message string, validResult bool, err error) { return "Answer processed", true, nil } @@ -26,12 +26,12 @@ func GoBack() (message string, validResult bool, err error) { return "Cannot go back further", false, nil } -func GetFinalChangeImpact(_ *model.ModelInput) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(modelInput *model.ModelInput) (changes []string, message string, validResult bool, err error) { return []string{"remove unused tags from the model file"}, "Changeset valid", true, err } func Execute(modelInput *model.ModelInput) (message string, validResult bool, err error) { - tagUsageMap := make(map[string]bool) + tagUsageMap := make(map[string]bool, 0) for _, tag := range model.ParsedModelRoot.TagsAvailable { tagUsageMap[tag] = 
false // false = tag is not used } @@ -70,6 +70,6 @@ func Execute(modelInput *model.ModelInput) (message string, validResult bool, er } } sort.Strings(tagsSorted) - modelInput.TagsAvailable = tagsSorted + modelInput.Tags_available = tagsSorted return "Model file removal of " + strconv.Itoa(counter) + " unused tags successful", true, nil } diff --git a/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go b/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go index 8635c07b..9a64557a 100644 --- a/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go +++ b/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go @@ -18,7 +18,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { return model.NoMoreQuestions(), nil } -func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { +func ApplyAnswer(questionID string, answer ...string) (message string, validResult bool, err error) { return "Answer processed", true, nil } @@ -26,7 +26,7 @@ func GoBack() (message string, validResult bool, err error) { return "Cannot go back further", false, nil } -func GetFinalChangeImpact(_ *model.ModelInput) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(modelInput *model.ModelInput) (changes []string, message string, validResult bool, err error) { return []string{"seed the model file with with initial risk tracking entries for all untracked risks"}, "Changeset valid", true, err } @@ -38,16 +38,16 @@ func Execute(modelInput *model.ModelInput) (message string, validResult bool, er } } sort.Strings(syntheticRiskIDsToCreateTrackingFor) - if modelInput.RiskTracking == nil { - modelInput.RiskTracking = make(map[string]model.InputRiskTracking) + if modelInput.Risk_tracking == nil { + modelInput.Risk_tracking = make(map[string]model.InputRiskTracking, 0) } for _, id := range syntheticRiskIDsToCreateTrackingFor { - modelInput.RiskTracking[id] = model.InputRiskTracking{ + 
modelInput.Risk_tracking[id] = model.InputRiskTracking{ Status: model.Unchecked.String(), Justification: "", Ticket: "", Date: "", - CheckedBy: "", + Checked_by: "", } } return "Model file seeding with " + strconv.Itoa(len(syntheticRiskIDsToCreateTrackingFor)) + " initial risk tracking successful", true, nil diff --git a/macros/built-in/seed-tags/seed-tags-macro.go b/macros/built-in/seed-tags/seed-tags-macro.go index 427a5281..fc65c414 100644 --- a/macros/built-in/seed-tags/seed-tags-macro.go +++ b/macros/built-in/seed-tags/seed-tags-macro.go @@ -18,7 +18,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { return model.NoMoreQuestions(), nil } -func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { +func ApplyAnswer(questionID string, answer ...string) (message string, validResult bool, err error) { return "Answer processed", true, nil } @@ -26,12 +26,12 @@ func GoBack() (message string, validResult bool, err error) { return "Cannot go back further", false, nil } -func GetFinalChangeImpact(_ *model.ModelInput) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(modelInput *model.ModelInput) (changes []string, message string, validResult bool, err error) { return []string{"seed the model file with supported tags from all risk rules"}, "Changeset valid", true, err } func Execute(modelInput *model.ModelInput) (message string, validResult bool, err error) { - tagMap := make(map[string]bool) + tagMap := make(map[string]bool, 0) for k, v := range model.AllSupportedTags { tagMap[k] = v } @@ -43,6 +43,6 @@ func Execute(modelInput *model.ModelInput) (message string, validResult bool, er tagsSorted = append(tagsSorted, tag) } sort.Strings(tagsSorted) - modelInput.TagsAvailable = tagsSorted + modelInput.Tags_available = tagsSorted return "Model file seeding with " + strconv.Itoa(len(model.AllSupportedTags)) + " tags successful", true, nil } diff --git a/main.go b/main.go index 
4ff9f5fe..938047aa 100644 --- a/main.go +++ b/main.go @@ -17,6 +17,7 @@ import ( "fmt" "hash/fnv" "io" + "io/ioutil" "log" "net/http" "os" @@ -33,56 +34,56 @@ import ( "github.com/gin-gonic/gin" "github.com/google/uuid" "github.com/threagile/threagile/colors" - addbuildpipeline "github.com/threagile/threagile/macros/built-in/add-build-pipeline" - addvault "github.com/threagile/threagile/macros/built-in/add-vault" - prettyprint "github.com/threagile/threagile/macros/built-in/pretty-print" - removeunusedtags "github.com/threagile/threagile/macros/built-in/remove-unused-tags" - seedrisktracking "github.com/threagile/threagile/macros/built-in/seed-risk-tracking" - seedtags "github.com/threagile/threagile/macros/built-in/seed-tags" + add_build_pipeline "github.com/threagile/threagile/macros/built-in/add-build-pipeline" + add_vault "github.com/threagile/threagile/macros/built-in/add-vault" + pretty_print "github.com/threagile/threagile/macros/built-in/pretty-print" + remove_unused_tags "github.com/threagile/threagile/macros/built-in/remove-unused-tags" + seed_risk_tracking "github.com/threagile/threagile/macros/built-in/seed-risk-tracking" + seed_tags "github.com/threagile/threagile/macros/built-in/seed-tags" "github.com/threagile/threagile/model" "github.com/threagile/threagile/report" - accidentalsecretleak "github.com/threagile/threagile/risks/built-in/accidental-secret-leak" - codebackdooring "github.com/threagile/threagile/risks/built-in/code-backdooring" - containerbaseimagebackdooring "github.com/threagile/threagile/risks/built-in/container-baseimage-backdooring" - containerplatformescape "github.com/threagile/threagile/risks/built-in/container-platform-escape" - crosssiterequestforgery "github.com/threagile/threagile/risks/built-in/cross-site-request-forgery" - crosssitescripting "github.com/threagile/threagile/risks/built-in/cross-site-scripting" - dosriskyaccessacrosstrustboundary 
"github.com/threagile/threagile/risks/built-in/dos-risky-access-across-trust-boundary" - incompletemodel "github.com/threagile/threagile/risks/built-in/incomplete-model" - ldapinjection "github.com/threagile/threagile/risks/built-in/ldap-injection" - missingauthentication "github.com/threagile/threagile/risks/built-in/missing-authentication" - missingauthenticationsecondfactor "github.com/threagile/threagile/risks/built-in/missing-authentication-second-factor" - missingbuildinfrastructure "github.com/threagile/threagile/risks/built-in/missing-build-infrastructure" - missingcloudhardening "github.com/threagile/threagile/risks/built-in/missing-cloud-hardening" - missingfilevalidation "github.com/threagile/threagile/risks/built-in/missing-file-validation" - missinghardening "github.com/threagile/threagile/risks/built-in/missing-hardening" - missingidentitypropagation "github.com/threagile/threagile/risks/built-in/missing-identity-propagation" - missingidentityproviderisolation "github.com/threagile/threagile/risks/built-in/missing-identity-provider-isolation" - missingidentitystore "github.com/threagile/threagile/risks/built-in/missing-identity-store" - missingnetworksegmentation "github.com/threagile/threagile/risks/built-in/missing-network-segmentation" - missingvault "github.com/threagile/threagile/risks/built-in/missing-vault" - missingvaultisolation "github.com/threagile/threagile/risks/built-in/missing-vault-isolation" - missingwaf "github.com/threagile/threagile/risks/built-in/missing-waf" - mixedtargetsonsharedruntime "github.com/threagile/threagile/risks/built-in/mixed-targets-on-shared-runtime" - pathtraversal "github.com/threagile/threagile/risks/built-in/path-traversal" - pushinsteadofpulldeployment "github.com/threagile/threagile/risks/built-in/push-instead-of-pull-deployment" - searchqueryinjection "github.com/threagile/threagile/risks/built-in/search-query-injection" - serversiderequestforgery 
"github.com/threagile/threagile/risks/built-in/server-side-request-forgery" - serviceregistrypoisoning "github.com/threagile/threagile/risks/built-in/service-registry-poisoning" - sqlnosqlinjection "github.com/threagile/threagile/risks/built-in/sql-nosql-injection" - uncheckeddeployment "github.com/threagile/threagile/risks/built-in/unchecked-deployment" - unencryptedasset "github.com/threagile/threagile/risks/built-in/unencrypted-asset" - unencryptedcommunication "github.com/threagile/threagile/risks/built-in/unencrypted-communication" - unguardedaccessfrominternet "github.com/threagile/threagile/risks/built-in/unguarded-access-from-internet" - unguardeddirectdatastoreaccess "github.com/threagile/threagile/risks/built-in/unguarded-direct-datastore-access" - unnecessarycommunicationlink "github.com/threagile/threagile/risks/built-in/unnecessary-communication-link" - unnecessarydataasset "github.com/threagile/threagile/risks/built-in/unnecessary-data-asset" - unnecessarydatatransfer "github.com/threagile/threagile/risks/built-in/unnecessary-data-transfer" - unnecessarytechnicalasset "github.com/threagile/threagile/risks/built-in/unnecessary-technical-asset" - untrusteddeserialization "github.com/threagile/threagile/risks/built-in/untrusted-deserialization" - wrongcommunicationlinkcontent "github.com/threagile/threagile/risks/built-in/wrong-communication-link-content" - wrongtrustboundarycontent "github.com/threagile/threagile/risks/built-in/wrong-trust-boundary-content" - xmlexternalentity "github.com/threagile/threagile/risks/built-in/xml-external-entity" + accidental_secret_leak "github.com/threagile/threagile/risks/built-in/accidental-secret-leak" + code_backdooring "github.com/threagile/threagile/risks/built-in/code-backdooring" + container_baseimage_backdooring "github.com/threagile/threagile/risks/built-in/container-baseimage-backdooring" + container_platform_escape "github.com/threagile/threagile/risks/built-in/container-platform-escape" + 
cross_site_request_forgery "github.com/threagile/threagile/risks/built-in/cross-site-request-forgery" + cross_site_scripting "github.com/threagile/threagile/risks/built-in/cross-site-scripting" + dos_risky_access_across_trust_boundary "github.com/threagile/threagile/risks/built-in/dos-risky-access-across-trust-boundary" + incomplete_model "github.com/threagile/threagile/risks/built-in/incomplete-model" + ldap_injection "github.com/threagile/threagile/risks/built-in/ldap-injection" + missing_authentication "github.com/threagile/threagile/risks/built-in/missing-authentication" + missing_authentication_second_factor "github.com/threagile/threagile/risks/built-in/missing-authentication-second-factor" + missing_build_infrastructure "github.com/threagile/threagile/risks/built-in/missing-build-infrastructure" + missing_cloud_hardening "github.com/threagile/threagile/risks/built-in/missing-cloud-hardening" + missing_file_validation "github.com/threagile/threagile/risks/built-in/missing-file-validation" + missing_hardening "github.com/threagile/threagile/risks/built-in/missing-hardening" + missing_identity_propagation "github.com/threagile/threagile/risks/built-in/missing-identity-propagation" + missing_identity_provider_isolation "github.com/threagile/threagile/risks/built-in/missing-identity-provider-isolation" + missing_identity_store "github.com/threagile/threagile/risks/built-in/missing-identity-store" + missing_network_segmentation "github.com/threagile/threagile/risks/built-in/missing-network-segmentation" + missing_vault "github.com/threagile/threagile/risks/built-in/missing-vault" + missing_vault_isolation "github.com/threagile/threagile/risks/built-in/missing-vault-isolation" + missing_waf "github.com/threagile/threagile/risks/built-in/missing-waf" + mixed_targets_on_shared_runtime "github.com/threagile/threagile/risks/built-in/mixed-targets-on-shared-runtime" + path_traversal "github.com/threagile/threagile/risks/built-in/path-traversal" + 
push_instead_of_pull_deployment "github.com/threagile/threagile/risks/built-in/push-instead-of-pull-deployment" + search_query_injection "github.com/threagile/threagile/risks/built-in/search-query-injection" + server_side_request_forgery "github.com/threagile/threagile/risks/built-in/server-side-request-forgery" + service_registry_poisoning "github.com/threagile/threagile/risks/built-in/service-registry-poisoning" + sql_nosql_injection "github.com/threagile/threagile/risks/built-in/sql-nosql-injection" + unchecked_deployment "github.com/threagile/threagile/risks/built-in/unchecked-deployment" + unencrypted_asset "github.com/threagile/threagile/risks/built-in/unencrypted-asset" + unencrypted_communication "github.com/threagile/threagile/risks/built-in/unencrypted-communication" + unguarded_access_from_internet "github.com/threagile/threagile/risks/built-in/unguarded-access-from-internet" + unguarded_direct_datastore_access "github.com/threagile/threagile/risks/built-in/unguarded-direct-datastore-access" + unnecessary_communication_link "github.com/threagile/threagile/risks/built-in/unnecessary-communication-link" + unnecessary_data_asset "github.com/threagile/threagile/risks/built-in/unnecessary-data-asset" + unnecessary_data_transfer "github.com/threagile/threagile/risks/built-in/unnecessary-data-transfer" + unnecessary_technical_asset "github.com/threagile/threagile/risks/built-in/unnecessary-technical-asset" + untrusted_deserialization "github.com/threagile/threagile/risks/built-in/untrusted-deserialization" + wrong_communication_link_content "github.com/threagile/threagile/risks/built-in/wrong-communication-link-content" + wrong_trust_boundary_content "github.com/threagile/threagile/risks/built-in/wrong-trust-boundary-content" + xml_external_entity "github.com/threagile/threagile/risks/built-in/xml-external-entity" "golang.org/x/crypto/argon2" "gopkg.in/yaml.v3" ) @@ -122,465 +123,465 @@ func applyRiskGeneration() { } } - if _, ok := 
skippedRules[unencryptedasset.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unencryptedasset.Category().Id) - delete(skippedRules, unencryptedasset.Category().Id) + if _, ok := skippedRules[unencrypted_asset.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unencrypted_asset.Category().Id) + delete(skippedRules, unencrypted_asset.Category().Id) } else { - model.AddToListOfSupportedTags(unencryptedasset.SupportedTags()) - risks := unencryptedasset.GenerateRisks() + model.AddToListOfSupportedTags(unencrypted_asset.SupportedTags()) + risks := unencrypted_asset.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unencryptedasset.Category()] = risks + model.GeneratedRisksByCategory[unencrypted_asset.Category()] = risks } } - if _, ok := skippedRules[unencryptedcommunication.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unencryptedcommunication.Category().Id) - delete(skippedRules, unencryptedcommunication.Category().Id) + if _, ok := skippedRules[unencrypted_communication.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unencrypted_communication.Category().Id) + delete(skippedRules, unencrypted_communication.Category().Id) } else { - model.AddToListOfSupportedTags(unencryptedcommunication.SupportedTags()) - risks := unencryptedcommunication.GenerateRisks() + model.AddToListOfSupportedTags(unencrypted_communication.SupportedTags()) + risks := unencrypted_communication.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unencryptedcommunication.Category()] = risks + model.GeneratedRisksByCategory[unencrypted_communication.Category()] = risks } } - if _, ok := skippedRules[unguardeddirectdatastoreaccess.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unguardeddirectdatastoreaccess.Category().Id) - delete(skippedRules, unguardeddirectdatastoreaccess.Category().Id) + if _, ok := skippedRules[unguarded_direct_datastore_access.Category().Id]; ok { + fmt.Println("Skipping risk rule:", 
unguarded_direct_datastore_access.Category().Id) + delete(skippedRules, unguarded_direct_datastore_access.Category().Id) } else { - model.AddToListOfSupportedTags(unguardeddirectdatastoreaccess.SupportedTags()) - risks := unguardeddirectdatastoreaccess.GenerateRisks() + model.AddToListOfSupportedTags(unguarded_direct_datastore_access.SupportedTags()) + risks := unguarded_direct_datastore_access.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unguardeddirectdatastoreaccess.Category()] = risks + model.GeneratedRisksByCategory[unguarded_direct_datastore_access.Category()] = risks } } - if _, ok := skippedRules[unguardedaccessfrominternet.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unguardedaccessfrominternet.Category().Id) - delete(skippedRules, unguardedaccessfrominternet.Category().Id) + if _, ok := skippedRules[unguarded_access_from_internet.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unguarded_access_from_internet.Category().Id) + delete(skippedRules, unguarded_access_from_internet.Category().Id) } else { - model.AddToListOfSupportedTags(unguardedaccessfrominternet.SupportedTags()) - risks := unguardedaccessfrominternet.GenerateRisks() + model.AddToListOfSupportedTags(unguarded_access_from_internet.SupportedTags()) + risks := unguarded_access_from_internet.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unguardedaccessfrominternet.Category()] = risks + model.GeneratedRisksByCategory[unguarded_access_from_internet.Category()] = risks } } - if _, ok := skippedRules[dosriskyaccessacrosstrustboundary.Category().Id]; ok { - fmt.Println("Skipping risk rule:", dosriskyaccessacrosstrustboundary.Category().Id) - delete(skippedRules, dosriskyaccessacrosstrustboundary.Category().Id) + if _, ok := skippedRules[dos_risky_access_across_trust_boundary.Category().Id]; ok { + fmt.Println("Skipping risk rule:", dos_risky_access_across_trust_boundary.Category().Id) + delete(skippedRules, 
dos_risky_access_across_trust_boundary.Category().Id) } else { - model.AddToListOfSupportedTags(dosriskyaccessacrosstrustboundary.SupportedTags()) - risks := dosriskyaccessacrosstrustboundary.GenerateRisks() + model.AddToListOfSupportedTags(dos_risky_access_across_trust_boundary.SupportedTags()) + risks := dos_risky_access_across_trust_boundary.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[dosriskyaccessacrosstrustboundary.Category()] = risks + model.GeneratedRisksByCategory[dos_risky_access_across_trust_boundary.Category()] = risks } } - if _, ok := skippedRules[missingnetworksegmentation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingnetworksegmentation.Category().Id) - delete(skippedRules, missingnetworksegmentation.Category().Id) + if _, ok := skippedRules[missing_network_segmentation.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missing_network_segmentation.Category().Id) + delete(skippedRules, missing_network_segmentation.Category().Id) } else { - model.AddToListOfSupportedTags(missingnetworksegmentation.SupportedTags()) - risks := missingnetworksegmentation.GenerateRisks() + model.AddToListOfSupportedTags(missing_network_segmentation.SupportedTags()) + risks := missing_network_segmentation.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missingnetworksegmentation.Category()] = risks + model.GeneratedRisksByCategory[missing_network_segmentation.Category()] = risks } } - if _, ok := skippedRules[mixedtargetsonsharedruntime.Category().Id]; ok { - fmt.Println("Skipping risk rule:", mixedtargetsonsharedruntime.Category().Id) - delete(skippedRules, mixedtargetsonsharedruntime.Category().Id) + if _, ok := skippedRules[mixed_targets_on_shared_runtime.Category().Id]; ok { + fmt.Println("Skipping risk rule:", mixed_targets_on_shared_runtime.Category().Id) + delete(skippedRules, mixed_targets_on_shared_runtime.Category().Id) } else { - 
model.AddToListOfSupportedTags(mixedtargetsonsharedruntime.SupportedTags()) - risks := mixedtargetsonsharedruntime.GenerateRisks() + model.AddToListOfSupportedTags(mixed_targets_on_shared_runtime.SupportedTags()) + risks := mixed_targets_on_shared_runtime.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[mixedtargetsonsharedruntime.Category()] = risks + model.GeneratedRisksByCategory[mixed_targets_on_shared_runtime.Category()] = risks } } - if _, ok := skippedRules[missingidentitypropagation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingidentitypropagation.Category().Id) - delete(skippedRules, missingidentitypropagation.Category().Id) + if _, ok := skippedRules[missing_identity_propagation.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missing_identity_propagation.Category().Id) + delete(skippedRules, missing_identity_propagation.Category().Id) } else { - model.AddToListOfSupportedTags(missingidentitypropagation.SupportedTags()) - risks := missingidentitypropagation.GenerateRisks() + model.AddToListOfSupportedTags(missing_identity_propagation.SupportedTags()) + risks := missing_identity_propagation.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missingidentitypropagation.Category()] = risks + model.GeneratedRisksByCategory[missing_identity_propagation.Category()] = risks } } - if _, ok := skippedRules[missingidentitystore.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingidentitystore.Category().Id) - delete(skippedRules, missingidentitystore.Category().Id) + if _, ok := skippedRules[missing_identity_store.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missing_identity_store.Category().Id) + delete(skippedRules, missing_identity_store.Category().Id) } else { - model.AddToListOfSupportedTags(missingidentitystore.SupportedTags()) - risks := missingidentitystore.GenerateRisks() + model.AddToListOfSupportedTags(missing_identity_store.SupportedTags()) + risks := 
missing_identity_store.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missingidentitystore.Category()] = risks + model.GeneratedRisksByCategory[missing_identity_store.Category()] = risks } } - if _, ok := skippedRules[missingauthentication.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingauthentication.Category().Id) - delete(skippedRules, missingauthentication.Category().Id) + if _, ok := skippedRules[missing_authentication.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missing_authentication.Category().Id) + delete(skippedRules, missing_authentication.Category().Id) } else { - model.AddToListOfSupportedTags(missingauthentication.SupportedTags()) - risks := missingauthentication.GenerateRisks() + model.AddToListOfSupportedTags(missing_authentication.SupportedTags()) + risks := missing_authentication.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missingauthentication.Category()] = risks + model.GeneratedRisksByCategory[missing_authentication.Category()] = risks } } - if _, ok := skippedRules[missingauthenticationsecondfactor.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingauthenticationsecondfactor.Category().Id) - delete(skippedRules, missingauthenticationsecondfactor.Category().Id) + if _, ok := skippedRules[missing_authentication_second_factor.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missing_authentication_second_factor.Category().Id) + delete(skippedRules, missing_authentication_second_factor.Category().Id) } else { - model.AddToListOfSupportedTags(missingauthenticationsecondfactor.SupportedTags()) - risks := missingauthenticationsecondfactor.GenerateRisks() + model.AddToListOfSupportedTags(missing_authentication_second_factor.SupportedTags()) + risks := missing_authentication_second_factor.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missingauthenticationsecondfactor.Category()] = risks + 
model.GeneratedRisksByCategory[missing_authentication_second_factor.Category()] = risks } } - if _, ok := skippedRules[unnecessarydatatransfer.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unnecessarydatatransfer.Category().Id) - delete(skippedRules, unnecessarydatatransfer.Category().Id) + if _, ok := skippedRules[unnecessary_data_transfer.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unnecessary_data_transfer.Category().Id) + delete(skippedRules, unnecessary_data_transfer.Category().Id) } else { - model.AddToListOfSupportedTags(unnecessarydatatransfer.SupportedTags()) - risks := unnecessarydatatransfer.GenerateRisks() + model.AddToListOfSupportedTags(unnecessary_data_transfer.SupportedTags()) + risks := unnecessary_data_transfer.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unnecessarydatatransfer.Category()] = risks + model.GeneratedRisksByCategory[unnecessary_data_transfer.Category()] = risks } } - if _, ok := skippedRules[unnecessarycommunicationlink.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unnecessarycommunicationlink.Category().Id) - delete(skippedRules, unnecessarycommunicationlink.Category().Id) + if _, ok := skippedRules[unnecessary_communication_link.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unnecessary_communication_link.Category().Id) + delete(skippedRules, unnecessary_communication_link.Category().Id) } else { - model.AddToListOfSupportedTags(unnecessarycommunicationlink.SupportedTags()) - risks := unnecessarycommunicationlink.GenerateRisks() + model.AddToListOfSupportedTags(unnecessary_communication_link.SupportedTags()) + risks := unnecessary_communication_link.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unnecessarycommunicationlink.Category()] = risks + model.GeneratedRisksByCategory[unnecessary_communication_link.Category()] = risks } } - if _, ok := skippedRules[unnecessarytechnicalasset.Category().Id]; ok { - fmt.Println("Skipping risk 
rule:", unnecessarytechnicalasset.Category().Id) - delete(skippedRules, unnecessarytechnicalasset.Category().Id) + if _, ok := skippedRules[unnecessary_technical_asset.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unnecessary_technical_asset.Category().Id) + delete(skippedRules, unnecessary_technical_asset.Category().Id) } else { - model.AddToListOfSupportedTags(unnecessarytechnicalasset.SupportedTags()) - risks := unnecessarytechnicalasset.GenerateRisks() + model.AddToListOfSupportedTags(unnecessary_technical_asset.SupportedTags()) + risks := unnecessary_technical_asset.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unnecessarytechnicalasset.Category()] = risks + model.GeneratedRisksByCategory[unnecessary_technical_asset.Category()] = risks } } - if _, ok := skippedRules[unnecessarydataasset.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unnecessarydataasset.Category().Id) - delete(skippedRules, unnecessarydataasset.Category().Id) + if _, ok := skippedRules[unnecessary_data_asset.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unnecessary_data_asset.Category().Id) + delete(skippedRules, unnecessary_data_asset.Category().Id) } else { - model.AddToListOfSupportedTags(unnecessarydataasset.SupportedTags()) - risks := unnecessarydataasset.GenerateRisks() + model.AddToListOfSupportedTags(unnecessary_data_asset.SupportedTags()) + risks := unnecessary_data_asset.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unnecessarydataasset.Category()] = risks + model.GeneratedRisksByCategory[unnecessary_data_asset.Category()] = risks } } - if _, ok := skippedRules[sqlnosqlinjection.Category().Id]; ok { - fmt.Println("Skipping risk rule:", sqlnosqlinjection.Category().Id) - delete(skippedRules, sqlnosqlinjection.Category().Id) + if _, ok := skippedRules[sql_nosql_injection.Category().Id]; ok { + fmt.Println("Skipping risk rule:", sql_nosql_injection.Category().Id) + delete(skippedRules, 
sql_nosql_injection.Category().Id) } else { - model.AddToListOfSupportedTags(sqlnosqlinjection.SupportedTags()) - risks := sqlnosqlinjection.GenerateRisks() + model.AddToListOfSupportedTags(sql_nosql_injection.SupportedTags()) + risks := sql_nosql_injection.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[sqlnosqlinjection.Category()] = risks + model.GeneratedRisksByCategory[sql_nosql_injection.Category()] = risks } } - if _, ok := skippedRules[ldapinjection.Category().Id]; ok { - fmt.Println("Skipping risk rule:", ldapinjection.Category().Id) - delete(skippedRules, ldapinjection.Category().Id) + if _, ok := skippedRules[ldap_injection.Category().Id]; ok { + fmt.Println("Skipping risk rule:", ldap_injection.Category().Id) + delete(skippedRules, ldap_injection.Category().Id) } else { - model.AddToListOfSupportedTags(ldapinjection.SupportedTags()) - risks := ldapinjection.GenerateRisks() + model.AddToListOfSupportedTags(ldap_injection.SupportedTags()) + risks := ldap_injection.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[ldapinjection.Category()] = risks + model.GeneratedRisksByCategory[ldap_injection.Category()] = risks } } - if _, ok := skippedRules[crosssitescripting.Category().Id]; ok { - fmt.Println("Skipping risk rule:", crosssitescripting.Category().Id) - delete(skippedRules, crosssitescripting.Category().Id) + if _, ok := skippedRules[cross_site_scripting.Category().Id]; ok { + fmt.Println("Skipping risk rule:", cross_site_scripting.Category().Id) + delete(skippedRules, cross_site_scripting.Category().Id) } else { - model.AddToListOfSupportedTags(crosssitescripting.SupportedTags()) - risks := crosssitescripting.GenerateRisks() + model.AddToListOfSupportedTags(cross_site_scripting.SupportedTags()) + risks := cross_site_scripting.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[crosssitescripting.Category()] = risks + model.GeneratedRisksByCategory[cross_site_scripting.Category()] = risks } } - if 
_, ok := skippedRules[crosssiterequestforgery.Category().Id]; ok { - fmt.Println("Skipping risk rule:", crosssiterequestforgery.Category().Id) - delete(skippedRules, crosssiterequestforgery.Category().Id) + if _, ok := skippedRules[cross_site_request_forgery.Category().Id]; ok { + fmt.Println("Skipping risk rule:", cross_site_request_forgery.Category().Id) + delete(skippedRules, cross_site_request_forgery.Category().Id) } else { - model.AddToListOfSupportedTags(crosssiterequestforgery.SupportedTags()) - risks := crosssiterequestforgery.GenerateRisks() + model.AddToListOfSupportedTags(cross_site_request_forgery.SupportedTags()) + risks := cross_site_request_forgery.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[crosssiterequestforgery.Category()] = risks + model.GeneratedRisksByCategory[cross_site_request_forgery.Category()] = risks } } - if _, ok := skippedRules[serversiderequestforgery.Category().Id]; ok { - fmt.Println("Skipping risk rule:", serversiderequestforgery.Category().Id) - delete(skippedRules, serversiderequestforgery.Category().Id) + if _, ok := skippedRules[server_side_request_forgery.Category().Id]; ok { + fmt.Println("Skipping risk rule:", server_side_request_forgery.Category().Id) + delete(skippedRules, server_side_request_forgery.Category().Id) } else { - model.AddToListOfSupportedTags(serversiderequestforgery.SupportedTags()) - risks := serversiderequestforgery.GenerateRisks() + model.AddToListOfSupportedTags(server_side_request_forgery.SupportedTags()) + risks := server_side_request_forgery.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[serversiderequestforgery.Category()] = risks + model.GeneratedRisksByCategory[server_side_request_forgery.Category()] = risks } } - if _, ok := skippedRules[pathtraversal.Category().Id]; ok { - fmt.Println("Skipping risk rule:", pathtraversal.Category().Id) - delete(skippedRules, pathtraversal.Category().Id) + if _, ok := skippedRules[path_traversal.Category().Id]; ok { 
+ fmt.Println("Skipping risk rule:", path_traversal.Category().Id) + delete(skippedRules, path_traversal.Category().Id) } else { - model.AddToListOfSupportedTags(pathtraversal.SupportedTags()) - risks := pathtraversal.GenerateRisks() + model.AddToListOfSupportedTags(path_traversal.SupportedTags()) + risks := path_traversal.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[pathtraversal.Category()] = risks + model.GeneratedRisksByCategory[path_traversal.Category()] = risks } } - if _, ok := skippedRules[pushinsteadofpulldeployment.Category().Id]; ok { - fmt.Println("Skipping risk rule:", pushinsteadofpulldeployment.Category().Id) - delete(skippedRules, pushinsteadofpulldeployment.Category().Id) + if _, ok := skippedRules[push_instead_of_pull_deployment.Category().Id]; ok { + fmt.Println("Skipping risk rule:", push_instead_of_pull_deployment.Category().Id) + delete(skippedRules, push_instead_of_pull_deployment.Category().Id) } else { - model.AddToListOfSupportedTags(pushinsteadofpulldeployment.SupportedTags()) - risks := pushinsteadofpulldeployment.GenerateRisks() + model.AddToListOfSupportedTags(push_instead_of_pull_deployment.SupportedTags()) + risks := push_instead_of_pull_deployment.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[pushinsteadofpulldeployment.Category()] = risks + model.GeneratedRisksByCategory[push_instead_of_pull_deployment.Category()] = risks } } - if _, ok := skippedRules[searchqueryinjection.Category().Id]; ok { - fmt.Println("Skipping risk rule:", searchqueryinjection.Category().Id) - delete(skippedRules, searchqueryinjection.Category().Id) + if _, ok := skippedRules[search_query_injection.Category().Id]; ok { + fmt.Println("Skipping risk rule:", search_query_injection.Category().Id) + delete(skippedRules, search_query_injection.Category().Id) } else { - model.AddToListOfSupportedTags(searchqueryinjection.SupportedTags()) - risks := searchqueryinjection.GenerateRisks() + 
model.AddToListOfSupportedTags(search_query_injection.SupportedTags()) + risks := search_query_injection.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[searchqueryinjection.Category()] = risks + model.GeneratedRisksByCategory[search_query_injection.Category()] = risks } } - if _, ok := skippedRules[serviceregistrypoisoning.Category().Id]; ok { - fmt.Println("Skipping risk rule:", serviceregistrypoisoning.Category().Id) - delete(skippedRules, serviceregistrypoisoning.Category().Id) + if _, ok := skippedRules[service_registry_poisoning.Category().Id]; ok { + fmt.Println("Skipping risk rule:", service_registry_poisoning.Category().Id) + delete(skippedRules, service_registry_poisoning.Category().Id) } else { - model.AddToListOfSupportedTags(serviceregistrypoisoning.SupportedTags()) - risks := serviceregistrypoisoning.GenerateRisks() + model.AddToListOfSupportedTags(service_registry_poisoning.SupportedTags()) + risks := service_registry_poisoning.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[serviceregistrypoisoning.Category()] = risks + model.GeneratedRisksByCategory[service_registry_poisoning.Category()] = risks } } - if _, ok := skippedRules[untrusteddeserialization.Category().Id]; ok { - fmt.Println("Skipping risk rule:", untrusteddeserialization.Category().Id) - delete(skippedRules, untrusteddeserialization.Category().Id) + if _, ok := skippedRules[untrusted_deserialization.Category().Id]; ok { + fmt.Println("Skipping risk rule:", untrusted_deserialization.Category().Id) + delete(skippedRules, untrusted_deserialization.Category().Id) } else { - model.AddToListOfSupportedTags(untrusteddeserialization.SupportedTags()) - risks := untrusteddeserialization.GenerateRisks() + model.AddToListOfSupportedTags(untrusted_deserialization.SupportedTags()) + risks := untrusted_deserialization.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[untrusteddeserialization.Category()] = risks + 
model.GeneratedRisksByCategory[untrusted_deserialization.Category()] = risks } } - if _, ok := skippedRules[xmlexternalentity.Category().Id]; ok { - fmt.Println("Skipping risk rule:", xmlexternalentity.Category().Id) - delete(skippedRules, xmlexternalentity.Category().Id) + if _, ok := skippedRules[xml_external_entity.Category().Id]; ok { + fmt.Println("Skipping risk rule:", xml_external_entity.Category().Id) + delete(skippedRules, xml_external_entity.Category().Id) } else { - model.AddToListOfSupportedTags(xmlexternalentity.SupportedTags()) - risks := xmlexternalentity.GenerateRisks() + model.AddToListOfSupportedTags(xml_external_entity.SupportedTags()) + risks := xml_external_entity.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[xmlexternalentity.Category()] = risks + model.GeneratedRisksByCategory[xml_external_entity.Category()] = risks } } - if _, ok := skippedRules[missingcloudhardening.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingcloudhardening.Category().Id) - delete(skippedRules, missingcloudhardening.Category().Id) + if _, ok := skippedRules[missing_cloud_hardening.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missing_cloud_hardening.Category().Id) + delete(skippedRules, missing_cloud_hardening.Category().Id) } else { - model.AddToListOfSupportedTags(missingcloudhardening.SupportedTags()) - risks := missingcloudhardening.GenerateRisks() + model.AddToListOfSupportedTags(missing_cloud_hardening.SupportedTags()) + risks := missing_cloud_hardening.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missingcloudhardening.Category()] = risks + model.GeneratedRisksByCategory[missing_cloud_hardening.Category()] = risks } } - if _, ok := skippedRules[missingfilevalidation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingfilevalidation.Category().Id) - delete(skippedRules, missingfilevalidation.Category().Id) + if _, ok := skippedRules[missing_file_validation.Category().Id]; ok { 
+ fmt.Println("Skipping risk rule:", missing_file_validation.Category().Id) + delete(skippedRules, missing_file_validation.Category().Id) } else { - model.AddToListOfSupportedTags(missingfilevalidation.SupportedTags()) - risks := missingfilevalidation.GenerateRisks() + model.AddToListOfSupportedTags(missing_file_validation.SupportedTags()) + risks := missing_file_validation.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missingfilevalidation.Category()] = risks + model.GeneratedRisksByCategory[missing_file_validation.Category()] = risks } } - if _, ok := skippedRules[missinghardening.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missinghardening.Category().Id) - delete(skippedRules, missinghardening.Category().Id) + if _, ok := skippedRules[missing_hardening.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missing_hardening.Category().Id) + delete(skippedRules, missing_hardening.Category().Id) } else { - model.AddToListOfSupportedTags(missinghardening.SupportedTags()) - risks := missinghardening.GenerateRisks() + model.AddToListOfSupportedTags(missing_hardening.SupportedTags()) + risks := missing_hardening.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missinghardening.Category()] = risks + model.GeneratedRisksByCategory[missing_hardening.Category()] = risks } } - if _, ok := skippedRules[accidentalsecretleak.Category().Id]; ok { - fmt.Println("Skipping risk rule:", accidentalsecretleak.Category().Id) - delete(skippedRules, accidentalsecretleak.Category().Id) + if _, ok := skippedRules[accidental_secret_leak.Category().Id]; ok { + fmt.Println("Skipping risk rule:", accidental_secret_leak.Category().Id) + delete(skippedRules, accidental_secret_leak.Category().Id) } else { - model.AddToListOfSupportedTags(accidentalsecretleak.SupportedTags()) - risks := accidentalsecretleak.GenerateRisks() + model.AddToListOfSupportedTags(accidental_secret_leak.SupportedTags()) + risks := 
accidental_secret_leak.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[accidentalsecretleak.Category()] = risks + model.GeneratedRisksByCategory[accidental_secret_leak.Category()] = risks } } - if _, ok := skippedRules[codebackdooring.Category().Id]; ok { - fmt.Println("Skipping risk rule:", codebackdooring.Category().Id) - delete(skippedRules, codebackdooring.Category().Id) + if _, ok := skippedRules[code_backdooring.Category().Id]; ok { + fmt.Println("Skipping risk rule:", code_backdooring.Category().Id) + delete(skippedRules, code_backdooring.Category().Id) } else { - model.AddToListOfSupportedTags(codebackdooring.SupportedTags()) - risks := codebackdooring.GenerateRisks() + model.AddToListOfSupportedTags(code_backdooring.SupportedTags()) + risks := code_backdooring.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[codebackdooring.Category()] = risks + model.GeneratedRisksByCategory[code_backdooring.Category()] = risks } } - if _, ok := skippedRules[containerbaseimagebackdooring.Category().Id]; ok { - fmt.Println("Skipping risk rule:", containerbaseimagebackdooring.Category().Id) - delete(skippedRules, containerbaseimagebackdooring.Category().Id) + if _, ok := skippedRules[container_baseimage_backdooring.Category().Id]; ok { + fmt.Println("Skipping risk rule:", container_baseimage_backdooring.Category().Id) + delete(skippedRules, container_baseimage_backdooring.Category().Id) } else { - model.AddToListOfSupportedTags(containerbaseimagebackdooring.SupportedTags()) - risks := containerbaseimagebackdooring.GenerateRisks() + model.AddToListOfSupportedTags(container_baseimage_backdooring.SupportedTags()) + risks := container_baseimage_backdooring.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[containerbaseimagebackdooring.Category()] = risks + model.GeneratedRisksByCategory[container_baseimage_backdooring.Category()] = risks } } - if _, ok := skippedRules[containerplatformescape.Category().Id]; ok { - 
fmt.Println("Skipping risk rule:", containerplatformescape.Category().Id) - delete(skippedRules, containerplatformescape.Category().Id) + if _, ok := skippedRules[container_platform_escape.Category().Id]; ok { + fmt.Println("Skipping risk rule:", container_platform_escape.Category().Id) + delete(skippedRules, container_platform_escape.Category().Id) } else { - model.AddToListOfSupportedTags(containerplatformescape.SupportedTags()) - risks := containerplatformescape.GenerateRisks() + model.AddToListOfSupportedTags(container_platform_escape.SupportedTags()) + risks := container_platform_escape.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[containerplatformescape.Category()] = risks + model.GeneratedRisksByCategory[container_platform_escape.Category()] = risks } } - if _, ok := skippedRules[incompletemodel.Category().Id]; ok { - fmt.Println("Skipping risk rule:", incompletemodel.Category().Id) - delete(skippedRules, incompletemodel.Category().Id) + if _, ok := skippedRules[incomplete_model.Category().Id]; ok { + fmt.Println("Skipping risk rule:", incomplete_model.Category().Id) + delete(skippedRules, incomplete_model.Category().Id) } else { - model.AddToListOfSupportedTags(incompletemodel.SupportedTags()) - risks := incompletemodel.GenerateRisks() + model.AddToListOfSupportedTags(incomplete_model.SupportedTags()) + risks := incomplete_model.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[incompletemodel.Category()] = risks + model.GeneratedRisksByCategory[incomplete_model.Category()] = risks } } - if _, ok := skippedRules[uncheckeddeployment.Category().Id]; ok { - fmt.Println("Skipping risk rule:", uncheckeddeployment.Category().Id) - delete(skippedRules, uncheckeddeployment.Category().Id) + if _, ok := skippedRules[unchecked_deployment.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unchecked_deployment.Category().Id) + delete(skippedRules, unchecked_deployment.Category().Id) } else { - 
model.AddToListOfSupportedTags(uncheckeddeployment.SupportedTags()) - risks := uncheckeddeployment.GenerateRisks() + model.AddToListOfSupportedTags(unchecked_deployment.SupportedTags()) + risks := unchecked_deployment.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[uncheckeddeployment.Category()] = risks + model.GeneratedRisksByCategory[unchecked_deployment.Category()] = risks } } - if _, ok := skippedRules[missingbuildinfrastructure.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingbuildinfrastructure.Category().Id) - delete(skippedRules, missingbuildinfrastructure.Category().Id) + if _, ok := skippedRules[missing_build_infrastructure.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missing_build_infrastructure.Category().Id) + delete(skippedRules, missing_build_infrastructure.Category().Id) } else { - model.AddToListOfSupportedTags(missingbuildinfrastructure.SupportedTags()) - risks := missingbuildinfrastructure.GenerateRisks() + model.AddToListOfSupportedTags(missing_build_infrastructure.SupportedTags()) + risks := missing_build_infrastructure.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missingbuildinfrastructure.Category()] = risks + model.GeneratedRisksByCategory[missing_build_infrastructure.Category()] = risks } } - if _, ok := skippedRules[missingidentityproviderisolation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingidentityproviderisolation.Category().Id) - delete(skippedRules, missingidentityproviderisolation.Category().Id) + if _, ok := skippedRules[missing_identity_provider_isolation.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missing_identity_provider_isolation.Category().Id) + delete(skippedRules, missing_identity_provider_isolation.Category().Id) } else { - model.AddToListOfSupportedTags(missingidentityproviderisolation.SupportedTags()) - risks := missingidentityproviderisolation.GenerateRisks() + 
model.AddToListOfSupportedTags(missing_identity_provider_isolation.SupportedTags()) + risks := missing_identity_provider_isolation.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missingidentityproviderisolation.Category()] = risks + model.GeneratedRisksByCategory[missing_identity_provider_isolation.Category()] = risks } } - if _, ok := skippedRules[missingvault.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingvault.Category().Id) - delete(skippedRules, missingvault.Category().Id) + if _, ok := skippedRules[missing_vault.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missing_vault.Category().Id) + delete(skippedRules, missing_vault.Category().Id) } else { - model.AddToListOfSupportedTags(missingvault.SupportedTags()) - risks := missingvault.GenerateRisks() + model.AddToListOfSupportedTags(missing_vault.SupportedTags()) + risks := missing_vault.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missingvault.Category()] = risks + model.GeneratedRisksByCategory[missing_vault.Category()] = risks } } - if _, ok := skippedRules[missingvaultisolation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingvaultisolation.Category().Id) - delete(skippedRules, missingvaultisolation.Category().Id) + if _, ok := skippedRules[missing_vault_isolation.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missing_vault_isolation.Category().Id) + delete(skippedRules, missing_vault_isolation.Category().Id) } else { - model.AddToListOfSupportedTags(missingvaultisolation.SupportedTags()) - risks := missingvaultisolation.GenerateRisks() + model.AddToListOfSupportedTags(missing_vault_isolation.SupportedTags()) + risks := missing_vault_isolation.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missingvaultisolation.Category()] = risks + model.GeneratedRisksByCategory[missing_vault_isolation.Category()] = risks } } - if _, ok := skippedRules[missingwaf.Category().Id]; ok { - 
fmt.Println("Skipping risk rule:", missingwaf.Category().Id) - delete(skippedRules, missingwaf.Category().Id) + if _, ok := skippedRules[missing_waf.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missing_waf.Category().Id) + delete(skippedRules, missing_waf.Category().Id) } else { - model.AddToListOfSupportedTags(missingwaf.SupportedTags()) - risks := missingwaf.GenerateRisks() + model.AddToListOfSupportedTags(missing_waf.SupportedTags()) + risks := missing_waf.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missingwaf.Category()] = risks + model.GeneratedRisksByCategory[missing_waf.Category()] = risks } } - if _, ok := skippedRules[wrongcommunicationlinkcontent.Category().Id]; ok { - fmt.Println("Skipping risk rule:", wrongcommunicationlinkcontent.Category().Id) - delete(skippedRules, wrongcommunicationlinkcontent.Category().Id) + if _, ok := skippedRules[wrong_communication_link_content.Category().Id]; ok { + fmt.Println("Skipping risk rule:", wrong_communication_link_content.Category().Id) + delete(skippedRules, wrong_communication_link_content.Category().Id) } else { - model.AddToListOfSupportedTags(wrongcommunicationlinkcontent.SupportedTags()) - risks := wrongcommunicationlinkcontent.GenerateRisks() + model.AddToListOfSupportedTags(wrong_communication_link_content.SupportedTags()) + risks := wrong_communication_link_content.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[wrongcommunicationlinkcontent.Category()] = risks + model.GeneratedRisksByCategory[wrong_communication_link_content.Category()] = risks } } - if _, ok := skippedRules[wrongtrustboundarycontent.Category().Id]; ok { - fmt.Println("Skipping risk rule:", wrongtrustboundarycontent.Category().Id) - delete(skippedRules, wrongtrustboundarycontent.Category().Id) + if _, ok := skippedRules[wrong_trust_boundary_content.Category().Id]; ok { + fmt.Println("Skipping risk rule:", wrong_trust_boundary_content.Category().Id) + delete(skippedRules, 
wrong_trust_boundary_content.Category().Id) } else { - model.AddToListOfSupportedTags(wrongtrustboundarycontent.SupportedTags()) - risks := wrongtrustboundarycontent.GenerateRisks() + model.AddToListOfSupportedTags(wrong_trust_boundary_content.SupportedTags()) + risks := wrong_trust_boundary_content.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[wrongtrustboundarycontent.Category()] = risks + model.GeneratedRisksByCategory[wrong_trust_boundary_content.Category()] = risks } } @@ -648,8 +649,8 @@ func checkRiskTracking() { } // save also the risk-category-id and risk-status directly in the risk for better JSON marshalling - for category := range model.GeneratedRisksByCategory { - for i := range model.GeneratedRisksByCategory[category] { + for category, _ := range model.GeneratedRisksByCategory { + for i, _ := range model.GeneratedRisksByCategory[category] { model.GeneratedRisksByCategory[category][i].CategoryId = category.Id model.GeneratedRisksByCategory[category][i].RiskStatus = model.GeneratedRisksByCategory[category][i].GetRiskTrackingStatusDefaultingUnchecked() } @@ -682,26 +683,26 @@ func unzip(src string, dest string) ([]string, error) { if err != nil { return filenames, err } - defer func() { _ = r.Close() }() + defer r.Close() for _, f := range r.File { // Store filename/path for returning and using later on - path := filepath.Join(dest, f.Name) + fpath := filepath.Join(dest, f.Name) // Check for ZipSlip. 
More Info: http://bit.ly/2MsjAWE - if !strings.HasPrefix(path, filepath.Clean(dest)+string(os.PathSeparator)) { - return filenames, fmt.Errorf("%s: illegal file path", path) + if !strings.HasPrefix(fpath, filepath.Clean(dest)+string(os.PathSeparator)) { + return filenames, fmt.Errorf("%s: illegal file path", fpath) } - filenames = append(filenames, path) + filenames = append(filenames, fpath) if f.FileInfo().IsDir() { // Make Folder - _ = os.MkdirAll(path, os.ModePerm) + os.MkdirAll(fpath, os.ModePerm) continue } // Make File - if err = os.MkdirAll(filepath.Dir(path), os.ModePerm); err != nil { + if err = os.MkdirAll(filepath.Dir(fpath), os.ModePerm); err != nil { return filenames, err } - outFile, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode()) + outFile, err := os.OpenFile(fpath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode()) if err != nil { return filenames, err } @@ -711,8 +712,8 @@ func unzip(src string, dest string) ([]string, error) { } _, err = io.Copy(outFile, rc) // Close the file without defer to close before next iteration of loop - _ = outFile.Close() - _ = rc.Close() + outFile.Close() + rc.Close() if err != nil { return filenames, err } @@ -728,10 +729,10 @@ func zipFiles(filename string, files []string) error { if err != nil { return err } - defer func() { _ = newZipFile.Close() }() + defer newZipFile.Close() zipWriter := zip.NewWriter(newZipFile) - defer func() { _ = zipWriter.Close() }() + defer zipWriter.Close() // Add files to zip for _, file := range files { @@ -747,7 +748,7 @@ func addFileToZip(zipWriter *zip.Writer, filename string) error { if err != nil { return err } - defer func() { _ = fileToZip.Close() }() + defer fileToZip.Close() // Get the file information info, err := fileToZip.Stat() @@ -784,7 +785,7 @@ func doIt(inputFilename string, outputDirectory string) { if *verbose { log.Println(err) } - _, _ = os.Stderr.WriteString(err.Error() + "\n") + os.Stderr.WriteString(err.Error() + "\n") os.Exit(2) } }() @@ 
-807,18 +808,18 @@ func doIt(inputFilename string, outputDirectory string) { if len(*executeModelMacro) > 0 { var macroDetails model.MacroDetails switch *executeModelMacro { - case addbuildpipeline.GetMacroDetails().ID: - macroDetails = addbuildpipeline.GetMacroDetails() - case addvault.GetMacroDetails().ID: - macroDetails = addvault.GetMacroDetails() - case prettyprint.GetMacroDetails().ID: - macroDetails = prettyprint.GetMacroDetails() - case removeunusedtags.GetMacroDetails().ID: - macroDetails = removeunusedtags.GetMacroDetails() - case seedrisktracking.GetMacroDetails().ID: - macroDetails = seedrisktracking.GetMacroDetails() - case seedtags.GetMacroDetails().ID: - macroDetails = seedtags.GetMacroDetails() + case add_build_pipeline.GetMacroDetails().ID: + macroDetails = add_build_pipeline.GetMacroDetails() + case add_vault.GetMacroDetails().ID: + macroDetails = add_vault.GetMacroDetails() + case pretty_print.GetMacroDetails().ID: + macroDetails = pretty_print.GetMacroDetails() + case remove_unused_tags.GetMacroDetails().ID: + macroDetails = remove_unused_tags.GetMacroDetails() + case seed_risk_tracking.GetMacroDetails().ID: + macroDetails = seed_risk_tracking.GetMacroDetails() + case seed_tags.GetMacroDetails().ID: + macroDetails = seed_tags.GetMacroDetails() default: log.Fatal("Unknown model macro: ", *executeModelMacro) } @@ -837,18 +838,18 @@ func doIt(inputFilename string, outputDirectory string) { var nextQuestion model.MacroQuestion for { switch macroDetails.ID { - case addbuildpipeline.GetMacroDetails().ID: - nextQuestion, err = addbuildpipeline.GetNextQuestion() - case addvault.GetMacroDetails().ID: - nextQuestion, err = addvault.GetNextQuestion() - case prettyprint.GetMacroDetails().ID: - nextQuestion, err = prettyprint.GetNextQuestion() - case removeunusedtags.GetMacroDetails().ID: - nextQuestion, err = removeunusedtags.GetNextQuestion() - case seedrisktracking.GetMacroDetails().ID: - nextQuestion, err = seedrisktracking.GetNextQuestion() - case 
seedtags.GetMacroDetails().ID: - nextQuestion, err = seedtags.GetNextQuestion() + case add_build_pipeline.GetMacroDetails().ID: + nextQuestion, err = add_build_pipeline.GetNextQuestion() + case add_vault.GetMacroDetails().ID: + nextQuestion, err = add_vault.GetNextQuestion() + case pretty_print.GetMacroDetails().ID: + nextQuestion, err = pretty_print.GetNextQuestion() + case remove_unused_tags.GetMacroDetails().ID: + nextQuestion, err = remove_unused_tags.GetNextQuestion() + case seed_risk_tracking.GetMacroDetails().ID: + nextQuestion, err = seed_risk_tracking.GetNextQuestion() + case seed_tags.GetMacroDetails().ID: + nextQuestion, err = seed_tags.GetNextQuestion() } checkErr(err) if nextQuestion.NoMoreQuestions() { @@ -864,7 +865,7 @@ func doIt(inputFilename string, outputDirectory string) { resultingMultiValueSelection := make([]string, 0) if nextQuestion.IsValueConstrained() { if nextQuestion.MultiSelect { - selectedValues := make(map[string]bool) + selectedValues := make(map[string]bool, 0) for { fmt.Println("Please select (multiple executions possible) from the following values (use number to select/deselect):") fmt.Println(" 0:", "SELECTION PROCESS FINISHED: CONTINUE TO NEXT QUESTION") @@ -938,18 +939,18 @@ func doIt(inputFilename string, outputDirectory string) { return } else if strings.ToLower(answer) == "back" { switch macroDetails.ID { - case addbuildpipeline.GetMacroDetails().ID: - message, validResult, err = addbuildpipeline.GoBack() - case addvault.GetMacroDetails().ID: - message, validResult, err = addvault.GoBack() - case prettyprint.GetMacroDetails().ID: - message, validResult, err = prettyprint.GoBack() - case removeunusedtags.GetMacroDetails().ID: - message, validResult, err = removeunusedtags.GoBack() - case seedrisktracking.GetMacroDetails().ID: - message, validResult, err = seedrisktracking.GoBack() - case seedtags.GetMacroDetails().ID: - message, validResult, err = seedtags.GoBack() + case add_build_pipeline.GetMacroDetails().ID: + message, 
validResult, err = add_build_pipeline.GoBack() + case add_vault.GetMacroDetails().ID: + message, validResult, err = add_vault.GoBack() + case pretty_print.GetMacroDetails().ID: + message, validResult, err = pretty_print.GoBack() + case remove_unused_tags.GetMacroDetails().ID: + message, validResult, err = remove_unused_tags.GoBack() + case seed_risk_tracking.GetMacroDetails().ID: + message, validResult, err = seed_risk_tracking.GoBack() + case seed_tags.GetMacroDetails().ID: + message, validResult, err = seed_tags.GoBack() } } else if len(answer) > 0 { // individual answer if nextQuestion.IsValueConstrained() { @@ -961,34 +962,34 @@ func doIt(inputFilename string, outputDirectory string) { } } switch macroDetails.ID { - case addbuildpipeline.GetMacroDetails().ID: - message, validResult, err = addbuildpipeline.ApplyAnswer(nextQuestion.ID, answer) - case addvault.GetMacroDetails().ID: - message, validResult, err = addvault.ApplyAnswer(nextQuestion.ID, answer) - case prettyprint.GetMacroDetails().ID: - message, validResult, err = prettyprint.ApplyAnswer(nextQuestion.ID, answer) - case removeunusedtags.GetMacroDetails().ID: - message, validResult, err = removeunusedtags.ApplyAnswer(nextQuestion.ID, answer) - case seedrisktracking.GetMacroDetails().ID: - message, validResult, err = seedrisktracking.ApplyAnswer(nextQuestion.ID, answer) - case seedtags.GetMacroDetails().ID: - message, validResult, err = seedtags.ApplyAnswer(nextQuestion.ID, answer) + case add_build_pipeline.GetMacroDetails().ID: + message, validResult, err = add_build_pipeline.ApplyAnswer(nextQuestion.ID, answer) + case add_vault.GetMacroDetails().ID: + message, validResult, err = add_vault.ApplyAnswer(nextQuestion.ID, answer) + case pretty_print.GetMacroDetails().ID: + message, validResult, err = pretty_print.ApplyAnswer(nextQuestion.ID, answer) + case remove_unused_tags.GetMacroDetails().ID: + message, validResult, err = remove_unused_tags.ApplyAnswer(nextQuestion.ID, answer) + case 
seed_risk_tracking.GetMacroDetails().ID: + message, validResult, err = seed_risk_tracking.ApplyAnswer(nextQuestion.ID, answer) + case seed_tags.GetMacroDetails().ID: + message, validResult, err = seed_tags.ApplyAnswer(nextQuestion.ID, answer) } } } else { switch macroDetails.ID { - case addbuildpipeline.GetMacroDetails().ID: - message, validResult, err = addbuildpipeline.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case addvault.GetMacroDetails().ID: - message, validResult, err = addvault.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case prettyprint.GetMacroDetails().ID: - message, validResult, err = prettyprint.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case removeunusedtags.GetMacroDetails().ID: - message, validResult, err = removeunusedtags.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case seedrisktracking.GetMacroDetails().ID: - message, validResult, err = seedrisktracking.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case seedtags.GetMacroDetails().ID: - message, validResult, err = seedtags.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case add_build_pipeline.GetMacroDetails().ID: + message, validResult, err = add_build_pipeline.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case add_vault.GetMacroDetails().ID: + message, validResult, err = add_vault.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case pretty_print.GetMacroDetails().ID: + message, validResult, err = pretty_print.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case remove_unused_tags.GetMacroDetails().ID: + message, validResult, err = remove_unused_tags.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case seed_risk_tracking.GetMacroDetails().ID: + message, validResult, err = seed_risk_tracking.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) 
+ case seed_tags.GetMacroDetails().ID: + message, validResult, err = seed_tags.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) } } checkErr(err) @@ -1012,18 +1013,18 @@ func doIt(inputFilename string, outputDirectory string) { validResult := true var err error switch macroDetails.ID { - case addbuildpipeline.GetMacroDetails().ID: - changes, message, validResult, err = addbuildpipeline.GetFinalChangeImpact(&modelInput) - case addvault.GetMacroDetails().ID: - changes, message, validResult, err = addvault.GetFinalChangeImpact(&modelInput) - case prettyprint.GetMacroDetails().ID: - changes, message, validResult, err = prettyprint.GetFinalChangeImpact(&modelInput) - case removeunusedtags.GetMacroDetails().ID: - changes, message, validResult, err = removeunusedtags.GetFinalChangeImpact(&modelInput) - case seedrisktracking.GetMacroDetails().ID: - changes, message, validResult, err = seedrisktracking.GetFinalChangeImpact(&modelInput) - case seedtags.GetMacroDetails().ID: - changes, message, validResult, err = seedtags.GetFinalChangeImpact(&modelInput) + case add_build_pipeline.GetMacroDetails().ID: + changes, message, validResult, err = add_build_pipeline.GetFinalChangeImpact(&modelInput) + case add_vault.GetMacroDetails().ID: + changes, message, validResult, err = add_vault.GetFinalChangeImpact(&modelInput) + case pretty_print.GetMacroDetails().ID: + changes, message, validResult, err = pretty_print.GetFinalChangeImpact(&modelInput) + case remove_unused_tags.GetMacroDetails().ID: + changes, message, validResult, err = remove_unused_tags.GetFinalChangeImpact(&modelInput) + case seed_risk_tracking.GetMacroDetails().ID: + changes, message, validResult, err = seed_risk_tracking.GetFinalChangeImpact(&modelInput) + case seed_tags.GetMacroDetails().ID: + changes, message, validResult, err = seed_tags.GetFinalChangeImpact(&modelInput) } checkErr(err) for _, change := range changes { @@ -1048,18 +1049,18 @@ func doIt(inputFilename string, outputDirectory string) { 
validResult := true var err error switch macroDetails.ID { - case addbuildpipeline.GetMacroDetails().ID: - message, validResult, err = addbuildpipeline.Execute(&modelInput) - case addvault.GetMacroDetails().ID: - message, validResult, err = addvault.Execute(&modelInput) - case prettyprint.GetMacroDetails().ID: - message, validResult, err = prettyprint.Execute(&modelInput) - case removeunusedtags.GetMacroDetails().ID: - message, validResult, err = removeunusedtags.Execute(&modelInput) - case seedrisktracking.GetMacroDetails().ID: - message, validResult, err = seedrisktracking.Execute(&modelInput) - case seedtags.GetMacroDetails().ID: - message, validResult, err = seedtags.Execute(&modelInput) + case add_build_pipeline.GetMacroDetails().ID: + message, validResult, err = add_build_pipeline.Execute(&modelInput) + case add_vault.GetMacroDetails().ID: + message, validResult, err = add_vault.Execute(&modelInput) + case pretty_print.GetMacroDetails().ID: + message, validResult, err = pretty_print.Execute(&modelInput) + case remove_unused_tags.GetMacroDetails().ID: + message, validResult, err = remove_unused_tags.Execute(&modelInput) + case seed_risk_tracking.GetMacroDetails().ID: + message, validResult, err = seed_risk_tracking.Execute(&modelInput) + case seed_tags.GetMacroDetails().ID: + message, validResult, err = seed_tags.Execute(&modelInput) } checkErr(err) if !validResult { @@ -1079,7 +1080,7 @@ func doIt(inputFilename string, outputDirectory string) { yamlBytes = model.ReformatYAML(yamlBytes) */ fmt.Println("Writing model file:", inputFilename) - err = os.WriteFile(inputFilename, yamlBytes, 0400) + err = ioutil.WriteFile(inputFilename, yamlBytes, 0400) checkErr(err) fmt.Println("Model file successfully updated") return @@ -1088,6 +1089,7 @@ func doIt(inputFilename string, outputDirectory string) { return } } + fmt.Println() return } @@ -1100,10 +1102,10 @@ func doIt(inputFilename string, outputDirectory string) { if renderDataFlowDiagram { gvFile := outputDirectory 
+ "/" + dataFlowDiagramFilenameDOT if !keepDiagramSourceFiles { - tmpFileGV, err := os.CreateTemp(model.TempFolder, dataFlowDiagramFilenameDOT) + tmpFileGV, err := ioutil.TempFile(model.TempFolder, dataFlowDiagramFilenameDOT) checkErr(err) gvFile = tmpFileGV.Name() - defer func() { _ = os.Remove(gvFile) }() + defer os.Remove(gvFile) } dotFile := writeDataFlowDiagramGraphvizDOT(gvFile, *diagramDPI) renderDataFlowDiagramGraphvizImage(dotFile, outputDirectory) @@ -1112,10 +1114,10 @@ func doIt(inputFilename string, outputDirectory string) { if renderDataAssetDiagram { gvFile := outputDirectory + "/" + dataAssetDiagramFilenameDOT if !keepDiagramSourceFiles { - tmpFile, err := os.CreateTemp(model.TempFolder, dataAssetDiagramFilenameDOT) + tmpFile, err := ioutil.TempFile(model.TempFolder, dataAssetDiagramFilenameDOT) checkErr(err) gvFile = tmpFile.Name() - defer func() { _ = os.Remove(gvFile) }() + defer os.Remove(gvFile) } dotFile := writeDataAssetDiagramGraphvizDOT(gvFile, *diagramDPI) renderDataAssetDiagramGraphvizImage(dotFile, outputDirectory) @@ -1165,7 +1167,7 @@ func doIt(inputFilename string, outputDirectory string) { // hash the YAML input file f, err := os.Open(inputFilename) checkErr(err) - defer func() { _ = f.Close() }() + defer f.Close() hasher := sha256.New() if _, err := io.Copy(hasher, f); err != nil { panic(err) @@ -1203,7 +1205,7 @@ func applyRAA() string { if *verbose { fmt.Println("Applying RAA calculation:", *raaPlugin) } - // determine plugin to load. 
+ // determine plugin to load // load plugin: open the ".so" file to load the symbols plug, err := plugin.Open(*raaPlugin) checkErr(err) @@ -1220,7 +1222,7 @@ func applyRAA() string { } func loadCustomRiskRules() { - customRiskRules = make(map[string]model.CustomRiskRule) + customRiskRules = make(map[string]model.CustomRiskRule, 0) if len(*riskRulesPlugins) > 0 { if *verbose { fmt.Println("Loading custom risk rules:", *riskRulesPlugins) @@ -1308,13 +1310,13 @@ func execute(context *gin.Context, dryRun bool) (yamlContent []byte, ok bool) { filenameUploaded := strings.TrimSpace(header.Filename) - tmpInputDir, err := os.MkdirTemp(model.TempFolder, "threagile-input-") + tmpInputDir, err := ioutil.TempDir(model.TempFolder, "threagile-input-") checkErr(err) - defer func() { _ = os.RemoveAll(tmpInputDir) }() + defer os.RemoveAll(tmpInputDir) - tmpModelFile, err := os.CreateTemp(tmpInputDir, "threagile-model-*") + tmpModelFile, err := ioutil.TempFile(tmpInputDir, "threagile-model-*") checkErr(err) - defer func() { _ = os.Remove(tmpModelFile.Name()) }() + defer os.Remove(tmpModelFile.Name()) _, err = io.Copy(tmpModelFile, fileUploaded) checkErr(err) @@ -1340,13 +1342,13 @@ func execute(context *gin.Context, dryRun bool) (yamlContent []byte, ok bool) { } } - tmpOutputDir, err := os.MkdirTemp(model.TempFolder, "threagile-output-") + tmpOutputDir, err := ioutil.TempDir(model.TempFolder, "threagile-output-") checkErr(err) - defer func() { _ = os.RemoveAll(tmpOutputDir) }() + defer os.RemoveAll(tmpOutputDir) - tmpResultFile, err := os.CreateTemp(model.TempFolder, "threagile-result-*.zip") + tmpResultFile, err := ioutil.TempFile(model.TempFolder, "threagile-result-*.zip") checkErr(err) - defer func() { _ = os.Remove(tmpResultFile.Name()) }() + defer os.Remove(tmpResultFile.Name()) if dryRun { doItViaRuntimeCall(yamlFile, tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, false, false, false, true, true, 
true, 40) @@ -1355,9 +1357,9 @@ func execute(context *gin.Context, dryRun bool) (yamlContent []byte, ok bool) { } checkErr(err) - yamlContent, err = os.ReadFile(yamlFile) + yamlContent, err = ioutil.ReadFile(yamlFile) checkErr(err) - err = os.WriteFile(tmpOutputDir+"/threagile.yaml", yamlContent, 0400) + err = ioutil.WriteFile(tmpOutputDir+"/threagile.yaml", yamlContent, 0400) checkErr(err) if !dryRun { @@ -1568,153 +1570,153 @@ func startServer() { router.DELETE("/models/:model-id/shared-runtimes/:shared-runtime-id", deleteSharedRuntime) fmt.Println("Threagile server running...") - _ = router.Run(":" + strconv.Itoa(*serverPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified + router.Run(":" + strconv.Itoa(*serverPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified } func exampleFile(context *gin.Context) { - example, err := os.ReadFile("/app/threagile-example-model.yaml") + example, err := ioutil.ReadFile("/app/threagile-example-model.yaml") checkErr(err) context.Data(http.StatusOK, gin.MIMEYAML, example) } func stubFile(context *gin.Context) { - stub, err := os.ReadFile("/app/threagile-stub-model.yaml") + stub, err := ioutil.ReadFile("/app/threagile-stub-model.yaml") checkErr(err) context.Data(http.StatusOK, gin.MIMEYAML, addSupportedTags(stub)) // TODO use also the MIMEYAML way of serving YAML in model export? 
} func addSupportedTags(input []byte) []byte { // add distinct tags as "tags_available" - supportedTags := make(map[string]bool) + supportedTags := make(map[string]bool, 0) for _, customRule := range customRiskRules { for _, tag := range customRule.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } } - for _, tag := range accidentalsecretleak.SupportedTags() { + for _, tag := range accidental_secret_leak.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range codebackdooring.SupportedTags() { + for _, tag := range code_backdooring.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range containerbaseimagebackdooring.SupportedTags() { + for _, tag := range container_baseimage_backdooring.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range containerplatformescape.SupportedTags() { + for _, tag := range container_platform_escape.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range crosssiterequestforgery.SupportedTags() { + for _, tag := range cross_site_request_forgery.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range crosssitescripting.SupportedTags() { + for _, tag := range cross_site_scripting.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range dosriskyaccessacrosstrustboundary.SupportedTags() { + for _, tag := range dos_risky_access_across_trust_boundary.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range incompletemodel.SupportedTags() { + for _, tag := range incomplete_model.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range ldapinjection.SupportedTags() { + for _, tag := range ldap_injection.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missingauthentication.SupportedTags() { + for _, tag := range missing_authentication.SupportedTags() { 
supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missingauthenticationsecondfactor.SupportedTags() { + for _, tag := range missing_authentication_second_factor.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missingbuildinfrastructure.SupportedTags() { + for _, tag := range missing_build_infrastructure.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missingcloudhardening.SupportedTags() { + for _, tag := range missing_cloud_hardening.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missingfilevalidation.SupportedTags() { + for _, tag := range missing_file_validation.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missinghardening.SupportedTags() { + for _, tag := range missing_hardening.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missingidentitypropagation.SupportedTags() { + for _, tag := range missing_identity_propagation.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missingidentityproviderisolation.SupportedTags() { + for _, tag := range missing_identity_provider_isolation.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missingidentitystore.SupportedTags() { + for _, tag := range missing_identity_store.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missingnetworksegmentation.SupportedTags() { + for _, tag := range missing_network_segmentation.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missingvault.SupportedTags() { + for _, tag := range missing_vault.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missingvaultisolation.SupportedTags() { + for _, tag := range missing_vault_isolation.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range 
missingwaf.SupportedTags() { + for _, tag := range missing_waf.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range mixedtargetsonsharedruntime.SupportedTags() { + for _, tag := range mixed_targets_on_shared_runtime.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range pathtraversal.SupportedTags() { + for _, tag := range path_traversal.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range pushinsteadofpulldeployment.SupportedTags() { + for _, tag := range push_instead_of_pull_deployment.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range searchqueryinjection.SupportedTags() { + for _, tag := range search_query_injection.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range serversiderequestforgery.SupportedTags() { + for _, tag := range server_side_request_forgery.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range serviceregistrypoisoning.SupportedTags() { + for _, tag := range service_registry_poisoning.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range sqlnosqlinjection.SupportedTags() { + for _, tag := range sql_nosql_injection.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range uncheckeddeployment.SupportedTags() { + for _, tag := range unchecked_deployment.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unencryptedasset.SupportedTags() { + for _, tag := range unencrypted_asset.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unencryptedcommunication.SupportedTags() { + for _, tag := range unencrypted_communication.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unguardedaccessfrominternet.SupportedTags() { + for _, tag := range unguarded_access_from_internet.SupportedTags() { 
supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unguardeddirectdatastoreaccess.SupportedTags() { + for _, tag := range unguarded_direct_datastore_access.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unnecessarycommunicationlink.SupportedTags() { + for _, tag := range unnecessary_communication_link.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unnecessarydataasset.SupportedTags() { + for _, tag := range unnecessary_data_asset.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unnecessarydatatransfer.SupportedTags() { + for _, tag := range unnecessary_data_transfer.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unnecessarytechnicalasset.SupportedTags() { + for _, tag := range unnecessary_technical_asset.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range untrusteddeserialization.SupportedTags() { + for _, tag := range untrusted_deserialization.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range wrongcommunicationlinkcontent.SupportedTags() { + for _, tag := range wrong_communication_link_content.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range wrongtrustboundarycontent.SupportedTags() { + for _, tag := range wrong_trust_boundary_content.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range xmlexternalentity.SupportedTags() { + for _, tag := range xml_external_entity.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } tags := make([]string, 0, len(supportedTags)) @@ -1745,8 +1747,8 @@ func addSupportedTags(input []byte) []byte { const keySize = 32 type timeoutStruct struct { - xorRand []byte - createdNanoTime, lastAccessedNanoTime int64 + xorRand []byte + createdNanotime, lastAcessedNanotime int64 } var mapTokenHashToTimeoutStruct = 
make(map[string]timeoutStruct) @@ -1778,9 +1780,9 @@ func createToken(context *gin.Context) { tokenHash := hashSHA256(token) housekeepingTokenMaps() mapTokenHashToTimeoutStruct[tokenHash] = timeoutStruct{ - xorRand: xorBytesArr, - createdNanoTime: now, - lastAccessedNanoTime: now, + xorRand: xorBytesArr, + createdNanotime: now, + lastAcessedNanotime: now, } mapFolderNameToTokenHash[folderName] = tokenHash context.JSON(http.StatusCreated, gin.H{ @@ -1822,14 +1824,14 @@ func housekeepingTokenMaps() { if extremeShortTimeoutsForTesting { // remove all elements older than 1 minute (= 60000000000 ns) soft // and all elements older than 3 minutes (= 180000000000 ns) hard - if now-val.lastAccessedNanoTime > 60000000000 || now-val.createdNanoTime > 180000000000 { + if now-val.lastAcessedNanotime > 60000000000 || now-val.createdNanotime > 180000000000 { fmt.Println("About to remove a token hash from maps") deleteTokenHashFromMaps(tokenHash) } } else { // remove all elements older than 30 minutes (= 1800000000000 ns) soft // and all elements older than 10 hours (= 36000000000000 ns) hard - if now-val.lastAccessedNanoTime > 1800000000000 || now-val.createdNanoTime > 36000000000000 { + if now-val.lastAcessedNanotime > 1800000000000 || now-val.createdNanotime > 36000000000000 { deleteTokenHashFromMaps(tokenHash) } } @@ -1889,30 +1891,30 @@ func analyzeModelOnServerDirectly(context *gin.Context) { if !ok { return } - tmpModelFile, err := os.CreateTemp(model.TempFolder, "threagile-direct-analyze-*") + tmpModelFile, err := ioutil.TempFile(model.TempFolder, "threagile-direct-analyze-*") if err != nil { handleErrorInServiceCall(err, context) return } - defer func() { _ = os.Remove(tmpModelFile.Name()) }() - tmpOutputDir, err := os.MkdirTemp(model.TempFolder, "threagile-direct-analyze-") + defer os.Remove(tmpModelFile.Name()) + tmpOutputDir, err := ioutil.TempDir(model.TempFolder, "threagile-direct-analyze-") if err != nil { handleErrorInServiceCall(err, context) return } - defer 
func() { _ = os.RemoveAll(tmpOutputDir) }() - tmpResultFile, err := os.CreateTemp(model.TempFolder, "threagile-result-*.zip") + defer os.RemoveAll(tmpOutputDir) + tmpResultFile, err := ioutil.TempFile(model.TempFolder, "threagile-result-*.zip") checkErr(err) - defer func() { _ = os.Remove(tmpResultFile.Name()) }() + defer os.Remove(tmpResultFile.Name()) - err = os.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) + err = ioutil.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, true, true, true, true, true, true, true, true, dpi) if err != nil { handleErrorInServiceCall(err, context) return } - err = os.WriteFile(tmpOutputDir+"/threagile.yaml", []byte(yamlText), 0400) + err = ioutil.WriteFile(tmpOutputDir+"/threagile.yaml", []byte(yamlText), 0400) if err != nil { handleErrorInServiceCall(err, context) return @@ -2008,19 +2010,19 @@ func streamResponse(context *gin.Context, responseType responseType) { if !ok { return } - tmpModelFile, err := os.CreateTemp(model.TempFolder, "threagile-render-*") + tmpModelFile, err := ioutil.TempFile(model.TempFolder, "threagile-render-*") if err != nil { handleErrorInServiceCall(err, context) return } - defer func() { _ = os.Remove(tmpModelFile.Name()) }() - tmpOutputDir, err := os.MkdirTemp(model.TempFolder, "threagile-render-") + defer os.Remove(tmpModelFile.Name()) + tmpOutputDir, err := ioutil.TempDir(model.TempFolder, "threagile-render-") if err != nil { handleErrorInServiceCall(err, context) return } - defer func() { _ = os.RemoveAll(tmpOutputDir) }() - err = os.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) + defer os.RemoveAll(tmpOutputDir) + err = ioutil.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) if responseType == dataFlowDiagram { doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, 
*skipRiskRules, *ignoreOrphanedRiskTracking, true, false, false, false, false, false, false, false, dpi) if err != nil { @@ -2062,7 +2064,7 @@ func streamResponse(context *gin.Context, responseType responseType) { handleErrorInServiceCall(err, context) return } - json, err := os.ReadFile(tmpOutputDir + "/" + jsonRisksFilename) + json, err := ioutil.ReadFile(tmpOutputDir + "/" + jsonRisksFilename) if err != nil { handleErrorInServiceCall(err, context) return @@ -2074,7 +2076,7 @@ func streamResponse(context *gin.Context, responseType responseType) { handleErrorInServiceCall(err, context) return } - json, err := os.ReadFile(tmpOutputDir + "/" + jsonTechnicalAssetsFilename) + json, err := ioutil.ReadFile(tmpOutputDir + "/" + jsonTechnicalAssetsFilename) if err != nil { handleErrorInServiceCall(err, context) return @@ -2086,7 +2088,7 @@ func streamResponse(context *gin.Context, responseType responseType) { handleErrorInServiceCall(err, context) return } - json, err := os.ReadFile(tmpOutputDir + "/" + jsonStatsFilename) + json, err := ioutil.ReadFile(tmpOutputDir + "/" + jsonStatsFilename) if err != nil { handleErrorInServiceCall(err, context) return @@ -2104,14 +2106,14 @@ func importModel(context *gin.Context) { lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - aUuid := context.Param("model-id") // UUID is syntactically validated in readModel+checkModelFolder (next line) via uuid.Parse(modelUUID) - _, _, ok = readModel(context, aUuid, key, folderNameOfKey) + uuid := context.Param("model-id") // UUID is syntactically validated in readModel+checkModelFolder (next line) via uuid.Parse(modelUUID) + _, _, ok = readModel(context, uuid, key, folderNameOfKey) if ok { // first analyze it simply by executing the full risk process (just discard the result) to ensure that everything would work yamlContent, ok := execute(context, true) if ok { // if we're here, then no problem was raised, so ok to proceed - ok = writeModelYAML(context, string(yamlContent), key, 
folderNameForModel(folderNameOfKey, aUuid), "Model Import", false) + ok = writeModelYAML(context, string(yamlContent), key, folderNameForModel(folderNameOfKey, uuid), "Model Import", false) if ok { context.JSON(http.StatusCreated, gin.H{ "message": "model imported", @@ -2123,7 +2125,7 @@ func importModel(context *gin.Context) { func stats(context *gin.Context) { keyCount, modelCount := 0, 0 - keyFolders, err := os.ReadDir(baseFolder) + keyFolders, err := ioutil.ReadDir(baseFolder) if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -2134,7 +2136,7 @@ func stats(context *gin.Context) { for _, keyFolder := range keyFolders { if len(keyFolder.Name()) == 128 { // it's a sha512 token hash probably, so count it as token folder for the stats keyCount++ - modelFolders, err := os.ReadDir(baseFolder + "/" + keyFolder.Name()) + modelFolders, err := ioutil.ReadDir(baseFolder + "/" + keyFolder.Name()) if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -2168,7 +2170,7 @@ func getDataAsset(context *gin.Context) { modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, dataAsset := range modelInput.DataAssets { + for title, dataAsset := range modelInput.Data_assets { if dataAsset.ID == context.Param("data-asset-id") { context.JSON(http.StatusOK, gin.H{ title: dataAsset, @@ -2193,81 +2195,81 @@ func deleteDataAsset(context *gin.Context) { if ok { referencesDeleted := false // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, dataAsset := range modelInput.DataAssets { + for title, dataAsset := range modelInput.Data_assets { if dataAsset.ID == context.Param("data-asset-id") { // also remove all usages of this data asset !! 
- for _, techAsset := range modelInput.TechnicalAssets { - if techAsset.DataAssetsProcessed != nil { - for i, parsedChangeCandidateAsset := range techAsset.DataAssetsProcessed { + for _, techAsset := range modelInput.Technical_assets { + if techAsset.Data_assets_processed != nil { + for i, parsedChangeCandidateAsset := range techAsset.Data_assets_processed { referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) if referencedAsset == dataAsset.ID { // apply the removal referencesDeleted = true // Remove the element at index i // TODO needs more testing - copy(techAsset.DataAssetsProcessed[i:], techAsset.DataAssetsProcessed[i+1:]) // Shift a[i+1:] left one index. - techAsset.DataAssetsProcessed[len(techAsset.DataAssetsProcessed)-1] = "" // Erase last element (write zero value). - techAsset.DataAssetsProcessed = techAsset.DataAssetsProcessed[:len(techAsset.DataAssetsProcessed)-1] // Truncate slice. + copy(techAsset.Data_assets_processed[i:], techAsset.Data_assets_processed[i+1:]) // Shift a[i+1:] left one index. + techAsset.Data_assets_processed[len(techAsset.Data_assets_processed)-1] = "" // Erase last element (write zero value). + techAsset.Data_assets_processed = techAsset.Data_assets_processed[:len(techAsset.Data_assets_processed)-1] // Truncate slice. } } } - if techAsset.DataAssetsStored != nil { - for i, parsedChangeCandidateAsset := range techAsset.DataAssetsStored { + if techAsset.Data_assets_stored != nil { + for i, parsedChangeCandidateAsset := range techAsset.Data_assets_stored { referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) if referencedAsset == dataAsset.ID { // apply the removal referencesDeleted = true // Remove the element at index i // TODO needs more testing - copy(techAsset.DataAssetsStored[i:], techAsset.DataAssetsStored[i+1:]) // Shift a[i+1:] left one index. - techAsset.DataAssetsStored[len(techAsset.DataAssetsStored)-1] = "" // Erase last element (write zero value). 
- techAsset.DataAssetsStored = techAsset.DataAssetsStored[:len(techAsset.DataAssetsStored)-1] // Truncate slice. + copy(techAsset.Data_assets_stored[i:], techAsset.Data_assets_stored[i+1:]) // Shift a[i+1:] left one index. + techAsset.Data_assets_stored[len(techAsset.Data_assets_stored)-1] = "" // Erase last element (write zero value). + techAsset.Data_assets_stored = techAsset.Data_assets_stored[:len(techAsset.Data_assets_stored)-1] // Truncate slice. } } } - if techAsset.CommunicationLinks != nil { - for title, commLink := range techAsset.CommunicationLinks { - for i, dataAssetSent := range commLink.DataAssetsSent { + if techAsset.Communication_links != nil { + for title, commLink := range techAsset.Communication_links { + for i, dataAssetSent := range commLink.Data_assets_sent { referencedAsset := fmt.Sprintf("%v", dataAssetSent) if referencedAsset == dataAsset.ID { // apply the removal referencesDeleted = true // Remove the element at index i // TODO needs more testing - copy(techAsset.CommunicationLinks[title].DataAssetsSent[i:], techAsset.CommunicationLinks[title].DataAssetsSent[i+1:]) // Shift a[i+1:] left one index. - techAsset.CommunicationLinks[title].DataAssetsSent[len(techAsset.CommunicationLinks[title].DataAssetsSent)-1] = "" // Erase last element (write zero value). - x := techAsset.CommunicationLinks[title] - x.DataAssetsSent = techAsset.CommunicationLinks[title].DataAssetsSent[:len(techAsset.CommunicationLinks[title].DataAssetsSent)-1] // Truncate slice. - techAsset.CommunicationLinks[title] = x + copy(techAsset.Communication_links[title].Data_assets_sent[i:], techAsset.Communication_links[title].Data_assets_sent[i+1:]) // Shift a[i+1:] left one index. + techAsset.Communication_links[title].Data_assets_sent[len(techAsset.Communication_links[title].Data_assets_sent)-1] = "" // Erase last element (write zero value). 
+ x := techAsset.Communication_links[title] + x.Data_assets_sent = techAsset.Communication_links[title].Data_assets_sent[:len(techAsset.Communication_links[title].Data_assets_sent)-1] // Truncate slice. + techAsset.Communication_links[title] = x } } - for i, dataAssetReceived := range commLink.DataAssetsReceived { + for i, dataAssetReceived := range commLink.Data_assets_received { referencedAsset := fmt.Sprintf("%v", dataAssetReceived) if referencedAsset == dataAsset.ID { // apply the removal referencesDeleted = true // Remove the element at index i // TODO needs more testing - copy(techAsset.CommunicationLinks[title].DataAssetsReceived[i:], techAsset.CommunicationLinks[title].DataAssetsReceived[i+1:]) // Shift a[i+1:] left one index. - techAsset.CommunicationLinks[title].DataAssetsReceived[len(techAsset.CommunicationLinks[title].DataAssetsReceived)-1] = "" // Erase last element (write zero value). - x := techAsset.CommunicationLinks[title] - x.DataAssetsReceived = techAsset.CommunicationLinks[title].DataAssetsReceived[:len(techAsset.CommunicationLinks[title].DataAssetsReceived)-1] // Truncate slice. - techAsset.CommunicationLinks[title] = x + copy(techAsset.Communication_links[title].Data_assets_received[i:], techAsset.Communication_links[title].Data_assets_received[i+1:]) // Shift a[i+1:] left one index. + techAsset.Communication_links[title].Data_assets_received[len(techAsset.Communication_links[title].Data_assets_received)-1] = "" // Erase last element (write zero value). + x := techAsset.Communication_links[title] + x.Data_assets_received = techAsset.Communication_links[title].Data_assets_received[:len(techAsset.Communication_links[title].Data_assets_received)-1] // Truncate slice. 
+ techAsset.Communication_links[title] = x } } } } } - for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { - if individualRiskCat.RisksIdentified != nil { - for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { - if individualRiskInstance.MostRelevantDataAsset == dataAsset.ID { // apply the removal + for indivRiskCatTitle, indivRiskCat := range modelInput.Individual_risk_categories { + if indivRiskCat.Risks_identified != nil { + for indivRiskInstanceTitle, indivRiskInstance := range indivRiskCat.Risks_identified { + if indivRiskInstance.Most_relevant_data_asset == dataAsset.ID { // apply the removal referencesDeleted = true - x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] - x.MostRelevantDataAsset = "" // TODO needs more testing - modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x + x := modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] + x.Most_relevant_data_asset = "" // TODO needs more testing + modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] = x } } } } // remove it itself - delete(modelInput.DataAssets, title) + delete(modelInput.Data_assets, title) ok = writeModel(context, key, folderNameOfKey, &modelInput, "Data Asset Deletion") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2295,7 +2297,7 @@ func setSharedRuntime(context *gin.Context) { modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, sharedRuntime := range modelInput.SharedRuntimes { + for title, sharedRuntime := range modelInput.Shared_runtimes { if sharedRuntime.ID == context.Param("shared-runtime-id") { payload := payloadSharedRuntime{} err := 
context.BindJSON(&payload) @@ -2311,17 +2313,17 @@ func setSharedRuntime(context *gin.Context) { return } // in order to also update the title, remove the shared runtime from the map and re-insert it (with new key) - delete(modelInput.SharedRuntimes, title) - modelInput.SharedRuntimes[payload.Title] = sharedRuntimeInput + delete(modelInput.Shared_runtimes, title) + modelInput.Shared_runtimes[payload.Title] = sharedRuntimeInput idChanged := sharedRuntimeInput.ID != sharedRuntime.ID if idChanged { // ID-CHANGE-PROPAGATION - for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { - if individualRiskCat.RisksIdentified != nil { - for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { - if individualRiskInstance.MostRelevantSharedRuntime == sharedRuntime.ID { // apply the ID change - x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] - x.MostRelevantSharedRuntime = sharedRuntimeInput.ID // TODO needs more testing - modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x + for indivRiskCatTitle, indivRiskCat := range modelInput.Individual_risk_categories { + if indivRiskCat.Risks_identified != nil { + for indivRiskInstanceTitle, indivRiskInstance := range indivRiskCat.Risks_identified { + if indivRiskInstance.Most_relevant_shared_runtime == sharedRuntime.ID { // apply the ID change + x := modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] + x.Most_relevant_shared_runtime = sharedRuntimeInput.ID // TODO needs more testing + modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] = x } } } @@ -2354,7 +2356,7 @@ func setDataAsset(context *gin.Context) { modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { // yes, here keyed by title in YAML for better 
readability in the YAML file itself - for title, dataAsset := range modelInput.DataAssets { + for title, dataAsset := range modelInput.Data_assets { if dataAsset.ID == context.Param("data-asset-id") { payload := payloadDataAsset{} err := context.BindJSON(&payload) @@ -2370,52 +2372,52 @@ func setDataAsset(context *gin.Context) { return } // in order to also update the title, remove the asset from the map and re-insert it (with new key) - delete(modelInput.DataAssets, title) - modelInput.DataAssets[payload.Title] = dataAssetInput + delete(modelInput.Data_assets, title) + modelInput.Data_assets[payload.Title] = dataAssetInput idChanged := dataAssetInput.ID != dataAsset.ID if idChanged { // ID-CHANGE-PROPAGATION // also update all usages to point to the new (changed) ID !! - for techAssetTitle, techAsset := range modelInput.TechnicalAssets { - if techAsset.DataAssetsProcessed != nil { - for i, parsedChangeCandidateAsset := range techAsset.DataAssetsProcessed { + for techAssetTitle, techAsset := range modelInput.Technical_assets { + if techAsset.Data_assets_processed != nil { + for i, parsedChangeCandidateAsset := range techAsset.Data_assets_processed { referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.TechnicalAssets[techAssetTitle].DataAssetsProcessed[i] = dataAssetInput.ID + modelInput.Technical_assets[techAssetTitle].Data_assets_processed[i] = dataAssetInput.ID } } } - if techAsset.DataAssetsStored != nil { - for i, parsedChangeCandidateAsset := range techAsset.DataAssetsStored { + if techAsset.Data_assets_stored != nil { + for i, parsedChangeCandidateAsset := range techAsset.Data_assets_stored { referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.TechnicalAssets[techAssetTitle].DataAssetsStored[i] = dataAssetInput.ID + modelInput.Technical_assets[techAssetTitle].Data_assets_stored[i] = 
dataAssetInput.ID } } } - if techAsset.CommunicationLinks != nil { - for title, commLink := range techAsset.CommunicationLinks { - for i, dataAssetSent := range commLink.DataAssetsSent { + if techAsset.Communication_links != nil { + for title, commLink := range techAsset.Communication_links { + for i, dataAssetSent := range commLink.Data_assets_sent { referencedAsset := fmt.Sprintf("%v", dataAssetSent) if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.TechnicalAssets[techAssetTitle].CommunicationLinks[title].DataAssetsSent[i] = dataAssetInput.ID + modelInput.Technical_assets[techAssetTitle].Communication_links[title].Data_assets_sent[i] = dataAssetInput.ID } } - for i, dataAssetReceived := range commLink.DataAssetsReceived { + for i, dataAssetReceived := range commLink.Data_assets_received { referencedAsset := fmt.Sprintf("%v", dataAssetReceived) if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.TechnicalAssets[techAssetTitle].CommunicationLinks[title].DataAssetsReceived[i] = dataAssetInput.ID + modelInput.Technical_assets[techAssetTitle].Communication_links[title].Data_assets_received[i] = dataAssetInput.ID } } } } } - for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { - if individualRiskCat.RisksIdentified != nil { - for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { - if individualRiskInstance.MostRelevantDataAsset == dataAsset.ID { // apply the ID change - x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] - x.MostRelevantDataAsset = dataAssetInput.ID // TODO needs more testing - modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x + for indivRiskCatTitle, indivRiskCat := range modelInput.Individual_risk_categories { + if indivRiskCat.Risks_identified != nil { + for indivRiskInstanceTitle, indivRiskInstance := 
range indivRiskCat.Risks_identified { + if indivRiskInstance.Most_relevant_data_asset == dataAsset.ID { // apply the ID change + x := modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] + x.Most_relevant_data_asset = dataAssetInput.ID // TODO needs more testing + modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] = x } } } @@ -2448,7 +2450,7 @@ func getSharedRuntime(context *gin.Context) { modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, sharedRuntime := range modelInput.SharedRuntimes { + for title, sharedRuntime := range modelInput.Shared_runtimes { if sharedRuntime.ID == context.Param("shared-runtime-id") { context.JSON(http.StatusOK, gin.H{ title: sharedRuntime, @@ -2481,14 +2483,14 @@ func createNewSharedRuntime(context *gin.Context) { return } // yes, here keyed by title in YAML for better readability in the YAML file itself - if _, exists := modelInput.SharedRuntimes[payload.Title]; exists { + if _, exists := modelInput.Shared_runtimes[payload.Title]; exists { context.JSON(http.StatusConflict, gin.H{ "error": "shared runtime with this title already exists", }) return } - // but later it will in memory keyed by its "id", so do this uniqueness check also - for _, runtime := range modelInput.SharedRuntimes { + // but later it will in memory keyed by it's "id", so do this uniqueness check also + for _, runtime := range modelInput.Shared_runtimes { if runtime.ID == payload.Id { context.JSON(http.StatusConflict, gin.H{ "error": "shared runtime with this id already exists", @@ -2496,7 +2498,7 @@ func createNewSharedRuntime(context *gin.Context) { return } } - if !checkTechnicalAssetsExisting(modelInput, payload.TechnicalAssetsRunning) { + if !checkTechnicalAssetsExisting(modelInput, payload.Technical_assets_running) { 
context.JSON(http.StatusBadRequest, gin.H{ "error": "referenced technical asset does not exist", }) @@ -2506,10 +2508,10 @@ func createNewSharedRuntime(context *gin.Context) { if !ok { return } - if modelInput.SharedRuntimes == nil { - modelInput.SharedRuntimes = make(map[string]model.InputSharedRuntime) + if modelInput.Shared_runtimes == nil { + modelInput.Shared_runtimes = make(map[string]model.InputSharedRuntime) } - modelInput.SharedRuntimes[payload.Title] = sharedRuntimeInput + modelInput.Shared_runtimes[payload.Title] = sharedRuntimeInput ok = writeModel(context, key, folderNameOfKey, &modelInput, "Shared Runtime Creation") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2523,7 +2525,7 @@ func createNewSharedRuntime(context *gin.Context) { func checkTechnicalAssetsExisting(modelInput model.ModelInput, techAssetIDs []string) (ok bool) { for _, techAssetID := range techAssetIDs { exists := false - for _, val := range modelInput.TechnicalAssets { + for _, val := range modelInput.Technical_assets { if val.ID == techAssetID { exists = true break @@ -2536,12 +2538,12 @@ func checkTechnicalAssetsExisting(modelInput model.ModelInput, techAssetIDs []st return true } -func populateSharedRuntime(_ *gin.Context, payload payloadSharedRuntime) (sharedRuntimeInput model.InputSharedRuntime, ok bool) { +func populateSharedRuntime(context *gin.Context, payload payloadSharedRuntime) (sharedRuntimeInput model.InputSharedRuntime, ok bool) { sharedRuntimeInput = model.InputSharedRuntime{ - ID: payload.Id, - Description: payload.Description, - Tags: lowerCaseAndTrim(payload.Tags), - TechnicalAssetsRunning: payload.TechnicalAssetsRunning, + ID: payload.Id, + Description: payload.Description, + Tags: lowerCaseAndTrim(payload.Tags), + Technical_assets_running: payload.Technical_assets_running, } return sharedRuntimeInput, true } @@ -2557,23 +2559,23 @@ func deleteSharedRuntime(context *gin.Context) { if ok { referencesDeleted := false // yes, here keyed by title in YAML for better 
readability in the YAML file itself - for title, sharedRuntime := range modelInput.SharedRuntimes { + for title, sharedRuntime := range modelInput.Shared_runtimes { if sharedRuntime.ID == context.Param("shared-runtime-id") { // also remove all usages of this shared runtime !! - for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { - if individualRiskCat.RisksIdentified != nil { - for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { - if individualRiskInstance.MostRelevantSharedRuntime == sharedRuntime.ID { // apply the removal + for indivRiskCatTitle, indivRiskCat := range modelInput.Individual_risk_categories { + if indivRiskCat.Risks_identified != nil { + for indivRiskInstanceTitle, indivRiskInstance := range indivRiskCat.Risks_identified { + if indivRiskInstance.Most_relevant_shared_runtime == sharedRuntime.ID { // apply the removal referencesDeleted = true - x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] - x.MostRelevantSharedRuntime = "" // TODO needs more testing - modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x + x := modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] + x.Most_relevant_shared_runtime = "" // TODO needs more testing + modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] = x } } } } // remove it itself - delete(modelInput.SharedRuntimes, title) + delete(modelInput.Shared_runtimes, title) ok = writeModel(context, key, folderNameOfKey, &modelInput, "Shared Runtime Deletion") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2610,14 +2612,14 @@ func createNewDataAsset(context *gin.Context) { return } // yes, here keyed by title in YAML for better readability in the YAML file itself - if _, exists := modelInput.DataAssets[payload.Title]; exists { + if 
_, exists := modelInput.Data_assets[payload.Title]; exists { context.JSON(http.StatusConflict, gin.H{ "error": "data asset with this title already exists", }) return } - // but later it will in memory keyed by its "id", so do this uniqueness check also - for _, asset := range modelInput.DataAssets { + // but later it will in memory keyed by it's "id", so do this uniqueness check also + for _, asset := range modelInput.Data_assets { if asset.ID == payload.Id { context.JSON(http.StatusConflict, gin.H{ "error": "data asset with this id already exists", @@ -2629,10 +2631,10 @@ func createNewDataAsset(context *gin.Context) { if !ok { return } - if modelInput.DataAssets == nil { - modelInput.DataAssets = make(map[string]model.InputDataAsset) + if modelInput.Data_assets == nil { + modelInput.Data_assets = make(map[string]model.InputDataAsset) } - modelInput.DataAssets[payload.Title] = dataAssetInput + modelInput.Data_assets[payload.Title] = dataAssetInput ok = writeModel(context, key, folderNameOfKey, &modelInput, "Data Asset Creation") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2670,17 +2672,17 @@ func populateDataAsset(context *gin.Context, payload payloadDataAsset) (dataAsse return dataAssetInput, false } dataAssetInput = model.InputDataAsset{ - ID: payload.Id, - Description: payload.Description, - Usage: usage.String(), - Tags: lowerCaseAndTrim(payload.Tags), - Origin: payload.Origin, - Owner: payload.Owner, - Quantity: quantity.String(), - Confidentiality: confidentiality.String(), - Integrity: integrity.String(), - Availability: availability.String(), - JustificationCiaRating: payload.JustificationCiaRating, + ID: payload.Id, + Description: payload.Description, + Usage: usage.String(), + Tags: lowerCaseAndTrim(payload.Tags), + Origin: payload.Origin, + Owner: payload.Owner, + Quantity: quantity.String(), + Confidentiality: confidentiality.String(), + Integrity: integrity.String(), + Availability: availability.String(), + Justification_cia_rating: 
payload.Justification_cia_rating, } return dataAssetInput, true } @@ -2692,9 +2694,9 @@ func getDataAssets(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, aModel.DataAssets) + context.JSON(http.StatusOK, model.Data_assets) } } @@ -2705,9 +2707,9 @@ func getTrustBoundaries(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, aModel.TrustBoundaries) + context.JSON(http.StatusOK, model.Trust_boundaries) } } @@ -2718,9 +2720,9 @@ func getSharedRuntimes(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, aModel.SharedRuntimes) + context.JSON(http.StatusOK, model.Shared_runtimes) } } @@ -2741,9 +2743,9 @@ func getModel(context *gin.Context) { defer unlockFolder(folderNameOfKey) _, yamlText, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { - tmpResultFile, err := os.CreateTemp(model.TempFolder, "threagile-*.yaml") + tmpResultFile, err := ioutil.TempFile(model.TempFolder, "threagile-*.yaml") checkErr(err) - err = os.WriteFile(tmpResultFile.Name(), []byte(yamlText), 0400) + err = ioutil.WriteFile(tmpResultFile.Name(), []byte(yamlText), 0400) if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -2751,16 +2753,16 @@ func getModel(context *gin.Context) { }) return } - defer 
func() { _ = os.Remove(tmpResultFile.Name()) }() + defer os.Remove(tmpResultFile.Name()) context.FileAttachment(tmpResultFile.Name(), "threagile.yaml") } } type payloadModels struct { - ID string `json:"id"` - Title string `json:"title"` - TimestampCreated time.Time `json:"timestamp_created"` - TimestampModified time.Time `json:"timestamp_modified"` + ID string `json:"id"` + Title string `json:"title"` + Timestamp_created time.Time `json:"timestamp_created"` + Timestamp_modified time.Time `json:"timestamp_modified"` } type payloadCover struct { @@ -2770,10 +2772,10 @@ type payloadCover struct { } type payloadOverview struct { - ManagementSummaryComment string `json:"management_summary_comment"` - BusinessCriticality string `json:"business_criticality"` - BusinessOverview model.Overview `json:"business_overview"` - TechnicalOverview model.Overview `json:"technical_overview"` + Management_summary_comment string `json:"management_summary_comment"` + Business_criticality string `json:"business_criticality"` + Business_overview model.Overview `json:"business_overview"` + Technical_overview model.Overview `json:"technical_overview"` } type payloadAbuseCases map[string]string @@ -2781,26 +2783,26 @@ type payloadAbuseCases map[string]string type payloadSecurityRequirements map[string]string type payloadDataAsset struct { - Title string `json:"title"` - Id string `json:"id"` - Description string `json:"description"` - Usage string `json:"usage"` - Tags []string `json:"tags"` - Origin string `json:"origin"` - Owner string `json:"owner"` - Quantity string `json:"quantity"` - Confidentiality string `json:"confidentiality"` - Integrity string `json:"integrity"` - Availability string `json:"availability"` - JustificationCiaRating string `json:"justification_cia_rating"` + Title string `json:"title"` + Id string `json:"id"` + Description string `json:"description"` + Usage string `json:"usage"` + Tags []string `json:"tags"` + Origin string `json:"origin"` + Owner string 
`json:"owner"` + Quantity string `json:"quantity"` + Confidentiality string `json:"confidentiality"` + Integrity string `json:"integrity"` + Availability string `json:"availability"` + Justification_cia_rating string `json:"justification_cia_rating"` } type payloadSharedRuntime struct { - Title string `json:"title"` - Id string `json:"id"` - Description string `json:"description"` - Tags []string `json:"tags"` - TechnicalAssetsRunning []string `json:"technical_assets_running"` + Title string `json:"title"` + Id string `json:"id"` + Description string `json:"description"` + Tags []string `json:"tags"` + Technical_assets_running []string `json:"technical_assets_running"` } func setSecurityRequirements(context *gin.Context) { @@ -2821,7 +2823,7 @@ func setSecurityRequirements(context *gin.Context) { }) return } - modelInput.SecurityRequirements = payload + modelInput.Security_requirements = payload ok = writeModel(context, key, folderNameOfKey, &modelInput, "Security Requirements Update") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2838,9 +2840,9 @@ func getSecurityRequirements(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, aModel.SecurityRequirements) + context.JSON(http.StatusOK, model.Security_requirements) } } @@ -2862,7 +2864,7 @@ func setAbuseCases(context *gin.Context) { }) return } - modelInput.AbuseCases = payload + modelInput.Abuse_cases = payload ok = writeModel(context, key, folderNameOfKey, &modelInput, "Abuse Cases Update") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2879,9 +2881,9 @@ func getAbuseCases(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + model, _, ok := 
readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, aModel.AbuseCases) + context.JSON(http.StatusOK, model.Abuse_cases) } } @@ -2903,17 +2905,17 @@ func setOverview(context *gin.Context) { }) return } - criticality, err := model.ParseCriticality(payload.BusinessCriticality) + criticality, err := model.ParseCriticality(payload.Business_criticality) if err != nil { handleErrorInServiceCall(err, context) return } - modelInput.ManagementSummaryComment = payload.ManagementSummaryComment - modelInput.BusinessCriticality = criticality.String() - modelInput.BusinessOverview.Description = payload.BusinessOverview.Description - modelInput.BusinessOverview.Images = payload.BusinessOverview.Images - modelInput.TechnicalOverview.Description = payload.TechnicalOverview.Description - modelInput.TechnicalOverview.Images = payload.TechnicalOverview.Images + modelInput.Management_summary_comment = payload.Management_summary_comment + modelInput.Business_criticality = criticality.String() + modelInput.Business_overview.Description = payload.Business_overview.Description + modelInput.Business_overview.Images = payload.Business_overview.Images + modelInput.Technical_overview.Description = payload.Technical_overview.Description + modelInput.Technical_overview.Images = payload.Technical_overview.Images ok = writeModel(context, key, folderNameOfKey, &modelInput, "Overview Update") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2937,13 +2939,13 @@ func getOverview(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { context.JSON(http.StatusOK, gin.H{ - "management_summary_comment": aModel.ManagementSummaryComment, - "business_criticality": aModel.BusinessCriticality, - "business_overview": aModel.BusinessOverview, - 
"technical_overview": aModel.TechnicalOverview, + "management_summary_comment": model.Management_summary_comment, + "business_criticality": model.Business_criticality, + "business_overview": model.Business_overview, + "technical_overview": model.Technical_overview, }) } } @@ -2987,12 +2989,12 @@ func getCover(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { context.JSON(http.StatusOK, gin.H{ - "title": aModel.Title, - "date": aModel.Date, - "author": aModel.Author, + "title": model.Title, + "date": model.Date, + "author": model.Author, }) } } @@ -3010,8 +3012,8 @@ func createNewModel(context *gin.Context) { lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - aUuid := uuid.New().String() - err := os.Mkdir(folderNameForModel(folderNameOfKey, aUuid), 0700) + uuid := uuid.New().String() + err := os.Mkdir(folderNameForModel(folderNameOfKey, uuid), 0700) if err != nil { context.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to create model", @@ -3019,7 +3021,7 @@ func createNewModel(context *gin.Context) { return } - aYaml := `title: New Threat Model + yaml := `title: New Threat Model threagile_version: ` + model.ThreagileVersion + ` author: name: "" @@ -3050,11 +3052,11 @@ diagram_tweak_suppress_edge_labels: false diagram_tweak_invisible_connections_between_assets: [] diagram_tweak_same_rank_assets: []` - ok = writeModelYAML(context, aYaml, key, folderNameForModel(folderNameOfKey, aUuid), "New Model Creation", true) + ok = writeModelYAML(context, yaml, key, folderNameForModel(folderNameOfKey, uuid), "New Model Creation", true) if ok { context.JSON(http.StatusCreated, gin.H{ "message": "model created", - "id": aUuid, + "id": uuid, }) } } @@ -3068,7 +3070,7 @@ func listModels(context *gin.Context) { // TODO currently returns 
error when any defer unlockFolder(folderNameOfKey) result := make([]payloadModels, 0) - modelFolders, err := os.ReadDir(folderNameOfKey) + modelFolders, err := ioutil.ReadDir(folderNameOfKey) if err != nil { log.Println(err) context.JSON(http.StatusNotFound, gin.H{ @@ -3076,9 +3078,9 @@ func listModels(context *gin.Context) { // TODO currently returns error when any }) return } - for _, dirEntry := range modelFolders { - if dirEntry.IsDir() { - modelStat, err := os.Stat(folderNameOfKey + "/" + dirEntry.Name() + "/threagile.yaml") + for _, fileInfo := range modelFolders { + if fileInfo.IsDir() { + modelStat, err := os.Stat(folderNameOfKey + "/" + fileInfo.Name() + "/threagile.yaml") if err != nil { log.Println(err) context.JSON(http.StatusNotFound, gin.H{ @@ -3086,23 +3088,15 @@ func listModels(context *gin.Context) { // TODO currently returns error when any }) return } - aModel, _, ok := readModel(context, dirEntry.Name(), key, folderNameOfKey) + model, _, ok := readModel(context, fileInfo.Name(), key, folderNameOfKey) if !ok { return } - fileInfo, err := dirEntry.Info() - if err != nil { - log.Println(err) - context.JSON(http.StatusNotFound, gin.H{ - "error": "unable to get file info", - }) - return - } result = append(result, payloadModels{ - ID: dirEntry.Name(), - Title: aModel.Title, - TimestampCreated: fileInfo.ModTime(), - TimestampModified: modelStat.ModTime(), + ID: fileInfo.Name(), + Title: model.Title, + Timestamp_created: fileInfo.ModTime(), + Timestamp_modified: modelStat.ModTime(), }) } } @@ -3162,7 +3156,7 @@ func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfK }) return modelInputResult, yamlText, false } - aesGcm, err := cipher.NewGCM(block) + aesgcm, err := cipher.NewGCM(block) if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -3171,7 +3165,7 @@ func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfK return modelInputResult, yamlText, false } - fileBytes, err 
:= os.ReadFile(modelFolder + "/threagile.yaml") + fileBytes, err := ioutil.ReadFile(modelFolder + "/threagile.yaml") if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -3182,7 +3176,7 @@ func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfK nonce := fileBytes[0:12] ciphertext := fileBytes[12:] - plaintext, err := aesGcm.Open(nil, nonce, ciphertext, nil) + plaintext, err := aesgcm.Open(nil, nonce, ciphertext, nil) if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -3200,7 +3194,7 @@ func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfK return modelInputResult, yamlText, false } buf := new(bytes.Buffer) - _, _ = buf.ReadFrom(r) + buf.ReadFrom(r) modelInput := model.ModelInput{} yamlBytes := buf.Bytes() err = yaml.Unmarshal(yamlBytes, &modelInput) @@ -3217,7 +3211,7 @@ func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfK func writeModel(context *gin.Context, key []byte, folderNameOfKey string, modelInput *model.ModelInput, changeReasonForHistory string) (ok bool) { modelFolder, ok := checkModelFolder(context, context.Param("model-id"), folderNameOfKey) if ok { - modelInput.ThreagileVersion = model.ThreagileVersion + modelInput.Threagile_version = model.ThreagileVersion yamlBytes, err := yaml.Marshal(modelInput) if err != nil { log.Println(err) @@ -3240,8 +3234,8 @@ func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder s } var b bytes.Buffer w := gzip.NewWriter(&b) - _, _ = w.Write([]byte(yaml)) - _ = w.Close() + w.Write([]byte(yaml)) + w.Close() plaintext := b.Bytes() cryptoKey := generateKeyFromAlreadyStrongRandomInput(key) block, err := aes.NewCipher(cryptoKey) @@ -3261,7 +3255,7 @@ func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder s }) return false } - aesGcm, err := cipher.NewGCM(block) + aesgcm, err := cipher.NewGCM(block) if err != nil { log.Println(err) 
context.JSON(http.StatusInternalServerError, gin.H{ @@ -3269,7 +3263,7 @@ func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder s }) return false } - ciphertext := aesGcm.Seal(nil, nonce, plaintext, nil) + ciphertext := aesgcm.Seal(nil, nonce, plaintext, nil) if !skipBackup { err = backupModelToHistory(modelFolder, changeReasonForHistory) if err != nil { @@ -3288,9 +3282,9 @@ func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder s }) return false } - _, _ = f.Write(nonce) - _, _ = f.Write(ciphertext) - _ = f.Close() + f.Write(nonce) + f.Write(ciphertext) + f.Close() return true } @@ -3302,17 +3296,17 @@ func backupModelToHistory(modelFolder string, changeReasonForHistory string) (er return err } } - input, err := os.ReadFile(modelFolder + "/threagile.yaml") + input, err := ioutil.ReadFile(modelFolder + "/threagile.yaml") if err != nil { return err } historyFile := historyFolder + "/" + time.Now().Format("2006-01-02 15:04:05") + " " + changeReasonForHistory + ".backup" - err = os.WriteFile(historyFile, input, 0400) + err = ioutil.WriteFile(historyFile, input, 0400) if err != nil { return err } // now delete any old files if over limit to keep - files, err := os.ReadDir(historyFolder) + files, err := ioutil.ReadDir(historyFolder) if err != nil { return err } @@ -3372,7 +3366,7 @@ func checkObjectCreationThrottler(context *gin.Context, typeName string) bool { // remove all elements older than 3 minutes (= 180000000000 ns) now := time.Now().UnixNano() cutoff := now - 180000000000 - for keyCheck := range createdObjectsThrottler { + for keyCheck, _ := range createdObjectsThrottler { for i := 0; i < len(createdObjectsThrottler[keyCheck]); i++ { if createdObjectsThrottler[keyCheck][i] < cutoff { // Remove the element at index i from slice (safe while looping using i as iterator) @@ -3509,7 +3503,7 @@ func checkTokenToFolderName(context *gin.Context) (folderNameOfKey string, key [ }) return folderNameOfKey, key, false } - 
timeoutStruct.lastAccessedNanoTime = time.Now().UnixNano() + timeoutStruct.lastAcessedNanotime = time.Now().UnixNano() return folderNameOfKey, key, true } else { context.JSON(http.StatusNotFound, gin.H{ @@ -3603,7 +3597,7 @@ func parseCommandlineArgs() { license := flag.Bool("print-license", false, "print license information") flag.Usage = func() { printLogo() - _, _ = fmt.Fprintf(os.Stderr, "Usage: threagile [options]") + fmt.Fprintf(os.Stderr, "Usage: threagile [options]") fmt.Println() fmt.Println() fmt.Println() @@ -3703,12 +3697,12 @@ func parseCommandlineArgs() { fmt.Println("----------------------") fmt.Println("Built-in model macros:") fmt.Println("----------------------") - fmt.Println(addbuildpipeline.GetMacroDetails().ID, "-->", addbuildpipeline.GetMacroDetails().Title) - fmt.Println(addvault.GetMacroDetails().ID, "-->", addvault.GetMacroDetails().Title) - fmt.Println(prettyprint.GetMacroDetails().ID, "-->", prettyprint.GetMacroDetails().Title) - fmt.Println(removeunusedtags.GetMacroDetails().ID, "-->", removeunusedtags.GetMacroDetails().Title) - fmt.Println(seedrisktracking.GetMacroDetails().ID, "-->", seedrisktracking.GetMacroDetails().Title) - fmt.Println(seedtags.GetMacroDetails().ID, "-->", seedtags.GetMacroDetails().Title) + fmt.Println(add_build_pipeline.GetMacroDetails().ID, "-->", add_build_pipeline.GetMacroDetails().Title) + fmt.Println(add_vault.GetMacroDetails().ID, "-->", add_vault.GetMacroDetails().Title) + fmt.Println(pretty_print.GetMacroDetails().ID, "-->", pretty_print.GetMacroDetails().Title) + fmt.Println(remove_unused_tags.GetMacroDetails().ID, "-->", remove_unused_tags.GetMacroDetails().Title) + fmt.Println(seed_risk_tracking.GetMacroDetails().ID, "-->", seed_risk_tracking.GetMacroDetails().Title) + fmt.Println(seed_tags.GetMacroDetails().ID, "-->", seed_tags.GetMacroDetails().Title) fmt.Println() os.Exit(0) } @@ -3727,48 +3721,48 @@ func parseCommandlineArgs() { fmt.Println("--------------------") fmt.Println("Built-in risk 
rules:") fmt.Println("--------------------") - fmt.Println(accidentalsecretleak.Category().Id, "-->", accidentalsecretleak.Category().Title, "--> with tags:", accidentalsecretleak.SupportedTags()) - fmt.Println(codebackdooring.Category().Id, "-->", codebackdooring.Category().Title, "--> with tags:", codebackdooring.SupportedTags()) - fmt.Println(containerbaseimagebackdooring.Category().Id, "-->", containerbaseimagebackdooring.Category().Title, "--> with tags:", containerbaseimagebackdooring.SupportedTags()) - fmt.Println(containerplatformescape.Category().Id, "-->", containerplatformescape.Category().Title, "--> with tags:", containerplatformescape.SupportedTags()) - fmt.Println(crosssiterequestforgery.Category().Id, "-->", crosssiterequestforgery.Category().Title, "--> with tags:", crosssiterequestforgery.SupportedTags()) - fmt.Println(crosssitescripting.Category().Id, "-->", crosssitescripting.Category().Title, "--> with tags:", crosssitescripting.SupportedTags()) - fmt.Println(dosriskyaccessacrosstrustboundary.Category().Id, "-->", dosriskyaccessacrosstrustboundary.Category().Title, "--> with tags:", dosriskyaccessacrosstrustboundary.SupportedTags()) - fmt.Println(incompletemodel.Category().Id, "-->", incompletemodel.Category().Title, "--> with tags:", incompletemodel.SupportedTags()) - fmt.Println(ldapinjection.Category().Id, "-->", ldapinjection.Category().Title, "--> with tags:", ldapinjection.SupportedTags()) - fmt.Println(missingauthentication.Category().Id, "-->", missingauthentication.Category().Title, "--> with tags:", missingauthentication.SupportedTags()) - fmt.Println(missingauthenticationsecondfactor.Category().Id, "-->", missingauthenticationsecondfactor.Category().Title, "--> with tags:", missingauthenticationsecondfactor.SupportedTags()) - fmt.Println(missingbuildinfrastructure.Category().Id, "-->", missingbuildinfrastructure.Category().Title, "--> with tags:", missingbuildinfrastructure.SupportedTags()) - 
fmt.Println(missingcloudhardening.Category().Id, "-->", missingcloudhardening.Category().Title, "--> with tags:", missingcloudhardening.SupportedTags()) - fmt.Println(missingfilevalidation.Category().Id, "-->", missingfilevalidation.Category().Title, "--> with tags:", missingfilevalidation.SupportedTags()) - fmt.Println(missinghardening.Category().Id, "-->", missinghardening.Category().Title, "--> with tags:", missinghardening.SupportedTags()) - fmt.Println(missingidentitypropagation.Category().Id, "-->", missingidentitypropagation.Category().Title, "--> with tags:", missingidentitypropagation.SupportedTags()) - fmt.Println(missingidentityproviderisolation.Category().Id, "-->", missingidentityproviderisolation.Category().Title, "--> with tags:", missingidentityproviderisolation.SupportedTags()) - fmt.Println(missingidentitystore.Category().Id, "-->", missingidentitystore.Category().Title, "--> with tags:", missingidentitystore.SupportedTags()) - fmt.Println(missingnetworksegmentation.Category().Id, "-->", missingnetworksegmentation.Category().Title, "--> with tags:", missingnetworksegmentation.SupportedTags()) - fmt.Println(missingvault.Category().Id, "-->", missingvault.Category().Title, "--> with tags:", missingvault.SupportedTags()) - fmt.Println(missingvaultisolation.Category().Id, "-->", missingvaultisolation.Category().Title, "--> with tags:", missingvaultisolation.SupportedTags()) - fmt.Println(missingwaf.Category().Id, "-->", missingwaf.Category().Title, "--> with tags:", missingwaf.SupportedTags()) - fmt.Println(mixedtargetsonsharedruntime.Category().Id, "-->", mixedtargetsonsharedruntime.Category().Title, "--> with tags:", mixedtargetsonsharedruntime.SupportedTags()) - fmt.Println(pathtraversal.Category().Id, "-->", pathtraversal.Category().Title, "--> with tags:", pathtraversal.SupportedTags()) - fmt.Println(pushinsteadofpulldeployment.Category().Id, "-->", pushinsteadofpulldeployment.Category().Title, "--> with tags:", 
pushinsteadofpulldeployment.SupportedTags()) - fmt.Println(searchqueryinjection.Category().Id, "-->", searchqueryinjection.Category().Title, "--> with tags:", searchqueryinjection.SupportedTags()) - fmt.Println(serversiderequestforgery.Category().Id, "-->", serversiderequestforgery.Category().Title, "--> with tags:", serversiderequestforgery.SupportedTags()) - fmt.Println(serviceregistrypoisoning.Category().Id, "-->", serviceregistrypoisoning.Category().Title, "--> with tags:", serviceregistrypoisoning.SupportedTags()) - fmt.Println(sqlnosqlinjection.Category().Id, "-->", sqlnosqlinjection.Category().Title, "--> with tags:", sqlnosqlinjection.SupportedTags()) - fmt.Println(uncheckeddeployment.Category().Id, "-->", uncheckeddeployment.Category().Title, "--> with tags:", uncheckeddeployment.SupportedTags()) - fmt.Println(unencryptedasset.Category().Id, "-->", unencryptedasset.Category().Title, "--> with tags:", unencryptedasset.SupportedTags()) - fmt.Println(unencryptedcommunication.Category().Id, "-->", unencryptedcommunication.Category().Title, "--> with tags:", unencryptedcommunication.SupportedTags()) - fmt.Println(unguardedaccessfrominternet.Category().Id, "-->", unguardedaccessfrominternet.Category().Title, "--> with tags:", unguardedaccessfrominternet.SupportedTags()) - fmt.Println(unguardeddirectdatastoreaccess.Category().Id, "-->", unguardeddirectdatastoreaccess.Category().Title, "--> with tags:", unguardeddirectdatastoreaccess.SupportedTags()) - fmt.Println(unnecessarycommunicationlink.Category().Id, "-->", unnecessarycommunicationlink.Category().Title, "--> with tags:", unnecessarycommunicationlink.SupportedTags()) - fmt.Println(unnecessarydataasset.Category().Id, "-->", unnecessarydataasset.Category().Title, "--> with tags:", unnecessarydataasset.SupportedTags()) - fmt.Println(unnecessarydatatransfer.Category().Id, "-->", unnecessarydatatransfer.Category().Title, "--> with tags:", unnecessarydatatransfer.SupportedTags()) - 
fmt.Println(unnecessarytechnicalasset.Category().Id, "-->", unnecessarytechnicalasset.Category().Title, "--> with tags:", unnecessarytechnicalasset.SupportedTags()) - fmt.Println(untrusteddeserialization.Category().Id, "-->", untrusteddeserialization.Category().Title, "--> with tags:", untrusteddeserialization.SupportedTags()) - fmt.Println(wrongcommunicationlinkcontent.Category().Id, "-->", wrongcommunicationlinkcontent.Category().Title, "--> with tags:", wrongcommunicationlinkcontent.SupportedTags()) - fmt.Println(wrongtrustboundarycontent.Category().Id, "-->", wrongtrustboundarycontent.Category().Title, "--> with tags:", wrongtrustboundarycontent.SupportedTags()) - fmt.Println(xmlexternalentity.Category().Id, "-->", xmlexternalentity.Category().Title, "--> with tags:", xmlexternalentity.SupportedTags()) + fmt.Println(accidental_secret_leak.Category().Id, "-->", accidental_secret_leak.Category().Title, "--> with tags:", accidental_secret_leak.SupportedTags()) + fmt.Println(code_backdooring.Category().Id, "-->", code_backdooring.Category().Title, "--> with tags:", code_backdooring.SupportedTags()) + fmt.Println(container_baseimage_backdooring.Category().Id, "-->", container_baseimage_backdooring.Category().Title, "--> with tags:", container_baseimage_backdooring.SupportedTags()) + fmt.Println(container_platform_escape.Category().Id, "-->", container_platform_escape.Category().Title, "--> with tags:", container_platform_escape.SupportedTags()) + fmt.Println(cross_site_request_forgery.Category().Id, "-->", cross_site_request_forgery.Category().Title, "--> with tags:", cross_site_request_forgery.SupportedTags()) + fmt.Println(cross_site_scripting.Category().Id, "-->", cross_site_scripting.Category().Title, "--> with tags:", cross_site_scripting.SupportedTags()) + fmt.Println(dos_risky_access_across_trust_boundary.Category().Id, "-->", dos_risky_access_across_trust_boundary.Category().Title, "--> with tags:", dos_risky_access_across_trust_boundary.SupportedTags()) + 
fmt.Println(incomplete_model.Category().Id, "-->", incomplete_model.Category().Title, "--> with tags:", incomplete_model.SupportedTags()) + fmt.Println(ldap_injection.Category().Id, "-->", ldap_injection.Category().Title, "--> with tags:", ldap_injection.SupportedTags()) + fmt.Println(missing_authentication.Category().Id, "-->", missing_authentication.Category().Title, "--> with tags:", missing_authentication.SupportedTags()) + fmt.Println(missing_authentication_second_factor.Category().Id, "-->", missing_authentication_second_factor.Category().Title, "--> with tags:", missing_authentication_second_factor.SupportedTags()) + fmt.Println(missing_build_infrastructure.Category().Id, "-->", missing_build_infrastructure.Category().Title, "--> with tags:", missing_build_infrastructure.SupportedTags()) + fmt.Println(missing_cloud_hardening.Category().Id, "-->", missing_cloud_hardening.Category().Title, "--> with tags:", missing_cloud_hardening.SupportedTags()) + fmt.Println(missing_file_validation.Category().Id, "-->", missing_file_validation.Category().Title, "--> with tags:", missing_file_validation.SupportedTags()) + fmt.Println(missing_hardening.Category().Id, "-->", missing_hardening.Category().Title, "--> with tags:", missing_hardening.SupportedTags()) + fmt.Println(missing_identity_propagation.Category().Id, "-->", missing_identity_propagation.Category().Title, "--> with tags:", missing_identity_propagation.SupportedTags()) + fmt.Println(missing_identity_provider_isolation.Category().Id, "-->", missing_identity_provider_isolation.Category().Title, "--> with tags:", missing_identity_provider_isolation.SupportedTags()) + fmt.Println(missing_identity_store.Category().Id, "-->", missing_identity_store.Category().Title, "--> with tags:", missing_identity_store.SupportedTags()) + fmt.Println(missing_network_segmentation.Category().Id, "-->", missing_network_segmentation.Category().Title, "--> with tags:", missing_network_segmentation.SupportedTags()) + 
fmt.Println(missing_vault.Category().Id, "-->", missing_vault.Category().Title, "--> with tags:", missing_vault.SupportedTags()) + fmt.Println(missing_vault_isolation.Category().Id, "-->", missing_vault_isolation.Category().Title, "--> with tags:", missing_vault_isolation.SupportedTags()) + fmt.Println(missing_waf.Category().Id, "-->", missing_waf.Category().Title, "--> with tags:", missing_waf.SupportedTags()) + fmt.Println(mixed_targets_on_shared_runtime.Category().Id, "-->", mixed_targets_on_shared_runtime.Category().Title, "--> with tags:", mixed_targets_on_shared_runtime.SupportedTags()) + fmt.Println(path_traversal.Category().Id, "-->", path_traversal.Category().Title, "--> with tags:", path_traversal.SupportedTags()) + fmt.Println(push_instead_of_pull_deployment.Category().Id, "-->", push_instead_of_pull_deployment.Category().Title, "--> with tags:", push_instead_of_pull_deployment.SupportedTags()) + fmt.Println(search_query_injection.Category().Id, "-->", search_query_injection.Category().Title, "--> with tags:", search_query_injection.SupportedTags()) + fmt.Println(server_side_request_forgery.Category().Id, "-->", server_side_request_forgery.Category().Title, "--> with tags:", server_side_request_forgery.SupportedTags()) + fmt.Println(service_registry_poisoning.Category().Id, "-->", service_registry_poisoning.Category().Title, "--> with tags:", service_registry_poisoning.SupportedTags()) + fmt.Println(sql_nosql_injection.Category().Id, "-->", sql_nosql_injection.Category().Title, "--> with tags:", sql_nosql_injection.SupportedTags()) + fmt.Println(unchecked_deployment.Category().Id, "-->", unchecked_deployment.Category().Title, "--> with tags:", unchecked_deployment.SupportedTags()) + fmt.Println(unencrypted_asset.Category().Id, "-->", unencrypted_asset.Category().Title, "--> with tags:", unencrypted_asset.SupportedTags()) + fmt.Println(unencrypted_communication.Category().Id, "-->", unencrypted_communication.Category().Title, "--> with tags:", 
unencrypted_communication.SupportedTags()) + fmt.Println(unguarded_access_from_internet.Category().Id, "-->", unguarded_access_from_internet.Category().Title, "--> with tags:", unguarded_access_from_internet.SupportedTags()) + fmt.Println(unguarded_direct_datastore_access.Category().Id, "-->", unguarded_direct_datastore_access.Category().Title, "--> with tags:", unguarded_direct_datastore_access.SupportedTags()) + fmt.Println(unnecessary_communication_link.Category().Id, "-->", unnecessary_communication_link.Category().Title, "--> with tags:", unnecessary_communication_link.SupportedTags()) + fmt.Println(unnecessary_data_asset.Category().Id, "-->", unnecessary_data_asset.Category().Title, "--> with tags:", unnecessary_data_asset.SupportedTags()) + fmt.Println(unnecessary_data_transfer.Category().Id, "-->", unnecessary_data_transfer.Category().Title, "--> with tags:", unnecessary_data_transfer.SupportedTags()) + fmt.Println(unnecessary_technical_asset.Category().Id, "-->", unnecessary_technical_asset.Category().Title, "--> with tags:", unnecessary_technical_asset.SupportedTags()) + fmt.Println(untrusted_deserialization.Category().Id, "-->", untrusted_deserialization.Category().Title, "--> with tags:", untrusted_deserialization.SupportedTags()) + fmt.Println(wrong_communication_link_content.Category().Id, "-->", wrong_communication_link_content.Category().Title, "--> with tags:", wrong_communication_link_content.SupportedTags()) + fmt.Println(wrong_trust_boundary_content.Category().Id, "-->", wrong_trust_boundary_content.Category().Title, "--> with tags:", wrong_trust_boundary_content.SupportedTags()) + fmt.Println(xml_external_entity.Category().Id, "-->", xml_external_entity.Category().Title, "--> with tags:", xml_external_entity.SupportedTags()) fmt.Println() os.Exit(0) } @@ -3804,12 +3798,12 @@ func parseCommandlineArgs() { printLogo() fmt.Println("Explanation for the model macros:") fmt.Println() - fmt.Printf("%v: %v\n", addbuildpipeline.GetMacroDetails().ID, 
addbuildpipeline.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", addvault.GetMacroDetails().ID, addvault.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", prettyprint.GetMacroDetails().ID, prettyprint.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", removeunusedtags.GetMacroDetails().ID, removeunusedtags.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", seedrisktracking.GetMacroDetails().ID, seedrisktracking.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", seedtags.GetMacroDetails().ID, seedtags.GetMacroDetails().Description) + fmt.Printf("%v: %v\n", add_build_pipeline.GetMacroDetails().ID, add_build_pipeline.GetMacroDetails().Description) + fmt.Printf("%v: %v\n", add_vault.GetMacroDetails().ID, add_vault.GetMacroDetails().Description) + fmt.Printf("%v: %v\n", pretty_print.GetMacroDetails().ID, pretty_print.GetMacroDetails().Description) + fmt.Printf("%v: %v\n", remove_unused_tags.GetMacroDetails().ID, remove_unused_tags.GetMacroDetails().Description) + fmt.Printf("%v: %v\n", seed_risk_tracking.GetMacroDetails().ID, seed_risk_tracking.GetMacroDetails().Description) + fmt.Printf("%v: %v\n", seed_tags.GetMacroDetails().ID, seed_tags.GetMacroDetails().Description) fmt.Println() os.Exit(0) @@ -3818,48 +3812,48 @@ func parseCommandlineArgs() { printLogo() fmt.Println("Explanation for risk rules:") fmt.Println() - fmt.Printf("%v: %v\n", accidentalsecretleak.Category().Id, accidentalsecretleak.Category().Description) - fmt.Printf("%v: %v\n", codebackdooring.Category().Id, codebackdooring.Category().Description) - fmt.Printf("%v: %v\n", containerbaseimagebackdooring.Category().Id, containerbaseimagebackdooring.Category().Description) - fmt.Printf("%v: %v\n", containerplatformescape.Category().Id, containerplatformescape.Category().Description) - fmt.Printf("%v: %v\n", crosssiterequestforgery.Category().Id, crosssiterequestforgery.Category().Description) - fmt.Printf("%v: %v\n", crosssitescripting.Category().Id, 
crosssitescripting.Category().Description) - fmt.Printf("%v: %v\n", dosriskyaccessacrosstrustboundary.Category().Id, dosriskyaccessacrosstrustboundary.Category().Description) - fmt.Printf("%v: %v\n", incompletemodel.Category().Id, incompletemodel.Category().Description) - fmt.Printf("%v: %v\n", ldapinjection.Category().Id, ldapinjection.Category().Description) - fmt.Printf("%v: %v\n", missingauthentication.Category().Id, missingauthentication.Category().Description) - fmt.Printf("%v: %v\n", missingauthenticationsecondfactor.Category().Id, missingauthenticationsecondfactor.Category().Description) - fmt.Printf("%v: %v\n", missingbuildinfrastructure.Category().Id, missingbuildinfrastructure.Category().Description) - fmt.Printf("%v: %v\n", missingcloudhardening.Category().Id, missingcloudhardening.Category().Description) - fmt.Printf("%v: %v\n", missingfilevalidation.Category().Id, missingfilevalidation.Category().Description) - fmt.Printf("%v: %v\n", missinghardening.Category().Id, missinghardening.Category().Description) - fmt.Printf("%v: %v\n", missingidentitypropagation.Category().Id, missingidentitypropagation.Category().Description) - fmt.Printf("%v: %v\n", missingidentityproviderisolation.Category().Id, missingidentityproviderisolation.Category().Description) - fmt.Printf("%v: %v\n", missingidentitystore.Category().Id, missingidentitystore.Category().Description) - fmt.Printf("%v: %v\n", missingnetworksegmentation.Category().Id, missingnetworksegmentation.Category().Description) - fmt.Printf("%v: %v\n", missingvault.Category().Id, missingvault.Category().Description) - fmt.Printf("%v: %v\n", missingvaultisolation.Category().Id, missingvaultisolation.Category().Description) - fmt.Printf("%v: %v\n", missingwaf.Category().Id, missingwaf.Category().Description) - fmt.Printf("%v: %v\n", mixedtargetsonsharedruntime.Category().Id, mixedtargetsonsharedruntime.Category().Description) - fmt.Printf("%v: %v\n", pathtraversal.Category().Id, 
pathtraversal.Category().Description) - fmt.Printf("%v: %v\n", pushinsteadofpulldeployment.Category().Id, pushinsteadofpulldeployment.Category().Description) - fmt.Printf("%v: %v\n", searchqueryinjection.Category().Id, searchqueryinjection.Category().Description) - fmt.Printf("%v: %v\n", serversiderequestforgery.Category().Id, serversiderequestforgery.Category().Description) - fmt.Printf("%v: %v\n", serviceregistrypoisoning.Category().Id, serviceregistrypoisoning.Category().Description) - fmt.Printf("%v: %v\n", sqlnosqlinjection.Category().Id, sqlnosqlinjection.Category().Description) - fmt.Printf("%v: %v\n", uncheckeddeployment.Category().Id, uncheckeddeployment.Category().Description) - fmt.Printf("%v: %v\n", unencryptedasset.Category().Id, unencryptedasset.Category().Description) - fmt.Printf("%v: %v\n", unencryptedcommunication.Category().Id, unencryptedcommunication.Category().Description) - fmt.Printf("%v: %v\n", unguardedaccessfrominternet.Category().Id, unguardedaccessfrominternet.Category().Description) - fmt.Printf("%v: %v\n", unguardeddirectdatastoreaccess.Category().Id, unguardeddirectdatastoreaccess.Category().Description) - fmt.Printf("%v: %v\n", unnecessarycommunicationlink.Category().Id, unnecessarycommunicationlink.Category().Description) - fmt.Printf("%v: %v\n", unnecessarydataasset.Category().Id, unnecessarydataasset.Category().Description) - fmt.Printf("%v: %v\n", unnecessarydatatransfer.Category().Id, unnecessarydatatransfer.Category().Description) - fmt.Printf("%v: %v\n", unnecessarytechnicalasset.Category().Id, unnecessarytechnicalasset.Category().Description) - fmt.Printf("%v: %v\n", untrusteddeserialization.Category().Id, untrusteddeserialization.Category().Description) - fmt.Printf("%v: %v\n", wrongcommunicationlinkcontent.Category().Id, wrongcommunicationlinkcontent.Category().Description) - fmt.Printf("%v: %v\n", wrongtrustboundarycontent.Category().Id, wrongtrustboundarycontent.Category().Description) - fmt.Printf("%v: %v\n", 
xmlexternalentity.Category().Id, xmlexternalentity.Category().Description) + fmt.Printf("%v: %v\n", accidental_secret_leak.Category().Id, accidental_secret_leak.Category().Description) + fmt.Printf("%v: %v\n", code_backdooring.Category().Id, code_backdooring.Category().Description) + fmt.Printf("%v: %v\n", container_baseimage_backdooring.Category().Id, container_baseimage_backdooring.Category().Description) + fmt.Printf("%v: %v\n", container_platform_escape.Category().Id, container_platform_escape.Category().Description) + fmt.Printf("%v: %v\n", cross_site_request_forgery.Category().Id, cross_site_request_forgery.Category().Description) + fmt.Printf("%v: %v\n", cross_site_scripting.Category().Id, cross_site_scripting.Category().Description) + fmt.Printf("%v: %v\n", dos_risky_access_across_trust_boundary.Category().Id, dos_risky_access_across_trust_boundary.Category().Description) + fmt.Printf("%v: %v\n", incomplete_model.Category().Id, incomplete_model.Category().Description) + fmt.Printf("%v: %v\n", ldap_injection.Category().Id, ldap_injection.Category().Description) + fmt.Printf("%v: %v\n", missing_authentication.Category().Id, missing_authentication.Category().Description) + fmt.Printf("%v: %v\n", missing_authentication_second_factor.Category().Id, missing_authentication_second_factor.Category().Description) + fmt.Printf("%v: %v\n", missing_build_infrastructure.Category().Id, missing_build_infrastructure.Category().Description) + fmt.Printf("%v: %v\n", missing_cloud_hardening.Category().Id, missing_cloud_hardening.Category().Description) + fmt.Printf("%v: %v\n", missing_file_validation.Category().Id, missing_file_validation.Category().Description) + fmt.Printf("%v: %v\n", missing_hardening.Category().Id, missing_hardening.Category().Description) + fmt.Printf("%v: %v\n", missing_identity_propagation.Category().Id, missing_identity_propagation.Category().Description) + fmt.Printf("%v: %v\n", missing_identity_provider_isolation.Category().Id, 
missing_identity_provider_isolation.Category().Description) + fmt.Printf("%v: %v\n", missing_identity_store.Category().Id, missing_identity_store.Category().Description) + fmt.Printf("%v: %v\n", missing_network_segmentation.Category().Id, missing_network_segmentation.Category().Description) + fmt.Printf("%v: %v\n", missing_vault.Category().Id, missing_vault.Category().Description) + fmt.Printf("%v: %v\n", missing_vault_isolation.Category().Id, missing_vault_isolation.Category().Description) + fmt.Printf("%v: %v\n", missing_waf.Category().Id, missing_waf.Category().Description) + fmt.Printf("%v: %v\n", mixed_targets_on_shared_runtime.Category().Id, mixed_targets_on_shared_runtime.Category().Description) + fmt.Printf("%v: %v\n", path_traversal.Category().Id, path_traversal.Category().Description) + fmt.Printf("%v: %v\n", push_instead_of_pull_deployment.Category().Id, push_instead_of_pull_deployment.Category().Description) + fmt.Printf("%v: %v\n", search_query_injection.Category().Id, search_query_injection.Category().Description) + fmt.Printf("%v: %v\n", server_side_request_forgery.Category().Id, server_side_request_forgery.Category().Description) + fmt.Printf("%v: %v\n", service_registry_poisoning.Category().Id, service_registry_poisoning.Category().Description) + fmt.Printf("%v: %v\n", sql_nosql_injection.Category().Id, sql_nosql_injection.Category().Description) + fmt.Printf("%v: %v\n", unchecked_deployment.Category().Id, unchecked_deployment.Category().Description) + fmt.Printf("%v: %v\n", unencrypted_asset.Category().Id, unencrypted_asset.Category().Description) + fmt.Printf("%v: %v\n", unencrypted_communication.Category().Id, unencrypted_communication.Category().Description) + fmt.Printf("%v: %v\n", unguarded_access_from_internet.Category().Id, unguarded_access_from_internet.Category().Description) + fmt.Printf("%v: %v\n", unguarded_direct_datastore_access.Category().Id, unguarded_direct_datastore_access.Category().Description) + fmt.Printf("%v: %v\n", 
unnecessary_communication_link.Category().Id, unnecessary_communication_link.Category().Description) + fmt.Printf("%v: %v\n", unnecessary_data_asset.Category().Id, unnecessary_data_asset.Category().Description) + fmt.Printf("%v: %v\n", unnecessary_data_transfer.Category().Id, unnecessary_data_transfer.Category().Description) + fmt.Printf("%v: %v\n", unnecessary_technical_asset.Category().Id, unnecessary_technical_asset.Category().Description) + fmt.Printf("%v: %v\n", untrusted_deserialization.Category().Id, untrusted_deserialization.Category().Description) + fmt.Printf("%v: %v\n", wrong_communication_link_content.Category().Id, wrong_communication_link_content.Category().Description) + fmt.Printf("%v: %v\n", wrong_trust_boundary_content.Category().Id, wrong_trust_boundary_content.Category().Description) + fmt.Printf("%v: %v\n", xml_external_entity.Category().Id, xml_external_entity.Category().Description) fmt.Println() os.Exit(0) } @@ -3881,7 +3875,7 @@ func parseCommandlineArgs() { } if *license { printLogo() - content, err := os.ReadFile("/app/LICENSE.txt") + content, err := ioutil.ReadFile("/app/LICENSE.txt") checkErr(err) fmt.Print(string(content)) fmt.Println() @@ -3941,20 +3935,20 @@ func printVersion() { } func createExampleModelFile() { - _, _ = copyFile("/app/threagile-example-model.yaml", *outputDir+"/threagile-example-model.yaml") + copyFile("/app/threagile-example-model.yaml", *outputDir+"/threagile-example-model.yaml") } func createStubModelFile() { loadCustomRiskRules() - stub, err := os.ReadFile("/app/threagile-stub-model.yaml") + stub, err := ioutil.ReadFile("/app/threagile-stub-model.yaml") checkErr(err) - err = os.WriteFile(*outputDir+"/threagile-stub-model.yaml", addSupportedTags(stub), 0644) + err = ioutil.WriteFile(*outputDir+"/threagile-stub-model.yaml", addSupportedTags(stub), 0644) checkErr(err) } func createEditingSupportFiles() { - _, _ = copyFile("/app/schema.json", *outputDir+"/schema.json") - _, _ = copyFile("/app/live-templates.txt", 
*outputDir+"/live-templates.txt") + copyFile("/app/schema.json", *outputDir+"/schema.json") + copyFile("/app/live-templates.txt", *outputDir+"/live-templates.txt") } func printExamples() { @@ -4013,13 +4007,13 @@ func copyFile(src, dst string) (int64, error) { if err != nil { return 0, err } - defer func() { _ = source.Close() }() + defer source.Close() destination, err := os.Create(dst) if err != nil { return 0, err } - defer func() { _ = destination.Close() }() + defer destination.Close() nBytes, err := io.Copy(destination, source) return nBytes, err } @@ -4028,7 +4022,7 @@ func parseModel(inputFilename string) { if *verbose { fmt.Println("Parsing model:", inputFilename) } - modelYaml, err := os.ReadFile(inputFilename) + modelYaml, err := ioutil.ReadFile(inputFilename) if err == nil { modelInput = model.ModelInput{} err = yaml.Unmarshal(modelYaml, &modelInput) @@ -4036,7 +4030,7 @@ func parseModel(inputFilename string) { //fmt.Println(modelInput) var businessCriticality model.Criticality - switch modelInput.BusinessCriticality { + switch modelInput.Business_criticality { case model.Archive.String(): businessCriticality = model.Archive case model.Operational.String(): @@ -4048,7 +4042,7 @@ func parseModel(inputFilename string) { case model.MissionCritical.String(): businessCriticality = model.MissionCritical default: - panic(errors.New("unknown 'business_criticality' value of application: " + modelInput.BusinessCriticality)) + panic(errors.New("unknown 'business_criticality' value of application: " + modelInput.Business_criticality)) } reportDate := time.Now() @@ -4063,21 +4057,21 @@ func parseModel(inputFilename string) { Author: modelInput.Author, Title: modelInput.Title, Date: reportDate, - ManagementSummaryComment: modelInput.ManagementSummaryComment, + ManagementSummaryComment: modelInput.Management_summary_comment, BusinessCriticality: businessCriticality, - BusinessOverview: removePathElementsFromImageFiles(modelInput.BusinessOverview), - TechnicalOverview: 
removePathElementsFromImageFiles(modelInput.TechnicalOverview), + BusinessOverview: removePathElementsFromImageFiles(modelInput.Business_overview), + TechnicalOverview: removePathElementsFromImageFiles(modelInput.Technical_overview), Questions: modelInput.Questions, - AbuseCases: modelInput.AbuseCases, - SecurityRequirements: modelInput.SecurityRequirements, - TagsAvailable: lowerCaseAndTrim(modelInput.TagsAvailable), - DiagramTweakNodesep: modelInput.DiagramTweakNodesep, - DiagramTweakRanksep: modelInput.DiagramTweakRanksep, - DiagramTweakEdgeLayout: modelInput.DiagramTweakEdgeLayout, - DiagramTweakSuppressEdgeLabels: modelInput.DiagramTweakSuppressEdgeLabels, - DiagramTweakLayoutLeftToRight: modelInput.DiagramTweakLayoutLeftToRight, - DiagramTweakInvisibleConnectionsBetweenAssets: modelInput.DiagramTweakInvisibleConnectionsBetweenAssets, - DiagramTweakSameRankAssets: modelInput.DiagramTweakSameRankAssets, + AbuseCases: modelInput.Abuse_cases, + SecurityRequirements: modelInput.Security_requirements, + TagsAvailable: lowerCaseAndTrim(modelInput.Tags_available), + DiagramTweakNodesep: modelInput.Diagram_tweak_nodesep, + DiagramTweakRanksep: modelInput.Diagram_tweak_ranksep, + DiagramTweakEdgeLayout: modelInput.Diagram_tweak_edge_layout, + DiagramTweakSuppressEdgeLabels: modelInput.Diagram_tweak_suppress_edge_labels, + DiagramTweakLayoutLeftToRight: modelInput.Diagram_tweak_layout_left_to_right, + DiagramTweakInvisibleConnectionsBetweenAssets: modelInput.Diagram_tweak_invisible_connections_between_assets, + DiagramTweakSameRankAssets: modelInput.Diagram_tweak_same_rank_assets, } if model.ParsedModelRoot.DiagramTweakNodesep == 0 { model.ParsedModelRoot.DiagramTweakNodesep = 2 @@ -4088,7 +4082,7 @@ func parseModel(inputFilename string) { // Data Assets =============================================================================== model.ParsedModelRoot.DataAssets = make(map[string]model.DataAsset) - for title, asset := range modelInput.DataAssets { + for title, asset 
:= range modelInput.Data_assets { id := fmt.Sprintf("%v", asset.ID) var usage model.Usage @@ -4179,13 +4173,13 @@ func parseModel(inputFilename string) { Confidentiality: confidentiality, Integrity: integrity, Availability: availability, - JustificationCiaRating: fmt.Sprintf("%v", asset.JustificationCiaRating), + JustificationCiaRating: fmt.Sprintf("%v", asset.Justification_cia_rating), } } // Technical Assets =============================================================================== model.ParsedModelRoot.TechnicalAssets = make(map[string]model.TechnicalAsset) - for title, asset := range modelInput.TechnicalAssets { + for title, asset := range modelInput.Technical_assets { id := fmt.Sprintf("%v", asset.ID) var usage model.Usage @@ -4199,9 +4193,9 @@ func parseModel(inputFilename string) { } var dataAssetsProcessed = make([]string, 0) - if asset.DataAssetsProcessed != nil { - dataAssetsProcessed = make([]string, len(asset.DataAssetsProcessed)) - for i, parsedProcessedAsset := range asset.DataAssetsProcessed { + if asset.Data_assets_processed != nil { + dataAssetsProcessed = make([]string, len(asset.Data_assets_processed)) + for i, parsedProcessedAsset := range asset.Data_assets_processed { referencedAsset := fmt.Sprintf("%v", parsedProcessedAsset) checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") dataAssetsProcessed[i] = referencedAsset @@ -4209,9 +4203,9 @@ func parseModel(inputFilename string) { } var dataAssetsStored = make([]string, 0) - if asset.DataAssetsStored != nil { - dataAssetsStored = make([]string, len(asset.DataAssetsStored)) - for i, parsedStoredAssets := range asset.DataAssetsStored { + if asset.Data_assets_stored != nil { + dataAssetsStored = make([]string, len(asset.Data_assets_stored)) + for i, parsedStoredAssets := range asset.Data_assets_stored { referencedAsset := fmt.Sprintf("%v", parsedStoredAssets) checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") dataAssetsStored[i] = referencedAsset @@ 
-4374,8 +4368,8 @@ func parseModel(inputFilename string) { encryption = model.DataWithSymmetricSharedKey case model.DataWithAsymmetricSharedKey.String(): encryption = model.DataWithAsymmetricSharedKey - case model.DataWithEndUserIndividualKey.String(): - encryption = model.DataWithEndUserIndividualKey + case model.DataWithEnduserIndividualKey.String(): + encryption = model.DataWithEnduserIndividualKey default: panic(errors.New("unknown 'encryption' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Encryption))) } @@ -4443,8 +4437,8 @@ func parseModel(inputFilename string) { } dataFormatsAccepted := make([]model.DataFormat, 0) - if asset.DataFormatsAccepted != nil { - for _, dataFormatName := range asset.DataFormatsAccepted { + if asset.Data_formats_accepted != nil { + for _, dataFormatName := range asset.Data_formats_accepted { switch dataFormatName { case model.JSON.String(): dataFormatsAccepted = append(dataFormatsAccepted, model.JSON) @@ -4463,8 +4457,8 @@ func parseModel(inputFilename string) { } communicationLinks := make([]model.CommunicationLink, 0) - if asset.CommunicationLinks != nil { - for commLinkTitle, commLink := range asset.CommunicationLinks { + if asset.Communication_links != nil { + for commLinkTitle, commLink := range asset.Communication_links { constraint := true weight := 1 var protocol model.Protocol @@ -4498,8 +4492,8 @@ func parseModel(inputFilename string) { authorization = model.NoneAuthorization case model.TechnicalUser.String(): authorization = model.TechnicalUser - case model.EndUserIdentityPropagation.String(): - authorization = model.EndUserIdentityPropagation + case model.EnduserIdentityPropagation.String(): + authorization = model.EnduserIdentityPropagation default: panic(errors.New("unknown 'authorization' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Authorization))) } @@ -4528,44 +4522,44 @@ func parseModel(inputFilename string) { protocol = 
model.MQTT case model.JDBC.String(): protocol = model.JDBC - case model.JdbcEncrypted.String(): - protocol = model.JdbcEncrypted + case model.JDBC_encrypted.String(): + protocol = model.JDBC_encrypted case model.ODBC.String(): protocol = model.ODBC - case model.OdbcEncrypted.String(): - protocol = model.OdbcEncrypted - case model.SqlAccessProtocol.String(): - protocol = model.SqlAccessProtocol - case model.SqlAccessProtocolEncrypted.String(): - protocol = model.SqlAccessProtocolEncrypted - case model.NosqlAccessProtocol.String(): - protocol = model.NosqlAccessProtocol - case model.NosqlAccessProtocolEncrypted.String(): - protocol = model.NosqlAccessProtocolEncrypted + case model.ODBC_encrypted.String(): + protocol = model.ODBC_encrypted + case model.SQL_access_protocol.String(): + protocol = model.SQL_access_protocol + case model.SQL_access_protocol_encrypted.String(): + protocol = model.SQL_access_protocol_encrypted + case model.NoSQL_access_protocol.String(): + protocol = model.NoSQL_access_protocol + case model.NoSQL_access_protocol_encrypted.String(): + protocol = model.NoSQL_access_protocol_encrypted case model.TEXT.String(): protocol = model.TEXT - case model.TextEncrypted.String(): - protocol = model.TextEncrypted + case model.TEXT_encrypted.String(): + protocol = model.TEXT_encrypted case model.BINARY.String(): protocol = model.BINARY - case model.BinaryEncrypted.String(): - protocol = model.BinaryEncrypted + case model.BINARY_encrypted.String(): + protocol = model.BINARY_encrypted case model.SSH.String(): protocol = model.SSH - case model.SshTunnel.String(): - protocol = model.SshTunnel + case model.SSH_tunnel.String(): + protocol = model.SSH_tunnel case model.SMTP.String(): protocol = model.SMTP - case model.SmtpEncrypted.String(): - protocol = model.SmtpEncrypted + case model.SMTP_encrypted.String(): + protocol = model.SMTP_encrypted case model.POP3.String(): protocol = model.POP3 - case model.Pop3Encrypted.String(): - protocol = model.Pop3Encrypted + 
case model.POP3_encrypted.String(): + protocol = model.POP3_encrypted case model.IMAP.String(): protocol = model.IMAP - case model.ImapEncrypted.String(): - protocol = model.ImapEncrypted + case model.IMAP_encrypted.String(): + protocol = model.IMAP_encrypted case model.FTP.String(): protocol = model.FTP case model.FTPS.String(): @@ -4584,8 +4578,8 @@ func parseModel(inputFilename string) { protocol = model.NFS case model.SMB.String(): protocol = model.SMB - case model.SmbEncrypted.String(): - protocol = model.SmbEncrypted + case model.SMB_encrypted.String(): + protocol = model.SMB_encrypted case model.LocalFileAccess.String(): protocol = model.LocalFileAccess case model.NRPE.String(): @@ -4594,12 +4588,12 @@ func parseModel(inputFilename string) { protocol = model.XMPP case model.IIOP.String(): protocol = model.IIOP - case model.IiopEncrypted.String(): - protocol = model.IiopEncrypted + case model.IIOP_encrypted.String(): + protocol = model.IIOP_encrypted case model.JRMP.String(): protocol = model.JRMP - case model.JrmpEncrypted.String(): - protocol = model.JrmpEncrypted + case model.JRMP_encrypted.String(): + protocol = model.JRMP_encrypted case model.InProcessLibraryCall.String(): protocol = model.InProcessLibraryCall case model.ContainerSpawning.String(): @@ -4608,27 +4602,27 @@ func parseModel(inputFilename string) { panic(errors.New("unknown 'protocol' of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Protocol))) } - if commLink.DataAssetsSent != nil { - for _, dataAssetSent := range commLink.DataAssetsSent { + if commLink.Data_assets_sent != nil { + for _, dataAssetSent := range commLink.Data_assets_sent { referencedAsset := fmt.Sprintf("%v", dataAssetSent) checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") dataAssetsSent = append(dataAssetsSent, referencedAsset) } } - if commLink.DataAssetsReceived != nil { - for _, dataAssetReceived 
:= range commLink.DataAssetsReceived { + if commLink.Data_assets_received != nil { + for _, dataAssetReceived := range commLink.Data_assets_received { referencedAsset := fmt.Sprintf("%v", dataAssetReceived) checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") dataAssetsReceived = append(dataAssetsReceived, referencedAsset) } } - if commLink.DiagramTweakWeight > 0 { - weight = commLink.DiagramTweakWeight + if commLink.Diagram_tweak_weight > 0 { + weight = commLink.Diagram_tweak_weight } - constraint = !commLink.DiagramTweakConstraint + constraint = !commLink.Diagram_tweak_constraint checkErr(err) @@ -4645,7 +4639,7 @@ func parseModel(inputFilename string) { Usage: usage, Tags: checkTags(lowerCaseAndTrim(commLink.Tags), "communication link '"+commLinkTitle+"' of technical asset '"+title+"'"), VPN: commLink.VPN, - IpFiltered: commLink.IpFiltered, + IpFiltered: commLink.IP_filtered, Readonly: commLink.Readonly, DataAssetsSent: dataAssetsSent, DataAssetsReceived: dataAssetsReceived, @@ -4655,7 +4649,7 @@ func parseModel(inputFilename string) { communicationLinks = append(communicationLinks, commLink) // track all comm links model.CommunicationLinks[commLink.Id] = commLink - // keep track of map of *all* comm links mapped by target-id (to be able to look up "who is calling me" kind of things) + // keep track of map of *all* comm links mapped by target-id (to be able to lookup "who is calling me" kind of things) model.IncomingTechnicalCommunicationLinksMappedByTargetId[commLink.TargetId] = append( model.IncomingTechnicalCommunicationLinksMappedByTargetId[commLink.TargetId], commLink) } @@ -4677,34 +4671,34 @@ func parseModel(inputFilename string) { Machine: technicalAssetMachine, Internet: asset.Internet, Encryption: encryption, - MultiTenant: asset.MultiTenant, + MultiTenant: asset.Multi_tenant, Redundant: asset.Redundant, - CustomDevelopedParts: asset.CustomDevelopedParts, - UsedAsClientByHuman: 
asset.UsedAsClientByHuman, - OutOfScope: asset.OutOfScope, - JustificationOutOfScope: fmt.Sprintf("%v", asset.JustificationOutOfScope), + CustomDevelopedParts: asset.Custom_developed_parts, + UsedAsClientByHuman: asset.Used_as_client_by_human, + OutOfScope: asset.Out_of_scope, + JustificationOutOfScope: fmt.Sprintf("%v", asset.Justification_out_of_scope), Owner: fmt.Sprintf("%v", asset.Owner), Confidentiality: confidentiality, Integrity: integrity, Availability: availability, - JustificationCiaRating: fmt.Sprintf("%v", asset.JustificationCiaRating), + JustificationCiaRating: fmt.Sprintf("%v", asset.Justification_cia_rating), DataAssetsProcessed: dataAssetsProcessed, DataAssetsStored: dataAssetsStored, DataFormatsAccepted: dataFormatsAccepted, CommunicationLinks: communicationLinks, - DiagramTweakOrder: asset.DiagramTweakOrder, + DiagramTweakOrder: asset.Diagram_tweak_order, } } // Trust Boundaries =============================================================================== checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries := make(map[string]bool) model.ParsedModelRoot.TrustBoundaries = make(map[string]model.TrustBoundary) - for title, boundary := range modelInput.TrustBoundaries { + for title, boundary := range modelInput.Trust_boundaries { id := fmt.Sprintf("%v", boundary.ID) var technicalAssetsInside = make([]string, 0) - if boundary.TechnicalAssetsInside != nil { - parsedInsideAssets := boundary.TechnicalAssetsInside + if boundary.Technical_assets_inside != nil { + parsedInsideAssets := boundary.Technical_assets_inside technicalAssetsInside = make([]string, len(parsedInsideAssets)) for i, parsedInsideAsset := range parsedInsideAssets { technicalAssetsInside[i] = fmt.Sprintf("%v", parsedInsideAsset) @@ -4721,8 +4715,8 @@ func parseModel(inputFilename string) { } var trustBoundariesNested = make([]string, 0) - if boundary.TrustBoundariesNested != nil { - parsedNestedBoundaries := boundary.TrustBoundariesNested + if boundary.Trust_boundaries_nested != 
nil { + parsedNestedBoundaries := boundary.Trust_boundaries_nested trustBoundariesNested = make([]string, len(parsedNestedBoundaries)) for i, parsedNestedBoundary := range parsedNestedBoundaries { trustBoundariesNested[i] = fmt.Sprintf("%v", parsedNestedBoundary) @@ -4772,12 +4766,12 @@ func parseModel(inputFilename string) { // Shared Runtime =============================================================================== model.ParsedModelRoot.SharedRuntimes = make(map[string]model.SharedRuntime) - for title, runtime := range modelInput.SharedRuntimes { + for title, runtime := range modelInput.Shared_runtimes { id := fmt.Sprintf("%v", runtime.ID) var technicalAssetsRunning = make([]string, 0) - if runtime.TechnicalAssetsRunning != nil { - parsedRunningAssets := runtime.TechnicalAssetsRunning + if runtime.Technical_assets_running != nil { + parsedRunningAssets := runtime.Technical_assets_running technicalAssetsRunning = make([]string, len(parsedRunningAssets)) for i, parsedRunningAsset := range parsedRunningAssets { assetId := fmt.Sprintf("%v", parsedRunningAsset) @@ -4790,7 +4784,7 @@ func parseModel(inputFilename string) { Id: id, Title: title, //fmt.Sprintf("%v", boundary["title"]), Description: withDefault(fmt.Sprintf("%v", runtime.Description), title), - Tags: checkTags(runtime.Tags, "shared runtime '"+title+"'"), + Tags: checkTags((runtime.Tags), "shared runtime '"+title+"'"), TechnicalAssetsRunning: technicalAssetsRunning, } checkIdSyntax(id) @@ -4805,11 +4799,11 @@ func parseModel(inputFilename string) { // Individual Risk Categories (just used as regular risk categories) =============================================================================== model.ParsedModelRoot.IndividualRiskCategories = make(map[string]model.RiskCategory) - for title, individualCategory := range modelInput.IndividualRiskCategories { - id := fmt.Sprintf("%v", individualCategory.ID) + for title, indivCat := range modelInput.Individual_risk_categories { + id := fmt.Sprintf("%v", 
indivCat.ID) var function model.RiskFunction - switch individualCategory.Function { + switch indivCat.Function { case model.BusinessSide.String(): function = model.BusinessSide case model.Architecture.String(): @@ -4819,11 +4813,11 @@ func parseModel(inputFilename string) { case model.Operations.String(): function = model.Operations default: - panic(errors.New("unknown 'function' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.Function))) + panic(errors.New("unknown 'function' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", indivCat.Function))) } var stride model.STRIDE - switch individualCategory.STRIDE { + switch indivCat.STRIDE { case model.Spoofing.String(): stride = model.Spoofing case model.Tampering.String(): @@ -4837,26 +4831,26 @@ func parseModel(inputFilename string) { case model.ElevationOfPrivilege.String(): stride = model.ElevationOfPrivilege default: - panic(errors.New("unknown 'stride' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.STRIDE))) + panic(errors.New("unknown 'stride' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", indivCat.STRIDE))) } cat := model.RiskCategory{ Id: id, Title: title, - Description: withDefault(fmt.Sprintf("%v", individualCategory.Description), title), - Impact: fmt.Sprintf("%v", individualCategory.Impact), - ASVS: fmt.Sprintf("%v", individualCategory.ASVS), - CheatSheet: fmt.Sprintf("%v", individualCategory.CheatSheet), - Action: fmt.Sprintf("%v", individualCategory.Action), - Mitigation: fmt.Sprintf("%v", individualCategory.Mitigation), - Check: fmt.Sprintf("%v", individualCategory.Check), - DetectionLogic: fmt.Sprintf("%v", individualCategory.DetectionLogic), - RiskAssessment: fmt.Sprintf("%v", individualCategory.RiskAssessment), - FalsePositives: fmt.Sprintf("%v", individualCategory.FalsePositives), + Description: withDefault(fmt.Sprintf("%v", indivCat.Description), title), + Impact: 
fmt.Sprintf("%v", indivCat.Impact), + ASVS: fmt.Sprintf("%v", indivCat.ASVS), + CheatSheet: fmt.Sprintf("%v", indivCat.Cheat_sheet), + Action: fmt.Sprintf("%v", indivCat.Action), + Mitigation: fmt.Sprintf("%v", indivCat.Mitigation), + Check: fmt.Sprintf("%v", indivCat.Check), + DetectionLogic: fmt.Sprintf("%v", indivCat.Detection_logic), + RiskAssessment: fmt.Sprintf("%v", indivCat.Risk_assessment), + FalsePositives: fmt.Sprintf("%v", indivCat.False_positives), Function: function, STRIDE: stride, - ModelFailurePossibleReason: individualCategory.ModelFailurePossibleReason, - CWE: individualCategory.CWE, + ModelFailurePossibleReason: indivCat.Model_failure_possible_reason, + CWE: indivCat.CWE, } checkIdSyntax(id) if _, exists := model.ParsedModelRoot.IndividualRiskCategories[id]; exists { @@ -4866,8 +4860,8 @@ func parseModel(inputFilename string) { // NOW THE INDIVIDUAL RISK INSTANCES: //individualRiskInstances := make([]model.Risk, 0) - if individualCategory.RisksIdentified != nil { // TODO: also add syntax checks of input YAML when linked asset is not found or when synthetic-id is already used... - for title, individualRiskInstance := range individualCategory.RisksIdentified { + if indivCat.Risks_identified != nil { // TODO: also add syntax checks of input YAML when linked asset is not found or when syntehtic-id is already used... 
+ for title, indivRiskInstance := range indivCat.Risks_identified { var severity model.RiskSeverity var exploitationLikelihood model.RiskExploitationLikelihood var exploitationImpact model.RiskExploitationImpact @@ -4875,7 +4869,7 @@ func parseModel(inputFilename string) { var dataBreachProbability model.DataBreachProbability var dataBreachTechnicalAssetIDs []string - switch individualRiskInstance.Severity { + switch indivRiskInstance.Severity { case model.LowSeverity.String(): severity = model.LowSeverity case model.MediumSeverity.String(): @@ -4889,10 +4883,10 @@ func parseModel(inputFilename string) { case "": // added default severity = model.MediumSeverity default: - panic(errors.New("unknown 'severity' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.Severity))) + panic(errors.New("unknown 'severity' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", indivRiskInstance.Severity))) } - switch individualRiskInstance.ExploitationLikelihood { + switch indivRiskInstance.Exploitation_likelihood { case model.Unlikely.String(): exploitationLikelihood = model.Unlikely case model.Likely.String(): @@ -4904,10 +4898,10 @@ func parseModel(inputFilename string) { case "": // added default exploitationLikelihood = model.Likely default: - panic(errors.New("unknown 'exploitation_likelihood' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationLikelihood))) + panic(errors.New("unknown 'exploitation_likelihood' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", indivRiskInstance.Exploitation_likelihood))) } - switch individualRiskInstance.ExploitationImpact { + switch indivRiskInstance.Exploitation_impact { case model.LowImpact.String(): exploitationImpact = model.LowImpact case model.MediumImpact.String(): @@ -4919,35 +4913,35 @@ func parseModel(inputFilename string) { case "": // added default exploitationImpact = model.MediumImpact 
default: - panic(errors.New("unknown 'exploitation_impact' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationImpact))) + panic(errors.New("unknown 'exploitation_impact' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", indivRiskInstance.Exploitation_impact))) } - if len(individualRiskInstance.MostRelevantDataAsset) > 0 { - mostRelevantDataAssetId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantDataAsset) + if len(indivRiskInstance.Most_relevant_data_asset) > 0 { + mostRelevantDataAssetId = fmt.Sprintf("%v", indivRiskInstance.Most_relevant_data_asset) checkDataAssetTargetExists(mostRelevantDataAssetId, "individual risk '"+title+"'") } - if len(individualRiskInstance.MostRelevantTechnicalAsset) > 0 { - mostRelevantTechnicalAssetId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantTechnicalAsset) + if len(indivRiskInstance.Most_relevant_technical_asset) > 0 { + mostRelevantTechnicalAssetId = fmt.Sprintf("%v", indivRiskInstance.Most_relevant_technical_asset) checkTechnicalAssetExists(mostRelevantTechnicalAssetId, "individual risk '"+title+"'", false) } - if len(individualRiskInstance.MostRelevantCommunicationLink) > 0 { - mostRelevantCommunicationLinkId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantCommunicationLink) + if len(indivRiskInstance.Most_relevant_communication_link) > 0 { + mostRelevantCommunicationLinkId = fmt.Sprintf("%v", indivRiskInstance.Most_relevant_communication_link) checkCommunicationLinkExists(mostRelevantCommunicationLinkId, "individual risk '"+title+"'") } - if len(individualRiskInstance.MostRelevantTrustBoundary) > 0 { - mostRelevantTrustBoundaryId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantTrustBoundary) + if len(indivRiskInstance.Most_relevant_trust_boundary) > 0 { + mostRelevantTrustBoundaryId = fmt.Sprintf("%v", indivRiskInstance.Most_relevant_trust_boundary) checkTrustBoundaryExists(mostRelevantTrustBoundaryId, "individual risk 
'"+title+"'") } - if len(individualRiskInstance.MostRelevantSharedRuntime) > 0 { - mostRelevantSharedRuntimeId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantSharedRuntime) + if len(indivRiskInstance.Most_relevant_shared_runtime) > 0 { + mostRelevantSharedRuntimeId = fmt.Sprintf("%v", indivRiskInstance.Most_relevant_shared_runtime) checkSharedRuntimeExists(mostRelevantSharedRuntimeId, "individual risk '"+title+"'") } - switch individualRiskInstance.DataBreachProbability { + switch indivRiskInstance.Data_breach_probability { case model.Improbable.String(): dataBreachProbability = model.Improbable case model.Possible.String(): @@ -4957,12 +4951,12 @@ func parseModel(inputFilename string) { case "": // added default dataBreachProbability = model.Possible default: - panic(errors.New("unknown 'data_breach_probability' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.DataBreachProbability))) + panic(errors.New("unknown 'data_breach_probability' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", indivRiskInstance.Data_breach_probability))) } - if individualRiskInstance.DataBreachTechnicalAssets != nil { - dataBreachTechnicalAssetIDs = make([]string, len(individualRiskInstance.DataBreachTechnicalAssets)) - for i, parsedReferencedAsset := range individualRiskInstance.DataBreachTechnicalAssets { + if indivRiskInstance.Data_breach_technical_assets != nil { + dataBreachTechnicalAssetIDs = make([]string, len(indivRiskInstance.Data_breach_technical_assets)) + for i, parsedReferencedAsset := range indivRiskInstance.Data_breach_technical_assets { assetId := fmt.Sprintf("%v", parsedReferencedAsset) checkTechnicalAssetExists(assetId, "data breach technical assets of individual risk '"+title+"'", false) dataBreachTechnicalAssetIDs[i] = assetId @@ -4971,7 +4965,7 @@ func parseModel(inputFilename string) { checkErr(err) - individualRiskInstance := model.Risk{ + indivRiskInstance := model.Risk{ SyntheticId: 
createSyntheticId(cat.Id, mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId), Title: fmt.Sprintf("%v", title), Category: cat, @@ -4986,16 +4980,16 @@ func parseModel(inputFilename string) { DataBreachProbability: dataBreachProbability, DataBreachTechnicalAssetIDs: dataBreachTechnicalAssetIDs, } - model.GeneratedRisksByCategory[cat] = append(model.GeneratedRisksByCategory[cat], individualRiskInstance) + model.GeneratedRisksByCategory[cat] = append(model.GeneratedRisksByCategory[cat], indivRiskInstance) } } } // Risk Tracking =============================================================================== model.ParsedModelRoot.RiskTracking = make(map[string]model.RiskTracking) - for syntheticRiskId, riskTracking := range modelInput.RiskTracking { + for syntheticRiskId, riskTracking := range modelInput.Risk_tracking { justification := fmt.Sprintf("%v", riskTracking.Justification) - checkedBy := fmt.Sprintf("%v", riskTracking.CheckedBy) + checkedBy := fmt.Sprintf("%v", riskTracking.Checked_by) ticket := fmt.Sprintf("%v", riskTracking.Ticket) var date time.Time if len(riskTracking.Date) > 0 { @@ -5071,7 +5065,7 @@ func checkTags(tags []string, where string) []string { // in order to prevent Path-Traversal like stuff... 
func removePathElementsFromImageFiles(overview model.Overview) model.Overview { - for i := range overview.Images { + for i, _ := range overview.Images { newValue := make(map[string]string) for file, desc := range overview.Images[i] { newValue[filepath.Base(file)] = desc @@ -5088,8 +5082,8 @@ func applyWildcardRiskTrackingEvaluation() { for syntheticRiskIdPattern, riskTracking := range deferredRiskTrackingDueToWildcardMatching { foundSome := false var matchingRiskIdExpression = regexp.MustCompile(strings.ReplaceAll(regexp.QuoteMeta(syntheticRiskIdPattern), `\*`, `[^@]+`)) - for syntheticRiskId := range model.GeneratedRisksBySyntheticId { - if matchingRiskIdExpression.Match([]byte(syntheticRiskId)) && hasNotYetAnyDirectNonWildcardRiskTracking(syntheticRiskId) { + for syntheticRiskId, _ := range model.GeneratedRisksBySyntheticId { + if matchingRiskIdExpression.Match([]byte(syntheticRiskId)) && hasNotYetAnyDirectNonWildcardRiskTrackings(syntheticRiskId) { foundSome = true model.ParsedModelRoot.RiskTracking[syntheticRiskId] = model.RiskTracking{ SyntheticRiskId: strings.TrimSpace(syntheticRiskId), @@ -5111,7 +5105,7 @@ func applyWildcardRiskTrackingEvaluation() { } } -func hasNotYetAnyDirectNonWildcardRiskTracking(syntheticRiskId string) bool { +func hasNotYetAnyDirectNonWildcardRiskTrackings(syntheticRiskId string) bool { if _, ok := model.ParsedModelRoot.RiskTracking[syntheticRiskId]; ok { return false } @@ -5205,7 +5199,7 @@ func checkNestedTrustBoundariesExisting() { func hash(s string) string { h := fnv.New32a() - _, _ = h.Write([]byte(s)) + h.Write([]byte(s)) return fmt.Sprintf("%v", h.Sum32()) } @@ -5289,7 +5283,7 @@ func writeDataAssetDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fi // Write the DOT file file, err := os.Create(diagramFilenameDOT) checkErr(err) - defer func() { _ = file.Close() }() + defer file.Close() _, err = fmt.Fprintln(file, dotContent.String()) checkErr(err) return file @@ -5369,7 +5363,7 @@ func 
writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: keys := make([]string, 0) - for k := range model.ParsedModelRoot.TrustBoundaries { + for k, _ := range model.ParsedModelRoot.TrustBoundaries { keys = append(keys, k) } sort.Strings(keys) @@ -5394,10 +5388,10 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil } snippet.WriteString("\n subgraph cluster_" + hash(trustBoundary.Id) + " {\n") color, fontColor, bgColor, style, fontname := colors.RgbHexColorTwilight(), colors.RgbHexColorTwilight() /*"#550E0C"*/, "#FAFAFA", "dashed", "Verdana" - penWidth := 4.5 + penwidth := 4.5 if len(trustBoundary.TrustBoundariesNested) > 0 { //color, fontColor, style, fontname = colors.Blue, colors.Blue, "dashed", "Verdana" - penWidth = 5.5 + penwidth = 5.5 } if len(trustBoundary.ParentTrustBoundaryID()) > 0 { bgColor = "#F1F1F1" @@ -5417,7 +5411,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil bgcolor="` + bgColor + `" fontcolor="` + fontColor + `" fontname="` + fontname + `" - penwidth="` + fmt.Sprintf("%f", penWidth) + `" + penwidth="` + fmt.Sprintf("%f", penwidth) + `" forcelabels=true outputorder="nodesfirst" margin="50.0" @@ -5447,7 +5441,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil subgraphSnippetsById[hash(trustBoundary.Id)] = snippet.String() } // here replace links and remove from map after replacement (i.e. 
move snippet into nested) - for i := range subgraphSnippetsById { + for i, _ := range subgraphSnippetsById { re := regexp.MustCompile(`LINK-NEEDS-REPLACED-BY-cluster_([0-9]*);`) for { matches := re.FindStringSubmatch(subgraphSnippetsById[i]) @@ -5462,7 +5456,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil } // now write them all keys = make([]string, 0) - for k := range subgraphSnippetsById { + for k, _ := range subgraphSnippetsById { keys = append(keys, k) } sort.Strings(keys) @@ -5475,7 +5469,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: // Convert map to slice of values: - var techAssets []model.TechnicalAsset + techAssets := []model.TechnicalAsset{} for _, techAsset := range model.ParsedModelRoot.TechnicalAssets { techAssets = append(techAssets, techAsset) } @@ -5501,7 +5495,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil } dir := "forward" if dataFlow.IsBidirectional() { - if !suppressBidirectionalArrows { // as it does not work as bug in graphviz with ortho: https://gitlab.com/graphviz/graphviz/issues/144 + if !suppressBidirectionalArrows { // as it does not work as bug in grahviz with ortho: https://gitlab.com/graphviz/graphviz/issues/144 dir = "both" } } @@ -5532,7 +5526,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil // Write the DOT file file, err := os.Create(diagramFilenameDOT) checkErr(err) - defer func() { _ = file.Close() }() + defer file.Close() _, err = fmt.Fprintln(file, dotContent.String()) checkErr(err) return file @@ -5674,21 +5668,21 @@ func renderDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string) { fmt.Println("Rendering data flow diagram input") } // tmp files - tmpFileDOT, err := 
os.CreateTemp(model.TempFolder, "diagram-*-.gv") + tmpFileDOT, err := ioutil.TempFile(model.TempFolder, "diagram-*-.gv") checkErr(err) - defer func() { _ = os.Remove(tmpFileDOT.Name()) }() + defer os.Remove(tmpFileDOT.Name()) - tmpFilePNG, err := os.CreateTemp(model.TempFolder, "diagram-*-.png") + tmpFilePNG, err := ioutil.TempFile(model.TempFolder, "diagram-*-.png") checkErr(err) - defer func() { _ = os.Remove(tmpFilePNG.Name()) }() + defer os.Remove(tmpFilePNG.Name()) // copy into tmp file as input - input, err := os.ReadFile(dotFile.Name()) + input, err := ioutil.ReadFile(dotFile.Name()) if err != nil { fmt.Println(err) return } - err = os.WriteFile(tmpFileDOT.Name(), input, 0644) + err = ioutil.WriteFile(tmpFileDOT.Name(), input, 0644) if err != nil { fmt.Println("Error creating", tmpFileDOT.Name()) fmt.Println(err) @@ -5704,12 +5698,12 @@ func renderDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string) { panic(errors.New("graph rendering call failed with error:" + err.Error())) } // copy into resulting file - input, err = os.ReadFile(tmpFilePNG.Name()) + input, err = ioutil.ReadFile(tmpFilePNG.Name()) if err != nil { fmt.Println(err) return } - err = os.WriteFile(targetDir+"/"+dataFlowDiagramFilenamePNG, input, 0644) + err = ioutil.WriteFile(targetDir+"/"+dataFlowDiagramFilenamePNG, input, 0644) if err != nil { fmt.Println("Error creating", dataFlowDiagramFilenamePNG) fmt.Println(err) @@ -5722,21 +5716,21 @@ func renderDataAssetDiagramGraphvizImage(dotFile *os.File, targetDir string) { / fmt.Println("Rendering data asset diagram input") } // tmp files - tmpFileDOT, err := os.CreateTemp(model.TempFolder, "diagram-*-.gv") + tmpFileDOT, err := ioutil.TempFile(model.TempFolder, "diagram-*-.gv") checkErr(err) - defer func() { _ = os.Remove(tmpFileDOT.Name()) }() + defer os.Remove(tmpFileDOT.Name()) - tmpFilePNG, err := os.CreateTemp(model.TempFolder, "diagram-*-.png") + tmpFilePNG, err := ioutil.TempFile(model.TempFolder, "diagram-*-.png") checkErr(err) 
- defer func() { _ = os.Remove(tmpFilePNG.Name()) }() + defer os.Remove(tmpFilePNG.Name()) // copy into tmp file as input - input, err := os.ReadFile(dotFile.Name()) + input, err := ioutil.ReadFile(dotFile.Name()) if err != nil { fmt.Println(err) return } - err = os.WriteFile(tmpFileDOT.Name(), input, 0644) + err = ioutil.WriteFile(tmpFileDOT.Name(), input, 0644) if err != nil { fmt.Println("Error creating", tmpFileDOT.Name()) fmt.Println(err) @@ -5752,12 +5746,12 @@ func renderDataAssetDiagramGraphvizImage(dotFile *os.File, targetDir string) { / panic(errors.New("graph rendering call failed with error: " + err.Error())) } // copy into resulting file - input, err = os.ReadFile(tmpFilePNG.Name()) + input, err = ioutil.ReadFile(tmpFilePNG.Name()) if err != nil { fmt.Println(err) return } - err = os.WriteFile(targetDir+"/"+dataAssetDiagramFilenamePNG, input, 0644) + err = ioutil.WriteFile(targetDir+"/"+dataAssetDiagramFilenamePNG, input, 0644) if err != nil { fmt.Println("Error creating", dataAssetDiagramFilenamePNG) fmt.Println(err) diff --git a/model/types.go b/model/types.go index e904b59d..d22c16db 100644 --- a/model/types.go +++ b/model/types.go @@ -26,24 +26,14 @@ var GeneratedRisksBySyntheticId map[string]Risk var AllSupportedTags map[string]bool -var ( - _ = ParseEncryptionStyle - _ = SortedKeysOfDataAssets - _ = SortedKeysOfTechnicalAssets - _ = SortedDataAssetsByDataBreachProbabilityAndTitleStillAtRisk - _ = ReduceToOnlyHighRisk - _ = ReduceToOnlyMediumRisk - _ = ReduceToOnlyLowRisk -) - func Init() { - CommunicationLinks = make(map[string]CommunicationLink) - IncomingTechnicalCommunicationLinksMappedByTargetId = make(map[string][]CommunicationLink) - DirectContainingTrustBoundaryMappedByTechnicalAssetId = make(map[string]TrustBoundary) - DirectContainingSharedRuntimeMappedByTechnicalAssetId = make(map[string]SharedRuntime) - GeneratedRisksByCategory = make(map[RiskCategory][]Risk) - GeneratedRisksBySyntheticId = make(map[string]Risk) - AllSupportedTags = 
make(map[string]bool) + CommunicationLinks = make(map[string]CommunicationLink, 0) + IncomingTechnicalCommunicationLinksMappedByTargetId = make(map[string][]CommunicationLink, 0) + DirectContainingTrustBoundaryMappedByTechnicalAssetId = make(map[string]TrustBoundary, 0) + DirectContainingSharedRuntimeMappedByTechnicalAssetId = make(map[string]SharedRuntime, 0) + GeneratedRisksByCategory = make(map[RiskCategory][]Risk, 0) + GeneratedRisksBySyntheticId = make(map[string]Risk, 0) + AllSupportedTags = make(map[string]bool, 0) } func AddToListOfSupportedTags(tags []string) { @@ -62,10 +52,10 @@ type CustomRiskRule interface { func AddTagToModelInput(modelInput *ModelInput, tag string, dryRun bool, changes *[]string) { tag = NormalizeTag(tag) - if !Contains(modelInput.TagsAvailable, tag) { + if !Contains(modelInput.Tags_available, tag) { *changes = append(*changes, "adding tag: "+tag) if !dryRun { - modelInput.TagsAvailable = append(modelInput.TagsAvailable, tag) + modelInput.Tags_available = append(modelInput.Tags_available, tag) } } } @@ -82,138 +72,138 @@ func MakeID(val string) string { // === Model Type Stuff ====================================== type ModelInput struct { // TODO: Eventually remove this and directly use ParsedModelRoot? But then the error messages for model errors are not quite as good anymore... 
- ThreagileVersion string - Title string - Author Author - Date string - BusinessOverview Overview - TechnicalOverview Overview - BusinessCriticality string - ManagementSummaryComment string - Questions map[string]string - AbuseCases map[string]string - SecurityRequirements map[string]string - TagsAvailable []string - DataAssets map[string]InputDataAsset - TechnicalAssets map[string]InputTechnicalAsset - TrustBoundaries map[string]InputTrustBoundary - SharedRuntimes map[string]InputSharedRuntime - IndividualRiskCategories map[string]InputIndividualRiskCategory - RiskTracking map[string]InputRiskTracking - DiagramTweakNodesep, DiagramTweakRanksep int - DiagramTweakEdgeLayout string - DiagramTweakSuppressEdgeLabels bool - DiagramTweakLayoutLeftToRight bool - DiagramTweakInvisibleConnectionsBetweenAssets []string - DiagramTweakSameRankAssets []string + Threagile_version string + Title string + Author Author + Date string + Business_overview Overview + Technical_overview Overview + Business_criticality string + Management_summary_comment string + Questions map[string]string + Abuse_cases map[string]string + Security_requirements map[string]string + Tags_available []string + Data_assets map[string]InputDataAsset + Technical_assets map[string]InputTechnicalAsset + Trust_boundaries map[string]InputTrustBoundary + Shared_runtimes map[string]InputSharedRuntime + Individual_risk_categories map[string]InputIndividualRiskCategory + Risk_tracking map[string]InputRiskTracking + Diagram_tweak_nodesep, Diagram_tweak_ranksep int + Diagram_tweak_edge_layout string + Diagram_tweak_suppress_edge_labels bool + Diagram_tweak_layout_left_to_right bool + Diagram_tweak_invisible_connections_between_assets []string + Diagram_tweak_same_rank_assets []string } type InputDataAsset struct { - ID string `json:"id"` - Description string `json:"description"` - Usage string `json:"usage"` - Tags []string `json:"tags"` - Origin string `json:"origin"` - Owner string `json:"owner"` - Quantity string 
`json:"quantity"` - Confidentiality string `json:"confidentiality"` - Integrity string `json:"integrity"` - Availability string `json:"availability"` - JustificationCiaRating string `json:"justification_cia_rating"` + ID string `json:"id"` + Description string `json:"description"` + Usage string `json:"usage"` + Tags []string `json:"tags"` + Origin string `json:"origin"` + Owner string `json:"owner"` + Quantity string `json:"quantity"` + Confidentiality string `json:"confidentiality"` + Integrity string `json:"integrity"` + Availability string `json:"availability"` + Justification_cia_rating string `json:"justification_cia_rating"` } type InputTechnicalAsset struct { - ID string `json:"id"` - Description string `json:"description"` - Type string `json:"type"` - Usage string `json:"usage"` - UsedAsClientByHuman bool `json:"used_as_client_by_human"` - OutOfScope bool `json:"out_of_scope"` - JustificationOutOfScope string `json:"justification_out_of_scope"` - Size string `json:"size"` - Technology string `json:"technology"` - Tags []string `json:"tags"` - Internet bool `json:"internet"` - Machine string `json:"machine"` - Encryption string `json:"encryption"` - Owner string `json:"owner"` - Confidentiality string `json:"confidentiality"` - Integrity string `json:"integrity"` - Availability string `json:"availability"` - JustificationCiaRating string `json:"justification_cia_rating"` - MultiTenant bool `json:"multi_tenant"` - Redundant bool `json:"redundant"` - CustomDevelopedParts bool `json:"custom_developed_parts"` - DataAssetsProcessed []string `json:"data_assets_processed"` - DataAssetsStored []string `json:"data_assets_stored"` - DataFormatsAccepted []string `json:"data_formats_accepted"` - DiagramTweakOrder int `json:"diagram_tweak_order"` - CommunicationLinks map[string]InputCommunicationLink `json:"communication_links"` + ID string `json:"id"` + Description string `json:"description"` + Type string `json:"type"` + Usage string `json:"usage"` + 
Used_as_client_by_human bool `json:"used_as_client_by_human"` + Out_of_scope bool `json:"out_of_scope"` + Justification_out_of_scope string `json:"justification_out_of_scope"` + Size string `json:"size"` + Technology string `json:"technology"` + Tags []string `json:"tags"` + Internet bool `json:"internet"` + Machine string `json:"machine"` + Encryption string `json:"encryption"` + Owner string `json:"owner"` + Confidentiality string `json:"confidentiality"` + Integrity string `json:"integrity"` + Availability string `json:"availability"` + Justification_cia_rating string `json:"justification_cia_rating"` + Multi_tenant bool `json:"multi_tenant"` + Redundant bool `json:"redundant"` + Custom_developed_parts bool `json:"custom_developed_parts"` + Data_assets_processed []string `json:"data_assets_processed"` + Data_assets_stored []string `json:"data_assets_stored"` + Data_formats_accepted []string `json:"data_formats_accepted"` + Diagram_tweak_order int `json:"diagram_tweak_order"` + Communication_links map[string]InputCommunicationLink `json:"communication_links"` } type InputCommunicationLink struct { - Target string `json:"target"` - Description string `json:"description"` - Protocol string `json:"protocol"` - Authentication string `json:"authentication"` - Authorization string `json:"authorization"` - Tags []string `json:"tags"` - VPN bool `json:"vpn"` - IpFiltered bool `json:"ip_filtered"` - Readonly bool `json:"readonly"` - Usage string `json:"usage"` - DataAssetsSent []string `json:"data_assets_sent"` - DataAssetsReceived []string `json:"data_assets_received"` - DiagramTweakWeight int `json:"diagram_tweak_weight"` - DiagramTweakConstraint bool `json:"diagram_tweak_constraint"` + Target string `json:"target"` + Description string `json:"description"` + Protocol string `json:"protocol"` + Authentication string `json:"authentication"` + Authorization string `json:"authorization"` + Tags []string `json:"tags"` + VPN bool `json:"vpn"` + IP_filtered bool 
`json:"ip_filtered"` + Readonly bool `json:"readonly"` + Usage string `json:"usage"` + Data_assets_sent []string `json:"data_assets_sent"` + Data_assets_received []string `json:"data_assets_received"` + Diagram_tweak_weight int `json:"diagram_tweak_weight"` + Diagram_tweak_constraint bool `json:"diagram_tweak_constraint"` } type InputSharedRuntime struct { - ID string `json:"id"` - Description string `json:"description"` - Tags []string `json:"tags"` - TechnicalAssetsRunning []string `json:"technical_assets_running"` + ID string `json:"id"` + Description string `json:"description"` + Tags []string `json:"tags"` + Technical_assets_running []string `json:"technical_assets_running"` } type InputTrustBoundary struct { - ID string `json:"id"` - Description string `json:"description"` - Type string `json:"type"` - Tags []string `json:"tags"` - TechnicalAssetsInside []string `json:"technical_assets_inside"` - TrustBoundariesNested []string `json:"trust_boundaries_nested"` + ID string `json:"id"` + Description string `json:"description"` + Type string `json:"type"` + Tags []string `json:"tags"` + Technical_assets_inside []string `json:"technical_assets_inside"` + Trust_boundaries_nested []string `json:"trust_boundaries_nested"` } type InputIndividualRiskCategory struct { - ID string `json:"id"` - Description string `json:"description"` - Impact string `json:"impact"` - ASVS string `json:"asvs"` - CheatSheet string `json:"cheat_sheet"` - Action string `json:"action"` - Mitigation string `json:"mitigation"` - Check string `json:"check"` - Function string `json:"function"` - STRIDE string `json:"stride"` - DetectionLogic string `json:"detection_logic"` - RiskAssessment string `json:"risk_assessment"` - FalsePositives string `json:"false_positives"` - ModelFailurePossibleReason bool `json:"model_failure_possible_reason"` - CWE int `json:"cwe"` - RisksIdentified map[string]InputRiskIdentified `json:"risks_identified"` + ID string `json:"id"` + Description string 
`json:"description"` + Impact string `json:"impact"` + ASVS string `json:"asvs"` + Cheat_sheet string `json:"cheat_sheet"` + Action string `json:"action"` + Mitigation string `json:"mitigation"` + Check string `json:"check"` + Function string `json:"function"` + STRIDE string `json:"stride"` + Detection_logic string `json:"detection_logic"` + Risk_assessment string `json:"risk_assessment"` + False_positives string `json:"false_positives"` + Model_failure_possible_reason bool `json:"model_failure_possible_reason"` + CWE int `json:"cwe"` + Risks_identified map[string]InputRiskIdentified `json:"risks_identified"` } type InputRiskIdentified struct { - Severity string `json:"severity"` - ExploitationLikelihood string `json:"exploitation_likelihood"` - ExploitationImpact string `json:"exploitation_impact"` - DataBreachProbability string `json:"data_breach_probability"` - DataBreachTechnicalAssets []string `json:"data_breach_technical_assets"` - MostRelevantDataAsset string `json:"most_relevant_data_asset"` - MostRelevantTechnicalAsset string `json:"most_relevant_technical_asset"` - MostRelevantCommunicationLink string `json:"most_relevant_communication_link"` - MostRelevantTrustBoundary string `json:"most_relevant_trust_boundary"` - MostRelevantSharedRuntime string `json:"most_relevant_shared_runtime"` + Severity string `json:"severity"` + Exploitation_likelihood string `json:"exploitation_likelihood"` + Exploitation_impact string `json:"exploitation_impact"` + Data_breach_probability string `json:"data_breach_probability"` + Data_breach_technical_assets []string `json:"data_breach_technical_assets"` + Most_relevant_data_asset string `json:"most_relevant_data_asset"` + Most_relevant_technical_asset string `json:"most_relevant_technical_asset"` + Most_relevant_communication_link string `json:"most_relevant_communication_link"` + Most_relevant_trust_boundary string `json:"most_relevant_trust_boundary"` + Most_relevant_shared_runtime string 
`json:"most_relevant_shared_runtime"` } type InputRiskTracking struct { @@ -221,7 +211,7 @@ type InputRiskTracking struct { Justification string `json:"justification"` Ticket string `json:"ticket"` Date string `json:"date"` - CheckedBy string `json:"checked_by"` + Checked_by string `json:"checked_by"` } // TypeDescription contains a name for a type and its description @@ -520,14 +510,14 @@ type Authorization int const ( NoneAuthorization Authorization = iota TechnicalUser - EndUserIdentityPropagation + EnduserIdentityPropagation ) func AuthorizationValues() []TypeEnum { return []TypeEnum{ NoneAuthorization, TechnicalUser, - EndUserIdentityPropagation, + EnduserIdentityPropagation, } } @@ -640,7 +630,7 @@ const ( Transparent DataWithSymmetricSharedKey DataWithAsymmetricSharedKey - DataWithEndUserIndividualKey + DataWithEnduserIndividualKey ) func EncryptionStyleValues() []TypeEnum { @@ -649,7 +639,7 @@ func EncryptionStyleValues() []TypeEnum { Transparent, DataWithSymmetricSharedKey, DataWithAsymmetricSharedKey, - DataWithEndUserIndividualKey, + DataWithEnduserIndividualKey, } } @@ -681,7 +671,7 @@ func (what EncryptionStyle) Explain() string { } func (what EncryptionStyle) Title() string { - return [...]string{"None", "Transparent", "Data with Symmetric Shared Key", "Data with Asymmetric Shared Key", "Data with End-User Individual Key"}[what] + return [...]string{"None", "Transparent", "Data with Symmetric Shared Key", "Data with Asymmetric Shared Key", "Data with Enduser Individual Key"}[what] } type DataFormat int @@ -738,29 +728,29 @@ const ( HTTPS WS WSS - ReverseProxyWebProtocol - ReverseProxyWebProtocolEncrypted + Reverse_proxy_web_protocol + Reverse_proxy_web_protocol_encrypted MQTT JDBC - JdbcEncrypted + JDBC_encrypted ODBC - OdbcEncrypted - SqlAccessProtocol - SqlAccessProtocolEncrypted - NosqlAccessProtocol - NosqlAccessProtocolEncrypted + ODBC_encrypted + SQL_access_protocol + SQL_access_protocol_encrypted + NoSQL_access_protocol + 
NoSQL_access_protocol_encrypted BINARY - BinaryEncrypted + BINARY_encrypted TEXT - TextEncrypted + TEXT_encrypted SSH - SshTunnel + SSH_tunnel SMTP - SmtpEncrypted + SMTP_encrypted POP3 - Pop3Encrypted + POP3_encrypted IMAP - ImapEncrypted + IMAP_encrypted FTP FTPS SFTP @@ -770,14 +760,14 @@ const ( JMS NFS SMB - SmbEncrypted + SMB_encrypted LocalFileAccess NRPE XMPP IIOP - IiopEncrypted + IIOP_encrypted JRMP - JrmpEncrypted + JRMP_encrypted InProcessLibraryCall ContainerSpawning ) @@ -789,29 +779,29 @@ func ProtocolValues() []TypeEnum { HTTPS, WS, WSS, - ReverseProxyWebProtocol, - ReverseProxyWebProtocolEncrypted, + Reverse_proxy_web_protocol, + Reverse_proxy_web_protocol_encrypted, MQTT, JDBC, - JdbcEncrypted, + JDBC_encrypted, ODBC, - OdbcEncrypted, - SqlAccessProtocol, - SqlAccessProtocolEncrypted, - NosqlAccessProtocol, - NosqlAccessProtocolEncrypted, + ODBC_encrypted, + SQL_access_protocol, + SQL_access_protocol_encrypted, + NoSQL_access_protocol, + NoSQL_access_protocol_encrypted, BINARY, - BinaryEncrypted, + BINARY_encrypted, TEXT, - TextEncrypted, + TEXT_encrypted, SSH, - SshTunnel, + SSH_tunnel, SMTP, - SmtpEncrypted, + SMTP_encrypted, POP3, - Pop3Encrypted, + POP3_encrypted, IMAP, - ImapEncrypted, + IMAP_encrypted, FTP, FTPS, SFTP, @@ -821,14 +811,14 @@ func ProtocolValues() []TypeEnum { JMS, NFS, SMB, - SmbEncrypted, + SMB_encrypted, LocalFileAccess, NRPE, XMPP, IIOP, - IiopEncrypted, + IIOP_encrypted, JRMP, - JrmpEncrypted, + JRMP_encrypted, InProcessLibraryCall, ContainerSpawning, } @@ -898,24 +888,24 @@ func (what Protocol) IsProcessLocal() bool { } func (what Protocol) IsEncrypted() bool { - return what == HTTPS || what == WSS || what == JdbcEncrypted || what == OdbcEncrypted || - what == NosqlAccessProtocolEncrypted || what == SqlAccessProtocolEncrypted || what == BinaryEncrypted || what == TextEncrypted || what == SSH || what == SshTunnel || - what == FTPS || what == SFTP || what == SCP || what == LDAPS || what == ReverseProxyWebProtocolEncrypted 
|| - what == IiopEncrypted || what == JrmpEncrypted || what == SmbEncrypted || what == SmtpEncrypted || what == Pop3Encrypted || what == ImapEncrypted + return what == HTTPS || what == WSS || what == JDBC_encrypted || what == ODBC_encrypted || + what == NoSQL_access_protocol_encrypted || what == SQL_access_protocol_encrypted || what == BINARY_encrypted || what == TEXT_encrypted || what == SSH || what == SSH_tunnel || + what == FTPS || what == SFTP || what == SCP || what == LDAPS || what == Reverse_proxy_web_protocol_encrypted || + what == IIOP_encrypted || what == JRMP_encrypted || what == SMB_encrypted || what == SMTP_encrypted || what == POP3_encrypted || what == IMAP_encrypted } func (what Protocol) IsPotentialDatabaseAccessProtocol(includingLaxDatabaseProtocols bool) bool { - strictlyDatabaseOnlyProtocol := what == JdbcEncrypted || what == OdbcEncrypted || - what == NosqlAccessProtocolEncrypted || what == SqlAccessProtocolEncrypted || what == JDBC || what == ODBC || what == NosqlAccessProtocol || what == SqlAccessProtocol + strictlyDatabaseOnlyProtocol := what == JDBC_encrypted || what == ODBC_encrypted || + what == NoSQL_access_protocol_encrypted || what == SQL_access_protocol_encrypted || what == JDBC || what == ODBC || what == NoSQL_access_protocol || what == SQL_access_protocol if includingLaxDatabaseProtocols { // include HTTP for REST-based NoSQL-DBs as well as unknown binary - return strictlyDatabaseOnlyProtocol || what == HTTPS || what == HTTP || what == BINARY || what == BinaryEncrypted + return strictlyDatabaseOnlyProtocol || what == HTTPS || what == HTTP || what == BINARY || what == BINARY_encrypted } return strictlyDatabaseOnlyProtocol } func (what Protocol) IsPotentialWebAccessProtocol() bool { - return what == HTTP || what == HTTPS || what == WS || what == WSS || what == ReverseProxyWebProtocol || what == ReverseProxyWebProtocolEncrypted + return what == HTTP || what == HTTPS || what == WS || what == WSS || what == Reverse_proxy_web_protocol || 
what == Reverse_proxy_web_protocol_encrypted } type TechnicalAssetTechnology int @@ -1127,7 +1117,7 @@ func (what TechnicalAssetTechnology) IsSecurityControlRelated() bool { return what == Vault || what == HSM || what == WAF || what == IDS || what == IPS } -func (what TechnicalAssetTechnology) IsUnprotectedCommunicationsTolerated() bool { +func (what TechnicalAssetTechnology) IsUnprotectedCommsTolerated() bool { return what == Monitoring || what == IDS || what == IPS } @@ -1160,11 +1150,11 @@ func (what TechnicalAssetTechnology) IsLessProtectedType() bool { what == Mainframe } -func (what TechnicalAssetTechnology) IsUsuallyProcessingEndUserRequests() bool { +func (what TechnicalAssetTechnology) IsUsuallyProcessingEnduserRequests() bool { return what == WebServer || what == WebApplication || what == ApplicationServer || what == ERP || what == WebServiceREST || what == WebServiceSOAP || what == EJB || what == ReportEngine } -func (what TechnicalAssetTechnology) IsUsuallyStoringEndUserData() bool { +func (what TechnicalAssetTechnology) IsUsuallyStoringEnduserData() bool { return what == Database || what == ERP || what == FileServer || what == LocalFileSystem || what == BlockStorage || what == MailServer || what == StreamProcessing || what == MessageQueue } @@ -1345,8 +1335,8 @@ func (what DataAsset) IsTaggedWithAny(tags ...string) bool { return ContainsCaseInsensitiveAny(what.Tags, tags...) 
} -func (what DataAsset) IsTaggedWithBaseTag(baseTag string) bool { - return IsTaggedWithBaseTag(what.Tags, baseTag) +func (what DataAsset) IsTaggedWithBaseTag(basetag string) bool { + return IsTaggedWithBaseTag(what.Tags, basetag) } /* @@ -1396,7 +1386,7 @@ func (what DataAsset) IdentifiedRisksByResponsibleTechnicalAssetId() map[string] } result := make(map[string][]Risk) - for techAssetId := range uniqueTechAssetIDsResponsibleForThisDataAsset { + for techAssetId, _ := range uniqueTechAssetIDsResponsibleForThisDataAsset { result[techAssetId] = append(result[techAssetId], ParsedModelRoot.TechnicalAssets[techAssetId].GeneratedRisks()...) } return result @@ -1548,11 +1538,11 @@ func (what DataAsset) ReceivedViaCommLinksSorted() []CommunicationLink { return result } -func IsTaggedWithBaseTag(tags []string, baseTag string) bool { // base tags are before the colon ":" like in "aws:ec2" it's "aws". The subtag is after the colon. Also, a pure "aws" tag matches the base tag "aws" - baseTag = strings.ToLower(strings.TrimSpace(baseTag)) +func IsTaggedWithBaseTag(tags []string, basetag string) bool { // basetags are before the colon ":" like in "aws:ec2" it's "aws". The subtag is after the colon. Also a pure "aws" tag matches the basetag "aws" + basetag = strings.ToLower(strings.TrimSpace(basetag)) for _, tag := range tags { tag = strings.ToLower(strings.TrimSpace(tag)) - if tag == baseTag || strings.HasPrefix(tag, baseTag+":") { + if tag == basetag || strings.HasPrefix(tag, basetag+":") { return true } } @@ -1585,12 +1575,11 @@ func (what TechnicalAsset) IsTaggedWithAny(tags ...string) bool { return ContainsCaseInsensitiveAny(what.Tags, tags...) 
} -func (what TechnicalAsset) IsTaggedWithBaseTag(baseTag string) bool { - return IsTaggedWithBaseTag(what.Tags, baseTag) +func (what TechnicalAsset) IsTaggedWithBaseTag(basetag string) bool { + return IsTaggedWithBaseTag(what.Tags, basetag) } // first use the tag(s) of the asset itself, then their trust boundaries (recursively up) and then their shared runtime - func (what TechnicalAsset) IsTaggedWithAnyTraversingUp(tags ...string) bool { if ContainsCaseInsensitiveAny(what.Tags, tags...) { return true @@ -1940,8 +1929,8 @@ func (what CommunicationLink) IsTaggedWithAny(tags ...string) bool { return ContainsCaseInsensitiveAny(what.Tags, tags...) } -func (what CommunicationLink) IsTaggedWithBaseTag(baseTag string) bool { - return IsTaggedWithBaseTag(what.Tags, baseTag) +func (what CommunicationLink) IsTaggedWithBaseTag(basetag string) bool { + return IsTaggedWithBaseTag(what.Tags, basetag) } type ByTechnicalCommunicationLinkIdSort []CommunicationLink @@ -1972,8 +1961,8 @@ func (what TrustBoundary) IsTaggedWithAny(tags ...string) bool { return ContainsCaseInsensitiveAny(what.Tags, tags...) } -func (what TrustBoundary) IsTaggedWithBaseTag(baseTag string) bool { - return IsTaggedWithBaseTag(what.Tags, baseTag) +func (what TrustBoundary) IsTaggedWithBaseTag(basetag string) bool { + return IsTaggedWithBaseTag(what.Tags, basetag) } func (what TrustBoundary) IsTaggedWithAnyTraversingUp(tags ...string) bool { @@ -2041,8 +2030,8 @@ func (what SharedRuntime) IsTaggedWithAny(tags ...string) bool { return ContainsCaseInsensitiveAny(what.Tags, tags...) 
} -func (what SharedRuntime) IsTaggedWithBaseTag(baseTag string) bool { - return IsTaggedWithBaseTag(what.Tags, baseTag) +func (what SharedRuntime) IsTaggedWithBaseTag(basetag string) bool { + return IsTaggedWithBaseTag(what.Tags, basetag) } func (what SharedRuntime) HighestConfidentiality() Confidentiality { @@ -2214,7 +2203,7 @@ type ParsedModel struct { func SortedTechnicalAssetIDs() []string { res := make([]string, 0) - for id := range ParsedModelRoot.TechnicalAssets { + for id, _ := range ParsedModelRoot.TechnicalAssets { res = append(res, id) } sort.Strings(res) @@ -2238,10 +2227,9 @@ func TagsActuallyUsed() []string { // === Sorting stuff ===================================== // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - func SortedKeysOfIndividualRiskCategories() []string { keys := make([]string, 0) - for k := range ParsedModelRoot.IndividualRiskCategories { + for k, _ := range ParsedModelRoot.IndividualRiskCategories { keys = append(keys, k) } sort.Strings(keys) @@ -2249,10 +2237,9 @@ func SortedKeysOfIndividualRiskCategories() []string { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - func SortedKeysOfSecurityRequirements() []string { keys := make([]string, 0) - for k := range ParsedModelRoot.SecurityRequirements { + for k, _ := range ParsedModelRoot.SecurityRequirements { keys = append(keys, k) } sort.Strings(keys) @@ -2260,10 +2247,9 @@ func SortedKeysOfSecurityRequirements() []string { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - func SortedKeysOfAbuseCases() []string { keys := make([]string, 0) - for k := range ParsedModelRoot.AbuseCases { + for k, _ := range ParsedModelRoot.AbuseCases { keys = append(keys, k) } sort.Strings(keys) @@ -2271,10 +2257,9 @@ func SortedKeysOfAbuseCases() []string { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) 
way: - func SortedKeysOfQuestions() []string { keys := make([]string, 0) - for k := range ParsedModelRoot.Questions { + for k, _ := range ParsedModelRoot.Questions { keys = append(keys, k) } sort.Strings(keys) @@ -2282,10 +2267,9 @@ func SortedKeysOfQuestions() []string { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - func SortedKeysOfDataAssets() []string { keys := make([]string, 0) - for k := range ParsedModelRoot.DataAssets { + for k, _ := range ParsedModelRoot.DataAssets { keys = append(keys, k) } sort.Strings(keys) @@ -2293,10 +2277,9 @@ func SortedKeysOfDataAssets() []string { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - func SortedKeysOfTechnicalAssets() []string { keys := make([]string, 0) - for k := range ParsedModelRoot.TechnicalAssets { + for k, _ := range ParsedModelRoot.TechnicalAssets { keys = append(keys, k) } sort.Strings(keys) @@ -2356,7 +2339,6 @@ func SharedRuntimesTaggedWithAny(tags ...string) []SharedRuntime { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - func SortedTechnicalAssetsByTitle() []TechnicalAsset { assets := make([]TechnicalAsset, 0) for _, asset := range ParsedModelRoot.TechnicalAssets { @@ -2367,7 +2349,6 @@ func SortedTechnicalAssetsByTitle() []TechnicalAsset { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - func SortedDataAssetsByTitle() []DataAsset { assets := make([]DataAsset, 0) for _, asset := range ParsedModelRoot.DataAssets { @@ -2378,7 +2359,6 @@ func SortedDataAssetsByTitle() []DataAsset { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - func SortedDataAssetsByDataBreachProbabilityAndTitleStillAtRisk() []DataAsset { assets := make([]DataAsset, 0) for _, asset := range ParsedModelRoot.DataAssets { @@ -2389,7 +2369,6 @@ func 
SortedDataAssetsByDataBreachProbabilityAndTitleStillAtRisk() []DataAsset { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - func SortedDataAssetsByDataBreachProbabilityAndTitle() []DataAsset { assets := make([]DataAsset, 0) for _, asset := range ParsedModelRoot.DataAssets { @@ -2400,7 +2379,6 @@ func SortedDataAssetsByDataBreachProbabilityAndTitle() []DataAsset { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - func SortedTechnicalAssetsByRiskSeverityAndTitle() []TechnicalAsset { assets := make([]TechnicalAsset, 0) for _, asset := range ParsedModelRoot.TechnicalAssets { @@ -2411,7 +2389,6 @@ func SortedTechnicalAssetsByRiskSeverityAndTitle() []TechnicalAsset { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - func SortedTechnicalAssetsByRAAAndTitle() []TechnicalAsset { assets := make([]TechnicalAsset, 0) for _, asset := range ParsedModelRoot.TechnicalAssets { @@ -2447,10 +2424,9 @@ func OutOfScopeTechnicalAssets() []TechnicalAsset { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - func SortedKeysOfTrustBoundaries() []string { keys := make([]string, 0) - for k := range ParsedModelRoot.TrustBoundaries { + for k, _ := range ParsedModelRoot.TrustBoundaries { keys = append(keys, k) } sort.Strings(keys) @@ -2467,10 +2443,9 @@ func SortedTrustBoundariesByTitle() []TrustBoundary { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - func SortedKeysOfSharedRuntime() []string { keys := make([]string, 0) - for k := range ParsedModelRoot.SharedRuntimes { + for k, _ := range ParsedModelRoot.SharedRuntimes { keys = append(keys, k) } sort.Strings(keys) @@ -2501,7 +2476,6 @@ func QuestionsUnanswered() int { // Line Styles: // dotted when model forgery attempt (i.e. 
nothing being sent and received) - func (what CommunicationLink) DetermineArrowLineStyle() string { if len(what.DataAssetsSent) == 0 && len(what.DataAssetsReceived) == 0 { return "dotted" // dotted, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... @@ -2513,7 +2487,6 @@ func (what CommunicationLink) DetermineArrowLineStyle() string { } // dotted when model forgery attempt (i.e. nothing being processed or stored) - func (what TechnicalAsset) DetermineShapeBorderLineStyle() string { if len(what.DataAssetsProcessed) == 0 && len(what.DataAssetsStored) == 0 || what.OutOfScope { return "dotted" // dotted, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... @@ -2522,7 +2495,6 @@ func (what TechnicalAsset) DetermineShapeBorderLineStyle() string { } // 3 when redundant - func (what TechnicalAsset) DetermineShapePeripheries() int { if what.Redundant { return 2 @@ -2648,7 +2620,6 @@ func (what TechnicalAsset) ProcessesOrStoresDataAsset(dataAssetId string) bool { } // red when >= confidential data stored in unencrypted technical asset - func (what TechnicalAsset) DetermineLabelColor() string { // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here // Check for red @@ -2708,7 +2679,6 @@ func (what TechnicalAsset) DetermineLabelColor() string { // red when mission-critical integrity, but still unauthenticated (non-readonly) channels access it // amber when critical integrity, but still unauthenticated (non-readonly) channels access it // pink when model forgery attempt (i.e. 
nothing being processed or stored) - func (what TechnicalAsset) DetermineShapeBorderColor() string { // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here // Check for red @@ -2758,7 +2728,7 @@ func (what TechnicalAsset) DetermineShapeBorderColor() string { } if len(what.DataAssetsProcessed) == 0 && len(what.DataAssetsStored) == 0 { - return colors.Pink // pink, because it's strange when too many technical assets process no data... some are ok, but many in a diagram is a sign of model forgery... + return colors.Pink // pink, because it's strange when too many technical assets process no data... some ok, but many in a diagram ist a sign of model forgery... } return colors.Black @@ -2799,7 +2769,6 @@ func (what CommunicationLink) DetermineLabelColor() string { } // pink when model forgery attempt (i.e. nothing being sent and received) - func (what CommunicationLink) DetermineArrowColor() string { // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here if len(what.DataAssetsSent) == 0 && len(what.DataAssetsReceived) == 0 || @@ -2889,7 +2858,6 @@ func (what TechnicalAsset) DetermineShapeFillColor() string { fillColor = colors.BrightenHexColor(fillColor) case Serverless: fillColor = colors.BrightenHexColor(colors.BrightenHexColor(fillColor)) - case Virtual: } return fillColor } @@ -3300,7 +3268,7 @@ type Risk struct { // TODO: refactor all "Id" here to "ID"? } -func (what Risk) GetRiskTracking() RiskTracking { // TODO: Unify function naming regarding Get etc. +func (what Risk) GetRiskTracking() RiskTracking { // TODO: Unify function naming reagrding Get etc. 
var result RiskTracking if riskTracking, ok := ParsedModelRoot.RiskTracking[what.SyntheticId]; ok { result = riskTracking @@ -3436,10 +3404,9 @@ type RiskRule interface { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - func SortedRiskCategories() []RiskCategory { categories := make([]RiskCategory, 0) - for k := range GeneratedRisksByCategory { + for k, _ := range GeneratedRisksByCategory { categories = append(categories, k) } sort.Sort(ByRiskCategoryHighestContainingRiskSeveritySortStillAtRisk(categories)) @@ -3826,7 +3793,7 @@ func FilteredByOnlyLowRisks() []Risk { } func FilterByModelFailures(risksByCat map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) + result := make(map[RiskCategory][]Risk, 0) for riskCat, risks := range risksByCat { if riskCat.ModelFailurePossibleReason { result[riskCat] = risks diff --git a/raa/dummy/dummy.go b/raa/dummy/dummy.go index 1793be00..febfaf9a 100644 --- a/raa/dummy/dummy.go +++ b/raa/dummy/dummy.go @@ -8,12 +8,7 @@ import ( // JUST A DUMMY TO HAVE AN ALTERNATIVE PLUGIN TO USE/TEST -var ( - _ = CalculateRAA -) - // used from plugin caller: - func CalculateRAA() string { for techAssetID, techAsset := range model.ParsedModelRoot.TechnicalAssets { techAsset.RAA = float64(rand.Intn(100)) diff --git a/raa/raa/raa.go b/raa/raa/raa.go index a6babc82..a0b98aa3 100644 --- a/raa/raa/raa.go +++ b/raa/raa/raa.go @@ -5,12 +5,7 @@ import ( "sort" ) -var ( - _ = CalculateRAA -) - // used from plugin caller: - func CalculateRAA() string { for techAssetID, techAsset := range model.ParsedModelRoot.TechnicalAssets { aa := calculateAttackerAttractiveness(techAsset) @@ -38,7 +33,7 @@ func calculateRelativeAttackerAttractiveness(attractiveness float64) float64 { // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: 
keys := make([]string, 0) - for k := range model.ParsedModelRoot.TechnicalAssets { + for k, _ := range model.ParsedModelRoot.TechnicalAssets { keys = append(keys, k) } sort.Strings(keys) @@ -58,7 +53,7 @@ func calculateRelativeAttackerAttractiveness(attractiveness float64) float64 { } // calculate the percent value of the value within the defined min/max range value := attractiveness - attackerAttractivenessMinimum - percent := value / spread * 100 + percent := float64(value) / float64(spread) * 100 if percent <= 0 { percent = 1 // since 0 suggests no attacks at all } @@ -77,7 +72,7 @@ func calculatePivotingNeighbourEffectAdjustment(techAsset model.TechnicalAsset) delta := calculateRelativeAttackerAttractiveness(calculateAttackerAttractiveness(outgoingNeighbour)) - calculateRelativeAttackerAttractiveness(calculateAttackerAttractiveness(techAsset)) if delta > 0 { potentialIncrease := delta / 3 - //fmt.Println("Positive delta from", techAsset.Id, "to", outgoingNeighbour.Id, "is", delta, "yields to pivoting neighbour effect of an increase of", potentialIncrease) + //fmt.Println("Positive delta from", techAsset.Id, "to", outgoingNeighbour.Id, "is", delta, "yields to pivoting eighbour effect of an incrase of", potentialIncrease) if potentialIncrease > adjustment { adjustment = potentialIncrease } diff --git a/report/excel.go b/report/excel.go index 1c1e414f..3159b04b 100644 --- a/report/excel.go +++ b/report/excel.go @@ -454,7 +454,7 @@ func WriteRisksExcelToFile(filename string) { checkErr(err) } -func WriteTagsExcelToFile(filename string) { // TODO: eventually when len(sortedTagsAvailable) == 0 is: write a hint in the Excel that no tags are used +func WriteTagsExcelToFile(filename string) { // TODO: eventually when len(sortedTagsAvailable) == 0 is: write a hint in the execel that no tags are used excelRow = 0 excel := excelize.NewFile() sheetName := model.ParsedModelRoot.Title @@ -492,7 +492,7 @@ func WriteTagsExcelToFile(filename string) { // TODO: eventually when 
len(sorted }) checkErr(err) - err = excel.SetCellValue(sheetName, "A1", "Element") // TODO is "Element" the correct generic name when referencing assets, links, trust boundaries etc.? Eventually add separate column "type of element" like "technical asset" or "data asset"? + err = excel.SetCellValue(sheetName, "A1", "Element") // TODO is "Element" the correct generic name when referencing assets, links, trust boudaries etc.? Eventually add separate column "type of element" like "technical asset" or "data asset"? sortedTagsAvailable := model.TagsActuallyUsed() sort.Strings(sortedTagsAvailable) axis := "" @@ -615,7 +615,7 @@ func writeRow(excel *excelize.File, sheetName string, axis string, styleBlackLef var alphabet = []string{"A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"} func determineColumnLetter(i int) string { - // can only have 700 columns in Excel that way, but that should be more than usable anyway ;)... otherwise think about your model... + // can only have 700 columns in excel that way, but that should be more than usable anyway ;)... otherwise think about your model... i++ if i < 26 { return alphabet[i] diff --git a/report/json.go b/report/json.go index a1456dab..bd4d8009 100644 --- a/report/json.go +++ b/report/json.go @@ -3,7 +3,7 @@ package report import ( "encoding/json" "github.com/threagile/threagile/model" - "os" + "io/ioutil" ) func WriteRisksJSON(filename string) { @@ -20,20 +20,19 @@ func WriteRisksJSON(filename string) { if err != nil { panic(err) } - err = os.WriteFile(filename, jsonBytes, 0644) + err = ioutil.WriteFile(filename, jsonBytes, 0644) if err != nil { panic(err) } } // TODO: also a "data assets" json? 
- func WriteTechnicalAssetsJSON(filename string) { jsonBytes, err := json.Marshal(model.ParsedModelRoot.TechnicalAssets) if err != nil { panic(err) } - err = os.WriteFile(filename, jsonBytes, 0644) + err = ioutil.WriteFile(filename, jsonBytes, 0644) if err != nil { panic(err) } @@ -44,7 +43,7 @@ func WriteStatsJSON(filename string) { if err != nil { panic(err) } - err = os.WriteFile(filename, jsonBytes, 0644) + err = ioutil.WriteFile(filename, jsonBytes, 0644) if err != nil { panic(err) } diff --git a/report/report.go b/report/report.go index 8bcfbe05..39d3bdff 100644 --- a/report/report.go +++ b/report/report.go @@ -52,6 +52,7 @@ import ( "github.com/wcharczuk/go-chart" "github.com/wcharczuk/go-chart/drawing" "image" + "io/ioutil" "log" "os" "path/filepath" @@ -69,8 +70,7 @@ const /*dataFlowDiagramFullscreen,*/ allowedPdfLandscapePages, embedDiagramLegen var isLandscapePage bool var pdf *gofpdf.Fpdf - -// var alreadyTemplateImported = false +var alreadyTemplateImported = false var coverTemplateId, contentTemplateId, diagramLegendTemplateId int var pageNo int var linkCounter int @@ -79,25 +79,6 @@ var homeLink int var currentChapterTitleBreadcrumb string var firstParagraphRegEx = regexp.MustCompile(`(.*?)((
)|(

))`) -var ( - _ = pdfColorDataAssets - _ = rgbHexColorDataAssets - _ = pdfColorTechnicalAssets - _ = rgbHexColorTechnicalAssets - _ = pdfColorTrustBoundaries - _ = pdfColorSharedRuntime - _ = rgbHexColorTrustBoundaries - _ = rgbHexColorSharedRuntime - _ = pdfColorRiskFindings - _ = rgbHexColorRiskFindings - _ = rgbHexColorDisclaimer - _ = rgbHexColorGray - _ = rgbHexColorLightGray - _ = rgbHexColorOutOfScope - _ = rgbHexColorBlack - _ = pdfColorRed - _ = rgbHexColorRed -) func initReport() { pdf = nil @@ -206,11 +187,11 @@ func parseBackgroundTemplate(templateFilename string) { /* imageBox, err := rice.FindBox("template") checkErr(err) - file, err := os.CreateTemp("", "background-*-.pdf") + file, err := ioutil.TempFile("", "background-*-.pdf") checkErr(err) defer os.Remove(file.Name()) backgroundBytes := imageBox.MustBytes("background.pdf") - err = os.WriteFile(file.Name(), backgroundBytes, 0644) + err = ioutil.WriteFile(file.Name(), backgroundBytes, 0644) checkErr(err) */ coverTemplateId = gofpdi.ImportPage(pdf, templateFilename, 1, "/MediaBox") @@ -732,7 +713,7 @@ func createDisclaimer() { "is obligated to ensure the highly confidential contents are kept secret. The recipient assumes responsibility " + "for further distribution of this document." + "

" + - "In this particular project, a time box approach was used to define the analysis effort. This means that the " + + "In this particular project, a timebox approach was used to define the analysis effort. This means that the " + "author allotted a prearranged amount of time to identify and document threats. Because of this, there " + "is no guarantee that all possible threats and risks are discovered. Furthermore, the analysis " + "applies to a snapshot of the current state of the modeled architecture (based on the architecture information provided " + @@ -1243,11 +1224,11 @@ func createRiskMitigationStatus() { // CAUTION: Long labels might cause endless loop, then remove labels and render them manually later inside the PDF func embedStackedBarChart(sbcChart chart.StackedBarChart, x float64, y float64) { - tmpFilePNG, err := os.CreateTemp(model.TempFolder, "chart-*-.png") + tmpFilePNG, err := ioutil.TempFile(model.TempFolder, "chart-*-.png") checkErr(err) - defer func() { _ = os.Remove(tmpFilePNG.Name()) }() + defer os.Remove(tmpFilePNG.Name()) file, _ := os.Create(tmpFilePNG.Name()) - defer func() { _ = file.Close() }() + defer file.Close() err = sbcChart.Render(chart.PNG, file) checkErr(err) var options gofpdf.ImageOptions @@ -1257,12 +1238,12 @@ func embedStackedBarChart(sbcChart chart.StackedBarChart, x float64, y float64) } func embedPieChart(pieChart chart.PieChart, x float64, y float64) { - tmpFilePNG, err := os.CreateTemp(model.TempFolder, "chart-*-.png") + tmpFilePNG, err := ioutil.TempFile(model.TempFolder, "chart-*-.png") checkErr(err) - defer func() { _ = os.Remove(tmpFilePNG.Name()) }() + defer os.Remove(tmpFilePNG.Name()) file, err := os.Create(tmpFilePNG.Name()) checkErr(err) - defer func() { _ = file.Close() }() + defer file.Close() err = pieChart.Render(chart.PNG, file) checkErr(err) var options gofpdf.ImageOptions @@ -4012,13 +3993,13 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim } for _, key := range 
model.SortedKeysOfIndividualRiskCategories() { - individualRiskCategory := model.ParsedModelRoot.IndividualRiskCategories[key] + indivRiskCat := model.ParsedModelRoot.IndividualRiskCategories[key] pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - pdf.CellFormat(190, 3, individualRiskCategory.Title, "0", 0, "", false, 0, "") + pdf.CellFormat(190, 3, indivRiskCat.Title, "0", 0, "", false, 0, "") pdf.Ln(-1) pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, individualRiskCategory.Id, "0", 0, "", false, 0, "") + pdf.CellFormat(190, 6, indivRiskCat.Id, "0", 0, "", false, 0, "") pdf.Ln(-1) pdf.SetFont("Helvetica", "I", fontSizeBody) pdf.CellFormat(190, 6, "Individual Risk Category", "0", 0, "", false, 0, "") @@ -4028,22 +4009,22 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") pdfColorBlack() - pdf.MultiCell(160, 6, individualRiskCategory.STRIDE.Title(), "0", "0", false) + pdf.MultiCell(160, 6, indivRiskCat.STRIDE.Title(), "0", "0", false) pdfColorGray() pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(individualRiskCategory.Description), "0", "0", false) + pdf.MultiCell(160, 6, firstParagraph(indivRiskCat.Description), "0", "0", false) pdfColorGray() pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") pdfColorBlack() - pdf.MultiCell(160, 6, individualRiskCategory.DetectionLogic, "0", "0", false) + pdf.MultiCell(160, 6, indivRiskCat.DetectionLogic, "0", "0", false) pdfColorGray() pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") pdfColorBlack() - pdf.MultiCell(160, 6, individualRiskCategory.RiskAssessment, "0", "0", false) + pdf.MultiCell(160, 6, 
indivRiskCat.RiskAssessment, "0", "0", false) } pdf.Ln(-1) @@ -5616,11 +5597,11 @@ func getHeightWhenWidthIsFix(imageFullFilename string, width float64) float64 { } /* #nosec imageFullFilename is not tainted (see caller restricting it to image files of model folder only) */ file, err := os.Open(imageFullFilename) - defer func() { _ = file.Close() }() + defer file.Close() checkErr(err) - img, _, err := image.DecodeConfig(file) + image, _, err := image.DecodeConfig(file) checkErr(err) - return float64(img.Height) / (float64(img.Width) / width) + return float64(image.Height) / (float64(image.Width) / width) } func embedDataFlowDiagram(diagramFilenamePNG string) { @@ -5642,7 +5623,7 @@ func embedDataFlowDiagram(diagramFilenamePNG string) { // check to rotate the image if it is wider than high /* #nosec diagramFilenamePNG is not tainted */ imagePath, _ := os.Open(diagramFilenamePNG) - defer func() { _ = imagePath.Close() }() + defer imagePath.Close() srcImage, _, _ := image.Decode(imagePath) srcDimensions := srcImage.Bounds() // wider than high? @@ -5662,7 +5643,7 @@ func embedDataFlowDiagram(diagramFilenamePNG string) { // so rotate the image left by 90 degrees // ok, use temp PNG then // now rotate left by 90 degrees - rotatedFile, err := os.CreateTemp(model.TempFolder, "diagram-*-.png") + rotatedFile, err := ioutil.TempFile(model.TempFolder, "diagram-*-.png") checkErr(err) defer os.Remove(rotatedFile.Name()) dstImage := image.NewRGBA(image.Rect(0, 0, srcDimensions.Dy(), srcDimensions.Dx())) @@ -5732,7 +5713,7 @@ func embedDataRiskMapping(diagramFilenamePNG string) { // check to rotate the image if it is wider than high /* #nosec diagramFilenamePNG is not tainted */ imagePath, _ := os.Open(diagramFilenamePNG) - defer func() { _ = imagePath.Close() }() + defer imagePath.Close() srcImage, _, _ := image.Decode(imagePath) srcDimensions := srcImage.Bounds() // wider than high? 
@@ -5752,7 +5733,7 @@ func embedDataRiskMapping(diagramFilenamePNG string) { // so rotate the image left by 90 degrees // ok, use temp PNG then // now rotate left by 90 degrees - rotatedFile, err := os.CreateTemp(model.TempFolder, "diagram-*-.png") + rotatedFile, err := ioutil.TempFile(model.TempFolder, "diagram-*-.png") checkErr(err) defer os.Remove(rotatedFile.Name()) dstImage := image.NewRGBA(image.Rect(0, 0, srcDimensions.Dy(), srcDimensions.Dx())) @@ -5850,7 +5831,6 @@ func rgbHexColorSharedRuntime() string { func pdfColorRiskFindings() { pdf.SetTextColor(160, 40, 30) } - func rgbHexColorRiskFindings() string { return "#A0281E" } diff --git a/risks/built-in/code-backdooring/code-backdooring-rule.go b/risks/built-in/code-backdooring/code-backdooring-rule.go index 22d8093d..1f6e518e 100644 --- a/risks/built-in/code-backdooring/code-backdooring-rule.go +++ b/risks/built-in/code-backdooring/code-backdooring-rule.go @@ -96,7 +96,7 @@ func createRisk(technicalAsset model.TechnicalAsset, elevatedRisk bool) model.Ri } } dataBreachTechnicalAssetIDs := make([]string, 0) - for key := range uniqueDataBreachTechnicalAssetIDs { + for key, _ := range uniqueDataBreachTechnicalAssetIDs { dataBreachTechnicalAssetIDs = append(dataBreachTechnicalAssetIDs, key) } // create risk diff --git a/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go b/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go index db2014c7..e491655d 100644 --- a/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go +++ b/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go @@ -40,7 +40,7 @@ func GenerateRisks() []model.Risk { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] if technicalAsset.OutOfScope || technicalAsset.Technology.IsTrafficForwarding() || - technicalAsset.Technology.IsUnprotectedCommunicationsTolerated() { + 
technicalAsset.Technology.IsUnprotectedCommsTolerated() { continue } if technicalAsset.HighestConfidentiality() >= model.Confidential || @@ -51,7 +51,7 @@ func GenerateRisks() []model.Risk { commLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, commLink := range commLinks { caller := model.ParsedModelRoot.TechnicalAssets[commLink.SourceId] - if caller.Technology.IsUnprotectedCommunicationsTolerated() || caller.Type == model.Datastore { + if caller.Technology.IsUnprotectedCommsTolerated() || caller.Type == model.Datastore { continue } if caller.UsedAsClientByHuman { @@ -65,7 +65,7 @@ func GenerateRisks() []model.Risk { callersCommLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[caller.Id] for _, callersCommLink := range callersCommLinks { callersCaller := model.ParsedModelRoot.TechnicalAssets[callersCommLink.SourceId] - if callersCaller.Technology.IsUnprotectedCommunicationsTolerated() || callersCaller.Type == model.Datastore { + if callersCaller.Technology.IsUnprotectedCommsTolerated() || callersCaller.Type == model.Datastore { continue } if callersCaller.UsedAsClientByHuman { diff --git a/risks/built-in/missing-authentication/missing-authentication-rule.go b/risks/built-in/missing-authentication/missing-authentication-rule.go index 82934af2..9d002242 100644 --- a/risks/built-in/missing-authentication/missing-authentication-rule.go +++ b/risks/built-in/missing-authentication/missing-authentication-rule.go @@ -49,7 +49,7 @@ func GenerateRisks() []model.Risk { commLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, commLink := range commLinks { caller := model.ParsedModelRoot.TechnicalAssets[commLink.SourceId] - if caller.Technology.IsUnprotectedCommunicationsTolerated() || caller.Type == model.Datastore { + if caller.Technology.IsUnprotectedCommsTolerated() || caller.Type == model.Datastore { continue } highRisk := commLink.HighestConfidentiality() == 
model.StrictlyConfidential || diff --git a/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go b/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go index 8941dc5b..1eb1662a 100644 --- a/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go +++ b/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go @@ -40,7 +40,7 @@ func GenerateRisks() []model.Risk { hasCustomDevelopedParts, hasBuildPipeline, hasSourcecodeRepo, hasDevOpsClient := false, false, false, false impact := model.LowImpact var mostRelevantAsset model.TechnicalAsset - for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset + for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with highest sensitivity as example asset technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] if technicalAsset.CustomDevelopedParts && !technicalAsset.OutOfScope { hasCustomDevelopedParts = true diff --git a/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go b/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go index 77539d81..e7dddb3a 100644 --- a/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go +++ b/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go @@ -36,7 +36,7 @@ func Category() model.RiskCategory { } } -var specificSubTagsAWS = []string{"aws:vpc", "aws:ec2", "aws:s3", "aws:ebs", "aws:apigateway", "aws:lambda", "aws:dynamodb", "aws:rds", "aws:sqs", "aws:iam"} +var specificSubtagsAWS = []string{"aws:vpc", "aws:ec2", "aws:s3", "aws:ebs", "aws:apigateway", "aws:lambda", "aws:dynamodb", "aws:rds", "aws:sqs", "aws:iam"} func SupportedTags() []string { res := []string{ @@ -45,50 +45,50 @@ func SupportedTags() []string { "gcp", // Google Cloud Platform "ocp", // Oracle Cloud 
Platform } - res = append(res, specificSubTagsAWS...) + res = append(res, specificSubtagsAWS...) return res } func GenerateRisks() []model.Risk { risks := make([]model.Risk, 0) - sharedRuntimesWithUnspecificCloudRisks := make(map[string]bool) - trustBoundariesWithUnspecificCloudRisks := make(map[string]bool) - techAssetsWithUnspecificCloudRisks := make(map[string]bool) + sharedRuntimesWithUnspecificCloudRisks := make(map[string]bool, 0) + trustBoundariesWithUnspecificCloudRisks := make(map[string]bool, 0) + techAssetsWithUnspecificCloudRisks := make(map[string]bool, 0) - sharedRuntimeIDsAWS := make(map[string]bool) - trustBoundaryIDsAWS := make(map[string]bool) - techAssetIDsAWS := make(map[string]bool) + sharedRuntimeIDsAWS := make(map[string]bool, 0) + trustBoundaryIDsAWS := make(map[string]bool, 0) + techAssetIDsAWS := make(map[string]bool, 0) - sharedRuntimeIDsAzure := make(map[string]bool) - trustBoundaryIDsAzure := make(map[string]bool) - techAssetIDsAzure := make(map[string]bool) + sharedRuntimeIDsAzure := make(map[string]bool, 0) + trustBoundaryIDsAzure := make(map[string]bool, 0) + techAssetIDsAzure := make(map[string]bool, 0) - sharedRuntimeIDsGCP := make(map[string]bool) - trustBoundaryIDsGCP := make(map[string]bool) - techAssetIDsGCP := make(map[string]bool) + sharedRuntimeIDsGCP := make(map[string]bool, 0) + trustBoundaryIDsGCP := make(map[string]bool, 0) + techAssetIDsGCP := make(map[string]bool, 0) - sharedRuntimeIDsOCP := make(map[string]bool) - trustBoundaryIDsOCP := make(map[string]bool) - techAssetIDsOCP := make(map[string]bool) + sharedRuntimeIDsOCP := make(map[string]bool, 0) + trustBoundaryIDsOCP := make(map[string]bool, 0) + techAssetIDsOCP := make(map[string]bool, 0) - techAssetIDsWithSubtagSpecificCloudRisks := make(map[string]bool) + techAssetIDsWithSubtagSpecificCloudRisks := make(map[string]bool, 0) for _, trustBoundary := range model.ParsedModelRoot.TrustBoundaries { taggedOuterTB := trustBoundary.IsTaggedWithAny(SupportedTags()...) 
// false = generic cloud risks only // true = cloud-individual risks if taggedOuterTB || trustBoundary.Type.IsWithinCloud() { - addTrustBoundaryAccordingToBaseTag(trustBoundary, trustBoundariesWithUnspecificCloudRisks, + addTrustBoundaryAccordingToBasetag(trustBoundary, trustBoundariesWithUnspecificCloudRisks, trustBoundaryIDsAWS, trustBoundaryIDsAzure, trustBoundaryIDsGCP, trustBoundaryIDsOCP) for _, techAssetID := range trustBoundary.RecursivelyAllTechnicalAssetIDsInside() { added := false tA := model.ParsedModelRoot.TechnicalAssets[techAssetID] if tA.IsTaggedWithAny(SupportedTags()...) { - addAccordingToBaseTag(tA, tA.Tags, + addAccordingToBasetag(tA, tA.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) added = true } else if taggedOuterTB { - addAccordingToBaseTag(tA, trustBoundary.Tags, + addAccordingToBasetag(tA, trustBoundary.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) added = true @@ -102,7 +102,7 @@ func GenerateRisks() []model.Risk { // now loop over all technical assets, trust boundaries, and shared runtimes model-wide by tag for _, tA := range model.TechnicalAssetsTaggedWithAny(SupportedTags()...) { - addAccordingToBaseTag(tA, tA.Tags, + addAccordingToBasetag(tA, tA.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) } @@ -110,22 +110,22 @@ func GenerateRisks() []model.Risk { for _, candidateID := range tB.RecursivelyAllTechnicalAssetIDsInside() { tA := model.ParsedModelRoot.TechnicalAssets[candidateID] if tA.IsTaggedWithAny(SupportedTags()...) 
{ - addAccordingToBaseTag(tA, tA.Tags, + addAccordingToBasetag(tA, tA.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) } else { - addAccordingToBaseTag(tA, tB.Tags, + addAccordingToBasetag(tA, tB.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) } } } for _, sR := range model.SharedRuntimesTaggedWithAny(SupportedTags()...) { - addSharedRuntimeAccordingToBaseTag(sR, sharedRuntimesWithUnspecificCloudRisks, + addSharedRuntimeAccordingToBasetag(sR, sharedRuntimesWithUnspecificCloudRisks, sharedRuntimeIDsAWS, sharedRuntimeIDsAzure, sharedRuntimeIDsGCP, sharedRuntimeIDsOCP) for _, candidateID := range sR.TechnicalAssetsRunning { tA := model.ParsedModelRoot.TechnicalAssets[candidateID] - addAccordingToBaseTag(tA, sR.Tags, + addAccordingToBasetag(tA, sR.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) } @@ -269,7 +269,7 @@ func GenerateRisks() []model.Risk { return risks } -func addTrustBoundaryAccordingToBaseTag(trustBoundary model.TrustBoundary, +func addTrustBoundaryAccordingToBasetag(trustBoundary model.TrustBoundary, trustBoundariesWithUnspecificCloudRisks map[string]bool, trustBoundaryIDsAWS map[string]bool, trustBoundaryIDsAzure map[string]bool, @@ -293,7 +293,7 @@ func addTrustBoundaryAccordingToBaseTag(trustBoundary model.TrustBoundary, } } -func addSharedRuntimeAccordingToBaseTag(sharedRuntime model.SharedRuntime, +func addSharedRuntimeAccordingToBasetag(sharedRuntime model.SharedRuntime, sharedRuntimesWithUnspecificCloudRisks map[string]bool, sharedRuntimeIDsAWS map[string]bool, sharedRuntimeIDsAzure map[string]bool, @@ -317,13 +317,13 @@ func addSharedRuntimeAccordingToBaseTag(sharedRuntime model.SharedRuntime, } } -func addAccordingToBaseTag(techAsset model.TechnicalAsset, tags []string, +func addAccordingToBasetag(techAsset model.TechnicalAsset, tags 
[]string, techAssetIDsWithTagSpecificCloudRisks map[string]bool, techAssetIDsAWS map[string]bool, techAssetIDsAzure map[string]bool, techAssetIDsGCP map[string]bool, techAssetIDsOCP map[string]bool) { - if techAsset.IsTaggedWithAny(specificSubTagsAWS...) { + if techAsset.IsTaggedWithAny(specificSubtagsAWS...) { techAssetIDsWithTagSpecificCloudRisks[techAsset.Id] = true } if model.IsTaggedWithBaseTag(tags, "aws") { diff --git a/risks/built-in/missing-file-validation/missing-file-validation-rule.go b/risks/built-in/missing-file-validation/missing-file-validation-rule.go index bc0b5d67..c8633038 100644 --- a/risks/built-in/missing-file-validation/missing-file-validation-rule.go +++ b/risks/built-in/missing-file-validation/missing-file-validation-rule.go @@ -15,7 +15,7 @@ func Category() model.RiskCategory { Action: "File Validation", Mitigation: "Filter by file extension and discard (if feasible) the name provided. Whitelist the accepted file types " + "and determine the mime-type on the server-side (for example via \"Apache Tika\" or similar checks). If the file is retrievable by " + - "end users and/or backoffice employees, consider performing scans for popular malware (if the files can be retrieved much later than they " + + "endusers and/or backoffice employees, consider performing scans for popular malware (if the files can be retrieved much later than they " + "were uploaded, also apply a fresh malware scan during retrieval to scan with newer signatures of popular malware). 
Also enforce " + "limits on maximum file size to avoid denial-of-service like scenarios.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", diff --git a/risks/built-in/missing-hardening/missing-hardening-rule.go b/risks/built-in/missing-hardening/missing-hardening-rule.go index d9061cfc..b4795740 100644 --- a/risks/built-in/missing-hardening/missing-hardening-rule.go +++ b/risks/built-in/missing-hardening/missing-hardening-rule.go @@ -24,7 +24,7 @@ func Category() model.RiskCategory { Function: model.Operations, STRIDE: model.Tampering, DetectionLogic: "In-scope technical assets with RAA values of " + strconv.Itoa(raaLimit) + " % or higher. " + - "Generally for high-value targets like data stores, application servers, identity providers and ERP systems this limit is reduced to " + strconv.Itoa(raaLimitReduced) + " %", + "Generally for high-value targets like datastores, application servers, identity providers and ERP systems this limit is reduced to " + strconv.Itoa(raaLimitReduced) + " %", RiskAssessment: "The risk rating depends on the sensitivity of the data processed or stored in the technical asset.", FalsePositives: "Usually no false positives.", ModelFailurePossibleReason: false, diff --git a/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go b/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go index 01e2ba38..31f10700 100644 --- a/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go +++ b/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go @@ -8,22 +8,22 @@ func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-identity-propagation", Title: "Missing Identity Propagation", - Description: "Technical assets (especially multi-tenant systems), which usually process data for end users should " + - "authorize every request based on the identity of the end user when the data flow is 
authenticated (i.e. non-public). " + + Description: "Technical assets (especially multi-tenant systems), which usually process data for endusers should " + + "authorize every request based on the identity of the enduser when the data flow is authenticated (i.e. non-public). " + "For DevOps usages at least a technical-user authorization is required.", Impact: "If this risk is unmitigated, attackers might be able to access or modify foreign data after a successful compromise of a component within " + "the system due to missing resource-based authorization checks.", ASVS: "V4 - Access Control Verification Requirements", CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Access_Control_Cheat_Sheet.html", Action: "Identity Propagation and Resource-based Authorization", - Mitigation: "When processing requests for end users if possible authorize in the backend against the propagated " + - "identity of the end user. This can be achieved in passing JWTs or similar tokens and checking them in the backend " + + Mitigation: "When processing requests for endusers if possible authorize in the backend against the propagated " + + "identity of the enduser. This can be achieved in passing JWTs or similar tokens and checking them in the backend " + "services. For DevOps usages apply at least a technical-user authorization.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", Function: model.Architecture, STRIDE: model.ElevationOfPrivilege, - DetectionLogic: "In-scope service-like technical assets which usually process data based on end user requests, if authenticated " + - "(i.e. non-public), should authorize incoming requests based on the propagated end user identity when their rating is sensitive. " + + DetectionLogic: "In-scope service-like technical assets which usually process data based on enduser requests, if authenticated " + + "(i.e. 
non-public), should authorize incoming requests based on the propagated enduser identity when their rating is sensitive. " + "This is especially the case for all multi-tenant assets (there even less-sensitive rated ones). " + "DevOps usages are exempted from this risk.", RiskAssessment: "The risk rating (medium or high) " + @@ -46,7 +46,7 @@ func GenerateRisks() []model.Risk { if technicalAsset.OutOfScope { continue } - if technicalAsset.Technology.IsUsuallyProcessingEndUserRequests() && + if technicalAsset.Technology.IsUsuallyProcessingEnduserRequests() && (technicalAsset.Confidentiality >= model.Confidential || technicalAsset.Integrity >= model.Critical || technicalAsset.Availability >= model.Critical || @@ -62,7 +62,7 @@ func GenerateRisks() []model.Risk { continue } if commLink.Authentication != model.NoneAuthentication && - commLink.Authorization != model.EndUserIdentityPropagation { + commLink.Authorization != model.EnduserIdentityPropagation { if commLink.Usage == model.DevOps && commLink.Authorization != model.NoneAuthorization { continue } @@ -87,7 +87,7 @@ func createRisk(technicalAsset model.TechnicalAsset, incomingAccess model.Commun Severity: model.CalculateSeverity(model.Unlikely, impact), ExploitationLikelihood: model.Unlikely, ExploitationImpact: impact, - Title: "Missing End User Identity Propagation over communication link " + incomingAccess.Title + " " + + Title: "Missing Enduser Identity Propagation over communication link " + incomingAccess.Title + " " + "from " + model.ParsedModelRoot.TechnicalAssets[incomingAccess.SourceId].Title + " " + "to " + technicalAsset.Title + "", MostRelevantTechnicalAssetId: technicalAsset.Id, diff --git a/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go b/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go index 287cd751..acd44afd 100644 --- 
a/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go +++ b/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go @@ -8,18 +8,18 @@ func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-identity-provider-isolation", Title: "Missing Identity Provider Isolation", - Description: "Highly sensitive identity provider assets and their identity data stores should be isolated from other assets " + + Description: "Highly sensitive identity provider assets and their identity datastores should be isolated from other assets " + "by their own network segmentation trust-boundary (" + model.ExecutionEnvironment.String() + " boundaries do not count as network isolation).", Impact: "If this risk is unmitigated, attackers successfully attacking other components of the system might have an easy path towards " + - "highly sensitive identity provider assets and their identity data stores, as they are not separated by network segmentation.", + "highly sensitive identity provider assets and their identity datastores, as they are not separated by network segmentation.", ASVS: "V1 - Architecture, Design and Threat Modeling Requirements", CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", Action: "Network Segmentation", - Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive identity provider assets and their identity data stores.", + Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive identity provider assets and their identity datastores.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", Function: model.Operations, STRIDE: model.ElevationOfPrivilege, - DetectionLogic: "In-scope identity provider assets and their identity data stores " + + DetectionLogic: "In-scope identity provider assets and their identity datastores " + 
"when surrounded by other (not identity-related) assets (without a network trust-boundary in-between). " + "This risk is especially prevalent when other non-identity related assets are within the same execution environment (i.e. same database or same application server).", RiskAssessment: "Default is " + model.HighImpact.String() + " impact. The impact is increased to " + model.VeryHighImpact.String() + " when the asset missing the " + @@ -45,7 +45,7 @@ func GenerateRisks() []model.Risk { sameExecutionEnv := false createRiskEntry := false // now check for any other same-network assets of non-identity-related types - for sparringAssetCandidateId := range model.ParsedModelRoot.TechnicalAssets { // so inner loop again over all assets + for sparringAssetCandidateId, _ := range model.ParsedModelRoot.TechnicalAssets { // so inner loop again over all assets if technicalAsset.Id != sparringAssetCandidateId { sparringAssetCandidate := model.ParsedModelRoot.TechnicalAssets[sparringAssetCandidateId] if !sparringAssetCandidate.Technology.IsIdentityRelated() && !sparringAssetCandidate.Technology.IsCloseToHighValueTargetsTolerated() { diff --git a/risks/built-in/missing-identity-store/missing-identity-store-rule.go b/risks/built-in/missing-identity-store/missing-identity-store-rule.go index c985a39e..9096e320 100644 --- a/risks/built-in/missing-identity-store/missing-identity-store-rule.go +++ b/risks/built-in/missing-identity-store/missing-identity-store-rule.go @@ -19,8 +19,8 @@ func Category() model.RiskCategory { Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", Function: model.Architecture, STRIDE: model.Spoofing, - DetectionLogic: "Models with authenticated data-flows authorized via end user identity missing an in-scope identity store.", - RiskAssessment: "The risk rating depends on the sensitivity of the end user-identity authorized technical assets and " + + DetectionLogic: "Models with authenticated data-flows authorized via 
enduser-identity missing an in-scope identity store.", + RiskAssessment: "The risk rating depends on the sensitivity of the enduser-identity authorized technical assets and " + "their data assets processed and stored.", FalsePositives: "Models only offering data/services without any real authentication need " + "can be considered as false positives after individual review.", @@ -42,14 +42,14 @@ func GenerateRisks() []model.Risk { return risks } } - // now check if we have end user identity authorized communication links, then it's a risk + // now check if we have enduser-identity authorized communication links, then it's a risk riskIdentified := false var mostRelevantAsset model.TechnicalAsset impact := model.LowImpact - for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset + for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with highest sensitivity as example asset technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] - for _, commLink := range technicalAsset.CommunicationLinksSorted() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset - if commLink.Authorization == model.EndUserIdentityPropagation { + for _, commLink := range technicalAsset.CommunicationLinksSorted() { // use the sorted one to always get the same tech asset with highest sensitivity as example asset + if commLink.Authorization == model.EnduserIdentityPropagation { riskIdentified = true targetAsset := model.ParsedModelRoot.TechnicalAssets[commLink.TargetId] if impact == model.LowImpact { diff --git a/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go b/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go index 2b29af70..495a3215 100644 --- a/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go +++ 
b/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go @@ -11,7 +11,7 @@ func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-network-segmentation", Title: "Missing Network Segmentation", - Description: "Highly sensitive assets and/or data stores residing in the same network segment than other " + + Description: "Highly sensitive assets and/or datastores residing in the same network segment than other " + "lower sensitive assets (like webservers or content management systems etc.) should be better protected " + "by a network segmentation trust-boundary.", Impact: "If this risk is unmitigated, attackers successfully attacking other components of the system might have an easy path towards " + @@ -19,11 +19,11 @@ func Category() model.RiskCategory { ASVS: "V1 - Architecture, Design and Threat Modeling Requirements", CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", Action: "Network Segmentation", - Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive assets and/or data stores.", + Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive assets and/or datastores.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", Function: model.Operations, STRIDE: model.ElevationOfPrivilege, - DetectionLogic: "In-scope technical assets with high sensitivity and RAA values as well as data stores " + + DetectionLogic: "In-scope technical assets with high sensitivity and RAA values as well as datastores " + "when surrounded by assets (without a network trust-boundary in-between) which are of type " + model.ClientSystem.String() + ", " + model.WebServer.String() + ", " + model.WebApplication.String() + ", " + model.CMS.String() + ", " + model.WebServiceREST.String() + ", " + model.WebServiceSOAP.String() + ", " + model.BuildPipeline.String() + ", " + 
model.SourcecodeRepository.String() + ", " + model.Monitoring.String() + ", or similar and there is no direct connection between these " + @@ -46,7 +46,7 @@ func GenerateRisks() []model.Risk { // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: keys := make([]string, 0) - for k := range model.ParsedModelRoot.TechnicalAssets { + for k, _ := range model.ParsedModelRoot.TechnicalAssets { keys = append(keys, k) } sort.Strings(keys) diff --git a/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go b/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go index 0c8919b4..8ef10cdf 100644 --- a/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go +++ b/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go @@ -8,14 +8,14 @@ func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-vault-isolation", Title: "Missing Vault Isolation", - Description: "Highly sensitive vault assets and their data stores should be isolated from other assets " + + Description: "Highly sensitive vault assets and their datastores should be isolated from other assets " + "by their own network segmentation trust-boundary (" + model.ExecutionEnvironment.String() + " boundaries do not count as network isolation).", Impact: "If this risk is unmitigated, attackers successfully attacking other components of the system might have an easy path towards " + - "highly sensitive vault assets and their data stores, as they are not separated by network segmentation.", + "highly sensitive vault assets and their datastores, as they are not separated by network segmentation.", ASVS: "V1 - Architecture, Design and Threat Modeling Requirements", CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", Action: "Network Segmentation", - 
Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive vault assets and their data stores.", + Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive vault assets and their datastores.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", Function: model.Operations, STRIDE: model.ElevationOfPrivilege, @@ -45,7 +45,7 @@ func GenerateRisks() []model.Risk { sameExecutionEnv := false createRiskEntry := false // now check for any other same-network assets of non-vault-related types - for sparringAssetCandidateId := range model.ParsedModelRoot.TechnicalAssets { // so inner loop again over all assets + for sparringAssetCandidateId, _ := range model.ParsedModelRoot.TechnicalAssets { // so inner loop again over all assets if technicalAsset.Id != sparringAssetCandidateId { sparringAssetCandidate := model.ParsedModelRoot.TechnicalAssets[sparringAssetCandidateId] if sparringAssetCandidate.Technology != model.Vault && !isVaultStorage(technicalAsset, sparringAssetCandidate) { diff --git a/risks/built-in/missing-vault/missing-vault-rule.go b/risks/built-in/missing-vault/missing-vault-rule.go index ac3e4590..a046131b 100644 --- a/risks/built-in/missing-vault/missing-vault-rule.go +++ b/risks/built-in/missing-vault/missing-vault-rule.go @@ -39,7 +39,7 @@ func GenerateRisks() []model.Risk { hasVault := false var mostRelevantAsset model.TechnicalAsset impact := model.LowImpact - for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset + for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with highest sensitivity as example asset techAsset := model.ParsedModelRoot.TechnicalAssets[id] if techAsset.Technology == model.Vault { hasVault = true diff --git 
a/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go b/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go index 435a6ab9..3c9ab06b 100644 --- a/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go +++ b/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go @@ -41,7 +41,7 @@ func GenerateRisks() []model.Risk { risks := make([]model.Risk, 0) // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: keys := make([]string, 0) - for k := range model.ParsedModelRoot.SharedRuntimes { + for k, _ := range model.ParsedModelRoot.SharedRuntimes { keys = append(keys, k) } sort.Strings(keys) diff --git a/risks/built-in/search-query-injection/search-query-injection-rule.go b/risks/built-in/search-query-injection/search-query-injection-rule.go index 1f250b28..936ab6e6 100644 --- a/risks/built-in/search-query-injection/search-query-injection-rule.go +++ b/risks/built-in/search-query-injection/search-query-injection-rule.go @@ -43,7 +43,7 @@ func GenerateRisks() []model.Risk { continue } if incomingFlow.Protocol == model.HTTP || incomingFlow.Protocol == model.HTTPS || - incomingFlow.Protocol == model.BINARY || incomingFlow.Protocol == model.BinaryEncrypted { + incomingFlow.Protocol == model.BINARY || incomingFlow.Protocol == model.BINARY_encrypted { likelihood := model.VeryLikely if incomingFlow.Usage == model.DevOps { likelihood = model.Likely diff --git a/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go b/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go index a1844ae0..901c5fcf 100644 --- a/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go +++ b/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go @@ -80,7 +80,7 @@ func createRisk(technicalAsset model.TechnicalAsset, outgoingFlow 
model.Communic impact = model.MediumImpact } dataBreachTechnicalAssetIDs := make([]string, 0) - for key := range uniqueDataBreachTechnicalAssetIDs { + for key, _ := range uniqueDataBreachTechnicalAssetIDs { dataBreachTechnicalAssetIDs = append(dataBreachTechnicalAssetIDs, key) } likelihood := model.Likely diff --git a/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go b/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go index 80cac869..34baf45f 100644 --- a/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go +++ b/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go @@ -71,7 +71,7 @@ func createRisk(technicalAsset model.TechnicalAsset) model.Risk { } } dataBreachTechnicalAssetIDs := make([]string, 0) - for key := range uniqueDataBreachTechnicalAssetIDs { + for key, _ := range uniqueDataBreachTechnicalAssetIDs { dataBreachTechnicalAssetIDs = append(dataBreachTechnicalAssetIDs, key) } // create risk diff --git a/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go b/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go index 67c42992..cdf23c6b 100644 --- a/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go +++ b/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go @@ -23,7 +23,7 @@ func Category() model.RiskCategory { ", " + model.IPS.String() + " and embedded components like " + model.Library.String() + ") " + "storing data assets rated at least as " + model.Confidential.String() + " or " + model.Critical.String() + ". 
" + "For technical assets storing data assets rated as " + model.StrictlyConfidential.String() + " or " + model.MissionCritical.String() + " the " + - "encryption must be of type " + model.DataWithEndUserIndividualKey.String() + ".", + "encryption must be of type " + model.DataWithEnduserIndividualKey.String() + ".", RiskAssessment: "Depending on the confidentiality rating of the stored data-assets either medium or high risk.", FalsePositives: "When all sensitive data stored within the asset is already fully encrypted on document or data level.", ModelFailurePossibleReason: false, @@ -36,7 +36,6 @@ func SupportedTags() []string { } // check for technical assets that should be encrypted due to their confidentiality - func GenerateRisks() []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { @@ -46,16 +45,16 @@ func GenerateRisks() []model.Risk { technicalAsset.HighestIntegrity() >= model.Critical) { verySensitive := technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || technicalAsset.HighestIntegrity() == model.MissionCritical - requiresEndUserKey := verySensitive && technicalAsset.Technology.IsUsuallyStoringEndUserData() + requiresEnduserKey := verySensitive && technicalAsset.Technology.IsUsuallyStoringEnduserData() if technicalAsset.Encryption == model.NoneEncryption { impact := model.MediumImpact if verySensitive { impact = model.HighImpact } - risks = append(risks, createRisk(technicalAsset, impact, requiresEndUserKey)) - } else if requiresEndUserKey && + risks = append(risks, createRisk(technicalAsset, impact, requiresEnduserKey)) + } else if requiresEnduserKey && (technicalAsset.Encryption == model.Transparent || technicalAsset.Encryption == model.DataWithSymmetricSharedKey || technicalAsset.Encryption == model.DataWithAsymmetricSharedKey) { - risks = append(risks, createRisk(technicalAsset, model.MediumImpact, requiresEndUserKey)) + risks = append(risks, createRisk(technicalAsset, 
model.MediumImpact, requiresEnduserKey)) } } } @@ -64,17 +63,16 @@ func GenerateRisks() []model.Risk { // Simple routing assets like 'Reverse Proxy' or 'Load Balancer' usually don't have their own storage and thus have no // encryption requirement for the asset itself (though for the communication, but that's a different rule) - func IsEncryptionWaiver(asset model.TechnicalAsset) bool { return asset.Technology == model.ReverseProxy || asset.Technology == model.LoadBalancer || asset.Technology == model.WAF || asset.Technology == model.IDS || asset.Technology == model.IPS || asset.Technology.IsEmbeddedComponent() } -func createRisk(technicalAsset model.TechnicalAsset, impact model.RiskExploitationImpact, requiresEndUserKey bool) model.Risk { +func createRisk(technicalAsset model.TechnicalAsset, impact model.RiskExploitationImpact, requiresEnduserKey bool) model.Risk { title := "Unencrypted Technical Asset named " + technicalAsset.Title + "" - if requiresEndUserKey { - title += " missing end user individual encryption with " + model.DataWithEndUserIndividualKey.String() + if requiresEnduserKey { + title += " missing enduser-individual encryption with " + model.DataWithEnduserIndividualKey.String() } risk := model.Risk{ Category: Category(), diff --git a/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go b/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go index ed5a7eb3..0718540b 100644 --- a/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go +++ b/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go @@ -33,7 +33,6 @@ func SupportedTags() []string { } // check for communication links that should be encrypted due to their confidentiality and/or integrity - func GenerateRisks() []model.Risk { risks := make([]model.Risk, 0) for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { @@ -43,8 +42,8 @@ func GenerateRisks() []model.Risk { targetAsset := 
model.ParsedModelRoot.TechnicalAssets[dataFlow.TargetId] if !technicalAsset.OutOfScope || !sourceAsset.OutOfScope { if !dataFlow.Protocol.IsEncrypted() && !dataFlow.Protocol.IsProcessLocal() && - !sourceAsset.Technology.IsUnprotectedCommunicationsTolerated() && - !targetAsset.Technology.IsUnprotectedCommunicationsTolerated() { + !sourceAsset.Technology.IsUnprotectedCommsTolerated() && + !targetAsset.Technology.IsUnprotectedCommsTolerated() { addedOne := false for _, sentDataAsset := range dataFlow.DataAssetsSent { dataAsset := model.ParsedModelRoot.DataAssets[sentDataAsset] diff --git a/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go b/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go index e845f955..5fa8f9e2 100644 --- a/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go +++ b/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go @@ -8,8 +8,8 @@ func Category() model.RiskCategory { return model.RiskCategory{ Id: "unguarded-direct-datastore-access", Title: "Unguarded Direct Datastore Access", - Description: "Data stores accessed across trust boundaries must be guarded by some protecting service or application.", - Impact: "If this risk is unmitigated, attackers might be able to directly attack sensitive data stores without any protecting components in-between.", + Description: "Datastores accessed across trust boundaries must be guarded by some protecting service or application.", + Impact: "If this risk is unmitigated, attackers might be able to directly attack sensitive datastores without any protecting components in-between.", ASVS: "V1 - Architecture, Design and Threat Modeling Requirements", CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", Action: "Encapsulation of Datastore", @@ -34,8 +34,7 @@ func SupportedTags() []string { return []string{} } -// 
check for data stores that should not be accessed directly across trust boundaries - +// check for datastores that should not be accessed directly across trust boundaries func GenerateRisks() []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { diff --git a/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go b/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go index f4e5c8d6..5da7f2db 100644 --- a/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go +++ b/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go @@ -55,8 +55,8 @@ func GenerateRisks() []model.Risk { } // check for any incoming IIOP and JRMP protocols for _, commLink := range model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] { - if commLink.Protocol == model.IIOP || commLink.Protocol == model.IiopEncrypted || - commLink.Protocol == model.JRMP || commLink.Protocol == model.JrmpEncrypted { + if commLink.Protocol == model.IIOP || commLink.Protocol == model.IIOP_encrypted || + commLink.Protocol == model.JRMP || commLink.Protocol == model.JRMP_encrypted { hasOne = true if commLink.IsAcrossTrustBoundaryNetworkOnly() { acrossTrustBoundary = true diff --git a/risks/custom/demo/demo-rule.go b/risks/custom/demo/demo-rule.go index 2a2daacf..5eb8d672 100644 --- a/risks/custom/demo/demo-rule.go +++ b/risks/custom/demo/demo-rule.go @@ -7,7 +7,6 @@ import ( type customRiskRule string // exported as symbol (here simply as variable to interface to bundle many functions under one symbol) named "CustomRiskRule" - var CustomRiskRule customRiskRule func (r customRiskRule) Category() model.RiskCategory { From 7f9f45ded31cb6315f67d32aee6642b2e56d9be0 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Wed, 6 Dec 2023 12:25:40 -0800 Subject: [PATCH 03/68] made file paths cross-platform; eliminated IDE warnings; reformatted some code for readability --- 
colors/colors.go | 31 +- go.sum | 27 +- .../add-build-pipeline-macro.go | 878 +++---- macros/built-in/add-vault/add-vault-macro.go | 250 +- .../pretty-print/pretty-print-macro.go | 6 +- .../remove-unused-tags-macro.go | 8 +- .../seed-risk-tracking-macro.go | 12 +- macros/built-in/seed-tags/seed-tags-macro.go | 8 +- main.go | 2100 +++++++++-------- model/types.go | 432 ++-- raa/dummy/dummy.go | 5 + raa/raa/raa.go | 11 +- report/excel.go | 6 +- report/json.go | 9 +- report/report.go | 108 +- .../code-backdooring/code-backdooring-rule.go | 2 +- ...ssing-authentication-second-factor-rule.go | 6 +- .../missing-authentication-rule.go | 2 +- .../missing-build-infrastructure-rule.go | 2 +- .../missing-cloud-hardening-rule.go | 60 +- .../missing-file-validation-rule.go | 2 +- .../missing-hardening-rule.go | 2 +- .../missing-identity-propagation-rule.go | 18 +- ...issing-identity-provider-isolation-rule.go | 10 +- .../missing-identity-store-rule.go | 12 +- .../missing-network-segmentation-rule.go | 8 +- .../missing-vault-isolation-rule.go | 8 +- .../missing-vault/missing-vault-rule.go | 2 +- .../mixed-targets-on-shared-runtime-rule.go | 2 +- .../search-query-injection-rule.go | 2 +- .../server-side-request-forgery-rule.go | 2 +- .../unchecked-deployment-rule.go | 2 +- .../unencrypted-asset-rule.go | 18 +- .../unencrypted-communication-rule.go | 5 +- .../unguarded-direct-datastore-access-rule.go | 7 +- .../untrusted-deserialization-rule.go | 4 +- risks/custom/demo/demo-rule.go | 1 + 37 files changed, 2084 insertions(+), 1984 deletions(-) diff --git a/colors/colors.go b/colors/colors.go index 506e97a6..cac70f6d 100644 --- a/colors/colors.go +++ b/colors/colors.go @@ -5,10 +5,33 @@ import ( "github.com/jung-kurt/gofpdf" ) -const Red, Amber, Green, Blue, DarkBlue, Black, Gray, LightGray, MiddleLightGray, MoreLightGray, VeryLightGray, ExtremeLightGray, Pink, LightPink = "#CC0000", "#AF780E", "#008000", "#000080", "#000060", "#000000", "#444444", "#666666", "#999999", "#D2D2D2", 
"#E5E5E5", "#F6F6F6", "#F987C5", "#FFE7EF" -const ExtremeLightBlue, OutOfScopeFancy, CustomDevelopedParts = "#DDFFFF", "#D5D7FF", "#FFFC97" -const LightBlue = "#77FFFF" -const Brown = "#8C4C17" +const ( + Amber = "#AF780E" + Green = "#008000" + Blue = "#000080" + DarkBlue = "#000060" + Black = "#000000" + Gray = "#444444" + LightGray = "#666666" + MiddleLightGray = "#999999" + MoreLightGray = "#D2D2D2" + VeryLightGray = "#E5E5E5" + ExtremeLightGray = "#F6F6F6" + Pink = "#F987C5" + LightPink = "#FFE7EF" + Red = "#CC0000" + OutOfScopeFancy = "#D5D7FF" + CustomDevelopedParts = "#FFFC97" + ExtremeLightBlue = "#DDFFFF" + LightBlue = "#77FFFF" + Brown = "#8C4C17" +) + +var ( + _ = Green + Blue + MoreLightGray + ExtremeLightGray + LightBlue + _ = ColorOutOfScope + _ = RgbHexColorModelFailure +) func DarkenHexColor(hexString string) string { colorBytes, _ := hex.DecodeString(hexString[1:]) diff --git a/go.sum b/go.sum index 1211ed1e..827e8d8f 100644 --- a/go.sum +++ b/go.sum @@ -2,13 +2,10 @@ github.com/blend/go-sdk v1.20220411.3 h1:GFV4/FQX5UzXLPwWV03gP811pj7B8J2sbuq+GJQ github.com/blend/go-sdk v1.20220411.3/go.mod h1:7lnH8fTi6U4i1fArEXRyOIY2E1X4MALg09qsQqY1+ak= github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= -github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s= -github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U= github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM= github.com/bytedance/sonic v1.10.2 h1:GQebETVBxYB7JGWJtLBi07OVzWwt+8dWA00gEVW2ZFE= github.com/bytedance/sonic v1.10.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= -github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 
h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams= github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0= github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA= @@ -18,8 +15,6 @@ github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLI github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= -github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= @@ -32,8 +27,6 @@ github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/o github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= -github.com/go-playground/validator/v10 v10.14.0 h1:vgvQWe3XCz3gIeFDm/HnTIbj6UGmg/+t63MyGU2n5js= -github.com/go-playground/validator/v10 v10.14.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= github.com/go-playground/validator/v10 v10.15.5 h1:LEBecTWb/1j5TNY1YYG2RcOUN3R7NLylN+x8TTueE24= github.com/go-playground/validator/v10 v10.15.5/go.mod 
h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= @@ -52,15 +45,11 @@ github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+ github.com/jung-kurt/gofpdf v1.16.2 h1:jgbatWHfRlPYiK85qgevsZTHviWXKwB1TTiKdz5PtRc= github.com/jung-kurt/gofpdf v1.16.2/go.mod h1:1hl7y57EsiPAkLbOwzpzqgx1A30nQCk/YmFV8S2vmK0= github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= -github.com/klauspost/cpuid/v2 v2.2.4 h1:acbojRNwl3o09bUq+yDCtZFc1aiwaAAxtcn8YkZXnvk= -github.com/klauspost/cpuid/v2 v2.2.4/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY= github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= -github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= -github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -70,15 +59,11 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= 
-github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ= -github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4= github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4= github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= -github.com/phpdave11/gofpdi v1.0.7 h1:k2oy4yhkQopCK+qW8KjCla0iU2RpDow+QUDmH9DDt44= github.com/phpdave11/gofpdi v1.0.7/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= github.com/phpdave11/gofpdi v1.0.13 h1:o61duiW8M9sMlkVXWlvP92sZJtGKENvW3VExs6dZukQ= github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= -github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -100,8 +85,7 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.3 h1:RP3t2pwF7cMEbC1dqtB6poj3niw/9gnV4Cjg5oW5gtY= -github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= @@ -109,20 +93,16 @@ github.com/ugorji/go/codec 
v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4d github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= github.com/wcharczuk/go-chart v2.0.1+incompatible h1:0pz39ZAycJFF7ju/1mepnk26RLVLBCWz1STcD3doU0A= github.com/wcharczuk/go-chart v2.0.1+incompatible/go.mod h1:PF5tmL4EIx/7Wf+hEkpCqYi5He4u90sw+0+6FhrryuE= -github.com/xuri/efp v0.0.0-20230802181842-ad255f2331ca h1:uvPMDVyP7PXMMioYdyPH+0O+Ta/UO1WFfNYMO3Wz0eg= github.com/xuri/efp v0.0.0-20230802181842-ad255f2331ca/go.mod h1:ybY/Jr0T0GTCnYjKqmdwxyxn2BQf2RcQIIvex5QldPI= github.com/xuri/efp v0.0.0-20231025114914-d1ff6096ae53 h1:Chd9DkqERQQuHpXjR/HSV1jLZA6uaoiwwH3vSuF3IW0= github.com/xuri/efp v0.0.0-20231025114914-d1ff6096ae53/go.mod h1:ybY/Jr0T0GTCnYjKqmdwxyxn2BQf2RcQIIvex5QldPI= github.com/xuri/excelize/v2 v2.8.0 h1:Vd4Qy809fupgp1v7X+nCS/MioeQmYVVzi495UCTqB7U= github.com/xuri/excelize/v2 v2.8.0/go.mod h1:6iA2edBTKxKbZAa7X5bDhcCg51xdOn1Ar5sfoXRGrQg= -github.com/xuri/nfp v0.0.0-20230819163627-dc951e3ffe1a h1:Mw2VNrNNNjDtw68VsEj2+st+oCSn4Uz7vZw6TbhcV1o= github.com/xuri/nfp v0.0.0-20230819163627-dc951e3ffe1a/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ= github.com/xuri/nfp v0.0.0-20230919160717-d98342af3f05 h1:qhbILQo1K3mphbwKh1vNm4oGezE1eF9fQWmNiIpSfI4= github.com/xuri/nfp v0.0.0-20230919160717-d98342af3f05/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= -golang.org/x/arch v0.3.0 h1:02VY4/ZcO/gBOH6PUaoiptASxtXU10jazRCP865E97k= -golang.org/x/arch v0.3.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= golang.org/x/arch v0.5.0 h1:jpGode6huXQxcskEIpOCvrU+tzo81b6+oFLUYXWtH/Y= golang.org/x/arch v0.5.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= @@ -131,7 
+111,6 @@ golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98y golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/image v0.11.0 h1:ds2RoQvBvYTiJkwpSFDwCcDFNX7DqjL2WsUgTNk0Ooo= golang.org/x/image v0.11.0/go.mod h1:bglhjqbqVuEb9e9+eNR45Jfu7D+T4Qan+NhQk8Ck2P8= golang.org/x/image v0.13.0 h1:3cge/F/QTkNLauhf2QoE9zp+7sr+ZcL4HnoZmdwg9sg= golang.org/x/image v0.13.0/go.mod h1:6mmbMOeV28HuMTgA6OSRkdXKYw/t5W9Uwn2Yv1r3Yxk= @@ -142,7 +121,6 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= -golang.org/x/net v0.14.0 h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14= golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= @@ -153,7 +131,6 @@ golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -183,8 +160,6 @@ golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= -google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= diff --git a/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go b/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go index dc68cea1..b6ec0514 100644 --- a/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go +++ b/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go @@ -145,7 +145,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { }, nil case 11: possibleAnswers := make([]string, 0) - for id, _ := range model.ParsedModelRoot.TechnicalAssets { + for id := range model.ParsedModelRoot.TechnicalAssets { possibleAnswers = append(possibleAnswers, id) } sort.Strings(possibleAnswers) @@ -299,12 +299,12 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry Confidentiality: model.Confidential.String(), Integrity: model.Critical.String(), Availability: model.Important.String(), - Justification_cia_rating: "Sourcecode is at least rated as 'critical' in terms of integrity, because 
any " + + JustificationCiaRating: "Sourcecode is at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", } *changeLogCollector = append(*changeLogCollector, "adding data asset: sourcecode") if !dryRun { - modelInput.Data_assets["Sourcecode"] = dataAsset + modelInput.DataAssets["Sourcecode"] = dataAsset } } @@ -321,12 +321,12 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry Confidentiality: model.Confidential.String(), Integrity: model.Critical.String(), Availability: model.Important.String(), - Justification_cia_rating: "Deployment units are at least rated as 'critical' in terms of integrity, because any " + + JustificationCiaRating: "Deployment units are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", } *changeLogCollector = append(*changeLogCollector, "adding data asset: deployment") if !dryRun { - modelInput.Data_assets["Deployment"] = dataAsset + modelInput.DataAssets["Deployment"] = dataAsset } } @@ -340,137 +340,137 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry commLinks := make(map[string]model.InputCommunicationLink) commLinks["Sourcecode Repository Traffic"] = model.InputCommunicationLink{ - Target: sourceRepoID, - Description: "Sourcecode Repository Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EnduserIdentityPropagation.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - Usage: model.DevOps.String(), - Data_assets_sent: []string{"sourcecode"}, - Data_assets_received: []string{"sourcecode"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: sourceRepoID, + Description: "Sourcecode Repository Traffic", + Protocol: model.HTTPS.String(), + Authentication: 
model.Credentials.String(), + Authorization: model.EndUserIdentityPropagation.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"sourcecode"}, + DataAssetsReceived: []string{"sourcecode"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } commLinks["Build Pipeline Traffic"] = model.InputCommunicationLink{ - Target: buildPipelineID, - Description: "Build Pipeline Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EnduserIdentityPropagation.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: true, - Usage: model.DevOps.String(), - Data_assets_sent: nil, - Data_assets_received: []string{"deployment"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: buildPipelineID, + Description: "Build Pipeline Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.EndUserIdentityPropagation.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: true, + Usage: model.DevOps.String(), + DataAssetsSent: nil, + DataAssetsReceived: []string{"deployment"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } commLinks["Artifact Registry Traffic"] = model.InputCommunicationLink{ - Target: artifactRegistryID, - Description: "Artifact Registry Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EnduserIdentityPropagation.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: true, - Usage: model.DevOps.String(), - Data_assets_sent: nil, - Data_assets_received: []string{"deployment"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: artifactRegistryID, + Description: "Artifact Registry Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: 
model.EndUserIdentityPropagation.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: true, + Usage: model.DevOps.String(), + DataAssetsSent: nil, + DataAssetsReceived: []string{"deployment"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } if containerTechUsed { commLinks["Container Registry Traffic"] = model.InputCommunicationLink{ - Target: containerRepoID, - Description: "Container Registry Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EnduserIdentityPropagation.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - Usage: model.DevOps.String(), - Data_assets_sent: []string{"deployment"}, - Data_assets_received: []string{"deployment"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: containerRepoID, + Description: "Container Registry Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.EndUserIdentityPropagation.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"deployment"}, + DataAssetsReceived: []string{"deployment"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } commLinks["Container Platform Traffic"] = model.InputCommunicationLink{ - Target: containerPlatformID, - Description: "Container Platform Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EnduserIdentityPropagation.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - Usage: model.DevOps.String(), - Data_assets_sent: []string{"deployment"}, - Data_assets_received: []string{"deployment"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: containerPlatformID, + Description: "Container Platform Traffic", + Protocol: model.HTTPS.String(), + Authentication: 
model.Credentials.String(), + Authorization: model.EndUserIdentityPropagation.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"deployment"}, + DataAssetsReceived: []string{"deployment"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } } if codeInspectionUsed { commLinks["Code Inspection Platform Traffic"] = model.InputCommunicationLink{ - Target: codeInspectionPlatformID, - Description: "Code Inspection Platform Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EnduserIdentityPropagation.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: true, - Usage: model.DevOps.String(), - Data_assets_sent: nil, - Data_assets_received: []string{"sourcecode"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: codeInspectionPlatformID, + Description: "Code Inspection Platform Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.EndUserIdentityPropagation.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: true, + Usage: model.DevOps.String(), + DataAssetsSent: nil, + DataAssetsReceived: []string{"sourcecode"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } } techAsset := model.InputTechnicalAsset{ - ID: id, - Description: "Development Client", - Type: model.ExternalEntity.String(), - Usage: model.DevOps.String(), - Used_as_client_by_human: true, - Out_of_scope: true, - Justification_out_of_scope: "Development client is not directly in-scope of the application.", - Size: model.System.String(), - Technology: model.DevOpsClient.String(), - Tags: []string{}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Physical.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: 
model.Critical.String(), - Availability: model.Important.String(), - Justification_cia_rating: "Sourcecode processing components are at least rated as 'critical' in terms of integrity, because any " + + ID: id, + Description: "Development Client", + Type: model.ExternalEntity.String(), + Usage: model.DevOps.String(), + UsedAsClientByHuman: true, + OutOfScope: true, + JustificationOutOfScope: "Development client is not directly in-scope of the application.", + Size: model.System.String(), + Technology: model.DevOpsClient.String(), + Tags: []string{}, + Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Physical.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.Critical.String(), + Availability: model.Important.String(), + JustificationCiaRating: "Sourcecode processing components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", - Multi_tenant: false, - Redundant: false, - Custom_developed_parts: false, - Data_assets_processed: []string{"sourcecode", "deployment"}, - Data_assets_stored: []string{"sourcecode", "deployment"}, - Data_formats_accepted: []string{"file"}, - Communication_links: commLinks, + MultiTenant: false, + Redundant: false, + CustomDevelopedParts: false, + DataAssetsProcessed: []string{"sourcecode", "deployment"}, + DataAssetsStored: []string{"sourcecode", "deployment"}, + DataFormatsAccepted: []string{"file"}, + CommunicationLinks: commLinks, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.Technical_assets["Development Client"] = techAsset + modelInput.TechnicalAssets["Development Client"] = techAsset } } @@ -483,36 +483,36 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry encryption = model.Transparent.String() } techAsset := 
model.InputTechnicalAsset{ - ID: id, - Description: macroState["source-repository"][0] + " Sourcecode Repository", - Type: model.Process.String(), - Usage: model.DevOps.String(), - Used_as_client_by_human: false, - Out_of_scope: false, - Justification_out_of_scope: "", - Size: model.Service.String(), - Technology: model.SourcecodeRepository.String(), - Tags: []string{model.NormalizeTag(macroState["source-repository"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Important.String(), - Justification_cia_rating: "Sourcecode processing components are at least rated as 'critical' in terms of integrity, because any " + + ID: id, + Description: macroState["source-repository"][0] + " Sourcecode Repository", + Type: model.Process.String(), + Usage: model.DevOps.String(), + UsedAsClientByHuman: false, + OutOfScope: false, + JustificationOutOfScope: "", + Size: model.Service.String(), + Technology: model.SourcecodeRepository.String(), + Tags: []string{model.NormalizeTag(macroState["source-repository"][0])}, + Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Virtual.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.Critical.String(), + Availability: model.Important.String(), + JustificationCiaRating: "Sourcecode processing components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", - Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - Custom_developed_parts: false, - Data_assets_processed: []string{"sourcecode"}, - Data_assets_stored: []string{"sourcecode"}, - Data_formats_accepted: []string{"file"}, - Communication_links: nil, + 
MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + CustomDevelopedParts: false, + DataAssetsProcessed: []string{"sourcecode"}, + DataAssetsStored: []string{"sourcecode"}, + DataFormatsAccepted: []string{"file"}, + CommunicationLinks: nil, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.Technical_assets[macroState["source-repository"][0]+" Sourcecode Repository"] = techAsset + modelInput.TechnicalAssets[macroState["source-repository"][0]+" Sourcecode Repository"] = techAsset } } @@ -526,36 +526,36 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry encryption = model.Transparent.String() } techAsset := model.InputTechnicalAsset{ - ID: id, - Description: macroState["container-registry"][0] + " Container Registry", - Type: model.Process.String(), - Usage: model.DevOps.String(), - Used_as_client_by_human: false, - Out_of_scope: false, - Justification_out_of_scope: "", - Size: model.Service.String(), - Technology: model.ArtifactRegistry.String(), - Tags: []string{model.NormalizeTag(macroState["container-registry"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Important.String(), - Justification_cia_rating: "Container registry components are at least rated as 'critical' in terms of integrity, because any " + + ID: id, + Description: macroState["container-registry"][0] + " Container Registry", + Type: model.Process.String(), + Usage: model.DevOps.String(), + UsedAsClientByHuman: false, + OutOfScope: false, + JustificationOutOfScope: "", + Size: model.Service.String(), + Technology: model.ArtifactRegistry.String(), + Tags: []string{model.NormalizeTag(macroState["container-registry"][0])}, + Internet: 
strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Virtual.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.Critical.String(), + Availability: model.Important.String(), + JustificationCiaRating: "Container registry components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", - Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - Custom_developed_parts: false, - Data_assets_processed: []string{"deployment"}, - Data_assets_stored: []string{"deployment"}, - Data_formats_accepted: []string{"file"}, - Communication_links: nil, + MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + CustomDevelopedParts: false, + DataAssetsProcessed: []string{"deployment"}, + DataAssetsStored: []string{"deployment"}, + DataFormatsAccepted: []string{"file"}, + CommunicationLinks: nil, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.Technical_assets[macroState["container-registry"][0]+" Container Registry"] = techAsset + modelInput.TechnicalAssets[macroState["container-registry"][0]+" Container Registry"] = techAsset } } @@ -568,36 +568,36 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry encryption = model.Transparent.String() } techAsset := model.InputTechnicalAsset{ - ID: id, - Description: macroState["container-platform"][0] + " Container Platform", - Type: model.Process.String(), - Usage: model.DevOps.String(), - Used_as_client_by_human: false, - Out_of_scope: false, - Justification_out_of_scope: "", - Size: model.System.String(), - Technology: model.ContainerPlatform.String(), - Tags: []string{model.NormalizeTag(macroState["container-platform"][0])}, - Internet: 
strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.MissionCritical.String(), - Availability: model.MissionCritical.String(), - Justification_cia_rating: "Container platform components are rated as 'mission-critical' in terms of integrity and availability, because any " + + ID: id, + Description: macroState["container-platform"][0] + " Container Platform", + Type: model.Process.String(), + Usage: model.DevOps.String(), + UsedAsClientByHuman: false, + OutOfScope: false, + JustificationOutOfScope: "", + Size: model.System.String(), + Technology: model.ContainerPlatform.String(), + Tags: []string{model.NormalizeTag(macroState["container-platform"][0])}, + Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Virtual.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.MissionCritical.String(), + Availability: model.MissionCritical.String(), + JustificationCiaRating: "Container platform components are rated as 'mission-critical' in terms of integrity and availability, because any " + "malicious modification of it might lead to a backdoored production system.", - Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - Custom_developed_parts: false, - Data_assets_processed: []string{"deployment"}, - Data_assets_stored: []string{"deployment"}, - Data_formats_accepted: []string{"file"}, - Communication_links: nil, + MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + CustomDevelopedParts: false, + DataAssetsProcessed: []string{"deployment"}, + DataAssetsStored: []string{"deployment"}, + DataFormatsAccepted: []string{"file"}, + CommunicationLinks: nil, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) 
if !dryRun { - modelInput.Technical_assets[macroState["container-platform"][0]+" Container Platform"] = techAsset + modelInput.TechnicalAssets[macroState["container-platform"][0]+" Container Platform"] = techAsset } } } @@ -613,115 +613,115 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry commLinks := make(map[string]model.InputCommunicationLink) commLinks["Sourcecode Repository Traffic"] = model.InputCommunicationLink{ - Target: sourceRepoID, - Description: "Sourcecode Repository Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: true, - Usage: model.DevOps.String(), - Data_assets_sent: nil, - Data_assets_received: []string{"sourcecode"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: sourceRepoID, + Description: "Sourcecode Repository Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: true, + Usage: model.DevOps.String(), + DataAssetsSent: nil, + DataAssetsReceived: []string{"sourcecode"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } commLinks["Artifact Registry Traffic"] = model.InputCommunicationLink{ - Target: artifactRegistryID, - Description: "Artifact Registry Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - Usage: model.DevOps.String(), - Data_assets_sent: []string{"deployment"}, - Data_assets_received: []string{"deployment"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: artifactRegistryID, + Description: "Artifact Registry Traffic", + Protocol: model.HTTPS.String(), + Authentication: 
model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"deployment"}, + DataAssetsReceived: []string{"deployment"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } if containerTechUsed { commLinks["Container Registry Traffic"] = model.InputCommunicationLink{ - Target: containerRepoID, - Description: "Container Registry Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - Usage: model.DevOps.String(), - Data_assets_sent: []string{"deployment"}, - Data_assets_received: []string{"deployment"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: containerRepoID, + Description: "Container Registry Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"deployment"}, + DataAssetsReceived: []string{"deployment"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } if macroState["push-or-pull"][0] == pushOrPull[0] { // Push commLinks["Container Platform Push"] = model.InputCommunicationLink{ - Target: containerPlatformID, - Description: "Container Platform Push", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - Usage: model.DevOps.String(), - Data_assets_sent: []string{"deployment"}, - Data_assets_received: []string{"deployment"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: containerPlatformID, + Description: "Container Platform Push", 
+ Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"deployment"}, + DataAssetsReceived: []string{"deployment"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } } else { // Pull commLinkPull := model.InputCommunicationLink{ - Target: containerRepoID, - Description: "Container Platform Pull", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: true, - Usage: model.DevOps.String(), - Data_assets_sent: nil, - Data_assets_received: []string{"deployment"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: containerRepoID, + Description: "Container Platform Pull", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: true, + Usage: model.DevOps.String(), + DataAssetsSent: nil, + DataAssetsReceived: []string{"deployment"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } if !dryRun { titleOfTargetAsset := macroState["container-platform"][0] + " Container Platform" - containerPlatform := modelInput.Technical_assets[titleOfTargetAsset] - if containerPlatform.Communication_links == nil { - containerPlatform.Communication_links = make(map[string]model.InputCommunicationLink, 0) + containerPlatform := modelInput.TechnicalAssets[titleOfTargetAsset] + if containerPlatform.CommunicationLinks == nil { + containerPlatform.CommunicationLinks = make(map[string]model.InputCommunicationLink) } - containerPlatform.Communication_links["Container Platform Pull"] = commLinkPull - modelInput.Technical_assets[titleOfTargetAsset] = containerPlatform + 
containerPlatform.CommunicationLinks["Container Platform Pull"] = commLinkPull + modelInput.TechnicalAssets[titleOfTargetAsset] = containerPlatform } } } if codeInspectionUsed { commLinks["Code Inspection Platform Traffic"] = model.InputCommunicationLink{ - Target: codeInspectionPlatformID, - Description: "Code Inspection Platform Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - Usage: model.DevOps.String(), - Data_assets_sent: []string{"sourcecode"}, - Data_assets_received: []string{}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: codeInspectionPlatformID, + Description: "Code Inspection Platform Traffic", + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"sourcecode"}, + DataAssetsReceived: []string{}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } } // The individual deployments @@ -729,73 +729,73 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry //fmt.Println("Adding deployment flow to:", deployTargetID) if containerTechUsed { if !dryRun { - containerPlatform := modelInput.Technical_assets[macroState["container-platform"][0]+" Container Platform"] - if containerPlatform.Communication_links == nil { - containerPlatform.Communication_links = make(map[string]model.InputCommunicationLink, 0) + containerPlatform := modelInput.TechnicalAssets[macroState["container-platform"][0]+" Container Platform"] + if containerPlatform.CommunicationLinks == nil { + containerPlatform.CommunicationLinks = make(map[string]model.InputCommunicationLink) } - containerPlatform.Communication_links["Container Spawning ("+deployTargetID+")"] = 
model.InputCommunicationLink{ - Target: deployTargetID, - Description: "Container Spawning " + deployTargetID, - Protocol: model.ContainerSpawning.String(), - Authentication: model.NoneAuthentication.String(), - Authorization: model.NoneAuthorization.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - Usage: model.DevOps.String(), - Data_assets_sent: []string{"deployment"}, - Data_assets_received: nil, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + containerPlatform.CommunicationLinks["Container Spawning ("+deployTargetID+")"] = model.InputCommunicationLink{ + Target: deployTargetID, + Description: "Container Spawning " + deployTargetID, + Protocol: model.ContainerSpawning.String(), + Authentication: model.NoneAuthentication.String(), + Authorization: model.NoneAuthorization.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"deployment"}, + DataAssetsReceived: nil, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } - modelInput.Technical_assets[macroState["container-platform"][0]+" Container Platform"] = containerPlatform + modelInput.TechnicalAssets[macroState["container-platform"][0]+" Container Platform"] = containerPlatform } } else { // No Containers used if macroState["push-or-pull"][0] == pushOrPull[0] { // Push commLinks["Deployment Push ("+deployTargetID+")"] = model.InputCommunicationLink{ - Target: deployTargetID, - Description: "Deployment Push to " + deployTargetID, - Protocol: model.SSH.String(), - Authentication: model.ClientCertificate.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - Usage: model.DevOps.String(), - Data_assets_sent: []string{"deployment"}, - Data_assets_received: nil, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: deployTargetID, + Description: "Deployment Push to " + 
deployTargetID, + Protocol: model.SSH.String(), + Authentication: model.ClientCertificate.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"deployment"}, + DataAssetsReceived: nil, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } } else { // Pull pullFromWhere := artifactRegistryID commLinkPull := model.InputCommunicationLink{ - Target: pullFromWhere, - Description: "Deployment Pull from " + deployTargetID, - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: true, - Usage: model.DevOps.String(), - Data_assets_sent: nil, - Data_assets_received: []string{"deployment"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: pullFromWhere, + Description: "Deployment Pull from " + deployTargetID, + Protocol: model.HTTPS.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: true, + Usage: model.DevOps.String(), + DataAssetsSent: nil, + DataAssetsReceived: []string{"deployment"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } if !dryRun { // take care to lookup by title (as keyed in input YAML by title and only in parsed model representation by ID) titleOfTargetAsset := model.ParsedModelRoot.TechnicalAssets[deployTargetID].Title - x := modelInput.Technical_assets[titleOfTargetAsset] - if x.Communication_links == nil { - x.Communication_links = make(map[string]model.InputCommunicationLink, 0) + x := modelInput.TechnicalAssets[titleOfTargetAsset] + if x.CommunicationLinks == nil { + x.CommunicationLinks = make(map[string]model.InputCommunicationLink) } - x.Communication_links["Deployment Pull ("+deployTargetID+")"] = commLinkPull - 
modelInput.Technical_assets[titleOfTargetAsset] = x + x.CommunicationLinks["Deployment Pull ("+deployTargetID+")"] = commLinkPull + modelInput.TechnicalAssets[titleOfTargetAsset] = x } } @@ -804,8 +804,8 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry // don't forget to also add the "deployment" data asset as stored on the target targetAssetTitle := model.ParsedModelRoot.TechnicalAssets[deployTargetID].Title assetsStored := make([]string, 0) - if modelInput.Technical_assets[targetAssetTitle].Data_assets_stored != nil { - for _, val := range modelInput.Technical_assets[targetAssetTitle].Data_assets_stored { + if modelInput.TechnicalAssets[targetAssetTitle].DataAssetsStored != nil { + for _, val := range modelInput.TechnicalAssets[targetAssetTitle].DataAssetsStored { assetsStored = append(assetsStored, fmt.Sprintf("%v", val)) } } @@ -815,43 +815,43 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } mergedArrays = append(mergedArrays, "deployment") if !dryRun { - x := modelInput.Technical_assets[targetAssetTitle] - x.Data_assets_stored = mergedArrays - modelInput.Technical_assets[targetAssetTitle] = x + x := modelInput.TechnicalAssets[targetAssetTitle] + x.DataAssetsStored = mergedArrays + modelInput.TechnicalAssets[targetAssetTitle] = x } } techAsset := model.InputTechnicalAsset{ - ID: id, - Description: macroState["build-pipeline"][0] + " Build Pipeline", - Type: model.Process.String(), - Usage: model.DevOps.String(), - Used_as_client_by_human: false, - Out_of_scope: false, - Justification_out_of_scope: "", - Size: model.Service.String(), - Technology: model.BuildPipeline.String(), - Tags: []string{model.NormalizeTag(macroState["build-pipeline"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: 
model.Important.String(), - Justification_cia_rating: "Build pipeline components are at least rated as 'critical' in terms of integrity, because any " + + ID: id, + Description: macroState["build-pipeline"][0] + " Build Pipeline", + Type: model.Process.String(), + Usage: model.DevOps.String(), + UsedAsClientByHuman: false, + OutOfScope: false, + JustificationOutOfScope: "", + Size: model.Service.String(), + Technology: model.BuildPipeline.String(), + Tags: []string{model.NormalizeTag(macroState["build-pipeline"][0])}, + Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Virtual.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.Critical.String(), + Availability: model.Important.String(), + JustificationCiaRating: "Build pipeline components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", - Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - Custom_developed_parts: false, - Data_assets_processed: []string{"sourcecode", "deployment"}, - Data_assets_stored: []string{"sourcecode", "deployment"}, - Data_formats_accepted: []string{"file"}, - Communication_links: commLinks, + MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + CustomDevelopedParts: false, + DataAssetsProcessed: []string{"sourcecode", "deployment"}, + DataAssetsStored: []string{"sourcecode", "deployment"}, + DataFormatsAccepted: []string{"file"}, + CommunicationLinks: commLinks, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.Technical_assets[macroState["build-pipeline"][0]+" Build Pipeline"] = techAsset + modelInput.TechnicalAssets[macroState["build-pipeline"][0]+" Build Pipeline"] = techAsset } } @@ -864,36 +864,36 @@ func 
applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry encryption = model.Transparent.String() } techAsset := model.InputTechnicalAsset{ - ID: id, - Description: macroState["artifact-registry"][0] + " Artifact Registry", - Type: model.Process.String(), - Usage: model.DevOps.String(), - Used_as_client_by_human: false, - Out_of_scope: false, - Justification_out_of_scope: "", - Size: model.Service.String(), - Technology: model.ArtifactRegistry.String(), - Tags: []string{model.NormalizeTag(macroState["artifact-registry"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Important.String(), - Justification_cia_rating: "Artifact registry components are at least rated as 'critical' in terms of integrity, because any " + + ID: id, + Description: macroState["artifact-registry"][0] + " Artifact Registry", + Type: model.Process.String(), + Usage: model.DevOps.String(), + UsedAsClientByHuman: false, + OutOfScope: false, + JustificationOutOfScope: "", + Size: model.Service.String(), + Technology: model.ArtifactRegistry.String(), + Tags: []string{model.NormalizeTag(macroState["artifact-registry"][0])}, + Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Virtual.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.Critical.String(), + Availability: model.Important.String(), + JustificationCiaRating: "Artifact registry components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", - Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - Custom_developed_parts: false, - Data_assets_processed: []string{"sourcecode", "deployment"}, - 
Data_assets_stored: []string{"sourcecode", "deployment"}, - Data_formats_accepted: []string{"file"}, - Communication_links: nil, + MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + CustomDevelopedParts: false, + DataAssetsProcessed: []string{"sourcecode", "deployment"}, + DataAssetsStored: []string{"sourcecode", "deployment"}, + DataFormatsAccepted: []string{"file"}, + CommunicationLinks: nil, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.Technical_assets[macroState["artifact-registry"][0]+" Artifact Registry"] = techAsset + modelInput.TechnicalAssets[macroState["artifact-registry"][0]+" Artifact Registry"] = techAsset } } @@ -907,36 +907,36 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry encryption = model.Transparent.String() } techAsset := model.InputTechnicalAsset{ - ID: id, - Description: macroState["code-inspection-platform"][0] + " Code Inspection Platform", - Type: model.Process.String(), - Usage: model.DevOps.String(), - Used_as_client_by_human: false, - Out_of_scope: false, - Justification_out_of_scope: "", - Size: model.Service.String(), - Technology: model.CodeInspectionPlatform.String(), - Tags: []string{model.NormalizeTag(macroState["code-inspection-platform"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), - Encryption: encryption, - Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Important.String(), - Availability: model.Operational.String(), - Justification_cia_rating: "Sourcecode inspection platforms are rated at least 'important' in terms of integrity, because any " + + ID: id, + Description: macroState["code-inspection-platform"][0] + " Code Inspection Platform", + Type: model.Process.String(), + Usage: model.DevOps.String(), + UsedAsClientByHuman: false, + OutOfScope: false, + 
JustificationOutOfScope: "", + Size: model.Service.String(), + Technology: model.CodeInspectionPlatform.String(), + Tags: []string{model.NormalizeTag(macroState["code-inspection-platform"][0])}, + Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Machine: model.Virtual.String(), + Encryption: encryption, + Owner: owner, + Confidentiality: model.Confidential.String(), + Integrity: model.Important.String(), + Availability: model.Operational.String(), + JustificationCiaRating: "Sourcecode inspection platforms are rated at least 'important' in terms of integrity, because any " + "malicious modification of it might lead to vulnerabilities found by the scanner engine not being shown.", - Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - Custom_developed_parts: false, - Data_assets_processed: []string{"sourcecode"}, - Data_assets_stored: []string{"sourcecode"}, - Data_formats_accepted: []string{"file"}, - Communication_links: nil, + MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + CustomDevelopedParts: false, + DataAssetsProcessed: []string{"sourcecode"}, + DataAssetsStored: []string{"sourcecode"}, + DataFormatsAccepted: []string{"file"}, + CommunicationLinks: nil, } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.Technical_assets[macroState["code-inspection-platform"][0]+" Code Inspection Platform"] = techAsset + modelInput.TechnicalAssets[macroState["code-inspection-platform"][0]+" Code Inspection Platform"] = techAsset } } } @@ -947,25 +947,25 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry //fmt.Println("Adding new trust boundary of type:", trustBoundaryType) title := "DevOps Network" trustBoundary := model.InputTrustBoundary{ - ID: "devops-network", - Description: "DevOps Network", - Type: trustBoundaryType, - Tags: []string{}, - 
Technical_assets_inside: serverSideTechAssets, - Trust_boundaries_nested: nil, + ID: "devops-network", + Description: "DevOps Network", + Type: trustBoundaryType, + Tags: []string{}, + TechnicalAssetsInside: serverSideTechAssets, + TrustBoundariesNested: nil, } *changeLogCollector = append(*changeLogCollector, "adding trust boundary: devops-network") if !dryRun { - modelInput.Trust_boundaries[title] = trustBoundary + modelInput.TrustBoundaries[title] = trustBoundary } } else { existingTrustBoundaryToAddTo := macroState["selected-trust-boundary"][0] //fmt.Println("Adding to existing trust boundary:", existingTrustBoundaryToAddTo) title := model.ParsedModelRoot.TrustBoundaries[existingTrustBoundaryToAddTo].Title assetsInside := make([]string, 0) - if modelInput.Trust_boundaries[title].Technical_assets_inside != nil { - vals := modelInput.Trust_boundaries[title].Technical_assets_inside - for _, val := range vals { + if modelInput.TrustBoundaries[title].TechnicalAssetsInside != nil { + values := modelInput.TrustBoundaries[title].TechnicalAssetsInside + for _, val := range values { assetsInside = append(assetsInside, fmt.Sprintf("%v", val)) } } @@ -976,12 +976,12 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry mergedArrays = append(mergedArrays, serverSideTechAssets...) 
*changeLogCollector = append(*changeLogCollector, "filling existing trust boundary: "+existingTrustBoundaryToAddTo) if !dryRun { - if modelInput.Trust_boundaries == nil { - modelInput.Trust_boundaries = make(map[string]model.InputTrustBoundary, 0) + if modelInput.TrustBoundaries == nil { + modelInput.TrustBoundaries = make(map[string]model.InputTrustBoundary) } - tb := modelInput.Trust_boundaries[title] - tb.Technical_assets_inside = mergedArrays - modelInput.Trust_boundaries[title] = tb + tb := modelInput.TrustBoundaries[title] + tb.TechnicalAssetsInside = mergedArrays + modelInput.TrustBoundaries[title] = tb } } } @@ -994,17 +994,17 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } title := macroState["container-platform"][0] + " Runtime" sharedRuntime := model.InputSharedRuntime{ - ID: containerSharedRuntimeID, - Description: title, - Tags: []string{model.NormalizeTag(macroState["container-platform"][0])}, - Technical_assets_running: assetsRunning, + ID: containerSharedRuntimeID, + Description: title, + Tags: []string{model.NormalizeTag(macroState["container-platform"][0])}, + TechnicalAssetsRunning: assetsRunning, } *changeLogCollector = append(*changeLogCollector, "adding shared runtime: "+containerSharedRuntimeID) if !dryRun { - if modelInput.Shared_runtimes == nil { - modelInput.Shared_runtimes = make(map[string]model.InputSharedRuntime, 0) + if modelInput.SharedRuntimes == nil { + modelInput.SharedRuntimes = make(map[string]model.InputSharedRuntime) } - modelInput.Shared_runtimes[title] = sharedRuntime + modelInput.SharedRuntimes[title] = sharedRuntime } } diff --git a/macros/built-in/add-vault/add-vault-macro.go b/macros/built-in/add-vault/add-vault-macro.go index 03ec5f57..06fc5065 100644 --- a/macros/built-in/add-vault/add-vault-macro.go +++ b/macros/built-in/add-vault/add-vault-macro.go @@ -84,7 +84,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { }, nil case 4: possibleAnswers := 
make([]string, 0) - for id, _ := range model.ParsedModelRoot.TechnicalAssets { + for id := range model.ParsedModelRoot.TechnicalAssets { possibleAnswers = append(possibleAnswers, id) } sort.Strings(possibleAnswers) @@ -181,21 +181,21 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry if _, exists := model.ParsedModelRoot.DataAssets["Configuration Secrets"]; !exists { dataAsset := model.InputDataAsset{ - ID: "configuration-secrets", - Description: "Configuration secrets (like credentials, keys, certificates, etc.) secured and managed by a vault", - Usage: model.DevOps.String(), - Tags: []string{}, - Origin: "", - Owner: "", - Quantity: model.VeryFew.String(), - Confidentiality: model.StrictlyConfidential.String(), - Integrity: model.Critical.String(), - Availability: model.Critical.String(), - Justification_cia_rating: "Configuration secrets are rated as being 'strictly-confidential'.", + ID: "configuration-secrets", + Description: "Configuration secrets (like credentials, keys, certificates, etc.) 
secured and managed by a vault", + Usage: model.DevOps.String(), + Tags: []string{}, + Origin: "", + Owner: "", + Quantity: model.VeryFew.String(), + Confidentiality: model.StrictlyConfidential.String(), + Integrity: model.Critical.String(), + Availability: model.Critical.String(), + JustificationCiaRating: "Configuration secrets are rated as being 'strictly-confidential'.", } *changeLogCollector = append(*changeLogCollector, "adding data asset: configuration-secrets") if !dryRun { - modelInput.Data_assets["Configuration Secrets"] = dataAsset + modelInput.DataAssets["Configuration Secrets"] = dataAsset } } @@ -213,35 +213,35 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry if _, exists := model.ParsedModelRoot.TechnicalAssets[storageID]; !exists { serverSideTechAssets = append(serverSideTechAssets, storageID) techAsset := model.InputTechnicalAsset{ - ID: storageID, - Description: "Vault Storage", - Type: model.Datastore.String(), - Usage: model.DevOps.String(), - Used_as_client_by_human: false, - Out_of_scope: false, - Justification_out_of_scope: "", - Size: model.Component.String(), - Technology: tech, - Tags: []string{}, // TODO: let user enter or too detailed for a wizard? - Internet: false, - Machine: model.Virtual.String(), // TODO: let user enter or too detailed for a wizard? 
- Encryption: model.DataWithSymmetricSharedKey.String(), // can be assumed for a vault product as at least having some good encryption - Owner: "", - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Critical.String(), - Justification_cia_rating: "Vault components are only rated as 'confidential' as vaults usually apply a trust barrier to encrypt all data-at-rest with a vault key.", - Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - Custom_developed_parts: false, - Data_assets_processed: nil, - Data_assets_stored: []string{"configuration-secrets"}, - Data_formats_accepted: nil, - Communication_links: nil, + ID: storageID, + Description: "Vault Storage", + Type: model.Datastore.String(), + Usage: model.DevOps.String(), + UsedAsClientByHuman: false, + OutOfScope: false, + JustificationOutOfScope: "", + Size: model.Component.String(), + Technology: tech, + Tags: []string{}, // TODO: let user enter or too detailed for a wizard? + Internet: false, + Machine: model.Virtual.String(), // TODO: let user enter or too detailed for a wizard? 
+ Encryption: model.DataWithSymmetricSharedKey.String(), // can be assumed for a vault product as at least having some good encryption + Owner: "", + Confidentiality: model.Confidential.String(), + Integrity: model.Critical.String(), + Availability: model.Critical.String(), + JustificationCiaRating: "Vault components are only rated as 'confidential' as vaults usually apply a trust barrier to encrypt all data-at-rest with a vault key.", + MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + CustomDevelopedParts: false, + DataAssetsProcessed: nil, + DataAssetsStored: []string{"configuration-secrets"}, + DataFormatsAccepted: nil, + CommunicationLinks: nil, } *changeLogCollector = append(*changeLogCollector, "adding technical asset: "+storageID) if !dryRun { - modelInput.Technical_assets["Vault Storage"] = techAsset + modelInput.TechnicalAssets["Vault Storage"] = techAsset } } } @@ -254,23 +254,23 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry if databaseUsed || filesystemUsed { accessLink := model.InputCommunicationLink{ - Target: storageID, - Description: "Vault Storage Access", - Protocol: model.LocalFileAccess.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: false, - Usage: model.DevOps.String(), - Data_assets_sent: []string{"configuration-secrets"}, - Data_assets_received: []string{"configuration-secrets"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: storageID, + Description: "Vault Storage Access", + Protocol: model.LocalFileAccess.String(), + Authentication: model.Credentials.String(), + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: false, + Usage: model.DevOps.String(), + DataAssetsSent: []string{"configuration-secrets"}, + DataAssetsReceived: 
[]string{"configuration-secrets"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } if databaseUsed { - accessLink.Protocol = model.SQL_access_protocol.String() // TODO ask if encrypted and ask if NoSQL? or to detailed for a wizard? + accessLink.Protocol = model.SqlAccessProtocol.String() // TODO ask if encrypted and ask if NoSQL? or to detailed for a wizard? } commLinks["Vault Storage Access"] = accessLink } @@ -287,31 +287,31 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } for _, clientID := range macroState["clients"] { // add a connection from each client clientAccessCommLink := model.InputCommunicationLink{ - Target: vaultID, - Description: "Vault Access Traffic (by " + clientID + ")", - Protocol: model.HTTPS.String(), - Authentication: authentication, - Authorization: model.TechnicalUser.String(), - Tags: []string{}, - VPN: false, - IP_filtered: false, - Readonly: true, - Usage: model.DevOps.String(), - Data_assets_sent: nil, - Data_assets_received: []string{"configuration-secrets"}, - Diagram_tweak_weight: 0, - Diagram_tweak_constraint: false, + Target: vaultID, + Description: "Vault Access Traffic (by " + clientID + ")", + Protocol: model.HTTPS.String(), + Authentication: authentication, + Authorization: model.TechnicalUser.String(), + Tags: []string{}, + VPN: false, + IpFiltered: false, + Readonly: true, + Usage: model.DevOps.String(), + DataAssetsSent: nil, + DataAssetsReceived: []string{"configuration-secrets"}, + DiagramTweakWeight: 0, + DiagramTweakConstraint: false, } clientAssetTitle := model.ParsedModelRoot.TechnicalAssets[clientID].Title if !dryRun { - client := modelInput.Technical_assets[clientAssetTitle] - client.Communication_links["Vault Access ("+clientID+")"] = clientAccessCommLink - modelInput.Technical_assets[clientAssetTitle] = client + client := modelInput.TechnicalAssets[clientAssetTitle] + client.CommunicationLinks["Vault Access ("+clientID+")"] = clientAccessCommLink + 
modelInput.TechnicalAssets[clientAssetTitle] = client } // don't forget to also add the "configuration-secrets" data asset as processed on the client assetsProcessed := make([]string, 0) - if modelInput.Technical_assets[clientAssetTitle].Data_assets_processed != nil { - for _, val := range modelInput.Technical_assets[clientAssetTitle].Data_assets_processed { + if modelInput.TechnicalAssets[clientAssetTitle].DataAssetsProcessed != nil { + for _, val := range modelInput.TechnicalAssets[clientAssetTitle].DataAssetsProcessed { assetsProcessed = append(assetsProcessed, fmt.Sprintf("%v", val)) } } @@ -321,45 +321,45 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } mergedArrays = append(mergedArrays, "configuration-secrets") if !dryRun { - x := modelInput.Technical_assets[clientAssetTitle] - x.Data_assets_processed = mergedArrays - modelInput.Technical_assets[clientAssetTitle] = x + x := modelInput.TechnicalAssets[clientAssetTitle] + x.DataAssetsProcessed = mergedArrays + modelInput.TechnicalAssets[clientAssetTitle] = x } } techAsset := model.InputTechnicalAsset{ - ID: vaultID, - Description: macroState["vault-name"][0] + " Vault", - Type: model.Process.String(), - Usage: model.DevOps.String(), - Used_as_client_by_human: false, - Out_of_scope: false, - Justification_out_of_scope: "", - Size: model.Service.String(), - Technology: model.Vault.String(), - Tags: []string{model.NormalizeTag(macroState["vault-name"][0])}, - Internet: false, - Machine: model.Virtual.String(), - Encryption: model.Transparent.String(), - Owner: "", - Confidentiality: model.StrictlyConfidential.String(), - Integrity: model.Critical.String(), - Availability: model.Critical.String(), - Justification_cia_rating: "Vault components are rated as 'strictly-confidential'.", - Multi_tenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", - Redundant: false, - Custom_developed_parts: false, - Data_assets_processed: []string{"configuration-secrets"}, - 
Data_assets_stored: nil, - Data_formats_accepted: nil, - Communication_links: commLinks, + ID: vaultID, + Description: macroState["vault-name"][0] + " Vault", + Type: model.Process.String(), + Usage: model.DevOps.String(), + UsedAsClientByHuman: false, + OutOfScope: false, + JustificationOutOfScope: "", + Size: model.Service.String(), + Technology: model.Vault.String(), + Tags: []string{model.NormalizeTag(macroState["vault-name"][0])}, + Internet: false, + Machine: model.Virtual.String(), + Encryption: model.Transparent.String(), + Owner: "", + Confidentiality: model.StrictlyConfidential.String(), + Integrity: model.Critical.String(), + Availability: model.Critical.String(), + JustificationCiaRating: "Vault components are rated as 'strictly-confidential'.", + MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + Redundant: false, + CustomDevelopedParts: false, + DataAssetsProcessed: []string{"configuration-secrets"}, + DataAssetsStored: nil, + DataFormatsAccepted: nil, + CommunicationLinks: commLinks, } if inMemoryUsed { - techAsset.Data_assets_stored = []string{"configuration-secrets"} + techAsset.DataAssetsStored = []string{"configuration-secrets"} } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+vaultID) if !dryRun { - modelInput.Technical_assets[macroState["vault-name"][0]+" Vault"] = techAsset + modelInput.TechnicalAssets[macroState["vault-name"][0]+" Vault"] = techAsset } } @@ -367,16 +367,16 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry if filesystemUsed { title := "Vault Environment" trustBoundary := model.InputTrustBoundary{ - ID: vaultEnvID, - Description: "Vault Environment", - Type: model.ExecutionEnvironment.String(), - Tags: []string{}, - Technical_assets_inside: []string{vaultID, storageID}, - Trust_boundaries_nested: nil, + ID: vaultEnvID, + Description: "Vault Environment", + Type: model.ExecutionEnvironment.String(), + Tags: 
[]string{}, + TechnicalAssetsInside: []string{vaultID, storageID}, + TrustBoundariesNested: nil, } *changeLogCollector = append(*changeLogCollector, "adding trust boundary: "+vaultEnvID) if !dryRun { - modelInput.Trust_boundaries[title] = trustBoundary + modelInput.TrustBoundaries[title] = trustBoundary } } @@ -391,13 +391,13 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry Tags: []string{}, } if filesystemUsed { - trustBoundary.Trust_boundaries_nested = []string{vaultEnvID} + trustBoundary.TrustBoundariesNested = []string{vaultEnvID} } else { - trustBoundary.Technical_assets_inside = serverSideTechAssets + trustBoundary.TechnicalAssetsInside = serverSideTechAssets } *changeLogCollector = append(*changeLogCollector, "adding trust boundary: vault-network") if !dryRun { - modelInput.Trust_boundaries[title] = trustBoundary + modelInput.TrustBoundaries[title] = trustBoundary } } else { // adding to existing trust boundary existingTrustBoundaryToAddTo := macroState["selected-trust-boundary"][0] @@ -405,9 +405,9 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry if filesystemUsed { // ---------------------- nest as execution-environment trust boundary ---------------------- boundariesNested := make([]string, 0) - if modelInput.Trust_boundaries[title].Trust_boundaries_nested != nil { - vals := modelInput.Trust_boundaries[title].Trust_boundaries_nested - for _, val := range vals { + if modelInput.TrustBoundaries[title].TrustBoundariesNested != nil { + values := modelInput.TrustBoundaries[title].TrustBoundariesNested + for _, val := range values { boundariesNested = append(boundariesNested, fmt.Sprintf("%v", val)) } } @@ -418,15 +418,15 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry mergedArrays = append(mergedArrays, vaultEnvID) *changeLogCollector = append(*changeLogCollector, "filling existing trust boundary: "+existingTrustBoundaryToAddTo) if !dryRun { - tb := 
modelInput.Trust_boundaries[title] - tb.Trust_boundaries_nested = mergedArrays - modelInput.Trust_boundaries[title] = tb + tb := modelInput.TrustBoundaries[title] + tb.TrustBoundariesNested = mergedArrays + modelInput.TrustBoundaries[title] = tb } } else { // ---------------------- place assets inside directly ---------------------- assetsInside := make([]string, 0) - if modelInput.Trust_boundaries[title].Technical_assets_inside != nil { - vals := modelInput.Trust_boundaries[title].Technical_assets_inside - for _, val := range vals { + if modelInput.TrustBoundaries[title].TechnicalAssetsInside != nil { + values := modelInput.TrustBoundaries[title].TechnicalAssetsInside + for _, val := range values { assetsInside = append(assetsInside, fmt.Sprintf("%v", val)) } } @@ -437,9 +437,9 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry mergedArrays = append(mergedArrays, serverSideTechAssets...) *changeLogCollector = append(*changeLogCollector, "filling existing trust boundary: "+existingTrustBoundaryToAddTo) if !dryRun { - tb := modelInput.Trust_boundaries[title] - tb.Technical_assets_inside = mergedArrays - modelInput.Trust_boundaries[title] = tb + tb := modelInput.TrustBoundaries[title] + tb.TechnicalAssetsInside = mergedArrays + modelInput.TrustBoundaries[title] = tb } } } diff --git a/macros/built-in/pretty-print/pretty-print-macro.go b/macros/built-in/pretty-print/pretty-print-macro.go index 64149c57..51c05c05 100644 --- a/macros/built-in/pretty-print/pretty-print-macro.go +++ b/macros/built-in/pretty-print/pretty-print-macro.go @@ -14,7 +14,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { return model.NoMoreQuestions(), nil } -func ApplyAnswer(questionID string, answer ...string) (message string, validResult bool, err error) { +func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { return "Answer processed", true, nil } @@ -22,10 +22,10 @@ func GoBack() (message string, 
validResult bool, err error) { return "Cannot go back further", false, nil } -func GetFinalChangeImpact(modelInput *model.ModelInput) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(_ *model.ModelInput) (changes []string, message string, validResult bool, err error) { return []string{"pretty-printing the model file"}, "Changeset valid", true, err } -func Execute(modelInput *model.ModelInput) (message string, validResult bool, err error) { +func Execute(_ *model.ModelInput) (message string, validResult bool, err error) { return "Model pretty printing successful", true, nil } diff --git a/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go b/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go index f0deaec9..478ce5a2 100644 --- a/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go +++ b/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go @@ -18,7 +18,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { return model.NoMoreQuestions(), nil } -func ApplyAnswer(questionID string, answer ...string) (message string, validResult bool, err error) { +func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { return "Answer processed", true, nil } @@ -26,12 +26,12 @@ func GoBack() (message string, validResult bool, err error) { return "Cannot go back further", false, nil } -func GetFinalChangeImpact(modelInput *model.ModelInput) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(_ *model.ModelInput) (changes []string, message string, validResult bool, err error) { return []string{"remove unused tags from the model file"}, "Changeset valid", true, err } func Execute(modelInput *model.ModelInput) (message string, validResult bool, err error) { - tagUsageMap := make(map[string]bool, 0) + tagUsageMap := make(map[string]bool) for _, tag := range model.ParsedModelRoot.TagsAvailable { tagUsageMap[tag] = 
false // false = tag is not used } @@ -70,6 +70,6 @@ func Execute(modelInput *model.ModelInput) (message string, validResult bool, er } } sort.Strings(tagsSorted) - modelInput.Tags_available = tagsSorted + modelInput.TagsAvailable = tagsSorted return "Model file removal of " + strconv.Itoa(counter) + " unused tags successful", true, nil } diff --git a/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go b/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go index 9a64557a..8635c07b 100644 --- a/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go +++ b/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go @@ -18,7 +18,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { return model.NoMoreQuestions(), nil } -func ApplyAnswer(questionID string, answer ...string) (message string, validResult bool, err error) { +func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { return "Answer processed", true, nil } @@ -26,7 +26,7 @@ func GoBack() (message string, validResult bool, err error) { return "Cannot go back further", false, nil } -func GetFinalChangeImpact(modelInput *model.ModelInput) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(_ *model.ModelInput) (changes []string, message string, validResult bool, err error) { return []string{"seed the model file with with initial risk tracking entries for all untracked risks"}, "Changeset valid", true, err } @@ -38,16 +38,16 @@ func Execute(modelInput *model.ModelInput) (message string, validResult bool, er } } sort.Strings(syntheticRiskIDsToCreateTrackingFor) - if modelInput.Risk_tracking == nil { - modelInput.Risk_tracking = make(map[string]model.InputRiskTracking, 0) + if modelInput.RiskTracking == nil { + modelInput.RiskTracking = make(map[string]model.InputRiskTracking) } for _, id := range syntheticRiskIDsToCreateTrackingFor { - modelInput.Risk_tracking[id] = model.InputRiskTracking{ 
+ modelInput.RiskTracking[id] = model.InputRiskTracking{ Status: model.Unchecked.String(), Justification: "", Ticket: "", Date: "", - Checked_by: "", + CheckedBy: "", } } return "Model file seeding with " + strconv.Itoa(len(syntheticRiskIDsToCreateTrackingFor)) + " initial risk tracking successful", true, nil diff --git a/macros/built-in/seed-tags/seed-tags-macro.go b/macros/built-in/seed-tags/seed-tags-macro.go index fc65c414..427a5281 100644 --- a/macros/built-in/seed-tags/seed-tags-macro.go +++ b/macros/built-in/seed-tags/seed-tags-macro.go @@ -18,7 +18,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { return model.NoMoreQuestions(), nil } -func ApplyAnswer(questionID string, answer ...string) (message string, validResult bool, err error) { +func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { return "Answer processed", true, nil } @@ -26,12 +26,12 @@ func GoBack() (message string, validResult bool, err error) { return "Cannot go back further", false, nil } -func GetFinalChangeImpact(modelInput *model.ModelInput) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(_ *model.ModelInput) (changes []string, message string, validResult bool, err error) { return []string{"seed the model file with supported tags from all risk rules"}, "Changeset valid", true, err } func Execute(modelInput *model.ModelInput) (message string, validResult bool, err error) { - tagMap := make(map[string]bool, 0) + tagMap := make(map[string]bool) for k, v := range model.AllSupportedTags { tagMap[k] = v } @@ -43,6 +43,6 @@ func Execute(modelInput *model.ModelInput) (message string, validResult bool, er tagsSorted = append(tagsSorted, tag) } sort.Strings(tagsSorted) - modelInput.Tags_available = tagsSorted + modelInput.TagsAvailable = tagsSorted return "Model file seeding with " + strconv.Itoa(len(model.AllSupportedTags)) + " tags successful", true, nil } diff --git a/main.go b/main.go index 
938047aa..87532907 100644 --- a/main.go +++ b/main.go @@ -17,7 +17,6 @@ import ( "fmt" "hash/fnv" "io" - "io/ioutil" "log" "net/http" "os" @@ -34,83 +33,104 @@ import ( "github.com/gin-gonic/gin" "github.com/google/uuid" "github.com/threagile/threagile/colors" - add_build_pipeline "github.com/threagile/threagile/macros/built-in/add-build-pipeline" - add_vault "github.com/threagile/threagile/macros/built-in/add-vault" - pretty_print "github.com/threagile/threagile/macros/built-in/pretty-print" - remove_unused_tags "github.com/threagile/threagile/macros/built-in/remove-unused-tags" - seed_risk_tracking "github.com/threagile/threagile/macros/built-in/seed-risk-tracking" - seed_tags "github.com/threagile/threagile/macros/built-in/seed-tags" + addbuildpipeline "github.com/threagile/threagile/macros/built-in/add-build-pipeline" + addvault "github.com/threagile/threagile/macros/built-in/add-vault" + prettyprint "github.com/threagile/threagile/macros/built-in/pretty-print" + removeunusedtags "github.com/threagile/threagile/macros/built-in/remove-unused-tags" + seedrisktracking "github.com/threagile/threagile/macros/built-in/seed-risk-tracking" + seedtags "github.com/threagile/threagile/macros/built-in/seed-tags" "github.com/threagile/threagile/model" "github.com/threagile/threagile/report" - accidental_secret_leak "github.com/threagile/threagile/risks/built-in/accidental-secret-leak" - code_backdooring "github.com/threagile/threagile/risks/built-in/code-backdooring" - container_baseimage_backdooring "github.com/threagile/threagile/risks/built-in/container-baseimage-backdooring" - container_platform_escape "github.com/threagile/threagile/risks/built-in/container-platform-escape" - cross_site_request_forgery "github.com/threagile/threagile/risks/built-in/cross-site-request-forgery" - cross_site_scripting "github.com/threagile/threagile/risks/built-in/cross-site-scripting" - dos_risky_access_across_trust_boundary 
"github.com/threagile/threagile/risks/built-in/dos-risky-access-across-trust-boundary" - incomplete_model "github.com/threagile/threagile/risks/built-in/incomplete-model" - ldap_injection "github.com/threagile/threagile/risks/built-in/ldap-injection" - missing_authentication "github.com/threagile/threagile/risks/built-in/missing-authentication" - missing_authentication_second_factor "github.com/threagile/threagile/risks/built-in/missing-authentication-second-factor" - missing_build_infrastructure "github.com/threagile/threagile/risks/built-in/missing-build-infrastructure" - missing_cloud_hardening "github.com/threagile/threagile/risks/built-in/missing-cloud-hardening" - missing_file_validation "github.com/threagile/threagile/risks/built-in/missing-file-validation" - missing_hardening "github.com/threagile/threagile/risks/built-in/missing-hardening" - missing_identity_propagation "github.com/threagile/threagile/risks/built-in/missing-identity-propagation" - missing_identity_provider_isolation "github.com/threagile/threagile/risks/built-in/missing-identity-provider-isolation" - missing_identity_store "github.com/threagile/threagile/risks/built-in/missing-identity-store" - missing_network_segmentation "github.com/threagile/threagile/risks/built-in/missing-network-segmentation" - missing_vault "github.com/threagile/threagile/risks/built-in/missing-vault" - missing_vault_isolation "github.com/threagile/threagile/risks/built-in/missing-vault-isolation" - missing_waf "github.com/threagile/threagile/risks/built-in/missing-waf" - mixed_targets_on_shared_runtime "github.com/threagile/threagile/risks/built-in/mixed-targets-on-shared-runtime" - path_traversal "github.com/threagile/threagile/risks/built-in/path-traversal" - push_instead_of_pull_deployment "github.com/threagile/threagile/risks/built-in/push-instead-of-pull-deployment" - search_query_injection "github.com/threagile/threagile/risks/built-in/search-query-injection" - server_side_request_forgery 
"github.com/threagile/threagile/risks/built-in/server-side-request-forgery" - service_registry_poisoning "github.com/threagile/threagile/risks/built-in/service-registry-poisoning" - sql_nosql_injection "github.com/threagile/threagile/risks/built-in/sql-nosql-injection" - unchecked_deployment "github.com/threagile/threagile/risks/built-in/unchecked-deployment" - unencrypted_asset "github.com/threagile/threagile/risks/built-in/unencrypted-asset" - unencrypted_communication "github.com/threagile/threagile/risks/built-in/unencrypted-communication" - unguarded_access_from_internet "github.com/threagile/threagile/risks/built-in/unguarded-access-from-internet" - unguarded_direct_datastore_access "github.com/threagile/threagile/risks/built-in/unguarded-direct-datastore-access" - unnecessary_communication_link "github.com/threagile/threagile/risks/built-in/unnecessary-communication-link" - unnecessary_data_asset "github.com/threagile/threagile/risks/built-in/unnecessary-data-asset" - unnecessary_data_transfer "github.com/threagile/threagile/risks/built-in/unnecessary-data-transfer" - unnecessary_technical_asset "github.com/threagile/threagile/risks/built-in/unnecessary-technical-asset" - untrusted_deserialization "github.com/threagile/threagile/risks/built-in/untrusted-deserialization" - wrong_communication_link_content "github.com/threagile/threagile/risks/built-in/wrong-communication-link-content" - wrong_trust_boundary_content "github.com/threagile/threagile/risks/built-in/wrong-trust-boundary-content" - xml_external_entity "github.com/threagile/threagile/risks/built-in/xml-external-entity" + accidentalsecretleak "github.com/threagile/threagile/risks/built-in/accidental-secret-leak" + codebackdooring "github.com/threagile/threagile/risks/built-in/code-backdooring" + containerbaseimagebackdooring "github.com/threagile/threagile/risks/built-in/container-baseimage-backdooring" + containerplatformescape 
"github.com/threagile/threagile/risks/built-in/container-platform-escape" + crosssiterequestforgery "github.com/threagile/threagile/risks/built-in/cross-site-request-forgery" + crosssitescripting "github.com/threagile/threagile/risks/built-in/cross-site-scripting" + dosriskyaccessacrosstrustboundary "github.com/threagile/threagile/risks/built-in/dos-risky-access-across-trust-boundary" + incompletemodel "github.com/threagile/threagile/risks/built-in/incomplete-model" + ldapinjection "github.com/threagile/threagile/risks/built-in/ldap-injection" + missingauthentication "github.com/threagile/threagile/risks/built-in/missing-authentication" + missingauthenticationsecondfactor "github.com/threagile/threagile/risks/built-in/missing-authentication-second-factor" + missingbuildinfrastructure "github.com/threagile/threagile/risks/built-in/missing-build-infrastructure" + missingcloudhardening "github.com/threagile/threagile/risks/built-in/missing-cloud-hardening" + missingfilevalidation "github.com/threagile/threagile/risks/built-in/missing-file-validation" + missinghardening "github.com/threagile/threagile/risks/built-in/missing-hardening" + missingidentitypropagation "github.com/threagile/threagile/risks/built-in/missing-identity-propagation" + missingidentityproviderisolation "github.com/threagile/threagile/risks/built-in/missing-identity-provider-isolation" + missingidentitystore "github.com/threagile/threagile/risks/built-in/missing-identity-store" + missingnetworksegmentation "github.com/threagile/threagile/risks/built-in/missing-network-segmentation" + missingvault "github.com/threagile/threagile/risks/built-in/missing-vault" + missingvaultisolation "github.com/threagile/threagile/risks/built-in/missing-vault-isolation" + missingwaf "github.com/threagile/threagile/risks/built-in/missing-waf" + mixedtargetsonsharedruntime "github.com/threagile/threagile/risks/built-in/mixed-targets-on-shared-runtime" + pathtraversal 
"github.com/threagile/threagile/risks/built-in/path-traversal" + pushinsteadofpulldeployment "github.com/threagile/threagile/risks/built-in/push-instead-of-pull-deployment" + searchqueryinjection "github.com/threagile/threagile/risks/built-in/search-query-injection" + serversiderequestforgery "github.com/threagile/threagile/risks/built-in/server-side-request-forgery" + serviceregistrypoisoning "github.com/threagile/threagile/risks/built-in/service-registry-poisoning" + sqlnosqlinjection "github.com/threagile/threagile/risks/built-in/sql-nosql-injection" + uncheckeddeployment "github.com/threagile/threagile/risks/built-in/unchecked-deployment" + unencryptedasset "github.com/threagile/threagile/risks/built-in/unencrypted-asset" + unencryptedcommunication "github.com/threagile/threagile/risks/built-in/unencrypted-communication" + unguardedaccessfrominternet "github.com/threagile/threagile/risks/built-in/unguarded-access-from-internet" + unguardeddirectdatastoreaccess "github.com/threagile/threagile/risks/built-in/unguarded-direct-datastore-access" + unnecessarycommunicationlink "github.com/threagile/threagile/risks/built-in/unnecessary-communication-link" + unnecessarydataasset "github.com/threagile/threagile/risks/built-in/unnecessary-data-asset" + unnecessarydatatransfer "github.com/threagile/threagile/risks/built-in/unnecessary-data-transfer" + unnecessarytechnicalasset "github.com/threagile/threagile/risks/built-in/unnecessary-technical-asset" + untrusteddeserialization "github.com/threagile/threagile/risks/built-in/untrusted-deserialization" + wrongcommunicationlinkcontent "github.com/threagile/threagile/risks/built-in/wrong-communication-link-content" + wrongtrustboundarycontent "github.com/threagile/threagile/risks/built-in/wrong-trust-boundary-content" + xmlexternalentity "github.com/threagile/threagile/risks/built-in/xml-external-entity" "golang.org/x/crypto/argon2" "gopkg.in/yaml.v3" ) const keepDiagramSourceFiles = false -const defaultGraphvizDPI, 
maxGraphvizDPI = 120, 240 -const backupHistoryFilesToKeep = 50 - -const baseFolder, reportFilename, excelRisksFilename, excelTagsFilename, jsonRisksFilename, jsonTechnicalAssetsFilename, jsonStatsFilename, dataFlowDiagramFilenameDOT, dataFlowDiagramFilenamePNG, dataAssetDiagramFilenameDOT, dataAssetDiagramFilenamePNG, graphvizDataFlowDiagramConversionCall, graphvizDataAssetDiagramConversionCall = "/data", "report.pdf", "risks.xlsx", "tags.xlsx", "risks.json", "technical-assets.json", "stats.json", "data-flow-diagram.gv", "data-flow-diagram.png", "data-asset-diagram.gv", "data-asset-diagram.png", "render-data-flow-diagram.sh", "render-data-asset-diagram.sh" - -var globalLock sync.Mutex -var successCount, errorCount = 0, 0 - -var modelInput model.ModelInput - -var drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = true - -var buildTimestamp = "" +const ( + defaultGraphvizDPI, maxGraphvizDPI = 120, 240 + backupHistoryFilesToKeep = 50 +) -var modelFilename, templateFilename /*, diagramFilename, reportFilename, graphvizConversion*/ *string -var createExampleModel, createStubModel, createEditingSupport, verbose, ignoreOrphanedRiskTracking, generateDataFlowDiagram, generateDataAssetDiagram, generateRisksJSON, generateTechnicalAssetsJSON, generateStatsJSON, generateRisksExcel, generateTagsExcel, generateReportPDF *bool -var outputDir, raaPlugin, skipRiskRules, riskRulesPlugins, executeModelMacro *string -var customRiskRules map[string]model.CustomRiskRule -var diagramDPI, serverPort *int +const ( + tmpFolder = "/dev/shm" // TODO: make configurable via cmdline arg? 
+ appFolder = "/app" + baseFolder = "/data" + reportFilename = "report.pdf" + excelRisksFilename = "risks.xlsx" + excelTagsFilename = "tags.xlsx" + jsonRisksFilename = "risks.json" + jsonTechnicalAssetsFilename = "technical-assets.json" + jsonStatsFilename = "stats.json" + dataFlowDiagramFilenameDOT = "data-flow-diagram.gv" + dataFlowDiagramFilenamePNG = "data-flow-diagram.png" + dataAssetDiagramFilenameDOT = "data-asset-diagram.gv" + dataAssetDiagramFilenamePNG = "data-asset-diagram.png" + graphvizDataFlowDiagramConversionCall = "render-data-flow-diagram.sh" + graphvizDataAssetDiagramConversionCall = "render-data-asset-diagram.sh" + outputFile = "threagile.yaml" +) -var deferredRiskTrackingDueToWildcardMatching = make(map[string]model.RiskTracking) +var ( + successCount, errorCount = 0, 0 + drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = true + buildTimestamp = "" + tempFolder = tmpFolder + + globalLock sync.Mutex + modelInput model.ModelInput + + modelFilename, templateFilename/* diagramFilename, reportFilename, graphvizConversion*/ *string + createExampleModel, createStubModel, createEditingSupport, verbose, ignoreOrphanedRiskTracking *bool + generateDataFlowDiagram, generateDataAssetDiagram, generateRisksJSON, generateTechnicalAssetsJSON *bool + generateStatsJSON, generateRisksExcel, generateTagsExcel, generateReportPDF *bool + outputDir, raaPlugin, skipRiskRules, riskRulesPlugins, executeModelMacro *string + customRiskRules map[string]model.CustomRiskRule + diagramDPI, serverPort *int + deferredRiskTrackingDueToWildcardMatching = make(map[string]model.RiskTracking) +) func applyRiskGeneration() { if *verbose { @@ -123,465 +143,465 @@ func applyRiskGeneration() { } } - if _, ok := skippedRules[unencrypted_asset.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unencrypted_asset.Category().Id) - delete(skippedRules, unencrypted_asset.Category().Id) + if _, ok := skippedRules[unencryptedasset.Category().Id]; ok { + fmt.Println("Skipping 
risk rule:", unencryptedasset.Category().Id) + delete(skippedRules, unencryptedasset.Category().Id) } else { - model.AddToListOfSupportedTags(unencrypted_asset.SupportedTags()) - risks := unencrypted_asset.GenerateRisks() + model.AddToListOfSupportedTags(unencryptedasset.SupportedTags()) + risks := unencryptedasset.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unencrypted_asset.Category()] = risks + model.GeneratedRisksByCategory[unencryptedasset.Category()] = risks } } - if _, ok := skippedRules[unencrypted_communication.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unencrypted_communication.Category().Id) - delete(skippedRules, unencrypted_communication.Category().Id) + if _, ok := skippedRules[unencryptedcommunication.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unencryptedcommunication.Category().Id) + delete(skippedRules, unencryptedcommunication.Category().Id) } else { - model.AddToListOfSupportedTags(unencrypted_communication.SupportedTags()) - risks := unencrypted_communication.GenerateRisks() + model.AddToListOfSupportedTags(unencryptedcommunication.SupportedTags()) + risks := unencryptedcommunication.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unencrypted_communication.Category()] = risks + model.GeneratedRisksByCategory[unencryptedcommunication.Category()] = risks } } - if _, ok := skippedRules[unguarded_direct_datastore_access.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unguarded_direct_datastore_access.Category().Id) - delete(skippedRules, unguarded_direct_datastore_access.Category().Id) + if _, ok := skippedRules[unguardeddirectdatastoreaccess.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unguardeddirectdatastoreaccess.Category().Id) + delete(skippedRules, unguardeddirectdatastoreaccess.Category().Id) } else { - model.AddToListOfSupportedTags(unguarded_direct_datastore_access.SupportedTags()) - risks := unguarded_direct_datastore_access.GenerateRisks() + 
model.AddToListOfSupportedTags(unguardeddirectdatastoreaccess.SupportedTags()) + risks := unguardeddirectdatastoreaccess.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unguarded_direct_datastore_access.Category()] = risks + model.GeneratedRisksByCategory[unguardeddirectdatastoreaccess.Category()] = risks } } - if _, ok := skippedRules[unguarded_access_from_internet.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unguarded_access_from_internet.Category().Id) - delete(skippedRules, unguarded_access_from_internet.Category().Id) + if _, ok := skippedRules[unguardedaccessfrominternet.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unguardedaccessfrominternet.Category().Id) + delete(skippedRules, unguardedaccessfrominternet.Category().Id) } else { - model.AddToListOfSupportedTags(unguarded_access_from_internet.SupportedTags()) - risks := unguarded_access_from_internet.GenerateRisks() + model.AddToListOfSupportedTags(unguardedaccessfrominternet.SupportedTags()) + risks := unguardedaccessfrominternet.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unguarded_access_from_internet.Category()] = risks + model.GeneratedRisksByCategory[unguardedaccessfrominternet.Category()] = risks } } - if _, ok := skippedRules[dos_risky_access_across_trust_boundary.Category().Id]; ok { - fmt.Println("Skipping risk rule:", dos_risky_access_across_trust_boundary.Category().Id) - delete(skippedRules, dos_risky_access_across_trust_boundary.Category().Id) + if _, ok := skippedRules[dosriskyaccessacrosstrustboundary.Category().Id]; ok { + fmt.Println("Skipping risk rule:", dosriskyaccessacrosstrustboundary.Category().Id) + delete(skippedRules, dosriskyaccessacrosstrustboundary.Category().Id) } else { - model.AddToListOfSupportedTags(dos_risky_access_across_trust_boundary.SupportedTags()) - risks := dos_risky_access_across_trust_boundary.GenerateRisks() + model.AddToListOfSupportedTags(dosriskyaccessacrosstrustboundary.SupportedTags()) + 
risks := dosriskyaccessacrosstrustboundary.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[dos_risky_access_across_trust_boundary.Category()] = risks + model.GeneratedRisksByCategory[dosriskyaccessacrosstrustboundary.Category()] = risks } } - if _, ok := skippedRules[missing_network_segmentation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_network_segmentation.Category().Id) - delete(skippedRules, missing_network_segmentation.Category().Id) + if _, ok := skippedRules[missingnetworksegmentation.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingnetworksegmentation.Category().Id) + delete(skippedRules, missingnetworksegmentation.Category().Id) } else { - model.AddToListOfSupportedTags(missing_network_segmentation.SupportedTags()) - risks := missing_network_segmentation.GenerateRisks() + model.AddToListOfSupportedTags(missingnetworksegmentation.SupportedTags()) + risks := missingnetworksegmentation.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_network_segmentation.Category()] = risks + model.GeneratedRisksByCategory[missingnetworksegmentation.Category()] = risks } } - if _, ok := skippedRules[mixed_targets_on_shared_runtime.Category().Id]; ok { - fmt.Println("Skipping risk rule:", mixed_targets_on_shared_runtime.Category().Id) - delete(skippedRules, mixed_targets_on_shared_runtime.Category().Id) + if _, ok := skippedRules[mixedtargetsonsharedruntime.Category().Id]; ok { + fmt.Println("Skipping risk rule:", mixedtargetsonsharedruntime.Category().Id) + delete(skippedRules, mixedtargetsonsharedruntime.Category().Id) } else { - model.AddToListOfSupportedTags(mixed_targets_on_shared_runtime.SupportedTags()) - risks := mixed_targets_on_shared_runtime.GenerateRisks() + model.AddToListOfSupportedTags(mixedtargetsonsharedruntime.SupportedTags()) + risks := mixedtargetsonsharedruntime.GenerateRisks() if len(risks) > 0 { - 
model.GeneratedRisksByCategory[mixed_targets_on_shared_runtime.Category()] = risks + model.GeneratedRisksByCategory[mixedtargetsonsharedruntime.Category()] = risks } } - if _, ok := skippedRules[missing_identity_propagation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_identity_propagation.Category().Id) - delete(skippedRules, missing_identity_propagation.Category().Id) + if _, ok := skippedRules[missingidentitypropagation.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingidentitypropagation.Category().Id) + delete(skippedRules, missingidentitypropagation.Category().Id) } else { - model.AddToListOfSupportedTags(missing_identity_propagation.SupportedTags()) - risks := missing_identity_propagation.GenerateRisks() + model.AddToListOfSupportedTags(missingidentitypropagation.SupportedTags()) + risks := missingidentitypropagation.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_identity_propagation.Category()] = risks + model.GeneratedRisksByCategory[missingidentitypropagation.Category()] = risks } } - if _, ok := skippedRules[missing_identity_store.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_identity_store.Category().Id) - delete(skippedRules, missing_identity_store.Category().Id) + if _, ok := skippedRules[missingidentitystore.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingidentitystore.Category().Id) + delete(skippedRules, missingidentitystore.Category().Id) } else { - model.AddToListOfSupportedTags(missing_identity_store.SupportedTags()) - risks := missing_identity_store.GenerateRisks() + model.AddToListOfSupportedTags(missingidentitystore.SupportedTags()) + risks := missingidentitystore.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_identity_store.Category()] = risks + model.GeneratedRisksByCategory[missingidentitystore.Category()] = risks } } - if _, ok := skippedRules[missing_authentication.Category().Id]; ok { - 
fmt.Println("Skipping risk rule:", missing_authentication.Category().Id) - delete(skippedRules, missing_authentication.Category().Id) + if _, ok := skippedRules[missingauthentication.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingauthentication.Category().Id) + delete(skippedRules, missingauthentication.Category().Id) } else { - model.AddToListOfSupportedTags(missing_authentication.SupportedTags()) - risks := missing_authentication.GenerateRisks() + model.AddToListOfSupportedTags(missingauthentication.SupportedTags()) + risks := missingauthentication.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_authentication.Category()] = risks + model.GeneratedRisksByCategory[missingauthentication.Category()] = risks } } - if _, ok := skippedRules[missing_authentication_second_factor.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_authentication_second_factor.Category().Id) - delete(skippedRules, missing_authentication_second_factor.Category().Id) + if _, ok := skippedRules[missingauthenticationsecondfactor.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingauthenticationsecondfactor.Category().Id) + delete(skippedRules, missingauthenticationsecondfactor.Category().Id) } else { - model.AddToListOfSupportedTags(missing_authentication_second_factor.SupportedTags()) - risks := missing_authentication_second_factor.GenerateRisks() + model.AddToListOfSupportedTags(missingauthenticationsecondfactor.SupportedTags()) + risks := missingauthenticationsecondfactor.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_authentication_second_factor.Category()] = risks + model.GeneratedRisksByCategory[missingauthenticationsecondfactor.Category()] = risks } } - if _, ok := skippedRules[unnecessary_data_transfer.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unnecessary_data_transfer.Category().Id) - delete(skippedRules, unnecessary_data_transfer.Category().Id) + if _, ok := 
skippedRules[unnecessarydatatransfer.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unnecessarydatatransfer.Category().Id) + delete(skippedRules, unnecessarydatatransfer.Category().Id) } else { - model.AddToListOfSupportedTags(unnecessary_data_transfer.SupportedTags()) - risks := unnecessary_data_transfer.GenerateRisks() + model.AddToListOfSupportedTags(unnecessarydatatransfer.SupportedTags()) + risks := unnecessarydatatransfer.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unnecessary_data_transfer.Category()] = risks + model.GeneratedRisksByCategory[unnecessarydatatransfer.Category()] = risks } } - if _, ok := skippedRules[unnecessary_communication_link.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unnecessary_communication_link.Category().Id) - delete(skippedRules, unnecessary_communication_link.Category().Id) + if _, ok := skippedRules[unnecessarycommunicationlink.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unnecessarycommunicationlink.Category().Id) + delete(skippedRules, unnecessarycommunicationlink.Category().Id) } else { - model.AddToListOfSupportedTags(unnecessary_communication_link.SupportedTags()) - risks := unnecessary_communication_link.GenerateRisks() + model.AddToListOfSupportedTags(unnecessarycommunicationlink.SupportedTags()) + risks := unnecessarycommunicationlink.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unnecessary_communication_link.Category()] = risks + model.GeneratedRisksByCategory[unnecessarycommunicationlink.Category()] = risks } } - if _, ok := skippedRules[unnecessary_technical_asset.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unnecessary_technical_asset.Category().Id) - delete(skippedRules, unnecessary_technical_asset.Category().Id) + if _, ok := skippedRules[unnecessarytechnicalasset.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unnecessarytechnicalasset.Category().Id) + delete(skippedRules, 
unnecessarytechnicalasset.Category().Id) } else { - model.AddToListOfSupportedTags(unnecessary_technical_asset.SupportedTags()) - risks := unnecessary_technical_asset.GenerateRisks() + model.AddToListOfSupportedTags(unnecessarytechnicalasset.SupportedTags()) + risks := unnecessarytechnicalasset.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unnecessary_technical_asset.Category()] = risks + model.GeneratedRisksByCategory[unnecessarytechnicalasset.Category()] = risks } } - if _, ok := skippedRules[unnecessary_data_asset.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unnecessary_data_asset.Category().Id) - delete(skippedRules, unnecessary_data_asset.Category().Id) + if _, ok := skippedRules[unnecessarydataasset.Category().Id]; ok { + fmt.Println("Skipping risk rule:", unnecessarydataasset.Category().Id) + delete(skippedRules, unnecessarydataasset.Category().Id) } else { - model.AddToListOfSupportedTags(unnecessary_data_asset.SupportedTags()) - risks := unnecessary_data_asset.GenerateRisks() + model.AddToListOfSupportedTags(unnecessarydataasset.SupportedTags()) + risks := unnecessarydataasset.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[unnecessary_data_asset.Category()] = risks + model.GeneratedRisksByCategory[unnecessarydataasset.Category()] = risks } } - if _, ok := skippedRules[sql_nosql_injection.Category().Id]; ok { - fmt.Println("Skipping risk rule:", sql_nosql_injection.Category().Id) - delete(skippedRules, sql_nosql_injection.Category().Id) + if _, ok := skippedRules[sqlnosqlinjection.Category().Id]; ok { + fmt.Println("Skipping risk rule:", sqlnosqlinjection.Category().Id) + delete(skippedRules, sqlnosqlinjection.Category().Id) } else { - model.AddToListOfSupportedTags(sql_nosql_injection.SupportedTags()) - risks := sql_nosql_injection.GenerateRisks() + model.AddToListOfSupportedTags(sqlnosqlinjection.SupportedTags()) + risks := sqlnosqlinjection.GenerateRisks() if len(risks) > 0 { - 
model.GeneratedRisksByCategory[sql_nosql_injection.Category()] = risks + model.GeneratedRisksByCategory[sqlnosqlinjection.Category()] = risks } } - if _, ok := skippedRules[ldap_injection.Category().Id]; ok { - fmt.Println("Skipping risk rule:", ldap_injection.Category().Id) - delete(skippedRules, ldap_injection.Category().Id) + if _, ok := skippedRules[ldapinjection.Category().Id]; ok { + fmt.Println("Skipping risk rule:", ldapinjection.Category().Id) + delete(skippedRules, ldapinjection.Category().Id) } else { - model.AddToListOfSupportedTags(ldap_injection.SupportedTags()) - risks := ldap_injection.GenerateRisks() + model.AddToListOfSupportedTags(ldapinjection.SupportedTags()) + risks := ldapinjection.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[ldap_injection.Category()] = risks + model.GeneratedRisksByCategory[ldapinjection.Category()] = risks } } - if _, ok := skippedRules[cross_site_scripting.Category().Id]; ok { - fmt.Println("Skipping risk rule:", cross_site_scripting.Category().Id) - delete(skippedRules, cross_site_scripting.Category().Id) + if _, ok := skippedRules[crosssitescripting.Category().Id]; ok { + fmt.Println("Skipping risk rule:", crosssitescripting.Category().Id) + delete(skippedRules, crosssitescripting.Category().Id) } else { - model.AddToListOfSupportedTags(cross_site_scripting.SupportedTags()) - risks := cross_site_scripting.GenerateRisks() + model.AddToListOfSupportedTags(crosssitescripting.SupportedTags()) + risks := crosssitescripting.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[cross_site_scripting.Category()] = risks + model.GeneratedRisksByCategory[crosssitescripting.Category()] = risks } } - if _, ok := skippedRules[cross_site_request_forgery.Category().Id]; ok { - fmt.Println("Skipping risk rule:", cross_site_request_forgery.Category().Id) - delete(skippedRules, cross_site_request_forgery.Category().Id) + if _, ok := skippedRules[crosssiterequestforgery.Category().Id]; ok { + 
fmt.Println("Skipping risk rule:", crosssiterequestforgery.Category().Id) + delete(skippedRules, crosssiterequestforgery.Category().Id) } else { - model.AddToListOfSupportedTags(cross_site_request_forgery.SupportedTags()) - risks := cross_site_request_forgery.GenerateRisks() + model.AddToListOfSupportedTags(crosssiterequestforgery.SupportedTags()) + risks := crosssiterequestforgery.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[cross_site_request_forgery.Category()] = risks + model.GeneratedRisksByCategory[crosssiterequestforgery.Category()] = risks } } - if _, ok := skippedRules[server_side_request_forgery.Category().Id]; ok { - fmt.Println("Skipping risk rule:", server_side_request_forgery.Category().Id) - delete(skippedRules, server_side_request_forgery.Category().Id) + if _, ok := skippedRules[serversiderequestforgery.Category().Id]; ok { + fmt.Println("Skipping risk rule:", serversiderequestforgery.Category().Id) + delete(skippedRules, serversiderequestforgery.Category().Id) } else { - model.AddToListOfSupportedTags(server_side_request_forgery.SupportedTags()) - risks := server_side_request_forgery.GenerateRisks() + model.AddToListOfSupportedTags(serversiderequestforgery.SupportedTags()) + risks := serversiderequestforgery.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[server_side_request_forgery.Category()] = risks + model.GeneratedRisksByCategory[serversiderequestforgery.Category()] = risks } } - if _, ok := skippedRules[path_traversal.Category().Id]; ok { - fmt.Println("Skipping risk rule:", path_traversal.Category().Id) - delete(skippedRules, path_traversal.Category().Id) + if _, ok := skippedRules[pathtraversal.Category().Id]; ok { + fmt.Println("Skipping risk rule:", pathtraversal.Category().Id) + delete(skippedRules, pathtraversal.Category().Id) } else { - model.AddToListOfSupportedTags(path_traversal.SupportedTags()) - risks := path_traversal.GenerateRisks() + 
model.AddToListOfSupportedTags(pathtraversal.SupportedTags()) + risks := pathtraversal.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[path_traversal.Category()] = risks + model.GeneratedRisksByCategory[pathtraversal.Category()] = risks } } - if _, ok := skippedRules[push_instead_of_pull_deployment.Category().Id]; ok { - fmt.Println("Skipping risk rule:", push_instead_of_pull_deployment.Category().Id) - delete(skippedRules, push_instead_of_pull_deployment.Category().Id) + if _, ok := skippedRules[pushinsteadofpulldeployment.Category().Id]; ok { + fmt.Println("Skipping risk rule:", pushinsteadofpulldeployment.Category().Id) + delete(skippedRules, pushinsteadofpulldeployment.Category().Id) } else { - model.AddToListOfSupportedTags(push_instead_of_pull_deployment.SupportedTags()) - risks := push_instead_of_pull_deployment.GenerateRisks() + model.AddToListOfSupportedTags(pushinsteadofpulldeployment.SupportedTags()) + risks := pushinsteadofpulldeployment.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[push_instead_of_pull_deployment.Category()] = risks + model.GeneratedRisksByCategory[pushinsteadofpulldeployment.Category()] = risks } } - if _, ok := skippedRules[search_query_injection.Category().Id]; ok { - fmt.Println("Skipping risk rule:", search_query_injection.Category().Id) - delete(skippedRules, search_query_injection.Category().Id) + if _, ok := skippedRules[searchqueryinjection.Category().Id]; ok { + fmt.Println("Skipping risk rule:", searchqueryinjection.Category().Id) + delete(skippedRules, searchqueryinjection.Category().Id) } else { - model.AddToListOfSupportedTags(search_query_injection.SupportedTags()) - risks := search_query_injection.GenerateRisks() + model.AddToListOfSupportedTags(searchqueryinjection.SupportedTags()) + risks := searchqueryinjection.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[search_query_injection.Category()] = risks + 
model.GeneratedRisksByCategory[searchqueryinjection.Category()] = risks } } - if _, ok := skippedRules[service_registry_poisoning.Category().Id]; ok { - fmt.Println("Skipping risk rule:", service_registry_poisoning.Category().Id) - delete(skippedRules, service_registry_poisoning.Category().Id) + if _, ok := skippedRules[serviceregistrypoisoning.Category().Id]; ok { + fmt.Println("Skipping risk rule:", serviceregistrypoisoning.Category().Id) + delete(skippedRules, serviceregistrypoisoning.Category().Id) } else { - model.AddToListOfSupportedTags(service_registry_poisoning.SupportedTags()) - risks := service_registry_poisoning.GenerateRisks() + model.AddToListOfSupportedTags(serviceregistrypoisoning.SupportedTags()) + risks := serviceregistrypoisoning.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[service_registry_poisoning.Category()] = risks + model.GeneratedRisksByCategory[serviceregistrypoisoning.Category()] = risks } } - if _, ok := skippedRules[untrusted_deserialization.Category().Id]; ok { - fmt.Println("Skipping risk rule:", untrusted_deserialization.Category().Id) - delete(skippedRules, untrusted_deserialization.Category().Id) + if _, ok := skippedRules[untrusteddeserialization.Category().Id]; ok { + fmt.Println("Skipping risk rule:", untrusteddeserialization.Category().Id) + delete(skippedRules, untrusteddeserialization.Category().Id) } else { - model.AddToListOfSupportedTags(untrusted_deserialization.SupportedTags()) - risks := untrusted_deserialization.GenerateRisks() + model.AddToListOfSupportedTags(untrusteddeserialization.SupportedTags()) + risks := untrusteddeserialization.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[untrusted_deserialization.Category()] = risks + model.GeneratedRisksByCategory[untrusteddeserialization.Category()] = risks } } - if _, ok := skippedRules[xml_external_entity.Category().Id]; ok { - fmt.Println("Skipping risk rule:", xml_external_entity.Category().Id) - delete(skippedRules, 
xml_external_entity.Category().Id) + if _, ok := skippedRules[xmlexternalentity.Category().Id]; ok { + fmt.Println("Skipping risk rule:", xmlexternalentity.Category().Id) + delete(skippedRules, xmlexternalentity.Category().Id) } else { - model.AddToListOfSupportedTags(xml_external_entity.SupportedTags()) - risks := xml_external_entity.GenerateRisks() + model.AddToListOfSupportedTags(xmlexternalentity.SupportedTags()) + risks := xmlexternalentity.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[xml_external_entity.Category()] = risks + model.GeneratedRisksByCategory[xmlexternalentity.Category()] = risks } } - if _, ok := skippedRules[missing_cloud_hardening.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_cloud_hardening.Category().Id) - delete(skippedRules, missing_cloud_hardening.Category().Id) + if _, ok := skippedRules[missingcloudhardening.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingcloudhardening.Category().Id) + delete(skippedRules, missingcloudhardening.Category().Id) } else { - model.AddToListOfSupportedTags(missing_cloud_hardening.SupportedTags()) - risks := missing_cloud_hardening.GenerateRisks() + model.AddToListOfSupportedTags(missingcloudhardening.SupportedTags()) + risks := missingcloudhardening.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_cloud_hardening.Category()] = risks + model.GeneratedRisksByCategory[missingcloudhardening.Category()] = risks } } - if _, ok := skippedRules[missing_file_validation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_file_validation.Category().Id) - delete(skippedRules, missing_file_validation.Category().Id) + if _, ok := skippedRules[missingfilevalidation.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingfilevalidation.Category().Id) + delete(skippedRules, missingfilevalidation.Category().Id) } else { - model.AddToListOfSupportedTags(missing_file_validation.SupportedTags()) - risks := 
missing_file_validation.GenerateRisks() + model.AddToListOfSupportedTags(missingfilevalidation.SupportedTags()) + risks := missingfilevalidation.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_file_validation.Category()] = risks + model.GeneratedRisksByCategory[missingfilevalidation.Category()] = risks } } - if _, ok := skippedRules[missing_hardening.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_hardening.Category().Id) - delete(skippedRules, missing_hardening.Category().Id) + if _, ok := skippedRules[missinghardening.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missinghardening.Category().Id) + delete(skippedRules, missinghardening.Category().Id) } else { - model.AddToListOfSupportedTags(missing_hardening.SupportedTags()) - risks := missing_hardening.GenerateRisks() + model.AddToListOfSupportedTags(missinghardening.SupportedTags()) + risks := missinghardening.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_hardening.Category()] = risks + model.GeneratedRisksByCategory[missinghardening.Category()] = risks } } - if _, ok := skippedRules[accidental_secret_leak.Category().Id]; ok { - fmt.Println("Skipping risk rule:", accidental_secret_leak.Category().Id) - delete(skippedRules, accidental_secret_leak.Category().Id) + if _, ok := skippedRules[accidentalsecretleak.Category().Id]; ok { + fmt.Println("Skipping risk rule:", accidentalsecretleak.Category().Id) + delete(skippedRules, accidentalsecretleak.Category().Id) } else { - model.AddToListOfSupportedTags(accidental_secret_leak.SupportedTags()) - risks := accidental_secret_leak.GenerateRisks() + model.AddToListOfSupportedTags(accidentalsecretleak.SupportedTags()) + risks := accidentalsecretleak.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[accidental_secret_leak.Category()] = risks + model.GeneratedRisksByCategory[accidentalsecretleak.Category()] = risks } } - if _, ok := 
skippedRules[code_backdooring.Category().Id]; ok { - fmt.Println("Skipping risk rule:", code_backdooring.Category().Id) - delete(skippedRules, code_backdooring.Category().Id) + if _, ok := skippedRules[codebackdooring.Category().Id]; ok { + fmt.Println("Skipping risk rule:", codebackdooring.Category().Id) + delete(skippedRules, codebackdooring.Category().Id) } else { - model.AddToListOfSupportedTags(code_backdooring.SupportedTags()) - risks := code_backdooring.GenerateRisks() + model.AddToListOfSupportedTags(codebackdooring.SupportedTags()) + risks := codebackdooring.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[code_backdooring.Category()] = risks + model.GeneratedRisksByCategory[codebackdooring.Category()] = risks } } - if _, ok := skippedRules[container_baseimage_backdooring.Category().Id]; ok { - fmt.Println("Skipping risk rule:", container_baseimage_backdooring.Category().Id) - delete(skippedRules, container_baseimage_backdooring.Category().Id) + if _, ok := skippedRules[containerbaseimagebackdooring.Category().Id]; ok { + fmt.Println("Skipping risk rule:", containerbaseimagebackdooring.Category().Id) + delete(skippedRules, containerbaseimagebackdooring.Category().Id) } else { - model.AddToListOfSupportedTags(container_baseimage_backdooring.SupportedTags()) - risks := container_baseimage_backdooring.GenerateRisks() + model.AddToListOfSupportedTags(containerbaseimagebackdooring.SupportedTags()) + risks := containerbaseimagebackdooring.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[container_baseimage_backdooring.Category()] = risks + model.GeneratedRisksByCategory[containerbaseimagebackdooring.Category()] = risks } } - if _, ok := skippedRules[container_platform_escape.Category().Id]; ok { - fmt.Println("Skipping risk rule:", container_platform_escape.Category().Id) - delete(skippedRules, container_platform_escape.Category().Id) + if _, ok := skippedRules[containerplatformescape.Category().Id]; ok { + 
fmt.Println("Skipping risk rule:", containerplatformescape.Category().Id) + delete(skippedRules, containerplatformescape.Category().Id) } else { - model.AddToListOfSupportedTags(container_platform_escape.SupportedTags()) - risks := container_platform_escape.GenerateRisks() + model.AddToListOfSupportedTags(containerplatformescape.SupportedTags()) + risks := containerplatformescape.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[container_platform_escape.Category()] = risks + model.GeneratedRisksByCategory[containerplatformescape.Category()] = risks } } - if _, ok := skippedRules[incomplete_model.Category().Id]; ok { - fmt.Println("Skipping risk rule:", incomplete_model.Category().Id) - delete(skippedRules, incomplete_model.Category().Id) + if _, ok := skippedRules[incompletemodel.Category().Id]; ok { + fmt.Println("Skipping risk rule:", incompletemodel.Category().Id) + delete(skippedRules, incompletemodel.Category().Id) } else { - model.AddToListOfSupportedTags(incomplete_model.SupportedTags()) - risks := incomplete_model.GenerateRisks() + model.AddToListOfSupportedTags(incompletemodel.SupportedTags()) + risks := incompletemodel.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[incomplete_model.Category()] = risks + model.GeneratedRisksByCategory[incompletemodel.Category()] = risks } } - if _, ok := skippedRules[unchecked_deployment.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unchecked_deployment.Category().Id) - delete(skippedRules, unchecked_deployment.Category().Id) + if _, ok := skippedRules[uncheckeddeployment.Category().Id]; ok { + fmt.Println("Skipping risk rule:", uncheckeddeployment.Category().Id) + delete(skippedRules, uncheckeddeployment.Category().Id) } else { - model.AddToListOfSupportedTags(unchecked_deployment.SupportedTags()) - risks := unchecked_deployment.GenerateRisks() + model.AddToListOfSupportedTags(uncheckeddeployment.SupportedTags()) + risks := uncheckeddeployment.GenerateRisks() if 
len(risks) > 0 { - model.GeneratedRisksByCategory[unchecked_deployment.Category()] = risks + model.GeneratedRisksByCategory[uncheckeddeployment.Category()] = risks } } - if _, ok := skippedRules[missing_build_infrastructure.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_build_infrastructure.Category().Id) - delete(skippedRules, missing_build_infrastructure.Category().Id) + if _, ok := skippedRules[missingbuildinfrastructure.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingbuildinfrastructure.Category().Id) + delete(skippedRules, missingbuildinfrastructure.Category().Id) } else { - model.AddToListOfSupportedTags(missing_build_infrastructure.SupportedTags()) - risks := missing_build_infrastructure.GenerateRisks() + model.AddToListOfSupportedTags(missingbuildinfrastructure.SupportedTags()) + risks := missingbuildinfrastructure.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_build_infrastructure.Category()] = risks + model.GeneratedRisksByCategory[missingbuildinfrastructure.Category()] = risks } } - if _, ok := skippedRules[missing_identity_provider_isolation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_identity_provider_isolation.Category().Id) - delete(skippedRules, missing_identity_provider_isolation.Category().Id) + if _, ok := skippedRules[missingidentityproviderisolation.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingidentityproviderisolation.Category().Id) + delete(skippedRules, missingidentityproviderisolation.Category().Id) } else { - model.AddToListOfSupportedTags(missing_identity_provider_isolation.SupportedTags()) - risks := missing_identity_provider_isolation.GenerateRisks() + model.AddToListOfSupportedTags(missingidentityproviderisolation.SupportedTags()) + risks := missingidentityproviderisolation.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_identity_provider_isolation.Category()] = risks + 
model.GeneratedRisksByCategory[missingidentityproviderisolation.Category()] = risks } } - if _, ok := skippedRules[missing_vault.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_vault.Category().Id) - delete(skippedRules, missing_vault.Category().Id) + if _, ok := skippedRules[missingvault.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingvault.Category().Id) + delete(skippedRules, missingvault.Category().Id) } else { - model.AddToListOfSupportedTags(missing_vault.SupportedTags()) - risks := missing_vault.GenerateRisks() + model.AddToListOfSupportedTags(missingvault.SupportedTags()) + risks := missingvault.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_vault.Category()] = risks + model.GeneratedRisksByCategory[missingvault.Category()] = risks } } - if _, ok := skippedRules[missing_vault_isolation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_vault_isolation.Category().Id) - delete(skippedRules, missing_vault_isolation.Category().Id) + if _, ok := skippedRules[missingvaultisolation.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingvaultisolation.Category().Id) + delete(skippedRules, missingvaultisolation.Category().Id) } else { - model.AddToListOfSupportedTags(missing_vault_isolation.SupportedTags()) - risks := missing_vault_isolation.GenerateRisks() + model.AddToListOfSupportedTags(missingvaultisolation.SupportedTags()) + risks := missingvaultisolation.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_vault_isolation.Category()] = risks + model.GeneratedRisksByCategory[missingvaultisolation.Category()] = risks } } - if _, ok := skippedRules[missing_waf.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missing_waf.Category().Id) - delete(skippedRules, missing_waf.Category().Id) + if _, ok := skippedRules[missingwaf.Category().Id]; ok { + fmt.Println("Skipping risk rule:", missingwaf.Category().Id) + delete(skippedRules, 
missingwaf.Category().Id) } else { - model.AddToListOfSupportedTags(missing_waf.SupportedTags()) - risks := missing_waf.GenerateRisks() + model.AddToListOfSupportedTags(missingwaf.SupportedTags()) + risks := missingwaf.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[missing_waf.Category()] = risks + model.GeneratedRisksByCategory[missingwaf.Category()] = risks } } - if _, ok := skippedRules[wrong_communication_link_content.Category().Id]; ok { - fmt.Println("Skipping risk rule:", wrong_communication_link_content.Category().Id) - delete(skippedRules, wrong_communication_link_content.Category().Id) + if _, ok := skippedRules[wrongcommunicationlinkcontent.Category().Id]; ok { + fmt.Println("Skipping risk rule:", wrongcommunicationlinkcontent.Category().Id) + delete(skippedRules, wrongcommunicationlinkcontent.Category().Id) } else { - model.AddToListOfSupportedTags(wrong_communication_link_content.SupportedTags()) - risks := wrong_communication_link_content.GenerateRisks() + model.AddToListOfSupportedTags(wrongcommunicationlinkcontent.SupportedTags()) + risks := wrongcommunicationlinkcontent.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[wrong_communication_link_content.Category()] = risks + model.GeneratedRisksByCategory[wrongcommunicationlinkcontent.Category()] = risks } } - if _, ok := skippedRules[wrong_trust_boundary_content.Category().Id]; ok { - fmt.Println("Skipping risk rule:", wrong_trust_boundary_content.Category().Id) - delete(skippedRules, wrong_trust_boundary_content.Category().Id) + if _, ok := skippedRules[wrongtrustboundarycontent.Category().Id]; ok { + fmt.Println("Skipping risk rule:", wrongtrustboundarycontent.Category().Id) + delete(skippedRules, wrongtrustboundarycontent.Category().Id) } else { - model.AddToListOfSupportedTags(wrong_trust_boundary_content.SupportedTags()) - risks := wrong_trust_boundary_content.GenerateRisks() + model.AddToListOfSupportedTags(wrongtrustboundarycontent.SupportedTags()) + 
risks := wrongtrustboundarycontent.GenerateRisks() if len(risks) > 0 { - model.GeneratedRisksByCategory[wrong_trust_boundary_content.Category()] = risks + model.GeneratedRisksByCategory[wrongtrustboundarycontent.Category()] = risks } } @@ -649,8 +669,8 @@ func checkRiskTracking() { } // save also the risk-category-id and risk-status directly in the risk for better JSON marshalling - for category, _ := range model.GeneratedRisksByCategory { - for i, _ := range model.GeneratedRisksByCategory[category] { + for category := range model.GeneratedRisksByCategory { + for i := range model.GeneratedRisksByCategory[category] { model.GeneratedRisksByCategory[category][i].CategoryId = category.Id model.GeneratedRisksByCategory[category][i].RiskStatus = model.GeneratedRisksByCategory[category][i].GetRiskTrackingStatusDefaultingUnchecked() } @@ -683,26 +703,26 @@ func unzip(src string, dest string) ([]string, error) { if err != nil { return filenames, err } - defer r.Close() + defer func() { _ = r.Close() }() for _, f := range r.File { // Store filename/path for returning and using later on - fpath := filepath.Join(dest, f.Name) + path := filepath.Join(dest, f.Name) // Check for ZipSlip. 
More Info: http://bit.ly/2MsjAWE - if !strings.HasPrefix(fpath, filepath.Clean(dest)+string(os.PathSeparator)) { - return filenames, fmt.Errorf("%s: illegal file path", fpath) + if !strings.HasPrefix(path, filepath.Clean(dest)+string(os.PathSeparator)) { + return filenames, fmt.Errorf("%s: illegal file path", path) } - filenames = append(filenames, fpath) + filenames = append(filenames, path) if f.FileInfo().IsDir() { // Make Folder - os.MkdirAll(fpath, os.ModePerm) + _ = os.MkdirAll(path, os.ModePerm) continue } // Make File - if err = os.MkdirAll(filepath.Dir(fpath), os.ModePerm); err != nil { + if err = os.MkdirAll(filepath.Dir(path), os.ModePerm); err != nil { return filenames, err } - outFile, err := os.OpenFile(fpath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode()) + outFile, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode()) if err != nil { return filenames, err } @@ -712,8 +732,8 @@ func unzip(src string, dest string) ([]string, error) { } _, err = io.Copy(outFile, rc) // Close the file without defer to close before next iteration of loop - outFile.Close() - rc.Close() + _ = outFile.Close() + _ = rc.Close() if err != nil { return filenames, err } @@ -729,10 +749,10 @@ func zipFiles(filename string, files []string) error { if err != nil { return err } - defer newZipFile.Close() + defer func() { _ = newZipFile.Close() }() zipWriter := zip.NewWriter(newZipFile) - defer zipWriter.Close() + defer func() { _ = zipWriter.Close() }() // Add files to zip for _, file := range files { @@ -748,7 +768,7 @@ func addFileToZip(zipWriter *zip.Writer, filename string) error { if err != nil { return err } - defer fileToZip.Close() + defer func() { _ = fileToZip.Close() }() // Get the file information info, err := fileToZip.Stat() @@ -785,7 +805,7 @@ func doIt(inputFilename string, outputDirectory string) { if *verbose { log.Println(err) } - os.Stderr.WriteString(err.Error() + "\n") + _, _ = os.Stderr.WriteString(err.Error() + "\n") os.Exit(2) } }() @@ 
-808,18 +828,18 @@ func doIt(inputFilename string, outputDirectory string) { if len(*executeModelMacro) > 0 { var macroDetails model.MacroDetails switch *executeModelMacro { - case add_build_pipeline.GetMacroDetails().ID: - macroDetails = add_build_pipeline.GetMacroDetails() - case add_vault.GetMacroDetails().ID: - macroDetails = add_vault.GetMacroDetails() - case pretty_print.GetMacroDetails().ID: - macroDetails = pretty_print.GetMacroDetails() - case remove_unused_tags.GetMacroDetails().ID: - macroDetails = remove_unused_tags.GetMacroDetails() - case seed_risk_tracking.GetMacroDetails().ID: - macroDetails = seed_risk_tracking.GetMacroDetails() - case seed_tags.GetMacroDetails().ID: - macroDetails = seed_tags.GetMacroDetails() + case addbuildpipeline.GetMacroDetails().ID: + macroDetails = addbuildpipeline.GetMacroDetails() + case addvault.GetMacroDetails().ID: + macroDetails = addvault.GetMacroDetails() + case prettyprint.GetMacroDetails().ID: + macroDetails = prettyprint.GetMacroDetails() + case removeunusedtags.GetMacroDetails().ID: + macroDetails = removeunusedtags.GetMacroDetails() + case seedrisktracking.GetMacroDetails().ID: + macroDetails = seedrisktracking.GetMacroDetails() + case seedtags.GetMacroDetails().ID: + macroDetails = seedtags.GetMacroDetails() default: log.Fatal("Unknown model macro: ", *executeModelMacro) } @@ -838,18 +858,18 @@ func doIt(inputFilename string, outputDirectory string) { var nextQuestion model.MacroQuestion for { switch macroDetails.ID { - case add_build_pipeline.GetMacroDetails().ID: - nextQuestion, err = add_build_pipeline.GetNextQuestion() - case add_vault.GetMacroDetails().ID: - nextQuestion, err = add_vault.GetNextQuestion() - case pretty_print.GetMacroDetails().ID: - nextQuestion, err = pretty_print.GetNextQuestion() - case remove_unused_tags.GetMacroDetails().ID: - nextQuestion, err = remove_unused_tags.GetNextQuestion() - case seed_risk_tracking.GetMacroDetails().ID: - nextQuestion, err = 
seed_risk_tracking.GetNextQuestion() - case seed_tags.GetMacroDetails().ID: - nextQuestion, err = seed_tags.GetNextQuestion() + case addbuildpipeline.GetMacroDetails().ID: + nextQuestion, err = addbuildpipeline.GetNextQuestion() + case addvault.GetMacroDetails().ID: + nextQuestion, err = addvault.GetNextQuestion() + case prettyprint.GetMacroDetails().ID: + nextQuestion, err = prettyprint.GetNextQuestion() + case removeunusedtags.GetMacroDetails().ID: + nextQuestion, err = removeunusedtags.GetNextQuestion() + case seedrisktracking.GetMacroDetails().ID: + nextQuestion, err = seedrisktracking.GetNextQuestion() + case seedtags.GetMacroDetails().ID: + nextQuestion, err = seedtags.GetNextQuestion() } checkErr(err) if nextQuestion.NoMoreQuestions() { @@ -865,7 +885,7 @@ func doIt(inputFilename string, outputDirectory string) { resultingMultiValueSelection := make([]string, 0) if nextQuestion.IsValueConstrained() { if nextQuestion.MultiSelect { - selectedValues := make(map[string]bool, 0) + selectedValues := make(map[string]bool) for { fmt.Println("Please select (multiple executions possible) from the following values (use number to select/deselect):") fmt.Println(" 0:", "SELECTION PROCESS FINISHED: CONTINUE TO NEXT QUESTION") @@ -939,18 +959,18 @@ func doIt(inputFilename string, outputDirectory string) { return } else if strings.ToLower(answer) == "back" { switch macroDetails.ID { - case add_build_pipeline.GetMacroDetails().ID: - message, validResult, err = add_build_pipeline.GoBack() - case add_vault.GetMacroDetails().ID: - message, validResult, err = add_vault.GoBack() - case pretty_print.GetMacroDetails().ID: - message, validResult, err = pretty_print.GoBack() - case remove_unused_tags.GetMacroDetails().ID: - message, validResult, err = remove_unused_tags.GoBack() - case seed_risk_tracking.GetMacroDetails().ID: - message, validResult, err = seed_risk_tracking.GoBack() - case seed_tags.GetMacroDetails().ID: - message, validResult, err = seed_tags.GoBack() + case 
addbuildpipeline.GetMacroDetails().ID: + message, validResult, err = addbuildpipeline.GoBack() + case addvault.GetMacroDetails().ID: + message, validResult, err = addvault.GoBack() + case prettyprint.GetMacroDetails().ID: + message, validResult, err = prettyprint.GoBack() + case removeunusedtags.GetMacroDetails().ID: + message, validResult, err = removeunusedtags.GoBack() + case seedrisktracking.GetMacroDetails().ID: + message, validResult, err = seedrisktracking.GoBack() + case seedtags.GetMacroDetails().ID: + message, validResult, err = seedtags.GoBack() } } else if len(answer) > 0 { // individual answer if nextQuestion.IsValueConstrained() { @@ -962,34 +982,34 @@ func doIt(inputFilename string, outputDirectory string) { } } switch macroDetails.ID { - case add_build_pipeline.GetMacroDetails().ID: - message, validResult, err = add_build_pipeline.ApplyAnswer(nextQuestion.ID, answer) - case add_vault.GetMacroDetails().ID: - message, validResult, err = add_vault.ApplyAnswer(nextQuestion.ID, answer) - case pretty_print.GetMacroDetails().ID: - message, validResult, err = pretty_print.ApplyAnswer(nextQuestion.ID, answer) - case remove_unused_tags.GetMacroDetails().ID: - message, validResult, err = remove_unused_tags.ApplyAnswer(nextQuestion.ID, answer) - case seed_risk_tracking.GetMacroDetails().ID: - message, validResult, err = seed_risk_tracking.ApplyAnswer(nextQuestion.ID, answer) - case seed_tags.GetMacroDetails().ID: - message, validResult, err = seed_tags.ApplyAnswer(nextQuestion.ID, answer) + case addbuildpipeline.GetMacroDetails().ID: + message, validResult, err = addbuildpipeline.ApplyAnswer(nextQuestion.ID, answer) + case addvault.GetMacroDetails().ID: + message, validResult, err = addvault.ApplyAnswer(nextQuestion.ID, answer) + case prettyprint.GetMacroDetails().ID: + message, validResult, err = prettyprint.ApplyAnswer(nextQuestion.ID, answer) + case removeunusedtags.GetMacroDetails().ID: + message, validResult, err = 
removeunusedtags.ApplyAnswer(nextQuestion.ID, answer) + case seedrisktracking.GetMacroDetails().ID: + message, validResult, err = seedrisktracking.ApplyAnswer(nextQuestion.ID, answer) + case seedtags.GetMacroDetails().ID: + message, validResult, err = seedtags.ApplyAnswer(nextQuestion.ID, answer) } } } else { switch macroDetails.ID { - case add_build_pipeline.GetMacroDetails().ID: - message, validResult, err = add_build_pipeline.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case add_vault.GetMacroDetails().ID: - message, validResult, err = add_vault.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case pretty_print.GetMacroDetails().ID: - message, validResult, err = pretty_print.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case remove_unused_tags.GetMacroDetails().ID: - message, validResult, err = remove_unused_tags.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case seed_risk_tracking.GetMacroDetails().ID: - message, validResult, err = seed_risk_tracking.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case seed_tags.GetMacroDetails().ID: - message, validResult, err = seed_tags.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case addbuildpipeline.GetMacroDetails().ID: + message, validResult, err = addbuildpipeline.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case addvault.GetMacroDetails().ID: + message, validResult, err = addvault.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case prettyprint.GetMacroDetails().ID: + message, validResult, err = prettyprint.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case removeunusedtags.GetMacroDetails().ID: + message, validResult, err = removeunusedtags.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case seedrisktracking.GetMacroDetails().ID: + message, validResult, err = seedrisktracking.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) 
+ case seedtags.GetMacroDetails().ID: + message, validResult, err = seedtags.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) } } checkErr(err) @@ -1013,18 +1033,18 @@ func doIt(inputFilename string, outputDirectory string) { validResult := true var err error switch macroDetails.ID { - case add_build_pipeline.GetMacroDetails().ID: - changes, message, validResult, err = add_build_pipeline.GetFinalChangeImpact(&modelInput) - case add_vault.GetMacroDetails().ID: - changes, message, validResult, err = add_vault.GetFinalChangeImpact(&modelInput) - case pretty_print.GetMacroDetails().ID: - changes, message, validResult, err = pretty_print.GetFinalChangeImpact(&modelInput) - case remove_unused_tags.GetMacroDetails().ID: - changes, message, validResult, err = remove_unused_tags.GetFinalChangeImpact(&modelInput) - case seed_risk_tracking.GetMacroDetails().ID: - changes, message, validResult, err = seed_risk_tracking.GetFinalChangeImpact(&modelInput) - case seed_tags.GetMacroDetails().ID: - changes, message, validResult, err = seed_tags.GetFinalChangeImpact(&modelInput) + case addbuildpipeline.GetMacroDetails().ID: + changes, message, validResult, err = addbuildpipeline.GetFinalChangeImpact(&modelInput) + case addvault.GetMacroDetails().ID: + changes, message, validResult, err = addvault.GetFinalChangeImpact(&modelInput) + case prettyprint.GetMacroDetails().ID: + changes, message, validResult, err = prettyprint.GetFinalChangeImpact(&modelInput) + case removeunusedtags.GetMacroDetails().ID: + changes, message, validResult, err = removeunusedtags.GetFinalChangeImpact(&modelInput) + case seedrisktracking.GetMacroDetails().ID: + changes, message, validResult, err = seedrisktracking.GetFinalChangeImpact(&modelInput) + case seedtags.GetMacroDetails().ID: + changes, message, validResult, err = seedtags.GetFinalChangeImpact(&modelInput) } checkErr(err) for _, change := range changes { @@ -1049,18 +1069,18 @@ func doIt(inputFilename string, outputDirectory string) { 
validResult := true var err error switch macroDetails.ID { - case add_build_pipeline.GetMacroDetails().ID: - message, validResult, err = add_build_pipeline.Execute(&modelInput) - case add_vault.GetMacroDetails().ID: - message, validResult, err = add_vault.Execute(&modelInput) - case pretty_print.GetMacroDetails().ID: - message, validResult, err = pretty_print.Execute(&modelInput) - case remove_unused_tags.GetMacroDetails().ID: - message, validResult, err = remove_unused_tags.Execute(&modelInput) - case seed_risk_tracking.GetMacroDetails().ID: - message, validResult, err = seed_risk_tracking.Execute(&modelInput) - case seed_tags.GetMacroDetails().ID: - message, validResult, err = seed_tags.Execute(&modelInput) + case addbuildpipeline.GetMacroDetails().ID: + message, validResult, err = addbuildpipeline.Execute(&modelInput) + case addvault.GetMacroDetails().ID: + message, validResult, err = addvault.Execute(&modelInput) + case prettyprint.GetMacroDetails().ID: + message, validResult, err = prettyprint.Execute(&modelInput) + case removeunusedtags.GetMacroDetails().ID: + message, validResult, err = removeunusedtags.Execute(&modelInput) + case seedrisktracking.GetMacroDetails().ID: + message, validResult, err = seedrisktracking.Execute(&modelInput) + case seedtags.GetMacroDetails().ID: + message, validResult, err = seedtags.Execute(&modelInput) } checkErr(err) if !validResult { @@ -1080,7 +1100,7 @@ func doIt(inputFilename string, outputDirectory string) { yamlBytes = model.ReformatYAML(yamlBytes) */ fmt.Println("Writing model file:", inputFilename) - err = ioutil.WriteFile(inputFilename, yamlBytes, 0400) + err = os.WriteFile(inputFilename, yamlBytes, 0400) checkErr(err) fmt.Println("Model file successfully updated") return @@ -1089,7 +1109,6 @@ func doIt(inputFilename string, outputDirectory string) { return } } - fmt.Println() return } @@ -1100,24 +1119,24 @@ func doIt(inputFilename string, outputDirectory string) { // Data-flow Diagram rendering if 
renderDataFlowDiagram { - gvFile := outputDirectory + "/" + dataFlowDiagramFilenameDOT + gvFile := filepath.Join(outputDirectory, dataFlowDiagramFilenameDOT) if !keepDiagramSourceFiles { - tmpFileGV, err := ioutil.TempFile(model.TempFolder, dataFlowDiagramFilenameDOT) + tmpFileGV, err := os.CreateTemp(tempFolder, dataFlowDiagramFilenameDOT) checkErr(err) gvFile = tmpFileGV.Name() - defer os.Remove(gvFile) + defer func() { _ = os.Remove(gvFile) }() } dotFile := writeDataFlowDiagramGraphvizDOT(gvFile, *diagramDPI) renderDataFlowDiagramGraphvizImage(dotFile, outputDirectory) } // Data Asset Diagram rendering if renderDataAssetDiagram { - gvFile := outputDirectory + "/" + dataAssetDiagramFilenameDOT + gvFile := filepath.Join(outputDirectory, dataAssetDiagramFilenameDOT) if !keepDiagramSourceFiles { - tmpFile, err := ioutil.TempFile(model.TempFolder, dataAssetDiagramFilenameDOT) + tmpFile, err := os.CreateTemp(tempFolder, dataAssetDiagramFilenameDOT) checkErr(err) gvFile = tmpFile.Name() - defer os.Remove(gvFile) + defer func() { _ = os.Remove(gvFile) }() } dotFile := writeDataAssetDiagramGraphvizDOT(gvFile, *diagramDPI) renderDataAssetDiagramGraphvizImage(dotFile, outputDirectory) @@ -1128,7 +1147,7 @@ func doIt(inputFilename string, outputDirectory string) { if *verbose { fmt.Println("Writing risks json") } - report.WriteRisksJSON(outputDirectory + "/" + jsonRisksFilename) + report.WriteRisksJSON(filepath.Join(outputDirectory, jsonRisksFilename)) } // technical assets json @@ -1136,7 +1155,7 @@ func doIt(inputFilename string, outputDirectory string) { if *verbose { fmt.Println("Writing technical assets json") } - report.WriteTechnicalAssetsJSON(outputDirectory + "/" + jsonTechnicalAssetsFilename) + report.WriteTechnicalAssetsJSON(filepath.Join(outputDirectory, jsonTechnicalAssetsFilename)) } // risks as risks json @@ -1144,7 +1163,7 @@ func doIt(inputFilename string, outputDirectory string) { if *verbose { fmt.Println("Writing stats json") } - 
report.WriteStatsJSON(outputDirectory + "/" + jsonStatsFilename) + report.WriteStatsJSON(filepath.Join(outputDirectory, jsonStatsFilename)) } // risks Excel @@ -1152,7 +1171,7 @@ func doIt(inputFilename string, outputDirectory string) { if *verbose { fmt.Println("Writing risks excel") } - report.WriteRisksExcelToFile(outputDirectory + "/" + excelRisksFilename) + report.WriteRisksExcelToFile(filepath.Join(outputDirectory, excelRisksFilename)) } // tags Excel @@ -1160,14 +1179,14 @@ func doIt(inputFilename string, outputDirectory string) { if *verbose { fmt.Println("Writing tags excel") } - report.WriteTagsExcelToFile(outputDirectory + "/" + excelTagsFilename) + report.WriteTagsExcelToFile(filepath.Join(outputDirectory, excelTagsFilename)) } if renderPDF { // hash the YAML input file f, err := os.Open(inputFilename) checkErr(err) - defer f.Close() + defer func() { _ = f.Close() }() hasher := sha256.New() if _, err := io.Copy(hasher, f); err != nil { panic(err) @@ -1177,16 +1196,17 @@ func doIt(inputFilename string, outputDirectory string) { if *verbose { fmt.Println("Writing report pdf") } - report.WriteReportPDF(outputDirectory+"/"+reportFilename, + report.WriteReportPDF(filepath.Join(outputDirectory, reportFilename), *templateFilename, - outputDirectory+"/"+dataFlowDiagramFilenamePNG, - outputDirectory+"/"+dataAssetDiagramFilenamePNG, + filepath.Join(outputDirectory, dataFlowDiagramFilenamePNG), + filepath.Join(outputDirectory, dataAssetDiagramFilenamePNG), inputFilename, *skipRiskRules, buildTimestamp, modelHash, introTextRAA, - customRiskRules) + customRiskRules, + tempFolder) } } @@ -1205,7 +1225,7 @@ func applyRAA() string { if *verbose { fmt.Println("Applying RAA calculation:", *raaPlugin) } - // determine plugin to load + // determine plugin to load. 
// load plugin: open the ".so" file to load the symbols plug, err := plugin.Open(*raaPlugin) checkErr(err) @@ -1222,7 +1242,7 @@ func applyRAA() string { } func loadCustomRiskRules() { - customRiskRules = make(map[string]model.CustomRiskRule, 0) + customRiskRules = make(map[string]model.CustomRiskRule) if len(*riskRulesPlugins) > 0 { if *verbose { fmt.Println("Loading custom risk rules:", *riskRulesPlugins) @@ -1310,13 +1330,13 @@ func execute(context *gin.Context, dryRun bool) (yamlContent []byte, ok bool) { filenameUploaded := strings.TrimSpace(header.Filename) - tmpInputDir, err := ioutil.TempDir(model.TempFolder, "threagile-input-") + tmpInputDir, err := os.MkdirTemp(tempFolder, "threagile-input-") checkErr(err) - defer os.RemoveAll(tmpInputDir) + defer func() { _ = os.RemoveAll(tmpInputDir) }() - tmpModelFile, err := ioutil.TempFile(tmpInputDir, "threagile-model-*") + tmpModelFile, err := os.CreateTemp(tmpInputDir, "threagile-model-*") checkErr(err) - defer os.Remove(tmpModelFile.Name()) + defer func() { _ = os.Remove(tmpModelFile.Name()) }() _, err = io.Copy(tmpModelFile, fileUploaded) checkErr(err) @@ -1342,13 +1362,13 @@ func execute(context *gin.Context, dryRun bool) (yamlContent []byte, ok bool) { } } - tmpOutputDir, err := ioutil.TempDir(model.TempFolder, "threagile-output-") + tmpOutputDir, err := os.MkdirTemp(tempFolder, "threagile-output-") checkErr(err) - defer os.RemoveAll(tmpOutputDir) + defer func() { _ = os.RemoveAll(tmpOutputDir) }() - tmpResultFile, err := ioutil.TempFile(model.TempFolder, "threagile-result-*.zip") + tmpResultFile, err := os.CreateTemp(tempFolder, "threagile-result-*.zip") checkErr(err) - defer os.Remove(tmpResultFile.Name()) + defer func() { _ = os.Remove(tmpResultFile.Name()) }() if dryRun { doItViaRuntimeCall(yamlFile, tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, false, false, false, true, true, true, 40) @@ -1357,26 +1377,26 @@ func 
execute(context *gin.Context, dryRun bool) (yamlContent []byte, ok bool) { } checkErr(err) - yamlContent, err = ioutil.ReadFile(yamlFile) + yamlContent, err = os.ReadFile(yamlFile) checkErr(err) - err = ioutil.WriteFile(tmpOutputDir+"/threagile.yaml", yamlContent, 0400) + err = os.WriteFile(filepath.Join(tmpOutputDir, outputFile), yamlContent, 0400) checkErr(err) if !dryRun { files := []string{ - tmpOutputDir + "/threagile.yaml", - tmpOutputDir + "/" + dataFlowDiagramFilenamePNG, - tmpOutputDir + "/" + dataAssetDiagramFilenamePNG, - tmpOutputDir + "/" + reportFilename, - tmpOutputDir + "/" + excelRisksFilename, - tmpOutputDir + "/" + excelTagsFilename, - tmpOutputDir + "/" + jsonRisksFilename, - tmpOutputDir + "/" + jsonTechnicalAssetsFilename, - tmpOutputDir + "/" + jsonStatsFilename, + filepath.Join(tmpOutputDir, outputFile), + filepath.Join(tmpOutputDir, dataFlowDiagramFilenamePNG), + filepath.Join(tmpOutputDir, dataAssetDiagramFilenamePNG), + filepath.Join(tmpOutputDir, reportFilename), + filepath.Join(tmpOutputDir, excelRisksFilename), + filepath.Join(tmpOutputDir, excelTagsFilename), + filepath.Join(tmpOutputDir, jsonRisksFilename), + filepath.Join(tmpOutputDir, jsonTechnicalAssetsFilename), + filepath.Join(tmpOutputDir, jsonStatsFilename), } if keepDiagramSourceFiles { - files = append(files, tmpOutputDir+"/"+dataFlowDiagramFilenameDOT) - files = append(files, tmpOutputDir+"/"+dataAssetDiagramFilenameDOT) + files = append(files, filepath.Join(tmpOutputDir, dataFlowDiagramFilenameDOT)) + files = append(files, filepath.Join(tmpOutputDir, dataAssetDiagramFilenameDOT)) } err = zipFiles(tmpResultFile.Name(), files) checkErr(err) @@ -1442,14 +1462,14 @@ func doItViaRuntimeCall(modelFile string, outputDir string, executeModelMacro st func startServer() { router := gin.Default() - router.LoadHTMLGlob("server/static/*.html") + router.LoadHTMLGlob("server/static/*.html") // <== router.GET("/", func(c *gin.Context) { c.HTML(http.StatusOK, "index.html", gin.H{}) }) 
router.HEAD("/", func(c *gin.Context) { c.HTML(http.StatusOK, "index.html", gin.H{}) }) - router.StaticFile("/threagile.png", "server/static/threagile.png") + router.StaticFile("/threagile.png", "server/static/threagile.png") // <== router.StaticFile("/site.webmanifest", "server/static/site.webmanifest") router.StaticFile("/favicon.ico", "server/static/favicon.ico") router.StaticFile("/favicon-32x32.png", "server/static/favicon-32x32.png") @@ -1467,7 +1487,7 @@ func startServer() { router.StaticFile("/swagger-ui/swagger-ui.css", "server/static/swagger-ui/swagger-ui.css") router.StaticFile("/swagger-ui/swagger-ui.js", "server/static/swagger-ui/swagger-ui.js") router.StaticFile("/swagger-ui/swagger-ui-bundle.js", "server/static/swagger-ui/swagger-ui-bundle.js") - router.StaticFile("/swagger-ui/swagger-ui-standalone-preset.js", "server/static/swagger-ui/swagger-ui-standalone-preset.js") + router.StaticFile("/swagger-ui/swagger-ui-standalone-preset.js", "server/static/swagger-ui/swagger-ui-standalone-preset.js") // <== router.GET("/threagile-example-model.yaml", exampleFile) router.GET("/threagile-stub-model.yaml", stubFile) @@ -1570,153 +1590,153 @@ func startServer() { router.DELETE("/models/:model-id/shared-runtimes/:shared-runtime-id", deleteSharedRuntime) fmt.Println("Threagile server running...") - router.Run(":" + strconv.Itoa(*serverPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified + _ = router.Run(":" + strconv.Itoa(*serverPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified } func exampleFile(context *gin.Context) { - example, err := ioutil.ReadFile("/app/threagile-example-model.yaml") + example, err := os.ReadFile(filepath.Join(appFolder, "threagile-example-model.yaml")) checkErr(err) context.Data(http.StatusOK, gin.MIMEYAML, example) } func stubFile(context *gin.Context) { - stub, err := ioutil.ReadFile("/app/threagile-stub-model.yaml") + stub, err := os.ReadFile(filepath.Join(appFolder, 
"threagile-stub-model.yaml")) checkErr(err) context.Data(http.StatusOK, gin.MIMEYAML, addSupportedTags(stub)) // TODO use also the MIMEYAML way of serving YAML in model export? } func addSupportedTags(input []byte) []byte { // add distinct tags as "tags_available" - supportedTags := make(map[string]bool, 0) + supportedTags := make(map[string]bool) for _, customRule := range customRiskRules { for _, tag := range customRule.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } } - for _, tag := range accidental_secret_leak.SupportedTags() { + for _, tag := range accidentalsecretleak.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range code_backdooring.SupportedTags() { + for _, tag := range codebackdooring.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range container_baseimage_backdooring.SupportedTags() { + for _, tag := range containerbaseimagebackdooring.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range container_platform_escape.SupportedTags() { + for _, tag := range containerplatformescape.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range cross_site_request_forgery.SupportedTags() { + for _, tag := range crosssiterequestforgery.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range cross_site_scripting.SupportedTags() { + for _, tag := range crosssitescripting.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range dos_risky_access_across_trust_boundary.SupportedTags() { + for _, tag := range dosriskyaccessacrosstrustboundary.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range incomplete_model.SupportedTags() { + for _, tag := range incompletemodel.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range ldap_injection.SupportedTags() { + for _, tag := range ldapinjection.SupportedTags() { 
supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_authentication.SupportedTags() { + for _, tag := range missingauthentication.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_authentication_second_factor.SupportedTags() { + for _, tag := range missingauthenticationsecondfactor.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_build_infrastructure.SupportedTags() { + for _, tag := range missingbuildinfrastructure.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_cloud_hardening.SupportedTags() { + for _, tag := range missingcloudhardening.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_file_validation.SupportedTags() { + for _, tag := range missingfilevalidation.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_hardening.SupportedTags() { + for _, tag := range missinghardening.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_identity_propagation.SupportedTags() { + for _, tag := range missingidentitypropagation.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_identity_provider_isolation.SupportedTags() { + for _, tag := range missingidentityproviderisolation.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_identity_store.SupportedTags() { + for _, tag := range missingidentitystore.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_network_segmentation.SupportedTags() { + for _, tag := range missingnetworksegmentation.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_vault.SupportedTags() { + for _, tag := range missingvault.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range 
missing_vault_isolation.SupportedTags() { + for _, tag := range missingvaultisolation.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range missing_waf.SupportedTags() { + for _, tag := range missingwaf.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range mixed_targets_on_shared_runtime.SupportedTags() { + for _, tag := range mixedtargetsonsharedruntime.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range path_traversal.SupportedTags() { + for _, tag := range pathtraversal.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range push_instead_of_pull_deployment.SupportedTags() { + for _, tag := range pushinsteadofpulldeployment.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range search_query_injection.SupportedTags() { + for _, tag := range searchqueryinjection.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range server_side_request_forgery.SupportedTags() { + for _, tag := range serversiderequestforgery.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range service_registry_poisoning.SupportedTags() { + for _, tag := range serviceregistrypoisoning.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range sql_nosql_injection.SupportedTags() { + for _, tag := range sqlnosqlinjection.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unchecked_deployment.SupportedTags() { + for _, tag := range uncheckeddeployment.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unencrypted_asset.SupportedTags() { + for _, tag := range unencryptedasset.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unencrypted_communication.SupportedTags() { + for _, tag := range unencryptedcommunication.SupportedTags() { 
supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unguarded_access_from_internet.SupportedTags() { + for _, tag := range unguardedaccessfrominternet.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unguarded_direct_datastore_access.SupportedTags() { + for _, tag := range unguardeddirectdatastoreaccess.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unnecessary_communication_link.SupportedTags() { + for _, tag := range unnecessarycommunicationlink.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unnecessary_data_asset.SupportedTags() { + for _, tag := range unnecessarydataasset.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unnecessary_data_transfer.SupportedTags() { + for _, tag := range unnecessarydatatransfer.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range unnecessary_technical_asset.SupportedTags() { + for _, tag := range unnecessarytechnicalasset.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range untrusted_deserialization.SupportedTags() { + for _, tag := range untrusteddeserialization.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range wrong_communication_link_content.SupportedTags() { + for _, tag := range wrongcommunicationlinkcontent.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range wrong_trust_boundary_content.SupportedTags() { + for _, tag := range wrongtrustboundarycontent.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } - for _, tag := range xml_external_entity.SupportedTags() { + for _, tag := range xmlexternalentity.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } tags := make([]string, 0, len(supportedTags)) @@ -1747,8 +1767,8 @@ func addSupportedTags(input []byte) []byte { const keySize = 32 type timeoutStruct struct { - 
xorRand []byte - createdNanotime, lastAcessedNanotime int64 + xorRand []byte + createdNanoTime, lastAccessedNanoTime int64 } var mapTokenHashToTimeoutStruct = make(map[string]timeoutStruct) @@ -1780,9 +1800,9 @@ func createToken(context *gin.Context) { tokenHash := hashSHA256(token) housekeepingTokenMaps() mapTokenHashToTimeoutStruct[tokenHash] = timeoutStruct{ - xorRand: xorBytesArr, - createdNanotime: now, - lastAcessedNanotime: now, + xorRand: xorBytesArr, + createdNanoTime: now, + lastAccessedNanoTime: now, } mapFolderNameToTokenHash[folderName] = tokenHash context.JSON(http.StatusCreated, gin.H{ @@ -1824,14 +1844,14 @@ func housekeepingTokenMaps() { if extremeShortTimeoutsForTesting { // remove all elements older than 1 minute (= 60000000000 ns) soft // and all elements older than 3 minutes (= 180000000000 ns) hard - if now-val.lastAcessedNanotime > 60000000000 || now-val.createdNanotime > 180000000000 { + if now-val.lastAccessedNanoTime > 60000000000 || now-val.createdNanoTime > 180000000000 { fmt.Println("About to remove a token hash from maps") deleteTokenHashFromMaps(tokenHash) } } else { // remove all elements older than 30 minutes (= 1800000000000 ns) soft // and all elements older than 10 hours (= 36000000000000 ns) hard - if now-val.lastAcessedNanotime > 1800000000000 || now-val.createdNanotime > 36000000000000 { + if now-val.lastAccessedNanoTime > 1800000000000 || now-val.createdNanoTime > 36000000000000 { deleteTokenHashFromMaps(tokenHash) } } @@ -1891,49 +1911,49 @@ func analyzeModelOnServerDirectly(context *gin.Context) { if !ok { return } - tmpModelFile, err := ioutil.TempFile(model.TempFolder, "threagile-direct-analyze-*") + tmpModelFile, err := os.CreateTemp(tempFolder, "threagile-direct-analyze-*") if err != nil { handleErrorInServiceCall(err, context) return } - defer os.Remove(tmpModelFile.Name()) - tmpOutputDir, err := ioutil.TempDir(model.TempFolder, "threagile-direct-analyze-") + defer func() { _ = os.Remove(tmpModelFile.Name()) }() + 
tmpOutputDir, err := os.MkdirTemp(tempFolder, "threagile-direct-analyze-") if err != nil { handleErrorInServiceCall(err, context) return } - defer os.RemoveAll(tmpOutputDir) - tmpResultFile, err := ioutil.TempFile(model.TempFolder, "threagile-result-*.zip") + defer func() { _ = os.RemoveAll(tmpOutputDir) }() + tmpResultFile, err := os.CreateTemp(tempFolder, "threagile-result-*.zip") checkErr(err) - defer os.Remove(tmpResultFile.Name()) + defer func() { _ = os.Remove(tmpResultFile.Name()) }() - err = ioutil.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) + err = os.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, true, true, true, true, true, true, true, true, dpi) if err != nil { handleErrorInServiceCall(err, context) return } - err = ioutil.WriteFile(tmpOutputDir+"/threagile.yaml", []byte(yamlText), 0400) + err = os.WriteFile(filepath.Join(tmpOutputDir, outputFile), []byte(yamlText), 0400) if err != nil { handleErrorInServiceCall(err, context) return } files := []string{ - tmpOutputDir + "/threagile.yaml", - tmpOutputDir + "/" + dataFlowDiagramFilenamePNG, - tmpOutputDir + "/" + dataAssetDiagramFilenamePNG, - tmpOutputDir + "/" + reportFilename, - tmpOutputDir + "/" + excelRisksFilename, - tmpOutputDir + "/" + excelTagsFilename, - tmpOutputDir + "/" + jsonRisksFilename, - tmpOutputDir + "/" + jsonTechnicalAssetsFilename, - tmpOutputDir + "/" + jsonStatsFilename, + filepath.Join(tmpOutputDir, outputFile), + filepath.Join(tmpOutputDir, dataFlowDiagramFilenamePNG), + filepath.Join(tmpOutputDir, dataAssetDiagramFilenamePNG), + filepath.Join(tmpOutputDir, reportFilename), + filepath.Join(tmpOutputDir, excelRisksFilename), + filepath.Join(tmpOutputDir, excelTagsFilename), + filepath.Join(tmpOutputDir, jsonRisksFilename), + filepath.Join(tmpOutputDir, jsonTechnicalAssetsFilename), + filepath.Join(tmpOutputDir, 
jsonStatsFilename), } if keepDiagramSourceFiles { - files = append(files, tmpOutputDir+"/"+dataFlowDiagramFilenameDOT) - files = append(files, tmpOutputDir+"/"+dataAssetDiagramFilenameDOT) + files = append(files, filepath.Join(tmpOutputDir, dataFlowDiagramFilenameDOT)) + files = append(files, filepath.Join(tmpOutputDir, dataAssetDiagramFilenameDOT)) } err = zipFiles(tmpResultFile.Name(), files) checkErr(err) @@ -2010,61 +2030,61 @@ func streamResponse(context *gin.Context, responseType responseType) { if !ok { return } - tmpModelFile, err := ioutil.TempFile(model.TempFolder, "threagile-render-*") + tmpModelFile, err := os.CreateTemp(tempFolder, "threagile-render-*") if err != nil { handleErrorInServiceCall(err, context) return } - defer os.Remove(tmpModelFile.Name()) - tmpOutputDir, err := ioutil.TempDir(model.TempFolder, "threagile-render-") + defer func() { _ = os.Remove(tmpModelFile.Name()) }() + tmpOutputDir, err := os.MkdirTemp(tempFolder, "threagile-render-") if err != nil { handleErrorInServiceCall(err, context) return } - defer os.RemoveAll(tmpOutputDir) - err = ioutil.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) + defer func() { _ = os.RemoveAll(tmpOutputDir) }() + err = os.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) if responseType == dataFlowDiagram { doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, true, false, false, false, false, false, false, false, dpi) if err != nil { handleErrorInServiceCall(err, context) return } - context.File(tmpOutputDir + "/" + dataFlowDiagramFilenamePNG) + context.File(filepath.Join(tmpOutputDir, dataFlowDiagramFilenamePNG)) } else if responseType == dataAssetDiagram { doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, false, true, false, false, false, false, false, false, dpi) if err != nil { 
handleErrorInServiceCall(err, context) return } - context.File(tmpOutputDir + "/" + dataAssetDiagramFilenamePNG) + context.File(filepath.Join(tmpOutputDir, dataAssetDiagramFilenamePNG)) } else if responseType == reportPDF { doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, true, false, false, false, false, false, dpi) if err != nil { handleErrorInServiceCall(err, context) return } - context.FileAttachment(tmpOutputDir+"/"+reportFilename, reportFilename) + context.FileAttachment(filepath.Join(tmpOutputDir, reportFilename), reportFilename) } else if responseType == risksExcel { doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, false, true, false, false, false, false, dpi) if err != nil { handleErrorInServiceCall(err, context) return } - context.FileAttachment(tmpOutputDir+"/"+excelRisksFilename, excelRisksFilename) + context.FileAttachment(filepath.Join(tmpOutputDir, excelRisksFilename), excelRisksFilename) } else if responseType == tagsExcel { doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, false, false, true, false, false, false, dpi) if err != nil { handleErrorInServiceCall(err, context) return } - context.FileAttachment(tmpOutputDir+"/"+excelTagsFilename, excelTagsFilename) + context.FileAttachment(filepath.Join(tmpOutputDir, excelTagsFilename), excelTagsFilename) } else if responseType == risksJSON { doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, false, false, false, true, false, false, dpi) if err != nil { handleErrorInServiceCall(err, context) return } - json, err := ioutil.ReadFile(tmpOutputDir + "/" + 
jsonRisksFilename) + json, err := os.ReadFile(filepath.Join(tmpOutputDir, jsonRisksFilename)) if err != nil { handleErrorInServiceCall(err, context) return @@ -2076,7 +2096,7 @@ func streamResponse(context *gin.Context, responseType responseType) { handleErrorInServiceCall(err, context) return } - json, err := ioutil.ReadFile(tmpOutputDir + "/" + jsonTechnicalAssetsFilename) + json, err := os.ReadFile(filepath.Join(tmpOutputDir, jsonTechnicalAssetsFilename)) if err != nil { handleErrorInServiceCall(err, context) return @@ -2088,7 +2108,7 @@ func streamResponse(context *gin.Context, responseType responseType) { handleErrorInServiceCall(err, context) return } - json, err := ioutil.ReadFile(tmpOutputDir + "/" + jsonStatsFilename) + json, err := os.ReadFile(filepath.Join(tmpOutputDir, jsonStatsFilename)) if err != nil { handleErrorInServiceCall(err, context) return @@ -2106,14 +2126,14 @@ func importModel(context *gin.Context) { lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - uuid := context.Param("model-id") // UUID is syntactically validated in readModel+checkModelFolder (next line) via uuid.Parse(modelUUID) - _, _, ok = readModel(context, uuid, key, folderNameOfKey) + aUuid := context.Param("model-id") // UUID is syntactically validated in readModel+checkModelFolder (next line) via uuid.Parse(modelUUID) + _, _, ok = readModel(context, aUuid, key, folderNameOfKey) if ok { // first analyze it simply by executing the full risk process (just discard the result) to ensure that everything would work yamlContent, ok := execute(context, true) if ok { // if we're here, then no problem was raised, so ok to proceed - ok = writeModelYAML(context, string(yamlContent), key, folderNameForModel(folderNameOfKey, uuid), "Model Import", false) + ok = writeModelYAML(context, string(yamlContent), key, folderNameForModel(folderNameOfKey, aUuid), "Model Import", false) if ok { context.JSON(http.StatusCreated, gin.H{ "message": "model imported", @@ -2125,7 +2145,7 @@ func 
importModel(context *gin.Context) { func stats(context *gin.Context) { keyCount, modelCount := 0, 0 - keyFolders, err := ioutil.ReadDir(baseFolder) + keyFolders, err := os.ReadDir(baseFolder) if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -2136,7 +2156,7 @@ func stats(context *gin.Context) { for _, keyFolder := range keyFolders { if len(keyFolder.Name()) == 128 { // it's a sha512 token hash probably, so count it as token folder for the stats keyCount++ - modelFolders, err := ioutil.ReadDir(baseFolder + "/" + keyFolder.Name()) + modelFolders, err := os.ReadDir(filepath.Join(baseFolder, keyFolder.Name())) if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -2170,7 +2190,7 @@ func getDataAsset(context *gin.Context) { modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, dataAsset := range modelInput.Data_assets { + for title, dataAsset := range modelInput.DataAssets { if dataAsset.ID == context.Param("data-asset-id") { context.JSON(http.StatusOK, gin.H{ title: dataAsset, @@ -2195,81 +2215,81 @@ func deleteDataAsset(context *gin.Context) { if ok { referencesDeleted := false // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, dataAsset := range modelInput.Data_assets { + for title, dataAsset := range modelInput.DataAssets { if dataAsset.ID == context.Param("data-asset-id") { // also remove all usages of this data asset !! 
- for _, techAsset := range modelInput.Technical_assets { - if techAsset.Data_assets_processed != nil { - for i, parsedChangeCandidateAsset := range techAsset.Data_assets_processed { + for _, techAsset := range modelInput.TechnicalAssets { + if techAsset.DataAssetsProcessed != nil { + for i, parsedChangeCandidateAsset := range techAsset.DataAssetsProcessed { referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) if referencedAsset == dataAsset.ID { // apply the removal referencesDeleted = true // Remove the element at index i // TODO needs more testing - copy(techAsset.Data_assets_processed[i:], techAsset.Data_assets_processed[i+1:]) // Shift a[i+1:] left one index. - techAsset.Data_assets_processed[len(techAsset.Data_assets_processed)-1] = "" // Erase last element (write zero value). - techAsset.Data_assets_processed = techAsset.Data_assets_processed[:len(techAsset.Data_assets_processed)-1] // Truncate slice. + copy(techAsset.DataAssetsProcessed[i:], techAsset.DataAssetsProcessed[i+1:]) // Shift a[i+1:] left one index. + techAsset.DataAssetsProcessed[len(techAsset.DataAssetsProcessed)-1] = "" // Erase last element (write zero value). + techAsset.DataAssetsProcessed = techAsset.DataAssetsProcessed[:len(techAsset.DataAssetsProcessed)-1] // Truncate slice. } } } - if techAsset.Data_assets_stored != nil { - for i, parsedChangeCandidateAsset := range techAsset.Data_assets_stored { + if techAsset.DataAssetsStored != nil { + for i, parsedChangeCandidateAsset := range techAsset.DataAssetsStored { referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) if referencedAsset == dataAsset.ID { // apply the removal referencesDeleted = true // Remove the element at index i // TODO needs more testing - copy(techAsset.Data_assets_stored[i:], techAsset.Data_assets_stored[i+1:]) // Shift a[i+1:] left one index. - techAsset.Data_assets_stored[len(techAsset.Data_assets_stored)-1] = "" // Erase last element (write zero value). 
- techAsset.Data_assets_stored = techAsset.Data_assets_stored[:len(techAsset.Data_assets_stored)-1] // Truncate slice. + copy(techAsset.DataAssetsStored[i:], techAsset.DataAssetsStored[i+1:]) // Shift a[i+1:] left one index. + techAsset.DataAssetsStored[len(techAsset.DataAssetsStored)-1] = "" // Erase last element (write zero value). + techAsset.DataAssetsStored = techAsset.DataAssetsStored[:len(techAsset.DataAssetsStored)-1] // Truncate slice. } } } - if techAsset.Communication_links != nil { - for title, commLink := range techAsset.Communication_links { - for i, dataAssetSent := range commLink.Data_assets_sent { + if techAsset.CommunicationLinks != nil { + for title, commLink := range techAsset.CommunicationLinks { + for i, dataAssetSent := range commLink.DataAssetsSent { referencedAsset := fmt.Sprintf("%v", dataAssetSent) if referencedAsset == dataAsset.ID { // apply the removal referencesDeleted = true // Remove the element at index i // TODO needs more testing - copy(techAsset.Communication_links[title].Data_assets_sent[i:], techAsset.Communication_links[title].Data_assets_sent[i+1:]) // Shift a[i+1:] left one index. - techAsset.Communication_links[title].Data_assets_sent[len(techAsset.Communication_links[title].Data_assets_sent)-1] = "" // Erase last element (write zero value). - x := techAsset.Communication_links[title] - x.Data_assets_sent = techAsset.Communication_links[title].Data_assets_sent[:len(techAsset.Communication_links[title].Data_assets_sent)-1] // Truncate slice. - techAsset.Communication_links[title] = x + copy(techAsset.CommunicationLinks[title].DataAssetsSent[i:], techAsset.CommunicationLinks[title].DataAssetsSent[i+1:]) // Shift a[i+1:] left one index. + techAsset.CommunicationLinks[title].DataAssetsSent[len(techAsset.CommunicationLinks[title].DataAssetsSent)-1] = "" // Erase last element (write zero value). 
+ x := techAsset.CommunicationLinks[title] + x.DataAssetsSent = techAsset.CommunicationLinks[title].DataAssetsSent[:len(techAsset.CommunicationLinks[title].DataAssetsSent)-1] // Truncate slice. + techAsset.CommunicationLinks[title] = x } } - for i, dataAssetReceived := range commLink.Data_assets_received { + for i, dataAssetReceived := range commLink.DataAssetsReceived { referencedAsset := fmt.Sprintf("%v", dataAssetReceived) if referencedAsset == dataAsset.ID { // apply the removal referencesDeleted = true // Remove the element at index i // TODO needs more testing - copy(techAsset.Communication_links[title].Data_assets_received[i:], techAsset.Communication_links[title].Data_assets_received[i+1:]) // Shift a[i+1:] left one index. - techAsset.Communication_links[title].Data_assets_received[len(techAsset.Communication_links[title].Data_assets_received)-1] = "" // Erase last element (write zero value). - x := techAsset.Communication_links[title] - x.Data_assets_received = techAsset.Communication_links[title].Data_assets_received[:len(techAsset.Communication_links[title].Data_assets_received)-1] // Truncate slice. - techAsset.Communication_links[title] = x + copy(techAsset.CommunicationLinks[title].DataAssetsReceived[i:], techAsset.CommunicationLinks[title].DataAssetsReceived[i+1:]) // Shift a[i+1:] left one index. + techAsset.CommunicationLinks[title].DataAssetsReceived[len(techAsset.CommunicationLinks[title].DataAssetsReceived)-1] = "" // Erase last element (write zero value). + x := techAsset.CommunicationLinks[title] + x.DataAssetsReceived = techAsset.CommunicationLinks[title].DataAssetsReceived[:len(techAsset.CommunicationLinks[title].DataAssetsReceived)-1] // Truncate slice. 
+ techAsset.CommunicationLinks[title] = x } } } } } - for indivRiskCatTitle, indivRiskCat := range modelInput.Individual_risk_categories { - if indivRiskCat.Risks_identified != nil { - for indivRiskInstanceTitle, indivRiskInstance := range indivRiskCat.Risks_identified { - if indivRiskInstance.Most_relevant_data_asset == dataAsset.ID { // apply the removal + for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { + if individualRiskCat.RisksIdentified != nil { + for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { + if individualRiskInstance.MostRelevantDataAsset == dataAsset.ID { // apply the removal referencesDeleted = true - x := modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] - x.Most_relevant_data_asset = "" // TODO needs more testing - modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] = x + x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] + x.MostRelevantDataAsset = "" // TODO needs more testing + modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x } } } } // remove it itself - delete(modelInput.Data_assets, title) + delete(modelInput.DataAssets, title) ok = writeModel(context, key, folderNameOfKey, &modelInput, "Data Asset Deletion") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2297,7 +2317,7 @@ func setSharedRuntime(context *gin.Context) { modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, sharedRuntime := range modelInput.Shared_runtimes { + for title, sharedRuntime := range modelInput.SharedRuntimes { if sharedRuntime.ID == context.Param("shared-runtime-id") { payload := payloadSharedRuntime{} err := 
context.BindJSON(&payload) @@ -2313,17 +2333,17 @@ func setSharedRuntime(context *gin.Context) { return } // in order to also update the title, remove the shared runtime from the map and re-insert it (with new key) - delete(modelInput.Shared_runtimes, title) - modelInput.Shared_runtimes[payload.Title] = sharedRuntimeInput + delete(modelInput.SharedRuntimes, title) + modelInput.SharedRuntimes[payload.Title] = sharedRuntimeInput idChanged := sharedRuntimeInput.ID != sharedRuntime.ID if idChanged { // ID-CHANGE-PROPAGATION - for indivRiskCatTitle, indivRiskCat := range modelInput.Individual_risk_categories { - if indivRiskCat.Risks_identified != nil { - for indivRiskInstanceTitle, indivRiskInstance := range indivRiskCat.Risks_identified { - if indivRiskInstance.Most_relevant_shared_runtime == sharedRuntime.ID { // apply the ID change - x := modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] - x.Most_relevant_shared_runtime = sharedRuntimeInput.ID // TODO needs more testing - modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] = x + for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { + if individualRiskCat.RisksIdentified != nil { + for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { + if individualRiskInstance.MostRelevantSharedRuntime == sharedRuntime.ID { // apply the ID change + x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] + x.MostRelevantSharedRuntime = sharedRuntimeInput.ID // TODO needs more testing + modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x } } } @@ -2356,7 +2376,7 @@ func setDataAsset(context *gin.Context) { modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { // yes, here keyed by title in YAML for better 
readability in the YAML file itself - for title, dataAsset := range modelInput.Data_assets { + for title, dataAsset := range modelInput.DataAssets { if dataAsset.ID == context.Param("data-asset-id") { payload := payloadDataAsset{} err := context.BindJSON(&payload) @@ -2372,52 +2392,52 @@ func setDataAsset(context *gin.Context) { return } // in order to also update the title, remove the asset from the map and re-insert it (with new key) - delete(modelInput.Data_assets, title) - modelInput.Data_assets[payload.Title] = dataAssetInput + delete(modelInput.DataAssets, title) + modelInput.DataAssets[payload.Title] = dataAssetInput idChanged := dataAssetInput.ID != dataAsset.ID if idChanged { // ID-CHANGE-PROPAGATION // also update all usages to point to the new (changed) ID !! - for techAssetTitle, techAsset := range modelInput.Technical_assets { - if techAsset.Data_assets_processed != nil { - for i, parsedChangeCandidateAsset := range techAsset.Data_assets_processed { + for techAssetTitle, techAsset := range modelInput.TechnicalAssets { + if techAsset.DataAssetsProcessed != nil { + for i, parsedChangeCandidateAsset := range techAsset.DataAssetsProcessed { referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.Technical_assets[techAssetTitle].Data_assets_processed[i] = dataAssetInput.ID + modelInput.TechnicalAssets[techAssetTitle].DataAssetsProcessed[i] = dataAssetInput.ID } } } - if techAsset.Data_assets_stored != nil { - for i, parsedChangeCandidateAsset := range techAsset.Data_assets_stored { + if techAsset.DataAssetsStored != nil { + for i, parsedChangeCandidateAsset := range techAsset.DataAssetsStored { referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.Technical_assets[techAssetTitle].Data_assets_stored[i] = dataAssetInput.ID + modelInput.TechnicalAssets[techAssetTitle].DataAssetsStored[i] = 
dataAssetInput.ID } } } - if techAsset.Communication_links != nil { - for title, commLink := range techAsset.Communication_links { - for i, dataAssetSent := range commLink.Data_assets_sent { + if techAsset.CommunicationLinks != nil { + for title, commLink := range techAsset.CommunicationLinks { + for i, dataAssetSent := range commLink.DataAssetsSent { referencedAsset := fmt.Sprintf("%v", dataAssetSent) if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.Technical_assets[techAssetTitle].Communication_links[title].Data_assets_sent[i] = dataAssetInput.ID + modelInput.TechnicalAssets[techAssetTitle].CommunicationLinks[title].DataAssetsSent[i] = dataAssetInput.ID } } - for i, dataAssetReceived := range commLink.Data_assets_received { + for i, dataAssetReceived := range commLink.DataAssetsReceived { referencedAsset := fmt.Sprintf("%v", dataAssetReceived) if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.Technical_assets[techAssetTitle].Communication_links[title].Data_assets_received[i] = dataAssetInput.ID + modelInput.TechnicalAssets[techAssetTitle].CommunicationLinks[title].DataAssetsReceived[i] = dataAssetInput.ID } } } } } - for indivRiskCatTitle, indivRiskCat := range modelInput.Individual_risk_categories { - if indivRiskCat.Risks_identified != nil { - for indivRiskInstanceTitle, indivRiskInstance := range indivRiskCat.Risks_identified { - if indivRiskInstance.Most_relevant_data_asset == dataAsset.ID { // apply the ID change - x := modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] - x.Most_relevant_data_asset = dataAssetInput.ID // TODO needs more testing - modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] = x + for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { + if individualRiskCat.RisksIdentified != nil { + for individualRiskInstanceTitle, individualRiskInstance := range 
individualRiskCat.RisksIdentified { + if individualRiskInstance.MostRelevantDataAsset == dataAsset.ID { // apply the ID change + x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] + x.MostRelevantDataAsset = dataAssetInput.ID // TODO needs more testing + modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x } } } @@ -2450,7 +2470,7 @@ func getSharedRuntime(context *gin.Context) { modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, sharedRuntime := range modelInput.Shared_runtimes { + for title, sharedRuntime := range modelInput.SharedRuntimes { if sharedRuntime.ID == context.Param("shared-runtime-id") { context.JSON(http.StatusOK, gin.H{ title: sharedRuntime, @@ -2483,14 +2503,14 @@ func createNewSharedRuntime(context *gin.Context) { return } // yes, here keyed by title in YAML for better readability in the YAML file itself - if _, exists := modelInput.Shared_runtimes[payload.Title]; exists { + if _, exists := modelInput.SharedRuntimes[payload.Title]; exists { context.JSON(http.StatusConflict, gin.H{ "error": "shared runtime with this title already exists", }) return } - // but later it will in memory keyed by it's "id", so do this uniqueness check also - for _, runtime := range modelInput.Shared_runtimes { + // but later it will in memory keyed by its "id", so do this uniqueness check also + for _, runtime := range modelInput.SharedRuntimes { if runtime.ID == payload.Id { context.JSON(http.StatusConflict, gin.H{ "error": "shared runtime with this id already exists", @@ -2498,7 +2518,7 @@ func createNewSharedRuntime(context *gin.Context) { return } } - if !checkTechnicalAssetsExisting(modelInput, payload.Technical_assets_running) { + if !checkTechnicalAssetsExisting(modelInput, payload.TechnicalAssetsRunning) { 
context.JSON(http.StatusBadRequest, gin.H{ "error": "referenced technical asset does not exist", }) @@ -2508,10 +2528,10 @@ func createNewSharedRuntime(context *gin.Context) { if !ok { return } - if modelInput.Shared_runtimes == nil { - modelInput.Shared_runtimes = make(map[string]model.InputSharedRuntime) + if modelInput.SharedRuntimes == nil { + modelInput.SharedRuntimes = make(map[string]model.InputSharedRuntime) } - modelInput.Shared_runtimes[payload.Title] = sharedRuntimeInput + modelInput.SharedRuntimes[payload.Title] = sharedRuntimeInput ok = writeModel(context, key, folderNameOfKey, &modelInput, "Shared Runtime Creation") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2525,7 +2545,7 @@ func createNewSharedRuntime(context *gin.Context) { func checkTechnicalAssetsExisting(modelInput model.ModelInput, techAssetIDs []string) (ok bool) { for _, techAssetID := range techAssetIDs { exists := false - for _, val := range modelInput.Technical_assets { + for _, val := range modelInput.TechnicalAssets { if val.ID == techAssetID { exists = true break @@ -2538,12 +2558,12 @@ func checkTechnicalAssetsExisting(modelInput model.ModelInput, techAssetIDs []st return true } -func populateSharedRuntime(context *gin.Context, payload payloadSharedRuntime) (sharedRuntimeInput model.InputSharedRuntime, ok bool) { +func populateSharedRuntime(_ *gin.Context, payload payloadSharedRuntime) (sharedRuntimeInput model.InputSharedRuntime, ok bool) { sharedRuntimeInput = model.InputSharedRuntime{ - ID: payload.Id, - Description: payload.Description, - Tags: lowerCaseAndTrim(payload.Tags), - Technical_assets_running: payload.Technical_assets_running, + ID: payload.Id, + Description: payload.Description, + Tags: lowerCaseAndTrim(payload.Tags), + TechnicalAssetsRunning: payload.TechnicalAssetsRunning, } return sharedRuntimeInput, true } @@ -2559,23 +2579,23 @@ func deleteSharedRuntime(context *gin.Context) { if ok { referencesDeleted := false // yes, here keyed by title in YAML for better 
readability in the YAML file itself - for title, sharedRuntime := range modelInput.Shared_runtimes { + for title, sharedRuntime := range modelInput.SharedRuntimes { if sharedRuntime.ID == context.Param("shared-runtime-id") { // also remove all usages of this shared runtime !! - for indivRiskCatTitle, indivRiskCat := range modelInput.Individual_risk_categories { - if indivRiskCat.Risks_identified != nil { - for indivRiskInstanceTitle, indivRiskInstance := range indivRiskCat.Risks_identified { - if indivRiskInstance.Most_relevant_shared_runtime == sharedRuntime.ID { // apply the removal + for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { + if individualRiskCat.RisksIdentified != nil { + for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { + if individualRiskInstance.MostRelevantSharedRuntime == sharedRuntime.ID { // apply the removal referencesDeleted = true - x := modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] - x.Most_relevant_shared_runtime = "" // TODO needs more testing - modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] = x + x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] + x.MostRelevantSharedRuntime = "" // TODO needs more testing + modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x } } } } // remove it itself - delete(modelInput.Shared_runtimes, title) + delete(modelInput.SharedRuntimes, title) ok = writeModel(context, key, folderNameOfKey, &modelInput, "Shared Runtime Deletion") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2612,14 +2632,14 @@ func createNewDataAsset(context *gin.Context) { return } // yes, here keyed by title in YAML for better readability in the YAML file itself - if _, exists := modelInput.Data_assets[payload.Title]; exists { + if 
_, exists := modelInput.DataAssets[payload.Title]; exists { context.JSON(http.StatusConflict, gin.H{ "error": "data asset with this title already exists", }) return } - // but later it will in memory keyed by it's "id", so do this uniqueness check also - for _, asset := range modelInput.Data_assets { + // but later it will in memory keyed by its "id", so do this uniqueness check also + for _, asset := range modelInput.DataAssets { if asset.ID == payload.Id { context.JSON(http.StatusConflict, gin.H{ "error": "data asset with this id already exists", @@ -2631,10 +2651,10 @@ func createNewDataAsset(context *gin.Context) { if !ok { return } - if modelInput.Data_assets == nil { - modelInput.Data_assets = make(map[string]model.InputDataAsset) + if modelInput.DataAssets == nil { + modelInput.DataAssets = make(map[string]model.InputDataAsset) } - modelInput.Data_assets[payload.Title] = dataAssetInput + modelInput.DataAssets[payload.Title] = dataAssetInput ok = writeModel(context, key, folderNameOfKey, &modelInput, "Data Asset Creation") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2672,17 +2692,17 @@ func populateDataAsset(context *gin.Context, payload payloadDataAsset) (dataAsse return dataAssetInput, false } dataAssetInput = model.InputDataAsset{ - ID: payload.Id, - Description: payload.Description, - Usage: usage.String(), - Tags: lowerCaseAndTrim(payload.Tags), - Origin: payload.Origin, - Owner: payload.Owner, - Quantity: quantity.String(), - Confidentiality: confidentiality.String(), - Integrity: integrity.String(), - Availability: availability.String(), - Justification_cia_rating: payload.Justification_cia_rating, + ID: payload.Id, + Description: payload.Description, + Usage: usage.String(), + Tags: lowerCaseAndTrim(payload.Tags), + Origin: payload.Origin, + Owner: payload.Owner, + Quantity: quantity.String(), + Confidentiality: confidentiality.String(), + Integrity: integrity.String(), + Availability: availability.String(), + JustificationCiaRating: 
payload.JustificationCiaRating, } return dataAssetInput, true } @@ -2694,9 +2714,9 @@ func getDataAssets(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, model.Data_assets) + context.JSON(http.StatusOK, aModel.DataAssets) } } @@ -2707,9 +2727,9 @@ func getTrustBoundaries(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, model.Trust_boundaries) + context.JSON(http.StatusOK, aModel.TrustBoundaries) } } @@ -2720,9 +2740,9 @@ func getSharedRuntimes(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, model.Shared_runtimes) + context.JSON(http.StatusOK, aModel.SharedRuntimes) } } @@ -2743,9 +2763,9 @@ func getModel(context *gin.Context) { defer unlockFolder(folderNameOfKey) _, yamlText, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { - tmpResultFile, err := ioutil.TempFile(model.TempFolder, "threagile-*.yaml") + tmpResultFile, err := os.CreateTemp(tempFolder, "threagile-*.yaml") checkErr(err) - err = ioutil.WriteFile(tmpResultFile.Name(), []byte(yamlText), 0400) + err = os.WriteFile(tmpResultFile.Name(), []byte(yamlText), 0400) if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -2753,16 +2773,16 @@ func getModel(context *gin.Context) { }) return } - defer 
os.Remove(tmpResultFile.Name()) - context.FileAttachment(tmpResultFile.Name(), "threagile.yaml") + defer func() { _ = os.Remove(tmpResultFile.Name()) }() + context.FileAttachment(tmpResultFile.Name(), outputFile) } } type payloadModels struct { - ID string `json:"id"` - Title string `json:"title"` - Timestamp_created time.Time `json:"timestamp_created"` - Timestamp_modified time.Time `json:"timestamp_modified"` + ID string `json:"id"` + Title string `json:"title"` + TimestampCreated time.Time `json:"timestamp_created"` + TimestampModified time.Time `json:"timestamp_modified"` } type payloadCover struct { @@ -2772,10 +2792,10 @@ type payloadCover struct { } type payloadOverview struct { - Management_summary_comment string `json:"management_summary_comment"` - Business_criticality string `json:"business_criticality"` - Business_overview model.Overview `json:"business_overview"` - Technical_overview model.Overview `json:"technical_overview"` + ManagementSummaryComment string `json:"management_summary_comment"` + BusinessCriticality string `json:"business_criticality"` + BusinessOverview model.Overview `json:"business_overview"` + TechnicalOverview model.Overview `json:"technical_overview"` } type payloadAbuseCases map[string]string @@ -2783,26 +2803,26 @@ type payloadAbuseCases map[string]string type payloadSecurityRequirements map[string]string type payloadDataAsset struct { - Title string `json:"title"` - Id string `json:"id"` - Description string `json:"description"` - Usage string `json:"usage"` - Tags []string `json:"tags"` - Origin string `json:"origin"` - Owner string `json:"owner"` - Quantity string `json:"quantity"` - Confidentiality string `json:"confidentiality"` - Integrity string `json:"integrity"` - Availability string `json:"availability"` - Justification_cia_rating string `json:"justification_cia_rating"` + Title string `json:"title"` + Id string `json:"id"` + Description string `json:"description"` + Usage string `json:"usage"` + Tags []string 
`json:"tags"` + Origin string `json:"origin"` + Owner string `json:"owner"` + Quantity string `json:"quantity"` + Confidentiality string `json:"confidentiality"` + Integrity string `json:"integrity"` + Availability string `json:"availability"` + JustificationCiaRating string `json:"justification_cia_rating"` } type payloadSharedRuntime struct { - Title string `json:"title"` - Id string `json:"id"` - Description string `json:"description"` - Tags []string `json:"tags"` - Technical_assets_running []string `json:"technical_assets_running"` + Title string `json:"title"` + Id string `json:"id"` + Description string `json:"description"` + Tags []string `json:"tags"` + TechnicalAssetsRunning []string `json:"technical_assets_running"` } func setSecurityRequirements(context *gin.Context) { @@ -2823,7 +2843,7 @@ func setSecurityRequirements(context *gin.Context) { }) return } - modelInput.Security_requirements = payload + modelInput.SecurityRequirements = payload ok = writeModel(context, key, folderNameOfKey, &modelInput, "Security Requirements Update") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2840,9 +2860,9 @@ func getSecurityRequirements(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, model.Security_requirements) + context.JSON(http.StatusOK, aModel.SecurityRequirements) } } @@ -2864,7 +2884,7 @@ func setAbuseCases(context *gin.Context) { }) return } - modelInput.Abuse_cases = payload + modelInput.AbuseCases = payload ok = writeModel(context, key, folderNameOfKey, &modelInput, "Abuse Cases Update") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2881,9 +2901,9 @@ func getAbuseCases(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - model, _, ok := readModel(context, 
context.Param("model-id"), key, folderNameOfKey) + aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, model.Abuse_cases) + context.JSON(http.StatusOK, aModel.AbuseCases) } } @@ -2905,17 +2925,17 @@ func setOverview(context *gin.Context) { }) return } - criticality, err := model.ParseCriticality(payload.Business_criticality) + criticality, err := model.ParseCriticality(payload.BusinessCriticality) if err != nil { handleErrorInServiceCall(err, context) return } - modelInput.Management_summary_comment = payload.Management_summary_comment - modelInput.Business_criticality = criticality.String() - modelInput.Business_overview.Description = payload.Business_overview.Description - modelInput.Business_overview.Images = payload.Business_overview.Images - modelInput.Technical_overview.Description = payload.Technical_overview.Description - modelInput.Technical_overview.Images = payload.Technical_overview.Images + modelInput.ManagementSummaryComment = payload.ManagementSummaryComment + modelInput.BusinessCriticality = criticality.String() + modelInput.BusinessOverview.Description = payload.BusinessOverview.Description + modelInput.BusinessOverview.Images = payload.BusinessOverview.Images + modelInput.TechnicalOverview.Description = payload.TechnicalOverview.Description + modelInput.TechnicalOverview.Images = payload.TechnicalOverview.Images ok = writeModel(context, key, folderNameOfKey, &modelInput, "Overview Update") if ok { context.JSON(http.StatusOK, gin.H{ @@ -2939,13 +2959,13 @@ func getOverview(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { context.JSON(http.StatusOK, gin.H{ - "management_summary_comment": model.Management_summary_comment, - "business_criticality": 
model.Business_criticality, - "business_overview": model.Business_overview, - "technical_overview": model.Technical_overview, + "management_summary_comment": aModel.ManagementSummaryComment, + "business_criticality": aModel.BusinessCriticality, + "business_overview": aModel.BusinessOverview, + "technical_overview": aModel.TechnicalOverview, }) } } @@ -2989,12 +3009,12 @@ func getCover(context *gin.Context) { } lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) if ok { context.JSON(http.StatusOK, gin.H{ - "title": model.Title, - "date": model.Date, - "author": model.Author, + "title": aModel.Title, + "date": aModel.Date, + "author": aModel.Author, }) } } @@ -3012,8 +3032,8 @@ func createNewModel(context *gin.Context) { lockFolder(folderNameOfKey) defer unlockFolder(folderNameOfKey) - uuid := uuid.New().String() - err := os.Mkdir(folderNameForModel(folderNameOfKey, uuid), 0700) + aUuid := uuid.New().String() + err := os.Mkdir(folderNameForModel(folderNameOfKey, aUuid), 0700) if err != nil { context.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to create model", @@ -3021,7 +3041,7 @@ func createNewModel(context *gin.Context) { return } - yaml := `title: New Threat Model + aYaml := `title: New Threat Model threagile_version: ` + model.ThreagileVersion + ` author: name: "" @@ -3052,11 +3072,11 @@ diagram_tweak_suppress_edge_labels: false diagram_tweak_invisible_connections_between_assets: [] diagram_tweak_same_rank_assets: []` - ok = writeModelYAML(context, yaml, key, folderNameForModel(folderNameOfKey, uuid), "New Model Creation", true) + ok = writeModelYAML(context, aYaml, key, folderNameForModel(folderNameOfKey, aUuid), "New Model Creation", true) if ok { context.JSON(http.StatusCreated, gin.H{ "message": "model created", - "id": uuid, + "id": aUuid, }) } } @@ -3070,7 
+3090,7 @@ func listModels(context *gin.Context) { // TODO currently returns error when any defer unlockFolder(folderNameOfKey) result := make([]payloadModels, 0) - modelFolders, err := ioutil.ReadDir(folderNameOfKey) + modelFolders, err := os.ReadDir(folderNameOfKey) if err != nil { log.Println(err) context.JSON(http.StatusNotFound, gin.H{ @@ -3078,9 +3098,9 @@ func listModels(context *gin.Context) { // TODO currently returns error when any }) return } - for _, fileInfo := range modelFolders { - if fileInfo.IsDir() { - modelStat, err := os.Stat(folderNameOfKey + "/" + fileInfo.Name() + "/threagile.yaml") + for _, dirEntry := range modelFolders { + if dirEntry.IsDir() { + modelStat, err := os.Stat(filepath.Join(folderNameOfKey, dirEntry.Name(), outputFile)) if err != nil { log.Println(err) context.JSON(http.StatusNotFound, gin.H{ @@ -3088,15 +3108,23 @@ func listModels(context *gin.Context) { // TODO currently returns error when any }) return } - model, _, ok := readModel(context, fileInfo.Name(), key, folderNameOfKey) + aModel, _, ok := readModel(context, dirEntry.Name(), key, folderNameOfKey) if !ok { return } + fileInfo, err := dirEntry.Info() + if err != nil { + log.Println(err) + context.JSON(http.StatusNotFound, gin.H{ + "error": "unable to get file info", + }) + return + } result = append(result, payloadModels{ - ID: fileInfo.Name(), - Title: model.Title, - Timestamp_created: fileInfo.ModTime(), - Timestamp_modified: modelStat.ModTime(), + ID: dirEntry.Name(), + Title: aModel.Title, + TimestampCreated: fileInfo.ModTime(), + TimestampModified: modelStat.ModTime(), }) } } @@ -3156,7 +3184,7 @@ func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfK }) return modelInputResult, yamlText, false } - aesgcm, err := cipher.NewGCM(block) + aesGcm, err := cipher.NewGCM(block) if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -3165,7 +3193,7 @@ func readModel(context *gin.Context, modelUUID string, key 
[]byte, folderNameOfK return modelInputResult, yamlText, false } - fileBytes, err := ioutil.ReadFile(modelFolder + "/threagile.yaml") + fileBytes, err := os.ReadFile(filepath.Join(modelFolder, outputFile)) if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -3176,7 +3204,7 @@ func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfK nonce := fileBytes[0:12] ciphertext := fileBytes[12:] - plaintext, err := aesgcm.Open(nil, nonce, ciphertext, nil) + plaintext, err := aesGcm.Open(nil, nonce, ciphertext, nil) if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -3194,7 +3222,7 @@ func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfK return modelInputResult, yamlText, false } buf := new(bytes.Buffer) - buf.ReadFrom(r) + _, _ = buf.ReadFrom(r) modelInput := model.ModelInput{} yamlBytes := buf.Bytes() err = yaml.Unmarshal(yamlBytes, &modelInput) @@ -3211,7 +3239,7 @@ func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfK func writeModel(context *gin.Context, key []byte, folderNameOfKey string, modelInput *model.ModelInput, changeReasonForHistory string) (ok bool) { modelFolder, ok := checkModelFolder(context, context.Param("model-id"), folderNameOfKey) if ok { - modelInput.Threagile_version = model.ThreagileVersion + modelInput.ThreagileVersion = model.ThreagileVersion yamlBytes, err := yaml.Marshal(modelInput) if err != nil { log.Println(err) @@ -3234,8 +3262,8 @@ func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder s } var b bytes.Buffer w := gzip.NewWriter(&b) - w.Write([]byte(yaml)) - w.Close() + _, _ = w.Write([]byte(yaml)) + _ = w.Close() plaintext := b.Bytes() cryptoKey := generateKeyFromAlreadyStrongRandomInput(key) block, err := aes.NewCipher(cryptoKey) @@ -3255,7 +3283,7 @@ func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder s }) return false } - aesgcm, err := 
cipher.NewGCM(block) + aesGcm, err := cipher.NewGCM(block) if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -3263,7 +3291,7 @@ func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder s }) return false } - ciphertext := aesgcm.Seal(nil, nonce, plaintext, nil) + ciphertext := aesGcm.Seal(nil, nonce, plaintext, nil) if !skipBackup { err = backupModelToHistory(modelFolder, changeReasonForHistory) if err != nil { @@ -3274,7 +3302,7 @@ func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder s return false } } - f, err := os.Create(modelFolder + "/threagile.yaml") + f, err := os.Create(filepath.Join(modelFolder, outputFile)) if err != nil { log.Println(err) context.JSON(http.StatusInternalServerError, gin.H{ @@ -3282,31 +3310,31 @@ func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder s }) return false } - f.Write(nonce) - f.Write(ciphertext) - f.Close() + _, _ = f.Write(nonce) + _, _ = f.Write(ciphertext) + _ = f.Close() return true } func backupModelToHistory(modelFolder string, changeReasonForHistory string) (err error) { - historyFolder := modelFolder + "/history" + historyFolder := filepath.Join(modelFolder, "history") if _, err := os.Stat(historyFolder); os.IsNotExist(err) { err = os.Mkdir(historyFolder, 0700) if err != nil { return err } } - input, err := ioutil.ReadFile(modelFolder + "/threagile.yaml") + input, err := os.ReadFile(filepath.Join(modelFolder, outputFile)) if err != nil { return err } - historyFile := historyFolder + "/" + time.Now().Format("2006-01-02 15:04:05") + " " + changeReasonForHistory + ".backup" - err = ioutil.WriteFile(historyFile, input, 0400) + historyFile := filepath.Join(historyFolder, time.Now().Format("2006-01-02 15:04:05")+" "+changeReasonForHistory+".backup") + err = os.WriteFile(historyFile, input, 0400) if err != nil { return err } // now delete any old files if over limit to keep - files, err := 
ioutil.ReadDir(historyFolder) + files, err := os.ReadDir(historyFolder) if err != nil { return err } @@ -3317,7 +3345,7 @@ func backupModelToHistory(modelFolder string, changeReasonForHistory string) (er }) for _, file := range files { requiredToDelete-- - err = os.Remove(historyFolder + "/" + file.Name()) + err = os.Remove(filepath.Join(historyFolder, file.Name())) if err != nil { return err } @@ -3353,7 +3381,7 @@ func generateKeyFromAlreadyStrongRandomInput(alreadyRandomInput []byte) []byte { } func folderNameForModel(folderNameOfKey string, uuid string) string { - return folderNameOfKey + "/" + uuid + return filepath.Join(folderNameOfKey, uuid) } var throttlerLock sync.Mutex @@ -3366,7 +3394,7 @@ func checkObjectCreationThrottler(context *gin.Context, typeName string) bool { // remove all elements older than 3 minutes (= 180000000000 ns) now := time.Now().UnixNano() cutoff := now - 180000000000 - for keyCheck, _ := range createdObjectsThrottler { + for keyCheck := range createdObjectsThrottler { for i := 0; i < len(createdObjectsThrottler[keyCheck]); i++ { if createdObjectsThrottler[keyCheck][i] < cutoff { // Remove the element at index i from slice (safe while looping using i as iterator) @@ -3430,7 +3458,7 @@ type keyHeader struct { func folderNameFromKey(key []byte) string { sha512Hash := hashSHA256(key) - return baseFolder + "/" + sha512Hash + return filepath.Join(baseFolder, sha512Hash) } func hashSHA256(key []byte) string { @@ -3503,7 +3531,7 @@ func checkTokenToFolderName(context *gin.Context) (folderNameOfKey string, key [ }) return folderNameOfKey, key, false } - timeoutStruct.lastAcessedNanotime = time.Now().UnixNano() + timeoutStruct.lastAccessedNanoTime = time.Now().UnixNano() return folderNameOfKey, key, true } else { context.JSON(http.StatusNotFound, gin.H{ @@ -3564,7 +3592,7 @@ func deleteKey(context *gin.Context) { } func parseCommandlineArgs() { - modelFilename = flag.String("model", "threagile.yaml", "input model yaml file") + modelFilename = 
flag.String("model", outputFile, "input model yaml file") outputDir = flag.String("output", ".", "output directory") raaPlugin = flag.String("raa-plugin", "raa.so", "RAA calculation plugin (.so shared object) file name") executeModelMacro = flag.String("execute-model-macro", "", "Execute model macro (by ID)") @@ -3597,7 +3625,7 @@ func parseCommandlineArgs() { license := flag.Bool("print-license", false, "print license information") flag.Usage = func() { printLogo() - fmt.Fprintf(os.Stderr, "Usage: threagile [options]") + _, _ = fmt.Fprintf(os.Stderr, "Usage: threagile [options]") fmt.Println() fmt.Println() fmt.Println() @@ -3610,17 +3638,17 @@ func parseCommandlineArgs() { fmt.Println() fmt.Println("If you want to create an example model (via docker) as a starting point to learn about Threagile just run: ") fmt.Println(" docker run --rm -it " + - "-v \"$(pwd)\":/app/work " + + "-v \"$(pwd)\":" + filepath.Join(appFolder, "work") + " " + "threagile/threagile " + "-create-example-model " + - "-output /app/work") + "-output " + filepath.Join(appFolder, "work")) fmt.Println() fmt.Println("If you want to create a minimal stub model (via docker) as a starting point for your own model just run: ") fmt.Println(" docker run --rm -it " + - "-v \"$(pwd)\":/app/work " + + "-v \"$(pwd)\":" + filepath.Join(appFolder, "work") + " " + "threagile/threagile " + "-create-stub-model " + - "-output /app/work") + "-output " + filepath.Join(appFolder, "work")) fmt.Println() printExamples() fmt.Println() @@ -3697,12 +3725,12 @@ func parseCommandlineArgs() { fmt.Println("----------------------") fmt.Println("Built-in model macros:") fmt.Println("----------------------") - fmt.Println(add_build_pipeline.GetMacroDetails().ID, "-->", add_build_pipeline.GetMacroDetails().Title) - fmt.Println(add_vault.GetMacroDetails().ID, "-->", add_vault.GetMacroDetails().Title) - fmt.Println(pretty_print.GetMacroDetails().ID, "-->", pretty_print.GetMacroDetails().Title) - 
fmt.Println(remove_unused_tags.GetMacroDetails().ID, "-->", remove_unused_tags.GetMacroDetails().Title) - fmt.Println(seed_risk_tracking.GetMacroDetails().ID, "-->", seed_risk_tracking.GetMacroDetails().Title) - fmt.Println(seed_tags.GetMacroDetails().ID, "-->", seed_tags.GetMacroDetails().Title) + fmt.Println(addbuildpipeline.GetMacroDetails().ID, "-->", addbuildpipeline.GetMacroDetails().Title) + fmt.Println(addvault.GetMacroDetails().ID, "-->", addvault.GetMacroDetails().Title) + fmt.Println(prettyprint.GetMacroDetails().ID, "-->", prettyprint.GetMacroDetails().Title) + fmt.Println(removeunusedtags.GetMacroDetails().ID, "-->", removeunusedtags.GetMacroDetails().Title) + fmt.Println(seedrisktracking.GetMacroDetails().ID, "-->", seedrisktracking.GetMacroDetails().Title) + fmt.Println(seedtags.GetMacroDetails().ID, "-->", seedtags.GetMacroDetails().Title) fmt.Println() os.Exit(0) } @@ -3721,48 +3749,48 @@ func parseCommandlineArgs() { fmt.Println("--------------------") fmt.Println("Built-in risk rules:") fmt.Println("--------------------") - fmt.Println(accidental_secret_leak.Category().Id, "-->", accidental_secret_leak.Category().Title, "--> with tags:", accidental_secret_leak.SupportedTags()) - fmt.Println(code_backdooring.Category().Id, "-->", code_backdooring.Category().Title, "--> with tags:", code_backdooring.SupportedTags()) - fmt.Println(container_baseimage_backdooring.Category().Id, "-->", container_baseimage_backdooring.Category().Title, "--> with tags:", container_baseimage_backdooring.SupportedTags()) - fmt.Println(container_platform_escape.Category().Id, "-->", container_platform_escape.Category().Title, "--> with tags:", container_platform_escape.SupportedTags()) - fmt.Println(cross_site_request_forgery.Category().Id, "-->", cross_site_request_forgery.Category().Title, "--> with tags:", cross_site_request_forgery.SupportedTags()) - fmt.Println(cross_site_scripting.Category().Id, "-->", cross_site_scripting.Category().Title, "--> with tags:", 
cross_site_scripting.SupportedTags()) - fmt.Println(dos_risky_access_across_trust_boundary.Category().Id, "-->", dos_risky_access_across_trust_boundary.Category().Title, "--> with tags:", dos_risky_access_across_trust_boundary.SupportedTags()) - fmt.Println(incomplete_model.Category().Id, "-->", incomplete_model.Category().Title, "--> with tags:", incomplete_model.SupportedTags()) - fmt.Println(ldap_injection.Category().Id, "-->", ldap_injection.Category().Title, "--> with tags:", ldap_injection.SupportedTags()) - fmt.Println(missing_authentication.Category().Id, "-->", missing_authentication.Category().Title, "--> with tags:", missing_authentication.SupportedTags()) - fmt.Println(missing_authentication_second_factor.Category().Id, "-->", missing_authentication_second_factor.Category().Title, "--> with tags:", missing_authentication_second_factor.SupportedTags()) - fmt.Println(missing_build_infrastructure.Category().Id, "-->", missing_build_infrastructure.Category().Title, "--> with tags:", missing_build_infrastructure.SupportedTags()) - fmt.Println(missing_cloud_hardening.Category().Id, "-->", missing_cloud_hardening.Category().Title, "--> with tags:", missing_cloud_hardening.SupportedTags()) - fmt.Println(missing_file_validation.Category().Id, "-->", missing_file_validation.Category().Title, "--> with tags:", missing_file_validation.SupportedTags()) - fmt.Println(missing_hardening.Category().Id, "-->", missing_hardening.Category().Title, "--> with tags:", missing_hardening.SupportedTags()) - fmt.Println(missing_identity_propagation.Category().Id, "-->", missing_identity_propagation.Category().Title, "--> with tags:", missing_identity_propagation.SupportedTags()) - fmt.Println(missing_identity_provider_isolation.Category().Id, "-->", missing_identity_provider_isolation.Category().Title, "--> with tags:", missing_identity_provider_isolation.SupportedTags()) - fmt.Println(missing_identity_store.Category().Id, "-->", missing_identity_store.Category().Title, "--> with 
tags:", missing_identity_store.SupportedTags()) - fmt.Println(missing_network_segmentation.Category().Id, "-->", missing_network_segmentation.Category().Title, "--> with tags:", missing_network_segmentation.SupportedTags()) - fmt.Println(missing_vault.Category().Id, "-->", missing_vault.Category().Title, "--> with tags:", missing_vault.SupportedTags()) - fmt.Println(missing_vault_isolation.Category().Id, "-->", missing_vault_isolation.Category().Title, "--> with tags:", missing_vault_isolation.SupportedTags()) - fmt.Println(missing_waf.Category().Id, "-->", missing_waf.Category().Title, "--> with tags:", missing_waf.SupportedTags()) - fmt.Println(mixed_targets_on_shared_runtime.Category().Id, "-->", mixed_targets_on_shared_runtime.Category().Title, "--> with tags:", mixed_targets_on_shared_runtime.SupportedTags()) - fmt.Println(path_traversal.Category().Id, "-->", path_traversal.Category().Title, "--> with tags:", path_traversal.SupportedTags()) - fmt.Println(push_instead_of_pull_deployment.Category().Id, "-->", push_instead_of_pull_deployment.Category().Title, "--> with tags:", push_instead_of_pull_deployment.SupportedTags()) - fmt.Println(search_query_injection.Category().Id, "-->", search_query_injection.Category().Title, "--> with tags:", search_query_injection.SupportedTags()) - fmt.Println(server_side_request_forgery.Category().Id, "-->", server_side_request_forgery.Category().Title, "--> with tags:", server_side_request_forgery.SupportedTags()) - fmt.Println(service_registry_poisoning.Category().Id, "-->", service_registry_poisoning.Category().Title, "--> with tags:", service_registry_poisoning.SupportedTags()) - fmt.Println(sql_nosql_injection.Category().Id, "-->", sql_nosql_injection.Category().Title, "--> with tags:", sql_nosql_injection.SupportedTags()) - fmt.Println(unchecked_deployment.Category().Id, "-->", unchecked_deployment.Category().Title, "--> with tags:", unchecked_deployment.SupportedTags()) - fmt.Println(unencrypted_asset.Category().Id, 
"-->", unencrypted_asset.Category().Title, "--> with tags:", unencrypted_asset.SupportedTags()) - fmt.Println(unencrypted_communication.Category().Id, "-->", unencrypted_communication.Category().Title, "--> with tags:", unencrypted_communication.SupportedTags()) - fmt.Println(unguarded_access_from_internet.Category().Id, "-->", unguarded_access_from_internet.Category().Title, "--> with tags:", unguarded_access_from_internet.SupportedTags()) - fmt.Println(unguarded_direct_datastore_access.Category().Id, "-->", unguarded_direct_datastore_access.Category().Title, "--> with tags:", unguarded_direct_datastore_access.SupportedTags()) - fmt.Println(unnecessary_communication_link.Category().Id, "-->", unnecessary_communication_link.Category().Title, "--> with tags:", unnecessary_communication_link.SupportedTags()) - fmt.Println(unnecessary_data_asset.Category().Id, "-->", unnecessary_data_asset.Category().Title, "--> with tags:", unnecessary_data_asset.SupportedTags()) - fmt.Println(unnecessary_data_transfer.Category().Id, "-->", unnecessary_data_transfer.Category().Title, "--> with tags:", unnecessary_data_transfer.SupportedTags()) - fmt.Println(unnecessary_technical_asset.Category().Id, "-->", unnecessary_technical_asset.Category().Title, "--> with tags:", unnecessary_technical_asset.SupportedTags()) - fmt.Println(untrusted_deserialization.Category().Id, "-->", untrusted_deserialization.Category().Title, "--> with tags:", untrusted_deserialization.SupportedTags()) - fmt.Println(wrong_communication_link_content.Category().Id, "-->", wrong_communication_link_content.Category().Title, "--> with tags:", wrong_communication_link_content.SupportedTags()) - fmt.Println(wrong_trust_boundary_content.Category().Id, "-->", wrong_trust_boundary_content.Category().Title, "--> with tags:", wrong_trust_boundary_content.SupportedTags()) - fmt.Println(xml_external_entity.Category().Id, "-->", xml_external_entity.Category().Title, "--> with tags:", xml_external_entity.SupportedTags()) + 
fmt.Println(accidentalsecretleak.Category().Id, "-->", accidentalsecretleak.Category().Title, "--> with tags:", accidentalsecretleak.SupportedTags()) + fmt.Println(codebackdooring.Category().Id, "-->", codebackdooring.Category().Title, "--> with tags:", codebackdooring.SupportedTags()) + fmt.Println(containerbaseimagebackdooring.Category().Id, "-->", containerbaseimagebackdooring.Category().Title, "--> with tags:", containerbaseimagebackdooring.SupportedTags()) + fmt.Println(containerplatformescape.Category().Id, "-->", containerplatformescape.Category().Title, "--> with tags:", containerplatformescape.SupportedTags()) + fmt.Println(crosssiterequestforgery.Category().Id, "-->", crosssiterequestforgery.Category().Title, "--> with tags:", crosssiterequestforgery.SupportedTags()) + fmt.Println(crosssitescripting.Category().Id, "-->", crosssitescripting.Category().Title, "--> with tags:", crosssitescripting.SupportedTags()) + fmt.Println(dosriskyaccessacrosstrustboundary.Category().Id, "-->", dosriskyaccessacrosstrustboundary.Category().Title, "--> with tags:", dosriskyaccessacrosstrustboundary.SupportedTags()) + fmt.Println(incompletemodel.Category().Id, "-->", incompletemodel.Category().Title, "--> with tags:", incompletemodel.SupportedTags()) + fmt.Println(ldapinjection.Category().Id, "-->", ldapinjection.Category().Title, "--> with tags:", ldapinjection.SupportedTags()) + fmt.Println(missingauthentication.Category().Id, "-->", missingauthentication.Category().Title, "--> with tags:", missingauthentication.SupportedTags()) + fmt.Println(missingauthenticationsecondfactor.Category().Id, "-->", missingauthenticationsecondfactor.Category().Title, "--> with tags:", missingauthenticationsecondfactor.SupportedTags()) + fmt.Println(missingbuildinfrastructure.Category().Id, "-->", missingbuildinfrastructure.Category().Title, "--> with tags:", missingbuildinfrastructure.SupportedTags()) + fmt.Println(missingcloudhardening.Category().Id, "-->", 
missingcloudhardening.Category().Title, "--> with tags:", missingcloudhardening.SupportedTags()) + fmt.Println(missingfilevalidation.Category().Id, "-->", missingfilevalidation.Category().Title, "--> with tags:", missingfilevalidation.SupportedTags()) + fmt.Println(missinghardening.Category().Id, "-->", missinghardening.Category().Title, "--> with tags:", missinghardening.SupportedTags()) + fmt.Println(missingidentitypropagation.Category().Id, "-->", missingidentitypropagation.Category().Title, "--> with tags:", missingidentitypropagation.SupportedTags()) + fmt.Println(missingidentityproviderisolation.Category().Id, "-->", missingidentityproviderisolation.Category().Title, "--> with tags:", missingidentityproviderisolation.SupportedTags()) + fmt.Println(missingidentitystore.Category().Id, "-->", missingidentitystore.Category().Title, "--> with tags:", missingidentitystore.SupportedTags()) + fmt.Println(missingnetworksegmentation.Category().Id, "-->", missingnetworksegmentation.Category().Title, "--> with tags:", missingnetworksegmentation.SupportedTags()) + fmt.Println(missingvault.Category().Id, "-->", missingvault.Category().Title, "--> with tags:", missingvault.SupportedTags()) + fmt.Println(missingvaultisolation.Category().Id, "-->", missingvaultisolation.Category().Title, "--> with tags:", missingvaultisolation.SupportedTags()) + fmt.Println(missingwaf.Category().Id, "-->", missingwaf.Category().Title, "--> with tags:", missingwaf.SupportedTags()) + fmt.Println(mixedtargetsonsharedruntime.Category().Id, "-->", mixedtargetsonsharedruntime.Category().Title, "--> with tags:", mixedtargetsonsharedruntime.SupportedTags()) + fmt.Println(pathtraversal.Category().Id, "-->", pathtraversal.Category().Title, "--> with tags:", pathtraversal.SupportedTags()) + fmt.Println(pushinsteadofpulldeployment.Category().Id, "-->", pushinsteadofpulldeployment.Category().Title, "--> with tags:", pushinsteadofpulldeployment.SupportedTags()) + 
fmt.Println(searchqueryinjection.Category().Id, "-->", searchqueryinjection.Category().Title, "--> with tags:", searchqueryinjection.SupportedTags()) + fmt.Println(serversiderequestforgery.Category().Id, "-->", serversiderequestforgery.Category().Title, "--> with tags:", serversiderequestforgery.SupportedTags()) + fmt.Println(serviceregistrypoisoning.Category().Id, "-->", serviceregistrypoisoning.Category().Title, "--> with tags:", serviceregistrypoisoning.SupportedTags()) + fmt.Println(sqlnosqlinjection.Category().Id, "-->", sqlnosqlinjection.Category().Title, "--> with tags:", sqlnosqlinjection.SupportedTags()) + fmt.Println(uncheckeddeployment.Category().Id, "-->", uncheckeddeployment.Category().Title, "--> with tags:", uncheckeddeployment.SupportedTags()) + fmt.Println(unencryptedasset.Category().Id, "-->", unencryptedasset.Category().Title, "--> with tags:", unencryptedasset.SupportedTags()) + fmt.Println(unencryptedcommunication.Category().Id, "-->", unencryptedcommunication.Category().Title, "--> with tags:", unencryptedcommunication.SupportedTags()) + fmt.Println(unguardedaccessfrominternet.Category().Id, "-->", unguardedaccessfrominternet.Category().Title, "--> with tags:", unguardedaccessfrominternet.SupportedTags()) + fmt.Println(unguardeddirectdatastoreaccess.Category().Id, "-->", unguardeddirectdatastoreaccess.Category().Title, "--> with tags:", unguardeddirectdatastoreaccess.SupportedTags()) + fmt.Println(unnecessarycommunicationlink.Category().Id, "-->", unnecessarycommunicationlink.Category().Title, "--> with tags:", unnecessarycommunicationlink.SupportedTags()) + fmt.Println(unnecessarydataasset.Category().Id, "-->", unnecessarydataasset.Category().Title, "--> with tags:", unnecessarydataasset.SupportedTags()) + fmt.Println(unnecessarydatatransfer.Category().Id, "-->", unnecessarydatatransfer.Category().Title, "--> with tags:", unnecessarydatatransfer.SupportedTags()) + fmt.Println(unnecessarytechnicalasset.Category().Id, "-->", 
unnecessarytechnicalasset.Category().Title, "--> with tags:", unnecessarytechnicalasset.SupportedTags()) + fmt.Println(untrusteddeserialization.Category().Id, "-->", untrusteddeserialization.Category().Title, "--> with tags:", untrusteddeserialization.SupportedTags()) + fmt.Println(wrongcommunicationlinkcontent.Category().Id, "-->", wrongcommunicationlinkcontent.Category().Title, "--> with tags:", wrongcommunicationlinkcontent.SupportedTags()) + fmt.Println(wrongtrustboundarycontent.Category().Id, "-->", wrongtrustboundarycontent.Category().Title, "--> with tags:", wrongtrustboundarycontent.SupportedTags()) + fmt.Println(xmlexternalentity.Category().Id, "-->", xmlexternalentity.Category().Title, "--> with tags:", xmlexternalentity.SupportedTags()) fmt.Println() os.Exit(0) } @@ -3798,12 +3826,12 @@ func parseCommandlineArgs() { printLogo() fmt.Println("Explanation for the model macros:") fmt.Println() - fmt.Printf("%v: %v\n", add_build_pipeline.GetMacroDetails().ID, add_build_pipeline.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", add_vault.GetMacroDetails().ID, add_vault.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", pretty_print.GetMacroDetails().ID, pretty_print.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", remove_unused_tags.GetMacroDetails().ID, remove_unused_tags.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", seed_risk_tracking.GetMacroDetails().ID, seed_risk_tracking.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", seed_tags.GetMacroDetails().ID, seed_tags.GetMacroDetails().Description) + fmt.Printf("%v: %v\n", addbuildpipeline.GetMacroDetails().ID, addbuildpipeline.GetMacroDetails().Description) + fmt.Printf("%v: %v\n", addvault.GetMacroDetails().ID, addvault.GetMacroDetails().Description) + fmt.Printf("%v: %v\n", prettyprint.GetMacroDetails().ID, prettyprint.GetMacroDetails().Description) + fmt.Printf("%v: %v\n", removeunusedtags.GetMacroDetails().ID, removeunusedtags.GetMacroDetails().Description) + 
fmt.Printf("%v: %v\n", seedrisktracking.GetMacroDetails().ID, seedrisktracking.GetMacroDetails().Description) + fmt.Printf("%v: %v\n", seedtags.GetMacroDetails().ID, seedtags.GetMacroDetails().Description) fmt.Println() os.Exit(0) @@ -3812,48 +3840,48 @@ func parseCommandlineArgs() { printLogo() fmt.Println("Explanation for risk rules:") fmt.Println() - fmt.Printf("%v: %v\n", accidental_secret_leak.Category().Id, accidental_secret_leak.Category().Description) - fmt.Printf("%v: %v\n", code_backdooring.Category().Id, code_backdooring.Category().Description) - fmt.Printf("%v: %v\n", container_baseimage_backdooring.Category().Id, container_baseimage_backdooring.Category().Description) - fmt.Printf("%v: %v\n", container_platform_escape.Category().Id, container_platform_escape.Category().Description) - fmt.Printf("%v: %v\n", cross_site_request_forgery.Category().Id, cross_site_request_forgery.Category().Description) - fmt.Printf("%v: %v\n", cross_site_scripting.Category().Id, cross_site_scripting.Category().Description) - fmt.Printf("%v: %v\n", dos_risky_access_across_trust_boundary.Category().Id, dos_risky_access_across_trust_boundary.Category().Description) - fmt.Printf("%v: %v\n", incomplete_model.Category().Id, incomplete_model.Category().Description) - fmt.Printf("%v: %v\n", ldap_injection.Category().Id, ldap_injection.Category().Description) - fmt.Printf("%v: %v\n", missing_authentication.Category().Id, missing_authentication.Category().Description) - fmt.Printf("%v: %v\n", missing_authentication_second_factor.Category().Id, missing_authentication_second_factor.Category().Description) - fmt.Printf("%v: %v\n", missing_build_infrastructure.Category().Id, missing_build_infrastructure.Category().Description) - fmt.Printf("%v: %v\n", missing_cloud_hardening.Category().Id, missing_cloud_hardening.Category().Description) - fmt.Printf("%v: %v\n", missing_file_validation.Category().Id, missing_file_validation.Category().Description) - fmt.Printf("%v: %v\n", 
missing_hardening.Category().Id, missing_hardening.Category().Description) - fmt.Printf("%v: %v\n", missing_identity_propagation.Category().Id, missing_identity_propagation.Category().Description) - fmt.Printf("%v: %v\n", missing_identity_provider_isolation.Category().Id, missing_identity_provider_isolation.Category().Description) - fmt.Printf("%v: %v\n", missing_identity_store.Category().Id, missing_identity_store.Category().Description) - fmt.Printf("%v: %v\n", missing_network_segmentation.Category().Id, missing_network_segmentation.Category().Description) - fmt.Printf("%v: %v\n", missing_vault.Category().Id, missing_vault.Category().Description) - fmt.Printf("%v: %v\n", missing_vault_isolation.Category().Id, missing_vault_isolation.Category().Description) - fmt.Printf("%v: %v\n", missing_waf.Category().Id, missing_waf.Category().Description) - fmt.Printf("%v: %v\n", mixed_targets_on_shared_runtime.Category().Id, mixed_targets_on_shared_runtime.Category().Description) - fmt.Printf("%v: %v\n", path_traversal.Category().Id, path_traversal.Category().Description) - fmt.Printf("%v: %v\n", push_instead_of_pull_deployment.Category().Id, push_instead_of_pull_deployment.Category().Description) - fmt.Printf("%v: %v\n", search_query_injection.Category().Id, search_query_injection.Category().Description) - fmt.Printf("%v: %v\n", server_side_request_forgery.Category().Id, server_side_request_forgery.Category().Description) - fmt.Printf("%v: %v\n", service_registry_poisoning.Category().Id, service_registry_poisoning.Category().Description) - fmt.Printf("%v: %v\n", sql_nosql_injection.Category().Id, sql_nosql_injection.Category().Description) - fmt.Printf("%v: %v\n", unchecked_deployment.Category().Id, unchecked_deployment.Category().Description) - fmt.Printf("%v: %v\n", unencrypted_asset.Category().Id, unencrypted_asset.Category().Description) - fmt.Printf("%v: %v\n", unencrypted_communication.Category().Id, unencrypted_communication.Category().Description) - fmt.Printf("%v: 
%v\n", unguarded_access_from_internet.Category().Id, unguarded_access_from_internet.Category().Description) - fmt.Printf("%v: %v\n", unguarded_direct_datastore_access.Category().Id, unguarded_direct_datastore_access.Category().Description) - fmt.Printf("%v: %v\n", unnecessary_communication_link.Category().Id, unnecessary_communication_link.Category().Description) - fmt.Printf("%v: %v\n", unnecessary_data_asset.Category().Id, unnecessary_data_asset.Category().Description) - fmt.Printf("%v: %v\n", unnecessary_data_transfer.Category().Id, unnecessary_data_transfer.Category().Description) - fmt.Printf("%v: %v\n", unnecessary_technical_asset.Category().Id, unnecessary_technical_asset.Category().Description) - fmt.Printf("%v: %v\n", untrusted_deserialization.Category().Id, untrusted_deserialization.Category().Description) - fmt.Printf("%v: %v\n", wrong_communication_link_content.Category().Id, wrong_communication_link_content.Category().Description) - fmt.Printf("%v: %v\n", wrong_trust_boundary_content.Category().Id, wrong_trust_boundary_content.Category().Description) - fmt.Printf("%v: %v\n", xml_external_entity.Category().Id, xml_external_entity.Category().Description) + fmt.Printf("%v: %v\n", accidentalsecretleak.Category().Id, accidentalsecretleak.Category().Description) + fmt.Printf("%v: %v\n", codebackdooring.Category().Id, codebackdooring.Category().Description) + fmt.Printf("%v: %v\n", containerbaseimagebackdooring.Category().Id, containerbaseimagebackdooring.Category().Description) + fmt.Printf("%v: %v\n", containerplatformescape.Category().Id, containerplatformescape.Category().Description) + fmt.Printf("%v: %v\n", crosssiterequestforgery.Category().Id, crosssiterequestforgery.Category().Description) + fmt.Printf("%v: %v\n", crosssitescripting.Category().Id, crosssitescripting.Category().Description) + fmt.Printf("%v: %v\n", dosriskyaccessacrosstrustboundary.Category().Id, dosriskyaccessacrosstrustboundary.Category().Description) + fmt.Printf("%v: %v\n", 
incompletemodel.Category().Id, incompletemodel.Category().Description) + fmt.Printf("%v: %v\n", ldapinjection.Category().Id, ldapinjection.Category().Description) + fmt.Printf("%v: %v\n", missingauthentication.Category().Id, missingauthentication.Category().Description) + fmt.Printf("%v: %v\n", missingauthenticationsecondfactor.Category().Id, missingauthenticationsecondfactor.Category().Description) + fmt.Printf("%v: %v\n", missingbuildinfrastructure.Category().Id, missingbuildinfrastructure.Category().Description) + fmt.Printf("%v: %v\n", missingcloudhardening.Category().Id, missingcloudhardening.Category().Description) + fmt.Printf("%v: %v\n", missingfilevalidation.Category().Id, missingfilevalidation.Category().Description) + fmt.Printf("%v: %v\n", missinghardening.Category().Id, missinghardening.Category().Description) + fmt.Printf("%v: %v\n", missingidentitypropagation.Category().Id, missingidentitypropagation.Category().Description) + fmt.Printf("%v: %v\n", missingidentityproviderisolation.Category().Id, missingidentityproviderisolation.Category().Description) + fmt.Printf("%v: %v\n", missingidentitystore.Category().Id, missingidentitystore.Category().Description) + fmt.Printf("%v: %v\n", missingnetworksegmentation.Category().Id, missingnetworksegmentation.Category().Description) + fmt.Printf("%v: %v\n", missingvault.Category().Id, missingvault.Category().Description) + fmt.Printf("%v: %v\n", missingvaultisolation.Category().Id, missingvaultisolation.Category().Description) + fmt.Printf("%v: %v\n", missingwaf.Category().Id, missingwaf.Category().Description) + fmt.Printf("%v: %v\n", mixedtargetsonsharedruntime.Category().Id, mixedtargetsonsharedruntime.Category().Description) + fmt.Printf("%v: %v\n", pathtraversal.Category().Id, pathtraversal.Category().Description) + fmt.Printf("%v: %v\n", pushinsteadofpulldeployment.Category().Id, pushinsteadofpulldeployment.Category().Description) + fmt.Printf("%v: %v\n", searchqueryinjection.Category().Id, 
searchqueryinjection.Category().Description) + fmt.Printf("%v: %v\n", serversiderequestforgery.Category().Id, serversiderequestforgery.Category().Description) + fmt.Printf("%v: %v\n", serviceregistrypoisoning.Category().Id, serviceregistrypoisoning.Category().Description) + fmt.Printf("%v: %v\n", sqlnosqlinjection.Category().Id, sqlnosqlinjection.Category().Description) + fmt.Printf("%v: %v\n", uncheckeddeployment.Category().Id, uncheckeddeployment.Category().Description) + fmt.Printf("%v: %v\n", unencryptedasset.Category().Id, unencryptedasset.Category().Description) + fmt.Printf("%v: %v\n", unencryptedcommunication.Category().Id, unencryptedcommunication.Category().Description) + fmt.Printf("%v: %v\n", unguardedaccessfrominternet.Category().Id, unguardedaccessfrominternet.Category().Description) + fmt.Printf("%v: %v\n", unguardeddirectdatastoreaccess.Category().Id, unguardeddirectdatastoreaccess.Category().Description) + fmt.Printf("%v: %v\n", unnecessarycommunicationlink.Category().Id, unnecessarycommunicationlink.Category().Description) + fmt.Printf("%v: %v\n", unnecessarydataasset.Category().Id, unnecessarydataasset.Category().Description) + fmt.Printf("%v: %v\n", unnecessarydatatransfer.Category().Id, unnecessarydatatransfer.Category().Description) + fmt.Printf("%v: %v\n", unnecessarytechnicalasset.Category().Id, unnecessarytechnicalasset.Category().Description) + fmt.Printf("%v: %v\n", untrusteddeserialization.Category().Id, untrusteddeserialization.Category().Description) + fmt.Printf("%v: %v\n", wrongcommunicationlinkcontent.Category().Id, wrongcommunicationlinkcontent.Category().Description) + fmt.Printf("%v: %v\n", wrongtrustboundarycontent.Category().Id, wrongtrustboundarycontent.Category().Description) + fmt.Printf("%v: %v\n", xmlexternalentity.Category().Id, xmlexternalentity.Category().Description) fmt.Println() os.Exit(0) } @@ -3875,7 +3903,7 @@ func parseCommandlineArgs() { } if *license { printLogo() - content, err := 
ioutil.ReadFile("/app/LICENSE.txt") + content, err := os.ReadFile(filepath.Join(appFolder, "LICENSE.txt")) checkErr(err) fmt.Print(string(content)) fmt.Println() @@ -3935,30 +3963,30 @@ func printVersion() { } func createExampleModelFile() { - copyFile("/app/threagile-example-model.yaml", *outputDir+"/threagile-example-model.yaml") + _, _ = copyFile(filepath.Join(appFolder, "threagile-example-model.yaml"), filepath.Join(*outputDir, "threagile-example-model.yaml")) } func createStubModelFile() { loadCustomRiskRules() - stub, err := ioutil.ReadFile("/app/threagile-stub-model.yaml") + stub, err := os.ReadFile(filepath.Join(appFolder, "threagile-stub-model.yaml")) checkErr(err) - err = ioutil.WriteFile(*outputDir+"/threagile-stub-model.yaml", addSupportedTags(stub), 0644) + err = os.WriteFile(filepath.Join(*outputDir, "threagile-stub-model.yaml"), addSupportedTags(stub), 0644) checkErr(err) } func createEditingSupportFiles() { - copyFile("/app/schema.json", *outputDir+"/schema.json") - copyFile("/app/live-templates.txt", *outputDir+"/live-templates.txt") + _, _ = copyFile(filepath.Join(appFolder, "schema.json"), filepath.Join(*outputDir, "schema.json")) + _, _ = copyFile(filepath.Join(appFolder, "live-templates.txt"), filepath.Join(*outputDir, "live-templates.txt")) } func printExamples() { fmt.Println("If you want to execute Threagile on a model yaml file (via docker): ") fmt.Println(" docker run --rm -it " + - "-v \"$(pwd)\":/app/work " + + "-v \"$(pwd)\":" + filepath.Join(appFolder, "work") + " " + "threagile/threagile " + "-verbose " + - "-model /app/work/threagile.yaml " + - "-output /app/work") + "-model " + filepath.Join(appFolder, "work", outputFile) + " " + + "-output " + filepath.Join(appFolder, "work")) fmt.Println() fmt.Println("If you want to run Threagile as a server (REST API) on some port (here 8080): ") fmt.Println(" docker run --rm -it " + @@ -3972,13 +4000,13 @@ func printExamples() { fmt.Println(" docker run --rm -it threagile/threagile 
-list-types") fmt.Println() fmt.Println("If you want to use some nice editing help (syntax validation, autocompletion, and live templates) in your favourite IDE: ") - fmt.Println(" docker run --rm -it -v \"$(pwd)\":/app/work threagile/threagile -create-editing-support -output /app/work") + fmt.Println(" docker run --rm -it -v \"$(pwd)\":" + filepath.Join(appFolder, "work") + " threagile/threagile -create-editing-support -output " + filepath.Join(appFolder, "work")) fmt.Println() fmt.Println("If you want to list all available model macros (which are macros capable of reading a model yaml file, asking you questions in a wizard-style and then update the model yaml file accordingly): ") fmt.Println(" docker run --rm -it threagile/threagile -list-model-macros") fmt.Println() fmt.Println("If you want to execute a certain model macro on the model yaml file (here the macro add-build-pipeline): ") - fmt.Println(" docker run --rm -it -v \"$(pwd)\":/app/work threagile/threagile -model /app/work/threagile.yaml -output /app/work -execute-model-macro add-build-pipeline") + fmt.Println(" docker run --rm -it -v \"$(pwd)\":" + filepath.Join(appFolder, "work") + " threagile/threagile -model " + filepath.Join(appFolder, "work", outputFile) + " -output " + filepath.Join(appFolder, "work") + " -execute-model-macro add-build-pipeline") } func printTypes(title string, value interface{}) { @@ -4007,13 +4035,13 @@ func copyFile(src, dst string) (int64, error) { if err != nil { return 0, err } - defer source.Close() + defer func() { _ = source.Close() }() destination, err := os.Create(dst) if err != nil { return 0, err } - defer destination.Close() + defer func() { _ = destination.Close() }() nBytes, err := io.Copy(destination, source) return nBytes, err } @@ -4022,7 +4050,7 @@ func parseModel(inputFilename string) { if *verbose { fmt.Println("Parsing model:", inputFilename) } - modelYaml, err := ioutil.ReadFile(inputFilename) + modelYaml, err := os.ReadFile(inputFilename) if err == nil { 
modelInput = model.ModelInput{} err = yaml.Unmarshal(modelYaml, &modelInput) @@ -4030,7 +4058,7 @@ func parseModel(inputFilename string) { //fmt.Println(modelInput) var businessCriticality model.Criticality - switch modelInput.Business_criticality { + switch modelInput.BusinessCriticality { case model.Archive.String(): businessCriticality = model.Archive case model.Operational.String(): @@ -4042,7 +4070,7 @@ func parseModel(inputFilename string) { case model.MissionCritical.String(): businessCriticality = model.MissionCritical default: - panic(errors.New("unknown 'business_criticality' value of application: " + modelInput.Business_criticality)) + panic(errors.New("unknown 'business_criticality' value of application: " + modelInput.BusinessCriticality)) } reportDate := time.Now() @@ -4057,21 +4085,21 @@ func parseModel(inputFilename string) { Author: modelInput.Author, Title: modelInput.Title, Date: reportDate, - ManagementSummaryComment: modelInput.Management_summary_comment, + ManagementSummaryComment: modelInput.ManagementSummaryComment, BusinessCriticality: businessCriticality, - BusinessOverview: removePathElementsFromImageFiles(modelInput.Business_overview), - TechnicalOverview: removePathElementsFromImageFiles(modelInput.Technical_overview), + BusinessOverview: removePathElementsFromImageFiles(modelInput.BusinessOverview), + TechnicalOverview: removePathElementsFromImageFiles(modelInput.TechnicalOverview), Questions: modelInput.Questions, - AbuseCases: modelInput.Abuse_cases, - SecurityRequirements: modelInput.Security_requirements, - TagsAvailable: lowerCaseAndTrim(modelInput.Tags_available), - DiagramTweakNodesep: modelInput.Diagram_tweak_nodesep, - DiagramTweakRanksep: modelInput.Diagram_tweak_ranksep, - DiagramTweakEdgeLayout: modelInput.Diagram_tweak_edge_layout, - DiagramTweakSuppressEdgeLabels: modelInput.Diagram_tweak_suppress_edge_labels, - DiagramTweakLayoutLeftToRight: modelInput.Diagram_tweak_layout_left_to_right, - 
DiagramTweakInvisibleConnectionsBetweenAssets: modelInput.Diagram_tweak_invisible_connections_between_assets, - DiagramTweakSameRankAssets: modelInput.Diagram_tweak_same_rank_assets, + AbuseCases: modelInput.AbuseCases, + SecurityRequirements: modelInput.SecurityRequirements, + TagsAvailable: lowerCaseAndTrim(modelInput.TagsAvailable), + DiagramTweakNodesep: modelInput.DiagramTweakNodesep, + DiagramTweakRanksep: modelInput.DiagramTweakRanksep, + DiagramTweakEdgeLayout: modelInput.DiagramTweakEdgeLayout, + DiagramTweakSuppressEdgeLabels: modelInput.DiagramTweakSuppressEdgeLabels, + DiagramTweakLayoutLeftToRight: modelInput.DiagramTweakLayoutLeftToRight, + DiagramTweakInvisibleConnectionsBetweenAssets: modelInput.DiagramTweakInvisibleConnectionsBetweenAssets, + DiagramTweakSameRankAssets: modelInput.DiagramTweakSameRankAssets, } if model.ParsedModelRoot.DiagramTweakNodesep == 0 { model.ParsedModelRoot.DiagramTweakNodesep = 2 @@ -4082,7 +4110,7 @@ func parseModel(inputFilename string) { // Data Assets =============================================================================== model.ParsedModelRoot.DataAssets = make(map[string]model.DataAsset) - for title, asset := range modelInput.Data_assets { + for title, asset := range modelInput.DataAssets { id := fmt.Sprintf("%v", asset.ID) var usage model.Usage @@ -4173,13 +4201,13 @@ func parseModel(inputFilename string) { Confidentiality: confidentiality, Integrity: integrity, Availability: availability, - JustificationCiaRating: fmt.Sprintf("%v", asset.Justification_cia_rating), + JustificationCiaRating: fmt.Sprintf("%v", asset.JustificationCiaRating), } } // Technical Assets =============================================================================== model.ParsedModelRoot.TechnicalAssets = make(map[string]model.TechnicalAsset) - for title, asset := range modelInput.Technical_assets { + for title, asset := range modelInput.TechnicalAssets { id := fmt.Sprintf("%v", asset.ID) var usage model.Usage @@ -4193,9 +4221,9 @@ 
func parseModel(inputFilename string) { } var dataAssetsProcessed = make([]string, 0) - if asset.Data_assets_processed != nil { - dataAssetsProcessed = make([]string, len(asset.Data_assets_processed)) - for i, parsedProcessedAsset := range asset.Data_assets_processed { + if asset.DataAssetsProcessed != nil { + dataAssetsProcessed = make([]string, len(asset.DataAssetsProcessed)) + for i, parsedProcessedAsset := range asset.DataAssetsProcessed { referencedAsset := fmt.Sprintf("%v", parsedProcessedAsset) checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") dataAssetsProcessed[i] = referencedAsset @@ -4203,9 +4231,9 @@ func parseModel(inputFilename string) { } var dataAssetsStored = make([]string, 0) - if asset.Data_assets_stored != nil { - dataAssetsStored = make([]string, len(asset.Data_assets_stored)) - for i, parsedStoredAssets := range asset.Data_assets_stored { + if asset.DataAssetsStored != nil { + dataAssetsStored = make([]string, len(asset.DataAssetsStored)) + for i, parsedStoredAssets := range asset.DataAssetsStored { referencedAsset := fmt.Sprintf("%v", parsedStoredAssets) checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") dataAssetsStored[i] = referencedAsset @@ -4368,8 +4396,8 @@ func parseModel(inputFilename string) { encryption = model.DataWithSymmetricSharedKey case model.DataWithAsymmetricSharedKey.String(): encryption = model.DataWithAsymmetricSharedKey - case model.DataWithEnduserIndividualKey.String(): - encryption = model.DataWithEnduserIndividualKey + case model.DataWithEndUserIndividualKey.String(): + encryption = model.DataWithEndUserIndividualKey default: panic(errors.New("unknown 'encryption' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Encryption))) } @@ -4437,8 +4465,8 @@ func parseModel(inputFilename string) { } dataFormatsAccepted := make([]model.DataFormat, 0) - if asset.Data_formats_accepted != nil { - for _, dataFormatName := range asset.Data_formats_accepted { + if 
asset.DataFormatsAccepted != nil { + for _, dataFormatName := range asset.DataFormatsAccepted { switch dataFormatName { case model.JSON.String(): dataFormatsAccepted = append(dataFormatsAccepted, model.JSON) @@ -4457,8 +4485,8 @@ func parseModel(inputFilename string) { } communicationLinks := make([]model.CommunicationLink, 0) - if asset.Communication_links != nil { - for commLinkTitle, commLink := range asset.Communication_links { + if asset.CommunicationLinks != nil { + for commLinkTitle, commLink := range asset.CommunicationLinks { constraint := true weight := 1 var protocol model.Protocol @@ -4492,8 +4520,8 @@ func parseModel(inputFilename string) { authorization = model.NoneAuthorization case model.TechnicalUser.String(): authorization = model.TechnicalUser - case model.EnduserIdentityPropagation.String(): - authorization = model.EnduserIdentityPropagation + case model.EndUserIdentityPropagation.String(): + authorization = model.EndUserIdentityPropagation default: panic(errors.New("unknown 'authorization' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Authorization))) } @@ -4522,44 +4550,44 @@ func parseModel(inputFilename string) { protocol = model.MQTT case model.JDBC.String(): protocol = model.JDBC - case model.JDBC_encrypted.String(): - protocol = model.JDBC_encrypted + case model.JdbcEncrypted.String(): + protocol = model.JdbcEncrypted case model.ODBC.String(): protocol = model.ODBC - case model.ODBC_encrypted.String(): - protocol = model.ODBC_encrypted - case model.SQL_access_protocol.String(): - protocol = model.SQL_access_protocol - case model.SQL_access_protocol_encrypted.String(): - protocol = model.SQL_access_protocol_encrypted - case model.NoSQL_access_protocol.String(): - protocol = model.NoSQL_access_protocol - case model.NoSQL_access_protocol_encrypted.String(): - protocol = model.NoSQL_access_protocol_encrypted + case model.OdbcEncrypted.String(): + protocol = 
model.OdbcEncrypted + case model.SqlAccessProtocol.String(): + protocol = model.SqlAccessProtocol + case model.SqlAccessProtocolEncrypted.String(): + protocol = model.SqlAccessProtocolEncrypted + case model.NosqlAccessProtocol.String(): + protocol = model.NosqlAccessProtocol + case model.NosqlAccessProtocolEncrypted.String(): + protocol = model.NosqlAccessProtocolEncrypted case model.TEXT.String(): protocol = model.TEXT - case model.TEXT_encrypted.String(): - protocol = model.TEXT_encrypted + case model.TextEncrypted.String(): + protocol = model.TextEncrypted case model.BINARY.String(): protocol = model.BINARY - case model.BINARY_encrypted.String(): - protocol = model.BINARY_encrypted + case model.BinaryEncrypted.String(): + protocol = model.BinaryEncrypted case model.SSH.String(): protocol = model.SSH - case model.SSH_tunnel.String(): - protocol = model.SSH_tunnel + case model.SshTunnel.String(): + protocol = model.SshTunnel case model.SMTP.String(): protocol = model.SMTP - case model.SMTP_encrypted.String(): - protocol = model.SMTP_encrypted + case model.SmtpEncrypted.String(): + protocol = model.SmtpEncrypted case model.POP3.String(): protocol = model.POP3 - case model.POP3_encrypted.String(): - protocol = model.POP3_encrypted + case model.Pop3Encrypted.String(): + protocol = model.Pop3Encrypted case model.IMAP.String(): protocol = model.IMAP - case model.IMAP_encrypted.String(): - protocol = model.IMAP_encrypted + case model.ImapEncrypted.String(): + protocol = model.ImapEncrypted case model.FTP.String(): protocol = model.FTP case model.FTPS.String(): @@ -4578,8 +4606,8 @@ func parseModel(inputFilename string) { protocol = model.NFS case model.SMB.String(): protocol = model.SMB - case model.SMB_encrypted.String(): - protocol = model.SMB_encrypted + case model.SmbEncrypted.String(): + protocol = model.SmbEncrypted case model.LocalFileAccess.String(): protocol = model.LocalFileAccess case model.NRPE.String(): @@ -4588,12 +4616,12 @@ func parseModel(inputFilename 
string) { protocol = model.XMPP case model.IIOP.String(): protocol = model.IIOP - case model.IIOP_encrypted.String(): - protocol = model.IIOP_encrypted + case model.IiopEncrypted.String(): + protocol = model.IiopEncrypted case model.JRMP.String(): protocol = model.JRMP - case model.JRMP_encrypted.String(): - protocol = model.JRMP_encrypted + case model.JrmpEncrypted.String(): + protocol = model.JrmpEncrypted case model.InProcessLibraryCall.String(): protocol = model.InProcessLibraryCall case model.ContainerSpawning.String(): @@ -4602,27 +4630,27 @@ func parseModel(inputFilename string) { panic(errors.New("unknown 'protocol' of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Protocol))) } - if commLink.Data_assets_sent != nil { - for _, dataAssetSent := range commLink.Data_assets_sent { + if commLink.DataAssetsSent != nil { + for _, dataAssetSent := range commLink.DataAssetsSent { referencedAsset := fmt.Sprintf("%v", dataAssetSent) checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") dataAssetsSent = append(dataAssetsSent, referencedAsset) } } - if commLink.Data_assets_received != nil { - for _, dataAssetReceived := range commLink.Data_assets_received { + if commLink.DataAssetsReceived != nil { + for _, dataAssetReceived := range commLink.DataAssetsReceived { referencedAsset := fmt.Sprintf("%v", dataAssetReceived) checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") dataAssetsReceived = append(dataAssetsReceived, referencedAsset) } } - if commLink.Diagram_tweak_weight > 0 { - weight = commLink.Diagram_tweak_weight + if commLink.DiagramTweakWeight > 0 { + weight = commLink.DiagramTweakWeight } - constraint = !commLink.Diagram_tweak_constraint + constraint = !commLink.DiagramTweakConstraint checkErr(err) @@ -4639,7 +4667,7 @@ func parseModel(inputFilename string) { Usage: usage, Tags: 
checkTags(lowerCaseAndTrim(commLink.Tags), "communication link '"+commLinkTitle+"' of technical asset '"+title+"'"), VPN: commLink.VPN, - IpFiltered: commLink.IP_filtered, + IpFiltered: commLink.IpFiltered, Readonly: commLink.Readonly, DataAssetsSent: dataAssetsSent, DataAssetsReceived: dataAssetsReceived, @@ -4649,7 +4677,7 @@ func parseModel(inputFilename string) { communicationLinks = append(communicationLinks, commLink) // track all comm links model.CommunicationLinks[commLink.Id] = commLink - // keep track of map of *all* comm links mapped by target-id (to be able to lookup "who is calling me" kind of things) + // keep track of map of *all* comm links mapped by target-id (to be able to look up "who is calling me" kind of things) model.IncomingTechnicalCommunicationLinksMappedByTargetId[commLink.TargetId] = append( model.IncomingTechnicalCommunicationLinksMappedByTargetId[commLink.TargetId], commLink) } @@ -4671,34 +4699,34 @@ func parseModel(inputFilename string) { Machine: technicalAssetMachine, Internet: asset.Internet, Encryption: encryption, - MultiTenant: asset.Multi_tenant, + MultiTenant: asset.MultiTenant, Redundant: asset.Redundant, - CustomDevelopedParts: asset.Custom_developed_parts, - UsedAsClientByHuman: asset.Used_as_client_by_human, - OutOfScope: asset.Out_of_scope, - JustificationOutOfScope: fmt.Sprintf("%v", asset.Justification_out_of_scope), + CustomDevelopedParts: asset.CustomDevelopedParts, + UsedAsClientByHuman: asset.UsedAsClientByHuman, + OutOfScope: asset.OutOfScope, + JustificationOutOfScope: fmt.Sprintf("%v", asset.JustificationOutOfScope), Owner: fmt.Sprintf("%v", asset.Owner), Confidentiality: confidentiality, Integrity: integrity, Availability: availability, - JustificationCiaRating: fmt.Sprintf("%v", asset.Justification_cia_rating), + JustificationCiaRating: fmt.Sprintf("%v", asset.JustificationCiaRating), DataAssetsProcessed: dataAssetsProcessed, DataAssetsStored: dataAssetsStored, DataFormatsAccepted: dataFormatsAccepted, 
CommunicationLinks: communicationLinks, - DiagramTweakOrder: asset.Diagram_tweak_order, + DiagramTweakOrder: asset.DiagramTweakOrder, } } // Trust Boundaries =============================================================================== checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries := make(map[string]bool) model.ParsedModelRoot.TrustBoundaries = make(map[string]model.TrustBoundary) - for title, boundary := range modelInput.Trust_boundaries { + for title, boundary := range modelInput.TrustBoundaries { id := fmt.Sprintf("%v", boundary.ID) var technicalAssetsInside = make([]string, 0) - if boundary.Technical_assets_inside != nil { - parsedInsideAssets := boundary.Technical_assets_inside + if boundary.TechnicalAssetsInside != nil { + parsedInsideAssets := boundary.TechnicalAssetsInside technicalAssetsInside = make([]string, len(parsedInsideAssets)) for i, parsedInsideAsset := range parsedInsideAssets { technicalAssetsInside[i] = fmt.Sprintf("%v", parsedInsideAsset) @@ -4715,8 +4743,8 @@ func parseModel(inputFilename string) { } var trustBoundariesNested = make([]string, 0) - if boundary.Trust_boundaries_nested != nil { - parsedNestedBoundaries := boundary.Trust_boundaries_nested + if boundary.TrustBoundariesNested != nil { + parsedNestedBoundaries := boundary.TrustBoundariesNested trustBoundariesNested = make([]string, len(parsedNestedBoundaries)) for i, parsedNestedBoundary := range parsedNestedBoundaries { trustBoundariesNested[i] = fmt.Sprintf("%v", parsedNestedBoundary) @@ -4766,12 +4794,12 @@ func parseModel(inputFilename string) { // Shared Runtime =============================================================================== model.ParsedModelRoot.SharedRuntimes = make(map[string]model.SharedRuntime) - for title, runtime := range modelInput.Shared_runtimes { + for title, runtime := range modelInput.SharedRuntimes { id := fmt.Sprintf("%v", runtime.ID) var technicalAssetsRunning = make([]string, 0) - if runtime.Technical_assets_running != nil { - 
parsedRunningAssets := runtime.Technical_assets_running + if runtime.TechnicalAssetsRunning != nil { + parsedRunningAssets := runtime.TechnicalAssetsRunning technicalAssetsRunning = make([]string, len(parsedRunningAssets)) for i, parsedRunningAsset := range parsedRunningAssets { assetId := fmt.Sprintf("%v", parsedRunningAsset) @@ -4784,7 +4812,7 @@ func parseModel(inputFilename string) { Id: id, Title: title, //fmt.Sprintf("%v", boundary["title"]), Description: withDefault(fmt.Sprintf("%v", runtime.Description), title), - Tags: checkTags((runtime.Tags), "shared runtime '"+title+"'"), + Tags: checkTags(runtime.Tags, "shared runtime '"+title+"'"), TechnicalAssetsRunning: technicalAssetsRunning, } checkIdSyntax(id) @@ -4799,11 +4827,11 @@ func parseModel(inputFilename string) { // Individual Risk Categories (just used as regular risk categories) =============================================================================== model.ParsedModelRoot.IndividualRiskCategories = make(map[string]model.RiskCategory) - for title, indivCat := range modelInput.Individual_risk_categories { - id := fmt.Sprintf("%v", indivCat.ID) + for title, individualCategory := range modelInput.IndividualRiskCategories { + id := fmt.Sprintf("%v", individualCategory.ID) var function model.RiskFunction - switch indivCat.Function { + switch individualCategory.Function { case model.BusinessSide.String(): function = model.BusinessSide case model.Architecture.String(): @@ -4813,11 +4841,11 @@ func parseModel(inputFilename string) { case model.Operations.String(): function = model.Operations default: - panic(errors.New("unknown 'function' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", indivCat.Function))) + panic(errors.New("unknown 'function' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.Function))) } var stride model.STRIDE - switch indivCat.STRIDE { + switch individualCategory.STRIDE { case model.Spoofing.String(): stride = 
model.Spoofing case model.Tampering.String(): @@ -4831,26 +4859,26 @@ func parseModel(inputFilename string) { case model.ElevationOfPrivilege.String(): stride = model.ElevationOfPrivilege default: - panic(errors.New("unknown 'stride' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", indivCat.STRIDE))) + panic(errors.New("unknown 'stride' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.STRIDE))) } cat := model.RiskCategory{ Id: id, Title: title, - Description: withDefault(fmt.Sprintf("%v", indivCat.Description), title), - Impact: fmt.Sprintf("%v", indivCat.Impact), - ASVS: fmt.Sprintf("%v", indivCat.ASVS), - CheatSheet: fmt.Sprintf("%v", indivCat.Cheat_sheet), - Action: fmt.Sprintf("%v", indivCat.Action), - Mitigation: fmt.Sprintf("%v", indivCat.Mitigation), - Check: fmt.Sprintf("%v", indivCat.Check), - DetectionLogic: fmt.Sprintf("%v", indivCat.Detection_logic), - RiskAssessment: fmt.Sprintf("%v", indivCat.Risk_assessment), - FalsePositives: fmt.Sprintf("%v", indivCat.False_positives), + Description: withDefault(fmt.Sprintf("%v", individualCategory.Description), title), + Impact: fmt.Sprintf("%v", individualCategory.Impact), + ASVS: fmt.Sprintf("%v", individualCategory.ASVS), + CheatSheet: fmt.Sprintf("%v", individualCategory.CheatSheet), + Action: fmt.Sprintf("%v", individualCategory.Action), + Mitigation: fmt.Sprintf("%v", individualCategory.Mitigation), + Check: fmt.Sprintf("%v", individualCategory.Check), + DetectionLogic: fmt.Sprintf("%v", individualCategory.DetectionLogic), + RiskAssessment: fmt.Sprintf("%v", individualCategory.RiskAssessment), + FalsePositives: fmt.Sprintf("%v", individualCategory.FalsePositives), Function: function, STRIDE: stride, - ModelFailurePossibleReason: indivCat.Model_failure_possible_reason, - CWE: indivCat.CWE, + ModelFailurePossibleReason: individualCategory.ModelFailurePossibleReason, + CWE: individualCategory.CWE, } checkIdSyntax(id) if _, exists := 
model.ParsedModelRoot.IndividualRiskCategories[id]; exists { @@ -4860,8 +4888,8 @@ func parseModel(inputFilename string) { // NOW THE INDIVIDUAL RISK INSTANCES: //individualRiskInstances := make([]model.Risk, 0) - if indivCat.Risks_identified != nil { // TODO: also add syntax checks of input YAML when linked asset is not found or when syntehtic-id is already used... - for title, indivRiskInstance := range indivCat.Risks_identified { + if individualCategory.RisksIdentified != nil { // TODO: also add syntax checks of input YAML when linked asset is not found or when synthetic-id is already used... + for title, individualRiskInstance := range individualCategory.RisksIdentified { var severity model.RiskSeverity var exploitationLikelihood model.RiskExploitationLikelihood var exploitationImpact model.RiskExploitationImpact @@ -4869,7 +4897,7 @@ func parseModel(inputFilename string) { var dataBreachProbability model.DataBreachProbability var dataBreachTechnicalAssetIDs []string - switch indivRiskInstance.Severity { + switch individualRiskInstance.Severity { case model.LowSeverity.String(): severity = model.LowSeverity case model.MediumSeverity.String(): @@ -4883,10 +4911,10 @@ func parseModel(inputFilename string) { case "": // added default severity = model.MediumSeverity default: - panic(errors.New("unknown 'severity' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", indivRiskInstance.Severity))) + panic(errors.New("unknown 'severity' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.Severity))) } - switch indivRiskInstance.Exploitation_likelihood { + switch individualRiskInstance.ExploitationLikelihood { case model.Unlikely.String(): exploitationLikelihood = model.Unlikely case model.Likely.String(): @@ -4898,10 +4926,10 @@ func parseModel(inputFilename string) { case "": // added default exploitationLikelihood = model.Likely default: - panic(errors.New("unknown 'exploitation_likelihood' value of 
individual risk instance '" + title + "': " + fmt.Sprintf("%v", indivRiskInstance.Exploitation_likelihood))) + panic(errors.New("unknown 'exploitation_likelihood' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationLikelihood))) } - switch indivRiskInstance.Exploitation_impact { + switch individualRiskInstance.ExploitationImpact { case model.LowImpact.String(): exploitationImpact = model.LowImpact case model.MediumImpact.String(): @@ -4913,35 +4941,35 @@ func parseModel(inputFilename string) { case "": // added default exploitationImpact = model.MediumImpact default: - panic(errors.New("unknown 'exploitation_impact' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", indivRiskInstance.Exploitation_impact))) + panic(errors.New("unknown 'exploitation_impact' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationImpact))) } - if len(indivRiskInstance.Most_relevant_data_asset) > 0 { - mostRelevantDataAssetId = fmt.Sprintf("%v", indivRiskInstance.Most_relevant_data_asset) + if len(individualRiskInstance.MostRelevantDataAsset) > 0 { + mostRelevantDataAssetId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantDataAsset) checkDataAssetTargetExists(mostRelevantDataAssetId, "individual risk '"+title+"'") } - if len(indivRiskInstance.Most_relevant_technical_asset) > 0 { - mostRelevantTechnicalAssetId = fmt.Sprintf("%v", indivRiskInstance.Most_relevant_technical_asset) + if len(individualRiskInstance.MostRelevantTechnicalAsset) > 0 { + mostRelevantTechnicalAssetId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantTechnicalAsset) checkTechnicalAssetExists(mostRelevantTechnicalAssetId, "individual risk '"+title+"'", false) } - if len(indivRiskInstance.Most_relevant_communication_link) > 0 { - mostRelevantCommunicationLinkId = fmt.Sprintf("%v", indivRiskInstance.Most_relevant_communication_link) + if 
len(individualRiskInstance.MostRelevantCommunicationLink) > 0 { + mostRelevantCommunicationLinkId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantCommunicationLink) checkCommunicationLinkExists(mostRelevantCommunicationLinkId, "individual risk '"+title+"'") } - if len(indivRiskInstance.Most_relevant_trust_boundary) > 0 { - mostRelevantTrustBoundaryId = fmt.Sprintf("%v", indivRiskInstance.Most_relevant_trust_boundary) + if len(individualRiskInstance.MostRelevantTrustBoundary) > 0 { + mostRelevantTrustBoundaryId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantTrustBoundary) checkTrustBoundaryExists(mostRelevantTrustBoundaryId, "individual risk '"+title+"'") } - if len(indivRiskInstance.Most_relevant_shared_runtime) > 0 { - mostRelevantSharedRuntimeId = fmt.Sprintf("%v", indivRiskInstance.Most_relevant_shared_runtime) + if len(individualRiskInstance.MostRelevantSharedRuntime) > 0 { + mostRelevantSharedRuntimeId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantSharedRuntime) checkSharedRuntimeExists(mostRelevantSharedRuntimeId, "individual risk '"+title+"'") } - switch indivRiskInstance.Data_breach_probability { + switch individualRiskInstance.DataBreachProbability { case model.Improbable.String(): dataBreachProbability = model.Improbable case model.Possible.String(): @@ -4951,12 +4979,12 @@ func parseModel(inputFilename string) { case "": // added default dataBreachProbability = model.Possible default: - panic(errors.New("unknown 'data_breach_probability' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", indivRiskInstance.Data_breach_probability))) + panic(errors.New("unknown 'data_breach_probability' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.DataBreachProbability))) } - if indivRiskInstance.Data_breach_technical_assets != nil { - dataBreachTechnicalAssetIDs = make([]string, len(indivRiskInstance.Data_breach_technical_assets)) - for i, parsedReferencedAsset := range 
indivRiskInstance.Data_breach_technical_assets { + if individualRiskInstance.DataBreachTechnicalAssets != nil { + dataBreachTechnicalAssetIDs = make([]string, len(individualRiskInstance.DataBreachTechnicalAssets)) + for i, parsedReferencedAsset := range individualRiskInstance.DataBreachTechnicalAssets { assetId := fmt.Sprintf("%v", parsedReferencedAsset) checkTechnicalAssetExists(assetId, "data breach technical assets of individual risk '"+title+"'", false) dataBreachTechnicalAssetIDs[i] = assetId @@ -4965,7 +4993,7 @@ func parseModel(inputFilename string) { checkErr(err) - indivRiskInstance := model.Risk{ + individualRiskInstance := model.Risk{ SyntheticId: createSyntheticId(cat.Id, mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId), Title: fmt.Sprintf("%v", title), Category: cat, @@ -4980,16 +5008,16 @@ func parseModel(inputFilename string) { DataBreachProbability: dataBreachProbability, DataBreachTechnicalAssetIDs: dataBreachTechnicalAssetIDs, } - model.GeneratedRisksByCategory[cat] = append(model.GeneratedRisksByCategory[cat], indivRiskInstance) + model.GeneratedRisksByCategory[cat] = append(model.GeneratedRisksByCategory[cat], individualRiskInstance) } } } // Risk Tracking =============================================================================== model.ParsedModelRoot.RiskTracking = make(map[string]model.RiskTracking) - for syntheticRiskId, riskTracking := range modelInput.Risk_tracking { + for syntheticRiskId, riskTracking := range modelInput.RiskTracking { justification := fmt.Sprintf("%v", riskTracking.Justification) - checkedBy := fmt.Sprintf("%v", riskTracking.Checked_by) + checkedBy := fmt.Sprintf("%v", riskTracking.CheckedBy) ticket := fmt.Sprintf("%v", riskTracking.Ticket) var date time.Time if len(riskTracking.Date) > 0 { @@ -5065,7 +5093,7 @@ func checkTags(tags []string, where string) []string { // in order to prevent Path-Traversal like stuff... 
func removePathElementsFromImageFiles(overview model.Overview) model.Overview { - for i, _ := range overview.Images { + for i := range overview.Images { newValue := make(map[string]string) for file, desc := range overview.Images[i] { newValue[filepath.Base(file)] = desc @@ -5082,8 +5110,8 @@ func applyWildcardRiskTrackingEvaluation() { for syntheticRiskIdPattern, riskTracking := range deferredRiskTrackingDueToWildcardMatching { foundSome := false var matchingRiskIdExpression = regexp.MustCompile(strings.ReplaceAll(regexp.QuoteMeta(syntheticRiskIdPattern), `\*`, `[^@]+`)) - for syntheticRiskId, _ := range model.GeneratedRisksBySyntheticId { - if matchingRiskIdExpression.Match([]byte(syntheticRiskId)) && hasNotYetAnyDirectNonWildcardRiskTrackings(syntheticRiskId) { + for syntheticRiskId := range model.GeneratedRisksBySyntheticId { + if matchingRiskIdExpression.Match([]byte(syntheticRiskId)) && hasNotYetAnyDirectNonWildcardRiskTracking(syntheticRiskId) { foundSome = true model.ParsedModelRoot.RiskTracking[syntheticRiskId] = model.RiskTracking{ SyntheticRiskId: strings.TrimSpace(syntheticRiskId), @@ -5105,7 +5133,7 @@ func applyWildcardRiskTrackingEvaluation() { } } -func hasNotYetAnyDirectNonWildcardRiskTrackings(syntheticRiskId string) bool { +func hasNotYetAnyDirectNonWildcardRiskTracking(syntheticRiskId string) bool { if _, ok := model.ParsedModelRoot.RiskTracking[syntheticRiskId]; ok { return false } @@ -5199,7 +5227,7 @@ func checkNestedTrustBoundariesExisting() { func hash(s string) string { h := fnv.New32a() - h.Write([]byte(s)) + _, _ = h.Write([]byte(s)) return fmt.Sprintf("%v", h.Sum32()) } @@ -5283,7 +5311,7 @@ func writeDataAssetDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fi // Write the DOT file file, err := os.Create(diagramFilenameDOT) checkErr(err) - defer file.Close() + defer func() { _ = file.Close() }() _, err = fmt.Fprintln(file, dotContent.String()) checkErr(err) return file @@ -5363,7 +5391,7 @@ func 
writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: keys := make([]string, 0) - for k, _ := range model.ParsedModelRoot.TrustBoundaries { + for k := range model.ParsedModelRoot.TrustBoundaries { keys = append(keys, k) } sort.Strings(keys) @@ -5388,10 +5416,10 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil } snippet.WriteString("\n subgraph cluster_" + hash(trustBoundary.Id) + " {\n") color, fontColor, bgColor, style, fontname := colors.RgbHexColorTwilight(), colors.RgbHexColorTwilight() /*"#550E0C"*/, "#FAFAFA", "dashed", "Verdana" - penwidth := 4.5 + penWidth := 4.5 if len(trustBoundary.TrustBoundariesNested) > 0 { //color, fontColor, style, fontname = colors.Blue, colors.Blue, "dashed", "Verdana" - penwidth = 5.5 + penWidth = 5.5 } if len(trustBoundary.ParentTrustBoundaryID()) > 0 { bgColor = "#F1F1F1" @@ -5411,7 +5439,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil bgcolor="` + bgColor + `" fontcolor="` + fontColor + `" fontname="` + fontname + `" - penwidth="` + fmt.Sprintf("%f", penwidth) + `" + penwidth="` + fmt.Sprintf("%f", penWidth) + `" forcelabels=true outputorder="nodesfirst" margin="50.0" @@ -5441,7 +5469,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil subgraphSnippetsById[hash(trustBoundary.Id)] = snippet.String() } // here replace links and remove from map after replacement (i.e. 
move snippet into nested) - for i, _ := range subgraphSnippetsById { + for i := range subgraphSnippetsById { re := regexp.MustCompile(`LINK-NEEDS-REPLACED-BY-cluster_([0-9]*);`) for { matches := re.FindStringSubmatch(subgraphSnippetsById[i]) @@ -5456,7 +5484,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil } // now write them all keys = make([]string, 0) - for k, _ := range subgraphSnippetsById { + for k := range subgraphSnippetsById { keys = append(keys, k) } sort.Strings(keys) @@ -5469,7 +5497,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: // Convert map to slice of values: - techAssets := []model.TechnicalAsset{} + var techAssets []model.TechnicalAsset for _, techAsset := range model.ParsedModelRoot.TechnicalAssets { techAssets = append(techAssets, techAsset) } @@ -5495,7 +5523,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil } dir := "forward" if dataFlow.IsBidirectional() { - if !suppressBidirectionalArrows { // as it does not work as bug in grahviz with ortho: https://gitlab.com/graphviz/graphviz/issues/144 + if !suppressBidirectionalArrows { // as it does not work as bug in graphviz with ortho: https://gitlab.com/graphviz/graphviz/issues/144 dir = "both" } } @@ -5526,7 +5554,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil // Write the DOT file file, err := os.Create(diagramFilenameDOT) checkErr(err) - defer file.Close() + defer func() { _ = file.Close() }() _, err = fmt.Fprintln(file, dotContent.String()) checkErr(err) return file @@ -5668,21 +5696,21 @@ func renderDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string) { fmt.Println("Rendering data flow diagram input") } // tmp files - tmpFileDOT, err := 
ioutil.TempFile(model.TempFolder, "diagram-*-.gv") + tmpFileDOT, err := os.CreateTemp(tempFolder, "diagram-*-.gv") checkErr(err) - defer os.Remove(tmpFileDOT.Name()) + defer func() { _ = os.Remove(tmpFileDOT.Name()) }() - tmpFilePNG, err := ioutil.TempFile(model.TempFolder, "diagram-*-.png") + tmpFilePNG, err := os.CreateTemp(tempFolder, "diagram-*-.png") checkErr(err) - defer os.Remove(tmpFilePNG.Name()) + defer func() { _ = os.Remove(tmpFilePNG.Name()) }() // copy into tmp file as input - input, err := ioutil.ReadFile(dotFile.Name()) + input, err := os.ReadFile(dotFile.Name()) if err != nil { fmt.Println(err) return } - err = ioutil.WriteFile(tmpFileDOT.Name(), input, 0644) + err = os.WriteFile(tmpFileDOT.Name(), input, 0644) if err != nil { fmt.Println("Error creating", tmpFileDOT.Name()) fmt.Println(err) @@ -5698,12 +5726,12 @@ func renderDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string) { panic(errors.New("graph rendering call failed with error:" + err.Error())) } // copy into resulting file - input, err = ioutil.ReadFile(tmpFilePNG.Name()) + input, err = os.ReadFile(tmpFilePNG.Name()) if err != nil { fmt.Println(err) return } - err = ioutil.WriteFile(targetDir+"/"+dataFlowDiagramFilenamePNG, input, 0644) + err = os.WriteFile(filepath.Join(targetDir, dataFlowDiagramFilenamePNG), input, 0644) if err != nil { fmt.Println("Error creating", dataFlowDiagramFilenamePNG) fmt.Println(err) @@ -5716,21 +5744,21 @@ func renderDataAssetDiagramGraphvizImage(dotFile *os.File, targetDir string) { / fmt.Println("Rendering data asset diagram input") } // tmp files - tmpFileDOT, err := ioutil.TempFile(model.TempFolder, "diagram-*-.gv") + tmpFileDOT, err := os.CreateTemp(tempFolder, "diagram-*-.gv") checkErr(err) - defer os.Remove(tmpFileDOT.Name()) + defer func() { _ = os.Remove(tmpFileDOT.Name()) }() - tmpFilePNG, err := ioutil.TempFile(model.TempFolder, "diagram-*-.png") + tmpFilePNG, err := os.CreateTemp(tempFolder, "diagram-*-.png") checkErr(err) - defer 
os.Remove(tmpFilePNG.Name()) + defer func() { _ = os.Remove(tmpFilePNG.Name()) }() // copy into tmp file as input - input, err := ioutil.ReadFile(dotFile.Name()) + input, err := os.ReadFile(dotFile.Name()) if err != nil { fmt.Println(err) return } - err = ioutil.WriteFile(tmpFileDOT.Name(), input, 0644) + err = os.WriteFile(tmpFileDOT.Name(), input, 0644) if err != nil { fmt.Println("Error creating", tmpFileDOT.Name()) fmt.Println(err) @@ -5746,12 +5774,12 @@ func renderDataAssetDiagramGraphvizImage(dotFile *os.File, targetDir string) { / panic(errors.New("graph rendering call failed with error: " + err.Error())) } // copy into resulting file - input, err = ioutil.ReadFile(tmpFilePNG.Name()) + input, err = os.ReadFile(tmpFilePNG.Name()) if err != nil { fmt.Println(err) return } - err = ioutil.WriteFile(targetDir+"/"+dataAssetDiagramFilenamePNG, input, 0644) + err = os.WriteFile(filepath.Join(targetDir, dataAssetDiagramFilenamePNG), input, 0644) if err != nil { fmt.Println("Error creating", dataAssetDiagramFilenamePNG) fmt.Println(err) diff --git a/model/types.go b/model/types.go index d22c16db..c536a358 100644 --- a/model/types.go +++ b/model/types.go @@ -12,7 +12,6 @@ import ( ) const ThreagileVersion = "1.0.0" // Also update into example and stub model files and openapi.yaml -const TempFolder = "/dev/shm" // TODO: make configurable via cmdline arg? 
var ParsedModelRoot ParsedModel @@ -26,14 +25,24 @@ var GeneratedRisksBySyntheticId map[string]Risk var AllSupportedTags map[string]bool +var ( + _ = ParseEncryptionStyle + _ = SortedKeysOfDataAssets + _ = SortedKeysOfTechnicalAssets + _ = SortedDataAssetsByDataBreachProbabilityAndTitleStillAtRisk + _ = ReduceToOnlyHighRisk + _ = ReduceToOnlyMediumRisk + _ = ReduceToOnlyLowRisk +) + func Init() { - CommunicationLinks = make(map[string]CommunicationLink, 0) - IncomingTechnicalCommunicationLinksMappedByTargetId = make(map[string][]CommunicationLink, 0) - DirectContainingTrustBoundaryMappedByTechnicalAssetId = make(map[string]TrustBoundary, 0) - DirectContainingSharedRuntimeMappedByTechnicalAssetId = make(map[string]SharedRuntime, 0) - GeneratedRisksByCategory = make(map[RiskCategory][]Risk, 0) - GeneratedRisksBySyntheticId = make(map[string]Risk, 0) - AllSupportedTags = make(map[string]bool, 0) + CommunicationLinks = make(map[string]CommunicationLink) + IncomingTechnicalCommunicationLinksMappedByTargetId = make(map[string][]CommunicationLink) + DirectContainingTrustBoundaryMappedByTechnicalAssetId = make(map[string]TrustBoundary) + DirectContainingSharedRuntimeMappedByTechnicalAssetId = make(map[string]SharedRuntime) + GeneratedRisksByCategory = make(map[RiskCategory][]Risk) + GeneratedRisksBySyntheticId = make(map[string]Risk) + AllSupportedTags = make(map[string]bool) } func AddToListOfSupportedTags(tags []string) { @@ -52,10 +61,10 @@ type CustomRiskRule interface { func AddTagToModelInput(modelInput *ModelInput, tag string, dryRun bool, changes *[]string) { tag = NormalizeTag(tag) - if !Contains(modelInput.Tags_available, tag) { + if !Contains(modelInput.TagsAvailable, tag) { *changes = append(*changes, "adding tag: "+tag) if !dryRun { - modelInput.Tags_available = append(modelInput.Tags_available, tag) + modelInput.TagsAvailable = append(modelInput.TagsAvailable, tag) } } } @@ -72,138 +81,138 @@ func MakeID(val string) string { // === Model Type Stuff 
====================================== type ModelInput struct { // TODO: Eventually remove this and directly use ParsedModelRoot? But then the error messages for model errors are not quite as good anymore... - Threagile_version string - Title string - Author Author - Date string - Business_overview Overview - Technical_overview Overview - Business_criticality string - Management_summary_comment string - Questions map[string]string - Abuse_cases map[string]string - Security_requirements map[string]string - Tags_available []string - Data_assets map[string]InputDataAsset - Technical_assets map[string]InputTechnicalAsset - Trust_boundaries map[string]InputTrustBoundary - Shared_runtimes map[string]InputSharedRuntime - Individual_risk_categories map[string]InputIndividualRiskCategory - Risk_tracking map[string]InputRiskTracking - Diagram_tweak_nodesep, Diagram_tweak_ranksep int - Diagram_tweak_edge_layout string - Diagram_tweak_suppress_edge_labels bool - Diagram_tweak_layout_left_to_right bool - Diagram_tweak_invisible_connections_between_assets []string - Diagram_tweak_same_rank_assets []string + ThreagileVersion string + Title string + Author Author + Date string + BusinessOverview Overview + TechnicalOverview Overview + BusinessCriticality string + ManagementSummaryComment string + Questions map[string]string + AbuseCases map[string]string + SecurityRequirements map[string]string + TagsAvailable []string + DataAssets map[string]InputDataAsset + TechnicalAssets map[string]InputTechnicalAsset + TrustBoundaries map[string]InputTrustBoundary + SharedRuntimes map[string]InputSharedRuntime + IndividualRiskCategories map[string]InputIndividualRiskCategory + RiskTracking map[string]InputRiskTracking + DiagramTweakNodesep, DiagramTweakRanksep int + DiagramTweakEdgeLayout string + DiagramTweakSuppressEdgeLabels bool + DiagramTweakLayoutLeftToRight bool + DiagramTweakInvisibleConnectionsBetweenAssets []string + DiagramTweakSameRankAssets []string } type InputDataAsset struct { 
- ID string `json:"id"` - Description string `json:"description"` - Usage string `json:"usage"` - Tags []string `json:"tags"` - Origin string `json:"origin"` - Owner string `json:"owner"` - Quantity string `json:"quantity"` - Confidentiality string `json:"confidentiality"` - Integrity string `json:"integrity"` - Availability string `json:"availability"` - Justification_cia_rating string `json:"justification_cia_rating"` + ID string `json:"id"` + Description string `json:"description"` + Usage string `json:"usage"` + Tags []string `json:"tags"` + Origin string `json:"origin"` + Owner string `json:"owner"` + Quantity string `json:"quantity"` + Confidentiality string `json:"confidentiality"` + Integrity string `json:"integrity"` + Availability string `json:"availability"` + JustificationCiaRating string `json:"justification_cia_rating"` } type InputTechnicalAsset struct { - ID string `json:"id"` - Description string `json:"description"` - Type string `json:"type"` - Usage string `json:"usage"` - Used_as_client_by_human bool `json:"used_as_client_by_human"` - Out_of_scope bool `json:"out_of_scope"` - Justification_out_of_scope string `json:"justification_out_of_scope"` - Size string `json:"size"` - Technology string `json:"technology"` - Tags []string `json:"tags"` - Internet bool `json:"internet"` - Machine string `json:"machine"` - Encryption string `json:"encryption"` - Owner string `json:"owner"` - Confidentiality string `json:"confidentiality"` - Integrity string `json:"integrity"` - Availability string `json:"availability"` - Justification_cia_rating string `json:"justification_cia_rating"` - Multi_tenant bool `json:"multi_tenant"` - Redundant bool `json:"redundant"` - Custom_developed_parts bool `json:"custom_developed_parts"` - Data_assets_processed []string `json:"data_assets_processed"` - Data_assets_stored []string `json:"data_assets_stored"` - Data_formats_accepted []string `json:"data_formats_accepted"` - Diagram_tweak_order int 
`json:"diagram_tweak_order"` - Communication_links map[string]InputCommunicationLink `json:"communication_links"` + ID string `json:"id"` + Description string `json:"description"` + Type string `json:"type"` + Usage string `json:"usage"` + UsedAsClientByHuman bool `json:"used_as_client_by_human"` + OutOfScope bool `json:"out_of_scope"` + JustificationOutOfScope string `json:"justification_out_of_scope"` + Size string `json:"size"` + Technology string `json:"technology"` + Tags []string `json:"tags"` + Internet bool `json:"internet"` + Machine string `json:"machine"` + Encryption string `json:"encryption"` + Owner string `json:"owner"` + Confidentiality string `json:"confidentiality"` + Integrity string `json:"integrity"` + Availability string `json:"availability"` + JustificationCiaRating string `json:"justification_cia_rating"` + MultiTenant bool `json:"multi_tenant"` + Redundant bool `json:"redundant"` + CustomDevelopedParts bool `json:"custom_developed_parts"` + DataAssetsProcessed []string `json:"data_assets_processed"` + DataAssetsStored []string `json:"data_assets_stored"` + DataFormatsAccepted []string `json:"data_formats_accepted"` + DiagramTweakOrder int `json:"diagram_tweak_order"` + CommunicationLinks map[string]InputCommunicationLink `json:"communication_links"` } type InputCommunicationLink struct { - Target string `json:"target"` - Description string `json:"description"` - Protocol string `json:"protocol"` - Authentication string `json:"authentication"` - Authorization string `json:"authorization"` - Tags []string `json:"tags"` - VPN bool `json:"vpn"` - IP_filtered bool `json:"ip_filtered"` - Readonly bool `json:"readonly"` - Usage string `json:"usage"` - Data_assets_sent []string `json:"data_assets_sent"` - Data_assets_received []string `json:"data_assets_received"` - Diagram_tweak_weight int `json:"diagram_tweak_weight"` - Diagram_tweak_constraint bool `json:"diagram_tweak_constraint"` + Target string `json:"target"` + Description string 
`json:"description"` + Protocol string `json:"protocol"` + Authentication string `json:"authentication"` + Authorization string `json:"authorization"` + Tags []string `json:"tags"` + VPN bool `json:"vpn"` + IpFiltered bool `json:"ip_filtered"` + Readonly bool `json:"readonly"` + Usage string `json:"usage"` + DataAssetsSent []string `json:"data_assets_sent"` + DataAssetsReceived []string `json:"data_assets_received"` + DiagramTweakWeight int `json:"diagram_tweak_weight"` + DiagramTweakConstraint bool `json:"diagram_tweak_constraint"` } type InputSharedRuntime struct { - ID string `json:"id"` - Description string `json:"description"` - Tags []string `json:"tags"` - Technical_assets_running []string `json:"technical_assets_running"` + ID string `json:"id"` + Description string `json:"description"` + Tags []string `json:"tags"` + TechnicalAssetsRunning []string `json:"technical_assets_running"` } type InputTrustBoundary struct { - ID string `json:"id"` - Description string `json:"description"` - Type string `json:"type"` - Tags []string `json:"tags"` - Technical_assets_inside []string `json:"technical_assets_inside"` - Trust_boundaries_nested []string `json:"trust_boundaries_nested"` + ID string `json:"id"` + Description string `json:"description"` + Type string `json:"type"` + Tags []string `json:"tags"` + TechnicalAssetsInside []string `json:"technical_assets_inside"` + TrustBoundariesNested []string `json:"trust_boundaries_nested"` } type InputIndividualRiskCategory struct { - ID string `json:"id"` - Description string `json:"description"` - Impact string `json:"impact"` - ASVS string `json:"asvs"` - Cheat_sheet string `json:"cheat_sheet"` - Action string `json:"action"` - Mitigation string `json:"mitigation"` - Check string `json:"check"` - Function string `json:"function"` - STRIDE string `json:"stride"` - Detection_logic string `json:"detection_logic"` - Risk_assessment string `json:"risk_assessment"` - False_positives string `json:"false_positives"` - 
Model_failure_possible_reason bool `json:"model_failure_possible_reason"` - CWE int `json:"cwe"` - Risks_identified map[string]InputRiskIdentified `json:"risks_identified"` + ID string `json:"id"` + Description string `json:"description"` + Impact string `json:"impact"` + ASVS string `json:"asvs"` + CheatSheet string `json:"cheat_sheet"` + Action string `json:"action"` + Mitigation string `json:"mitigation"` + Check string `json:"check"` + Function string `json:"function"` + STRIDE string `json:"stride"` + DetectionLogic string `json:"detection_logic"` + RiskAssessment string `json:"risk_assessment"` + FalsePositives string `json:"false_positives"` + ModelFailurePossibleReason bool `json:"model_failure_possible_reason"` + CWE int `json:"cwe"` + RisksIdentified map[string]InputRiskIdentified `json:"risks_identified"` } type InputRiskIdentified struct { - Severity string `json:"severity"` - Exploitation_likelihood string `json:"exploitation_likelihood"` - Exploitation_impact string `json:"exploitation_impact"` - Data_breach_probability string `json:"data_breach_probability"` - Data_breach_technical_assets []string `json:"data_breach_technical_assets"` - Most_relevant_data_asset string `json:"most_relevant_data_asset"` - Most_relevant_technical_asset string `json:"most_relevant_technical_asset"` - Most_relevant_communication_link string `json:"most_relevant_communication_link"` - Most_relevant_trust_boundary string `json:"most_relevant_trust_boundary"` - Most_relevant_shared_runtime string `json:"most_relevant_shared_runtime"` + Severity string `json:"severity"` + ExploitationLikelihood string `json:"exploitation_likelihood"` + ExploitationImpact string `json:"exploitation_impact"` + DataBreachProbability string `json:"data_breach_probability"` + DataBreachTechnicalAssets []string `json:"data_breach_technical_assets"` + MostRelevantDataAsset string `json:"most_relevant_data_asset"` + MostRelevantTechnicalAsset string `json:"most_relevant_technical_asset"` + 
MostRelevantCommunicationLink string `json:"most_relevant_communication_link"` + MostRelevantTrustBoundary string `json:"most_relevant_trust_boundary"` + MostRelevantSharedRuntime string `json:"most_relevant_shared_runtime"` } type InputRiskTracking struct { @@ -211,7 +220,7 @@ type InputRiskTracking struct { Justification string `json:"justification"` Ticket string `json:"ticket"` Date string `json:"date"` - Checked_by string `json:"checked_by"` + CheckedBy string `json:"checked_by"` } // TypeDescription contains a name for a type and its description @@ -510,14 +519,14 @@ type Authorization int const ( NoneAuthorization Authorization = iota TechnicalUser - EnduserIdentityPropagation + EndUserIdentityPropagation ) func AuthorizationValues() []TypeEnum { return []TypeEnum{ NoneAuthorization, TechnicalUser, - EnduserIdentityPropagation, + EndUserIdentityPropagation, } } @@ -630,7 +639,7 @@ const ( Transparent DataWithSymmetricSharedKey DataWithAsymmetricSharedKey - DataWithEnduserIndividualKey + DataWithEndUserIndividualKey ) func EncryptionStyleValues() []TypeEnum { @@ -639,7 +648,7 @@ func EncryptionStyleValues() []TypeEnum { Transparent, DataWithSymmetricSharedKey, DataWithAsymmetricSharedKey, - DataWithEnduserIndividualKey, + DataWithEndUserIndividualKey, } } @@ -671,7 +680,7 @@ func (what EncryptionStyle) Explain() string { } func (what EncryptionStyle) Title() string { - return [...]string{"None", "Transparent", "Data with Symmetric Shared Key", "Data with Asymmetric Shared Key", "Data with Enduser Individual Key"}[what] + return [...]string{"None", "Transparent", "Data with Symmetric Shared Key", "Data with Asymmetric Shared Key", "Data with End-User Individual Key"}[what] } type DataFormat int @@ -728,29 +737,29 @@ const ( HTTPS WS WSS - Reverse_proxy_web_protocol - Reverse_proxy_web_protocol_encrypted + ReverseProxyWebProtocol + ReverseProxyWebProtocolEncrypted MQTT JDBC - JDBC_encrypted + JdbcEncrypted ODBC - ODBC_encrypted - SQL_access_protocol - 
SQL_access_protocol_encrypted - NoSQL_access_protocol - NoSQL_access_protocol_encrypted + OdbcEncrypted + SqlAccessProtocol + SqlAccessProtocolEncrypted + NosqlAccessProtocol + NosqlAccessProtocolEncrypted BINARY - BINARY_encrypted + BinaryEncrypted TEXT - TEXT_encrypted + TextEncrypted SSH - SSH_tunnel + SshTunnel SMTP - SMTP_encrypted + SmtpEncrypted POP3 - POP3_encrypted + Pop3Encrypted IMAP - IMAP_encrypted + ImapEncrypted FTP FTPS SFTP @@ -760,14 +769,14 @@ const ( JMS NFS SMB - SMB_encrypted + SmbEncrypted LocalFileAccess NRPE XMPP IIOP - IIOP_encrypted + IiopEncrypted JRMP - JRMP_encrypted + JrmpEncrypted InProcessLibraryCall ContainerSpawning ) @@ -779,29 +788,29 @@ func ProtocolValues() []TypeEnum { HTTPS, WS, WSS, - Reverse_proxy_web_protocol, - Reverse_proxy_web_protocol_encrypted, + ReverseProxyWebProtocol, + ReverseProxyWebProtocolEncrypted, MQTT, JDBC, - JDBC_encrypted, + JdbcEncrypted, ODBC, - ODBC_encrypted, - SQL_access_protocol, - SQL_access_protocol_encrypted, - NoSQL_access_protocol, - NoSQL_access_protocol_encrypted, + OdbcEncrypted, + SqlAccessProtocol, + SqlAccessProtocolEncrypted, + NosqlAccessProtocol, + NosqlAccessProtocolEncrypted, BINARY, - BINARY_encrypted, + BinaryEncrypted, TEXT, - TEXT_encrypted, + TextEncrypted, SSH, - SSH_tunnel, + SshTunnel, SMTP, - SMTP_encrypted, + SmtpEncrypted, POP3, - POP3_encrypted, + Pop3Encrypted, IMAP, - IMAP_encrypted, + ImapEncrypted, FTP, FTPS, SFTP, @@ -811,14 +820,14 @@ func ProtocolValues() []TypeEnum { JMS, NFS, SMB, - SMB_encrypted, + SmbEncrypted, LocalFileAccess, NRPE, XMPP, IIOP, - IIOP_encrypted, + IiopEncrypted, JRMP, - JRMP_encrypted, + JrmpEncrypted, InProcessLibraryCall, ContainerSpawning, } @@ -888,24 +897,24 @@ func (what Protocol) IsProcessLocal() bool { } func (what Protocol) IsEncrypted() bool { - return what == HTTPS || what == WSS || what == JDBC_encrypted || what == ODBC_encrypted || - what == NoSQL_access_protocol_encrypted || what == SQL_access_protocol_encrypted || what == 
BINARY_encrypted || what == TEXT_encrypted || what == SSH || what == SSH_tunnel || - what == FTPS || what == SFTP || what == SCP || what == LDAPS || what == Reverse_proxy_web_protocol_encrypted || - what == IIOP_encrypted || what == JRMP_encrypted || what == SMB_encrypted || what == SMTP_encrypted || what == POP3_encrypted || what == IMAP_encrypted + return what == HTTPS || what == WSS || what == JdbcEncrypted || what == OdbcEncrypted || + what == NosqlAccessProtocolEncrypted || what == SqlAccessProtocolEncrypted || what == BinaryEncrypted || what == TextEncrypted || what == SSH || what == SshTunnel || + what == FTPS || what == SFTP || what == SCP || what == LDAPS || what == ReverseProxyWebProtocolEncrypted || + what == IiopEncrypted || what == JrmpEncrypted || what == SmbEncrypted || what == SmtpEncrypted || what == Pop3Encrypted || what == ImapEncrypted } func (what Protocol) IsPotentialDatabaseAccessProtocol(includingLaxDatabaseProtocols bool) bool { - strictlyDatabaseOnlyProtocol := what == JDBC_encrypted || what == ODBC_encrypted || - what == NoSQL_access_protocol_encrypted || what == SQL_access_protocol_encrypted || what == JDBC || what == ODBC || what == NoSQL_access_protocol || what == SQL_access_protocol + strictlyDatabaseOnlyProtocol := what == JdbcEncrypted || what == OdbcEncrypted || + what == NosqlAccessProtocolEncrypted || what == SqlAccessProtocolEncrypted || what == JDBC || what == ODBC || what == NosqlAccessProtocol || what == SqlAccessProtocol if includingLaxDatabaseProtocols { // include HTTP for REST-based NoSQL-DBs as well as unknown binary - return strictlyDatabaseOnlyProtocol || what == HTTPS || what == HTTP || what == BINARY || what == BINARY_encrypted + return strictlyDatabaseOnlyProtocol || what == HTTPS || what == HTTP || what == BINARY || what == BinaryEncrypted } return strictlyDatabaseOnlyProtocol } func (what Protocol) IsPotentialWebAccessProtocol() bool { - return what == HTTP || what == HTTPS || what == WS || what == WSS || what == 
Reverse_proxy_web_protocol || what == Reverse_proxy_web_protocol_encrypted + return what == HTTP || what == HTTPS || what == WS || what == WSS || what == ReverseProxyWebProtocol || what == ReverseProxyWebProtocolEncrypted } type TechnicalAssetTechnology int @@ -1117,7 +1126,7 @@ func (what TechnicalAssetTechnology) IsSecurityControlRelated() bool { return what == Vault || what == HSM || what == WAF || what == IDS || what == IPS } -func (what TechnicalAssetTechnology) IsUnprotectedCommsTolerated() bool { +func (what TechnicalAssetTechnology) IsUnprotectedCommunicationsTolerated() bool { return what == Monitoring || what == IDS || what == IPS } @@ -1150,11 +1159,11 @@ func (what TechnicalAssetTechnology) IsLessProtectedType() bool { what == Mainframe } -func (what TechnicalAssetTechnology) IsUsuallyProcessingEnduserRequests() bool { +func (what TechnicalAssetTechnology) IsUsuallyProcessingEndUserRequests() bool { return what == WebServer || what == WebApplication || what == ApplicationServer || what == ERP || what == WebServiceREST || what == WebServiceSOAP || what == EJB || what == ReportEngine } -func (what TechnicalAssetTechnology) IsUsuallyStoringEnduserData() bool { +func (what TechnicalAssetTechnology) IsUsuallyStoringEndUserData() bool { return what == Database || what == ERP || what == FileServer || what == LocalFileSystem || what == BlockStorage || what == MailServer || what == StreamProcessing || what == MessageQueue } @@ -1335,8 +1344,8 @@ func (what DataAsset) IsTaggedWithAny(tags ...string) bool { return ContainsCaseInsensitiveAny(what.Tags, tags...) 
} -func (what DataAsset) IsTaggedWithBaseTag(basetag string) bool { - return IsTaggedWithBaseTag(what.Tags, basetag) +func (what DataAsset) IsTaggedWithBaseTag(baseTag string) bool { + return IsTaggedWithBaseTag(what.Tags, baseTag) } /* @@ -1354,6 +1363,7 @@ func (what DataAsset) IsAtRisk() bool { return false } */ + /* func (what DataAsset) IdentifiedRiskSeverityStillAtRisk() RiskSeverity { highestRiskSeverity := Low @@ -1372,6 +1382,7 @@ func (what DataAsset) IdentifiedRiskSeverityStillAtRisk() RiskSeverity { return highestRiskSeverity } */ + func (what DataAsset) IdentifiedRisksByResponsibleTechnicalAssetId() map[string][]Risk { uniqueTechAssetIDsResponsibleForThisDataAsset := make(map[string]interface{}) for _, techAsset := range what.ProcessedByTechnicalAssetsSorted() { @@ -1386,7 +1397,7 @@ func (what DataAsset) IdentifiedRisksByResponsibleTechnicalAssetId() map[string] } result := make(map[string][]Risk) - for techAssetId, _ := range uniqueTechAssetIDsResponsibleForThisDataAsset { + for techAssetId := range uniqueTechAssetIDsResponsibleForThisDataAsset { result[techAssetId] = append(result[techAssetId], ParsedModelRoot.TechnicalAssets[techAssetId].GeneratedRisks()...) } return result @@ -1538,11 +1549,11 @@ func (what DataAsset) ReceivedViaCommLinksSorted() []CommunicationLink { return result } -func IsTaggedWithBaseTag(tags []string, basetag string) bool { // basetags are before the colon ":" like in "aws:ec2" it's "aws". The subtag is after the colon. Also a pure "aws" tag matches the basetag "aws" - basetag = strings.ToLower(strings.TrimSpace(basetag)) +func IsTaggedWithBaseTag(tags []string, baseTag string) bool { // base tags are before the colon ":" like in "aws:ec2" it's "aws". The subtag is after the colon. 
Also, a pure "aws" tag matches the base tag "aws" + baseTag = strings.ToLower(strings.TrimSpace(baseTag)) for _, tag := range tags { tag = strings.ToLower(strings.TrimSpace(tag)) - if tag == basetag || strings.HasPrefix(tag, basetag+":") { + if tag == baseTag || strings.HasPrefix(tag, baseTag+":") { return true } } @@ -1575,11 +1586,12 @@ func (what TechnicalAsset) IsTaggedWithAny(tags ...string) bool { return ContainsCaseInsensitiveAny(what.Tags, tags...) } -func (what TechnicalAsset) IsTaggedWithBaseTag(basetag string) bool { - return IsTaggedWithBaseTag(what.Tags, basetag) +func (what TechnicalAsset) IsTaggedWithBaseTag(baseTag string) bool { + return IsTaggedWithBaseTag(what.Tags, baseTag) } // first use the tag(s) of the asset itself, then their trust boundaries (recursively up) and then their shared runtime + func (what TechnicalAsset) IsTaggedWithAnyTraversingUp(tags ...string) bool { if ContainsCaseInsensitiveAny(what.Tags, tags...) { return true @@ -1929,8 +1941,8 @@ func (what CommunicationLink) IsTaggedWithAny(tags ...string) bool { return ContainsCaseInsensitiveAny(what.Tags, tags...) } -func (what CommunicationLink) IsTaggedWithBaseTag(basetag string) bool { - return IsTaggedWithBaseTag(what.Tags, basetag) +func (what CommunicationLink) IsTaggedWithBaseTag(baseTag string) bool { + return IsTaggedWithBaseTag(what.Tags, baseTag) } type ByTechnicalCommunicationLinkIdSort []CommunicationLink @@ -1961,8 +1973,8 @@ func (what TrustBoundary) IsTaggedWithAny(tags ...string) bool { return ContainsCaseInsensitiveAny(what.Tags, tags...) 
} -func (what TrustBoundary) IsTaggedWithBaseTag(basetag string) bool { - return IsTaggedWithBaseTag(what.Tags, basetag) +func (what TrustBoundary) IsTaggedWithBaseTag(baseTag string) bool { + return IsTaggedWithBaseTag(what.Tags, baseTag) } func (what TrustBoundary) IsTaggedWithAnyTraversingUp(tags ...string) bool { @@ -2030,8 +2042,8 @@ func (what SharedRuntime) IsTaggedWithAny(tags ...string) bool { return ContainsCaseInsensitiveAny(what.Tags, tags...) } -func (what SharedRuntime) IsTaggedWithBaseTag(basetag string) bool { - return IsTaggedWithBaseTag(what.Tags, basetag) +func (what SharedRuntime) IsTaggedWithBaseTag(baseTag string) bool { + return IsTaggedWithBaseTag(what.Tags, baseTag) } func (what SharedRuntime) HighestConfidentiality() Confidentiality { @@ -2203,7 +2215,7 @@ type ParsedModel struct { func SortedTechnicalAssetIDs() []string { res := make([]string, 0) - for id, _ := range ParsedModelRoot.TechnicalAssets { + for id := range ParsedModelRoot.TechnicalAssets { res = append(res, id) } sort.Strings(res) @@ -2227,9 +2239,10 @@ func TagsActuallyUsed() []string { // === Sorting stuff ===================================== // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedKeysOfIndividualRiskCategories() []string { keys := make([]string, 0) - for k, _ := range ParsedModelRoot.IndividualRiskCategories { + for k := range ParsedModelRoot.IndividualRiskCategories { keys = append(keys, k) } sort.Strings(keys) @@ -2237,9 +2250,10 @@ func SortedKeysOfIndividualRiskCategories() []string { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedKeysOfSecurityRequirements() []string { keys := make([]string, 0) - for k, _ := range ParsedModelRoot.SecurityRequirements { + for k := range ParsedModelRoot.SecurityRequirements { keys = append(keys, k) } sort.Strings(keys) @@ -2247,9 +2261,10 @@ func SortedKeysOfSecurityRequirements() []string { } // as 
in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedKeysOfAbuseCases() []string { keys := make([]string, 0) - for k, _ := range ParsedModelRoot.AbuseCases { + for k := range ParsedModelRoot.AbuseCases { keys = append(keys, k) } sort.Strings(keys) @@ -2257,9 +2272,10 @@ func SortedKeysOfAbuseCases() []string { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedKeysOfQuestions() []string { keys := make([]string, 0) - for k, _ := range ParsedModelRoot.Questions { + for k := range ParsedModelRoot.Questions { keys = append(keys, k) } sort.Strings(keys) @@ -2267,9 +2283,10 @@ func SortedKeysOfQuestions() []string { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedKeysOfDataAssets() []string { keys := make([]string, 0) - for k, _ := range ParsedModelRoot.DataAssets { + for k := range ParsedModelRoot.DataAssets { keys = append(keys, k) } sort.Strings(keys) @@ -2277,9 +2294,10 @@ func SortedKeysOfDataAssets() []string { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedKeysOfTechnicalAssets() []string { keys := make([]string, 0) - for k, _ := range ParsedModelRoot.TechnicalAssets { + for k := range ParsedModelRoot.TechnicalAssets { keys = append(keys, k) } sort.Strings(keys) @@ -2339,6 +2357,7 @@ func SharedRuntimesTaggedWithAny(tags ...string) []SharedRuntime { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedTechnicalAssetsByTitle() []TechnicalAsset { assets := make([]TechnicalAsset, 0) for _, asset := range ParsedModelRoot.TechnicalAssets { @@ -2349,6 +2368,7 @@ func SortedTechnicalAssetsByTitle() []TechnicalAsset { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedDataAssetsByTitle() []DataAsset { assets := 
make([]DataAsset, 0) for _, asset := range ParsedModelRoot.DataAssets { @@ -2359,6 +2379,7 @@ func SortedDataAssetsByTitle() []DataAsset { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedDataAssetsByDataBreachProbabilityAndTitleStillAtRisk() []DataAsset { assets := make([]DataAsset, 0) for _, asset := range ParsedModelRoot.DataAssets { @@ -2369,6 +2390,7 @@ func SortedDataAssetsByDataBreachProbabilityAndTitleStillAtRisk() []DataAsset { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedDataAssetsByDataBreachProbabilityAndTitle() []DataAsset { assets := make([]DataAsset, 0) for _, asset := range ParsedModelRoot.DataAssets { @@ -2379,6 +2401,7 @@ func SortedDataAssetsByDataBreachProbabilityAndTitle() []DataAsset { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedTechnicalAssetsByRiskSeverityAndTitle() []TechnicalAsset { assets := make([]TechnicalAsset, 0) for _, asset := range ParsedModelRoot.TechnicalAssets { @@ -2389,6 +2412,7 @@ func SortedTechnicalAssetsByRiskSeverityAndTitle() []TechnicalAsset { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedTechnicalAssetsByRAAAndTitle() []TechnicalAsset { assets := make([]TechnicalAsset, 0) for _, asset := range ParsedModelRoot.TechnicalAssets { @@ -2424,9 +2448,10 @@ func OutOfScopeTechnicalAssets() []TechnicalAsset { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedKeysOfTrustBoundaries() []string { keys := make([]string, 0) - for k, _ := range ParsedModelRoot.TrustBoundaries { + for k := range ParsedModelRoot.TrustBoundaries { keys = append(keys, k) } sort.Strings(keys) @@ -2443,9 +2468,10 @@ func SortedTrustBoundariesByTitle() []TrustBoundary { } // as in Go ranging over map is random order, range over them 
in sorted (hence reproducible) way: + func SortedKeysOfSharedRuntime() []string { keys := make([]string, 0) - for k, _ := range ParsedModelRoot.SharedRuntimes { + for k := range ParsedModelRoot.SharedRuntimes { keys = append(keys, k) } sort.Strings(keys) @@ -2476,6 +2502,7 @@ func QuestionsUnanswered() int { // Line Styles: // dotted when model forgery attempt (i.e. nothing being sent and received) + func (what CommunicationLink) DetermineArrowLineStyle() string { if len(what.DataAssetsSent) == 0 && len(what.DataAssetsReceived) == 0 { return "dotted" // dotted, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... @@ -2487,6 +2514,7 @@ func (what CommunicationLink) DetermineArrowLineStyle() string { } // dotted when model forgery attempt (i.e. nothing being processed or stored) + func (what TechnicalAsset) DetermineShapeBorderLineStyle() string { if len(what.DataAssetsProcessed) == 0 && len(what.DataAssetsStored) == 0 || what.OutOfScope { return "dotted" // dotted, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... 
@@ -2495,6 +2523,7 @@ func (what TechnicalAsset) DetermineShapeBorderLineStyle() string { } // 3 when redundant + func (what TechnicalAsset) DetermineShapePeripheries() int { if what.Redundant { return 2 @@ -2620,6 +2649,7 @@ func (what TechnicalAsset) ProcessesOrStoresDataAsset(dataAssetId string) bool { } // red when >= confidential data stored in unencrypted technical asset + func (what TechnicalAsset) DetermineLabelColor() string { // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here // Check for red @@ -2679,6 +2709,7 @@ func (what TechnicalAsset) DetermineLabelColor() string { // red when mission-critical integrity, but still unauthenticated (non-readonly) channels access it // amber when critical integrity, but still unauthenticated (non-readonly) channels access it // pink when model forgery attempt (i.e. nothing being processed or stored) + func (what TechnicalAsset) DetermineShapeBorderColor() string { // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here // Check for red @@ -2728,7 +2759,7 @@ func (what TechnicalAsset) DetermineShapeBorderColor() string { } if len(what.DataAssetsProcessed) == 0 && len(what.DataAssetsStored) == 0 { - return colors.Pink // pink, because it's strange when too many technical assets process no data... some ok, but many in a diagram ist a sign of model forgery... + return colors.Pink // pink, because it's strange when too many technical assets process no data... some are ok, but many in a diagram is a sign of model forgery... } return colors.Black @@ -2769,6 +2800,7 @@ func (what CommunicationLink) DetermineLabelColor() string { } // pink when model forgery attempt (i.e. 
nothing being sent and received) + func (what CommunicationLink) DetermineArrowColor() string { // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here if len(what.DataAssetsSent) == 0 && len(what.DataAssetsReceived) == 0 || @@ -2858,6 +2890,7 @@ func (what TechnicalAsset) DetermineShapeFillColor() string { fillColor = colors.BrightenHexColor(fillColor) case Serverless: fillColor = colors.BrightenHexColor(colors.BrightenHexColor(fillColor)) + case Virtual: } return fillColor } @@ -3268,7 +3301,7 @@ type Risk struct { // TODO: refactor all "Id" here to "ID"? } -func (what Risk) GetRiskTracking() RiskTracking { // TODO: Unify function naming reagrding Get etc. +func (what Risk) GetRiskTracking() RiskTracking { // TODO: Unify function naming regarding Get etc. var result RiskTracking if riskTracking, ok := ParsedModelRoot.RiskTracking[what.SyntheticId]; ok { result = riskTracking @@ -3404,9 +3437,10 @@ type RiskRule interface { } // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + func SortedRiskCategories() []RiskCategory { categories := make([]RiskCategory, 0) - for k, _ := range GeneratedRisksByCategory { + for k := range GeneratedRisksByCategory { categories = append(categories, k) } sort.Sort(ByRiskCategoryHighestContainingRiskSeveritySortStillAtRisk(categories)) @@ -3793,7 +3827,7 @@ func FilteredByOnlyLowRisks() []Risk { } func FilterByModelFailures(risksByCat map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk, 0) + result := make(map[RiskCategory][]Risk) for riskCat, risks := range risksByCat { if riskCat.ModelFailurePossibleReason { result[riskCat] = risks diff --git a/raa/dummy/dummy.go b/raa/dummy/dummy.go index febfaf9a..1793be00 100644 --- a/raa/dummy/dummy.go +++ b/raa/dummy/dummy.go @@ -8,7 +8,12 @@ import ( // JUST A DUMMY TO HAVE AN ALTERNATIVE PLUGIN TO USE/TEST +var ( + _ = CalculateRAA +) + // used from 
plugin caller: + func CalculateRAA() string { for techAssetID, techAsset := range model.ParsedModelRoot.TechnicalAssets { techAsset.RAA = float64(rand.Intn(100)) diff --git a/raa/raa/raa.go b/raa/raa/raa.go index a0b98aa3..a6babc82 100644 --- a/raa/raa/raa.go +++ b/raa/raa/raa.go @@ -5,7 +5,12 @@ import ( "sort" ) +var ( + _ = CalculateRAA +) + // used from plugin caller: + func CalculateRAA() string { for techAssetID, techAsset := range model.ParsedModelRoot.TechnicalAssets { aa := calculateAttackerAttractiveness(techAsset) @@ -33,7 +38,7 @@ func calculateRelativeAttackerAttractiveness(attractiveness float64) float64 { // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: keys := make([]string, 0) - for k, _ := range model.ParsedModelRoot.TechnicalAssets { + for k := range model.ParsedModelRoot.TechnicalAssets { keys = append(keys, k) } sort.Strings(keys) @@ -53,7 +58,7 @@ func calculateRelativeAttackerAttractiveness(attractiveness float64) float64 { } // calculate the percent value of the value within the defined min/max range value := attractiveness - attackerAttractivenessMinimum - percent := float64(value) / float64(spread) * 100 + percent := value / spread * 100 if percent <= 0 { percent = 1 // since 0 suggests no attacks at all } @@ -72,7 +77,7 @@ func calculatePivotingNeighbourEffectAdjustment(techAsset model.TechnicalAsset) delta := calculateRelativeAttackerAttractiveness(calculateAttackerAttractiveness(outgoingNeighbour)) - calculateRelativeAttackerAttractiveness(calculateAttackerAttractiveness(techAsset)) if delta > 0 { potentialIncrease := delta / 3 - //fmt.Println("Positive delta from", techAsset.Id, "to", outgoingNeighbour.Id, "is", delta, "yields to pivoting eighbour effect of an incrase of", potentialIncrease) + //fmt.Println("Positive delta from", techAsset.Id, "to", outgoingNeighbour.Id, "is", delta, 
"yields to pivoting neighbour effect of an increase of", potentialIncrease) if potentialIncrease > adjustment { adjustment = potentialIncrease } diff --git a/report/excel.go b/report/excel.go index 3159b04b..1c1e414f 100644 --- a/report/excel.go +++ b/report/excel.go @@ -454,7 +454,7 @@ func WriteRisksExcelToFile(filename string) { checkErr(err) } -func WriteTagsExcelToFile(filename string) { // TODO: eventually when len(sortedTagsAvailable) == 0 is: write a hint in the execel that no tags are used +func WriteTagsExcelToFile(filename string) { // TODO: eventually when len(sortedTagsAvailable) == 0 is: write a hint in the Excel that no tags are used excelRow = 0 excel := excelize.NewFile() sheetName := model.ParsedModelRoot.Title @@ -492,7 +492,7 @@ func WriteTagsExcelToFile(filename string) { // TODO: eventually when len(sorted }) checkErr(err) - err = excel.SetCellValue(sheetName, "A1", "Element") // TODO is "Element" the correct generic name when referencing assets, links, trust boudaries etc.? Eventually add separate column "type of element" like "technical asset" or "data asset"? + err = excel.SetCellValue(sheetName, "A1", "Element") // TODO is "Element" the correct generic name when referencing assets, links, trust boundaries etc.? Eventually add separate column "type of element" like "technical asset" or "data asset"? sortedTagsAvailable := model.TagsActuallyUsed() sort.Strings(sortedTagsAvailable) axis := "" @@ -615,7 +615,7 @@ func writeRow(excel *excelize.File, sheetName string, axis string, styleBlackLef var alphabet = []string{"A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"} func determineColumnLetter(i int) string { - // can only have 700 columns in excel that way, but that should be more than usable anyway ;)... otherwise think about your model... + // can only have 700 columns in Excel that way, but that should be more than usable anyway ;)... 
otherwise think about your model... i++ if i < 26 { return alphabet[i] diff --git a/report/json.go b/report/json.go index bd4d8009..a1456dab 100644 --- a/report/json.go +++ b/report/json.go @@ -3,7 +3,7 @@ package report import ( "encoding/json" "github.com/threagile/threagile/model" - "io/ioutil" + "os" ) func WriteRisksJSON(filename string) { @@ -20,19 +20,20 @@ func WriteRisksJSON(filename string) { if err != nil { panic(err) } - err = ioutil.WriteFile(filename, jsonBytes, 0644) + err = os.WriteFile(filename, jsonBytes, 0644) if err != nil { panic(err) } } // TODO: also a "data assets" json? + func WriteTechnicalAssetsJSON(filename string) { jsonBytes, err := json.Marshal(model.ParsedModelRoot.TechnicalAssets) if err != nil { panic(err) } - err = ioutil.WriteFile(filename, jsonBytes, 0644) + err = os.WriteFile(filename, jsonBytes, 0644) if err != nil { panic(err) } @@ -43,7 +44,7 @@ func WriteStatsJSON(filename string) { if err != nil { panic(err) } - err = ioutil.WriteFile(filename, jsonBytes, 0644) + err = os.WriteFile(filename, jsonBytes, 0644) if err != nil { panic(err) } diff --git a/report/report.go b/report/report.go index 39d3bdff..060ff442 100644 --- a/report/report.go +++ b/report/report.go @@ -52,7 +52,6 @@ import ( "github.com/wcharczuk/go-chart" "github.com/wcharczuk/go-chart/drawing" "image" - "io/ioutil" "log" "os" "path/filepath" @@ -70,7 +69,8 @@ const /*dataFlowDiagramFullscreen,*/ allowedPdfLandscapePages, embedDiagramLegen var isLandscapePage bool var pdf *gofpdf.Fpdf -var alreadyTemplateImported = false + +// var alreadyTemplateImported = false var coverTemplateId, contentTemplateId, diagramLegendTemplateId int var pageNo int var linkCounter int @@ -79,6 +79,25 @@ var homeLink int var currentChapterTitleBreadcrumb string var firstParagraphRegEx = regexp.MustCompile(`(.*?)((
)|(

))`) +var ( + _ = pdfColorDataAssets + _ = rgbHexColorDataAssets + _ = pdfColorTechnicalAssets + _ = rgbHexColorTechnicalAssets + _ = pdfColorTrustBoundaries + _ = pdfColorSharedRuntime + _ = rgbHexColorTrustBoundaries + _ = rgbHexColorSharedRuntime + _ = pdfColorRiskFindings + _ = rgbHexColorRiskFindings + _ = rgbHexColorDisclaimer + _ = rgbHexColorGray + _ = rgbHexColorLightGray + _ = rgbHexColorOutOfScope + _ = rgbHexColorBlack + _ = pdfColorRed + _ = rgbHexColorRed +) func initReport() { pdf = nil @@ -98,25 +117,27 @@ func WriteReportPDF(reportFilename string, skipRiskRules string, buildTimestamp string, modelHash string, - introTextRAA string, customRiskRules map[string]model.CustomRiskRule) { + introTextRAA string, + customRiskRules map[string]model.CustomRiskRule, + tempFolder string) { initReport() createPdfAndInitMetadata() parseBackgroundTemplate(templateFilename) createCover() createTableOfContents() - createManagementSummary() + createManagementSummary(tempFolder) createImpactInitialRisks() - createRiskMitigationStatus() + createRiskMitigationStatus(tempFolder) createImpactRemainingRisks() createTargetDescription(filepath.Dir(modelFilename)) - embedDataFlowDiagram(dataFlowDiagramFilenamePNG) + embedDataFlowDiagram(dataFlowDiagramFilenamePNG, tempFolder) createSecurityRequirements() createAbuseCases() createTagListing() createSTRIDE() createAssignmentByFunction() createRAA(introTextRAA) - embedDataRiskMapping(dataAssetDiagramFilenamePNG) + embedDataRiskMapping(dataAssetDiagramFilenamePNG, tempFolder) //createDataRiskQuickWins() createOutOfScopeAssets() createModelFailures() @@ -187,11 +208,11 @@ func parseBackgroundTemplate(templateFilename string) { /* imageBox, err := rice.FindBox("template") checkErr(err) - file, err := ioutil.TempFile("", "background-*-.pdf") + file, err := os.CreateTemp("", "background-*-.pdf") checkErr(err) defer os.Remove(file.Name()) backgroundBytes := imageBox.MustBytes("background.pdf") - err = ioutil.WriteFile(file.Name(), 
backgroundBytes, 0644) + err = os.WriteFile(file.Name(), backgroundBytes, 0644) checkErr(err) */ coverTemplateId = gofpdi.ImportPage(pdf, templateFilename, 1, "/MediaBox") @@ -713,7 +734,7 @@ func createDisclaimer() { "is obligated to ensure the highly confidential contents are kept secret. The recipient assumes responsibility " + "for further distribution of this document." + "

" + - "In this particular project, a timebox approach was used to define the analysis effort. This means that the " + + "In this particular project, a time box approach was used to define the analysis effort. This means that the " + "author allotted a prearranged amount of time to identify and document threats. Because of this, there " + "is no guarantee that all possible threats and risks are discovered. Furthermore, the analysis " + "applies to a snapshot of the current state of the modeled architecture (based on the architecture information provided " + @@ -729,7 +750,7 @@ func createDisclaimer() { pdfColorBlack() } -func createManagementSummary() { +func createManagementSummary(tempFolder string) { uni := pdf.UnicodeTranslatorFromDescriptor("") pdf.SetTextColor(0, 0, 0) title := "Management Summary" @@ -905,8 +926,8 @@ func createManagementSummary() { } y := pdf.GetY() + 5 - embedPieChart(pieChartRiskSeverity, 15.0, y) - embedPieChart(pieChartRiskStatus, 110.0, y) + embedPieChart(pieChartRiskSeverity, 15.0, y, tempFolder) + embedPieChart(pieChartRiskStatus, 110.0, y, tempFolder) // individual management summary comment pdfColorBlack() @@ -916,7 +937,7 @@ func createManagementSummary() { } } -func createRiskMitigationStatus() { +func createRiskMitigationStatus(tempFolder string) { pdf.SetTextColor(0, 0, 0) stillAtRisk := model.FilteredByStillAtRisk() count := len(stillAtRisk) @@ -1041,7 +1062,7 @@ func createRiskMitigationStatus() { } y := pdf.GetY() + 12 - embedStackedBarChart(stackedBarChartRiskTracking, 15.0, y) + embedStackedBarChart(stackedBarChartRiskTracking, 15.0, y, tempFolder) // draw the X-Axis legend on my own pdf.SetFont("Helvetica", "", fontSizeSmall) @@ -1168,8 +1189,8 @@ func createRiskMitigationStatus() { }, } - embedPieChart(pieChartRemainingRiskSeverity, 15.0, 216) - embedPieChart(pieChartRemainingRisksByFunction, 110.0, 216) + embedPieChart(pieChartRemainingRiskSeverity, 15.0, 216, tempFolder) + embedPieChart(pieChartRemainingRisksByFunction, 
110.0, 216, tempFolder) pdf.SetFont("Helvetica", "B", fontSizeBody) pdf.Ln(8) @@ -1223,12 +1244,12 @@ func createRiskMitigationStatus() { } // CAUTION: Long labels might cause endless loop, then remove labels and render them manually later inside the PDF -func embedStackedBarChart(sbcChart chart.StackedBarChart, x float64, y float64) { - tmpFilePNG, err := ioutil.TempFile(model.TempFolder, "chart-*-.png") +func embedStackedBarChart(sbcChart chart.StackedBarChart, x float64, y float64, tempFolder string) { + tmpFilePNG, err := os.CreateTemp(tempFolder, "chart-*-.png") checkErr(err) - defer os.Remove(tmpFilePNG.Name()) + defer func() { _ = os.Remove(tmpFilePNG.Name()) }() file, _ := os.Create(tmpFilePNG.Name()) - defer file.Close() + defer func() { _ = file.Close() }() err = sbcChart.Render(chart.PNG, file) checkErr(err) var options gofpdf.ImageOptions @@ -1237,13 +1258,13 @@ func embedStackedBarChart(sbcChart chart.StackedBarChart, x float64, y float64) pdf.ImageOptions(tmpFilePNG.Name(), x, y, 0, 110, false, options, 0, "") } -func embedPieChart(pieChart chart.PieChart, x float64, y float64) { - tmpFilePNG, err := ioutil.TempFile(model.TempFolder, "chart-*-.png") +func embedPieChart(pieChart chart.PieChart, x float64, y float64, tempFolder string) { + tmpFilePNG, err := os.CreateTemp(tempFolder, "chart-*-.png") checkErr(err) - defer os.Remove(tmpFilePNG.Name()) + defer func() { _ = os.Remove(tmpFilePNG.Name()) }() file, err := os.Create(tmpFilePNG.Name()) checkErr(err) - defer file.Close() + defer func() { _ = file.Close() }() err = pieChart.Render(chart.PNG, file) checkErr(err) var options gofpdf.ImageOptions @@ -3993,13 +4014,13 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim } for _, key := range model.SortedKeysOfIndividualRiskCategories() { - indivRiskCat := model.ParsedModelRoot.IndividualRiskCategories[key] + individualRiskCategory := model.ParsedModelRoot.IndividualRiskCategories[key] pdf.Ln(-1) pdf.SetFont("Helvetica", 
"B", fontSizeBody) - pdf.CellFormat(190, 3, indivRiskCat.Title, "0", 0, "", false, 0, "") + pdf.CellFormat(190, 3, individualRiskCategory.Title, "0", 0, "", false, 0, "") pdf.Ln(-1) pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, indivRiskCat.Id, "0", 0, "", false, 0, "") + pdf.CellFormat(190, 6, individualRiskCategory.Id, "0", 0, "", false, 0, "") pdf.Ln(-1) pdf.SetFont("Helvetica", "I", fontSizeBody) pdf.CellFormat(190, 6, "Individual Risk Category", "0", 0, "", false, 0, "") @@ -4009,22 +4030,22 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") pdfColorBlack() - pdf.MultiCell(160, 6, indivRiskCat.STRIDE.Title(), "0", "0", false) + pdf.MultiCell(160, 6, individualRiskCategory.STRIDE.Title(), "0", "0", false) pdfColorGray() pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(indivRiskCat.Description), "0", "0", false) + pdf.MultiCell(160, 6, firstParagraph(individualRiskCategory.Description), "0", "0", false) pdfColorGray() pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") pdfColorBlack() - pdf.MultiCell(160, 6, indivRiskCat.DetectionLogic, "0", "0", false) + pdf.MultiCell(160, 6, individualRiskCategory.DetectionLogic, "0", "0", false) pdfColorGray() pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") pdfColorBlack() - pdf.MultiCell(160, 6, indivRiskCat.RiskAssessment, "0", "0", false) + pdf.MultiCell(160, 6, individualRiskCategory.RiskAssessment, "0", "0", false) } pdf.Ln(-1) @@ -5559,7 +5580,7 @@ func addCustomImages(customImages []map[string]string, baseFolder string, html g // check JPEG, PNG or GIF extension := 
strings.ToLower(filepath.Ext(imageFilenameWithoutPath)) if extension == ".jpeg" || extension == ".jpg" || extension == ".png" || extension == ".gif" { - imageFullFilename := baseFolder + "/" + imageFilenameWithoutPath + imageFullFilename := filepath.Join(baseFolder, imageFilenameWithoutPath) if pdf.GetY()+getHeightWhenWidthIsFix(imageFullFilename, 180) > 250 { pageBreak() pdf.SetY(36) @@ -5597,14 +5618,14 @@ func getHeightWhenWidthIsFix(imageFullFilename string, width float64) float64 { } /* #nosec imageFullFilename is not tainted (see caller restricting it to image files of model folder only) */ file, err := os.Open(imageFullFilename) - defer file.Close() + defer func() { _ = file.Close() }() checkErr(err) - image, _, err := image.DecodeConfig(file) + img, _, err := image.DecodeConfig(file) checkErr(err) - return float64(image.Height) / (float64(image.Width) / width) + return float64(img.Height) / (float64(img.Width) / width) } -func embedDataFlowDiagram(diagramFilenamePNG string) { +func embedDataFlowDiagram(diagramFilenamePNG string, tempFolder string) { pdf.SetTextColor(0, 0, 0) title := "Data-Flow Diagram" addHeadline(title, false) @@ -5623,13 +5644,14 @@ func embedDataFlowDiagram(diagramFilenamePNG string) { // check to rotate the image if it is wider than high /* #nosec diagramFilenamePNG is not tainted */ imagePath, _ := os.Open(diagramFilenamePNG) - defer imagePath.Close() + defer func() { _ = imagePath.Close() }() srcImage, _, _ := image.Decode(imagePath) srcDimensions := srcImage.Bounds() // wider than high? muchWiderThanHigh := srcDimensions.Dx() > int(float64(srcDimensions.Dy())*1.25) // fresh page (eventually landscape)? 
isLandscapePage = false + _ = tempFolder /* pinnedWidth, pinnedHeight := 190.0, 210.0 if dataFlowDiagramFullscreen { @@ -5643,7 +5665,7 @@ func embedDataFlowDiagram(diagramFilenamePNG string) { // so rotate the image left by 90 degrees // ok, use temp PNG then // now rotate left by 90 degrees - rotatedFile, err := ioutil.TempFile(model.TempFolder, "diagram-*-.png") + rotatedFile, err := os.CreateTemp(tempFolder, "diagram-*-.png") checkErr(err) defer os.Remove(rotatedFile.Name()) dstImage := image.NewRGBA(image.Rect(0, 0, srcDimensions.Dy(), srcDimensions.Dx())) @@ -5691,7 +5713,7 @@ func embedDataFlowDiagram(diagramFilenamePNG string) { } } -func embedDataRiskMapping(diagramFilenamePNG string) { +func embedDataRiskMapping(diagramFilenamePNG string, tempFolder string) { pdf.SetTextColor(0, 0, 0) title := "Data Mapping" addHeadline(title, false) @@ -5713,7 +5735,7 @@ func embedDataRiskMapping(diagramFilenamePNG string) { // check to rotate the image if it is wider than high /* #nosec diagramFilenamePNG is not tainted */ imagePath, _ := os.Open(diagramFilenamePNG) - defer imagePath.Close() + defer func() { _ = imagePath.Close() }() srcImage, _, _ := image.Decode(imagePath) srcDimensions := srcImage.Bounds() // wider than high? @@ -5721,6 +5743,7 @@ func embedDataRiskMapping(diagramFilenamePNG string) { pinnedWidth, pinnedHeight := 190.0, 195.0 // fresh page (eventually landscape)? 
isLandscapePage = false + _ = tempFolder /* if dataFlowDiagramFullscreen { pinnedHeight = 235.0 @@ -5733,7 +5756,7 @@ func embedDataRiskMapping(diagramFilenamePNG string) { // so rotate the image left by 90 degrees // ok, use temp PNG then // now rotate left by 90 degrees - rotatedFile, err := ioutil.TempFile(model.TempFolder, "diagram-*-.png") + rotatedFile, err := os.CreateTemp(tempFolder, "diagram-*-.png") checkErr(err) defer os.Remove(rotatedFile.Name()) dstImage := image.NewRGBA(image.Rect(0, 0, srcDimensions.Dy(), srcDimensions.Dx())) @@ -5831,6 +5854,7 @@ func rgbHexColorSharedRuntime() string { func pdfColorRiskFindings() { pdf.SetTextColor(160, 40, 30) } + func rgbHexColorRiskFindings() string { return "#A0281E" } diff --git a/risks/built-in/code-backdooring/code-backdooring-rule.go b/risks/built-in/code-backdooring/code-backdooring-rule.go index 1f6e518e..22d8093d 100644 --- a/risks/built-in/code-backdooring/code-backdooring-rule.go +++ b/risks/built-in/code-backdooring/code-backdooring-rule.go @@ -96,7 +96,7 @@ func createRisk(technicalAsset model.TechnicalAsset, elevatedRisk bool) model.Ri } } dataBreachTechnicalAssetIDs := make([]string, 0) - for key, _ := range uniqueDataBreachTechnicalAssetIDs { + for key := range uniqueDataBreachTechnicalAssetIDs { dataBreachTechnicalAssetIDs = append(dataBreachTechnicalAssetIDs, key) } // create risk diff --git a/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go b/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go index e491655d..db2014c7 100644 --- a/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go +++ b/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go @@ -40,7 +40,7 @@ func GenerateRisks() []model.Risk { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] if technicalAsset.OutOfScope || 
technicalAsset.Technology.IsTrafficForwarding() || - technicalAsset.Technology.IsUnprotectedCommsTolerated() { + technicalAsset.Technology.IsUnprotectedCommunicationsTolerated() { continue } if technicalAsset.HighestConfidentiality() >= model.Confidential || @@ -51,7 +51,7 @@ func GenerateRisks() []model.Risk { commLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, commLink := range commLinks { caller := model.ParsedModelRoot.TechnicalAssets[commLink.SourceId] - if caller.Technology.IsUnprotectedCommsTolerated() || caller.Type == model.Datastore { + if caller.Technology.IsUnprotectedCommunicationsTolerated() || caller.Type == model.Datastore { continue } if caller.UsedAsClientByHuman { @@ -65,7 +65,7 @@ func GenerateRisks() []model.Risk { callersCommLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[caller.Id] for _, callersCommLink := range callersCommLinks { callersCaller := model.ParsedModelRoot.TechnicalAssets[callersCommLink.SourceId] - if callersCaller.Technology.IsUnprotectedCommsTolerated() || callersCaller.Type == model.Datastore { + if callersCaller.Technology.IsUnprotectedCommunicationsTolerated() || callersCaller.Type == model.Datastore { continue } if callersCaller.UsedAsClientByHuman { diff --git a/risks/built-in/missing-authentication/missing-authentication-rule.go b/risks/built-in/missing-authentication/missing-authentication-rule.go index 9d002242..82934af2 100644 --- a/risks/built-in/missing-authentication/missing-authentication-rule.go +++ b/risks/built-in/missing-authentication/missing-authentication-rule.go @@ -49,7 +49,7 @@ func GenerateRisks() []model.Risk { commLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, commLink := range commLinks { caller := model.ParsedModelRoot.TechnicalAssets[commLink.SourceId] - if caller.Technology.IsUnprotectedCommsTolerated() || caller.Type == model.Datastore { + if 
caller.Technology.IsUnprotectedCommunicationsTolerated() || caller.Type == model.Datastore { continue } highRisk := commLink.HighestConfidentiality() == model.StrictlyConfidential || diff --git a/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go b/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go index 1eb1662a..8941dc5b 100644 --- a/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go +++ b/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go @@ -40,7 +40,7 @@ func GenerateRisks() []model.Risk { hasCustomDevelopedParts, hasBuildPipeline, hasSourcecodeRepo, hasDevOpsClient := false, false, false, false impact := model.LowImpact var mostRelevantAsset model.TechnicalAsset - for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with highest sensitivity as example asset + for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] if technicalAsset.CustomDevelopedParts && !technicalAsset.OutOfScope { hasCustomDevelopedParts = true diff --git a/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go b/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go index e7dddb3a..77539d81 100644 --- a/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go +++ b/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go @@ -36,7 +36,7 @@ func Category() model.RiskCategory { } } -var specificSubtagsAWS = []string{"aws:vpc", "aws:ec2", "aws:s3", "aws:ebs", "aws:apigateway", "aws:lambda", "aws:dynamodb", "aws:rds", "aws:sqs", "aws:iam"} +var specificSubTagsAWS = []string{"aws:vpc", "aws:ec2", "aws:s3", "aws:ebs", "aws:apigateway", "aws:lambda", "aws:dynamodb", "aws:rds", "aws:sqs", "aws:iam"} func 
SupportedTags() []string { res := []string{ @@ -45,50 +45,50 @@ func SupportedTags() []string { "gcp", // Google Cloud Platform "ocp", // Oracle Cloud Platform } - res = append(res, specificSubtagsAWS...) + res = append(res, specificSubTagsAWS...) return res } func GenerateRisks() []model.Risk { risks := make([]model.Risk, 0) - sharedRuntimesWithUnspecificCloudRisks := make(map[string]bool, 0) - trustBoundariesWithUnspecificCloudRisks := make(map[string]bool, 0) - techAssetsWithUnspecificCloudRisks := make(map[string]bool, 0) + sharedRuntimesWithUnspecificCloudRisks := make(map[string]bool) + trustBoundariesWithUnspecificCloudRisks := make(map[string]bool) + techAssetsWithUnspecificCloudRisks := make(map[string]bool) - sharedRuntimeIDsAWS := make(map[string]bool, 0) - trustBoundaryIDsAWS := make(map[string]bool, 0) - techAssetIDsAWS := make(map[string]bool, 0) + sharedRuntimeIDsAWS := make(map[string]bool) + trustBoundaryIDsAWS := make(map[string]bool) + techAssetIDsAWS := make(map[string]bool) - sharedRuntimeIDsAzure := make(map[string]bool, 0) - trustBoundaryIDsAzure := make(map[string]bool, 0) - techAssetIDsAzure := make(map[string]bool, 0) + sharedRuntimeIDsAzure := make(map[string]bool) + trustBoundaryIDsAzure := make(map[string]bool) + techAssetIDsAzure := make(map[string]bool) - sharedRuntimeIDsGCP := make(map[string]bool, 0) - trustBoundaryIDsGCP := make(map[string]bool, 0) - techAssetIDsGCP := make(map[string]bool, 0) + sharedRuntimeIDsGCP := make(map[string]bool) + trustBoundaryIDsGCP := make(map[string]bool) + techAssetIDsGCP := make(map[string]bool) - sharedRuntimeIDsOCP := make(map[string]bool, 0) - trustBoundaryIDsOCP := make(map[string]bool, 0) - techAssetIDsOCP := make(map[string]bool, 0) + sharedRuntimeIDsOCP := make(map[string]bool) + trustBoundaryIDsOCP := make(map[string]bool) + techAssetIDsOCP := make(map[string]bool) - techAssetIDsWithSubtagSpecificCloudRisks := make(map[string]bool, 0) + techAssetIDsWithSubtagSpecificCloudRisks := 
make(map[string]bool) for _, trustBoundary := range model.ParsedModelRoot.TrustBoundaries { taggedOuterTB := trustBoundary.IsTaggedWithAny(SupportedTags()...) // false = generic cloud risks only // true = cloud-individual risks if taggedOuterTB || trustBoundary.Type.IsWithinCloud() { - addTrustBoundaryAccordingToBasetag(trustBoundary, trustBoundariesWithUnspecificCloudRisks, + addTrustBoundaryAccordingToBaseTag(trustBoundary, trustBoundariesWithUnspecificCloudRisks, trustBoundaryIDsAWS, trustBoundaryIDsAzure, trustBoundaryIDsGCP, trustBoundaryIDsOCP) for _, techAssetID := range trustBoundary.RecursivelyAllTechnicalAssetIDsInside() { added := false tA := model.ParsedModelRoot.TechnicalAssets[techAssetID] if tA.IsTaggedWithAny(SupportedTags()...) { - addAccordingToBasetag(tA, tA.Tags, + addAccordingToBaseTag(tA, tA.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) added = true } else if taggedOuterTB { - addAccordingToBasetag(tA, trustBoundary.Tags, + addAccordingToBaseTag(tA, trustBoundary.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) added = true @@ -102,7 +102,7 @@ func GenerateRisks() []model.Risk { // now loop over all technical assets, trust boundaries, and shared runtimes model-wide by tag for _, tA := range model.TechnicalAssetsTaggedWithAny(SupportedTags()...) { - addAccordingToBasetag(tA, tA.Tags, + addAccordingToBaseTag(tA, tA.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) } @@ -110,22 +110,22 @@ func GenerateRisks() []model.Risk { for _, candidateID := range tB.RecursivelyAllTechnicalAssetIDsInside() { tA := model.ParsedModelRoot.TechnicalAssets[candidateID] if tA.IsTaggedWithAny(SupportedTags()...) 
{ - addAccordingToBasetag(tA, tA.Tags, + addAccordingToBaseTag(tA, tA.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) } else { - addAccordingToBasetag(tA, tB.Tags, + addAccordingToBaseTag(tA, tB.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) } } } for _, sR := range model.SharedRuntimesTaggedWithAny(SupportedTags()...) { - addSharedRuntimeAccordingToBasetag(sR, sharedRuntimesWithUnspecificCloudRisks, + addSharedRuntimeAccordingToBaseTag(sR, sharedRuntimesWithUnspecificCloudRisks, sharedRuntimeIDsAWS, sharedRuntimeIDsAzure, sharedRuntimeIDsGCP, sharedRuntimeIDsOCP) for _, candidateID := range sR.TechnicalAssetsRunning { tA := model.ParsedModelRoot.TechnicalAssets[candidateID] - addAccordingToBasetag(tA, sR.Tags, + addAccordingToBaseTag(tA, sR.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) } @@ -269,7 +269,7 @@ func GenerateRisks() []model.Risk { return risks } -func addTrustBoundaryAccordingToBasetag(trustBoundary model.TrustBoundary, +func addTrustBoundaryAccordingToBaseTag(trustBoundary model.TrustBoundary, trustBoundariesWithUnspecificCloudRisks map[string]bool, trustBoundaryIDsAWS map[string]bool, trustBoundaryIDsAzure map[string]bool, @@ -293,7 +293,7 @@ func addTrustBoundaryAccordingToBasetag(trustBoundary model.TrustBoundary, } } -func addSharedRuntimeAccordingToBasetag(sharedRuntime model.SharedRuntime, +func addSharedRuntimeAccordingToBaseTag(sharedRuntime model.SharedRuntime, sharedRuntimesWithUnspecificCloudRisks map[string]bool, sharedRuntimeIDsAWS map[string]bool, sharedRuntimeIDsAzure map[string]bool, @@ -317,13 +317,13 @@ func addSharedRuntimeAccordingToBasetag(sharedRuntime model.SharedRuntime, } } -func addAccordingToBasetag(techAsset model.TechnicalAsset, tags []string, +func addAccordingToBaseTag(techAsset model.TechnicalAsset, tags 
[]string, techAssetIDsWithTagSpecificCloudRisks map[string]bool, techAssetIDsAWS map[string]bool, techAssetIDsAzure map[string]bool, techAssetIDsGCP map[string]bool, techAssetIDsOCP map[string]bool) { - if techAsset.IsTaggedWithAny(specificSubtagsAWS...) { + if techAsset.IsTaggedWithAny(specificSubTagsAWS...) { techAssetIDsWithTagSpecificCloudRisks[techAsset.Id] = true } if model.IsTaggedWithBaseTag(tags, "aws") { diff --git a/risks/built-in/missing-file-validation/missing-file-validation-rule.go b/risks/built-in/missing-file-validation/missing-file-validation-rule.go index c8633038..bc0b5d67 100644 --- a/risks/built-in/missing-file-validation/missing-file-validation-rule.go +++ b/risks/built-in/missing-file-validation/missing-file-validation-rule.go @@ -15,7 +15,7 @@ func Category() model.RiskCategory { Action: "File Validation", Mitigation: "Filter by file extension and discard (if feasible) the name provided. Whitelist the accepted file types " + "and determine the mime-type on the server-side (for example via \"Apache Tika\" or similar checks). If the file is retrievable by " + - "endusers and/or backoffice employees, consider performing scans for popular malware (if the files can be retrieved much later than they " + + "end users and/or backoffice employees, consider performing scans for popular malware (if the files can be retrieved much later than they " + "were uploaded, also apply a fresh malware scan during retrieval to scan with newer signatures of popular malware). 
Also enforce " + "limits on maximum file size to avoid denial-of-service like scenarios.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", diff --git a/risks/built-in/missing-hardening/missing-hardening-rule.go b/risks/built-in/missing-hardening/missing-hardening-rule.go index b4795740..d9061cfc 100644 --- a/risks/built-in/missing-hardening/missing-hardening-rule.go +++ b/risks/built-in/missing-hardening/missing-hardening-rule.go @@ -24,7 +24,7 @@ func Category() model.RiskCategory { Function: model.Operations, STRIDE: model.Tampering, DetectionLogic: "In-scope technical assets with RAA values of " + strconv.Itoa(raaLimit) + " % or higher. " + - "Generally for high-value targets like datastores, application servers, identity providers and ERP systems this limit is reduced to " + strconv.Itoa(raaLimitReduced) + " %", + "Generally for high-value targets like data stores, application servers, identity providers and ERP systems this limit is reduced to " + strconv.Itoa(raaLimitReduced) + " %", RiskAssessment: "The risk rating depends on the sensitivity of the data processed or stored in the technical asset.", FalsePositives: "Usually no false positives.", ModelFailurePossibleReason: false, diff --git a/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go b/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go index 31f10700..01e2ba38 100644 --- a/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go +++ b/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go @@ -8,22 +8,22 @@ func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-identity-propagation", Title: "Missing Identity Propagation", - Description: "Technical assets (especially multi-tenant systems), which usually process data for endusers should " + - "authorize every request based on the identity of the enduser when the data flow is 
authenticated (i.e. non-public). " + + Description: "Technical assets (especially multi-tenant systems), which usually process data for end users should " + + "authorize every request based on the identity of the end user when the data flow is authenticated (i.e. non-public). " + "For DevOps usages at least a technical-user authorization is required.", Impact: "If this risk is unmitigated, attackers might be able to access or modify foreign data after a successful compromise of a component within " + "the system due to missing resource-based authorization checks.", ASVS: "V4 - Access Control Verification Requirements", CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Access_Control_Cheat_Sheet.html", Action: "Identity Propagation and Resource-based Authorization", - Mitigation: "When processing requests for endusers if possible authorize in the backend against the propagated " + - "identity of the enduser. This can be achieved in passing JWTs or similar tokens and checking them in the backend " + + Mitigation: "When processing requests for end users if possible authorize in the backend against the propagated " + + "identity of the end user. This can be achieved in passing JWTs or similar tokens and checking them in the backend " + "services. For DevOps usages apply at least a technical-user authorization.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", Function: model.Architecture, STRIDE: model.ElevationOfPrivilege, - DetectionLogic: "In-scope service-like technical assets which usually process data based on enduser requests, if authenticated " + - "(i.e. non-public), should authorize incoming requests based on the propagated enduser identity when their rating is sensitive. " + + DetectionLogic: "In-scope service-like technical assets which usually process data based on end user requests, if authenticated " + + "(i.e. 
non-public), should authorize incoming requests based on the propagated end user identity when their rating is sensitive. " + "This is especially the case for all multi-tenant assets (there even less-sensitive rated ones). " + "DevOps usages are exempted from this risk.", RiskAssessment: "The risk rating (medium or high) " + @@ -46,7 +46,7 @@ func GenerateRisks() []model.Risk { if technicalAsset.OutOfScope { continue } - if technicalAsset.Technology.IsUsuallyProcessingEnduserRequests() && + if technicalAsset.Technology.IsUsuallyProcessingEndUserRequests() && (technicalAsset.Confidentiality >= model.Confidential || technicalAsset.Integrity >= model.Critical || technicalAsset.Availability >= model.Critical || @@ -62,7 +62,7 @@ func GenerateRisks() []model.Risk { continue } if commLink.Authentication != model.NoneAuthentication && - commLink.Authorization != model.EnduserIdentityPropagation { + commLink.Authorization != model.EndUserIdentityPropagation { if commLink.Usage == model.DevOps && commLink.Authorization != model.NoneAuthorization { continue } @@ -87,7 +87,7 @@ func createRisk(technicalAsset model.TechnicalAsset, incomingAccess model.Commun Severity: model.CalculateSeverity(model.Unlikely, impact), ExploitationLikelihood: model.Unlikely, ExploitationImpact: impact, - Title: "Missing Enduser Identity Propagation over communication link " + incomingAccess.Title + " " + + Title: "Missing End User Identity Propagation over communication link " + incomingAccess.Title + " " + "from " + model.ParsedModelRoot.TechnicalAssets[incomingAccess.SourceId].Title + " " + "to " + technicalAsset.Title + "", MostRelevantTechnicalAssetId: technicalAsset.Id, diff --git a/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go b/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go index acd44afd..287cd751 100644 --- 
a/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go +++ b/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go @@ -8,18 +8,18 @@ func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-identity-provider-isolation", Title: "Missing Identity Provider Isolation", - Description: "Highly sensitive identity provider assets and their identity datastores should be isolated from other assets " + + Description: "Highly sensitive identity provider assets and their identity data stores should be isolated from other assets " + "by their own network segmentation trust-boundary (" + model.ExecutionEnvironment.String() + " boundaries do not count as network isolation).", Impact: "If this risk is unmitigated, attackers successfully attacking other components of the system might have an easy path towards " + - "highly sensitive identity provider assets and their identity datastores, as they are not separated by network segmentation.", + "highly sensitive identity provider assets and their identity data stores, as they are not separated by network segmentation.", ASVS: "V1 - Architecture, Design and Threat Modeling Requirements", CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", Action: "Network Segmentation", - Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive identity provider assets and their identity datastores.", + Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive identity provider assets and their identity data stores.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", Function: model.Operations, STRIDE: model.ElevationOfPrivilege, - DetectionLogic: "In-scope identity provider assets and their identity datastores " + + DetectionLogic: "In-scope identity provider assets and their identity data stores " + 
"when surrounded by other (not identity-related) assets (without a network trust-boundary in-between). " + "This risk is especially prevalent when other non-identity related assets are within the same execution environment (i.e. same database or same application server).", RiskAssessment: "Default is " + model.HighImpact.String() + " impact. The impact is increased to " + model.VeryHighImpact.String() + " when the asset missing the " + @@ -45,7 +45,7 @@ func GenerateRisks() []model.Risk { sameExecutionEnv := false createRiskEntry := false // now check for any other same-network assets of non-identity-related types - for sparringAssetCandidateId, _ := range model.ParsedModelRoot.TechnicalAssets { // so inner loop again over all assets + for sparringAssetCandidateId := range model.ParsedModelRoot.TechnicalAssets { // so inner loop again over all assets if technicalAsset.Id != sparringAssetCandidateId { sparringAssetCandidate := model.ParsedModelRoot.TechnicalAssets[sparringAssetCandidateId] if !sparringAssetCandidate.Technology.IsIdentityRelated() && !sparringAssetCandidate.Technology.IsCloseToHighValueTargetsTolerated() { diff --git a/risks/built-in/missing-identity-store/missing-identity-store-rule.go b/risks/built-in/missing-identity-store/missing-identity-store-rule.go index 9096e320..c985a39e 100644 --- a/risks/built-in/missing-identity-store/missing-identity-store-rule.go +++ b/risks/built-in/missing-identity-store/missing-identity-store-rule.go @@ -19,8 +19,8 @@ func Category() model.RiskCategory { Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", Function: model.Architecture, STRIDE: model.Spoofing, - DetectionLogic: "Models with authenticated data-flows authorized via enduser-identity missing an in-scope identity store.", - RiskAssessment: "The risk rating depends on the sensitivity of the enduser-identity authorized technical assets and " + + DetectionLogic: "Models with authenticated data-flows authorized via 
end user identity missing an in-scope identity store.", + RiskAssessment: "The risk rating depends on the sensitivity of the end user-identity authorized technical assets and " + "their data assets processed and stored.", FalsePositives: "Models only offering data/services without any real authentication need " + "can be considered as false positives after individual review.", @@ -42,14 +42,14 @@ func GenerateRisks() []model.Risk { return risks } } - // now check if we have enduser-identity authorized communication links, then it's a risk + // now check if we have end user identity authorized communication links, then it's a risk riskIdentified := false var mostRelevantAsset model.TechnicalAsset impact := model.LowImpact - for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with highest sensitivity as example asset + for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] - for _, commLink := range technicalAsset.CommunicationLinksSorted() { // use the sorted one to always get the same tech asset with highest sensitivity as example asset - if commLink.Authorization == model.EnduserIdentityPropagation { + for _, commLink := range technicalAsset.CommunicationLinksSorted() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset + if commLink.Authorization == model.EndUserIdentityPropagation { riskIdentified = true targetAsset := model.ParsedModelRoot.TechnicalAssets[commLink.TargetId] if impact == model.LowImpact { diff --git a/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go b/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go index 495a3215..2b29af70 100644 --- a/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go +++ 
b/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go @@ -11,7 +11,7 @@ func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-network-segmentation", Title: "Missing Network Segmentation", - Description: "Highly sensitive assets and/or datastores residing in the same network segment than other " + + Description: "Highly sensitive assets and/or data stores residing in the same network segment than other " + "lower sensitive assets (like webservers or content management systems etc.) should be better protected " + "by a network segmentation trust-boundary.", Impact: "If this risk is unmitigated, attackers successfully attacking other components of the system might have an easy path towards " + @@ -19,11 +19,11 @@ func Category() model.RiskCategory { ASVS: "V1 - Architecture, Design and Threat Modeling Requirements", CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", Action: "Network Segmentation", - Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive assets and/or datastores.", + Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive assets and/or data stores.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", Function: model.Operations, STRIDE: model.ElevationOfPrivilege, - DetectionLogic: "In-scope technical assets with high sensitivity and RAA values as well as datastores " + + DetectionLogic: "In-scope technical assets with high sensitivity and RAA values as well as data stores " + "when surrounded by assets (without a network trust-boundary in-between) which are of type " + model.ClientSystem.String() + ", " + model.WebServer.String() + ", " + model.WebApplication.String() + ", " + model.CMS.String() + ", " + model.WebServiceREST.String() + ", " + model.WebServiceSOAP.String() + ", " + model.BuildPipeline.String() + ", " + 
model.SourcecodeRepository.String() + ", " + model.Monitoring.String() + ", or similar and there is no direct connection between these " + @@ -46,7 +46,7 @@ func GenerateRisks() []model.Risk { // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: keys := make([]string, 0) - for k, _ := range model.ParsedModelRoot.TechnicalAssets { + for k := range model.ParsedModelRoot.TechnicalAssets { keys = append(keys, k) } sort.Strings(keys) diff --git a/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go b/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go index 8ef10cdf..0c8919b4 100644 --- a/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go +++ b/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go @@ -8,14 +8,14 @@ func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-vault-isolation", Title: "Missing Vault Isolation", - Description: "Highly sensitive vault assets and their datastores should be isolated from other assets " + + Description: "Highly sensitive vault assets and their data stores should be isolated from other assets " + "by their own network segmentation trust-boundary (" + model.ExecutionEnvironment.String() + " boundaries do not count as network isolation).", Impact: "If this risk is unmitigated, attackers successfully attacking other components of the system might have an easy path towards " + - "highly sensitive vault assets and their datastores, as they are not separated by network segmentation.", + "highly sensitive vault assets and their data stores, as they are not separated by network segmentation.", ASVS: "V1 - Architecture, Design and Threat Modeling Requirements", CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", Action: "Network Segmentation", - 
Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive vault assets and their datastores.", + Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive vault assets and their data stores.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", Function: model.Operations, STRIDE: model.ElevationOfPrivilege, @@ -45,7 +45,7 @@ func GenerateRisks() []model.Risk { sameExecutionEnv := false createRiskEntry := false // now check for any other same-network assets of non-vault-related types - for sparringAssetCandidateId, _ := range model.ParsedModelRoot.TechnicalAssets { // so inner loop again over all assets + for sparringAssetCandidateId := range model.ParsedModelRoot.TechnicalAssets { // so inner loop again over all assets if technicalAsset.Id != sparringAssetCandidateId { sparringAssetCandidate := model.ParsedModelRoot.TechnicalAssets[sparringAssetCandidateId] if sparringAssetCandidate.Technology != model.Vault && !isVaultStorage(technicalAsset, sparringAssetCandidate) { diff --git a/risks/built-in/missing-vault/missing-vault-rule.go b/risks/built-in/missing-vault/missing-vault-rule.go index a046131b..ac3e4590 100644 --- a/risks/built-in/missing-vault/missing-vault-rule.go +++ b/risks/built-in/missing-vault/missing-vault-rule.go @@ -39,7 +39,7 @@ func GenerateRisks() []model.Risk { hasVault := false var mostRelevantAsset model.TechnicalAsset impact := model.LowImpact - for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with highest sensitivity as example asset + for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset techAsset := model.ParsedModelRoot.TechnicalAssets[id] if techAsset.Technology == model.Vault { hasVault = true diff --git 
a/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go b/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go index 3c9ab06b..435a6ab9 100644 --- a/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go +++ b/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go @@ -41,7 +41,7 @@ func GenerateRisks() []model.Risk { risks := make([]model.Risk, 0) // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: keys := make([]string, 0) - for k, _ := range model.ParsedModelRoot.SharedRuntimes { + for k := range model.ParsedModelRoot.SharedRuntimes { keys = append(keys, k) } sort.Strings(keys) diff --git a/risks/built-in/search-query-injection/search-query-injection-rule.go b/risks/built-in/search-query-injection/search-query-injection-rule.go index 936ab6e6..1f250b28 100644 --- a/risks/built-in/search-query-injection/search-query-injection-rule.go +++ b/risks/built-in/search-query-injection/search-query-injection-rule.go @@ -43,7 +43,7 @@ func GenerateRisks() []model.Risk { continue } if incomingFlow.Protocol == model.HTTP || incomingFlow.Protocol == model.HTTPS || - incomingFlow.Protocol == model.BINARY || incomingFlow.Protocol == model.BINARY_encrypted { + incomingFlow.Protocol == model.BINARY || incomingFlow.Protocol == model.BinaryEncrypted { likelihood := model.VeryLikely if incomingFlow.Usage == model.DevOps { likelihood = model.Likely diff --git a/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go b/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go index 901c5fcf..a1844ae0 100644 --- a/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go +++ b/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go @@ -80,7 +80,7 @@ func createRisk(technicalAsset model.TechnicalAsset, outgoingFlow 
model.Communic impact = model.MediumImpact } dataBreachTechnicalAssetIDs := make([]string, 0) - for key, _ := range uniqueDataBreachTechnicalAssetIDs { + for key := range uniqueDataBreachTechnicalAssetIDs { dataBreachTechnicalAssetIDs = append(dataBreachTechnicalAssetIDs, key) } likelihood := model.Likely diff --git a/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go b/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go index 34baf45f..80cac869 100644 --- a/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go +++ b/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go @@ -71,7 +71,7 @@ func createRisk(technicalAsset model.TechnicalAsset) model.Risk { } } dataBreachTechnicalAssetIDs := make([]string, 0) - for key, _ := range uniqueDataBreachTechnicalAssetIDs { + for key := range uniqueDataBreachTechnicalAssetIDs { dataBreachTechnicalAssetIDs = append(dataBreachTechnicalAssetIDs, key) } // create risk diff --git a/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go b/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go index cdf23c6b..67c42992 100644 --- a/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go +++ b/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go @@ -23,7 +23,7 @@ func Category() model.RiskCategory { ", " + model.IPS.String() + " and embedded components like " + model.Library.String() + ") " + "storing data assets rated at least as " + model.Confidential.String() + " or " + model.Critical.String() + ". 
" + "For technical assets storing data assets rated as " + model.StrictlyConfidential.String() + " or " + model.MissionCritical.String() + " the " + - "encryption must be of type " + model.DataWithEnduserIndividualKey.String() + ".", + "encryption must be of type " + model.DataWithEndUserIndividualKey.String() + ".", RiskAssessment: "Depending on the confidentiality rating of the stored data-assets either medium or high risk.", FalsePositives: "When all sensitive data stored within the asset is already fully encrypted on document or data level.", ModelFailurePossibleReason: false, @@ -36,6 +36,7 @@ func SupportedTags() []string { } // check for technical assets that should be encrypted due to their confidentiality + func GenerateRisks() []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { @@ -45,16 +46,16 @@ func GenerateRisks() []model.Risk { technicalAsset.HighestIntegrity() >= model.Critical) { verySensitive := technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || technicalAsset.HighestIntegrity() == model.MissionCritical - requiresEnduserKey := verySensitive && technicalAsset.Technology.IsUsuallyStoringEnduserData() + requiresEndUserKey := verySensitive && technicalAsset.Technology.IsUsuallyStoringEndUserData() if technicalAsset.Encryption == model.NoneEncryption { impact := model.MediumImpact if verySensitive { impact = model.HighImpact } - risks = append(risks, createRisk(technicalAsset, impact, requiresEnduserKey)) - } else if requiresEnduserKey && + risks = append(risks, createRisk(technicalAsset, impact, requiresEndUserKey)) + } else if requiresEndUserKey && (technicalAsset.Encryption == model.Transparent || technicalAsset.Encryption == model.DataWithSymmetricSharedKey || technicalAsset.Encryption == model.DataWithAsymmetricSharedKey) { - risks = append(risks, createRisk(technicalAsset, model.MediumImpact, requiresEnduserKey)) + risks = append(risks, createRisk(technicalAsset, 
model.MediumImpact, requiresEndUserKey)) } } } @@ -63,16 +64,17 @@ func GenerateRisks() []model.Risk { // Simple routing assets like 'Reverse Proxy' or 'Load Balancer' usually don't have their own storage and thus have no // encryption requirement for the asset itself (though for the communication, but that's a different rule) + func IsEncryptionWaiver(asset model.TechnicalAsset) bool { return asset.Technology == model.ReverseProxy || asset.Technology == model.LoadBalancer || asset.Technology == model.WAF || asset.Technology == model.IDS || asset.Technology == model.IPS || asset.Technology.IsEmbeddedComponent() } -func createRisk(technicalAsset model.TechnicalAsset, impact model.RiskExploitationImpact, requiresEnduserKey bool) model.Risk { +func createRisk(technicalAsset model.TechnicalAsset, impact model.RiskExploitationImpact, requiresEndUserKey bool) model.Risk { title := "Unencrypted Technical Asset named " + technicalAsset.Title + "" - if requiresEnduserKey { - title += " missing enduser-individual encryption with " + model.DataWithEnduserIndividualKey.String() + if requiresEndUserKey { + title += " missing end user individual encryption with " + model.DataWithEndUserIndividualKey.String() } risk := model.Risk{ Category: Category(), diff --git a/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go b/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go index 0718540b..ed5a7eb3 100644 --- a/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go +++ b/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go @@ -33,6 +33,7 @@ func SupportedTags() []string { } // check for communication links that should be encrypted due to their confidentiality and/or integrity + func GenerateRisks() []model.Risk { risks := make([]model.Risk, 0) for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { @@ -42,8 +43,8 @@ func GenerateRisks() []model.Risk { targetAsset := 
model.ParsedModelRoot.TechnicalAssets[dataFlow.TargetId] if !technicalAsset.OutOfScope || !sourceAsset.OutOfScope { if !dataFlow.Protocol.IsEncrypted() && !dataFlow.Protocol.IsProcessLocal() && - !sourceAsset.Technology.IsUnprotectedCommsTolerated() && - !targetAsset.Technology.IsUnprotectedCommsTolerated() { + !sourceAsset.Technology.IsUnprotectedCommunicationsTolerated() && + !targetAsset.Technology.IsUnprotectedCommunicationsTolerated() { addedOne := false for _, sentDataAsset := range dataFlow.DataAssetsSent { dataAsset := model.ParsedModelRoot.DataAssets[sentDataAsset] diff --git a/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go b/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go index 5fa8f9e2..e845f955 100644 --- a/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go +++ b/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go @@ -8,8 +8,8 @@ func Category() model.RiskCategory { return model.RiskCategory{ Id: "unguarded-direct-datastore-access", Title: "Unguarded Direct Datastore Access", - Description: "Datastores accessed across trust boundaries must be guarded by some protecting service or application.", - Impact: "If this risk is unmitigated, attackers might be able to directly attack sensitive datastores without any protecting components in-between.", + Description: "Data stores accessed across trust boundaries must be guarded by some protecting service or application.", + Impact: "If this risk is unmitigated, attackers might be able to directly attack sensitive data stores without any protecting components in-between.", ASVS: "V1 - Architecture, Design and Threat Modeling Requirements", CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", Action: "Encapsulation of Datastore", @@ -34,7 +34,8 @@ func SupportedTags() []string { return []string{} } -// 
check for datastores that should not be accessed directly across trust boundaries +// check for data stores that should not be accessed directly across trust boundaries + func GenerateRisks() []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { diff --git a/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go b/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go index 5da7f2db..f4e5c8d6 100644 --- a/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go +++ b/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go @@ -55,8 +55,8 @@ func GenerateRisks() []model.Risk { } // check for any incoming IIOP and JRMP protocols for _, commLink := range model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] { - if commLink.Protocol == model.IIOP || commLink.Protocol == model.IIOP_encrypted || - commLink.Protocol == model.JRMP || commLink.Protocol == model.JRMP_encrypted { + if commLink.Protocol == model.IIOP || commLink.Protocol == model.IiopEncrypted || + commLink.Protocol == model.JRMP || commLink.Protocol == model.JrmpEncrypted { hasOne = true if commLink.IsAcrossTrustBoundaryNetworkOnly() { acrossTrustBoundary = true diff --git a/risks/custom/demo/demo-rule.go b/risks/custom/demo/demo-rule.go index 5eb8d672..2a2daacf 100644 --- a/risks/custom/demo/demo-rule.go +++ b/risks/custom/demo/demo-rule.go @@ -7,6 +7,7 @@ import ( type customRiskRule string // exported as symbol (here simply as variable to interface to bundle many functions under one symbol) named "CustomRiskRule" + var CustomRiskRule customRiskRule func (r customRiskRule) Category() model.RiskCategory { From 18585ce6e62b4a3e6d458c4f08ea9682a5ab4e02 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Fri, 15 Dec 2023 10:34:37 -0800 Subject: [PATCH 04/68] implemented multi-yaml support --- go.mod | 1 + go.sum | 2 + main.go | 3497 +++++++++++---------- model/types.go 
| 508 ++- test/abuse_cases.yaml | 30 + test/all.yaml | 1354 ++++++++ test/data_assets.yaml | 164 + test/diagram_tweak.yaml | 13 + test/main.yaml | 27 + test/meta.yaml | 9 + test/overview.yaml | 16 + test/questions.yaml | 6 + test/risk_categories.yaml | 44 + test/risk_tracking.yaml | 52 + test/security_requirements.yaml | 4 + test/shared_runtimes.yaml | 16 + test/tags.yaml | 16 + test/technical_assets.yaml | 6 + test/technical_assets_clients.yaml | 211 ++ test/technical_assets_databases.yaml | 71 + test/technical_assets_devops.yaml | 224 ++ test/technical_assets_infrastructure.yaml | 75 + test/technical_assets_servers.yaml | 295 ++ test/trust_boundaries.yaml | 67 + 24 files changed, 4878 insertions(+), 1830 deletions(-) create mode 100644 test/abuse_cases.yaml create mode 100644 test/all.yaml create mode 100644 test/data_assets.yaml create mode 100644 test/diagram_tweak.yaml create mode 100644 test/main.yaml create mode 100644 test/meta.yaml create mode 100644 test/overview.yaml create mode 100644 test/questions.yaml create mode 100644 test/risk_categories.yaml create mode 100644 test/risk_tracking.yaml create mode 100644 test/security_requirements.yaml create mode 100644 test/shared_runtimes.yaml create mode 100644 test/tags.yaml create mode 100644 test/technical_assets.yaml create mode 100644 test/technical_assets_clients.yaml create mode 100644 test/technical_assets_databases.yaml create mode 100644 test/technical_assets_devops.yaml create mode 100644 test/technical_assets_infrastructure.yaml create mode 100644 test/technical_assets_servers.yaml create mode 100644 test/trust_boundaries.yaml diff --git a/go.mod b/go.mod index 5a91844f..6b20efe3 100644 --- a/go.mod +++ b/go.mod @@ -13,6 +13,7 @@ require ( ) require ( + github.com/akedrou/textdiff v0.0.0-20230423230343-2ebdcebdccc1 // indirect github.com/blend/go-sdk v1.20220411.3 // indirect github.com/bytedance/sonic v1.10.2 // indirect github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect 
diff --git a/go.sum b/go.sum index 827e8d8f..4a517668 100644 --- a/go.sum +++ b/go.sum @@ -1,3 +1,5 @@ +github.com/akedrou/textdiff v0.0.0-20230423230343-2ebdcebdccc1 h1:XfKKiQL7irIGI7nfu4a6IKhrgUHvKwhH/AnuHgZy/+U= +github.com/akedrou/textdiff v0.0.0-20230423230343-2ebdcebdccc1/go.mod h1:PJwvxBpzqjdeomc0r8Hgc+xJC7k6z+k371tffCGXR2M= github.com/blend/go-sdk v1.20220411.3 h1:GFV4/FQX5UzXLPwWV03gP811pj7B8J2sbuq+GJQofXc= github.com/blend/go-sdk v1.20220411.3/go.mod h1:7lnH8fTi6U4i1fArEXRyOIY2E1X4MALg09qsQqY1+ak= github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= diff --git a/main.go b/main.go index 87532907..39faa05b 100644 --- a/main.go +++ b/main.go @@ -12,9 +12,11 @@ import ( "crypto/sha512" "encoding/base64" "encoding/hex" + "encoding/json" "errors" "flag" "fmt" + "github.com/akedrou/textdiff" "hash/fnv" "io" "log" @@ -87,7 +89,10 @@ import ( "gopkg.in/yaml.v3" ) -const keepDiagramSourceFiles = false +const ( + keepDiagramSourceFiles = false + addModelTitle = false +) const ( defaultGraphvizDPI, maxGraphvizDPI = 120, 240 @@ -95,6 +100,7 @@ const ( ) const ( + buildTimestamp = "" tmpFolder = "/dev/shm" // TODO: make configurable via cmdline arg? 
appFolder = "/app" baseFolder = "/data" @@ -113,32 +119,51 @@ const ( outputFile = "threagile.yaml" ) -var ( - successCount, errorCount = 0, 0 - drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = true - buildTimestamp = "" - tempFolder = tmpFolder +type Context struct { + successCount int + errorCount int + drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks bool + buildTimestamp string globalLock sync.Mutex modelInput model.ModelInput - modelFilename, templateFilename/* diagramFilename, reportFilename, graphvizConversion*/ *string + tempFolder *string + modelFilename, templateFilename *string + testParseModel *bool createExampleModel, createStubModel, createEditingSupport, verbose, ignoreOrphanedRiskTracking *bool generateDataFlowDiagram, generateDataAssetDiagram, generateRisksJSON, generateTechnicalAssetsJSON *bool generateStatsJSON, generateRisksExcel, generateTagsExcel, generateReportPDF *bool outputDir, raaPlugin, skipRiskRules, riskRulesPlugins, executeModelMacro *string customRiskRules map[string]model.CustomRiskRule diagramDPI, serverPort *int - deferredRiskTrackingDueToWildcardMatching = make(map[string]model.RiskTracking) -) + deferredRiskTrackingDueToWildcardMatching map[string]model.RiskTracking + addModelTitle bool + keepDiagramSourceFiles bool + appFolder *string + baseFolder *string +} + +func (context *Context) Defaults() *Context { + *context = Context{ + keepDiagramSourceFiles: keepDiagramSourceFiles, + addModelTitle: addModelTitle, + buildTimestamp: buildTimestamp, + customRiskRules: make(map[string]model.CustomRiskRule), + deferredRiskTrackingDueToWildcardMatching: make(map[string]model.RiskTracking), + drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks: true, + } + + return context +} -func applyRiskGeneration() { - if *verbose { +func (context *Context) applyRiskGeneration() { + if *context.verbose { fmt.Println("Applying risk generation") } skippedRules := make(map[string]interface{}) - if len(*skipRiskRules) > 
0 { - for _, id := range strings.Split(*skipRiskRules, ",") { + if len(*context.skipRiskRules) > 0 { + for _, id := range strings.Split(*context.skipRiskRules, ",") { skippedRules[id] = true } } @@ -606,14 +631,14 @@ func applyRiskGeneration() { } // NOW THE CUSTOM RISK RULES (if any) - for id, customRule := range customRiskRules { + for id, customRule := range context.customRiskRules { if _, ok := skippedRules[customRule.Category().Id]; ok { - if *verbose { + if *context.verbose { fmt.Println("Skipping custom risk rule:", id) } delete(skippedRules, id) } else { - if *verbose { + if *context.verbose { fmt.Println("Executing custom risk rule:", id) } model.AddToListOfSupportedTags(customRule.SupportedTags()) @@ -621,7 +646,7 @@ func applyRiskGeneration() { if len(risks) > 0 { model.GeneratedRisksByCategory[customRule.Category()] = risks } - if *verbose { + if *context.verbose { fmt.Println("Added custom risks:", len(risks)) } } @@ -646,13 +671,13 @@ func applyRiskGeneration() { } } -func checkRiskTracking() { - if *verbose { +func (context *Context) checkRiskTracking() { + if *context.verbose { fmt.Println("Checking risk tracking") } for _, tracking := range model.ParsedModelRoot.RiskTracking { if _, ok := model.GeneratedRisksBySyntheticId[tracking.SyntheticRiskId]; !ok { - if *ignoreOrphanedRiskTracking { + if *context.ignoreOrphanedRiskTracking { fmt.Println("Risk tracking references unknown risk (risk id not found): " + tracking.SyntheticRiskId) } else { panic(errors.New("Risk tracking references unknown risk (risk id not found) - you might want to use the option -ignore-orphaned-risk-tracking: " + tracking.SyntheticRiskId + @@ -686,17 +711,18 @@ func checkErr(err error) { } func main() { - parseCommandlineArgs() - if *serverPort > 0 { - startServer() + context := new(Context).Defaults() + context.parseCommandlineArgs() + if *context.serverPort > 0 { + context.startServer() } else { - doIt(*modelFilename, *outputDir) + context.doIt() } } // Unzip will decompress 
a zip archive, moving all files and folders // within the zip file (parameter 1) to an output directory (parameter 2). -func unzip(src string, dest string) ([]string, error) { +func (context *Context) unzip(src string, dest string) ([]string, error) { var filenames []string r, err := zip.OpenReader(src) @@ -744,7 +770,7 @@ func unzip(src string, dest string) ([]string, error) { // ZipFiles compresses one or many files into a single zip archive file. // Param 1: filename is the output zip file's name. // Param 2: files is a list of files to add to the zip. -func zipFiles(filename string, files []string) error { +func (context *Context) zipFiles(filename string, files []string) error { newZipFile, err := os.Create(filename) if err != nil { return err @@ -756,14 +782,14 @@ func zipFiles(filename string, files []string) error { // Add files to zip for _, file := range files { - if err = addFileToZip(zipWriter, file); err != nil { + if err = context.addFileToZip(zipWriter, file); err != nil { return err } } return nil } -func addFileToZip(zipWriter *zip.Writer, filename string) error { +func (context *Context) addFileToZip(zipWriter *zip.Writer, filename string) error { fileToZip, err := os.Open(filename) if err != nil { return err @@ -797,37 +823,37 @@ func addFileToZip(zipWriter *zip.Writer, filename string) error { return err } -func doIt(inputFilename string, outputDirectory string) { +func (context *Context) doIt() { defer func() { var err error if r := recover(); r != nil { err = r.(error) - if *verbose { + if *context.verbose { log.Println(err) } _, _ = os.Stderr.WriteString(err.Error() + "\n") os.Exit(2) } }() - if len(*executeModelMacro) > 0 { - printLogo() + if len(*context.executeModelMacro) > 0 { + context.printLogo() } else { - if *verbose { - fmt.Println("Writing into output directory:", outputDirectory) + if *context.verbose { + fmt.Println("Writing into output directory:", *context.outputDir) } } model.Init() - parseModel(inputFilename) - introTextRAA := 
applyRAA() - loadCustomRiskRules() - applyRiskGeneration() - applyWildcardRiskTrackingEvaluation() - checkRiskTracking() - - if len(*executeModelMacro) > 0 { + context.parseModel() + introTextRAA := context.applyRAA() + context.loadCustomRiskRules() + context.applyRiskGeneration() + context.applyWildcardRiskTrackingEvaluation() + context.checkRiskTracking() + + if len(*context.executeModelMacro) > 0 { var macroDetails model.MacroDetails - switch *executeModelMacro { + switch *context.executeModelMacro { case addbuildpipeline.GetMacroDetails().ID: macroDetails = addbuildpipeline.GetMacroDetails() case addvault.GetMacroDetails().ID: @@ -841,14 +867,14 @@ func doIt(inputFilename string, outputDirectory string) { case seedtags.GetMacroDetails().ID: macroDetails = seedtags.GetMacroDetails() default: - log.Fatal("Unknown model macro: ", *executeModelMacro) + log.Fatal("Unknown model macro: ", *context.executeModelMacro) } fmt.Println("Executing model macro:", macroDetails.ID) fmt.Println() fmt.Println() - printBorder(len(macroDetails.Title), true) + context.printBorder(len(macroDetails.Title), true) fmt.Println(macroDetails.Title) - printBorder(len(macroDetails.Title), true) + context.printBorder(len(macroDetails.Title), true) if len(macroDetails.Description) > 0 { fmt.Println(macroDetails.Description) } @@ -876,9 +902,9 @@ func doIt(inputFilename string, outputDirectory string) { break } fmt.Println() - printBorder(len(nextQuestion.Title), false) + context.printBorder(len(nextQuestion.Title), false) fmt.Println(nextQuestion.Title) - printBorder(len(nextQuestion.Title), false) + context.printBorder(len(nextQuestion.Title), false) if len(nextQuestion.Description) > 0 { fmt.Println(nextQuestion.Description) } @@ -1034,17 +1060,17 @@ func doIt(inputFilename string, outputDirectory string) { var err error switch macroDetails.ID { case addbuildpipeline.GetMacroDetails().ID: - changes, message, validResult, err = addbuildpipeline.GetFinalChangeImpact(&modelInput) + changes, 
message, validResult, err = addbuildpipeline.GetFinalChangeImpact(&context.modelInput) case addvault.GetMacroDetails().ID: - changes, message, validResult, err = addvault.GetFinalChangeImpact(&modelInput) + changes, message, validResult, err = addvault.GetFinalChangeImpact(&context.modelInput) case prettyprint.GetMacroDetails().ID: - changes, message, validResult, err = prettyprint.GetFinalChangeImpact(&modelInput) + changes, message, validResult, err = prettyprint.GetFinalChangeImpact(&context.modelInput) case removeunusedtags.GetMacroDetails().ID: - changes, message, validResult, err = removeunusedtags.GetFinalChangeImpact(&modelInput) + changes, message, validResult, err = removeunusedtags.GetFinalChangeImpact(&context.modelInput) case seedrisktracking.GetMacroDetails().ID: - changes, message, validResult, err = seedrisktracking.GetFinalChangeImpact(&modelInput) + changes, message, validResult, err = seedrisktracking.GetFinalChangeImpact(&context.modelInput) case seedtags.GetMacroDetails().ID: - changes, message, validResult, err = seedtags.GetFinalChangeImpact(&modelInput) + changes, message, validResult, err = seedtags.GetFinalChangeImpact(&context.modelInput) } checkErr(err) for _, change := range changes { @@ -1070,17 +1096,17 @@ func doIt(inputFilename string, outputDirectory string) { var err error switch macroDetails.ID { case addbuildpipeline.GetMacroDetails().ID: - message, validResult, err = addbuildpipeline.Execute(&modelInput) + message, validResult, err = addbuildpipeline.Execute(&context.modelInput) case addvault.GetMacroDetails().ID: - message, validResult, err = addvault.Execute(&modelInput) + message, validResult, err = addvault.Execute(&context.modelInput) case prettyprint.GetMacroDetails().ID: - message, validResult, err = prettyprint.Execute(&modelInput) + message, validResult, err = prettyprint.Execute(&context.modelInput) case removeunusedtags.GetMacroDetails().ID: - message, validResult, err = removeunusedtags.Execute(&modelInput) + 
message, validResult, err = removeunusedtags.Execute(&context.modelInput) case seedrisktracking.GetMacroDetails().ID: - message, validResult, err = seedrisktracking.Execute(&modelInput) + message, validResult, err = seedrisktracking.Execute(&context.modelInput) case seedtags.GetMacroDetails().ID: - message, validResult, err = seedtags.Execute(&modelInput) + message, validResult, err = seedtags.Execute(&context.modelInput) } checkErr(err) if !validResult { @@ -1089,18 +1115,18 @@ func doIt(inputFilename string, outputDirectory string) { } fmt.Println(message) fmt.Println() - backupFilename := inputFilename + ".backup" + backupFilename := *context.modelFilename + ".backup" fmt.Println("Creating backup model file:", backupFilename) // TODO add random files in /dev/shm space? - _, err = copyFile(inputFilename, backupFilename) + _, err = copyFile(*context.modelFilename, backupFilename) checkErr(err) fmt.Println("Updating model") - yamlBytes, err := yaml.Marshal(modelInput) + yamlBytes, err := yaml.Marshal(context.modelInput) checkErr(err) /* yamlBytes = model.ReformatYAML(yamlBytes) */ - fmt.Println("Writing model file:", inputFilename) - err = os.WriteFile(inputFilename, yamlBytes, 0400) + fmt.Println("Writing model file:", *context.modelFilename) + err = os.WriteFile(*context.modelFilename, yamlBytes, 0400) checkErr(err) fmt.Println("Model file successfully updated") return @@ -1112,79 +1138,86 @@ func doIt(inputFilename string, outputDirectory string) { return } - renderDataFlowDiagram, renderDataAssetDiagram, renderRisksJSON, renderTechnicalAssetsJSON, renderStatsJSON, renderRisksExcel, renderTagsExcel, renderPDF := *generateDataFlowDiagram, *generateDataAssetDiagram, *generateRisksJSON, *generateTechnicalAssetsJSON, *generateStatsJSON, *generateRisksExcel, *generateTagsExcel, *generateReportPDF + renderDataFlowDiagram := *context.generateDataFlowDiagram + renderDataAssetDiagram := *context.generateDataAssetDiagram + renderRisksJSON := *context.generateRisksJSON + 
renderTechnicalAssetsJSON := *context.generateTechnicalAssetsJSON + renderStatsJSON := *context.generateStatsJSON + renderRisksExcel := *context.generateRisksExcel + renderTagsExcel := *context.generateTagsExcel + renderPDF := *context.generateReportPDF if renderPDF { // as the PDF report includes both diagrams renderDataFlowDiagram, renderDataAssetDiagram = true, true } // Data-flow Diagram rendering if renderDataFlowDiagram { - gvFile := filepath.Join(outputDirectory, dataFlowDiagramFilenameDOT) - if !keepDiagramSourceFiles { - tmpFileGV, err := os.CreateTemp(tempFolder, dataFlowDiagramFilenameDOT) + gvFile := filepath.Join(*context.outputDir, dataFlowDiagramFilenameDOT) + if !context.keepDiagramSourceFiles { + tmpFileGV, err := os.CreateTemp(*context.tempFolder, dataFlowDiagramFilenameDOT) checkErr(err) gvFile = tmpFileGV.Name() defer func() { _ = os.Remove(gvFile) }() } - dotFile := writeDataFlowDiagramGraphvizDOT(gvFile, *diagramDPI) - renderDataFlowDiagramGraphvizImage(dotFile, outputDirectory) + dotFile := context.writeDataFlowDiagramGraphvizDOT(gvFile, *context.diagramDPI) + context.renderDataFlowDiagramGraphvizImage(dotFile, *context.outputDir) } // Data Asset Diagram rendering if renderDataAssetDiagram { - gvFile := filepath.Join(outputDirectory, dataAssetDiagramFilenameDOT) - if !keepDiagramSourceFiles { - tmpFile, err := os.CreateTemp(tempFolder, dataAssetDiagramFilenameDOT) + gvFile := filepath.Join(*context.outputDir, dataAssetDiagramFilenameDOT) + if !context.keepDiagramSourceFiles { + tmpFile, err := os.CreateTemp(*context.tempFolder, dataAssetDiagramFilenameDOT) checkErr(err) gvFile = tmpFile.Name() defer func() { _ = os.Remove(gvFile) }() } - dotFile := writeDataAssetDiagramGraphvizDOT(gvFile, *diagramDPI) - renderDataAssetDiagramGraphvizImage(dotFile, outputDirectory) + dotFile := context.writeDataAssetDiagramGraphvizDOT(gvFile, *context.diagramDPI) + context.renderDataAssetDiagramGraphvizImage(dotFile, *context.outputDir) } // risks as risks 
json if renderRisksJSON { - if *verbose { + if *context.verbose { fmt.Println("Writing risks json") } - report.WriteRisksJSON(filepath.Join(outputDirectory, jsonRisksFilename)) + report.WriteRisksJSON(filepath.Join(*context.outputDir, jsonRisksFilename)) } // technical assets json if renderTechnicalAssetsJSON { - if *verbose { + if *context.verbose { fmt.Println("Writing technical assets json") } - report.WriteTechnicalAssetsJSON(filepath.Join(outputDirectory, jsonTechnicalAssetsFilename)) + report.WriteTechnicalAssetsJSON(filepath.Join(*context.outputDir, jsonTechnicalAssetsFilename)) } // risks as risks json if renderStatsJSON { - if *verbose { + if *context.verbose { fmt.Println("Writing stats json") } - report.WriteStatsJSON(filepath.Join(outputDirectory, jsonStatsFilename)) + report.WriteStatsJSON(filepath.Join(*context.outputDir, jsonStatsFilename)) } // risks Excel if renderRisksExcel { - if *verbose { + if *context.verbose { fmt.Println("Writing risks excel") } - report.WriteRisksExcelToFile(filepath.Join(outputDirectory, excelRisksFilename)) + report.WriteRisksExcelToFile(filepath.Join(*context.outputDir, excelRisksFilename)) } // tags Excel if renderTagsExcel { - if *verbose { + if *context.verbose { fmt.Println("Writing tags excel") } - report.WriteTagsExcelToFile(filepath.Join(outputDirectory, excelTagsFilename)) + report.WriteTagsExcelToFile(filepath.Join(*context.outputDir, excelTagsFilename)) } if renderPDF { // hash the YAML input file - f, err := os.Open(inputFilename) + f, err := os.Open(*context.modelFilename) checkErr(err) defer func() { _ = f.Close() }() hasher := sha256.New() @@ -1193,24 +1226,24 @@ func doIt(inputFilename string, outputDirectory string) { } modelHash := hex.EncodeToString(hasher.Sum(nil)) // report PDF - if *verbose { + if *context.verbose { fmt.Println("Writing report pdf") } - report.WriteReportPDF(filepath.Join(outputDirectory, reportFilename), - *templateFilename, - filepath.Join(outputDirectory, 
dataFlowDiagramFilenamePNG), - filepath.Join(outputDirectory, dataAssetDiagramFilenamePNG), - inputFilename, - *skipRiskRules, - buildTimestamp, + report.WriteReportPDF(filepath.Join(*context.outputDir, reportFilename), + *context.templateFilename, + filepath.Join(*context.outputDir, dataFlowDiagramFilenamePNG), + filepath.Join(*context.outputDir, dataAssetDiagramFilenamePNG), + *context.modelFilename, + *context.skipRiskRules, + context.buildTimestamp, modelHash, introTextRAA, - customRiskRules, - tempFolder) + context.customRiskRules, + *context.tempFolder) } } -func printBorder(length int, bold bool) { +func (context *Context) printBorder(length int, bold bool) { char := "-" if bold { char = "=" @@ -1221,33 +1254,46 @@ func printBorder(length int, bold bool) { fmt.Println() } -func applyRAA() string { - if *verbose { - fmt.Println("Applying RAA calculation:", *raaPlugin) +func (context *Context) applyRAA() string { + if *context.verbose { + fmt.Println("Applying RAA calculation:", *context.raaPlugin) } // determine plugin to load. 
// load plugin: open the ".so" file to load the symbols - plug, err := plugin.Open(*raaPlugin) - checkErr(err) + plug, err := plugin.Open(*context.raaPlugin) + if err != nil { + fmt.Printf("WARNING: plugin %q not applied: %v\n", *context.raaPlugin, err) + return "" + } + // checkErr(err) // look up a symbol (an exported function or variable): in this case, function CalculateRAA symCalculateRAA, err := plug.Lookup("CalculateRAA") - checkErr(err) + if err != nil { + fmt.Printf("WARNING: plugin %q not applied: %v\n", *context.raaPlugin, err) + return "" + } + // checkErr(err) // use the plugin raaCalcFunc, ok := symCalculateRAA.(func() string) // symCalculateRAA.(func(model.ParsedModel) string) if !ok { - panic(errors.New("RAA plugin has no 'CalculateRAA() string' function")) + fmt.Printf("WARNING: invalid plugin %q\n", *context.raaPlugin) + return "" } + /* if !ok { + panic(errors.New("RAA plugin has no 'CalculateRAA() string' function")) + } + */ // call it return raaCalcFunc() } -func loadCustomRiskRules() { - customRiskRules = make(map[string]model.CustomRiskRule) - if len(*riskRulesPlugins) > 0 { - if *verbose { - fmt.Println("Loading custom risk rules:", *riskRulesPlugins) +func (context *Context) loadCustomRiskRules() { + context.customRiskRules = make(map[string]model.CustomRiskRule) + if len(*context.riskRulesPlugins) > 0 { + if *context.verbose { + fmt.Println("Loading custom risk rules:", *context.riskRulesPlugins) } - for _, pluginFile := range strings.Split(*riskRulesPlugins, ",") { + for _, pluginFile := range strings.Split(*context.riskRulesPlugins, ",") { if len(pluginFile) > 0 { // check that the plugin file to load exists _, err := os.Stat(pluginFile) @@ -1267,62 +1313,63 @@ func loadCustomRiskRules() { } // simply add to a map (just convenience) where key is the category id and value the rule's execution function ruleID := symCustomRiskRuleVar.Category().Id - customRiskRules[ruleID] = symCustomRiskRuleVar - if *verbose { + 
context.customRiskRules[ruleID] = symCustomRiskRuleVar + if *context.verbose { fmt.Println("Custom risk rule loaded:", ruleID) } } } - if *verbose { - fmt.Println("Loaded custom risk rules:", len(customRiskRules)) + if *context.verbose { + fmt.Println("Loaded custom risk rules:", len(context.customRiskRules)) } } } var validIdSyntax = regexp.MustCompile(`^[a-zA-Z0-9\-]+$`) -func checkIdSyntax(id string) { +func (context *Context) checkIdSyntax(id string) { if !validIdSyntax.MatchString(id) { panic(errors.New("invalid id syntax used (only letters, numbers, and hyphen allowed): " + id)) } } -func analyze(context *gin.Context) { - execute(context, false) +func (context *Context) analyze(ginContext *gin.Context) { + context.execute(ginContext, false) } -func check(context *gin.Context) { - _, ok := execute(context, true) + +func (context *Context) check(ginContext *gin.Context) { + _, ok := context.execute(ginContext, true) if ok { - context.JSON(http.StatusOK, gin.H{ + ginContext.JSON(http.StatusOK, gin.H{ "message": "model is ok", }) } } -func execute(context *gin.Context, dryRun bool) (yamlContent []byte, ok bool) { +func (context *Context) execute(ginContext *gin.Context, dryRun bool) (yamlContent []byte, ok bool) { defer func() { var err error if r := recover(); r != nil { - errorCount++ + context.errorCount++ err = r.(error) log.Println(err) - context.JSON(http.StatusBadRequest, gin.H{ + ginContext.JSON(http.StatusBadRequest, gin.H{ "error": strings.TrimSpace(err.Error()), }) ok = false } }() - dpi, err := strconv.Atoi(context.DefaultQuery("dpi", strconv.Itoa(defaultGraphvizDPI))) + dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(defaultGraphvizDPI))) checkErr(err) - fileUploaded, header, err := context.Request.FormFile("file") + fileUploaded, header, err := ginContext.Request.FormFile("file") checkErr(err) if header.Size > 50000000 { msg := "maximum model upload file size exceeded (denial-of-service protection)" log.Println(msg) - 
context.JSON(http.StatusRequestEntityTooLarge, gin.H{ + ginContext.JSON(http.StatusRequestEntityTooLarge, gin.H{ "error": msg, }) return yamlContent, false @@ -1330,7 +1377,7 @@ func execute(context *gin.Context, dryRun bool) (yamlContent []byte, ok bool) { filenameUploaded := strings.TrimSpace(header.Filename) - tmpInputDir, err := os.MkdirTemp(tempFolder, "threagile-input-") + tmpInputDir, err := os.MkdirTemp(*context.tempFolder, "threagile-input-") checkErr(err) defer func() { _ = os.RemoveAll(tmpInputDir) }() @@ -1344,10 +1391,10 @@ func execute(context *gin.Context, dryRun bool) (yamlContent []byte, ok bool) { if strings.ToLower(filepath.Ext(filenameUploaded)) == ".zip" { // unzip first (including the resources like images etc.) - if *verbose { + if *context.verbose { fmt.Println("Decompressing uploaded archive") } - filenamesUnzipped, err := unzip(tmpModelFile.Name(), tmpInputDir) + filenamesUnzipped, err := context.unzip(tmpModelFile.Name(), tmpInputDir) checkErr(err) found := false for _, name := range filenamesUnzipped { @@ -1362,18 +1409,18 @@ func execute(context *gin.Context, dryRun bool) (yamlContent []byte, ok bool) { } } - tmpOutputDir, err := os.MkdirTemp(tempFolder, "threagile-output-") + tmpOutputDir, err := os.MkdirTemp(*context.tempFolder, "threagile-output-") checkErr(err) defer func() { _ = os.RemoveAll(tmpOutputDir) }() - tmpResultFile, err := os.CreateTemp(tempFolder, "threagile-result-*.zip") + tmpResultFile, err := os.CreateTemp(*context.tempFolder, "threagile-result-*.zip") checkErr(err) defer func() { _ = os.Remove(tmpResultFile.Name()) }() if dryRun { - doItViaRuntimeCall(yamlFile, tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, false, false, false, true, true, true, 40) + context.doItViaRuntimeCall(yamlFile, tmpOutputDir, false, false, false, false, false, true, true, true, 40) } else { - doItViaRuntimeCall(yamlFile, tmpOutputDir, *executeModelMacro, 
*raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, true, true, true, true, true, true, true, true, dpi) + context.doItViaRuntimeCall(yamlFile, tmpOutputDir, true, true, true, true, true, true, true, true, dpi) } checkErr(err) @@ -1398,28 +1445,28 @@ func execute(context *gin.Context, dryRun bool) (yamlContent []byte, ok bool) { files = append(files, filepath.Join(tmpOutputDir, dataFlowDiagramFilenameDOT)) files = append(files, filepath.Join(tmpOutputDir, dataAssetDiagramFilenameDOT)) } - err = zipFiles(tmpResultFile.Name(), files) + err = context.zipFiles(tmpResultFile.Name(), files) checkErr(err) - if *verbose { + if *context.verbose { log.Println("Streaming back result file: " + tmpResultFile.Name()) } - context.FileAttachment(tmpResultFile.Name(), "threagile-result.zip") + ginContext.FileAttachment(tmpResultFile.Name(), "threagile-result.zip") } - successCount++ + context.successCount++ return yamlContent, true } // ultimately to avoid any in-process memory and/or data leaks by the used third party libs like PDF generation: exec and quit -func doItViaRuntimeCall(modelFile string, outputDir string, executeModelMacro string, raaPlugin string, customRiskRulesPlugins string, skipRiskRules string, ignoreOrphanedRiskTracking bool, +func (context *Context) doItViaRuntimeCall(modelFile string, outputDir string, generateDataFlowDiagram, generateDataAssetDiagram, generateReportPdf, generateRisksExcel, generateTagsExcel, generateRisksJSON, generateTechnicalAssetsJSON, generateStatsJSON bool, dpi int) { // Remember to also add the same args to the exec based sub-process calls! 
var cmd *exec.Cmd - args := []string{"-model", modelFile, "-output", outputDir, "-execute-model-macro", executeModelMacro, "-raa-plugin", raaPlugin, "-custom-risk-rules-plugins", customRiskRulesPlugins, "-skip-risk-rules", skipRiskRules, "-diagram-dpi", strconv.Itoa(dpi)} - if *verbose { + args := []string{"-model", modelFile, "-output", outputDir, "-execute-model-macro", *context.executeModelMacro, "-raa-plugin", *context.raaPlugin, "-custom-risk-rules-plugins", *context.riskRulesPlugins, "-skip-risk-rules", *context.skipRiskRules, "-diagram-dpi", strconv.Itoa(dpi)} + if *context.verbose { args = append(args, "-verbose") } - if ignoreOrphanedRiskTracking { // TODO why add all them as arguments, when they are also variables on outer level? + if *context.ignoreOrphanedRiskTracking { // TODO why add all them as arguments, when they are also variables on outer level? args = append(args, "-ignore-orphaned-risk-tracking") } if generateDataFlowDiagram { @@ -1452,7 +1499,7 @@ func doItViaRuntimeCall(modelFile string, outputDir string, executeModelMacro st if err != nil { panic(errors.New(string(out))) } else { - if *verbose && len(out) > 0 { + if *context.verbose && len(out) > 0 { fmt.Println("---") fmt.Print(string(out)) fmt.Println("---") @@ -1460,7 +1507,7 @@ func doItViaRuntimeCall(modelFile string, outputDir string, executeModelMacro st } } -func startServer() { +func (context *Context) startServer() { router := gin.Default() router.LoadHTMLGlob("server/static/*.html") // <== router.GET("/", func(c *gin.Context) { @@ -1489,8 +1536,8 @@ func startServer() { router.StaticFile("/swagger-ui/swagger-ui-bundle.js", "server/static/swagger-ui/swagger-ui-bundle.js") router.StaticFile("/swagger-ui/swagger-ui-standalone-preset.js", "server/static/swagger-ui/swagger-ui-standalone-preset.js") // <== - router.GET("/threagile-example-model.yaml", exampleFile) - router.GET("/threagile-stub-model.yaml", stubFile) + router.GET("/threagile-example-model.yaml", context.exampleFile) + 
router.GET("/threagile-stub-model.yaml", context.stubFile) router.GET("/meta/ping", func(c *gin.Context) { c.JSON(200, gin.H{ @@ -1500,115 +1547,115 @@ func startServer() { router.GET("/meta/version", func(c *gin.Context) { c.JSON(200, gin.H{ "version": model.ThreagileVersion, - "build_timestamp": buildTimestamp, + "build_timestamp": context.buildTimestamp, }) }) router.GET("/meta/types", func(c *gin.Context) { c.JSON(200, gin.H{ - "quantity": arrayOfStringValues(model.QuantityValues()), - "confidentiality": arrayOfStringValues(model.ConfidentialityValues()), - "criticality": arrayOfStringValues(model.CriticalityValues()), - "technical_asset_type": arrayOfStringValues(model.TechnicalAssetTypeValues()), - "technical_asset_size": arrayOfStringValues(model.TechnicalAssetSizeValues()), - "authorization": arrayOfStringValues(model.AuthorizationValues()), - "authentication": arrayOfStringValues(model.AuthenticationValues()), - "usage": arrayOfStringValues(model.UsageValues()), - "encryption": arrayOfStringValues(model.EncryptionStyleValues()), - "data_format": arrayOfStringValues(model.DataFormatValues()), - "protocol": arrayOfStringValues(model.ProtocolValues()), - "technical_asset_technology": arrayOfStringValues(model.TechnicalAssetTechnologyValues()), - "technical_asset_machine": arrayOfStringValues(model.TechnicalAssetMachineValues()), - "trust_boundary_type": arrayOfStringValues(model.TrustBoundaryTypeValues()), - "data_breach_probability": arrayOfStringValues(model.DataBreachProbabilityValues()), - "risk_severity": arrayOfStringValues(model.RiskSeverityValues()), - "risk_exploitation_likelihood": arrayOfStringValues(model.RiskExploitationLikelihoodValues()), - "risk_exploitation_impact": arrayOfStringValues(model.RiskExploitationImpactValues()), - "risk_function": arrayOfStringValues(model.RiskFunctionValues()), - "risk_status": arrayOfStringValues(model.RiskStatusValues()), - "stride": arrayOfStringValues(model.STRIDEValues()), + "quantity": 
context.arrayOfStringValues(model.QuantityValues()), + "confidentiality": context.arrayOfStringValues(model.ConfidentialityValues()), + "criticality": context.arrayOfStringValues(model.CriticalityValues()), + "technical_asset_type": context.arrayOfStringValues(model.TechnicalAssetTypeValues()), + "technical_asset_size": context.arrayOfStringValues(model.TechnicalAssetSizeValues()), + "authorization": context.arrayOfStringValues(model.AuthorizationValues()), + "authentication": context.arrayOfStringValues(model.AuthenticationValues()), + "usage": context.arrayOfStringValues(model.UsageValues()), + "encryption": context.arrayOfStringValues(model.EncryptionStyleValues()), + "data_format": context.arrayOfStringValues(model.DataFormatValues()), + "protocol": context.arrayOfStringValues(model.ProtocolValues()), + "technical_asset_technology": context.arrayOfStringValues(model.TechnicalAssetTechnologyValues()), + "technical_asset_machine": context.arrayOfStringValues(model.TechnicalAssetMachineValues()), + "trust_boundary_type": context.arrayOfStringValues(model.TrustBoundaryTypeValues()), + "data_breach_probability": context.arrayOfStringValues(model.DataBreachProbabilityValues()), + "risk_severity": context.arrayOfStringValues(model.RiskSeverityValues()), + "risk_exploitation_likelihood": context.arrayOfStringValues(model.RiskExploitationLikelihoodValues()), + "risk_exploitation_impact": context.arrayOfStringValues(model.RiskExploitationImpactValues()), + "risk_function": context.arrayOfStringValues(model.RiskFunctionValues()), + "risk_status": context.arrayOfStringValues(model.RiskStatusValues()), + "stride": context.arrayOfStringValues(model.STRIDEValues()), }) }) // TODO router.GET("/meta/risk-rules", listRiskRules) // TODO router.GET("/meta/model-macros", listModelMacros) - router.GET("/meta/stats", stats) - - router.POST("/direct/analyze", analyze) - router.POST("/direct/check", check) - router.GET("/direct/stub", stubFile) - - router.POST("/auth/keys", createKey) 
- router.DELETE("/auth/keys", deleteKey) - router.POST("/auth/tokens", createToken) - router.DELETE("/auth/tokens", deleteToken) - - router.POST("/models", createNewModel) - router.GET("/models", listModels) - router.DELETE("/models/:model-id", deleteModel) - router.GET("/models/:model-id", getModel) - router.PUT("/models/:model-id", importModel) - router.GET("/models/:model-id/data-flow-diagram", streamDataFlowDiagram) - router.GET("/models/:model-id/data-asset-diagram", streamDataAssetDiagram) - router.GET("/models/:model-id/report-pdf", streamReportPDF) - router.GET("/models/:model-id/risks-excel", streamRisksExcel) - router.GET("/models/:model-id/tags-excel", streamTagsExcel) - router.GET("/models/:model-id/risks", streamRisksJSON) - router.GET("/models/:model-id/technical-assets", streamTechnicalAssetsJSON) - router.GET("/models/:model-id/stats", streamStatsJSON) - router.GET("/models/:model-id/analysis", analyzeModelOnServerDirectly) - - router.GET("/models/:model-id/cover", getCover) - router.PUT("/models/:model-id/cover", setCover) - router.GET("/models/:model-id/overview", getOverview) - router.PUT("/models/:model-id/overview", setOverview) + router.GET("/meta/stats", context.stats) + + router.POST("/direct/analyze", context.analyze) + router.POST("/direct/check", context.check) + router.GET("/direct/stub", context.stubFile) + + router.POST("/auth/keys", context.createKey) + router.DELETE("/auth/keys", context.deleteKey) + router.POST("/auth/tokens", context.createToken) + router.DELETE("/auth/tokens", context.deleteToken) + + router.POST("/models", context.createNewModel) + router.GET("/models", context.listModels) + router.DELETE("/models/:model-id", context.deleteModel) + router.GET("/models/:model-id", context.getModel) + router.PUT("/models/:model-id", context.importModel) + router.GET("/models/:model-id/data-flow-diagram", context.streamDataFlowDiagram) + router.GET("/models/:model-id/data-asset-diagram", context.streamDataAssetDiagram) + 
router.GET("/models/:model-id/report-pdf", context.streamReportPDF) + router.GET("/models/:model-id/risks-excel", context.streamRisksExcel) + router.GET("/models/:model-id/tags-excel", context.streamTagsExcel) + router.GET("/models/:model-id/risks", context.streamRisksJSON) + router.GET("/models/:model-id/technical-assets", context.streamTechnicalAssetsJSON) + router.GET("/models/:model-id/stats", context.streamStatsJSON) + router.GET("/models/:model-id/analysis", context.analyzeModelOnServerDirectly) + + router.GET("/models/:model-id/cover", context.getCover) + router.PUT("/models/:model-id/cover", context.setCover) + router.GET("/models/:model-id/overview", context.getOverview) + router.PUT("/models/:model-id/overview", context.setOverview) //router.GET("/models/:model-id/questions", getQuestions) //router.PUT("/models/:model-id/questions", setQuestions) - router.GET("/models/:model-id/abuse-cases", getAbuseCases) - router.PUT("/models/:model-id/abuse-cases", setAbuseCases) - router.GET("/models/:model-id/security-requirements", getSecurityRequirements) - router.PUT("/models/:model-id/security-requirements", setSecurityRequirements) + router.GET("/models/:model-id/abuse-cases", context.getAbuseCases) + router.PUT("/models/:model-id/abuse-cases", context.setAbuseCases) + router.GET("/models/:model-id/security-requirements", context.getSecurityRequirements) + router.PUT("/models/:model-id/security-requirements", context.setSecurityRequirements) //router.GET("/models/:model-id/tags", getTags) //router.PUT("/models/:model-id/tags", setTags) - router.GET("/models/:model-id/data-assets", getDataAssets) - router.POST("/models/:model-id/data-assets", createNewDataAsset) - router.GET("/models/:model-id/data-assets/:data-asset-id", getDataAsset) - router.PUT("/models/:model-id/data-assets/:data-asset-id", setDataAsset) - router.DELETE("/models/:model-id/data-assets/:data-asset-id", deleteDataAsset) + router.GET("/models/:model-id/data-assets", context.getDataAssets) + 
router.POST("/models/:model-id/data-assets", context.createNewDataAsset) + router.GET("/models/:model-id/data-assets/:data-asset-id", context.getDataAsset) + router.PUT("/models/:model-id/data-assets/:data-asset-id", context.setDataAsset) + router.DELETE("/models/:model-id/data-assets/:data-asset-id", context.deleteDataAsset) - router.GET("/models/:model-id/trust-boundaries", getTrustBoundaries) + router.GET("/models/:model-id/trust-boundaries", context.getTrustBoundaries) // router.POST("/models/:model-id/trust-boundaries", createNewTrustBoundary) // router.GET("/models/:model-id/trust-boundaries/:trust-boundary-id", getTrustBoundary) // router.PUT("/models/:model-id/trust-boundaries/:trust-boundary-id", setTrustBoundary) // router.DELETE("/models/:model-id/trust-boundaries/:trust-boundary-id", deleteTrustBoundary) - router.GET("/models/:model-id/shared-runtimes", getSharedRuntimes) - router.POST("/models/:model-id/shared-runtimes", createNewSharedRuntime) - router.GET("/models/:model-id/shared-runtimes/:shared-runtime-id", getSharedRuntime) - router.PUT("/models/:model-id/shared-runtimes/:shared-runtime-id", setSharedRuntime) - router.DELETE("/models/:model-id/shared-runtimes/:shared-runtime-id", deleteSharedRuntime) + router.GET("/models/:model-id/shared-runtimes", context.getSharedRuntimes) + router.POST("/models/:model-id/shared-runtimes", context.createNewSharedRuntime) + router.GET("/models/:model-id/shared-runtimes/:shared-runtime-id", context.getSharedRuntime) + router.PUT("/models/:model-id/shared-runtimes/:shared-runtime-id", context.setSharedRuntime) + router.DELETE("/models/:model-id/shared-runtimes/:shared-runtime-id", context.deleteSharedRuntime) fmt.Println("Threagile server running...") - _ = router.Run(":" + strconv.Itoa(*serverPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified + _ = router.Run(":" + strconv.Itoa(*context.serverPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified } -func 
exampleFile(context *gin.Context) { - example, err := os.ReadFile(filepath.Join(appFolder, "threagile-example-model.yaml")) +func (context *Context) exampleFile(ginContext *gin.Context) { + example, err := os.ReadFile(filepath.Join(*context.appFolder, "threagile-example-model.yaml")) checkErr(err) - context.Data(http.StatusOK, gin.MIMEYAML, example) + ginContext.Data(http.StatusOK, gin.MIMEYAML, example) } -func stubFile(context *gin.Context) { - stub, err := os.ReadFile(filepath.Join(appFolder, "threagile-stub-model.yaml")) +func (context *Context) stubFile(ginContext *gin.Context) { + stub, err := os.ReadFile(filepath.Join(*context.appFolder, "threagile-stub-model.yaml")) checkErr(err) - context.Data(http.StatusOK, gin.MIMEYAML, addSupportedTags(stub)) // TODO use also the MIMEYAML way of serving YAML in model export? + ginContext.Data(http.StatusOK, gin.MIMEYAML, context.addSupportedTags(stub)) // TODO use also the MIMEYAML way of serving YAML in model export? } -func addSupportedTags(input []byte) []byte { +func (context *Context) addSupportedTags(input []byte) []byte { // add distinct tags as "tags_available" supportedTags := make(map[string]bool) - for _, customRule := range customRiskRules { + for _, customRule := range context.customRiskRules { for _, tag := range customRule.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } @@ -1747,7 +1794,7 @@ func addSupportedTags(input []byte) []byte { return input } sort.Strings(tags) - if *verbose { + if *context.verbose { fmt.Print("Supported tags of all risk rules: ") for i, tag := range tags { if i > 0 { @@ -1774,13 +1821,13 @@ type timeoutStruct struct { var mapTokenHashToTimeoutStruct = make(map[string]timeoutStruct) var mapFolderNameToTokenHash = make(map[string]string) -func createToken(context *gin.Context) { - folderName, key, ok := checkKeyToFolderName(context) +func (context *Context) createToken(ginContext *gin.Context) { + folderName, key, ok := context.checkKeyToFolderName(ginContext) if !ok 
{ return } - globalLock.Lock() - defer globalLock.Unlock() + context.globalLock.Lock() + defer context.globalLock.Unlock() if tokenHash, exists := mapFolderNameToTokenHash[folderName]; exists { // invalidate previous token delete(mapTokenHashToTimeoutStruct, tokenHash) @@ -1790,7 +1837,7 @@ func createToken(context *gin.Context) { n, err := rand.Read(xorBytesArr[:]) if n != keySize || err != nil { log.Println(err) - context.JSON(http.StatusInternalServerError, gin.H{ + ginContext.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to create token", }) return @@ -1805,15 +1852,15 @@ func createToken(context *gin.Context) { lastAccessedNanoTime: now, } mapFolderNameToTokenHash[folderName] = tokenHash - context.JSON(http.StatusCreated, gin.H{ + ginContext.JSON(http.StatusCreated, gin.H{ "token": base64.RawURLEncoding.EncodeToString(token[:]), }) } -func deleteToken(context *gin.Context) { +func (context *Context) deleteToken(ginContext *gin.Context) { header := tokenHeader{} - if err := context.ShouldBindHeader(&header); err != nil { - context.JSON(http.StatusNotFound, gin.H{ + if err := ginContext.ShouldBindHeader(&header); err != nil { + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "token not found", }) return @@ -1823,15 +1870,15 @@ func deleteToken(context *gin.Context) { if err != nil { log.Println(err) } - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "token not found", }) return } - globalLock.Lock() - defer globalLock.Unlock() + context.globalLock.Lock() + defer context.globalLock.Unlock() deleteTokenHashFromMaps(hashSHA256(token)) - context.JSON(http.StatusOK, gin.H{ + ginContext.JSON(http.StatusOK, gin.H{ "message": "token deleted", }) } @@ -1879,64 +1926,64 @@ func xor(key []byte, xor []byte) []byte { return result } -func analyzeModelOnServerDirectly(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) 
analyzeModelOnServerDirectly(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) + context.lockFolder(folderNameOfKey) defer func() { - unlockFolder(folderNameOfKey) + context.unlockFolder(folderNameOfKey) var err error if r := recover(); r != nil { err = r.(error) - if *verbose { + if *context.verbose { log.Println(err) } log.Println(err) - context.JSON(http.StatusBadRequest, gin.H{ + ginContext.JSON(http.StatusBadRequest, gin.H{ "error": strings.TrimSpace(err.Error()), }) ok = false } }() - dpi, err := strconv.Atoi(context.DefaultQuery("dpi", strconv.Itoa(defaultGraphvizDPI))) + dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(defaultGraphvizDPI))) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } - _, yamlText, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + _, yamlText, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if !ok { return } - tmpModelFile, err := os.CreateTemp(tempFolder, "threagile-direct-analyze-*") + tmpModelFile, err := os.CreateTemp(*context.tempFolder, "threagile-direct-analyze-*") if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } defer func() { _ = os.Remove(tmpModelFile.Name()) }() - tmpOutputDir, err := os.MkdirTemp(tempFolder, "threagile-direct-analyze-") + tmpOutputDir, err := os.MkdirTemp(*context.tempFolder, "threagile-direct-analyze-") if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } defer func() { _ = os.RemoveAll(tmpOutputDir) }() - tmpResultFile, err := os.CreateTemp(tempFolder, "threagile-result-*.zip") + tmpResultFile, err := os.CreateTemp(*context.tempFolder, "threagile-result-*.zip") checkErr(err) defer func() { _ = os.Remove(tmpResultFile.Name()) 
}() err = os.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) - doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, true, true, true, true, true, true, true, true, dpi) + context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, true, true, true, true, true, true, true, true, dpi) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } err = os.WriteFile(filepath.Join(tmpOutputDir, outputFile), []byte(yamlText), 0400) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } @@ -1955,12 +2002,12 @@ func analyzeModelOnServerDirectly(context *gin.Context) { files = append(files, filepath.Join(tmpOutputDir, dataFlowDiagramFilenameDOT)) files = append(files, filepath.Join(tmpOutputDir, dataAssetDiagramFilenameDOT)) } - err = zipFiles(tmpResultFile.Name(), files) + err = context.zipFiles(tmpResultFile.Name(), files) checkErr(err) - if *verbose { + if *context.verbose { fmt.Println("Streaming back result file: " + tmpResultFile.Name()) } - context.FileAttachment(tmpResultFile.Name(), "threagile-result.zip") + ginContext.FileAttachment(tmpResultFile.Name(), "threagile-result.zip") } type responseType int @@ -1976,166 +2023,174 @@ const ( statsJSON ) -func streamDataFlowDiagram(context *gin.Context) { - streamResponse(context, dataFlowDiagram) +func (context *Context) streamDataFlowDiagram(ginContext *gin.Context) { + context.streamResponse(ginContext, dataFlowDiagram) } -func streamDataAssetDiagram(context *gin.Context) { - streamResponse(context, dataAssetDiagram) + +func (context *Context) streamDataAssetDiagram(ginContext *gin.Context) { + context.streamResponse(ginContext, dataAssetDiagram) } -func streamReportPDF(context *gin.Context) { - streamResponse(context, reportPDF) + +func (context *Context) streamReportPDF(ginContext *gin.Context) { 
+ context.streamResponse(ginContext, reportPDF) } -func streamRisksExcel(context *gin.Context) { - streamResponse(context, risksExcel) + +func (context *Context) streamRisksExcel(ginContext *gin.Context) { + context.streamResponse(ginContext, risksExcel) } -func streamTagsExcel(context *gin.Context) { - streamResponse(context, tagsExcel) + +func (context *Context) streamTagsExcel(ginContext *gin.Context) { + context.streamResponse(ginContext, tagsExcel) } -func streamRisksJSON(context *gin.Context) { - streamResponse(context, risksJSON) + +func (context *Context) streamRisksJSON(ginContext *gin.Context) { + context.streamResponse(ginContext, risksJSON) } -func streamTechnicalAssetsJSON(context *gin.Context) { - streamResponse(context, technicalAssetsJSON) + +func (context *Context) streamTechnicalAssetsJSON(ginContext *gin.Context) { + context.streamResponse(ginContext, technicalAssetsJSON) } -func streamStatsJSON(context *gin.Context) { - streamResponse(context, statsJSON) + +func (context *Context) streamStatsJSON(ginContext *gin.Context) { + context.streamResponse(ginContext, statsJSON) } -func streamResponse(context *gin.Context, responseType responseType) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) + +func (context *Context) streamResponse(ginContext *gin.Context, responseType responseType) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) + context.lockFolder(folderNameOfKey) defer func() { - unlockFolder(folderNameOfKey) + context.unlockFolder(folderNameOfKey) var err error if r := recover(); r != nil { err = r.(error) - if *verbose { + if *context.verbose { log.Println(err) } log.Println(err) - context.JSON(http.StatusBadRequest, gin.H{ + ginContext.JSON(http.StatusBadRequest, gin.H{ "error": strings.TrimSpace(err.Error()), }) ok = false } }() - dpi, err := strconv.Atoi(context.DefaultQuery("dpi", strconv.Itoa(defaultGraphvizDPI))) + dpi, err := 
strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(defaultGraphvizDPI))) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } - _, yamlText, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + _, yamlText, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if !ok { return } - tmpModelFile, err := os.CreateTemp(tempFolder, "threagile-render-*") + tmpModelFile, err := os.CreateTemp(*context.tempFolder, "threagile-render-*") if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } defer func() { _ = os.Remove(tmpModelFile.Name()) }() - tmpOutputDir, err := os.MkdirTemp(tempFolder, "threagile-render-") + tmpOutputDir, err := os.MkdirTemp(*context.tempFolder, "threagile-render-") if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } defer func() { _ = os.RemoveAll(tmpOutputDir) }() err = os.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) if responseType == dataFlowDiagram { - doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, true, false, false, false, false, false, false, false, dpi) + context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, true, false, false, false, false, false, false, false, dpi) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } - context.File(filepath.Join(tmpOutputDir, dataFlowDiagramFilenamePNG)) + ginContext.File(filepath.Join(tmpOutputDir, dataFlowDiagramFilenamePNG)) } else if responseType == dataAssetDiagram { - doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, false, true, false, false, false, false, false, false, 
dpi) + context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, true, false, false, false, false, false, false, dpi) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } - context.File(filepath.Join(tmpOutputDir, dataAssetDiagramFilenamePNG)) + ginContext.File(filepath.Join(tmpOutputDir, dataAssetDiagramFilenamePNG)) } else if responseType == reportPDF { - doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, true, false, false, false, false, false, dpi) + context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, true, false, false, false, false, false, dpi) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } - context.FileAttachment(filepath.Join(tmpOutputDir, reportFilename), reportFilename) + ginContext.FileAttachment(filepath.Join(tmpOutputDir, reportFilename), reportFilename) } else if responseType == risksExcel { - doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, false, true, false, false, false, false, dpi) + context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, true, false, false, false, false, dpi) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } - context.FileAttachment(filepath.Join(tmpOutputDir, excelRisksFilename), excelRisksFilename) + ginContext.FileAttachment(filepath.Join(tmpOutputDir, excelRisksFilename), excelRisksFilename) } else if responseType == tagsExcel { - doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, false, false, true, false, false, false, dpi) + 
context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, true, false, false, false, dpi) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } - context.FileAttachment(filepath.Join(tmpOutputDir, excelTagsFilename), excelTagsFilename) + ginContext.FileAttachment(filepath.Join(tmpOutputDir, excelTagsFilename), excelTagsFilename) } else if responseType == risksJSON { - doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, false, false, false, true, false, false, dpi) + context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, true, false, false, dpi) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } - json, err := os.ReadFile(filepath.Join(tmpOutputDir, jsonRisksFilename)) + jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, jsonRisksFilename)) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } - context.Data(http.StatusOK, "application/json", json) // stream directly with JSON content-type in response instead of file download + ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download } else if responseType == technicalAssetsJSON { - doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, false, false, false, true, true, false, dpi) + context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, true, true, false, dpi) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } - json, err := 
os.ReadFile(filepath.Join(tmpOutputDir, jsonTechnicalAssetsFilename)) + jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, jsonTechnicalAssetsFilename)) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } - context.Data(http.StatusOK, "application/json", json) // stream directly with JSON content-type in response instead of file download + ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download } else if responseType == statsJSON { - doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *riskRulesPlugins, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, false, false, false, false, false, true, dpi) + context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, false, false, true, dpi) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } - json, err := os.ReadFile(filepath.Join(tmpOutputDir, jsonStatsFilename)) + jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, jsonStatsFilename)) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } - context.Data(http.StatusOK, "application/json", json) // stream directly with JSON content-type in response instead of file download + ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download } } // fully replaces threagile.yaml in sub-folder given by UUID -func importModel(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) importModel(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) + 
context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) - aUuid := context.Param("model-id") // UUID is syntactically validated in readModel+checkModelFolder (next line) via uuid.Parse(modelUUID) - _, _, ok = readModel(context, aUuid, key, folderNameOfKey) + aUuid := ginContext.Param("model-id") // UUID is syntactically validated in readModel+checkModelFolder (next line) via uuid.Parse(modelUUID) + _, _, ok = context.readModel(ginContext, aUuid, key, folderNameOfKey) if ok { // first analyze it simply by executing the full risk process (just discard the result) to ensure that everything would work - yamlContent, ok := execute(context, true) + yamlContent, ok := context.execute(ginContext, true) if ok { // if we're here, then no problem was raised, so ok to proceed - ok = writeModelYAML(context, string(yamlContent), key, folderNameForModel(folderNameOfKey, aUuid), "Model Import", false) + ok = context.writeModelYAML(ginContext, string(yamlContent), key, context.folderNameForModel(folderNameOfKey, aUuid), "Model Import", false) if ok { - context.JSON(http.StatusCreated, gin.H{ + ginContext.JSON(http.StatusCreated, gin.H{ "message": "model imported", }) } @@ -2143,12 +2198,12 @@ func importModel(context *gin.Context) { } } -func stats(context *gin.Context) { +func (context *Context) stats(ginContext *gin.Context) { keyCount, modelCount := 0, 0 - keyFolders, err := os.ReadDir(baseFolder) + keyFolders, err := os.ReadDir(*context.baseFolder) if err != nil { log.Println(err) - context.JSON(http.StatusInternalServerError, gin.H{ + ginContext.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to collect stats", }) return @@ -2156,10 +2211,10 @@ func stats(context *gin.Context) { for _, keyFolder := range keyFolders { if len(keyFolder.Name()) == 128 { // it's a sha512 token hash probably, so count it as token folder for the stats keyCount++ - modelFolders, err := os.ReadDir(filepath.Join(baseFolder, keyFolder.Name())) + modelFolders, err 
:= os.ReadDir(filepath.Join(*context.baseFolder, keyFolder.Name())) if err != nil { log.Println(err) - context.JSON(http.StatusInternalServerError, gin.H{ + ginContext.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to collect stats", }) return @@ -2172,51 +2227,51 @@ func stats(context *gin.Context) { } } // TODO collect and deliver more stats (old model count?) and health info - context.JSON(http.StatusOK, gin.H{ + ginContext.JSON(http.StatusOK, gin.H{ "key_count": keyCount, "model_count": modelCount, - "success_count": successCount, - "error_count": errorCount, + "success_count": context.successCount, + "error_count": context.errorCount, }) } -func getDataAsset(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) getDataAsset(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { // yes, here keyed by title in YAML for better readability in the YAML file itself for title, dataAsset := range modelInput.DataAssets { - if dataAsset.ID == context.Param("data-asset-id") { - context.JSON(http.StatusOK, gin.H{ + if dataAsset.ID == ginContext.Param("data-asset-id") { + ginContext.JSON(http.StatusOK, gin.H{ title: dataAsset, }) return } } - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "data asset not found", }) } } -func deleteDataAsset(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) deleteDataAsset(ginContext *gin.Context) { + folderNameOfKey, key, ok := 
context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { referencesDeleted := false // yes, here keyed by title in YAML for better readability in the YAML file itself for title, dataAsset := range modelInput.DataAssets { - if dataAsset.ID == context.Param("data-asset-id") { + if dataAsset.ID == ginContext.Param("data-asset-id") { // also remove all usages of this data asset !! for _, techAsset := range modelInput.TechnicalAssets { if techAsset.DataAssetsProcessed != nil { @@ -2290,9 +2345,9 @@ func deleteDataAsset(context *gin.Context) { } // remove it itself delete(modelInput.DataAssets, title) - ok = writeModel(context, key, folderNameOfKey, &modelInput, "Data Asset Deletion") + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Data Asset Deletion") if ok { - context.JSON(http.StatusOK, gin.H{ + ginContext.JSON(http.StatusOK, gin.H{ "message": "data asset deleted", "id": dataAsset.ID, "references_deleted": referencesDeleted, // in order to signal to clients, that other model parts might've been deleted as well @@ -2301,34 +2356,34 @@ func deleteDataAsset(context *gin.Context) { return } } - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "data asset not found", }) } } -func setSharedRuntime(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) setSharedRuntime(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - modelInput, _, ok := readModel(context, 
context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { // yes, here keyed by title in YAML for better readability in the YAML file itself for title, sharedRuntime := range modelInput.SharedRuntimes { - if sharedRuntime.ID == context.Param("shared-runtime-id") { + if sharedRuntime.ID == ginContext.Param("shared-runtime-id") { payload := payloadSharedRuntime{} - err := context.BindJSON(&payload) + err := ginContext.BindJSON(&payload) if err != nil { log.Println(err) - context.JSON(http.StatusBadRequest, gin.H{ + ginContext.JSON(http.StatusBadRequest, gin.H{ "error": "unable to parse request payload", }) return } - sharedRuntimeInput, ok := populateSharedRuntime(context, payload) + sharedRuntimeInput, ok := context.populateSharedRuntime(ginContext, payload) if !ok { return } @@ -2349,9 +2404,9 @@ func setSharedRuntime(context *gin.Context) { } } } - ok = writeModel(context, key, folderNameOfKey, &modelInput, "Shared Runtime Update") + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Shared Runtime Update") if ok { - context.JSON(http.StatusOK, gin.H{ + ginContext.JSON(http.StatusOK, gin.H{ "message": "shared runtime updated", "id": sharedRuntimeInput.ID, "id_changed": idChanged, // in order to signal to clients, that other model parts might've received updates as well and should be reloaded @@ -2360,34 +2415,34 @@ func setSharedRuntime(context *gin.Context) { return } } - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "shared runtime not found", }) } } -func setDataAsset(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) setDataAsset(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - 
lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { // yes, here keyed by title in YAML for better readability in the YAML file itself for title, dataAsset := range modelInput.DataAssets { - if dataAsset.ID == context.Param("data-asset-id") { + if dataAsset.ID == ginContext.Param("data-asset-id") { payload := payloadDataAsset{} - err := context.BindJSON(&payload) + err := ginContext.BindJSON(&payload) if err != nil { log.Println(err) - context.JSON(http.StatusBadRequest, gin.H{ + ginContext.JSON(http.StatusBadRequest, gin.H{ "error": "unable to parse request payload", }) return } - dataAssetInput, ok := populateDataAsset(context, payload) + dataAssetInput, ok := context.populateDataAsset(ginContext, payload) if !ok { return } @@ -2443,9 +2498,9 @@ func setDataAsset(context *gin.Context) { } } } - ok = writeModel(context, key, folderNameOfKey, &modelInput, "Data Asset Update") + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Data Asset Update") if ok { - context.JSON(http.StatusOK, gin.H{ + ginContext.JSON(http.StatusOK, gin.H{ "message": "data asset updated", "id": dataAssetInput.ID, "id_changed": idChanged, // in order to signal to clients, that other model parts might've received updates as well and should be reloaded @@ -2454,57 +2509,57 @@ func setDataAsset(context *gin.Context) { return } } - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "data asset not found", }) } } -func getSharedRuntime(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) getSharedRuntime(ginContext *gin.Context) { + folderNameOfKey, key, ok := 
context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { // yes, here keyed by title in YAML for better readability in the YAML file itself for title, sharedRuntime := range modelInput.SharedRuntimes { - if sharedRuntime.ID == context.Param("shared-runtime-id") { - context.JSON(http.StatusOK, gin.H{ + if sharedRuntime.ID == ginContext.Param("shared-runtime-id") { + ginContext.JSON(http.StatusOK, gin.H{ title: sharedRuntime, }) return } } - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "shared runtime not found", }) } } -func createNewSharedRuntime(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) createNewSharedRuntime(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { payload := payloadSharedRuntime{} - err := context.BindJSON(&payload) + err := ginContext.BindJSON(&payload) if err != nil { log.Println(err) - context.JSON(http.StatusBadRequest, gin.H{ + ginContext.JSON(http.StatusBadRequest, gin.H{ "error": "unable to parse request payload", }) return } // yes, here keyed by title in YAML for better readability in the YAML file itself if _, exists := 
modelInput.SharedRuntimes[payload.Title]; exists { - context.JSON(http.StatusConflict, gin.H{ + ginContext.JSON(http.StatusConflict, gin.H{ "error": "shared runtime with this title already exists", }) return @@ -2512,19 +2567,19 @@ func createNewSharedRuntime(context *gin.Context) { // but later it will in memory keyed by its "id", so do this uniqueness check also for _, runtime := range modelInput.SharedRuntimes { if runtime.ID == payload.Id { - context.JSON(http.StatusConflict, gin.H{ + ginContext.JSON(http.StatusConflict, gin.H{ "error": "shared runtime with this id already exists", }) return } } - if !checkTechnicalAssetsExisting(modelInput, payload.TechnicalAssetsRunning) { - context.JSON(http.StatusBadRequest, gin.H{ + if !context.checkTechnicalAssetsExisting(modelInput, payload.TechnicalAssetsRunning) { + ginContext.JSON(http.StatusBadRequest, gin.H{ "error": "referenced technical asset does not exist", }) return } - sharedRuntimeInput, ok := populateSharedRuntime(context, payload) + sharedRuntimeInput, ok := context.populateSharedRuntime(ginContext, payload) if !ok { return } @@ -2532,9 +2587,9 @@ func createNewSharedRuntime(context *gin.Context) { modelInput.SharedRuntimes = make(map[string]model.InputSharedRuntime) } modelInput.SharedRuntimes[payload.Title] = sharedRuntimeInput - ok = writeModel(context, key, folderNameOfKey, &modelInput, "Shared Runtime Creation") + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Shared Runtime Creation") if ok { - context.JSON(http.StatusOK, gin.H{ + ginContext.JSON(http.StatusOK, gin.H{ "message": "shared runtime created", "id": sharedRuntimeInput.ID, }) @@ -2542,7 +2597,7 @@ func createNewSharedRuntime(context *gin.Context) { } } -func checkTechnicalAssetsExisting(modelInput model.ModelInput, techAssetIDs []string) (ok bool) { +func (context *Context) checkTechnicalAssetsExisting(modelInput model.ModelInput, techAssetIDs []string) (ok bool) { for _, techAssetID := range techAssetIDs { exists := 
false for _, val := range modelInput.TechnicalAssets { @@ -2558,7 +2613,7 @@ func checkTechnicalAssetsExisting(modelInput model.ModelInput, techAssetIDs []st return true } -func populateSharedRuntime(_ *gin.Context, payload payloadSharedRuntime) (sharedRuntimeInput model.InputSharedRuntime, ok bool) { +func (context *Context) populateSharedRuntime(_ *gin.Context, payload payloadSharedRuntime) (sharedRuntimeInput model.InputSharedRuntime, ok bool) { sharedRuntimeInput = model.InputSharedRuntime{ ID: payload.Id, Description: payload.Description, @@ -2568,19 +2623,19 @@ func populateSharedRuntime(_ *gin.Context, payload payloadSharedRuntime) (shared return sharedRuntimeInput, true } -func deleteSharedRuntime(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) deleteSharedRuntime(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { referencesDeleted := false // yes, here keyed by title in YAML for better readability in the YAML file itself for title, sharedRuntime := range modelInput.SharedRuntimes { - if sharedRuntime.ID == context.Param("shared-runtime-id") { + if sharedRuntime.ID == ginContext.Param("shared-runtime-id") { // also remove all usages of this shared runtime !! 
for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { if individualRiskCat.RisksIdentified != nil { @@ -2596,9 +2651,9 @@ func deleteSharedRuntime(context *gin.Context) { } // remove it itself delete(modelInput.SharedRuntimes, title) - ok = writeModel(context, key, folderNameOfKey, &modelInput, "Shared Runtime Deletion") + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Shared Runtime Deletion") if ok { - context.JSON(http.StatusOK, gin.H{ + ginContext.JSON(http.StatusOK, gin.H{ "message": "shared runtime deleted", "id": sharedRuntime.ID, "references_deleted": referencesDeleted, // in order to signal to clients, that other model parts might've been deleted as well @@ -2607,33 +2662,33 @@ func deleteSharedRuntime(context *gin.Context) { return } } - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "shared runtime not found", }) } } -func createNewDataAsset(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) createNewDataAsset(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { payload := payloadDataAsset{} - err := context.BindJSON(&payload) + err := ginContext.BindJSON(&payload) if err != nil { log.Println(err) - context.JSON(http.StatusBadRequest, gin.H{ + ginContext.JSON(http.StatusBadRequest, gin.H{ "error": "unable to parse request payload", }) return } // yes, here keyed by title in YAML for better readability in the YAML file itself if _, exists := 
modelInput.DataAssets[payload.Title]; exists { - context.JSON(http.StatusConflict, gin.H{ + ginContext.JSON(http.StatusConflict, gin.H{ "error": "data asset with this title already exists", }) return @@ -2641,13 +2696,13 @@ func createNewDataAsset(context *gin.Context) { // but later it will in memory keyed by its "id", so do this uniqueness check also for _, asset := range modelInput.DataAssets { if asset.ID == payload.Id { - context.JSON(http.StatusConflict, gin.H{ + ginContext.JSON(http.StatusConflict, gin.H{ "error": "data asset with this id already exists", }) return } } - dataAssetInput, ok := populateDataAsset(context, payload) + dataAssetInput, ok := context.populateDataAsset(ginContext, payload) if !ok { return } @@ -2655,9 +2710,9 @@ func createNewDataAsset(context *gin.Context) { modelInput.DataAssets = make(map[string]model.InputDataAsset) } modelInput.DataAssets[payload.Title] = dataAssetInput - ok = writeModel(context, key, folderNameOfKey, &modelInput, "Data Asset Creation") + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Data Asset Creation") if ok { - context.JSON(http.StatusOK, gin.H{ + ginContext.JSON(http.StatusOK, gin.H{ "message": "data asset created", "id": dataAssetInput.ID, }) @@ -2665,30 +2720,30 @@ func createNewDataAsset(context *gin.Context) { } } -func populateDataAsset(context *gin.Context, payload payloadDataAsset) (dataAssetInput model.InputDataAsset, ok bool) { +func (context *Context) populateDataAsset(ginContext *gin.Context, payload payloadDataAsset) (dataAssetInput model.InputDataAsset, ok bool) { usage, err := model.ParseUsage(payload.Usage) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return dataAssetInput, false } quantity, err := model.ParseQuantity(payload.Quantity) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return dataAssetInput, false } confidentiality, err := 
model.ParseConfidentiality(payload.Confidentiality) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return dataAssetInput, false } integrity, err := model.ParseCriticality(payload.Integrity) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return dataAssetInput, false } availability, err := model.ParseCriticality(payload.Availability) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return dataAssetInput, false } dataAssetInput = model.InputDataAsset{ @@ -2707,46 +2762,46 @@ func populateDataAsset(context *gin.Context, payload payloadDataAsset) (dataAsse return dataAssetInput, true } -func getDataAssets(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) getDataAssets(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, aModel.DataAssets) + ginContext.JSON(http.StatusOK, aModel.DataAssets) } } -func getTrustBoundaries(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) getTrustBoundaries(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) 
+ aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, aModel.TrustBoundaries) + ginContext.JSON(http.StatusOK, aModel.TrustBoundaries) } } -func getSharedRuntimes(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) getSharedRuntimes(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, aModel.SharedRuntimes) + ginContext.JSON(http.StatusOK, aModel.SharedRuntimes) } } -func arrayOfStringValues(values []model.TypeEnum) []string { +func (context *Context) arrayOfStringValues(values []model.TypeEnum) []string { result := make([]string, 0) for _, value := range values { result = append(result, value.String()) @@ -2754,48 +2809,48 @@ func arrayOfStringValues(values []model.TypeEnum) []string { return result } -func getModel(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) getModel(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - _, yamlText, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + _, yamlText, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { - tmpResultFile, err := os.CreateTemp(tempFolder, "threagile-*.yaml") + tmpResultFile, err := 
os.CreateTemp(*context.tempFolder, "threagile-*.yaml") checkErr(err) err = os.WriteFile(tmpResultFile.Name(), []byte(yamlText), 0400) if err != nil { log.Println(err) - context.JSON(http.StatusInternalServerError, gin.H{ + ginContext.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to stream model file", }) return } defer func() { _ = os.Remove(tmpResultFile.Name()) }() - context.FileAttachment(tmpResultFile.Name(), outputFile) + ginContext.FileAttachment(tmpResultFile.Name(), outputFile) } } type payloadModels struct { - ID string `json:"id"` - Title string `json:"title"` - TimestampCreated time.Time `json:"timestamp_created"` - TimestampModified time.Time `json:"timestamp_modified"` + ID string `yaml:"id" json:"id"` + Title string `yaml:"title" json:"title"` + TimestampCreated time.Time `yaml:"timestamp_created" json:"timestamp_created"` + TimestampModified time.Time `yaml:"timestamp_modified" json:"timestamp_modified"` } type payloadCover struct { - Title string `json:"title"` - Date time.Time `json:"date"` - Author model.Author `json:"author"` + Title string `yaml:"title" json:"title"` + Date time.Time `yaml:"date" json:"date"` + Author model.Author `yaml:"author" json:"author"` } type payloadOverview struct { - ManagementSummaryComment string `json:"management_summary_comment"` - BusinessCriticality string `json:"business_criticality"` - BusinessOverview model.Overview `json:"business_overview"` - TechnicalOverview model.Overview `json:"technical_overview"` + ManagementSummaryComment string `yaml:"management_summary_comment" json:"management_summary_comment"` + BusinessCriticality string `yaml:"business_criticality" json:"business_criticality"` + BusinessOverview model.Overview `yaml:"business_overview" json:"business_overview"` + TechnicalOverview model.Overview `yaml:"technical_overview" json:"technical_overview"` } type payloadAbuseCases map[string]string @@ -2803,131 +2858,131 @@ type payloadAbuseCases map[string]string type 
payloadSecurityRequirements map[string]string type payloadDataAsset struct { - Title string `json:"title"` - Id string `json:"id"` - Description string `json:"description"` - Usage string `json:"usage"` - Tags []string `json:"tags"` - Origin string `json:"origin"` - Owner string `json:"owner"` - Quantity string `json:"quantity"` - Confidentiality string `json:"confidentiality"` - Integrity string `json:"integrity"` - Availability string `json:"availability"` - JustificationCiaRating string `json:"justification_cia_rating"` + Title string `yaml:"title" json:"title"` + Id string `yaml:"id" json:"id"` + Description string `yaml:"description" json:"description"` + Usage string `yaml:"usage" json:"usage"` + Tags []string `yaml:"tags" json:"tags"` + Origin string `yaml:"origin" json:"origin"` + Owner string `yaml:"owner" json:"owner"` + Quantity string `yaml:"quantity" json:"quantity"` + Confidentiality string `yaml:"confidentiality" json:"confidentiality"` + Integrity string `yaml:"integrity" json:"integrity"` + Availability string `yaml:"availability" json:"availability"` + JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` } type payloadSharedRuntime struct { - Title string `json:"title"` - Id string `json:"id"` - Description string `json:"description"` - Tags []string `json:"tags"` - TechnicalAssetsRunning []string `json:"technical_assets_running"` + Title string `yaml:"title" json:"title"` + Id string `yaml:"id" json:"id"` + Description string `yaml:"description" json:"description"` + Tags []string `yaml:"tags" json:"tags"` + TechnicalAssetsRunning []string `yaml:"technical_assets_running" json:"technical_assets_running"` } -func setSecurityRequirements(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) setSecurityRequirements(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - 
lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { payload := payloadSecurityRequirements{} - err := context.BindJSON(&payload) + err := ginContext.BindJSON(&payload) if err != nil { log.Println(err) - context.JSON(http.StatusBadRequest, gin.H{ + ginContext.JSON(http.StatusBadRequest, gin.H{ "error": "unable to parse request payload", }) return } modelInput.SecurityRequirements = payload - ok = writeModel(context, key, folderNameOfKey, &modelInput, "Security Requirements Update") + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Security Requirements Update") if ok { - context.JSON(http.StatusOK, gin.H{ + ginContext.JSON(http.StatusOK, gin.H{ "message": "model updated", }) } } } -func getSecurityRequirements(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) getSecurityRequirements(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, aModel.SecurityRequirements) + ginContext.JSON(http.StatusOK, aModel.SecurityRequirements) } } -func setAbuseCases(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) setAbuseCases(ginContext *gin.Context) { + folderNameOfKey, key, ok := 
context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { payload := payloadAbuseCases{} - err := context.BindJSON(&payload) + err := ginContext.BindJSON(&payload) if err != nil { log.Println(err) - context.JSON(http.StatusBadRequest, gin.H{ + ginContext.JSON(http.StatusBadRequest, gin.H{ "error": "unable to parse request payload", }) return } modelInput.AbuseCases = payload - ok = writeModel(context, key, folderNameOfKey, &modelInput, "Abuse Cases Update") + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Abuse Cases Update") if ok { - context.JSON(http.StatusOK, gin.H{ + ginContext.JSON(http.StatusOK, gin.H{ "message": "model updated", }) } } } -func getAbuseCases(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) getAbuseCases(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, aModel.AbuseCases) + ginContext.JSON(http.StatusOK, aModel.AbuseCases) } } -func setOverview(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) setOverview(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if 
!ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { payload := payloadOverview{} - err := context.BindJSON(&payload) + err := ginContext.BindJSON(&payload) if err != nil { log.Println(err) - context.JSON(http.StatusBadRequest, gin.H{ + ginContext.JSON(http.StatusBadRequest, gin.H{ "error": "unable to parse request payload", }) return } criticality, err := model.ParseCriticality(payload.BusinessCriticality) if err != nil { - handleErrorInServiceCall(err, context) + context.handleErrorInServiceCall(err, ginContext) return } modelInput.ManagementSummaryComment = payload.ManagementSummaryComment @@ -2936,32 +2991,32 @@ func setOverview(context *gin.Context) { modelInput.BusinessOverview.Images = payload.BusinessOverview.Images modelInput.TechnicalOverview.Description = payload.TechnicalOverview.Description modelInput.TechnicalOverview.Images = payload.TechnicalOverview.Images - ok = writeModel(context, key, folderNameOfKey, &modelInput, "Overview Update") + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Overview Update") if ok { - context.JSON(http.StatusOK, gin.H{ + ginContext.JSON(http.StatusOK, gin.H{ "message": "model updated", }) } } } -func handleErrorInServiceCall(err error, context *gin.Context) { +func (context *Context) handleErrorInServiceCall(err error, ginContext *gin.Context) { log.Println(err) - context.JSON(http.StatusBadRequest, gin.H{ + ginContext.JSON(http.StatusBadRequest, gin.H{ "error": strings.TrimSpace(err.Error()), }) } -func getOverview(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) getOverview(ginContext *gin.Context) { + folderNameOfKey, 
key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, gin.H{ + ginContext.JSON(http.StatusOK, gin.H{ "management_summary_comment": aModel.ManagementSummaryComment, "business_criticality": aModel.BusinessCriticality, "business_overview": aModel.BusinessOverview, @@ -2970,19 +3025,19 @@ func getOverview(context *gin.Context) { } } -func setCover(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) setCover(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { payload := payloadCover{} - err := context.BindJSON(&payload) + err := ginContext.BindJSON(&payload) if err != nil { - context.JSON(http.StatusBadRequest, gin.H{ + ginContext.JSON(http.StatusBadRequest, gin.H{ "error": "unable to parse request payload", }) return @@ -2993,25 +3048,25 @@ func setCover(context *gin.Context) { } modelInput.Author.Name = payload.Author.Name modelInput.Author.Homepage = payload.Author.Homepage - ok = writeModel(context, key, folderNameOfKey, &modelInput, "Cover Update") + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Cover Update") if ok { - context.JSON(http.StatusOK, gin.H{ + 
ginContext.JSON(http.StatusOK, gin.H{ "message": "model updated", }) } } } -func getCover(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) getCover(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - aModel, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { - context.JSON(http.StatusOK, gin.H{ + ginContext.JSON(http.StatusOK, gin.H{ "title": aModel.Title, "date": aModel.Date, "author": aModel.Author, @@ -3020,22 +3075,22 @@ func getCover(context *gin.Context) { } // creates a sub-folder (named by a new UUID) inside the token folder -func createNewModel(context *gin.Context) { - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) createNewModel(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - ok = checkObjectCreationThrottler(context, "MODEL") + ok = context.checkObjectCreationThrottler(ginContext, "MODEL") if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) aUuid := uuid.New().String() - err := os.Mkdir(folderNameForModel(folderNameOfKey, aUuid), 0700) + err := os.Mkdir(context.folderNameForModel(folderNameOfKey, aUuid), 0700) if err != nil { - context.JSON(http.StatusInternalServerError, gin.H{ + ginContext.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to create model", }) return @@ -3072,28 +3127,28 @@ diagram_tweak_suppress_edge_labels: false diagram_tweak_invisible_connections_between_assets: [] 
diagram_tweak_same_rank_assets: []` - ok = writeModelYAML(context, aYaml, key, folderNameForModel(folderNameOfKey, aUuid), "New Model Creation", true) + ok = context.writeModelYAML(ginContext, aYaml, key, context.folderNameForModel(folderNameOfKey, aUuid), "New Model Creation", true) if ok { - context.JSON(http.StatusCreated, gin.H{ + ginContext.JSON(http.StatusCreated, gin.H{ "message": "model created", "id": aUuid, }) } } -func listModels(context *gin.Context) { // TODO currently returns error when any model is no longer valid in syntax, so eventually have some fallback to not just bark on an invalid model... - folderNameOfKey, key, ok := checkTokenToFolderName(context) +func (context *Context) listModels(ginContext *gin.Context) { // TODO currently returns error when any model is no longer valid in syntax, so eventually have some fallback to not just bark on an invalid model... + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) result := make([]payloadModels, 0) modelFolders, err := os.ReadDir(folderNameOfKey) if err != nil { log.Println(err) - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "token not found", }) return @@ -3103,19 +3158,19 @@ func listModels(context *gin.Context) { // TODO currently returns error when any modelStat, err := os.Stat(filepath.Join(folderNameOfKey, dirEntry.Name(), outputFile)) if err != nil { log.Println(err) - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "unable to list model", }) return } - aModel, _, ok := readModel(context, dirEntry.Name(), key, folderNameOfKey) + aModel, _, ok := context.readModel(ginContext, dirEntry.Name(), key, folderNameOfKey) if !ok { return } fileInfo, err := dirEntry.Info() if err != nil { log.Println(err) - 
context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "unable to get file info", }) return @@ -3128,41 +3183,41 @@ func listModels(context *gin.Context) { // TODO currently returns error when any }) } } - context.JSON(http.StatusOK, result) + ginContext.JSON(http.StatusOK, result) } -func deleteModel(context *gin.Context) { - folderNameOfKey, _, ok := checkTokenToFolderName(context) +func (context *Context) deleteModel(ginContext *gin.Context) { + folderNameOfKey, _, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - lockFolder(folderNameOfKey) - defer unlockFolder(folderNameOfKey) - folder, ok := checkModelFolder(context, context.Param("model-id"), folderNameOfKey) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + folder, ok := context.checkModelFolder(ginContext, ginContext.Param("model-id"), folderNameOfKey) if ok { err := os.RemoveAll(folder) if err != nil { - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "model not found", }) } - context.JSON(http.StatusOK, gin.H{ + ginContext.JSON(http.StatusOK, gin.H{ "message": "model deleted", }) } } -func checkModelFolder(context *gin.Context, modelUUID string, folderNameOfKey string) (modelFolder string, ok bool) { +func (context *Context) checkModelFolder(ginContext *gin.Context, modelUUID string, folderNameOfKey string) (modelFolder string, ok bool) { uuidParsed, err := uuid.Parse(modelUUID) if err != nil { - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "model not found", }) return modelFolder, false } - modelFolder = folderNameForModel(folderNameOfKey, uuidParsed.String()) + modelFolder = context.folderNameForModel(folderNameOfKey, uuidParsed.String()) if _, err := os.Stat(modelFolder); os.IsNotExist(err) { - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "model not found", }) 
return modelFolder, false @@ -3170,16 +3225,16 @@ func checkModelFolder(context *gin.Context, modelUUID string, folderNameOfKey st return modelFolder, true } -func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfKey string) (modelInputResult model.ModelInput, yamlText string, ok bool) { - modelFolder, ok := checkModelFolder(context, modelUUID, folderNameOfKey) +func (context *Context) readModel(ginContext *gin.Context, modelUUID string, key []byte, folderNameOfKey string) (modelInputResult model.ModelInput, yamlText string, ok bool) { + modelFolder, ok := context.checkModelFolder(ginContext, modelUUID, folderNameOfKey) if !ok { return modelInputResult, yamlText, false } - cryptoKey := generateKeyFromAlreadyStrongRandomInput(key) + cryptoKey := context.generateKeyFromAlreadyStrongRandomInput(key) block, err := aes.NewCipher(cryptoKey) if err != nil { log.Println(err) - context.JSON(http.StatusInternalServerError, gin.H{ + ginContext.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to open model", }) return modelInputResult, yamlText, false @@ -3187,7 +3242,7 @@ func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfK aesGcm, err := cipher.NewGCM(block) if err != nil { log.Println(err) - context.JSON(http.StatusInternalServerError, gin.H{ + ginContext.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to open model", }) return modelInputResult, yamlText, false @@ -3196,7 +3251,7 @@ func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfK fileBytes, err := os.ReadFile(filepath.Join(modelFolder, outputFile)) if err != nil { log.Println(err) - context.JSON(http.StatusInternalServerError, gin.H{ + ginContext.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to open model", }) return modelInputResult, yamlText, false @@ -3207,7 +3262,7 @@ func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfK plaintext, err := aesGcm.Open(nil, nonce, 
ciphertext, nil) if err != nil { log.Println(err) - context.JSON(http.StatusInternalServerError, gin.H{ + ginContext.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to open model", }) return modelInputResult, yamlText, false @@ -3216,34 +3271,34 @@ func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfK r, err := gzip.NewReader(bytes.NewReader(plaintext)) if err != nil { log.Println(err) - context.JSON(http.StatusInternalServerError, gin.H{ + ginContext.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to open model", }) return modelInputResult, yamlText, false } buf := new(bytes.Buffer) _, _ = buf.ReadFrom(r) - modelInput := model.ModelInput{} + modelInput := new(model.ModelInput).Defaults() yamlBytes := buf.Bytes() err = yaml.Unmarshal(yamlBytes, &modelInput) if err != nil { log.Println(err) - context.JSON(http.StatusInternalServerError, gin.H{ + ginContext.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to open model", }) return modelInputResult, yamlText, false } - return modelInput, string(yamlBytes), true + return *modelInput, string(yamlBytes), true } -func writeModel(context *gin.Context, key []byte, folderNameOfKey string, modelInput *model.ModelInput, changeReasonForHistory string) (ok bool) { - modelFolder, ok := checkModelFolder(context, context.Param("model-id"), folderNameOfKey) +func (context *Context) writeModel(ginContext *gin.Context, key []byte, folderNameOfKey string, modelInput *model.ModelInput, changeReasonForHistory string) (ok bool) { + modelFolder, ok := context.checkModelFolder(ginContext, ginContext.Param("model-id"), folderNameOfKey) if ok { modelInput.ThreagileVersion = model.ThreagileVersion yamlBytes, err := yaml.Marshal(modelInput) if err != nil { log.Println(err) - context.JSON(http.StatusInternalServerError, gin.H{ + ginContext.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to write model", }) return false @@ -3251,13 +3306,13 @@ func writeModel(context 
*gin.Context, key []byte, folderNameOfKey string, modelI /* yamlBytes = model.ReformatYAML(yamlBytes) */ - return writeModelYAML(context, string(yamlBytes), key, modelFolder, changeReasonForHistory, false) + return context.writeModelYAML(ginContext, string(yamlBytes), key, modelFolder, changeReasonForHistory, false) } return false } -func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder string, changeReasonForHistory string, skipBackup bool) (ok bool) { - if *verbose { +func (context *Context) writeModelYAML(ginContext *gin.Context, yaml string, key []byte, modelFolder string, changeReasonForHistory string, skipBackup bool) (ok bool) { + if *context.verbose { fmt.Println("about to write " + strconv.Itoa(len(yaml)) + " bytes of yaml into model folder: " + modelFolder) } var b bytes.Buffer @@ -3265,11 +3320,11 @@ func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder s _, _ = w.Write([]byte(yaml)) _ = w.Close() plaintext := b.Bytes() - cryptoKey := generateKeyFromAlreadyStrongRandomInput(key) + cryptoKey := context.generateKeyFromAlreadyStrongRandomInput(key) block, err := aes.NewCipher(cryptoKey) if err != nil { log.Println(err) - context.JSON(http.StatusInternalServerError, gin.H{ + ginContext.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to write model", }) return false @@ -3278,7 +3333,7 @@ func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder s nonce := make([]byte, 12) if _, err := io.ReadFull(rand.Reader, nonce); err != nil { log.Println(err) - context.JSON(http.StatusInternalServerError, gin.H{ + ginContext.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to write model", }) return false @@ -3286,17 +3341,17 @@ func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder s aesGcm, err := cipher.NewGCM(block) if err != nil { log.Println(err) - context.JSON(http.StatusInternalServerError, gin.H{ + ginContext.JSON(http.StatusInternalServerError, 
gin.H{ "error": "unable to write model", }) return false } ciphertext := aesGcm.Seal(nil, nonce, plaintext, nil) if !skipBackup { - err = backupModelToHistory(modelFolder, changeReasonForHistory) + err = context.backupModelToHistory(modelFolder, changeReasonForHistory) if err != nil { log.Println(err) - context.JSON(http.StatusInternalServerError, gin.H{ + ginContext.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to write model", }) return false @@ -3305,7 +3360,7 @@ func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder s f, err := os.Create(filepath.Join(modelFolder, outputFile)) if err != nil { log.Println(err) - context.JSON(http.StatusInternalServerError, gin.H{ + ginContext.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to write model", }) return false @@ -3316,7 +3371,7 @@ func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder s return true } -func backupModelToHistory(modelFolder string, changeReasonForHistory string) (err error) { +func (context *Context) backupModelToHistory(modelFolder string, changeReasonForHistory string) (err error) { historyFolder := filepath.Join(modelFolder, "history") if _, err := os.Stat(historyFolder); os.IsNotExist(err) { err = os.Mkdir(historyFolder, 0700) @@ -3365,7 +3420,7 @@ type argon2Params struct { keyLength uint32 } -func generateKeyFromAlreadyStrongRandomInput(alreadyRandomInput []byte) []byte { +func (context *Context) generateKeyFromAlreadyStrongRandomInput(alreadyRandomInput []byte) []byte { // Establish the parameters to use for Argon2. 
p := &argon2Params{ memory: 64 * 1024, @@ -3380,14 +3435,14 @@ func generateKeyFromAlreadyStrongRandomInput(alreadyRandomInput []byte) []byte { return hash } -func folderNameForModel(folderNameOfKey string, uuid string) string { +func (context *Context) folderNameForModel(folderNameOfKey string, uuid string) string { return filepath.Join(folderNameOfKey, uuid) } var throttlerLock sync.Mutex var createdObjectsThrottler = make(map[string][]int64) -func checkObjectCreationThrottler(context *gin.Context, typeName string) bool { +func (context *Context) checkObjectCreationThrottler(ginContext *gin.Context, typeName string) bool { throttlerLock.Lock() defer throttlerLock.Unlock() @@ -3424,7 +3479,7 @@ func checkObjectCreationThrottler(context *gin.Context, typeName string) bool { createdObjectsThrottler[keyHash] = append(createdObjectsThrottler[keyHash], now) return true } - context.JSON(http.StatusTooManyRequests, gin.H{ + ginContext.JSON(http.StatusTooManyRequests, gin.H{ "error": "object creation throttling exceeded (denial-of-service protection): please wait some time and try again", }) return false @@ -3432,9 +3487,9 @@ func checkObjectCreationThrottler(context *gin.Context, typeName string) bool { var locksByFolderName = make(map[string]*sync.Mutex) -func lockFolder(folderName string) { - globalLock.Lock() - defer globalLock.Unlock() +func (context *Context) lockFolder(folderName string) { + context.globalLock.Lock() + defer context.globalLock.Unlock() _, exists := locksByFolderName[folderName] if !exists { locksByFolderName[folderName] = &sync.Mutex{} @@ -3442,7 +3497,7 @@ func lockFolder(folderName string) { locksByFolderName[folderName].Lock() } -func unlockFolder(folderName string) { +func (context *Context) unlockFolder(folderName string) { if _, exists := locksByFolderName[folderName]; exists { locksByFolderName[folderName].Unlock() delete(locksByFolderName, folderName) @@ -3456,9 +3511,9 @@ type keyHeader struct { Key string `header:"key"` } -func 
folderNameFromKey(key []byte) string { +func (context *Context) folderNameFromKey(key []byte) string { sha512Hash := hashSHA256(key) - return filepath.Join(baseFolder, sha512Hash) + return filepath.Join(*context.baseFolder, sha512Hash) } func hashSHA256(key []byte) string { @@ -3467,41 +3522,41 @@ func hashSHA256(key []byte) string { return hex.EncodeToString(hasher.Sum(nil)) } -func createKey(context *gin.Context) { - ok := checkObjectCreationThrottler(context, "KEY") +func (context *Context) createKey(ginContext *gin.Context) { + ok := context.checkObjectCreationThrottler(ginContext, "KEY") if !ok { return } - globalLock.Lock() - defer globalLock.Unlock() + context.globalLock.Lock() + defer context.globalLock.Unlock() keyBytesArr := make([]byte, keySize) n, err := rand.Read(keyBytesArr[:]) if n != keySize || err != nil { log.Println(err) - context.JSON(http.StatusInternalServerError, gin.H{ + ginContext.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to create key", }) return } - err = os.Mkdir(folderNameFromKey(keyBytesArr), 0700) + err = os.Mkdir(context.folderNameFromKey(keyBytesArr), 0700) if err != nil { log.Println(err) - context.JSON(http.StatusInternalServerError, gin.H{ + ginContext.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to create key", }) return } - context.JSON(http.StatusCreated, gin.H{ + ginContext.JSON(http.StatusCreated, gin.H{ "key": base64.RawURLEncoding.EncodeToString(keyBytesArr[:]), }) } -func checkTokenToFolderName(context *gin.Context) (folderNameOfKey string, key []byte, ok bool) { +func (context *Context) checkTokenToFolderName(ginContext *gin.Context) (folderNameOfKey string, key []byte, ok bool) { header := tokenHeader{} - if err := context.ShouldBindHeader(&header); err != nil { + if err := ginContext.ShouldBindHeader(&header); err != nil { log.Println(err) - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "token not found", }) return folderNameOfKey, 
key, false @@ -3511,22 +3566,22 @@ func checkTokenToFolderName(context *gin.Context) (folderNameOfKey string, key [ if err != nil { log.Println(err) } - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "token not found", }) return folderNameOfKey, key, false } - globalLock.Lock() - defer globalLock.Unlock() + context.globalLock.Lock() + defer context.globalLock.Unlock() housekeepingTokenMaps() // to remove timed-out ones tokenHash := hashSHA256(token) if timeoutStruct, exists := mapTokenHashToTimeoutStruct[tokenHash]; exists { // re-create the key from token key := xor(token, timeoutStruct.xorRand) - folderNameOfKey := folderNameFromKey(key) + folderNameOfKey := context.folderNameFromKey(key) if _, err := os.Stat(folderNameOfKey); os.IsNotExist(err) { log.Println(err) - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "token not found", }) return folderNameOfKey, key, false @@ -3534,18 +3589,18 @@ func checkTokenToFolderName(context *gin.Context) (folderNameOfKey string, key [ timeoutStruct.lastAccessedNanoTime = time.Now().UnixNano() return folderNameOfKey, key, true } else { - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "token not found", }) return folderNameOfKey, key, false } } -func checkKeyToFolderName(context *gin.Context) (folderNameOfKey string, key []byte, ok bool) { +func (context *Context) checkKeyToFolderName(ginContext *gin.Context) (folderNameOfKey string, key []byte, ok bool) { header := keyHeader{} - if err := context.ShouldBindHeader(&header); err != nil { + if err := ginContext.ShouldBindHeader(&header); err != nil { log.Println(err) - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "key not found", }) return folderNameOfKey, key, false @@ -3555,15 +3610,15 @@ func checkKeyToFolderName(context *gin.Context) (folderNameOfKey string, key []b if err != nil { 
log.Println(err) } - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "key not found", }) return folderNameOfKey, key, false } - folderNameOfKey = folderNameFromKey(key) + folderNameOfKey = context.folderNameFromKey(key) if _, err := os.Stat(folderNameOfKey); os.IsNotExist(err) { log.Println(err) - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "key not found", }) return folderNameOfKey, key, false @@ -3571,49 +3626,53 @@ func checkKeyToFolderName(context *gin.Context) (folderNameOfKey string, key []b return folderNameOfKey, key, true } -func deleteKey(context *gin.Context) { - folderName, _, ok := checkKeyToFolderName(context) +func (context *Context) deleteKey(ginContext *gin.Context) { + folderName, _, ok := context.checkKeyToFolderName(ginContext) if !ok { return } - globalLock.Lock() - defer globalLock.Unlock() + context.globalLock.Lock() + defer context.globalLock.Unlock() err := os.RemoveAll(folderName) if err != nil { log.Println("error during key delete: " + err.Error()) - context.JSON(http.StatusNotFound, gin.H{ + ginContext.JSON(http.StatusNotFound, gin.H{ "error": "key not found", }) return } - context.JSON(http.StatusOK, gin.H{ + ginContext.JSON(http.StatusOK, gin.H{ "message": "key deleted", }) } -func parseCommandlineArgs() { - modelFilename = flag.String("model", outputFile, "input model yaml file") - outputDir = flag.String("output", ".", "output directory") - raaPlugin = flag.String("raa-plugin", "raa.so", "RAA calculation plugin (.so shared object) file name") - executeModelMacro = flag.String("execute-model-macro", "", "Execute model macro (by ID)") - createExampleModel = flag.Bool("create-example-model", false, "just create an example model named threagile-example-model.yaml in the output directory") - createStubModel = flag.Bool("create-stub-model", false, "just create a minimal stub model named threagile-stub-model.yaml in the output directory") - 
createEditingSupport = flag.Bool("create-editing-support", false, "just create some editing support stuff in the output directory") - serverPort = flag.Int("server", 0, "start a server (instead of commandline execution) on the given port") - templateFilename = flag.String("background", "background.pdf", "background pdf file") - generateDataFlowDiagram = flag.Bool("generate-data-flow-diagram", true, "generate data-flow diagram") - generateDataAssetDiagram = flag.Bool("generate-data-asset-diagram", true, "generate data asset diagram") - generateRisksJSON = flag.Bool("generate-risks-json", true, "generate risks json") - generateTechnicalAssetsJSON = flag.Bool("generate-technical-assets-json", true, "generate technical assets json") - generateStatsJSON = flag.Bool("generate-stats-json", true, "generate stats json") - generateRisksExcel = flag.Bool("generate-risks-excel", true, "generate risks excel") - generateTagsExcel = flag.Bool("generate-tags-excel", true, "generate tags excel") - generateReportPDF = flag.Bool("generate-report-pdf", true, "generate report pdf, including diagrams") - diagramDPI = flag.Int("diagram-dpi", defaultGraphvizDPI, "DPI used to render: maximum is "+strconv.Itoa(maxGraphvizDPI)+"") - skipRiskRules = flag.String("skip-risk-rules", "", "comma-separated list of risk rules (by their ID) to skip") - riskRulesPlugins = flag.String("custom-risk-rules-plugins", "", "comma-separated list of plugins (.so shared object) file names with custom risk rules to load") - verbose = flag.Bool("verbose", false, "verbose output") - ignoreOrphanedRiskTracking = flag.Bool("ignore-orphaned-risk-tracking", false, "ignore orphaned risk tracking (just log them) not matching a concrete risk") +func (context *Context) parseCommandlineArgs() { + context.tempFolder = flag.String("temp-dir", tmpFolder, "temporary folder location") + context.modelFilename = flag.String("model", outputFile, "input model yaml file") + context.outputDir = flag.String("output", ".", "output 
directory") + context.raaPlugin = flag.String("raa-plugin", "raa.so", "RAA calculation plugin (.so shared object) file name") + context.executeModelMacro = flag.String("execute-model-macro", "", "Execute model macro (by ID)") + context.testParseModel = flag.Bool("test-parse-model", false, "test parse model functionality") + context.createExampleModel = flag.Bool("create-example-model", false, "just create an example model named threagile-example-model.yaml in the output directory") + context.createStubModel = flag.Bool("create-stub-model", false, "just create a minimal stub model named threagile-stub-model.yaml in the output directory") + context.createEditingSupport = flag.Bool("create-editing-support", false, "just create some editing support stuff in the output directory") + context.serverPort = flag.Int("server", 0, "start a server (instead of commandline execution) on the given port") + context.templateFilename = flag.String("background", "background.pdf", "background pdf file") + context.generateDataFlowDiagram = flag.Bool("generate-data-flow-diagram", true, "generate data-flow diagram") + context.generateDataAssetDiagram = flag.Bool("generate-data-asset-diagram", true, "generate data asset diagram") + context.generateRisksJSON = flag.Bool("generate-risks-json", true, "generate risks json") + context.generateTechnicalAssetsJSON = flag.Bool("generate-technical-assets-json", true, "generate technical assets json") + context.generateStatsJSON = flag.Bool("generate-stats-json", true, "generate stats json") + context.generateRisksExcel = flag.Bool("generate-risks-excel", true, "generate risks excel") + context.generateTagsExcel = flag.Bool("generate-tags-excel", true, "generate tags excel") + context.generateReportPDF = flag.Bool("generate-report-pdf", true, "generate report pdf, including diagrams") + context.diagramDPI = flag.Int("diagram-dpi", defaultGraphvizDPI, "DPI used to render: maximum is "+strconv.Itoa(maxGraphvizDPI)+"") + context.skipRiskRules = 
flag.String("skip-risk-rules", "", "comma-separated list of risk rules (by their ID) to skip") + context.riskRulesPlugins = flag.String("custom-risk-rules-plugins", "", "comma-separated list of plugins (.so shared object) file names with custom risk rules to load") + context.verbose = flag.Bool("verbose", false, "verbose output") + context.ignoreOrphanedRiskTracking = flag.Bool("ignore-orphaned-risk-tracking", false, "ignore orphaned risk tracking (just log them) not matching a concrete risk") + context.appFolder = flag.String("app-folder", appFolder, "app folder (default: "+appFolder+")") + context.baseFolder = flag.String("base-folder", baseFolder, "base folder (default: "+baseFolder+")") version := flag.Bool("version", false, "print version") listTypes := flag.Bool("list-types", false, "print type information (enum values to be used in models)") listRiskRules := flag.Bool("list-risk-rules", false, "print risk rules") @@ -3624,7 +3683,7 @@ func parseCommandlineArgs() { print3rdParty := flag.Bool("print-3rd-party-licenses", false, "print 3rd-party license information") license := flag.Bool("print-license", false, "print license information") flag.Usage = func() { - printLogo() + context.printLogo() _, _ = fmt.Fprintf(os.Stderr, "Usage: threagile [options]") fmt.Println() fmt.Println() @@ -3638,33 +3697,33 @@ func parseCommandlineArgs() { fmt.Println() fmt.Println("If you want to create an example model (via docker) as a starting point to learn about Threagile just run: ") fmt.Println(" docker run --rm -it " + - "-v \"$(pwd)\":" + filepath.Join(appFolder, "work") + " " + + "-v \"$(pwd)\":" + filepath.Join(*context.appFolder, "work") + " " + "threagile/threagile " + "-create-example-model " + - "-output " + filepath.Join(appFolder, "work")) + "-output " + filepath.Join(*context.appFolder, "work")) fmt.Println() fmt.Println("If you want to create a minimal stub model (via docker) as a starting point for your own model just run: ") fmt.Println(" docker run --rm -it " 
+ - "-v \"$(pwd)\":" + filepath.Join(appFolder, "work") + " " + + "-v \"$(pwd)\":" + filepath.Join(*context.appFolder, "work") + " " + "threagile/threagile " + "-create-stub-model " + - "-output " + filepath.Join(appFolder, "work")) + "-output " + filepath.Join(*context.appFolder, "work")) fmt.Println() - printExamples() + context.printExamples() fmt.Println() } flag.Parse() - if *diagramDPI < 20 { - *diagramDPI = 20 - } else if *diagramDPI > maxGraphvizDPI { - *diagramDPI = 300 + if *context.diagramDPI < 20 { + *context.diagramDPI = 20 + } else if *context.diagramDPI > maxGraphvizDPI { + *context.diagramDPI = 300 } if *version { - printLogo() + context.printLogo() os.Exit(0) } if *listTypes { - printLogo() + context.printLogo() fmt.Println("The following types are available (can be extended for custom rules):") fmt.Println() printTypes("Authentication", model.AuthenticationValues()) @@ -3712,7 +3771,7 @@ func parseCommandlineArgs() { os.Exit(0) } if *listModelMacros { - printLogo() + context.printLogo() fmt.Println("The following model macros are available (can be extended via custom model macros):") fmt.Println() /* TODO finish plugin stuff @@ -3735,14 +3794,14 @@ func parseCommandlineArgs() { os.Exit(0) } if *listRiskRules { - printLogo() + context.printLogo() fmt.Println("The following risk rules are available (can be extended via custom risk rules):") fmt.Println() fmt.Println("------------------") fmt.Println("Custom risk rules:") fmt.Println("------------------") - loadCustomRiskRules() - for id, customRule := range customRiskRules { + context.loadCustomRiskRules() + for id, customRule := range context.customRiskRules { fmt.Println(id, "-->", customRule.Category().Title, "--> with tags:", customRule.SupportedTags()) } fmt.Println() @@ -3795,7 +3854,7 @@ func parseCommandlineArgs() { os.Exit(0) } if *explainTypes { - printLogo() + context.printLogo() fmt.Println("Explanation for the types:") fmt.Println() printExplainTypes("Authentication", 
model.AuthenticationValues()) @@ -3823,7 +3882,7 @@ func parseCommandlineArgs() { os.Exit(0) } if *explainModelMacros { - printLogo() + context.printLogo() fmt.Println("Explanation for the model macros:") fmt.Println() fmt.Printf("%v: %v\n", addbuildpipeline.GetMacroDetails().ID, addbuildpipeline.GetMacroDetails().Description) @@ -3837,7 +3896,7 @@ func parseCommandlineArgs() { } if *explainRiskRules { - printLogo() + context.printLogo() fmt.Println("Explanation for risk rules:") fmt.Println() fmt.Printf("%v: %v\n", accidentalsecretleak.Category().Id, accidentalsecretleak.Category().Description) @@ -3886,7 +3945,7 @@ func parseCommandlineArgs() { os.Exit(0) } if *print3rdParty { - printLogo() + context.printLogo() fmt.Println("Kudos & Credits to the following open-source projects:") fmt.Println(" - golang (Google Go License): https://golang.org/LICENSE") fmt.Println(" - go-yaml (MIT License): https://github.com/go-yaml/yaml/blob/v3/LICENSE") @@ -3902,34 +3961,56 @@ func parseCommandlineArgs() { os.Exit(0) } if *license { - printLogo() - content, err := os.ReadFile(filepath.Join(appFolder, "LICENSE.txt")) + context.printLogo() + content, err := os.ReadFile(filepath.Join(*context.appFolder, "LICENSE.txt")) checkErr(err) fmt.Print(string(content)) fmt.Println() os.Exit(0) } - if *createExampleModel { - createExampleModelFile() - printLogo() + if *context.testParseModel { + testError := context.goTestParseModel() + if testError != nil { + log.Fatalf("parse test failed: %v", testError) + return + } + fmt.Println("Parse test successful.") + fmt.Println() + os.Exit(0) + } + if *context.createExampleModel { + exampleError := context.createExampleModelFile() + if exampleError != nil { + log.Fatalf("Unable to copy example model: %v", exampleError) + return + } + context.printLogo() fmt.Println("An example model was created named threagile-example-model.yaml in the output directory.") fmt.Println() - printExamples() + context.printExamples() fmt.Println() os.Exit(0) } - if 
*createStubModel { - createStubModelFile() - printLogo() + if *context.createStubModel { + stubError := context.createStubModelFile() + if stubError != nil { + log.Fatalf("Unable to copy stub model: %v", stubError) + return + } + context.printLogo() fmt.Println("A minimal stub model was created named threagile-stub-model.yaml in the output directory.") fmt.Println() - printExamples() + context.printExamples() fmt.Println() os.Exit(0) } - if *createEditingSupport { - createEditingSupportFiles() - printLogo() + if *context.createEditingSupport { + supportError := context.createEditingSupportFiles() + if supportError != nil { + log.Fatalf("Unable to copy editing support files: %v", supportError) + return + } + context.printLogo() fmt.Println("The following files were created in the output directory:") fmt.Println(" - schema.json") fmt.Println(" - live-templates.txt") @@ -3943,50 +4024,58 @@ func parseCommandlineArgs() { } } -func printLogo() { +func (context *Context) printLogo() { fmt.Println() fmt.Println(" _____ _ _ _ \n |_ _| |__ _ __ ___ __ _ __ _(_) | ___ \n | | | '_ \\| '__/ _ \\/ _` |/ _` | | |/ _ \\\n | | | | | | | | __/ (_| | (_| | | | __/\n |_| |_| |_|_| \\___|\\__,_|\\__, |_|_|\\___|\n |___/ ") fmt.Println("Threagile - Agile Threat Modeling") fmt.Println() fmt.Println() - printVersion() + context.printVersion() } -func printVersion() { +func (context *Context) printVersion() { fmt.Println("Documentation: https://threagile.io") fmt.Println("Docker Images: https://hub.docker.com/r/threagile/threagile") fmt.Println("Sourcecode: https://github.com/threagile") fmt.Println("License: Open-Source (MIT License)") - fmt.Println("Version: " + model.ThreagileVersion + " (" + buildTimestamp + ")") + fmt.Println("Version: " + model.ThreagileVersion + " (" + context.buildTimestamp + ")") fmt.Println() fmt.Println() } -func createExampleModelFile() { - _, _ = copyFile(filepath.Join(appFolder, "threagile-example-model.yaml"), filepath.Join(*outputDir, 
"threagile-example-model.yaml")) +func (context *Context) createExampleModelFile() error { + _, err := copyFile(filepath.Join(*context.appFolder, "threagile-example-model.yaml"), filepath.Join(*context.outputDir, "threagile-example-model.yaml")) + return err } -func createStubModelFile() { - loadCustomRiskRules() - stub, err := os.ReadFile(filepath.Join(appFolder, "threagile-stub-model.yaml")) - checkErr(err) - err = os.WriteFile(filepath.Join(*outputDir, "threagile-stub-model.yaml"), addSupportedTags(stub), 0644) - checkErr(err) +func (context *Context) createStubModelFile() error { + context.loadCustomRiskRules() + stub, err := os.ReadFile(filepath.Join(*context.appFolder, "threagile-stub-model.yaml")) + if err != nil { + return err + } + + return os.WriteFile(filepath.Join(*context.outputDir, "threagile-stub-model.yaml"), context.addSupportedTags(stub), 0644) } -func createEditingSupportFiles() { - _, _ = copyFile(filepath.Join(appFolder, "schema.json"), filepath.Join(*outputDir, "schema.json")) - _, _ = copyFile(filepath.Join(appFolder, "live-templates.txt"), filepath.Join(*outputDir, "live-templates.txt")) +func (context *Context) createEditingSupportFiles() error { + _, schemaError := copyFile(filepath.Join(*context.appFolder, "schema.json"), filepath.Join(*context.outputDir, "schema.json")) + if schemaError != nil { + return schemaError + } + + _, templateError := copyFile(filepath.Join(*context.appFolder, "live-templates.txt"), filepath.Join(*context.outputDir, "live-templates.txt")) + return templateError } -func printExamples() { +func (context *Context) printExamples() { fmt.Println("If you want to execute Threagile on a model yaml file (via docker): ") fmt.Println(" docker run --rm -it " + - "-v \"$(pwd)\":" + filepath.Join(appFolder, "work") + " " + + "-v \"$(pwd)\":" + filepath.Join(*context.appFolder, "work") + " " + "threagile/threagile " + "-verbose " + - "-model " + filepath.Join(appFolder, "work", outputFile) + " " + - "-output " + 
filepath.Join(appFolder, "work")) + "-model " + filepath.Join(*context.appFolder, "work", outputFile) + " " + + "-output " + filepath.Join(*context.appFolder, "work")) fmt.Println() fmt.Println("If you want to run Threagile as a server (REST API) on some port (here 8080): ") fmt.Println(" docker run --rm -it " + @@ -4000,13 +4089,13 @@ func printExamples() { fmt.Println(" docker run --rm -it threagile/threagile -list-types") fmt.Println() fmt.Println("If you want to use some nice editing help (syntax validation, autocompletion, and live templates) in your favourite IDE: ") - fmt.Println(" docker run --rm -it -v \"$(pwd)\":" + filepath.Join(appFolder, "work") + " threagile/threagile -create-editing-support -output " + filepath.Join(appFolder, "work")) + fmt.Println(" docker run --rm -it -v \"$(pwd)\":" + filepath.Join(*context.appFolder, "work") + " threagile/threagile -create-editing-support -output " + filepath.Join(*context.appFolder, "work")) fmt.Println() fmt.Println("If you want to list all available model macros (which are macros capable of reading a model yaml file, asking you questions in a wizard-style and then update the model yaml file accordingly): ") fmt.Println(" docker run --rm -it threagile/threagile -list-model-macros") fmt.Println() fmt.Println("If you want to execute a certain model macro on the model yaml file (here the macro add-build-pipeline): ") - fmt.Println(" docker run --rm -it -v \"$(pwd)\":" + filepath.Join(appFolder, "work") + " threagile/threagile -model " + filepath.Join(appFolder, "work", outputFile) + " -output " + filepath.Join(appFolder, "work") + " -execute-model-macro add-build-pipeline") + fmt.Println(" docker run --rm -it -v \"$(pwd)\":" + filepath.Join(*context.appFolder, "work") + " threagile/threagile -model " + filepath.Join(*context.appFolder, "work", outputFile) + " -output " + filepath.Join(*context.appFolder, "work") + " -execute-model-macro add-build-pipeline") } func printTypes(title string, value interface{}) { @@ 
-4046,1028 +4135,1069 @@ func copyFile(src, dst string) (int64, error) { return nBytes, err } -func parseModel(inputFilename string) { - if *verbose { - fmt.Println("Parsing model:", inputFilename) +func (context *Context) goTestParseModel() error { + flatModelFile := filepath.Join("test", "all.yaml") + flatModel := *new(model.ModelInput).Defaults() + flatLoadError := flatModel.Load(flatModelFile) + if flatLoadError != nil { + return fmt.Errorf("unable to parse model yaml %q: %v", flatModelFile, flatLoadError) } - modelYaml, err := os.ReadFile(inputFilename) - if err == nil { - modelInput = model.ModelInput{} - err = yaml.Unmarshal(modelYaml, &modelInput) - checkErr(err) - //fmt.Println(modelInput) - var businessCriticality model.Criticality - switch modelInput.BusinessCriticality { - case model.Archive.String(): - businessCriticality = model.Archive - case model.Operational.String(): - businessCriticality = model.Operational - case model.Important.String(): - businessCriticality = model.Important - case model.Critical.String(): - businessCriticality = model.Critical - case model.MissionCritical.String(): - businessCriticality = model.MissionCritical - default: - panic(errors.New("unknown 'business_criticality' value of application: " + modelInput.BusinessCriticality)) - } + sort.Slice(flatModel.TagsAvailable, func(i, j int) bool { + return flatModel.TagsAvailable[i] < flatModel.TagsAvailable[j] + }) + flatModel.TagsAvailable = []string{strings.Join(flatModel.TagsAvailable, ", ")} - reportDate := time.Now() - if len(modelInput.Date) > 0 { - reportDate, err = time.Parse("2006-01-02", modelInput.Date) - if err != nil { - panic(errors.New("unable to parse 'date' value of model file")) - } - } + flatData, flatMarshalError := json.MarshalIndent(flatModel, "", " ") + if flatMarshalError != nil { + return fmt.Errorf("unable to print model yaml %q: %v", flatModelFile, flatMarshalError) + } - model.ParsedModelRoot = model.ParsedModel{ - Author: modelInput.Author, - Title: 
modelInput.Title, - Date: reportDate, - ManagementSummaryComment: modelInput.ManagementSummaryComment, - BusinessCriticality: businessCriticality, - BusinessOverview: removePathElementsFromImageFiles(modelInput.BusinessOverview), - TechnicalOverview: removePathElementsFromImageFiles(modelInput.TechnicalOverview), - Questions: modelInput.Questions, - AbuseCases: modelInput.AbuseCases, - SecurityRequirements: modelInput.SecurityRequirements, - TagsAvailable: lowerCaseAndTrim(modelInput.TagsAvailable), - DiagramTweakNodesep: modelInput.DiagramTweakNodesep, - DiagramTweakRanksep: modelInput.DiagramTweakRanksep, - DiagramTweakEdgeLayout: modelInput.DiagramTweakEdgeLayout, - DiagramTweakSuppressEdgeLabels: modelInput.DiagramTweakSuppressEdgeLabels, - DiagramTweakLayoutLeftToRight: modelInput.DiagramTweakLayoutLeftToRight, - DiagramTweakInvisibleConnectionsBetweenAssets: modelInput.DiagramTweakInvisibleConnectionsBetweenAssets, - DiagramTweakSameRankAssets: modelInput.DiagramTweakSameRankAssets, - } - if model.ParsedModelRoot.DiagramTweakNodesep == 0 { - model.ParsedModelRoot.DiagramTweakNodesep = 2 - } - if model.ParsedModelRoot.DiagramTweakRanksep == 0 { - model.ParsedModelRoot.DiagramTweakRanksep = 2 - } - - // Data Assets =============================================================================== - model.ParsedModelRoot.DataAssets = make(map[string]model.DataAsset) - for title, asset := range modelInput.DataAssets { - id := fmt.Sprintf("%v", asset.ID) - - var usage model.Usage - switch asset.Usage { - case model.Business.String(): - usage = model.Business - case model.DevOps.String(): - usage = model.DevOps - default: - panic(errors.New("unknown 'usage' value of data asset '" + title + "': " + asset.Usage)) - } + splitModelFile := filepath.Join("test", "main.yaml") + splitModel := *new(model.ModelInput).Defaults() + splitLoadError := splitModel.Load(splitModelFile) + if splitLoadError != nil { + return fmt.Errorf("unable to parse model yaml %q: %v", 
splitModelFile, splitLoadError) + } - var quantity model.Quantity - switch asset.Quantity { - case model.VeryFew.String(): - quantity = model.VeryFew - case model.Few.String(): - quantity = model.Few - case model.Many.String(): - quantity = model.Many - case model.VeryMany.String(): - quantity = model.VeryMany - default: - panic(errors.New("unknown 'quantity' value of data asset '" + title + "': " + asset.Quantity)) - } + sort.Slice(splitModel.TagsAvailable, func(i, j int) bool { + return splitModel.TagsAvailable[i] < splitModel.TagsAvailable[j] + }) + splitModel.TagsAvailable = []string{strings.Join(splitModel.TagsAvailable, ", ")} - var confidentiality model.Confidentiality - switch asset.Confidentiality { - case model.Public.String(): - confidentiality = model.Public - case model.Internal.String(): - confidentiality = model.Internal - case model.Restricted.String(): - confidentiality = model.Restricted - case model.Confidential.String(): - confidentiality = model.Confidential - case model.StrictlyConfidential.String(): - confidentiality = model.StrictlyConfidential - default: - panic(errors.New("unknown 'confidentiality' value of data asset '" + title + "': " + asset.Confidentiality)) - } + splitModel.Includes = flatModel.Includes + splitData, splitMarshalError := json.MarshalIndent(splitModel, "", " ") + if splitMarshalError != nil { + return fmt.Errorf("unable to print model yaml %q: %v", splitModelFile, splitMarshalError) + } - var integrity model.Criticality - switch asset.Integrity { - case model.Archive.String(): - integrity = model.Archive - case model.Operational.String(): - integrity = model.Operational - case model.Important.String(): - integrity = model.Important - case model.Critical.String(): - integrity = model.Critical - case model.MissionCritical.String(): - integrity = model.MissionCritical - default: - panic(errors.New("unknown 'integrity' value of data asset '" + title + "': " + asset.Integrity)) - } + if string(flatData) != string(splitData) 
{ + return fmt.Errorf("parsing split model files is broken; diff: %v", textdiff.Unified(flatModelFile, splitModelFile, string(flatData), string(splitData))) + } - var availability model.Criticality - switch asset.Availability { - case model.Archive.String(): - availability = model.Archive - case model.Operational.String(): - availability = model.Operational - case model.Important.String(): - availability = model.Important - case model.Critical.String(): - availability = model.Critical - case model.MissionCritical.String(): - availability = model.MissionCritical - default: - panic(errors.New("unknown 'availability' value of data asset '" + title + "': " + asset.Availability)) - } + return nil +} - checkIdSyntax(id) - if _, exists := model.ParsedModelRoot.DataAssets[id]; exists { - panic(errors.New("duplicate id used: " + id)) - } - model.ParsedModelRoot.DataAssets[id] = model.DataAsset{ - Id: id, - Title: title, - Usage: usage, - Description: withDefault(fmt.Sprintf("%v", asset.Description), title), - Quantity: quantity, - Tags: checkTags(lowerCaseAndTrim(asset.Tags), "data asset '"+title+"'"), - Origin: fmt.Sprintf("%v", asset.Origin), - Owner: fmt.Sprintf("%v", asset.Owner), - Confidentiality: confidentiality, - Integrity: integrity, - Availability: availability, - JustificationCiaRating: fmt.Sprintf("%v", asset.JustificationCiaRating), - } - } +func (context *Context) parseModel() { + if *context.verbose { + fmt.Println("Parsing model:", *context.modelFilename) + } - // Technical Assets =============================================================================== - model.ParsedModelRoot.TechnicalAssets = make(map[string]model.TechnicalAsset) - for title, asset := range modelInput.TechnicalAssets { - id := fmt.Sprintf("%v", asset.ID) + context.modelInput = *new(model.ModelInput).Defaults() + loadError := context.modelInput.Load(*context.modelFilename) + if loadError != nil { + log.Fatal("Unable to parse model yaml: ", loadError) + } - var usage model.Usage - 
switch asset.Usage { - case model.Business.String(): - usage = model.Business - case model.DevOps.String(): - usage = model.DevOps - default: - panic(errors.New("unknown 'usage' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Usage))) - } + data, _ := json.MarshalIndent(context.modelInput, "", " ") + fmt.Printf("%v\n", string(data)) - var dataAssetsProcessed = make([]string, 0) - if asset.DataAssetsProcessed != nil { - dataAssetsProcessed = make([]string, len(asset.DataAssetsProcessed)) - for i, parsedProcessedAsset := range asset.DataAssetsProcessed { - referencedAsset := fmt.Sprintf("%v", parsedProcessedAsset) - checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") - dataAssetsProcessed[i] = referencedAsset - } - } + var businessCriticality model.Criticality + switch context.modelInput.BusinessCriticality { + case model.Archive.String(): + businessCriticality = model.Archive + case model.Operational.String(): + businessCriticality = model.Operational + case model.Important.String(): + businessCriticality = model.Important + case model.Critical.String(): + businessCriticality = model.Critical + case model.MissionCritical.String(): + businessCriticality = model.MissionCritical + default: + panic(errors.New("unknown 'business_criticality' value of application: " + context.modelInput.BusinessCriticality)) + } - var dataAssetsStored = make([]string, 0) - if asset.DataAssetsStored != nil { - dataAssetsStored = make([]string, len(asset.DataAssetsStored)) - for i, parsedStoredAssets := range asset.DataAssetsStored { - referencedAsset := fmt.Sprintf("%v", parsedStoredAssets) - checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") - dataAssetsStored[i] = referencedAsset - } - } + reportDate := time.Now() + if len(context.modelInput.Date) > 0 { + var parseError error + reportDate, parseError = time.Parse("2006-01-02", context.modelInput.Date) + if parseError != nil { + panic(errors.New("unable to parse 'date' 
value of model file")) + } + } - var technicalAssetType model.TechnicalAssetType - switch asset.Type { - case model.ExternalEntity.String(): - technicalAssetType = model.ExternalEntity - case model.Process.String(): - technicalAssetType = model.Process - case model.Datastore.String(): - technicalAssetType = model.Datastore - default: - panic(errors.New("unknown 'type' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Type))) - } + model.ParsedModelRoot = model.ParsedModel{ + Author: context.modelInput.Author, + Title: context.modelInput.Title, + Date: reportDate, + ManagementSummaryComment: context.modelInput.ManagementSummaryComment, + BusinessCriticality: businessCriticality, + BusinessOverview: removePathElementsFromImageFiles(context.modelInput.BusinessOverview), + TechnicalOverview: removePathElementsFromImageFiles(context.modelInput.TechnicalOverview), + Questions: context.modelInput.Questions, + AbuseCases: context.modelInput.AbuseCases, + SecurityRequirements: context.modelInput.SecurityRequirements, + TagsAvailable: lowerCaseAndTrim(context.modelInput.TagsAvailable), + DiagramTweakNodesep: context.modelInput.DiagramTweakNodesep, + DiagramTweakRanksep: context.modelInput.DiagramTweakRanksep, + DiagramTweakEdgeLayout: context.modelInput.DiagramTweakEdgeLayout, + DiagramTweakSuppressEdgeLabels: context.modelInput.DiagramTweakSuppressEdgeLabels, + DiagramTweakLayoutLeftToRight: context.modelInput.DiagramTweakLayoutLeftToRight, + DiagramTweakInvisibleConnectionsBetweenAssets: context.modelInput.DiagramTweakInvisibleConnectionsBetweenAssets, + DiagramTweakSameRankAssets: context.modelInput.DiagramTweakSameRankAssets, + } + if model.ParsedModelRoot.DiagramTweakNodesep == 0 { + model.ParsedModelRoot.DiagramTweakNodesep = 2 + } + if model.ParsedModelRoot.DiagramTweakRanksep == 0 { + model.ParsedModelRoot.DiagramTweakRanksep = 2 + } - var technicalAssetSize model.TechnicalAssetSize - switch asset.Size { - case model.Service.String(): - 
technicalAssetSize = model.Service - case model.System.String(): - technicalAssetSize = model.System - case model.Application.String(): - technicalAssetSize = model.Application - case model.Component.String(): - technicalAssetSize = model.Component - default: - panic(errors.New("unknown 'size' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Size))) - } + // Data Assets =============================================================================== + model.ParsedModelRoot.DataAssets = make(map[string]model.DataAsset) + for title, asset := range context.modelInput.DataAssets { + id := fmt.Sprintf("%v", asset.ID) + + var usage model.Usage + switch asset.Usage { + case model.Business.String(): + usage = model.Business + case model.DevOps.String(): + usage = model.DevOps + default: + panic(errors.New("unknown 'usage' value of data asset '" + title + "': " + asset.Usage)) + } + + var quantity model.Quantity + switch asset.Quantity { + case model.VeryFew.String(): + quantity = model.VeryFew + case model.Few.String(): + quantity = model.Few + case model.Many.String(): + quantity = model.Many + case model.VeryMany.String(): + quantity = model.VeryMany + default: + panic(errors.New("unknown 'quantity' value of data asset '" + title + "': " + asset.Quantity)) + } + + var confidentiality model.Confidentiality + switch asset.Confidentiality { + case model.Public.String(): + confidentiality = model.Public + case model.Internal.String(): + confidentiality = model.Internal + case model.Restricted.String(): + confidentiality = model.Restricted + case model.Confidential.String(): + confidentiality = model.Confidential + case model.StrictlyConfidential.String(): + confidentiality = model.StrictlyConfidential + default: + panic(errors.New("unknown 'confidentiality' value of data asset '" + title + "': " + asset.Confidentiality)) + } - var technicalAssetTechnology model.TechnicalAssetTechnology - switch asset.Technology { - case model.UnknownTechnology.String(): - 
technicalAssetTechnology = model.UnknownTechnology - case model.ClientSystem.String(): - technicalAssetTechnology = model.ClientSystem - case model.Browser.String(): - technicalAssetTechnology = model.Browser - case model.Desktop.String(): - technicalAssetTechnology = model.Desktop - case model.MobileApp.String(): - technicalAssetTechnology = model.MobileApp - case model.DevOpsClient.String(): - technicalAssetTechnology = model.DevOpsClient - case model.WebServer.String(): - technicalAssetTechnology = model.WebServer - case model.WebApplication.String(): - technicalAssetTechnology = model.WebApplication - case model.ApplicationServer.String(): - technicalAssetTechnology = model.ApplicationServer - case model.Database.String(): - technicalAssetTechnology = model.Database - case model.FileServer.String(): - technicalAssetTechnology = model.FileServer - case model.LocalFileSystem.String(): - technicalAssetTechnology = model.LocalFileSystem - case model.ERP.String(): - technicalAssetTechnology = model.ERP - case model.CMS.String(): - technicalAssetTechnology = model.CMS - case model.WebServiceREST.String(): - technicalAssetTechnology = model.WebServiceREST - case model.WebServiceSOAP.String(): - technicalAssetTechnology = model.WebServiceSOAP - case model.EJB.String(): - technicalAssetTechnology = model.EJB - case model.SearchIndex.String(): - technicalAssetTechnology = model.SearchIndex - case model.SearchEngine.String(): - technicalAssetTechnology = model.SearchEngine - case model.ServiceRegistry.String(): - technicalAssetTechnology = model.ServiceRegistry - case model.ReverseProxy.String(): - technicalAssetTechnology = model.ReverseProxy - case model.LoadBalancer.String(): - technicalAssetTechnology = model.LoadBalancer - case model.BuildPipeline.String(): - technicalAssetTechnology = model.BuildPipeline - case model.SourcecodeRepository.String(): - technicalAssetTechnology = model.SourcecodeRepository - case model.ArtifactRegistry.String(): - 
technicalAssetTechnology = model.ArtifactRegistry - case model.CodeInspectionPlatform.String(): - technicalAssetTechnology = model.CodeInspectionPlatform - case model.Monitoring.String(): - technicalAssetTechnology = model.Monitoring - case model.LDAPServer.String(): - technicalAssetTechnology = model.LDAPServer - case model.ContainerPlatform.String(): - technicalAssetTechnology = model.ContainerPlatform - case model.BatchProcessing.String(): - technicalAssetTechnology = model.BatchProcessing - case model.EventListener.String(): - technicalAssetTechnology = model.EventListener - case model.IdentityProvider.String(): - technicalAssetTechnology = model.IdentityProvider - case model.IdentityStoreLDAP.String(): - technicalAssetTechnology = model.IdentityStoreLDAP - case model.IdentityStoreDatabase.String(): - technicalAssetTechnology = model.IdentityStoreDatabase - case model.Tool.String(): - technicalAssetTechnology = model.Tool - case model.CLI.String(): - technicalAssetTechnology = model.CLI - case model.Task.String(): - technicalAssetTechnology = model.Task - case model.Function.String(): - technicalAssetTechnology = model.Function - case model.Gateway.String(): - technicalAssetTechnology = model.Gateway - case model.IoTDevice.String(): - technicalAssetTechnology = model.IoTDevice - case model.MessageQueue.String(): - technicalAssetTechnology = model.MessageQueue - case model.StreamProcessing.String(): - technicalAssetTechnology = model.StreamProcessing - case model.ServiceMesh.String(): - technicalAssetTechnology = model.ServiceMesh - case model.DataLake.String(): - technicalAssetTechnology = model.DataLake - case model.BigDataPlatform.String(): - technicalAssetTechnology = model.BigDataPlatform - case model.ReportEngine.String(): - technicalAssetTechnology = model.ReportEngine - case model.AI.String(): - technicalAssetTechnology = model.AI - case model.MailServer.String(): - technicalAssetTechnology = model.MailServer - case model.Vault.String(): - 
technicalAssetTechnology = model.Vault - case model.HSM.String(): - technicalAssetTechnology = model.HSM - case model.WAF.String(): - technicalAssetTechnology = model.WAF - case model.IDS.String(): - technicalAssetTechnology = model.IDS - case model.IPS.String(): - technicalAssetTechnology = model.IPS - case model.Scheduler.String(): - technicalAssetTechnology = model.Scheduler - case model.Mainframe.String(): - technicalAssetTechnology = model.Mainframe - case model.BlockStorage.String(): - technicalAssetTechnology = model.BlockStorage - case model.Library.String(): - technicalAssetTechnology = model.Library - default: - panic(errors.New("unknown 'technology' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Technology))) - } + var integrity model.Criticality + switch asset.Integrity { + case model.Archive.String(): + integrity = model.Archive + case model.Operational.String(): + integrity = model.Operational + case model.Important.String(): + integrity = model.Important + case model.Critical.String(): + integrity = model.Critical + case model.MissionCritical.String(): + integrity = model.MissionCritical + default: + panic(errors.New("unknown 'integrity' value of data asset '" + title + "': " + asset.Integrity)) + } - var encryption model.EncryptionStyle - switch asset.Encryption { - case model.NoneEncryption.String(): - encryption = model.NoneEncryption - case model.Transparent.String(): - encryption = model.Transparent - case model.DataWithSymmetricSharedKey.String(): - encryption = model.DataWithSymmetricSharedKey - case model.DataWithAsymmetricSharedKey.String(): - encryption = model.DataWithAsymmetricSharedKey - case model.DataWithEndUserIndividualKey.String(): - encryption = model.DataWithEndUserIndividualKey - default: - panic(errors.New("unknown 'encryption' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Encryption))) - } + var availability model.Criticality + switch asset.Availability { + case 
model.Archive.String(): + availability = model.Archive + case model.Operational.String(): + availability = model.Operational + case model.Important.String(): + availability = model.Important + case model.Critical.String(): + availability = model.Critical + case model.MissionCritical.String(): + availability = model.MissionCritical + default: + panic(errors.New("unknown 'availability' value of data asset '" + title + "': " + asset.Availability)) + } - var technicalAssetMachine model.TechnicalAssetMachine - switch asset.Machine { - case model.Physical.String(): - technicalAssetMachine = model.Physical - case model.Virtual.String(): - technicalAssetMachine = model.Virtual - case model.Container.String(): - technicalAssetMachine = model.Container - case model.Serverless.String(): - technicalAssetMachine = model.Serverless - default: - panic(errors.New("unknown 'machine' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Machine))) - } + context.checkIdSyntax(id) + if _, exists := model.ParsedModelRoot.DataAssets[id]; exists { + panic(errors.New("duplicate id used: " + id)) + } + model.ParsedModelRoot.DataAssets[id] = model.DataAsset{ + Id: id, + Title: title, + Usage: usage, + Description: withDefault(fmt.Sprintf("%v", asset.Description), title), + Quantity: quantity, + Tags: checkTags(lowerCaseAndTrim(asset.Tags), "data asset '"+title+"'"), + Origin: fmt.Sprintf("%v", asset.Origin), + Owner: fmt.Sprintf("%v", asset.Owner), + Confidentiality: confidentiality, + Integrity: integrity, + Availability: availability, + JustificationCiaRating: fmt.Sprintf("%v", asset.JustificationCiaRating), + } + } - var confidentiality model.Confidentiality - switch asset.Confidentiality { - case model.Public.String(): - confidentiality = model.Public - case model.Internal.String(): - confidentiality = model.Internal - case model.Restricted.String(): - confidentiality = model.Restricted - case model.Confidential.String(): - confidentiality = model.Confidential - case 
model.StrictlyConfidential.String(): - confidentiality = model.StrictlyConfidential - default: - panic(errors.New("unknown 'confidentiality' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Confidentiality))) - } + // Technical Assets =============================================================================== + model.ParsedModelRoot.TechnicalAssets = make(map[string]model.TechnicalAsset) + for title, asset := range context.modelInput.TechnicalAssets { + id := fmt.Sprintf("%v", asset.ID) + + var usage model.Usage + switch asset.Usage { + case model.Business.String(): + usage = model.Business + case model.DevOps.String(): + usage = model.DevOps + default: + panic(errors.New("unknown 'usage' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Usage))) + } - var integrity model.Criticality - switch asset.Integrity { - case model.Archive.String(): - integrity = model.Archive - case model.Operational.String(): - integrity = model.Operational - case model.Important.String(): - integrity = model.Important - case model.Critical.String(): - integrity = model.Critical - case model.MissionCritical.String(): - integrity = model.MissionCritical - default: - panic(errors.New("unknown 'integrity' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Integrity))) + var dataAssetsProcessed = make([]string, 0) + if asset.DataAssetsProcessed != nil { + dataAssetsProcessed = make([]string, len(asset.DataAssetsProcessed)) + for i, parsedProcessedAsset := range asset.DataAssetsProcessed { + referencedAsset := fmt.Sprintf("%v", parsedProcessedAsset) + checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") + dataAssetsProcessed[i] = referencedAsset } + } - var availability model.Criticality - switch asset.Availability { - case model.Archive.String(): - availability = model.Archive - case model.Operational.String(): - availability = model.Operational - case model.Important.String(): - availability = 
model.Important - case model.Critical.String(): - availability = model.Critical - case model.MissionCritical.String(): - availability = model.MissionCritical - default: - panic(errors.New("unknown 'availability' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Availability))) + var dataAssetsStored = make([]string, 0) + if asset.DataAssetsStored != nil { + dataAssetsStored = make([]string, len(asset.DataAssetsStored)) + for i, parsedStoredAssets := range asset.DataAssetsStored { + referencedAsset := fmt.Sprintf("%v", parsedStoredAssets) + checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") + dataAssetsStored[i] = referencedAsset } + } - dataFormatsAccepted := make([]model.DataFormat, 0) - if asset.DataFormatsAccepted != nil { - for _, dataFormatName := range asset.DataFormatsAccepted { - switch dataFormatName { - case model.JSON.String(): - dataFormatsAccepted = append(dataFormatsAccepted, model.JSON) - case model.XML.String(): - dataFormatsAccepted = append(dataFormatsAccepted, model.XML) - case model.Serialization.String(): - dataFormatsAccepted = append(dataFormatsAccepted, model.Serialization) - case model.File.String(): - dataFormatsAccepted = append(dataFormatsAccepted, model.File) - case model.CSV.String(): - dataFormatsAccepted = append(dataFormatsAccepted, model.CSV) - default: - panic(errors.New("unknown 'data_formats_accepted' value of technical asset '" + title + "': " + fmt.Sprintf("%v", dataFormatName))) - } + var technicalAssetType model.TechnicalAssetType + switch asset.Type { + case model.ExternalEntity.String(): + technicalAssetType = model.ExternalEntity + case model.Process.String(): + technicalAssetType = model.Process + case model.Datastore.String(): + technicalAssetType = model.Datastore + default: + panic(errors.New("unknown 'type' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Type))) + } + + var technicalAssetSize model.TechnicalAssetSize + switch asset.Size { + case 
model.Service.String(): + technicalAssetSize = model.Service + case model.System.String(): + technicalAssetSize = model.System + case model.Application.String(): + technicalAssetSize = model.Application + case model.Component.String(): + technicalAssetSize = model.Component + default: + panic(errors.New("unknown 'size' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Size))) + } + + var technicalAssetTechnology model.TechnicalAssetTechnology + switch asset.Technology { + case model.UnknownTechnology.String(): + technicalAssetTechnology = model.UnknownTechnology + case model.ClientSystem.String(): + technicalAssetTechnology = model.ClientSystem + case model.Browser.String(): + technicalAssetTechnology = model.Browser + case model.Desktop.String(): + technicalAssetTechnology = model.Desktop + case model.MobileApp.String(): + technicalAssetTechnology = model.MobileApp + case model.DevOpsClient.String(): + technicalAssetTechnology = model.DevOpsClient + case model.WebServer.String(): + technicalAssetTechnology = model.WebServer + case model.WebApplication.String(): + technicalAssetTechnology = model.WebApplication + case model.ApplicationServer.String(): + technicalAssetTechnology = model.ApplicationServer + case model.Database.String(): + technicalAssetTechnology = model.Database + case model.FileServer.String(): + technicalAssetTechnology = model.FileServer + case model.LocalFileSystem.String(): + technicalAssetTechnology = model.LocalFileSystem + case model.ERP.String(): + technicalAssetTechnology = model.ERP + case model.CMS.String(): + technicalAssetTechnology = model.CMS + case model.WebServiceREST.String(): + technicalAssetTechnology = model.WebServiceREST + case model.WebServiceSOAP.String(): + technicalAssetTechnology = model.WebServiceSOAP + case model.EJB.String(): + technicalAssetTechnology = model.EJB + case model.SearchIndex.String(): + technicalAssetTechnology = model.SearchIndex + case model.SearchEngine.String(): + 
technicalAssetTechnology = model.SearchEngine + case model.ServiceRegistry.String(): + technicalAssetTechnology = model.ServiceRegistry + case model.ReverseProxy.String(): + technicalAssetTechnology = model.ReverseProxy + case model.LoadBalancer.String(): + technicalAssetTechnology = model.LoadBalancer + case model.BuildPipeline.String(): + technicalAssetTechnology = model.BuildPipeline + case model.SourcecodeRepository.String(): + technicalAssetTechnology = model.SourcecodeRepository + case model.ArtifactRegistry.String(): + technicalAssetTechnology = model.ArtifactRegistry + case model.CodeInspectionPlatform.String(): + technicalAssetTechnology = model.CodeInspectionPlatform + case model.Monitoring.String(): + technicalAssetTechnology = model.Monitoring + case model.LDAPServer.String(): + technicalAssetTechnology = model.LDAPServer + case model.ContainerPlatform.String(): + technicalAssetTechnology = model.ContainerPlatform + case model.BatchProcessing.String(): + technicalAssetTechnology = model.BatchProcessing + case model.EventListener.String(): + technicalAssetTechnology = model.EventListener + case model.IdentityProvider.String(): + technicalAssetTechnology = model.IdentityProvider + case model.IdentityStoreLDAP.String(): + technicalAssetTechnology = model.IdentityStoreLDAP + case model.IdentityStoreDatabase.String(): + technicalAssetTechnology = model.IdentityStoreDatabase + case model.Tool.String(): + technicalAssetTechnology = model.Tool + case model.CLI.String(): + technicalAssetTechnology = model.CLI + case model.Task.String(): + technicalAssetTechnology = model.Task + case model.Function.String(): + technicalAssetTechnology = model.Function + case model.Gateway.String(): + technicalAssetTechnology = model.Gateway + case model.IoTDevice.String(): + technicalAssetTechnology = model.IoTDevice + case model.MessageQueue.String(): + technicalAssetTechnology = model.MessageQueue + case model.StreamProcessing.String(): + technicalAssetTechnology = 
model.StreamProcessing + case model.ServiceMesh.String(): + technicalAssetTechnology = model.ServiceMesh + case model.DataLake.String(): + technicalAssetTechnology = model.DataLake + case model.BigDataPlatform.String(): + technicalAssetTechnology = model.BigDataPlatform + case model.ReportEngine.String(): + technicalAssetTechnology = model.ReportEngine + case model.AI.String(): + technicalAssetTechnology = model.AI + case model.MailServer.String(): + technicalAssetTechnology = model.MailServer + case model.Vault.String(): + technicalAssetTechnology = model.Vault + case model.HSM.String(): + technicalAssetTechnology = model.HSM + case model.WAF.String(): + technicalAssetTechnology = model.WAF + case model.IDS.String(): + technicalAssetTechnology = model.IDS + case model.IPS.String(): + technicalAssetTechnology = model.IPS + case model.Scheduler.String(): + technicalAssetTechnology = model.Scheduler + case model.Mainframe.String(): + technicalAssetTechnology = model.Mainframe + case model.BlockStorage.String(): + technicalAssetTechnology = model.BlockStorage + case model.Library.String(): + technicalAssetTechnology = model.Library + default: + panic(errors.New("unknown 'technology' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Technology))) + } + + var encryption model.EncryptionStyle + switch asset.Encryption { + case model.NoneEncryption.String(): + encryption = model.NoneEncryption + case model.Transparent.String(): + encryption = model.Transparent + case model.DataWithSymmetricSharedKey.String(): + encryption = model.DataWithSymmetricSharedKey + case model.DataWithAsymmetricSharedKey.String(): + encryption = model.DataWithAsymmetricSharedKey + case model.DataWithEndUserIndividualKey.String(): + encryption = model.DataWithEndUserIndividualKey + default: + panic(errors.New("unknown 'encryption' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Encryption))) + } + + var technicalAssetMachine model.TechnicalAssetMachine + 
switch asset.Machine { + case model.Physical.String(): + technicalAssetMachine = model.Physical + case model.Virtual.String(): + technicalAssetMachine = model.Virtual + case model.Container.String(): + technicalAssetMachine = model.Container + case model.Serverless.String(): + technicalAssetMachine = model.Serverless + default: + panic(errors.New("unknown 'machine' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Machine))) + } + + var confidentiality model.Confidentiality + switch asset.Confidentiality { + case model.Public.String(): + confidentiality = model.Public + case model.Internal.String(): + confidentiality = model.Internal + case model.Restricted.String(): + confidentiality = model.Restricted + case model.Confidential.String(): + confidentiality = model.Confidential + case model.StrictlyConfidential.String(): + confidentiality = model.StrictlyConfidential + default: + panic(errors.New("unknown 'confidentiality' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Confidentiality))) + } + + var integrity model.Criticality + switch asset.Integrity { + case model.Archive.String(): + integrity = model.Archive + case model.Operational.String(): + integrity = model.Operational + case model.Important.String(): + integrity = model.Important + case model.Critical.String(): + integrity = model.Critical + case model.MissionCritical.String(): + integrity = model.MissionCritical + default: + panic(errors.New("unknown 'integrity' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Integrity))) + } + + var availability model.Criticality + switch asset.Availability { + case model.Archive.String(): + availability = model.Archive + case model.Operational.String(): + availability = model.Operational + case model.Important.String(): + availability = model.Important + case model.Critical.String(): + availability = model.Critical + case model.MissionCritical.String(): + availability = model.MissionCritical + default: + 
panic(errors.New("unknown 'availability' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Availability))) + } + + dataFormatsAccepted := make([]model.DataFormat, 0) + if asset.DataFormatsAccepted != nil { + for _, dataFormatName := range asset.DataFormatsAccepted { + switch dataFormatName { + case model.JSON.String(): + dataFormatsAccepted = append(dataFormatsAccepted, model.JSON) + case model.XML.String(): + dataFormatsAccepted = append(dataFormatsAccepted, model.XML) + case model.Serialization.String(): + dataFormatsAccepted = append(dataFormatsAccepted, model.Serialization) + case model.File.String(): + dataFormatsAccepted = append(dataFormatsAccepted, model.File) + case model.CSV.String(): + dataFormatsAccepted = append(dataFormatsAccepted, model.CSV) + default: + panic(errors.New("unknown 'data_formats_accepted' value of technical asset '" + title + "': " + fmt.Sprintf("%v", dataFormatName))) } } + } - communicationLinks := make([]model.CommunicationLink, 0) - if asset.CommunicationLinks != nil { - for commLinkTitle, commLink := range asset.CommunicationLinks { - constraint := true - weight := 1 - var protocol model.Protocol - var authentication model.Authentication - var authorization model.Authorization - var usage model.Usage - var dataAssetsSent []string - var dataAssetsReceived []string - - switch commLink.Authentication { - case model.NoneAuthentication.String(): - authentication = model.NoneAuthentication - case model.Credentials.String(): - authentication = model.Credentials - case model.SessionId.String(): - authentication = model.SessionId - case model.Token.String(): - authentication = model.Token - case model.ClientCertificate.String(): - authentication = model.ClientCertificate - case model.TwoFactor.String(): - authentication = model.TwoFactor - case model.Externalized.String(): - authentication = model.Externalized - default: - panic(errors.New("unknown 'authentication' value of technical asset '" + title + "' communication 
link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Authentication))) - } + communicationLinks := make([]model.CommunicationLink, 0) + if asset.CommunicationLinks != nil { + for commLinkTitle, commLink := range asset.CommunicationLinks { + constraint := true + weight := 1 + var protocol model.Protocol + var authentication model.Authentication + var authorization model.Authorization + var usage model.Usage + var dataAssetsSent []string + var dataAssetsReceived []string + + switch commLink.Authentication { + case model.NoneAuthentication.String(): + authentication = model.NoneAuthentication + case model.Credentials.String(): + authentication = model.Credentials + case model.SessionId.String(): + authentication = model.SessionId + case model.Token.String(): + authentication = model.Token + case model.ClientCertificate.String(): + authentication = model.ClientCertificate + case model.TwoFactor.String(): + authentication = model.TwoFactor + case model.Externalized.String(): + authentication = model.Externalized + default: + panic(errors.New("unknown 'authentication' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Authentication))) + } - switch commLink.Authorization { - case model.NoneAuthorization.String(): - authorization = model.NoneAuthorization - case model.TechnicalUser.String(): - authorization = model.TechnicalUser - case model.EndUserIdentityPropagation.String(): - authorization = model.EndUserIdentityPropagation - default: - panic(errors.New("unknown 'authorization' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Authorization))) - } + switch commLink.Authorization { + case model.NoneAuthorization.String(): + authorization = model.NoneAuthorization + case model.TechnicalUser.String(): + authorization = model.TechnicalUser + case model.EndUserIdentityPropagation.String(): + authorization = 
model.EndUserIdentityPropagation + default: + panic(errors.New("unknown 'authorization' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Authorization))) + } - switch commLink.Usage { - case model.Business.String(): - usage = model.Business - case model.DevOps.String(): - usage = model.DevOps - default: - panic(errors.New("unknown 'usage' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Usage))) - } + switch commLink.Usage { + case model.Business.String(): + usage = model.Business + case model.DevOps.String(): + usage = model.DevOps + default: + panic(errors.New("unknown 'usage' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Usage))) + } - switch commLink.Protocol { - case model.UnknownProtocol.String(): - protocol = model.UnknownProtocol - case model.HTTP.String(): - protocol = model.HTTP - case model.HTTPS.String(): - protocol = model.HTTPS - case model.WS.String(): - protocol = model.WS - case model.WSS.String(): - protocol = model.WSS - case model.MQTT.String(): - protocol = model.MQTT - case model.JDBC.String(): - protocol = model.JDBC - case model.JdbcEncrypted.String(): - protocol = model.JdbcEncrypted - case model.ODBC.String(): - protocol = model.ODBC - case model.OdbcEncrypted.String(): - protocol = model.OdbcEncrypted - case model.SqlAccessProtocol.String(): - protocol = model.SqlAccessProtocol - case model.SqlAccessProtocolEncrypted.String(): - protocol = model.SqlAccessProtocolEncrypted - case model.NosqlAccessProtocol.String(): - protocol = model.NosqlAccessProtocol - case model.NosqlAccessProtocolEncrypted.String(): - protocol = model.NosqlAccessProtocolEncrypted - case model.TEXT.String(): - protocol = model.TEXT - case model.TextEncrypted.String(): - protocol = model.TextEncrypted - case model.BINARY.String(): - protocol = model.BINARY - case 
model.BinaryEncrypted.String(): - protocol = model.BinaryEncrypted - case model.SSH.String(): - protocol = model.SSH - case model.SshTunnel.String(): - protocol = model.SshTunnel - case model.SMTP.String(): - protocol = model.SMTP - case model.SmtpEncrypted.String(): - protocol = model.SmtpEncrypted - case model.POP3.String(): - protocol = model.POP3 - case model.Pop3Encrypted.String(): - protocol = model.Pop3Encrypted - case model.IMAP.String(): - protocol = model.IMAP - case model.ImapEncrypted.String(): - protocol = model.ImapEncrypted - case model.FTP.String(): - protocol = model.FTP - case model.FTPS.String(): - protocol = model.FTPS - case model.SFTP.String(): - protocol = model.SFTP - case model.SCP.String(): - protocol = model.SCP - case model.LDAP.String(): - protocol = model.LDAP - case model.LDAPS.String(): - protocol = model.LDAPS - case model.JMS.String(): - protocol = model.JMS - case model.NFS.String(): - protocol = model.NFS - case model.SMB.String(): - protocol = model.SMB - case model.SmbEncrypted.String(): - protocol = model.SmbEncrypted - case model.LocalFileAccess.String(): - protocol = model.LocalFileAccess - case model.NRPE.String(): - protocol = model.NRPE - case model.XMPP.String(): - protocol = model.XMPP - case model.IIOP.String(): - protocol = model.IIOP - case model.IiopEncrypted.String(): - protocol = model.IiopEncrypted - case model.JRMP.String(): - protocol = model.JRMP - case model.JrmpEncrypted.String(): - protocol = model.JrmpEncrypted - case model.InProcessLibraryCall.String(): - protocol = model.InProcessLibraryCall - case model.ContainerSpawning.String(): - protocol = model.ContainerSpawning - default: - panic(errors.New("unknown 'protocol' of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Protocol))) - } + switch commLink.Protocol { + case model.UnknownProtocol.String(): + protocol = model.UnknownProtocol + case model.HTTP.String(): + protocol = model.HTTP + case 
model.HTTPS.String(): + protocol = model.HTTPS + case model.WS.String(): + protocol = model.WS + case model.WSS.String(): + protocol = model.WSS + case model.MQTT.String(): + protocol = model.MQTT + case model.JDBC.String(): + protocol = model.JDBC + case model.JdbcEncrypted.String(): + protocol = model.JdbcEncrypted + case model.ODBC.String(): + protocol = model.ODBC + case model.OdbcEncrypted.String(): + protocol = model.OdbcEncrypted + case model.SqlAccessProtocol.String(): + protocol = model.SqlAccessProtocol + case model.SqlAccessProtocolEncrypted.String(): + protocol = model.SqlAccessProtocolEncrypted + case model.NosqlAccessProtocol.String(): + protocol = model.NosqlAccessProtocol + case model.NosqlAccessProtocolEncrypted.String(): + protocol = model.NosqlAccessProtocolEncrypted + case model.TEXT.String(): + protocol = model.TEXT + case model.TextEncrypted.String(): + protocol = model.TextEncrypted + case model.BINARY.String(): + protocol = model.BINARY + case model.BinaryEncrypted.String(): + protocol = model.BinaryEncrypted + case model.SSH.String(): + protocol = model.SSH + case model.SshTunnel.String(): + protocol = model.SshTunnel + case model.SMTP.String(): + protocol = model.SMTP + case model.SmtpEncrypted.String(): + protocol = model.SmtpEncrypted + case model.POP3.String(): + protocol = model.POP3 + case model.Pop3Encrypted.String(): + protocol = model.Pop3Encrypted + case model.IMAP.String(): + protocol = model.IMAP + case model.ImapEncrypted.String(): + protocol = model.ImapEncrypted + case model.FTP.String(): + protocol = model.FTP + case model.FTPS.String(): + protocol = model.FTPS + case model.SFTP.String(): + protocol = model.SFTP + case model.SCP.String(): + protocol = model.SCP + case model.LDAP.String(): + protocol = model.LDAP + case model.LDAPS.String(): + protocol = model.LDAPS + case model.JMS.String(): + protocol = model.JMS + case model.NFS.String(): + protocol = model.NFS + case model.SMB.String(): + protocol = model.SMB + case 
model.SmbEncrypted.String(): + protocol = model.SmbEncrypted + case model.LocalFileAccess.String(): + protocol = model.LocalFileAccess + case model.NRPE.String(): + protocol = model.NRPE + case model.XMPP.String(): + protocol = model.XMPP + case model.IIOP.String(): + protocol = model.IIOP + case model.IiopEncrypted.String(): + protocol = model.IiopEncrypted + case model.JRMP.String(): + protocol = model.JRMP + case model.JrmpEncrypted.String(): + protocol = model.JrmpEncrypted + case model.InProcessLibraryCall.String(): + protocol = model.InProcessLibraryCall + case model.ContainerSpawning.String(): + protocol = model.ContainerSpawning + default: + panic(errors.New("unknown 'protocol' of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Protocol))) + } - if commLink.DataAssetsSent != nil { - for _, dataAssetSent := range commLink.DataAssetsSent { - referencedAsset := fmt.Sprintf("%v", dataAssetSent) - checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") - dataAssetsSent = append(dataAssetsSent, referencedAsset) - } + if commLink.DataAssetsSent != nil { + for _, dataAssetSent := range commLink.DataAssetsSent { + referencedAsset := fmt.Sprintf("%v", dataAssetSent) + checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") + dataAssetsSent = append(dataAssetsSent, referencedAsset) } + } - if commLink.DataAssetsReceived != nil { - for _, dataAssetReceived := range commLink.DataAssetsReceived { - referencedAsset := fmt.Sprintf("%v", dataAssetReceived) - checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") - dataAssetsReceived = append(dataAssetsReceived, referencedAsset) - } + if commLink.DataAssetsReceived != nil { + for _, dataAssetReceived := range commLink.DataAssetsReceived { + referencedAsset := fmt.Sprintf("%v", 
dataAssetReceived) + checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") + dataAssetsReceived = append(dataAssetsReceived, referencedAsset) } + } - if commLink.DiagramTweakWeight > 0 { - weight = commLink.DiagramTweakWeight - } + if commLink.DiagramTweakWeight > 0 { + weight = commLink.DiagramTweakWeight + } - constraint = !commLink.DiagramTweakConstraint - - checkErr(err) - - dataFlowTitle := fmt.Sprintf("%v", commLinkTitle) - commLink := model.CommunicationLink{ - Id: createDataFlowId(id, dataFlowTitle), - SourceId: id, - TargetId: commLink.Target, - Title: dataFlowTitle, - Description: withDefault(commLink.Description, dataFlowTitle), - Protocol: protocol, - Authentication: authentication, - Authorization: authorization, - Usage: usage, - Tags: checkTags(lowerCaseAndTrim(commLink.Tags), "communication link '"+commLinkTitle+"' of technical asset '"+title+"'"), - VPN: commLink.VPN, - IpFiltered: commLink.IpFiltered, - Readonly: commLink.Readonly, - DataAssetsSent: dataAssetsSent, - DataAssetsReceived: dataAssetsReceived, - DiagramTweakWeight: weight, - DiagramTweakConstraint: constraint, - } - communicationLinks = append(communicationLinks, commLink) - // track all comm links - model.CommunicationLinks[commLink.Id] = commLink - // keep track of map of *all* comm links mapped by target-id (to be able to look up "who is calling me" kind of things) - model.IncomingTechnicalCommunicationLinksMappedByTargetId[commLink.TargetId] = append( - model.IncomingTechnicalCommunicationLinksMappedByTargetId[commLink.TargetId], commLink) + constraint = !commLink.DiagramTweakConstraint + + dataFlowTitle := fmt.Sprintf("%v", commLinkTitle) + commLink := model.CommunicationLink{ + Id: createDataFlowId(id, dataFlowTitle), + SourceId: id, + TargetId: commLink.Target, + Title: dataFlowTitle, + Description: withDefault(commLink.Description, dataFlowTitle), + Protocol: protocol, + Authentication: authentication, + 
Authorization: authorization, + Usage: usage, + Tags: checkTags(lowerCaseAndTrim(commLink.Tags), "communication link '"+commLinkTitle+"' of technical asset '"+title+"'"), + VPN: commLink.VPN, + IpFiltered: commLink.IpFiltered, + Readonly: commLink.Readonly, + DataAssetsSent: dataAssetsSent, + DataAssetsReceived: dataAssetsReceived, + DiagramTweakWeight: weight, + DiagramTweakConstraint: constraint, } + communicationLinks = append(communicationLinks, commLink) + // track all comm links + model.CommunicationLinks[commLink.Id] = commLink + // keep track of map of *all* comm links mapped by target-id (to be able to look up "who is calling me" kind of things) + model.IncomingTechnicalCommunicationLinksMappedByTargetId[commLink.TargetId] = append( + model.IncomingTechnicalCommunicationLinksMappedByTargetId[commLink.TargetId], commLink) } + } - checkIdSyntax(id) - if _, exists := model.ParsedModelRoot.TechnicalAssets[id]; exists { - panic(errors.New("duplicate id used: " + id)) - } - model.ParsedModelRoot.TechnicalAssets[id] = model.TechnicalAsset{ - Id: id, - Usage: usage, - Title: title, //fmt.Sprintf("%v", asset["title"]), - Description: withDefault(fmt.Sprintf("%v", asset.Description), title), - Type: technicalAssetType, - Size: technicalAssetSize, - Technology: technicalAssetTechnology, - Tags: checkTags(lowerCaseAndTrim(asset.Tags), "technical asset '"+title+"'"), - Machine: technicalAssetMachine, - Internet: asset.Internet, - Encryption: encryption, - MultiTenant: asset.MultiTenant, - Redundant: asset.Redundant, - CustomDevelopedParts: asset.CustomDevelopedParts, - UsedAsClientByHuman: asset.UsedAsClientByHuman, - OutOfScope: asset.OutOfScope, - JustificationOutOfScope: fmt.Sprintf("%v", asset.JustificationOutOfScope), - Owner: fmt.Sprintf("%v", asset.Owner), - Confidentiality: confidentiality, - Integrity: integrity, - Availability: availability, - JustificationCiaRating: fmt.Sprintf("%v", asset.JustificationCiaRating), - DataAssetsProcessed: dataAssetsProcessed, 
- DataAssetsStored: dataAssetsStored, - DataFormatsAccepted: dataFormatsAccepted, - CommunicationLinks: communicationLinks, - DiagramTweakOrder: asset.DiagramTweakOrder, - } + context.checkIdSyntax(id) + if _, exists := model.ParsedModelRoot.TechnicalAssets[id]; exists { + panic(errors.New("duplicate id used: " + id)) + } + model.ParsedModelRoot.TechnicalAssets[id] = model.TechnicalAsset{ + Id: id, + Usage: usage, + Title: title, //fmt.Sprintf("%v", asset["title"]), + Description: withDefault(fmt.Sprintf("%v", asset.Description), title), + Type: technicalAssetType, + Size: technicalAssetSize, + Technology: technicalAssetTechnology, + Tags: checkTags(lowerCaseAndTrim(asset.Tags), "technical asset '"+title+"'"), + Machine: technicalAssetMachine, + Internet: asset.Internet, + Encryption: encryption, + MultiTenant: asset.MultiTenant, + Redundant: asset.Redundant, + CustomDevelopedParts: asset.CustomDevelopedParts, + UsedAsClientByHuman: asset.UsedAsClientByHuman, + OutOfScope: asset.OutOfScope, + JustificationOutOfScope: fmt.Sprintf("%v", asset.JustificationOutOfScope), + Owner: fmt.Sprintf("%v", asset.Owner), + Confidentiality: confidentiality, + Integrity: integrity, + Availability: availability, + JustificationCiaRating: fmt.Sprintf("%v", asset.JustificationCiaRating), + DataAssetsProcessed: dataAssetsProcessed, + DataAssetsStored: dataAssetsStored, + DataFormatsAccepted: dataFormatsAccepted, + CommunicationLinks: communicationLinks, + DiagramTweakOrder: asset.DiagramTweakOrder, } + } - // Trust Boundaries =============================================================================== - checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries := make(map[string]bool) - model.ParsedModelRoot.TrustBoundaries = make(map[string]model.TrustBoundary) - for title, boundary := range modelInput.TrustBoundaries { - id := fmt.Sprintf("%v", boundary.ID) - - var technicalAssetsInside = make([]string, 0) - if boundary.TechnicalAssetsInside != nil { - parsedInsideAssets := 
boundary.TechnicalAssetsInside - technicalAssetsInside = make([]string, len(parsedInsideAssets)) - for i, parsedInsideAsset := range parsedInsideAssets { - technicalAssetsInside[i] = fmt.Sprintf("%v", parsedInsideAsset) - _, found := model.ParsedModelRoot.TechnicalAssets[technicalAssetsInside[i]] - if !found { - panic(errors.New("missing referenced technical asset " + technicalAssetsInside[i] + " at trust boundary '" + title + "'")) - } - if checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries[technicalAssetsInside[i]] == true { - panic(errors.New("referenced technical asset " + technicalAssetsInside[i] + " at trust boundary '" + title + "' is modeled in multiple trust boundaries")) - } - checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries[technicalAssetsInside[i]] = true - //fmt.Println("asset "+technicalAssetsInside[i]+" at i="+strconv.Itoa(i)) + // Trust Boundaries =============================================================================== + checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries := make(map[string]bool) + model.ParsedModelRoot.TrustBoundaries = make(map[string]model.TrustBoundary) + for title, boundary := range context.modelInput.TrustBoundaries { + id := fmt.Sprintf("%v", boundary.ID) + + var technicalAssetsInside = make([]string, 0) + if boundary.TechnicalAssetsInside != nil { + parsedInsideAssets := boundary.TechnicalAssetsInside + technicalAssetsInside = make([]string, len(parsedInsideAssets)) + for i, parsedInsideAsset := range parsedInsideAssets { + technicalAssetsInside[i] = fmt.Sprintf("%v", parsedInsideAsset) + _, found := model.ParsedModelRoot.TechnicalAssets[technicalAssetsInside[i]] + if !found { + panic(errors.New("missing referenced technical asset " + technicalAssetsInside[i] + " at trust boundary '" + title + "'")) } - } - - var trustBoundariesNested = make([]string, 0) - if boundary.TrustBoundariesNested != nil { - parsedNestedBoundaries := boundary.TrustBoundariesNested - trustBoundariesNested = 
make([]string, len(parsedNestedBoundaries)) - for i, parsedNestedBoundary := range parsedNestedBoundaries { - trustBoundariesNested[i] = fmt.Sprintf("%v", parsedNestedBoundary) + if checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries[technicalAssetsInside[i]] == true { + panic(errors.New("referenced technical asset " + technicalAssetsInside[i] + " at trust boundary '" + title + "' is modeled in multiple trust boundaries")) } + checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries[technicalAssetsInside[i]] = true + //fmt.Println("asset "+technicalAssetsInside[i]+" at i="+strconv.Itoa(i)) } + } - var trustBoundaryType model.TrustBoundaryType - switch boundary.Type { - case model.NetworkOnPrem.String(): - trustBoundaryType = model.NetworkOnPrem - case model.NetworkDedicatedHoster.String(): - trustBoundaryType = model.NetworkDedicatedHoster - case model.NetworkVirtualLAN.String(): - trustBoundaryType = model.NetworkVirtualLAN - case model.NetworkCloudProvider.String(): - trustBoundaryType = model.NetworkCloudProvider - case model.NetworkCloudSecurityGroup.String(): - trustBoundaryType = model.NetworkCloudSecurityGroup - case model.NetworkPolicyNamespaceIsolation.String(): - trustBoundaryType = model.NetworkPolicyNamespaceIsolation - case model.ExecutionEnvironment.String(): - trustBoundaryType = model.ExecutionEnvironment - default: - panic(errors.New("unknown 'type' of trust boundary '" + title + "': " + fmt.Sprintf("%v", boundary.Type))) - } - - trustBoundary := model.TrustBoundary{ - Id: id, - Title: title, //fmt.Sprintf("%v", boundary["title"]), - Description: withDefault(fmt.Sprintf("%v", boundary.Description), title), - Type: trustBoundaryType, - Tags: checkTags(lowerCaseAndTrim(boundary.Tags), "trust boundary '"+title+"'"), - TechnicalAssetsInside: technicalAssetsInside, - TrustBoundariesNested: trustBoundariesNested, - } - checkIdSyntax(id) - if _, exists := model.ParsedModelRoot.TrustBoundaries[id]; exists { - panic(errors.New("duplicate id used: " 
+ id)) - } - model.ParsedModelRoot.TrustBoundaries[id] = trustBoundary - for _, technicalAsset := range trustBoundary.TechnicalAssetsInside { - model.DirectContainingTrustBoundaryMappedByTechnicalAssetId[technicalAsset] = trustBoundary - //fmt.Println("Asset "+technicalAsset+" is directly in trust boundary "+trustBoundary.Id) + var trustBoundariesNested = make([]string, 0) + if boundary.TrustBoundariesNested != nil { + parsedNestedBoundaries := boundary.TrustBoundariesNested + trustBoundariesNested = make([]string, len(parsedNestedBoundaries)) + for i, parsedNestedBoundary := range parsedNestedBoundaries { + trustBoundariesNested[i] = fmt.Sprintf("%v", parsedNestedBoundary) } } - checkNestedTrustBoundariesExisting() - - // Shared Runtime =============================================================================== - model.ParsedModelRoot.SharedRuntimes = make(map[string]model.SharedRuntime) - for title, runtime := range modelInput.SharedRuntimes { - id := fmt.Sprintf("%v", runtime.ID) - - var technicalAssetsRunning = make([]string, 0) - if runtime.TechnicalAssetsRunning != nil { - parsedRunningAssets := runtime.TechnicalAssetsRunning - technicalAssetsRunning = make([]string, len(parsedRunningAssets)) - for i, parsedRunningAsset := range parsedRunningAssets { - assetId := fmt.Sprintf("%v", parsedRunningAsset) - checkTechnicalAssetExists(assetId, "shared runtime '"+title+"'", false) - technicalAssetsRunning[i] = assetId - } - } - sharedRuntime := model.SharedRuntime{ - Id: id, - Title: title, //fmt.Sprintf("%v", boundary["title"]), - Description: withDefault(fmt.Sprintf("%v", runtime.Description), title), - Tags: checkTags(runtime.Tags, "shared runtime '"+title+"'"), - TechnicalAssetsRunning: technicalAssetsRunning, - } - checkIdSyntax(id) - if _, exists := model.ParsedModelRoot.SharedRuntimes[id]; exists { - panic(errors.New("duplicate id used: " + id)) - } - model.ParsedModelRoot.SharedRuntimes[id] = sharedRuntime - for _, technicalAssetId := range 
sharedRuntime.TechnicalAssetsRunning { - model.DirectContainingSharedRuntimeMappedByTechnicalAssetId[technicalAssetId] = sharedRuntime + var trustBoundaryType model.TrustBoundaryType + switch boundary.Type { + case model.NetworkOnPrem.String(): + trustBoundaryType = model.NetworkOnPrem + case model.NetworkDedicatedHoster.String(): + trustBoundaryType = model.NetworkDedicatedHoster + case model.NetworkVirtualLAN.String(): + trustBoundaryType = model.NetworkVirtualLAN + case model.NetworkCloudProvider.String(): + trustBoundaryType = model.NetworkCloudProvider + case model.NetworkCloudSecurityGroup.String(): + trustBoundaryType = model.NetworkCloudSecurityGroup + case model.NetworkPolicyNamespaceIsolation.String(): + trustBoundaryType = model.NetworkPolicyNamespaceIsolation + case model.ExecutionEnvironment.String(): + trustBoundaryType = model.ExecutionEnvironment + default: + panic(errors.New("unknown 'type' of trust boundary '" + title + "': " + fmt.Sprintf("%v", boundary.Type))) + } + + trustBoundary := model.TrustBoundary{ + Id: id, + Title: title, //fmt.Sprintf("%v", boundary["title"]), + Description: withDefault(fmt.Sprintf("%v", boundary.Description), title), + Type: trustBoundaryType, + Tags: checkTags(lowerCaseAndTrim(boundary.Tags), "trust boundary '"+title+"'"), + TechnicalAssetsInside: technicalAssetsInside, + TrustBoundariesNested: trustBoundariesNested, + } + context.checkIdSyntax(id) + if _, exists := model.ParsedModelRoot.TrustBoundaries[id]; exists { + panic(errors.New("duplicate id used: " + id)) + } + model.ParsedModelRoot.TrustBoundaries[id] = trustBoundary + for _, technicalAsset := range trustBoundary.TechnicalAssetsInside { + model.DirectContainingTrustBoundaryMappedByTechnicalAssetId[technicalAsset] = trustBoundary + //fmt.Println("Asset "+technicalAsset+" is directly in trust boundary "+trustBoundary.Id) + } + } + checkNestedTrustBoundariesExisting() + + // Shared Runtime 
=============================================================================== + model.ParsedModelRoot.SharedRuntimes = make(map[string]model.SharedRuntime) + for title, runtime := range context.modelInput.SharedRuntimes { + id := fmt.Sprintf("%v", runtime.ID) + + var technicalAssetsRunning = make([]string, 0) + if runtime.TechnicalAssetsRunning != nil { + parsedRunningAssets := runtime.TechnicalAssetsRunning + technicalAssetsRunning = make([]string, len(parsedRunningAssets)) + for i, parsedRunningAsset := range parsedRunningAssets { + assetId := fmt.Sprintf("%v", parsedRunningAsset) + checkTechnicalAssetExists(assetId, "shared runtime '"+title+"'", false) + technicalAssetsRunning[i] = assetId } } - // Individual Risk Categories (just used as regular risk categories) =============================================================================== - model.ParsedModelRoot.IndividualRiskCategories = make(map[string]model.RiskCategory) - for title, individualCategory := range modelInput.IndividualRiskCategories { - id := fmt.Sprintf("%v", individualCategory.ID) - - var function model.RiskFunction - switch individualCategory.Function { - case model.BusinessSide.String(): - function = model.BusinessSide - case model.Architecture.String(): - function = model.Architecture - case model.Development.String(): - function = model.Development - case model.Operations.String(): - function = model.Operations - default: - panic(errors.New("unknown 'function' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.Function))) - } + sharedRuntime := model.SharedRuntime{ + Id: id, + Title: title, //fmt.Sprintf("%v", boundary["title"]), + Description: withDefault(fmt.Sprintf("%v", runtime.Description), title), + Tags: checkTags(runtime.Tags, "shared runtime '"+title+"'"), + TechnicalAssetsRunning: technicalAssetsRunning, + } + context.checkIdSyntax(id) + if _, exists := model.ParsedModelRoot.SharedRuntimes[id]; exists { + panic(errors.New("duplicate 
id used: " + id)) + } + model.ParsedModelRoot.SharedRuntimes[id] = sharedRuntime + for _, technicalAssetId := range sharedRuntime.TechnicalAssetsRunning { + model.DirectContainingSharedRuntimeMappedByTechnicalAssetId[technicalAssetId] = sharedRuntime + } + } - var stride model.STRIDE - switch individualCategory.STRIDE { - case model.Spoofing.String(): - stride = model.Spoofing - case model.Tampering.String(): - stride = model.Tampering - case model.Repudiation.String(): - stride = model.Repudiation - case model.InformationDisclosure.String(): - stride = model.InformationDisclosure - case model.DenialOfService.String(): - stride = model.DenialOfService - case model.ElevationOfPrivilege.String(): - stride = model.ElevationOfPrivilege - default: - panic(errors.New("unknown 'stride' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.STRIDE))) - } + // Individual Risk Categories (just used as regular risk categories) =============================================================================== + model.ParsedModelRoot.IndividualRiskCategories = make(map[string]model.RiskCategory) + for title, individualCategory := range context.modelInput.IndividualRiskCategories { + id := fmt.Sprintf("%v", individualCategory.ID) - cat := model.RiskCategory{ - Id: id, - Title: title, - Description: withDefault(fmt.Sprintf("%v", individualCategory.Description), title), - Impact: fmt.Sprintf("%v", individualCategory.Impact), - ASVS: fmt.Sprintf("%v", individualCategory.ASVS), - CheatSheet: fmt.Sprintf("%v", individualCategory.CheatSheet), - Action: fmt.Sprintf("%v", individualCategory.Action), - Mitigation: fmt.Sprintf("%v", individualCategory.Mitigation), - Check: fmt.Sprintf("%v", individualCategory.Check), - DetectionLogic: fmt.Sprintf("%v", individualCategory.DetectionLogic), - RiskAssessment: fmt.Sprintf("%v", individualCategory.RiskAssessment), - FalsePositives: fmt.Sprintf("%v", individualCategory.FalsePositives), - Function: function, - 
STRIDE: stride, - ModelFailurePossibleReason: individualCategory.ModelFailurePossibleReason, - CWE: individualCategory.CWE, - } - checkIdSyntax(id) - if _, exists := model.ParsedModelRoot.IndividualRiskCategories[id]; exists { - panic(errors.New("duplicate id used: " + id)) - } - model.ParsedModelRoot.IndividualRiskCategories[id] = cat - - // NOW THE INDIVIDUAL RISK INSTANCES: - //individualRiskInstances := make([]model.Risk, 0) - if individualCategory.RisksIdentified != nil { // TODO: also add syntax checks of input YAML when linked asset is not found or when synthetic-id is already used... - for title, individualRiskInstance := range individualCategory.RisksIdentified { - var severity model.RiskSeverity - var exploitationLikelihood model.RiskExploitationLikelihood - var exploitationImpact model.RiskExploitationImpact - var mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId string - var dataBreachProbability model.DataBreachProbability - var dataBreachTechnicalAssetIDs []string - - switch individualRiskInstance.Severity { - case model.LowSeverity.String(): - severity = model.LowSeverity - case model.MediumSeverity.String(): - severity = model.MediumSeverity - case model.ElevatedSeverity.String(): - severity = model.ElevatedSeverity - case model.HighSeverity.String(): - severity = model.HighSeverity - case model.CriticalSeverity.String(): - severity = model.CriticalSeverity - case "": // added default - severity = model.MediumSeverity - default: - panic(errors.New("unknown 'severity' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.Severity))) - } + var function model.RiskFunction + switch individualCategory.Function { + case model.BusinessSide.String(): + function = model.BusinessSide + case model.Architecture.String(): + function = model.Architecture + case model.Development.String(): + function = model.Development + case 
model.Operations.String(): + function = model.Operations + default: + panic(errors.New("unknown 'function' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.Function))) + } + + var stride model.STRIDE + switch individualCategory.STRIDE { + case model.Spoofing.String(): + stride = model.Spoofing + case model.Tampering.String(): + stride = model.Tampering + case model.Repudiation.String(): + stride = model.Repudiation + case model.InformationDisclosure.String(): + stride = model.InformationDisclosure + case model.DenialOfService.String(): + stride = model.DenialOfService + case model.ElevationOfPrivilege.String(): + stride = model.ElevationOfPrivilege + default: + panic(errors.New("unknown 'stride' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.STRIDE))) + } + + cat := model.RiskCategory{ + Id: id, + Title: title, + Description: withDefault(fmt.Sprintf("%v", individualCategory.Description), title), + Impact: fmt.Sprintf("%v", individualCategory.Impact), + ASVS: fmt.Sprintf("%v", individualCategory.ASVS), + CheatSheet: fmt.Sprintf("%v", individualCategory.CheatSheet), + Action: fmt.Sprintf("%v", individualCategory.Action), + Mitigation: fmt.Sprintf("%v", individualCategory.Mitigation), + Check: fmt.Sprintf("%v", individualCategory.Check), + DetectionLogic: fmt.Sprintf("%v", individualCategory.DetectionLogic), + RiskAssessment: fmt.Sprintf("%v", individualCategory.RiskAssessment), + FalsePositives: fmt.Sprintf("%v", individualCategory.FalsePositives), + Function: function, + STRIDE: stride, + ModelFailurePossibleReason: individualCategory.ModelFailurePossibleReason, + CWE: individualCategory.CWE, + } + context.checkIdSyntax(id) + if _, exists := model.ParsedModelRoot.IndividualRiskCategories[id]; exists { + panic(errors.New("duplicate id used: " + id)) + } + model.ParsedModelRoot.IndividualRiskCategories[id] = cat + + // NOW THE INDIVIDUAL RISK INSTANCES: + //individualRiskInstances 
:= make([]model.Risk, 0) + if individualCategory.RisksIdentified != nil { // TODO: also add syntax checks of input YAML when linked asset is not found or when synthetic-id is already used... + for title, individualRiskInstance := range individualCategory.RisksIdentified { + var severity model.RiskSeverity + var exploitationLikelihood model.RiskExploitationLikelihood + var exploitationImpact model.RiskExploitationImpact + var mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId string + var dataBreachProbability model.DataBreachProbability + var dataBreachTechnicalAssetIDs []string + + switch individualRiskInstance.Severity { + case model.LowSeverity.String(): + severity = model.LowSeverity + case model.MediumSeverity.String(): + severity = model.MediumSeverity + case model.ElevatedSeverity.String(): + severity = model.ElevatedSeverity + case model.HighSeverity.String(): + severity = model.HighSeverity + case model.CriticalSeverity.String(): + severity = model.CriticalSeverity + case "": // added default + severity = model.MediumSeverity + default: + panic(errors.New("unknown 'severity' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.Severity))) + } - switch individualRiskInstance.ExploitationLikelihood { - case model.Unlikely.String(): - exploitationLikelihood = model.Unlikely - case model.Likely.String(): - exploitationLikelihood = model.Likely - case model.VeryLikely.String(): - exploitationLikelihood = model.VeryLikely - case model.Frequent.String(): - exploitationLikelihood = model.Frequent - case "": // added default - exploitationLikelihood = model.Likely - default: - panic(errors.New("unknown 'exploitation_likelihood' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationLikelihood))) - } + switch individualRiskInstance.ExploitationLikelihood { + case 
model.Unlikely.String(): + exploitationLikelihood = model.Unlikely + case model.Likely.String(): + exploitationLikelihood = model.Likely + case model.VeryLikely.String(): + exploitationLikelihood = model.VeryLikely + case model.Frequent.String(): + exploitationLikelihood = model.Frequent + case "": // added default + exploitationLikelihood = model.Likely + default: + panic(errors.New("unknown 'exploitation_likelihood' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationLikelihood))) + } - switch individualRiskInstance.ExploitationImpact { - case model.LowImpact.String(): - exploitationImpact = model.LowImpact - case model.MediumImpact.String(): - exploitationImpact = model.MediumImpact - case model.HighImpact.String(): - exploitationImpact = model.HighImpact - case model.VeryHighImpact.String(): - exploitationImpact = model.VeryHighImpact - case "": // added default - exploitationImpact = model.MediumImpact - default: - panic(errors.New("unknown 'exploitation_impact' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationImpact))) - } + switch individualRiskInstance.ExploitationImpact { + case model.LowImpact.String(): + exploitationImpact = model.LowImpact + case model.MediumImpact.String(): + exploitationImpact = model.MediumImpact + case model.HighImpact.String(): + exploitationImpact = model.HighImpact + case model.VeryHighImpact.String(): + exploitationImpact = model.VeryHighImpact + case "": // added default + exploitationImpact = model.MediumImpact + default: + panic(errors.New("unknown 'exploitation_impact' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationImpact))) + } - if len(individualRiskInstance.MostRelevantDataAsset) > 0 { - mostRelevantDataAssetId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantDataAsset) - checkDataAssetTargetExists(mostRelevantDataAssetId, "individual risk 
'"+title+"'") - } + if len(individualRiskInstance.MostRelevantDataAsset) > 0 { + mostRelevantDataAssetId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantDataAsset) + checkDataAssetTargetExists(mostRelevantDataAssetId, "individual risk '"+title+"'") + } - if len(individualRiskInstance.MostRelevantTechnicalAsset) > 0 { - mostRelevantTechnicalAssetId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantTechnicalAsset) - checkTechnicalAssetExists(mostRelevantTechnicalAssetId, "individual risk '"+title+"'", false) - } + if len(individualRiskInstance.MostRelevantTechnicalAsset) > 0 { + mostRelevantTechnicalAssetId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantTechnicalAsset) + checkTechnicalAssetExists(mostRelevantTechnicalAssetId, "individual risk '"+title+"'", false) + } - if len(individualRiskInstance.MostRelevantCommunicationLink) > 0 { - mostRelevantCommunicationLinkId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantCommunicationLink) - checkCommunicationLinkExists(mostRelevantCommunicationLinkId, "individual risk '"+title+"'") - } + if len(individualRiskInstance.MostRelevantCommunicationLink) > 0 { + mostRelevantCommunicationLinkId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantCommunicationLink) + checkCommunicationLinkExists(mostRelevantCommunicationLinkId, "individual risk '"+title+"'") + } - if len(individualRiskInstance.MostRelevantTrustBoundary) > 0 { - mostRelevantTrustBoundaryId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantTrustBoundary) - checkTrustBoundaryExists(mostRelevantTrustBoundaryId, "individual risk '"+title+"'") - } + if len(individualRiskInstance.MostRelevantTrustBoundary) > 0 { + mostRelevantTrustBoundaryId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantTrustBoundary) + checkTrustBoundaryExists(mostRelevantTrustBoundaryId, "individual risk '"+title+"'") + } - if len(individualRiskInstance.MostRelevantSharedRuntime) > 0 { - mostRelevantSharedRuntimeId = fmt.Sprintf("%v", 
individualRiskInstance.MostRelevantSharedRuntime) - checkSharedRuntimeExists(mostRelevantSharedRuntimeId, "individual risk '"+title+"'") - } + if len(individualRiskInstance.MostRelevantSharedRuntime) > 0 { + mostRelevantSharedRuntimeId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantSharedRuntime) + checkSharedRuntimeExists(mostRelevantSharedRuntimeId, "individual risk '"+title+"'") + } - switch individualRiskInstance.DataBreachProbability { - case model.Improbable.String(): - dataBreachProbability = model.Improbable - case model.Possible.String(): - dataBreachProbability = model.Possible - case model.Probable.String(): - dataBreachProbability = model.Probable - case "": // added default - dataBreachProbability = model.Possible - default: - panic(errors.New("unknown 'data_breach_probability' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.DataBreachProbability))) - } + switch individualRiskInstance.DataBreachProbability { + case model.Improbable.String(): + dataBreachProbability = model.Improbable + case model.Possible.String(): + dataBreachProbability = model.Possible + case model.Probable.String(): + dataBreachProbability = model.Probable + case "": // added default + dataBreachProbability = model.Possible + default: + panic(errors.New("unknown 'data_breach_probability' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.DataBreachProbability))) + } - if individualRiskInstance.DataBreachTechnicalAssets != nil { - dataBreachTechnicalAssetIDs = make([]string, len(individualRiskInstance.DataBreachTechnicalAssets)) - for i, parsedReferencedAsset := range individualRiskInstance.DataBreachTechnicalAssets { - assetId := fmt.Sprintf("%v", parsedReferencedAsset) - checkTechnicalAssetExists(assetId, "data breach technical assets of individual risk '"+title+"'", false) - dataBreachTechnicalAssetIDs[i] = assetId - } + if individualRiskInstance.DataBreachTechnicalAssets != nil { 
+ dataBreachTechnicalAssetIDs = make([]string, len(individualRiskInstance.DataBreachTechnicalAssets)) + for i, parsedReferencedAsset := range individualRiskInstance.DataBreachTechnicalAssets { + assetId := fmt.Sprintf("%v", parsedReferencedAsset) + checkTechnicalAssetExists(assetId, "data breach technical assets of individual risk '"+title+"'", false) + dataBreachTechnicalAssetIDs[i] = assetId } + } - checkErr(err) - - individualRiskInstance := model.Risk{ - SyntheticId: createSyntheticId(cat.Id, mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId), - Title: fmt.Sprintf("%v", title), - Category: cat, - Severity: severity, - ExploitationLikelihood: exploitationLikelihood, - ExploitationImpact: exploitationImpact, - MostRelevantDataAssetId: mostRelevantDataAssetId, - MostRelevantTechnicalAssetId: mostRelevantTechnicalAssetId, - MostRelevantCommunicationLinkId: mostRelevantCommunicationLinkId, - MostRelevantTrustBoundaryId: mostRelevantTrustBoundaryId, - MostRelevantSharedRuntimeId: mostRelevantSharedRuntimeId, - DataBreachProbability: dataBreachProbability, - DataBreachTechnicalAssetIDs: dataBreachTechnicalAssetIDs, - } - model.GeneratedRisksByCategory[cat] = append(model.GeneratedRisksByCategory[cat], individualRiskInstance) + individualRiskInstance := model.Risk{ + SyntheticId: createSyntheticId(cat.Id, mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId), + Title: fmt.Sprintf("%v", title), + Category: cat, + Severity: severity, + ExploitationLikelihood: exploitationLikelihood, + ExploitationImpact: exploitationImpact, + MostRelevantDataAssetId: mostRelevantDataAssetId, + MostRelevantTechnicalAssetId: mostRelevantTechnicalAssetId, + MostRelevantCommunicationLinkId: mostRelevantCommunicationLinkId, + MostRelevantTrustBoundaryId: mostRelevantTrustBoundaryId, + 
MostRelevantSharedRuntimeId: mostRelevantSharedRuntimeId, + DataBreachProbability: dataBreachProbability, + DataBreachTechnicalAssetIDs: dataBreachTechnicalAssetIDs, } + model.GeneratedRisksByCategory[cat] = append(model.GeneratedRisksByCategory[cat], individualRiskInstance) } } + } - // Risk Tracking =============================================================================== - model.ParsedModelRoot.RiskTracking = make(map[string]model.RiskTracking) - for syntheticRiskId, riskTracking := range modelInput.RiskTracking { - justification := fmt.Sprintf("%v", riskTracking.Justification) - checkedBy := fmt.Sprintf("%v", riskTracking.CheckedBy) - ticket := fmt.Sprintf("%v", riskTracking.Ticket) - var date time.Time - if len(riskTracking.Date) > 0 { - date, err = time.Parse("2006-01-02", riskTracking.Date) - if err != nil { - panic(errors.New("unable to parse 'date' of risk tracking '" + syntheticRiskId + "': " + riskTracking.Date)) - } + // Risk Tracking =============================================================================== + model.ParsedModelRoot.RiskTracking = make(map[string]model.RiskTracking) + for syntheticRiskId, riskTracking := range context.modelInput.RiskTracking { + justification := fmt.Sprintf("%v", riskTracking.Justification) + checkedBy := fmt.Sprintf("%v", riskTracking.CheckedBy) + ticket := fmt.Sprintf("%v", riskTracking.Ticket) + var date time.Time + if len(riskTracking.Date) > 0 { + var parseError error + date, parseError = time.Parse("2006-01-02", riskTracking.Date) + if parseError != nil { + panic(errors.New("unable to parse 'date' of risk tracking '" + syntheticRiskId + "': " + riskTracking.Date)) } + } - var status model.RiskStatus - switch riskTracking.Status { - case model.Unchecked.String(): - status = model.Unchecked - case model.Mitigated.String(): - status = model.Mitigated - case model.InProgress.String(): - status = model.InProgress - case model.Accepted.String(): - status = model.Accepted - case model.InDiscussion.String(): - 
status = model.InDiscussion - case model.FalsePositive.String(): - status = model.FalsePositive - default: - panic(errors.New("unknown 'status' value of risk tracking '" + syntheticRiskId + "': " + riskTracking.Status)) - } + var status model.RiskStatus + switch riskTracking.Status { + case model.Unchecked.String(): + status = model.Unchecked + case model.Mitigated.String(): + status = model.Mitigated + case model.InProgress.String(): + status = model.InProgress + case model.Accepted.String(): + status = model.Accepted + case model.InDiscussion.String(): + status = model.InDiscussion + case model.FalsePositive.String(): + status = model.FalsePositive + default: + panic(errors.New("unknown 'status' value of risk tracking '" + syntheticRiskId + "': " + riskTracking.Status)) + } - tracking := model.RiskTracking{ - SyntheticRiskId: strings.TrimSpace(syntheticRiskId), - Justification: justification, - CheckedBy: checkedBy, - Ticket: ticket, - Date: date, - Status: status, - } - if strings.Contains(syntheticRiskId, "*") { // contains a wildcard char - deferredRiskTrackingDueToWildcardMatching[syntheticRiskId] = tracking - } else { - model.ParsedModelRoot.RiskTracking[syntheticRiskId] = tracking - } + tracking := model.RiskTracking{ + SyntheticRiskId: strings.TrimSpace(syntheticRiskId), + Justification: justification, + CheckedBy: checkedBy, + Ticket: ticket, + Date: date, + Status: status, + } + if strings.Contains(syntheticRiskId, "*") { // contains a wildcard char + context.deferredRiskTrackingDueToWildcardMatching[syntheticRiskId] = tracking + } else { + model.ParsedModelRoot.RiskTracking[syntheticRiskId] = tracking } + } - // ====================== model consistency check (linking) - for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { - for _, commLink := range technicalAsset.CommunicationLinks { - checkTechnicalAssetExists(commLink.TargetId, "communication link '"+commLink.Title+"' of technical asset '"+technicalAsset.Title+"'", false) - } + // 
====================== model consistency check (linking) + for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { + for _, commLink := range technicalAsset.CommunicationLinks { + checkTechnicalAssetExists(commLink.TargetId, "communication link '"+commLink.Title+"' of technical asset '"+technicalAsset.Title+"'", false) } - } else { - log.Fatal("Unable to read/parse model yaml: ", err) } } @@ -5103,11 +5233,11 @@ func removePathElementsFromImageFiles(overview model.Overview) model.Overview { return overview } -func applyWildcardRiskTrackingEvaluation() { - if *verbose { +func (context *Context) applyWildcardRiskTrackingEvaluation() { + if *context.verbose { fmt.Println("Executing risk tracking evaluation") } - for syntheticRiskIdPattern, riskTracking := range deferredRiskTrackingDueToWildcardMatching { + for syntheticRiskIdPattern, riskTracking := range context.deferredRiskTrackingDueToWildcardMatching { foundSome := false var matchingRiskIdExpression = regexp.MustCompile(strings.ReplaceAll(regexp.QuoteMeta(syntheticRiskIdPattern), `\*`, `[^@]+`)) for syntheticRiskId := range model.GeneratedRisksBySyntheticId { @@ -5124,7 +5254,7 @@ func applyWildcardRiskTrackingEvaluation() { } } if !foundSome { - if *ignoreOrphanedRiskTracking { + if *context.ignoreOrphanedRiskTracking { fmt.Println("Wildcard risk tracking does not match any risk id: " + syntheticRiskIdPattern) } else { panic(errors.New("wildcard risk tracking does not match any risk id: " + syntheticRiskIdPattern)) @@ -5231,8 +5361,8 @@ func hash(s string) string { return fmt.Sprintf("%v", h.Sum32()) } -func writeDataAssetDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.File { - if *verbose { +func (context *Context) writeDataAssetDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.File { + if *context.verbose { fmt.Println("Writing data asset diagram input") } var dotContent strings.Builder @@ -5317,8 +5447,8 @@ func writeDataAssetDiagramGraphvizDOT(diagramFilenameDOT string, dpi 
int) *os.Fi return file } -func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.File { - if *verbose { +func (context *Context) writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.File { + if *context.verbose { fmt.Println("Writing data flow diagram input") } var dotContent strings.Builder @@ -5338,19 +5468,19 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil switch model.ParsedModelRoot.DiagramTweakEdgeLayout { case "spline": splines = "spline" - drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false + context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false case "polyline": splines = "polyline" - drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false + context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false case "ortho": splines = "ortho" suppressBidirectionalArrows = true case "curved": splines = "curved" - drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false + context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false case "false": splines = "false" - drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false + context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false default: panic(errors.New("unknown value for diagram_tweak_suppress_edge_labels (spline, polyline, ortho, curved, false): " + model.ParsedModelRoot.DiagramTweakEdgeLayout)) @@ -5361,8 +5491,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil rankdir = "LR" } modelTitle := "" - addModelTitle := false - if addModelTitle { + if context.addModelTitle { modelTitle = `label="` + model.ParsedModelRoot.Title + `"` } dotContent.WriteString(` graph [ ` + modelTitle + ` @@ -5399,7 +5528,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil trustBoundary := model.ParsedModelRoot.TrustBoundaries[key] var snippet strings.Builder if 
len(trustBoundary.TechnicalAssetsInside) > 0 || len(trustBoundary.TrustBoundariesNested) > 0 { - if drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks { + if context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks { // see https://stackoverflow.com/questions/17247455/how-do-i-add-extra-space-between-clusters?noredirect=1&lq=1 snippet.WriteString("\n subgraph cluster_space_boundary_for_layout_only_1" + hash(trustBoundary.Id) + " {\n") snippet.WriteString(` graph [ @@ -5462,7 +5591,7 @@ func writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.Fil snippet.WriteString(";\n") } snippet.WriteString(" }\n\n") - if drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks { + if context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks { snippet.WriteString(" }\n\n") } } @@ -5691,16 +5820,16 @@ func encode(value string) string { return strings.ReplaceAll(value, "&", "&") } -func renderDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string) { - if *verbose { +func (context *Context) renderDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string) { + if *context.verbose { fmt.Println("Rendering data flow diagram input") } // tmp files - tmpFileDOT, err := os.CreateTemp(tempFolder, "diagram-*-.gv") + tmpFileDOT, err := os.CreateTemp(*context.tempFolder, "diagram-*-.gv") checkErr(err) defer func() { _ = os.Remove(tmpFileDOT.Name()) }() - tmpFilePNG, err := os.CreateTemp(tempFolder, "diagram-*-.png") + tmpFilePNG, err := os.CreateTemp(*context.tempFolder, "diagram-*-.png") checkErr(err) defer func() { _ = os.Remove(tmpFilePNG.Name()) }() @@ -5739,16 +5868,16 @@ func renderDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string) { } } -func renderDataAssetDiagramGraphvizImage(dotFile *os.File, targetDir string) { // TODO dedupe with other render...() method here - if *verbose { +func (context *Context) renderDataAssetDiagramGraphvizImage(dotFile *os.File, targetDir string) { // TODO dedupe with 
other render...() method here + if *context.verbose { fmt.Println("Rendering data asset diagram input") } // tmp files - tmpFileDOT, err := os.CreateTemp(tempFolder, "diagram-*-.gv") + tmpFileDOT, err := os.CreateTemp(*context.tempFolder, "diagram-*-.gv") checkErr(err) defer func() { _ = os.Remove(tmpFileDOT.Name()) }() - tmpFilePNG, err := os.CreateTemp(tempFolder, "diagram-*-.png") + tmpFilePNG, err := os.CreateTemp(*context.tempFolder, "diagram-*-.png") checkErr(err) defer func() { _ = os.Remove(tmpFilePNG.Name()) }() diff --git a/model/types.go b/model/types.go index c536a358..b432ad6e 100644 --- a/model/types.go +++ b/model/types.go @@ -5,6 +5,10 @@ import ( "errors" "fmt" "github.com/threagile/threagile/colors" + "gopkg.in/yaml.v3" + "log" + "os" + "path/filepath" "regexp" "sort" "strings" @@ -81,146 +85,356 @@ func MakeID(val string) string { // === Model Type Stuff ====================================== type ModelInput struct { // TODO: Eventually remove this and directly use ParsedModelRoot? But then the error messages for model errors are not quite as good anymore... 
- ThreagileVersion string - Title string - Author Author - Date string - BusinessOverview Overview - TechnicalOverview Overview - BusinessCriticality string - ManagementSummaryComment string - Questions map[string]string - AbuseCases map[string]string - SecurityRequirements map[string]string - TagsAvailable []string - DataAssets map[string]InputDataAsset - TechnicalAssets map[string]InputTechnicalAsset - TrustBoundaries map[string]InputTrustBoundary - SharedRuntimes map[string]InputSharedRuntime - IndividualRiskCategories map[string]InputIndividualRiskCategory - RiskTracking map[string]InputRiskTracking - DiagramTweakNodesep, DiagramTweakRanksep int - DiagramTweakEdgeLayout string - DiagramTweakSuppressEdgeLabels bool - DiagramTweakLayoutLeftToRight bool - DiagramTweakInvisibleConnectionsBetweenAssets []string - DiagramTweakSameRankAssets []string + Includes []string `yaml:"includes" json:"includes"` + ThreagileVersion string `yaml:"threagile_version" json:"threagile_version"` + Title string `yaml:"title" json:"title"` + Author Author `yaml:"author" json:"author"` + Date string `yaml:"date" json:"date"` + BusinessOverview Overview `yaml:"business_overview" json:"business_overview"` + TechnicalOverview Overview `yaml:"technical_overview" json:"technical_overview"` + BusinessCriticality string `yaml:"business_criticality" json:"business_criticality"` + ManagementSummaryComment string `yaml:"management_summary_comment" json:"management_summary_comment"` + Questions map[string]string `yaml:"questions" json:"questions"` + AbuseCases map[string]string `yaml:"abuse_cases" json:"abuse_cases"` + SecurityRequirements map[string]string `yaml:"security_requirements" json:"security_requirements"` + TagsAvailable []string `yaml:"tags_available" json:"tags_available"` + DataAssets map[string]InputDataAsset `yaml:"data_assets" json:"data_assets"` + TechnicalAssets map[string]InputTechnicalAsset `yaml:"technical_assets" json:"technical_assets"` + TrustBoundaries 
map[string]InputTrustBoundary `yaml:"trust_boundaries" json:"trust_boundaries"` + SharedRuntimes map[string]InputSharedRuntime `yaml:"shared_runtimes" json:"shared_runtimes"` + IndividualRiskCategories map[string]InputIndividualRiskCategory `yaml:"individual_risk_categories" json:"individual_risk_categories"` + RiskTracking map[string]InputRiskTracking `yaml:"risk_tracking" json:"risk_tracking"` + DiagramTweakNodesep int `yaml:"diagram_tweak_nodesep" json:"diagram_tweak_nodesep"` + DiagramTweakRanksep int `yaml:"diagram_tweak_ranksep" json:"diagram_tweak_ranksep"` + DiagramTweakEdgeLayout string `yaml:"diagram_tweak_edge_layout" json:"diagram_tweak_edge_layout"` + DiagramTweakSuppressEdgeLabels bool `yaml:"diagram_tweak_suppress_edge_labels" json:"diagram_tweak_suppress_edge_labels"` + DiagramTweakLayoutLeftToRight bool `yaml:"diagram_tweak_layout_left_to_right" json:"diagram_tweak_layout_left_to_right"` + DiagramTweakInvisibleConnectionsBetweenAssets []string `yaml:"diagram_tweak_invisible_connections_between_assets" json:"diagram_tweak_invisible_connections_between_assets"` + DiagramTweakSameRankAssets []string `yaml:"diagram_tweak_same_rank_assets" json:"diagram_tweak_same_rank_assets"` +} + +func (model *ModelInput) Defaults() *ModelInput { + *model = ModelInput{ + Questions: make(map[string]string), + AbuseCases: make(map[string]string), + SecurityRequirements: make(map[string]string), + DataAssets: make(map[string]InputDataAsset), + TechnicalAssets: make(map[string]InputTechnicalAsset), + TrustBoundaries: make(map[string]InputTrustBoundary), + SharedRuntimes: make(map[string]InputSharedRuntime), + IndividualRiskCategories: make(map[string]InputIndividualRiskCategory), + RiskTracking: make(map[string]InputRiskTracking), + } + + return model +} + +func (model *ModelInput) Load(inputFilename string) error { + modelYaml, readError := os.ReadFile(inputFilename) + if readError != nil { + log.Fatal("Unable to read model file: ", readError) + } + + unmarshalError := 
yaml.Unmarshal(modelYaml, &model) + if unmarshalError != nil { + log.Fatal("Unable to parse model yaml: ", unmarshalError) + } + + for _, includeFile := range model.Includes { + mergeError := model.Merge(filepath.Dir(inputFilename), includeFile) + if mergeError != nil { + log.Fatalf("Unable to merge model include %q: %v", includeFile, mergeError) + } + } + + return nil +} + +type UniqueStringSlice []string + +func (slice UniqueStringSlice) Merge(otherSlice []string) []string { + valueMap := make(map[string]bool) + for _, value := range slice { + valueMap[value] = true + } + + for _, value := range otherSlice { + valueMap[value] = true + } + + valueSlice := make(UniqueStringSlice, 0) + for key := range valueMap { + valueSlice = append(valueSlice, key) + } + + return valueSlice +} + +func (model *ModelInput) Merge(dir string, includeFilename string) error { + modelYaml, readError := os.ReadFile(filepath.Join(dir, includeFilename)) + if readError != nil { + return fmt.Errorf("unable to read model file: %v", readError) + } + + var fileStructure map[string]any + unmarshalStructureError := yaml.Unmarshal(modelYaml, &fileStructure) + if unmarshalStructureError != nil { + return fmt.Errorf("unable to parse model structure: %v", unmarshalStructureError) + } + + var includedModel ModelInput + unmarshalError := yaml.Unmarshal(modelYaml, &includedModel) + if unmarshalError != nil { + return fmt.Errorf("unable to parse model yaml: %v", unmarshalError) + } + + for item := range fileStructure { + switch strings.ToLower(item) { + case strings.ToLower("includes"): + for _, includeFile := range includedModel.Includes { + mergeError := model.Merge(filepath.Join(dir, filepath.Dir(includeFilename)), includeFile) + if mergeError != nil { + return fmt.Errorf("unable to merge model include %q: %v", includeFile, mergeError) + } + } + break + + case strings.ToLower("threagile_version"): + model.ThreagileVersion = includedModel.ThreagileVersion + break + + case strings.ToLower("title"): + 
model.Title = includedModel.Title + break + + case strings.ToLower("author"): + model.Author = includedModel.Author + break + + case strings.ToLower("date"): + model.Date = includedModel.Date + break + + case strings.ToLower("business_overview"): + model.BusinessOverview = includedModel.BusinessOverview + break + + case strings.ToLower("technical_overview"): + model.TechnicalOverview = includedModel.TechnicalOverview + break + + case strings.ToLower("business_criticality"): + model.BusinessCriticality = includedModel.BusinessCriticality + break + + case strings.ToLower("management_summary_comment"): + model.ManagementSummaryComment = includedModel.ManagementSummaryComment + break + + case strings.ToLower("questions"): + for mapKey, mapValue := range includedModel.Questions { + model.Questions[mapKey] = mapValue + } + break + + case strings.ToLower("abuse_cases"): + for mapKey, mapValue := range includedModel.AbuseCases { + model.AbuseCases[mapKey] = mapValue + } + break + + case strings.ToLower("security_requirements"): + for mapKey, mapValue := range includedModel.SecurityRequirements { + model.SecurityRequirements[mapKey] = mapValue + } + break + + case strings.ToLower("tags_available"): + model.TagsAvailable = UniqueStringSlice(model.TagsAvailable).Merge(includedModel.TagsAvailable) + break + + case strings.ToLower("data_assets"): + for mapKey, mapValue := range includedModel.DataAssets { + model.DataAssets[mapKey] = mapValue + } + break + + case strings.ToLower("technical_assets"): + for mapKey, mapValue := range includedModel.TechnicalAssets { + model.TechnicalAssets[mapKey] = mapValue + } + break + + case strings.ToLower("trust_boundaries"): + for mapKey, mapValue := range includedModel.TrustBoundaries { + model.TrustBoundaries[mapKey] = mapValue + } + break + + case strings.ToLower("shared_runtimes"): + for mapKey, mapValue := range includedModel.SharedRuntimes { + model.SharedRuntimes[mapKey] = mapValue + } + break + + case 
strings.ToLower("individual_risk_categories"): + for mapKey, mapValue := range includedModel.IndividualRiskCategories { + model.IndividualRiskCategories[mapKey] = mapValue + } + break + + case strings.ToLower("risk_tracking"): + for mapKey, mapValue := range includedModel.RiskTracking { + model.RiskTracking[mapKey] = mapValue + } + break + + case "diagram_tweak_nodesep": + model.DiagramTweakNodesep = includedModel.DiagramTweakNodesep + break + + case "diagram_tweak_ranksep": + model.DiagramTweakRanksep = includedModel.DiagramTweakRanksep + break + + case "diagram_tweak_edge_layout": + model.DiagramTweakEdgeLayout = includedModel.DiagramTweakEdgeLayout + break + + case "diagram_tweak_suppress_edge_labels": + model.DiagramTweakSuppressEdgeLabels = includedModel.DiagramTweakSuppressEdgeLabels + break + + case "diagram_tweak_layout_left_to_right": + model.DiagramTweakLayoutLeftToRight = includedModel.DiagramTweakLayoutLeftToRight + break + + case "diagram_tweak_invisible_connections_between_assets": + model.DiagramTweakInvisibleConnectionsBetweenAssets = append(model.DiagramTweakInvisibleConnectionsBetweenAssets, includedModel.DiagramTweakInvisibleConnectionsBetweenAssets...) + break + + case "diagram_tweak_same_rank_assets": + model.DiagramTweakSameRankAssets = append(model.DiagramTweakSameRankAssets, includedModel.DiagramTweakSameRankAssets...) 
+ } + } + + return nil } type InputDataAsset struct { - ID string `json:"id"` - Description string `json:"description"` - Usage string `json:"usage"` - Tags []string `json:"tags"` - Origin string `json:"origin"` - Owner string `json:"owner"` - Quantity string `json:"quantity"` - Confidentiality string `json:"confidentiality"` - Integrity string `json:"integrity"` - Availability string `json:"availability"` - JustificationCiaRating string `json:"justification_cia_rating"` + ID string `yaml:"id" json:"id"` + Description string `yaml:"description" json:"description"` + Usage string `yaml:"usage" json:"usage"` + Tags []string `yaml:"tags" json:"tags"` + Origin string `yaml:"origin" json:"origin"` + Owner string `yaml:"owner" json:"owner"` + Quantity string `yaml:"quantity" json:"quantity"` + Confidentiality string `yaml:"confidentiality" json:"confidentiality"` + Integrity string `yaml:"integrity" json:"integrity"` + Availability string `yaml:"availability" json:"availability"` + JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` } type InputTechnicalAsset struct { - ID string `json:"id"` - Description string `json:"description"` - Type string `json:"type"` - Usage string `json:"usage"` - UsedAsClientByHuman bool `json:"used_as_client_by_human"` - OutOfScope bool `json:"out_of_scope"` - JustificationOutOfScope string `json:"justification_out_of_scope"` - Size string `json:"size"` - Technology string `json:"technology"` - Tags []string `json:"tags"` - Internet bool `json:"internet"` - Machine string `json:"machine"` - Encryption string `json:"encryption"` - Owner string `json:"owner"` - Confidentiality string `json:"confidentiality"` - Integrity string `json:"integrity"` - Availability string `json:"availability"` - JustificationCiaRating string `json:"justification_cia_rating"` - MultiTenant bool `json:"multi_tenant"` - Redundant bool `json:"redundant"` - CustomDevelopedParts bool `json:"custom_developed_parts"` - 
DataAssetsProcessed []string `json:"data_assets_processed"` - DataAssetsStored []string `json:"data_assets_stored"` - DataFormatsAccepted []string `json:"data_formats_accepted"` - DiagramTweakOrder int `json:"diagram_tweak_order"` - CommunicationLinks map[string]InputCommunicationLink `json:"communication_links"` + ID string `yaml:"id" json:"id"` + Description string `yaml:"description" json:"description"` + Type string `yaml:"type" json:"type"` + Usage string `yaml:"usage" json:"usage"` + UsedAsClientByHuman bool `yaml:"used_as_client_by_human" json:"used_as_client_by_human"` + OutOfScope bool `yaml:"out_of_scope" json:"out_of_scope"` + JustificationOutOfScope string `yaml:"justification_out_of_scope" json:"justification_out_of_scope"` + Size string `yaml:"size" json:"size"` + Technology string `yaml:"technology" json:"technology"` + Tags []string `yaml:"tags" json:"tags"` + Internet bool `yaml:"internet" json:"internet"` + Machine string `yaml:"machine" json:"machine"` + Encryption string `yaml:"encryption" json:"encryption"` + Owner string `yaml:"owner" json:"owner"` + Confidentiality string `yaml:"confidentiality" json:"confidentiality"` + Integrity string `yaml:"integrity" json:"integrity"` + Availability string `yaml:"availability" json:"availability"` + JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` + MultiTenant bool `yaml:"multi_tenant" json:"multi_tenant"` + Redundant bool `yaml:"redundant" json:"redundant"` + CustomDevelopedParts bool `yaml:"custom_developed_parts" json:"custom_developed_parts"` + DataAssetsProcessed []string `yaml:"data_assets_processed" json:"data_assets_processed"` + DataAssetsStored []string `yaml:"data_assets_stored" json:"data_assets_stored"` + DataFormatsAccepted []string `yaml:"data_formats_accepted" json:"data_formats_accepted"` + DiagramTweakOrder int `yaml:"diagram_tweak_order" json:"diagram_tweak_order"` + CommunicationLinks map[string]InputCommunicationLink 
`yaml:"communication_links" json:"communication_links"` } type InputCommunicationLink struct { - Target string `json:"target"` - Description string `json:"description"` - Protocol string `json:"protocol"` - Authentication string `json:"authentication"` - Authorization string `json:"authorization"` - Tags []string `json:"tags"` - VPN bool `json:"vpn"` - IpFiltered bool `json:"ip_filtered"` - Readonly bool `json:"readonly"` - Usage string `json:"usage"` - DataAssetsSent []string `json:"data_assets_sent"` - DataAssetsReceived []string `json:"data_assets_received"` - DiagramTweakWeight int `json:"diagram_tweak_weight"` - DiagramTweakConstraint bool `json:"diagram_tweak_constraint"` + Target string `yaml:"target" json:"target"` + Description string `yaml:"description" json:"description"` + Protocol string `yaml:"protocol" json:"protocol"` + Authentication string `yaml:"authentication" json:"authentication"` + Authorization string `yaml:"authorization" json:"authorization"` + Tags []string `yaml:"tags" json:"tags"` + VPN bool `yaml:"vpn" json:"vpn"` + IpFiltered bool `yaml:"ip_filtered" json:"ip_filtered"` + Readonly bool `yaml:"readonly" json:"readonly"` + Usage string `yaml:"usage" json:"usage"` + DataAssetsSent []string `yaml:"data_assets_sent" json:"data_assets_sent"` + DataAssetsReceived []string `yaml:"data_assets_received" json:"data_assets_received"` + DiagramTweakWeight int `yaml:"diagram_tweak_weight" json:"diagram_tweak_weight"` + DiagramTweakConstraint bool `yaml:"diagram_tweak_constraint" json:"diagram_tweak_constraint"` } type InputSharedRuntime struct { - ID string `json:"id"` - Description string `json:"description"` - Tags []string `json:"tags"` - TechnicalAssetsRunning []string `json:"technical_assets_running"` + ID string `yaml:"id" json:"id"` + Description string `yaml:"description" json:"description"` + Tags []string `yaml:"tags" json:"tags"` + TechnicalAssetsRunning []string `yaml:"technical_assets_running" json:"technical_assets_running"` } type 
InputTrustBoundary struct { - ID string `json:"id"` - Description string `json:"description"` - Type string `json:"type"` - Tags []string `json:"tags"` - TechnicalAssetsInside []string `json:"technical_assets_inside"` - TrustBoundariesNested []string `json:"trust_boundaries_nested"` + ID string `yaml:"id" json:"id"` + Description string `yaml:"description" json:"description"` + Type string `yaml:"type" json:"type"` + Tags []string `yaml:"tags" json:"tags"` + TechnicalAssetsInside []string `yaml:"technical_assets_inside" json:"technical_assets_inside"` + TrustBoundariesNested []string `yaml:"trust_boundaries_nested" json:"trust_boundaries_nested"` } type InputIndividualRiskCategory struct { - ID string `json:"id"` - Description string `json:"description"` - Impact string `json:"impact"` - ASVS string `json:"asvs"` - CheatSheet string `json:"cheat_sheet"` - Action string `json:"action"` - Mitigation string `json:"mitigation"` - Check string `json:"check"` - Function string `json:"function"` - STRIDE string `json:"stride"` - DetectionLogic string `json:"detection_logic"` - RiskAssessment string `json:"risk_assessment"` - FalsePositives string `json:"false_positives"` - ModelFailurePossibleReason bool `json:"model_failure_possible_reason"` - CWE int `json:"cwe"` - RisksIdentified map[string]InputRiskIdentified `json:"risks_identified"` + ID string `yaml:"id" json:"id"` + Description string `yaml:"description" json:"description"` + Impact string `yaml:"impact" json:"impact"` + ASVS string `yaml:"asvs" json:"asvs"` + CheatSheet string `yaml:"cheat_sheet" json:"cheat_sheet"` + Action string `yaml:"action" json:"action"` + Mitigation string `yaml:"mitigation" json:"mitigation"` + Check string `yaml:"check" json:"check"` + Function string `yaml:"function" json:"function"` + STRIDE string `yaml:"stride" json:"stride"` + DetectionLogic string `yaml:"detection_logic" json:"detection_logic"` + RiskAssessment string `yaml:"risk_assessment" json:"risk_assessment"` + 
FalsePositives string `yaml:"false_positives" json:"false_positives"` + ModelFailurePossibleReason bool `yaml:"model_failure_possible_reason" json:"model_failure_possible_reason"` + CWE int `yaml:"cwe" json:"cwe"` + RisksIdentified map[string]InputRiskIdentified `yaml:"risks_identified" json:"risks_identified"` } type InputRiskIdentified struct { - Severity string `json:"severity"` - ExploitationLikelihood string `json:"exploitation_likelihood"` - ExploitationImpact string `json:"exploitation_impact"` - DataBreachProbability string `json:"data_breach_probability"` - DataBreachTechnicalAssets []string `json:"data_breach_technical_assets"` - MostRelevantDataAsset string `json:"most_relevant_data_asset"` - MostRelevantTechnicalAsset string `json:"most_relevant_technical_asset"` - MostRelevantCommunicationLink string `json:"most_relevant_communication_link"` - MostRelevantTrustBoundary string `json:"most_relevant_trust_boundary"` - MostRelevantSharedRuntime string `json:"most_relevant_shared_runtime"` + Severity string `yaml:"severity" json:"severity"` + ExploitationLikelihood string `yaml:"exploitation_likelihood" json:"exploitation_likelihood"` + ExploitationImpact string `yaml:"exploitation_impact" json:"exploitation_impact"` + DataBreachProbability string `yaml:"data_breach_probability" json:"data_breach_probability"` + DataBreachTechnicalAssets []string `yaml:"data_breach_technical_assets" json:"data_breach_technical_assets"` + MostRelevantDataAsset string `yaml:"most_relevant_data_asset" json:"most_relevant_data_asset"` + MostRelevantTechnicalAsset string `yaml:"most_relevant_technical_asset" json:"most_relevant_technical_asset"` + MostRelevantCommunicationLink string `yaml:"most_relevant_communication_link" json:"most_relevant_communication_link"` + MostRelevantTrustBoundary string `yaml:"most_relevant_trust_boundary" json:"most_relevant_trust_boundary"` + MostRelevantSharedRuntime string `yaml:"most_relevant_shared_runtime" json:"most_relevant_shared_runtime"` 
} type InputRiskTracking struct { - Status string `json:"status"` - Justification string `json:"justification"` - Ticket string `json:"ticket"` - Date string `json:"date"` - CheckedBy string `json:"checked_by"` + Status string `yaml:"status" json:"status"` + Justification string `yaml:"justification" json:"justification"` + Ticket string `yaml:"ticket" json:"ticket"` + Date string `yaml:"date" json:"date"` + CheckedBy string `yaml:"checked_by" json:"checked_by"` } // TypeDescription contains a name for a type and its description @@ -1328,16 +1542,18 @@ func IsSharingSameParentTrustBoundary(left, right TechnicalAsset) bool { } type DataAsset struct { - Id string `json:"id"` // TODO: tag here still required? - Title string `json:"title"` // TODO: tag here still required? - Description string `json:"description"` // TODO: tag here still required? - Usage Usage - Tags []string - Origin, Owner string - Quantity Quantity - Confidentiality Confidentiality - Integrity, Availability Criticality - JustificationCiaRating string + Id string `yaml:"id" json:"id"` // TODO: tag here still required? + Title string `yaml:"title" json:"title"` // TODO: tag here still required? + Description string `yaml:"description" json:"description"` // TODO: tag here still required? 
+ Usage Usage `yaml:"usage" json:"usage"` + Tags []string `yaml:"tags" json:"tags"` + Origin string `yaml:"origin" json:"origin"` + Owner string `yaml:"owner" json:"owner"` + Quantity Quantity `yaml:"quantity" json:"quantity"` + Confidentiality Confidentiality `yaml:"confidentiality" json:"confidentiality"` + Integrity Criticality `yaml:"integrity" json:"integrity"` + Availability Criticality `yaml:"availability" json:"availability"` + JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` } func (what DataAsset) IsTaggedWithAny(tags ...string) bool { @@ -2178,13 +2394,13 @@ func (what CommunicationLink) DataAssetsReceivedSorted() []DataAsset { } type Author struct { - Name string `json:"name"` - Homepage string `json:"homepage"` + Name string `yaml:"name" json:"name"` + Homepage string `yaml:"homepage" json:"homepage"` } type Overview struct { - Description string `json:"description"` - Images []map[string]string `json:"images"` // yes, array of map here, as array keeps the order of the image keys + Description string `yaml:"description" json:"description"` + Images []map[string]string `yaml:"images" json:"images"` // yes, array of map here, as array keeps the order of the image keys } type ParsedModel struct { @@ -3279,25 +3495,25 @@ func (what ByRiskCategoryHighestContainingRiskSeveritySortStillAtRisk) Less(i, j type RiskStatistics struct { // TODO add also some more like before / after (i.e. with mitigation applied) - Risks map[string]map[string]int `json:"risks"` + Risks map[string]map[string]int `yaml:"risks" json:"risks"` } type Risk struct { - Category RiskCategory `json:"-"` // just for navigational convenience... 
not JSON marshalled - CategoryId string `json:"category"` // used for better JSON marshalling, is assigned in risk evaluation phase automatically - RiskStatus RiskStatus `json:"risk_status"` // used for better JSON marshalling, is assigned in risk evaluation phase automatically - Severity RiskSeverity `json:"severity"` - ExploitationLikelihood RiskExploitationLikelihood `json:"exploitation_likelihood"` - ExploitationImpact RiskExploitationImpact `json:"exploitation_impact"` - Title string `json:"title"` - SyntheticId string `json:"synthetic_id"` - MostRelevantDataAssetId string `json:"most_relevant_data_asset"` - MostRelevantTechnicalAssetId string `json:"most_relevant_technical_asset"` - MostRelevantTrustBoundaryId string `json:"most_relevant_trust_boundary"` - MostRelevantSharedRuntimeId string `json:"most_relevant_shared_runtime"` - MostRelevantCommunicationLinkId string `json:"most_relevant_communication_link"` - DataBreachProbability DataBreachProbability `json:"data_breach_probability"` - DataBreachTechnicalAssetIDs []string `json:"data_breach_technical_assets"` + Category RiskCategory `yaml:"-" json:"-"` // just for navigational convenience... 
not JSON marshalled + CategoryId string `yaml:"category" json:"category"` // used for better JSON marshalling, is assigned in risk evaluation phase automatically + RiskStatus RiskStatus `yaml:"risk_status" json:"risk_status"` // used for better JSON marshalling, is assigned in risk evaluation phase automatically + Severity RiskSeverity `yaml:"severity" json:"severity"` + ExploitationLikelihood RiskExploitationLikelihood `yaml:"exploitation_likelihood" json:"exploitation_likelihood"` + ExploitationImpact RiskExploitationImpact `yaml:"exploitation_impact" json:"exploitation_impact"` + Title string `yaml:"title" json:"title"` + SyntheticId string `yaml:"synthetic_id" json:"synthetic_id"` + MostRelevantDataAssetId string `yaml:"most_relevant_data_asset" json:"most_relevant_data_asset"` + MostRelevantTechnicalAssetId string `yaml:"most_relevant_technical_asset" json:"most_relevant_technical_asset"` + MostRelevantTrustBoundaryId string `yaml:"most_relevant_trust_boundary" json:"most_relevant_trust_boundary"` + MostRelevantSharedRuntimeId string `yaml:"most_relevant_shared_runtime" json:"most_relevant_shared_runtime"` + MostRelevantCommunicationLinkId string `yaml:"most_relevant_communication_link" json:"most_relevant_communication_link"` + DataBreachProbability DataBreachProbability `yaml:"data_breach_probability" json:"data_breach_probability"` + DataBreachTechnicalAssetIDs []string `yaml:"data_breach_technical_assets" json:"data_breach_technical_assets"` // TODO: refactor all "Id" here to "ID"? } diff --git a/test/abuse_cases.yaml b/test/abuse_cases.yaml new file mode 100644 index 00000000..6315745b --- /dev/null +++ b/test/abuse_cases.yaml @@ -0,0 +1,30 @@ +abuse_cases: + Denial-of-Service: > + As a hacker I want to disturb the functionality of the backend system in order to cause indirect + financial damage via unusable features. + CPU-Cycle Theft: > + As a hacker I want to steal CPU cycles in order to transform them into money via installed crypto currency miners. 
+ Ransomware: > + As a hacker I want to encrypt the storage and file systems in order to demand ransom. + Identity Theft: > + As a hacker I want to steal identity data in order to reuse credentials and/or keys on other targets of the same company or outside. + PII Theft: > + As a hacker I want to steal PII (Personally Identifiable Information) data in order to blackmail the company and/or damage + their repudiation by publishing them. + + ERP-System Compromise: > + As a hacker I want to access the ERP-System in order to steal/modify sensitive business data. + Database Compromise: > + As a hacker I want to access the database backend of the ERP-System in order to steal/modify sensitive + business data. + Contract Filesystem Compromise: > + As a hacker I want to access the filesystem storing the contract PDFs in order to steal/modify contract data. + Cross-Site Scripting Attacks: > + As a hacker I want to execute Cross-Site Scripting (XSS) and similar attacks in order to takeover victim sessions and + cause reputational damage. + Denial-of-Service of Enduser Functionality: > + As a hacker I want to disturb the functionality of the enduser parts of the application in order to cause direct financial + damage (lower sales). + Denial-of-Service of ERP/DB Functionality: > + As a hacker I want to disturb the functionality of the ERP system and/or it's database in order to cause indirect + financial damage via unusable internal ERP features (not related to customer portal). diff --git a/test/all.yaml b/test/all.yaml new file mode 100644 index 00000000..77815f77 --- /dev/null +++ b/test/all.yaml @@ -0,0 +1,1354 @@ +threagile_version: 1.0.0 + +# NOTE: +# +# For a perfect editing experience within your IDE of choice you can easily +# get model syntax validation and autocompletion (very handy for enum values) +# as well as live templates: Just import the schema.json into your IDE and assign +# it as "schema" to each Threagile YAML file. 
Also try to import individual parts +# from the live-templates.txt file into your IDE as live editing templates. +# +# You might also want to try the REST API when running in server mode... + + + +title: Some Example Application + +date: 2020-07-01 + +author: + name: John Doe + homepage: www.example.com + + + + +management_summary_comment: > + Just some more custom summary possible here... + +business_criticality: important # values: archive, operational, important, critical, mission-critical + + + + +business_overview: + description: Some more demo text here and even images... + images: +# - custom-image-1.png: Some dummy image 1 +# - custom-image-2.png: Some dummy image 2 + + +technical_overview: + description: Some more demo text here and even images... + images: +# - custom-image-1.png: Some dummy image 1 +# - custom-image-2.png: Some dummy image 2 + + + +questions: # simply use "" as answer to signal "unanswered" + How are the admin clients managed/protected against compromise?: "" + How are the development clients managed/protected against compromise?: > + Managed by XYZ + How are the build pipeline components managed/protected against compromise?: > + Managed by XYZ + + + +abuse_cases: + Denial-of-Service: > + As a hacker I want to disturb the functionality of the backend system in order to cause indirect + financial damage via unusable features. + CPU-Cycle Theft: > + As a hacker I want to steal CPU cycles in order to transform them into money via installed crypto currency miners. + Ransomware: > + As a hacker I want to encrypt the storage and file systems in order to demand ransom. + Identity Theft: > + As a hacker I want to steal identity data in order to reuse credentials and/or keys on other targets of the same company or outside. + PII Theft: > + As a hacker I want to steal PII (Personally Identifiable Information) data in order to blackmail the company and/or damage + their reputation by publishing them.
+ + ERP-System Compromise: > + As a hacker I want to access the ERP-System in order to steal/modify sensitive business data. + Database Compromise: > + As a hacker I want to access the database backend of the ERP-System in order to steal/modify sensitive + business data. + Contract Filesystem Compromise: > + As a hacker I want to access the filesystem storing the contract PDFs in order to steal/modify contract data. + Cross-Site Scripting Attacks: > + As a hacker I want to execute Cross-Site Scripting (XSS) and similar attacks in order to take over victim sessions and + cause reputational damage. + Denial-of-Service of Enduser Functionality: > + As a hacker I want to disturb the functionality of the enduser parts of the application in order to cause direct financial + damage (lower sales). + Denial-of-Service of ERP/DB Functionality: > + As a hacker I want to disturb the functionality of the ERP system and/or its database in order to cause indirect + financial damage via unusable internal ERP features (not related to customer portal). + + +security_requirements: + Input Validation: Strict input validation is required to reduce the overall attack surface. + Securing Administrative Access: Administrative access must be secured with strong encryption and multi-factor authentication. + EU-DSGVO: Mandatory EU-Datenschutzgrundverordnung + + +# Tags can be used for anything, it's just a tag. Also risk rules can act based on tags if you like.
+# Tags can be used for example to name the products used (which is more concrete than the technology types that only specify the type) +tags_available: + - linux + - apache + - mysql + - jboss + - keycloak + - jenkins + - git + - oracle + - some-erp + - vmware + - aws + - aws:ec2 + - aws:s3 + + + + +data_assets: + + + Customer Contracts: &customer-contracts # this example shows the inheritance-like features of YAML + id: customer-contracts + description: Customer Contracts (PDF) + usage: business # values: business, devops + tags: + origin: Customer + owner: Company XYZ + quantity: many # values: very-few, few, many, very-many + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Contract data might contain financial data as well as personally identifiable information (PII). The integrity and + availability of contract data is required for clearing payment disputes. + + + Customer Contract Summaries: + <<: *customer-contracts # here we're referencing the above created asset as base and just overwrite few values + id: contract-summaries + description: Customer Contract Summaries + quantity: very-few # values: very-few, few, many, very-many + confidentiality: restricted # values: public, internal, restricted, confidential, strictly-confidential + integrity: operational # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Just some summaries. 
+ + + Customer Operational Data: + <<: *customer-contracts # here we're referencing the above created asset as base and just overwrite few values + id: customer-operational-data + description: Customer Operational Data + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Customer operational data for using the portal are required to be available to offer the portal functionality + and are used in the backend transactions. + + + Customer Accounts: + <<: *customer-contracts # here we're referencing the above created asset as base and just overwrite few values + id: customer-accounts + description: Customer Accounts (including transient credentials when entered for checking them) + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Customer account data for using the portal are required to be available to offer the portal functionality. + + + Some Internal Business Data: + id: internal-business-data + description: Internal business data of the ERP system used unrelated to the customer-facing processes. + usage: business # values: business, devops + tags: + origin: Company XYZ + owner: Company XYZ + quantity: few # values: very-few, few, many, very-many + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Data used and/or generated during unrelated other usecases of the ERP-system (when used also by Company XYZ for + internal non-customer-portal-related stuff). 
+ + + Client Application Code: &client-application-code # this example shows the inheritance-like features of YAML + id: client-application-code + description: Angular and other client-side code delivered by the application. + usage: devops # values: business, devops + tags: + origin: Company ABC + owner: Company ABC + quantity: very-few # values: very-few, few, many, very-many + confidentiality: public # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The integrity of the public data is critical to avoid reputational damage and the availability is important on the + long-term scale (but not critical) to keep the growth rate of the customer base steady. + + + Server Application Code: + <<: *client-application-code # here we're referencing the above created asset as base and just overwrite few values + id: server-application-code + description: API and other server-side code of the application. + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: mission-critical # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The integrity of the API code is critical to avoid reputational damage and the availability is important on the + long-term scale (but not critical) to keep the growth rate of the customer base steady. + + + Build Job Config: + id: build-job-config + description: Data for customizing of the build job system. 
+ usage: devops # values: business, devops + tags: + origin: Company XYZ + owner: Company XYZ + quantity: very-few # values: very-few, few, many, very-many + confidentiality: restricted # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Data for customizing of the build job system. + + + Marketing Material: + <<: *client-application-code # here we're referencing the above created asset as base and just overwrite few values + id: marketing-material + description: Website and marketing data to inform potential customers and generate new leads. + integrity: important # values: archive, operational, important, critical, mission-critical + + + ERP Logs: + id: erp-logs + description: Logs generated by the ERP system. + usage: devops # values: business, devops + tags: + origin: Company XYZ + owner: Company XYZ + quantity: many # values: very-few, few, many, very-many + confidentiality: restricted # values: public, internal, restricted, confidential, strictly-confidential + integrity: archive # values: archive, operational, important, critical, mission-critical + availability: archive # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Logs should not contain PII data and are only required for failure analysis, i.e. they are not considered as hard + transactional logs. + + + ERP Customizing Data: + id: erp-customizing + description: Data for customizing of the ERP system. 
+ usage: devops # values: business, devops + tags: + origin: Company XYZ + owner: Company XYZ + quantity: very-few # values: very-few, few, many, very-many + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Data for customizing of the ERP system. + + + Database Customizing and Dumps: + id: db-dumps + description: Data for customizing of the DB system, which might include full database dumps. + usage: devops # values: business, devops + tags: + - oracle + origin: Company XYZ + owner: Company XYZ + quantity: very-few # values: very-few, few, many, very-many + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Data for customizing of the DB system, which might include full database dumps. 
+ + + + + + +technical_assets: + + + Customer Web Client: + id: customer-client + description: Customer Web Client + type: external-entity # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: true + out_of_scope: true + justification_out_of_scope: Owned and managed by enduser customer + size: component # values: system, service, application, component + technology: browser # values: see help + tags: + internet: true + machine: physical # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Customer + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: operational # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The client used by the customer to access the system. 
+ multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - client-application-code + - marketing-material + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + Customer Traffic: + target: load-balancer + description: Link to the load balancer + protocol: https # values: see help + authentication: session-id # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + data_assets_received: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - client-application-code + - marketing-material + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + + + Backoffice Client: + id: backoffice-client + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: Backoffice client + type: external-entity # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: true + out_of_scope: true + justification_out_of_scope: Owned and managed by Company XYZ company + size: component # values: system, service, application, component + technology: desktop # values: see help + tags: + internet: false + machine: physical # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company XYZ + 
confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: important # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The client used by Company XYZ to administer and use the system. + multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-contracts + - internal-business-data + - erp-logs + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + ERP Internal Access: + target: erp-system + description: Link to the ERP system + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation + tags: + - some-erp + vpn: true + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - internal-business-data + data_assets_received: # sequence of IDs to reference + - customer-contracts + - internal-business-data + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + Marketing CMS Editing: + target: marketing-cms + description: Link to the CMS for editing content + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation + tags: + vpn: true + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - marketing-material + data_assets_received: # sequence 
of IDs to reference + - marketing-material + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + + + Backend Admin Client: + id: backend-admin-client + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: Backend admin client + type: external-entity # values: external-entity, process, datastore + usage: devops # values: business, devops + used_as_client_by_human: true + out_of_scope: true + justification_out_of_scope: Owned and managed by ops provider + size: component # values: system, service, application, component + technology: browser # values: see help + tags: + internet: false + machine: physical # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company XYZ + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: operational # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The client used by Company XYZ to administer the system. 
+ multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - erp-logs + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + ERP Web Access: + target: erp-system + description: Link to the ERP system (Web) + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - erp-customizing + data_assets_received: # sequence of IDs to reference + - erp-logs + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + DB Update Access: + target: sql-database + description: Link to the database (JDBC tunneled via SSH) + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - db-dumps + data_assets_received: # sequence of IDs to reference + - db-dumps + - erp-logs + - customer-accounts + - customer-operational-data + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + User Management Access: + target: ldap-auth-server + description: Link to the LDAP auth server for managing users + protocol: ldaps # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: 
+ vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + data_assets_received: # sequence of IDs to reference + - customer-accounts + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + + + Load Balancer: + id: load-balancer + #diagram_tweak_order: 50 # affects left to right positioning (only within a trust boundary) + description: Load Balancer (HA-Proxy) + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: load-balancer # values: see help + tags: + internet: false + machine: physical # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: mission-critical # values: archive, operational, important, critical, mission-critical + availability: mission-critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The correct configuration and reachability of the load balancer is mandatory for all customer and Company XYZ + usages of the portal and ERP system. 
+ multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - internal-business-data + - client-application-code + - marketing-material + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + Web Application Traffic: + target: apache-webserver + description: Link to the web server + protocol: http # values: see help + authentication: session-id # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + data_assets_received: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - client-application-code + #diagram_tweak_weight: 5 + #diagram_tweak_constraint: false + CMS Content Traffic: + target: marketing-cms + description: Link to the CMS server + protocol: http # values: see help + authentication: none # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: none # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: true + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + data_assets_received: # sequence of IDs to reference + - marketing-material + #diagram_tweak_weight: 5 + #diagram_tweak_constraint: false + + + Apache Webserver: + id: apache-webserver + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: Apache Webserver hosting 
the API code and client-side code + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: application # values: system, service, application, component + technology: web-server # values: see help + tags: + - linux + - apache + - aws:ec2 + internet: false + machine: container # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The correct configuration and reachability of the web server is mandatory for all customer usages of the portal. 
+ multi_tenant: false + redundant: false + custom_developed_parts: true + data_assets_processed: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - internal-business-data + - client-application-code + - server-application-code + data_assets_stored: # sequence of IDs to reference + - client-application-code + - server-application-code + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - json + - file + communication_links: + ERP System Traffic: + target: erp-system + description: Link to the ERP system + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - internal-business-data + data_assets_received: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - internal-business-data + #diagram_tweak_weight: 5 + #diagram_tweak_constraint: false + Auth Credential Check Traffic: + target: identity-provider + description: Link to the identity provider server + protocol: https # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + data_assets_received: # sequence of IDs to reference + + + Identity Provider: + id: identity-provider + #diagram_tweak_order: 0 # affects left to right positioning (only 
within a trust boundary) + description: Identity provider server + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: identity-provider # values: see help + tags: + - linux + - jboss + - keycloak + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The auth data of the application + multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + LDAP Credential Check Traffic: + target: ldap-auth-server + description: Link to the LDAP server + protocol: ldaps # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + data_assets_received: # sequence of IDs to reference + + + LDAP Auth Server: + id: ldap-auth-server + #diagram_tweak_order: 0 # 
affects left to right positioning (only within a trust boundary) + description: LDAP authentication server + type: datastore # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: identity-store-ldap # values: see help + tags: + - linux + internet: false + machine: physical # values: physical, virtual, container, serverless + encryption: transparent # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The auth data of the application + multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + data_assets_stored: # sequence of IDs to reference + - customer-accounts + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + + + Marketing CMS: + id: marketing-cms + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: CMS for the marketing content + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: application # values: system, service, application, component + technology: cms # values: see help + tags: + - linux + internet: false + machine: container # values: physical, virtual, container, serverless + 
encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: important # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The correct configuration and reachability of the web server is mandatory for all customer usages of the portal. + multi_tenant: false + redundant: false + custom_developed_parts: true + data_assets_processed: # sequence of IDs to reference + - marketing-material + - customer-accounts + data_assets_stored: # sequence of IDs to reference + - marketing-material + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + Auth Traffic: + target: ldap-auth-server + description: Link to the LDAP auth server + protocol: ldap # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: true + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + data_assets_received: # sequence of IDs to reference + - customer-accounts + #diagram_tweak_weight: 5 + #diagram_tweak_constraint: false + + + Backoffice ERP System: + id: erp-system + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: ERP system + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: system # values: system, 
service, application, component + technology: erp # values: see help + tags: + - linux + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: mission-critical # values: archive, operational, important, critical, mission-critical + availability: mission-critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The ERP system contains business-relevant sensitive data for the leasing processes and eventually also for other + Company XYZ internal processes. + multi_tenant: false + redundant: true + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - internal-business-data + - erp-customizing + data_assets_stored: # sequence of IDs to reference + - erp-logs + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - xml + - file + - serialization + communication_links: + Database Traffic: + target: sql-database + description: Link to the DB system + protocol: jdbc # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - internal-business-data + data_assets_received: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - internal-business-data + 
#diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + NFS Filesystem Access: + target: contract-fileserver + description: Link to the file system + protocol: nfs # values: see help + authentication: none # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: none # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-contracts + data_assets_received: # sequence of IDs to reference + - customer-contracts + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + + + Contract Fileserver: + id: contract-fileserver + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: NFS Filesystem for storing the contract PDFs + type: datastore # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: file-server # values: see help + tags: + - linux + - aws:s3 + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Contract data might contain financial data as well as personally identifiable information (PII). 
The integrity and + availability of contract data is required for clearing payment disputes. The filesystem is also required to be available + for storing new contracts of freshly generated customers. + multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + data_assets_stored: # sequence of IDs to reference + - customer-contracts + - contract-summaries + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - file + communication_links: + + + Customer Contract Database: + id: sql-database + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: The database behind the ERP system + type: datastore # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: database # values: see help + tags: + - linux + - mysql + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: data-with-symmetric-shared-key # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: mission-critical # values: archive, operational, important, critical, mission-critical + availability: mission-critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The ERP system's database contains business-relevant sensitive data for the leasing processes and eventually also + for other Company XYZ internal processes. 
+ multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - db-dumps + data_assets_stored: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - internal-business-data + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + + + External Development Client: + id: external-dev-client + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: External developer client + type: external-entity # values: external-entity, process, datastore + usage: devops # values: business, devops + used_as_client_by_human: true + out_of_scope: true + justification_out_of_scope: Owned and managed by external developers + size: system # values: system, service, application, component + technology: devops-client # values: see help + tags: + - linux + internet: true + machine: physical # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: External Developers + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The clients used by external developers to create parts of the application code. 
+ multi_tenant: true + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_stored: # sequence of IDs to reference + - client-application-code + - server-application-code + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - file + communication_links: + Git-Repo Code Write Access: + target: git-repo + description: Link to the Git repo + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_received: # sequence of IDs to reference + - client-application-code + - server-application-code + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + Git-Repo Web-UI Access: + target: git-repo + description: Link to the Git repo + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_received: # sequence of IDs to reference + - client-application-code + - server-application-code + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + Jenkins Web-UI Access: + target: jenkins-buildserver + description: Link to the Jenkins build server + protocol: https # values: see help + authentication: credentials # 
values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - build-job-config + data_assets_received: # sequence of IDs to reference + - build-job-config + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + + + Git Repository: + id: git-repo + #diagram_tweak_order: 99 # affects left to right positioning (only within a trust boundary) + description: Git repository server + type: process # values: external-entity, process, datastore + usage: devops # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: system # values: system, service, application, component + technology: sourcecode-repository # values: see help + tags: + - linux + - git + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: important # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The code repo pipeline might contain sensitive configuration values like backend credentials, certificates etc. and is + therefore rated as confidential. 
+ multi_tenant: true + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_stored: # sequence of IDs to reference + - client-application-code + - server-application-code + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - file + communication_links: + + + Jenkins Buildserver: + id: jenkins-buildserver + #diagram_tweak_order: 99 # affects left to right positioning (only within a trust boundary) + description: Jenkins buildserver + type: process # values: external-entity, process, datastore + usage: devops # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: system # values: system, service, application, component + technology: build-pipeline # values: see help + tags: + - linux + - jenkins + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The build pipeline might contain sensitive configuration values like backend credentials, certificates etc. and is + therefore rated as confidential. The integrity and availability is rated as critical and important due to the risk + of reputation damage and application update unavailability when the build pipeline is compromised. 
+ multi_tenant: true + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - build-job-config + - client-application-code + - server-application-code + - marketing-material + data_assets_stored: # sequence of IDs to reference + - build-job-config + - client-application-code + - server-application-code + - marketing-material + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - file + - serialization + communication_links: + Git Repo Code Read Access: + target: git-repo + description: Link to the Git repository server + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: true + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + data_assets_received: # sequence of IDs to reference + - client-application-code + - server-application-code + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + Application Deployment: + target: apache-webserver + description: Link to the Apache webserver + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_received: # sequence of IDs to reference + CMS Updates: + target: marketing-cms + description: Link to the CMS + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, 
client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - marketing-material + data_assets_received: # sequence of IDs to reference + + + + + +trust_boundaries: + + + Web DMZ: + id: web-dmz + description: Web DMZ + type: network-cloud-security-group # values: see help + tags: + technical_assets_inside: # sequence of IDs to reference + - apache-webserver + - marketing-cms + trust_boundaries_nested: # sequence of IDs to reference + + + ERP DMZ: + id: erp-dmz + description: ERP DMZ + type: network-cloud-security-group # values: see help + tags: + - some-erp + technical_assets_inside: # sequence of IDs to reference + - erp-system + - contract-fileserver + - sql-database + trust_boundaries_nested: # sequence of IDs to reference + + + Application Network: + id: application-network + description: Application Network + type: network-cloud-provider # values: see help + tags: + - aws + technical_assets_inside: # sequence of IDs to reference + - load-balancer + trust_boundaries_nested: # sequence of IDs to reference + - web-dmz + - erp-dmz + - auth-env + + + Auth Handling Environment: + id: auth-env + description: Auth Handling Environment + type: execution-environment # values: see help + tags: + technical_assets_inside: # sequence of IDs to reference + - identity-provider + - ldap-auth-server + trust_boundaries_nested: # sequence of IDs to reference + + + Dev Network: + id: dev-network + description: Development Network + type: network-on-prem # values: see help + tags: + technical_assets_inside: # sequence of IDs to reference + - jenkins-buildserver + - git-repo + - backend-admin-client + - backoffice-client + trust_boundaries_nested: # sequence of IDs to reference + + + + + +shared_runtimes: + + + WebApp and Backoffice Virtualization: + id: 
webapp-virtualization + description: WebApp Virtualization + tags: + - vmware + technical_assets_running: # sequence of IDs to reference + - apache-webserver + - marketing-cms + - erp-system + - contract-fileserver + - sql-database + + + + +individual_risk_categories: # used for adding custom manually identified risks + + + Some Individual Risk Example: + id: something-strange + description: Some text describing the risk category... + impact: Some text describing the impact... + asvs: V0 - Something Strange + cheat_sheet: https://example.com + action: Some text describing the action... + mitigation: Some text describing the mitigation... + check: Check if XYZ... + function: business-side # values: business-side, architecture, development, operations + stride: repudiation # values: spoofing, tampering, repudiation, information-disclosure, denial-of-service, elevation-of-privilege + detection_logic: Some text describing the detection logic... + risk_assessment: Some text describing the risk assessment... + false_positives: Some text describing the most common types of false positives... 
+ model_failure_possible_reason: false + cwe: 693 + risks_identified: + Example Individual Risk at Database: + severity: critical # values: low, medium, elevated, high, critical + exploitation_likelihood: likely # values: unlikely, likely, very-likely, frequent + exploitation_impact: medium # values: low, medium, high, very-high + data_breach_probability: probable # values: improbable, possible, probable + data_breach_technical_assets: # list of technical asset IDs which might have data breach + - sql-database + most_relevant_data_asset: + most_relevant_technical_asset: sql-database + most_relevant_communication_link: + most_relevant_trust_boundary: + most_relevant_shared_runtime: + Example Individual Risk at Contract Filesystem: + severity: medium # values: low, medium, elevated, high, critical + exploitation_likelihood: frequent # values: unlikely, likely, very-likely, frequent + exploitation_impact: very-high # values: low, medium, high, very-high + data_breach_probability: improbable # values: improbable, possible, probable + data_breach_technical_assets: # list of technical asset IDs which might have data breach + most_relevant_data_asset: + most_relevant_technical_asset: contract-fileserver + most_relevant_communication_link: + most_relevant_trust_boundary: + most_relevant_shared_runtime: + + + +# NOTE: +# For risk tracking each risk-id needs to be defined (the string with the @ sign in it). These unique risk IDs +# are visible in the PDF report (the small grey string under each risk), the Excel (column "ID"), as well as the JSON responses. +# Some risk IDs have only one @ sign in them, while others multiple. The idea is to allow for unique but still speaking IDs. +# Therefore each risk instance creates its individual ID by taking all affected elements causing the risk to be within an @-delimited part. +# Using wildcards (the * sign) for parts delimited by @ signs allows to handle groups of certain risks at once. 
Best is to lookup the IDs +# to use in the created Excel file. Alternatively a model macro "seed-risk-tracking" is available that helps in initially +# seeding the risk tracking part here based on already identified and not yet handled risks. +risk_tracking: + + untrusted-deserialization@erp-system: # wildcards "*" between the @ characters are possible + status: accepted # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: Risk accepted as tolerable + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe + + ldap-injection@*@ldap-auth-server@*: # wildcards "*" between the @ characters are possible + status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: The hardening measures were implemented and checked + ticket: XYZ-5678 + date: 2020-01-05 + checked_by: John Doe + + unencrypted-asset@*: # wildcards "*" between the @ characters are possible + status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: The hardening measures were implemented and checked + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe + + missing-authentication-second-factor@*@*@*: # wildcards "*" between the @ characters are possible + status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: The hardening measures were implemented and checked + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe + + missing-hardening@*: # wildcards "*" between the @ characters are possible + status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: The hardening measures were implemented and checked + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe + + dos-risky-access-across-trust-boundary@*@*@*: # wildcards "*" between the @ characters are possible + status: in-progress # values: 
unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: The hardening measures are being implemented and checked + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe + + + +#diagram_tweak_edge_layout: spline # values: spline, polyline, false, ortho (this suppresses edge labels), curved (this suppresses edge labels and can cause problems with edges) + +#diagram_tweak_suppress_edge_labels: true +#diagram_tweak_layout_left_to_right: true +#diagram_tweak_nodesep: 2 +#diagram_tweak_ranksep: 2 +#diagram_tweak_invisible_connections_between_assets: +# - tech-asset-source-id-A:tech-asset-target-id-B +# - tech-asset-source-id-C:tech-asset-target-id-D +#diagram_tweak_same_rank_assets: +# - tech-asset-source-id-E:tech-asset-target-id-F:tech-asset-source-id-G:tech-asset-target-id-H +# - tech-asset-source-id-M:tech-asset-target-id-N:tech-asset-source-id-O diff --git a/test/data_assets.yaml b/test/data_assets.yaml new file mode 100644 index 00000000..43bc0c7d --- /dev/null +++ b/test/data_assets.yaml @@ -0,0 +1,164 @@ + +data_assets: + + + Customer Contracts: &customer-contracts # this example shows the inheritance-like features of YAML + id: customer-contracts + description: Customer Contracts (PDF) + usage: business # values: business, devops + tags: + origin: Customer + owner: Company XYZ + quantity: many # values: very-few, few, many, very-many + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Contract data might contain financial data as well as personally identifiable information (PII). The integrity and + availability of contract data is required for clearing payment disputes. 
+ + + Customer Contract Summaries: + <<: *customer-contracts # here we're referencing the above created asset as base and just overwrite few values + id: contract-summaries + description: Customer Contract Summaries + quantity: very-few # values: very-few, few, many, very-many + confidentiality: restricted # values: public, internal, restricted, confidential, strictly-confidential + integrity: operational # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Just some summaries. + + + Customer Operational Data: + <<: *customer-contracts # here we're referencing the above created asset as base and just overwrite few values + id: customer-operational-data + description: Customer Operational Data + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Customer operational data for using the portal are required to be available to offer the portal functionality + and are used in the backend transactions. + + + Customer Accounts: + <<: *customer-contracts # here we're referencing the above created asset as base and just overwrite few values + id: customer-accounts + description: Customer Accounts (including transient credentials when entered for checking them) + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Customer account data for using the portal are required to be available to offer the portal functionality. + + + Some Internal Business Data: + id: internal-business-data + description: Internal business data of the ERP system used unrelated to the customer-facing processes. 
+ usage: business # values: business, devops + tags: + origin: Company XYZ + owner: Company XYZ + quantity: few # values: very-few, few, many, very-many + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Data used and/or generated during unrelated other usecases of the ERP-system (when used also by Company XYZ for + internal non-customer-portal-related stuff). + + + Client Application Code: &client-application-code # this example shows the inheritance-like features of YAML + id: client-application-code + description: Angular and other client-side code delivered by the application. + usage: devops # values: business, devops + tags: + origin: Company ABC + owner: Company ABC + quantity: very-few # values: very-few, few, many, very-many + confidentiality: public # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The integrity of the public data is critical to avoid reputational damage and the availability is important on the + long-term scale (but not critical) to keep the growth rate of the customer base steady. + + + Server Application Code: + <<: *client-application-code # here we're referencing the above created asset as base and just overwrite few values + id: server-application-code + description: API and other server-side code of the application. 
+ confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: mission-critical # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The integrity of the API code is critical to avoid reputational damage and the availability is important on the + long-term scale (but not critical) to keep the growth rate of the customer base steady. + + + Build Job Config: + id: build-job-config + description: Data for customizing of the build job system. + usage: devops # values: business, devops + tags: + origin: Company XYZ + owner: Company XYZ + quantity: very-few # values: very-few, few, many, very-many + confidentiality: restricted # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Data for customizing of the build job system. + + + Marketing Material: + <<: *client-application-code # here we're referencing the above created asset as base and just overwrite few values + id: marketing-material + description: Website and marketing data to inform potential customers and generate new leads. + integrity: important # values: archive, operational, important, critical, mission-critical + + + ERP Logs: + id: erp-logs + description: Logs generated by the ERP system. 
+ usage: devops # values: business, devops + tags: + origin: Company XYZ + owner: Company XYZ + quantity: many # values: very-few, few, many, very-many + confidentiality: restricted # values: public, internal, restricted, confidential, strictly-confidential + integrity: archive # values: archive, operational, important, critical, mission-critical + availability: archive # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Logs should not contain PII data and are only required for failure analysis, i.e. they are not considered as hard + transactional logs. + + + ERP Customizing Data: + id: erp-customizing + description: Data for customizing of the ERP system. + usage: devops # values: business, devops + tags: + origin: Company XYZ + owner: Company XYZ + quantity: very-few # values: very-few, few, many, very-many + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Data for customizing of the ERP system. + + + Database Customizing and Dumps: + id: db-dumps + description: Data for customizing of the DB system, which might include full database dumps. + usage: devops # values: business, devops + tags: + - oracle + origin: Company XYZ + owner: Company XYZ + quantity: very-few # values: very-few, few, many, very-many + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Data for customizing of the DB system, which might include full database dumps. 
+ diff --git a/test/diagram_tweak.yaml b/test/diagram_tweak.yaml new file mode 100644 index 00000000..eff09615 --- /dev/null +++ b/test/diagram_tweak.yaml @@ -0,0 +1,13 @@ + +#diagram_tweak_edge_layout: spline # values: spline, polyline, false, ortho (this suppresses edge labels), curved (this suppresses edge labels and can cause problems with edges) + +#diagram_tweak_suppress_edge_labels: true +#diagram_tweak_layout_left_to_right: true +#diagram_tweak_nodesep: 2 +#diagram_tweak_ranksep: 2 +#diagram_tweak_invisible_connections_between_assets: +# - tech-asset-source-id-A:tech-asset-target-id-B +# - tech-asset-source-id-C:tech-asset-target-id-D +#diagram_tweak_same_rank_assets: +# - tech-asset-source-id-E:tech-asset-target-id-F:tech-asset-source-id-G:tech-asset-target-id-H +# - tech-asset-source-id-M:tech-asset-target-id-N:tech-asset-source-id-O diff --git a/test/main.yaml b/test/main.yaml new file mode 100644 index 00000000..a90256a3 --- /dev/null +++ b/test/main.yaml @@ -0,0 +1,27 @@ +threagile_version: 1.0.0 + +# NOTE: +# +# For a perfect editing experience within your IDE of choice you can easily +# get model syntax validation and autocompletion (very handy for enum values) +# as well as live templates: Just import the schema.json into your IDE and assign +# it as "schema" to each Threagile YAML file. Also try to import individual parts +# from the live-templates.txt file into your IDE as live editing templates. +# +# You might also want to try the REST API when running in server mode... 
+ +includes: + - meta.yaml + - overview.yaml + - questions.yaml + - abuse_cases.yaml + - security_requirements.yaml + - tags.yaml + - data_assets.yaml + - technical_assets.yaml + - trust_boundaries.yaml + - shared_runtimes.yaml + - risk_categories.yaml + - risk_tracking.yaml + - diagram_tweak.yaml + diff --git a/test/meta.yaml b/test/meta.yaml new file mode 100644 index 00000000..c368ce1a --- /dev/null +++ b/test/meta.yaml @@ -0,0 +1,9 @@ +title: Some Example Application + +date: 2020-07-01 + +author: + name: John Doe + homepage: www.example.com + +business_criticality: important # values: archive, operational, important, critical, mission-critical diff --git a/test/overview.yaml b/test/overview.yaml new file mode 100644 index 00000000..52b83ac7 --- /dev/null +++ b/test/overview.yaml @@ -0,0 +1,16 @@ +management_summary_comment: > + Just some more custom summary possible here... + + +business_overview: + description: Some more demo text here and even images... + images: +# - custom-image-1.png: Some dummy image 1 +# - custom-image-2.png: Some dummy image 2 + + +technical_overview: + description: Some more demo text here and even images... 
+ images: +# - custom-image-1.png: Some dummy image 1 +# - custom-image-2.png: Some dummy image 2 diff --git a/test/questions.yaml b/test/questions.yaml new file mode 100644 index 00000000..4f8fc38a --- /dev/null +++ b/test/questions.yaml @@ -0,0 +1,6 @@ +questions: # simply use "" as answer to signal "unanswered" + How are the admin clients managed/protected against compromise?: "" + How are the development clients managed/protected against compromise?: > + Managed by XYZ + How are the build pipeline components managed/protected against compromise?: > + Managed by XYZ diff --git a/test/risk_categories.yaml b/test/risk_categories.yaml new file mode 100644 index 00000000..e63c87e8 --- /dev/null +++ b/test/risk_categories.yaml @@ -0,0 +1,44 @@ + +individual_risk_categories: # used for adding custom manually identified risks + + Some Individual Risk Example: + id: something-strange + description: Some text describing the risk category... + impact: Some text describing the impact... + asvs: V0 - Something Strange + cheat_sheet: https://example.com + action: Some text describing the action... + mitigation: Some text describing the mitigation... + check: Check if XYZ... + function: business-side # values: business-side, architecture, development, operations + stride: repudiation # values: spoofing, tampering, repudiation, information-disclosure, denial-of-service, elevation-of-privilege + detection_logic: Some text describing the detection logic... + risk_assessment: Some text describing the risk assessment... + false_positives: Some text describing the most common types of false positives... 
+ model_failure_possible_reason: false + cwe: 693 + risks_identified: + Example Individual Risk at Database: + severity: critical # values: low, medium, elevated, high, critical + exploitation_likelihood: likely # values: unlikely, likely, very-likely, frequent + exploitation_impact: medium # values: low, medium, high, very-high + data_breach_probability: probable # values: improbable, possible, probable + data_breach_technical_assets: # list of technical asset IDs which might have data breach + - sql-database + most_relevant_data_asset: + most_relevant_technical_asset: sql-database + most_relevant_communication_link: + most_relevant_trust_boundary: + most_relevant_shared_runtime: + Example Individual Risk at Contract Filesystem: + severity: medium # values: low, medium, elevated, high, critical + exploitation_likelihood: frequent # values: unlikely, likely, very-likely, frequent + exploitation_impact: very-high # values: low, medium, high, very-high + data_breach_probability: improbable # values: improbable, possible, probable + data_breach_technical_assets: # list of technical asset IDs which might have data breach + most_relevant_data_asset: + most_relevant_technical_asset: contract-fileserver + most_relevant_communication_link: + most_relevant_trust_boundary: + most_relevant_shared_runtime: + diff --git a/test/risk_tracking.yaml b/test/risk_tracking.yaml new file mode 100644 index 00000000..30c84a11 --- /dev/null +++ b/test/risk_tracking.yaml @@ -0,0 +1,52 @@ + +# NOTE: +# For risk tracking each risk-id needs to be defined (the string with the @ sign in it). These unique risk IDs +# are visible in the PDF report (the small grey string under each risk), the Excel (column "ID"), as well as the JSON responses. +# Some risk IDs have only one @ sign in them, while others multiple. The idea is to allow for unique but still speaking IDs. 
+# Therefore each risk instance creates its individual ID by taking all affected elements causing the risk to be within an @-delimited part. +# Using wildcards (the * sign) for parts delimited by @ signs allows to handle groups of certain risks at once. Best is to lookup the IDs +# to use in the created Excel file. Alternatively a model macro "seed-risk-tracking" is available that helps in initially +# seeding the risk tracking part here based on already identified and not yet handled risks. +risk_tracking: + + untrusted-deserialization@erp-system: # wildcards "*" between the @ characters are possible + status: accepted # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: Risk accepted as tolerable + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe + + ldap-injection@*@ldap-auth-server@*: # wildcards "*" between the @ characters are possible + status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: The hardening measures were implemented and checked + ticket: XYZ-5678 + date: 2020-01-05 + checked_by: John Doe + + unencrypted-asset@*: # wildcards "*" between the @ characters are possible + status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: The hardening measures were implemented and checked + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe + + missing-authentication-second-factor@*@*@*: # wildcards "*" between the @ characters are possible + status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: The hardening measures were implemented and checked + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe + + missing-hardening@*: # wildcards "*" between the @ characters are possible + status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: 
The hardening measures were implemented and checked + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe + + dos-risky-access-across-trust-boundary@*@*@*: # wildcards "*" between the @ characters are possible + status: in-progress # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: The hardening measures are being implemented and checked + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe diff --git a/test/security_requirements.yaml b/test/security_requirements.yaml new file mode 100644 index 00000000..d8905dfc --- /dev/null +++ b/test/security_requirements.yaml @@ -0,0 +1,4 @@ +security_requirements: + Input Validation: Strict input validation is required to reduce the overall attack surface. + Securing Administrative Access: Administrative access must be secured with strong encryption and multi-factor authentication. + EU-DSGVO: Mandatory EU-Datenschutzgrundverordnung diff --git a/test/shared_runtimes.yaml b/test/shared_runtimes.yaml new file mode 100644 index 00000000..c9bee4d6 --- /dev/null +++ b/test/shared_runtimes.yaml @@ -0,0 +1,16 @@ + +shared_runtimes: + + + WebApp and Backoffice Virtualization: + id: webapp-virtualization + description: WebApp Virtualization + tags: + - vmware + technical_assets_running: # sequence of IDs to reference + - apache-webserver + - marketing-cms + - erp-system + - contract-fileserver + - sql-database + diff --git a/test/tags.yaml b/test/tags.yaml new file mode 100644 index 00000000..527412e5 --- /dev/null +++ b/test/tags.yaml @@ -0,0 +1,16 @@ +# Tags can be used for anything, it's just a tag. Also risk rules can act based on tags if you like. 
+# Tags can be used for example to name the products used (which is more concrete than the technology types that only specify the type) +tags_available: + - linux + - apache + - mysql + - jboss + - keycloak + - jenkins + - git + - oracle + - some-erp + - vmware + - aws + - aws:ec2 + - aws:s3 diff --git a/test/technical_assets.yaml b/test/technical_assets.yaml new file mode 100644 index 00000000..427d2806 --- /dev/null +++ b/test/technical_assets.yaml @@ -0,0 +1,6 @@ +includes: + - technical_assets_clients.yaml + - technical_assets_infrastructure.yaml + - technical_assets_servers.yaml + - technical_assets_databases.yaml + - technical_assets_devops.yaml diff --git a/test/technical_assets_clients.yaml b/test/technical_assets_clients.yaml new file mode 100644 index 00000000..d9091f13 --- /dev/null +++ b/test/technical_assets_clients.yaml @@ -0,0 +1,211 @@ + +technical_assets: + + Customer Web Client: + id: customer-client + description: Customer Web Client + type: external-entity # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: true + out_of_scope: true + justification_out_of_scope: Owned and managed by enduser customer + size: component # values: system, service, application, component + technology: browser # values: see help + tags: + internet: true + machine: physical # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Customer + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: operational # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The client used by the customer to access the system. 
+ multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - client-application-code + - marketing-material + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + Customer Traffic: + target: load-balancer + description: Link to the load balancer + protocol: https # values: see help + authentication: session-id # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + data_assets_received: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - client-application-code + - marketing-material + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + + + Backoffice Client: + id: backoffice-client + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: Backoffice client + type: external-entity # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: true + out_of_scope: true + justification_out_of_scope: Owned and managed by Company XYZ company + size: component # values: system, service, application, component + technology: desktop # values: see help + tags: + internet: false + machine: physical # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company XYZ + 
confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: important # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The client used by Company XYZ to administer and use the system. + multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-contracts + - internal-business-data + - erp-logs + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + ERP Internal Access: + target: erp-system + description: Link to the ERP system + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation + tags: + - some-erp + vpn: true + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - internal-business-data + data_assets_received: # sequence of IDs to reference + - customer-contracts + - internal-business-data + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + Marketing CMS Editing: + target: marketing-cms + description: Link to the CMS for editing content + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation + tags: + vpn: true + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - marketing-material + data_assets_received: # sequence 
of IDs to reference + - marketing-material + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + + + Backend Admin Client: + id: backend-admin-client + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: Backend admin client + type: external-entity # values: external-entity, process, datastore + usage: devops # values: business, devops + used_as_client_by_human: true + out_of_scope: true + justification_out_of_scope: Owned and managed by ops provider + size: component # values: system, service, application, component + technology: browser # values: see help + tags: + internet: false + machine: physical # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company XYZ + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: operational # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The client used by Company XYZ to administer the system. 
+ multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - erp-logs + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + ERP Web Access: + target: erp-system + description: Link to the ERP system (Web) + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - erp-customizing + data_assets_received: # sequence of IDs to reference + - erp-logs + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + DB Update Access: + target: sql-database + description: Link to the database (JDBC tunneled via SSH) + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - db-dumps + data_assets_received: # sequence of IDs to reference + - db-dumps + - erp-logs + - customer-accounts + - customer-operational-data + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + User Management Access: + target: ldap-auth-server + description: Link to the LDAP auth server for managing users + protocol: ldaps # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: 
+ vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + data_assets_received: # sequence of IDs to reference + - customer-accounts + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false diff --git a/test/technical_assets_databases.yaml b/test/technical_assets_databases.yaml new file mode 100644 index 00000000..54fe362c --- /dev/null +++ b/test/technical_assets_databases.yaml @@ -0,0 +1,71 @@ + +technical_assets: + + LDAP Auth Server: + id: ldap-auth-server + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: LDAP authentication server + type: datastore # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: identity-store-ldap # values: see help + tags: + - linux + internet: false + machine: physical # values: physical, virtual, container, serverless + encryption: transparent # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The auth data of the application + multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + data_assets_stored: # sequence of IDs to reference + - customer-accounts + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + 
communication_links: + + + Customer Contract Database: + id: sql-database + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: The database behind the ERP system + type: datastore # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: database # values: see help + tags: + - linux + - mysql + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: data-with-symmetric-shared-key # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: mission-critical # values: archive, operational, important, critical, mission-critical + availability: mission-critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The ERP system's database contains business-relevant sensitive data for the leasing processes and eventually also + for other Company XYZ internal processes. 
+ multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - db-dumps + data_assets_stored: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - internal-business-data + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: diff --git a/test/technical_assets_devops.yaml b/test/technical_assets_devops.yaml new file mode 100644 index 00000000..3107c76c --- /dev/null +++ b/test/technical_assets_devops.yaml @@ -0,0 +1,224 @@ +includes: + - technical_assets_clients.yaml + - + +technical_assets: + + External Development Client: + id: external-dev-client + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: External developer client + type: external-entity # values: external-entity, process, datastore + usage: devops # values: business, devops + used_as_client_by_human: true + out_of_scope: true + justification_out_of_scope: Owned and managed by external developers + size: system # values: system, service, application, component + technology: devops-client # values: see help + tags: + - linux + internet: true + machine: physical # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: External Developers + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The clients used by external developers to create parts of the application code. 
+ multi_tenant: true + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_stored: # sequence of IDs to reference + - client-application-code + - server-application-code + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - file + communication_links: + Git-Repo Code Write Access: + target: git-repo + description: Link to the Git repo + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_received: # sequence of IDs to reference + - client-application-code + - server-application-code + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + Git-Repo Web-UI Access: + target: git-repo + description: Link to the Git repo + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_received: # sequence of IDs to reference + - client-application-code + - server-application-code + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + Jenkins Web-UI Access: + target: jenkins-buildserver + description: Link to the Jenkins build server + protocol: https # values: see help + authentication: credentials # 
values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - build-job-config + data_assets_received: # sequence of IDs to reference + - build-job-config + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + + + Git Repository: + id: git-repo + #diagram_tweak_order: 99 # affects left to right positioning (only within a trust boundary) + description: Git repository server + type: process # values: external-entity, process, datastore + usage: devops # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: system # values: system, service, application, component + technology: sourcecode-repository # values: see help + tags: + - linux + - git + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: important # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The code repo pipeline might contain sensitive configuration values like backend credentials, certificates etc. and is + therefore rated as confidential. 
+ multi_tenant: true + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_stored: # sequence of IDs to reference + - client-application-code + - server-application-code + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - file + communication_links: + + + Jenkins Buildserver: + id: jenkins-buildserver + #diagram_tweak_order: 99 # affects left to right positioning (only within a trust boundary) + description: Jenkins buildserver + type: process # values: external-entity, process, datastore + usage: devops # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: system # values: system, service, application, component + technology: build-pipeline # values: see help + tags: + - linux + - jenkins + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The build pipeline might contain sensitive configuration values like backend credentials, certificates etc. and is + therefore rated as confidential. The integrity and availability is rated as critical and important due to the risk + of reputation damage and application update unavailability when the build pipeline is compromised. 
+ multi_tenant: true + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - build-job-config + - client-application-code + - server-application-code + - marketing-material + data_assets_stored: # sequence of IDs to reference + - build-job-config + - client-application-code + - server-application-code + - marketing-material + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - file + - serialization + communication_links: + Git Repo Code Read Access: + target: git-repo + description: Link to the Git repository server + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: true + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + data_assets_received: # sequence of IDs to reference + - client-application-code + - server-application-code + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + Application Deployment: + target: apache-webserver + description: Link to the Apache webserver + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_received: # sequence of IDs to reference + CMS Updates: + target: marketing-cms + description: Link to the CMS + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, 
client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - marketing-material + data_assets_received: # sequence of IDs to reference + diff --git a/test/technical_assets_infrastructure.yaml b/test/technical_assets_infrastructure.yaml new file mode 100644 index 00000000..00e7b697 --- /dev/null +++ b/test/technical_assets_infrastructure.yaml @@ -0,0 +1,75 @@ + +technical_assets: + + Load Balancer: + id: load-balancer + #diagram_tweak_order: 50 # affects left to right positioning (only within a trust boundary) + description: Load Balancer (HA-Proxy) + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: load-balancer # values: see help + tags: + internet: false + machine: physical # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: mission-critical # values: archive, operational, important, critical, mission-critical + availability: mission-critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The correct configuration and reachability of the load balancer is mandatory for all customer and Company XYZ + usages of the portal and ERP system. 
+ multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - internal-business-data + - client-application-code + - marketing-material + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + Web Application Traffic: + target: apache-webserver + description: Link to the web server + protocol: http # values: see help + authentication: session-id # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + data_assets_received: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - client-application-code + #diagram_tweak_weight: 5 + #diagram_tweak_constraint: false + CMS Content Traffic: + target: marketing-cms + description: Link to the CMS server + protocol: http # values: see help + authentication: none # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: none # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: true + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + data_assets_received: # sequence of IDs to reference + - marketing-material + #diagram_tweak_weight: 5 + #diagram_tweak_constraint: false diff --git a/test/technical_assets_servers.yaml b/test/technical_assets_servers.yaml new file mode 100644 index 00000000..b8e025b3 --- /dev/null +++ 
b/test/technical_assets_servers.yaml @@ -0,0 +1,295 @@ + +technical_assets: + + Apache Webserver: + id: apache-webserver + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: Apache Webserver hosting the API code and client-side code + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: application # values: system, service, application, component + technology: web-server # values: see help + tags: + - linux + - apache + - aws:ec2 + internet: false + machine: container # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The correct configuration and reachability of the web server is mandatory for all customer usages of the portal. 
+ multi_tenant: false + redundant: false + custom_developed_parts: true + data_assets_processed: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - internal-business-data + - client-application-code + - server-application-code + data_assets_stored: # sequence of IDs to reference + - client-application-code + - server-application-code + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - json + - file + communication_links: + ERP System Traffic: + target: erp-system + description: Link to the ERP system + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - internal-business-data + data_assets_received: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - internal-business-data + #diagram_tweak_weight: 5 + #diagram_tweak_constraint: false + Auth Credential Check Traffic: + target: identity-provider + description: Link to the identity provider server + protocol: https # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + data_assets_received: # sequence of IDs to reference + + + Identity Provider: + id: identity-provider + #diagram_tweak_order: 0 # affects left to right positioning (only 
within a trust boundary) + description: Identity provider server + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: identity-provider # values: see help + tags: + - linux + - jboss + - keycloak + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The auth data of the application + multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + LDAP Credential Check Traffic: + target: ldap-auth-server + description: Link to the LDAP server + protocol: ldaps # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + data_assets_received: # sequence of IDs to reference + + + Marketing CMS: + id: marketing-cms + #diagram_tweak_order: 0 # affects 
left to right positioning (only within a trust boundary) + description: CMS for the marketing content + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: application # values: system, service, application, component + technology: cms # values: see help + tags: + - linux + internet: false + machine: container # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: important # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The correct configuration and reachability of the web server is mandatory for all customer usages of the portal. 
+ multi_tenant: false + redundant: false + custom_developed_parts: true + data_assets_processed: # sequence of IDs to reference + - marketing-material + - customer-accounts + data_assets_stored: # sequence of IDs to reference + - marketing-material + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + Auth Traffic: + target: ldap-auth-server + description: Link to the LDAP auth server + protocol: ldap # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: true + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + data_assets_received: # sequence of IDs to reference + - customer-accounts + #diagram_tweak_weight: 5 + #diagram_tweak_constraint: false + + + Backoffice ERP System: + id: erp-system + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: ERP system + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: system # values: system, service, application, component + technology: erp # values: see help + tags: + - linux + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: mission-critical # values: archive, operational, important, critical, mission-critical + availability: mission-critical # values: 
archive, operational, important, critical, mission-critical + justification_cia_rating: > + The ERP system contains business-relevant sensitive data for the leasing processes and eventually also for other + Company XYZ internal processes. + multi_tenant: false + redundant: true + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - internal-business-data + - erp-customizing + data_assets_stored: # sequence of IDs to reference + - erp-logs + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - xml + - file + - serialization + communication_links: + Database Traffic: + target: sql-database + description: Link to the DB system + protocol: jdbc # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - internal-business-data + data_assets_received: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - internal-business-data + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + NFS Filesystem Access: + target: contract-fileserver + description: Link to the file system + protocol: nfs # values: see help + authentication: none # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: none # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-contracts + data_assets_received: # sequence of IDs to reference 
+ - customer-contracts + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + + + Contract Fileserver: + id: contract-fileserver + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: NFS Filesystem for storing the contract PDFs + type: datastore # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: file-server # values: see help + tags: + - linux + - aws:s3 + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Contract data might contain financial data as well as personally identifiable information (PII). The integrity and + availability of contract data is required for clearing payment disputes. The filesystem is also required to be available + for storing new contracts of freshly generated customers. 
+ multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + data_assets_stored: # sequence of IDs to reference + - customer-contracts + - contract-summaries + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - file + communication_links: diff --git a/test/trust_boundaries.yaml b/test/trust_boundaries.yaml new file mode 100644 index 00000000..b6ae70fd --- /dev/null +++ b/test/trust_boundaries.yaml @@ -0,0 +1,67 @@ + +trust_boundaries: + + + Web DMZ: + id: web-dmz + description: Web DMZ + type: network-cloud-security-group # values: see help + tags: + technical_assets_inside: # sequence of IDs to reference + - apache-webserver + - marketing-cms + trust_boundaries_nested: # sequence of IDs to reference + + + ERP DMZ: + id: erp-dmz + description: ERP DMZ + type: network-cloud-security-group # values: see help + tags: + - some-erp + technical_assets_inside: # sequence of IDs to reference + - erp-system + - contract-fileserver + - sql-database + trust_boundaries_nested: # sequence of IDs to reference + + + Application Network: + id: application-network + description: Application Network + type: network-cloud-provider # values: see help + tags: + - aws + technical_assets_inside: # sequence of IDs to reference + - load-balancer + trust_boundaries_nested: # sequence of IDs to reference + - web-dmz + - erp-dmz + - auth-env + + + Auth Handling Environment: + id: auth-env + description: Auth Handling Environment + type: execution-environment # values: see help + tags: + technical_assets_inside: # sequence of IDs to reference + - identity-provider + - ldap-auth-server + trust_boundaries_nested: # sequence of IDs to reference + + + Dev Network: + id: dev-network + description: Development Network + type: network-on-prem # values: see help + tags: + technical_assets_inside: # sequence of IDs to reference + - jenkins-buildserver + - git-repo + - backend-admin-client + - 
backoffice-client + trust_boundaries_nested: # sequence of IDs to reference + + + From 7a5366b05d357acab62626c976218c28874fc214 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Fri, 15 Dec 2023 10:59:20 -0800 Subject: [PATCH 05/68] added support for directly pointing at the repo demo folders when creating stub and example models --- main.go | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/main.go b/main.go index 39faa05b..8cbaef33 100644 --- a/main.go +++ b/main.go @@ -4045,17 +4045,30 @@ func (context *Context) printVersion() { func (context *Context) createExampleModelFile() error { _, err := copyFile(filepath.Join(*context.appFolder, "threagile-example-model.yaml"), filepath.Join(*context.outputDir, "threagile-example-model.yaml")) - return err + if err == nil { + return nil + } + + _, altError := copyFile(filepath.Join(*context.appFolder, "threagile.yaml"), filepath.Join(*context.outputDir, "threagile-example-model.yaml")) + if altError != nil { + return err + } + + return nil } func (context *Context) createStubModelFile() error { - context.loadCustomRiskRules() - stub, err := os.ReadFile(filepath.Join(*context.appFolder, "threagile-stub-model.yaml")) - if err != nil { + _, err := copyFile(filepath.Join(*context.appFolder, "threagile-stub-model.yaml"), filepath.Join(*context.outputDir, "threagile-stub-model.yaml")) + if err == nil { + return nil + } + + _, altError := copyFile(filepath.Join(*context.appFolder, "threagile.yaml"), filepath.Join(*context.outputDir, "threagile-stub-model.yaml")) + if altError != nil { return err } - return os.WriteFile(filepath.Join(*context.outputDir, "threagile-stub-model.yaml"), context.addSupportedTags(stub), 0644) + return nil } func (context *Context) createEditingSupportFiles() error { From a3baeea8ee63fa558cc79f2026b56f7ceb86bc8e Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Sat, 16 Dec 2023 10:27:04 -0800 Subject: [PATCH 06/68] add install mechanism --- .gitignore | 6 
++++- Makefile | 64 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 69 insertions(+), 1 deletion(-) create mode 100644 Makefile diff --git a/.gitignore b/.gitignore index 9f411951..f41a2eac 100644 --- a/.gitignore +++ b/.gitignore @@ -23,5 +23,9 @@ stats.json # Output of the go coverage tool, specifically when used with LiteIDE *.out +# build artifacts +vendor/ +bin/ + # IDE stuff -.idea/ \ No newline at end of file +.idea/ diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..71d0f1a4 --- /dev/null +++ b/Makefile @@ -0,0 +1,64 @@ +# Files and Folders +ASSET_DIR = $(HOME)/.threagile +BIN_DIR = $(HOME)/bin +ASSETS = \ + LICENSE.txt \ + report/template/background.pdf \ + support/openapi.yaml \ + support/schema.json \ + support/live-templates.txt \ + server +BIN = \ + raa \ + raa_dummy \ + risk_demo \ + threagile +SCRIPTS = \ + support/render-data-asset-diagram.sh \ + support/render-data-flow-diagram.sh + +# Commands and Flags +GOFLAGS = -a -ldflags="-s -w -X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" +GO = env GO111MODULE=on go +MKDIR = mkdir -p +CP = cp -r +RM = rm -rf + +# Targets +.phony: all install clean uninstall + +default: all + +prep: + env GO111MODULE=on go mod vendor + $(MKDIR) bin + +all: prep $(addprefix bin/,$(BIN)) + +clean: + $(RM) bin vendor + +install: all + mkdir -p $(BIN_DIR) $(ASSET_DIR) + $(CP) $(addprefix bin/,$(BIN)) $(BIN_DIR) + $(CP) $(SCRIPTS) $(BIN_DIR) + $(CP) $(ASSETS) $(ASSET_DIR) + $(CP) demo/example/threagile.yaml $(ASSET_DIR)/threagile-example-model.yaml + $(CP) demo/stub/threagile.yaml $(ASSET_DIR)/threagile-stub-model.yaml + +uninstall: + $(RM) $(addprefix $(BIN_DIR)/,$(BIN)) + $(RM) $(addprefix $(BIN_DIR)/,$(notdir $(SCRIPTS))) + $(RM) $(ASSET_DIR) + +bin/raa: raa/raa/raa.go + $(GO) build $(GOFLAGS) -buildmode=plugin -o $@ $< + +bin/raa_dummy: raa/dummy/dummy.go + $(GO) build $(GOFLAGS) -buildmode=plugin -o $@ $< + +bin/risk_demo: risks/custom/demo/demo-rule.go + $(GO) build $(GOFLAGS) 
-buildmode=plugin -o $@ $< + +bin/threagile: main.go + $(GO) build $(GOFLAGS) -o $@ $< From 0233bc4b3b564e93425a9d701cd1f46c1d511d61 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Mon, 18 Dec 2023 13:34:07 -0800 Subject: [PATCH 07/68] replace plugins with system calls to regular binaries --- Dockerfile | 6 +- Makefile | 6 +- main.go | 720 ++++-------------- model/types.go | 59 +- raa/dummy/dummy.go | 42 +- raa/raa/raa.go | 80 +- report/report.go | 151 ++-- .../accidental-secret-leak-rule.go | 10 +- .../code-backdooring/code-backdooring-rule.go | 10 +- .../container-baseimage-backdooring-rule.go | 10 +- .../container-platform-escape-rule.go | 10 +- .../cross-site-request-forgery-rule.go | 10 +- .../cross-site-scripting-rule.go | 10 +- ...risky-access-across-trust-boundary-rule.go | 10 +- .../incomplete-model/incomplete-model-rule.go | 10 +- .../ldap-injection/ldap-injection-rule.go | 10 +- ...ssing-authentication-second-factor-rule.go | 10 +- .../missing-authentication-rule.go | 10 +- .../missing-build-infrastructure-rule.go | 10 +- .../missing-cloud-hardening-rule.go | 10 +- .../missing-file-validation-rule.go | 10 +- .../missing-hardening-rule.go | 10 +- .../missing-identity-propagation-rule.go | 10 +- ...issing-identity-provider-isolation-rule.go | 10 +- .../missing-identity-store-rule.go | 10 +- .../missing-network-segmentation-rule.go | 10 +- .../missing-vault-isolation-rule.go | 10 +- .../missing-vault/missing-vault-rule.go | 10 +- .../built-in/missing-waf/missing-waf-rule.go | 10 +- .../mixed-targets-on-shared-runtime-rule.go | 10 +- .../path-traversal/path-traversal-rule.go | 10 +- .../push-instead-of-pull-deployment-rule.go | 10 +- .../search-query-injection-rule.go | 10 +- .../server-side-request-forgery-rule.go | 10 +- .../service-registry-poisoning-rule.go | 10 +- .../sql-nosql-injection-rule.go | 10 +- .../unchecked-deployment-rule.go | 10 +- .../unencrypted-asset-rule.go | 10 +- .../unencrypted-communication-rule.go | 10 +- 
.../unguarded-access-from-internet-rule.go | 10 +- .../unguarded-direct-datastore-access-rule.go | 10 +- .../unnecessary-communication-link-rule.go | 10 +- .../unnecessary-data-asset-rule.go | 10 +- .../unnecessary-data-transfer-rule.go | 10 +- .../unnecessary-technical-asset-rule.go | 10 +- .../untrusted-deserialization-rule.go | 10 +- .../wrong-communication-link-content-rule.go | 10 +- .../wrong-trust-boundary-content.go | 10 +- .../xml-external-entity-rule.go | 10 +- risks/custom/demo/demo-rule.go | 70 +- risks/risk.go | 34 + run/runner.go | 105 +++ 52 files changed, 968 insertions(+), 725 deletions(-) create mode 100644 risks/risk.go create mode 100644 run/runner.go diff --git a/Dockerfile b/Dockerfile index ce3c034a..a552ac79 100644 --- a/Dockerfile +++ b/Dockerfile @@ -19,9 +19,9 @@ WORKDIR /app COPY --from=clone /app/threagile /app RUN go version RUN go test ./... -RUN GOOS=linux go build -a -trimpath -ldflags="-s -w -X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -buildmode=plugin -o raa.so raa/raa/raa.go -RUN GOOS=linux go build -a -trimpath -ldflags="-s -w -X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -buildmode=plugin -o dummy.so raa/dummy/dummy.go -RUN GOOS=linux go build -a -trimpath -ldflags="-s -w -X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -buildmode=plugin -o demo-rule.so risks/custom/demo/demo-rule.go +RUN GOOS=linux go build -a -trimpath -ldflags="-s -w -X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -buildmode=run -o raa.so raa/raa/raa.go +RUN GOOS=linux go build -a -trimpath -ldflags="-s -w -X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -buildmode=run -o dummy.so raa/dummy/dummy.go +RUN GOOS=linux go build -a -trimpath -ldflags="-s -w -X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -buildmode=run -o demo-rule.so risks/custom/demo/demo-rule.go RUN GOOS=linux go build -a -trimpath -ldflags="-s -w -X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o threagile # add the -race parameter to go build call in order to instrument with race 
condition detector: https://blog.golang.org/race-detector # NOTE: copy files with final name to send to final build diff --git a/Makefile b/Makefile index 71d0f1a4..49bf843a 100644 --- a/Makefile +++ b/Makefile @@ -52,13 +52,13 @@ uninstall: $(RM) $(ASSET_DIR) bin/raa: raa/raa/raa.go - $(GO) build $(GOFLAGS) -buildmode=plugin -o $@ $< + $(GO) build $(GOFLAGS) -o $@ $< bin/raa_dummy: raa/dummy/dummy.go - $(GO) build $(GOFLAGS) -buildmode=plugin -o $@ $< + $(GO) build $(GOFLAGS) -o $@ $< bin/risk_demo: risks/custom/demo/demo-rule.go - $(GO) build $(GOFLAGS) -buildmode=plugin -o $@ $< + $(GO) build $(GOFLAGS) -o $@ $< bin/threagile: main.go $(GO) build $(GOFLAGS) -o $@ $< diff --git a/main.go b/main.go index 8cbaef33..0c7df67a 100644 --- a/main.go +++ b/main.go @@ -17,6 +17,7 @@ import ( "flag" "fmt" "github.com/akedrou/textdiff" + "github.com/threagile/threagile/risks" "hash/fnv" "io" "log" @@ -24,7 +25,6 @@ import ( "os" "os/exec" "path/filepath" - "plugin" "regexp" "sort" "strconv" @@ -85,6 +85,7 @@ import ( wrongcommunicationlinkcontent "github.com/threagile/threagile/risks/built-in/wrong-communication-link-content" wrongtrustboundarycontent "github.com/threagile/threagile/risks/built-in/wrong-trust-boundary-content" xmlexternalentity "github.com/threagile/threagile/risks/built-in/xml-external-entity" + "github.com/threagile/threagile/run" "golang.org/x/crypto/argon2" "gopkg.in/yaml.v3" ) @@ -101,9 +102,11 @@ const ( const ( buildTimestamp = "" - tmpFolder = "/dev/shm" // TODO: make configurable via cmdline arg? - appFolder = "/app" - baseFolder = "/data" + tempDir = "/dev/shm" // TODO: make configurable via cmdline arg? 
+ binDir = "/app" + appDir = "/app" + dataDir = "/data" + keyDir = "keys" reportFilename = "report.pdf" excelRisksFilename = "risks.xlsx" excelTagsFilename = "tags.xlsx" @@ -116,7 +119,7 @@ const ( dataAssetDiagramFilenamePNG = "data-asset-diagram.png" graphvizDataFlowDiagramConversionCall = "render-data-flow-diagram.sh" graphvizDataAssetDiagramConversionCall = "render-data-asset-diagram.sh" - outputFile = "threagile.yaml" + inputFile = "threagile.yaml" ) type Context struct { @@ -128,20 +131,21 @@ type Context struct { globalLock sync.Mutex modelInput model.ModelInput - tempFolder *string modelFilename, templateFilename *string testParseModel *bool createExampleModel, createStubModel, createEditingSupport, verbose, ignoreOrphanedRiskTracking *bool generateDataFlowDiagram, generateDataAssetDiagram, generateRisksJSON, generateTechnicalAssetsJSON *bool generateStatsJSON, generateRisksExcel, generateTagsExcel, generateReportPDF *bool outputDir, raaPlugin, skipRiskRules, riskRulesPlugins, executeModelMacro *string - customRiskRules map[string]model.CustomRiskRule + customRiskRules map[string]*risks.CustomRisk diagramDPI, serverPort *int deferredRiskTrackingDueToWildcardMatching map[string]model.RiskTracking addModelTitle bool keepDiagramSourceFiles bool appFolder *string - baseFolder *string + binFolder *string + serverFolder *string + tempFolder *string } func (context *Context) Defaults() *Context { @@ -149,7 +153,7 @@ func (context *Context) Defaults() *Context { keepDiagramSourceFiles: keepDiagramSourceFiles, addModelTitle: addModelTitle, buildTimestamp: buildTimestamp, - customRiskRules: make(map[string]model.CustomRiskRule), + customRiskRules: make(map[string]*risks.CustomRisk), deferredRiskTrackingDueToWildcardMatching: make(map[string]model.RiskTracking), drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks: true, } @@ -157,482 +161,85 @@ func (context *Context) Defaults() *Context { return context } -func (context *Context) applyRiskGeneration() { - if 
*context.verbose { - fmt.Println("Applying risk generation") - } - skippedRules := make(map[string]interface{}) - if len(*context.skipRiskRules) > 0 { - for _, id := range strings.Split(*context.skipRiskRules, ",") { - skippedRules[id] = true - } - } - - if _, ok := skippedRules[unencryptedasset.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unencryptedasset.Category().Id) - delete(skippedRules, unencryptedasset.Category().Id) - } else { - model.AddToListOfSupportedTags(unencryptedasset.SupportedTags()) - risks := unencryptedasset.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[unencryptedasset.Category()] = risks - } - } - - if _, ok := skippedRules[unencryptedcommunication.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unencryptedcommunication.Category().Id) - delete(skippedRules, unencryptedcommunication.Category().Id) - } else { - model.AddToListOfSupportedTags(unencryptedcommunication.SupportedTags()) - risks := unencryptedcommunication.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[unencryptedcommunication.Category()] = risks - } - } - - if _, ok := skippedRules[unguardeddirectdatastoreaccess.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unguardeddirectdatastoreaccess.Category().Id) - delete(skippedRules, unguardeddirectdatastoreaccess.Category().Id) - } else { - model.AddToListOfSupportedTags(unguardeddirectdatastoreaccess.SupportedTags()) - risks := unguardeddirectdatastoreaccess.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[unguardeddirectdatastoreaccess.Category()] = risks - } - } - - if _, ok := skippedRules[unguardedaccessfrominternet.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unguardedaccessfrominternet.Category().Id) - delete(skippedRules, unguardedaccessfrominternet.Category().Id) - } else { - model.AddToListOfSupportedTags(unguardedaccessfrominternet.SupportedTags()) - risks := unguardedaccessfrominternet.GenerateRisks() - if 
len(risks) > 0 { - model.GeneratedRisksByCategory[unguardedaccessfrominternet.Category()] = risks - } - } - - if _, ok := skippedRules[dosriskyaccessacrosstrustboundary.Category().Id]; ok { - fmt.Println("Skipping risk rule:", dosriskyaccessacrosstrustboundary.Category().Id) - delete(skippedRules, dosriskyaccessacrosstrustboundary.Category().Id) - } else { - model.AddToListOfSupportedTags(dosriskyaccessacrosstrustboundary.SupportedTags()) - risks := dosriskyaccessacrosstrustboundary.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[dosriskyaccessacrosstrustboundary.Category()] = risks - } - } - - if _, ok := skippedRules[missingnetworksegmentation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingnetworksegmentation.Category().Id) - delete(skippedRules, missingnetworksegmentation.Category().Id) - } else { - model.AddToListOfSupportedTags(missingnetworksegmentation.SupportedTags()) - risks := missingnetworksegmentation.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[missingnetworksegmentation.Category()] = risks - } - } - - if _, ok := skippedRules[mixedtargetsonsharedruntime.Category().Id]; ok { - fmt.Println("Skipping risk rule:", mixedtargetsonsharedruntime.Category().Id) - delete(skippedRules, mixedtargetsonsharedruntime.Category().Id) - } else { - model.AddToListOfSupportedTags(mixedtargetsonsharedruntime.SupportedTags()) - risks := mixedtargetsonsharedruntime.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[mixedtargetsonsharedruntime.Category()] = risks - } - } - - if _, ok := skippedRules[missingidentitypropagation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingidentitypropagation.Category().Id) - delete(skippedRules, missingidentitypropagation.Category().Id) - } else { - model.AddToListOfSupportedTags(missingidentitypropagation.SupportedTags()) - risks := missingidentitypropagation.GenerateRisks() - if len(risks) > 0 { - 
model.GeneratedRisksByCategory[missingidentitypropagation.Category()] = risks - } - } - - if _, ok := skippedRules[missingidentitystore.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingidentitystore.Category().Id) - delete(skippedRules, missingidentitystore.Category().Id) - } else { - model.AddToListOfSupportedTags(missingidentitystore.SupportedTags()) - risks := missingidentitystore.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[missingidentitystore.Category()] = risks - } - } - - if _, ok := skippedRules[missingauthentication.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingauthentication.Category().Id) - delete(skippedRules, missingauthentication.Category().Id) - } else { - model.AddToListOfSupportedTags(missingauthentication.SupportedTags()) - risks := missingauthentication.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[missingauthentication.Category()] = risks - } - } - - if _, ok := skippedRules[missingauthenticationsecondfactor.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingauthenticationsecondfactor.Category().Id) - delete(skippedRules, missingauthenticationsecondfactor.Category().Id) - } else { - model.AddToListOfSupportedTags(missingauthenticationsecondfactor.SupportedTags()) - risks := missingauthenticationsecondfactor.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[missingauthenticationsecondfactor.Category()] = risks - } - } - - if _, ok := skippedRules[unnecessarydatatransfer.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unnecessarydatatransfer.Category().Id) - delete(skippedRules, unnecessarydatatransfer.Category().Id) - } else { - model.AddToListOfSupportedTags(unnecessarydatatransfer.SupportedTags()) - risks := unnecessarydatatransfer.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[unnecessarydatatransfer.Category()] = risks - } - } - - if _, ok := 
skippedRules[unnecessarycommunicationlink.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unnecessarycommunicationlink.Category().Id) - delete(skippedRules, unnecessarycommunicationlink.Category().Id) - } else { - model.AddToListOfSupportedTags(unnecessarycommunicationlink.SupportedTags()) - risks := unnecessarycommunicationlink.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[unnecessarycommunicationlink.Category()] = risks - } - } - - if _, ok := skippedRules[unnecessarytechnicalasset.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unnecessarytechnicalasset.Category().Id) - delete(skippedRules, unnecessarytechnicalasset.Category().Id) - } else { - model.AddToListOfSupportedTags(unnecessarytechnicalasset.SupportedTags()) - risks := unnecessarytechnicalasset.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[unnecessarytechnicalasset.Category()] = risks - } - } - - if _, ok := skippedRules[unnecessarydataasset.Category().Id]; ok { - fmt.Println("Skipping risk rule:", unnecessarydataasset.Category().Id) - delete(skippedRules, unnecessarydataasset.Category().Id) - } else { - model.AddToListOfSupportedTags(unnecessarydataasset.SupportedTags()) - risks := unnecessarydataasset.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[unnecessarydataasset.Category()] = risks - } - } - - if _, ok := skippedRules[sqlnosqlinjection.Category().Id]; ok { - fmt.Println("Skipping risk rule:", sqlnosqlinjection.Category().Id) - delete(skippedRules, sqlnosqlinjection.Category().Id) - } else { - model.AddToListOfSupportedTags(sqlnosqlinjection.SupportedTags()) - risks := sqlnosqlinjection.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[sqlnosqlinjection.Category()] = risks - } - } - - if _, ok := skippedRules[ldapinjection.Category().Id]; ok { - fmt.Println("Skipping risk rule:", ldapinjection.Category().Id) - delete(skippedRules, ldapinjection.Category().Id) - } else { - 
model.AddToListOfSupportedTags(ldapinjection.SupportedTags()) - risks := ldapinjection.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[ldapinjection.Category()] = risks - } - } - - if _, ok := skippedRules[crosssitescripting.Category().Id]; ok { - fmt.Println("Skipping risk rule:", crosssitescripting.Category().Id) - delete(skippedRules, crosssitescripting.Category().Id) - } else { - model.AddToListOfSupportedTags(crosssitescripting.SupportedTags()) - risks := crosssitescripting.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[crosssitescripting.Category()] = risks - } - } - - if _, ok := skippedRules[crosssiterequestforgery.Category().Id]; ok { - fmt.Println("Skipping risk rule:", crosssiterequestforgery.Category().Id) - delete(skippedRules, crosssiterequestforgery.Category().Id) - } else { - model.AddToListOfSupportedTags(crosssiterequestforgery.SupportedTags()) - risks := crosssiterequestforgery.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[crosssiterequestforgery.Category()] = risks - } - } - - if _, ok := skippedRules[serversiderequestforgery.Category().Id]; ok { - fmt.Println("Skipping risk rule:", serversiderequestforgery.Category().Id) - delete(skippedRules, serversiderequestforgery.Category().Id) - } else { - model.AddToListOfSupportedTags(serversiderequestforgery.SupportedTags()) - risks := serversiderequestforgery.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[serversiderequestforgery.Category()] = risks - } - } - - if _, ok := skippedRules[pathtraversal.Category().Id]; ok { - fmt.Println("Skipping risk rule:", pathtraversal.Category().Id) - delete(skippedRules, pathtraversal.Category().Id) - } else { - model.AddToListOfSupportedTags(pathtraversal.SupportedTags()) - risks := pathtraversal.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[pathtraversal.Category()] = risks - } - } - - if _, ok := 
skippedRules[pushinsteadofpulldeployment.Category().Id]; ok { - fmt.Println("Skipping risk rule:", pushinsteadofpulldeployment.Category().Id) - delete(skippedRules, pushinsteadofpulldeployment.Category().Id) - } else { - model.AddToListOfSupportedTags(pushinsteadofpulldeployment.SupportedTags()) - risks := pushinsteadofpulldeployment.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[pushinsteadofpulldeployment.Category()] = risks - } - } - - if _, ok := skippedRules[searchqueryinjection.Category().Id]; ok { - fmt.Println("Skipping risk rule:", searchqueryinjection.Category().Id) - delete(skippedRules, searchqueryinjection.Category().Id) - } else { - model.AddToListOfSupportedTags(searchqueryinjection.SupportedTags()) - risks := searchqueryinjection.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[searchqueryinjection.Category()] = risks - } - } - - if _, ok := skippedRules[serviceregistrypoisoning.Category().Id]; ok { - fmt.Println("Skipping risk rule:", serviceregistrypoisoning.Category().Id) - delete(skippedRules, serviceregistrypoisoning.Category().Id) - } else { - model.AddToListOfSupportedTags(serviceregistrypoisoning.SupportedTags()) - risks := serviceregistrypoisoning.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[serviceregistrypoisoning.Category()] = risks - } - } - - if _, ok := skippedRules[untrusteddeserialization.Category().Id]; ok { - fmt.Println("Skipping risk rule:", untrusteddeserialization.Category().Id) - delete(skippedRules, untrusteddeserialization.Category().Id) - } else { - model.AddToListOfSupportedTags(untrusteddeserialization.SupportedTags()) - risks := untrusteddeserialization.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[untrusteddeserialization.Category()] = risks - } - } - - if _, ok := skippedRules[xmlexternalentity.Category().Id]; ok { - fmt.Println("Skipping risk rule:", xmlexternalentity.Category().Id) - delete(skippedRules, 
xmlexternalentity.Category().Id) - } else { - model.AddToListOfSupportedTags(xmlexternalentity.SupportedTags()) - risks := xmlexternalentity.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[xmlexternalentity.Category()] = risks - } - } - - if _, ok := skippedRules[missingcloudhardening.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingcloudhardening.Category().Id) - delete(skippedRules, missingcloudhardening.Category().Id) - } else { - model.AddToListOfSupportedTags(missingcloudhardening.SupportedTags()) - risks := missingcloudhardening.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[missingcloudhardening.Category()] = risks - } - } - - if _, ok := skippedRules[missingfilevalidation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingfilevalidation.Category().Id) - delete(skippedRules, missingfilevalidation.Category().Id) - } else { - model.AddToListOfSupportedTags(missingfilevalidation.SupportedTags()) - risks := missingfilevalidation.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[missingfilevalidation.Category()] = risks - } - } - - if _, ok := skippedRules[missinghardening.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missinghardening.Category().Id) - delete(skippedRules, missinghardening.Category().Id) - } else { - model.AddToListOfSupportedTags(missinghardening.SupportedTags()) - risks := missinghardening.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[missinghardening.Category()] = risks - } - } - - if _, ok := skippedRules[accidentalsecretleak.Category().Id]; ok { - fmt.Println("Skipping risk rule:", accidentalsecretleak.Category().Id) - delete(skippedRules, accidentalsecretleak.Category().Id) - } else { - model.AddToListOfSupportedTags(accidentalsecretleak.SupportedTags()) - risks := accidentalsecretleak.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[accidentalsecretleak.Category()] = risks - } - } - - 
if _, ok := skippedRules[codebackdooring.Category().Id]; ok { - fmt.Println("Skipping risk rule:", codebackdooring.Category().Id) - delete(skippedRules, codebackdooring.Category().Id) - } else { - model.AddToListOfSupportedTags(codebackdooring.SupportedTags()) - risks := codebackdooring.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[codebackdooring.Category()] = risks - } - } - - if _, ok := skippedRules[containerbaseimagebackdooring.Category().Id]; ok { - fmt.Println("Skipping risk rule:", containerbaseimagebackdooring.Category().Id) - delete(skippedRules, containerbaseimagebackdooring.Category().Id) - } else { - model.AddToListOfSupportedTags(containerbaseimagebackdooring.SupportedTags()) - risks := containerbaseimagebackdooring.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[containerbaseimagebackdooring.Category()] = risks - } - } - - if _, ok := skippedRules[containerplatformescape.Category().Id]; ok { - fmt.Println("Skipping risk rule:", containerplatformescape.Category().Id) - delete(skippedRules, containerplatformescape.Category().Id) - } else { - model.AddToListOfSupportedTags(containerplatformescape.SupportedTags()) - risks := containerplatformescape.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[containerplatformescape.Category()] = risks - } - } - - if _, ok := skippedRules[incompletemodel.Category().Id]; ok { - fmt.Println("Skipping risk rule:", incompletemodel.Category().Id) - delete(skippedRules, incompletemodel.Category().Id) - } else { - model.AddToListOfSupportedTags(incompletemodel.SupportedTags()) - risks := incompletemodel.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[incompletemodel.Category()] = risks - } - } - - if _, ok := skippedRules[uncheckeddeployment.Category().Id]; ok { - fmt.Println("Skipping risk rule:", uncheckeddeployment.Category().Id) - delete(skippedRules, uncheckeddeployment.Category().Id) - } else { - 
model.AddToListOfSupportedTags(uncheckeddeployment.SupportedTags()) - risks := uncheckeddeployment.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[uncheckeddeployment.Category()] = risks - } - } - - if _, ok := skippedRules[missingbuildinfrastructure.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingbuildinfrastructure.Category().Id) - delete(skippedRules, missingbuildinfrastructure.Category().Id) - } else { - model.AddToListOfSupportedTags(missingbuildinfrastructure.SupportedTags()) - risks := missingbuildinfrastructure.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[missingbuildinfrastructure.Category()] = risks - } - } +func (context *Context) applyRisk(rule model.CustomRiskRule, skippedRules *map[string]bool) { + id := rule.Category().Id + _, ok := (*skippedRules)[id] - if _, ok := skippedRules[missingidentityproviderisolation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingidentityproviderisolation.Category().Id) - delete(skippedRules, missingidentityproviderisolation.Category().Id) - } else { - model.AddToListOfSupportedTags(missingidentityproviderisolation.SupportedTags()) - risks := missingidentityproviderisolation.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[missingidentityproviderisolation.Category()] = risks - } - } - - if _, ok := skippedRules[missingvault.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingvault.Category().Id) - delete(skippedRules, missingvault.Category().Id) - } else { - model.AddToListOfSupportedTags(missingvault.SupportedTags()) - risks := missingvault.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[missingvault.Category()] = risks - } - } - - if _, ok := skippedRules[missingvaultisolation.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingvaultisolation.Category().Id) - delete(skippedRules, missingvaultisolation.Category().Id) + if ok { + fmt.Printf("Skipping risk rule %q\n", 
rule.Category().Id) + delete(*skippedRules, rule.Category().Id) } else { - model.AddToListOfSupportedTags(missingvaultisolation.SupportedTags()) - risks := missingvaultisolation.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[missingvaultisolation.Category()] = risks + model.AddToListOfSupportedTags(rule.SupportedTags()) + generatedRisks := rule.GenerateRisks(&context.modelInput) + if generatedRisks != nil { + if len(generatedRisks) > 0 { + model.GeneratedRisksByCategory[rule.Category()] = generatedRisks + } + } else { + fmt.Printf("Failed to generate risks for %q\n", id) } } +} - if _, ok := skippedRules[missingwaf.Category().Id]; ok { - fmt.Println("Skipping risk rule:", missingwaf.Category().Id) - delete(skippedRules, missingwaf.Category().Id) - } else { - model.AddToListOfSupportedTags(missingwaf.SupportedTags()) - risks := missingwaf.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[missingwaf.Category()] = risks - } +func (context *Context) applyRiskGeneration() { + if *context.verbose { + fmt.Println("Applying risk generation") } - if _, ok := skippedRules[wrongcommunicationlinkcontent.Category().Id]; ok { - fmt.Println("Skipping risk rule:", wrongcommunicationlinkcontent.Category().Id) - delete(skippedRules, wrongcommunicationlinkcontent.Category().Id) - } else { - model.AddToListOfSupportedTags(wrongcommunicationlinkcontent.SupportedTags()) - risks := wrongcommunicationlinkcontent.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[wrongcommunicationlinkcontent.Category()] = risks + skippedRules := make(map[string]bool) + if len(*context.skipRiskRules) > 0 { + for _, id := range strings.Split(*context.skipRiskRules, ",") { + skippedRules[id] = true } } - if _, ok := skippedRules[wrongtrustboundarycontent.Category().Id]; ok { - fmt.Println("Skipping risk rule:", wrongtrustboundarycontent.Category().Id) - delete(skippedRules, wrongtrustboundarycontent.Category().Id) - } else { - 
model.AddToListOfSupportedTags(wrongtrustboundarycontent.SupportedTags()) - risks := wrongtrustboundarycontent.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[wrongtrustboundarycontent.Category()] = risks - } - } + context.applyRisk(accidentalsecretleak.Rule(), &skippedRules) + context.applyRisk(codebackdooring.Rule(), &skippedRules) + context.applyRisk(containerbaseimagebackdooring.Rule(), &skippedRules) + context.applyRisk(containerplatformescape.Rule(), &skippedRules) + context.applyRisk(crosssiterequestforgery.Rule(), &skippedRules) + context.applyRisk(crosssitescripting.Rule(), &skippedRules) + context.applyRisk(dosriskyaccessacrosstrustboundary.Rule(), &skippedRules) + context.applyRisk(incompletemodel.Rule(), &skippedRules) + context.applyRisk(ldapinjection.Rule(), &skippedRules) + context.applyRisk(missingauthentication.Rule(), &skippedRules) + context.applyRisk(missingauthenticationsecondfactor.Rule(), &skippedRules) + context.applyRisk(missingbuildinfrastructure.Rule(), &skippedRules) + context.applyRisk(missingcloudhardening.Rule(), &skippedRules) + context.applyRisk(missingfilevalidation.Rule(), &skippedRules) + context.applyRisk(missinghardening.Rule(), &skippedRules) + context.applyRisk(missingidentitypropagation.Rule(), &skippedRules) + context.applyRisk(missingidentityproviderisolation.Rule(), &skippedRules) + context.applyRisk(missingidentitystore.Rule(), &skippedRules) + context.applyRisk(missingnetworksegmentation.Rule(), &skippedRules) + context.applyRisk(missingvault.Rule(), &skippedRules) + context.applyRisk(missingvaultisolation.Rule(), &skippedRules) + context.applyRisk(missingwaf.Rule(), &skippedRules) + context.applyRisk(mixedtargetsonsharedruntime.Rule(), &skippedRules) + context.applyRisk(pathtraversal.Rule(), &skippedRules) + context.applyRisk(pushinsteadofpulldeployment.Rule(), &skippedRules) + context.applyRisk(searchqueryinjection.Rule(), &skippedRules) + context.applyRisk(serversiderequestforgery.Rule(), 
&skippedRules) + context.applyRisk(serviceregistrypoisoning.Rule(), &skippedRules) + context.applyRisk(sqlnosqlinjection.Rule(), &skippedRules) + context.applyRisk(uncheckeddeployment.Rule(), &skippedRules) + context.applyRisk(unencryptedasset.Rule(), &skippedRules) + context.applyRisk(unencryptedcommunication.Rule(), &skippedRules) + context.applyRisk(unguardedaccessfrominternet.Rule(), &skippedRules) + context.applyRisk(unguardeddirectdatastoreaccess.Rule(), &skippedRules) + context.applyRisk(unnecessarycommunicationlink.Rule(), &skippedRules) + context.applyRisk(unnecessarydataasset.Rule(), &skippedRules) + context.applyRisk(unnecessarydatatransfer.Rule(), &skippedRules) + context.applyRisk(unnecessarytechnicalasset.Rule(), &skippedRules) + context.applyRisk(untrusteddeserialization.Rule(), &skippedRules) + context.applyRisk(wrongcommunicationlinkcontent.Rule(), &skippedRules) + context.applyRisk(wrongtrustboundarycontent.Rule(), &skippedRules) + context.applyRisk(xmlexternalentity.Rule(), &skippedRules) // NOW THE CUSTOM RISK RULES (if any) for id, customRule := range context.customRiskRules { - if _, ok := skippedRules[customRule.Category().Id]; ok { + _, ok := skippedRules[customRule.ID] + if ok { if *context.verbose { fmt.Println("Skipping custom risk rule:", id) } @@ -641,13 +248,14 @@ func (context *Context) applyRiskGeneration() { if *context.verbose { fmt.Println("Executing custom risk rule:", id) } - model.AddToListOfSupportedTags(customRule.SupportedTags()) - risks := customRule.GenerateRisks() - if len(risks) > 0 { - model.GeneratedRisksByCategory[customRule.Category()] = risks + model.AddToListOfSupportedTags(customRule.Tags) + customRisks := customRule.GenerateRisks(&model.ParsedModelRoot) + if len(customRisks) > 0 { + model.GeneratedRisksByCategory[customRule.Category] = customRisks } + if *context.verbose { - fmt.Println("Added custom risks:", len(risks)) + fmt.Println("Added custom risks:", len(customRisks)) } } } @@ -664,8 +272,8 @@ func 
(context *Context) applyRiskGeneration() { // save also in map keyed by synthetic risk-id for _, category := range model.SortedRiskCategories() { - risks := model.SortedRisksOfCategory(category) - for _, risk := range risks { + someRisks := model.SortedRisksOfCategory(category) + for _, risk := range someRisks { model.GeneratedRisksBySyntheticId[strings.ToLower(risk.SyntheticId)] = risk } } @@ -1230,7 +838,7 @@ func (context *Context) doIt() { fmt.Println("Writing report pdf") } report.WriteReportPDF(filepath.Join(*context.outputDir, reportFilename), - *context.templateFilename, + filepath.Join(*context.appFolder, *context.templateFilename), filepath.Join(*context.outputDir, dataFlowDiagramFilenamePNG), filepath.Join(*context.outputDir, dataAssetDiagramFilenamePNG), *context.modelFilename, @@ -1258,67 +866,55 @@ func (context *Context) applyRAA() string { if *context.verbose { fmt.Println("Applying RAA calculation:", *context.raaPlugin) } - // determine plugin to load. - // load plugin: open the ".so" file to load the symbols - plug, err := plugin.Open(*context.raaPlugin) - if err != nil { - fmt.Printf("WARNING: plugin %q not applied: %v\n", *context.raaPlugin, err) - return "" - } - // checkErr(err) - // look up a symbol (an exported function or variable): in this case, function CalculateRAA - symCalculateRAA, err := plug.Lookup("CalculateRAA") - if err != nil { - fmt.Printf("WARNING: plugin %q not applied: %v\n", *context.raaPlugin, err) - return "" + + raa := *context.raaPlugin + runner, loadError := new(run.Runner).Load(filepath.Join(*context.binFolder, *context.raaPlugin)) + if loadError != nil { + raa = strings.TrimSuffix(raa, filepath.Ext(raa)) + runner, loadError = new(run.Runner).Load(filepath.Join(*context.binFolder, raa)) + if loadError != nil { + fmt.Printf("WARNING: raa %q not loaded: %v\n", *context.raaPlugin, loadError) + return "" + } } - // checkErr(err) - // use the plugin - raaCalcFunc, ok := symCalculateRAA.(func() string) // 
symCalculateRAA.(func(model.ParsedModel) string) - if !ok { - fmt.Printf("WARNING: invalid plugin %q\n", *context.raaPlugin) + + runError := runner.Run(model.ParsedModelRoot, &model.ParsedModelRoot) + if runError != nil { + fmt.Printf("WARNING: raa %q not applied: %v\n", *context.raaPlugin, runError) return "" } - /* if !ok { - panic(errors.New("RAA plugin has no 'CalculateRAA() string' function")) - } - */ - // call it - return raaCalcFunc() + + return runner.ErrorOutput } func (context *Context) loadCustomRiskRules() { - context.customRiskRules = make(map[string]model.CustomRiskRule) + context.customRiskRules = make(map[string]*risks.CustomRisk) if len(*context.riskRulesPlugins) > 0 { if *context.verbose { fmt.Println("Loading custom risk rules:", *context.riskRulesPlugins) } + for _, pluginFile := range strings.Split(*context.riskRulesPlugins, ",") { if len(pluginFile) > 0 { - // check that the plugin file to load exists - _, err := os.Stat(pluginFile) - if os.IsNotExist(err) { - log.Fatal("Custom risk rule implementation file not found: ", pluginFile) + runner, loadError := new(run.Runner).Load(pluginFile) + if loadError != nil { + log.Fatalf("WARNING: Custom risk rule %q not loaded: %v\n", pluginFile, loadError) } - // load plugin: open the ".so" file to load the symbols - plug, err := plugin.Open(pluginFile) - checkErr(err) - // look up a symbol (an exported function or variable): in this case variable CustomRiskRule - symCustomRiskRule, err := plug.Lookup("CustomRiskRule") - checkErr(err) - // register the risk rule plugin for later use: in this case interface type model.CustomRiskRule (defined above) - symCustomRiskRuleVar, ok := symCustomRiskRule.(model.CustomRiskRule) - if !ok { - panic(errors.New("custom risk rule plugin has no 'CustomRiskRule' variable")) + + risk := new(risks.CustomRisk) + runError := runner.Run(nil, &risk, "-get-info") + if runError != nil { + log.Fatalf("WARNING: Failed to get ID for custom risk rule %q: %v\n", pluginFile, runError) 
} - // simply add to a map (just convenience) where key is the category id and value the rule's execution function - ruleID := symCustomRiskRuleVar.Category().Id - context.customRiskRules[ruleID] = symCustomRiskRuleVar + + risk.Runner = runner + context.customRiskRules[risk.ID] = risk if *context.verbose { - fmt.Println("Custom risk rule loaded:", ruleID) + fmt.Println("Custom risk rule loaded:", risk.ID) } } } + if *context.verbose { fmt.Println("Loaded custom risk rules:", len(context.customRiskRules)) } @@ -1426,12 +1022,12 @@ func (context *Context) execute(ginContext *gin.Context, dryRun bool) (yamlConte yamlContent, err = os.ReadFile(yamlFile) checkErr(err) - err = os.WriteFile(filepath.Join(tmpOutputDir, outputFile), yamlContent, 0400) + err = os.WriteFile(filepath.Join(tmpOutputDir, inputFile), yamlContent, 0400) checkErr(err) if !dryRun { files := []string{ - filepath.Join(tmpOutputDir, outputFile), + filepath.Join(tmpOutputDir, inputFile), filepath.Join(tmpOutputDir, dataFlowDiagramFilenamePNG), filepath.Join(tmpOutputDir, dataAssetDiagramFilenamePNG), filepath.Join(tmpOutputDir, reportFilename), @@ -1462,7 +1058,7 @@ func (context *Context) doItViaRuntimeCall(modelFile string, outputDir string, dpi int) { // Remember to also add the same args to the exec based sub-process calls! 
var cmd *exec.Cmd - args := []string{"-model", modelFile, "-output", outputDir, "-execute-model-macro", *context.executeModelMacro, "-raa-plugin", *context.raaPlugin, "-custom-risk-rules-plugins", *context.riskRulesPlugins, "-skip-risk-rules", *context.skipRiskRules, "-diagram-dpi", strconv.Itoa(dpi)} + args := []string{"-model", modelFile, "-output", outputDir, "-execute-model-macro", *context.executeModelMacro, "-raa-run", *context.raaPlugin, "-custom-risk-rules-plugins", *context.riskRulesPlugins, "-skip-risk-rules", *context.skipRiskRules, "-diagram-dpi", strconv.Itoa(dpi)} if *context.verbose { args = append(args, "-verbose") } @@ -1656,7 +1252,7 @@ func (context *Context) addSupportedTags(input []byte) []byte { // add distinct tags as "tags_available" supportedTags := make(map[string]bool) for _, customRule := range context.customRiskRules { - for _, tag := range customRule.SupportedTags() { + for _, tag := range customRule.Tags { supportedTags[strings.ToLower(tag)] = true } } @@ -1981,14 +1577,14 @@ func (context *Context) analyzeModelOnServerDirectly(ginContext *gin.Context) { context.handleErrorInServiceCall(err, ginContext) return } - err = os.WriteFile(filepath.Join(tmpOutputDir, outputFile), []byte(yamlText), 0400) + err = os.WriteFile(filepath.Join(tmpOutputDir, inputFile), []byte(yamlText), 0400) if err != nil { context.handleErrorInServiceCall(err, ginContext) return } files := []string{ - filepath.Join(tmpOutputDir, outputFile), + filepath.Join(tmpOutputDir, inputFile), filepath.Join(tmpOutputDir, dataFlowDiagramFilenamePNG), filepath.Join(tmpOutputDir, dataAssetDiagramFilenamePNG), filepath.Join(tmpOutputDir, reportFilename), @@ -2200,7 +1796,7 @@ func (context *Context) importModel(ginContext *gin.Context) { func (context *Context) stats(ginContext *gin.Context) { keyCount, modelCount := 0, 0 - keyFolders, err := os.ReadDir(*context.baseFolder) + keyFolders, err := os.ReadDir(filepath.Join(*context.serverFolder, keyDir)) if err != nil { 
log.Println(err) ginContext.JSON(http.StatusInternalServerError, gin.H{ @@ -2211,7 +1807,7 @@ func (context *Context) stats(ginContext *gin.Context) { for _, keyFolder := range keyFolders { if len(keyFolder.Name()) == 128 { // it's a sha512 token hash probably, so count it as token folder for the stats keyCount++ - modelFolders, err := os.ReadDir(filepath.Join(*context.baseFolder, keyFolder.Name())) + modelFolders, err := os.ReadDir(filepath.Join(*context.serverFolder, keyDir, keyFolder.Name())) if err != nil { log.Println(err) ginContext.JSON(http.StatusInternalServerError, gin.H{ @@ -2829,7 +2425,7 @@ func (context *Context) getModel(ginContext *gin.Context) { return } defer func() { _ = os.Remove(tmpResultFile.Name()) }() - ginContext.FileAttachment(tmpResultFile.Name(), outputFile) + ginContext.FileAttachment(tmpResultFile.Name(), inputFile) } } @@ -3155,7 +2751,7 @@ func (context *Context) listModels(ginContext *gin.Context) { // TODO currently } for _, dirEntry := range modelFolders { if dirEntry.IsDir() { - modelStat, err := os.Stat(filepath.Join(folderNameOfKey, dirEntry.Name(), outputFile)) + modelStat, err := os.Stat(filepath.Join(folderNameOfKey, dirEntry.Name(), inputFile)) if err != nil { log.Println(err) ginContext.JSON(http.StatusNotFound, gin.H{ @@ -3248,7 +2844,7 @@ func (context *Context) readModel(ginContext *gin.Context, modelUUID string, key return modelInputResult, yamlText, false } - fileBytes, err := os.ReadFile(filepath.Join(modelFolder, outputFile)) + fileBytes, err := os.ReadFile(filepath.Join(modelFolder, inputFile)) if err != nil { log.Println(err) ginContext.JSON(http.StatusInternalServerError, gin.H{ @@ -3357,7 +2953,7 @@ func (context *Context) writeModelYAML(ginContext *gin.Context, yaml string, key return false } } - f, err := os.Create(filepath.Join(modelFolder, outputFile)) + f, err := os.Create(filepath.Join(modelFolder, inputFile)) if err != nil { log.Println(err) ginContext.JSON(http.StatusInternalServerError, gin.H{ @@ 
-3379,7 +2975,7 @@ func (context *Context) backupModelToHistory(modelFolder string, changeReasonFor return err } } - input, err := os.ReadFile(filepath.Join(modelFolder, outputFile)) + input, err := os.ReadFile(filepath.Join(modelFolder, inputFile)) if err != nil { return err } @@ -3513,7 +3109,7 @@ type keyHeader struct { func (context *Context) folderNameFromKey(key []byte) string { sha512Hash := hashSHA256(key) - return filepath.Join(*context.baseFolder, sha512Hash) + return filepath.Join(*context.serverFolder, keyDir, sha512Hash) } func hashSHA256(key []byte) string { @@ -3539,7 +3135,7 @@ func (context *Context) createKey(ginContext *gin.Context) { }) return } - err = os.Mkdir(context.folderNameFromKey(keyBytesArr), 0700) + err = os.MkdirAll(context.folderNameFromKey(keyBytesArr), 0700) if err != nil { log.Println(err) ginContext.JSON(http.StatusInternalServerError, gin.H{ @@ -3647,10 +3243,13 @@ func (context *Context) deleteKey(ginContext *gin.Context) { } func (context *Context) parseCommandlineArgs() { - context.tempFolder = flag.String("temp-dir", tmpFolder, "temporary folder location") - context.modelFilename = flag.String("model", outputFile, "input model yaml file") + context.tempFolder = flag.String("temp-dir", tempDir, "temporary folder location") + context.binFolder = flag.String("bin-dir", binDir, "binary folder location") + context.appFolder = flag.String("app-dir", appDir, "app folder (default: "+appDir+")") + context.serverFolder = flag.String("server-dir", dataDir, "base folder for server mode (default: "+dataDir+")") + context.modelFilename = flag.String("model", inputFile, "input model yaml file") context.outputDir = flag.String("output", ".", "output directory") - context.raaPlugin = flag.String("raa-plugin", "raa.so", "RAA calculation plugin (.so shared object) file name") + context.raaPlugin = flag.String("raa-run", "raa.so", "RAA calculation run (.so shared object) file name") context.executeModelMacro = flag.String("execute-model-macro", 
"", "Execute model macro (by ID)") context.testParseModel = flag.Bool("test-parse-model", false, "test parse model functionality") context.createExampleModel = flag.Bool("create-example-model", false, "just create an example model named threagile-example-model.yaml in the output directory") @@ -3671,8 +3270,6 @@ func (context *Context) parseCommandlineArgs() { context.riskRulesPlugins = flag.String("custom-risk-rules-plugins", "", "comma-separated list of plugins (.so shared object) file names with custom risk rules to load") context.verbose = flag.Bool("verbose", false, "verbose output") context.ignoreOrphanedRiskTracking = flag.Bool("ignore-orphaned-risk-tracking", false, "ignore orphaned risk tracking (just log them) not matching a concrete risk") - context.appFolder = flag.String("app-folder", appFolder, "app folder (default: "+appFolder+")") - context.baseFolder = flag.String("base-folder", baseFolder, "base folder (default: "+baseFolder+")") version := flag.Bool("version", false, "print version") listTypes := flag.Bool("list-types", false, "print type information (enum values to be used in models)") listRiskRules := flag.Bool("list-risk-rules", false, "print risk rules") @@ -3774,7 +3371,7 @@ func (context *Context) parseCommandlineArgs() { context.printLogo() fmt.Println("The following model macros are available (can be extended via custom model macros):") fmt.Println() - /* TODO finish plugin stuff + /* TODO finish run stuff fmt.Println("Custom model macros:") for id, customModelMacro := range customModelMacros { fmt.Println(id, "-->", customModelMacro.GetMacroDetails().Title) @@ -3802,7 +3399,7 @@ func (context *Context) parseCommandlineArgs() { fmt.Println("------------------") context.loadCustomRiskRules() for id, customRule := range context.customRiskRules { - fmt.Println(id, "-->", customRule.Category().Title, "--> with tags:", customRule.SupportedTags()) + fmt.Println(id, "-->", customRule.Category.Title, "--> with tags:", customRule.Tags) } 
fmt.Println() fmt.Println("--------------------") @@ -4087,7 +3684,7 @@ func (context *Context) printExamples() { "-v \"$(pwd)\":" + filepath.Join(*context.appFolder, "work") + " " + "threagile/threagile " + "-verbose " + - "-model " + filepath.Join(*context.appFolder, "work", outputFile) + " " + + "-model " + filepath.Join(*context.appFolder, "work", inputFile) + " " + "-output " + filepath.Join(*context.appFolder, "work")) fmt.Println() fmt.Println("If you want to run Threagile as a server (REST API) on some port (here 8080): ") @@ -4108,7 +3705,7 @@ func (context *Context) printExamples() { fmt.Println(" docker run --rm -it threagile/threagile -list-model-macros") fmt.Println() fmt.Println("If you want to execute a certain model macro on the model yaml file (here the macro add-build-pipeline): ") - fmt.Println(" docker run --rm -it -v \"$(pwd)\":" + filepath.Join(*context.appFolder, "work") + " threagile/threagile -model " + filepath.Join(*context.appFolder, "work", outputFile) + " -output " + filepath.Join(*context.appFolder, "work") + " -execute-model-macro add-build-pipeline") + fmt.Println(" docker run --rm -it -v \"$(pwd)\":" + filepath.Join(*context.appFolder, "work") + " threagile/threagile -model " + filepath.Join(*context.appFolder, "work", inputFile) + " -output " + filepath.Join(*context.appFolder, "work") + " -execute-model-macro add-build-pipeline") } func printTypes(title string, value interface{}) { @@ -4156,9 +3753,7 @@ func (context *Context) goTestParseModel() error { return fmt.Errorf("unable to parse model yaml %q: %v", flatModelFile, flatLoadError) } - sort.Slice(flatModel.TagsAvailable, func(i, j int) bool { - return flatModel.TagsAvailable[i] < flatModel.TagsAvailable[j] - }) + sort.Strings(flatModel.TagsAvailable) flatModel.TagsAvailable = []string{strings.Join(flatModel.TagsAvailable, ", ")} flatData, flatMarshalError := json.MarshalIndent(flatModel, "", " ") @@ -4173,9 +3768,7 @@ func (context *Context) goTestParseModel() error { return 
fmt.Errorf("unable to parse model yaml %q: %v", splitModelFile, splitLoadError) } - sort.Slice(splitModel.TagsAvailable, func(i, j int) bool { - return splitModel.TagsAvailable[i] < splitModel.TagsAvailable[j] - }) + sort.Strings(splitModel.TagsAvailable) splitModel.TagsAvailable = []string{strings.Join(splitModel.TagsAvailable, ", ")} splitModel.Includes = flatModel.Includes @@ -4202,8 +3795,8 @@ func (context *Context) parseModel() { log.Fatal("Unable to parse model yaml: ", loadError) } - data, _ := json.MarshalIndent(context.modelInput, "", " ") - fmt.Printf("%v\n", string(data)) + // data, _ := json.MarshalIndent(context.modelInput, "", " ") + // fmt.Printf("%v\n", string(data)) var businessCriticality model.Criticality switch context.modelInput.BusinessCriticality { @@ -5251,6 +4844,10 @@ func (context *Context) applyWildcardRiskTrackingEvaluation() { fmt.Println("Executing risk tracking evaluation") } for syntheticRiskIdPattern, riskTracking := range context.deferredRiskTrackingDueToWildcardMatching { + if *context.verbose { + fmt.Println("Applying wildcard risk tracking for risk id: " + syntheticRiskIdPattern) + } + foundSome := false var matchingRiskIdExpression = regexp.MustCompile(strings.ReplaceAll(regexp.QuoteMeta(syntheticRiskIdPattern), `\*`, `[^@]+`)) for syntheticRiskId := range model.GeneratedRisksBySyntheticId { @@ -5266,9 +4863,10 @@ func (context *Context) applyWildcardRiskTrackingEvaluation() { } } } + if !foundSome { if *context.ignoreOrphanedRiskTracking { - fmt.Println("Wildcard risk tracking does not match any risk id: " + syntheticRiskIdPattern) + fmt.Println("WARNING: Wildcard risk tracking does not match any risk id: " + syntheticRiskIdPattern) } else { panic(errors.New("wildcard risk tracking does not match any risk id: " + syntheticRiskIdPattern)) } @@ -5745,8 +5343,8 @@ func makeTechAssetNode(technicalAsset model.TechnicalAsset, simplified bool) str if simplified { color := colors.RgbHexColorOutOfScope() if !technicalAsset.OutOfScope 
{ - risks := technicalAsset.GeneratedRisks() - switch model.HighestSeverityStillAtRisk(risks) { + generatedRisks := technicalAsset.GeneratedRisks() + switch model.HighestSeverityStillAtRisk(generatedRisks) { case model.CriticalSeverity: color = colors.RgbHexColorCriticalRisk() case model.HighSeverity: @@ -5760,7 +5358,7 @@ func makeTechAssetNode(technicalAsset model.TechnicalAsset, simplified bool) str default: color = "#444444" // since black is too dark here as fill color } - if len(model.ReduceToOnlyStillAtRisk(risks)) == 0 { + if len(model.ReduceToOnlyStillAtRisk(generatedRisks)) == 0 { color = "#444444" // since black is too dark here as fill color } } @@ -5860,7 +5458,7 @@ func (context *Context) renderDataFlowDiagramGraphvizImage(dotFile *os.File, tar } // exec - cmd := exec.Command(graphvizDataFlowDiagramConversionCall, tmpFileDOT.Name(), tmpFilePNG.Name()) + cmd := exec.Command(filepath.Join(*context.binFolder, graphvizDataFlowDiagramConversionCall), tmpFileDOT.Name(), tmpFilePNG.Name()) cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr err = cmd.Run() @@ -5908,7 +5506,7 @@ func (context *Context) renderDataAssetDiagramGraphvizImage(dotFile *os.File, ta } // exec - cmd := exec.Command(graphvizDataAssetDiagramConversionCall, tmpFileDOT.Name(), tmpFilePNG.Name()) + cmd := exec.Command(filepath.Join(*context.binFolder, graphvizDataAssetDiagramConversionCall), tmpFileDOT.Name(), tmpFilePNG.Name()) cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr err = cmd.Run() diff --git a/model/types.go b/model/types.go index b432ad6e..322098e5 100644 --- a/model/types.go +++ b/model/types.go @@ -55,10 +55,10 @@ func AddToListOfSupportedTags(tags []string) { } } -type CustomRiskRule interface { - Category() RiskCategory - SupportedTags() []string - GenerateRisks() []Risk +type CustomRiskRule struct { + Category func() RiskCategory + SupportedTags func() []string + GenerateRisks func(input *ModelInput) []Risk } // === To be used by model macros etc. 
======================= @@ -3348,6 +3348,23 @@ func (what RiskFunction) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } +func (what *RiskFunction) UnmarshalJSON(value []byte) error { + text := "" + unmarshalError := json.Unmarshal(value, &text) + if unmarshalError != nil { + return unmarshalError + } + + for n, v := range RiskFunctionTypeDescription { + if strings.ToLower(v.Name) == strings.ToLower(text) { + *what = RiskFunction(n) + return nil + } + } + + return fmt.Errorf("unknown value %q for risk function", text) +} + type STRIDE int const ( @@ -3396,6 +3413,23 @@ func (what STRIDE) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } +func (what *STRIDE) UnmarshalJSON(value []byte) error { + text := "" + unmarshalError := json.Unmarshal(value, &text) + if unmarshalError != nil { + return unmarshalError + } + + for n, v := range StrideTypeDescription { + if strings.ToLower(v.Name) == strings.ToLower(text) { + *what = STRIDE(n) + return nil + } + } + + return fmt.Errorf("unknown value %q for STRIDE category", text) +} + type MacroDetails struct { ID, Title, Description string } @@ -3643,6 +3677,23 @@ func (what RiskStatus) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } +func (what *RiskStatus) UnmarshalJSON(value []byte) error { + text := "" + unmarshalError := json.Unmarshal(value, &text) + if unmarshalError != nil { + return unmarshalError + } + + for n, v := range RiskStatusTypeDescription { + if strings.ToLower(v.Name) == strings.ToLower(text) { + *what = RiskStatus(n) + return nil + } + } + + return fmt.Errorf("unknown value %q for risk status", text) +} + func (what RiskStatus) IsStillAtRisk() bool { return what == Unchecked || what == InDiscussion || what == Accepted || what == InProgress } diff --git a/raa/dummy/dummy.go b/raa/dummy/dummy.go index 1793be00..e0623995 100644 --- a/raa/dummy/dummy.go +++ b/raa/dummy/dummy.go @@ -1,24 +1,52 @@ package main import ( + "bufio" + "encoding/json" 
"fmt" "github.com/threagile/threagile/model" + "io" "math/rand" + "os" ) // JUST A DUMMY TO HAVE AN ALTERNATIVE PLUGIN TO USE/TEST -var ( - _ = CalculateRAA -) +func main() { + reader := bufio.NewReader(os.Stdin) + + inData, outError := io.ReadAll(reader) + if outError != nil { + _, _ = fmt.Fprintf(os.Stderr, "failed to read model data from stdin\n") + os.Exit(-2) + } + + var input model.ParsedModel + inError := json.Unmarshal(inData, &input) + if inError != nil { + _, _ = fmt.Fprintf(os.Stderr, "failed to parse model: %v\n", inError) + os.Exit(-2) + } + + text := CalculateRAA(&input) + outData, marshalError := json.Marshal(input) + if marshalError != nil { + _, _ = fmt.Fprintf(os.Stderr, "failed to print model: %v\n", marshalError) + os.Exit(-2) + } + + _, _ = fmt.Fprint(os.Stdout, outData) + _, _ = fmt.Fprint(os.Stderr, text) + os.Exit(0) +} -// used from plugin caller: +// used from run caller: -func CalculateRAA() string { - for techAssetID, techAsset := range model.ParsedModelRoot.TechnicalAssets { +func CalculateRAA(input *model.ParsedModel) string { + for techAssetID, techAsset := range input.TechnicalAssets { techAsset.RAA = float64(rand.Intn(100)) fmt.Println("Using dummy RAA random calculation (just to test the usage of other shared object files as plugins)") - model.ParsedModelRoot.TechnicalAssets[techAssetID] = techAsset + input.TechnicalAssets[techAssetID] = techAsset } // return intro text (for reporting etc., can be short summary-like) return "Just some dummy algorithm implementation for demo purposes of pluggability..." 
diff --git a/raa/raa/raa.go b/raa/raa/raa.go index a6babc82..398f7ae0 100644 --- a/raa/raa/raa.go +++ b/raa/raa/raa.go @@ -1,22 +1,52 @@ package main import ( + "bufio" + "encoding/json" + "fmt" "github.com/threagile/threagile/model" + "io" + "os" "sort" ) -var ( - _ = CalculateRAA -) +// used from run caller: + +func main() { + reader := bufio.NewReader(os.Stdin) + inData, outError := io.ReadAll(reader) + if outError != nil { + _, _ = fmt.Fprintf(os.Stderr, "failed to read model data from stdin\n") + os.Exit(-2) + } + + var input model.ParsedModel + inError := json.Unmarshal(inData, &input) + if inError != nil { + _, _ = fmt.Fprintf(os.Stderr, "failed to parse model: %v\n", inError) + _, _ = fmt.Fprint(os.Stderr, string(inData)) + _, _ = fmt.Fprintf(os.Stderr, "\n") + os.Exit(-2) + } + + text := CalculateRAA(&input) + outData, marshalError := json.Marshal(input) + if marshalError != nil { + _, _ = fmt.Fprintf(os.Stderr, "failed to print model: %v\n", marshalError) + os.Exit(-2) + } -// used from plugin caller: + _, _ = fmt.Fprint(os.Stdout, string(outData)) + _, _ = fmt.Fprint(os.Stderr, text) + os.Exit(0) +} -func CalculateRAA() string { - for techAssetID, techAsset := range model.ParsedModelRoot.TechnicalAssets { - aa := calculateAttackerAttractiveness(techAsset) - aa += calculatePivotingNeighbourEffectAdjustment(techAsset) - techAsset.RAA = calculateRelativeAttackerAttractiveness(aa) - model.ParsedModelRoot.TechnicalAssets[techAssetID] = techAsset +func CalculateRAA(input *model.ParsedModel) string { + for techAssetID, techAsset := range input.TechnicalAssets { + aa := calculateAttackerAttractiveness(input, techAsset) + aa += calculatePivotingNeighbourEffectAdjustment(input, techAsset) + techAsset.RAA = calculateRelativeAttackerAttractiveness(input, aa) + input.TechnicalAssets[techAssetID] = techAsset } // return intro text (for reporting etc., can be short summary-like) return "For each technical asset the \"Relative Attacker Attractiveness\" (RAA) value was 
calculated " + @@ -31,24 +61,24 @@ func CalculateRAA() string { var attackerAttractivenessMinimum, attackerAttractivenessMaximum, spread float64 = 0, 0, 0 // set the concrete value in relation to the minimum and maximum of all -func calculateRelativeAttackerAttractiveness(attractiveness float64) float64 { +func calculateRelativeAttackerAttractiveness(input *model.ParsedModel, attractiveness float64) float64 { if attackerAttractivenessMinimum == 0 || attackerAttractivenessMaximum == 0 { attackerAttractivenessMinimum, attackerAttractivenessMaximum = 9223372036854775807, -9223372036854775808 // determine (only one time required) the min/max of all // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: keys := make([]string, 0) - for k := range model.ParsedModelRoot.TechnicalAssets { + for k := range input.TechnicalAssets { keys = append(keys, k) } sort.Strings(keys) for _, key := range keys { - techAsset := model.ParsedModelRoot.TechnicalAssets[key] - if calculateAttackerAttractiveness(techAsset) > attackerAttractivenessMaximum { - attackerAttractivenessMaximum = calculateAttackerAttractiveness(techAsset) + techAsset := input.TechnicalAssets[key] + if calculateAttackerAttractiveness(input, techAsset) > attackerAttractivenessMaximum { + attackerAttractivenessMaximum = calculateAttackerAttractiveness(input, techAsset) } - if calculateAttackerAttractiveness(techAsset) < attackerAttractivenessMinimum { - attackerAttractivenessMinimum = calculateAttackerAttractiveness(techAsset) + if calculateAttackerAttractiveness(input, techAsset) < attackerAttractivenessMinimum { + attackerAttractivenessMinimum = calculateAttackerAttractiveness(input, techAsset) } } if !(attackerAttractivenessMinimum < attackerAttractivenessMaximum) { @@ -66,15 +96,15 @@ func calculateRelativeAttackerAttractiveness(attractiveness float64) float64 { } // increase 
the RAA (relative attacker attractiveness) by one third (1/3) of the delta to the highest outgoing neighbour (if positive delta) -func calculatePivotingNeighbourEffectAdjustment(techAsset model.TechnicalAsset) float64 { +func calculatePivotingNeighbourEffectAdjustment(input *model.ParsedModel, techAsset model.TechnicalAsset) float64 { if techAsset.OutOfScope { return 0 } adjustment := 0.0 for _, commLink := range techAsset.CommunicationLinks { - outgoingNeighbour := model.ParsedModelRoot.TechnicalAssets[commLink.TargetId] + outgoingNeighbour := input.TechnicalAssets[commLink.TargetId] //if outgoingNeighbour.getTrustBoundary() == techAsset.getTrustBoundary() { // same trust boundary - delta := calculateRelativeAttackerAttractiveness(calculateAttackerAttractiveness(outgoingNeighbour)) - calculateRelativeAttackerAttractiveness(calculateAttackerAttractiveness(techAsset)) + delta := calculateRelativeAttackerAttractiveness(input, calculateAttackerAttractiveness(input, outgoingNeighbour)) - calculateRelativeAttackerAttractiveness(input, calculateAttackerAttractiveness(input, techAsset)) if delta > 0 { potentialIncrease := delta / 3 //fmt.Println("Positive delta from", techAsset.Id, "to", outgoingNeighbour.Id, "is", delta, "yields to pivoting neighbour effect of an increase of", potentialIncrease) @@ -89,7 +119,7 @@ func calculatePivotingNeighbourEffectAdjustment(techAsset model.TechnicalAsset) // The sum of all CIAs of the asset itself (fibonacci scale) plus the sum of the comm-links' transferred CIAs // Multiplied by the quantity values of the data asset for C and I (not A) -func calculateAttackerAttractiveness(techAsset model.TechnicalAsset) float64 { +func calculateAttackerAttractiveness(input *model.ParsedModel, techAsset model.TechnicalAsset) float64 { if techAsset.OutOfScope { return 0 } @@ -98,26 +128,26 @@ func calculateAttackerAttractiveness(techAsset model.TechnicalAsset) float64 { score += techAsset.Integrity.AttackerAttractivenessForAsset() score += 
techAsset.Availability.AttackerAttractivenessForAsset() for _, dataAssetProcessed := range techAsset.DataAssetsProcessed { - dataAsset := model.ParsedModelRoot.DataAssets[dataAssetProcessed] + dataAsset := input.DataAssets[dataAssetProcessed] score += dataAsset.Confidentiality.AttackerAttractivenessForProcessedOrStoredData() * dataAsset.Quantity.QuantityFactor() score += dataAsset.Integrity.AttackerAttractivenessForProcessedOrStoredData() * dataAsset.Quantity.QuantityFactor() score += dataAsset.Availability.AttackerAttractivenessForProcessedOrStoredData() } for _, dataAssetStored := range techAsset.DataAssetsStored { - dataAsset := model.ParsedModelRoot.DataAssets[dataAssetStored] + dataAsset := input.DataAssets[dataAssetStored] score += dataAsset.Confidentiality.AttackerAttractivenessForProcessedOrStoredData() * dataAsset.Quantity.QuantityFactor() score += dataAsset.Integrity.AttackerAttractivenessForProcessedOrStoredData() * dataAsset.Quantity.QuantityFactor() score += dataAsset.Availability.AttackerAttractivenessForProcessedOrStoredData() } for _, dataFlow := range techAsset.CommunicationLinks { for _, dataAssetSent := range dataFlow.DataAssetsSent { - dataAsset := model.ParsedModelRoot.DataAssets[dataAssetSent] + dataAsset := input.DataAssets[dataAssetSent] score += dataAsset.Confidentiality.AttackerAttractivenessForInOutTransferredData() * dataAsset.Quantity.QuantityFactor() score += dataAsset.Integrity.AttackerAttractivenessForInOutTransferredData() * dataAsset.Quantity.QuantityFactor() score += dataAsset.Availability.AttackerAttractivenessForInOutTransferredData() } for _, dataAssetReceived := range dataFlow.DataAssetsReceived { - dataAsset := model.ParsedModelRoot.DataAssets[dataAssetReceived] + dataAsset := input.DataAssets[dataAssetReceived] score += dataAsset.Confidentiality.AttackerAttractivenessForInOutTransferredData() * dataAsset.Quantity.QuantityFactor() score += dataAsset.Integrity.AttackerAttractivenessForInOutTransferredData() * 
dataAsset.Quantity.QuantityFactor() score += dataAsset.Availability.AttackerAttractivenessForInOutTransferredData() diff --git a/report/report.go b/report/report.go index 060ff442..4762053f 100644 --- a/report/report.go +++ b/report/report.go @@ -7,6 +7,7 @@ import ( "github.com/jung-kurt/gofpdf/contrib/gofpdi" "github.com/threagile/threagile/colors" "github.com/threagile/threagile/model" + "github.com/threagile/threagile/risks" "github.com/threagile/threagile/risks/built-in/accidental-secret-leak" "github.com/threagile/threagile/risks/built-in/code-backdooring" "github.com/threagile/threagile/risks/built-in/container-baseimage-backdooring" @@ -118,7 +119,7 @@ func WriteReportPDF(reportFilename string, buildTimestamp string, modelHash string, introTextRAA string, - customRiskRules map[string]model.CustomRiskRule, + customRiskRules map[string]*risks.CustomRisk, tempFolder string) { initReport() createPdfAndInitMetadata() @@ -271,17 +272,17 @@ func createTableOfContents() { pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) - risks := "Risks" + risksStr := "Risks" catStr := "Categories" count, catCount := model.TotalRiskCount(), len(model.GeneratedRisksByCategory) if count == 1 { - risks = "Risk" + risksStr = "Risk" } if catCount == 1 { catStr = "Category" } y += 6 - pdf.Text(11, y, " "+"Impact Analysis of "+strconv.Itoa(count)+" Initial "+risks+" in "+strconv.Itoa(catCount)+" "+catStr) + pdf.Text(11, y, " "+"Impact Analysis of "+strconv.Itoa(count)+" Initial "+risksStr+" in "+strconv.Itoa(catCount)+" "+catStr) pdf.Text(175, y, "{impact-analysis-initial-risks}") pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) @@ -293,16 +294,16 @@ func createTableOfContents() { pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) y += 6 - risks = "Risks" + risksStr = "Risks" catStr = "Categories" count, catCount = len(model.FilteredByStillAtRisk()), len(model.CategoriesOfOnlyRisksStillAtRisk(model.GeneratedRisksByCategory)) 
if count == 1 { - risks = "Risk" + risksStr = "Risk" } if catCount == 1 { catStr = "Category" } - pdf.Text(11, y, " "+"Impact Analysis of "+strconv.Itoa(count)+" Remaining "+risks+" in "+strconv.Itoa(catCount)+" "+catStr) + pdf.Text(11, y, " "+"Impact Analysis of "+strconv.Itoa(count)+" Remaining "+risksStr+" in "+strconv.Itoa(catCount)+" "+catStr) pdf.Text(175, y, "{impact-analysis-remaining-risks}") pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) @@ -387,16 +388,16 @@ func createTableOfContents() { y += 6 modelFailures := model.FlattenRiskSlice(model.FilterByModelFailures(model.GeneratedRisksByCategory)) - risks = "Risks" + risksStr = "Risks" count = len(modelFailures) if count == 1 { - risks = "Risk" + risksStr = "Risk" } countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(modelFailures)) if countStillAtRisk > 0 { colors.ColorModelFailure(pdf) } - pdf.Text(11, y, " "+"Potential Model Failures: "+strconv.Itoa(countStillAtRisk)+" / "+strconv.Itoa(count)+" "+risks) + pdf.Text(11, y, " "+"Potential Model Failures: "+strconv.Itoa(countStillAtRisk)+" / "+strconv.Itoa(count)+" "+risksStr) pdf.Text(175, y, "{model-failures}") pdfColorBlack() pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) @@ -436,8 +437,8 @@ func createTableOfContents() { pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) for _, category := range model.SortedRiskCategories() { - risks := model.SortedRisksOfCategory(category) - switch model.HighestSeverityStillAtRisk(risks) { + newRisksStr := model.SortedRisksOfCategory(category) + switch model.HighestSeverityStillAtRisk(newRisksStr) { case model.CriticalSeverity: colors.ColorCriticalRisk(pdf) case model.HighSeverity: @@ -451,7 +452,7 @@ func createTableOfContents() { default: pdfColorBlack() } - if len(model.ReduceToOnlyStillAtRisk(risks)) == 0 { + if len(model.ReduceToOnlyStillAtRisk(newRisksStr)) == 0 { pdfColorBlack() } y += 6 @@ -459,9 +460,9 @@ func createTableOfContents() { 
pageBreakInLists() y = 40 } - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(risks)) - suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(risks)) + " Risk" - if len(risks) != 1 { + countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(newRisksStr)) + suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(newRisksStr)) + " Risk" + if len(newRisksStr) != 1 { suffix += "s" } pdf.Text(11, y, " "+uni(category.Title)+": "+suffix) @@ -491,22 +492,22 @@ func createTableOfContents() { pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) for _, technicalAsset := range model.SortedTechnicalAssetsByRiskSeverityAndTitle() { - risks := technicalAsset.GeneratedRisks() + newRisksStr := technicalAsset.GeneratedRisks() y += 6 if y > 275 { pageBreakInLists() y = 40 } - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(risks)) - suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(risks)) + " Risk" - if len(risks) != 1 { + countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(newRisksStr)) + suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(newRisksStr)) + " Risk" + if len(newRisksStr) != 1 { suffix += "s" } if technicalAsset.OutOfScope { pdfColorOutOfScope() suffix = "out-of-scope" } else { - switch model.HighestSeverityStillAtRisk(risks) { + switch model.HighestSeverityStillAtRisk(newRisksStr) { case model.CriticalSeverity: colors.ColorCriticalRisk(pdf) case model.HighSeverity: @@ -520,7 +521,7 @@ func createTableOfContents() { default: pdfColorBlack() } - if len(model.ReduceToOnlyStillAtRisk(risks)) == 0 { + if len(model.ReduceToOnlyStillAtRisk(newRisksStr)) == 0 { pdfColorBlack() } } @@ -556,10 +557,10 @@ func createTableOfContents() { pageBreakInLists() y = 40 } - risks := dataAsset.IdentifiedDataBreachProbabilityRisks() - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(risks)) - suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(risks)) + " Risk" - if 
len(risks) != 1 { + newRisksStr := dataAsset.IdentifiedDataBreachProbabilityRisks() + countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(newRisksStr)) + suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(newRisksStr)) + " Risk" + if len(newRisksStr) != 1 { suffix += "s" } switch dataAsset.IdentifiedDataBreachProbabilityStillAtRisk() { @@ -1412,16 +1413,16 @@ func createOutOfScopeAssets() { func createModelFailures() { pdf.SetTextColor(0, 0, 0) modelFailures := model.FlattenRiskSlice(model.FilterByModelFailures(model.GeneratedRisksByCategory)) - risks := "Risks" + risksStr := "Risks" count := len(modelFailures) if count == 1 { - risks = "Risk" + risksStr = "Risk" } countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(modelFailures)) if countStillAtRisk > 0 { colors.ColorModelFailure(pdf) } - chapTitle := "Potential Model Failures: " + strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(count) + " " + risks + chapTitle := "Potential Model Failures: " + strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(count) + " " + risksStr addHeadline(chapTitle, false) defineLinkTarget("{model-failures}") currentChapterTitleBreadcrumb = chapTitle @@ -1489,8 +1490,8 @@ func createRAA(introTextRAA string) { } else { strBuilder.WriteString("

") } - risks := technicalAsset.GeneratedRisks() - switch model.HighestSeverityStillAtRisk(risks) { + newRisksStr := technicalAsset.GeneratedRisks() + switch model.HighestSeverityStillAtRisk(newRisksStr) { case model.HighSeverity: colors.ColorHighRisk(pdf) case model.MediumSeverity: @@ -1500,7 +1501,7 @@ func createRAA(introTextRAA string) { default: pdfColorBlack() } - if len(model.ReduceToOnlyStillAtRisk(risks)) == 0 { + if len(model.ReduceToOnlyStillAtRisk(newRisksStr)) == 0 { pdfColorBlack() } @@ -1611,11 +1612,11 @@ func addCategories(riskCategories []model.RiskCategory, severity model.RiskSever var strBuilder strings.Builder sort.Sort(model.ByRiskCategoryTitleSort(riskCategories)) for _, riskCategory := range riskCategories { - risks := model.GeneratedRisksByCategory[riskCategory] + risksStr := model.GeneratedRisksByCategory[riskCategory] if !initialRisks { - risks = model.ReduceToOnlyStillAtRisk(risks) + risksStr = model.ReduceToOnlyStillAtRisk(risksStr) } - if len(risks) == 0 { + if len(risksStr) == 0 { continue } if pdf.GetY() > 250 { @@ -1645,7 +1646,7 @@ func addCategories(riskCategories []model.RiskCategory, severity model.RiskSever pdfColorBlack() prefix = "" } - switch model.HighestSeverityStillAtRisk(risks) { + switch model.HighestSeverityStillAtRisk(risksStr) { case model.CriticalSeverity: colors.ColorCriticalRisk(pdf) case model.HighSeverity: @@ -1657,7 +1658,7 @@ func addCategories(riskCategories []model.RiskCategory, severity model.RiskSever case model.LowSeverity: colors.ColorLowRisk(pdf) } - if len(model.ReduceToOnlyStillAtRisk(risks)) == 0 { + if len(model.ReduceToOnlyStillAtRisk(risksStr)) == 0 { pdfColorBlack() } html.Write(5, strBuilder.String()) @@ -1667,12 +1668,12 @@ func addCategories(riskCategories []model.RiskCategory, severity model.RiskSever strBuilder.WriteString("") strBuilder.WriteString(riskCategory.Title) strBuilder.WriteString(": ") - count := len(risks) + count := len(risksStr) initialStr := "Initial" if !initialRisks { 
initialStr = "Remaining" } - remainingRisks := model.ReduceToOnlyStillAtRisk(risks) + remainingRisks := model.ReduceToOnlyStillAtRisk(risksStr) suffix := strconv.Itoa(count) + " " + initialStr + " Risk" if bothInitialAndRemainingRisks { suffix = strconv.Itoa(len(remainingRisks)) + " / " + strconv.Itoa(count) + " Risk" @@ -1682,7 +1683,7 @@ func addCategories(riskCategories []model.RiskCategory, severity model.RiskSever } suffix += " - Exploitation likelihood is " if initialRisks { - suffix += model.HighestExploitationLikelihood(risks).Title() + " with " + model.HighestExploitationImpact(risks).Title() + " impact." + suffix += model.HighestExploitationLikelihood(risksStr).Title() + " with " + model.HighestExploitationImpact(risksStr).Title() + " impact." } else { suffix += model.HighestExploitationLikelihood(remainingRisks).Title() + " with " + model.HighestExploitationImpact(remainingRisks).Title() + " impact." } @@ -2263,10 +2264,10 @@ func createRiskCategories() { text.Reset() currentChapterTitleBreadcrumb = title for _, category := range model.SortedRiskCategories() { - risks := model.SortedRisksOfCategory(category) + risksStr := model.SortedRisksOfCategory(category) // category color - switch model.HighestSeverityStillAtRisk(risks) { + switch model.HighestSeverityStillAtRisk(risksStr) { case model.CriticalSeverity: colors.ColorCriticalRisk(pdf) case model.HighSeverity: @@ -2280,14 +2281,14 @@ func createRiskCategories() { default: pdfColorBlack() } - if len(model.ReduceToOnlyStillAtRisk(risks)) == 0 { + if len(model.ReduceToOnlyStillAtRisk(risksStr)) == 0 { pdfColorBlack() } // category title - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(risks)) - suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(risks)) + " Risk" - if len(risks) != 1 { + countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(risksStr)) + suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(risksStr)) + " Risk" + if len(risksStr) != 1 { suffix += "s" } 
title := category.Title + ": " + suffix @@ -2354,8 +2355,8 @@ func createRiskCategories() { pageBreak() pdf.SetY(36) text.WriteString("Risk Findings

") - times := strconv.Itoa(len(risks)) + " time" - if len(risks) > 1 { + times := strconv.Itoa(len(risksStr)) + " time" + if len(risksStr) > 1 { times += "s" } text.WriteString("The risk " + category.Title + " was found " + times + " in the analyzed architecture to be " + @@ -2369,7 +2370,7 @@ func createRiskCategories() { pdf.SetFont("Helvetica", "", fontSizeBody) oldLeft, _, _, _ := pdf.GetMargins() headlineCriticalWritten, headlineHighWritten, headlineElevatedWritten, headlineMediumWritten, headlineLowWritten := false, false, false, false, false - for _, risk := range risks { + for _, risk := range risksStr { text.WriteString("
") html.Write(5, text.String()) text.Reset() @@ -2531,17 +2532,17 @@ func createTechnicalAssets() { text.Reset() currentChapterTitleBreadcrumb = title for _, technicalAsset := range model.SortedTechnicalAssetsByRiskSeverityAndTitle() { - risks := technicalAsset.GeneratedRisks() - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(risks)) - suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(risks)) + " Risk" - if len(risks) != 1 { + risksStr := technicalAsset.GeneratedRisks() + countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(risksStr)) + suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(risksStr)) + " Risk" + if len(risksStr) != 1 { suffix += "s" } if technicalAsset.OutOfScope { pdfColorOutOfScope() suffix = "out-of-scope" } else { - switch model.HighestSeverityStillAtRisk(risks) { + switch model.HighestSeverityStillAtRisk(risksStr) { case model.CriticalSeverity: colors.ColorCriticalRisk(pdf) case model.HighSeverity: @@ -2555,7 +2556,7 @@ func createTechnicalAssets() { default: pdfColorBlack() } - if len(model.ReduceToOnlyStillAtRisk(risks)) == 0 { + if len(model.ReduceToOnlyStillAtRisk(risksStr)) == 0 { pdfColorBlack() } } @@ -2589,7 +2590,7 @@ func createTechnicalAssets() { pdf.CellFormat(190, 6, "Identified Risks of Asset", "0", 0, "", false, 0, "") pdfColorGray() oldLeft, _, _, _ := pdf.GetMargins() - if len(risks) > 0 { + if len(risksStr) > 0 { pdf.SetFont("Helvetica", "", fontSizeSmall) html.Write(5, "Risk finding paragraphs are clickable and link to the corresponding chapter.") pdf.SetFont("Helvetica", "", fontSizeBody) @@ -2599,11 +2600,11 @@ func createTechnicalAssets() { pdf.Ln(-1) pdfColorGray() pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(185, 6, strconv.Itoa(len(risks))+" risks in total were identified", "0", 0, "", false, 0, "") + pdf.CellFormat(185, 6, strconv.Itoa(len(risksStr))+" risksStr in total were identified", "0", 0, "", false, 0, "") */ headlineCriticalWritten, 
headlineHighWritten, headlineElevatedWritten, headlineMediumWritten, headlineLowWritten := false, false, false, false, false pdf.Ln(-1) - for _, risk := range risks { + for _, risk := range risksStr { text.WriteString("
") html.Write(5, text.String()) text.Reset() @@ -2680,7 +2681,7 @@ func createTechnicalAssets() { pdfColorGray() pdf.SetFont("Helvetica", "", fontSizeBody) pdf.SetLeftMargin(15) - text := "No risks were identified." + text := "No risksStr were identified." if technicalAsset.OutOfScope { text = "Asset was defined as out-of-scope." } @@ -3362,10 +3363,10 @@ func createDataAssets() { if !dataAsset.IsDataBreachPotentialStillAtRisk() { pdfColorBlack() } - risks := dataAsset.IdentifiedDataBreachProbabilityRisks() - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(risks)) - suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(risks)) + " Risk" - if len(risks) != 1 { + risksStr := dataAsset.IdentifiedDataBreachProbabilityRisks() + countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(risksStr)) + suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(risksStr)) + " Risk" + if len(risksStr) != 1 { suffix += "s" } title := uni(dataAsset.Title) + ": " + suffix @@ -3641,12 +3642,12 @@ func createDataAssets() { default: pdfColorBlack() } - risks := techAssetResponsible.GeneratedRisks() - if len(model.ReduceToOnlyStillAtRisk(risks)) == 0 { + risksStr := techAssetResponsible.GeneratedRisks() + if len(model.ReduceToOnlyStillAtRisk(risksStr)) == 0 { pdfColorBlack() } - riskStr := "risks" - if len(risks) == 1 { + riskStr := "risksStr" + if len(risksStr) == 1 { riskStr = "risk" } pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") @@ -3705,7 +3706,7 @@ func createDataAssets() { pdf.MultiCell(145, 6, "This data asset has no data breach potential.", "0", "0", false) } else { pdfColorBlack() - riskRemainingStr := "risks" + riskRemainingStr := "risksStr" if countStillAtRisk == 1 { riskRemainingStr = "risk" } @@ -3941,7 +3942,7 @@ func createSharedRuntimes() { } } -func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTimestamp string, modelHash string, customRiskRules map[string]model.CustomRiskRule) { +func 
createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTimestamp string, modelHash string, customRiskRules map[string]*risks.CustomRisk) { pdf.SetTextColor(0, 0, 0) title := "Risk Rules Checked by Threagile" addHeadline(title, false) @@ -3969,7 +3970,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim html.Write(5, strBuilder.String()) strBuilder.Reset() - // TODO use the new plugin system to discover risk rules instead of hard-coding them here: + // TODO use the new run system to discover risk rules instead of hard-coding them here: skippedRules := strings.Split(skipRiskRules, ",") skipped := "" pdf.Ln(-1) @@ -3982,7 +3983,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+customRule.Category().Title, "0", 0, "", false, 0, "") + pdf.CellFormat(190, 3, skipped+customRule.Category.Title, "0", 0, "", false, 0, "") pdf.Ln(-1) pdf.SetFont("Helvetica", "", fontSizeSmall) pdf.CellFormat(190, 6, id, "0", 0, "", false, 0, "") @@ -3995,22 +3996,22 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") pdfColorBlack() - pdf.MultiCell(160, 6, customRule.Category().STRIDE.Title(), "0", "0", false) + pdf.MultiCell(160, 6, customRule.Category.STRIDE.Title(), "0", "0", false) pdfColorGray() pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(customRule.Category().Description), "0", "0", false) + pdf.MultiCell(160, 6, firstParagraph(customRule.Category.Description), "0", "0", false) pdfColorGray() pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") pdfColorBlack() - pdf.MultiCell(160, 6, customRule.Category().DetectionLogic, 
"0", "0", false) + pdf.MultiCell(160, 6, customRule.Category.DetectionLogic, "0", "0", false) pdfColorGray() pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") pdfColorBlack() - pdf.MultiCell(160, 6, customRule.Category().RiskAssessment, "0", "0", false) + pdf.MultiCell(160, 6, customRule.Category.RiskAssessment, "0", "0", false) } for _, key := range model.SortedKeysOfIndividualRiskCategories() { diff --git a/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go b/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go index 3f4f9b4c..50807472 100644 --- a/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go +++ b/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "accidental-secret-leak", @@ -34,7 +42,7 @@ func SupportedTags() []string { return []string{"git", "nexus"} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { techAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/code-backdooring/code-backdooring-rule.go b/risks/built-in/code-backdooring/code-backdooring-rule.go index 22d8093d..d89f7745 100644 --- a/risks/built-in/code-backdooring/code-backdooring-rule.go +++ b/risks/built-in/code-backdooring/code-backdooring-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return 
model.RiskCategory{ Id: "code-backdooring", @@ -40,7 +48,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go b/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go index e8d92d37..7981fd10 100644 --- a/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go +++ b/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "container-baseimage-backdooring", @@ -35,7 +43,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/container-platform-escape/container-platform-escape-rule.go b/risks/built-in/container-platform-escape/container-platform-escape-rule.go index 520b3d1d..343a079f 100644 --- a/risks/built-in/container-platform-escape/container-platform-escape-rule.go +++ b/risks/built-in/container-platform-escape/container-platform-escape-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func 
Category() model.RiskCategory { return model.RiskCategory{ Id: "container-platform-escape", @@ -40,7 +48,7 @@ func SupportedTags() []string { return []string{"docker", "kubernetes", "openshift"} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go b/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go index 04ed7882..790354dd 100644 --- a/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go +++ b/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "cross-site-request-forgery", @@ -36,7 +44,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go b/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go index a6da6781..723dfdc6 100644 --- a/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go +++ b/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func 
Category() model.RiskCategory { return model.RiskCategory{ Id: "cross-site-scripting", @@ -34,7 +42,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go b/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go index 54627414..caa1ceb0 100644 --- a/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go +++ b/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "dos-risky-access-across-trust-boundary", @@ -38,7 +46,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/incomplete-model/incomplete-model-rule.go b/risks/built-in/incomplete-model/incomplete-model-rule.go index 55399410..83cda2a2 100644 --- a/risks/built-in/incomplete-model/incomplete-model-rule.go +++ b/risks/built-in/incomplete-model/incomplete-model-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + 
GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "incomplete-model", @@ -30,7 +38,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/ldap-injection/ldap-injection-rule.go b/risks/built-in/ldap-injection/ldap-injection-rule.go index e76d2706..e6c991b3 100644 --- a/risks/built-in/ldap-injection/ldap-injection-rule.go +++ b/risks/built-in/ldap-injection/ldap-injection-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "ldap-injection", @@ -29,7 +37,7 @@ func Category() model.RiskCategory { } } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { incomingFlows := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] diff --git a/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go b/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go index db2014c7..b5553c0b 100644 --- a/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go +++ b/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go @@ -5,6 +5,14 @@ import ( "github.com/threagile/threagile/risks/built-in/missing-authentication" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + 
Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-authentication-second-factor", @@ -34,7 +42,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/missing-authentication/missing-authentication-rule.go b/risks/built-in/missing-authentication/missing-authentication-rule.go index 82934af2..076d1ad7 100644 --- a/risks/built-in/missing-authentication/missing-authentication-rule.go +++ b/risks/built-in/missing-authentication/missing-authentication-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-authentication", @@ -33,7 +41,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go b/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go index 8941dc5b..e02a1110 100644 --- a/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go +++ b/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + 
Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-build-infrastructure", @@ -35,7 +43,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) hasCustomDevelopedParts, hasBuildPipeline, hasSourcecodeRepo, hasDevOpsClient := false, false, false, false impact := model.LowImpact diff --git a/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go b/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go index 77539d81..07f1e800 100644 --- a/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go +++ b/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go @@ -5,6 +5,14 @@ import ( "sort" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-cloud-hardening", @@ -49,7 +57,7 @@ func SupportedTags() []string { return res } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) sharedRuntimesWithUnspecificCloudRisks := make(map[string]bool) diff --git a/risks/built-in/missing-file-validation/missing-file-validation-rule.go b/risks/built-in/missing-file-validation/missing-file-validation-rule.go index bc0b5d67..cbf5604f 100644 --- a/risks/built-in/missing-file-validation/missing-file-validation-rule.go +++ b/risks/built-in/missing-file-validation/missing-file-validation-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func 
Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-file-validation", @@ -34,7 +42,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/missing-hardening/missing-hardening-rule.go b/risks/built-in/missing-hardening/missing-hardening-rule.go index d9061cfc..38bdec74 100644 --- a/risks/built-in/missing-hardening/missing-hardening-rule.go +++ b/risks/built-in/missing-hardening/missing-hardening-rule.go @@ -8,6 +8,14 @@ import ( const raaLimit = 55 const raaLimitReduced = 40 +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-hardening", @@ -36,7 +44,7 @@ func SupportedTags() []string { return []string{"tomcat"} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go b/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go index 01e2ba38..609137aa 100644 --- a/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go +++ b/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return 
model.RiskCategory{ Id: "missing-identity-propagation", @@ -39,7 +47,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go b/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go index 287cd751..cfa3f7f0 100644 --- a/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go +++ b/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-identity-provider-isolation", @@ -35,7 +43,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { if !technicalAsset.OutOfScope && technicalAsset.Technology.IsIdentityRelated() { diff --git a/risks/built-in/missing-identity-store/missing-identity-store-rule.go b/risks/built-in/missing-identity-store/missing-identity-store-rule.go index c985a39e..11631698 100644 --- a/risks/built-in/missing-identity-store/missing-identity-store-rule.go +++ b/risks/built-in/missing-identity-store/missing-identity-store-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: 
SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-identity-store", @@ -33,7 +41,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { if !technicalAsset.OutOfScope && diff --git a/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go b/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go index 2b29af70..438c8d45 100644 --- a/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go +++ b/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go @@ -7,6 +7,14 @@ import ( const raaLimit = 50 +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-network-segmentation", @@ -41,7 +49,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: diff --git a/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go b/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go index 0c8919b4..fe0b64d7 100644 --- a/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go +++ b/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return 
model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-vault-isolation", @@ -35,7 +43,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { if !technicalAsset.OutOfScope && technicalAsset.Technology == model.Vault { diff --git a/risks/built-in/missing-vault/missing-vault-rule.go b/risks/built-in/missing-vault/missing-vault-rule.go index ac3e4590..444e0039 100644 --- a/risks/built-in/missing-vault/missing-vault-rule.go +++ b/risks/built-in/missing-vault/missing-vault-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-vault", @@ -34,7 +42,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) hasVault := false var mostRelevantAsset model.TechnicalAsset diff --git a/risks/built-in/missing-waf/missing-waf-rule.go b/risks/built-in/missing-waf/missing-waf-rule.go index 684cc9af..ed2e2406 100644 --- a/risks/built-in/missing-waf/missing-waf-rule.go +++ b/risks/built-in/missing-waf/missing-waf-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "missing-waf", @@ -33,7 +41,7 @@ func SupportedTags() []string 
{ return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { if !technicalAsset.OutOfScope && diff --git a/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go b/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go index 435a6ab9..e2b52ce2 100644 --- a/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go +++ b/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go @@ -5,6 +5,14 @@ import ( "sort" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "mixed-targets-on-shared-runtime", @@ -37,7 +45,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: keys := make([]string, 0) diff --git a/risks/built-in/path-traversal/path-traversal-rule.go b/risks/built-in/path-traversal/path-traversal-rule.go index 1258c039..0b6b292d 100644 --- a/risks/built-in/path-traversal/path-traversal-rule.go +++ b/risks/built-in/path-traversal/path-traversal-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "path-traversal", @@ -31,7 +39,7 @@ func Category() model.RiskCategory { } } -func GenerateRisks() []model.Risk { +func GenerateRisks(input 
*model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go b/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go index d965ce18..9bc4116a 100644 --- a/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go +++ b/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "push-instead-of-pull-deployment", @@ -35,7 +43,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) impact := model.LowImpact for _, buildPipeline := range model.ParsedModelRoot.TechnicalAssets { diff --git a/risks/built-in/search-query-injection/search-query-injection-rule.go b/risks/built-in/search-query-injection/search-query-injection-rule.go index 1f250b28..90726f7a 100644 --- a/risks/built-in/search-query-injection/search-query-injection-rule.go +++ b/risks/built-in/search-query-injection/search-query-injection-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "search-query-injection", @@ -32,7 +40,7 @@ func Category() model.RiskCategory { } } -func GenerateRisks() []model.Risk { +func GenerateRisks(input 
*model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go b/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go index a1844ae0..076d332e 100644 --- a/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go +++ b/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "server-side-request-forgery", @@ -35,7 +43,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go b/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go index 5cee3a3e..e7316833 100644 --- a/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go +++ b/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "service-registry-poisoning", @@ -32,7 +40,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func 
GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go b/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go index 331f54e8..582e6973 100644 --- a/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go +++ b/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "sql-nosql-injection", @@ -32,7 +40,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go b/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go index 80cac869..d2fd5c87 100644 --- a/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go +++ b/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "unchecked-deployment", @@ -35,7 +43,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, technicalAsset 
:= range model.ParsedModelRoot.TechnicalAssets { if technicalAsset.Technology.IsDevelopmentRelevant() { diff --git a/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go b/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go index 67c42992..4a350dcc 100644 --- a/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go +++ b/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "unencrypted-asset", @@ -37,7 +45,7 @@ func SupportedTags() []string { // check for technical assets that should be encrypted due to their confidentiality -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go b/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go index ed5a7eb3..538f0023 100644 --- a/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go +++ b/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "unencrypted-communication", @@ -34,7 +42,7 @@ func SupportedTags() []string { // check for communication links that should be encrypted due to their confidentiality and/or integrity -func GenerateRisks() []model.Risk { +func GenerateRisks(input 
*model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { for _, dataFlow := range technicalAsset.CommunicationLinks { diff --git a/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go b/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go index a6e00bec..b5ed9b3d 100644 --- a/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go +++ b/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go @@ -5,6 +5,14 @@ import ( "sort" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "unguarded-access-from-internet", @@ -43,7 +51,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go b/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go index e845f955..30f48204 100644 --- a/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go +++ b/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "unguarded-direct-datastore-access", @@ -36,7 +44,7 @@ func 
SupportedTags() []string { // check for data stores that should not be accessed directly across trust boundaries -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go b/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go index 80843a32..1c882e2e 100644 --- a/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go +++ b/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "unnecessary-communication-link", @@ -30,7 +38,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go b/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go index 2af7c618..d384c844 100644 --- a/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go +++ b/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go @@ -5,6 +5,14 @@ import ( "sort" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: 
"unnecessary-data-asset", @@ -33,7 +41,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) // first create them in memory - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: diff --git a/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go b/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go index f33c58a0..3baa68c2 100644 --- a/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go +++ b/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go @@ -5,6 +5,14 @@ import ( "sort" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "unnecessary-data-transfer", @@ -38,7 +46,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go b/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go index 012117e8..4c1a228d 100644 --- a/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go +++ b/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: 
"unnecessary-technical-asset", @@ -31,7 +39,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go b/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go index f4e5c8d6..427527c7 100644 --- a/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go +++ b/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "untrusted-deserialization", @@ -36,7 +44,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go b/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go index 3464e490..c07d8517 100644 --- a/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go +++ b/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() 
model.RiskCategory { return model.RiskCategory{ Id: "wrong-communication-link-content", @@ -31,7 +39,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, techAsset := range model.ParsedModelRoot.TechnicalAssets { for _, commLink := range techAsset.CommunicationLinks { diff --git a/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go b/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go index 95801845..66ce0daf 100644 --- a/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go +++ b/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() model.RiskCategory { return model.RiskCategory{ Id: "wrong-trust-boundary-content", @@ -30,7 +38,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, trustBoundary := range model.ParsedModelRoot.TrustBoundaries { if trustBoundary.Type == model.NetworkPolicyNamespaceIsolation { diff --git a/risks/built-in/xml-external-entity/xml-external-entity-rule.go b/risks/built-in/xml-external-entity/xml-external-entity-rule.go index e6e4778a..81997b16 100644 --- a/risks/built-in/xml-external-entity/xml-external-entity-rule.go +++ b/risks/built-in/xml-external-entity/xml-external-entity-rule.go @@ -4,6 +4,14 @@ import ( "github.com/threagile/threagile/model" ) +func Rule() model.CustomRiskRule { + return model.CustomRiskRule{ + Category: Category, + SupportedTags: SupportedTags, + GenerateRisks: GenerateRisks, + } +} + func Category() 
model.RiskCategory { return model.RiskCategory{ Id: "xml-external-entity", @@ -34,7 +42,7 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks() []model.Risk { +func GenerateRisks(input *model.ModelInput) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] diff --git a/risks/custom/demo/demo-rule.go b/risks/custom/demo/demo-rule.go index 2a2daacf..4bbb6e25 100644 --- a/risks/custom/demo/demo-rule.go +++ b/risks/custom/demo/demo-rule.go @@ -1,7 +1,14 @@ package main import ( + "bufio" + "encoding/json" + "flag" + "fmt" "github.com/threagile/threagile/model" + "github.com/threagile/threagile/risks" + "io" + "os" ) type customRiskRule string @@ -10,6 +17,59 @@ type customRiskRule string var CustomRiskRule customRiskRule +func main() { + getInfo := flag.Bool("get-info", false, "get rule info") + generateRisks := flag.Bool("generate-risks", false, "generate risks") + flag.Parse() + + if *getInfo { + rule := new(customRiskRule) + category := rule.Category() + riskData, marshalError := json.Marshal(risks.CustomRisk{ + ID: category.Id, + Category: category, + Tags: rule.SupportedTags(), + }) + + if marshalError != nil { + _, _ = fmt.Fprintf(os.Stderr, "failed to print risk data: %v", marshalError) + os.Exit(-2) + } + + _, _ = fmt.Fprint(os.Stdout, riskData) + os.Exit(0) + } + + if *generateRisks { + reader := bufio.NewReader(os.Stdin) + inData, outError := io.ReadAll(reader) + if outError != nil { + _, _ = fmt.Fprintf(os.Stderr, "failed to read model data from stdin\n") + os.Exit(-2) + } + + var input model.ParsedModel + inError := json.Unmarshal(inData, &input) + if inError != nil { + _, _ = fmt.Fprintf(os.Stderr, "failed to parse model: %v\n", inError) + os.Exit(-2) + } + + generatedRisks := new(customRiskRule).GenerateRisks(&input) + outData, marshalError := json.Marshal(generatedRisks) + if marshalError != nil { + _, _ = 
fmt.Fprintf(os.Stderr, "failed to print generated risks: %v\n", marshalError) + os.Exit(-2) + } + + _, _ = fmt.Fprint(os.Stdout, outData) + os.Exit(0) + } + + flag.Usage() + os.Exit(-2) +} + func (r customRiskRule) Category() model.RiskCategory { return model.RiskCategory{ Id: "demo", @@ -35,12 +95,12 @@ func (r customRiskRule) SupportedTags() []string { return []string{"demo tag"} } -func (r customRiskRule) GenerateRisks() []model.Risk { - risks := make([]model.Risk, 0) - for _, techAsset := range model.ParsedModelRoot.TechnicalAssets { - risks = append(risks, createRisk(techAsset)) +func (r customRiskRule) GenerateRisks(input *model.ParsedModel) []model.Risk { + generatedRisks := make([]model.Risk, 0) + for _, techAsset := range input.TechnicalAssets { + generatedRisks = append(generatedRisks, createRisk(techAsset)) } - return risks + return generatedRisks } func createRisk(technicalAsset model.TechnicalAsset) model.Risk { diff --git a/risks/risk.go b/risks/risk.go new file mode 100644 index 00000000..f9fcf1a6 --- /dev/null +++ b/risks/risk.go @@ -0,0 +1,34 @@ +package risks + +import ( + "github.com/threagile/threagile/model" + "github.com/threagile/threagile/run" + "log" +) + +type BuiltInRisk struct { + Category func() model.RiskCategory + SupportedTags func() []string + GenerateRisks func(m *model.ModelInput) []model.Risk +} + +type CustomRisk struct { + ID string + Category model.RiskCategory + Tags []string + Runner *run.Runner +} + +func (r *CustomRisk) GenerateRisks(m *model.ParsedModel) []model.Risk { + if r.Runner == nil { + return nil + } + + risks := make([]model.Risk, 0) + runError := r.Runner.Run(m, &risks, "-generate-risks") + if runError != nil { + log.Fatalf("Failed to generate risks for custom risk rule %q: %v\n", r.Runner.Filename, runError) + } + + return risks +} diff --git a/run/runner.go b/run/runner.go new file mode 100644 index 00000000..3d624b07 --- /dev/null +++ b/run/runner.go @@ -0,0 +1,105 @@ +package run + +import ( + 
"encoding/json" + "fmt" + "io" + "os" + "os/exec" +) + +type Runner struct { + Filename string + Parameters []string + In any + Out any + ErrorOutput string +} + +func (p *Runner) Load(filename string) (*Runner, error) { + *p = Runner{ + Filename: filename, + } + + fileInfo, statError := os.Stat(filename) + if statError != nil { + return p, statError + } + + if !fileInfo.Mode().IsRegular() { + return p, fmt.Errorf("run %q is not a regular file", filename) + } + + return p, nil +} + +func (p *Runner) Run(in any, out any, parameters ...string) error { + *p = Runner{ + Filename: p.Filename, + Parameters: parameters, + In: in, + Out: out, + } + + plugin := exec.Command(p.Filename, p.Parameters...) + stdin, stdinError := plugin.StdinPipe() + if stdinError != nil { + return stdinError + } + defer func() { _ = stdin.Close() }() + + stdout, stdoutError := plugin.StdoutPipe() + if stdoutError != nil { + return stdoutError + } + defer func() { _ = stdout.Close() }() + + stderr, stderrError := plugin.StderrPipe() + if stderrError != nil { + return stderrError + } + defer func() { _ = stderr.Close() }() + + startError := plugin.Start() + if startError != nil { + return startError + } + + inData, inError := json.MarshalIndent(p.In, "", " ") + if inError != nil { + return inError + } + + _, writeError := stdin.Write(inData) + if writeError != nil { + return writeError + } + + inCloseError := stdin.Close() + if inCloseError != nil { + return inCloseError + } + + errData, errError := io.ReadAll(stderr) + if errError != nil { + return errError + } + p.ErrorOutput = string(errData) + + outData, outError := io.ReadAll(stdout) + if outError != nil { + return outError + } + + waitError := plugin.Wait() + if waitError != nil { + return fmt.Errorf("%v: %v", waitError, p.ErrorOutput) + } + + unmarshalError := json.Unmarshal(outData, &p.Out) + if unmarshalError != nil { + return unmarshalError + } + + return nil +} From c7f4ebaa9980cb8fe98b67acb2752cbf4a091cba Mon Sep 17 00:00:00 2001 
From: Joerg Reichelt Date: Mon, 18 Dec 2023 13:48:25 -0800 Subject: [PATCH 08/68] modify builtin risks to use model as input parameter --- main.go | 2 +- model/types.go | 2 +- .../accidental-secret-leak-rule.go | 4 +- .../code-backdooring/code-backdooring-rule.go | 14 +++--- .../container-baseimage-backdooring-rule.go | 4 +- .../container-platform-escape-rule.go | 10 ++-- .../cross-site-request-forgery-rule.go | 10 ++-- .../cross-site-scripting-rule.go | 4 +- ...risky-access-across-trust-boundary-rule.go | 14 +++--- .../incomplete-model/incomplete-model-rule.go | 4 +- .../ldap-injection/ldap-injection-rule.go | 12 ++--- ...ssing-authentication-second-factor-rule.go | 12 ++--- .../missing-authentication-rule.go | 14 +++--- .../missing-build-infrastructure-rule.go | 4 +- .../missing-cloud-hardening-rule.go | 46 +++++++++---------- .../missing-file-validation-rule.go | 4 +- .../missing-hardening-rule.go | 4 +- .../missing-identity-propagation-rule.go | 14 +++--- ...issing-identity-provider-isolation-rule.go | 8 ++-- .../missing-identity-store-rule.go | 8 ++-- .../missing-network-segmentation-rule.go | 8 ++-- .../missing-vault-isolation-rule.go | 8 ++-- .../missing-vault/missing-vault-rule.go | 4 +- .../built-in/missing-waf/missing-waf-rule.go | 6 +-- .../mixed-targets-on-shared-runtime-rule.go | 20 ++++---- .../path-traversal/path-traversal-rule.go | 12 ++--- .../push-instead-of-pull-deployment-rule.go | 6 +-- .../search-query-injection-rule.go | 12 ++--- .../server-side-request-forgery-rule.go | 14 +++--- .../service-registry-poisoning-rule.go | 10 ++-- .../sql-nosql-injection-rule.go | 12 ++--- .../unchecked-deployment-rule.go | 12 ++--- .../unencrypted-asset-rule.go | 4 +- .../unencrypted-communication-rule.go | 24 +++++----- .../unguarded-access-from-internet-rule.go | 10 ++-- .../unguarded-direct-datastore-access-rule.go | 8 ++-- .../unnecessary-communication-link-rule.go | 6 +-- .../unnecessary-data-asset-rule.go | 12 ++--- .../unnecessary-data-transfer-rule.go 
| 22 ++++----- .../unnecessary-technical-asset-rule.go | 4 +- .../untrusted-deserialization-rule.go | 4 +- .../wrong-communication-link-content-rule.go | 6 +-- .../wrong-trust-boundary-content.go | 6 +-- .../xml-external-entity-rule.go | 4 +- risks/risk.go | 2 +- 45 files changed, 215 insertions(+), 215 deletions(-) diff --git a/main.go b/main.go index 0c7df67a..039e2b82 100644 --- a/main.go +++ b/main.go @@ -170,7 +170,7 @@ func (context *Context) applyRisk(rule model.CustomRiskRule, skippedRules *map[s delete(*skippedRules, rule.Category().Id) } else { model.AddToListOfSupportedTags(rule.SupportedTags()) - generatedRisks := rule.GenerateRisks(&context.modelInput) + generatedRisks := rule.GenerateRisks(&model.ParsedModelRoot) if generatedRisks != nil { if len(generatedRisks) > 0 { model.GeneratedRisksByCategory[rule.Category()] = generatedRisks diff --git a/model/types.go b/model/types.go index 322098e5..98bd9fc7 100644 --- a/model/types.go +++ b/model/types.go @@ -58,7 +58,7 @@ func AddToListOfSupportedTags(tags []string) { type CustomRiskRule struct { Category func() RiskCategory SupportedTags func() []string - GenerateRisks func(input *ModelInput) []Risk + GenerateRisks func(input *ParsedModel) []Risk } // === To be used by model macros etc. 
======================= diff --git a/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go b/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go index 50807472..fd1945fe 100644 --- a/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go +++ b/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go @@ -42,10 +42,10 @@ func SupportedTags() []string { return []string{"git", "nexus"} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - techAsset := model.ParsedModelRoot.TechnicalAssets[id] + techAsset := input.TechnicalAssets[id] if !techAsset.OutOfScope && (techAsset.Technology == model.SourcecodeRepository || techAsset.Technology == model.ArtifactRegistry) { var risk model.Risk diff --git a/risks/built-in/code-backdooring/code-backdooring-rule.go b/risks/built-in/code-backdooring/code-backdooring-rule.go index d89f7745..72419c91 100644 --- a/risks/built-in/code-backdooring/code-backdooring-rule.go +++ b/risks/built-in/code-backdooring/code-backdooring-rule.go @@ -48,13 +48,13 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope && technicalAsset.Technology.IsDevelopmentRelevant() { if technicalAsset.Internet { - risks = append(risks, createRisk(technicalAsset, true)) + risks = append(risks, createRisk(input, technicalAsset, true)) continue } @@ -62,9 +62,9 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { //riskByLinkAdded := false for _, callerLink := range 
model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] { - caller := model.ParsedModelRoot.TechnicalAssets[callerLink.SourceId] + caller := input.TechnicalAssets[callerLink.SourceId] if (!callerLink.VPN && caller.Internet) || caller.OutOfScope { - risks = append(risks, createRisk(technicalAsset, true)) + risks = append(risks, createRisk(input, technicalAsset, true)) //riskByLinkAdded = true break } @@ -74,7 +74,7 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { return risks } -func createRisk(technicalAsset model.TechnicalAsset, elevatedRisk bool) model.Risk { +func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, elevatedRisk bool) model.Risk { title := "Code Backdooring risk at " + technicalAsset.Title + "" impact := model.LowImpact if technicalAsset.Technology != model.CodeInspectionPlatform { @@ -95,7 +95,7 @@ func createRisk(technicalAsset model.TechnicalAsset, elevatedRisk bool) model.Ri if codeDeploymentTargetCommLink.Usage == model.DevOps { for _, dataAssetID := range codeDeploymentTargetCommLink.DataAssetsSent { // it appears to be code when elevated integrity rating of sent data asset - if model.ParsedModelRoot.DataAssets[dataAssetID].Integrity >= model.Important { + if input.DataAssets[dataAssetID].Integrity >= model.Important { // here we've got a deployment target which has its data assets at risk via deployment of backdoored code uniqueDataBreachTechnicalAssetIDs[codeDeploymentTargetCommLink.TargetId] = true break diff --git a/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go b/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go index 7981fd10..52f36d3b 100644 --- a/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go +++ b/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go @@ -43,10 +43,10 @@ func SupportedTags() []string { return []string{} } -func 
GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope && technicalAsset.Machine == model.Container { risks = append(risks, createRisk(technicalAsset)) } diff --git a/risks/built-in/container-platform-escape/container-platform-escape-rule.go b/risks/built-in/container-platform-escape/container-platform-escape-rule.go index 343a079f..5daaed1e 100644 --- a/risks/built-in/container-platform-escape/container-platform-escape-rule.go +++ b/risks/built-in/container-platform-escape/container-platform-escape-rule.go @@ -48,18 +48,18 @@ func SupportedTags() []string { return []string{"docker", "kubernetes", "openshift"} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope && technicalAsset.Technology == model.ContainerPlatform { - risks = append(risks, createRisk(technicalAsset)) + risks = append(risks, createRisk(input, technicalAsset)) } } return risks } -func createRisk(technicalAsset model.TechnicalAsset) model.Risk { +func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset) model.Risk { title := "Container Platform Escape risk at " + technicalAsset.Title + "" impact := model.MediumImpact if technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || @@ -69,7 +69,7 @@ func createRisk(technicalAsset model.TechnicalAsset) model.Risk { } // data breach at all container assets dataBreachTechnicalAssetIDs := make([]string, 0) - for id, techAsset := range 
model.ParsedModelRoot.TechnicalAssets { + for id, techAsset := range input.TechnicalAssets { if techAsset.Machine == model.Container { dataBreachTechnicalAssetIDs = append(dataBreachTechnicalAssetIDs, id) } diff --git a/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go b/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go index 790354dd..f0f2c007 100644 --- a/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go +++ b/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go @@ -44,10 +44,10 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope || !technicalAsset.Technology.IsWebApplication() { continue } @@ -58,15 +58,15 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { if incomingFlow.Usage == model.DevOps { likelihood = model.Likely } - risks = append(risks, createRisk(technicalAsset, incomingFlow, likelihood)) + risks = append(risks, createRisk(input, technicalAsset, incomingFlow, likelihood)) } } } return risks } -func createRisk(technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood model.RiskExploitationLikelihood) model.Risk { - sourceAsset := model.ParsedModelRoot.TechnicalAssets[incomingFlow.SourceId] +func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood model.RiskExploitationLikelihood) model.Risk { + sourceAsset := input.TechnicalAssets[incomingFlow.SourceId] title := "Cross-Site Request Forgery (CSRF) risk at " + technicalAsset.Title + " via " + incomingFlow.Title + " from " + sourceAsset.Title + "" impact 
:= model.LowImpact if incomingFlow.HighestIntegrity() == model.MissionCritical { diff --git a/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go b/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go index 723dfdc6..c058122c 100644 --- a/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go +++ b/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go @@ -42,10 +42,10 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope || !technicalAsset.Technology.IsWebApplication() { // TODO: also mobile clients or rich-clients as long as they use web-view... continue } diff --git a/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go b/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go index caa1ceb0..2d350ce6 100644 --- a/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go +++ b/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go @@ -46,22 +46,22 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope && technicalAsset.Technology != model.LoadBalancer && technicalAsset.Availability >= model.Critical { for _, incomingAccess := range 
model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] { - sourceAsset := model.ParsedModelRoot.TechnicalAssets[incomingAccess.SourceId] + sourceAsset := input.TechnicalAssets[incomingAccess.SourceId] if sourceAsset.Technology.IsTrafficForwarding() { // Now try to walk a call chain up (1 hop only) to find a caller's caller used by human callersCommLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[sourceAsset.Id] for _, callersCommLink := range callersCommLinks { - risks = checkRisk(technicalAsset, callersCommLink, sourceAsset.Title, risks) + risks = checkRisk(input, technicalAsset, callersCommLink, sourceAsset.Title, risks) } } else { - risks = checkRisk(technicalAsset, incomingAccess, "", risks) + risks = checkRisk(input, technicalAsset, incomingAccess, "", risks) } } } @@ -69,13 +69,13 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { return risks } -func checkRisk(technicalAsset model.TechnicalAsset, incomingAccess model.CommunicationLink, hopBetween string, risks []model.Risk) []model.Risk { +func checkRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingAccess model.CommunicationLink, hopBetween string, risks []model.Risk) []model.Risk { if incomingAccess.IsAcrossTrustBoundaryNetworkOnly() && !incomingAccess.Protocol.IsProcessLocal() && incomingAccess.Usage != model.DevOps { highRisk := technicalAsset.Availability == model.MissionCritical && !incomingAccess.VPN && !incomingAccess.IpFiltered && !technicalAsset.Redundant risks = append(risks, createRisk(technicalAsset, incomingAccess, hopBetween, - model.ParsedModelRoot.TechnicalAssets[incomingAccess.SourceId], highRisk)) + input.TechnicalAssets[incomingAccess.SourceId], highRisk)) } return risks } diff --git a/risks/built-in/incomplete-model/incomplete-model-rule.go b/risks/built-in/incomplete-model/incomplete-model-rule.go index 83cda2a2..18e2621a 100644 --- a/risks/built-in/incomplete-model/incomplete-model-rule.go +++ 
b/risks/built-in/incomplete-model/incomplete-model-rule.go @@ -38,10 +38,10 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope { if technicalAsset.Technology == model.UnknownTechnology { risks = append(risks, createRiskTechAsset(technicalAsset)) diff --git a/risks/built-in/ldap-injection/ldap-injection-rule.go b/risks/built-in/ldap-injection/ldap-injection-rule.go index e6c991b3..1deabfbc 100644 --- a/risks/built-in/ldap-injection/ldap-injection-rule.go +++ b/risks/built-in/ldap-injection/ldap-injection-rule.go @@ -37,12 +37,12 @@ func Category() model.RiskCategory { } } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { + for _, technicalAsset := range input.TechnicalAssets { incomingFlows := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, incomingFlow := range incomingFlows { - if model.ParsedModelRoot.TechnicalAssets[incomingFlow.SourceId].OutOfScope { + if input.TechnicalAssets[incomingFlow.SourceId].OutOfScope { continue } if incomingFlow.Protocol == model.LDAP || incomingFlow.Protocol == model.LDAPS { @@ -50,7 +50,7 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { if incomingFlow.Usage == model.DevOps { likelihood = model.Unlikely } - risks = append(risks, createRisk(technicalAsset, incomingFlow, likelihood)) + risks = append(risks, createRisk(input, technicalAsset, incomingFlow, likelihood)) } } } @@ -61,8 +61,8 @@ func SupportedTags() []string { return []string{} } -func createRisk(technicalAsset 
model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood model.RiskExploitationLikelihood) model.Risk { - caller := model.ParsedModelRoot.TechnicalAssets[incomingFlow.SourceId] +func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood model.RiskExploitationLikelihood) model.Risk { + caller := input.TechnicalAssets[incomingFlow.SourceId] title := "LDAP-Injection risk at " + caller.Title + " against LDAP server " + technicalAsset.Title + "" + " via " + incomingFlow.Title + "" impact := model.MediumImpact diff --git a/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go b/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go index b5553c0b..aa1530a4 100644 --- a/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go +++ b/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go @@ -42,10 +42,10 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope || technicalAsset.Technology.IsTrafficForwarding() || technicalAsset.Technology.IsUnprotectedCommunicationsTolerated() { @@ -58,7 +58,7 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { // check each incoming data flow commLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, commLink := range commLinks { - caller := model.ParsedModelRoot.TechnicalAssets[commLink.SourceId] + caller := input.TechnicalAssets[commLink.SourceId] if caller.Technology.IsUnprotectedCommunicationsTolerated() || 
caller.Type == model.Datastore { continue } @@ -66,13 +66,13 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { moreRisky := commLink.HighestConfidentiality() >= model.Confidential || commLink.HighestIntegrity() >= model.Critical if moreRisky && commLink.Authentication != model.TwoFactor { - risks = append(risks, missing_authentication.CreateRisk(technicalAsset, commLink, commLink, "", model.MediumImpact, model.Unlikely, true, Category())) + risks = append(risks, missing_authentication.CreateRisk(input, technicalAsset, commLink, commLink, "", model.MediumImpact, model.Unlikely, true, Category())) } } else if caller.Technology.IsTrafficForwarding() { // Now try to walk a call chain up (1 hop only) to find a caller's caller used by human callersCommLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[caller.Id] for _, callersCommLink := range callersCommLinks { - callersCaller := model.ParsedModelRoot.TechnicalAssets[callersCommLink.SourceId] + callersCaller := input.TechnicalAssets[callersCommLink.SourceId] if callersCaller.Technology.IsUnprotectedCommunicationsTolerated() || callersCaller.Type == model.Datastore { continue } @@ -80,7 +80,7 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { moreRisky := callersCommLink.HighestConfidentiality() >= model.Confidential || callersCommLink.HighestIntegrity() >= model.Critical if moreRisky && callersCommLink.Authentication != model.TwoFactor { - risks = append(risks, missing_authentication.CreateRisk(technicalAsset, commLink, callersCommLink, caller.Title, model.MediumImpact, model.Unlikely, true, Category())) + risks = append(risks, missing_authentication.CreateRisk(input, technicalAsset, commLink, callersCommLink, caller.Title, model.MediumImpact, model.Unlikely, true, Category())) } } } diff --git a/risks/built-in/missing-authentication/missing-authentication-rule.go b/risks/built-in/missing-authentication/missing-authentication-rule.go index 076d1ad7..be1070a1 100644 --- 
a/risks/built-in/missing-authentication/missing-authentication-rule.go +++ b/risks/built-in/missing-authentication/missing-authentication-rule.go @@ -41,10 +41,10 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope || technicalAsset.Technology == model.LoadBalancer || technicalAsset.Technology == model.ReverseProxy || technicalAsset.Technology == model.ServiceRegistry || technicalAsset.Technology == model.WAF || technicalAsset.Technology == model.IDS || technicalAsset.Technology == model.IPS { continue @@ -56,7 +56,7 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { // check each incoming data flow commLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, commLink := range commLinks { - caller := model.ParsedModelRoot.TechnicalAssets[commLink.SourceId] + caller := input.TechnicalAssets[commLink.SourceId] if caller.Technology.IsUnprotectedCommunicationsTolerated() || caller.Type == model.Datastore { continue } @@ -71,7 +71,7 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { impact = model.LowImpact } if commLink.Authentication == model.NoneAuthentication && !commLink.Protocol.IsProcessLocal() { - risks = append(risks, CreateRisk(technicalAsset, commLink, commLink, "", impact, model.Likely, false, Category())) + risks = append(risks, CreateRisk(input, technicalAsset, commLink, commLink, "", impact, model.Likely, false, Category())) } } } @@ -79,7 +79,7 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { return risks } -func CreateRisk(technicalAsset model.TechnicalAsset, incomingAccess, incomingAccessOrigin model.CommunicationLink, hopBetween string, +func 
CreateRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingAccess, incomingAccessOrigin model.CommunicationLink, hopBetween string, impact model.RiskExploitationImpact, likelihood model.RiskExploitationLikelihood, twoFactor bool, category model.RiskCategory) model.Risk { factorString := "" if twoFactor { @@ -94,13 +94,13 @@ func CreateRisk(technicalAsset model.TechnicalAsset, incomingAccess, incomingAcc ExploitationLikelihood: likelihood, ExploitationImpact: impact, Title: "Missing " + factorString + "Authentication covering communication link " + incomingAccess.Title + " " + - "from " + model.ParsedModelRoot.TechnicalAssets[incomingAccessOrigin.SourceId].Title + " " + hopBetween + + "from " + input.TechnicalAssets[incomingAccessOrigin.SourceId].Title + " " + hopBetween + "to " + technicalAsset.Title + "", MostRelevantTechnicalAssetId: technicalAsset.Id, MostRelevantCommunicationLinkId: incomingAccess.Id, DataBreachProbability: model.Possible, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + incomingAccess.Id + "@" + model.ParsedModelRoot.TechnicalAssets[incomingAccess.SourceId].Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.Category.Id + "@" + incomingAccess.Id + "@" + input.TechnicalAssets[incomingAccess.SourceId].Id + "@" + technicalAsset.Id return risk } diff --git a/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go b/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go index e02a1110..02319c75 100644 --- a/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go +++ b/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go @@ -43,13 +43,13 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) hasCustomDevelopedParts, 
hasBuildPipeline, hasSourcecodeRepo, hasDevOpsClient := false, false, false, false impact := model.LowImpact var mostRelevantAsset model.TechnicalAsset for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if technicalAsset.CustomDevelopedParts && !technicalAsset.OutOfScope { hasCustomDevelopedParts = true if impact == model.LowImpact { diff --git a/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go b/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go index 07f1e800..6f800a9d 100644 --- a/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go +++ b/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go @@ -57,7 +57,7 @@ func SupportedTags() []string { return res } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) sharedRuntimesWithUnspecificCloudRisks := make(map[string]bool) @@ -82,14 +82,14 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { techAssetIDsWithSubtagSpecificCloudRisks := make(map[string]bool) - for _, trustBoundary := range model.ParsedModelRoot.TrustBoundaries { + for _, trustBoundary := range input.TrustBoundaries { taggedOuterTB := trustBoundary.IsTaggedWithAny(SupportedTags()...) 
// false = generic cloud risks only // true = cloud-individual risks if taggedOuterTB || trustBoundary.Type.IsWithinCloud() { addTrustBoundaryAccordingToBaseTag(trustBoundary, trustBoundariesWithUnspecificCloudRisks, trustBoundaryIDsAWS, trustBoundaryIDsAzure, trustBoundaryIDsGCP, trustBoundaryIDsOCP) for _, techAssetID := range trustBoundary.RecursivelyAllTechnicalAssetIDsInside() { added := false - tA := model.ParsedModelRoot.TechnicalAssets[techAssetID] + tA := input.TechnicalAssets[techAssetID] if tA.IsTaggedWithAny(SupportedTags()...) { addAccordingToBaseTag(tA, tA.Tags, techAssetIDsWithSubtagSpecificCloudRisks, @@ -116,7 +116,7 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { } for _, tB := range model.TrustBoundariesTaggedWithAny(SupportedTags()...) { for _, candidateID := range tB.RecursivelyAllTechnicalAssetIDsInside() { - tA := model.ParsedModelRoot.TechnicalAssets[candidateID] + tA := input.TechnicalAssets[candidateID] if tA.IsTaggedWithAny(SupportedTags()...) { addAccordingToBaseTag(tA, tA.Tags, techAssetIDsWithSubtagSpecificCloudRisks, @@ -132,7 +132,7 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { addSharedRuntimeAccordingToBaseTag(sR, sharedRuntimesWithUnspecificCloudRisks, sharedRuntimeIDsAWS, sharedRuntimeIDsAzure, sharedRuntimeIDsGCP, sharedRuntimeIDsOCP) for _, candidateID := range sR.TechnicalAssetsRunning { - tA := model.ParsedModelRoot.TechnicalAssets[candidateID] + tA := input.TechnicalAssets[candidateID] addAccordingToBaseTag(tA, sR.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) @@ -189,49 +189,49 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { // first try to add shared runtimes... 
for id := range sharedRuntimeIDsAWS { - risks = append(risks, createRiskForSharedRuntime(model.ParsedModelRoot.SharedRuntimes[id], "AWS", "CIS Benchmark for AWS")) + risks = append(risks, createRiskForSharedRuntime(input, input.SharedRuntimes[id], "AWS", "CIS Benchmark for AWS")) addedAWS = true } for id := range sharedRuntimeIDsAzure { - risks = append(risks, createRiskForSharedRuntime(model.ParsedModelRoot.SharedRuntimes[id], "Azure", "CIS Benchmark for Microsoft Azure")) + risks = append(risks, createRiskForSharedRuntime(input, input.SharedRuntimes[id], "Azure", "CIS Benchmark for Microsoft Azure")) addedAzure = true } for id := range sharedRuntimeIDsGCP { - risks = append(risks, createRiskForSharedRuntime(model.ParsedModelRoot.SharedRuntimes[id], "GCP", "CIS Benchmark for Google Cloud Computing Platform")) + risks = append(risks, createRiskForSharedRuntime(input, input.SharedRuntimes[id], "GCP", "CIS Benchmark for Google Cloud Computing Platform")) addedGCP = true } for id := range sharedRuntimeIDsOCP { - risks = append(risks, createRiskForSharedRuntime(model.ParsedModelRoot.SharedRuntimes[id], "OCP", "Vendor Best Practices for Oracle Cloud Platform")) + risks = append(risks, createRiskForSharedRuntime(input, input.SharedRuntimes[id], "OCP", "Vendor Best Practices for Oracle Cloud Platform")) addedOCP = true } for id := range sharedRuntimesWithUnspecificCloudRisks { - risks = append(risks, createRiskForSharedRuntime(model.ParsedModelRoot.SharedRuntimes[id], "", "")) + risks = append(risks, createRiskForSharedRuntime(input, input.SharedRuntimes[id], "", "")) } // ... 
followed by trust boundaries for the generic risks for id := range trustBoundaryIDsAWS { - risks = append(risks, createRiskForTrustBoundary(model.ParsedModelRoot.TrustBoundaries[id], "AWS", "CIS Benchmark for AWS")) + risks = append(risks, createRiskForTrustBoundary(input.TrustBoundaries[id], "AWS", "CIS Benchmark for AWS")) addedAWS = true } for id := range trustBoundaryIDsAzure { - risks = append(risks, createRiskForTrustBoundary(model.ParsedModelRoot.TrustBoundaries[id], "Azure", "CIS Benchmark for Microsoft Azure")) + risks = append(risks, createRiskForTrustBoundary(input.TrustBoundaries[id], "Azure", "CIS Benchmark for Microsoft Azure")) addedAzure = true } for id := range trustBoundaryIDsGCP { - risks = append(risks, createRiskForTrustBoundary(model.ParsedModelRoot.TrustBoundaries[id], "GCP", "CIS Benchmark for Google Cloud Computing Platform")) + risks = append(risks, createRiskForTrustBoundary(input.TrustBoundaries[id], "GCP", "CIS Benchmark for Google Cloud Computing Platform")) addedGCP = true } for id := range trustBoundaryIDsOCP { - risks = append(risks, createRiskForTrustBoundary(model.ParsedModelRoot.TrustBoundaries[id], "OCP", "Vendor Best Practices for Oracle Cloud Platform")) + risks = append(risks, createRiskForTrustBoundary(input.TrustBoundaries[id], "OCP", "Vendor Best Practices for Oracle Cloud Platform")) addedOCP = true } for id := range trustBoundariesWithUnspecificCloudRisks { - risks = append(risks, createRiskForTrustBoundary(model.ParsedModelRoot.TrustBoundaries[id], "", "")) + risks = append(risks, createRiskForTrustBoundary(input.TrustBoundaries[id], "", "")) } // just use the most sensitive asset as an example - to only create one general "AWS cloud hardening" risk, not many if !addedAWS { - mostRelevantAsset := findMostSensitiveTechnicalAsset(techAssetIDsAWS) + mostRelevantAsset := findMostSensitiveTechnicalAsset(input, techAssetIDsAWS) if !mostRelevantAsset.IsZero() { risks = append(risks, 
createRiskForTechnicalAsset(mostRelevantAsset, "AWS", "CIS Benchmark for AWS")) addedAWS = true @@ -239,7 +239,7 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { } // just use the most sensitive asset as an example - to only create one general "Azure cloud hardening" risk, not many if !addedAzure { - mostRelevantAsset := findMostSensitiveTechnicalAsset(techAssetIDsAzure) + mostRelevantAsset := findMostSensitiveTechnicalAsset(input, techAssetIDsAzure) if !mostRelevantAsset.IsZero() { risks = append(risks, createRiskForTechnicalAsset(mostRelevantAsset, "Azure", "CIS Benchmark for Microsoft Azure")) addedAzure = true @@ -247,7 +247,7 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { } // just use the most sensitive asset as an example - to only create one general "GCP cloud hardening" risk, not many if !addedGCP { - mostRelevantAsset := findMostSensitiveTechnicalAsset(techAssetIDsGCP) + mostRelevantAsset := findMostSensitiveTechnicalAsset(input, techAssetIDsGCP) if !mostRelevantAsset.IsZero() { risks = append(risks, createRiskForTechnicalAsset(mostRelevantAsset, "GCP", "CIS Benchmark for Google Cloud Computing Platform")) addedGCP = true @@ -255,7 +255,7 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { } // just use the most sensitive asset as an example - to only create one general "GCP cloud hardening" risk, not many if !addedOCP { - mostRelevantAsset := findMostSensitiveTechnicalAsset(techAssetIDsOCP) + mostRelevantAsset := findMostSensitiveTechnicalAsset(input, techAssetIDsOCP) if !mostRelevantAsset.IsZero() { risks = append(risks, createRiskForTechnicalAsset(mostRelevantAsset, "OCP", "Vendor Best Practices for Oracle Cloud Platform")) addedOCP = true @@ -264,7 +264,7 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { // now also add all tech asset specific tag-specific risks, as they are specific to the asset anyway (therefore don't set added to true here) for id := range techAssetIDsWithSubtagSpecificCloudRisks { 
- tA := model.ParsedModelRoot.TechnicalAssets[id] + tA := input.TechnicalAssets[id] if tA.IsTaggedWithAnyTraversingUp("aws:ec2") { risks = append(risks, createRiskForTechnicalAsset(tA, "EC2", "CIS Benchmark for Amazon Linux")) } @@ -348,7 +348,7 @@ func addAccordingToBaseTag(techAsset model.TechnicalAsset, tags []string, } } -func findMostSensitiveTechnicalAsset(techAssets map[string]bool) model.TechnicalAsset { +func findMostSensitiveTechnicalAsset(input *model.ParsedModel, techAssets map[string]bool) model.TechnicalAsset { var mostRelevantAsset model.TechnicalAsset keys := make([]string, 0, len(techAssets)) for k := range techAssets { @@ -356,7 +356,7 @@ func findMostSensitiveTechnicalAsset(techAssets map[string]bool) model.Technical } sort.Strings(keys) for _, id := range keys { - tA := model.ParsedModelRoot.TechnicalAssets[id] + tA := input.TechnicalAssets[id] if mostRelevantAsset.IsZero() || tA.HighestSensitivityScore() > mostRelevantAsset.HighestSensitivityScore() { mostRelevantAsset = tA } @@ -364,7 +364,7 @@ func findMostSensitiveTechnicalAsset(techAssets map[string]bool) model.Technical return mostRelevantAsset } -func createRiskForSharedRuntime(sharedRuntime model.SharedRuntime, prefix, details string) model.Risk { +func createRiskForSharedRuntime(input *model.ParsedModel, sharedRuntime model.SharedRuntime, prefix, details string) model.Risk { if len(prefix) > 0 { prefix = " (" + prefix + ")" } diff --git a/risks/built-in/missing-file-validation/missing-file-validation-rule.go b/risks/built-in/missing-file-validation/missing-file-validation-rule.go index cbf5604f..01797023 100644 --- a/risks/built-in/missing-file-validation/missing-file-validation-rule.go +++ b/risks/built-in/missing-file-validation/missing-file-validation-rule.go @@ -42,10 +42,10 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for 
_, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope || !technicalAsset.CustomDevelopedParts { continue } diff --git a/risks/built-in/missing-hardening/missing-hardening-rule.go b/risks/built-in/missing-hardening/missing-hardening-rule.go index 38bdec74..04a8ae10 100644 --- a/risks/built-in/missing-hardening/missing-hardening-rule.go +++ b/risks/built-in/missing-hardening/missing-hardening-rule.go @@ -44,10 +44,10 @@ func SupportedTags() []string { return []string{"tomcat"} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope { if technicalAsset.RAA >= raaLimit || (technicalAsset.RAA >= raaLimitReduced && (technicalAsset.Type == model.Datastore || technicalAsset.Technology == model.ApplicationServer || technicalAsset.Technology == model.IdentityProvider || technicalAsset.Technology == model.ERP)) { diff --git a/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go b/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go index 609137aa..68ede20a 100644 --- a/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go +++ b/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go @@ -47,10 +47,10 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := 
input.TechnicalAssets[id] if technicalAsset.OutOfScope { continue } @@ -65,7 +65,7 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { // check each incoming authenticated data flow commLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, commLink := range commLinks { - caller := model.ParsedModelRoot.TechnicalAssets[commLink.SourceId] + caller := input.TechnicalAssets[commLink.SourceId] if !caller.Technology.IsUsuallyAbleToPropagateIdentityToOutgoingTargets() || caller.Type == model.Datastore { continue } @@ -77,7 +77,7 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { highRisk := technicalAsset.Confidentiality == model.StrictlyConfidential || technicalAsset.Integrity == model.MissionCritical || technicalAsset.Availability == model.MissionCritical - risks = append(risks, createRisk(technicalAsset, commLink, highRisk)) + risks = append(risks, createRisk(input, technicalAsset, commLink, highRisk)) } } } @@ -85,7 +85,7 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { return risks } -func createRisk(technicalAsset model.TechnicalAsset, incomingAccess model.CommunicationLink, moreRisky bool) model.Risk { +func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingAccess model.CommunicationLink, moreRisky bool) model.Risk { impact := model.LowImpact if moreRisky { impact = model.MediumImpact @@ -96,13 +96,13 @@ func createRisk(technicalAsset model.TechnicalAsset, incomingAccess model.Commun ExploitationLikelihood: model.Unlikely, ExploitationImpact: impact, Title: "Missing End User Identity Propagation over communication link " + incomingAccess.Title + " " + - "from " + model.ParsedModelRoot.TechnicalAssets[incomingAccess.SourceId].Title + " " + + "from " + input.TechnicalAssets[incomingAccess.SourceId].Title + " " + "to " + technicalAsset.Title + "", MostRelevantTechnicalAssetId: technicalAsset.Id, MostRelevantCommunicationLinkId: incomingAccess.Id, 
DataBreachProbability: model.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + incomingAccess.Id + "@" + model.ParsedModelRoot.TechnicalAssets[incomingAccess.SourceId].Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.Category.Id + "@" + incomingAccess.Id + "@" + input.TechnicalAssets[incomingAccess.SourceId].Id + "@" + technicalAsset.Id return risk } diff --git a/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go b/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go index cfa3f7f0..bd58e297 100644 --- a/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go +++ b/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go @@ -43,9 +43,9 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { + for _, technicalAsset := range input.TechnicalAssets { if !technicalAsset.OutOfScope && technicalAsset.Technology.IsIdentityRelated() { moreImpact := technicalAsset.Confidentiality == model.StrictlyConfidential || technicalAsset.Integrity == model.MissionCritical || @@ -53,9 +53,9 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { sameExecutionEnv := false createRiskEntry := false // now check for any other same-network assets of non-identity-related types - for sparringAssetCandidateId := range model.ParsedModelRoot.TechnicalAssets { // so inner loop again over all assets + for sparringAssetCandidateId := range input.TechnicalAssets { // so inner loop again over all assets if technicalAsset.Id != sparringAssetCandidateId { - sparringAssetCandidate := model.ParsedModelRoot.TechnicalAssets[sparringAssetCandidateId] 
+ sparringAssetCandidate := input.TechnicalAssets[sparringAssetCandidateId] if !sparringAssetCandidate.Technology.IsIdentityRelated() && !sparringAssetCandidate.Technology.IsCloseToHighValueTargetsTolerated() { if technicalAsset.IsSameExecutionEnvironment(sparringAssetCandidateId) { createRiskEntry = true diff --git a/risks/built-in/missing-identity-store/missing-identity-store-rule.go b/risks/built-in/missing-identity-store/missing-identity-store-rule.go index 11631698..61e54450 100644 --- a/risks/built-in/missing-identity-store/missing-identity-store-rule.go +++ b/risks/built-in/missing-identity-store/missing-identity-store-rule.go @@ -41,9 +41,9 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { + for _, technicalAsset := range input.TechnicalAssets { if !technicalAsset.OutOfScope && (technicalAsset.Technology == model.IdentityStoreLDAP || technicalAsset.Technology == model.IdentityStoreDatabase) { // everything fine, no risk, as we have an in-scope identity store in the model @@ -55,11 +55,11 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { var mostRelevantAsset model.TechnicalAsset impact := model.LowImpact for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] for _, commLink := range technicalAsset.CommunicationLinksSorted() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset if commLink.Authorization == model.EndUserIdentityPropagation { riskIdentified = true - targetAsset := model.ParsedModelRoot.TechnicalAssets[commLink.TargetId] + targetAsset := 
input.TechnicalAssets[commLink.TargetId] if impact == model.LowImpact { mostRelevantAsset = targetAsset if targetAsset.HighestConfidentiality() >= model.Confidential || diff --git a/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go b/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go index 438c8d45..459fdd51 100644 --- a/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go +++ b/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go @@ -49,24 +49,24 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: keys := make([]string, 0) - for k := range model.ParsedModelRoot.TechnicalAssets { + for k := range input.TechnicalAssets { keys = append(keys, k) } sort.Strings(keys) for _, key := range keys { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[key] + technicalAsset := input.TechnicalAssets[key] if !technicalAsset.OutOfScope && technicalAsset.Technology != model.ReverseProxy && technicalAsset.Technology != model.WAF && technicalAsset.Technology != model.IDS && technicalAsset.Technology != model.IPS && technicalAsset.Technology != model.ServiceRegistry { if technicalAsset.RAA >= raaLimit && (technicalAsset.Type == model.Datastore || technicalAsset.Confidentiality >= model.Confidential || technicalAsset.Integrity >= model.Critical || technicalAsset.Availability >= model.Critical) { // now check for any other same-network assets of certain types which have no direct connection for _, sparringAssetCandidateId := range keys { // so inner loop again over all assets if technicalAsset.Id != 
sparringAssetCandidateId { - sparringAssetCandidate := model.ParsedModelRoot.TechnicalAssets[sparringAssetCandidateId] + sparringAssetCandidate := input.TechnicalAssets[sparringAssetCandidateId] if sparringAssetCandidate.Technology.IsLessProtectedType() && technicalAsset.IsSameTrustBoundaryNetworkOnly(sparringAssetCandidateId) && !technicalAsset.HasDirectConnection(sparringAssetCandidateId) && diff --git a/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go b/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go index fe0b64d7..62e2f1d8 100644 --- a/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go +++ b/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go @@ -43,9 +43,9 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { + for _, technicalAsset := range input.TechnicalAssets { if !technicalAsset.OutOfScope && technicalAsset.Technology == model.Vault { moreImpact := technicalAsset.Confidentiality == model.StrictlyConfidential || technicalAsset.Integrity == model.MissionCritical || @@ -53,9 +53,9 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { sameExecutionEnv := false createRiskEntry := false // now check for any other same-network assets of non-vault-related types - for sparringAssetCandidateId := range model.ParsedModelRoot.TechnicalAssets { // so inner loop again over all assets + for sparringAssetCandidateId := range input.TechnicalAssets { // so inner loop again over all assets if technicalAsset.Id != sparringAssetCandidateId { - sparringAssetCandidate := model.ParsedModelRoot.TechnicalAssets[sparringAssetCandidateId] + sparringAssetCandidate := input.TechnicalAssets[sparringAssetCandidateId] if sparringAssetCandidate.Technology != model.Vault && 
!isVaultStorage(technicalAsset, sparringAssetCandidate) { if technicalAsset.IsSameExecutionEnvironment(sparringAssetCandidateId) { createRiskEntry = true diff --git a/risks/built-in/missing-vault/missing-vault-rule.go b/risks/built-in/missing-vault/missing-vault-rule.go index 444e0039..b104739c 100644 --- a/risks/built-in/missing-vault/missing-vault-rule.go +++ b/risks/built-in/missing-vault/missing-vault-rule.go @@ -42,13 +42,13 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) hasVault := false var mostRelevantAsset model.TechnicalAsset impact := model.LowImpact for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset - techAsset := model.ParsedModelRoot.TechnicalAssets[id] + techAsset := input.TechnicalAssets[id] if techAsset.Technology == model.Vault { hasVault = true } diff --git a/risks/built-in/missing-waf/missing-waf-rule.go b/risks/built-in/missing-waf/missing-waf-rule.go index ed2e2406..3905fbaf 100644 --- a/risks/built-in/missing-waf/missing-waf-rule.go +++ b/risks/built-in/missing-waf/missing-waf-rule.go @@ -41,15 +41,15 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { + for _, technicalAsset := range input.TechnicalAssets { if !technicalAsset.OutOfScope && (technicalAsset.Technology.IsWebApplication() || technicalAsset.Technology.IsWebService()) { for _, incomingAccess := range model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] { if incomingAccess.IsAcrossTrustBoundaryNetworkOnly() && incomingAccess.Protocol.IsPotentialWebAccessProtocol() && - 
model.ParsedModelRoot.TechnicalAssets[incomingAccess.SourceId].Technology != model.WAF { + input.TechnicalAssets[incomingAccess.SourceId].Technology != model.WAF { risks = append(risks, createRisk(technicalAsset)) break } diff --git a/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go b/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go index e2b52ce2..0c9292ef 100644 --- a/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go +++ b/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go @@ -45,23 +45,23 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: keys := make([]string, 0) - for k := range model.ParsedModelRoot.SharedRuntimes { + for k := range input.SharedRuntimes { keys = append(keys, k) } sort.Strings(keys) for _, key := range keys { - sharedRuntime := model.ParsedModelRoot.SharedRuntimes[key] + sharedRuntime := input.SharedRuntimes[key] currentTrustBoundaryId := "" hasFrontend, hasBackend := false, false riskAdded := false for _, technicalAssetId := range sharedRuntime.TechnicalAssetsRunning { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[technicalAssetId] + technicalAsset := input.TechnicalAssets[technicalAssetId] if len(currentTrustBoundaryId) > 0 && currentTrustBoundaryId != technicalAsset.GetTrustBoundaryId() { - risks = append(risks, createRisk(sharedRuntime)) + risks = append(risks, createRisk(input, sharedRuntime)) riskAdded = true break } @@ -74,15 +74,15 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { } } if !riskAdded && hasFrontend && hasBackend { - risks = append(risks, createRisk(sharedRuntime)) + risks = append(risks, 
createRisk(input, sharedRuntime)) } } return risks } -func createRisk(sharedRuntime model.SharedRuntime) model.Risk { +func createRisk(input *model.ParsedModel, sharedRuntime model.SharedRuntime) model.Risk { impact := model.LowImpact - if isMoreRisky(sharedRuntime) { + if isMoreRisky(input, sharedRuntime) { impact = model.MediumImpact } risk := model.Risk{ @@ -100,9 +100,9 @@ func createRisk(sharedRuntime model.SharedRuntime) model.Risk { return risk } -func isMoreRisky(sharedRuntime model.SharedRuntime) bool { +func isMoreRisky(input *model.ParsedModel, sharedRuntime model.SharedRuntime) bool { for _, techAssetId := range sharedRuntime.TechnicalAssetsRunning { - techAsset := model.ParsedModelRoot.TechnicalAssets[techAssetId] + techAsset := input.TechnicalAssets[techAssetId] if techAsset.Confidentiality == model.StrictlyConfidential || techAsset.Integrity == model.MissionCritical || techAsset.Availability == model.MissionCritical { return true diff --git a/risks/built-in/path-traversal/path-traversal-rule.go b/risks/built-in/path-traversal/path-traversal-rule.go index 0b6b292d..482fd725 100644 --- a/risks/built-in/path-traversal/path-traversal-rule.go +++ b/risks/built-in/path-traversal/path-traversal-rule.go @@ -39,23 +39,23 @@ func Category() model.RiskCategory { } } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if technicalAsset.Technology != model.FileServer && technicalAsset.Technology != model.LocalFileSystem { continue } incomingFlows := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, incomingFlow := range incomingFlows { - if model.ParsedModelRoot.TechnicalAssets[incomingFlow.SourceId].OutOfScope { + if input.TechnicalAssets[incomingFlow.SourceId].OutOfScope 
{ continue } likelihood := model.VeryLikely if incomingFlow.Usage == model.DevOps { likelihood = model.Likely } - risks = append(risks, createRisk(technicalAsset, incomingFlow, likelihood)) + risks = append(risks, createRisk(input, technicalAsset, incomingFlow, likelihood)) } } return risks @@ -65,8 +65,8 @@ func SupportedTags() []string { return []string{} } -func createRisk(technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood model.RiskExploitationLikelihood) model.Risk { - caller := model.ParsedModelRoot.TechnicalAssets[incomingFlow.SourceId] +func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood model.RiskExploitationLikelihood) model.Risk { + caller := input.TechnicalAssets[incomingFlow.SourceId] title := "Path-Traversal risk at " + caller.Title + " against filesystem " + technicalAsset.Title + "" + " via " + incomingFlow.Title + "" impact := model.MediumImpact diff --git a/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go b/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go index 9bc4116a..e7306a1b 100644 --- a/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go +++ b/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go @@ -43,13 +43,13 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) impact := model.LowImpact - for _, buildPipeline := range model.ParsedModelRoot.TechnicalAssets { + for _, buildPipeline := range input.TechnicalAssets { if buildPipeline.Technology == model.BuildPipeline { for _, deploymentLink := range buildPipeline.CommunicationLinks { - targetAsset := model.ParsedModelRoot.TechnicalAssets[deploymentLink.TargetId] + targetAsset := 
input.TechnicalAssets[deploymentLink.TargetId] if !deploymentLink.Readonly && deploymentLink.Usage == model.DevOps && !targetAsset.OutOfScope && !targetAsset.Technology.IsDevelopmentRelevant() && targetAsset.Usage == model.Business { if targetAsset.HighestConfidentiality() >= model.Confidential || diff --git a/risks/built-in/search-query-injection/search-query-injection-rule.go b/risks/built-in/search-query-injection/search-query-injection-rule.go index 90726f7a..9a3a3415 100644 --- a/risks/built-in/search-query-injection/search-query-injection-rule.go +++ b/risks/built-in/search-query-injection/search-query-injection-rule.go @@ -40,14 +40,14 @@ func Category() model.RiskCategory { } } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if technicalAsset.Technology == model.SearchEngine || technicalAsset.Technology == model.SearchIndex { incomingFlows := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, incomingFlow := range incomingFlows { - if model.ParsedModelRoot.TechnicalAssets[incomingFlow.SourceId].OutOfScope { + if input.TechnicalAssets[incomingFlow.SourceId].OutOfScope { continue } if incomingFlow.Protocol == model.HTTP || incomingFlow.Protocol == model.HTTPS || @@ -56,7 +56,7 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { if incomingFlow.Usage == model.DevOps { likelihood = model.Likely } - risks = append(risks, createRisk(technicalAsset, incomingFlow, likelihood)) + risks = append(risks, createRisk(input, technicalAsset, incomingFlow, likelihood)) } } } @@ -68,8 +68,8 @@ func SupportedTags() []string { return []string{} } -func createRisk(technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood 
model.RiskExploitationLikelihood) model.Risk { - caller := model.ParsedModelRoot.TechnicalAssets[incomingFlow.SourceId] +func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood model.RiskExploitationLikelihood) model.Risk { + caller := input.TechnicalAssets[incomingFlow.SourceId] title := "Search Query Injection risk at " + caller.Title + " against search engine server " + technicalAsset.Title + "" + " via " + incomingFlow.Title + "" impact := model.MediumImpact diff --git a/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go b/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go index 076d332e..db5974cb 100644 --- a/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go +++ b/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go @@ -43,24 +43,24 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope || technicalAsset.Technology.IsClient() || technicalAsset.Technology == model.LoadBalancer { continue } for _, outgoingFlow := range technicalAsset.CommunicationLinks { if outgoingFlow.Protocol.IsPotentialWebAccessProtocol() { - risks = append(risks, createRisk(technicalAsset, outgoingFlow)) + risks = append(risks, createRisk(input, technicalAsset, outgoingFlow)) } } } return risks } -func createRisk(technicalAsset model.TechnicalAsset, outgoingFlow model.CommunicationLink) model.Risk { - target := model.ParsedModelRoot.TechnicalAssets[outgoingFlow.TargetId] +func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, outgoingFlow 
model.CommunicationLink) model.Risk { + target := input.TechnicalAssets[outgoingFlow.TargetId] title := "Server-Side Request Forgery (SSRF) risk at " + technicalAsset.Title + " server-side web-requesting " + "the target " + target.Title + " via " + outgoingFlow.Title + "" impact := model.LowImpact @@ -71,7 +71,7 @@ func createRisk(technicalAsset model.TechnicalAsset, outgoingFlow model.Communic // check all potential attack targets within the same trust boundary (accessible via web protocols) uniqueDataBreachTechnicalAssetIDs := make(map[string]interface{}) uniqueDataBreachTechnicalAssetIDs[technicalAsset.Id] = true - for _, potentialTargetAsset := range model.ParsedModelRoot.TechnicalAssets { + for _, potentialTargetAsset := range input.TechnicalAssets { if technicalAsset.IsSameTrustBoundaryNetworkOnly(potentialTargetAsset.Id) { for _, commLinkIncoming := range model.IncomingTechnicalCommunicationLinksMappedByTargetId[potentialTargetAsset.Id] { if commLinkIncoming.Protocol.IsPotentialWebAccessProtocol() { @@ -84,7 +84,7 @@ func createRisk(technicalAsset model.TechnicalAsset, outgoingFlow model.Communic } } // adjust for cloud-based special risks - if impact == model.LowImpact && model.ParsedModelRoot.TrustBoundaries[technicalAsset.GetTrustBoundaryId()].Type.IsWithinCloud() { + if impact == model.LowImpact && input.TrustBoundaries[technicalAsset.GetTrustBoundaryId()].Type.IsWithinCloud() { impact = model.MediumImpact } dataBreachTechnicalAssetIDs := make([]string, 0) diff --git a/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go b/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go index e7316833..41e7f894 100644 --- a/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go +++ b/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go @@ -40,24 +40,24 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func 
GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope && technicalAsset.Technology == model.ServiceRegistry { incomingFlows := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] - risks = append(risks, createRisk(technicalAsset, incomingFlows)) + risks = append(risks, createRisk(input, technicalAsset, incomingFlows)) } } return risks } -func createRisk(technicalAsset model.TechnicalAsset, incomingFlows []model.CommunicationLink) model.Risk { +func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlows []model.CommunicationLink) model.Risk { title := "Service Registry Poisoning risk at " + technicalAsset.Title + "" impact := model.LowImpact for _, incomingFlow := range incomingFlows { - caller := model.ParsedModelRoot.TechnicalAssets[incomingFlow.SourceId] + caller := input.TechnicalAssets[incomingFlow.SourceId] if technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || technicalAsset.HighestIntegrity() == model.MissionCritical || technicalAsset.HighestAvailability() == model.MissionCritical || caller.HighestConfidentiality() == model.StrictlyConfidential || caller.HighestIntegrity() == model.MissionCritical || caller.HighestAvailability() == model.MissionCritical || incomingFlow.HighestConfidentiality() == model.StrictlyConfidential || incomingFlow.HighestIntegrity() == model.MissionCritical || incomingFlow.HighestAvailability() == model.MissionCritical { diff --git a/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go b/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go index 582e6973..c9100737 100644 --- a/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go +++ b/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go @@ -40,26 
+40,26 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] incomingFlows := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, incomingFlow := range incomingFlows { - if model.ParsedModelRoot.TechnicalAssets[incomingFlow.SourceId].OutOfScope { + if input.TechnicalAssets[incomingFlow.SourceId].OutOfScope { continue } if incomingFlow.Protocol.IsPotentialDatabaseAccessProtocol(true) && (technicalAsset.Technology == model.Database || technicalAsset.Technology == model.IdentityStoreDatabase) || (incomingFlow.Protocol.IsPotentialDatabaseAccessProtocol(false)) { - risks = append(risks, createRisk(technicalAsset, incomingFlow)) + risks = append(risks, createRisk(input, technicalAsset, incomingFlow)) } } } return risks } -func createRisk(technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink) model.Risk { - caller := model.ParsedModelRoot.TechnicalAssets[incomingFlow.SourceId] +func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink) model.Risk { + caller := input.TechnicalAssets[incomingFlow.SourceId] title := "SQL/NoSQL-Injection risk at " + caller.Title + " against database " + technicalAsset.Title + "" + " via " + incomingFlow.Title + "" impact := model.MediumImpact diff --git a/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go b/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go index d2fd5c87..975840b2 100644 --- a/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go +++ b/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go @@ -43,17 +43,17 @@ func SupportedTags() []string { return []string{} } 
-func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { + for _, technicalAsset := range input.TechnicalAssets { if technicalAsset.Technology.IsDevelopmentRelevant() { - risks = append(risks, createRisk(technicalAsset)) + risks = append(risks, createRisk(input, technicalAsset)) } } return risks } -func createRisk(technicalAsset model.TechnicalAsset) model.Risk { +func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset) model.Risk { title := "Unchecked Deployment risk at " + technicalAsset.Title + "" // impact is depending on highest rating impact := model.LowImpact @@ -64,10 +64,10 @@ func createRisk(technicalAsset model.TechnicalAsset) model.Risk { if codeDeploymentTargetCommLink.Usage == model.DevOps { for _, dataAssetID := range codeDeploymentTargetCommLink.DataAssetsSent { // it appears to be code when elevated integrity rating of sent data asset - if model.ParsedModelRoot.DataAssets[dataAssetID].Integrity >= model.Important { + if input.DataAssets[dataAssetID].Integrity >= model.Important { // here we've got a deployment target which has its data assets at risk via deployment of backdoored code uniqueDataBreachTechnicalAssetIDs[codeDeploymentTargetCommLink.TargetId] = true - targetTechAsset := model.ParsedModelRoot.TechnicalAssets[codeDeploymentTargetCommLink.TargetId] + targetTechAsset := input.TechnicalAssets[codeDeploymentTargetCommLink.TargetId] if targetTechAsset.HighestConfidentiality() >= model.Confidential || targetTechAsset.HighestIntegrity() >= model.Critical || targetTechAsset.HighestAvailability() >= model.Critical { diff --git a/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go b/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go index 4a350dcc..4383e531 100644 --- a/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go +++ 
b/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go @@ -45,10 +45,10 @@ func SupportedTags() []string { // check for technical assets that should be encrypted due to their confidentiality -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope && !IsEncryptionWaiver(technicalAsset) && (technicalAsset.HighestConfidentiality() >= model.Confidential || technicalAsset.HighestIntegrity() >= model.Critical) { diff --git a/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go b/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go index 538f0023..33c5da63 100644 --- a/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go +++ b/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go @@ -42,38 +42,38 @@ func SupportedTags() []string { // check for communication links that should be encrypted due to their confidentiality and/or integrity -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { + for _, technicalAsset := range input.TechnicalAssets { for _, dataFlow := range technicalAsset.CommunicationLinks { transferringAuthData := dataFlow.Authentication != model.NoneAuthentication - sourceAsset := model.ParsedModelRoot.TechnicalAssets[dataFlow.SourceId] - targetAsset := model.ParsedModelRoot.TechnicalAssets[dataFlow.TargetId] + sourceAsset := input.TechnicalAssets[dataFlow.SourceId] + targetAsset := input.TechnicalAssets[dataFlow.TargetId] if !technicalAsset.OutOfScope || !sourceAsset.OutOfScope { if !dataFlow.Protocol.IsEncrypted() && 
!dataFlow.Protocol.IsProcessLocal() && !sourceAsset.Technology.IsUnprotectedCommunicationsTolerated() && !targetAsset.Technology.IsUnprotectedCommunicationsTolerated() { addedOne := false for _, sentDataAsset := range dataFlow.DataAssetsSent { - dataAsset := model.ParsedModelRoot.DataAssets[sentDataAsset] + dataAsset := input.DataAssets[sentDataAsset] if isHighSensitivity(dataAsset) || transferringAuthData { - risks = append(risks, createRisk(technicalAsset, dataFlow, true, transferringAuthData)) + risks = append(risks, createRisk(input, technicalAsset, dataFlow, true, transferringAuthData)) addedOne = true break } else if !dataFlow.VPN && isMediumSensitivity(dataAsset) { - risks = append(risks, createRisk(technicalAsset, dataFlow, false, transferringAuthData)) + risks = append(risks, createRisk(input, technicalAsset, dataFlow, false, transferringAuthData)) addedOne = true break } } if !addedOne { for _, receivedDataAsset := range dataFlow.DataAssetsReceived { - dataAsset := model.ParsedModelRoot.DataAssets[receivedDataAsset] + dataAsset := input.DataAssets[receivedDataAsset] if isHighSensitivity(dataAsset) || transferringAuthData { - risks = append(risks, createRisk(technicalAsset, dataFlow, true, transferringAuthData)) + risks = append(risks, createRisk(input, technicalAsset, dataFlow, true, transferringAuthData)) break } else if !dataFlow.VPN && isMediumSensitivity(dataAsset) { - risks = append(risks, createRisk(technicalAsset, dataFlow, false, transferringAuthData)) + risks = append(risks, createRisk(input, technicalAsset, dataFlow, false, transferringAuthData)) break } } @@ -85,12 +85,12 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { return risks } -func createRisk(technicalAsset model.TechnicalAsset, dataFlow model.CommunicationLink, highRisk bool, transferringAuthData bool) model.Risk { +func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, dataFlow model.CommunicationLink, highRisk bool, transferringAuthData bool) 
model.Risk { impact := model.MediumImpact if highRisk { impact = model.HighImpact } - target := model.ParsedModelRoot.TechnicalAssets[dataFlow.TargetId] + target := input.TechnicalAssets[dataFlow.TargetId] title := "Unencrypted Communication named " + dataFlow.Title + " between " + technicalAsset.Title + " and " + target.Title + "" if transferringAuthData { title += " transferring authentication data (like credentials, token, session-id, etc.)" diff --git a/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go b/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go index b5ed9b3d..a156479c 100644 --- a/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go +++ b/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go @@ -51,10 +51,10 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope { commLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] sort.Sort(model.ByTechnicalCommunicationLinkIdSort(commLinks)) @@ -70,17 +70,17 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { continue } } - if model.ParsedModelRoot.TechnicalAssets[incomingAccess.SourceId].Technology == model.Monitoring || + if input.TechnicalAssets[incomingAccess.SourceId].Technology == model.Monitoring || incomingAccess.VPN { continue } if technicalAsset.Confidentiality >= model.Confidential || technicalAsset.Integrity >= model.Critical { - sourceAsset := model.ParsedModelRoot.TechnicalAssets[incomingAccess.SourceId] + sourceAsset := input.TechnicalAssets[incomingAccess.SourceId] if 
sourceAsset.Internet { highRisk := technicalAsset.Confidentiality == model.StrictlyConfidential || technicalAsset.Integrity == model.MissionCritical risks = append(risks, createRisk(technicalAsset, incomingAccess, - model.ParsedModelRoot.TechnicalAssets[incomingAccess.SourceId], highRisk)) + input.TechnicalAssets[incomingAccess.SourceId], highRisk)) } } } diff --git a/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go b/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go index 30f48204..1e1adcc1 100644 --- a/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go +++ b/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go @@ -44,13 +44,13 @@ func SupportedTags() []string { // check for data stores that should not be accessed directly across trust boundaries -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope && technicalAsset.Type == model.Datastore { for _, incomingAccess := range model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] { - sourceAsset := model.ParsedModelRoot.TechnicalAssets[incomingAccess.SourceId] + sourceAsset := input.TechnicalAssets[incomingAccess.SourceId] if (technicalAsset.Technology == model.IdentityStoreLDAP || technicalAsset.Technology == model.IdentityStoreDatabase) && sourceAsset.Technology == model.IdentityProvider { continue @@ -61,7 +61,7 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { highRisk := technicalAsset.Confidentiality == model.StrictlyConfidential || technicalAsset.Integrity == model.MissionCritical risks = append(risks, createRisk(technicalAsset, 
incomingAccess, - model.ParsedModelRoot.TechnicalAssets[incomingAccess.SourceId], highRisk)) + input.TechnicalAssets[incomingAccess.SourceId], highRisk)) } } } diff --git a/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go b/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go index 1c882e2e..6ad9a275 100644 --- a/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go +++ b/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go @@ -38,13 +38,13 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] for _, commLink := range technicalAsset.CommunicationLinks { if len(commLink.DataAssetsSent) == 0 && len(commLink.DataAssetsReceived) == 0 { - if !technicalAsset.OutOfScope || !model.ParsedModelRoot.TechnicalAssets[commLink.TargetId].OutOfScope { + if !technicalAsset.OutOfScope || !input.TechnicalAssets[commLink.TargetId].OutOfScope { risks = append(risks, createRisk(technicalAsset, commLink)) } } diff --git a/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go b/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go index d384c844..627d7b41 100644 --- a/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go +++ b/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go @@ -41,15 +41,15 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) // first create them in memory - otherwise in Go ranging over map is random order // range over 
them in sorted (hence re-producible) way: unusedDataAssetIDs := make(map[string]bool) - for k := range model.ParsedModelRoot.DataAssets { + for k := range input.DataAssets { unusedDataAssetIDs[k] = true } - for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { + for _, technicalAsset := range input.TechnicalAssets { for _, processedDataAssetID := range technicalAsset.DataAssetsProcessed { delete(unusedDataAssetIDs, processedDataAssetID) } @@ -71,13 +71,13 @@ func GenerateRisks(input *model.ModelInput) []model.Risk { } sort.Strings(keys) for _, unusedDataAssetID := range keys { - risks = append(risks, createRisk(unusedDataAssetID)) + risks = append(risks, createRisk(input, unusedDataAssetID)) } return risks } -func createRisk(unusedDataAssetID string) model.Risk { - unusedDataAsset := model.ParsedModelRoot.DataAssets[unusedDataAssetID] +func createRisk(input *model.ParsedModel, unusedDataAssetID string) model.Risk { + unusedDataAsset := input.DataAssets[unusedDataAssetID] title := "Unnecessary Data Asset named " + unusedDataAsset.Title + "" risk := model.Risk{ Category: Category(), diff --git a/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go b/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go index 3baa68c2..96929a74 100644 --- a/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go +++ b/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go @@ -46,47 +46,47 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope { continue } // outgoing data flows for _, outgoingDataFlow := range technicalAsset.CommunicationLinks { - 
targetAsset := model.ParsedModelRoot.TechnicalAssets[outgoingDataFlow.TargetId] + targetAsset := input.TechnicalAssets[outgoingDataFlow.TargetId] if targetAsset.Technology.IsUnnecessaryDataTolerated() { continue } - risks = checkRisksAgainstTechnicalAsset(risks, technicalAsset, outgoingDataFlow, false) + risks = checkRisksAgainstTechnicalAsset(input, risks, technicalAsset, outgoingDataFlow, false) } // incoming data flows commLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] sort.Sort(model.ByTechnicalCommunicationLinkIdSort(commLinks)) for _, incomingDataFlow := range commLinks { - targetAsset := model.ParsedModelRoot.TechnicalAssets[incomingDataFlow.SourceId] + targetAsset := input.TechnicalAssets[incomingDataFlow.SourceId] if targetAsset.Technology.IsUnnecessaryDataTolerated() { continue } - risks = checkRisksAgainstTechnicalAsset(risks, technicalAsset, incomingDataFlow, true) + risks = checkRisksAgainstTechnicalAsset(input, risks, technicalAsset, incomingDataFlow, true) } } return risks } -func checkRisksAgainstTechnicalAsset(risks []model.Risk, technicalAsset model.TechnicalAsset, +func checkRisksAgainstTechnicalAsset(input *model.ParsedModel, risks []model.Risk, technicalAsset model.TechnicalAsset, dataFlow model.CommunicationLink, inverseDirection bool) []model.Risk { for _, transferredDataAssetId := range dataFlow.DataAssetsSent { if !technicalAsset.ProcessesOrStoresDataAsset(transferredDataAssetId) { - transferredDataAsset := model.ParsedModelRoot.DataAssets[transferredDataAssetId] + transferredDataAsset := input.DataAssets[transferredDataAssetId] //fmt.Print("--->>> Checking "+technicalAsset.Id+": "+transferredDataAsset.Id+" sent via "+dataFlow.Id+"\n") if transferredDataAsset.Confidentiality >= model.Confidential || transferredDataAsset.Integrity >= model.Critical { commPartnerId := dataFlow.TargetId if inverseDirection { commPartnerId = dataFlow.SourceId } - commPartnerAsset := 
model.ParsedModelRoot.TechnicalAssets[commPartnerId] + commPartnerAsset := input.TechnicalAssets[commPartnerId] risk := createRisk(technicalAsset, transferredDataAsset, commPartnerAsset) if isNewRisk(risks, risk) { risks = append(risks, risk) @@ -96,14 +96,14 @@ func checkRisksAgainstTechnicalAsset(risks []model.Risk, technicalAsset model.Te } for _, transferredDataAssetId := range dataFlow.DataAssetsReceived { if !technicalAsset.ProcessesOrStoresDataAsset(transferredDataAssetId) { - transferredDataAsset := model.ParsedModelRoot.DataAssets[transferredDataAssetId] + transferredDataAsset := input.DataAssets[transferredDataAssetId] //fmt.Print("--->>> Checking "+technicalAsset.Id+": "+transferredDataAsset.Id+" received via "+dataFlow.Id+"\n") if transferredDataAsset.Confidentiality >= model.Confidential || transferredDataAsset.Integrity >= model.Critical { commPartnerId := dataFlow.TargetId if inverseDirection { commPartnerId = dataFlow.SourceId } - commPartnerAsset := model.ParsedModelRoot.TechnicalAssets[commPartnerId] + commPartnerAsset := input.TechnicalAssets[commPartnerId] risk := createRisk(technicalAsset, transferredDataAsset, commPartnerAsset) if isNewRisk(risks, risk) { risks = append(risks, risk) diff --git a/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go b/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go index 4c1a228d..ec66f500 100644 --- a/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go +++ b/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go @@ -39,10 +39,10 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if 
len(technicalAsset.DataAssetsProcessed) == 0 && len(technicalAsset.DataAssetsStored) == 0 || (len(technicalAsset.CommunicationLinks) == 0 && len(model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id]) == 0) { risks = append(risks, createRisk(technicalAsset)) diff --git a/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go b/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go index 427527c7..3b9e838d 100644 --- a/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go +++ b/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go @@ -44,10 +44,10 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope { continue } diff --git a/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go b/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go index c07d8517..47f0007a 100644 --- a/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go +++ b/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go @@ -39,9 +39,9 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, techAsset := range model.ParsedModelRoot.TechnicalAssets { + for _, techAsset := range input.TechnicalAssets { for _, commLink := range techAsset.CommunicationLinks { // check readonly consistency if commLink.Readonly { @@ -56,7 +56,7 @@ func GenerateRisks(input *model.ModelInput) 
[]model.Risk { } } // check for protocol inconsistencies - targetAsset := model.ParsedModelRoot.TechnicalAssets[commLink.TargetId] + targetAsset := input.TechnicalAssets[commLink.TargetId] if commLink.Protocol == model.InProcessLibraryCall && targetAsset.Technology != model.Library { risks = append(risks, createRisk(techAsset, commLink, "(protocol type \""+model.InProcessLibraryCall.String()+"\" does not match target technology type \""+targetAsset.Technology.String()+"\": expected \""+model.Library.String()+"\")")) diff --git a/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go b/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go index 66ce0daf..42af4d18 100644 --- a/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go +++ b/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go @@ -38,12 +38,12 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, trustBoundary := range model.ParsedModelRoot.TrustBoundaries { + for _, trustBoundary := range input.TrustBoundaries { if trustBoundary.Type == model.NetworkPolicyNamespaceIsolation { for _, techAssetID := range trustBoundary.TechnicalAssetsInside { - techAsset := model.ParsedModelRoot.TechnicalAssets[techAssetID] + techAsset := input.TechnicalAssets[techAssetID] if techAsset.Machine != model.Container && techAsset.Machine != model.Serverless { risks = append(risks, createRisk(techAsset)) } diff --git a/risks/built-in/xml-external-entity/xml-external-entity-rule.go b/risks/built-in/xml-external-entity/xml-external-entity-rule.go index 81997b16..daca3f8e 100644 --- a/risks/built-in/xml-external-entity/xml-external-entity-rule.go +++ b/risks/built-in/xml-external-entity/xml-external-entity-rule.go @@ -42,10 +42,10 @@ func SupportedTags() []string { return []string{} } 
-func GenerateRisks(input *model.ModelInput) []model.Risk { +func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := model.ParsedModelRoot.TechnicalAssets[id] + technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope { continue } diff --git a/risks/risk.go b/risks/risk.go index f9fcf1a6..b685c4f0 100644 --- a/risks/risk.go +++ b/risks/risk.go @@ -9,7 +9,7 @@ import ( type BuiltInRisk struct { Category func() model.RiskCategory SupportedTags func() []string - GenerateRisks func(m *model.ModelInput) []model.Risk + GenerateRisks func(m *model.ParsedModel) []model.Risk } type CustomRisk struct { From ed97925dabb60b2c32addd553f0bf2084c9d08f4 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Tue, 19 Dec 2023 16:40:24 -0800 Subject: [PATCH 09/68] - made Dockerfile and Dockerfile.local consistent - addressed snyk vuln reports - added resolution of home dir (~, $HOME) for paths passed as command line arguments for ease of use in goland build configs --- Dockerfile | 84 ++++++++++++++++----------------- Dockerfile.local | 62 +++++++++++++------------ Makefile | 15 ++++-- main.go | 118 ++++++++++++++++++++++++++++++++++++----------- 4 files changed, 176 insertions(+), 103 deletions(-) diff --git a/Dockerfile b/Dockerfile index a552ac79..b071d6d7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,8 +1,11 @@ + + ###### ## Stage 1: Clone the Git repository ###### FROM alpine/git as clone WORKDIR /app + RUN git clone https://github.com/threagile/threagile.git @@ -12,76 +15,69 @@ RUN git clone https://github.com/threagile/threagile.git ## Stage 2: Build application with Go's build tools ###### FROM golang as build +WORKDIR /app + ENV GO111MODULE=on + # https://stackoverflow.com/questions/36279253/go-compiled-binary-wont-run-in-an-alpine-docker-container-on-ubuntu-host #ENV CGO_ENABLED=0 # cannot be set as otherwise plugins don't run -WORKDIR /app COPY 
--from=clone /app/threagile /app + RUN go version RUN go test ./... -RUN GOOS=linux go build -a -trimpath -ldflags="-s -w -X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -buildmode=run -o raa.so raa/raa/raa.go -RUN GOOS=linux go build -a -trimpath -ldflags="-s -w -X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -buildmode=run -o dummy.so raa/dummy/dummy.go -RUN GOOS=linux go build -a -trimpath -ldflags="-s -w -X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -buildmode=run -o demo-rule.so risks/custom/demo/demo-rule.go -RUN GOOS=linux go build -a -trimpath -ldflags="-s -w -X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o threagile +RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o raa_calc raa/raa/raa.go +RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o raa_dummy raa/dummy/dummy.go +RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o risk_demo_rule risks/custom/demo/demo-rule.go +RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o threagile # add the -race parameter to go build call in order to instrument with race condition detector: https://blog.golang.org/race-detector # NOTE: copy files with final name to send to final build -RUN cp /app/demo/example/threagile.yaml /app/demo/example/threagile-example-model.yaml && \ - cp /app/demo/stub/threagile.yaml /app/demo/stub/threagile-stub-model.yaml +RUN cp /app/demo/example/threagile.yaml /app/demo/example/threagile-example-model.yaml +RUN cp /app/demo/stub/threagile.yaml /app/demo/stub/threagile-stub-model.yaml -###### -## Stage 3: Copy needed files into desired folder structure -###### -FROM scratch AS files - -COPY --from=build --chown=1000:1000 \ - /app/threagile \ - /app/raa.so \ - /app/dummy.so \ - /app/demo-rule.so \ - /app/LICENSE.txt \ - /app/report/template/background.pdf \ - /app/support/openapi.yaml \ - /app/support/schema.json \ - /app/support/live-templates.txt \ - 
/app/support/render-data-asset-diagram.sh \ - /app/support/render-data-flow-diagram.sh \ - /app/demo/example/threagile-example-model.yaml \ - /app/demo/stub/threagile-stub-model.yaml \ - \ - /app/ -COPY --from=build --chown=1000:1000 /app/server /app/server ###### -## Stage 4: Make final small image +## Stage 3: Make final small image ###### -FROM alpine +FROM alpine as deploy +WORKDIR /app # label used in other scripts to filter LABEL type="threagile" # add certificates -RUN apk add --update --no-cache ca-certificates \ -# add graphviz, fonts \ - graphviz ttf-freefont \ -# https://stackoverflow.com/questions/66963068/docker-alpine-executable-binary-not-found-even-if-in-path \ - libc6-compat && \ +RUN apk add --update --no-cache ca-certificates +# add graphviz, fonts +RUN apk add --update --no-cache graphviz ttf-freefont +# https://stackoverflow.com/questions/66963068/docker-alpine-executable-binary-not-found-even-if-in-path +RUN apk add libc6-compat # https://stackoverflow.com/questions/34729748/installed-go-binary-not-found-in-path-on-alpine-linux-docker # RUN mkdir -p /lib64 && ln -s /lib/libc.musl-x86_64.so.1 /lib64/ld-linux-x86-64.so.2 # clean apk cache - rm -rf /var/cache/apk/* && \ -# create application and data directories - mkdir -p /app /data && \ - chown -R 1000:1000 /app /data +RUN rm -rf /var/cache/apk/* -COPY --from=files / / +RUN mkdir -p /app /data +RUN chown -R 1000:1000 /app /data + +COPY --from=build --chown=1000:1000 /app/threagile /app/ +COPY --from=build --chown=1000:1000 /app/raa_calc /app/ +COPY --from=build --chown=1000:1000 /app/raa_dummy /app/ +COPY --from=build --chown=1000:1000 /app/risk_demo_rule /app/ +COPY --from=build --chown=1000:1000 /app/LICENSE.txt /app/ +COPY --from=build --chown=1000:1000 /app/report/template/background.pdf /app/ +COPY --from=build --chown=1000:1000 /app/support/openapi.yaml /app/ +COPY --from=build --chown=1000:1000 /app/support/schema.json /app/ +COPY --from=build --chown=1000:1000 
/app/support/live-templates.txt /app/ +COPY --from=build --chown=1000:1000 /app/support/render-data-asset-diagram.sh /app/ +COPY --from=build --chown=1000:1000 /app/support/render-data-flow-diagram.sh /app/ +COPY --from=build --chown=1000:1000 /app/demo/example/threagile-example-model.yaml /app/ +COPY --from=build --chown=1000:1000 /app/demo/stub/threagile-stub-model.yaml /app/ +COPY --from=build --chown=1000:1000 /app/server /app/server USER 1000:1000 -WORKDIR /app -ENV PATH=/app:$PATH \ - GIN_MODE=release +ENV PATH=/app:$PATH GIN_MODE=release ENTRYPOINT ["/app/threagile"] CMD ["-help"] diff --git a/Dockerfile.local b/Dockerfile.local index c0887604..7d138153 100644 --- a/Dockerfile.local +++ b/Dockerfile.local @@ -5,7 +5,7 @@ ###### FROM alpine/git as clone WORKDIR /app -#RUN git clone https://github.com/threagile/threagile.git + COPY . /app/threagile @@ -15,18 +15,24 @@ COPY . /app/threagile ## Stage 2: Build application with Go's build tools ###### FROM golang as build +WORKDIR /app + ENV GO111MODULE=on + # https://stackoverflow.com/questions/36279253/go-compiled-binary-wont-run-in-an-alpine-docker-container-on-ubuntu-host #ENV CGO_ENABLED=0 # cannot be set as otherwise plugins don't run -WORKDIR /app COPY --from=clone /app/threagile /app + RUN go version RUN go test ./... 
-RUN GOOS=linux go build -a -trimpath -ldflags="-s -w -X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -buildmode=plugin -o raa.so raa/raa/raa.go -RUN GOOS=linux go build -a -trimpath -ldflags="-s -w -X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -buildmode=plugin -o dummy.so raa/dummy/dummy.go -RUN GOOS=linux go build -a -trimpath -ldflags="-s -w -X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -buildmode=plugin -o demo-rule.so risks/custom/demo/demo-rule.go -RUN GOOS=linux go build -a -trimpath -ldflags="-s -w -X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o threagile +RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o raa_calc raa/raa/raa.go +RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o raa_dummy raa/dummy/dummy.go +RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o risk_demo_rule risks/custom/demo/demo-rule.go +RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o threagile # add the -race parameter to go build call in order to instrument with race condition detector: https://blog.golang.org/race-detector +# NOTE: copy files with final name to send to final build +RUN cp /app/demo/example/threagile.yaml /app/demo/example/threagile-example-model.yaml +RUN cp /app/demo/stub/threagile.yaml /app/demo/stub/threagile-stub-model.yaml @@ -34,13 +40,14 @@ RUN GOOS=linux go build -a -trimpath -ldflags="-s -w -X main.buildTimestamp=$(da ###### ## Stage 3: Make final small image ###### -FROM alpine +FROM alpine as deploy +WORKDIR /app # label used in other scripts to filter LABEL type="threagile" # add certificates -RUN apk add ca-certificates +RUN apk add --update --no-cache ca-certificates # add graphviz, fonts RUN apk add --update --no-cache graphviz ttf-freefont # https://stackoverflow.com/questions/66963068/docker-alpine-executable-binary-not-found-even-if-in-path @@ -50,30 +57,27 @@ RUN apk add libc6-compat # clean apk cache RUN 
rm -rf /var/cache/apk/* -WORKDIR /app +RUN mkdir -p /app /data +RUN chown -R 1000:1000 /app /data -COPY --from=build /app/threagile /app/threagile -COPY --from=build /app/raa.so /app/raa.so -COPY --from=build /app/dummy.so /app/dummy.so -COPY --from=build /app/demo-rule.so /app/demo-rule.so -COPY --from=build /app/LICENSE.txt /app/LICENSE.txt -COPY --from=build /app/report/template/background.pdf /app/background.pdf -COPY --from=build /app/support/openapi.yaml /app/openapi.yaml -COPY --from=build /app/support/schema.json /app/schema.json -COPY --from=build /app/support/live-templates.txt /app/live-templates.txt -COPY --from=build /app/support/render-data-asset-diagram.sh /app/render-data-asset-diagram.sh -COPY --from=build /app/support/render-data-flow-diagram.sh /app/render-data-flow-diagram.sh -COPY --from=build /app/server /app/server -COPY --from=build /app/demo/example/threagile.yaml /app/threagile-example-model.yaml -COPY --from=build /app/demo/stub/threagile.yaml /app/threagile-stub-model.yaml - -RUN mkdir /data +COPY --from=build --chown=1000:1000 /app/threagile /app/ +COPY --from=build --chown=1000:1000 /app/raa_calc /app/ +COPY --from=build --chown=1000:1000 /app/raa_dummy /app/ +COPY --from=build --chown=1000:1000 /app/risk_demo_rule /app/ +COPY --from=build --chown=1000:1000 /app/LICENSE.txt /app/ +COPY --from=build --chown=1000:1000 /app/report/template/background.pdf /app/ +COPY --from=build --chown=1000:1000 /app/support/openapi.yaml /app/ +COPY --from=build --chown=1000:1000 /app/support/schema.json /app/ +COPY --from=build --chown=1000:1000 /app/support/live-templates.txt /app/ +COPY --from=build --chown=1000:1000 /app/support/render-data-asset-diagram.sh /app/ +COPY --from=build --chown=1000:1000 /app/support/render-data-flow-diagram.sh /app/ +COPY --from=build --chown=1000:1000 /app/demo/example/threagile-example-model.yaml /app/ +COPY --from=build --chown=1000:1000 /app/demo/stub/threagile-stub-model.yaml /app/ +COPY --from=build 
--chown=1000:1000 /app/server /app/server -RUN chown -R 1000:1000 /app /data USER 1000:1000 -ENV PATH=/app:$PATH -ENV GIN_MODE=release +ENV PATH=/app:$PATH GIN_MODE=release ENTRYPOINT ["/app/threagile"] -CMD ["-help"] \ No newline at end of file +CMD ["-help"] diff --git a/Makefile b/Makefile index 49bf843a..076459cc 100644 --- a/Makefile +++ b/Makefile @@ -9,9 +9,9 @@ ASSETS = \ support/live-templates.txt \ server BIN = \ - raa \ + raa_calc \ raa_dummy \ - risk_demo \ + risk_demo_rule \ threagile SCRIPTS = \ support/render-data-asset-diagram.sh \ @@ -38,6 +38,13 @@ all: prep $(addprefix bin/,$(BIN)) clean: $(RM) bin vendor +tidy: clean + $(RM) .DS_Store + $(RM) just-for-docker-build-?.txt + $(RM) data-asset-diagram.* data-flow-diagram.* + $(RM) report.pdf risks.xlsx tags.xlsx risks.json technical-assets.json stats.json + $(RM) *.exe *.exe~ *.dll *.so *.dylibc *.test *.out + install: all mkdir -p $(BIN_DIR) $(ASSET_DIR) $(CP) $(addprefix bin/,$(BIN)) $(BIN_DIR) @@ -51,13 +58,13 @@ uninstall: $(RM) $(addprefix $(BIN_DIR)/,$(notdir $(SCRIPTS))) $(RM) $(ASSET_DIR) -bin/raa: raa/raa/raa.go +bin/raa_calc: raa/raa/raa.go $(GO) build $(GOFLAGS) -o $@ $< bin/raa_dummy: raa/dummy/dummy.go $(GO) build $(GOFLAGS) -o $@ $< -bin/risk_demo: risks/custom/demo/demo-rule.go +bin/risk_demo_rule: risks/custom/demo/demo-rule.go $(GO) build $(GOFLAGS) -o $@ $< bin/threagile: main.go diff --git a/main.go b/main.go index 039e2b82..331974f6 100644 --- a/main.go +++ b/main.go @@ -26,6 +26,7 @@ import ( "os/exec" "path/filepath" "regexp" + "runtime" "sort" "strconv" "strings" @@ -356,6 +357,9 @@ func (context *Context) unzip(src string, dest string) ([]string, error) { if err = os.MkdirAll(filepath.Dir(path), os.ModePerm); err != nil { return filenames, err } + if path != filepath.Clean(path) { + return filenames, fmt.Errorf("weird file path %v", path) + } outFile, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode()) if err != nil { return filenames, err @@ -867,15 +871,10 
@@ func (context *Context) applyRAA() string { fmt.Println("Applying RAA calculation:", *context.raaPlugin) } - raa := *context.raaPlugin runner, loadError := new(run.Runner).Load(filepath.Join(*context.binFolder, *context.raaPlugin)) if loadError != nil { - raa = strings.TrimSuffix(raa, filepath.Ext(raa)) - runner, loadError = new(run.Runner).Load(filepath.Join(*context.binFolder, raa)) - if loadError != nil { - fmt.Printf("WARNING: raa %q not loaded: %v\n", *context.raaPlugin, loadError) - return "" - } + fmt.Printf("WARNING: raa %q not loaded: %v\n", *context.raaPlugin, loadError) + return "" } runError := runner.Run(model.ParsedModelRoot, &model.ParsedModelRoot) @@ -1089,7 +1088,10 @@ func (context *Context) doItViaRuntimeCall(modelFile string, outputDir string, if generateStatsJSON { args = append(args, "-generate-stats-json") } - self := os.Args[0] + self, nameError := os.Executable() + if nameError != nil { + panic(nameError) + } cmd = exec.Command(self, args...) out, err := cmd.CombinedOutput() if err != nil { @@ -1807,6 +1809,12 @@ func (context *Context) stats(ginContext *gin.Context) { for _, keyFolder := range keyFolders { if len(keyFolder.Name()) == 128 { // it's a sha512 token hash probably, so count it as token folder for the stats keyCount++ + if keyFolder.Name() != filepath.Clean(keyFolder.Name()) { + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "weird file path", + }) + return + } modelFolders, err := os.ReadDir(filepath.Join(*context.serverFolder, keyDir, keyFolder.Name())) if err != nil { log.Println(err) @@ -2161,8 +2169,8 @@ func (context *Context) createNewSharedRuntime(ginContext *gin.Context) { return } // but later it will in memory keyed by its "id", so do this uniqueness check also - for _, runtime := range modelInput.SharedRuntimes { - if runtime.ID == payload.Id { + for _, sharedRuntime := range modelInput.SharedRuntimes { + if sharedRuntime.ID == payload.Id { ginContext.JSON(http.StatusConflict, gin.H{ "error": 
"shared runtime with this id already exists", }) @@ -2791,11 +2799,18 @@ func (context *Context) deleteModel(ginContext *gin.Context) { defer context.unlockFolder(folderNameOfKey) folder, ok := context.checkModelFolder(ginContext, ginContext.Param("model-id"), folderNameOfKey) if ok { + if folder != filepath.Clean(folder) { + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "model-id is weird", + }) + return + } err := os.RemoveAll(folder) if err != nil { ginContext.JSON(http.StatusNotFound, gin.H{ "error": "model not found", }) + return } ginContext.JSON(http.StatusOK, gin.H{ "message": "model deleted", @@ -2996,6 +3011,9 @@ func (context *Context) backupModelToHistory(modelFolder string, changeReasonFor }) for _, file := range files { requiredToDelete-- + if file.Name() != filepath.Clean(file.Name()) { + return fmt.Errorf("weird file name %v", file.Name()) + } err = os.Remove(filepath.Join(historyFolder, file.Name())) if err != nil { return err @@ -3242,20 +3260,59 @@ func (context *Context) deleteKey(ginContext *gin.Context) { }) } +func (context *Context) userHomeDir() string { + switch runtime.GOOS { + case "windows": + home := os.Getenv("HOMEDRIVE") + os.Getenv("HOMEPATH") + if home == "" { + home = os.Getenv("USERPROFILE") + } + return home + + default: + return os.Getenv("HOME") + } +} + +func (context *Context) expandPath(path string) *string { + home := context.userHomeDir() + if strings.HasPrefix(path, "~") { + path = strings.Replace(path, "~", home, 1) + } + + if strings.HasPrefix(path, "$HOME") { + path = strings.Replace(path, "$HOME", home, -1) + } + + return &path +} + func (context *Context) parseCommandlineArgs() { - context.tempFolder = flag.String("temp-dir", tempDir, "temporary folder location") - context.binFolder = flag.String("bin-dir", binDir, "binary folder location") + // folders context.appFolder = flag.String("app-dir", appDir, "app folder (default: "+appDir+")") context.serverFolder = flag.String("server-dir", dataDir, 
"base folder for server mode (default: "+dataDir+")") - context.modelFilename = flag.String("model", inputFile, "input model yaml file") + context.tempFolder = flag.String("temp-dir", tempDir, "temporary folder location") + context.binFolder = flag.String("bin-dir", binDir, "binary folder location") context.outputDir = flag.String("output", ".", "output directory") - context.raaPlugin = flag.String("raa-run", "raa.so", "RAA calculation run (.so shared object) file name") + + // files + context.modelFilename = flag.String("model", inputFile, "input model yaml file") + context.raaPlugin = flag.String("raa-run", "raa_calc", "RAA calculation run file name") + + // flags + context.verbose = flag.Bool("verbose", false, "verbose output") + context.diagramDPI = flag.Int("diagram-dpi", defaultGraphvizDPI, "DPI used to render: maximum is "+strconv.Itoa(maxGraphvizDPI)+"") + context.skipRiskRules = flag.String("skip-risk-rules", "", "comma-separated list of risk rules (by their ID) to skip") + context.riskRulesPlugins = flag.String("custom-risk-rules-plugins", "", "comma-separated list of plugins file names with custom risk rules to load") + context.ignoreOrphanedRiskTracking = flag.Bool("ignore-orphaned-risk-tracking", false, "ignore orphaned risk tracking (just log them) not matching a concrete risk") + + // commands + context.serverPort = flag.Int("server", 0, "start a server (instead of commandline execution) on the given port") context.executeModelMacro = flag.String("execute-model-macro", "", "Execute model macro (by ID)") context.testParseModel = flag.Bool("test-parse-model", false, "test parse model functionality") context.createExampleModel = flag.Bool("create-example-model", false, "just create an example model named threagile-example-model.yaml in the output directory") context.createStubModel = flag.Bool("create-stub-model", false, "just create a minimal stub model named threagile-stub-model.yaml in the output directory") context.createEditingSupport = 
flag.Bool("create-editing-support", false, "just create some editing support stuff in the output directory") - context.serverPort = flag.Int("server", 0, "start a server (instead of commandline execution) on the given port") context.templateFilename = flag.String("background", "background.pdf", "background pdf file") context.generateDataFlowDiagram = flag.Bool("generate-data-flow-diagram", true, "generate data-flow diagram") context.generateDataAssetDiagram = flag.Bool("generate-data-asset-diagram", true, "generate data asset diagram") @@ -3265,11 +3322,8 @@ func (context *Context) parseCommandlineArgs() { context.generateRisksExcel = flag.Bool("generate-risks-excel", true, "generate risks excel") context.generateTagsExcel = flag.Bool("generate-tags-excel", true, "generate tags excel") context.generateReportPDF = flag.Bool("generate-report-pdf", true, "generate report pdf, including diagrams") - context.diagramDPI = flag.Int("diagram-dpi", defaultGraphvizDPI, "DPI used to render: maximum is "+strconv.Itoa(maxGraphvizDPI)+"") - context.skipRiskRules = flag.String("skip-risk-rules", "", "comma-separated list of risk rules (by their ID) to skip") - context.riskRulesPlugins = flag.String("custom-risk-rules-plugins", "", "comma-separated list of plugins (.so shared object) file names with custom risk rules to load") - context.verbose = flag.Bool("verbose", false, "verbose output") - context.ignoreOrphanedRiskTracking = flag.Bool("ignore-orphaned-risk-tracking", false, "ignore orphaned risk tracking (just log them) not matching a concrete risk") + + // more commands version := flag.Bool("version", false, "print version") listTypes := flag.Bool("list-types", false, "print type information (enum values to be used in models)") listRiskRules := flag.Bool("list-risk-rules", false, "print risk rules") @@ -3279,6 +3333,7 @@ func (context *Context) parseCommandlineArgs() { explainModelMacros := flag.Bool("explain-model-macros", false, "Detailed explanation of all the model 
macros") print3rdParty := flag.Bool("print-3rd-party-licenses", false, "print 3rd-party license information") license := flag.Bool("print-license", false, "print license information") + flag.Usage = func() { context.printLogo() _, _ = fmt.Fprintf(os.Stderr, "Usage: threagile [options]") @@ -3310,6 +3365,14 @@ func (context *Context) parseCommandlineArgs() { fmt.Println() } flag.Parse() + + context.modelFilename = context.expandPath(*context.modelFilename) + context.appFolder = context.expandPath(*context.appFolder) + context.serverFolder = context.expandPath(*context.serverFolder) + context.tempFolder = context.expandPath(*context.tempFolder) + context.binFolder = context.expandPath(*context.binFolder) + context.outputDir = context.expandPath(*context.outputDir) + if *context.diagramDPI < 20 { *context.diagramDPI = 20 } else if *context.diagramDPI > maxGraphvizDPI { @@ -3559,6 +3622,9 @@ func (context *Context) parseCommandlineArgs() { } if *license { context.printLogo() + if *context.appFolder != filepath.Clean(*context.appFolder) { + log.Fatalf("weird app folder %v", *context.appFolder) + } content, err := os.ReadFile(filepath.Join(*context.appFolder, "LICENSE.txt")) checkErr(err) fmt.Print(string(content)) @@ -4534,12 +4600,12 @@ func (context *Context) parseModel() { // Shared Runtime =============================================================================== model.ParsedModelRoot.SharedRuntimes = make(map[string]model.SharedRuntime) - for title, runtime := range context.modelInput.SharedRuntimes { - id := fmt.Sprintf("%v", runtime.ID) + for title, inputRuntime := range context.modelInput.SharedRuntimes { + id := fmt.Sprintf("%v", inputRuntime.ID) var technicalAssetsRunning = make([]string, 0) - if runtime.TechnicalAssetsRunning != nil { - parsedRunningAssets := runtime.TechnicalAssetsRunning + if inputRuntime.TechnicalAssetsRunning != nil { + parsedRunningAssets := inputRuntime.TechnicalAssetsRunning technicalAssetsRunning = make([]string, 
len(parsedRunningAssets)) for i, parsedRunningAsset := range parsedRunningAssets { assetId := fmt.Sprintf("%v", parsedRunningAsset) @@ -4551,8 +4617,8 @@ func (context *Context) parseModel() { sharedRuntime := model.SharedRuntime{ Id: id, Title: title, //fmt.Sprintf("%v", boundary["title"]), - Description: withDefault(fmt.Sprintf("%v", runtime.Description), title), - Tags: checkTags(runtime.Tags, "shared runtime '"+title+"'"), + Description: withDefault(fmt.Sprintf("%v", inputRuntime.Description), title), + Tags: checkTags(inputRuntime.Tags, "shared runtime '"+title+"'"), TechnicalAssetsRunning: technicalAssetsRunning, } context.checkIdSyntax(id) From 8c2e7b0adf5e853fe136896267b3f30ac1c543b3 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Wed, 20 Dec 2023 12:44:31 -0800 Subject: [PATCH 10/68] converted parse test to go-style testing --- main.go | 53 ---------------------------------------------------- main_test.go | 53 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+), 53 deletions(-) create mode 100644 main_test.go diff --git a/main.go b/main.go index 331974f6..d553cc76 100644 --- a/main.go +++ b/main.go @@ -12,11 +12,9 @@ import ( "crypto/sha512" "encoding/base64" "encoding/hex" - "encoding/json" "errors" "flag" "fmt" - "github.com/akedrou/textdiff" "github.com/threagile/threagile/risks" "hash/fnv" "io" @@ -133,7 +131,6 @@ type Context struct { modelInput model.ModelInput modelFilename, templateFilename *string - testParseModel *bool createExampleModel, createStubModel, createEditingSupport, verbose, ignoreOrphanedRiskTracking *bool generateDataFlowDiagram, generateDataAssetDiagram, generateRisksJSON, generateTechnicalAssetsJSON *bool generateStatsJSON, generateRisksExcel, generateTagsExcel, generateReportPDF *bool @@ -3309,7 +3306,6 @@ func (context *Context) parseCommandlineArgs() { // commands context.serverPort = flag.Int("server", 0, "start a server (instead of commandline execution) on the given port") 
context.executeModelMacro = flag.String("execute-model-macro", "", "Execute model macro (by ID)") - context.testParseModel = flag.Bool("test-parse-model", false, "test parse model functionality") context.createExampleModel = flag.Bool("create-example-model", false, "just create an example model named threagile-example-model.yaml in the output directory") context.createStubModel = flag.Bool("create-stub-model", false, "just create a minimal stub model named threagile-stub-model.yaml in the output directory") context.createEditingSupport = flag.Bool("create-editing-support", false, "just create some editing support stuff in the output directory") @@ -3631,16 +3627,6 @@ func (context *Context) parseCommandlineArgs() { fmt.Println() os.Exit(0) } - if *context.testParseModel { - testError := context.goTestParseModel() - if testError != nil { - log.Fatalf("parse test failed: %v", testError) - return - } - fmt.Println("Parse test successful.") - fmt.Println() - os.Exit(0) - } if *context.createExampleModel { exampleError := context.createExampleModelFile() if exampleError != nil { @@ -3811,45 +3797,6 @@ func copyFile(src, dst string) (int64, error) { return nBytes, err } -func (context *Context) goTestParseModel() error { - flatModelFile := filepath.Join("test", "all.yaml") - flatModel := *new(model.ModelInput).Defaults() - flatLoadError := flatModel.Load(flatModelFile) - if flatLoadError != nil { - return fmt.Errorf("unable to parse model yaml %q: %v", flatModelFile, flatLoadError) - } - - sort.Strings(flatModel.TagsAvailable) - flatModel.TagsAvailable = []string{strings.Join(flatModel.TagsAvailable, ", ")} - - flatData, flatMarshalError := json.MarshalIndent(flatModel, "", " ") - if flatMarshalError != nil { - return fmt.Errorf("unable to print model yaml %q: %v", flatModelFile, flatMarshalError) - } - - splitModelFile := filepath.Join("test", "main.yaml") - splitModel := *new(model.ModelInput).Defaults() - splitLoadError := splitModel.Load(splitModelFile) - if 
splitLoadError != nil { - return fmt.Errorf("unable to parse model yaml %q: %v", splitModelFile, splitLoadError) - } - - sort.Strings(splitModel.TagsAvailable) - splitModel.TagsAvailable = []string{strings.Join(splitModel.TagsAvailable, ", ")} - - splitModel.Includes = flatModel.Includes - splitData, splitMarshalError := json.MarshalIndent(splitModel, "", " ") - if splitMarshalError != nil { - return fmt.Errorf("unable to print model yaml %q: %v", splitModelFile, splitMarshalError) - } - - if string(flatData) != string(splitData) { - return fmt.Errorf("parsing split model files is broken; diff: %v", textdiff.Unified(flatModelFile, splitModelFile, string(flatData), string(splitData))) - } - - return nil -} - func (context *Context) parseModel() { if *context.verbose { fmt.Println("Parsing model:", *context.modelFilename) diff --git a/main_test.go b/main_test.go new file mode 100644 index 00000000..1eb8502b --- /dev/null +++ b/main_test.go @@ -0,0 +1,53 @@ +package main + +import ( + "encoding/json" + "github.com/akedrou/textdiff" + "github.com/threagile/threagile/model" + "path/filepath" + "sort" + "strings" + "testing" +) + +func TestParseModel(t *testing.T) { + flatModelFile := filepath.Join("test", "all.yaml") + flatModel := *new(model.ModelInput).Defaults() + flatLoadError := flatModel.Load(flatModelFile) + if flatLoadError != nil { + t.Errorf("unable to parse model yaml %q: %v", flatModelFile, flatLoadError) + return + } + + sort.Strings(flatModel.TagsAvailable) + flatModel.TagsAvailable = []string{strings.Join(flatModel.TagsAvailable, ", ")} + + flatData, flatMarshalError := json.MarshalIndent(flatModel, "", " ") + if flatMarshalError != nil { + t.Errorf("unable to print model yaml %q: %v", flatModelFile, flatMarshalError) + return + } + + splitModelFile := filepath.Join("test", "main.yaml") + splitModel := *new(model.ModelInput).Defaults() + splitLoadError := splitModel.Load(splitModelFile) + if splitLoadError != nil { + t.Errorf("unable to parse model yaml 
%q: %v", splitModelFile, splitLoadError) + return + } + + sort.Strings(splitModel.TagsAvailable) + splitModel.TagsAvailable = []string{strings.Join(splitModel.TagsAvailable, ", ")} + + splitModel.Includes = flatModel.Includes + splitData, splitMarshalError := json.MarshalIndent(splitModel, "", " ") + if splitMarshalError != nil { + t.Errorf("unable to print model yaml %q: %v", splitModelFile, splitMarshalError) + return + } + + if string(flatData) != string(splitData) { + t.Errorf("parsing split model files is broken; diff: %v", textdiff.Unified(flatModelFile, splitModelFile, string(flatData), string(splitData))) + return + } +} From 07855675591844a3febd0db9122f9110b69a0f98 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Thu, 21 Dec 2023 15:43:25 -0800 Subject: [PATCH 11/68] moved to go-style folder structure --- Dockerfile.local | 10 +- Makefile | 17 +- raa/raa/raa.go => cmd/raa/main.go | 0 raa/dummy/dummy.go => cmd/raa_dummy/main.go | 0 .../demo-rule.go => cmd/risk_demo/main.go | 2 +- cmd/threagile/main.go | 13 ++ main_test.go => cmd/threagile/main_test.go | 4 +- main.go => internal/threagile/context.go | 155 ++++++++---------- model/types.go | 7 +- .../add-build-pipeline-macro.go | 0 .../built-in/add-vault/add-vault-macro.go | 0 .../pretty-print/pretty-print-macro.go | 0 .../remove-unused-tags-macro.go | 0 .../seed-risk-tracking-macro.go | 0 .../built-in/seed-tags/seed-tags-macro.go | 0 {report => pkg/report}/excel.go | 0 {report => pkg/report}/json.go | 0 {report => pkg/report}/report.go | 86 +++++----- .../report}/template/background.pdf | Bin .../accidental-secret-leak-rule.go | 0 .../code-backdooring/code-backdooring-rule.go | 0 .../container-baseimage-backdooring-rule.go | 0 .../container-platform-escape-rule.go | 0 .../cross-site-request-forgery-rule.go | 0 .../cross-site-scripting-rule.go | 0 ...risky-access-across-trust-boundary-rule.go | 0 .../incomplete-model/incomplete-model-rule.go | 0 .../ldap-injection/ldap-injection-rule.go | 0 
...ssing-authentication-second-factor-rule.go | 2 +- .../missing-authentication-rule.go | 0 .../missing-build-infrastructure-rule.go | 0 .../missing-cloud-hardening-rule.go | 0 .../missing-file-validation-rule.go | 0 .../missing-hardening-rule.go | 0 .../missing-identity-propagation-rule.go | 0 ...issing-identity-provider-isolation-rule.go | 0 .../missing-identity-store-rule.go | 0 .../missing-network-segmentation-rule.go | 0 .../missing-vault-isolation-rule.go | 0 .../missing-vault/missing-vault-rule.go | 0 .../built-in/missing-waf/missing-waf-rule.go | 0 .../mixed-targets-on-shared-runtime-rule.go | 0 .../path-traversal/path-traversal-rule.go | 0 .../push-instead-of-pull-deployment-rule.go | 0 .../search-query-injection-rule.go | 0 .../server-side-request-forgery-rule.go | 0 .../service-registry-poisoning-rule.go | 0 .../sql-nosql-injection-rule.go | 0 .../unchecked-deployment-rule.go | 0 .../unencrypted-asset-rule.go | 0 .../unencrypted-communication-rule.go | 0 .../unguarded-access-from-internet-rule.go | 0 .../unguarded-direct-datastore-access-rule.go | 0 .../unnecessary-communication-link-rule.go | 0 .../unnecessary-data-asset-rule.go | 0 .../unnecessary-data-transfer-rule.go | 0 .../unnecessary-technical-asset-rule.go | 0 .../untrusted-deserialization-rule.go | 0 .../wrong-communication-link-content-rule.go | 0 .../wrong-trust-boundary-content.go | 0 .../xml-external-entity-rule.go | 0 {risks => pkg/risks}/risk.go | 2 +- {run => pkg/run}/runner.go | 0 63 files changed, 151 insertions(+), 147 deletions(-) rename raa/raa/raa.go => cmd/raa/main.go (100%) rename raa/dummy/dummy.go => cmd/raa_dummy/main.go (100%) rename risks/custom/demo/demo-rule.go => cmd/risk_demo/main.go (98%) create mode 100644 cmd/threagile/main.go rename main_test.go => cmd/threagile/main_test.go (92%) rename main.go => internal/threagile/context.go (97%) rename {macros => pkg/macros}/built-in/add-build-pipeline/add-build-pipeline-macro.go (100%) rename {macros => 
pkg/macros}/built-in/add-vault/add-vault-macro.go (100%) rename {macros => pkg/macros}/built-in/pretty-print/pretty-print-macro.go (100%) rename {macros => pkg/macros}/built-in/remove-unused-tags/remove-unused-tags-macro.go (100%) rename {macros => pkg/macros}/built-in/seed-risk-tracking/seed-risk-tracking-macro.go (100%) rename {macros => pkg/macros}/built-in/seed-tags/seed-tags-macro.go (100%) rename {report => pkg/report}/excel.go (100%) rename {report => pkg/report}/json.go (100%) rename {report => pkg/report}/report.go (98%) rename {report => pkg/report}/template/background.pdf (100%) rename {risks => pkg/risks}/built-in/accidental-secret-leak/accidental-secret-leak-rule.go (100%) rename {risks => pkg/risks}/built-in/code-backdooring/code-backdooring-rule.go (100%) rename {risks => pkg/risks}/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go (100%) rename {risks => pkg/risks}/built-in/container-platform-escape/container-platform-escape-rule.go (100%) rename {risks => pkg/risks}/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go (100%) rename {risks => pkg/risks}/built-in/cross-site-scripting/cross-site-scripting-rule.go (100%) rename {risks => pkg/risks}/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go (100%) rename {risks => pkg/risks}/built-in/incomplete-model/incomplete-model-rule.go (100%) rename {risks => pkg/risks}/built-in/ldap-injection/ldap-injection-rule.go (100%) rename {risks => pkg/risks}/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go (98%) rename {risks => pkg/risks}/built-in/missing-authentication/missing-authentication-rule.go (100%) rename {risks => pkg/risks}/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go (100%) rename {risks => pkg/risks}/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go (100%) rename {risks => 
pkg/risks}/built-in/missing-file-validation/missing-file-validation-rule.go (100%) rename {risks => pkg/risks}/built-in/missing-hardening/missing-hardening-rule.go (100%) rename {risks => pkg/risks}/built-in/missing-identity-propagation/missing-identity-propagation-rule.go (100%) rename {risks => pkg/risks}/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go (100%) rename {risks => pkg/risks}/built-in/missing-identity-store/missing-identity-store-rule.go (100%) rename {risks => pkg/risks}/built-in/missing-network-segmentation/missing-network-segmentation-rule.go (100%) rename {risks => pkg/risks}/built-in/missing-vault-isolation/missing-vault-isolation-rule.go (100%) rename {risks => pkg/risks}/built-in/missing-vault/missing-vault-rule.go (100%) rename {risks => pkg/risks}/built-in/missing-waf/missing-waf-rule.go (100%) rename {risks => pkg/risks}/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go (100%) rename {risks => pkg/risks}/built-in/path-traversal/path-traversal-rule.go (100%) rename {risks => pkg/risks}/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go (100%) rename {risks => pkg/risks}/built-in/search-query-injection/search-query-injection-rule.go (100%) rename {risks => pkg/risks}/built-in/server-side-request-forgery/server-side-request-forgery-rule.go (100%) rename {risks => pkg/risks}/built-in/service-registry-poisoning/service-registry-poisoning-rule.go (100%) rename {risks => pkg/risks}/built-in/sql-nosql-injection/sql-nosql-injection-rule.go (100%) rename {risks => pkg/risks}/built-in/unchecked-deployment/unchecked-deployment-rule.go (100%) rename {risks => pkg/risks}/built-in/unencrypted-asset/unencrypted-asset-rule.go (100%) rename {risks => pkg/risks}/built-in/unencrypted-communication/unencrypted-communication-rule.go (100%) rename {risks => pkg/risks}/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go (100%) rename 
{risks => pkg/risks}/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go (100%) rename {risks => pkg/risks}/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go (100%) rename {risks => pkg/risks}/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go (100%) rename {risks => pkg/risks}/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go (100%) rename {risks => pkg/risks}/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go (100%) rename {risks => pkg/risks}/built-in/untrusted-deserialization/untrusted-deserialization-rule.go (100%) rename {risks => pkg/risks}/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go (100%) rename {risks => pkg/risks}/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go (100%) rename {risks => pkg/risks}/built-in/xml-external-entity/xml-external-entity-rule.go (100%) rename {risks => pkg/risks}/risk.go (94%) rename {run => pkg/run}/runner.go (100%) diff --git a/Dockerfile.local b/Dockerfile.local index 7d138153..39662a3c 100644 --- a/Dockerfile.local +++ b/Dockerfile.local @@ -25,10 +25,10 @@ COPY --from=clone /app/threagile /app RUN go version RUN go test ./... 
-RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o raa_calc raa/raa/raa.go -RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o raa_dummy raa/dummy/dummy.go -RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o risk_demo_rule risks/custom/demo/demo-rule.go -RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o threagile +RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o raa_calc cmd/raa/main.go +RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o raa_dummy cmd/raa_dummy/main.go +RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o risk_demo_rule cmd/risk_demo/main.go +RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o threagile cmd/threagile/main.go # add the -race parameter to go build call in order to instrument with race condition detector: https://blog.golang.org/race-detector # NOTE: copy files with final name to send to final build RUN cp /app/demo/example/threagile.yaml /app/demo/example/threagile-example-model.yaml @@ -65,7 +65,7 @@ COPY --from=build --chown=1000:1000 /app/raa_calc /app/ COPY --from=build --chown=1000:1000 /app/raa_dummy /app/ COPY --from=build --chown=1000:1000 /app/risk_demo_rule /app/ COPY --from=build --chown=1000:1000 /app/LICENSE.txt /app/ -COPY --from=build --chown=1000:1000 /app/report/template/background.pdf /app/ +COPY --from=build --chown=1000:1000 /app/pkg/report/template/background.pdf /app/ COPY --from=build --chown=1000:1000 /app/support/openapi.yaml /app/ COPY --from=build --chown=1000:1000 /app/support/schema.json /app/ COPY --from=build --chown=1000:1000 /app/support/live-templates.txt /app/ diff --git a/Makefile b/Makefile index 076459cc..463e70ff 100644 --- a/Makefile +++ b/Makefile @@ -3,7 +3,7 @@ ASSET_DIR = $(HOME)/.threagile BIN_DIR = $(HOME)/bin ASSETS = \ 
LICENSE.txt \ - report/template/background.pdf \ + pkg/report/template/background.pdf \ support/openapi.yaml \ support/schema.json \ support/live-templates.txt \ @@ -25,7 +25,7 @@ CP = cp -r RM = rm -rf # Targets -.phony: all install clean uninstall +.phony: all run_tests install clean uninstall default: all @@ -33,7 +33,10 @@ prep: env GO111MODULE=on go mod vendor $(MKDIR) bin -all: prep $(addprefix bin/,$(BIN)) +run_tests: + $(GO) test ./... + +all: prep run_tests $(addprefix bin/,$(BIN)) clean: $(RM) bin vendor @@ -58,14 +61,14 @@ uninstall: $(RM) $(addprefix $(BIN_DIR)/,$(notdir $(SCRIPTS))) $(RM) $(ASSET_DIR) -bin/raa_calc: raa/raa/raa.go +bin/raa_calc: cmd/raa/main.go $(GO) build $(GOFLAGS) -o $@ $< -bin/raa_dummy: raa/dummy/dummy.go +bin/raa_dummy: cmd/raa_dummy/main.go $(GO) build $(GOFLAGS) -o $@ $< -bin/risk_demo_rule: risks/custom/demo/demo-rule.go +bin/risk_demo_rule: cmd/risk_demo/main.go $(GO) build $(GOFLAGS) -o $@ $< -bin/threagile: main.go +bin/threagile: cmd/threagile/main.go $(GO) build $(GOFLAGS) -o $@ $< diff --git a/raa/raa/raa.go b/cmd/raa/main.go similarity index 100% rename from raa/raa/raa.go rename to cmd/raa/main.go diff --git a/raa/dummy/dummy.go b/cmd/raa_dummy/main.go similarity index 100% rename from raa/dummy/dummy.go rename to cmd/raa_dummy/main.go diff --git a/risks/custom/demo/demo-rule.go b/cmd/risk_demo/main.go similarity index 98% rename from risks/custom/demo/demo-rule.go rename to cmd/risk_demo/main.go index 4bbb6e25..fe9c560c 100644 --- a/risks/custom/demo/demo-rule.go +++ b/cmd/risk_demo/main.go @@ -6,7 +6,7 @@ import ( "flag" "fmt" "github.com/threagile/threagile/model" - "github.com/threagile/threagile/risks" + "github.com/threagile/threagile/pkg/risks" "io" "os" ) diff --git a/cmd/threagile/main.go b/cmd/threagile/main.go new file mode 100644 index 00000000..6390ad97 --- /dev/null +++ b/cmd/threagile/main.go @@ -0,0 +1,13 @@ +package main + +import "github.com/threagile/threagile/internal/threagile" + +func main() { + 
context := new(threagile.Context).Defaults() + context.ParseCommandlineArgs() + if context.ServerPort > 0 { + context.StartServer() + } else { + context.DoIt() + } +} diff --git a/main_test.go b/cmd/threagile/main_test.go similarity index 92% rename from main_test.go rename to cmd/threagile/main_test.go index 1eb8502b..c8802008 100644 --- a/main_test.go +++ b/cmd/threagile/main_test.go @@ -11,7 +11,7 @@ import ( ) func TestParseModel(t *testing.T) { - flatModelFile := filepath.Join("test", "all.yaml") + flatModelFile := filepath.Join("..", "..", "test", "all.yaml") flatModel := *new(model.ModelInput).Defaults() flatLoadError := flatModel.Load(flatModelFile) if flatLoadError != nil { @@ -28,7 +28,7 @@ func TestParseModel(t *testing.T) { return } - splitModelFile := filepath.Join("test", "main.yaml") + splitModelFile := filepath.Join("..", "..", "test", "main.yaml") splitModel := *new(model.ModelInput).Defaults() splitLoadError := splitModel.Load(splitModelFile) if splitLoadError != nil { diff --git a/main.go b/internal/threagile/context.go similarity index 97% rename from main.go rename to internal/threagile/context.go index d553cc76..ed48f8d0 100644 --- a/main.go +++ b/internal/threagile/context.go @@ -1,4 +1,4 @@ -package main +package threagile import ( "archive/zip" @@ -15,7 +15,63 @@ import ( "errors" "flag" "fmt" - "github.com/threagile/threagile/risks" + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/threagile/threagile/colors" + "github.com/threagile/threagile/model" + addbuildpipeline "github.com/threagile/threagile/pkg/macros/built-in/add-build-pipeline" + addvault "github.com/threagile/threagile/pkg/macros/built-in/add-vault" + prettyprint "github.com/threagile/threagile/pkg/macros/built-in/pretty-print" + removeunusedtags "github.com/threagile/threagile/pkg/macros/built-in/remove-unused-tags" + seedrisktracking "github.com/threagile/threagile/pkg/macros/built-in/seed-risk-tracking" + seedtags 
"github.com/threagile/threagile/pkg/macros/built-in/seed-tags" + "github.com/threagile/threagile/pkg/report" + "github.com/threagile/threagile/pkg/risks" + accidentalsecretleak "github.com/threagile/threagile/pkg/risks/built-in/accidental-secret-leak" + codebackdooring "github.com/threagile/threagile/pkg/risks/built-in/code-backdooring" + containerbaseimagebackdooring "github.com/threagile/threagile/pkg/risks/built-in/container-baseimage-backdooring" + containerplatformescape "github.com/threagile/threagile/pkg/risks/built-in/container-platform-escape" + crosssiterequestforgery "github.com/threagile/threagile/pkg/risks/built-in/cross-site-request-forgery" + crosssitescripting "github.com/threagile/threagile/pkg/risks/built-in/cross-site-scripting" + dosriskyaccessacrosstrustboundary "github.com/threagile/threagile/pkg/risks/built-in/dos-risky-access-across-trust-boundary" + incompletemodel "github.com/threagile/threagile/pkg/risks/built-in/incomplete-model" + ldapinjection "github.com/threagile/threagile/pkg/risks/built-in/ldap-injection" + missingauthentication "github.com/threagile/threagile/pkg/risks/built-in/missing-authentication" + missingauthenticationsecondfactor "github.com/threagile/threagile/pkg/risks/built-in/missing-authentication-second-factor" + missingbuildinfrastructure "github.com/threagile/threagile/pkg/risks/built-in/missing-build-infrastructure" + missingcloudhardening "github.com/threagile/threagile/pkg/risks/built-in/missing-cloud-hardening" + missingfilevalidation "github.com/threagile/threagile/pkg/risks/built-in/missing-file-validation" + missinghardening "github.com/threagile/threagile/pkg/risks/built-in/missing-hardening" + missingidentitypropagation "github.com/threagile/threagile/pkg/risks/built-in/missing-identity-propagation" + missingidentityproviderisolation "github.com/threagile/threagile/pkg/risks/built-in/missing-identity-provider-isolation" + missingidentitystore 
"github.com/threagile/threagile/pkg/risks/built-in/missing-identity-store" + missingnetworksegmentation "github.com/threagile/threagile/pkg/risks/built-in/missing-network-segmentation" + missingvault "github.com/threagile/threagile/pkg/risks/built-in/missing-vault" + missingvaultisolation "github.com/threagile/threagile/pkg/risks/built-in/missing-vault-isolation" + missingwaf "github.com/threagile/threagile/pkg/risks/built-in/missing-waf" + mixedtargetsonsharedruntime "github.com/threagile/threagile/pkg/risks/built-in/mixed-targets-on-shared-runtime" + pathtraversal "github.com/threagile/threagile/pkg/risks/built-in/path-traversal" + pushinsteadofpulldeployment "github.com/threagile/threagile/pkg/risks/built-in/push-instead-of-pull-deployment" + searchqueryinjection "github.com/threagile/threagile/pkg/risks/built-in/search-query-injection" + serversiderequestforgery "github.com/threagile/threagile/pkg/risks/built-in/server-side-request-forgery" + serviceregistrypoisoning "github.com/threagile/threagile/pkg/risks/built-in/service-registry-poisoning" + sqlnosqlinjection "github.com/threagile/threagile/pkg/risks/built-in/sql-nosql-injection" + uncheckeddeployment "github.com/threagile/threagile/pkg/risks/built-in/unchecked-deployment" + unencryptedasset "github.com/threagile/threagile/pkg/risks/built-in/unencrypted-asset" + unencryptedcommunication "github.com/threagile/threagile/pkg/risks/built-in/unencrypted-communication" + unguardedaccessfrominternet "github.com/threagile/threagile/pkg/risks/built-in/unguarded-access-from-internet" + unguardeddirectdatastoreaccess "github.com/threagile/threagile/pkg/risks/built-in/unguarded-direct-datastore-access" + unnecessarycommunicationlink "github.com/threagile/threagile/pkg/risks/built-in/unnecessary-communication-link" + unnecessarydataasset "github.com/threagile/threagile/pkg/risks/built-in/unnecessary-data-asset" + unnecessarydatatransfer "github.com/threagile/threagile/pkg/risks/built-in/unnecessary-data-transfer" + 
unnecessarytechnicalasset "github.com/threagile/threagile/pkg/risks/built-in/unnecessary-technical-asset" + untrusteddeserialization "github.com/threagile/threagile/pkg/risks/built-in/untrusted-deserialization" + wrongcommunicationlinkcontent "github.com/threagile/threagile/pkg/risks/built-in/wrong-communication-link-content" + wrongtrustboundarycontent "github.com/threagile/threagile/pkg/risks/built-in/wrong-trust-boundary-content" + xmlexternalentity "github.com/threagile/threagile/pkg/risks/built-in/xml-external-entity" + "github.com/threagile/threagile/pkg/run" + "golang.org/x/crypto/argon2" + "gopkg.in/yaml.v3" "hash/fnv" "io" "log" @@ -30,63 +86,6 @@ import ( "strings" "sync" "time" - - "github.com/gin-gonic/gin" - "github.com/google/uuid" - "github.com/threagile/threagile/colors" - addbuildpipeline "github.com/threagile/threagile/macros/built-in/add-build-pipeline" - addvault "github.com/threagile/threagile/macros/built-in/add-vault" - prettyprint "github.com/threagile/threagile/macros/built-in/pretty-print" - removeunusedtags "github.com/threagile/threagile/macros/built-in/remove-unused-tags" - seedrisktracking "github.com/threagile/threagile/macros/built-in/seed-risk-tracking" - seedtags "github.com/threagile/threagile/macros/built-in/seed-tags" - "github.com/threagile/threagile/model" - "github.com/threagile/threagile/report" - accidentalsecretleak "github.com/threagile/threagile/risks/built-in/accidental-secret-leak" - codebackdooring "github.com/threagile/threagile/risks/built-in/code-backdooring" - containerbaseimagebackdooring "github.com/threagile/threagile/risks/built-in/container-baseimage-backdooring" - containerplatformescape "github.com/threagile/threagile/risks/built-in/container-platform-escape" - crosssiterequestforgery "github.com/threagile/threagile/risks/built-in/cross-site-request-forgery" - crosssitescripting "github.com/threagile/threagile/risks/built-in/cross-site-scripting" - dosriskyaccessacrosstrustboundary 
"github.com/threagile/threagile/risks/built-in/dos-risky-access-across-trust-boundary" - incompletemodel "github.com/threagile/threagile/risks/built-in/incomplete-model" - ldapinjection "github.com/threagile/threagile/risks/built-in/ldap-injection" - missingauthentication "github.com/threagile/threagile/risks/built-in/missing-authentication" - missingauthenticationsecondfactor "github.com/threagile/threagile/risks/built-in/missing-authentication-second-factor" - missingbuildinfrastructure "github.com/threagile/threagile/risks/built-in/missing-build-infrastructure" - missingcloudhardening "github.com/threagile/threagile/risks/built-in/missing-cloud-hardening" - missingfilevalidation "github.com/threagile/threagile/risks/built-in/missing-file-validation" - missinghardening "github.com/threagile/threagile/risks/built-in/missing-hardening" - missingidentitypropagation "github.com/threagile/threagile/risks/built-in/missing-identity-propagation" - missingidentityproviderisolation "github.com/threagile/threagile/risks/built-in/missing-identity-provider-isolation" - missingidentitystore "github.com/threagile/threagile/risks/built-in/missing-identity-store" - missingnetworksegmentation "github.com/threagile/threagile/risks/built-in/missing-network-segmentation" - missingvault "github.com/threagile/threagile/risks/built-in/missing-vault" - missingvaultisolation "github.com/threagile/threagile/risks/built-in/missing-vault-isolation" - missingwaf "github.com/threagile/threagile/risks/built-in/missing-waf" - mixedtargetsonsharedruntime "github.com/threagile/threagile/risks/built-in/mixed-targets-on-shared-runtime" - pathtraversal "github.com/threagile/threagile/risks/built-in/path-traversal" - pushinsteadofpulldeployment "github.com/threagile/threagile/risks/built-in/push-instead-of-pull-deployment" - searchqueryinjection "github.com/threagile/threagile/risks/built-in/search-query-injection" - serversiderequestforgery 
"github.com/threagile/threagile/risks/built-in/server-side-request-forgery" - serviceregistrypoisoning "github.com/threagile/threagile/risks/built-in/service-registry-poisoning" - sqlnosqlinjection "github.com/threagile/threagile/risks/built-in/sql-nosql-injection" - uncheckeddeployment "github.com/threagile/threagile/risks/built-in/unchecked-deployment" - unencryptedasset "github.com/threagile/threagile/risks/built-in/unencrypted-asset" - unencryptedcommunication "github.com/threagile/threagile/risks/built-in/unencrypted-communication" - unguardedaccessfrominternet "github.com/threagile/threagile/risks/built-in/unguarded-access-from-internet" - unguardeddirectdatastoreaccess "github.com/threagile/threagile/risks/built-in/unguarded-direct-datastore-access" - unnecessarycommunicationlink "github.com/threagile/threagile/risks/built-in/unnecessary-communication-link" - unnecessarydataasset "github.com/threagile/threagile/risks/built-in/unnecessary-data-asset" - unnecessarydatatransfer "github.com/threagile/threagile/risks/built-in/unnecessary-data-transfer" - unnecessarytechnicalasset "github.com/threagile/threagile/risks/built-in/unnecessary-technical-asset" - untrusteddeserialization "github.com/threagile/threagile/risks/built-in/untrusted-deserialization" - wrongcommunicationlinkcontent "github.com/threagile/threagile/risks/built-in/wrong-communication-link-content" - wrongtrustboundarycontent "github.com/threagile/threagile/risks/built-in/wrong-trust-boundary-content" - xmlexternalentity "github.com/threagile/threagile/risks/built-in/xml-external-entity" - "github.com/threagile/threagile/run" - "golang.org/x/crypto/argon2" - "gopkg.in/yaml.v3" ) const ( @@ -122,6 +121,8 @@ const ( ) type Context struct { + ServerPort int + successCount int errorCount int drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks bool @@ -136,7 +137,7 @@ type Context struct { generateStatsJSON, generateRisksExcel, generateTagsExcel, generateReportPDF *bool outputDir, raaPlugin, 
skipRiskRules, riskRulesPlugins, executeModelMacro *string customRiskRules map[string]*risks.CustomRisk - diagramDPI, serverPort *int + diagramDPI *int deferredRiskTrackingDueToWildcardMatching map[string]model.RiskTracking addModelTitle bool keepDiagramSourceFiles bool @@ -146,6 +147,12 @@ type Context struct { tempFolder *string } +func checkErr(err error) { + if err != nil { + panic(err) + } +} + func (context *Context) Defaults() *Context { *context = Context{ keepDiagramSourceFiles: keepDiagramSourceFiles, @@ -308,24 +315,6 @@ func (context *Context) checkRiskTracking() { } } -// === Error handling stuff ======================================== - -func checkErr(err error) { - if err != nil { - panic(err) - } -} - -func main() { - context := new(Context).Defaults() - context.parseCommandlineArgs() - if *context.serverPort > 0 { - context.startServer() - } else { - context.doIt() - } -} - // Unzip will decompress a zip archive, moving all files and folders // within the zip file (parameter 1) to an output directory (parameter 2). 
func (context *Context) unzip(src string, dest string) ([]string, error) { @@ -432,7 +421,7 @@ func (context *Context) addFileToZip(zipWriter *zip.Writer, filename string) err return err } -func (context *Context) doIt() { +func (context *Context) DoIt() { defer func() { var err error if r := recover(); r != nil { @@ -1102,7 +1091,7 @@ func (context *Context) doItViaRuntimeCall(modelFile string, outputDir string, } } -func (context *Context) startServer() { +func (context *Context) StartServer() { router := gin.Default() router.LoadHTMLGlob("server/static/*.html") // <== router.GET("/", func(c *gin.Context) { @@ -1232,7 +1221,7 @@ func (context *Context) startServer() { router.DELETE("/models/:model-id/shared-runtimes/:shared-runtime-id", context.deleteSharedRuntime) fmt.Println("Threagile server running...") - _ = router.Run(":" + strconv.Itoa(*context.serverPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified + _ = router.Run(":" + strconv.Itoa(context.ServerPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified } func (context *Context) exampleFile(ginContext *gin.Context) { @@ -3284,7 +3273,7 @@ func (context *Context) expandPath(path string) *string { return &path } -func (context *Context) parseCommandlineArgs() { +func (context *Context) ParseCommandlineArgs() { // folders context.appFolder = flag.String("app-dir", appDir, "app folder (default: "+appDir+")") context.serverFolder = flag.String("server-dir", dataDir, "base folder for server mode (default: "+dataDir+")") @@ -3304,7 +3293,7 @@ func (context *Context) parseCommandlineArgs() { context.ignoreOrphanedRiskTracking = flag.Bool("ignore-orphaned-risk-tracking", false, "ignore orphaned risk tracking (just log them) not matching a concrete risk") // commands - context.serverPort = flag.Int("server", 0, "start a server (instead of commandline execution) on the given port") + flag.IntVar(&context.ServerPort, "server", 0, "start a server (instead of commandline 
execution) on the given port") context.executeModelMacro = flag.String("execute-model-macro", "", "Execute model macro (by ID)") context.createExampleModel = flag.Bool("create-example-model", false, "just create an example model named threagile-example-model.yaml in the output directory") context.createStubModel = flag.Bool("create-stub-model", false, "just create a minimal stub model named threagile-stub-model.yaml in the output directory") diff --git a/model/types.go b/model/types.go index 98bd9fc7..a8c1999f 100644 --- a/model/types.go +++ b/model/types.go @@ -6,7 +6,6 @@ import ( "fmt" "github.com/threagile/threagile/colors" "gopkg.in/yaml.v3" - "log" "os" "path/filepath" "regexp" @@ -132,18 +131,18 @@ func (model *ModelInput) Defaults() *ModelInput { func (model *ModelInput) Load(inputFilename string) error { modelYaml, readError := os.ReadFile(inputFilename) if readError != nil { - log.Fatal("Unable to read model file: ", readError) + return fmt.Errorf("unable to read model file: %v", readError) } unmarshalError := yaml.Unmarshal(modelYaml, &model) if unmarshalError != nil { - log.Fatal("Unable to parse model yaml: ", unmarshalError) + return fmt.Errorf("unable to parse model yaml: %v", unmarshalError) } for _, includeFile := range model.Includes { mergeError := model.Merge(filepath.Dir(inputFilename), includeFile) if mergeError != nil { - log.Fatalf("Unable to merge model include %q: %v", includeFile, mergeError) + return fmt.Errorf("unable to merge model include %q: %v", includeFile, mergeError) } } diff --git a/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go b/pkg/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go similarity index 100% rename from macros/built-in/add-build-pipeline/add-build-pipeline-macro.go rename to pkg/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go diff --git a/macros/built-in/add-vault/add-vault-macro.go b/pkg/macros/built-in/add-vault/add-vault-macro.go similarity index 100% rename from 
macros/built-in/add-vault/add-vault-macro.go rename to pkg/macros/built-in/add-vault/add-vault-macro.go diff --git a/macros/built-in/pretty-print/pretty-print-macro.go b/pkg/macros/built-in/pretty-print/pretty-print-macro.go similarity index 100% rename from macros/built-in/pretty-print/pretty-print-macro.go rename to pkg/macros/built-in/pretty-print/pretty-print-macro.go diff --git a/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go b/pkg/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go similarity index 100% rename from macros/built-in/remove-unused-tags/remove-unused-tags-macro.go rename to pkg/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go diff --git a/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go b/pkg/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go similarity index 100% rename from macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go rename to pkg/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go diff --git a/macros/built-in/seed-tags/seed-tags-macro.go b/pkg/macros/built-in/seed-tags/seed-tags-macro.go similarity index 100% rename from macros/built-in/seed-tags/seed-tags-macro.go rename to pkg/macros/built-in/seed-tags/seed-tags-macro.go diff --git a/report/excel.go b/pkg/report/excel.go similarity index 100% rename from report/excel.go rename to pkg/report/excel.go diff --git a/report/json.go b/pkg/report/json.go similarity index 100% rename from report/json.go rename to pkg/report/json.go diff --git a/report/report.go b/pkg/report/report.go similarity index 98% rename from report/report.go rename to pkg/report/report.go index 4762053f..76dc44fe 100644 --- a/report/report.go +++ b/pkg/report/report.go @@ -7,49 +7,49 @@ import ( "github.com/jung-kurt/gofpdf/contrib/gofpdi" "github.com/threagile/threagile/colors" "github.com/threagile/threagile/model" - "github.com/threagile/threagile/risks" - "github.com/threagile/threagile/risks/built-in/accidental-secret-leak" 
- "github.com/threagile/threagile/risks/built-in/code-backdooring" - "github.com/threagile/threagile/risks/built-in/container-baseimage-backdooring" - "github.com/threagile/threagile/risks/built-in/container-platform-escape" - "github.com/threagile/threagile/risks/built-in/cross-site-request-forgery" - "github.com/threagile/threagile/risks/built-in/cross-site-scripting" - "github.com/threagile/threagile/risks/built-in/dos-risky-access-across-trust-boundary" - "github.com/threagile/threagile/risks/built-in/incomplete-model" - "github.com/threagile/threagile/risks/built-in/ldap-injection" - "github.com/threagile/threagile/risks/built-in/missing-authentication" - "github.com/threagile/threagile/risks/built-in/missing-authentication-second-factor" - "github.com/threagile/threagile/risks/built-in/missing-build-infrastructure" - "github.com/threagile/threagile/risks/built-in/missing-cloud-hardening" - "github.com/threagile/threagile/risks/built-in/missing-file-validation" - "github.com/threagile/threagile/risks/built-in/missing-hardening" - "github.com/threagile/threagile/risks/built-in/missing-identity-propagation" - "github.com/threagile/threagile/risks/built-in/missing-identity-provider-isolation" - "github.com/threagile/threagile/risks/built-in/missing-identity-store" - "github.com/threagile/threagile/risks/built-in/missing-network-segmentation" - "github.com/threagile/threagile/risks/built-in/missing-vault" - "github.com/threagile/threagile/risks/built-in/missing-vault-isolation" - "github.com/threagile/threagile/risks/built-in/missing-waf" - "github.com/threagile/threagile/risks/built-in/mixed-targets-on-shared-runtime" - "github.com/threagile/threagile/risks/built-in/path-traversal" - "github.com/threagile/threagile/risks/built-in/push-instead-of-pull-deployment" - "github.com/threagile/threagile/risks/built-in/search-query-injection" - "github.com/threagile/threagile/risks/built-in/server-side-request-forgery" - 
"github.com/threagile/threagile/risks/built-in/service-registry-poisoning" - "github.com/threagile/threagile/risks/built-in/sql-nosql-injection" - "github.com/threagile/threagile/risks/built-in/unchecked-deployment" - "github.com/threagile/threagile/risks/built-in/unencrypted-asset" - "github.com/threagile/threagile/risks/built-in/unencrypted-communication" - "github.com/threagile/threagile/risks/built-in/unguarded-access-from-internet" - "github.com/threagile/threagile/risks/built-in/unguarded-direct-datastore-access" - "github.com/threagile/threagile/risks/built-in/unnecessary-communication-link" - "github.com/threagile/threagile/risks/built-in/unnecessary-data-asset" - "github.com/threagile/threagile/risks/built-in/unnecessary-data-transfer" - "github.com/threagile/threagile/risks/built-in/unnecessary-technical-asset" - "github.com/threagile/threagile/risks/built-in/untrusted-deserialization" - "github.com/threagile/threagile/risks/built-in/wrong-communication-link-content" - "github.com/threagile/threagile/risks/built-in/wrong-trust-boundary-content" - "github.com/threagile/threagile/risks/built-in/xml-external-entity" + "github.com/threagile/threagile/pkg/risks" + "github.com/threagile/threagile/pkg/risks/built-in/accidental-secret-leak" + "github.com/threagile/threagile/pkg/risks/built-in/code-backdooring" + "github.com/threagile/threagile/pkg/risks/built-in/container-baseimage-backdooring" + "github.com/threagile/threagile/pkg/risks/built-in/container-platform-escape" + "github.com/threagile/threagile/pkg/risks/built-in/cross-site-request-forgery" + "github.com/threagile/threagile/pkg/risks/built-in/cross-site-scripting" + "github.com/threagile/threagile/pkg/risks/built-in/dos-risky-access-across-trust-boundary" + "github.com/threagile/threagile/pkg/risks/built-in/incomplete-model" + "github.com/threagile/threagile/pkg/risks/built-in/ldap-injection" + "github.com/threagile/threagile/pkg/risks/built-in/missing-authentication" + 
"github.com/threagile/threagile/pkg/risks/built-in/missing-authentication-second-factor" + "github.com/threagile/threagile/pkg/risks/built-in/missing-build-infrastructure" + "github.com/threagile/threagile/pkg/risks/built-in/missing-cloud-hardening" + "github.com/threagile/threagile/pkg/risks/built-in/missing-file-validation" + "github.com/threagile/threagile/pkg/risks/built-in/missing-hardening" + "github.com/threagile/threagile/pkg/risks/built-in/missing-identity-propagation" + "github.com/threagile/threagile/pkg/risks/built-in/missing-identity-provider-isolation" + "github.com/threagile/threagile/pkg/risks/built-in/missing-identity-store" + "github.com/threagile/threagile/pkg/risks/built-in/missing-network-segmentation" + "github.com/threagile/threagile/pkg/risks/built-in/missing-vault" + "github.com/threagile/threagile/pkg/risks/built-in/missing-vault-isolation" + "github.com/threagile/threagile/pkg/risks/built-in/missing-waf" + "github.com/threagile/threagile/pkg/risks/built-in/mixed-targets-on-shared-runtime" + "github.com/threagile/threagile/pkg/risks/built-in/path-traversal" + "github.com/threagile/threagile/pkg/risks/built-in/push-instead-of-pull-deployment" + "github.com/threagile/threagile/pkg/risks/built-in/search-query-injection" + "github.com/threagile/threagile/pkg/risks/built-in/server-side-request-forgery" + "github.com/threagile/threagile/pkg/risks/built-in/service-registry-poisoning" + "github.com/threagile/threagile/pkg/risks/built-in/sql-nosql-injection" + "github.com/threagile/threagile/pkg/risks/built-in/unchecked-deployment" + "github.com/threagile/threagile/pkg/risks/built-in/unencrypted-asset" + "github.com/threagile/threagile/pkg/risks/built-in/unencrypted-communication" + "github.com/threagile/threagile/pkg/risks/built-in/unguarded-access-from-internet" + "github.com/threagile/threagile/pkg/risks/built-in/unguarded-direct-datastore-access" + "github.com/threagile/threagile/pkg/risks/built-in/unnecessary-communication-link" + 
"github.com/threagile/threagile/pkg/risks/built-in/unnecessary-data-asset" + "github.com/threagile/threagile/pkg/risks/built-in/unnecessary-data-transfer" + "github.com/threagile/threagile/pkg/risks/built-in/unnecessary-technical-asset" + "github.com/threagile/threagile/pkg/risks/built-in/untrusted-deserialization" + "github.com/threagile/threagile/pkg/risks/built-in/wrong-communication-link-content" + "github.com/threagile/threagile/pkg/risks/built-in/wrong-trust-boundary-content" + "github.com/threagile/threagile/pkg/risks/built-in/xml-external-entity" "github.com/wcharczuk/go-chart" "github.com/wcharczuk/go-chart/drawing" "image" diff --git a/report/template/background.pdf b/pkg/report/template/background.pdf similarity index 100% rename from report/template/background.pdf rename to pkg/report/template/background.pdf diff --git a/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go b/pkg/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go similarity index 100% rename from risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go rename to pkg/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go diff --git a/risks/built-in/code-backdooring/code-backdooring-rule.go b/pkg/risks/built-in/code-backdooring/code-backdooring-rule.go similarity index 100% rename from risks/built-in/code-backdooring/code-backdooring-rule.go rename to pkg/risks/built-in/code-backdooring/code-backdooring-rule.go diff --git a/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go b/pkg/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go similarity index 100% rename from risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go rename to pkg/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go diff --git a/risks/built-in/container-platform-escape/container-platform-escape-rule.go 
b/pkg/risks/built-in/container-platform-escape/container-platform-escape-rule.go similarity index 100% rename from risks/built-in/container-platform-escape/container-platform-escape-rule.go rename to pkg/risks/built-in/container-platform-escape/container-platform-escape-rule.go diff --git a/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go b/pkg/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go similarity index 100% rename from risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go rename to pkg/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go diff --git a/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go b/pkg/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go similarity index 100% rename from risks/built-in/cross-site-scripting/cross-site-scripting-rule.go rename to pkg/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go diff --git a/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go b/pkg/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go similarity index 100% rename from risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go rename to pkg/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go diff --git a/risks/built-in/incomplete-model/incomplete-model-rule.go b/pkg/risks/built-in/incomplete-model/incomplete-model-rule.go similarity index 100% rename from risks/built-in/incomplete-model/incomplete-model-rule.go rename to pkg/risks/built-in/incomplete-model/incomplete-model-rule.go diff --git a/risks/built-in/ldap-injection/ldap-injection-rule.go b/pkg/risks/built-in/ldap-injection/ldap-injection-rule.go similarity index 100% rename from risks/built-in/ldap-injection/ldap-injection-rule.go rename to 
pkg/risks/built-in/ldap-injection/ldap-injection-rule.go diff --git a/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go b/pkg/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go similarity index 98% rename from risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go rename to pkg/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go index aa1530a4..49b3ef57 100644 --- a/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go +++ b/pkg/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go @@ -2,7 +2,7 @@ package missing_authentication_second_factor import ( "github.com/threagile/threagile/model" - "github.com/threagile/threagile/risks/built-in/missing-authentication" + "github.com/threagile/threagile/pkg/risks/built-in/missing-authentication" ) func Rule() model.CustomRiskRule { diff --git a/risks/built-in/missing-authentication/missing-authentication-rule.go b/pkg/risks/built-in/missing-authentication/missing-authentication-rule.go similarity index 100% rename from risks/built-in/missing-authentication/missing-authentication-rule.go rename to pkg/risks/built-in/missing-authentication/missing-authentication-rule.go diff --git a/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go b/pkg/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go similarity index 100% rename from risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go rename to pkg/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go diff --git a/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go b/pkg/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go similarity index 100% rename from 
risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go rename to pkg/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go diff --git a/risks/built-in/missing-file-validation/missing-file-validation-rule.go b/pkg/risks/built-in/missing-file-validation/missing-file-validation-rule.go similarity index 100% rename from risks/built-in/missing-file-validation/missing-file-validation-rule.go rename to pkg/risks/built-in/missing-file-validation/missing-file-validation-rule.go diff --git a/risks/built-in/missing-hardening/missing-hardening-rule.go b/pkg/risks/built-in/missing-hardening/missing-hardening-rule.go similarity index 100% rename from risks/built-in/missing-hardening/missing-hardening-rule.go rename to pkg/risks/built-in/missing-hardening/missing-hardening-rule.go diff --git a/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go b/pkg/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go similarity index 100% rename from risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go rename to pkg/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go diff --git a/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go b/pkg/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go similarity index 100% rename from risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go rename to pkg/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go diff --git a/risks/built-in/missing-identity-store/missing-identity-store-rule.go b/pkg/risks/built-in/missing-identity-store/missing-identity-store-rule.go similarity index 100% rename from risks/built-in/missing-identity-store/missing-identity-store-rule.go rename to pkg/risks/built-in/missing-identity-store/missing-identity-store-rule.go diff 
--git a/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go b/pkg/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go similarity index 100% rename from risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go rename to pkg/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go diff --git a/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go b/pkg/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go similarity index 100% rename from risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go rename to pkg/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go diff --git a/risks/built-in/missing-vault/missing-vault-rule.go b/pkg/risks/built-in/missing-vault/missing-vault-rule.go similarity index 100% rename from risks/built-in/missing-vault/missing-vault-rule.go rename to pkg/risks/built-in/missing-vault/missing-vault-rule.go diff --git a/risks/built-in/missing-waf/missing-waf-rule.go b/pkg/risks/built-in/missing-waf/missing-waf-rule.go similarity index 100% rename from risks/built-in/missing-waf/missing-waf-rule.go rename to pkg/risks/built-in/missing-waf/missing-waf-rule.go diff --git a/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go b/pkg/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go similarity index 100% rename from risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go rename to pkg/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go diff --git a/risks/built-in/path-traversal/path-traversal-rule.go b/pkg/risks/built-in/path-traversal/path-traversal-rule.go similarity index 100% rename from risks/built-in/path-traversal/path-traversal-rule.go rename to pkg/risks/built-in/path-traversal/path-traversal-rule.go diff --git 
a/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go b/pkg/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go similarity index 100% rename from risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go rename to pkg/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go diff --git a/risks/built-in/search-query-injection/search-query-injection-rule.go b/pkg/risks/built-in/search-query-injection/search-query-injection-rule.go similarity index 100% rename from risks/built-in/search-query-injection/search-query-injection-rule.go rename to pkg/risks/built-in/search-query-injection/search-query-injection-rule.go diff --git a/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go b/pkg/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go similarity index 100% rename from risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go rename to pkg/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go diff --git a/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go b/pkg/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go similarity index 100% rename from risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go rename to pkg/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go diff --git a/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go b/pkg/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go similarity index 100% rename from risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go rename to pkg/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go diff --git a/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go b/pkg/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go similarity index 100% 
rename from risks/built-in/unchecked-deployment/unchecked-deployment-rule.go rename to pkg/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go diff --git a/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go b/pkg/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go similarity index 100% rename from risks/built-in/unencrypted-asset/unencrypted-asset-rule.go rename to pkg/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go diff --git a/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go b/pkg/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go similarity index 100% rename from risks/built-in/unencrypted-communication/unencrypted-communication-rule.go rename to pkg/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go diff --git a/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go b/pkg/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go similarity index 100% rename from risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go rename to pkg/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go diff --git a/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go b/pkg/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go similarity index 100% rename from risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go rename to pkg/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go diff --git a/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go b/pkg/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go similarity index 100% rename from risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go rename to 
pkg/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go diff --git a/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go b/pkg/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go similarity index 100% rename from risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go rename to pkg/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go diff --git a/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go b/pkg/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go similarity index 100% rename from risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go rename to pkg/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go diff --git a/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go b/pkg/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go similarity index 100% rename from risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go rename to pkg/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go diff --git a/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go b/pkg/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go similarity index 100% rename from risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go rename to pkg/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go diff --git a/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go b/pkg/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go similarity index 100% rename from risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go rename to pkg/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go diff --git 
a/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go b/pkg/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go similarity index 100% rename from risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go rename to pkg/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go diff --git a/risks/built-in/xml-external-entity/xml-external-entity-rule.go b/pkg/risks/built-in/xml-external-entity/xml-external-entity-rule.go similarity index 100% rename from risks/built-in/xml-external-entity/xml-external-entity-rule.go rename to pkg/risks/built-in/xml-external-entity/xml-external-entity-rule.go diff --git a/risks/risk.go b/pkg/risks/risk.go similarity index 94% rename from risks/risk.go rename to pkg/risks/risk.go index b685c4f0..5b00981b 100644 --- a/risks/risk.go +++ b/pkg/risks/risk.go @@ -2,7 +2,7 @@ package risks import ( "github.com/threagile/threagile/model" - "github.com/threagile/threagile/run" + "github.com/threagile/threagile/pkg/run" "log" ) diff --git a/run/runner.go b/pkg/run/runner.go similarity index 100% rename from run/runner.go rename to pkg/run/runner.go From 273a867f7084f61a376137613772a149d66ea579 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Fri, 22 Dec 2023 03:37:27 -0800 Subject: [PATCH 12/68] merged Yevhen's changes --- Makefile | 2 +- cmd/raa/main.go | 18 +- cmd/raa_dummy/main.go | 3 +- cmd/risk_demo/main.go | 23 +- cmd/threagile/main.go | 20 +- cmd/threagile/main_test.go | 6 +- go.mod | 3 + go.sum | 14 + internal/threagile/about.go | 16 + internal/threagile/context.go | 8555 ++++++++--------- internal/threagile/macros.go | 60 + internal/threagile/progress-reporter.go | 33 + internal/threagile/root.go | 37 + internal/threagile/rules.go | 65 + internal/threagile/types.go | 46 + model/types.go | 4389 --------- {colors => pkg/colors}/colors.go | 2 + pkg/docs/constants.go | 32 + pkg/input/input.go | 402 + .../add-build-pipeline-macro.go | 473 +- 
.../built-in/add-vault/add-vault-macro.go | 164 +- pkg/macros/built-in/built-in.go | 22 + .../pretty-print/pretty-print-macro.go | 17 +- .../remove-unused-tags-macro.go | 27 +- .../seed-risk-tracking-macro.go | 28 +- .../built-in/seed-tags/seed-tags-macro.go | 23 +- pkg/macros/macros.go | 57 + pkg/model/communication_link.go | 271 + pkg/model/data_asset.go | 272 + pkg/model/helpers.go | 53 + pkg/model/model.go | 159 + pkg/model/risks.go | 873 ++ pkg/model/shared_runtime.go | 87 + pkg/model/technical_asset.go | 544 ++ pkg/model/trust_boundary.go | 127 + pkg/report/excel.go | 94 +- pkg/report/json.go | 15 +- pkg/report/report.go | 1378 +-- pkg/risks/risk.go | 34 - pkg/run/runner.go | 1 + .../accidental-secret-leak-rule.go | 45 +- .../code-backdooring/code-backdooring-rule.go | 43 +- .../container-baseimage-backdooring-rule.go | 35 +- .../container-platform-escape-rule.go | 39 +- .../cross-site-request-forgery-rule.go | 35 +- .../cross-site-scripting-rule.go | 25 +- ...risky-access-across-trust-boundary-rule.go | 47 +- .../incomplete-model/incomplete-model-rule.go | 31 +- .../ldap-injection/ldap-injection-rule.go | 27 +- ...ssing-authentication-second-factor-rule.go | 45 +- .../missing-authentication-rule.go | 47 +- .../missing-build-infrastructure-rule.go | 43 +- .../missing-cloud-hardening-rule.go | 122 +- .../missing-file-validation-rule.go | 31 +- .../missing-hardening-rule.go | 28 +- .../missing-identity-propagation-rule.go | 47 +- ...issing-identity-provider-isolation-rule.go | 33 +- .../missing-identity-store-rule.go | 41 +- .../missing-network-segmentation-rule.go | 42 +- .../missing-vault-isolation-rule.go | 41 +- .../missing-vault/missing-vault-rule.go | 37 +- .../built-in/missing-waf/missing-waf-rule.go | 31 +- .../mixed-targets-on-shared-runtime-rule.go | 26 +- .../path-traversal/path-traversal-rule.go | 29 +- .../push-instead-of-pull-deployment-rule.go | 31 +- .../search-query-injection-rule.go | 37 +- .../server-side-request-forgery-rule.go | 37 +- 
.../service-registry-poisoning-rule.go | 29 +- .../sql-nosql-injection-rule.go | 27 +- .../unchecked-deployment-rule.go | 27 +- .../unencrypted-asset-rule.go | 53 +- .../unencrypted-communication-rule.go | 29 +- .../unguarded-access-from-internet-rule.go | 56 +- .../unguarded-direct-datastore-access-rule.go | 77 +- .../unnecessary-communication-link-rule.go | 19 +- .../unnecessary-data-asset-rule.go | 18 +- .../unnecessary-data-transfer-rule.go | 30 +- .../unnecessary-technical-asset-rule.go | 21 +- .../untrusted-deserialization-rule.go | 41 +- .../wrong-communication-link-content-rule.go | 29 +- .../wrong-trust-boundary-content.go | 23 +- .../xml-external-entity-rule.go | 31 +- pkg/security/risks/rules.go | 134 + pkg/security/types/authentication.go | 48 + pkg/security/types/authorization.go | 35 + pkg/security/types/confidentiality.go | 90 + pkg/security/types/criticality.go | 90 + pkg/security/types/data_breach_probability.go | 45 + pkg/security/types/data_format.go | 58 + pkg/security/types/encryption_style.go | 60 + pkg/security/types/protocol.go | 192 + pkg/security/types/quantity.go | 62 + pkg/security/types/risk_explotation_impact.go | 52 + .../types/risk_explotation_likelihood.go | 52 + pkg/security/types/risk_function.go | 48 + pkg/security/types/risk_severity.go | 51 + pkg/security/types/risk_status.go | 58 + pkg/security/types/stride.go | 54 + pkg/security/types/technical_asset_machine.go | 37 + pkg/security/types/technical_asset_size.go | 38 + .../types/technical_asset_technology.go | 276 + pkg/security/types/technical_asset_type.go | 35 + pkg/security/types/trust_boundary.go | 56 + pkg/security/types/types.go | 41 + pkg/security/types/usage.go | 52 + .../report => report}/template/background.pdf | Bin 106 files changed, 10931 insertions(+), 10763 deletions(-) create mode 100644 internal/threagile/about.go create mode 100644 internal/threagile/macros.go create mode 100644 internal/threagile/progress-reporter.go create mode 100644 
internal/threagile/root.go create mode 100644 internal/threagile/rules.go create mode 100644 internal/threagile/types.go delete mode 100644 model/types.go rename {colors => pkg/colors}/colors.go (96%) create mode 100644 pkg/docs/constants.go create mode 100644 pkg/input/input.go create mode 100644 pkg/macros/built-in/built-in.go create mode 100644 pkg/macros/macros.go create mode 100644 pkg/model/communication_link.go create mode 100644 pkg/model/data_asset.go create mode 100644 pkg/model/helpers.go create mode 100644 pkg/model/model.go create mode 100644 pkg/model/risks.go create mode 100644 pkg/model/shared_runtime.go create mode 100644 pkg/model/technical_asset.go create mode 100644 pkg/model/trust_boundary.go delete mode 100644 pkg/risks/risk.go rename pkg/{ => security}/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go (66%) rename pkg/{ => security}/risks/built-in/code-backdooring/code-backdooring-rule.go (78%) rename pkg/{ => security}/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go (72%) rename pkg/{ => security}/risks/built-in/container-platform-escape/container-platform-escape-rule.go (77%) rename pkg/{ => security}/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go (74%) rename pkg/{ => security}/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go (78%) rename pkg/{ => security}/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go (75%) rename pkg/{ => security}/risks/built-in/incomplete-model/incomplete-model-rule.go (76%) rename pkg/{ => security}/risks/built-in/ldap-injection/ldap-injection-rule.go (81%) rename pkg/{ => security}/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go (66%) rename pkg/{ => security}/risks/built-in/missing-authentication/missing-authentication-rule.go (68%) rename pkg/{ => 
security}/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go (74%) rename pkg/{ => security}/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go (79%) rename pkg/{ => security}/risks/built-in/missing-file-validation/missing-file-validation-rule.go (76%) rename pkg/{ => security}/risks/built-in/missing-hardening/missing-hardening-rule.go (73%) rename pkg/{ => security}/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go (75%) rename pkg/{ => security}/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go (79%) rename pkg/{ => security}/risks/built-in/missing-identity-store/missing-identity-store-rule.go (74%) rename pkg/{ => security}/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go (69%) rename pkg/{ => security}/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go (72%) rename pkg/{ => security}/risks/built-in/missing-vault/missing-vault-rule.go (75%) rename pkg/{ => security}/risks/built-in/missing-waf/missing-waf-rule.go (79%) rename pkg/{ => security}/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go (86%) rename pkg/{ => security}/risks/built-in/path-traversal/path-traversal-rule.go (80%) rename pkg/{ => security}/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go (79%) rename pkg/{ => security}/risks/built-in/search-query-injection/search-query-injection-rule.go (76%) rename pkg/{ => security}/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go (82%) rename pkg/{ => security}/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go (70%) rename pkg/{ => security}/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go (81%) rename pkg/{ => security}/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go (84%) rename pkg/{ => 
security}/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go (59%) rename pkg/{ => security}/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go (83%) rename pkg/{ => security}/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go (60%) rename pkg/{ => security}/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go (52%) rename pkg/{ => security}/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go (82%) rename pkg/{ => security}/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go (87%) rename pkg/{ => security}/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go (86%) rename pkg/{ => security}/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go (79%) rename pkg/{ => security}/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go (74%) rename pkg/{ => security}/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go (74%) rename pkg/{ => security}/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go (76%) rename pkg/{ => security}/risks/built-in/xml-external-entity/xml-external-entity-rule.go (76%) create mode 100644 pkg/security/risks/rules.go create mode 100644 pkg/security/types/authentication.go create mode 100644 pkg/security/types/authorization.go create mode 100644 pkg/security/types/confidentiality.go create mode 100644 pkg/security/types/criticality.go create mode 100644 pkg/security/types/data_breach_probability.go create mode 100644 pkg/security/types/data_format.go create mode 100644 pkg/security/types/encryption_style.go create mode 100644 pkg/security/types/protocol.go create mode 100644 pkg/security/types/quantity.go create mode 100644 pkg/security/types/risk_explotation_impact.go create mode 100644 pkg/security/types/risk_explotation_likelihood.go create mode 100644 
pkg/security/types/risk_function.go create mode 100644 pkg/security/types/risk_severity.go create mode 100644 pkg/security/types/risk_status.go create mode 100644 pkg/security/types/stride.go create mode 100644 pkg/security/types/technical_asset_machine.go create mode 100644 pkg/security/types/technical_asset_size.go create mode 100644 pkg/security/types/technical_asset_technology.go create mode 100644 pkg/security/types/technical_asset_type.go create mode 100644 pkg/security/types/trust_boundary.go create mode 100644 pkg/security/types/types.go create mode 100644 pkg/security/types/usage.go rename {pkg/report => report}/template/background.pdf (100%) diff --git a/Makefile b/Makefile index 463e70ff..796b695a 100644 --- a/Makefile +++ b/Makefile @@ -3,7 +3,7 @@ ASSET_DIR = $(HOME)/.threagile BIN_DIR = $(HOME)/bin ASSETS = \ LICENSE.txt \ - pkg/report/template/background.pdf \ + report/template/background.pdf \ support/openapi.yaml \ support/schema.json \ support/live-templates.txt \ diff --git a/cmd/raa/main.go b/cmd/raa/main.go index 398f7ae0..5d5ba4c6 100644 --- a/cmd/raa/main.go +++ b/cmd/raa/main.go @@ -4,10 +4,12 @@ import ( "bufio" "encoding/json" "fmt" - "github.com/threagile/threagile/model" "io" "os" "sort" + + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) // used from run caller: @@ -153,24 +155,24 @@ func calculateAttackerAttractiveness(input *model.ParsedModel, techAsset model.T score += dataAsset.Availability.AttackerAttractivenessForInOutTransferredData() } } - if techAsset.Technology == model.LoadBalancer || techAsset.Technology == model.ReverseProxy { + if techAsset.Technology == types.LoadBalancer || techAsset.Technology == types.ReverseProxy { score = score / 5.5 } - if techAsset.Technology == model.Monitoring { + if techAsset.Technology == types.Monitoring { score = score / 5 } - if techAsset.Technology == model.ContainerPlatform { + if techAsset.Technology == types.ContainerPlatform { score = 
score * 5 } - if techAsset.Technology == model.Vault { + if techAsset.Technology == types.Vault { score = score * 2 } - if techAsset.Technology == model.BuildPipeline || techAsset.Technology == model.SourcecodeRepository || techAsset.Technology == model.ArtifactRegistry { + if techAsset.Technology == types.BuildPipeline || techAsset.Technology == types.SourcecodeRepository || techAsset.Technology == types.ArtifactRegistry { score = score * 2 } - if techAsset.Technology == model.IdentityProvider || techAsset.Technology == model.IdentityStoreDatabase || techAsset.Technology == model.IdentityStoreLDAP { + if techAsset.Technology == types.IdentityProvider || techAsset.Technology == types.IdentityStoreDatabase || techAsset.Technology == types.IdentityStoreLDAP { score = score * 2.5 - } else if techAsset.Type == model.Datastore { + } else if techAsset.Type == types.Datastore { score = score * 2 } if techAsset.MultiTenant { diff --git a/cmd/raa_dummy/main.go b/cmd/raa_dummy/main.go index e0623995..f4aa20b1 100644 --- a/cmd/raa_dummy/main.go +++ b/cmd/raa_dummy/main.go @@ -4,10 +4,11 @@ import ( "bufio" "encoding/json" "fmt" - "github.com/threagile/threagile/model" "io" "math/rand" "os" + + "github.com/threagile/threagile/pkg/model" ) // JUST A DUMMY TO HAVE AN ALTERNATIVE PLUGIN TO USE/TEST diff --git a/cmd/risk_demo/main.go b/cmd/risk_demo/main.go index fe9c560c..15cc23a9 100644 --- a/cmd/risk_demo/main.go +++ b/cmd/risk_demo/main.go @@ -5,10 +5,11 @@ import ( "encoding/json" "flag" "fmt" - "github.com/threagile/threagile/model" - "github.com/threagile/threagile/pkg/risks" "io" "os" + + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) type customRiskRule string @@ -25,7 +26,7 @@ func main() { if *getInfo { rule := new(customRiskRule) category := rule.Category() - riskData, marshalError := json.Marshal(risks.CustomRisk{ + riskData, marshalError := json.Marshal(model.CustomRisk{ ID: category.Id, Category: category, Tags: 
rule.SupportedTags(), @@ -81,8 +82,8 @@ func (r customRiskRule) Category() model.RiskCategory { Action: "Demo Action", Mitigation: "Demo Mitigation", Check: "Demo Check", - Function: model.Development, - STRIDE: model.Tampering, + Function: types.Development, + STRIDE: types.Tampering, DetectionLogic: "Demo Detection", RiskAssessment: "Demo Risk Assessment", FalsePositives: "Demo False Positive.", @@ -95,9 +96,9 @@ func (r customRiskRule) SupportedTags() []string { return []string{"demo tag"} } -func (r customRiskRule) GenerateRisks(input *model.ParsedModel) []model.Risk { +func (r customRiskRule) GenerateRisks(parsedModel *model.ParsedModel) []model.Risk { generatedRisks := make([]model.Risk, 0) - for _, techAsset := range input.TechnicalAssets { + for _, techAsset := range parsedModel.TechnicalAssets { generatedRisks = append(generatedRisks, createRisk(techAsset)) } return generatedRisks @@ -106,12 +107,12 @@ func (r customRiskRule) GenerateRisks(input *model.ParsedModel) []model.Risk { func createRisk(technicalAsset model.TechnicalAsset) model.Risk { risk := model.Risk{ Category: CustomRiskRule.Category(), - Severity: model.CalculateSeverity(model.VeryLikely, model.MediumImpact), - ExploitationLikelihood: model.VeryLikely, - ExploitationImpact: model.MediumImpact, + Severity: model.CalculateSeverity(types.VeryLikely, types.MediumImpact), + ExploitationLikelihood: types.VeryLikely, + ExploitationImpact: types.MediumImpact, Title: "Demo risk at " + technicalAsset.Title + "", MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Possible, + DataBreachProbability: types.Possible, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id diff --git a/cmd/threagile/main.go b/cmd/threagile/main.go index 6390ad97..db9b007c 100644 --- a/cmd/threagile/main.go +++ b/cmd/threagile/main.go @@ -1,11 +1,25 @@ package main -import "github.com/threagile/threagile/internal/threagile" +import 
( + "github.com/threagile/threagile/internal/threagile" +) + +const ( + buildTimestamp = "" +) + +// === Error handling stuff ======================================== func main() { - context := new(threagile.Context).Defaults() + + // TODO: uncomment below as soon as refactoring is finished - everything will go through rootCmd.Execute + // cmd.Execute() + + // TODO: remove below as soon as refactoring is finished - everything will go through rootCmd.Execute + // for now it's fine to have as frequently uncommented to see the actual behaviour + context := new(threagile.Context).Defaults(buildTimestamp) context.ParseCommandlineArgs() - if context.ServerPort > 0 { + if context.ServerMode { context.StartServer() } else { context.DoIt() diff --git a/cmd/threagile/main_test.go b/cmd/threagile/main_test.go index c8802008..dd1b235f 100644 --- a/cmd/threagile/main_test.go +++ b/cmd/threagile/main_test.go @@ -3,7 +3,7 @@ package main import ( "encoding/json" "github.com/akedrou/textdiff" - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/input" "path/filepath" "sort" "strings" @@ -12,7 +12,7 @@ import ( func TestParseModel(t *testing.T) { flatModelFile := filepath.Join("..", "..", "test", "all.yaml") - flatModel := *new(model.ModelInput).Defaults() + flatModel := *new(input.ModelInput).Defaults() flatLoadError := flatModel.Load(flatModelFile) if flatLoadError != nil { t.Errorf("unable to parse model yaml %q: %v", flatModelFile, flatLoadError) @@ -29,7 +29,7 @@ func TestParseModel(t *testing.T) { } splitModelFile := filepath.Join("..", "..", "test", "main.yaml") - splitModel := *new(model.ModelInput).Defaults() + splitModel := *new(input.ModelInput).Defaults() splitLoadError := splitModel.Load(splitModelFile) if splitLoadError != nil { t.Errorf("unable to parse model yaml %q: %v", splitModelFile, splitLoadError) diff --git a/go.mod b/go.mod index 6b20efe3..0d650038 100644 --- a/go.mod +++ b/go.mod @@ -25,6 +25,7 @@ require ( 
github.com/go-playground/validator/v10 v10.15.5 // indirect github.com/goccy/go-json v0.10.2 // indirect github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/klauspost/cpuid/v2 v2.2.5 // indirect github.com/leodido/go-urn v1.2.4 // indirect @@ -37,6 +38,8 @@ require ( github.com/pkg/errors v0.9.1 // indirect github.com/richardlehane/mscfb v1.0.4 // indirect github.com/richardlehane/msoleps v1.0.3 // indirect + github.com/spf13/cobra v1.8.0 // indirect + github.com/spf13/pflag v1.0.5 // indirect github.com/twitchyliquid64/golang-asm v0.15.1 // indirect github.com/ugorji/go/codec v1.2.11 // indirect github.com/xuri/efp v0.0.0-20231025114914-d1ff6096ae53 // indirect diff --git a/go.sum b/go.sum index 4a517668..cc4471a6 100644 --- a/go.sum +++ b/go.sum @@ -1,3 +1,4 @@ +cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= github.com/akedrou/textdiff v0.0.0-20230423230343-2ebdcebdccc1 h1:XfKKiQL7irIGI7nfu4a6IKhrgUHvKwhH/AnuHgZy/+U= github.com/akedrou/textdiff v0.0.0-20230423230343-2ebdcebdccc1/go.mod h1:PJwvxBpzqjdeomc0r8Hgc+xJC7k6z+k371tffCGXR2M= github.com/blend/go-sdk v1.20220411.3 h1:GFV4/FQX5UzXLPwWV03gP811pj7B8J2sbuq+GJQofXc= @@ -14,6 +15,7 @@ github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpV github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= github.com/chenzhuoyu/iasm v0.9.1 h1:tUHQJXo3NhBqw6s33wkGn9SP3bvrWLdlVIJ3hQBL7P0= github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= +github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -36,11 +38,14 @@ github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MG github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g= github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4= github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= @@ -76,7 +81,12 @@ github.com/richardlehane/mscfb v1.0.4/go.mod h1:YzVpcZg9czvAuhk9T+a3avCpcFPMUWm7 github.com/richardlehane/msoleps v1.0.1/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg= github.com/richardlehane/msoleps v1.0.3 h1:aznSZzrwYRl3rLKRT3gUk9am7T/mLNSnJINvN0AQoVM= github.com/richardlehane/msoleps v1.0.3/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod 
h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w= +github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= +github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= @@ -126,6 +136,7 @@ golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= +golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -153,6 +164,7 @@ golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod 
h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= @@ -161,6 +173,8 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= diff --git a/internal/threagile/about.go b/internal/threagile/about.go new file mode 100644 index 00000000..e2fb735e --- /dev/null +++ b/internal/threagile/about.go @@ -0,0 +1,16 @@ +/* +Copyright © 2023 NAME HERE +*/ +package threagile + +import ( + "github.com/spf13/cobra" + + "github.com/threagile/threagile/pkg/docs" +) + +var versionCmd = &cobra.Command{ + Use: "version", + Short: "Get version information", + Long: "\n" + docs.Logo + "\n\n" + docs.VersionText, +} diff --git a/internal/threagile/context.go b/internal/threagile/context.go index ed48f8d0..5ab59e7d 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -14,64 +14,7 @@ import ( "encoding/hex" "errors" "flag" - "fmt" - "github.com/gin-gonic/gin" - "github.com/google/uuid" - "github.com/threagile/threagile/colors" - "github.com/threagile/threagile/model" - addbuildpipeline "github.com/threagile/threagile/pkg/macros/built-in/add-build-pipeline" - addvault 
"github.com/threagile/threagile/pkg/macros/built-in/add-vault" - prettyprint "github.com/threagile/threagile/pkg/macros/built-in/pretty-print" - removeunusedtags "github.com/threagile/threagile/pkg/macros/built-in/remove-unused-tags" - seedrisktracking "github.com/threagile/threagile/pkg/macros/built-in/seed-risk-tracking" - seedtags "github.com/threagile/threagile/pkg/macros/built-in/seed-tags" - "github.com/threagile/threagile/pkg/report" - "github.com/threagile/threagile/pkg/risks" - accidentalsecretleak "github.com/threagile/threagile/pkg/risks/built-in/accidental-secret-leak" - codebackdooring "github.com/threagile/threagile/pkg/risks/built-in/code-backdooring" - containerbaseimagebackdooring "github.com/threagile/threagile/pkg/risks/built-in/container-baseimage-backdooring" - containerplatformescape "github.com/threagile/threagile/pkg/risks/built-in/container-platform-escape" - crosssiterequestforgery "github.com/threagile/threagile/pkg/risks/built-in/cross-site-request-forgery" - crosssitescripting "github.com/threagile/threagile/pkg/risks/built-in/cross-site-scripting" - dosriskyaccessacrosstrustboundary "github.com/threagile/threagile/pkg/risks/built-in/dos-risky-access-across-trust-boundary" - incompletemodel "github.com/threagile/threagile/pkg/risks/built-in/incomplete-model" - ldapinjection "github.com/threagile/threagile/pkg/risks/built-in/ldap-injection" - missingauthentication "github.com/threagile/threagile/pkg/risks/built-in/missing-authentication" - missingauthenticationsecondfactor "github.com/threagile/threagile/pkg/risks/built-in/missing-authentication-second-factor" - missingbuildinfrastructure "github.com/threagile/threagile/pkg/risks/built-in/missing-build-infrastructure" - missingcloudhardening "github.com/threagile/threagile/pkg/risks/built-in/missing-cloud-hardening" - missingfilevalidation "github.com/threagile/threagile/pkg/risks/built-in/missing-file-validation" - missinghardening 
"github.com/threagile/threagile/pkg/risks/built-in/missing-hardening" - missingidentitypropagation "github.com/threagile/threagile/pkg/risks/built-in/missing-identity-propagation" - missingidentityproviderisolation "github.com/threagile/threagile/pkg/risks/built-in/missing-identity-provider-isolation" - missingidentitystore "github.com/threagile/threagile/pkg/risks/built-in/missing-identity-store" - missingnetworksegmentation "github.com/threagile/threagile/pkg/risks/built-in/missing-network-segmentation" - missingvault "github.com/threagile/threagile/pkg/risks/built-in/missing-vault" - missingvaultisolation "github.com/threagile/threagile/pkg/risks/built-in/missing-vault-isolation" - missingwaf "github.com/threagile/threagile/pkg/risks/built-in/missing-waf" - mixedtargetsonsharedruntime "github.com/threagile/threagile/pkg/risks/built-in/mixed-targets-on-shared-runtime" - pathtraversal "github.com/threagile/threagile/pkg/risks/built-in/path-traversal" - pushinsteadofpulldeployment "github.com/threagile/threagile/pkg/risks/built-in/push-instead-of-pull-deployment" - searchqueryinjection "github.com/threagile/threagile/pkg/risks/built-in/search-query-injection" - serversiderequestforgery "github.com/threagile/threagile/pkg/risks/built-in/server-side-request-forgery" - serviceregistrypoisoning "github.com/threagile/threagile/pkg/risks/built-in/service-registry-poisoning" - sqlnosqlinjection "github.com/threagile/threagile/pkg/risks/built-in/sql-nosql-injection" - uncheckeddeployment "github.com/threagile/threagile/pkg/risks/built-in/unchecked-deployment" - unencryptedasset "github.com/threagile/threagile/pkg/risks/built-in/unencrypted-asset" - unencryptedcommunication "github.com/threagile/threagile/pkg/risks/built-in/unencrypted-communication" - unguardedaccessfrominternet "github.com/threagile/threagile/pkg/risks/built-in/unguarded-access-from-internet" - unguardeddirectdatastoreaccess 
"github.com/threagile/threagile/pkg/risks/built-in/unguarded-direct-datastore-access" - unnecessarycommunicationlink "github.com/threagile/threagile/pkg/risks/built-in/unnecessary-communication-link" - unnecessarydataasset "github.com/threagile/threagile/pkg/risks/built-in/unnecessary-data-asset" - unnecessarydatatransfer "github.com/threagile/threagile/pkg/risks/built-in/unnecessary-data-transfer" - unnecessarytechnicalasset "github.com/threagile/threagile/pkg/risks/built-in/unnecessary-technical-asset" - untrusteddeserialization "github.com/threagile/threagile/pkg/risks/built-in/untrusted-deserialization" - wrongcommunicationlinkcontent "github.com/threagile/threagile/pkg/risks/built-in/wrong-communication-link-content" - wrongtrustboundarycontent "github.com/threagile/threagile/pkg/risks/built-in/wrong-trust-boundary-content" - xmlexternalentity "github.com/threagile/threagile/pkg/risks/built-in/xml-external-entity" - "github.com/threagile/threagile/pkg/run" - "golang.org/x/crypto/argon2" - "gopkg.in/yaml.v3" + "fmt" // TODO: no fmt here "hash/fnv" "io" "log" @@ -86,6 +29,28 @@ import ( "strings" "sync" "time" + + addbuildpipeline "github.com/threagile/threagile/pkg/macros/built-in/add-build-pipeline" + addvault "github.com/threagile/threagile/pkg/macros/built-in/add-vault" + prettyprint "github.com/threagile/threagile/pkg/macros/built-in/pretty-print" + removeunusedtags "github.com/threagile/threagile/pkg/macros/built-in/remove-unused-tags" + seedrisktracking "github.com/threagile/threagile/pkg/macros/built-in/seed-risk-tracking" + seedtags "github.com/threagile/threagile/pkg/macros/built-in/seed-tags" + + "golang.org/x/crypto/argon2" + "gopkg.in/yaml.v3" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/threagile/threagile/pkg/colors" + "github.com/threagile/threagile/pkg/docs" + "github.com/threagile/threagile/pkg/input" + "github.com/threagile/threagile/pkg/macros" + "github.com/threagile/threagile/pkg/model" + 
"github.com/threagile/threagile/pkg/report" + "github.com/threagile/threagile/pkg/run" + risks "github.com/threagile/threagile/pkg/security/risks" + "github.com/threagile/threagile/pkg/security/types" ) const ( @@ -121,7 +86,7 @@ const ( ) type Context struct { - ServerPort int + ServerMode bool successCount int errorCount int @@ -129,15 +94,19 @@ type Context struct { buildTimestamp string globalLock sync.Mutex - modelInput model.ModelInput + modelInput input.ModelInput + + // TODO: remove refactoring note below + // moved from types.go + parsedModel model.ParsedModel modelFilename, templateFilename *string createExampleModel, createStubModel, createEditingSupport, verbose, ignoreOrphanedRiskTracking *bool generateDataFlowDiagram, generateDataAssetDiagram, generateRisksJSON, generateTechnicalAssetsJSON *bool generateStatsJSON, generateRisksExcel, generateTagsExcel, generateReportPDF *bool outputDir, raaPlugin, skipRiskRules, riskRulesPlugins, executeModelMacro *string - customRiskRules map[string]*risks.CustomRisk - diagramDPI *int + customRiskRules map[string]*model.CustomRisk + diagramDPI, serverPort *int deferredRiskTrackingDueToWildcardMatching map[string]model.RiskTracking addModelTitle bool keepDiagramSourceFiles bool @@ -145,23 +114,101 @@ type Context struct { binFolder *string serverFolder *string tempFolder *string + + defaultGraphvizDPI int + maxGraphvizDPI int + backupHistoryFilesToKeep int + + tempDir string + binDir string + appDir string + dataDir string + keyDir string + reportFilename string + excelRisksFilename string + excelTagsFilename string + jsonRisksFilename string + jsonTechnicalAssetsFilename string + jsonStatsFilename string + dataFlowDiagramFilenameDOT string + dataFlowDiagramFilenamePNG string + dataAssetDiagramFilenameDOT string + dataAssetDiagramFilenamePNG string + graphvizDataFlowDiagramConversionCall string + graphvizDataAssetDiagramConversionCall string + inputFile string + + progressReporter ProgressReporter +} + +func (context 
*Context) addToListOfSupportedTags(tags []string) { + for _, tag := range tags { + context.parsedModel.AllSupportedTags[tag] = true + } } -func checkErr(err error) { - if err != nil { - panic(err) +func (context *Context) checkRiskTracking() { + if *context.verbose { + fmt.Println("Checking risk tracking") + } + for _, tracking := range context.parsedModel.RiskTracking { + if _, ok := context.parsedModel.GeneratedRisksBySyntheticId[tracking.SyntheticRiskId]; !ok { + if *context.ignoreOrphanedRiskTracking { + fmt.Println("Risk tracking references unknown risk (risk id not found): " + tracking.SyntheticRiskId) + } else { + panic(errors.New("Risk tracking references unknown risk (risk id not found) - you might want to use the option -ignore-orphaned-risk-tracking: " + tracking.SyntheticRiskId + + "\n\nNOTE: For risk tracking each risk-id needs to be defined (the string with the @ sign in it). " + + "These unique risk IDs are visible in the PDF report (the small grey string under each risk), " + + "the Excel (column \"ID\"), as well as the JSON responses. Some risk IDs have only one @ sign in them, " + + "while others multiple. The idea is to allow for unique but still speaking IDs. Therefore each risk instance " + + "creates its individual ID by taking all affected elements causing the risk to be within an @-delimited part. " + + "Using wildcards (the * sign) for parts delimited by @ signs allows to handle groups of certain risks at once. " + + "Best is to lookup the IDs to use in the created Excel file. 
Alternatively a model macro \"seed-risk-tracking\" " + + "is available that helps in initially seeding the risk tracking part here based on already identified and not yet handled risks.")) + } + } + } + + // save also the risk-category-id and risk-status directly in the risk for better JSON marshalling + for category := range context.parsedModel.GeneratedRisksByCategory { + for i := range context.parsedModel.GeneratedRisksByCategory[category] { + context.parsedModel.GeneratedRisksByCategory[category][i].CategoryId = category.Id + context.parsedModel.GeneratedRisksByCategory[category][i].RiskStatus = context.parsedModel.GeneratedRisksByCategory[category][i].GetRiskTrackingStatusDefaultingUnchecked(&context.parsedModel) + } } } -func (context *Context) Defaults() *Context { +func (context *Context) Defaults(buildTimestamp string) *Context { *context = Context{ - keepDiagramSourceFiles: keepDiagramSourceFiles, - addModelTitle: addModelTitle, + keepDiagramSourceFiles: false, + addModelTitle: false, buildTimestamp: buildTimestamp, - customRiskRules: make(map[string]*risks.CustomRisk), + customRiskRules: make(map[string]*model.CustomRisk), deferredRiskTrackingDueToWildcardMatching: make(map[string]model.RiskTracking), drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks: true, - } + defaultGraphvizDPI: 120, + maxGraphvizDPI: 240, + backupHistoryFilesToKeep: 50, + } + + context.tempDir = "/dev/shm" // TODO: make configurable via cmdline arg? 
+ context.binDir = "/app" + context.appDir = "/app" + context.dataDir = "/data" + context.keyDir = "keys" + context.reportFilename = "report.pdf" + context.excelRisksFilename = "risks.xlsx" + context.excelTagsFilename = "tags.xlsx" + context.jsonRisksFilename = "risks.json" + context.jsonTechnicalAssetsFilename = "technical-assets.json" + context.jsonStatsFilename = "stats.json" + context.dataFlowDiagramFilenameDOT = "data-flow-diagram.gv" + context.dataFlowDiagramFilenamePNG = "data-flow-diagram.png" + context.dataAssetDiagramFilenameDOT = "data-asset-diagram.gv" + context.dataAssetDiagramFilenamePNG = "data-asset-diagram.png" + context.graphvizDataFlowDiagramConversionCall = "render-data-flow-diagram.sh" + context.graphvizDataAssetDiagramConversionCall = "render-data-asset-diagram.sh" + context.inputFile = "threagile.yaml" return context } @@ -174,11 +221,11 @@ func (context *Context) applyRisk(rule model.CustomRiskRule, skippedRules *map[s fmt.Printf("Skipping risk rule %q\n", rule.Category().Id) delete(*skippedRules, rule.Category().Id) } else { - model.AddToListOfSupportedTags(rule.SupportedTags()) - generatedRisks := rule.GenerateRisks(&model.ParsedModelRoot) + context.addToListOfSupportedTags(rule.SupportedTags()) + generatedRisks := rule.GenerateRisks(&context.parsedModel) if generatedRisks != nil { if len(generatedRisks) > 0 { - model.GeneratedRisksByCategory[rule.Category()] = generatedRisks + context.parsedModel.GeneratedRisksByCategory[rule.Category()] = generatedRisks } } else { fmt.Printf("Failed to generate risks for %q\n", id) @@ -198,48 +245,9 @@ func (context *Context) applyRiskGeneration() { } } - context.applyRisk(accidentalsecretleak.Rule(), &skippedRules) - context.applyRisk(codebackdooring.Rule(), &skippedRules) - context.applyRisk(containerbaseimagebackdooring.Rule(), &skippedRules) - context.applyRisk(containerplatformescape.Rule(), &skippedRules) - context.applyRisk(crosssiterequestforgery.Rule(), &skippedRules) - 
context.applyRisk(crosssitescripting.Rule(), &skippedRules) - context.applyRisk(dosriskyaccessacrosstrustboundary.Rule(), &skippedRules) - context.applyRisk(incompletemodel.Rule(), &skippedRules) - context.applyRisk(ldapinjection.Rule(), &skippedRules) - context.applyRisk(missingauthentication.Rule(), &skippedRules) - context.applyRisk(missingauthenticationsecondfactor.Rule(), &skippedRules) - context.applyRisk(missingbuildinfrastructure.Rule(), &skippedRules) - context.applyRisk(missingcloudhardening.Rule(), &skippedRules) - context.applyRisk(missingfilevalidation.Rule(), &skippedRules) - context.applyRisk(missinghardening.Rule(), &skippedRules) - context.applyRisk(missingidentitypropagation.Rule(), &skippedRules) - context.applyRisk(missingidentityproviderisolation.Rule(), &skippedRules) - context.applyRisk(missingidentitystore.Rule(), &skippedRules) - context.applyRisk(missingnetworksegmentation.Rule(), &skippedRules) - context.applyRisk(missingvault.Rule(), &skippedRules) - context.applyRisk(missingvaultisolation.Rule(), &skippedRules) - context.applyRisk(missingwaf.Rule(), &skippedRules) - context.applyRisk(mixedtargetsonsharedruntime.Rule(), &skippedRules) - context.applyRisk(pathtraversal.Rule(), &skippedRules) - context.applyRisk(pushinsteadofpulldeployment.Rule(), &skippedRules) - context.applyRisk(searchqueryinjection.Rule(), &skippedRules) - context.applyRisk(serversiderequestforgery.Rule(), &skippedRules) - context.applyRisk(serviceregistrypoisoning.Rule(), &skippedRules) - context.applyRisk(sqlnosqlinjection.Rule(), &skippedRules) - context.applyRisk(uncheckeddeployment.Rule(), &skippedRules) - context.applyRisk(unencryptedasset.Rule(), &skippedRules) - context.applyRisk(unencryptedcommunication.Rule(), &skippedRules) - context.applyRisk(unguardedaccessfrominternet.Rule(), &skippedRules) - context.applyRisk(unguardeddirectdatastoreaccess.Rule(), &skippedRules) - context.applyRisk(unnecessarycommunicationlink.Rule(), &skippedRules) - 
context.applyRisk(unnecessarydataasset.Rule(), &skippedRules) - context.applyRisk(unnecessarydatatransfer.Rule(), &skippedRules) - context.applyRisk(unnecessarytechnicalasset.Rule(), &skippedRules) - context.applyRisk(untrusteddeserialization.Rule(), &skippedRules) - context.applyRisk(wrongcommunicationlinkcontent.Rule(), &skippedRules) - context.applyRisk(wrongtrustboundarycontent.Rule(), &skippedRules) - context.applyRisk(xmlexternalentity.Rule(), &skippedRules) + for _, rule := range risks.GetBuiltInRiskRules() { + context.applyRisk(rule, &skippedRules) + } // NOW THE CUSTOM RISK RULES (if any) for id, customRule := range context.customRiskRules { @@ -253,10 +261,10 @@ func (context *Context) applyRiskGeneration() { if *context.verbose { fmt.Println("Executing custom risk rule:", id) } - model.AddToListOfSupportedTags(customRule.Tags) - customRisks := customRule.GenerateRisks(&model.ParsedModelRoot) + context.addToListOfSupportedTags(customRule.Tags) + customRisks := customRule.GenerateRisks(&context.parsedModel) if len(customRisks) > 0 { - model.GeneratedRisksByCategory[customRule.Category] = customRisks + context.parsedModel.GeneratedRisksByCategory[customRule.Category] = customRisks } if *context.verbose { @@ -276,41 +284,10 @@ func (context *Context) applyRiskGeneration() { } // save also in map keyed by synthetic risk-id - for _, category := range model.SortedRiskCategories() { - someRisks := model.SortedRisksOfCategory(category) + for _, category := range model.SortedRiskCategories(&context.parsedModel) { + someRisks := model.SortedRisksOfCategory(&context.parsedModel, category) for _, risk := range someRisks { - model.GeneratedRisksBySyntheticId[strings.ToLower(risk.SyntheticId)] = risk - } - } -} - -func (context *Context) checkRiskTracking() { - if *context.verbose { - fmt.Println("Checking risk tracking") - } - for _, tracking := range model.ParsedModelRoot.RiskTracking { - if _, ok := model.GeneratedRisksBySyntheticId[tracking.SyntheticRiskId]; !ok { 
- if *context.ignoreOrphanedRiskTracking { - fmt.Println("Risk tracking references unknown risk (risk id not found): " + tracking.SyntheticRiskId) - } else { - panic(errors.New("Risk tracking references unknown risk (risk id not found) - you might want to use the option -ignore-orphaned-risk-tracking: " + tracking.SyntheticRiskId + - "\n\nNOTE: For risk tracking each risk-id needs to be defined (the string with the @ sign in it). " + - "These unique risk IDs are visible in the PDF report (the small grey string under each risk), " + - "the Excel (column \"ID\"), as well as the JSON responses. Some risk IDs have only one @ sign in them, " + - "while others multiple. The idea is to allow for unique but still speaking IDs. Therefore each risk instance " + - "creates its individual ID by taking all affected elements causing the risk to be within an @-delimited part. " + - "Using wildcards (the * sign) for parts delimited by @ signs allows to handle groups of certain risks at once. " + - "Best is to lookup the IDs to use in the created Excel file. 
Alternatively a model macro \"seed-risk-tracking\" " + - "is available that helps in initially seeding the risk tracking part here based on already identified and not yet handled risks.")) - } - } - } - - // save also the risk-category-id and risk-status directly in the risk for better JSON marshalling - for category := range model.GeneratedRisksByCategory { - for i := range model.GeneratedRisksByCategory[category] { - model.GeneratedRisksByCategory[category][i].CategoryId = category.Id - model.GeneratedRisksByCategory[category][i].RiskStatus = model.GeneratedRisksByCategory[category][i].GetRiskTrackingStatusDefaultingUnchecked() + context.parsedModel.GeneratedRisksBySyntheticId[strings.ToLower(risk.SyntheticId)] = risk } } } @@ -421,4424 +398,4296 @@ func (context *Context) addFileToZip(zipWriter *zip.Writer, filename string) err return err } -func (context *Context) DoIt() { +func (context *Context) analyzeModelOnServerDirectly(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) + if !ok { + return + } + context.lockFolder(folderNameOfKey) defer func() { + context.unlockFolder(folderNameOfKey) var err error if r := recover(); r != nil { err = r.(error) if *context.verbose { log.Println(err) } - _, _ = os.Stderr.WriteString(err.Error() + "\n") - os.Exit(2) + log.Println(err) + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": strings.TrimSpace(err.Error()), + }) + ok = false } }() - if len(*context.executeModelMacro) > 0 { - context.printLogo() - } else { - if *context.verbose { - fmt.Println("Writing into output directory:", *context.outputDir) + + dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(context.defaultGraphvizDPI))) + if err != nil { + context.handleErrorInServiceCall(err, ginContext) + return + } + + _, yamlText, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if !ok { + return + } + tmpModelFile, err := 
os.CreateTemp(*context.tempFolder, "threagile-direct-analyze-*") + if err != nil { + context.handleErrorInServiceCall(err, ginContext) + return + } + defer func() { _ = os.Remove(tmpModelFile.Name()) }() + tmpOutputDir, err := os.MkdirTemp(*context.tempFolder, "threagile-direct-analyze-") + if err != nil { + context.handleErrorInServiceCall(err, ginContext) + return + } + defer func() { _ = os.RemoveAll(tmpOutputDir) }() + tmpResultFile, err := os.CreateTemp(*context.tempFolder, "threagile-result-*.zip") + checkErr(err) + defer func() { _ = os.Remove(tmpResultFile.Name()) }() + + err = os.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) + + context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, true, true, true, true, true, true, true, true, dpi) + if err != nil { + context.handleErrorInServiceCall(err, ginContext) + return + } + err = os.WriteFile(filepath.Join(tmpOutputDir, context.inputFile), []byte(yamlText), 0400) + if err != nil { + context.handleErrorInServiceCall(err, ginContext) + return + } + + files := []string{ + filepath.Join(tmpOutputDir, context.inputFile), + filepath.Join(tmpOutputDir, context.dataFlowDiagramFilenamePNG), + filepath.Join(tmpOutputDir, context.dataAssetDiagramFilenamePNG), + filepath.Join(tmpOutputDir, context.reportFilename), + filepath.Join(tmpOutputDir, context.excelRisksFilename), + filepath.Join(tmpOutputDir, context.excelTagsFilename), + filepath.Join(tmpOutputDir, context.jsonRisksFilename), + filepath.Join(tmpOutputDir, context.jsonTechnicalAssetsFilename), + filepath.Join(tmpOutputDir, context.jsonStatsFilename), + } + if context.keepDiagramSourceFiles { + files = append(files, filepath.Join(tmpOutputDir, context.dataFlowDiagramFilenameDOT)) + files = append(files, filepath.Join(tmpOutputDir, context.dataAssetDiagramFilenameDOT)) + } + err = context.zipFiles(tmpResultFile.Name(), files) + checkErr(err) + if *context.verbose { + fmt.Println("Streaming back result file: " + tmpResultFile.Name()) + } + 
ginContext.FileAttachment(tmpResultFile.Name(), "threagile-result.zip") +} + +func (context *Context) parseModel() { + if *context.verbose { + fmt.Println("Parsing model:", *context.modelFilename) + } + + context.modelInput = *new(input.ModelInput).Defaults() + loadError := context.modelInput.Load(*context.modelFilename) + if loadError != nil { + log.Fatal("Unable to parse model yaml: ", loadError) + } + + // data, _ := json.MarshalIndent(context.modelInput, "", " ") + // fmt.Printf("%v\n", string(data)) + + var businessCriticality types.Criticality + switch context.modelInput.BusinessCriticality { + case types.Archive.String(): + businessCriticality = types.Archive + case types.Operational.String(): + businessCriticality = types.Operational + case types.Important.String(): + businessCriticality = types.Important + case types.Critical.String(): + businessCriticality = types.Critical + case types.MissionCritical.String(): + businessCriticality = types.MissionCritical + default: + panic(errors.New("unknown 'business_criticality' value of application: " + context.modelInput.BusinessCriticality)) + } + + reportDate := time.Now() + if len(context.modelInput.Date) > 0 { + var parseError error + reportDate, parseError = time.Parse("2006-01-02", context.modelInput.Date) + if parseError != nil { + panic(errors.New("unable to parse 'date' value of model file")) } } - model.Init() - context.parseModel() - introTextRAA := context.applyRAA() - context.loadCustomRiskRules() - context.applyRiskGeneration() - context.applyWildcardRiskTrackingEvaluation() - context.checkRiskTracking() + context.parsedModel = model.ParsedModel{ + Author: context.modelInput.Author, + Title: context.modelInput.Title, + Date: reportDate, + ManagementSummaryComment: context.modelInput.ManagementSummaryComment, + BusinessCriticality: businessCriticality, + BusinessOverview: removePathElementsFromImageFiles(context.modelInput.BusinessOverview), + TechnicalOverview: 
removePathElementsFromImageFiles(context.modelInput.TechnicalOverview), + Questions: context.modelInput.Questions, + AbuseCases: context.modelInput.AbuseCases, + SecurityRequirements: context.modelInput.SecurityRequirements, + TagsAvailable: lowerCaseAndTrim(context.modelInput.TagsAvailable), + DiagramTweakNodesep: context.modelInput.DiagramTweakNodesep, + DiagramTweakRanksep: context.modelInput.DiagramTweakRanksep, + DiagramTweakEdgeLayout: context.modelInput.DiagramTweakEdgeLayout, + DiagramTweakSuppressEdgeLabels: context.modelInput.DiagramTweakSuppressEdgeLabels, + DiagramTweakLayoutLeftToRight: context.modelInput.DiagramTweakLayoutLeftToRight, + DiagramTweakInvisibleConnectionsBetweenAssets: context.modelInput.DiagramTweakInvisibleConnectionsBetweenAssets, + DiagramTweakSameRankAssets: context.modelInput.DiagramTweakSameRankAssets, + } - if len(*context.executeModelMacro) > 0 { - var macroDetails model.MacroDetails - switch *context.executeModelMacro { - case addbuildpipeline.GetMacroDetails().ID: - macroDetails = addbuildpipeline.GetMacroDetails() - case addvault.GetMacroDetails().ID: - macroDetails = addvault.GetMacroDetails() - case prettyprint.GetMacroDetails().ID: - macroDetails = prettyprint.GetMacroDetails() - case removeunusedtags.GetMacroDetails().ID: - macroDetails = removeunusedtags.GetMacroDetails() - case seedrisktracking.GetMacroDetails().ID: - macroDetails = seedrisktracking.GetMacroDetails() - case seedtags.GetMacroDetails().ID: - macroDetails = seedtags.GetMacroDetails() + context.parsedModel.CommunicationLinks = make(map[string]model.CommunicationLink) + context.parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId = make(map[string][]model.CommunicationLink) + context.parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId = make(map[string]model.TrustBoundary) + context.parsedModel.GeneratedRisksByCategory = make(map[model.RiskCategory][]model.Risk) + context.parsedModel.GeneratedRisksBySyntheticId = 
make(map[string]model.Risk) + context.parsedModel.AllSupportedTags = make(map[string]bool) + + if context.parsedModel.DiagramTweakNodesep == 0 { + context.parsedModel.DiagramTweakNodesep = 2 + } + if context.parsedModel.DiagramTweakRanksep == 0 { + context.parsedModel.DiagramTweakRanksep = 2 + } + + // Data Assets =============================================================================== + context.parsedModel.DataAssets = make(map[string]model.DataAsset) + for title, asset := range context.modelInput.DataAssets { + id := fmt.Sprintf("%v", asset.ID) + + var usage types.Usage + switch asset.Usage { + case types.Business.String(): + usage = types.Business + case types.DevOps.String(): + usage = types.DevOps default: - log.Fatal("Unknown model macro: ", *context.executeModelMacro) + panic(errors.New("unknown 'usage' value of data asset '" + title + "': " + asset.Usage)) } - fmt.Println("Executing model macro:", macroDetails.ID) - fmt.Println() - fmt.Println() - context.printBorder(len(macroDetails.Title), true) - fmt.Println(macroDetails.Title) - context.printBorder(len(macroDetails.Title), true) - if len(macroDetails.Description) > 0 { - fmt.Println(macroDetails.Description) + + var quantity types.Quantity + switch asset.Quantity { + case types.VeryFew.String(): + quantity = types.VeryFew + case types.Few.String(): + quantity = types.Few + case types.Many.String(): + quantity = types.Many + case types.VeryMany.String(): + quantity = types.VeryMany + default: + panic(errors.New("unknown 'quantity' value of data asset '" + title + "': " + asset.Quantity)) } - fmt.Println() - reader := bufio.NewReader(os.Stdin) - var err error - var nextQuestion model.MacroQuestion - for { - switch macroDetails.ID { - case addbuildpipeline.GetMacroDetails().ID: - nextQuestion, err = addbuildpipeline.GetNextQuestion() - case addvault.GetMacroDetails().ID: - nextQuestion, err = addvault.GetNextQuestion() - case prettyprint.GetMacroDetails().ID: - nextQuestion, err = 
prettyprint.GetNextQuestion() - case removeunusedtags.GetMacroDetails().ID: - nextQuestion, err = removeunusedtags.GetNextQuestion() - case seedrisktracking.GetMacroDetails().ID: - nextQuestion, err = seedrisktracking.GetNextQuestion() - case seedtags.GetMacroDetails().ID: - nextQuestion, err = seedtags.GetNextQuestion() - } - checkErr(err) - if nextQuestion.NoMoreQuestions() { - break - } - fmt.Println() - context.printBorder(len(nextQuestion.Title), false) - fmt.Println(nextQuestion.Title) - context.printBorder(len(nextQuestion.Title), false) - if len(nextQuestion.Description) > 0 { - fmt.Println(nextQuestion.Description) + + var confidentiality types.Confidentiality + switch asset.Confidentiality { + case types.Public.String(): + confidentiality = types.Public + case types.Internal.String(): + confidentiality = types.Internal + case types.Restricted.String(): + confidentiality = types.Restricted + case types.Confidential.String(): + confidentiality = types.Confidential + case types.StrictlyConfidential.String(): + confidentiality = types.StrictlyConfidential + default: + panic(errors.New("unknown 'confidentiality' value of data asset '" + title + "': " + asset.Confidentiality)) + } + + var integrity types.Criticality + switch asset.Integrity { + case types.Archive.String(): + integrity = types.Archive + case types.Operational.String(): + integrity = types.Operational + case types.Important.String(): + integrity = types.Important + case types.Critical.String(): + integrity = types.Critical + case types.MissionCritical.String(): + integrity = types.MissionCritical + default: + panic(errors.New("unknown 'integrity' value of data asset '" + title + "': " + asset.Integrity)) + } + + var availability types.Criticality + switch asset.Availability { + case types.Archive.String(): + availability = types.Archive + case types.Operational.String(): + availability = types.Operational + case types.Important.String(): + availability = types.Important + case 
types.Critical.String(): + availability = types.Critical + case types.MissionCritical.String(): + availability = types.MissionCritical + default: + panic(errors.New("unknown 'availability' value of data asset '" + title + "': " + asset.Availability)) + } + + context.checkIdSyntax(id) + if _, exists := context.parsedModel.DataAssets[id]; exists { + panic(errors.New("duplicate id used: " + id)) + } + context.parsedModel.DataAssets[id] = model.DataAsset{ + Id: id, + Title: title, + Usage: usage, + Description: withDefault(fmt.Sprintf("%v", asset.Description), title), + Quantity: quantity, + Tags: context.checkTags(lowerCaseAndTrim(asset.Tags), "data asset '"+title+"'"), + Origin: fmt.Sprintf("%v", asset.Origin), + Owner: fmt.Sprintf("%v", asset.Owner), + Confidentiality: confidentiality, + Integrity: integrity, + Availability: availability, + JustificationCiaRating: fmt.Sprintf("%v", asset.JustificationCiaRating), + } + } + + // Technical Assets =============================================================================== + context.parsedModel.TechnicalAssets = make(map[string]model.TechnicalAsset) + for title, asset := range context.modelInput.TechnicalAssets { + id := fmt.Sprintf("%v", asset.ID) + + var usage types.Usage + switch asset.Usage { + case types.Business.String(): + usage = types.Business + case types.DevOps.String(): + usage = types.DevOps + default: + panic(errors.New("unknown 'usage' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Usage))) + } + + var dataAssetsProcessed = make([]string, 0) + if asset.DataAssetsProcessed != nil { + dataAssetsProcessed = make([]string, len(asset.DataAssetsProcessed)) + for i, parsedProcessedAsset := range asset.DataAssetsProcessed { + referencedAsset := fmt.Sprintf("%v", parsedProcessedAsset) + context.checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") + dataAssetsProcessed[i] = referencedAsset } - resultingMultiValueSelection := make([]string, 0) - if 
nextQuestion.IsValueConstrained() { - if nextQuestion.MultiSelect { - selectedValues := make(map[string]bool) - for { - fmt.Println("Please select (multiple executions possible) from the following values (use number to select/deselect):") - fmt.Println(" 0:", "SELECTION PROCESS FINISHED: CONTINUE TO NEXT QUESTION") - for i, val := range nextQuestion.PossibleAnswers { - number := i + 1 - padding, selected := "", " " - if number < 10 { - padding = " " - } - if val, exists := selectedValues[val]; exists && val { - selected = "*" - } - fmt.Println(" "+selected+" "+padding+strconv.Itoa(number)+":", val) - } - fmt.Println() - fmt.Print("Enter number to select/deselect (or 0 when finished): ") - answer, err := reader.ReadString('\n') - // convert CRLF to LF - answer = strings.TrimSpace(strings.Replace(answer, "\n", "", -1)) - checkErr(err) - if val, err := strconv.Atoi(answer); err == nil { // flip selection - if val == 0 { - for key, selected := range selectedValues { - if selected { - resultingMultiValueSelection = append(resultingMultiValueSelection, key) - } - } - break - } else if val > 0 && val <= len(nextQuestion.PossibleAnswers) { - selectedValues[nextQuestion.PossibleAnswers[val-1]] = !selectedValues[nextQuestion.PossibleAnswers[val-1]] - } - } - } - } else { - fmt.Println("Please choose from the following values (enter value directly or use number):") - for i, val := range nextQuestion.PossibleAnswers { - number := i + 1 - padding := "" - if number < 10 { - padding = " " - } - fmt.Println(" "+padding+strconv.Itoa(number)+":", val) - } + } + + var dataAssetsStored = make([]string, 0) + if asset.DataAssetsStored != nil { + dataAssetsStored = make([]string, len(asset.DataAssetsStored)) + for i, parsedStoredAssets := range asset.DataAssetsStored { + referencedAsset := fmt.Sprintf("%v", parsedStoredAssets) + context.checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") + dataAssetsStored[i] = referencedAsset + } + } + + var technicalAssetType 
types.TechnicalAssetType + switch asset.Type { + case types.ExternalEntity.String(): + technicalAssetType = types.ExternalEntity + case types.Process.String(): + technicalAssetType = types.Process + case types.Datastore.String(): + technicalAssetType = types.Datastore + default: + panic(errors.New("unknown 'type' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Type))) + } + + var technicalAssetSize types.TechnicalAssetSize + switch asset.Size { + case types.Service.String(): + technicalAssetSize = types.Service + case types.System.String(): + technicalAssetSize = types.System + case types.Application.String(): + technicalAssetSize = types.Application + case types.Component.String(): + technicalAssetSize = types.Component + default: + panic(errors.New("unknown 'size' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Size))) + } + + var technicalAssetTechnology types.TechnicalAssetTechnology + switch asset.Technology { + case types.UnknownTechnology.String(): + technicalAssetTechnology = types.UnknownTechnology + case types.ClientSystem.String(): + technicalAssetTechnology = types.ClientSystem + case types.Browser.String(): + technicalAssetTechnology = types.Browser + case types.Desktop.String(): + technicalAssetTechnology = types.Desktop + case types.MobileApp.String(): + technicalAssetTechnology = types.MobileApp + case types.DevOpsClient.String(): + technicalAssetTechnology = types.DevOpsClient + case types.WebServer.String(): + technicalAssetTechnology = types.WebServer + case types.WebApplication.String(): + technicalAssetTechnology = types.WebApplication + case types.ApplicationServer.String(): + technicalAssetTechnology = types.ApplicationServer + case types.Database.String(): + technicalAssetTechnology = types.Database + case types.FileServer.String(): + technicalAssetTechnology = types.FileServer + case types.LocalFileSystem.String(): + technicalAssetTechnology = types.LocalFileSystem + case types.ERP.String(): + 
technicalAssetTechnology = types.ERP + case types.CMS.String(): + technicalAssetTechnology = types.CMS + case types.WebServiceREST.String(): + technicalAssetTechnology = types.WebServiceREST + case types.WebServiceSOAP.String(): + technicalAssetTechnology = types.WebServiceSOAP + case types.EJB.String(): + technicalAssetTechnology = types.EJB + case types.SearchIndex.String(): + technicalAssetTechnology = types.SearchIndex + case types.SearchEngine.String(): + technicalAssetTechnology = types.SearchEngine + case types.ServiceRegistry.String(): + technicalAssetTechnology = types.ServiceRegistry + case types.ReverseProxy.String(): + technicalAssetTechnology = types.ReverseProxy + case types.LoadBalancer.String(): + technicalAssetTechnology = types.LoadBalancer + case types.BuildPipeline.String(): + technicalAssetTechnology = types.BuildPipeline + case types.SourcecodeRepository.String(): + technicalAssetTechnology = types.SourcecodeRepository + case types.ArtifactRegistry.String(): + technicalAssetTechnology = types.ArtifactRegistry + case types.CodeInspectionPlatform.String(): + technicalAssetTechnology = types.CodeInspectionPlatform + case types.Monitoring.String(): + technicalAssetTechnology = types.Monitoring + case types.LDAPServer.String(): + technicalAssetTechnology = types.LDAPServer + case types.ContainerPlatform.String(): + technicalAssetTechnology = types.ContainerPlatform + case types.BatchProcessing.String(): + technicalAssetTechnology = types.BatchProcessing + case types.EventListener.String(): + technicalAssetTechnology = types.EventListener + case types.IdentityProvider.String(): + technicalAssetTechnology = types.IdentityProvider + case types.IdentityStoreLDAP.String(): + technicalAssetTechnology = types.IdentityStoreLDAP + case types.IdentityStoreDatabase.String(): + technicalAssetTechnology = types.IdentityStoreDatabase + case types.Tool.String(): + technicalAssetTechnology = types.Tool + case types.CLI.String(): + technicalAssetTechnology = 
types.CLI + case types.Task.String(): + technicalAssetTechnology = types.Task + case types.Function.String(): + technicalAssetTechnology = types.Function + case types.Gateway.String(): + technicalAssetTechnology = types.Gateway + case types.IoTDevice.String(): + technicalAssetTechnology = types.IoTDevice + case types.MessageQueue.String(): + technicalAssetTechnology = types.MessageQueue + case types.StreamProcessing.String(): + technicalAssetTechnology = types.StreamProcessing + case types.ServiceMesh.String(): + technicalAssetTechnology = types.ServiceMesh + case types.DataLake.String(): + technicalAssetTechnology = types.DataLake + case types.BigDataPlatform.String(): + technicalAssetTechnology = types.BigDataPlatform + case types.ReportEngine.String(): + technicalAssetTechnology = types.ReportEngine + case types.AI.String(): + technicalAssetTechnology = types.AI + case types.MailServer.String(): + technicalAssetTechnology = types.MailServer + case types.Vault.String(): + technicalAssetTechnology = types.Vault + case types.HSM.String(): + technicalAssetTechnology = types.HSM + case types.WAF.String(): + technicalAssetTechnology = types.WAF + case types.IDS.String(): + technicalAssetTechnology = types.IDS + case types.IPS.String(): + technicalAssetTechnology = types.IPS + case types.Scheduler.String(): + technicalAssetTechnology = types.Scheduler + case types.Mainframe.String(): + technicalAssetTechnology = types.Mainframe + case types.BlockStorage.String(): + technicalAssetTechnology = types.BlockStorage + case types.Library.String(): + technicalAssetTechnology = types.Library + default: + panic(errors.New("unknown 'technology' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Technology))) + } + + var encryption types.EncryptionStyle + switch asset.Encryption { + case types.NoneEncryption.String(): + encryption = types.NoneEncryption + case types.Transparent.String(): + encryption = types.Transparent + case 
types.DataWithSymmetricSharedKey.String(): + encryption = types.DataWithSymmetricSharedKey + case types.DataWithAsymmetricSharedKey.String(): + encryption = types.DataWithAsymmetricSharedKey + case types.DataWithEndUserIndividualKey.String(): + encryption = types.DataWithEndUserIndividualKey + default: + panic(errors.New("unknown 'encryption' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Encryption))) + } + + var technicalAssetMachine types.TechnicalAssetMachine + switch asset.Machine { + case types.Physical.String(): + technicalAssetMachine = types.Physical + case types.Virtual.String(): + technicalAssetMachine = types.Virtual + case types.Container.String(): + technicalAssetMachine = types.Container + case types.Serverless.String(): + technicalAssetMachine = types.Serverless + default: + panic(errors.New("unknown 'machine' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Machine))) + } + + var confidentiality types.Confidentiality + switch asset.Confidentiality { + case types.Public.String(): + confidentiality = types.Public + case types.Internal.String(): + confidentiality = types.Internal + case types.Restricted.String(): + confidentiality = types.Restricted + case types.Confidential.String(): + confidentiality = types.Confidential + case types.StrictlyConfidential.String(): + confidentiality = types.StrictlyConfidential + default: + panic(errors.New("unknown 'confidentiality' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Confidentiality))) + } + + var integrity types.Criticality + switch asset.Integrity { + case types.Archive.String(): + integrity = types.Archive + case types.Operational.String(): + integrity = types.Operational + case types.Important.String(): + integrity = types.Important + case types.Critical.String(): + integrity = types.Critical + case types.MissionCritical.String(): + integrity = types.MissionCritical + default: + panic(errors.New("unknown 'integrity' value of technical 
asset '" + title + "': " + fmt.Sprintf("%v", asset.Integrity))) + } + + var availability types.Criticality + switch asset.Availability { + case types.Archive.String(): + availability = types.Archive + case types.Operational.String(): + availability = types.Operational + case types.Important.String(): + availability = types.Important + case types.Critical.String(): + availability = types.Critical + case types.MissionCritical.String(): + availability = types.MissionCritical + default: + panic(errors.New("unknown 'availability' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Availability))) + } + + dataFormatsAccepted := make([]types.DataFormat, 0) + if asset.DataFormatsAccepted != nil { + for _, dataFormatName := range asset.DataFormatsAccepted { + switch dataFormatName { + case types.JSON.String(): + dataFormatsAccepted = append(dataFormatsAccepted, types.JSON) + case types.XML.String(): + dataFormatsAccepted = append(dataFormatsAccepted, types.XML) + case types.Serialization.String(): + dataFormatsAccepted = append(dataFormatsAccepted, types.Serialization) + case types.File.String(): + dataFormatsAccepted = append(dataFormatsAccepted, types.File) + case types.CSV.String(): + dataFormatsAccepted = append(dataFormatsAccepted, types.CSV) + default: + panic(errors.New("unknown 'data_formats_accepted' value of technical asset '" + title + "': " + fmt.Sprintf("%v", dataFormatName))) } } - message := "" - validResult := true - if !nextQuestion.IsValueConstrained() || !nextQuestion.MultiSelect { - fmt.Println() - fmt.Println("Enter your answer (use 'BACK' to go one step back or 'QUIT' to quit without executing the model macro)") - fmt.Print("Answer") - if len(nextQuestion.DefaultAnswer) > 0 { - fmt.Print(" (default '" + nextQuestion.DefaultAnswer + "')") + } + + communicationLinks := make([]model.CommunicationLink, 0) + if asset.CommunicationLinks != nil { + for commLinkTitle, commLink := range asset.CommunicationLinks { + constraint := true + weight 
:= 1 + var protocol types.Protocol + var authentication types.Authentication + var authorization types.Authorization + var usage types.Usage + var dataAssetsSent []string + var dataAssetsReceived []string + + switch commLink.Authentication { + case types.NoneAuthentication.String(): + authentication = types.NoneAuthentication + case types.Credentials.String(): + authentication = types.Credentials + case types.SessionId.String(): + authentication = types.SessionId + case types.Token.String(): + authentication = types.Token + case types.ClientCertificate.String(): + authentication = types.ClientCertificate + case types.TwoFactor.String(): + authentication = types.TwoFactor + case types.Externalized.String(): + authentication = types.Externalized + default: + panic(errors.New("unknown 'authentication' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Authentication))) } - fmt.Print(": ") - answer, err := reader.ReadString('\n') - // convert CRLF to LF - answer = strings.TrimSpace(strings.Replace(answer, "\n", "", -1)) - checkErr(err) - if len(answer) == 0 && len(nextQuestion.DefaultAnswer) > 0 { // accepting the default - answer = nextQuestion.DefaultAnswer - } else if nextQuestion.IsValueConstrained() { // convert number to value - if val, err := strconv.Atoi(answer); err == nil { - if val > 0 && val <= len(nextQuestion.PossibleAnswers) { - answer = nextQuestion.PossibleAnswers[val-1] - } - } + + switch commLink.Authorization { + case types.NoneAuthorization.String(): + authorization = types.NoneAuthorization + case types.TechnicalUser.String(): + authorization = types.TechnicalUser + case types.EndUserIdentityPropagation.String(): + authorization = types.EndUserIdentityPropagation + default: + panic(errors.New("unknown 'authorization' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Authorization))) } - if strings.ToLower(answer) == 
"quit" { - fmt.Println("Quitting without executing the model macro") - return - } else if strings.ToLower(answer) == "back" { - switch macroDetails.ID { - case addbuildpipeline.GetMacroDetails().ID: - message, validResult, err = addbuildpipeline.GoBack() - case addvault.GetMacroDetails().ID: - message, validResult, err = addvault.GoBack() - case prettyprint.GetMacroDetails().ID: - message, validResult, err = prettyprint.GoBack() - case removeunusedtags.GetMacroDetails().ID: - message, validResult, err = removeunusedtags.GoBack() - case seedrisktracking.GetMacroDetails().ID: - message, validResult, err = seedrisktracking.GoBack() - case seedtags.GetMacroDetails().ID: - message, validResult, err = seedtags.GoBack() - } - } else if len(answer) > 0 { // individual answer - if nextQuestion.IsValueConstrained() { - if !nextQuestion.IsMatchingValueConstraint(answer) { - fmt.Println() - fmt.Println(">>> INVALID <<<") - fmt.Println("Answer does not match any allowed value. Please try again:") - continue - } + + switch commLink.Usage { + case types.Business.String(): + usage = types.Business + case types.DevOps.String(): + usage = types.DevOps + default: + panic(errors.New("unknown 'usage' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Usage))) + } + + switch commLink.Protocol { + case types.UnknownProtocol.String(): + protocol = types.UnknownProtocol + case types.HTTP.String(): + protocol = types.HTTP + case types.HTTPS.String(): + protocol = types.HTTPS + case types.WS.String(): + protocol = types.WS + case types.WSS.String(): + protocol = types.WSS + case types.MQTT.String(): + protocol = types.MQTT + case types.JDBC.String(): + protocol = types.JDBC + case types.JdbcEncrypted.String(): + protocol = types.JdbcEncrypted + case types.ODBC.String(): + protocol = types.ODBC + case types.OdbcEncrypted.String(): + protocol = types.OdbcEncrypted + case types.SqlAccessProtocol.String(): + protocol = 
types.SqlAccessProtocol + case types.SqlAccessProtocolEncrypted.String(): + protocol = types.SqlAccessProtocolEncrypted + case types.NosqlAccessProtocol.String(): + protocol = types.NosqlAccessProtocol + case types.NosqlAccessProtocolEncrypted.String(): + protocol = types.NosqlAccessProtocolEncrypted + case types.TEXT.String(): + protocol = types.TEXT + case types.TextEncrypted.String(): + protocol = types.TextEncrypted + case types.BINARY.String(): + protocol = types.BINARY + case types.BinaryEncrypted.String(): + protocol = types.BinaryEncrypted + case types.SSH.String(): + protocol = types.SSH + case types.SshTunnel.String(): + protocol = types.SshTunnel + case types.SMTP.String(): + protocol = types.SMTP + case types.SmtpEncrypted.String(): + protocol = types.SmtpEncrypted + case types.POP3.String(): + protocol = types.POP3 + case types.Pop3Encrypted.String(): + protocol = types.Pop3Encrypted + case types.IMAP.String(): + protocol = types.IMAP + case types.ImapEncrypted.String(): + protocol = types.ImapEncrypted + case types.FTP.String(): + protocol = types.FTP + case types.FTPS.String(): + protocol = types.FTPS + case types.SFTP.String(): + protocol = types.SFTP + case types.SCP.String(): + protocol = types.SCP + case types.LDAP.String(): + protocol = types.LDAP + case types.LDAPS.String(): + protocol = types.LDAPS + case types.JMS.String(): + protocol = types.JMS + case types.NFS.String(): + protocol = types.NFS + case types.SMB.String(): + protocol = types.SMB + case types.SmbEncrypted.String(): + protocol = types.SmbEncrypted + case types.LocalFileAccess.String(): + protocol = types.LocalFileAccess + case types.NRPE.String(): + protocol = types.NRPE + case types.XMPP.String(): + protocol = types.XMPP + case types.IIOP.String(): + protocol = types.IIOP + case types.IiopEncrypted.String(): + protocol = types.IiopEncrypted + case types.JRMP.String(): + protocol = types.JRMP + case types.JrmpEncrypted.String(): + protocol = types.JrmpEncrypted + case 
types.InProcessLibraryCall.String(): + protocol = types.InProcessLibraryCall + case types.ContainerSpawning.String(): + protocol = types.ContainerSpawning + default: + panic(errors.New("unknown 'protocol' of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Protocol))) + } + + if commLink.DataAssetsSent != nil { + for _, dataAssetSent := range commLink.DataAssetsSent { + referencedAsset := fmt.Sprintf("%v", dataAssetSent) + context.checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") + dataAssetsSent = append(dataAssetsSent, referencedAsset) } - switch macroDetails.ID { - case addbuildpipeline.GetMacroDetails().ID: - message, validResult, err = addbuildpipeline.ApplyAnswer(nextQuestion.ID, answer) - case addvault.GetMacroDetails().ID: - message, validResult, err = addvault.ApplyAnswer(nextQuestion.ID, answer) - case prettyprint.GetMacroDetails().ID: - message, validResult, err = prettyprint.ApplyAnswer(nextQuestion.ID, answer) - case removeunusedtags.GetMacroDetails().ID: - message, validResult, err = removeunusedtags.ApplyAnswer(nextQuestion.ID, answer) - case seedrisktracking.GetMacroDetails().ID: - message, validResult, err = seedrisktracking.ApplyAnswer(nextQuestion.ID, answer) - case seedtags.GetMacroDetails().ID: - message, validResult, err = seedtags.ApplyAnswer(nextQuestion.ID, answer) + } + + if commLink.DataAssetsReceived != nil { + for _, dataAssetReceived := range commLink.DataAssetsReceived { + referencedAsset := fmt.Sprintf("%v", dataAssetReceived) + context.checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") + dataAssetsReceived = append(dataAssetsReceived, referencedAsset) } } - } else { - switch macroDetails.ID { - case addbuildpipeline.GetMacroDetails().ID: - message, validResult, err = addbuildpipeline.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) 
- case addvault.GetMacroDetails().ID: - message, validResult, err = addvault.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case prettyprint.GetMacroDetails().ID: - message, validResult, err = prettyprint.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case removeunusedtags.GetMacroDetails().ID: - message, validResult, err = removeunusedtags.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case seedrisktracking.GetMacroDetails().ID: - message, validResult, err = seedrisktracking.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case seedtags.GetMacroDetails().ID: - message, validResult, err = seedtags.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - } - } - checkErr(err) - if !validResult { - fmt.Println() - fmt.Println(">>> INVALID <<<") - } - fmt.Println(message) - fmt.Println() - } - for { - fmt.Println() - fmt.Println() - fmt.Println("#################################################################") - fmt.Println("Do you want to execute the model macro (updating the model file)?") - fmt.Println("#################################################################") - fmt.Println() - fmt.Println("The following changes will be applied:") - var changes []string - message := "" - validResult := true - var err error - switch macroDetails.ID { - case addbuildpipeline.GetMacroDetails().ID: - changes, message, validResult, err = addbuildpipeline.GetFinalChangeImpact(&context.modelInput) - case addvault.GetMacroDetails().ID: - changes, message, validResult, err = addvault.GetFinalChangeImpact(&context.modelInput) - case prettyprint.GetMacroDetails().ID: - changes, message, validResult, err = prettyprint.GetFinalChangeImpact(&context.modelInput) - case removeunusedtags.GetMacroDetails().ID: - changes, message, validResult, err = removeunusedtags.GetFinalChangeImpact(&context.modelInput) - case seedrisktracking.GetMacroDetails().ID: - changes, message, validResult, err = 
seedrisktracking.GetFinalChangeImpact(&context.modelInput) - case seedtags.GetMacroDetails().ID: - changes, message, validResult, err = seedtags.GetFinalChangeImpact(&context.modelInput) - } - checkErr(err) - for _, change := range changes { - fmt.Println(" -", change) - } - if !validResult { - fmt.Println() - fmt.Println(">>> INVALID <<<") - } - fmt.Println() - fmt.Println(message) - fmt.Println() - fmt.Print("Apply these changes to the model file?\nType Yes or No: ") - answer, err := reader.ReadString('\n') - // convert CRLF to LF - answer = strings.TrimSpace(strings.Replace(answer, "\n", "", -1)) - checkErr(err) - answer = strings.ToLower(answer) - fmt.Println() - if answer == "yes" || answer == "y" { - message := "" - validResult := true - var err error - switch macroDetails.ID { - case addbuildpipeline.GetMacroDetails().ID: - message, validResult, err = addbuildpipeline.Execute(&context.modelInput) - case addvault.GetMacroDetails().ID: - message, validResult, err = addvault.Execute(&context.modelInput) - case prettyprint.GetMacroDetails().ID: - message, validResult, err = prettyprint.Execute(&context.modelInput) - case removeunusedtags.GetMacroDetails().ID: - message, validResult, err = removeunusedtags.Execute(&context.modelInput) - case seedrisktracking.GetMacroDetails().ID: - message, validResult, err = seedrisktracking.Execute(&context.modelInput) - case seedtags.GetMacroDetails().ID: - message, validResult, err = seedtags.Execute(&context.modelInput) + + if commLink.DiagramTweakWeight > 0 { + weight = commLink.DiagramTweakWeight } - checkErr(err) - if !validResult { - fmt.Println() - fmt.Println(">>> INVALID <<<") + + constraint = !commLink.DiagramTweakConstraint + + dataFlowTitle := fmt.Sprintf("%v", commLinkTitle) + commLink := model.CommunicationLink{ + Id: createDataFlowId(id, dataFlowTitle), + SourceId: id, + TargetId: commLink.Target, + Title: dataFlowTitle, + Description: withDefault(commLink.Description, dataFlowTitle), + Protocol: protocol, + 
Authentication: authentication, + Authorization: authorization, + Usage: usage, + Tags: context.checkTags(lowerCaseAndTrim(commLink.Tags), "communication link '"+commLinkTitle+"' of technical asset '"+title+"'"), + VPN: commLink.VPN, + IpFiltered: commLink.IpFiltered, + Readonly: commLink.Readonly, + DataAssetsSent: dataAssetsSent, + DataAssetsReceived: dataAssetsReceived, + DiagramTweakWeight: weight, + DiagramTweakConstraint: constraint, } - fmt.Println(message) - fmt.Println() - backupFilename := *context.modelFilename + ".backup" - fmt.Println("Creating backup model file:", backupFilename) // TODO add random files in /dev/shm space? - _, err = copyFile(*context.modelFilename, backupFilename) - checkErr(err) - fmt.Println("Updating model") - yamlBytes, err := yaml.Marshal(context.modelInput) - checkErr(err) - /* - yamlBytes = model.ReformatYAML(yamlBytes) - */ - fmt.Println("Writing model file:", *context.modelFilename) - err = os.WriteFile(*context.modelFilename, yamlBytes, 0400) - checkErr(err) - fmt.Println("Model file successfully updated") - return - } else if answer == "no" || answer == "n" { - fmt.Println("Quitting without executing the model macro") - return + communicationLinks = append(communicationLinks, commLink) + // track all comm links + context.parsedModel.CommunicationLinks[commLink.Id] = commLink + // keep track of map of *all* comm links mapped by target-id (to be able to look up "who is calling me" kind of things) + context.parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId[commLink.TargetId] = append( + context.parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId[commLink.TargetId], commLink) } } - return - } - - renderDataFlowDiagram := *context.generateDataFlowDiagram - renderDataAssetDiagram := *context.generateDataAssetDiagram - renderRisksJSON := *context.generateRisksJSON - renderTechnicalAssetsJSON := *context.generateTechnicalAssetsJSON - renderStatsJSON := *context.generateStatsJSON - renderRisksExcel := 
*context.generateRisksExcel - renderTagsExcel := *context.generateTagsExcel - renderPDF := *context.generateReportPDF - if renderPDF { // as the PDF report includes both diagrams - renderDataFlowDiagram, renderDataAssetDiagram = true, true - } - // Data-flow Diagram rendering - if renderDataFlowDiagram { - gvFile := filepath.Join(*context.outputDir, dataFlowDiagramFilenameDOT) - if !context.keepDiagramSourceFiles { - tmpFileGV, err := os.CreateTemp(*context.tempFolder, dataFlowDiagramFilenameDOT) - checkErr(err) - gvFile = tmpFileGV.Name() - defer func() { _ = os.Remove(gvFile) }() + context.checkIdSyntax(id) + if _, exists := context.parsedModel.TechnicalAssets[id]; exists { + panic(errors.New("duplicate id used: " + id)) } - dotFile := context.writeDataFlowDiagramGraphvizDOT(gvFile, *context.diagramDPI) - context.renderDataFlowDiagramGraphvizImage(dotFile, *context.outputDir) - } - // Data Asset Diagram rendering - if renderDataAssetDiagram { - gvFile := filepath.Join(*context.outputDir, dataAssetDiagramFilenameDOT) - if !context.keepDiagramSourceFiles { - tmpFile, err := os.CreateTemp(*context.tempFolder, dataAssetDiagramFilenameDOT) - checkErr(err) - gvFile = tmpFile.Name() - defer func() { _ = os.Remove(gvFile) }() + context.parsedModel.TechnicalAssets[id] = model.TechnicalAsset{ + Id: id, + Usage: usage, + Title: title, //fmt.Sprintf("%v", asset["title"]), + Description: withDefault(fmt.Sprintf("%v", asset.Description), title), + Type: technicalAssetType, + Size: technicalAssetSize, + Technology: technicalAssetTechnology, + Tags: context.checkTags(lowerCaseAndTrim(asset.Tags), "technical asset '"+title+"'"), + Machine: technicalAssetMachine, + Internet: asset.Internet, + Encryption: encryption, + MultiTenant: asset.MultiTenant, + Redundant: asset.Redundant, + CustomDevelopedParts: asset.CustomDevelopedParts, + UsedAsClientByHuman: asset.UsedAsClientByHuman, + OutOfScope: asset.OutOfScope, + JustificationOutOfScope: fmt.Sprintf("%v", 
asset.JustificationOutOfScope), + Owner: fmt.Sprintf("%v", asset.Owner), + Confidentiality: confidentiality, + Integrity: integrity, + Availability: availability, + JustificationCiaRating: fmt.Sprintf("%v", asset.JustificationCiaRating), + DataAssetsProcessed: dataAssetsProcessed, + DataAssetsStored: dataAssetsStored, + DataFormatsAccepted: dataFormatsAccepted, + CommunicationLinks: communicationLinks, + DiagramTweakOrder: asset.DiagramTweakOrder, } - dotFile := context.writeDataAssetDiagramGraphvizDOT(gvFile, *context.diagramDPI) - context.renderDataAssetDiagramGraphvizImage(dotFile, *context.outputDir) } - // risks as risks json - if renderRisksJSON { - if *context.verbose { - fmt.Println("Writing risks json") - } - report.WriteRisksJSON(filepath.Join(*context.outputDir, jsonRisksFilename)) - } + // Trust Boundaries =============================================================================== + checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries := make(map[string]bool) + context.parsedModel.TrustBoundaries = make(map[string]model.TrustBoundary) + for title, boundary := range context.modelInput.TrustBoundaries { + id := fmt.Sprintf("%v", boundary.ID) - // technical assets json - if renderTechnicalAssetsJSON { - if *context.verbose { - fmt.Println("Writing technical assets json") + var technicalAssetsInside = make([]string, 0) + if boundary.TechnicalAssetsInside != nil { + parsedInsideAssets := boundary.TechnicalAssetsInside + technicalAssetsInside = make([]string, len(parsedInsideAssets)) + for i, parsedInsideAsset := range parsedInsideAssets { + technicalAssetsInside[i] = fmt.Sprintf("%v", parsedInsideAsset) + _, found := context.parsedModel.TechnicalAssets[technicalAssetsInside[i]] + if !found { + panic(errors.New("missing referenced technical asset " + technicalAssetsInside[i] + " at trust boundary '" + title + "'")) + } + if checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries[technicalAssetsInside[i]] == true { + panic(errors.New("referenced 
technical asset " + technicalAssetsInside[i] + " at trust boundary '" + title + "' is modeled in multiple trust boundaries")) + } + checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries[technicalAssetsInside[i]] = true + //fmt.Println("asset "+technicalAssetsInside[i]+" at i="+strconv.Itoa(i)) + } } - report.WriteTechnicalAssetsJSON(filepath.Join(*context.outputDir, jsonTechnicalAssetsFilename)) - } - // risks as risks json - if renderStatsJSON { - if *context.verbose { - fmt.Println("Writing stats json") + var trustBoundariesNested = make([]string, 0) + if boundary.TrustBoundariesNested != nil { + parsedNestedBoundaries := boundary.TrustBoundariesNested + trustBoundariesNested = make([]string, len(parsedNestedBoundaries)) + for i, parsedNestedBoundary := range parsedNestedBoundaries { + trustBoundariesNested[i] = fmt.Sprintf("%v", parsedNestedBoundary) + } } - report.WriteStatsJSON(filepath.Join(*context.outputDir, jsonStatsFilename)) - } - // risks Excel - if renderRisksExcel { - if *context.verbose { - fmt.Println("Writing risks excel") + var trustBoundaryType types.TrustBoundaryType + switch boundary.Type { + case types.NetworkOnPrem.String(): + trustBoundaryType = types.NetworkOnPrem + case types.NetworkDedicatedHoster.String(): + trustBoundaryType = types.NetworkDedicatedHoster + case types.NetworkVirtualLAN.String(): + trustBoundaryType = types.NetworkVirtualLAN + case types.NetworkCloudProvider.String(): + trustBoundaryType = types.NetworkCloudProvider + case types.NetworkCloudSecurityGroup.String(): + trustBoundaryType = types.NetworkCloudSecurityGroup + case types.NetworkPolicyNamespaceIsolation.String(): + trustBoundaryType = types.NetworkPolicyNamespaceIsolation + case types.ExecutionEnvironment.String(): + trustBoundaryType = types.ExecutionEnvironment + default: + panic(errors.New("unknown 'type' of trust boundary '" + title + "': " + fmt.Sprintf("%v", boundary.Type))) } - report.WriteRisksExcelToFile(filepath.Join(*context.outputDir, 
excelRisksFilename)) - } - // tags Excel - if renderTagsExcel { - if *context.verbose { - fmt.Println("Writing tags excel") + trustBoundary := model.TrustBoundary{ + Id: id, + Title: title, //fmt.Sprintf("%v", boundary["title"]), + Description: withDefault(fmt.Sprintf("%v", boundary.Description), title), + Type: trustBoundaryType, + Tags: context.checkTags(lowerCaseAndTrim(boundary.Tags), "trust boundary '"+title+"'"), + TechnicalAssetsInside: technicalAssetsInside, + TrustBoundariesNested: trustBoundariesNested, + } + context.checkIdSyntax(id) + if _, exists := context.parsedModel.TrustBoundaries[id]; exists { + panic(errors.New("duplicate id used: " + id)) + } + context.parsedModel.TrustBoundaries[id] = trustBoundary + for _, technicalAsset := range trustBoundary.TechnicalAssetsInside { + context.parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId[technicalAsset] = trustBoundary + //fmt.Println("Asset "+technicalAsset+" is directly in trust boundary "+trustBoundary.Id) } - report.WriteTagsExcelToFile(filepath.Join(*context.outputDir, excelTagsFilename)) } + context.checkNestedTrustBoundariesExisting() - if renderPDF { - // hash the YAML input file - f, err := os.Open(*context.modelFilename) - checkErr(err) - defer func() { _ = f.Close() }() - hasher := sha256.New() - if _, err := io.Copy(hasher, f); err != nil { - panic(err) + // Shared Runtime =============================================================================== + context.parsedModel.SharedRuntimes = make(map[string]model.SharedRuntime) + for title, inputRuntime := range context.modelInput.SharedRuntimes { + id := fmt.Sprintf("%v", inputRuntime.ID) + + var technicalAssetsRunning = make([]string, 0) + if inputRuntime.TechnicalAssetsRunning != nil { + parsedRunningAssets := inputRuntime.TechnicalAssetsRunning + technicalAssetsRunning = make([]string, len(parsedRunningAssets)) + for i, parsedRunningAsset := range parsedRunningAssets { + assetId := fmt.Sprintf("%v", parsedRunningAsset) + 
context.checkTechnicalAssetExists(assetId, "shared runtime '"+title+"'", false) + technicalAssetsRunning[i] = assetId + } } - modelHash := hex.EncodeToString(hasher.Sum(nil)) - // report PDF - if *context.verbose { - fmt.Println("Writing report pdf") + + sharedRuntime := model.SharedRuntime{ + Id: id, + Title: title, //fmt.Sprintf("%v", boundary["title"]), + Description: withDefault(fmt.Sprintf("%v", inputRuntime.Description), title), + Tags: context.checkTags(inputRuntime.Tags, "shared runtime '"+title+"'"), + TechnicalAssetsRunning: technicalAssetsRunning, } - report.WriteReportPDF(filepath.Join(*context.outputDir, reportFilename), - filepath.Join(*context.appFolder, *context.templateFilename), - filepath.Join(*context.outputDir, dataFlowDiagramFilenamePNG), - filepath.Join(*context.outputDir, dataAssetDiagramFilenamePNG), - *context.modelFilename, - *context.skipRiskRules, - context.buildTimestamp, - modelHash, - introTextRAA, - context.customRiskRules, - *context.tempFolder) + context.checkIdSyntax(id) + if _, exists := context.parsedModel.SharedRuntimes[id]; exists { + panic(errors.New("duplicate id used: " + id)) + } + context.parsedModel.SharedRuntimes[id] = sharedRuntime } -} -func (context *Context) printBorder(length int, bold bool) { - char := "-" - if bold { - char = "=" - } - for i := 1; i <= length; i++ { - fmt.Print(char) - } - fmt.Println() -} + // Individual Risk Categories (just used as regular risk categories) =============================================================================== + context.parsedModel.IndividualRiskCategories = make(map[string]model.RiskCategory) + for title, individualCategory := range context.modelInput.IndividualRiskCategories { + id := fmt.Sprintf("%v", individualCategory.ID) -func (context *Context) applyRAA() string { - if *context.verbose { - fmt.Println("Applying RAA calculation:", *context.raaPlugin) - } + var function types.RiskFunction + switch individualCategory.Function { + case types.BusinessSide.String(): 
+ function = types.BusinessSide + case types.Architecture.String(): + function = types.Architecture + case types.Development.String(): + function = types.Development + case types.Operations.String(): + function = types.Operations + default: + panic(errors.New("unknown 'function' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.Function))) + } - runner, loadError := new(run.Runner).Load(filepath.Join(*context.binFolder, *context.raaPlugin)) - if loadError != nil { - fmt.Printf("WARNING: raa %q not loaded: %v\n", *context.raaPlugin, loadError) - return "" - } + var stride types.STRIDE + switch individualCategory.STRIDE { + case types.Spoofing.String(): + stride = types.Spoofing + case types.Tampering.String(): + stride = types.Tampering + case types.Repudiation.String(): + stride = types.Repudiation + case types.InformationDisclosure.String(): + stride = types.InformationDisclosure + case types.DenialOfService.String(): + stride = types.DenialOfService + case types.ElevationOfPrivilege.String(): + stride = types.ElevationOfPrivilege + default: + panic(errors.New("unknown 'stride' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.STRIDE))) + } - runError := runner.Run(model.ParsedModelRoot, &model.ParsedModelRoot) - if runError != nil { - fmt.Printf("WARNING: raa %q not applied: %v\n", *context.raaPlugin, runError) - return "" - } + cat := model.RiskCategory{ + Id: id, + Title: title, + Description: withDefault(fmt.Sprintf("%v", individualCategory.Description), title), + Impact: fmt.Sprintf("%v", individualCategory.Impact), + ASVS: fmt.Sprintf("%v", individualCategory.ASVS), + CheatSheet: fmt.Sprintf("%v", individualCategory.CheatSheet), + Action: fmt.Sprintf("%v", individualCategory.Action), + Mitigation: fmt.Sprintf("%v", individualCategory.Mitigation), + Check: fmt.Sprintf("%v", individualCategory.Check), + DetectionLogic: fmt.Sprintf("%v", individualCategory.DetectionLogic), + 
RiskAssessment: fmt.Sprintf("%v", individualCategory.RiskAssessment), + FalsePositives: fmt.Sprintf("%v", individualCategory.FalsePositives), + Function: function, + STRIDE: stride, + ModelFailurePossibleReason: individualCategory.ModelFailurePossibleReason, + CWE: individualCategory.CWE, + } + context.checkIdSyntax(id) + if _, exists := context.parsedModel.IndividualRiskCategories[id]; exists { + panic(errors.New("duplicate id used: " + id)) + } + context.parsedModel.IndividualRiskCategories[id] = cat - return runner.ErrorOutput -} + // NOW THE INDIVIDUAL RISK INSTANCES: + //individualRiskInstances := make([]model.Risk, 0) + if individualCategory.RisksIdentified != nil { // TODO: also add syntax checks of input YAML when linked asset is not found or when synthetic-id is already used... + for title, individualRiskInstance := range individualCategory.RisksIdentified { + var severity types.RiskSeverity + var exploitationLikelihood types.RiskExploitationLikelihood + var exploitationImpact types.RiskExploitationImpact + var mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId string + var dataBreachProbability types.DataBreachProbability + var dataBreachTechnicalAssetIDs []string -func (context *Context) loadCustomRiskRules() { - context.customRiskRules = make(map[string]*risks.CustomRisk) - if len(*context.riskRulesPlugins) > 0 { - if *context.verbose { - fmt.Println("Loading custom risk rules:", *context.riskRulesPlugins) - } + switch individualRiskInstance.Severity { + case types.LowSeverity.String(): + severity = types.LowSeverity + case types.MediumSeverity.String(): + severity = types.MediumSeverity + case types.ElevatedSeverity.String(): + severity = types.ElevatedSeverity + case types.HighSeverity.String(): + severity = types.HighSeverity + case types.CriticalSeverity.String(): + severity = types.CriticalSeverity + case "": // added default + severity = 
types.MediumSeverity + default: + panic(errors.New("unknown 'severity' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.Severity))) + } + + switch individualRiskInstance.ExploitationLikelihood { + case types.Unlikely.String(): + exploitationLikelihood = types.Unlikely + case types.Likely.String(): + exploitationLikelihood = types.Likely + case types.VeryLikely.String(): + exploitationLikelihood = types.VeryLikely + case types.Frequent.String(): + exploitationLikelihood = types.Frequent + case "": // added default + exploitationLikelihood = types.Likely + default: + panic(errors.New("unknown 'exploitation_likelihood' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationLikelihood))) + } + + switch individualRiskInstance.ExploitationImpact { + case types.LowImpact.String(): + exploitationImpact = types.LowImpact + case types.MediumImpact.String(): + exploitationImpact = types.MediumImpact + case types.HighImpact.String(): + exploitationImpact = types.HighImpact + case types.VeryHighImpact.String(): + exploitationImpact = types.VeryHighImpact + case "": // added default + exploitationImpact = types.MediumImpact + default: + panic(errors.New("unknown 'exploitation_impact' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationImpact))) + } + + if len(individualRiskInstance.MostRelevantDataAsset) > 0 { + mostRelevantDataAssetId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantDataAsset) + context.checkDataAssetTargetExists(mostRelevantDataAssetId, "individual risk '"+title+"'") + } + + if len(individualRiskInstance.MostRelevantTechnicalAsset) > 0 { + mostRelevantTechnicalAssetId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantTechnicalAsset) + context.checkTechnicalAssetExists(mostRelevantTechnicalAssetId, "individual risk '"+title+"'", false) + } - for _, pluginFile := range 
strings.Split(*context.riskRulesPlugins, ",") { - if len(pluginFile) > 0 { - runner, loadError := new(run.Runner).Load(pluginFile) - if loadError != nil { - log.Fatalf("WARNING: Custom risk rule %q not loaded: %v\n", pluginFile, loadError) + if len(individualRiskInstance.MostRelevantCommunicationLink) > 0 { + mostRelevantCommunicationLinkId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantCommunicationLink) + context.checkCommunicationLinkExists(mostRelevantCommunicationLinkId, "individual risk '"+title+"'") + } + + if len(individualRiskInstance.MostRelevantTrustBoundary) > 0 { + mostRelevantTrustBoundaryId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantTrustBoundary) + context.checkTrustBoundaryExists(mostRelevantTrustBoundaryId, "individual risk '"+title+"'") + } + + if len(individualRiskInstance.MostRelevantSharedRuntime) > 0 { + mostRelevantSharedRuntimeId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantSharedRuntime) + context.checkSharedRuntimeExists(mostRelevantSharedRuntimeId, "individual risk '"+title+"'") + } + + switch individualRiskInstance.DataBreachProbability { + case types.Improbable.String(): + dataBreachProbability = types.Improbable + case types.Possible.String(): + dataBreachProbability = types.Possible + case types.Probable.String(): + dataBreachProbability = types.Probable + case "": // added default + dataBreachProbability = types.Possible + default: + panic(errors.New("unknown 'data_breach_probability' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.DataBreachProbability))) } - risk := new(risks.CustomRisk) - runError := runner.Run(nil, &risk, "-get-info") - if runError != nil { - log.Fatalf("WARNING: Failed to get ID for custom risk rule %q: %v\n", pluginFile, runError) + if individualRiskInstance.DataBreachTechnicalAssets != nil { + dataBreachTechnicalAssetIDs = make([]string, len(individualRiskInstance.DataBreachTechnicalAssets)) + for i, parsedReferencedAsset := range 
individualRiskInstance.DataBreachTechnicalAssets { + assetId := fmt.Sprintf("%v", parsedReferencedAsset) + context.checkTechnicalAssetExists(assetId, "data breach technical assets of individual risk '"+title+"'", false) + dataBreachTechnicalAssetIDs[i] = assetId + } } - risk.Runner = runner - context.customRiskRules[risk.ID] = risk - if *context.verbose { - fmt.Println("Custom risk rule loaded:", risk.ID) + individualRiskInstance := model.Risk{ + SyntheticId: createSyntheticId(cat.Id, mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId), + Title: fmt.Sprintf("%v", title), + Category: cat, + Severity: severity, + ExploitationLikelihood: exploitationLikelihood, + ExploitationImpact: exploitationImpact, + MostRelevantDataAssetId: mostRelevantDataAssetId, + MostRelevantTechnicalAssetId: mostRelevantTechnicalAssetId, + MostRelevantCommunicationLinkId: mostRelevantCommunicationLinkId, + MostRelevantTrustBoundaryId: mostRelevantTrustBoundaryId, + MostRelevantSharedRuntimeId: mostRelevantSharedRuntimeId, + DataBreachProbability: dataBreachProbability, + DataBreachTechnicalAssetIDs: dataBreachTechnicalAssetIDs, } + context.parsedModel.GeneratedRisksByCategory[cat] = append(context.parsedModel.GeneratedRisksByCategory[cat], individualRiskInstance) } } + } - if *context.verbose { - fmt.Println("Loaded custom risk rules:", len(context.customRiskRules)) + // Risk Tracking =============================================================================== + context.parsedModel.RiskTracking = make(map[string]model.RiskTracking) + for syntheticRiskId, riskTracking := range context.modelInput.RiskTracking { + justification := fmt.Sprintf("%v", riskTracking.Justification) + checkedBy := fmt.Sprintf("%v", riskTracking.CheckedBy) + ticket := fmt.Sprintf("%v", riskTracking.Ticket) + var date time.Time + if len(riskTracking.Date) > 0 { + var parseError error + date, parseError = 
time.Parse("2006-01-02", riskTracking.Date) + if parseError != nil { + panic(errors.New("unable to parse 'date' of risk tracking '" + syntheticRiskId + "': " + riskTracking.Date)) + } + } + + var status types.RiskStatus + switch riskTracking.Status { + case types.Unchecked.String(): + status = types.Unchecked + case types.Mitigated.String(): + status = types.Mitigated + case types.InProgress.String(): + status = types.InProgress + case types.Accepted.String(): + status = types.Accepted + case types.InDiscussion.String(): + status = types.InDiscussion + case types.FalsePositive.String(): + status = types.FalsePositive + default: + panic(errors.New("unknown 'status' value of risk tracking '" + syntheticRiskId + "': " + riskTracking.Status)) + } + + tracking := model.RiskTracking{ + SyntheticRiskId: strings.TrimSpace(syntheticRiskId), + Justification: justification, + CheckedBy: checkedBy, + Ticket: ticket, + Date: date, + Status: status, + } + if strings.Contains(syntheticRiskId, "*") { // contains a wildcard char + context.deferredRiskTrackingDueToWildcardMatching[syntheticRiskId] = tracking + } else { + context.parsedModel.RiskTracking[syntheticRiskId] = tracking } } -} -var validIdSyntax = regexp.MustCompile(`^[a-zA-Z0-9\-]+$`) + // ====================== model consistency check (linking) + for _, technicalAsset := range context.parsedModel.TechnicalAssets { + for _, commLink := range technicalAsset.CommunicationLinks { + context.checkTechnicalAssetExists(commLink.TargetId, "communication link '"+commLink.Title+"' of technical asset '"+technicalAsset.Title+"'", false) + } + } +} -func (context *Context) checkIdSyntax(id string) { - if !validIdSyntax.MatchString(id) { - panic(errors.New("invalid id syntax used (only letters, numbers, and hyphen allowed): " + id)) +func createSyntheticId(categoryId string, + mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId string) string 
{ + result := categoryId + if len(mostRelevantTechnicalAssetId) > 0 { + result += "@" + mostRelevantTechnicalAssetId + } + if len(mostRelevantCommunicationLinkId) > 0 { + result += "@" + mostRelevantCommunicationLinkId + } + if len(mostRelevantTrustBoundaryId) > 0 { + result += "@" + mostRelevantTrustBoundaryId } + if len(mostRelevantSharedRuntimeId) > 0 { + result += "@" + mostRelevantSharedRuntimeId + } + if len(mostRelevantDataAssetId) > 0 { + result += "@" + mostRelevantDataAssetId + } + return result } -func (context *Context) analyze(ginContext *gin.Context) { - context.execute(ginContext, false) +func createDataFlowId(sourceAssetId, title string) string { + reg, err := regexp.Compile("[^A-Za-z0-9]+") + checkErr(err) + return sourceAssetId + ">" + strings.Trim(reg.ReplaceAllString(strings.ToLower(title), "-"), "- ") } -func (context *Context) check(ginContext *gin.Context) { - _, ok := context.execute(ginContext, true) - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "model is ok", - }) +func (context *Context) checkDataAssetTargetExists(referencedAsset, where string) { + if _, ok := context.parsedModel.DataAssets[referencedAsset]; !ok { + panic(errors.New("missing referenced data asset target at " + where + ": " + referencedAsset)) } } -func (context *Context) execute(ginContext *gin.Context, dryRun bool) (yamlContent []byte, ok bool) { - defer func() { - var err error - if r := recover(); r != nil { - context.errorCount++ - err = r.(error) - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": strings.TrimSpace(err.Error()), - }) - ok = false - } - }() +func (context *Context) checkTrustBoundaryExists(referencedId, where string) { + if _, ok := context.parsedModel.TrustBoundaries[referencedId]; !ok { + panic(errors.New("missing referenced trust boundary at " + where + ": " + referencedId)) + } +} - dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(defaultGraphvizDPI))) - checkErr(err) +func (context 
*Context) checkSharedRuntimeExists(referencedId, where string) { + if _, ok := context.parsedModel.SharedRuntimes[referencedId]; !ok { + panic(errors.New("missing referenced shared runtime at " + where + ": " + referencedId)) + } +} - fileUploaded, header, err := ginContext.Request.FormFile("file") - checkErr(err) - - if header.Size > 50000000 { - msg := "maximum model upload file size exceeded (denial-of-service protection)" - log.Println(msg) - ginContext.JSON(http.StatusRequestEntityTooLarge, gin.H{ - "error": msg, - }) - return yamlContent, false +func (context *Context) checkCommunicationLinkExists(referencedId, where string) { + if _, ok := context.parsedModel.CommunicationLinks[referencedId]; !ok { + panic(errors.New("missing referenced communication link at " + where + ": " + referencedId)) } +} - filenameUploaded := strings.TrimSpace(header.Filename) - - tmpInputDir, err := os.MkdirTemp(*context.tempFolder, "threagile-input-") - checkErr(err) - defer func() { _ = os.RemoveAll(tmpInputDir) }() - - tmpModelFile, err := os.CreateTemp(tmpInputDir, "threagile-model-*") - checkErr(err) - defer func() { _ = os.Remove(tmpModelFile.Name()) }() - _, err = io.Copy(tmpModelFile, fileUploaded) - checkErr(err) - - yamlFile := tmpModelFile.Name() - - if strings.ToLower(filepath.Ext(filenameUploaded)) == ".zip" { - // unzip first (including the resources like images etc.) 
- if *context.verbose { - fmt.Println("Decompressing uploaded archive") - } - filenamesUnzipped, err := context.unzip(tmpModelFile.Name(), tmpInputDir) - checkErr(err) - found := false - for _, name := range filenamesUnzipped { - if strings.ToLower(filepath.Ext(name)) == ".yaml" { - yamlFile = name - found = true - break - } - } - if !found { - panic(errors.New("no yaml file found in uploaded archive")) +func (context *Context) checkTechnicalAssetExists(referencedAsset, where string, onlyForTweak bool) { + if _, ok := context.parsedModel.TechnicalAssets[referencedAsset]; !ok { + suffix := "" + if onlyForTweak { + suffix = " (only referenced in diagram tweak)" } + panic(errors.New("missing referenced technical asset target" + suffix + " at " + where + ": " + referencedAsset)) } +} - tmpOutputDir, err := os.MkdirTemp(*context.tempFolder, "threagile-output-") - checkErr(err) - defer func() { _ = os.RemoveAll(tmpOutputDir) }() - - tmpResultFile, err := os.CreateTemp(*context.tempFolder, "threagile-result-*.zip") - checkErr(err) - defer func() { _ = os.Remove(tmpResultFile.Name()) }() - - if dryRun { - context.doItViaRuntimeCall(yamlFile, tmpOutputDir, false, false, false, false, false, true, true, true, 40) - } else { - context.doItViaRuntimeCall(yamlFile, tmpOutputDir, true, true, true, true, true, true, true, true, dpi) +func (context *Context) checkNestedTrustBoundariesExisting() { + for _, trustBoundary := range context.parsedModel.TrustBoundaries { + for _, nestedId := range trustBoundary.TrustBoundariesNested { + if _, ok := context.parsedModel.TrustBoundaries[nestedId]; !ok { + panic(errors.New("missing referenced nested trust boundary: " + nestedId)) + } + } } - checkErr(err) - - yamlContent, err = os.ReadFile(yamlFile) - checkErr(err) - err = os.WriteFile(filepath.Join(tmpOutputDir, inputFile), yamlContent, 0400) - checkErr(err) +} - if !dryRun { - files := []string{ - filepath.Join(tmpOutputDir, inputFile), - filepath.Join(tmpOutputDir, 
dataFlowDiagramFilenamePNG), - filepath.Join(tmpOutputDir, dataAssetDiagramFilenamePNG), - filepath.Join(tmpOutputDir, reportFilename), - filepath.Join(tmpOutputDir, excelRisksFilename), - filepath.Join(tmpOutputDir, excelTagsFilename), - filepath.Join(tmpOutputDir, jsonRisksFilename), - filepath.Join(tmpOutputDir, jsonTechnicalAssetsFilename), - filepath.Join(tmpOutputDir, jsonStatsFilename), - } - if keepDiagramSourceFiles { - files = append(files, filepath.Join(tmpOutputDir, dataFlowDiagramFilenameDOT)) - files = append(files, filepath.Join(tmpOutputDir, dataAssetDiagramFilenameDOT)) - } - err = context.zipFiles(tmpResultFile.Name(), files) - checkErr(err) - if *context.verbose { - log.Println("Streaming back result file: " + tmpResultFile.Name()) +// in order to prevent Path-Traversal like stuff... +func removePathElementsFromImageFiles(overview input.Overview) input.Overview { + for i := range overview.Images { + newValue := make(map[string]string) + for file, desc := range overview.Images[i] { + newValue[filepath.Base(file)] = desc } - ginContext.FileAttachment(tmpResultFile.Name(), "threagile-result.zip") + overview.Images[i] = newValue } - context.successCount++ - return yamlContent, true + return overview } -// ultimately to avoid any in-process memory and/or data leaks by the used third party libs like PDF generation: exec and quit -func (context *Context) doItViaRuntimeCall(modelFile string, outputDir string, - generateDataFlowDiagram, generateDataAssetDiagram, generateReportPdf, generateRisksExcel, generateTagsExcel, generateRisksJSON, generateTechnicalAssetsJSON, generateStatsJSON bool, - dpi int) { - // Remember to also add the same args to the exec based sub-process calls! 
- var cmd *exec.Cmd - args := []string{"-model", modelFile, "-output", outputDir, "-execute-model-macro", *context.executeModelMacro, "-raa-run", *context.raaPlugin, "-custom-risk-rules-plugins", *context.riskRulesPlugins, "-skip-risk-rules", *context.skipRiskRules, "-diagram-dpi", strconv.Itoa(dpi)} +func (context *Context) writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.File { if *context.verbose { - args = append(args, "-verbose") + fmt.Println("Writing data flow diagram input") } - if *context.ignoreOrphanedRiskTracking { // TODO why add all them as arguments, when they are also variables on outer level? - args = append(args, "-ignore-orphaned-risk-tracking") + var dotContent strings.Builder + dotContent.WriteString("digraph generatedModel { concentrate=false \n") + + // Metadata init =============================================================================== + tweaks := "" + if context.parsedModel.DiagramTweakNodesep > 0 { + tweaks += "\n nodesep=\"" + strconv.Itoa(context.parsedModel.DiagramTweakNodesep) + "\"" } - if generateDataFlowDiagram { - args = append(args, "-generate-data-flow-diagram") + if context.parsedModel.DiagramTweakRanksep > 0 { + tweaks += "\n ranksep=\"" + strconv.Itoa(context.parsedModel.DiagramTweakRanksep) + "\"" } - if generateDataAssetDiagram { - args = append(args, "-generate-data-asset-diagram") + suppressBidirectionalArrows := true + splines := "ortho" + if len(context.parsedModel.DiagramTweakEdgeLayout) > 0 { + switch context.parsedModel.DiagramTweakEdgeLayout { + case "spline": + splines = "spline" + context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false + case "polyline": + splines = "polyline" + context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false + case "ortho": + splines = "ortho" + suppressBidirectionalArrows = true + case "curved": + splines = "curved" + context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false + case "false": + 
splines = "false" + context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false + default: + panic(errors.New("unknown value for diagram_tweak_suppress_edge_labels (spline, polyline, ortho, curved, false): " + + context.parsedModel.DiagramTweakEdgeLayout)) + } } - if generateReportPdf { - args = append(args, "-generate-report-pdf") + rankdir := "TB" + if context.parsedModel.DiagramTweakLayoutLeftToRight { + rankdir = "LR" } - if generateRisksExcel { - args = append(args, "-generate-risks-excel") + modelTitle := "" + if context.addModelTitle { + modelTitle = `label="` + context.parsedModel.Title + `"` } - if generateTagsExcel { - args = append(args, "-generate-tags-excel") + dotContent.WriteString(` graph [ ` + modelTitle + ` + labelloc=t + fontname="Verdana" + fontsize=40 + outputorder="nodesfirst" + dpi=` + strconv.Itoa(dpi) + ` + splines=` + splines + ` + rankdir="` + rankdir + `" +` + tweaks + ` + ]; + node [ + fontname="Verdana" + fontsize="20" + ]; + edge [ + shape="none" + fontname="Verdana" + fontsize="18" + ]; +`) + + // Trust Boundaries =============================================================================== + var subgraphSnippetsById = make(map[string]string) + // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order + // range over them in sorted (hence re-producible) way: + keys := make([]string, 0) + for k := range context.parsedModel.TrustBoundaries { + keys = append(keys, k) } - if generateRisksJSON { - args = append(args, "-generate-risks-json") + sort.Strings(keys) + for _, key := range keys { + trustBoundary := context.parsedModel.TrustBoundaries[key] + var snippet strings.Builder + if len(trustBoundary.TechnicalAssetsInside) > 0 || len(trustBoundary.TrustBoundariesNested) > 0 { + if context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks { + // see 
https://stackoverflow.com/questions/17247455/how-do-i-add-extra-space-between-clusters?noredirect=1&lq=1 + snippet.WriteString("\n subgraph cluster_space_boundary_for_layout_only_1" + hash(trustBoundary.Id) + " {\n") + snippet.WriteString(` graph [ + dpi=` + strconv.Itoa(dpi) + ` + label=<
> + fontsize="21" + style="invis" + color="green" + fontcolor="green" + margin="50.0" + penwidth="6.5" + outputorder="nodesfirst" + ];`) + } + snippet.WriteString("\n subgraph cluster_" + hash(trustBoundary.Id) + " {\n") + color, fontColor, bgColor, style, fontname := colors.RgbHexColorTwilight(), colors.RgbHexColorTwilight() /*"#550E0C"*/, "#FAFAFA", "dashed", "Verdana" + penWidth := 4.5 + if len(trustBoundary.TrustBoundariesNested) > 0 { + //color, fontColor, style, fontname = colors.Blue, colors.Blue, "dashed", "Verdana" + penWidth = 5.5 + } + if len(trustBoundary.ParentTrustBoundaryID(&context.parsedModel)) > 0 { + bgColor = "#F1F1F1" + } + if trustBoundary.Type == types.NetworkPolicyNamespaceIsolation { + fontColor, bgColor = "#222222", "#DFF4FF" + } + if trustBoundary.Type == types.ExecutionEnvironment { + fontColor, bgColor, style = "#555555", "#FFFFF0", "dotted" + } + snippet.WriteString(` graph [ + dpi=` + strconv.Itoa(dpi) + ` + label=<
` + trustBoundary.Title + ` (` + trustBoundary.Type.String() + `)
> + fontsize="21" + style="` + style + `" + color="` + color + `" + bgcolor="` + bgColor + `" + fontcolor="` + fontColor + `" + fontname="` + fontname + `" + penwidth="` + fmt.Sprintf("%f", penWidth) + `" + forcelabels=true + outputorder="nodesfirst" + margin="50.0" + ];`) + snippet.WriteString("\n") + keys := trustBoundary.TechnicalAssetsInside + sort.Strings(keys) + for _, technicalAssetInside := range keys { + //log.Println("About to add technical asset link to trust boundary: ", technicalAssetInside) + technicalAsset := context.parsedModel.TechnicalAssets[technicalAssetInside] + snippet.WriteString(hash(technicalAsset.Id)) + snippet.WriteString(";\n") + } + keys = trustBoundary.TrustBoundariesNested + sort.Strings(keys) + for _, trustBoundaryNested := range keys { + //log.Println("About to add nested trust boundary to trust boundary: ", trustBoundaryNested) + trustBoundaryNested := context.parsedModel.TrustBoundaries[trustBoundaryNested] + snippet.WriteString("LINK-NEEDS-REPLACED-BY-cluster_" + hash(trustBoundaryNested.Id)) + snippet.WriteString(";\n") + } + snippet.WriteString(" }\n\n") + if context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks { + snippet.WriteString(" }\n\n") + } + } + subgraphSnippetsById[hash(trustBoundary.Id)] = snippet.String() } - if generateTechnicalAssetsJSON { - args = append(args, "-generate-technical-assets-json") + // here replace links and remove from map after replacement (i.e. 
move snippet into nested) + for i := range subgraphSnippetsById { + re := regexp.MustCompile(`LINK-NEEDS-REPLACED-BY-cluster_([0-9]*);`) + for { + matches := re.FindStringSubmatch(subgraphSnippetsById[i]) + if len(matches) > 0 { + embeddedSnippet := " //nested:" + subgraphSnippetsById[matches[1]] + subgraphSnippetsById[i] = strings.ReplaceAll(subgraphSnippetsById[i], matches[0], embeddedSnippet) + subgraphSnippetsById[matches[1]] = "" // to something like remove it + } else { + break + } + } } - if generateStatsJSON { - args = append(args, "-generate-stats-json") + // now write them all + keys = make([]string, 0) + for k := range subgraphSnippetsById { + keys = append(keys, k) } - self, nameError := os.Executable() - if nameError != nil { - panic(nameError) - } - cmd = exec.Command(self, args...) - out, err := cmd.CombinedOutput() - if err != nil { - panic(errors.New(string(out))) - } else { - if *context.verbose && len(out) > 0 { - fmt.Println("---") - fmt.Print(string(out)) - fmt.Println("---") - } + sort.Strings(keys) + for _, key := range keys { + snippet := subgraphSnippetsById[key] + dotContent.WriteString(snippet) } -} - -func (context *Context) StartServer() { - router := gin.Default() - router.LoadHTMLGlob("server/static/*.html") // <== - router.GET("/", func(c *gin.Context) { - c.HTML(http.StatusOK, "index.html", gin.H{}) - }) - router.HEAD("/", func(c *gin.Context) { - c.HTML(http.StatusOK, "index.html", gin.H{}) - }) - router.StaticFile("/threagile.png", "server/static/threagile.png") // <== - router.StaticFile("/site.webmanifest", "server/static/site.webmanifest") - router.StaticFile("/favicon.ico", "server/static/favicon.ico") - router.StaticFile("/favicon-32x32.png", "server/static/favicon-32x32.png") - router.StaticFile("/favicon-16x16.png", "server/static/favicon-16x16.png") - router.StaticFile("/apple-touch-icon.png", "server/static/apple-touch-icon.png") - router.StaticFile("/android-chrome-512x512.png", 
"server/static/android-chrome-512x512.png") - router.StaticFile("/android-chrome-192x192.png", "server/static/android-chrome-192x192.png") - - router.StaticFile("/schema.json", "schema.json") - router.StaticFile("/live-templates.txt", "live-templates.txt") - router.StaticFile("/openapi.yaml", "openapi.yaml") - router.StaticFile("/swagger-ui/", "server/static/swagger-ui/index.html") - router.StaticFile("/swagger-ui/index.html", "server/static/swagger-ui/index.html") - router.StaticFile("/swagger-ui/oauth2-redirect.html", "server/static/swagger-ui/oauth2-redirect.html") - router.StaticFile("/swagger-ui/swagger-ui.css", "server/static/swagger-ui/swagger-ui.css") - router.StaticFile("/swagger-ui/swagger-ui.js", "server/static/swagger-ui/swagger-ui.js") - router.StaticFile("/swagger-ui/swagger-ui-bundle.js", "server/static/swagger-ui/swagger-ui-bundle.js") - router.StaticFile("/swagger-ui/swagger-ui-standalone-preset.js", "server/static/swagger-ui/swagger-ui-standalone-preset.js") // <== - - router.GET("/threagile-example-model.yaml", context.exampleFile) - router.GET("/threagile-stub-model.yaml", context.stubFile) - - router.GET("/meta/ping", func(c *gin.Context) { - c.JSON(200, gin.H{ - "message": "pong", - }) - }) - router.GET("/meta/version", func(c *gin.Context) { - c.JSON(200, gin.H{ - "version": model.ThreagileVersion, - "build_timestamp": context.buildTimestamp, - }) - }) - router.GET("/meta/types", func(c *gin.Context) { - c.JSON(200, gin.H{ - "quantity": context.arrayOfStringValues(model.QuantityValues()), - "confidentiality": context.arrayOfStringValues(model.ConfidentialityValues()), - "criticality": context.arrayOfStringValues(model.CriticalityValues()), - "technical_asset_type": context.arrayOfStringValues(model.TechnicalAssetTypeValues()), - "technical_asset_size": context.arrayOfStringValues(model.TechnicalAssetSizeValues()), - "authorization": context.arrayOfStringValues(model.AuthorizationValues()), - "authentication": 
context.arrayOfStringValues(model.AuthenticationValues()), - "usage": context.arrayOfStringValues(model.UsageValues()), - "encryption": context.arrayOfStringValues(model.EncryptionStyleValues()), - "data_format": context.arrayOfStringValues(model.DataFormatValues()), - "protocol": context.arrayOfStringValues(model.ProtocolValues()), - "technical_asset_technology": context.arrayOfStringValues(model.TechnicalAssetTechnologyValues()), - "technical_asset_machine": context.arrayOfStringValues(model.TechnicalAssetMachineValues()), - "trust_boundary_type": context.arrayOfStringValues(model.TrustBoundaryTypeValues()), - "data_breach_probability": context.arrayOfStringValues(model.DataBreachProbabilityValues()), - "risk_severity": context.arrayOfStringValues(model.RiskSeverityValues()), - "risk_exploitation_likelihood": context.arrayOfStringValues(model.RiskExploitationLikelihoodValues()), - "risk_exploitation_impact": context.arrayOfStringValues(model.RiskExploitationImpactValues()), - "risk_function": context.arrayOfStringValues(model.RiskFunctionValues()), - "risk_status": context.arrayOfStringValues(model.RiskStatusValues()), - "stride": context.arrayOfStringValues(model.STRIDEValues()), - }) - }) - - // TODO router.GET("/meta/risk-rules", listRiskRules) - // TODO router.GET("/meta/model-macros", listModelMacros) - - router.GET("/meta/stats", context.stats) - - router.POST("/direct/analyze", context.analyze) - router.POST("/direct/check", context.check) - router.GET("/direct/stub", context.stubFile) - - router.POST("/auth/keys", context.createKey) - router.DELETE("/auth/keys", context.deleteKey) - router.POST("/auth/tokens", context.createToken) - router.DELETE("/auth/tokens", context.deleteToken) - router.POST("/models", context.createNewModel) - router.GET("/models", context.listModels) - router.DELETE("/models/:model-id", context.deleteModel) - router.GET("/models/:model-id", context.getModel) - router.PUT("/models/:model-id", context.importModel) - 
router.GET("/models/:model-id/data-flow-diagram", context.streamDataFlowDiagram) - router.GET("/models/:model-id/data-asset-diagram", context.streamDataAssetDiagram) - router.GET("/models/:model-id/report-pdf", context.streamReportPDF) - router.GET("/models/:model-id/risks-excel", context.streamRisksExcel) - router.GET("/models/:model-id/tags-excel", context.streamTagsExcel) - router.GET("/models/:model-id/risks", context.streamRisksJSON) - router.GET("/models/:model-id/technical-assets", context.streamTechnicalAssetsJSON) - router.GET("/models/:model-id/stats", context.streamStatsJSON) - router.GET("/models/:model-id/analysis", context.analyzeModelOnServerDirectly) + // Technical Assets =============================================================================== + // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order + // range over them in sorted (hence re-producible) way: + // Convert map to slice of values: + var techAssets []model.TechnicalAsset + for _, techAsset := range context.parsedModel.TechnicalAssets { + techAssets = append(techAssets, techAsset) + } + sort.Sort(model.ByOrderAndIdSort(techAssets)) + for _, technicalAsset := range techAssets { + dotContent.WriteString(context.makeTechAssetNode(technicalAsset, false)) + dotContent.WriteString("\n") + } - router.GET("/models/:model-id/cover", context.getCover) - router.PUT("/models/:model-id/cover", context.setCover) - router.GET("/models/:model-id/overview", context.getOverview) - router.PUT("/models/:model-id/overview", context.setOverview) - //router.GET("/models/:model-id/questions", getQuestions) - //router.PUT("/models/:model-id/questions", setQuestions) - router.GET("/models/:model-id/abuse-cases", context.getAbuseCases) - router.PUT("/models/:model-id/abuse-cases", context.setAbuseCases) - router.GET("/models/:model-id/security-requirements", context.getSecurityRequirements) - 
router.PUT("/models/:model-id/security-requirements", context.setSecurityRequirements) - //router.GET("/models/:model-id/tags", getTags) - //router.PUT("/models/:model-id/tags", setTags) + // Data Flows (Technical Communication Links) =============================================================================== + for _, technicalAsset := range techAssets { + for _, dataFlow := range technicalAsset.CommunicationLinks { + sourceId := technicalAsset.Id + targetId := dataFlow.TargetId + //log.Println("About to add link from", sourceId, "to", targetId, "with id", dataFlow.Id) + var arrowStyle, arrowColor, readOrWriteHead, readOrWriteTail string + if dataFlow.Readonly { + readOrWriteHead = "empty" + readOrWriteTail = "odot" + } else { + readOrWriteHead = "normal" + readOrWriteTail = "dot" + } + dir := "forward" + if dataFlow.IsBidirectional() { + if !suppressBidirectionalArrows { // as it does not work as bug in graphviz with ortho: https://gitlab.com/graphviz/graphviz/issues/144 + dir = "both" + } + } + arrowStyle = ` style="` + dataFlow.DetermineArrowLineStyle() + `" penwidth="` + dataFlow.DetermineArrowPenWidth(&context.parsedModel) + `" arrowtail="` + readOrWriteTail + `" arrowhead="` + readOrWriteHead + `" dir="` + dir + `" arrowsize="2.0" ` + arrowColor = ` color="` + dataFlow.DetermineArrowColor(&context.parsedModel) + `"` + tweaks := "" + if dataFlow.DiagramTweakWeight > 0 { + tweaks += " weight=\"" + strconv.Itoa(dataFlow.DiagramTweakWeight) + "\" " + } - router.GET("/models/:model-id/data-assets", context.getDataAssets) - router.POST("/models/:model-id/data-assets", context.createNewDataAsset) - router.GET("/models/:model-id/data-assets/:data-asset-id", context.getDataAsset) - router.PUT("/models/:model-id/data-assets/:data-asset-id", context.setDataAsset) - router.DELETE("/models/:model-id/data-assets/:data-asset-id", context.deleteDataAsset) + dotContent.WriteString("\n") + dotContent.WriteString(" " + hash(sourceId) + " -> " + hash(targetId) + + ` [` + 
arrowColor + ` ` + arrowStyle + tweaks + ` constraint=` + strconv.FormatBool(dataFlow.DiagramTweakConstraint) + ` `) + if !context.parsedModel.DiagramTweakSuppressEdgeLabels { + dotContent.WriteString(` xlabel="` + encode(dataFlow.Protocol.String()) + `" fontcolor="` + dataFlow.DetermineLabelColor(&context.parsedModel) + `" `) + } + dotContent.WriteString(" ];\n") + } + } - router.GET("/models/:model-id/trust-boundaries", context.getTrustBoundaries) - // router.POST("/models/:model-id/trust-boundaries", createNewTrustBoundary) - // router.GET("/models/:model-id/trust-boundaries/:trust-boundary-id", getTrustBoundary) - // router.PUT("/models/:model-id/trust-boundaries/:trust-boundary-id", setTrustBoundary) - // router.DELETE("/models/:model-id/trust-boundaries/:trust-boundary-id", deleteTrustBoundary) + dotContent.WriteString(context.makeDiagramInvisibleConnectionsTweaks()) + dotContent.WriteString(context.makeDiagramSameRankNodeTweaks()) - router.GET("/models/:model-id/shared-runtimes", context.getSharedRuntimes) - router.POST("/models/:model-id/shared-runtimes", context.createNewSharedRuntime) - router.GET("/models/:model-id/shared-runtimes/:shared-runtime-id", context.getSharedRuntime) - router.PUT("/models/:model-id/shared-runtimes/:shared-runtime-id", context.setSharedRuntime) - router.DELETE("/models/:model-id/shared-runtimes/:shared-runtime-id", context.deleteSharedRuntime) + dotContent.WriteString("}") - fmt.Println("Threagile server running...") - _ = router.Run(":" + strconv.Itoa(context.ServerPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified -} + //fmt.Println(dotContent.String()) -func (context *Context) exampleFile(ginContext *gin.Context) { - example, err := os.ReadFile(filepath.Join(*context.appFolder, "threagile-example-model.yaml")) + // Write the DOT file + file, err := os.Create(diagramFilenameDOT) checkErr(err) - ginContext.Data(http.StatusOK, gin.MIMEYAML, example) -} - -func (context *Context) stubFile(ginContext 
*gin.Context) { - stub, err := os.ReadFile(filepath.Join(*context.appFolder, "threagile-stub-model.yaml")) + defer func() { _ = file.Close() }() + _, err = fmt.Fprintln(file, dotContent.String()) checkErr(err) - ginContext.Data(http.StatusOK, gin.MIMEYAML, context.addSupportedTags(stub)) // TODO use also the MIMEYAML way of serving YAML in model export? + return file } -func (context *Context) addSupportedTags(input []byte) []byte { - // add distinct tags as "tags_available" - supportedTags := make(map[string]bool) - for _, customRule := range context.customRiskRules { - for _, tag := range customRule.Tags { - supportedTags[strings.ToLower(tag)] = true +func (context *Context) makeDiagramSameRankNodeTweaks() string { + // see https://stackoverflow.com/questions/25734244/how-do-i-place-nodes-on-the-same-level-in-dot + tweak := "" + if len(context.parsedModel.DiagramTweakSameRankAssets) > 0 { + for _, sameRank := range context.parsedModel.DiagramTweakSameRankAssets { + assetIDs := strings.Split(sameRank, ":") + if len(assetIDs) > 0 { + tweak += "{ rank=same; " + for _, id := range assetIDs { + context.checkTechnicalAssetExists(id, "diagram tweak same-rank", true) + if len(context.parsedModel.TechnicalAssets[id].GetTrustBoundaryId(&context.parsedModel)) > 0 { + panic(errors.New("technical assets (referenced in same rank diagram tweak) are inside trust boundaries: " + + fmt.Sprintf("%v", context.parsedModel.DiagramTweakSameRankAssets))) + } + tweak += " " + hash(id) + "; " + } + tweak += " }" + } } } - for _, tag := range accidentalsecretleak.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range codebackdooring.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range containerbaseimagebackdooring.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range containerplatformescape.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range 
crosssiterequestforgery.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range crosssitescripting.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range dosriskyaccessacrosstrustboundary.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range incompletemodel.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range ldapinjection.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range missingauthentication.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range missingauthenticationsecondfactor.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range missingbuildinfrastructure.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range missingcloudhardening.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range missingfilevalidation.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range missinghardening.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range missingidentitypropagation.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range missingidentityproviderisolation.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range missingidentitystore.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range missingnetworksegmentation.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range missingvault.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range missingvaultisolation.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range missingwaf.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag 
:= range mixedtargetsonsharedruntime.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range pathtraversal.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range pushinsteadofpulldeployment.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range searchqueryinjection.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range serversiderequestforgery.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range serviceregistrypoisoning.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range sqlnosqlinjection.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true + return tweak +} + +func (context *Context) makeDiagramInvisibleConnectionsTweaks() string { + // see https://stackoverflow.com/questions/2476575/how-to-control-node-placement-in-graphviz-i-e-avoid-edge-crossings + tweak := "" + if len(context.parsedModel.DiagramTweakInvisibleConnectionsBetweenAssets) > 0 { + for _, invisibleConnections := range context.parsedModel.DiagramTweakInvisibleConnectionsBetweenAssets { + assetIDs := strings.Split(invisibleConnections, ":") + if len(assetIDs) == 2 { + context.checkTechnicalAssetExists(assetIDs[0], "diagram tweak connections", true) + context.checkTechnicalAssetExists(assetIDs[1], "diagram tweak connections", true) + tweak += "\n" + hash(assetIDs[0]) + " -> " + hash(assetIDs[1]) + " [style=invis]; \n" + } + } } - for _, tag := range uncheckeddeployment.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true + return tweak +} + +func (context *Context) DoIt() { + + defer func() { + var err error + if r := recover(); r != nil { + err = r.(error) + if *context.verbose { + log.Println(err) + } + _, _ = os.Stderr.WriteString(err.Error() + "\n") + os.Exit(2) + } + }() + if len(*context.executeModelMacro) > 0 { + fmt.Println(docs.Logo + "\n\n" + docs.VersionText) + } else 
{ + if *context.verbose { + fmt.Println("Writing into output directory:", *context.outputDir) + } } - for _, tag := range unencryptedasset.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range unencryptedcommunication.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range unguardedaccessfrominternet.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range unguardeddirectdatastoreaccess.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range unnecessarycommunicationlink.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range unnecessarydataasset.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range unnecessarydatatransfer.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range unnecessarytechnicalasset.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range untrusteddeserialization.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range wrongcommunicationlinkcontent.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range wrongtrustboundarycontent.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - for _, tag := range xmlexternalentity.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - tags := make([]string, 0, len(supportedTags)) - for t := range supportedTags { - tags = append(tags, t) - } - if len(tags) == 0 { - return input - } - sort.Strings(tags) - if *context.verbose { - fmt.Print("Supported tags of all risk rules: ") - for i, tag := range tags { - if i > 0 { - fmt.Print(", ") - } - fmt.Print(tag) - } - fmt.Println() - } - replacement := "tags_available:" - for _, tag := range tags { - replacement += "\n - " + tag - } - return []byte(strings.Replace(string(input), "tags_available:", replacement, 
1)) -} - -const keySize = 32 - -type timeoutStruct struct { - xorRand []byte - createdNanoTime, lastAccessedNanoTime int64 -} - -var mapTokenHashToTimeoutStruct = make(map[string]timeoutStruct) -var mapFolderNameToTokenHash = make(map[string]string) - -func (context *Context) createToken(ginContext *gin.Context) { - folderName, key, ok := context.checkKeyToFolderName(ginContext) - if !ok { - return - } - context.globalLock.Lock() - defer context.globalLock.Unlock() - if tokenHash, exists := mapFolderNameToTokenHash[folderName]; exists { - // invalidate previous token - delete(mapTokenHashToTimeoutStruct, tokenHash) - } - // create a strong random 256 bit value (used to xor) - xorBytesArr := make([]byte, keySize) - n, err := rand.Read(xorBytesArr[:]) - if n != keySize || err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to create token", - }) - return - } - now := time.Now().UnixNano() - token := xor(key, xorBytesArr) - tokenHash := hashSHA256(token) - housekeepingTokenMaps() - mapTokenHashToTimeoutStruct[tokenHash] = timeoutStruct{ - xorRand: xorBytesArr, - createdNanoTime: now, - lastAccessedNanoTime: now, - } - mapFolderNameToTokenHash[folderName] = tokenHash - ginContext.JSON(http.StatusCreated, gin.H{ - "token": base64.RawURLEncoding.EncodeToString(token[:]), - }) -} -func (context *Context) deleteToken(ginContext *gin.Context) { - header := tokenHeader{} - if err := ginContext.ShouldBindHeader(&header); err != nil { - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", - }) - return - } - token, err := base64.RawURLEncoding.DecodeString(strings.TrimSpace(header.Token)) - if len(token) == 0 || err != nil { - if err != nil { - log.Println(err) - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", - }) - return - } - context.globalLock.Lock() - defer context.globalLock.Unlock() - deleteTokenHashFromMaps(hashSHA256(token)) - ginContext.JSON(http.StatusOK, 
gin.H{ - "message": "token deleted", - }) -} + context.parseModel() + introTextRAA := context.applyRAA() -const extremeShortTimeoutsForTesting = false + context.customRiskRules = risks.LoadCustomRiskRules(strings.Split(*context.riskRulesPlugins, ","), context.progressReporter) + context.applyRiskGeneration() + context.applyWildcardRiskTrackingEvaluation() + context.checkRiskTracking() -func housekeepingTokenMaps() { - now := time.Now().UnixNano() - for tokenHash, val := range mapTokenHashToTimeoutStruct { - if extremeShortTimeoutsForTesting { - // remove all elements older than 1 minute (= 60000000000 ns) soft - // and all elements older than 3 minutes (= 180000000000 ns) hard - if now-val.lastAccessedNanoTime > 60000000000 || now-val.createdNanoTime > 180000000000 { - fmt.Println("About to remove a token hash from maps") - deleteTokenHashFromMaps(tokenHash) - } - } else { - // remove all elements older than 30 minutes (= 1800000000000 ns) soft - // and all elements older than 10 hours (= 36000000000000 ns) hard - if now-val.lastAccessedNanoTime > 1800000000000 || now-val.createdNanoTime > 36000000000000 { - deleteTokenHashFromMaps(tokenHash) - } + if len(*context.executeModelMacro) > 0 { + var macroDetails macros.MacroDetails + switch *context.executeModelMacro { + case addbuildpipeline.GetMacroDetails().ID: + macroDetails = addbuildpipeline.GetMacroDetails() + case addvault.GetMacroDetails().ID: + macroDetails = addvault.GetMacroDetails() + case prettyprint.GetMacroDetails().ID: + macroDetails = prettyprint.GetMacroDetails() + case removeunusedtags.GetMacroDetails().ID: + macroDetails = removeunusedtags.GetMacroDetails() + case seedrisktracking.GetMacroDetails().ID: + macroDetails = seedrisktracking.GetMacroDetails() + case seedtags.GetMacroDetails().ID: + macroDetails = seedtags.GetMacroDetails() + default: + log.Fatal("Unknown model macro: ", *context.executeModelMacro) } - } -} - -func deleteTokenHashFromMaps(tokenHash string) { - 
delete(mapTokenHashToTimeoutStruct, tokenHash) - for folderName, check := range mapFolderNameToTokenHash { - if check == tokenHash { - delete(mapFolderNameToTokenHash, folderName) - break + fmt.Println("Executing model macro:", macroDetails.ID) + fmt.Println() + fmt.Println() + context.printBorder(len(macroDetails.Title), true) + fmt.Println(macroDetails.Title) + context.printBorder(len(macroDetails.Title), true) + if len(macroDetails.Description) > 0 { + fmt.Println(macroDetails.Description) } - } -} - -func xor(key []byte, xor []byte) []byte { - if len(key) != len(xor) { - panic(errors.New("key length not matching XOR length")) - } - result := make([]byte, len(xor)) - for i, b := range key { - result[i] = b ^ xor[i] - } - return result -} - -func (context *Context) analyzeModelOnServerDirectly(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer func() { - context.unlockFolder(folderNameOfKey) + fmt.Println() + reader := bufio.NewReader(os.Stdin) var err error - if r := recover(); r != nil { - err = r.(error) - if *context.verbose { - log.Println(err) + var nextQuestion macros.MacroQuestion + for { + switch macroDetails.ID { + case addbuildpipeline.GetMacroDetails().ID: + nextQuestion, err = addbuildpipeline.GetNextQuestion(&context.parsedModel) + case addvault.GetMacroDetails().ID: + nextQuestion, err = addvault.GetNextQuestion(&context.parsedModel) + case prettyprint.GetMacroDetails().ID: + nextQuestion, err = prettyprint.GetNextQuestion() + case removeunusedtags.GetMacroDetails().ID: + nextQuestion, err = removeunusedtags.GetNextQuestion() + case seedrisktracking.GetMacroDetails().ID: + nextQuestion, err = seedrisktracking.GetNextQuestion() + case seedtags.GetMacroDetails().ID: + nextQuestion, err = seedtags.GetNextQuestion() } - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": strings.TrimSpace(err.Error()), - }) - ok 
= false - } - }() - - dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(defaultGraphvizDPI))) - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return - } - - _, yamlText, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if !ok { - return - } - tmpModelFile, err := os.CreateTemp(*context.tempFolder, "threagile-direct-analyze-*") - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return - } - defer func() { _ = os.Remove(tmpModelFile.Name()) }() - tmpOutputDir, err := os.MkdirTemp(*context.tempFolder, "threagile-direct-analyze-") - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return - } - defer func() { _ = os.RemoveAll(tmpOutputDir) }() - tmpResultFile, err := os.CreateTemp(*context.tempFolder, "threagile-result-*.zip") - checkErr(err) - defer func() { _ = os.Remove(tmpResultFile.Name()) }() - - err = os.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) - - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, true, true, true, true, true, true, true, true, dpi) - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return - } - err = os.WriteFile(filepath.Join(tmpOutputDir, inputFile), []byte(yamlText), 0400) - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return - } - - files := []string{ - filepath.Join(tmpOutputDir, inputFile), - filepath.Join(tmpOutputDir, dataFlowDiagramFilenamePNG), - filepath.Join(tmpOutputDir, dataAssetDiagramFilenamePNG), - filepath.Join(tmpOutputDir, reportFilename), - filepath.Join(tmpOutputDir, excelRisksFilename), - filepath.Join(tmpOutputDir, excelTagsFilename), - filepath.Join(tmpOutputDir, jsonRisksFilename), - filepath.Join(tmpOutputDir, jsonTechnicalAssetsFilename), - filepath.Join(tmpOutputDir, jsonStatsFilename), - } - if keepDiagramSourceFiles { - files = append(files, filepath.Join(tmpOutputDir, dataFlowDiagramFilenameDOT)) - files = append(files, 
filepath.Join(tmpOutputDir, dataAssetDiagramFilenameDOT)) - } - err = context.zipFiles(tmpResultFile.Name(), files) - checkErr(err) - if *context.verbose { - fmt.Println("Streaming back result file: " + tmpResultFile.Name()) - } - ginContext.FileAttachment(tmpResultFile.Name(), "threagile-result.zip") -} - -type responseType int - -const ( - dataFlowDiagram responseType = iota - dataAssetDiagram - reportPDF - risksExcel - tagsExcel - risksJSON - technicalAssetsJSON - statsJSON -) - -func (context *Context) streamDataFlowDiagram(ginContext *gin.Context) { - context.streamResponse(ginContext, dataFlowDiagram) -} - -func (context *Context) streamDataAssetDiagram(ginContext *gin.Context) { - context.streamResponse(ginContext, dataAssetDiagram) -} - -func (context *Context) streamReportPDF(ginContext *gin.Context) { - context.streamResponse(ginContext, reportPDF) -} - -func (context *Context) streamRisksExcel(ginContext *gin.Context) { - context.streamResponse(ginContext, risksExcel) -} - -func (context *Context) streamTagsExcel(ginContext *gin.Context) { - context.streamResponse(ginContext, tagsExcel) -} - -func (context *Context) streamRisksJSON(ginContext *gin.Context) { - context.streamResponse(ginContext, risksJSON) -} - -func (context *Context) streamTechnicalAssetsJSON(ginContext *gin.Context) { - context.streamResponse(ginContext, technicalAssetsJSON) -} - -func (context *Context) streamStatsJSON(ginContext *gin.Context) { - context.streamResponse(ginContext, statsJSON) -} - -func (context *Context) streamResponse(ginContext *gin.Context, responseType responseType) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer func() { - context.unlockFolder(folderNameOfKey) - var err error - if r := recover(); r != nil { - err = r.(error) - if *context.verbose { - log.Println(err) + checkErr(err) + if nextQuestion.NoMoreQuestions() { + break } - log.Println(err) - 
ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": strings.TrimSpace(err.Error()), - }) - ok = false - } - }() - dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(defaultGraphvizDPI))) - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return - } - _, yamlText, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if !ok { - return - } - tmpModelFile, err := os.CreateTemp(*context.tempFolder, "threagile-render-*") - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return - } - defer func() { _ = os.Remove(tmpModelFile.Name()) }() - tmpOutputDir, err := os.MkdirTemp(*context.tempFolder, "threagile-render-") - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return - } - defer func() { _ = os.RemoveAll(tmpOutputDir) }() - err = os.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) - if responseType == dataFlowDiagram { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, true, false, false, false, false, false, false, false, dpi) - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return - } - ginContext.File(filepath.Join(tmpOutputDir, dataFlowDiagramFilenamePNG)) - } else if responseType == dataAssetDiagram { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, true, false, false, false, false, false, false, dpi) - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return - } - ginContext.File(filepath.Join(tmpOutputDir, dataAssetDiagramFilenamePNG)) - } else if responseType == reportPDF { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, true, false, false, false, false, false, dpi) - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return - } - ginContext.FileAttachment(filepath.Join(tmpOutputDir, reportFilename), reportFilename) - } else if responseType == risksExcel { - context.doItViaRuntimeCall(tmpModelFile.Name(), 
tmpOutputDir, false, false, false, true, false, false, false, false, dpi) - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return - } - ginContext.FileAttachment(filepath.Join(tmpOutputDir, excelRisksFilename), excelRisksFilename) - } else if responseType == tagsExcel { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, true, false, false, false, dpi) - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return - } - ginContext.FileAttachment(filepath.Join(tmpOutputDir, excelTagsFilename), excelTagsFilename) - } else if responseType == risksJSON { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, true, false, false, dpi) - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return - } - jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, jsonRisksFilename)) - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return - } - ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download - } else if responseType == technicalAssetsJSON { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, true, true, false, dpi) - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return + fmt.Println() + context.printBorder(len(nextQuestion.Title), false) + fmt.Println(nextQuestion.Title) + context.printBorder(len(nextQuestion.Title), false) + if len(nextQuestion.Description) > 0 { + fmt.Println(nextQuestion.Description) + } + resultingMultiValueSelection := make([]string, 0) + if nextQuestion.IsValueConstrained() { + if nextQuestion.MultiSelect { + selectedValues := make(map[string]bool) + for { + fmt.Println("Please select (multiple executions possible) from the following values (use number to select/deselect):") + fmt.Println(" 0:", "SELECTION PROCESS FINISHED: CONTINUE TO 
NEXT QUESTION") + for i, val := range nextQuestion.PossibleAnswers { + number := i + 1 + padding, selected := "", " " + if number < 10 { + padding = " " + } + if val, exists := selectedValues[val]; exists && val { + selected = "*" + } + fmt.Println(" "+selected+" "+padding+strconv.Itoa(number)+":", val) + } + fmt.Println() + fmt.Print("Enter number to select/deselect (or 0 when finished): ") + answer, err := reader.ReadString('\n') + // convert CRLF to LF + answer = strings.TrimSpace(strings.Replace(answer, "\n", "", -1)) + checkErr(err) + if val, err := strconv.Atoi(answer); err == nil { // flip selection + if val == 0 { + for key, selected := range selectedValues { + if selected { + resultingMultiValueSelection = append(resultingMultiValueSelection, key) + } + } + break + } else if val > 0 && val <= len(nextQuestion.PossibleAnswers) { + selectedValues[nextQuestion.PossibleAnswers[val-1]] = !selectedValues[nextQuestion.PossibleAnswers[val-1]] + } + } + } + } else { + fmt.Println("Please choose from the following values (enter value directly or use number):") + for i, val := range nextQuestion.PossibleAnswers { + number := i + 1 + padding := "" + if number < 10 { + padding = " " + } + fmt.Println(" "+padding+strconv.Itoa(number)+":", val) + } + } + } + message := "" + validResult := true + if !nextQuestion.IsValueConstrained() || !nextQuestion.MultiSelect { + fmt.Println() + fmt.Println("Enter your answer (use 'BACK' to go one step back or 'QUIT' to quit without executing the model macro)") + fmt.Print("Answer") + if len(nextQuestion.DefaultAnswer) > 0 { + fmt.Print(" (default '" + nextQuestion.DefaultAnswer + "')") + } + fmt.Print(": ") + answer, err := reader.ReadString('\n') + // convert CRLF to LF + answer = strings.TrimSpace(strings.Replace(answer, "\n", "", -1)) + checkErr(err) + if len(answer) == 0 && len(nextQuestion.DefaultAnswer) > 0 { // accepting the default + answer = nextQuestion.DefaultAnswer + } else if nextQuestion.IsValueConstrained() { // convert 
number to value + if val, err := strconv.Atoi(answer); err == nil { + if val > 0 && val <= len(nextQuestion.PossibleAnswers) { + answer = nextQuestion.PossibleAnswers[val-1] + } + } + } + if strings.ToLower(answer) == "quit" { + fmt.Println("Quitting without executing the model macro") + return + } else if strings.ToLower(answer) == "back" { + switch macroDetails.ID { + case addbuildpipeline.GetMacroDetails().ID: + message, validResult, err = addbuildpipeline.GoBack() + case addvault.GetMacroDetails().ID: + message, validResult, err = addvault.GoBack() + case prettyprint.GetMacroDetails().ID: + message, validResult, err = prettyprint.GoBack() + case removeunusedtags.GetMacroDetails().ID: + message, validResult, err = removeunusedtags.GoBack() + case seedrisktracking.GetMacroDetails().ID: + message, validResult, err = seedrisktracking.GoBack() + case seedtags.GetMacroDetails().ID: + message, validResult, err = seedtags.GoBack() + } + } else if len(answer) > 0 { // individual answer + if nextQuestion.IsValueConstrained() { + if !nextQuestion.IsMatchingValueConstraint(answer) { + fmt.Println() + fmt.Println(">>> INVALID <<<") + fmt.Println("Answer does not match any allowed value. 
Please try again:") + continue + } + } + switch macroDetails.ID { + case addbuildpipeline.GetMacroDetails().ID: + message, validResult, err = addbuildpipeline.ApplyAnswer(nextQuestion.ID, answer) + case addvault.GetMacroDetails().ID: + message, validResult, err = addvault.ApplyAnswer(nextQuestion.ID, answer) + case prettyprint.GetMacroDetails().ID: + message, validResult, err = prettyprint.ApplyAnswer(nextQuestion.ID, answer) + case removeunusedtags.GetMacroDetails().ID: + message, validResult, err = removeunusedtags.ApplyAnswer(nextQuestion.ID, answer) + case seedrisktracking.GetMacroDetails().ID: + message, validResult, err = seedrisktracking.ApplyAnswer(nextQuestion.ID, answer) + case seedtags.GetMacroDetails().ID: + message, validResult, err = seedtags.ApplyAnswer(nextQuestion.ID, answer) + } + } + } else { + switch macroDetails.ID { + case addbuildpipeline.GetMacroDetails().ID: + message, validResult, err = addbuildpipeline.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case addvault.GetMacroDetails().ID: + message, validResult, err = addvault.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case prettyprint.GetMacroDetails().ID: + message, validResult, err = prettyprint.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case removeunusedtags.GetMacroDetails().ID: + message, validResult, err = removeunusedtags.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case seedrisktracking.GetMacroDetails().ID: + message, validResult, err = seedrisktracking.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) + case seedtags.GetMacroDetails().ID: + message, validResult, err = seedtags.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) 
+ } + } + checkErr(err) + if !validResult { + fmt.Println() + fmt.Println(">>> INVALID <<<") + } + fmt.Println(message) + fmt.Println() } - jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, jsonTechnicalAssetsFilename)) - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return + for { + fmt.Println() + fmt.Println() + fmt.Println("#################################################################") + fmt.Println("Do you want to execute the model macro (updating the model file)?") + fmt.Println("#################################################################") + fmt.Println() + fmt.Println("The following changes will be applied:") + var changes []string + message := "" + validResult := true + var err error + switch macroDetails.ID { + case addbuildpipeline.GetMacroDetails().ID: + changes, message, validResult, err = addbuildpipeline.GetFinalChangeImpact(&context.modelInput, &context.parsedModel) + case addvault.GetMacroDetails().ID: + changes, message, validResult, err = addvault.GetFinalChangeImpact(&context.modelInput, &context.parsedModel) + case prettyprint.GetMacroDetails().ID: + changes, message, validResult, err = prettyprint.GetFinalChangeImpact(&context.modelInput) + case removeunusedtags.GetMacroDetails().ID: + changes, message, validResult, err = removeunusedtags.GetFinalChangeImpact(&context.modelInput) + case seedrisktracking.GetMacroDetails().ID: + changes, message, validResult, err = seedrisktracking.GetFinalChangeImpact(&context.modelInput) + case seedtags.GetMacroDetails().ID: + changes, message, validResult, err = seedtags.GetFinalChangeImpact(&context.modelInput) + } + checkErr(err) + for _, change := range changes { + fmt.Println(" -", change) + } + if !validResult { + fmt.Println() + fmt.Println(">>> INVALID <<<") + } + fmt.Println() + fmt.Println(message) + fmt.Println() + fmt.Print("Apply these changes to the model file?\nType Yes or No: ") + answer, err := reader.ReadString('\n') + // convert CRLF to LF + answer = 
strings.TrimSpace(strings.Replace(answer, "\n", "", -1)) + checkErr(err) + answer = strings.ToLower(answer) + fmt.Println() + if answer == "yes" || answer == "y" { + message := "" + validResult := true + var err error + switch macroDetails.ID { + case addbuildpipeline.GetMacroDetails().ID: + message, validResult, err = addbuildpipeline.Execute(&context.modelInput, &context.parsedModel) + case addvault.GetMacroDetails().ID: + message, validResult, err = addvault.Execute(&context.modelInput, &context.parsedModel) + case prettyprint.GetMacroDetails().ID: + message, validResult, err = prettyprint.Execute(&context.modelInput) + case removeunusedtags.GetMacroDetails().ID: + message, validResult, err = removeunusedtags.Execute(&context.modelInput, &context.parsedModel) + case seedrisktracking.GetMacroDetails().ID: + message, validResult, err = seedrisktracking.Execute(&context.parsedModel, &context.modelInput) + case seedtags.GetMacroDetails().ID: + message, validResult, err = seedtags.Execute(&context.modelInput, &context.parsedModel) + } + checkErr(err) + if !validResult { + fmt.Println() + fmt.Println(">>> INVALID <<<") + } + fmt.Println(message) + fmt.Println() + backupFilename := *context.modelFilename + ".backup" + fmt.Println("Creating backup model file:", backupFilename) // TODO add random files in /dev/shm space? 
+ _, err = copyFile(*context.modelFilename, backupFilename) + checkErr(err) + fmt.Println("Updating model") + yamlBytes, err := yaml.Marshal(context.modelInput) + checkErr(err) + /* + yamlBytes = model.ReformatYAML(yamlBytes) + */ + fmt.Println("Writing model file:", *context.modelFilename) + err = os.WriteFile(*context.modelFilename, yamlBytes, 0400) + checkErr(err) + fmt.Println("Model file successfully updated") + return + } else if answer == "no" || answer == "n" { + fmt.Println("Quitting without executing the model macro") + return + } } - ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download - } else if responseType == statsJSON { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, false, false, true, dpi) - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return + } + + renderDataFlowDiagram := *context.generateDataFlowDiagram + renderDataAssetDiagram := *context.generateDataAssetDiagram + renderRisksJSON := *context.generateRisksJSON + renderTechnicalAssetsJSON := *context.generateTechnicalAssetsJSON + renderStatsJSON := *context.generateStatsJSON + renderRisksExcel := *context.generateRisksExcel + renderTagsExcel := *context.generateTagsExcel + renderPDF := *context.generateReportPDF + if renderPDF { // as the PDF report includes both diagrams + renderDataFlowDiagram, renderDataAssetDiagram = true, true + } + + // Data-flow Diagram rendering + if renderDataFlowDiagram { + gvFile := filepath.Join(*context.outputDir, context.dataFlowDiagramFilenameDOT) + if !context.keepDiagramSourceFiles { + tmpFileGV, err := os.CreateTemp(*context.tempFolder, context.dataFlowDiagramFilenameDOT) + checkErr(err) + gvFile = tmpFileGV.Name() + defer func() { _ = os.Remove(gvFile) }() } - jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, jsonStatsFilename)) - if err != nil { - context.handleErrorInServiceCall(err, 
ginContext) - return + dotFile := context.writeDataFlowDiagramGraphvizDOT(gvFile, *context.diagramDPI) + context.renderDataFlowDiagramGraphvizImage(dotFile, *context.outputDir) + } + // Data Asset Diagram rendering + if renderDataAssetDiagram { + gvFile := filepath.Join(*context.outputDir, context.dataAssetDiagramFilenameDOT) + if !context.keepDiagramSourceFiles { + tmpFile, err := os.CreateTemp(*context.tempFolder, context.dataAssetDiagramFilenameDOT) + checkErr(err) + gvFile = tmpFile.Name() + defer func() { _ = os.Remove(gvFile) }() } - ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download + dotFile := context.writeDataAssetDiagramGraphvizDOT(gvFile, *context.diagramDPI) + context.renderDataAssetDiagramGraphvizImage(dotFile, *context.outputDir) } -} -// fully replaces threagile.yaml in sub-folder given by UUID -func (context *Context) importModel(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return + // risks as risks json + if renderRisksJSON { + if *context.verbose { + fmt.Println("Writing risks json") + } + report.WriteRisksJSON(&context.parsedModel, filepath.Join(*context.outputDir, context.jsonRisksFilename)) } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - aUuid := ginContext.Param("model-id") // UUID is syntactically validated in readModel+checkModelFolder (next line) via uuid.Parse(modelUUID) - _, _, ok = context.readModel(ginContext, aUuid, key, folderNameOfKey) - if ok { - // first analyze it simply by executing the full risk process (just discard the result) to ensure that everything would work - yamlContent, ok := context.execute(ginContext, true) - if ok { - // if we're here, then no problem was raised, so ok to proceed - ok = context.writeModelYAML(ginContext, string(yamlContent), key, context.folderNameForModel(folderNameOfKey, aUuid), "Model Import", false) 
- if ok { - ginContext.JSON(http.StatusCreated, gin.H{ - "message": "model imported", - }) - } + // technical assets json + if renderTechnicalAssetsJSON { + if *context.verbose { + fmt.Println("Writing technical assets json") } + report.WriteTechnicalAssetsJSON(&context.parsedModel, filepath.Join(*context.outputDir, context.jsonTechnicalAssetsFilename)) } -} -func (context *Context) stats(ginContext *gin.Context) { - keyCount, modelCount := 0, 0 - keyFolders, err := os.ReadDir(filepath.Join(*context.serverFolder, keyDir)) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to collect stats", - }) - return + // risks as risks json + if renderStatsJSON { + if *context.verbose { + fmt.Println("Writing stats json") + } + report.WriteStatsJSON(&context.parsedModel, filepath.Join(*context.outputDir, context.jsonStatsFilename)) } - for _, keyFolder := range keyFolders { - if len(keyFolder.Name()) == 128 { // it's a sha512 token hash probably, so count it as token folder for the stats - keyCount++ - if keyFolder.Name() != filepath.Clean(keyFolder.Name()) { - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "weird file path", - }) - return - } - modelFolders, err := os.ReadDir(filepath.Join(*context.serverFolder, keyDir, keyFolder.Name())) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to collect stats", - }) - return - } - for _, modelFolder := range modelFolders { - if len(modelFolder.Name()) == 36 { // it's a uuid model folder probably, so count it as model folder for the stats - modelCount++ - } - } + + // risks Excel + if renderRisksExcel { + if *context.verbose { + fmt.Println("Writing risks excel") } + report.WriteRisksExcelToFile(&context.parsedModel, filepath.Join(*context.outputDir, context.excelRisksFilename)) } - // TODO collect and deliver more stats (old model count?) 
and health info - ginContext.JSON(http.StatusOK, gin.H{ - "key_count": keyCount, - "model_count": modelCount, - "success_count": context.successCount, - "error_count": context.errorCount, - }) -} -func (context *Context) getDataAsset(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return + // tags Excel + if renderTagsExcel { + if *context.verbose { + fmt.Println("Writing tags excel") + } + report.WriteTagsExcelToFile(&context.parsedModel, filepath.Join(*context.outputDir, context.excelTagsFilename)) } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, dataAsset := range modelInput.DataAssets { - if dataAsset.ID == ginContext.Param("data-asset-id") { - ginContext.JSON(http.StatusOK, gin.H{ - title: dataAsset, - }) - return - } + + if renderPDF { + // hash the YAML input file + f, err := os.Open(*context.modelFilename) + checkErr(err) + defer func() { _ = f.Close() }() + hasher := sha256.New() + if _, err := io.Copy(hasher, f); err != nil { + panic(err) } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "data asset not found", - }) + modelHash := hex.EncodeToString(hasher.Sum(nil)) + // report PDF + if *context.verbose { + fmt.Println("Writing report pdf") + } + report.WriteReportPDF(filepath.Join(*context.outputDir, context.reportFilename), + filepath.Join(*context.appFolder, *context.templateFilename), + filepath.Join(*context.outputDir, context.dataFlowDiagramFilenamePNG), + filepath.Join(*context.outputDir, context.dataAssetDiagramFilenamePNG), + *context.modelFilename, + *context.skipRiskRules, + context.buildTimestamp, + modelHash, + introTextRAA, + context.customRiskRules, + *context.tempFolder, + &context.parsedModel) } } -func (context 
*Context) deleteDataAsset(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - referencesDeleted := false - // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, dataAsset := range modelInput.DataAssets { - if dataAsset.ID == ginContext.Param("data-asset-id") { - // also remove all usages of this data asset !! - for _, techAsset := range modelInput.TechnicalAssets { - if techAsset.DataAssetsProcessed != nil { - for i, parsedChangeCandidateAsset := range techAsset.DataAssetsProcessed { - referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) - if referencedAsset == dataAsset.ID { // apply the removal - referencesDeleted = true - // Remove the element at index i - // TODO needs more testing - copy(techAsset.DataAssetsProcessed[i:], techAsset.DataAssetsProcessed[i+1:]) // Shift a[i+1:] left one index. - techAsset.DataAssetsProcessed[len(techAsset.DataAssetsProcessed)-1] = "" // Erase last element (write zero value). - techAsset.DataAssetsProcessed = techAsset.DataAssetsProcessed[:len(techAsset.DataAssetsProcessed)-1] // Truncate slice. - } - } - } - if techAsset.DataAssetsStored != nil { - for i, parsedChangeCandidateAsset := range techAsset.DataAssetsStored { - referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) - if referencedAsset == dataAsset.ID { // apply the removal - referencesDeleted = true - // Remove the element at index i - // TODO needs more testing - copy(techAsset.DataAssetsStored[i:], techAsset.DataAssetsStored[i+1:]) // Shift a[i+1:] left one index. - techAsset.DataAssetsStored[len(techAsset.DataAssetsStored)-1] = "" // Erase last element (write zero value). 
- techAsset.DataAssetsStored = techAsset.DataAssetsStored[:len(techAsset.DataAssetsStored)-1] // Truncate slice. - } - } - } - if techAsset.CommunicationLinks != nil { - for title, commLink := range techAsset.CommunicationLinks { - for i, dataAssetSent := range commLink.DataAssetsSent { - referencedAsset := fmt.Sprintf("%v", dataAssetSent) - if referencedAsset == dataAsset.ID { // apply the removal - referencesDeleted = true - // Remove the element at index i - // TODO needs more testing - copy(techAsset.CommunicationLinks[title].DataAssetsSent[i:], techAsset.CommunicationLinks[title].DataAssetsSent[i+1:]) // Shift a[i+1:] left one index. - techAsset.CommunicationLinks[title].DataAssetsSent[len(techAsset.CommunicationLinks[title].DataAssetsSent)-1] = "" // Erase last element (write zero value). - x := techAsset.CommunicationLinks[title] - x.DataAssetsSent = techAsset.CommunicationLinks[title].DataAssetsSent[:len(techAsset.CommunicationLinks[title].DataAssetsSent)-1] // Truncate slice. - techAsset.CommunicationLinks[title] = x - } - } - for i, dataAssetReceived := range commLink.DataAssetsReceived { - referencedAsset := fmt.Sprintf("%v", dataAssetReceived) - if referencedAsset == dataAsset.ID { // apply the removal - referencesDeleted = true - // Remove the element at index i - // TODO needs more testing - copy(techAsset.CommunicationLinks[title].DataAssetsReceived[i:], techAsset.CommunicationLinks[title].DataAssetsReceived[i+1:]) // Shift a[i+1:] left one index. - techAsset.CommunicationLinks[title].DataAssetsReceived[len(techAsset.CommunicationLinks[title].DataAssetsReceived)-1] = "" // Erase last element (write zero value). - x := techAsset.CommunicationLinks[title] - x.DataAssetsReceived = techAsset.CommunicationLinks[title].DataAssetsReceived[:len(techAsset.CommunicationLinks[title].DataAssetsReceived)-1] // Truncate slice. 
- techAsset.CommunicationLinks[title] = x - } - } - } - } - } - for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { - if individualRiskCat.RisksIdentified != nil { - for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { - if individualRiskInstance.MostRelevantDataAsset == dataAsset.ID { // apply the removal - referencesDeleted = true - x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] - x.MostRelevantDataAsset = "" // TODO needs more testing - modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x - } - } - } - } - // remove it itself - delete(modelInput.DataAssets, title) - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Data Asset Deletion") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "data asset deleted", - "id": dataAsset.ID, - "references_deleted": referencesDeleted, // in order to signal to clients, that other model parts might've been deleted as well - }) - } - return - } - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "data asset not found", - }) +func (context *Context) printBorder(length int, bold bool) { + char := "-" + if bold { + char = "=" + } + for i := 1; i <= length; i++ { + fmt.Print(char) } + fmt.Println() } -func (context *Context) setSharedRuntime(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, sharedRuntime := range modelInput.SharedRuntimes { - if sharedRuntime.ID == ginContext.Param("shared-runtime-id") { - payload := 
payloadSharedRuntime{} - err := ginContext.BindJSON(&payload) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - sharedRuntimeInput, ok := context.populateSharedRuntime(ginContext, payload) - if !ok { - return - } - // in order to also update the title, remove the shared runtime from the map and re-insert it (with new key) - delete(modelInput.SharedRuntimes, title) - modelInput.SharedRuntimes[payload.Title] = sharedRuntimeInput - idChanged := sharedRuntimeInput.ID != sharedRuntime.ID - if idChanged { // ID-CHANGE-PROPAGATION - for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { - if individualRiskCat.RisksIdentified != nil { - for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { - if individualRiskInstance.MostRelevantSharedRuntime == sharedRuntime.ID { // apply the ID change - x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] - x.MostRelevantSharedRuntime = sharedRuntimeInput.ID // TODO needs more testing - modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x - } - } - } - } - } - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Shared Runtime Update") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "shared runtime updated", - "id": sharedRuntimeInput.ID, - "id_changed": idChanged, // in order to signal to clients, that other model parts might've received updates as well and should be reloaded - }) - } - return - } - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "shared runtime not found", - }) +func (context *Context) applyRAA() string { + if *context.verbose { + fmt.Println("Applying RAA calculation:", *context.raaPlugin) } -} -func (context *Context) setDataAsset(ginContext *gin.Context) { - folderNameOfKey, 
key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return + runner, loadError := new(run.Runner).Load(filepath.Join(*context.binFolder, *context.raaPlugin)) + if loadError != nil { + fmt.Printf("WARNING: raa %q not loaded: %v\n", *context.raaPlugin, loadError) + return "" } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, dataAsset := range modelInput.DataAssets { - if dataAsset.ID == ginContext.Param("data-asset-id") { - payload := payloadDataAsset{} - err := ginContext.BindJSON(&payload) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - dataAssetInput, ok := context.populateDataAsset(ginContext, payload) - if !ok { - return - } - // in order to also update the title, remove the asset from the map and re-insert it (with new key) - delete(modelInput.DataAssets, title) - modelInput.DataAssets[payload.Title] = dataAssetInput - idChanged := dataAssetInput.ID != dataAsset.ID - if idChanged { // ID-CHANGE-PROPAGATION - // also update all usages to point to the new (changed) ID !! 
- for techAssetTitle, techAsset := range modelInput.TechnicalAssets { - if techAsset.DataAssetsProcessed != nil { - for i, parsedChangeCandidateAsset := range techAsset.DataAssetsProcessed { - referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) - if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.TechnicalAssets[techAssetTitle].DataAssetsProcessed[i] = dataAssetInput.ID - } - } - } - if techAsset.DataAssetsStored != nil { - for i, parsedChangeCandidateAsset := range techAsset.DataAssetsStored { - referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) - if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.TechnicalAssets[techAssetTitle].DataAssetsStored[i] = dataAssetInput.ID - } - } - } - if techAsset.CommunicationLinks != nil { - for title, commLink := range techAsset.CommunicationLinks { - for i, dataAssetSent := range commLink.DataAssetsSent { - referencedAsset := fmt.Sprintf("%v", dataAssetSent) - if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.TechnicalAssets[techAssetTitle].CommunicationLinks[title].DataAssetsSent[i] = dataAssetInput.ID - } - } - for i, dataAssetReceived := range commLink.DataAssetsReceived { - referencedAsset := fmt.Sprintf("%v", dataAssetReceived) - if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.TechnicalAssets[techAssetTitle].CommunicationLinks[title].DataAssetsReceived[i] = dataAssetInput.ID - } - } - } - } - } - for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { - if individualRiskCat.RisksIdentified != nil { - for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { - if individualRiskInstance.MostRelevantDataAsset == dataAsset.ID { // apply the ID change - x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] - x.MostRelevantDataAsset = dataAssetInput.ID // TODO needs more testing - 
modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x - } - } - } - } - } - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Data Asset Update") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "data asset updated", - "id": dataAssetInput.ID, - "id_changed": idChanged, // in order to signal to clients, that other model parts might've received updates as well and should be reloaded - }) - } - return - } - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "data asset not found", - }) + + runError := runner.Run(context.parsedModel, &context.parsedModel) + if runError != nil { + fmt.Printf("WARNING: raa %q not applied: %v\n", *context.raaPlugin, runError) + return "" } + + return runner.ErrorOutput } -func (context *Context) getSharedRuntime(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return +func (context *Context) checkIdSyntax(id string) { + validIdSyntax := regexp.MustCompile(`^[a-zA-Z0-9\-]+$`) + if !validIdSyntax.MatchString(id) { + panic(errors.New("invalid id syntax used (only letters, numbers, and hyphen allowed): " + id)) } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) +} + +func (context *Context) analyze(ginContext *gin.Context) { + context.execute(ginContext, false) +} + +func (context *Context) check(ginContext *gin.Context) { + _, ok := context.execute(ginContext, true) if ok { - // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, sharedRuntime := range modelInput.SharedRuntimes { - if sharedRuntime.ID == ginContext.Param("shared-runtime-id") { - ginContext.JSON(http.StatusOK, gin.H{ - title: sharedRuntime, - }) - return - } - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "shared runtime not 
found", + ginContext.JSON(http.StatusOK, gin.H{ + "message": "model is ok", }) } } -func (context *Context) createNewSharedRuntime(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - payload := payloadSharedRuntime{} - err := ginContext.BindJSON(&payload) - if err != nil { +func (context *Context) execute(ginContext *gin.Context, dryRun bool) (yamlContent []byte, ok bool) { + defer func() { + var err error + if r := recover(); r != nil { + context.errorCount++ + err = r.(error) log.Println(err) ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", + "error": strings.TrimSpace(err.Error()), }) - return + ok = false } - // yes, here keyed by title in YAML for better readability in the YAML file itself - if _, exists := modelInput.SharedRuntimes[payload.Title]; exists { - ginContext.JSON(http.StatusConflict, gin.H{ - "error": "shared runtime with this title already exists", - }) - return + }() + + dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(context.defaultGraphvizDPI))) + checkErr(err) + + fileUploaded, header, err := ginContext.Request.FormFile("file") + checkErr(err) + + if header.Size > 50000000 { + msg := "maximum model upload file size exceeded (denial-of-service protection)" + log.Println(msg) + ginContext.JSON(http.StatusRequestEntityTooLarge, gin.H{ + "error": msg, + }) + return yamlContent, false + } + + filenameUploaded := strings.TrimSpace(header.Filename) + + tmpInputDir, err := os.MkdirTemp(*context.tempFolder, "threagile-input-") + checkErr(err) + defer func() { _ = os.RemoveAll(tmpInputDir) }() + + tmpModelFile, err := os.CreateTemp(tmpInputDir, "threagile-model-*") + checkErr(err) + defer func() { _ = 
os.Remove(tmpModelFile.Name()) }() + _, err = io.Copy(tmpModelFile, fileUploaded) + checkErr(err) + + yamlFile := tmpModelFile.Name() + + if strings.ToLower(filepath.Ext(filenameUploaded)) == ".zip" { + // unzip first (including the resources like images etc.) + if *context.verbose { + fmt.Println("Decompressing uploaded archive") } - // but later it will in memory keyed by its "id", so do this uniqueness check also - for _, sharedRuntime := range modelInput.SharedRuntimes { - if sharedRuntime.ID == payload.Id { - ginContext.JSON(http.StatusConflict, gin.H{ - "error": "shared runtime with this id already exists", - }) - return + filenamesUnzipped, err := context.unzip(tmpModelFile.Name(), tmpInputDir) + checkErr(err) + found := false + for _, name := range filenamesUnzipped { + if strings.ToLower(filepath.Ext(name)) == ".yaml" { + yamlFile = name + found = true + break } } - if !context.checkTechnicalAssetsExisting(modelInput, payload.TechnicalAssetsRunning) { - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "referenced technical asset does not exist", - }) - return - } - sharedRuntimeInput, ok := context.populateSharedRuntime(ginContext, payload) - if !ok { - return - } - if modelInput.SharedRuntimes == nil { - modelInput.SharedRuntimes = make(map[string]model.InputSharedRuntime) - } - modelInput.SharedRuntimes[payload.Title] = sharedRuntimeInput - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Shared Runtime Creation") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "shared runtime created", - "id": sharedRuntimeInput.ID, - }) + if !found { + panic(errors.New("no yaml file found in uploaded archive")) } } -} -func (context *Context) checkTechnicalAssetsExisting(modelInput model.ModelInput, techAssetIDs []string) (ok bool) { - for _, techAssetID := range techAssetIDs { - exists := false - for _, val := range modelInput.TechnicalAssets { - if val.ID == techAssetID { - exists = true - break - } + tmpOutputDir, err := 
os.MkdirTemp(*context.tempFolder, "threagile-output-") + checkErr(err) + defer func() { _ = os.RemoveAll(tmpOutputDir) }() + + tmpResultFile, err := os.CreateTemp(*context.tempFolder, "threagile-result-*.zip") + checkErr(err) + defer func() { _ = os.Remove(tmpResultFile.Name()) }() + + if dryRun { + context.doItViaRuntimeCall(yamlFile, tmpOutputDir, false, false, false, false, false, true, true, true, 40) + } else { + context.doItViaRuntimeCall(yamlFile, tmpOutputDir, true, true, true, true, true, true, true, true, dpi) + } + checkErr(err) + + yamlContent, err = os.ReadFile(yamlFile) + checkErr(err) + err = os.WriteFile(filepath.Join(tmpOutputDir, context.inputFile), yamlContent, 0400) + checkErr(err) + + if !dryRun { + files := []string{ + filepath.Join(tmpOutputDir, context.inputFile), + filepath.Join(tmpOutputDir, context.dataFlowDiagramFilenamePNG), + filepath.Join(tmpOutputDir, context.dataAssetDiagramFilenamePNG), + filepath.Join(tmpOutputDir, context.reportFilename), + filepath.Join(tmpOutputDir, context.excelRisksFilename), + filepath.Join(tmpOutputDir, context.excelTagsFilename), + filepath.Join(tmpOutputDir, context.jsonRisksFilename), + filepath.Join(tmpOutputDir, context.jsonTechnicalAssetsFilename), + filepath.Join(tmpOutputDir, context.jsonStatsFilename), + } + if context.keepDiagramSourceFiles { + files = append(files, filepath.Join(tmpOutputDir, context.dataFlowDiagramFilenameDOT)) + files = append(files, filepath.Join(tmpOutputDir, context.dataAssetDiagramFilenameDOT)) } - if !exists { - return false + err = context.zipFiles(tmpResultFile.Name(), files) + checkErr(err) + if *context.verbose { + log.Println("Streaming back result file: " + tmpResultFile.Name()) } + ginContext.FileAttachment(tmpResultFile.Name(), "threagile-result.zip") } - return true + context.successCount++ + return yamlContent, true } -func (context *Context) populateSharedRuntime(_ *gin.Context, payload payloadSharedRuntime) (sharedRuntimeInput model.InputSharedRuntime, ok bool) 
{ - sharedRuntimeInput = model.InputSharedRuntime{ - ID: payload.Id, - Description: payload.Description, - Tags: lowerCaseAndTrim(payload.Tags), - TechnicalAssetsRunning: payload.TechnicalAssetsRunning, +// ultimately to avoid any in-process memory and/or data leaks by the used third party libs like PDF generation: exec and quit +func (context *Context) doItViaRuntimeCall(modelFile string, outputDir string, + generateDataFlowDiagram, generateDataAssetDiagram, generateReportPdf, generateRisksExcel, generateTagsExcel, generateRisksJSON, generateTechnicalAssetsJSON, generateStatsJSON bool, + dpi int) { + // Remember to also add the same args to the exec based sub-process calls! + var cmd *exec.Cmd + args := []string{"-model", modelFile, "-output", outputDir, "-execute-model-macro", *context.executeModelMacro, "-raa-run", *context.raaPlugin, "-custom-risk-rules-plugins", *context.riskRulesPlugins, "-skip-risk-rules", *context.skipRiskRules, "-diagram-dpi", strconv.Itoa(dpi)} + if *context.verbose { + args = append(args, "-verbose") } - return sharedRuntimeInput, true -} - -func (context *Context) deleteSharedRuntime(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return + if *context.ignoreOrphanedRiskTracking { // TODO why add all them as arguments, when they are also variables on outer level? + args = append(args, "-ignore-orphaned-risk-tracking") } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - referencesDeleted := false - // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, sharedRuntime := range modelInput.SharedRuntimes { - if sharedRuntime.ID == ginContext.Param("shared-runtime-id") { - // also remove all usages of this shared runtime !! 
- for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { - if individualRiskCat.RisksIdentified != nil { - for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { - if individualRiskInstance.MostRelevantSharedRuntime == sharedRuntime.ID { // apply the removal - referencesDeleted = true - x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] - x.MostRelevantSharedRuntime = "" // TODO needs more testing - modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x - } - } - } - } - // remove it itself - delete(modelInput.SharedRuntimes, title) - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Shared Runtime Deletion") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "shared runtime deleted", - "id": sharedRuntime.ID, - "references_deleted": referencesDeleted, // in order to signal to clients, that other model parts might've been deleted as well - }) - } - return - } + if generateDataFlowDiagram { + args = append(args, "-generate-data-flow-diagram") + } + if generateDataAssetDiagram { + args = append(args, "-generate-data-asset-diagram") + } + if generateReportPdf { + args = append(args, "-generate-report-pdf") + } + if generateRisksExcel { + args = append(args, "-generate-risks-excel") + } + if generateTagsExcel { + args = append(args, "-generate-tags-excel") + } + if generateRisksJSON { + args = append(args, "-generate-risks-json") + } + if generateTechnicalAssetsJSON { + args = append(args, "-generate-technical-assets-json") + } + if generateStatsJSON { + args = append(args, "-generate-stats-json") + } + self, nameError := os.Executable() + if nameError != nil { + panic(nameError) + } + cmd = exec.Command(self, args...) 
+ out, err := cmd.CombinedOutput() + if err != nil { + panic(errors.New(string(out))) + } else { + if *context.verbose && len(out) > 0 { + fmt.Println("---") + fmt.Print(string(out)) + fmt.Println("---") } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "shared runtime not found", - }) } } -func (context *Context) createNewDataAsset(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - payload := payloadDataAsset{} - err := ginContext.BindJSON(&payload) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - // yes, here keyed by title in YAML for better readability in the YAML file itself - if _, exists := modelInput.DataAssets[payload.Title]; exists { - ginContext.JSON(http.StatusConflict, gin.H{ - "error": "data asset with this title already exists", - }) - return - } - // but later it will in memory keyed by its "id", so do this uniqueness check also - for _, asset := range modelInput.DataAssets { - if asset.ID == payload.Id { - ginContext.JSON(http.StatusConflict, gin.H{ - "error": "data asset with this id already exists", - }) - return - } - } - dataAssetInput, ok := context.populateDataAsset(ginContext, payload) - if !ok { - return - } - if modelInput.DataAssets == nil { - modelInput.DataAssets = make(map[string]model.InputDataAsset) - } - modelInput.DataAssets[payload.Title] = dataAssetInput - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Data Asset Creation") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "data asset created", - "id": dataAssetInput.ID, - }) - } - } +func (context *Context) StartServer() { + router := gin.Default() + 
router.LoadHTMLGlob("server/static/*.html") // <== + router.GET("/", func(c *gin.Context) { + c.HTML(http.StatusOK, "index.html", gin.H{}) + }) + router.HEAD("/", func(c *gin.Context) { + c.HTML(http.StatusOK, "index.html", gin.H{}) + }) + router.StaticFile("/threagile.png", "server/static/threagile.png") // <== + router.StaticFile("/site.webmanifest", "server/static/site.webmanifest") + router.StaticFile("/favicon.ico", "server/static/favicon.ico") + router.StaticFile("/favicon-32x32.png", "server/static/favicon-32x32.png") + router.StaticFile("/favicon-16x16.png", "server/static/favicon-16x16.png") + router.StaticFile("/apple-touch-icon.png", "server/static/apple-touch-icon.png") + router.StaticFile("/android-chrome-512x512.png", "server/static/android-chrome-512x512.png") + router.StaticFile("/android-chrome-192x192.png", "server/static/android-chrome-192x192.png") + + router.StaticFile("/schema.json", "schema.json") + router.StaticFile("/live-templates.txt", "live-templates.txt") + router.StaticFile("/openapi.yaml", "openapi.yaml") + router.StaticFile("/swagger-ui/", "server/static/swagger-ui/index.html") + router.StaticFile("/swagger-ui/index.html", "server/static/swagger-ui/index.html") + router.StaticFile("/swagger-ui/oauth2-redirect.html", "server/static/swagger-ui/oauth2-redirect.html") + router.StaticFile("/swagger-ui/swagger-ui.css", "server/static/swagger-ui/swagger-ui.css") + router.StaticFile("/swagger-ui/swagger-ui.js", "server/static/swagger-ui/swagger-ui.js") + router.StaticFile("/swagger-ui/swagger-ui-bundle.js", "server/static/swagger-ui/swagger-ui-bundle.js") + router.StaticFile("/swagger-ui/swagger-ui-standalone-preset.js", "server/static/swagger-ui/swagger-ui-standalone-preset.js") // <== + + router.GET("/threagile-example-model.yaml", context.exampleFile) + router.GET("/threagile-stub-model.yaml", context.stubFile) + + router.GET("/meta/ping", func(c *gin.Context) { + c.JSON(200, gin.H{ + "message": "pong", + }) + }) + 
router.GET("/meta/version", func(c *gin.Context) { + c.JSON(200, gin.H{ + "version": docs.ThreagileVersion, + "build_timestamp": context.buildTimestamp, + }) + }) + router.GET("/meta/types", func(c *gin.Context) { + c.JSON(200, gin.H{ + "quantity": context.arrayOfStringValues(types.QuantityValues()), + "confidentiality": context.arrayOfStringValues(types.ConfidentialityValues()), + "criticality": context.arrayOfStringValues(types.CriticalityValues()), + "technical_asset_type": context.arrayOfStringValues(types.TechnicalAssetTypeValues()), + "technical_asset_size": context.arrayOfStringValues(types.TechnicalAssetSizeValues()), + "authorization": context.arrayOfStringValues(types.AuthorizationValues()), + "authentication": context.arrayOfStringValues(types.AuthenticationValues()), + "usage": context.arrayOfStringValues(types.UsageValues()), + "encryption": context.arrayOfStringValues(types.EncryptionStyleValues()), + "data_format": context.arrayOfStringValues(types.DataFormatValues()), + "protocol": context.arrayOfStringValues(types.ProtocolValues()), + "technical_asset_technology": context.arrayOfStringValues(types.TechnicalAssetTechnologyValues()), + "technical_asset_machine": context.arrayOfStringValues(types.TechnicalAssetMachineValues()), + "trust_boundary_type": context.arrayOfStringValues(types.TrustBoundaryTypeValues()), + "data_breach_probability": context.arrayOfStringValues(types.DataBreachProbabilityValues()), + "risk_severity": context.arrayOfStringValues(types.RiskSeverityValues()), + "risk_exploitation_likelihood": context.arrayOfStringValues(types.RiskExploitationLikelihoodValues()), + "risk_exploitation_impact": context.arrayOfStringValues(types.RiskExploitationImpactValues()), + "risk_function": context.arrayOfStringValues(types.RiskFunctionValues()), + "risk_status": context.arrayOfStringValues(types.RiskStatusValues()), + "stride": context.arrayOfStringValues(types.STRIDEValues()), + }) + }) + + // TODO router.GET("/meta/risk-rules", 
listRiskRules) + // TODO router.GET("/meta/model-macros", listModelMacros) + + router.GET("/meta/stats", context.stats) + + router.POST("/direct/analyze", context.analyze) + router.POST("/direct/check", context.check) + router.GET("/direct/stub", context.stubFile) + + router.POST("/auth/keys", context.createKey) + router.DELETE("/auth/keys", context.deleteKey) + router.POST("/auth/tokens", context.createToken) + router.DELETE("/auth/tokens", context.deleteToken) + + router.POST("/models", context.createNewModel) + router.GET("/models", context.listModels) + router.DELETE("/models/:model-id", context.deleteModel) + router.GET("/models/:model-id", context.getModel) + router.PUT("/models/:model-id", context.importModel) + router.GET("/models/:model-id/data-flow-diagram", context.streamDataFlowDiagram) + router.GET("/models/:model-id/data-asset-diagram", context.streamDataAssetDiagram) + router.GET("/models/:model-id/report-pdf", context.streamReportPDF) + router.GET("/models/:model-id/risks-excel", context.streamRisksExcel) + router.GET("/models/:model-id/tags-excel", context.streamTagsExcel) + router.GET("/models/:model-id/risks", context.streamRisksJSON) + router.GET("/models/:model-id/technical-assets", context.streamTechnicalAssetsJSON) + router.GET("/models/:model-id/stats", context.streamStatsJSON) + router.GET("/models/:model-id/analysis", context.analyzeModelOnServerDirectly) + + router.GET("/models/:model-id/cover", context.getCover) + router.PUT("/models/:model-id/cover", context.setCover) + router.GET("/models/:model-id/overview", context.getOverview) + router.PUT("/models/:model-id/overview", context.setOverview) + //router.GET("/models/:model-id/questions", getQuestions) + //router.PUT("/models/:model-id/questions", setQuestions) + router.GET("/models/:model-id/abuse-cases", context.getAbuseCases) + router.PUT("/models/:model-id/abuse-cases", context.setAbuseCases) + router.GET("/models/:model-id/security-requirements", context.getSecurityRequirements) + 
router.PUT("/models/:model-id/security-requirements", context.setSecurityRequirements) + //router.GET("/models/:model-id/tags", getTags) + //router.PUT("/models/:model-id/tags", setTags) + + router.GET("/models/:model-id/data-assets", context.getDataAssets) + router.POST("/models/:model-id/data-assets", context.createNewDataAsset) + router.GET("/models/:model-id/data-assets/:data-asset-id", context.getDataAsset) + router.PUT("/models/:model-id/data-assets/:data-asset-id", context.setDataAsset) + router.DELETE("/models/:model-id/data-assets/:data-asset-id", context.deleteDataAsset) + + router.GET("/models/:model-id/trust-boundaries", context.getTrustBoundaries) + // router.POST("/models/:model-id/trust-boundaries", createNewTrustBoundary) + // router.GET("/models/:model-id/trust-boundaries/:trust-boundary-id", getTrustBoundary) + // router.PUT("/models/:model-id/trust-boundaries/:trust-boundary-id", setTrustBoundary) + // router.DELETE("/models/:model-id/trust-boundaries/:trust-boundary-id", deleteTrustBoundary) + + router.GET("/models/:model-id/shared-runtimes", context.getSharedRuntimes) + router.POST("/models/:model-id/shared-runtimes", context.createNewSharedRuntime) + router.GET("/models/:model-id/shared-runtimes/:shared-runtime-id", context.getSharedRuntime) + router.PUT("/models/:model-id/shared-runtimes/:shared-runtime-id", context.setSharedRuntime) + router.DELETE("/models/:model-id/shared-runtimes/:shared-runtime-id", context.deleteSharedRuntime) + + fmt.Println("Threagile server running...") + _ = router.Run(":" + strconv.Itoa(*context.serverPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified +} + +func (context *Context) exampleFile(ginContext *gin.Context) { + example, err := os.ReadFile(filepath.Join(*context.appFolder, "threagile-example-model.yaml")) + checkErr(err) + ginContext.Data(http.StatusOK, gin.MIMEYAML, example) } -func (context *Context) populateDataAsset(ginContext *gin.Context, payload payloadDataAsset) 
(dataAssetInput model.InputDataAsset, ok bool) { - usage, err := model.ParseUsage(payload.Usage) - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return dataAssetInput, false +func (context *Context) stubFile(ginContext *gin.Context) { + stub, err := os.ReadFile(filepath.Join(*context.appFolder, "threagile-stub-model.yaml")) + checkErr(err) + ginContext.Data(http.StatusOK, gin.MIMEYAML, context.addSupportedTags(stub)) // TODO use also the MIMEYAML way of serving YAML in model export? +} + +func (context *Context) addSupportedTags(input []byte) []byte { + // add distinct tags as "tags_available" + supportedTags := make(map[string]bool) + for _, customRule := range context.customRiskRules { + for _, tag := range customRule.Tags { + supportedTags[strings.ToLower(tag)] = true + } } - quantity, err := model.ParseQuantity(payload.Quantity) - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return dataAssetInput, false + + for _, rule := range risks.GetBuiltInRiskRules() { + for _, tag := range rule.SupportedTags() { + supportedTags[strings.ToLower(tag)] = true + } } - confidentiality, err := model.ParseConfidentiality(payload.Confidentiality) - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return dataAssetInput, false + + tags := make([]string, 0, len(supportedTags)) + for t := range supportedTags { + tags = append(tags, t) } - integrity, err := model.ParseCriticality(payload.Integrity) - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return dataAssetInput, false + if len(tags) == 0 { + return input } - availability, err := model.ParseCriticality(payload.Availability) - if err != nil { - context.handleErrorInServiceCall(err, ginContext) - return dataAssetInput, false + sort.Strings(tags) + if *context.verbose { + fmt.Print("Supported tags of all risk rules: ") + for i, tag := range tags { + if i > 0 { + fmt.Print(", ") + } + fmt.Print(tag) + } + fmt.Println() } - dataAssetInput = 
model.InputDataAsset{ - ID: payload.Id, - Description: payload.Description, - Usage: usage.String(), - Tags: lowerCaseAndTrim(payload.Tags), - Origin: payload.Origin, - Owner: payload.Owner, - Quantity: quantity.String(), - Confidentiality: confidentiality.String(), - Integrity: integrity.String(), - Availability: availability.String(), - JustificationCiaRating: payload.JustificationCiaRating, + replacement := "tags_available:" + for _, tag := range tags { + replacement += "\n - " + tag } - return dataAssetInput, true + return []byte(strings.Replace(string(input), "tags_available:", replacement, 1)) } -func (context *Context) getDataAssets(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, aModel.DataAssets) - } -} +var mapFolderNameToTokenHash = make(map[string]string) -func (context *Context) getTrustBoundaries(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) +const keySize = 32 + +func (context *Context) createToken(ginContext *gin.Context) { + folderName, key, ok := context.checkKeyToFolderName(ginContext) if !ok { return } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, aModel.TrustBoundaries) + context.globalLock.Lock() + defer context.globalLock.Unlock() + if tokenHash, exists := mapFolderNameToTokenHash[folderName]; exists { + // invalidate previous token + delete(mapTokenHashToTimeoutStruct, tokenHash) } -} - -func (context *Context) getSharedRuntimes(ginContext *gin.Context) { - folderNameOfKey, key, ok := 
context.checkTokenToFolderName(ginContext) - if !ok { + // create a strong random 256 bit value (used to xor) + xorBytesArr := make([]byte, keySize) + n, err := rand.Read(xorBytesArr[:]) + if n != keySize || err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to create token", + }) return } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, aModel.SharedRuntimes) + now := time.Now().UnixNano() + token := xor(key, xorBytesArr) + tokenHash := hashSHA256(token) + housekeepingTokenMaps() + mapTokenHashToTimeoutStruct[tokenHash] = timeoutStruct{ + xorRand: xorBytesArr, + createdNanoTime: now, + lastAccessedNanoTime: now, } + mapFolderNameToTokenHash[folderName] = tokenHash + ginContext.JSON(http.StatusCreated, gin.H{ + "token": base64.RawURLEncoding.EncodeToString(token[:]), + }) } -func (context *Context) arrayOfStringValues(values []model.TypeEnum) []string { - result := make([]string, 0) - for _, value := range values { - result = append(result, value.String()) - } - return result +type tokenHeader struct { + Token string `header:"token"` } -func (context *Context) getModel(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { +func (context *Context) deleteToken(ginContext *gin.Context) { + header := tokenHeader{} + if err := ginContext.ShouldBindHeader(&header); err != nil { + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "token not found", + }) return } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - _, yamlText, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - tmpResultFile, err := os.CreateTemp(*context.tempFolder, "threagile-*.yaml") - checkErr(err) - err = 
os.WriteFile(tmpResultFile.Name(), []byte(yamlText), 0400) + token, err := base64.RawURLEncoding.DecodeString(strings.TrimSpace(header.Token)) + if len(token) == 0 || err != nil { if err != nil { log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to stream model file", - }) - return } - defer func() { _ = os.Remove(tmpResultFile.Name()) }() - ginContext.FileAttachment(tmpResultFile.Name(), inputFile) + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "token not found", + }) + return } + context.globalLock.Lock() + defer context.globalLock.Unlock() + deleteTokenHashFromMaps(hashSHA256(token)) + ginContext.JSON(http.StatusOK, gin.H{ + "message": "token deleted", + }) +} + +type responseType int + +const ( + dataFlowDiagram responseType = iota + dataAssetDiagram + reportPDF + risksExcel + tagsExcel + risksJSON + technicalAssetsJSON + statsJSON +) + +func (context *Context) streamDataFlowDiagram(ginContext *gin.Context) { + context.streamResponse(ginContext, dataFlowDiagram) } -type payloadModels struct { - ID string `yaml:"id" json:"id"` - Title string `yaml:"title" json:"title"` - TimestampCreated time.Time `yaml:"timestamp_created" json:"timestamp_created"` - TimestampModified time.Time `yaml:"timestamp_modified" json:"timestamp_modified"` +func (context *Context) streamDataAssetDiagram(ginContext *gin.Context) { + context.streamResponse(ginContext, dataAssetDiagram) } -type payloadCover struct { - Title string `yaml:"title" json:"title"` - Date time.Time `yaml:"date" json:"date"` - Author model.Author `yaml:"author" json:"author"` +func (context *Context) streamReportPDF(ginContext *gin.Context) { + context.streamResponse(ginContext, reportPDF) } -type payloadOverview struct { - ManagementSummaryComment string `yaml:"management_summary_comment" json:"management_summary_comment"` - BusinessCriticality string `yaml:"business_criticality" json:"business_criticality"` - BusinessOverview model.Overview 
`yaml:"business_overview" json:"business_overview"` - TechnicalOverview model.Overview `yaml:"technical_overview" json:"technical_overview"` +func (context *Context) streamRisksExcel(ginContext *gin.Context) { + context.streamResponse(ginContext, risksExcel) } -type payloadAbuseCases map[string]string +func (context *Context) streamTagsExcel(ginContext *gin.Context) { + context.streamResponse(ginContext, tagsExcel) +} -type payloadSecurityRequirements map[string]string +func (context *Context) streamRisksJSON(ginContext *gin.Context) { + context.streamResponse(ginContext, risksJSON) +} -type payloadDataAsset struct { - Title string `yaml:"title" json:"title"` - Id string `yaml:"id" json:"id"` - Description string `yaml:"description" json:"description"` - Usage string `yaml:"usage" json:"usage"` - Tags []string `yaml:"tags" json:"tags"` - Origin string `yaml:"origin" json:"origin"` - Owner string `yaml:"owner" json:"owner"` - Quantity string `yaml:"quantity" json:"quantity"` - Confidentiality string `yaml:"confidentiality" json:"confidentiality"` - Integrity string `yaml:"integrity" json:"integrity"` - Availability string `yaml:"availability" json:"availability"` - JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` +func (context *Context) streamTechnicalAssetsJSON(ginContext *gin.Context) { + context.streamResponse(ginContext, technicalAssetsJSON) } -type payloadSharedRuntime struct { - Title string `yaml:"title" json:"title"` - Id string `yaml:"id" json:"id"` - Description string `yaml:"description" json:"description"` - Tags []string `yaml:"tags" json:"tags"` - TechnicalAssetsRunning []string `yaml:"technical_assets_running" json:"technical_assets_running"` +func (context *Context) streamStatsJSON(ginContext *gin.Context) { + context.streamResponse(ginContext, statsJSON) } -func (context *Context) setSecurityRequirements(ginContext *gin.Context) { +func (context *Context) streamResponse(ginContext *gin.Context, 
responseType responseType) { folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - payload := payloadSecurityRequirements{} - err := ginContext.BindJSON(&payload) - if err != nil { + defer func() { + context.unlockFolder(folderNameOfKey) + var err error + if r := recover(); r != nil { + err = r.(error) + if *context.verbose { + log.Println(err) + } log.Println(err) ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - modelInput.SecurityRequirements = payload - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Security Requirements Update") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "model updated", + "error": strings.TrimSpace(err.Error()), }) + ok = false } + }() + dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(context.defaultGraphvizDPI))) + if err != nil { + context.handleErrorInServiceCall(err, ginContext) + return } -} - -func (context *Context) getSecurityRequirements(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) + _, yamlText, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if !ok { return } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, aModel.SecurityRequirements) + tmpModelFile, err := os.CreateTemp(*context.tempFolder, "threagile-render-*") + if err != nil { + context.handleErrorInServiceCall(err, ginContext) + return } -} - -func (context *Context) setAbuseCases(ginContext *gin.Context) { - folderNameOfKey, key, ok := 
context.checkTokenToFolderName(ginContext) - if !ok { + defer func() { _ = os.Remove(tmpModelFile.Name()) }() + tmpOutputDir, err := os.MkdirTemp(*context.tempFolder, "threagile-render-") + if err != nil { + context.handleErrorInServiceCall(err, ginContext) return } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - payload := payloadAbuseCases{} - err := ginContext.BindJSON(&payload) + defer func() { _ = os.RemoveAll(tmpOutputDir) }() + err = os.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) + if responseType == dataFlowDiagram { + context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, true, false, false, false, false, false, false, false, dpi) if err != nil { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) + context.handleErrorInServiceCall(err, ginContext) return } - modelInput.AbuseCases = payload - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Abuse Cases Update") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "model updated", - }) + ginContext.File(filepath.Join(tmpOutputDir, context.dataFlowDiagramFilenamePNG)) + } else if responseType == dataAssetDiagram { + context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, true, false, false, false, false, false, false, dpi) + if err != nil { + context.handleErrorInServiceCall(err, ginContext) + return } - } -} - -func (context *Context) getAbuseCases(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, aModel.AbuseCases) - } -} - -func 
(context *Context) setOverview(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - payload := payloadOverview{} - err := ginContext.BindJSON(&payload) + ginContext.File(filepath.Join(tmpOutputDir, context.dataAssetDiagramFilenamePNG)) + } else if responseType == reportPDF { + context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, true, false, false, false, false, false, dpi) if err != nil { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) + context.handleErrorInServiceCall(err, ginContext) return } - criticality, err := model.ParseCriticality(payload.BusinessCriticality) + ginContext.FileAttachment(filepath.Join(tmpOutputDir, context.reportFilename), context.reportFilename) + } else if responseType == risksExcel { + context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, true, false, false, false, false, dpi) if err != nil { context.handleErrorInServiceCall(err, ginContext) return } - modelInput.ManagementSummaryComment = payload.ManagementSummaryComment - modelInput.BusinessCriticality = criticality.String() - modelInput.BusinessOverview.Description = payload.BusinessOverview.Description - modelInput.BusinessOverview.Images = payload.BusinessOverview.Images - modelInput.TechnicalOverview.Description = payload.TechnicalOverview.Description - modelInput.TechnicalOverview.Images = payload.TechnicalOverview.Images - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Overview Update") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "model updated", - }) + ginContext.FileAttachment(filepath.Join(tmpOutputDir, context.excelRisksFilename), 
context.excelRisksFilename) + } else if responseType == tagsExcel { + context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, true, false, false, false, dpi) + if err != nil { + context.handleErrorInServiceCall(err, ginContext) + return } - } -} - -func (context *Context) handleErrorInServiceCall(err error, ginContext *gin.Context) { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": strings.TrimSpace(err.Error()), - }) -} - -func (context *Context) getOverview(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "management_summary_comment": aModel.ManagementSummaryComment, - "business_criticality": aModel.BusinessCriticality, - "business_overview": aModel.BusinessOverview, - "technical_overview": aModel.TechnicalOverview, - }) - } -} - -func (context *Context) setCover(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - payload := payloadCover{} - err := ginContext.BindJSON(&payload) + ginContext.FileAttachment(filepath.Join(tmpOutputDir, context.excelTagsFilename), context.excelTagsFilename) + } else if responseType == risksJSON { + context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, true, false, false, dpi) if err != nil { - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) + context.handleErrorInServiceCall(err, ginContext) return } - 
modelInput.Title = payload.Title - if !payload.Date.IsZero() { - modelInput.Date = payload.Date.Format("2006-01-02") + jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, context.jsonRisksFilename)) + if err != nil { + context.handleErrorInServiceCall(err, ginContext) + return } - modelInput.Author.Name = payload.Author.Name - modelInput.Author.Homepage = payload.Author.Homepage - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Cover Update") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "model updated", - }) + ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download + } else if responseType == technicalAssetsJSON { + context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, true, true, false, dpi) + if err != nil { + context.handleErrorInServiceCall(err, ginContext) + return + } + jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, context.jsonTechnicalAssetsFilename)) + if err != nil { + context.handleErrorInServiceCall(err, ginContext) + return + } + ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download + } else if responseType == statsJSON { + context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, false, false, true, dpi) + if err != nil { + context.handleErrorInServiceCall(err, ginContext) + return + } + jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, context.jsonStatsFilename)) + if err != nil { + context.handleErrorInServiceCall(err, ginContext) + return } + ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download } } -func (context *Context) getCover(ginContext *gin.Context) { +// fully replaces threagile.yaml in sub-folder given by UUID +func (context *Context) 
importModel(ginContext *gin.Context) { folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } context.lockFolder(folderNameOfKey) defer context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + + aUuid := ginContext.Param("model-id") // UUID is syntactically validated in readModel+checkModelFolder (next line) via uuid.Parse(modelUUID) + _, _, ok = context.readModel(ginContext, aUuid, key, folderNameOfKey) if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "title": aModel.Title, - "date": aModel.Date, - "author": aModel.Author, - }) + // first analyze it simply by executing the full risk process (just discard the result) to ensure that everything would work + yamlContent, ok := context.execute(ginContext, true) + if ok { + // if we're here, then no problem was raised, so ok to proceed + ok = context.writeModelYAML(ginContext, string(yamlContent), key, context.folderNameForModel(folderNameOfKey, aUuid), "Model Import", false) + if ok { + ginContext.JSON(http.StatusCreated, gin.H{ + "message": "model imported", + }) + } + } } } -// creates a sub-folder (named by a new UUID) inside the token folder -func (context *Context) createNewModel(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - ok = context.checkObjectCreationThrottler(ginContext, "MODEL") - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - - aUuid := uuid.New().String() - err := os.Mkdir(context.folderNameForModel(folderNameOfKey, aUuid), 0700) +func (context *Context) stats(ginContext *gin.Context) { + keyCount, modelCount := 0, 0 + keyFolders, err := os.ReadDir(filepath.Join(*context.serverFolder, context.keyDir)) if err != nil { + log.Println(err) ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to create model", + "error": "unable to 
collect stats", }) return } - - aYaml := `title: New Threat Model -threagile_version: ` + model.ThreagileVersion + ` -author: - name: "" - homepage: "" -date: -business_overview: - description: "" - images: [] -technical_overview: - description: "" - images: [] -business_criticality: "" -management_summary_comment: "" -questions: {} -abuse_cases: {} -security_requirements: {} -tags_available: [] -data_assets: {} -technical_assets: {} -trust_boundaries: {} -shared_runtimes: {} -individual_risk_categories: {} -risk_tracking: {} -diagram_tweak_nodesep: "" -diagram_tweak_ranksep: "" -diagram_tweak_edge_layout: "" -diagram_tweak_suppress_edge_labels: false -diagram_tweak_invisible_connections_between_assets: [] -diagram_tweak_same_rank_assets: []` - - ok = context.writeModelYAML(ginContext, aYaml, key, context.folderNameForModel(folderNameOfKey, aUuid), "New Model Creation", true) - if ok { - ginContext.JSON(http.StatusCreated, gin.H{ - "message": "model created", - "id": aUuid, - }) + for _, keyFolder := range keyFolders { + if len(keyFolder.Name()) == 128 { // it's a sha512 token hash probably, so count it as token folder for the stats + keyCount++ + if keyFolder.Name() != filepath.Clean(keyFolder.Name()) { + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "weird file path", + }) + return + } + modelFolders, err := os.ReadDir(filepath.Join(*context.serverFolder, context.keyDir, keyFolder.Name())) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to collect stats", + }) + return + } + for _, modelFolder := range modelFolders { + if len(modelFolder.Name()) == 36 { // it's a uuid model folder probably, so count it as model folder for the stats + modelCount++ + } + } + } } + // TODO collect and deliver more stats (old model count?) 
and health info + ginContext.JSON(http.StatusOK, gin.H{ + "key_count": keyCount, + "model_count": modelCount, + "success_count": context.successCount, + "error_count": context.errorCount, + }) } -func (context *Context) listModels(ginContext *gin.Context) { // TODO currently returns error when any model is no longer valid in syntax, so eventually have some fallback to not just bark on an invalid model... +func (context *Context) getDataAsset(ginContext *gin.Context) { folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } context.lockFolder(folderNameOfKey) defer context.unlockFolder(folderNameOfKey) - - result := make([]payloadModels, 0) - modelFolders, err := os.ReadDir(folderNameOfKey) - if err != nil { - log.Println(err) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + // yes, here keyed by title in YAML for better readability in the YAML file itself + for title, dataAsset := range modelInput.DataAssets { + if dataAsset.ID == ginContext.Param("data-asset-id") { + ginContext.JSON(http.StatusOK, gin.H{ + title: dataAsset, + }) + return + } + } ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", + "error": "data asset not found", }) + } +} + +func (context *Context) deleteDataAsset(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) + if !ok { return } - for _, dirEntry := range modelFolders { - if dirEntry.IsDir() { - modelStat, err := os.Stat(filepath.Join(folderNameOfKey, dirEntry.Name(), inputFile)) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "unable to list model", - }) - return - } - aModel, _, ok := context.readModel(ginContext, dirEntry.Name(), key, folderNameOfKey) - if !ok { - return - } - fileInfo, err := dirEntry.Info() - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "unable to get file 
info", - }) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + referencesDeleted := false + // yes, here keyed by title in YAML for better readability in the YAML file itself + for title, dataAsset := range modelInput.DataAssets { + if dataAsset.ID == ginContext.Param("data-asset-id") { + // also remove all usages of this data asset !! + for _, techAsset := range modelInput.TechnicalAssets { + if techAsset.DataAssetsProcessed != nil { + for i, parsedChangeCandidateAsset := range techAsset.DataAssetsProcessed { + referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) + if referencedAsset == dataAsset.ID { // apply the removal + referencesDeleted = true + // Remove the element at index i + // TODO needs more testing + copy(techAsset.DataAssetsProcessed[i:], techAsset.DataAssetsProcessed[i+1:]) // Shift a[i+1:] left one index. + techAsset.DataAssetsProcessed[len(techAsset.DataAssetsProcessed)-1] = "" // Erase last element (write zero value). + techAsset.DataAssetsProcessed = techAsset.DataAssetsProcessed[:len(techAsset.DataAssetsProcessed)-1] // Truncate slice. + } + } + } + if techAsset.DataAssetsStored != nil { + for i, parsedChangeCandidateAsset := range techAsset.DataAssetsStored { + referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) + if referencedAsset == dataAsset.ID { // apply the removal + referencesDeleted = true + // Remove the element at index i + // TODO needs more testing + copy(techAsset.DataAssetsStored[i:], techAsset.DataAssetsStored[i+1:]) // Shift a[i+1:] left one index. + techAsset.DataAssetsStored[len(techAsset.DataAssetsStored)-1] = "" // Erase last element (write zero value). + techAsset.DataAssetsStored = techAsset.DataAssetsStored[:len(techAsset.DataAssetsStored)-1] // Truncate slice. 
+ } + } + } + if techAsset.CommunicationLinks != nil { + for title, commLink := range techAsset.CommunicationLinks { + for i, dataAssetSent := range commLink.DataAssetsSent { + referencedAsset := fmt.Sprintf("%v", dataAssetSent) + if referencedAsset == dataAsset.ID { // apply the removal + referencesDeleted = true + // Remove the element at index i + // TODO needs more testing + copy(techAsset.CommunicationLinks[title].DataAssetsSent[i:], techAsset.CommunicationLinks[title].DataAssetsSent[i+1:]) // Shift a[i+1:] left one index. + techAsset.CommunicationLinks[title].DataAssetsSent[len(techAsset.CommunicationLinks[title].DataAssetsSent)-1] = "" // Erase last element (write zero value). + x := techAsset.CommunicationLinks[title] + x.DataAssetsSent = techAsset.CommunicationLinks[title].DataAssetsSent[:len(techAsset.CommunicationLinks[title].DataAssetsSent)-1] // Truncate slice. + techAsset.CommunicationLinks[title] = x + } + } + for i, dataAssetReceived := range commLink.DataAssetsReceived { + referencedAsset := fmt.Sprintf("%v", dataAssetReceived) + if referencedAsset == dataAsset.ID { // apply the removal + referencesDeleted = true + // Remove the element at index i + // TODO needs more testing + copy(techAsset.CommunicationLinks[title].DataAssetsReceived[i:], techAsset.CommunicationLinks[title].DataAssetsReceived[i+1:]) // Shift a[i+1:] left one index. + techAsset.CommunicationLinks[title].DataAssetsReceived[len(techAsset.CommunicationLinks[title].DataAssetsReceived)-1] = "" // Erase last element (write zero value). + x := techAsset.CommunicationLinks[title] + x.DataAssetsReceived = techAsset.CommunicationLinks[title].DataAssetsReceived[:len(techAsset.CommunicationLinks[title].DataAssetsReceived)-1] // Truncate slice. 
+ techAsset.CommunicationLinks[title] = x + } + } + } + } + } + for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { + if individualRiskCat.RisksIdentified != nil { + for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { + if individualRiskInstance.MostRelevantDataAsset == dataAsset.ID { // apply the removal + referencesDeleted = true + x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] + x.MostRelevantDataAsset = "" // TODO needs more testing + modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x + } + } + } + } + // remove it itself + delete(modelInput.DataAssets, title) + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Data Asset Deletion") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "data asset deleted", + "id": dataAsset.ID, + "references_deleted": referencesDeleted, // in order to signal to clients, that other model parts might've been deleted as well + }) + } return - } - result = append(result, payloadModels{ - ID: dirEntry.Name(), - Title: aModel.Title, - TimestampCreated: fileInfo.ModTime(), - TimestampModified: modelStat.ModTime(), - }) + } } + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "data asset not found", + }) } - ginContext.JSON(http.StatusOK, result) } -func (context *Context) deleteModel(ginContext *gin.Context) { - folderNameOfKey, _, ok := context.checkTokenToFolderName(ginContext) +type payloadSharedRuntime struct { + Title string `yaml:"title" json:"title"` + Id string `yaml:"id" json:"id"` + Description string `yaml:"description" json:"description"` + Tags []string `yaml:"tags" json:"tags"` + TechnicalAssetsRunning []string `yaml:"technical_assets_running" json:"technical_assets_running"` +} + +func (context *Context) setSharedRuntime(ginContext *gin.Context) { + folderNameOfKey, key, ok 
:= context.checkTokenToFolderName(ginContext) if !ok { return } context.lockFolder(folderNameOfKey) defer context.unlockFolder(folderNameOfKey) - folder, ok := context.checkModelFolder(ginContext, ginContext.Param("model-id"), folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { - if folder != filepath.Clean(folder) { - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "model-id is weird", - }) - return - } - err := os.RemoveAll(folder) - if err != nil { - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "model not found", - }) - return + // yes, here keyed by title in YAML for better readability in the YAML file itself + for title, sharedRuntime := range modelInput.SharedRuntimes { + if sharedRuntime.ID == ginContext.Param("shared-runtime-id") { + payload := payloadSharedRuntime{} + err := ginContext.BindJSON(&payload) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": "unable to parse request payload", + }) + return + } + sharedRuntimeInput, ok := context.populateSharedRuntime(ginContext, payload) + if !ok { + return + } + // in order to also update the title, remove the shared runtime from the map and re-insert it (with new key) + delete(modelInput.SharedRuntimes, title) + modelInput.SharedRuntimes[payload.Title] = sharedRuntimeInput + idChanged := sharedRuntimeInput.ID != sharedRuntime.ID + if idChanged { // ID-CHANGE-PROPAGATION + for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { + if individualRiskCat.RisksIdentified != nil { + for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { + if individualRiskInstance.MostRelevantSharedRuntime == sharedRuntime.ID { // apply the ID change + x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] + x.MostRelevantSharedRuntime = 
sharedRuntimeInput.ID // TODO needs more testing + modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x + } + } + } + } + } + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Shared Runtime Update") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "shared runtime updated", + "id": sharedRuntimeInput.ID, + "id_changed": idChanged, // in order to signal to clients, that other model parts might've received updates as well and should be reloaded + }) + } + return + } } - ginContext.JSON(http.StatusOK, gin.H{ - "message": "model deleted", + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "shared runtime not found", }) } } -func (context *Context) checkModelFolder(ginContext *gin.Context, modelUUID string, folderNameOfKey string) (modelFolder string, ok bool) { - uuidParsed, err := uuid.Parse(modelUUID) - if err != nil { - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "model not found", - }) - return modelFolder, false - } - modelFolder = context.folderNameForModel(folderNameOfKey, uuidParsed.String()) - if _, err := os.Stat(modelFolder); os.IsNotExist(err) { - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "model not found", - }) - return modelFolder, false - } - return modelFolder, true +type payloadDataAsset struct { + Title string `yaml:"title" json:"title"` + Id string `yaml:"id" json:"id"` + Description string `yaml:"description" json:"description"` + Usage string `yaml:"usage" json:"usage"` + Tags []string `yaml:"tags" json:"tags"` + Origin string `yaml:"origin" json:"origin"` + Owner string `yaml:"owner" json:"owner"` + Quantity string `yaml:"quantity" json:"quantity"` + Confidentiality string `yaml:"confidentiality" json:"confidentiality"` + Integrity string `yaml:"integrity" json:"integrity"` + Availability string `yaml:"availability" json:"availability"` + JustificationCiaRating string `yaml:"justification_cia_rating" 
json:"justification_cia_rating"` } -func (context *Context) readModel(ginContext *gin.Context, modelUUID string, key []byte, folderNameOfKey string) (modelInputResult model.ModelInput, yamlText string, ok bool) { - modelFolder, ok := context.checkModelFolder(ginContext, modelUUID, folderNameOfKey) +func (context *Context) setDataAsset(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { - return modelInputResult, yamlText, false - } - cryptoKey := context.generateKeyFromAlreadyStrongRandomInput(key) - block, err := aes.NewCipher(cryptoKey) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to open model", - }) - return modelInputResult, yamlText, false - } - aesGcm, err := cipher.NewGCM(block) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to open model", - }) - return modelInputResult, yamlText, false - } - - fileBytes, err := os.ReadFile(filepath.Join(modelFolder, inputFile)) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to open model", - }) - return modelInputResult, yamlText, false + return } - - nonce := fileBytes[0:12] - ciphertext := fileBytes[12:] - plaintext, err := aesGcm.Open(nil, nonce, ciphertext, nil) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to open model", + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + // yes, here keyed by title in YAML for better readability in the YAML file itself + for title, dataAsset := range modelInput.DataAssets { + if dataAsset.ID == ginContext.Param("data-asset-id") { + payload := payloadDataAsset{} + err := ginContext.BindJSON(&payload) + if err != nil { 
+ log.Println(err) + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": "unable to parse request payload", + }) + return + } + dataAssetInput, ok := context.populateDataAsset(ginContext, payload) + if !ok { + return + } + // in order to also update the title, remove the asset from the map and re-insert it (with new key) + delete(modelInput.DataAssets, title) + modelInput.DataAssets[payload.Title] = dataAssetInput + idChanged := dataAssetInput.ID != dataAsset.ID + if idChanged { // ID-CHANGE-PROPAGATION + // also update all usages to point to the new (changed) ID !! + for techAssetTitle, techAsset := range modelInput.TechnicalAssets { + if techAsset.DataAssetsProcessed != nil { + for i, parsedChangeCandidateAsset := range techAsset.DataAssetsProcessed { + referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) + if referencedAsset == dataAsset.ID { // apply the ID change + modelInput.TechnicalAssets[techAssetTitle].DataAssetsProcessed[i] = dataAssetInput.ID + } + } + } + if techAsset.DataAssetsStored != nil { + for i, parsedChangeCandidateAsset := range techAsset.DataAssetsStored { + referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) + if referencedAsset == dataAsset.ID { // apply the ID change + modelInput.TechnicalAssets[techAssetTitle].DataAssetsStored[i] = dataAssetInput.ID + } + } + } + if techAsset.CommunicationLinks != nil { + for title, commLink := range techAsset.CommunicationLinks { + for i, dataAssetSent := range commLink.DataAssetsSent { + referencedAsset := fmt.Sprintf("%v", dataAssetSent) + if referencedAsset == dataAsset.ID { // apply the ID change + modelInput.TechnicalAssets[techAssetTitle].CommunicationLinks[title].DataAssetsSent[i] = dataAssetInput.ID + } + } + for i, dataAssetReceived := range commLink.DataAssetsReceived { + referencedAsset := fmt.Sprintf("%v", dataAssetReceived) + if referencedAsset == dataAsset.ID { // apply the ID change + 
modelInput.TechnicalAssets[techAssetTitle].CommunicationLinks[title].DataAssetsReceived[i] = dataAssetInput.ID + } + } + } + } + } + for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { + if individualRiskCat.RisksIdentified != nil { + for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { + if individualRiskInstance.MostRelevantDataAsset == dataAsset.ID { // apply the ID change + x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] + x.MostRelevantDataAsset = dataAssetInput.ID // TODO needs more testing + modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x + } + } + } + } + } + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Data Asset Update") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "data asset updated", + "id": dataAssetInput.ID, + "id_changed": idChanged, // in order to signal to clients, that other model parts might've received updates as well and should be reloaded + }) + } + return + } + } + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "data asset not found", }) - return modelInputResult, yamlText, false } +} - r, err := gzip.NewReader(bytes.NewReader(plaintext)) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to open model", - }) - return modelInputResult, yamlText, false +func (context *Context) getSharedRuntime(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) + if !ok { + return } - buf := new(bytes.Buffer) - _, _ = buf.ReadFrom(r) - modelInput := new(model.ModelInput).Defaults() - yamlBytes := buf.Bytes() - err = yaml.Unmarshal(yamlBytes, &modelInput) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to open model", + 
context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + // yes, here keyed by title in YAML for better readability in the YAML file itself + for title, sharedRuntime := range modelInput.SharedRuntimes { + if sharedRuntime.ID == ginContext.Param("shared-runtime-id") { + ginContext.JSON(http.StatusOK, gin.H{ + title: sharedRuntime, + }) + return + } + } + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "shared runtime not found", }) - return modelInputResult, yamlText, false } - return *modelInput, string(yamlBytes), true } -func (context *Context) writeModel(ginContext *gin.Context, key []byte, folderNameOfKey string, modelInput *model.ModelInput, changeReasonForHistory string) (ok bool) { - modelFolder, ok := context.checkModelFolder(ginContext, ginContext.Param("model-id"), folderNameOfKey) +func (context *Context) createNewSharedRuntime(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) + if !ok { + return + } + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { - modelInput.ThreagileVersion = model.ThreagileVersion - yamlBytes, err := yaml.Marshal(modelInput) + payload := payloadSharedRuntime{} + err := ginContext.BindJSON(&payload) if err != nil { log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to write model", + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": "unable to parse request payload", + }) + return + } + // yes, here keyed by title in YAML for better readability in the YAML file itself + if _, exists := modelInput.SharedRuntimes[payload.Title]; exists { + ginContext.JSON(http.StatusConflict, gin.H{ + "error": "shared runtime with this title already exists", + }) 
+ return + } + // but later it will in memory keyed by its "id", so do this uniqueness check also + for _, sharedRuntime := range modelInput.SharedRuntimes { + if sharedRuntime.ID == payload.Id { + ginContext.JSON(http.StatusConflict, gin.H{ + "error": "shared runtime with this id already exists", + }) + return + } + } + if !context.checkTechnicalAssetsExisting(modelInput, payload.TechnicalAssetsRunning) { + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": "referenced technical asset does not exist", + }) + return + } + sharedRuntimeInput, ok := context.populateSharedRuntime(ginContext, payload) + if !ok { + return + } + if modelInput.SharedRuntimes == nil { + modelInput.SharedRuntimes = make(map[string]input.InputSharedRuntime) + } + modelInput.SharedRuntimes[payload.Title] = sharedRuntimeInput + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Shared Runtime Creation") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "shared runtime created", + "id": sharedRuntimeInput.ID, }) + } + } +} + +func (context *Context) checkTechnicalAssetsExisting(modelInput input.ModelInput, techAssetIDs []string) (ok bool) { + for _, techAssetID := range techAssetIDs { + exists := false + for _, val := range modelInput.TechnicalAssets { + if val.ID == techAssetID { + exists = true + break + } + } + if !exists { return false } - /* - yamlBytes = model.ReformatYAML(yamlBytes) - */ - return context.writeModelYAML(ginContext, string(yamlBytes), key, modelFolder, changeReasonForHistory, false) } - return false + return true } -func (context *Context) writeModelYAML(ginContext *gin.Context, yaml string, key []byte, modelFolder string, changeReasonForHistory string, skipBackup bool) (ok bool) { - if *context.verbose { - fmt.Println("about to write " + strconv.Itoa(len(yaml)) + " bytes of yaml into model folder: " + modelFolder) +func (context *Context) populateSharedRuntime(_ *gin.Context, payload payloadSharedRuntime) (sharedRuntimeInput 
input.InputSharedRuntime, ok bool) { + sharedRuntimeInput = input.InputSharedRuntime{ + ID: payload.Id, + Description: payload.Description, + Tags: lowerCaseAndTrim(payload.Tags), + TechnicalAssetsRunning: payload.TechnicalAssetsRunning, } - var b bytes.Buffer - w := gzip.NewWriter(&b) - _, _ = w.Write([]byte(yaml)) - _ = w.Close() - plaintext := b.Bytes() - cryptoKey := context.generateKeyFromAlreadyStrongRandomInput(key) - block, err := aes.NewCipher(cryptoKey) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to write model", - }) - return false + return sharedRuntimeInput, true +} + +func (context *Context) deleteSharedRuntime(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) + if !ok { + return } - // Never use more than 2^32 random nonces with a given key because of the risk of a repeat. - nonce := make([]byte, 12) - if _, err := io.ReadFull(rand.Reader, nonce); err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to write model", + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + referencesDeleted := false + // yes, here keyed by title in YAML for better readability in the YAML file itself + for title, sharedRuntime := range modelInput.SharedRuntimes { + if sharedRuntime.ID == ginContext.Param("shared-runtime-id") { + // also remove all usages of this shared runtime !! 
+ for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { + if individualRiskCat.RisksIdentified != nil { + for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { + if individualRiskInstance.MostRelevantSharedRuntime == sharedRuntime.ID { // apply the removal + referencesDeleted = true + x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] + x.MostRelevantSharedRuntime = "" // TODO needs more testing + modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x + } + } + } + } + // remove it itself + delete(modelInput.SharedRuntimes, title) + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Shared Runtime Deletion") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "shared runtime deleted", + "id": sharedRuntime.ID, + "references_deleted": referencesDeleted, // in order to signal to clients, that other model parts might've been deleted as well + }) + } + return + } + } + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "shared runtime not found", }) - return false } - aesGcm, err := cipher.NewGCM(block) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to write model", - }) - return false +} + +func (context *Context) createNewDataAsset(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) + if !ok { + return } - ciphertext := aesGcm.Seal(nil, nonce, plaintext, nil) - if !skipBackup { - err = context.backupModelToHistory(modelFolder, changeReasonForHistory) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + payload := payloadDataAsset{} + err := ginContext.BindJSON(&payload) if err != 
nil { log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to write model", + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": "unable to parse request payload", }) - return false + return } - } - f, err := os.Create(filepath.Join(modelFolder, inputFile)) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to write model", - }) - return false - } - _, _ = f.Write(nonce) - _, _ = f.Write(ciphertext) - _ = f.Close() - return true -} - -func (context *Context) backupModelToHistory(modelFolder string, changeReasonForHistory string) (err error) { - historyFolder := filepath.Join(modelFolder, "history") - if _, err := os.Stat(historyFolder); os.IsNotExist(err) { - err = os.Mkdir(historyFolder, 0700) - if err != nil { - return err + // yes, here keyed by title in YAML for better readability in the YAML file itself + if _, exists := modelInput.DataAssets[payload.Title]; exists { + ginContext.JSON(http.StatusConflict, gin.H{ + "error": "data asset with this title already exists", + }) + return } - } - input, err := os.ReadFile(filepath.Join(modelFolder, inputFile)) - if err != nil { - return err - } - historyFile := filepath.Join(historyFolder, time.Now().Format("2006-01-02 15:04:05")+" "+changeReasonForHistory+".backup") - err = os.WriteFile(historyFile, input, 0400) - if err != nil { - return err - } - // now delete any old files if over limit to keep - files, err := os.ReadDir(historyFolder) - if err != nil { - return err - } - if len(files) > backupHistoryFilesToKeep { - requiredToDelete := len(files) - backupHistoryFilesToKeep - sort.Slice(files, func(i, j int) bool { - return files[i].Name() < files[j].Name() - }) - for _, file := range files { - requiredToDelete-- - if file.Name() != filepath.Clean(file.Name()) { - return fmt.Errorf("weird file name %v", file.Name()) - } - err = os.Remove(filepath.Join(historyFolder, file.Name())) - if err != nil { - return 
err - } - if requiredToDelete <= 0 { - break + // but later it will in memory keyed by its "id", so do this uniqueness check also + for _, asset := range modelInput.DataAssets { + if asset.ID == payload.Id { + ginContext.JSON(http.StatusConflict, gin.H{ + "error": "data asset with this id already exists", + }) + return } } + dataAssetInput, ok := context.populateDataAsset(ginContext, payload) + if !ok { + return + } + if modelInput.DataAssets == nil { + modelInput.DataAssets = make(map[string]input.InputDataAsset) + } + modelInput.DataAssets[payload.Title] = dataAssetInput + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Data Asset Creation") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "data asset created", + "id": dataAssetInput.ID, + }) + } } - return -} - -type argon2Params struct { - memory uint32 - iterations uint32 - parallelism uint8 - saltLength uint32 - keyLength uint32 } -func (context *Context) generateKeyFromAlreadyStrongRandomInput(alreadyRandomInput []byte) []byte { - // Establish the parameters to use for Argon2. 
- p := &argon2Params{ - memory: 64 * 1024, - iterations: 3, - parallelism: 2, - saltLength: 16, - keyLength: keySize, +func (context *Context) populateDataAsset(ginContext *gin.Context, payload payloadDataAsset) (dataAssetInput input.InputDataAsset, ok bool) { + usage, err := types.ParseUsage(payload.Usage) + if err != nil { + context.handleErrorInServiceCall(err, ginContext) + return dataAssetInput, false } - // As the input is already cryptographically secure random, the salt is simply the first n bytes - salt := alreadyRandomInput[0:p.saltLength] - hash := argon2.IDKey(alreadyRandomInput[p.saltLength:], salt, p.iterations, p.memory, p.parallelism, p.keyLength) - return hash -} - -func (context *Context) folderNameForModel(folderNameOfKey string, uuid string) string { - return filepath.Join(folderNameOfKey, uuid) -} - -var throttlerLock sync.Mutex -var createdObjectsThrottler = make(map[string][]int64) - -func (context *Context) checkObjectCreationThrottler(ginContext *gin.Context, typeName string) bool { - throttlerLock.Lock() - defer throttlerLock.Unlock() - - // remove all elements older than 3 minutes (= 180000000000 ns) - now := time.Now().UnixNano() - cutoff := now - 180000000000 - for keyCheck := range createdObjectsThrottler { - for i := 0; i < len(createdObjectsThrottler[keyCheck]); i++ { - if createdObjectsThrottler[keyCheck][i] < cutoff { - // Remove the element at index i from slice (safe while looping using i as iterator) - createdObjectsThrottler[keyCheck] = append(createdObjectsThrottler[keyCheck][:i], createdObjectsThrottler[keyCheck][i+1:]...) 
- i-- // Since we just deleted a[i], we must redo that index - } - } - length := len(createdObjectsThrottler[keyCheck]) - if length == 0 { - delete(createdObjectsThrottler, keyCheck) - } - /* - if *verbose { - log.Println("Throttling count: "+strconv.Itoa(length)) - } - */ + quantity, err := types.ParseQuantity(payload.Quantity) + if err != nil { + context.handleErrorInServiceCall(err, ginContext) + return dataAssetInput, false } - - // check current request - keyHash := hash(typeName) // getting the real client ip is not easy inside fully encapsulated containerized runtime - if _, ok := createdObjectsThrottler[keyHash]; !ok { - createdObjectsThrottler[keyHash] = make([]int64, 0) + confidentiality, err := types.ParseConfidentiality(payload.Confidentiality) + if err != nil { + context.handleErrorInServiceCall(err, ginContext) + return dataAssetInput, false } - // check the limit of 20 creations for this type per 3 minutes - withinLimit := len(createdObjectsThrottler[keyHash]) < 20 - if withinLimit { - createdObjectsThrottler[keyHash] = append(createdObjectsThrottler[keyHash], now) - return true + integrity, err := types.ParseCriticality(payload.Integrity) + if err != nil { + context.handleErrorInServiceCall(err, ginContext) + return dataAssetInput, false } - ginContext.JSON(http.StatusTooManyRequests, gin.H{ - "error": "object creation throttling exceeded (denial-of-service protection): please wait some time and try again", - }) - return false -} - -var locksByFolderName = make(map[string]*sync.Mutex) - -func (context *Context) lockFolder(folderName string) { - context.globalLock.Lock() - defer context.globalLock.Unlock() - _, exists := locksByFolderName[folderName] - if !exists { - locksByFolderName[folderName] = &sync.Mutex{} + availability, err := types.ParseCriticality(payload.Availability) + if err != nil { + context.handleErrorInServiceCall(err, ginContext) + return dataAssetInput, false } - locksByFolderName[folderName].Lock() -} - -func (context *Context) 
unlockFolder(folderName string) { - if _, exists := locksByFolderName[folderName]; exists { - locksByFolderName[folderName].Unlock() - delete(locksByFolderName, folderName) + dataAssetInput = input.InputDataAsset{ + ID: payload.Id, + Description: payload.Description, + Usage: usage.String(), + Tags: lowerCaseAndTrim(payload.Tags), + Origin: payload.Origin, + Owner: payload.Owner, + Quantity: quantity.String(), + Confidentiality: confidentiality.String(), + Integrity: integrity.String(), + Availability: availability.String(), + JustificationCiaRating: payload.JustificationCiaRating, } + return dataAssetInput, true } -type tokenHeader struct { - Token string `header:"token"` -} -type keyHeader struct { - Key string `header:"key"` -} - -func (context *Context) folderNameFromKey(key []byte) string { - sha512Hash := hashSHA256(key) - return filepath.Join(*context.serverFolder, keyDir, sha512Hash) -} - -func hashSHA256(key []byte) string { - hasher := sha512.New() - hasher.Write(key) - return hex.EncodeToString(hasher.Sum(nil)) -} - -func (context *Context) createKey(ginContext *gin.Context) { - ok := context.checkObjectCreationThrottler(ginContext, "KEY") +func (context *Context) getDataAssets(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - context.globalLock.Lock() - defer context.globalLock.Unlock() + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + ginContext.JSON(http.StatusOK, aModel.DataAssets) + } +} - keyBytesArr := make([]byte, keySize) - n, err := rand.Read(keyBytesArr[:]) - if n != keySize || err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to create key", - }) +func (context *Context) getTrustBoundaries(ginContext *gin.Context) { + folderNameOfKey, key, ok := 
context.checkTokenToFolderName(ginContext) + if !ok { return } - err = os.MkdirAll(context.folderNameFromKey(keyBytesArr), 0700) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to create key", - }) - return + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + ginContext.JSON(http.StatusOK, aModel.TrustBoundaries) } - ginContext.JSON(http.StatusCreated, gin.H{ - "key": base64.RawURLEncoding.EncodeToString(keyBytesArr[:]), - }) } -func (context *Context) checkTokenToFolderName(ginContext *gin.Context) (folderNameOfKey string, key []byte, ok bool) { - header := tokenHeader{} - if err := ginContext.ShouldBindHeader(&header); err != nil { - log.Println(err) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", - }) - return folderNameOfKey, key, false +func (context *Context) getSharedRuntimes(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) + if !ok { + return } - token, err := base64.RawURLEncoding.DecodeString(strings.TrimSpace(header.Token)) - if len(token) == 0 || err != nil { - if err != nil { - log.Println(err) - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", - }) - return folderNameOfKey, key, false + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + ginContext.JSON(http.StatusOK, aModel.SharedRuntimes) } - context.globalLock.Lock() - defer context.globalLock.Unlock() - housekeepingTokenMaps() // to remove timed-out ones - tokenHash := hashSHA256(token) - if timeoutStruct, exists := mapTokenHashToTimeoutStruct[tokenHash]; exists { - // re-create the key from token - key := xor(token, timeoutStruct.xorRand) - 
folderNameOfKey := context.folderNameFromKey(key) - if _, err := os.Stat(folderNameOfKey); os.IsNotExist(err) { - log.Println(err) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", - }) - return folderNameOfKey, key, false - } - timeoutStruct.lastAccessedNanoTime = time.Now().UnixNano() - return folderNameOfKey, key, true - } else { - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", - }) - return folderNameOfKey, key, false +} + +func (context *Context) arrayOfStringValues(values []types.TypeEnum) []string { + result := make([]string, 0) + for _, value := range values { + result = append(result, value.String()) } + return result } -func (context *Context) checkKeyToFolderName(ginContext *gin.Context) (folderNameOfKey string, key []byte, ok bool) { - header := keyHeader{} - if err := ginContext.ShouldBindHeader(&header); err != nil { - log.Println(err) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "key not found", - }) - return folderNameOfKey, key, false +func (context *Context) getModel(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) + if !ok { + return } - key, err := base64.RawURLEncoding.DecodeString(strings.TrimSpace(header.Key)) - if len(key) == 0 || err != nil { + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + _, yamlText, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + tmpResultFile, err := os.CreateTemp(*context.tempFolder, "threagile-*.yaml") + checkErr(err) + err = os.WriteFile(tmpResultFile.Name(), []byte(yamlText), 0400) if err != nil { log.Println(err) - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "key not found", - }) - return folderNameOfKey, key, false - } - folderNameOfKey = context.folderNameFromKey(key) - if _, err := os.Stat(folderNameOfKey); os.IsNotExist(err) { - log.Println(err) - ginContext.JSON(http.StatusNotFound, gin.H{ - 
"error": "key not found", - }) - return folderNameOfKey, key, false + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to stream model file", + }) + return + } + defer func() { _ = os.Remove(tmpResultFile.Name()) }() + ginContext.FileAttachment(tmpResultFile.Name(), context.inputFile) } - return folderNameOfKey, key, true } -func (context *Context) deleteKey(ginContext *gin.Context) { - folderName, _, ok := context.checkKeyToFolderName(ginContext) +type payloadSecurityRequirements map[string]string + +func (context *Context) setSecurityRequirements(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) if !ok { return } - context.globalLock.Lock() - defer context.globalLock.Unlock() - err := os.RemoveAll(folderName) - if err != nil { - log.Println("error during key delete: " + err.Error()) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "key not found", - }) - return - } - ginContext.JSON(http.StatusOK, gin.H{ - "message": "key deleted", - }) -} - -func (context *Context) userHomeDir() string { - switch runtime.GOOS { - case "windows": - home := os.Getenv("HOMEDRIVE") + os.Getenv("HOMEPATH") - if home == "" { - home = os.Getenv("USERPROFILE") + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + payload := payloadSecurityRequirements{} + err := ginContext.BindJSON(&payload) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": "unable to parse request payload", + }) + return + } + modelInput.SecurityRequirements = payload + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Security Requirements Update") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "model updated", + }) } - return home - - default: - return os.Getenv("HOME") } } -func (context *Context) 
expandPath(path string) *string { - home := context.userHomeDir() - if strings.HasPrefix(path, "~") { - path = strings.Replace(path, "~", home, 1) +func (context *Context) getSecurityRequirements(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) + if !ok { + return } - - if strings.HasPrefix(path, "$HOME") { - path = strings.Replace(path, "$HOME", home, -1) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + ginContext.JSON(http.StatusOK, aModel.SecurityRequirements) } - - return &path } -func (context *Context) ParseCommandlineArgs() { - // folders - context.appFolder = flag.String("app-dir", appDir, "app folder (default: "+appDir+")") - context.serverFolder = flag.String("server-dir", dataDir, "base folder for server mode (default: "+dataDir+")") - context.tempFolder = flag.String("temp-dir", tempDir, "temporary folder location") - context.binFolder = flag.String("bin-dir", binDir, "binary folder location") - context.outputDir = flag.String("output", ".", "output directory") - - // files - context.modelFilename = flag.String("model", inputFile, "input model yaml file") - context.raaPlugin = flag.String("raa-run", "raa_calc", "RAA calculation run file name") - - // flags - context.verbose = flag.Bool("verbose", false, "verbose output") - context.diagramDPI = flag.Int("diagram-dpi", defaultGraphvizDPI, "DPI used to render: maximum is "+strconv.Itoa(maxGraphvizDPI)+"") - context.skipRiskRules = flag.String("skip-risk-rules", "", "comma-separated list of risk rules (by their ID) to skip") - context.riskRulesPlugins = flag.String("custom-risk-rules-plugins", "", "comma-separated list of plugins file names with custom risk rules to load") - context.ignoreOrphanedRiskTracking = flag.Bool("ignore-orphaned-risk-tracking", false, "ignore orphaned risk tracking (just log them) not 
matching a concrete risk") - - // commands - flag.IntVar(&context.ServerPort, "server", 0, "start a server (instead of commandline execution) on the given port") - context.executeModelMacro = flag.String("execute-model-macro", "", "Execute model macro (by ID)") - context.createExampleModel = flag.Bool("create-example-model", false, "just create an example model named threagile-example-model.yaml in the output directory") - context.createStubModel = flag.Bool("create-stub-model", false, "just create a minimal stub model named threagile-stub-model.yaml in the output directory") - context.createEditingSupport = flag.Bool("create-editing-support", false, "just create some editing support stuff in the output directory") - context.templateFilename = flag.String("background", "background.pdf", "background pdf file") - context.generateDataFlowDiagram = flag.Bool("generate-data-flow-diagram", true, "generate data-flow diagram") - context.generateDataAssetDiagram = flag.Bool("generate-data-asset-diagram", true, "generate data asset diagram") - context.generateRisksJSON = flag.Bool("generate-risks-json", true, "generate risks json") - context.generateTechnicalAssetsJSON = flag.Bool("generate-technical-assets-json", true, "generate technical assets json") - context.generateStatsJSON = flag.Bool("generate-stats-json", true, "generate stats json") - context.generateRisksExcel = flag.Bool("generate-risks-excel", true, "generate risks excel") - context.generateTagsExcel = flag.Bool("generate-tags-excel", true, "generate tags excel") - context.generateReportPDF = flag.Bool("generate-report-pdf", true, "generate report pdf, including diagrams") - - // more commands - version := flag.Bool("version", false, "print version") - listTypes := flag.Bool("list-types", false, "print type information (enum values to be used in models)") - listRiskRules := flag.Bool("list-risk-rules", false, "print risk rules") - listModelMacros := flag.Bool("list-model-macros", false, "print model macros") - 
explainTypes := flag.Bool("explain-types", false, "Detailed explanation of all the types") - explainRiskRules := flag.Bool("explain-risk-rules", false, "Detailed explanation of all the risk rules") - explainModelMacros := flag.Bool("explain-model-macros", false, "Detailed explanation of all the model macros") - print3rdParty := flag.Bool("print-3rd-party-licenses", false, "print 3rd-party license information") - license := flag.Bool("print-license", false, "print license information") - - flag.Usage = func() { - context.printLogo() - _, _ = fmt.Fprintf(os.Stderr, "Usage: threagile [options]") - fmt.Println() - fmt.Println() - fmt.Println() - fmt.Println("Options:") - fmt.Println() - flag.PrintDefaults() - fmt.Println() - fmt.Println() - fmt.Println("Examples:") - fmt.Println() - fmt.Println("If you want to create an example model (via docker) as a starting point to learn about Threagile just run: ") - fmt.Println(" docker run --rm -it " + - "-v \"$(pwd)\":" + filepath.Join(*context.appFolder, "work") + " " + - "threagile/threagile " + - "-create-example-model " + - "-output " + filepath.Join(*context.appFolder, "work")) - fmt.Println() - fmt.Println("If you want to create a minimal stub model (via docker) as a starting point for your own model just run: ") - fmt.Println(" docker run --rm -it " + - "-v \"$(pwd)\":" + filepath.Join(*context.appFolder, "work") + " " + - "threagile/threagile " + - "-create-stub-model " + - "-output " + filepath.Join(*context.appFolder, "work")) - fmt.Println() - context.printExamples() - fmt.Println() - } - flag.Parse() - - context.modelFilename = context.expandPath(*context.modelFilename) - context.appFolder = context.expandPath(*context.appFolder) - context.serverFolder = context.expandPath(*context.serverFolder) - context.tempFolder = context.expandPath(*context.tempFolder) - context.binFolder = context.expandPath(*context.binFolder) - context.outputDir = context.expandPath(*context.outputDir) +type payloadAbuseCases 
map[string]string - if *context.diagramDPI < 20 { - *context.diagramDPI = 20 - } else if *context.diagramDPI > maxGraphvizDPI { - *context.diagramDPI = 300 - } - if *version { - context.printLogo() - os.Exit(0) - } - if *listTypes { - context.printLogo() - fmt.Println("The following types are available (can be extended for custom rules):") - fmt.Println() - printTypes("Authentication", model.AuthenticationValues()) - fmt.Println() - printTypes("Authorization", model.AuthorizationValues()) - fmt.Println() - printTypes("Confidentiality", model.ConfidentialityValues()) - fmt.Println() - printTypes("Criticality (for integrity and availability)", model.CriticalityValues()) - fmt.Println() - printTypes("Data Breach Probability", model.DataBreachProbabilityValues()) - fmt.Println() - printTypes("Data Format", model.DataFormatValues()) - fmt.Println() - printTypes("Encryption", model.EncryptionStyleValues()) - fmt.Println() - printTypes("Protocol", model.ProtocolValues()) - fmt.Println() - printTypes("Quantity", model.QuantityValues()) - fmt.Println() - printTypes("Risk Exploitation Impact", model.RiskExploitationImpactValues()) - fmt.Println() - printTypes("Risk Exploitation Likelihood", model.RiskExploitationLikelihoodValues()) - fmt.Println() - printTypes("Risk Function", model.RiskFunctionValues()) - fmt.Println() - printTypes("Risk Severity", model.RiskSeverityValues()) - fmt.Println() - printTypes("Risk Status", model.RiskStatusValues()) - fmt.Println() - printTypes("STRIDE", model.STRIDEValues()) - fmt.Println() - printTypes("Technical Asset Machine", model.TechnicalAssetMachineValues()) - fmt.Println() - printTypes("Technical Asset Size", model.TechnicalAssetSizeValues()) - fmt.Println() - printTypes("Technical Asset Technology", model.TechnicalAssetTechnologyValues()) - fmt.Println() - printTypes("Technical Asset Type", model.TechnicalAssetTypeValues()) - fmt.Println() - printTypes("Trust Boundary Type", model.TrustBoundaryTypeValues()) - fmt.Println() - 
printTypes("Usage", model.UsageValues()) - fmt.Println() - os.Exit(0) +func (context *Context) setAbuseCases(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) + if !ok { + return } - if *listModelMacros { - context.printLogo() - fmt.Println("The following model macros are available (can be extended via custom model macros):") - fmt.Println() - /* TODO finish run stuff - fmt.Println("Custom model macros:") - for id, customModelMacro := range customModelMacros { - fmt.Println(id, "-->", customModelMacro.GetMacroDetails().Title) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + payload := payloadAbuseCases{} + err := ginContext.BindJSON(&payload) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": "unable to parse request payload", + }) + return } - fmt.Println() - */ - fmt.Println("----------------------") - fmt.Println("Built-in model macros:") - fmt.Println("----------------------") - fmt.Println(addbuildpipeline.GetMacroDetails().ID, "-->", addbuildpipeline.GetMacroDetails().Title) - fmt.Println(addvault.GetMacroDetails().ID, "-->", addvault.GetMacroDetails().Title) - fmt.Println(prettyprint.GetMacroDetails().ID, "-->", prettyprint.GetMacroDetails().Title) - fmt.Println(removeunusedtags.GetMacroDetails().ID, "-->", removeunusedtags.GetMacroDetails().Title) - fmt.Println(seedrisktracking.GetMacroDetails().ID, "-->", seedrisktracking.GetMacroDetails().Title) - fmt.Println(seedtags.GetMacroDetails().ID, "-->", seedtags.GetMacroDetails().Title) - fmt.Println() - os.Exit(0) - } - if *listRiskRules { - context.printLogo() - fmt.Println("The following risk rules are available (can be extended via custom risk rules):") - fmt.Println() - fmt.Println("------------------") - fmt.Println("Custom risk rules:") - 
fmt.Println("------------------") - context.loadCustomRiskRules() - for id, customRule := range context.customRiskRules { - fmt.Println(id, "-->", customRule.Category.Title, "--> with tags:", customRule.Tags) + modelInput.AbuseCases = payload + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Abuse Cases Update") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "model updated", + }) } - fmt.Println() - fmt.Println("--------------------") - fmt.Println("Built-in risk rules:") - fmt.Println("--------------------") - fmt.Println(accidentalsecretleak.Category().Id, "-->", accidentalsecretleak.Category().Title, "--> with tags:", accidentalsecretleak.SupportedTags()) - fmt.Println(codebackdooring.Category().Id, "-->", codebackdooring.Category().Title, "--> with tags:", codebackdooring.SupportedTags()) - fmt.Println(containerbaseimagebackdooring.Category().Id, "-->", containerbaseimagebackdooring.Category().Title, "--> with tags:", containerbaseimagebackdooring.SupportedTags()) - fmt.Println(containerplatformescape.Category().Id, "-->", containerplatformescape.Category().Title, "--> with tags:", containerplatformescape.SupportedTags()) - fmt.Println(crosssiterequestforgery.Category().Id, "-->", crosssiterequestforgery.Category().Title, "--> with tags:", crosssiterequestforgery.SupportedTags()) - fmt.Println(crosssitescripting.Category().Id, "-->", crosssitescripting.Category().Title, "--> with tags:", crosssitescripting.SupportedTags()) - fmt.Println(dosriskyaccessacrosstrustboundary.Category().Id, "-->", dosriskyaccessacrosstrustboundary.Category().Title, "--> with tags:", dosriskyaccessacrosstrustboundary.SupportedTags()) - fmt.Println(incompletemodel.Category().Id, "-->", incompletemodel.Category().Title, "--> with tags:", incompletemodel.SupportedTags()) - fmt.Println(ldapinjection.Category().Id, "-->", ldapinjection.Category().Title, "--> with tags:", ldapinjection.SupportedTags()) - 
fmt.Println(missingauthentication.Category().Id, "-->", missingauthentication.Category().Title, "--> with tags:", missingauthentication.SupportedTags()) - fmt.Println(missingauthenticationsecondfactor.Category().Id, "-->", missingauthenticationsecondfactor.Category().Title, "--> with tags:", missingauthenticationsecondfactor.SupportedTags()) - fmt.Println(missingbuildinfrastructure.Category().Id, "-->", missingbuildinfrastructure.Category().Title, "--> with tags:", missingbuildinfrastructure.SupportedTags()) - fmt.Println(missingcloudhardening.Category().Id, "-->", missingcloudhardening.Category().Title, "--> with tags:", missingcloudhardening.SupportedTags()) - fmt.Println(missingfilevalidation.Category().Id, "-->", missingfilevalidation.Category().Title, "--> with tags:", missingfilevalidation.SupportedTags()) - fmt.Println(missinghardening.Category().Id, "-->", missinghardening.Category().Title, "--> with tags:", missinghardening.SupportedTags()) - fmt.Println(missingidentitypropagation.Category().Id, "-->", missingidentitypropagation.Category().Title, "--> with tags:", missingidentitypropagation.SupportedTags()) - fmt.Println(missingidentityproviderisolation.Category().Id, "-->", missingidentityproviderisolation.Category().Title, "--> with tags:", missingidentityproviderisolation.SupportedTags()) - fmt.Println(missingidentitystore.Category().Id, "-->", missingidentitystore.Category().Title, "--> with tags:", missingidentitystore.SupportedTags()) - fmt.Println(missingnetworksegmentation.Category().Id, "-->", missingnetworksegmentation.Category().Title, "--> with tags:", missingnetworksegmentation.SupportedTags()) - fmt.Println(missingvault.Category().Id, "-->", missingvault.Category().Title, "--> with tags:", missingvault.SupportedTags()) - fmt.Println(missingvaultisolation.Category().Id, "-->", missingvaultisolation.Category().Title, "--> with tags:", missingvaultisolation.SupportedTags()) - fmt.Println(missingwaf.Category().Id, "-->", 
missingwaf.Category().Title, "--> with tags:", missingwaf.SupportedTags()) - fmt.Println(mixedtargetsonsharedruntime.Category().Id, "-->", mixedtargetsonsharedruntime.Category().Title, "--> with tags:", mixedtargetsonsharedruntime.SupportedTags()) - fmt.Println(pathtraversal.Category().Id, "-->", pathtraversal.Category().Title, "--> with tags:", pathtraversal.SupportedTags()) - fmt.Println(pushinsteadofpulldeployment.Category().Id, "-->", pushinsteadofpulldeployment.Category().Title, "--> with tags:", pushinsteadofpulldeployment.SupportedTags()) - fmt.Println(searchqueryinjection.Category().Id, "-->", searchqueryinjection.Category().Title, "--> with tags:", searchqueryinjection.SupportedTags()) - fmt.Println(serversiderequestforgery.Category().Id, "-->", serversiderequestforgery.Category().Title, "--> with tags:", serversiderequestforgery.SupportedTags()) - fmt.Println(serviceregistrypoisoning.Category().Id, "-->", serviceregistrypoisoning.Category().Title, "--> with tags:", serviceregistrypoisoning.SupportedTags()) - fmt.Println(sqlnosqlinjection.Category().Id, "-->", sqlnosqlinjection.Category().Title, "--> with tags:", sqlnosqlinjection.SupportedTags()) - fmt.Println(uncheckeddeployment.Category().Id, "-->", uncheckeddeployment.Category().Title, "--> with tags:", uncheckeddeployment.SupportedTags()) - fmt.Println(unencryptedasset.Category().Id, "-->", unencryptedasset.Category().Title, "--> with tags:", unencryptedasset.SupportedTags()) - fmt.Println(unencryptedcommunication.Category().Id, "-->", unencryptedcommunication.Category().Title, "--> with tags:", unencryptedcommunication.SupportedTags()) - fmt.Println(unguardedaccessfrominternet.Category().Id, "-->", unguardedaccessfrominternet.Category().Title, "--> with tags:", unguardedaccessfrominternet.SupportedTags()) - fmt.Println(unguardeddirectdatastoreaccess.Category().Id, "-->", unguardeddirectdatastoreaccess.Category().Title, "--> with tags:", unguardeddirectdatastoreaccess.SupportedTags()) - 
fmt.Println(unnecessarycommunicationlink.Category().Id, "-->", unnecessarycommunicationlink.Category().Title, "--> with tags:", unnecessarycommunicationlink.SupportedTags()) - fmt.Println(unnecessarydataasset.Category().Id, "-->", unnecessarydataasset.Category().Title, "--> with tags:", unnecessarydataasset.SupportedTags()) - fmt.Println(unnecessarydatatransfer.Category().Id, "-->", unnecessarydatatransfer.Category().Title, "--> with tags:", unnecessarydatatransfer.SupportedTags()) - fmt.Println(unnecessarytechnicalasset.Category().Id, "-->", unnecessarytechnicalasset.Category().Title, "--> with tags:", unnecessarytechnicalasset.SupportedTags()) - fmt.Println(untrusteddeserialization.Category().Id, "-->", untrusteddeserialization.Category().Title, "--> with tags:", untrusteddeserialization.SupportedTags()) - fmt.Println(wrongcommunicationlinkcontent.Category().Id, "-->", wrongcommunicationlinkcontent.Category().Title, "--> with tags:", wrongcommunicationlinkcontent.SupportedTags()) - fmt.Println(wrongtrustboundarycontent.Category().Id, "-->", wrongtrustboundarycontent.Category().Title, "--> with tags:", wrongtrustboundarycontent.SupportedTags()) - fmt.Println(xmlexternalentity.Category().Id, "-->", xmlexternalentity.Category().Title, "--> with tags:", xmlexternalentity.SupportedTags()) - fmt.Println() - os.Exit(0) - } - if *explainTypes { - context.printLogo() - fmt.Println("Explanation for the types:") - fmt.Println() - printExplainTypes("Authentication", model.AuthenticationValues()) - printExplainTypes("Authorization", model.AuthorizationValues()) - printExplainTypes("Confidentiality", model.ConfidentialityValues()) - printExplainTypes("Criticality", model.CriticalityValues()) - printExplainTypes("Data Breach Probability", model.DataBreachProbabilityValues()) - printExplainTypes("Data Format", model.DataFormatValues()) - printExplainTypes("Encryption", model.EncryptionStyleValues()) - printExplainTypes("Protocol", model.ProtocolValues()) - 
printExplainTypes("Quantity", model.QuantityValues()) - printExplainTypes("Risk Exploitation Impact", model.RiskExploitationImpactValues()) - printExplainTypes("Risk Exploitation likelihood", model.RiskExploitationLikelihoodValues()) - printExplainTypes("Risk Function", model.RiskFunctionValues()) - printExplainTypes("Risk Severity", model.RiskSeverityValues()) - printExplainTypes("Risk Status", model.RiskStatusValues()) - printExplainTypes("STRIDE", model.STRIDEValues()) - printExplainTypes("Technical Asset Machine", model.TechnicalAssetMachineValues()) - printExplainTypes("Technical Asset Size", model.TechnicalAssetSizeValues()) - printExplainTypes("Technical Asset Technology", model.TechnicalAssetTechnologyValues()) - printExplainTypes("Technical Asset Type", model.TechnicalAssetTypeValues()) - printExplainTypes("Trust Boundary Type", model.TrustBoundaryTypeValues()) - printExplainTypes("Usage", model.UsageValues()) - - os.Exit(0) } - if *explainModelMacros { - context.printLogo() - fmt.Println("Explanation for the model macros:") - fmt.Println() - fmt.Printf("%v: %v\n", addbuildpipeline.GetMacroDetails().ID, addbuildpipeline.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", addvault.GetMacroDetails().ID, addvault.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", prettyprint.GetMacroDetails().ID, prettyprint.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", removeunusedtags.GetMacroDetails().ID, removeunusedtags.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", seedrisktracking.GetMacroDetails().ID, seedrisktracking.GetMacroDetails().Description) - fmt.Printf("%v: %v\n", seedtags.GetMacroDetails().ID, seedtags.GetMacroDetails().Description) - fmt.Println() - os.Exit(0) +} +func (context *Context) getAbuseCases(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) + if !ok { + return } - if *explainRiskRules { - context.printLogo() - fmt.Println("Explanation for risk rules:") - fmt.Println() - 
fmt.Printf("%v: %v\n", accidentalsecretleak.Category().Id, accidentalsecretleak.Category().Description) - fmt.Printf("%v: %v\n", codebackdooring.Category().Id, codebackdooring.Category().Description) - fmt.Printf("%v: %v\n", containerbaseimagebackdooring.Category().Id, containerbaseimagebackdooring.Category().Description) - fmt.Printf("%v: %v\n", containerplatformescape.Category().Id, containerplatformescape.Category().Description) - fmt.Printf("%v: %v\n", crosssiterequestforgery.Category().Id, crosssiterequestforgery.Category().Description) - fmt.Printf("%v: %v\n", crosssitescripting.Category().Id, crosssitescripting.Category().Description) - fmt.Printf("%v: %v\n", dosriskyaccessacrosstrustboundary.Category().Id, dosriskyaccessacrosstrustboundary.Category().Description) - fmt.Printf("%v: %v\n", incompletemodel.Category().Id, incompletemodel.Category().Description) - fmt.Printf("%v: %v\n", ldapinjection.Category().Id, ldapinjection.Category().Description) - fmt.Printf("%v: %v\n", missingauthentication.Category().Id, missingauthentication.Category().Description) - fmt.Printf("%v: %v\n", missingauthenticationsecondfactor.Category().Id, missingauthenticationsecondfactor.Category().Description) - fmt.Printf("%v: %v\n", missingbuildinfrastructure.Category().Id, missingbuildinfrastructure.Category().Description) - fmt.Printf("%v: %v\n", missingcloudhardening.Category().Id, missingcloudhardening.Category().Description) - fmt.Printf("%v: %v\n", missingfilevalidation.Category().Id, missingfilevalidation.Category().Description) - fmt.Printf("%v: %v\n", missinghardening.Category().Id, missinghardening.Category().Description) - fmt.Printf("%v: %v\n", missingidentitypropagation.Category().Id, missingidentitypropagation.Category().Description) - fmt.Printf("%v: %v\n", missingidentityproviderisolation.Category().Id, missingidentityproviderisolation.Category().Description) - fmt.Printf("%v: %v\n", missingidentitystore.Category().Id, missingidentitystore.Category().Description) - 
fmt.Printf("%v: %v\n", missingnetworksegmentation.Category().Id, missingnetworksegmentation.Category().Description) - fmt.Printf("%v: %v\n", missingvault.Category().Id, missingvault.Category().Description) - fmt.Printf("%v: %v\n", missingvaultisolation.Category().Id, missingvaultisolation.Category().Description) - fmt.Printf("%v: %v\n", missingwaf.Category().Id, missingwaf.Category().Description) - fmt.Printf("%v: %v\n", mixedtargetsonsharedruntime.Category().Id, mixedtargetsonsharedruntime.Category().Description) - fmt.Printf("%v: %v\n", pathtraversal.Category().Id, pathtraversal.Category().Description) - fmt.Printf("%v: %v\n", pushinsteadofpulldeployment.Category().Id, pushinsteadofpulldeployment.Category().Description) - fmt.Printf("%v: %v\n", searchqueryinjection.Category().Id, searchqueryinjection.Category().Description) - fmt.Printf("%v: %v\n", serversiderequestforgery.Category().Id, serversiderequestforgery.Category().Description) - fmt.Printf("%v: %v\n", serviceregistrypoisoning.Category().Id, serviceregistrypoisoning.Category().Description) - fmt.Printf("%v: %v\n", sqlnosqlinjection.Category().Id, sqlnosqlinjection.Category().Description) - fmt.Printf("%v: %v\n", uncheckeddeployment.Category().Id, uncheckeddeployment.Category().Description) - fmt.Printf("%v: %v\n", unencryptedasset.Category().Id, unencryptedasset.Category().Description) - fmt.Printf("%v: %v\n", unencryptedcommunication.Category().Id, unencryptedcommunication.Category().Description) - fmt.Printf("%v: %v\n", unguardedaccessfrominternet.Category().Id, unguardedaccessfrominternet.Category().Description) - fmt.Printf("%v: %v\n", unguardeddirectdatastoreaccess.Category().Id, unguardeddirectdatastoreaccess.Category().Description) - fmt.Printf("%v: %v\n", unnecessarycommunicationlink.Category().Id, unnecessarycommunicationlink.Category().Description) - fmt.Printf("%v: %v\n", unnecessarydataasset.Category().Id, unnecessarydataasset.Category().Description) - fmt.Printf("%v: %v\n", 
unnecessarydatatransfer.Category().Id, unnecessarydatatransfer.Category().Description) - fmt.Printf("%v: %v\n", unnecessarytechnicalasset.Category().Id, unnecessarytechnicalasset.Category().Description) - fmt.Printf("%v: %v\n", untrusteddeserialization.Category().Id, untrusteddeserialization.Category().Description) - fmt.Printf("%v: %v\n", wrongcommunicationlinkcontent.Category().Id, wrongcommunicationlinkcontent.Category().Description) - fmt.Printf("%v: %v\n", wrongtrustboundarycontent.Category().Id, wrongtrustboundarycontent.Category().Description) - fmt.Printf("%v: %v\n", xmlexternalentity.Category().Id, xmlexternalentity.Category().Description) - fmt.Println() - os.Exit(0) - } - if *print3rdParty { - context.printLogo() - fmt.Println("Kudos & Credits to the following open-source projects:") - fmt.Println(" - golang (Google Go License): https://golang.org/LICENSE") - fmt.Println(" - go-yaml (MIT License): https://github.com/go-yaml/yaml/blob/v3/LICENSE") - fmt.Println(" - graphviz (CPL License): https://graphviz.gitlab.io/license/") - fmt.Println(" - gofpdf (MIT License): https://github.com/jung-kurt/gofpdf/blob/master/LICENSE") - fmt.Println(" - go-chart (MIT License): https://github.com/wcharczuk/go-chart/blob/master/LICENSE") - fmt.Println(" - excelize (BSD License): https://github.com/qax-os/excelize/blob/master/LICENSE") - fmt.Println(" - graphics-go (BSD License): https://github.com/BurntSushi/graphics-go/blob/master/LICENSE") - fmt.Println(" - google-uuid (BSD License): https://github.com/google/uuid/blob/master/LICENSE") - fmt.Println(" - gin-gonic (MIT License): https://github.com/gin-gonic/gin/blob/master/LICENSE") - fmt.Println(" - swagger-ui (Apache License): https://swagger.io/license/") - fmt.Println() - os.Exit(0) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + ginContext.JSON(http.StatusOK, 
aModel.AbuseCases) } - if *license { - context.printLogo() - if *context.appFolder != filepath.Clean(*context.appFolder) { - log.Fatalf("weird app folder %v", *context.appFolder) - } - content, err := os.ReadFile(filepath.Join(*context.appFolder, "LICENSE.txt")) - checkErr(err) - fmt.Print(string(content)) - fmt.Println() - os.Exit(0) +} + +type payloadOverview struct { + ManagementSummaryComment string `yaml:"management_summary_comment" json:"management_summary_comment"` + BusinessCriticality string `yaml:"business_criticality" json:"business_criticality"` + BusinessOverview input.Overview `yaml:"business_overview" json:"business_overview"` + TechnicalOverview input.Overview `yaml:"technical_overview" json:"technical_overview"` +} + +func (context *Context) setOverview(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) + if !ok { + return } - if *context.createExampleModel { - exampleError := context.createExampleModelFile() - if exampleError != nil { - log.Fatalf("Unable to copy example model: %v", exampleError) + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + payload := payloadOverview{} + err := ginContext.BindJSON(&payload) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": "unable to parse request payload", + }) return } - context.printLogo() - fmt.Println("An example model was created named threagile-example-model.yaml in the output directory.") - fmt.Println() - context.printExamples() - fmt.Println() - os.Exit(0) - } - if *context.createStubModel { - stubError := context.createStubModelFile() - if stubError != nil { - log.Fatalf("Unable to copy stub model: %v", stubError) + criticality, err := types.ParseCriticality(payload.BusinessCriticality) + if err != nil { + context.handleErrorInServiceCall(err, ginContext) 
return } - context.printLogo() - fmt.Println("A minimal stub model was created named threagile-stub-model.yaml in the output directory.") - fmt.Println() - context.printExamples() - fmt.Println() - os.Exit(0) - } - if *context.createEditingSupport { - supportError := context.createEditingSupportFiles() - if supportError != nil { - log.Fatalf("Unable to copy editing support files: %v", supportError) - return + modelInput.ManagementSummaryComment = payload.ManagementSummaryComment + modelInput.BusinessCriticality = criticality.String() + modelInput.BusinessOverview.Description = payload.BusinessOverview.Description + modelInput.BusinessOverview.Images = payload.BusinessOverview.Images + modelInput.TechnicalOverview.Description = payload.TechnicalOverview.Description + modelInput.TechnicalOverview.Images = payload.TechnicalOverview.Images + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Overview Update") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "model updated", + }) } - context.printLogo() - fmt.Println("The following files were created in the output directory:") - fmt.Println(" - schema.json") - fmt.Println(" - live-templates.txt") - fmt.Println() - fmt.Println("For a perfect editing experience within your IDE of choice you can easily get " + - "model syntax validation and autocompletion (very handy for enum values) as well as live templates: " + - "Just import the schema.json into your IDE and assign it as \"schema\" to each Threagile YAML file. 
" + - "Also try to import individual parts from the live-templates.txt file into your IDE as live editing templates.") - fmt.Println() - os.Exit(0) } } -func (context *Context) printLogo() { - fmt.Println() - fmt.Println(" _____ _ _ _ \n |_ _| |__ _ __ ___ __ _ __ _(_) | ___ \n | | | '_ \\| '__/ _ \\/ _` |/ _` | | |/ _ \\\n | | | | | | | | __/ (_| | (_| | | | __/\n |_| |_| |_|_| \\___|\\__,_|\\__, |_|_|\\___|\n |___/ ") - fmt.Println("Threagile - Agile Threat Modeling") - fmt.Println() - fmt.Println() - context.printVersion() +func (context *Context) handleErrorInServiceCall(err error, ginContext *gin.Context) { + log.Println(err) + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": strings.TrimSpace(err.Error()), + }) } -func (context *Context) printVersion() { - fmt.Println("Documentation: https://threagile.io") - fmt.Println("Docker Images: https://hub.docker.com/r/threagile/threagile") - fmt.Println("Sourcecode: https://github.com/threagile") - fmt.Println("License: Open-Source (MIT License)") - fmt.Println("Version: " + model.ThreagileVersion + " (" + context.buildTimestamp + ")") - fmt.Println() - fmt.Println() +func (context *Context) getOverview(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) + if !ok { + return + } + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "management_summary_comment": aModel.ManagementSummaryComment, + "business_criticality": aModel.BusinessCriticality, + "business_overview": aModel.BusinessOverview, + "technical_overview": aModel.TechnicalOverview, + }) + } } -func (context *Context) createExampleModelFile() error { - _, err := copyFile(filepath.Join(*context.appFolder, "threagile-example-model.yaml"), filepath.Join(*context.outputDir, "threagile-example-model.yaml")) - if err == nil { - 
return nil - } +type payloadCover struct { + Title string `yaml:"title" json:"title"` + Date time.Time `yaml:"date" json:"date"` + Author input.Author `yaml:"author" json:"author"` +} - _, altError := copyFile(filepath.Join(*context.appFolder, "threagile.yaml"), filepath.Join(*context.outputDir, "threagile-example-model.yaml")) - if altError != nil { - return err +func (context *Context) setCover(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) + if !ok { + return + } + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + payload := payloadCover{} + err := ginContext.BindJSON(&payload) + if err != nil { + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": "unable to parse request payload", + }) + return + } + modelInput.Title = payload.Title + if !payload.Date.IsZero() { + modelInput.Date = payload.Date.Format("2006-01-02") + } + modelInput.Author.Name = payload.Author.Name + modelInput.Author.Homepage = payload.Author.Homepage + ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Cover Update") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "model updated", + }) + } } +} - return nil +func (context *Context) getCover(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) + if !ok { + return + } + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "title": aModel.Title, + "date": aModel.Date, + "author": aModel.Author, + }) + } } -func (context *Context) createStubModelFile() error { - _, err := copyFile(filepath.Join(*context.appFolder, "threagile-stub-model.yaml"), filepath.Join(*context.outputDir, 
"threagile-stub-model.yaml")) - if err == nil { - return nil +// creates a sub-folder (named by a new UUID) inside the token folder +func (context *Context) createNewModel(ginContext *gin.Context) { + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) + if !ok { + return + } + ok = context.checkObjectCreationThrottler(ginContext, "MODEL") + if !ok { + return } + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) - _, altError := copyFile(filepath.Join(*context.appFolder, "threagile.yaml"), filepath.Join(*context.outputDir, "threagile-stub-model.yaml")) - if altError != nil { - return err + aUuid := uuid.New().String() + err := os.Mkdir(context.folderNameForModel(folderNameOfKey, aUuid), 0700) + if err != nil { + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to create model", + }) + return } - return nil -} + aYaml := `title: New Threat Model +threagile_version: ` + docs.ThreagileVersion + ` +author: + name: "" + homepage: "" +date: +business_overview: + description: "" + images: [] +technical_overview: + description: "" + images: [] +business_criticality: "" +management_summary_comment: "" +questions: {} +abuse_cases: {} +security_requirements: {} +tags_available: [] +data_assets: {} +technical_assets: {} +trust_boundaries: {} +shared_runtimes: {} +individual_risk_categories: {} +risk_tracking: {} +diagram_tweak_nodesep: "" +diagram_tweak_ranksep: "" +diagram_tweak_edge_layout: "" +diagram_tweak_suppress_edge_labels: false +diagram_tweak_invisible_connections_between_assets: [] +diagram_tweak_same_rank_assets: []` -func (context *Context) createEditingSupportFiles() error { - _, schemaError := copyFile(filepath.Join(*context.appFolder, "schema.json"), filepath.Join(*context.outputDir, "schema.json")) - if schemaError != nil { - return schemaError + ok = context.writeModelYAML(ginContext, aYaml, key, context.folderNameForModel(folderNameOfKey, aUuid), "New Model Creation", true) + if ok { 
+ ginContext.JSON(http.StatusCreated, gin.H{ + "message": "model created", + "id": aUuid, + }) } - - _, templateError := copyFile(filepath.Join(*context.appFolder, "live-templates.txt"), filepath.Join(*context.outputDir, "live-templates.txt")) - return templateError } -func (context *Context) printExamples() { - fmt.Println("If you want to execute Threagile on a model yaml file (via docker): ") - fmt.Println(" docker run --rm -it " + - "-v \"$(pwd)\":" + filepath.Join(*context.appFolder, "work") + " " + - "threagile/threagile " + - "-verbose " + - "-model " + filepath.Join(*context.appFolder, "work", inputFile) + " " + - "-output " + filepath.Join(*context.appFolder, "work")) - fmt.Println() - fmt.Println("If you want to run Threagile as a server (REST API) on some port (here 8080): ") - fmt.Println(" docker run --rm -it " + - "--shm-size=256m " + - "-p 8080:8080 " + - "--name threagile-server " + - "--mount 'type=volume,src=threagile-storage,dst=/data,readonly=false' " + - "threagile/threagile -server 8080") - fmt.Println() - fmt.Println("If you want to find out about the different enum values usable in the model yaml file: ") - fmt.Println(" docker run --rm -it threagile/threagile -list-types") - fmt.Println() - fmt.Println("If you want to use some nice editing help (syntax validation, autocompletion, and live templates) in your favourite IDE: ") - fmt.Println(" docker run --rm -it -v \"$(pwd)\":" + filepath.Join(*context.appFolder, "work") + " threagile/threagile -create-editing-support -output " + filepath.Join(*context.appFolder, "work")) - fmt.Println() - fmt.Println("If you want to list all available model macros (which are macros capable of reading a model yaml file, asking you questions in a wizard-style and then update the model yaml file accordingly): ") - fmt.Println(" docker run --rm -it threagile/threagile -list-model-macros") - fmt.Println() - fmt.Println("If you want to execute a certain model macro on the model yaml file (here the macro 
add-build-pipeline): ") - fmt.Println(" docker run --rm -it -v \"$(pwd)\":" + filepath.Join(*context.appFolder, "work") + " threagile/threagile -model " + filepath.Join(*context.appFolder, "work", inputFile) + " -output " + filepath.Join(*context.appFolder, "work") + " -execute-model-macro add-build-pipeline") +type payloadModels struct { + ID string `yaml:"id" json:"id"` + Title string `yaml:"title" json:"title"` + TimestampCreated time.Time `yaml:"timestamp_created" json:"timestamp_created"` + TimestampModified time.Time `yaml:"timestamp_modified" json:"timestamp_modified"` } -func printTypes(title string, value interface{}) { - fmt.Println(fmt.Sprintf(" %v: %v", title, value)) +func (context *Context) listModels(ginContext *gin.Context) { // TODO currently returns error when any model is no longer valid in syntax, so eventually have some fallback to not just bark on an invalid model... + folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) + if !ok { + return + } + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + + result := make([]payloadModels, 0) + modelFolders, err := os.ReadDir(folderNameOfKey) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "token not found", + }) + return + } + for _, dirEntry := range modelFolders { + if dirEntry.IsDir() { + modelStat, err := os.Stat(filepath.Join(folderNameOfKey, dirEntry.Name(), context.inputFile)) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "unable to list model", + }) + return + } + aModel, _, ok := context.readModel(ginContext, dirEntry.Name(), key, folderNameOfKey) + if !ok { + return + } + fileInfo, err := dirEntry.Info() + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "unable to get file info", + }) + return + } + result = append(result, payloadModels{ + ID: dirEntry.Name(), + Title: aModel.Title, + TimestampCreated: 
fileInfo.ModTime(), + TimestampModified: modelStat.ModTime(), + }) + } + } + ginContext.JSON(http.StatusOK, result) } -// explainTypes prints and explanation block and a header -func printExplainTypes(title string, value []model.TypeEnum) { - fmt.Println(title) - for _, candidate := range value { - fmt.Printf("\t %v: %v\n", candidate, candidate.Explain()) +func (context *Context) deleteModel(ginContext *gin.Context) { + folderNameOfKey, _, ok := context.checkTokenToFolderName(ginContext) + if !ok { + return + } + context.lockFolder(folderNameOfKey) + defer context.unlockFolder(folderNameOfKey) + folder, ok := context.checkModelFolder(ginContext, ginContext.Param("model-id"), folderNameOfKey) + if ok { + if folder != filepath.Clean(folder) { + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "model-id is weird", + }) + return + } + err := os.RemoveAll(folder) + if err != nil { + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "model not found", + }) + return + } + ginContext.JSON(http.StatusOK, gin.H{ + "message": "model deleted", + }) } } -func copyFile(src, dst string) (int64, error) { - sourceFileStat, err := os.Stat(src) +func (context *Context) checkModelFolder(ginContext *gin.Context, modelUUID string, folderNameOfKey string) (modelFolder string, ok bool) { + uuidParsed, err := uuid.Parse(modelUUID) if err != nil { - return 0, err + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "model not found", + }) + return modelFolder, false } - - if !sourceFileStat.Mode().IsRegular() { - return 0, fmt.Errorf("%s is not a regular file", src) + modelFolder = context.folderNameForModel(folderNameOfKey, uuidParsed.String()) + if _, err := os.Stat(modelFolder); os.IsNotExist(err) { + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "model not found", + }) + return modelFolder, false } + return modelFolder, true +} - source, err := os.Open(src) +func (context *Context) readModel(ginContext *gin.Context, modelUUID string, key []byte, 
folderNameOfKey string) (modelInputResult input.ModelInput, yamlText string, ok bool) { + modelFolder, ok := context.checkModelFolder(ginContext, modelUUID, folderNameOfKey) + if !ok { + return modelInputResult, yamlText, false + } + cryptoKey := context.generateKeyFromAlreadyStrongRandomInput(key) + block, err := aes.NewCipher(cryptoKey) if err != nil { - return 0, err + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to open model", + }) + return modelInputResult, yamlText, false } - defer func() { _ = source.Close() }() - - destination, err := os.Create(dst) + aesGcm, err := cipher.NewGCM(block) if err != nil { - return 0, err + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to open model", + }) + return modelInputResult, yamlText, false } - defer func() { _ = destination.Close() }() - nBytes, err := io.Copy(destination, source) - return nBytes, err -} -func (context *Context) parseModel() { - if *context.verbose { - fmt.Println("Parsing model:", *context.modelFilename) + fileBytes, err := os.ReadFile(filepath.Join(modelFolder, context.inputFile)) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to open model", + }) + return modelInputResult, yamlText, false } - context.modelInput = *new(model.ModelInput).Defaults() - loadError := context.modelInput.Load(*context.modelFilename) - if loadError != nil { - log.Fatal("Unable to parse model yaml: ", loadError) + nonce := fileBytes[0:12] + ciphertext := fileBytes[12:] + plaintext, err := aesGcm.Open(nil, nonce, ciphertext, nil) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to open model", + }) + return modelInputResult, yamlText, false } - // data, _ := json.MarshalIndent(context.modelInput, "", " ") - // fmt.Printf("%v\n", string(data)) - - var businessCriticality model.Criticality - switch 
context.modelInput.BusinessCriticality { - case model.Archive.String(): - businessCriticality = model.Archive - case model.Operational.String(): - businessCriticality = model.Operational - case model.Important.String(): - businessCriticality = model.Important - case model.Critical.String(): - businessCriticality = model.Critical - case model.MissionCritical.String(): - businessCriticality = model.MissionCritical - default: - panic(errors.New("unknown 'business_criticality' value of application: " + context.modelInput.BusinessCriticality)) + r, err := gzip.NewReader(bytes.NewReader(plaintext)) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to open model", + }) + return modelInputResult, yamlText, false + } + buf := new(bytes.Buffer) + _, _ = buf.ReadFrom(r) + modelInput := new(input.ModelInput).Defaults() + yamlBytes := buf.Bytes() + err = yaml.Unmarshal(yamlBytes, &modelInput) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to open model", + }) + return modelInputResult, yamlText, false } + return *modelInput, string(yamlBytes), true +} - reportDate := time.Now() - if len(context.modelInput.Date) > 0 { - var parseError error - reportDate, parseError = time.Parse("2006-01-02", context.modelInput.Date) - if parseError != nil { - panic(errors.New("unable to parse 'date' value of model file")) +func (context *Context) writeModel(ginContext *gin.Context, key []byte, folderNameOfKey string, modelInput *input.ModelInput, changeReasonForHistory string) (ok bool) { + modelFolder, ok := context.checkModelFolder(ginContext, ginContext.Param("model-id"), folderNameOfKey) + if ok { + modelInput.ThreagileVersion = docs.ThreagileVersion + yamlBytes, err := yaml.Marshal(modelInput) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to write model", + }) + return false } + /* + yamlBytes = 
model.ReformatYAML(yamlBytes) + */ + return context.writeModelYAML(ginContext, string(yamlBytes), key, modelFolder, changeReasonForHistory, false) } + return false +} - model.ParsedModelRoot = model.ParsedModel{ - Author: context.modelInput.Author, - Title: context.modelInput.Title, - Date: reportDate, - ManagementSummaryComment: context.modelInput.ManagementSummaryComment, - BusinessCriticality: businessCriticality, - BusinessOverview: removePathElementsFromImageFiles(context.modelInput.BusinessOverview), - TechnicalOverview: removePathElementsFromImageFiles(context.modelInput.TechnicalOverview), - Questions: context.modelInput.Questions, - AbuseCases: context.modelInput.AbuseCases, - SecurityRequirements: context.modelInput.SecurityRequirements, - TagsAvailable: lowerCaseAndTrim(context.modelInput.TagsAvailable), - DiagramTweakNodesep: context.modelInput.DiagramTweakNodesep, - DiagramTweakRanksep: context.modelInput.DiagramTweakRanksep, - DiagramTweakEdgeLayout: context.modelInput.DiagramTweakEdgeLayout, - DiagramTweakSuppressEdgeLabels: context.modelInput.DiagramTweakSuppressEdgeLabels, - DiagramTweakLayoutLeftToRight: context.modelInput.DiagramTweakLayoutLeftToRight, - DiagramTweakInvisibleConnectionsBetweenAssets: context.modelInput.DiagramTweakInvisibleConnectionsBetweenAssets, - DiagramTweakSameRankAssets: context.modelInput.DiagramTweakSameRankAssets, +func (context *Context) writeModelYAML(ginContext *gin.Context, yaml string, key []byte, modelFolder string, changeReasonForHistory string, skipBackup bool) (ok bool) { + if *context.verbose { + fmt.Println("about to write " + strconv.Itoa(len(yaml)) + " bytes of yaml into model folder: " + modelFolder) } - if model.ParsedModelRoot.DiagramTweakNodesep == 0 { - model.ParsedModelRoot.DiagramTweakNodesep = 2 + var b bytes.Buffer + w := gzip.NewWriter(&b) + _, _ = w.Write([]byte(yaml)) + _ = w.Close() + plaintext := b.Bytes() + cryptoKey := context.generateKeyFromAlreadyStrongRandomInput(key) + block, err := 
aes.NewCipher(cryptoKey) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to write model", + }) + return false } - if model.ParsedModelRoot.DiagramTweakRanksep == 0 { - model.ParsedModelRoot.DiagramTweakRanksep = 2 + // Never use more than 2^32 random nonces with a given key because of the risk of a repeat. + nonce := make([]byte, 12) + if _, err := io.ReadFull(rand.Reader, nonce); err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to write model", + }) + return false } - - // Data Assets =============================================================================== - model.ParsedModelRoot.DataAssets = make(map[string]model.DataAsset) - for title, asset := range context.modelInput.DataAssets { - id := fmt.Sprintf("%v", asset.ID) - - var usage model.Usage - switch asset.Usage { - case model.Business.String(): - usage = model.Business - case model.DevOps.String(): - usage = model.DevOps - default: - panic(errors.New("unknown 'usage' value of data asset '" + title + "': " + asset.Usage)) - } - - var quantity model.Quantity - switch asset.Quantity { - case model.VeryFew.String(): - quantity = model.VeryFew - case model.Few.String(): - quantity = model.Few - case model.Many.String(): - quantity = model.Many - case model.VeryMany.String(): - quantity = model.VeryMany - default: - panic(errors.New("unknown 'quantity' value of data asset '" + title + "': " + asset.Quantity)) - } - - var confidentiality model.Confidentiality - switch asset.Confidentiality { - case model.Public.String(): - confidentiality = model.Public - case model.Internal.String(): - confidentiality = model.Internal - case model.Restricted.String(): - confidentiality = model.Restricted - case model.Confidential.String(): - confidentiality = model.Confidential - case model.StrictlyConfidential.String(): - confidentiality = model.StrictlyConfidential - default: - 
panic(errors.New("unknown 'confidentiality' value of data asset '" + title + "': " + asset.Confidentiality)) - } - - var integrity model.Criticality - switch asset.Integrity { - case model.Archive.String(): - integrity = model.Archive - case model.Operational.String(): - integrity = model.Operational - case model.Important.String(): - integrity = model.Important - case model.Critical.String(): - integrity = model.Critical - case model.MissionCritical.String(): - integrity = model.MissionCritical - default: - panic(errors.New("unknown 'integrity' value of data asset '" + title + "': " + asset.Integrity)) - } - - var availability model.Criticality - switch asset.Availability { - case model.Archive.String(): - availability = model.Archive - case model.Operational.String(): - availability = model.Operational - case model.Important.String(): - availability = model.Important - case model.Critical.String(): - availability = model.Critical - case model.MissionCritical.String(): - availability = model.MissionCritical - default: - panic(errors.New("unknown 'availability' value of data asset '" + title + "': " + asset.Availability)) - } - - context.checkIdSyntax(id) - if _, exists := model.ParsedModelRoot.DataAssets[id]; exists { - panic(errors.New("duplicate id used: " + id)) - } - model.ParsedModelRoot.DataAssets[id] = model.DataAsset{ - Id: id, - Title: title, - Usage: usage, - Description: withDefault(fmt.Sprintf("%v", asset.Description), title), - Quantity: quantity, - Tags: checkTags(lowerCaseAndTrim(asset.Tags), "data asset '"+title+"'"), - Origin: fmt.Sprintf("%v", asset.Origin), - Owner: fmt.Sprintf("%v", asset.Owner), - Confidentiality: confidentiality, - Integrity: integrity, - Availability: availability, - JustificationCiaRating: fmt.Sprintf("%v", asset.JustificationCiaRating), + aesGcm, err := cipher.NewGCM(block) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to write model", + }) + return false + 
} + ciphertext := aesGcm.Seal(nil, nonce, plaintext, nil) + if !skipBackup { + err = context.backupModelToHistory(modelFolder, changeReasonForHistory) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to write model", + }) + return false } } + f, err := os.Create(filepath.Join(modelFolder, context.inputFile)) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to write model", + }) + return false + } + _, _ = f.Write(nonce) + _, _ = f.Write(ciphertext) + _ = f.Close() + return true +} - // Technical Assets =============================================================================== - model.ParsedModelRoot.TechnicalAssets = make(map[string]model.TechnicalAsset) - for title, asset := range context.modelInput.TechnicalAssets { - id := fmt.Sprintf("%v", asset.ID) - - var usage model.Usage - switch asset.Usage { - case model.Business.String(): - usage = model.Business - case model.DevOps.String(): - usage = model.DevOps - default: - panic(errors.New("unknown 'usage' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Usage))) +func (context *Context) backupModelToHistory(modelFolder string, changeReasonForHistory string) (err error) { + historyFolder := filepath.Join(modelFolder, "history") + if _, err := os.Stat(historyFolder); os.IsNotExist(err) { + err = os.Mkdir(historyFolder, 0700) + if err != nil { + return err } - - var dataAssetsProcessed = make([]string, 0) - if asset.DataAssetsProcessed != nil { - dataAssetsProcessed = make([]string, len(asset.DataAssetsProcessed)) - for i, parsedProcessedAsset := range asset.DataAssetsProcessed { - referencedAsset := fmt.Sprintf("%v", parsedProcessedAsset) - checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") - dataAssetsProcessed[i] = referencedAsset + } + input, err := os.ReadFile(filepath.Join(modelFolder, context.inputFile)) + if err != nil { + return err + 
} + historyFile := filepath.Join(historyFolder, time.Now().Format("2006-01-02 15:04:05")+" "+changeReasonForHistory+".backup") + err = os.WriteFile(historyFile, input, 0400) + if err != nil { + return err + } + // now delete any old files if over limit to keep + files, err := os.ReadDir(historyFolder) + if err != nil { + return err + } + if len(files) > context.backupHistoryFilesToKeep { + requiredToDelete := len(files) - context.backupHistoryFilesToKeep + sort.Slice(files, func(i, j int) bool { + return files[i].Name() < files[j].Name() + }) + for _, file := range files { + requiredToDelete-- + if file.Name() != filepath.Clean(file.Name()) { + return fmt.Errorf("weird file name %v", file.Name()) } - } - - var dataAssetsStored = make([]string, 0) - if asset.DataAssetsStored != nil { - dataAssetsStored = make([]string, len(asset.DataAssetsStored)) - for i, parsedStoredAssets := range asset.DataAssetsStored { - referencedAsset := fmt.Sprintf("%v", parsedStoredAssets) - checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") - dataAssetsStored[i] = referencedAsset + err = os.Remove(filepath.Join(historyFolder, file.Name())) + if err != nil { + return err } - } - - var technicalAssetType model.TechnicalAssetType - switch asset.Type { - case model.ExternalEntity.String(): - technicalAssetType = model.ExternalEntity - case model.Process.String(): - technicalAssetType = model.Process - case model.Datastore.String(): - technicalAssetType = model.Datastore - default: - panic(errors.New("unknown 'type' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Type))) - } - - var technicalAssetSize model.TechnicalAssetSize - switch asset.Size { - case model.Service.String(): - technicalAssetSize = model.Service - case model.System.String(): - technicalAssetSize = model.System - case model.Application.String(): - technicalAssetSize = model.Application - case model.Component.String(): - technicalAssetSize = model.Component - default: - 
panic(errors.New("unknown 'size' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Size))) - } - - var technicalAssetTechnology model.TechnicalAssetTechnology - switch asset.Technology { - case model.UnknownTechnology.String(): - technicalAssetTechnology = model.UnknownTechnology - case model.ClientSystem.String(): - technicalAssetTechnology = model.ClientSystem - case model.Browser.String(): - technicalAssetTechnology = model.Browser - case model.Desktop.String(): - technicalAssetTechnology = model.Desktop - case model.MobileApp.String(): - technicalAssetTechnology = model.MobileApp - case model.DevOpsClient.String(): - technicalAssetTechnology = model.DevOpsClient - case model.WebServer.String(): - technicalAssetTechnology = model.WebServer - case model.WebApplication.String(): - technicalAssetTechnology = model.WebApplication - case model.ApplicationServer.String(): - technicalAssetTechnology = model.ApplicationServer - case model.Database.String(): - technicalAssetTechnology = model.Database - case model.FileServer.String(): - technicalAssetTechnology = model.FileServer - case model.LocalFileSystem.String(): - technicalAssetTechnology = model.LocalFileSystem - case model.ERP.String(): - technicalAssetTechnology = model.ERP - case model.CMS.String(): - technicalAssetTechnology = model.CMS - case model.WebServiceREST.String(): - technicalAssetTechnology = model.WebServiceREST - case model.WebServiceSOAP.String(): - technicalAssetTechnology = model.WebServiceSOAP - case model.EJB.String(): - technicalAssetTechnology = model.EJB - case model.SearchIndex.String(): - technicalAssetTechnology = model.SearchIndex - case model.SearchEngine.String(): - technicalAssetTechnology = model.SearchEngine - case model.ServiceRegistry.String(): - technicalAssetTechnology = model.ServiceRegistry - case model.ReverseProxy.String(): - technicalAssetTechnology = model.ReverseProxy - case model.LoadBalancer.String(): - technicalAssetTechnology = model.LoadBalancer 
- case model.BuildPipeline.String(): - technicalAssetTechnology = model.BuildPipeline - case model.SourcecodeRepository.String(): - technicalAssetTechnology = model.SourcecodeRepository - case model.ArtifactRegistry.String(): - technicalAssetTechnology = model.ArtifactRegistry - case model.CodeInspectionPlatform.String(): - technicalAssetTechnology = model.CodeInspectionPlatform - case model.Monitoring.String(): - technicalAssetTechnology = model.Monitoring - case model.LDAPServer.String(): - technicalAssetTechnology = model.LDAPServer - case model.ContainerPlatform.String(): - technicalAssetTechnology = model.ContainerPlatform - case model.BatchProcessing.String(): - technicalAssetTechnology = model.BatchProcessing - case model.EventListener.String(): - technicalAssetTechnology = model.EventListener - case model.IdentityProvider.String(): - technicalAssetTechnology = model.IdentityProvider - case model.IdentityStoreLDAP.String(): - technicalAssetTechnology = model.IdentityStoreLDAP - case model.IdentityStoreDatabase.String(): - technicalAssetTechnology = model.IdentityStoreDatabase - case model.Tool.String(): - technicalAssetTechnology = model.Tool - case model.CLI.String(): - technicalAssetTechnology = model.CLI - case model.Task.String(): - technicalAssetTechnology = model.Task - case model.Function.String(): - technicalAssetTechnology = model.Function - case model.Gateway.String(): - technicalAssetTechnology = model.Gateway - case model.IoTDevice.String(): - technicalAssetTechnology = model.IoTDevice - case model.MessageQueue.String(): - technicalAssetTechnology = model.MessageQueue - case model.StreamProcessing.String(): - technicalAssetTechnology = model.StreamProcessing - case model.ServiceMesh.String(): - technicalAssetTechnology = model.ServiceMesh - case model.DataLake.String(): - technicalAssetTechnology = model.DataLake - case model.BigDataPlatform.String(): - technicalAssetTechnology = model.BigDataPlatform - case model.ReportEngine.String(): - 
technicalAssetTechnology = model.ReportEngine - case model.AI.String(): - technicalAssetTechnology = model.AI - case model.MailServer.String(): - technicalAssetTechnology = model.MailServer - case model.Vault.String(): - technicalAssetTechnology = model.Vault - case model.HSM.String(): - technicalAssetTechnology = model.HSM - case model.WAF.String(): - technicalAssetTechnology = model.WAF - case model.IDS.String(): - technicalAssetTechnology = model.IDS - case model.IPS.String(): - technicalAssetTechnology = model.IPS - case model.Scheduler.String(): - technicalAssetTechnology = model.Scheduler - case model.Mainframe.String(): - technicalAssetTechnology = model.Mainframe - case model.BlockStorage.String(): - technicalAssetTechnology = model.BlockStorage - case model.Library.String(): - technicalAssetTechnology = model.Library - default: - panic(errors.New("unknown 'technology' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Technology))) - } - - var encryption model.EncryptionStyle - switch asset.Encryption { - case model.NoneEncryption.String(): - encryption = model.NoneEncryption - case model.Transparent.String(): - encryption = model.Transparent - case model.DataWithSymmetricSharedKey.String(): - encryption = model.DataWithSymmetricSharedKey - case model.DataWithAsymmetricSharedKey.String(): - encryption = model.DataWithAsymmetricSharedKey - case model.DataWithEndUserIndividualKey.String(): - encryption = model.DataWithEndUserIndividualKey - default: - panic(errors.New("unknown 'encryption' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Encryption))) - } - - var technicalAssetMachine model.TechnicalAssetMachine - switch asset.Machine { - case model.Physical.String(): - technicalAssetMachine = model.Physical - case model.Virtual.String(): - technicalAssetMachine = model.Virtual - case model.Container.String(): - technicalAssetMachine = model.Container - case model.Serverless.String(): - technicalAssetMachine = 
model.Serverless - default: - panic(errors.New("unknown 'machine' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Machine))) - } - - var confidentiality model.Confidentiality - switch asset.Confidentiality { - case model.Public.String(): - confidentiality = model.Public - case model.Internal.String(): - confidentiality = model.Internal - case model.Restricted.String(): - confidentiality = model.Restricted - case model.Confidential.String(): - confidentiality = model.Confidential - case model.StrictlyConfidential.String(): - confidentiality = model.StrictlyConfidential - default: - panic(errors.New("unknown 'confidentiality' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Confidentiality))) - } - - var integrity model.Criticality - switch asset.Integrity { - case model.Archive.String(): - integrity = model.Archive - case model.Operational.String(): - integrity = model.Operational - case model.Important.String(): - integrity = model.Important - case model.Critical.String(): - integrity = model.Critical - case model.MissionCritical.String(): - integrity = model.MissionCritical - default: - panic(errors.New("unknown 'integrity' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Integrity))) - } - - var availability model.Criticality - switch asset.Availability { - case model.Archive.String(): - availability = model.Archive - case model.Operational.String(): - availability = model.Operational - case model.Important.String(): - availability = model.Important - case model.Critical.String(): - availability = model.Critical - case model.MissionCritical.String(): - availability = model.MissionCritical - default: - panic(errors.New("unknown 'availability' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Availability))) - } - - dataFormatsAccepted := make([]model.DataFormat, 0) - if asset.DataFormatsAccepted != nil { - for _, dataFormatName := range asset.DataFormatsAccepted { - switch 
dataFormatName { - case model.JSON.String(): - dataFormatsAccepted = append(dataFormatsAccepted, model.JSON) - case model.XML.String(): - dataFormatsAccepted = append(dataFormatsAccepted, model.XML) - case model.Serialization.String(): - dataFormatsAccepted = append(dataFormatsAccepted, model.Serialization) - case model.File.String(): - dataFormatsAccepted = append(dataFormatsAccepted, model.File) - case model.CSV.String(): - dataFormatsAccepted = append(dataFormatsAccepted, model.CSV) - default: - panic(errors.New("unknown 'data_formats_accepted' value of technical asset '" + title + "': " + fmt.Sprintf("%v", dataFormatName))) - } + if requiredToDelete <= 0 { + break } } + } + return +} - communicationLinks := make([]model.CommunicationLink, 0) - if asset.CommunicationLinks != nil { - for commLinkTitle, commLink := range asset.CommunicationLinks { - constraint := true - weight := 1 - var protocol model.Protocol - var authentication model.Authentication - var authorization model.Authorization - var usage model.Usage - var dataAssetsSent []string - var dataAssetsReceived []string - - switch commLink.Authentication { - case model.NoneAuthentication.String(): - authentication = model.NoneAuthentication - case model.Credentials.String(): - authentication = model.Credentials - case model.SessionId.String(): - authentication = model.SessionId - case model.Token.String(): - authentication = model.Token - case model.ClientCertificate.String(): - authentication = model.ClientCertificate - case model.TwoFactor.String(): - authentication = model.TwoFactor - case model.Externalized.String(): - authentication = model.Externalized - default: - panic(errors.New("unknown 'authentication' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Authentication))) - } - - switch commLink.Authorization { - case model.NoneAuthorization.String(): - authorization = model.NoneAuthorization - case model.TechnicalUser.String(): - 
authorization = model.TechnicalUser - case model.EndUserIdentityPropagation.String(): - authorization = model.EndUserIdentityPropagation - default: - panic(errors.New("unknown 'authorization' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Authorization))) - } - - switch commLink.Usage { - case model.Business.String(): - usage = model.Business - case model.DevOps.String(): - usage = model.DevOps - default: - panic(errors.New("unknown 'usage' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Usage))) - } +func (context *Context) generateKeyFromAlreadyStrongRandomInput(alreadyRandomInput []byte) []byte { + // Establish the parameters to use for Argon2. + p := &argon2Params{ + memory: 64 * 1024, + iterations: 3, + parallelism: 2, + saltLength: 16, + keyLength: keySize, + } + // As the input is already cryptographically secure random, the salt is simply the first n bytes + salt := alreadyRandomInput[0:p.saltLength] + hash := argon2.IDKey(alreadyRandomInput[p.saltLength:], salt, p.iterations, p.memory, p.parallelism, p.keyLength) + return hash +} - switch commLink.Protocol { - case model.UnknownProtocol.String(): - protocol = model.UnknownProtocol - case model.HTTP.String(): - protocol = model.HTTP - case model.HTTPS.String(): - protocol = model.HTTPS - case model.WS.String(): - protocol = model.WS - case model.WSS.String(): - protocol = model.WSS - case model.MQTT.String(): - protocol = model.MQTT - case model.JDBC.String(): - protocol = model.JDBC - case model.JdbcEncrypted.String(): - protocol = model.JdbcEncrypted - case model.ODBC.String(): - protocol = model.ODBC - case model.OdbcEncrypted.String(): - protocol = model.OdbcEncrypted - case model.SqlAccessProtocol.String(): - protocol = model.SqlAccessProtocol - case model.SqlAccessProtocolEncrypted.String(): - protocol = model.SqlAccessProtocolEncrypted - case 
model.NosqlAccessProtocol.String(): - protocol = model.NosqlAccessProtocol - case model.NosqlAccessProtocolEncrypted.String(): - protocol = model.NosqlAccessProtocolEncrypted - case model.TEXT.String(): - protocol = model.TEXT - case model.TextEncrypted.String(): - protocol = model.TextEncrypted - case model.BINARY.String(): - protocol = model.BINARY - case model.BinaryEncrypted.String(): - protocol = model.BinaryEncrypted - case model.SSH.String(): - protocol = model.SSH - case model.SshTunnel.String(): - protocol = model.SshTunnel - case model.SMTP.String(): - protocol = model.SMTP - case model.SmtpEncrypted.String(): - protocol = model.SmtpEncrypted - case model.POP3.String(): - protocol = model.POP3 - case model.Pop3Encrypted.String(): - protocol = model.Pop3Encrypted - case model.IMAP.String(): - protocol = model.IMAP - case model.ImapEncrypted.String(): - protocol = model.ImapEncrypted - case model.FTP.String(): - protocol = model.FTP - case model.FTPS.String(): - protocol = model.FTPS - case model.SFTP.String(): - protocol = model.SFTP - case model.SCP.String(): - protocol = model.SCP - case model.LDAP.String(): - protocol = model.LDAP - case model.LDAPS.String(): - protocol = model.LDAPS - case model.JMS.String(): - protocol = model.JMS - case model.NFS.String(): - protocol = model.NFS - case model.SMB.String(): - protocol = model.SMB - case model.SmbEncrypted.String(): - protocol = model.SmbEncrypted - case model.LocalFileAccess.String(): - protocol = model.LocalFileAccess - case model.NRPE.String(): - protocol = model.NRPE - case model.XMPP.String(): - protocol = model.XMPP - case model.IIOP.String(): - protocol = model.IIOP - case model.IiopEncrypted.String(): - protocol = model.IiopEncrypted - case model.JRMP.String(): - protocol = model.JRMP - case model.JrmpEncrypted.String(): - protocol = model.JrmpEncrypted - case model.InProcessLibraryCall.String(): - protocol = model.InProcessLibraryCall - case model.ContainerSpawning.String(): - protocol = 
model.ContainerSpawning - default: - panic(errors.New("unknown 'protocol' of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Protocol))) - } +func (context *Context) folderNameForModel(folderNameOfKey string, uuid string) string { + return filepath.Join(folderNameOfKey, uuid) +} - if commLink.DataAssetsSent != nil { - for _, dataAssetSent := range commLink.DataAssetsSent { - referencedAsset := fmt.Sprintf("%v", dataAssetSent) - checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") - dataAssetsSent = append(dataAssetsSent, referencedAsset) - } - } +type argon2Params struct { + memory uint32 + iterations uint32 + parallelism uint8 + saltLength uint32 + keyLength uint32 +} - if commLink.DataAssetsReceived != nil { - for _, dataAssetReceived := range commLink.DataAssetsReceived { - referencedAsset := fmt.Sprintf("%v", dataAssetReceived) - checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") - dataAssetsReceived = append(dataAssetsReceived, referencedAsset) - } - } +var throttlerLock sync.Mutex - if commLink.DiagramTweakWeight > 0 { - weight = commLink.DiagramTweakWeight - } +var createdObjectsThrottler = make(map[string][]int64) - constraint = !commLink.DiagramTweakConstraint +func (context *Context) checkObjectCreationThrottler(ginContext *gin.Context, typeName string) bool { + throttlerLock.Lock() + defer throttlerLock.Unlock() - dataFlowTitle := fmt.Sprintf("%v", commLinkTitle) - commLink := model.CommunicationLink{ - Id: createDataFlowId(id, dataFlowTitle), - SourceId: id, - TargetId: commLink.Target, - Title: dataFlowTitle, - Description: withDefault(commLink.Description, dataFlowTitle), - Protocol: protocol, - Authentication: authentication, - Authorization: authorization, - Usage: usage, - Tags: checkTags(lowerCaseAndTrim(commLink.Tags), "communication link '"+commLinkTitle+"' of 
technical asset '"+title+"'"), - VPN: commLink.VPN, - IpFiltered: commLink.IpFiltered, - Readonly: commLink.Readonly, - DataAssetsSent: dataAssetsSent, - DataAssetsReceived: dataAssetsReceived, - DiagramTweakWeight: weight, - DiagramTweakConstraint: constraint, - } - communicationLinks = append(communicationLinks, commLink) - // track all comm links - model.CommunicationLinks[commLink.Id] = commLink - // keep track of map of *all* comm links mapped by target-id (to be able to look up "who is calling me" kind of things) - model.IncomingTechnicalCommunicationLinksMappedByTargetId[commLink.TargetId] = append( - model.IncomingTechnicalCommunicationLinksMappedByTargetId[commLink.TargetId], commLink) + // remove all elements older than 3 minutes (= 180000000000 ns) + now := time.Now().UnixNano() + cutoff := now - 180000000000 + for keyCheck := range createdObjectsThrottler { + for i := 0; i < len(createdObjectsThrottler[keyCheck]); i++ { + if createdObjectsThrottler[keyCheck][i] < cutoff { + // Remove the element at index i from slice (safe while looping using i as iterator) + createdObjectsThrottler[keyCheck] = append(createdObjectsThrottler[keyCheck][:i], createdObjectsThrottler[keyCheck][i+1:]...) 
+ i-- // Since we just deleted a[i], we must redo that index } } - - context.checkIdSyntax(id) - if _, exists := model.ParsedModelRoot.TechnicalAssets[id]; exists { - panic(errors.New("duplicate id used: " + id)) - } - model.ParsedModelRoot.TechnicalAssets[id] = model.TechnicalAsset{ - Id: id, - Usage: usage, - Title: title, //fmt.Sprintf("%v", asset["title"]), - Description: withDefault(fmt.Sprintf("%v", asset.Description), title), - Type: technicalAssetType, - Size: technicalAssetSize, - Technology: technicalAssetTechnology, - Tags: checkTags(lowerCaseAndTrim(asset.Tags), "technical asset '"+title+"'"), - Machine: technicalAssetMachine, - Internet: asset.Internet, - Encryption: encryption, - MultiTenant: asset.MultiTenant, - Redundant: asset.Redundant, - CustomDevelopedParts: asset.CustomDevelopedParts, - UsedAsClientByHuman: asset.UsedAsClientByHuman, - OutOfScope: asset.OutOfScope, - JustificationOutOfScope: fmt.Sprintf("%v", asset.JustificationOutOfScope), - Owner: fmt.Sprintf("%v", asset.Owner), - Confidentiality: confidentiality, - Integrity: integrity, - Availability: availability, - JustificationCiaRating: fmt.Sprintf("%v", asset.JustificationCiaRating), - DataAssetsProcessed: dataAssetsProcessed, - DataAssetsStored: dataAssetsStored, - DataFormatsAccepted: dataFormatsAccepted, - CommunicationLinks: communicationLinks, - DiagramTweakOrder: asset.DiagramTweakOrder, + length := len(createdObjectsThrottler[keyCheck]) + if length == 0 { + delete(createdObjectsThrottler, keyCheck) } + /* + if *verbose { + log.Println("Throttling count: "+strconv.Itoa(length)) + } + */ + } + + // check current request + keyHash := hash(typeName) // getting the real client ip is not easy inside fully encapsulated containerized runtime + if _, ok := createdObjectsThrottler[keyHash]; !ok { + createdObjectsThrottler[keyHash] = make([]int64, 0) + } + // check the limit of 20 creations for this type per 3 minutes + withinLimit := len(createdObjectsThrottler[keyHash]) < 20 + if 
withinLimit { + createdObjectsThrottler[keyHash] = append(createdObjectsThrottler[keyHash], now) + return true } + ginContext.JSON(http.StatusTooManyRequests, gin.H{ + "error": "object creation throttling exceeded (denial-of-service protection): please wait some time and try again", + }) + return false +} - // Trust Boundaries =============================================================================== - checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries := make(map[string]bool) - model.ParsedModelRoot.TrustBoundaries = make(map[string]model.TrustBoundary) - for title, boundary := range context.modelInput.TrustBoundaries { - id := fmt.Sprintf("%v", boundary.ID) +var locksByFolderName = make(map[string]*sync.Mutex) - var technicalAssetsInside = make([]string, 0) - if boundary.TechnicalAssetsInside != nil { - parsedInsideAssets := boundary.TechnicalAssetsInside - technicalAssetsInside = make([]string, len(parsedInsideAssets)) - for i, parsedInsideAsset := range parsedInsideAssets { - technicalAssetsInside[i] = fmt.Sprintf("%v", parsedInsideAsset) - _, found := model.ParsedModelRoot.TechnicalAssets[technicalAssetsInside[i]] - if !found { - panic(errors.New("missing referenced technical asset " + technicalAssetsInside[i] + " at trust boundary '" + title + "'")) - } - if checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries[technicalAssetsInside[i]] == true { - panic(errors.New("referenced technical asset " + technicalAssetsInside[i] + " at trust boundary '" + title + "' is modeled in multiple trust boundaries")) - } - checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries[technicalAssetsInside[i]] = true - //fmt.Println("asset "+technicalAssetsInside[i]+" at i="+strconv.Itoa(i)) - } - } +func (context *Context) lockFolder(folderName string) { + context.globalLock.Lock() + defer context.globalLock.Unlock() + _, exists := locksByFolderName[folderName] + if !exists { + locksByFolderName[folderName] = &sync.Mutex{} + } + 
locksByFolderName[folderName].Lock() +} - var trustBoundariesNested = make([]string, 0) - if boundary.TrustBoundariesNested != nil { - parsedNestedBoundaries := boundary.TrustBoundariesNested - trustBoundariesNested = make([]string, len(parsedNestedBoundaries)) - for i, parsedNestedBoundary := range parsedNestedBoundaries { - trustBoundariesNested[i] = fmt.Sprintf("%v", parsedNestedBoundary) - } - } +func (context *Context) unlockFolder(folderName string) { + if _, exists := locksByFolderName[folderName]; exists { + locksByFolderName[folderName].Unlock() + delete(locksByFolderName, folderName) + } +} - var trustBoundaryType model.TrustBoundaryType - switch boundary.Type { - case model.NetworkOnPrem.String(): - trustBoundaryType = model.NetworkOnPrem - case model.NetworkDedicatedHoster.String(): - trustBoundaryType = model.NetworkDedicatedHoster - case model.NetworkVirtualLAN.String(): - trustBoundaryType = model.NetworkVirtualLAN - case model.NetworkCloudProvider.String(): - trustBoundaryType = model.NetworkCloudProvider - case model.NetworkCloudSecurityGroup.String(): - trustBoundaryType = model.NetworkCloudSecurityGroup - case model.NetworkPolicyNamespaceIsolation.String(): - trustBoundaryType = model.NetworkPolicyNamespaceIsolation - case model.ExecutionEnvironment.String(): - trustBoundaryType = model.ExecutionEnvironment - default: - panic(errors.New("unknown 'type' of trust boundary '" + title + "': " + fmt.Sprintf("%v", boundary.Type))) - } +func (context *Context) folderNameFromKey(key []byte) string { + sha512Hash := hashSHA256(key) + return filepath.Join(*context.serverFolder, context.keyDir, sha512Hash) +} - trustBoundary := model.TrustBoundary{ - Id: id, - Title: title, //fmt.Sprintf("%v", boundary["title"]), - Description: withDefault(fmt.Sprintf("%v", boundary.Description), title), - Type: trustBoundaryType, - Tags: checkTags(lowerCaseAndTrim(boundary.Tags), "trust boundary '"+title+"'"), - TechnicalAssetsInside: technicalAssetsInside, - 
TrustBoundariesNested: trustBoundariesNested, - } - context.checkIdSyntax(id) - if _, exists := model.ParsedModelRoot.TrustBoundaries[id]; exists { - panic(errors.New("duplicate id used: " + id)) +func hashSHA256(key []byte) string { + hasher := sha512.New() + hasher.Write(key) + return hex.EncodeToString(hasher.Sum(nil)) +} + +func (context *Context) createKey(ginContext *gin.Context) { + ok := context.checkObjectCreationThrottler(ginContext, "KEY") + if !ok { + return + } + context.globalLock.Lock() + defer context.globalLock.Unlock() + + keyBytesArr := make([]byte, keySize) + n, err := rand.Read(keyBytesArr[:]) + if n != keySize || err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to create key", + }) + return + } + err = os.MkdirAll(context.folderNameFromKey(keyBytesArr), 0700) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to create key", + }) + return + } + ginContext.JSON(http.StatusCreated, gin.H{ + "key": base64.RawURLEncoding.EncodeToString(keyBytesArr[:]), + }) +} + +func (context *Context) checkTokenToFolderName(ginContext *gin.Context) (folderNameOfKey string, key []byte, ok bool) { + header := tokenHeader{} + if err := ginContext.ShouldBindHeader(&header); err != nil { + log.Println(err) + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "token not found", + }) + return folderNameOfKey, key, false + } + token, err := base64.RawURLEncoding.DecodeString(strings.TrimSpace(header.Token)) + if len(token) == 0 || err != nil { + if err != nil { + log.Println(err) } - model.ParsedModelRoot.TrustBoundaries[id] = trustBoundary - for _, technicalAsset := range trustBoundary.TechnicalAssetsInside { - model.DirectContainingTrustBoundaryMappedByTechnicalAssetId[technicalAsset] = trustBoundary - //fmt.Println("Asset "+technicalAsset+" is directly in trust boundary "+trustBoundary.Id) + ginContext.JSON(http.StatusNotFound, gin.H{ + 
"error": "token not found", + }) + return folderNameOfKey, key, false + } + context.globalLock.Lock() + defer context.globalLock.Unlock() + housekeepingTokenMaps() // to remove timed-out ones + tokenHash := hashSHA256(token) + if timeoutStruct, exists := mapTokenHashToTimeoutStruct[tokenHash]; exists { + // re-create the key from token + key := xor(token, timeoutStruct.xorRand) + folderNameOfKey := context.folderNameFromKey(key) + if _, err := os.Stat(folderNameOfKey); os.IsNotExist(err) { + log.Println(err) + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "token not found", + }) + return folderNameOfKey, key, false } + timeoutStruct.lastAccessedNanoTime = time.Now().UnixNano() + return folderNameOfKey, key, true + } else { + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "token not found", + }) + return folderNameOfKey, key, false } - checkNestedTrustBoundariesExisting() +} - // Shared Runtime =============================================================================== - model.ParsedModelRoot.SharedRuntimes = make(map[string]model.SharedRuntime) - for title, inputRuntime := range context.modelInput.SharedRuntimes { - id := fmt.Sprintf("%v", inputRuntime.ID) +func xor(key []byte, xor []byte) []byte { + if len(key) != len(xor) { + panic(errors.New("key length not matching XOR length")) + } + result := make([]byte, len(xor)) + for i, b := range key { + result[i] = b ^ xor[i] + } + return result +} - var technicalAssetsRunning = make([]string, 0) - if inputRuntime.TechnicalAssetsRunning != nil { - parsedRunningAssets := inputRuntime.TechnicalAssetsRunning - technicalAssetsRunning = make([]string, len(parsedRunningAssets)) - for i, parsedRunningAsset := range parsedRunningAssets { - assetId := fmt.Sprintf("%v", parsedRunningAsset) - checkTechnicalAssetExists(assetId, "shared runtime '"+title+"'", false) - technicalAssetsRunning[i] = assetId +type timeoutStruct struct { + xorRand []byte + createdNanoTime, lastAccessedNanoTime int64 +} + +var 
mapTokenHashToTimeoutStruct = make(map[string]timeoutStruct) + +const extremeShortTimeoutsForTesting = false + +func housekeepingTokenMaps() { + now := time.Now().UnixNano() + for tokenHash, val := range mapTokenHashToTimeoutStruct { + if extremeShortTimeoutsForTesting { + // remove all elements older than 1 minute (= 60000000000 ns) soft + // and all elements older than 3 minutes (= 180000000000 ns) hard + if now-val.lastAccessedNanoTime > 60000000000 || now-val.createdNanoTime > 180000000000 { + fmt.Println("About to remove a token hash from maps") + deleteTokenHashFromMaps(tokenHash) + } + } else { + // remove all elements older than 30 minutes (= 1800000000000 ns) soft + // and all elements older than 10 hours (= 36000000000000 ns) hard + if now-val.lastAccessedNanoTime > 1800000000000 || now-val.createdNanoTime > 36000000000000 { + deleteTokenHashFromMaps(tokenHash) } } + } +} - sharedRuntime := model.SharedRuntime{ - Id: id, - Title: title, //fmt.Sprintf("%v", boundary["title"]), - Description: withDefault(fmt.Sprintf("%v", inputRuntime.Description), title), - Tags: checkTags(inputRuntime.Tags, "shared runtime '"+title+"'"), - TechnicalAssetsRunning: technicalAssetsRunning, - } - context.checkIdSyntax(id) - if _, exists := model.ParsedModelRoot.SharedRuntimes[id]; exists { - panic(errors.New("duplicate id used: " + id)) - } - model.ParsedModelRoot.SharedRuntimes[id] = sharedRuntime - for _, technicalAssetId := range sharedRuntime.TechnicalAssetsRunning { - model.DirectContainingSharedRuntimeMappedByTechnicalAssetId[technicalAssetId] = sharedRuntime +func deleteTokenHashFromMaps(tokenHash string) { + delete(mapTokenHashToTimeoutStruct, tokenHash) + for folderName, check := range mapFolderNameToTokenHash { + if check == tokenHash { + delete(mapFolderNameToTokenHash, folderName) + break } } +} - // Individual Risk Categories (just used as regular risk categories) =============================================================================== - 
model.ParsedModelRoot.IndividualRiskCategories = make(map[string]model.RiskCategory) - for title, individualCategory := range context.modelInput.IndividualRiskCategories { - id := fmt.Sprintf("%v", individualCategory.ID) +type keyHeader struct { + Key string `header:"key"` +} - var function model.RiskFunction - switch individualCategory.Function { - case model.BusinessSide.String(): - function = model.BusinessSide - case model.Architecture.String(): - function = model.Architecture - case model.Development.String(): - function = model.Development - case model.Operations.String(): - function = model.Operations - default: - panic(errors.New("unknown 'function' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.Function))) +func (context *Context) checkKeyToFolderName(ginContext *gin.Context) (folderNameOfKey string, key []byte, ok bool) { + header := keyHeader{} + if err := ginContext.ShouldBindHeader(&header); err != nil { + log.Println(err) + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "key not found", + }) + return folderNameOfKey, key, false + } + key, err := base64.RawURLEncoding.DecodeString(strings.TrimSpace(header.Key)) + if len(key) == 0 || err != nil { + if err != nil { + log.Println(err) } + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "key not found", + }) + return folderNameOfKey, key, false + } + folderNameOfKey = context.folderNameFromKey(key) + if _, err := os.Stat(folderNameOfKey); os.IsNotExist(err) { + log.Println(err) + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "key not found", + }) + return folderNameOfKey, key, false + } + return folderNameOfKey, key, true +} + +func (context *Context) deleteKey(ginContext *gin.Context) { + folderName, _, ok := context.checkKeyToFolderName(ginContext) + if !ok { + return + } + context.globalLock.Lock() + defer context.globalLock.Unlock() + err := os.RemoveAll(folderName) + if err != nil { + log.Println("error during key delete: " + 
err.Error()) + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "key not found", + }) + return + } + ginContext.JSON(http.StatusOK, gin.H{ + "message": "key deleted", + }) +} - var stride model.STRIDE - switch individualCategory.STRIDE { - case model.Spoofing.String(): - stride = model.Spoofing - case model.Tampering.String(): - stride = model.Tampering - case model.Repudiation.String(): - stride = model.Repudiation - case model.InformationDisclosure.String(): - stride = model.InformationDisclosure - case model.DenialOfService.String(): - stride = model.DenialOfService - case model.ElevationOfPrivilege.String(): - stride = model.ElevationOfPrivilege - default: - panic(errors.New("unknown 'stride' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.STRIDE))) +func (context *Context) userHomeDir() string { + switch runtime.GOOS { + case "windows": + home := os.Getenv("HOMEDRIVE") + os.Getenv("HOMEPATH") + if home == "" { + home = os.Getenv("USERPROFILE") } + return home - cat := model.RiskCategory{ - Id: id, - Title: title, - Description: withDefault(fmt.Sprintf("%v", individualCategory.Description), title), - Impact: fmt.Sprintf("%v", individualCategory.Impact), - ASVS: fmt.Sprintf("%v", individualCategory.ASVS), - CheatSheet: fmt.Sprintf("%v", individualCategory.CheatSheet), - Action: fmt.Sprintf("%v", individualCategory.Action), - Mitigation: fmt.Sprintf("%v", individualCategory.Mitigation), - Check: fmt.Sprintf("%v", individualCategory.Check), - DetectionLogic: fmt.Sprintf("%v", individualCategory.DetectionLogic), - RiskAssessment: fmt.Sprintf("%v", individualCategory.RiskAssessment), - FalsePositives: fmt.Sprintf("%v", individualCategory.FalsePositives), - Function: function, - STRIDE: stride, - ModelFailurePossibleReason: individualCategory.ModelFailurePossibleReason, - CWE: individualCategory.CWE, - } - context.checkIdSyntax(id) - if _, exists := model.ParsedModelRoot.IndividualRiskCategories[id]; exists { - 
panic(errors.New("duplicate id used: " + id)) - } - model.ParsedModelRoot.IndividualRiskCategories[id] = cat + default: + return os.Getenv("HOME") + } +} - // NOW THE INDIVIDUAL RISK INSTANCES: - //individualRiskInstances := make([]model.Risk, 0) - if individualCategory.RisksIdentified != nil { // TODO: also add syntax checks of input YAML when linked asset is not found or when synthetic-id is already used... - for title, individualRiskInstance := range individualCategory.RisksIdentified { - var severity model.RiskSeverity - var exploitationLikelihood model.RiskExploitationLikelihood - var exploitationImpact model.RiskExploitationImpact - var mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId string - var dataBreachProbability model.DataBreachProbability - var dataBreachTechnicalAssetIDs []string +func (context *Context) expandPath(path string) *string { + home := context.userHomeDir() + if strings.HasPrefix(path, "~") { + path = strings.Replace(path, "~", home, 1) + } - switch individualRiskInstance.Severity { - case model.LowSeverity.String(): - severity = model.LowSeverity - case model.MediumSeverity.String(): - severity = model.MediumSeverity - case model.ElevatedSeverity.String(): - severity = model.ElevatedSeverity - case model.HighSeverity.String(): - severity = model.HighSeverity - case model.CriticalSeverity.String(): - severity = model.CriticalSeverity - case "": // added default - severity = model.MediumSeverity - default: - panic(errors.New("unknown 'severity' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.Severity))) - } + if strings.HasPrefix(path, "$HOME") { + path = strings.Replace(path, "$HOME", home, -1) + } - switch individualRiskInstance.ExploitationLikelihood { - case model.Unlikely.String(): - exploitationLikelihood = model.Unlikely - case model.Likely.String(): - exploitationLikelihood = model.Likely - 
case model.VeryLikely.String(): - exploitationLikelihood = model.VeryLikely - case model.Frequent.String(): - exploitationLikelihood = model.Frequent - case "": // added default - exploitationLikelihood = model.Likely - default: - panic(errors.New("unknown 'exploitation_likelihood' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationLikelihood))) - } + return &path +} - switch individualRiskInstance.ExploitationImpact { - case model.LowImpact.String(): - exploitationImpact = model.LowImpact - case model.MediumImpact.String(): - exploitationImpact = model.MediumImpact - case model.HighImpact.String(): - exploitationImpact = model.HighImpact - case model.VeryHighImpact.String(): - exploitationImpact = model.VeryHighImpact - case "": // added default - exploitationImpact = model.MediumImpact - default: - panic(errors.New("unknown 'exploitation_impact' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationImpact))) - } +func (context *Context) ParseCommandlineArgs() { // folders + context.appFolder = flag.String("app-dir", appDir, "app folder (default: "+appDir+")") + context.serverFolder = flag.String("server-dir", dataDir, "base folder for server mode (default: "+dataDir+")") + context.tempFolder = flag.String("temp-dir", tempDir, "temporary folder location") + context.binFolder = flag.String("bin-dir", binDir, "binary folder location") + context.outputDir = flag.String("output", ".", "output directory") - if len(individualRiskInstance.MostRelevantDataAsset) > 0 { - mostRelevantDataAssetId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantDataAsset) - checkDataAssetTargetExists(mostRelevantDataAssetId, "individual risk '"+title+"'") - } + // files + context.modelFilename = flag.String("model", inputFile, "input model yaml file") + context.raaPlugin = flag.String("raa-run", "raa_calc", "RAA calculation run file name") - if 
len(individualRiskInstance.MostRelevantTechnicalAsset) > 0 { - mostRelevantTechnicalAssetId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantTechnicalAsset) - checkTechnicalAssetExists(mostRelevantTechnicalAssetId, "individual risk '"+title+"'", false) - } + // flags + context.verbose = flag.Bool("verbose", false, "verbose output") + context.diagramDPI = flag.Int("diagram-dpi", defaultGraphvizDPI, "DPI used to render: maximum is "+strconv.Itoa(maxGraphvizDPI)+"") + context.skipRiskRules = flag.String("skip-risk-rules", "", "comma-separated list of risk rules (by their ID) to skip") + context.riskRulesPlugins = flag.String("custom-risk-rules-plugins", "", "comma-separated list of plugins file names with custom risk rules to load") + context.ignoreOrphanedRiskTracking = flag.Bool("ignore-orphaned-risk-tracking", false, "ignore orphaned risk tracking (just log them) not matching a concrete risk") - if len(individualRiskInstance.MostRelevantCommunicationLink) > 0 { - mostRelevantCommunicationLinkId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantCommunicationLink) - checkCommunicationLinkExists(mostRelevantCommunicationLinkId, "individual risk '"+title+"'") - } + // commands + context.serverPort = flag.Int("server", 0, "start a server (instead of commandline execution) on the given port") + context.executeModelMacro = flag.String("execute-model-macro", "", "Execute model macro (by ID)") + context.createExampleModel = flag.Bool("create-example-model", false, "just create an example model named threagile-example-model.yaml in the output directory") + context.createStubModel = flag.Bool("create-stub-model", false, "just create a minimal stub model named threagile-stub-model.yaml in the output directory") + context.createEditingSupport = flag.Bool("create-editing-support", false, "just create some editing support stuff in the output directory") + context.templateFilename = flag.String("background", "background.pdf", "background pdf file") + 
context.generateDataFlowDiagram = flag.Bool("generate-data-flow-diagram", true, "generate data-flow diagram") + context.generateDataAssetDiagram = flag.Bool("generate-data-asset-diagram", true, "generate data asset diagram") + context.generateRisksExcel = flag.Bool("generate-risks-excel", true, "generate risks excel") + context.generateTagsExcel = flag.Bool("generate-tags-excel", true, "generate tags excel") + context.generateReportPDF = flag.Bool("generate-report-pdf", true, "generate report pdf, including diagrams") - if len(individualRiskInstance.MostRelevantTrustBoundary) > 0 { - mostRelevantTrustBoundaryId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantTrustBoundary) - checkTrustBoundaryExists(mostRelevantTrustBoundaryId, "individual risk '"+title+"'") - } + // more commands + print3rdParty := flag.Bool("print-3rd-party-licenses", false, "print 3rd-party license information") + license := flag.Bool("print-license", false, "print license information") - if len(individualRiskInstance.MostRelevantSharedRuntime) > 0 { - mostRelevantSharedRuntimeId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantSharedRuntime) - checkSharedRuntimeExists(mostRelevantSharedRuntimeId, "individual risk '"+title+"'") - } + flag.Usage = func() { + fmt.Println(docs.Logo + "\n\n" + docs.VersionText) + _, _ = fmt.Fprintf(os.Stderr, "Usage: threagile [options]") + fmt.Println() + } + flag.Parse() - switch individualRiskInstance.DataBreachProbability { - case model.Improbable.String(): - dataBreachProbability = model.Improbable - case model.Possible.String(): - dataBreachProbability = model.Possible - case model.Probable.String(): - dataBreachProbability = model.Probable - case "": // added default - dataBreachProbability = model.Possible - default: - panic(errors.New("unknown 'data_breach_probability' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.DataBreachProbability))) - } + context.modelFilename = 
context.expandPath(*context.modelFilename) + context.appFolder = context.expandPath(*context.appFolder) + context.serverFolder = context.expandPath(*context.serverFolder) + context.tempFolder = context.expandPath(*context.tempFolder) + context.binFolder = context.expandPath(*context.binFolder) + context.outputDir = context.expandPath(*context.outputDir) - if individualRiskInstance.DataBreachTechnicalAssets != nil { - dataBreachTechnicalAssetIDs = make([]string, len(individualRiskInstance.DataBreachTechnicalAssets)) - for i, parsedReferencedAsset := range individualRiskInstance.DataBreachTechnicalAssets { - assetId := fmt.Sprintf("%v", parsedReferencedAsset) - checkTechnicalAssetExists(assetId, "data breach technical assets of individual risk '"+title+"'", false) - dataBreachTechnicalAssetIDs[i] = assetId - } - } + if *context.diagramDPI < 20 { + *context.diagramDPI = 20 + } else if *context.diagramDPI > context.maxGraphvizDPI { + *context.diagramDPI = 300 + } - individualRiskInstance := model.Risk{ - SyntheticId: createSyntheticId(cat.Id, mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId), - Title: fmt.Sprintf("%v", title), - Category: cat, - Severity: severity, - ExploitationLikelihood: exploitationLikelihood, - ExploitationImpact: exploitationImpact, - MostRelevantDataAssetId: mostRelevantDataAssetId, - MostRelevantTechnicalAssetId: mostRelevantTechnicalAssetId, - MostRelevantCommunicationLinkId: mostRelevantCommunicationLinkId, - MostRelevantTrustBoundaryId: mostRelevantTrustBoundaryId, - MostRelevantSharedRuntimeId: mostRelevantSharedRuntimeId, - DataBreachProbability: dataBreachProbability, - DataBreachTechnicalAssetIDs: dataBreachTechnicalAssetIDs, - } - model.GeneratedRisksByCategory[cat] = append(model.GeneratedRisksByCategory[cat], individualRiskInstance) - } - } + context.progressReporter = SilentProgressReporter{} + if *context.verbose { + 
context.progressReporter = CommandLineProgressReporter{} } - // Risk Tracking =============================================================================== - model.ParsedModelRoot.RiskTracking = make(map[string]model.RiskTracking) - for syntheticRiskId, riskTracking := range context.modelInput.RiskTracking { - justification := fmt.Sprintf("%v", riskTracking.Justification) - checkedBy := fmt.Sprintf("%v", riskTracking.CheckedBy) - ticket := fmt.Sprintf("%v", riskTracking.Ticket) - var date time.Time - if len(riskTracking.Date) > 0 { - var parseError error - date, parseError = time.Parse("2006-01-02", riskTracking.Date) - if parseError != nil { - panic(errors.New("unable to parse 'date' of risk tracking '" + syntheticRiskId + "': " + riskTracking.Date)) - } + if *print3rdParty { + fmt.Println(docs.Logo + "\n\n" + docs.VersionText) + fmt.Println("Kudos & Credits to the following open-source projects:") + fmt.Println(" - golang (Google Go License): https://golang.org/LICENSE") + fmt.Println(" - go-yaml (MIT License): https://github.com/go-yaml/yaml/blob/v3/LICENSE") + fmt.Println(" - graphviz (CPL License): https://graphviz.gitlab.io/license/") + fmt.Println(" - gofpdf (MIT License): https://github.com/jung-kurt/gofpdf/blob/master/LICENSE") + fmt.Println(" - go-chart (MIT License): https://github.com/wcharczuk/go-chart/blob/master/LICENSE") + fmt.Println(" - excelize (BSD License): https://github.com/qax-os/excelize/blob/master/LICENSE") + fmt.Println(" - graphics-go (BSD License): https://github.com/BurntSushi/graphics-go/blob/master/LICENSE") + fmt.Println(" - google-uuid (BSD License): https://github.com/google/uuid/blob/master/LICENSE") + fmt.Println(" - gin-gonic (MIT License): https://github.com/gin-gonic/gin/blob/master/LICENSE") + fmt.Println(" - swagger-ui (Apache License): https://swagger.io/license/") + fmt.Println() + os.Exit(0) + } + if *license { + fmt.Println(docs.Logo + "\n\n" + docs.VersionText) + if *context.appFolder != 
filepath.Clean(*context.appFolder) { + log.Fatalf("weird app folder %v", *context.appFolder) } - - var status model.RiskStatus - switch riskTracking.Status { - case model.Unchecked.String(): - status = model.Unchecked - case model.Mitigated.String(): - status = model.Mitigated - case model.InProgress.String(): - status = model.InProgress - case model.Accepted.String(): - status = model.Accepted - case model.InDiscussion.String(): - status = model.InDiscussion - case model.FalsePositive.String(): - status = model.FalsePositive - default: - panic(errors.New("unknown 'status' value of risk tracking '" + syntheticRiskId + "': " + riskTracking.Status)) + content, err := os.ReadFile(filepath.Join(*context.appFolder, "LICENSE.txt")) + checkErr(err) + fmt.Print(string(content)) + fmt.Println() + os.Exit(0) + } + if *context.createExampleModel { + exampleError := context.createExampleModelFile() + if exampleError != nil { + log.Fatalf("Unable to copy example model: %v", exampleError) + return } - - tracking := model.RiskTracking{ - SyntheticRiskId: strings.TrimSpace(syntheticRiskId), - Justification: justification, - CheckedBy: checkedBy, - Ticket: ticket, - Date: date, - Status: status, + fmt.Println(docs.Logo + "\n\n" + docs.VersionText) + fmt.Println("An example model was created named threagile-example-model.yaml in the output directory.") + fmt.Println() + fmt.Println(docs.Examples) + fmt.Println() + os.Exit(0) + } + if *context.createStubModel { + stubError := context.createStubModelFile() + if stubError != nil { + log.Fatalf("Unable to copy stub model: %v", stubError) + return } - if strings.Contains(syntheticRiskId, "*") { // contains a wildcard char - context.deferredRiskTrackingDueToWildcardMatching[syntheticRiskId] = tracking - } else { - model.ParsedModelRoot.RiskTracking[syntheticRiskId] = tracking + fmt.Println(docs.Logo + "\n\n" + docs.VersionText) + fmt.Println("A minimal stub model was created named threagile-stub-model.yaml in the output directory.") + 
fmt.Println() + fmt.Println(docs.Examples) + fmt.Println() + os.Exit(0) + } + if *context.createEditingSupport { + supportError := context.createEditingSupportFiles() + if supportError != nil { + log.Fatalf("Unable to copy editing support files: %v", supportError) + return } + fmt.Println(docs.Logo + "\n\n" + docs.VersionText) + fmt.Println("The following files were created in the output directory:") + fmt.Println(" - schema.json") + fmt.Println(" - live-templates.txt") + fmt.Println() + fmt.Println("For a perfect editing experience within your IDE of choice you can easily get " + + "model syntax validation and autocompletion (very handy for enum values) as well as live templates: " + + "Just import the schema.json into your IDE and assign it as \"schema\" to each Threagile YAML file. " + + "Also try to import individual parts from the live-templates.txt file into your IDE as live editing templates.") + fmt.Println() + os.Exit(0) + } + + context.ServerMode = (*context.serverPort > 0) +} + +func (context *Context) createExampleModelFile() error { + _, err := copyFile(filepath.Join(*context.appFolder, "threagile-example-model.yaml"), filepath.Join(*context.outputDir, "threagile-example-model.yaml")) + if err == nil { + return nil } - // ====================== model consistency check (linking) - for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets { - for _, commLink := range technicalAsset.CommunicationLinks { - checkTechnicalAssetExists(commLink.TargetId, "communication link '"+commLink.Title+"' of technical asset '"+technicalAsset.Title+"'", false) - } + _, altError := copyFile(filepath.Join(*context.appFolder, "threagile.yaml"), filepath.Join(*context.outputDir, "threagile-example-model.yaml")) + if altError != nil { + return err } + + return nil } -func lowerCaseAndTrim(tags []string) []string { - for i := range tags { - tags[i] = strings.ToLower(strings.TrimSpace(tags[i])) +func (context *Context) createStubModelFile() error { + _, err := 
copyFile(filepath.Join(*context.appFolder, "threagile-stub-model.yaml"), filepath.Join(*context.outputDir, "threagile-stub-model.yaml")) + if err == nil { + return nil } - return tags + + _, altError := copyFile(filepath.Join(*context.appFolder, "threagile.yaml"), filepath.Join(*context.outputDir, "threagile-stub-model.yaml")) + if altError != nil { + return err + } + + return nil } -func checkTags(tags []string, where string) []string { - var tagsUsed = make([]string, 0) - if tags != nil { - tagsUsed = make([]string, len(tags)) - for i, parsedEntry := range tags { - referencedTag := fmt.Sprintf("%v", parsedEntry) - checkTagExists(referencedTag, where) - tagsUsed[i] = referencedTag - } +func (context *Context) createEditingSupportFiles() error { + _, schemaError := copyFile(filepath.Join(*context.appFolder, "schema.json"), filepath.Join(*context.outputDir, "schema.json")) + if schemaError != nil { + return schemaError } - return tagsUsed + + _, templateError := copyFile(filepath.Join(*context.appFolder, "live-templates.txt"), filepath.Join(*context.outputDir, "live-templates.txt")) + return templateError } -// in order to prevent Path-Traversal like stuff... 
-func removePathElementsFromImageFiles(overview model.Overview) model.Overview { - for i := range overview.Images { - newValue := make(map[string]string) - for file, desc := range overview.Images[i] { - newValue[filepath.Base(file)] = desc - } - overview.Images[i] = newValue +func copyFile(src, dst string) (int64, error) { + sourceFileStat, err := os.Stat(src) + if err != nil { + return 0, err } - return overview + + if !sourceFileStat.Mode().IsRegular() { + return 0, fmt.Errorf("%s is not a regular file", src) + } + + source, err := os.Open(src) + if err != nil { + return 0, err + } + defer func() { _ = source.Close() }() + + destination, err := os.Create(dst) + if err != nil { + return 0, err + } + defer func() { _ = destination.Close() }() + nBytes, err := io.Copy(destination, source) + return nBytes, err } func (context *Context) applyWildcardRiskTrackingEvaluation() { @@ -4852,10 +4701,10 @@ func (context *Context) applyWildcardRiskTrackingEvaluation() { foundSome := false var matchingRiskIdExpression = regexp.MustCompile(strings.ReplaceAll(regexp.QuoteMeta(syntheticRiskIdPattern), `\*`, `[^@]+`)) - for syntheticRiskId := range model.GeneratedRisksBySyntheticId { - if matchingRiskIdExpression.Match([]byte(syntheticRiskId)) && hasNotYetAnyDirectNonWildcardRiskTracking(syntheticRiskId) { + for syntheticRiskId := range context.parsedModel.GeneratedRisksBySyntheticId { + if matchingRiskIdExpression.Match([]byte(syntheticRiskId)) && context.hasNotYetAnyDirectNonWildcardRiskTracking(syntheticRiskId) { foundSome = true - model.ParsedModelRoot.RiskTracking[syntheticRiskId] = model.RiskTracking{ + context.parsedModel.RiskTracking[syntheticRiskId] = model.RiskTracking{ SyntheticRiskId: strings.TrimSpace(syntheticRiskId), Justification: riskTracking.Justification, CheckedBy: riskTracking.CheckedBy, @@ -4876,104 +4725,13 @@ func (context *Context) applyWildcardRiskTrackingEvaluation() { } } -func hasNotYetAnyDirectNonWildcardRiskTracking(syntheticRiskId string) bool { - 
if _, ok := model.ParsedModelRoot.RiskTracking[syntheticRiskId]; ok { +func (context *Context) hasNotYetAnyDirectNonWildcardRiskTracking(syntheticRiskId string) bool { + if _, ok := context.parsedModel.RiskTracking[syntheticRiskId]; ok { return false } return true } -func withDefault(value string, defaultWhenEmpty string) string { - trimmed := strings.TrimSpace(value) - if len(trimmed) > 0 && trimmed != "" { - return trimmed - } - return strings.TrimSpace(defaultWhenEmpty) -} - -func createDataFlowId(sourceAssetId, title string) string { - reg, err := regexp.Compile("[^A-Za-z0-9]+") - checkErr(err) - return sourceAssetId + ">" + strings.Trim(reg.ReplaceAllString(strings.ToLower(title), "-"), "- ") -} - -func createSyntheticId(categoryId string, - mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId string) string { - result := categoryId - if len(mostRelevantTechnicalAssetId) > 0 { - result += "@" + mostRelevantTechnicalAssetId - } - if len(mostRelevantCommunicationLinkId) > 0 { - result += "@" + mostRelevantCommunicationLinkId - } - if len(mostRelevantTrustBoundaryId) > 0 { - result += "@" + mostRelevantTrustBoundaryId - } - if len(mostRelevantSharedRuntimeId) > 0 { - result += "@" + mostRelevantSharedRuntimeId - } - if len(mostRelevantDataAssetId) > 0 { - result += "@" + mostRelevantDataAssetId - } - return result -} - -func checkTagExists(referencedTag, where string) { - if !model.Contains(model.ParsedModelRoot.TagsAvailable, referencedTag) { - panic(errors.New("missing referenced tag in overall tag list at " + where + ": " + referencedTag)) - } -} - -func checkDataAssetTargetExists(referencedAsset, where string) { - if _, ok := model.ParsedModelRoot.DataAssets[referencedAsset]; !ok { - panic(errors.New("missing referenced data asset target at " + where + ": " + referencedAsset)) - } -} - -func checkTrustBoundaryExists(referencedId, where string) { - if _, ok := 
model.ParsedModelRoot.TrustBoundaries[referencedId]; !ok { - panic(errors.New("missing referenced trust boundary at " + where + ": " + referencedId)) - } -} - -func checkSharedRuntimeExists(referencedId, where string) { - if _, ok := model.ParsedModelRoot.SharedRuntimes[referencedId]; !ok { - panic(errors.New("missing referenced shared runtime at " + where + ": " + referencedId)) - } -} - -func checkCommunicationLinkExists(referencedId, where string) { - if _, ok := model.CommunicationLinks[referencedId]; !ok { - panic(errors.New("missing referenced communication link at " + where + ": " + referencedId)) - } -} - -func checkTechnicalAssetExists(referencedAsset, where string, onlyForTweak bool) { - if _, ok := model.ParsedModelRoot.TechnicalAssets[referencedAsset]; !ok { - suffix := "" - if onlyForTweak { - suffix = " (only referenced in diagram tweak)" - } - panic(errors.New("missing referenced technical asset target" + suffix + " at " + where + ": " + referencedAsset)) - } -} - -func checkNestedTrustBoundariesExisting() { - for _, trustBoundary := range model.ParsedModelRoot.TrustBoundaries { - for _, nestedId := range trustBoundary.TrustBoundariesNested { - if _, ok := model.ParsedModelRoot.TrustBoundaries[nestedId]; !ok { - panic(errors.New("missing referenced nested trust boundary: " + nestedId)) - } - } - } -} - -func hash(s string) string { - h := fnv.New32a() - _, _ = h.Write([]byte(s)) - return fmt.Sprintf("%v", h.Sum32()) -} - func (context *Context) writeDataAssetDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.File { if *context.verbose { fmt.Println("Writing data asset diagram input") @@ -4982,142 +4740,19 @@ func (context *Context) writeDataAssetDiagramGraphvizDOT(diagramFilenameDOT stri dotContent.WriteString("digraph generatedModel { concentrate=true \n") // Metadata init =============================================================================== - dotContent.WriteString(` graph [ - dpi=` + strconv.Itoa(dpi) + ` - fontname="Verdana" - 
labelloc="c" - fontsize="20" - splines=false - rankdir="LR" - nodesep=1.0 - ranksep=3.0 - outputorder="nodesfirst" - ]; - node [ - fontcolor="white" - fontname="Verdana" - fontsize="20" - ]; - edge [ - shape="none" - fontname="Verdana" - fontsize="18" - ]; -`) - - // Technical Assets =============================================================================== - techAssets := make([]model.TechnicalAsset, 0) - for _, techAsset := range model.ParsedModelRoot.TechnicalAssets { - techAssets = append(techAssets, techAsset) - } - sort.Sort(model.ByOrderAndIdSort(techAssets)) - for _, technicalAsset := range techAssets { - if len(technicalAsset.DataAssetsStored) > 0 || len(technicalAsset.DataAssetsProcessed) > 0 { - dotContent.WriteString(makeTechAssetNode(technicalAsset, true)) - dotContent.WriteString("\n") - } - } - - // Data Assets =============================================================================== - dataAssets := make([]model.DataAsset, 0) - for _, dataAsset := range model.ParsedModelRoot.DataAssets { - dataAssets = append(dataAssets, dataAsset) - } - sort.Sort(model.ByDataAssetDataBreachProbabilityAndTitleSort(dataAssets)) - for _, dataAsset := range dataAssets { - dotContent.WriteString(makeDataAssetNode(dataAsset)) - dotContent.WriteString("\n") - } - - // Data Asset to Tech Asset links =============================================================================== - for _, technicalAsset := range techAssets { - for _, sourceId := range technicalAsset.DataAssetsStored { - targetId := technicalAsset.Id - dotContent.WriteString("\n") - dotContent.WriteString(hash(sourceId) + " -> " + hash(targetId) + - ` [ color="blue" style="solid" ];`) - dotContent.WriteString("\n") - } - for _, sourceId := range technicalAsset.DataAssetsProcessed { - if !model.Contains(technicalAsset.DataAssetsStored, sourceId) { // here only if not already drawn above - targetId := technicalAsset.Id - dotContent.WriteString("\n") - dotContent.WriteString(hash(sourceId) + " -> " + 
hash(targetId) + - ` [ color="#666666" style="dashed" ];`) - dotContent.WriteString("\n") - } - } - } - - dotContent.WriteString("}") - - // Write the DOT file - file, err := os.Create(diagramFilenameDOT) - checkErr(err) - defer func() { _ = file.Close() }() - _, err = fmt.Fprintln(file, dotContent.String()) - checkErr(err) - return file -} - -func (context *Context) writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.File { - if *context.verbose { - fmt.Println("Writing data flow diagram input") - } - var dotContent strings.Builder - dotContent.WriteString("digraph generatedModel { concentrate=false \n") - - // Metadata init =============================================================================== - tweaks := "" - if model.ParsedModelRoot.DiagramTweakNodesep > 0 { - tweaks += "\n nodesep=\"" + strconv.Itoa(model.ParsedModelRoot.DiagramTweakNodesep) + "\"" - } - if model.ParsedModelRoot.DiagramTweakRanksep > 0 { - tweaks += "\n ranksep=\"" + strconv.Itoa(model.ParsedModelRoot.DiagramTweakRanksep) + "\"" - } - suppressBidirectionalArrows := true - splines := "ortho" - if len(model.ParsedModelRoot.DiagramTweakEdgeLayout) > 0 { - switch model.ParsedModelRoot.DiagramTweakEdgeLayout { - case "spline": - splines = "spline" - context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false - case "polyline": - splines = "polyline" - context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false - case "ortho": - splines = "ortho" - suppressBidirectionalArrows = true - case "curved": - splines = "curved" - context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false - case "false": - splines = "false" - context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false - default: - panic(errors.New("unknown value for diagram_tweak_suppress_edge_labels (spline, polyline, ortho, curved, false): " + - model.ParsedModelRoot.DiagramTweakEdgeLayout)) - } - } - rankdir := "TB" - if 
model.ParsedModelRoot.DiagramTweakLayoutLeftToRight { - rankdir = "LR" - } - modelTitle := "" - if context.addModelTitle { - modelTitle = `label="` + model.ParsedModelRoot.Title + `"` - } - dotContent.WriteString(` graph [ ` + modelTitle + ` - labelloc=t + dotContent.WriteString(` graph [ + dpi=` + strconv.Itoa(dpi) + ` fontname="Verdana" - fontsize=40 + labelloc="c" + fontsize="20" + splines=false + rankdir="LR" + nodesep=1.0 + ranksep=3.0 outputorder="nodesfirst" - dpi=` + strconv.Itoa(dpi) + ` - splines=` + splines + ` - rankdir="` + rankdir + `" -` + tweaks + ` ]; node [ + fontcolor="white" fontname="Verdana" fontsize="20" ]; @@ -5128,171 +4763,53 @@ func (context *Context) writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT strin ]; `) - // Trust Boundaries =============================================================================== - var subgraphSnippetsById = make(map[string]string) - // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order - // range over them in sorted (hence re-producible) way: - keys := make([]string, 0) - for k := range model.ParsedModelRoot.TrustBoundaries { - keys = append(keys, k) - } - sort.Strings(keys) - for _, key := range keys { - trustBoundary := model.ParsedModelRoot.TrustBoundaries[key] - var snippet strings.Builder - if len(trustBoundary.TechnicalAssetsInside) > 0 || len(trustBoundary.TrustBoundariesNested) > 0 { - if context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks { - // see https://stackoverflow.com/questions/17247455/how-do-i-add-extra-space-between-clusters?noredirect=1&lq=1 - snippet.WriteString("\n subgraph cluster_space_boundary_for_layout_only_1" + hash(trustBoundary.Id) + " {\n") - snippet.WriteString(` graph [ - dpi=` + strconv.Itoa(dpi) + ` - label=<
> - fontsize="21" - style="invis" - color="green" - fontcolor="green" - margin="50.0" - penwidth="6.5" - outputorder="nodesfirst" - ];`) - } - snippet.WriteString("\n subgraph cluster_" + hash(trustBoundary.Id) + " {\n") - color, fontColor, bgColor, style, fontname := colors.RgbHexColorTwilight(), colors.RgbHexColorTwilight() /*"#550E0C"*/, "#FAFAFA", "dashed", "Verdana" - penWidth := 4.5 - if len(trustBoundary.TrustBoundariesNested) > 0 { - //color, fontColor, style, fontname = colors.Blue, colors.Blue, "dashed", "Verdana" - penWidth = 5.5 - } - if len(trustBoundary.ParentTrustBoundaryID()) > 0 { - bgColor = "#F1F1F1" - } - if trustBoundary.Type == model.NetworkPolicyNamespaceIsolation { - fontColor, bgColor = "#222222", "#DFF4FF" - } - if trustBoundary.Type == model.ExecutionEnvironment { - fontColor, bgColor, style = "#555555", "#FFFFF0", "dotted" - } - snippet.WriteString(` graph [ - dpi=` + strconv.Itoa(dpi) + ` - label=<
` + trustBoundary.Title + ` (` + trustBoundary.Type.String() + `)
> - fontsize="21" - style="` + style + `" - color="` + color + `" - bgcolor="` + bgColor + `" - fontcolor="` + fontColor + `" - fontname="` + fontname + `" - penwidth="` + fmt.Sprintf("%f", penWidth) + `" - forcelabels=true - outputorder="nodesfirst" - margin="50.0" - ];`) - snippet.WriteString("\n") - keys := trustBoundary.TechnicalAssetsInside - sort.Strings(keys) - for _, technicalAssetInside := range keys { - //log.Println("About to add technical asset link to trust boundary: ", technicalAssetInside) - technicalAsset := model.ParsedModelRoot.TechnicalAssets[technicalAssetInside] - snippet.WriteString(hash(technicalAsset.Id)) - snippet.WriteString(";\n") - } - keys = trustBoundary.TrustBoundariesNested - sort.Strings(keys) - for _, trustBoundaryNested := range keys { - //log.Println("About to add nested trust boundary to trust boundary: ", trustBoundaryNested) - trustBoundaryNested := model.ParsedModelRoot.TrustBoundaries[trustBoundaryNested] - snippet.WriteString("LINK-NEEDS-REPLACED-BY-cluster_" + hash(trustBoundaryNested.Id)) - snippet.WriteString(";\n") - } - snippet.WriteString(" }\n\n") - if context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks { - snippet.WriteString(" }\n\n") - } - } - subgraphSnippetsById[hash(trustBoundary.Id)] = snippet.String() - } - // here replace links and remove from map after replacement (i.e. 
move snippet into nested) - for i := range subgraphSnippetsById { - re := regexp.MustCompile(`LINK-NEEDS-REPLACED-BY-cluster_([0-9]*);`) - for { - matches := re.FindStringSubmatch(subgraphSnippetsById[i]) - if len(matches) > 0 { - embeddedSnippet := " //nested:" + subgraphSnippetsById[matches[1]] - subgraphSnippetsById[i] = strings.ReplaceAll(subgraphSnippetsById[i], matches[0], embeddedSnippet) - subgraphSnippetsById[matches[1]] = "" // to something like remove it - } else { - break - } - } - } - // now write them all - keys = make([]string, 0) - for k := range subgraphSnippetsById { - keys = append(keys, k) - } - sort.Strings(keys) - for _, key := range keys { - snippet := subgraphSnippetsById[key] - dotContent.WriteString(snippet) - } - // Technical Assets =============================================================================== - // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order - // range over them in sorted (hence re-producible) way: - // Convert map to slice of values: - var techAssets []model.TechnicalAsset - for _, techAsset := range model.ParsedModelRoot.TechnicalAssets { + techAssets := make([]model.TechnicalAsset, 0) + for _, techAsset := range context.parsedModel.TechnicalAssets { techAssets = append(techAssets, techAsset) } sort.Sort(model.ByOrderAndIdSort(techAssets)) for _, technicalAsset := range techAssets { - dotContent.WriteString(makeTechAssetNode(technicalAsset, false)) + if len(technicalAsset.DataAssetsStored) > 0 || len(technicalAsset.DataAssetsProcessed) > 0 { + dotContent.WriteString(context.makeTechAssetNode(technicalAsset, true)) + dotContent.WriteString("\n") + } + } + + // Data Assets =============================================================================== + dataAssets := make([]model.DataAsset, 0) + for _, dataAsset := range context.parsedModel.DataAssets { + dataAssets = append(dataAssets, dataAsset) + } + + 
model.SortByDataAssetDataBreachProbabilityAndTitle(&context.parsedModel, dataAssets) + for _, dataAsset := range dataAssets { + dotContent.WriteString(context.makeDataAssetNode(dataAsset)) dotContent.WriteString("\n") } - // Data Flows (Technical Communication Links) =============================================================================== + // Data Asset to Tech Asset links =============================================================================== for _, technicalAsset := range techAssets { - for _, dataFlow := range technicalAsset.CommunicationLinks { - sourceId := technicalAsset.Id - targetId := dataFlow.TargetId - //log.Println("About to add link from", sourceId, "to", targetId, "with id", dataFlow.Id) - var arrowStyle, arrowColor, readOrWriteHead, readOrWriteTail string - if dataFlow.Readonly { - readOrWriteHead = "empty" - readOrWriteTail = "odot" - } else { - readOrWriteHead = "normal" - readOrWriteTail = "dot" - } - dir := "forward" - if dataFlow.IsBidirectional() { - if !suppressBidirectionalArrows { // as it does not work as bug in graphviz with ortho: https://gitlab.com/graphviz/graphviz/issues/144 - dir = "both" - } - } - arrowStyle = ` style="` + dataFlow.DetermineArrowLineStyle() + `" penwidth="` + dataFlow.DetermineArrowPenWidth() + `" arrowtail="` + readOrWriteTail + `" arrowhead="` + readOrWriteHead + `" dir="` + dir + `" arrowsize="2.0" ` - arrowColor = ` color="` + dataFlow.DetermineArrowColor() + `"` - tweaks := "" - if dataFlow.DiagramTweakWeight > 0 { - tweaks += " weight=\"" + strconv.Itoa(dataFlow.DiagramTweakWeight) + "\" " - } - + for _, sourceId := range technicalAsset.DataAssetsStored { + targetId := technicalAsset.Id dotContent.WriteString("\n") - dotContent.WriteString(" " + hash(sourceId) + " -> " + hash(targetId) + - ` [` + arrowColor + ` ` + arrowStyle + tweaks + ` constraint=` + strconv.FormatBool(dataFlow.DiagramTweakConstraint) + ` `) - if !model.ParsedModelRoot.DiagramTweakSuppressEdgeLabels { - 
dotContent.WriteString(` xlabel="` + encode(dataFlow.Protocol.String()) + `" fontcolor="` + dataFlow.DetermineLabelColor() + `" `) + dotContent.WriteString(hash(sourceId) + " -> " + hash(targetId) + + ` [ color="blue" style="solid" ];`) + dotContent.WriteString("\n") + } + for _, sourceId := range technicalAsset.DataAssetsProcessed { + if !contains(technicalAsset.DataAssetsStored, sourceId) { // here only if not already drawn above + targetId := technicalAsset.Id + dotContent.WriteString("\n") + dotContent.WriteString(hash(sourceId) + " -> " + hash(targetId) + + ` [ color="#666666" style="dashed" ];`) + dotContent.WriteString("\n") } - dotContent.WriteString(" ];\n") } } - dotContent.WriteString(makeDiagramInvisibleConnectionsTweaks()) - dotContent.WriteString(makeDiagramSameRankNodeTweaks()) - dotContent.WriteString("}") - //fmt.Println(dotContent.String()) - // Write the DOT file file, err := os.Create(diagramFilenameDOT) checkErr(err) @@ -5302,65 +4819,26 @@ func (context *Context) writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT strin return file } -func makeDiagramInvisibleConnectionsTweaks() string { - // see https://stackoverflow.com/questions/2476575/how-to-control-node-placement-in-graphviz-i-e-avoid-edge-crossings - tweak := "" - if len(model.ParsedModelRoot.DiagramTweakInvisibleConnectionsBetweenAssets) > 0 { - for _, invisibleConnections := range model.ParsedModelRoot.DiagramTweakInvisibleConnectionsBetweenAssets { - assetIDs := strings.Split(invisibleConnections, ":") - if len(assetIDs) == 2 { - checkTechnicalAssetExists(assetIDs[0], "diagram tweak connections", true) - checkTechnicalAssetExists(assetIDs[1], "diagram tweak connections", true) - tweak += "\n" + hash(assetIDs[0]) + " -> " + hash(assetIDs[1]) + " [style=invis]; \n" - } - } - } - return tweak -} - -func makeDiagramSameRankNodeTweaks() string { - // see https://stackoverflow.com/questions/25734244/how-do-i-place-nodes-on-the-same-level-in-dot - tweak := "" - if 
len(model.ParsedModelRoot.DiagramTweakSameRankAssets) > 0 { - for _, sameRank := range model.ParsedModelRoot.DiagramTweakSameRankAssets { - assetIDs := strings.Split(sameRank, ":") - if len(assetIDs) > 0 { - tweak += "{ rank=same; " - for _, id := range assetIDs { - checkTechnicalAssetExists(id, "diagram tweak same-rank", true) - if len(model.ParsedModelRoot.TechnicalAssets[id].GetTrustBoundaryId()) > 0 { - panic(errors.New("technical assets (referenced in same rank diagram tweak) are inside trust boundaries: " + - fmt.Sprintf("%v", model.ParsedModelRoot.DiagramTweakSameRankAssets))) - } - tweak += " " + hash(id) + "; " - } - tweak += " }" - } - } - } - return tweak -} - -func makeTechAssetNode(technicalAsset model.TechnicalAsset, simplified bool) string { +func (context *Context) makeTechAssetNode(technicalAsset model.TechnicalAsset, simplified bool) string { if simplified { color := colors.RgbHexColorOutOfScope() if !technicalAsset.OutOfScope { - generatedRisks := technicalAsset.GeneratedRisks() - switch model.HighestSeverityStillAtRisk(generatedRisks) { - case model.CriticalSeverity: + generatedRisks := technicalAsset.GeneratedRisks(&context.parsedModel) + switch model.HighestSeverityStillAtRisk(&context.parsedModel, generatedRisks) { + case types.CriticalSeverity: color = colors.RgbHexColorCriticalRisk() - case model.HighSeverity: + case types.HighSeverity: color = colors.RgbHexColorHighRisk() - case model.ElevatedSeverity: + case types.ElevatedSeverity: color = colors.RgbHexColorElevatedRisk() - case model.MediumSeverity: + case types.MediumSeverity: color = colors.RgbHexColorMediumRisk() - case model.LowSeverity: + case types.LowSeverity: color = colors.RgbHexColorLowRisk() default: color = "#444444" // since black is too dark here as fill color } - if len(model.ReduceToOnlyStillAtRisk(generatedRisks)) == 0 { + if len(model.ReduceToOnlyStillAtRisk(&context.parsedModel, generatedRisks)) == 0 { color = "#444444" // since black is too dark here as fill color } } 
@@ -5371,13 +4849,13 @@ func makeTechAssetNode(technicalAsset model.TechnicalAsset, simplified bool) str var shape, title string var lineBreak = "" switch technicalAsset.Type { - case model.ExternalEntity: + case types.ExternalEntity: shape = "box" title = technicalAsset.Title - case model.Process: + case types.Process: shape = "ellipse" title = technicalAsset.Title - case model.Datastore: + case types.Datastore: shape = "cylinder" title = technicalAsset.Title if technicalAsset.Redundant { @@ -5404,35 +4882,31 @@ func makeTechAssetNode(technicalAsset model.TechnicalAsset, simplified bool) str } return " " + hash(technicalAsset.Id) + ` [ - label=<
` + lineBreak + technicalAsset.Technology.String() + `
` + technicalAsset.Size.String() + `
` + encode(title) + `
` + attackerAttractivenessLabel + `
> - shape=` + shape + ` style="` + technicalAsset.DetermineShapeBorderLineStyle() + `,` + technicalAsset.DetermineShapeStyle() + `" penwidth="` + technicalAsset.DetermineShapeBorderPenWidth() + `" fillcolor="` + technicalAsset.DetermineShapeFillColor() + `" + label=<
` + lineBreak + technicalAsset.Technology.String() + `
` + technicalAsset.Size.String() + `
` + encode(title) + `
` + attackerAttractivenessLabel + `
> + shape=` + shape + ` style="` + technicalAsset.DetermineShapeBorderLineStyle() + `,` + technicalAsset.DetermineShapeStyle() + `" penwidth="` + technicalAsset.DetermineShapeBorderPenWidth(&context.parsedModel) + `" fillcolor="` + technicalAsset.DetermineShapeFillColor(&context.parsedModel) + `" peripheries=` + strconv.Itoa(technicalAsset.DetermineShapePeripheries()) + ` - color="` + technicalAsset.DetermineShapeBorderColor() + "\"\n ]; " + color="` + technicalAsset.DetermineShapeBorderColor(&context.parsedModel) + "\"\n ]; " } } -func makeDataAssetNode(dataAsset model.DataAsset) string { +func (context *Context) makeDataAssetNode(dataAsset model.DataAsset) string { var color string - switch dataAsset.IdentifiedDataBreachProbabilityStillAtRisk() { - case model.Probable: + switch dataAsset.IdentifiedDataBreachProbabilityStillAtRisk(&context.parsedModel) { + case types.Probable: color = colors.RgbHexColorHighRisk() - case model.Possible: + case types.Possible: color = colors.RgbHexColorMediumRisk() - case model.Improbable: + case types.Improbable: color = colors.RgbHexColorLowRisk() default: color = "#444444" // since black is too dark here as fill color } - if !dataAsset.IsDataBreachPotentialStillAtRisk() { + if !dataAsset.IsDataBreachPotentialStillAtRisk(&context.parsedModel) { color = "#444444" // since black is too dark here as fill color } return " " + hash(dataAsset.Id) + ` [ label=<` + encode(dataAsset.Title) + `> penwidth="3.0" style="filled" fillcolor="` + color + `" color="` + color + "\"\n ]; " } -func encode(value string) string { - return strings.ReplaceAll(value, "&", "&") -} - func (context *Context) renderDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string) { if *context.verbose { fmt.Println("Rendering data flow diagram input") @@ -5460,7 +4934,7 @@ func (context *Context) renderDataFlowDiagramGraphvizImage(dotFile *os.File, tar } // exec - cmd := exec.Command(filepath.Join(*context.binFolder, graphvizDataFlowDiagramConversionCall), 
tmpFileDOT.Name(), tmpFilePNG.Name()) + cmd := exec.Command(filepath.Join(*context.binFolder, context.graphvizDataFlowDiagramConversionCall), tmpFileDOT.Name(), tmpFilePNG.Name()) cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr err = cmd.Run() @@ -5473,9 +4947,9 @@ func (context *Context) renderDataFlowDiagramGraphvizImage(dotFile *os.File, tar fmt.Println(err) return } - err = os.WriteFile(filepath.Join(targetDir, dataFlowDiagramFilenamePNG), input, 0644) + err = os.WriteFile(filepath.Join(targetDir, context.dataFlowDiagramFilenamePNG), input, 0644) if err != nil { - fmt.Println("Error creating", dataFlowDiagramFilenamePNG) + fmt.Println("Error creating", context.dataFlowDiagramFilenamePNG) fmt.Println(err) return } @@ -5508,7 +4982,7 @@ func (context *Context) renderDataAssetDiagramGraphvizImage(dotFile *os.File, ta } // exec - cmd := exec.Command(filepath.Join(*context.binFolder, graphvizDataAssetDiagramConversionCall), tmpFileDOT.Name(), tmpFilePNG.Name()) + cmd := exec.Command(filepath.Join(*context.binFolder, context.graphvizDataAssetDiagramConversionCall), tmpFileDOT.Name(), tmpFilePNG.Name()) cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr err = cmd.Run() @@ -5521,10 +4995,69 @@ func (context *Context) renderDataAssetDiagramGraphvizImage(dotFile *os.File, ta fmt.Println(err) return } - err = os.WriteFile(filepath.Join(targetDir, dataAssetDiagramFilenamePNG), input, 0644) + err = os.WriteFile(filepath.Join(targetDir, context.dataAssetDiagramFilenamePNG), input, 0644) if err != nil { - fmt.Println("Error creating", dataAssetDiagramFilenamePNG) + fmt.Println("Error creating", context.dataAssetDiagramFilenamePNG) fmt.Println(err) return } } + +func checkErr(err error) { + if err != nil { + panic(err) + } +} + +func lowerCaseAndTrim(tags []string) []string { + for i := range tags { + tags[i] = strings.ToLower(strings.TrimSpace(tags[i])) + } + return tags +} + +func (context *Context) checkTags(tags []string, where string) []string { + var tagsUsed = make([]string, 
0) + if tags != nil { + tagsUsed = make([]string, len(tags)) + for i, parsedEntry := range tags { + referencedTag := fmt.Sprintf("%v", parsedEntry) + context.checkTagExists(referencedTag, where) + tagsUsed[i] = referencedTag + } + } + return tagsUsed +} + +func (context *Context) checkTagExists(referencedTag, where string) { + if !contains(context.parsedModel.TagsAvailable, referencedTag) { + panic(errors.New("missing referenced tag in overall tag list at " + where + ": " + referencedTag)) + } +} + +func contains(a []string, x string) bool { + for _, n := range a { + if x == n { + return true + } + } + return false +} + +func withDefault(value string, defaultWhenEmpty string) string { + trimmed := strings.TrimSpace(value) + if len(trimmed) > 0 && trimmed != "" { + return trimmed + } + return strings.TrimSpace(defaultWhenEmpty) +} + +func hash(s string) string { + h := fnv.New32a() + _, _ = h.Write([]byte(s)) + return fmt.Sprintf("%v", h.Sum32()) +} + +func encode(value string) string { + return strings.ReplaceAll(value, "&", "&") +} diff --git a/internal/threagile/macros.go b/internal/threagile/macros.go new file mode 100644 index 00000000..422b4a5a --- /dev/null +++ b/internal/threagile/macros.go @@ -0,0 +1,60 @@ +/* +Copyright © 2023 NAME HERE +*/ +package threagile + +import ( + "github.com/spf13/cobra" + + "github.com/threagile/threagile/pkg/docs" + builinmacros "github.com/threagile/threagile/pkg/macros/built-in" +) + +var listMacrosCmd = &cobra.Command{ + Use: "list-model-macros", + Short: "Print model macros", + Run: func(cmd *cobra.Command, args []string) { + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println("The following model macros are available (can be extended via custom model macros):") + cmd.Println() + /* TODO finish plugin stuff + cmd.Println("Custom model macros:") + for id, customModelMacro := range macros.ListCustomMacros() { + cmd.Println(id, "-->", customModelMacro.GetMacroDetails().Title) + } + cmd.Println() + */ + 
cmd.Println("----------------------") + cmd.Println("Built-in model macros:") + cmd.Println("----------------------") + for _, macros := range builinmacros.ListBuiltInMacros() { + cmd.Println(macros.ID, "-->", macros.Title) + } + cmd.Println() + }, +} + +var explainMacrosCmd = &cobra.Command{ + Use: "explain-model-macros", + Short: "Explain model macros", + Run: func(cmd *cobra.Command, args []string) { + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println("Explanation for the model macros:") + cmd.Println() + /* TODO finish plugin stuff + cmd.Println("Custom model macros:") + for id, customModelMacro := range macros.ListCustomMacros() { + cmd.Printf("%v: %v\n", macros.ID, macros.Title) + } + cmd.Println() + */ + cmd.Println("----------------------") + cmd.Println("Built-in model macros:") + cmd.Println("----------------------") + for _, macros := range builinmacros.ListBuiltInMacros() { + cmd.Printf("%v: %v\n", macros.ID, macros.Title) + } + + cmd.Println() + }, +} diff --git a/internal/threagile/progress-reporter.go b/internal/threagile/progress-reporter.go new file mode 100644 index 00000000..4d5e9585 --- /dev/null +++ b/internal/threagile/progress-reporter.go @@ -0,0 +1,33 @@ +/* +Copyright © 2023 NAME HERE +*/ + +package threagile + +import ( + "fmt" + "log" +) + +type ProgressReporter interface { + Println(a ...any) (n int, err error) + Fatalf(format string, v ...any) +} + +type SilentProgressReporter struct{} + +func (SilentProgressReporter) Println(a ...any) (n int, err error) { + return 0, nil +} + +func (SilentProgressReporter) Fatalf(format string, v ...any) { +} + +type CommandLineProgressReporter struct{} + +func (CommandLineProgressReporter) Println(a ...any) (n int, err error) { + return fmt.Println(a...) +} +func (CommandLineProgressReporter) Fatalf(format string, v ...any) { + log.Fatalf(format, v...) 
+} diff --git a/internal/threagile/root.go b/internal/threagile/root.go new file mode 100644 index 00000000..3c871182 --- /dev/null +++ b/internal/threagile/root.go @@ -0,0 +1,37 @@ +/* +Copyright © 2023 NAME HERE +*/ +package threagile + +import ( + "os" + + "github.com/spf13/cobra" + + "github.com/threagile/threagile/pkg/docs" +) + +var rootCmd = &cobra.Command{ + Use: "threagile", + Short: "\n" + docs.Logo, + Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\n" + docs.Examples, +} + +// Execute adds all child commands to the root command and sets flags appropriately. +// This is called by main.main(). It only needs to happen once to the rootCmd. +func Execute() { + err := rootCmd.Execute() + if err != nil { + os.Exit(1) + } +} + +func init() { + rootCmd.AddCommand(versionCmd) + rootCmd.AddCommand(listMacrosCmd) + rootCmd.AddCommand(explainMacrosCmd) + rootCmd.AddCommand(listTypesCmd) + rootCmd.AddCommand(explainTypesCmd) + rootCmd.AddCommand(listRiskRules) + rootCmd.AddCommand(explainRiskRules) +} diff --git a/internal/threagile/rules.go b/internal/threagile/rules.go new file mode 100644 index 00000000..a20f412f --- /dev/null +++ b/internal/threagile/rules.go @@ -0,0 +1,65 @@ +/* +Copyright © 2023 NAME HERE +*/ +package threagile + +import ( + "github.com/spf13/cobra" + + "github.com/threagile/threagile/pkg/docs" + "github.com/threagile/threagile/pkg/security/risks" +) + +var listRiskRules = &cobra.Command{ + Use: "list-risk-rules", + Short: "Print available risk rules", + Run: func(cmd *cobra.Command, args []string) { + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println("The following risk rules are available (can be extended via custom risk rules):") + cmd.Println() + cmd.Println("----------------------") + cmd.Println("Custom risk rules:") + cmd.Println("----------------------") + customRiskRules := risks.LoadCustomRiskRules([]string{""}, CommandLineProgressReporter{}) + for id, customRule := range customRiskRules { + cmd.Println(id, 
"-->", customRule.Category.Title, "--> with tags:", customRule.Tags) + } + cmd.Println() + cmd.Println("--------------------") + cmd.Println("Built-in risk rules:") + cmd.Println("--------------------") + cmd.Println() + for _, rule := range risks.GetBuiltInRiskRules() { + cmd.Println(rule.Category().Id, "-->", rule.Category().Title, "--> with tags:", rule.SupportedTags()) + } + }, +} + +var explainRiskRules = &cobra.Command{ + Use: "explain-risk-rules", + Short: "Detailed explanation of all the risk rules", + Run: func(cmd *cobra.Command, args []string) { + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println("Explanation for risk rules:") + cmd.Println() + cmd.Println("----------------------") + cmd.Println("Custom risk rules:") + cmd.Println("----------------------") + // fmt.Printf("%v: %v\n", accidental_secret_leak.Category().Id, accidental_secret_leak.Category().Description) + + // TODO: parse custom risk rules and print them + customRiskRules := risks.LoadCustomRiskRules([]string{""}, CommandLineProgressReporter{}) + for _, customRule := range customRiskRules { + cmd.Printf("%v: %v\n", customRule.Category.Id, customRule.Category.Description) + } + cmd.Println() + cmd.Println("--------------------") + cmd.Println("Built-in risk rules:") + cmd.Println("--------------------") + cmd.Println() + for _, rule := range risks.GetBuiltInRiskRules() { + cmd.Printf("%v: %v\n", rule.Category().Id, rule.Category().Description) + } + cmd.Println() + }, +} diff --git a/internal/threagile/types.go b/internal/threagile/types.go new file mode 100644 index 00000000..05840ff8 --- /dev/null +++ b/internal/threagile/types.go @@ -0,0 +1,46 @@ +/* +Copyright © 2023 NAME HERE +*/ +package threagile + +import ( + "fmt" + + "github.com/spf13/cobra" + + "github.com/threagile/threagile/pkg/docs" + "github.com/threagile/threagile/pkg/security/types" +) + +var listTypesCmd = &cobra.Command{ + Use: "list-types", + Short: "Print type information (enum values to be used in 
models)", + Run: func(cmd *cobra.Command, args []string) { + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println() + cmd.Println() + cmd.Println("The following types are available (can be extended for custom rules):") + cmd.Println() + for name, values := range types.GetBuiltinTypeValues() { + cmd.Println(fmt.Sprintf(" %v: %v", name, values)) + } + }, +} + +var explainTypesCmd = &cobra.Command{ + Use: "explain-types", + Short: "Print type information (enum values to be used in models)", + Run: func(cmd *cobra.Command, args []string) { + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + fmt.Println("Explanation for the types:") + cmd.Println() + cmd.Println("The following types are available (can be extended for custom rules):") + cmd.Println() + for name, values := range types.GetBuiltinTypeValues() { + cmd.Println(name) + for _, candidate := range values { + cmd.Printf("\t %v: %v\n", candidate, candidate.Explain()) + } + } + }, +} diff --git a/model/types.go b/model/types.go deleted file mode 100644 index a8c1999f..00000000 --- a/model/types.go +++ /dev/null @@ -1,4389 +0,0 @@ -package model - -import ( - "encoding/json" - "errors" - "fmt" - "github.com/threagile/threagile/colors" - "gopkg.in/yaml.v3" - "os" - "path/filepath" - "regexp" - "sort" - "strings" - "time" -) - -const ThreagileVersion = "1.0.0" // Also update into example and stub model files and openapi.yaml - -var ParsedModelRoot ParsedModel - -var CommunicationLinks map[string]CommunicationLink // TODO as part of "ParsedModelRoot"? 
-var IncomingTechnicalCommunicationLinksMappedByTargetId map[string][]CommunicationLink -var DirectContainingTrustBoundaryMappedByTechnicalAssetId map[string]TrustBoundary -var DirectContainingSharedRuntimeMappedByTechnicalAssetId map[string]SharedRuntime - -var GeneratedRisksByCategory map[RiskCategory][]Risk -var GeneratedRisksBySyntheticId map[string]Risk - -var AllSupportedTags map[string]bool - -var ( - _ = ParseEncryptionStyle - _ = SortedKeysOfDataAssets - _ = SortedKeysOfTechnicalAssets - _ = SortedDataAssetsByDataBreachProbabilityAndTitleStillAtRisk - _ = ReduceToOnlyHighRisk - _ = ReduceToOnlyMediumRisk - _ = ReduceToOnlyLowRisk -) - -func Init() { - CommunicationLinks = make(map[string]CommunicationLink) - IncomingTechnicalCommunicationLinksMappedByTargetId = make(map[string][]CommunicationLink) - DirectContainingTrustBoundaryMappedByTechnicalAssetId = make(map[string]TrustBoundary) - DirectContainingSharedRuntimeMappedByTechnicalAssetId = make(map[string]SharedRuntime) - GeneratedRisksByCategory = make(map[RiskCategory][]Risk) - GeneratedRisksBySyntheticId = make(map[string]Risk) - AllSupportedTags = make(map[string]bool) -} - -func AddToListOfSupportedTags(tags []string) { - for _, tag := range tags { - AllSupportedTags[tag] = true - } -} - -type CustomRiskRule struct { - Category func() RiskCategory - SupportedTags func() []string - GenerateRisks func(input *ParsedModel) []Risk -} - -// === To be used by model macros etc. 
======================= - -func AddTagToModelInput(modelInput *ModelInput, tag string, dryRun bool, changes *[]string) { - tag = NormalizeTag(tag) - if !Contains(modelInput.TagsAvailable, tag) { - *changes = append(*changes, "adding tag: "+tag) - if !dryRun { - modelInput.TagsAvailable = append(modelInput.TagsAvailable, tag) - } - } -} - -func NormalizeTag(tag string) string { - return strings.TrimSpace(strings.ToLower(tag)) -} - -func MakeID(val string) string { - reg, _ := regexp.Compile("[^A-Za-z0-9]+") - return strings.Trim(reg.ReplaceAllString(strings.ToLower(val), "-"), "- ") -} - -// === Model Type Stuff ====================================== - -type ModelInput struct { // TODO: Eventually remove this and directly use ParsedModelRoot? But then the error messages for model errors are not quite as good anymore... - Includes []string `yaml:"includes" json:"includes"` - ThreagileVersion string `yaml:"threagile_version" json:"threagile_version"` - Title string `yaml:"title" json:"title"` - Author Author `yaml:"author" json:"author"` - Date string `yaml:"date" json:"date"` - BusinessOverview Overview `yaml:"business_overview" json:"business_overview"` - TechnicalOverview Overview `yaml:"technical_overview" json:"technical_overview"` - BusinessCriticality string `yaml:"business_criticality" json:"business_criticality"` - ManagementSummaryComment string `yaml:"management_summary_comment" json:"management_summary_comment"` - Questions map[string]string `yaml:"questions" json:"questions"` - AbuseCases map[string]string `yaml:"abuse_cases" json:"abuse_cases"` - SecurityRequirements map[string]string `yaml:"security_requirements" json:"security_requirements"` - TagsAvailable []string `yaml:"tags_available" json:"tags_available"` - DataAssets map[string]InputDataAsset `yaml:"data_assets" json:"data_assets"` - TechnicalAssets map[string]InputTechnicalAsset `yaml:"technical_assets" json:"technical_assets"` - TrustBoundaries map[string]InputTrustBoundary 
`yaml:"trust_boundaries" json:"trust_boundaries"` - SharedRuntimes map[string]InputSharedRuntime `yaml:"shared_runtimes" json:"shared_runtimes"` - IndividualRiskCategories map[string]InputIndividualRiskCategory `yaml:"individual_risk_categories" json:"individual_risk_categories"` - RiskTracking map[string]InputRiskTracking `yaml:"risk_tracking" json:"risk_tracking"` - DiagramTweakNodesep int `yaml:"diagram_tweak_nodesep" json:"diagram_tweak_nodesep"` - DiagramTweakRanksep int `yaml:"diagram_tweak_ranksep" json:"diagram_tweak_ranksep"` - DiagramTweakEdgeLayout string `yaml:"diagram_tweak_edge_layout" json:"diagram_tweak_edge_layout"` - DiagramTweakSuppressEdgeLabels bool `yaml:"diagram_tweak_suppress_edge_labels" json:"diagram_tweak_suppress_edge_labels"` - DiagramTweakLayoutLeftToRight bool `yaml:"diagram_tweak_layout_left_to_right" json:"diagram_tweak_layout_left_to_right"` - DiagramTweakInvisibleConnectionsBetweenAssets []string `yaml:"diagram_tweak_invisible_connections_between_assets" json:"diagram_tweak_invisible_connections_between_assets"` - DiagramTweakSameRankAssets []string `yaml:"diagram_tweak_same_rank_assets" json:"diagram_tweak_same_rank_assets"` -} - -func (model *ModelInput) Defaults() *ModelInput { - *model = ModelInput{ - Questions: make(map[string]string), - AbuseCases: make(map[string]string), - SecurityRequirements: make(map[string]string), - DataAssets: make(map[string]InputDataAsset), - TechnicalAssets: make(map[string]InputTechnicalAsset), - TrustBoundaries: make(map[string]InputTrustBoundary), - SharedRuntimes: make(map[string]InputSharedRuntime), - IndividualRiskCategories: make(map[string]InputIndividualRiskCategory), - RiskTracking: make(map[string]InputRiskTracking), - } - - return model -} - -func (model *ModelInput) Load(inputFilename string) error { - modelYaml, readError := os.ReadFile(inputFilename) - if readError != nil { - return fmt.Errorf("unable to read model file: %v", readError) - } - - unmarshalError := 
yaml.Unmarshal(modelYaml, &model) - if unmarshalError != nil { - return fmt.Errorf("unable to parse model yaml: %v", unmarshalError) - } - - for _, includeFile := range model.Includes { - mergeError := model.Merge(filepath.Dir(inputFilename), includeFile) - if mergeError != nil { - return fmt.Errorf("unable to merge model include %q: %v", includeFile, mergeError) - } - } - - return nil -} - -type UniqueStringSlice []string - -func (slice UniqueStringSlice) Merge(otherSlice []string) []string { - valueMap := make(map[string]bool) - for _, value := range slice { - valueMap[value] = true - } - - for _, value := range otherSlice { - valueMap[value] = true - } - - valueSlice := make(UniqueStringSlice, 0) - for key := range valueMap { - valueSlice = append(valueSlice, key) - } - - return valueSlice -} - -func (model *ModelInput) Merge(dir string, includeFilename string) error { - modelYaml, readError := os.ReadFile(filepath.Join(dir, includeFilename)) - if readError != nil { - return fmt.Errorf("unable to read model file: %v", readError) - } - - var fileStructure map[string]any - unmarshalStructureError := yaml.Unmarshal(modelYaml, &fileStructure) - if unmarshalStructureError != nil { - return fmt.Errorf("unable to parse model structure: %v", unmarshalStructureError) - } - - var includedModel ModelInput - unmarshalError := yaml.Unmarshal(modelYaml, &includedModel) - if unmarshalError != nil { - return fmt.Errorf("unable to parse model yaml: %v", unmarshalError) - } - - for item := range fileStructure { - switch strings.ToLower(item) { - case strings.ToLower("includes"): - for _, includeFile := range includedModel.Includes { - mergeError := model.Merge(filepath.Join(dir, filepath.Dir(includeFilename)), includeFile) - if mergeError != nil { - return fmt.Errorf("unable to merge model include %q: %v", includeFile, mergeError) - } - } - break - - case strings.ToLower("threagile_version"): - model.ThreagileVersion = includedModel.ThreagileVersion - break - - case 
strings.ToLower("title"): - model.Title = includedModel.Title - break - - case strings.ToLower("author"): - model.Author = includedModel.Author - break - - case strings.ToLower("date"): - model.Date = includedModel.Date - break - - case strings.ToLower("business_overview"): - model.BusinessOverview = includedModel.BusinessOverview - break - - case strings.ToLower("technical_overview"): - model.TechnicalOverview = includedModel.TechnicalOverview - break - - case strings.ToLower("business_criticality"): - model.BusinessCriticality = includedModel.BusinessCriticality - break - - case strings.ToLower("management_summary_comment"): - model.ManagementSummaryComment = includedModel.ManagementSummaryComment - break - - case strings.ToLower("questions"): - for mapKey, mapValue := range includedModel.Questions { - model.Questions[mapKey] = mapValue - } - break - - case strings.ToLower("abuse_cases"): - for mapKey, mapValue := range includedModel.AbuseCases { - model.AbuseCases[mapKey] = mapValue - } - break - - case strings.ToLower("security_requirements"): - for mapKey, mapValue := range includedModel.SecurityRequirements { - model.SecurityRequirements[mapKey] = mapValue - } - break - - case strings.ToLower("tags_available"): - model.TagsAvailable = UniqueStringSlice(model.TagsAvailable).Merge(includedModel.TagsAvailable) - break - - case strings.ToLower("data_assets"): - for mapKey, mapValue := range includedModel.DataAssets { - model.DataAssets[mapKey] = mapValue - } - break - - case strings.ToLower("technical_assets"): - for mapKey, mapValue := range includedModel.TechnicalAssets { - model.TechnicalAssets[mapKey] = mapValue - } - break - - case strings.ToLower("trust_boundaries"): - for mapKey, mapValue := range includedModel.TrustBoundaries { - model.TrustBoundaries[mapKey] = mapValue - } - break - - case strings.ToLower("shared_runtimes"): - for mapKey, mapValue := range includedModel.SharedRuntimes { - model.SharedRuntimes[mapKey] = mapValue - } - break - - case 
strings.ToLower("individual_risk_categories"): - for mapKey, mapValue := range includedModel.IndividualRiskCategories { - model.IndividualRiskCategories[mapKey] = mapValue - } - break - - case strings.ToLower("risk_tracking"): - for mapKey, mapValue := range includedModel.RiskTracking { - model.RiskTracking[mapKey] = mapValue - } - break - - case "diagram_tweak_nodesep": - model.DiagramTweakNodesep = includedModel.DiagramTweakNodesep - break - - case "diagram_tweak_ranksep": - model.DiagramTweakRanksep = includedModel.DiagramTweakRanksep - break - - case "diagram_tweak_edge_layout": - model.DiagramTweakEdgeLayout = includedModel.DiagramTweakEdgeLayout - break - - case "diagram_tweak_suppress_edge_labels": - model.DiagramTweakSuppressEdgeLabels = includedModel.DiagramTweakSuppressEdgeLabels - break - - case "diagram_tweak_layout_left_to_right": - model.DiagramTweakLayoutLeftToRight = includedModel.DiagramTweakLayoutLeftToRight - break - - case "diagram_tweak_invisible_connections_between_assets": - model.DiagramTweakInvisibleConnectionsBetweenAssets = append(model.DiagramTweakInvisibleConnectionsBetweenAssets, includedModel.DiagramTweakInvisibleConnectionsBetweenAssets...) - break - - case "diagram_tweak_same_rank_assets": - model.DiagramTweakSameRankAssets = append(model.DiagramTweakSameRankAssets, includedModel.DiagramTweakSameRankAssets...) 
- } - } - - return nil -} - -type InputDataAsset struct { - ID string `yaml:"id" json:"id"` - Description string `yaml:"description" json:"description"` - Usage string `yaml:"usage" json:"usage"` - Tags []string `yaml:"tags" json:"tags"` - Origin string `yaml:"origin" json:"origin"` - Owner string `yaml:"owner" json:"owner"` - Quantity string `yaml:"quantity" json:"quantity"` - Confidentiality string `yaml:"confidentiality" json:"confidentiality"` - Integrity string `yaml:"integrity" json:"integrity"` - Availability string `yaml:"availability" json:"availability"` - JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` -} - -type InputTechnicalAsset struct { - ID string `yaml:"id" json:"id"` - Description string `yaml:"description" json:"description"` - Type string `yaml:"type" json:"type"` - Usage string `yaml:"usage" json:"usage"` - UsedAsClientByHuman bool `yaml:"used_as_client_by_human" json:"used_as_client_by_human"` - OutOfScope bool `yaml:"out_of_scope" json:"out_of_scope"` - JustificationOutOfScope string `yaml:"justification_out_of_scope" json:"justification_out_of_scope"` - Size string `yaml:"size" json:"size"` - Technology string `yaml:"technology" json:"technology"` - Tags []string `yaml:"tags" json:"tags"` - Internet bool `yaml:"internet" json:"internet"` - Machine string `yaml:"machine" json:"machine"` - Encryption string `yaml:"encryption" json:"encryption"` - Owner string `yaml:"owner" json:"owner"` - Confidentiality string `yaml:"confidentiality" json:"confidentiality"` - Integrity string `yaml:"integrity" json:"integrity"` - Availability string `yaml:"availability" json:"availability"` - JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` - MultiTenant bool `yaml:"multi_tenant" json:"multi_tenant"` - Redundant bool `yaml:"redundant" json:"redundant"` - CustomDevelopedParts bool `yaml:"custom_developed_parts" json:"custom_developed_parts"` - DataAssetsProcessed 
[]string `yaml:"data_assets_processed" json:"data_assets_processed"` - DataAssetsStored []string `yaml:"data_assets_stored" json:"data_assets_stored"` - DataFormatsAccepted []string `yaml:"data_formats_accepted" json:"data_formats_accepted"` - DiagramTweakOrder int `yaml:"diagram_tweak_order" json:"diagram_tweak_order"` - CommunicationLinks map[string]InputCommunicationLink `yaml:"communication_links" json:"communication_links"` -} - -type InputCommunicationLink struct { - Target string `yaml:"target" json:"target"` - Description string `yaml:"description" json:"description"` - Protocol string `yaml:"protocol" json:"protocol"` - Authentication string `yaml:"authentication" json:"authentication"` - Authorization string `yaml:"authorization" json:"authorization"` - Tags []string `yaml:"tags" json:"tags"` - VPN bool `yaml:"vpn" json:"vpn"` - IpFiltered bool `yaml:"ip_filtered" json:"ip_filtered"` - Readonly bool `yaml:"readonly" json:"readonly"` - Usage string `yaml:"usage" json:"usage"` - DataAssetsSent []string `yaml:"data_assets_sent" json:"data_assets_sent"` - DataAssetsReceived []string `yaml:"data_assets_received" json:"data_assets_received"` - DiagramTweakWeight int `yaml:"diagram_tweak_weight" json:"diagram_tweak_weight"` - DiagramTweakConstraint bool `yaml:"diagram_tweak_constraint" json:"diagram_tweak_constraint"` -} - -type InputSharedRuntime struct { - ID string `yaml:"id" json:"id"` - Description string `yaml:"description" json:"description"` - Tags []string `yaml:"tags" json:"tags"` - TechnicalAssetsRunning []string `yaml:"technical_assets_running" json:"technical_assets_running"` -} - -type InputTrustBoundary struct { - ID string `yaml:"id" json:"id"` - Description string `yaml:"description" json:"description"` - Type string `yaml:"type" json:"type"` - Tags []string `yaml:"tags" json:"tags"` - TechnicalAssetsInside []string `yaml:"technical_assets_inside" json:"technical_assets_inside"` - TrustBoundariesNested []string `yaml:"trust_boundaries_nested" 
json:"trust_boundaries_nested"` -} - -type InputIndividualRiskCategory struct { - ID string `yaml:"id" json:"id"` - Description string `yaml:"description" json:"description"` - Impact string `yaml:"impact" json:"impact"` - ASVS string `yaml:"asvs" json:"asvs"` - CheatSheet string `yaml:"cheat_sheet" json:"cheat_sheet"` - Action string `yaml:"action" json:"action"` - Mitigation string `yaml:"mitigation" json:"mitigation"` - Check string `yaml:"check" json:"check"` - Function string `yaml:"function" json:"function"` - STRIDE string `yaml:"stride" json:"stride"` - DetectionLogic string `yaml:"detection_logic" json:"detection_logic"` - RiskAssessment string `yaml:"risk_assessment" json:"risk_assessment"` - FalsePositives string `yaml:"false_positives" json:"false_positives"` - ModelFailurePossibleReason bool `yaml:"model_failure_possible_reason" json:"model_failure_possible_reason"` - CWE int `yaml:"cwe" json:"cwe"` - RisksIdentified map[string]InputRiskIdentified `yaml:"risks_identified" json:"risks_identified"` -} - -type InputRiskIdentified struct { - Severity string `yaml:"severity" json:"severity"` - ExploitationLikelihood string `yaml:"exploitation_likelihood" json:"exploitation_likelihood"` - ExploitationImpact string `yaml:"exploitation_impact" json:"exploitation_impact"` - DataBreachProbability string `yaml:"data_breach_probability" json:"data_breach_probability"` - DataBreachTechnicalAssets []string `yaml:"data_breach_technical_assets" json:"data_breach_technical_assets"` - MostRelevantDataAsset string `yaml:"most_relevant_data_asset" json:"most_relevant_data_asset"` - MostRelevantTechnicalAsset string `yaml:"most_relevant_technical_asset" json:"most_relevant_technical_asset"` - MostRelevantCommunicationLink string `yaml:"most_relevant_communication_link" json:"most_relevant_communication_link"` - MostRelevantTrustBoundary string `yaml:"most_relevant_trust_boundary" json:"most_relevant_trust_boundary"` - MostRelevantSharedRuntime string 
`yaml:"most_relevant_shared_runtime" json:"most_relevant_shared_runtime"` -} - -type InputRiskTracking struct { - Status string `yaml:"status" json:"status"` - Justification string `yaml:"justification" json:"justification"` - Ticket string `yaml:"ticket" json:"ticket"` - Date string `yaml:"date" json:"date"` - CheckedBy string `yaml:"checked_by" json:"checked_by"` -} - -// TypeDescription contains a name for a type and its description -type TypeDescription struct { - Name string - Description string -} - -type TypeEnum interface { - String() string - Explain() string -} - -type Quantity int - -const ( - VeryFew Quantity = iota - Few - Many - VeryMany -) - -func QuantityValues() []TypeEnum { - return []TypeEnum{ - VeryFew, - Few, - Many, - VeryMany, - } -} - -func ParseQuantity(value string) (quantity Quantity, err error) { - value = strings.TrimSpace(value) - for _, candidate := range QuantityValues() { - if candidate.String() == value { - return candidate.(Quantity), err - } - } - return quantity, errors.New("Unable to parse into type: " + value) -} - -var QuantityTypeDescription = [...]TypeDescription{ - {"very-few", "Very few"}, - {"few", "Few"}, - {"many", "Many"}, - {"very-many", "Very many"}, -} - -func (what Quantity) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - return QuantityTypeDescription[what].Name -} - -func (what Quantity) Explain() string { - return QuantityTypeDescription[what].Description -} - -func (what Quantity) Title() string { - return [...]string{"very few", "few", "many", "very many"}[what] -} - -func (what Quantity) QuantityFactor() float64 { - // fibonacci starting at 1 - return [...]float64{1, 2, 3, 5}[what] -} - -type Confidentiality int - -const ( - Public Confidentiality = iota - Internal - Restricted - Confidential - StrictlyConfidential -) - -func ConfidentialityValues() []TypeEnum { - return []TypeEnum{ - Public, - Internal, - Restricted, - Confidential, - StrictlyConfidential, - } -} - 
-func ParseConfidentiality(value string) (confidentiality Confidentiality, err error) { - value = strings.TrimSpace(value) - for _, candidate := range ConfidentialityValues() { - if candidate.String() == value { - return candidate.(Confidentiality), err - } - } - return confidentiality, errors.New("Unable to parse into type: " + value) -} - -var ConfidentialityTypeDescription = [...]TypeDescription{ - {"public", "Public available information"}, - {"internal", "(Company) internal information - but all people in the institution can access it"}, - {"restricted", "Internal and with restricted access"}, - {"confidential", "Only a few selected people have access"}, - {"strictly-confidential", "Highest secrecy level"}, -} - -func (what Confidentiality) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - return ConfidentialityTypeDescription[what].Name -} - -func (what Confidentiality) Explain() string { - return ConfidentialityTypeDescription[what].Description -} - -func (what Confidentiality) AttackerAttractivenessForAsset() float64 { - // fibonacci starting at 8 - return [...]float64{8, 13, 21, 34, 55}[what] -} -func (what Confidentiality) AttackerAttractivenessForProcessedOrStoredData() float64 { - // fibonacci starting at 5 - return [...]float64{5, 8, 13, 21, 34}[what] -} -func (what Confidentiality) AttackerAttractivenessForInOutTransferredData() float64 { - // fibonacci starting at 2 - return [...]float64{2, 3, 5, 8, 13}[what] -} - -func (what Confidentiality) RatingStringInScale() string { - result := "(rated " - if what == Public { - result += "1" - } - if what == Internal { - result += "2" - } - if what == Restricted { - result += "3" - } - if what == Confidential { - result += "4" - } - if what == StrictlyConfidential { - result += "5" - } - result += " in scale of 5)" - return result -} - -type Criticality int - -const ( - Archive Criticality = iota - Operational - Important - Critical - MissionCritical -) - -func 
CriticalityValues() []TypeEnum { - return []TypeEnum{ - Archive, - Operational, - Important, - Critical, - MissionCritical, - } -} - -func ParseCriticality(value string) (criticality Criticality, err error) { - value = strings.TrimSpace(value) - for _, candidate := range CriticalityValues() { - if candidate.String() == value { - return candidate.(Criticality), err - } - } - return criticality, errors.New("Unable to parse into type: " + value) -} - -var CriticalityTypeDescription = [...]TypeDescription{ - {"archive", "Stored, not active"}, - {"operational", "If this fails, people will just have an ad-hoc coffee break until it is back"}, - {"important", "Issues here results in angry people"}, - {"critical", "Failure is really expensive or crippling"}, - {"mission-critical", "This must not fail"}, -} - -func (what Criticality) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - return CriticalityTypeDescription[what].Name -} - -func (what Criticality) Explain() string { - return CriticalityTypeDescription[what].Description -} - -func (what Criticality) AttackerAttractivenessForAsset() float64 { - // fibonacci starting at 5 - return [...]float64{5, 8, 13, 21, 34}[what] -} -func (what Criticality) AttackerAttractivenessForProcessedOrStoredData() float64 { - // fibonacci starting at 3 - return [...]float64{3, 5, 8, 13, 21}[what] -} -func (what Criticality) AttackerAttractivenessForInOutTransferredData() float64 { - // fibonacci starting at 2 - return [...]float64{2, 3, 5, 8, 13}[what] -} - -func (what Criticality) RatingStringInScale() string { - result := "(rated " - if what == Archive { - result += "1" - } - if what == Operational { - result += "2" - } - if what == Important { - result += "3" - } - if what == Critical { - result += "4" - } - if what == MissionCritical { - result += "5" - } - result += " in scale of 5)" - return result -} - -type TechnicalAssetType int - -const ( - ExternalEntity TechnicalAssetType = iota - Process - 
Datastore -) - -func TechnicalAssetTypeValues() []TypeEnum { - return []TypeEnum{ - ExternalEntity, - Process, - Datastore, - } -} - -var TechnicalAssetTypeDescription = [...]TypeDescription{ - {"external-entity", "This asset is hosted and managed by a third party"}, - {"process", "A software process"}, - {"datastore", "This asset stores data"}, -} - -func (what TechnicalAssetType) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - return TechnicalAssetTypeDescription[what].Name -} - -func (what TechnicalAssetType) Explain() string { - return TechnicalAssetTypeDescription[what].Description -} - -type TechnicalAssetSize int - -const ( - System TechnicalAssetSize = iota - Service - Application - Component -) - -func TechnicalAssetSizeValues() []TypeEnum { - return []TypeEnum{ - System, - Service, - Application, - Component, - } -} - -var TechnicalAssetSizeDescription = [...]TypeDescription{ - {"system", "A system consists of several services"}, - {"service", "A specific service (web, mail, ...)"}, - {"application", "A single application"}, - {"component", "A component of an application (smaller unit like a microservice)"}, -} - -func (what TechnicalAssetSize) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - return TechnicalAssetSizeDescription[what].Name -} - -func (what TechnicalAssetSize) Explain() string { - return TechnicalAssetSizeDescription[what].Description -} - -type Authorization int - -const ( - NoneAuthorization Authorization = iota - TechnicalUser - EndUserIdentityPropagation -) - -func AuthorizationValues() []TypeEnum { - return []TypeEnum{ - NoneAuthorization, - TechnicalUser, - EndUserIdentityPropagation, - } -} - -var AuthorizationTypeDescription = [...]TypeDescription{ - {"none", "No authorization"}, - {"technical-user", "Technical user (service-to-service) like DB user credentials"}, - {"enduser-identity-propagation", "Identity of end user propagates to this service"}, -} - 
-func (what Authorization) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - return AuthorizationTypeDescription[what].Name -} - -func (what Authorization) Explain() string { - return AuthorizationTypeDescription[what].Description -} - -type Authentication int - -const ( - NoneAuthentication Authentication = iota - Credentials - SessionId - Token - ClientCertificate - TwoFactor - Externalized -) - -func AuthenticationValues() []TypeEnum { - return []TypeEnum{ - NoneAuthentication, - Credentials, - SessionId, - Token, - ClientCertificate, - TwoFactor, - Externalized, - } -} - -var AuthenticationTypeDescription = [...]TypeDescription{ - {"none", "No authentication"}, - {"credentials", "Username and password, pin or passphrase"}, - {"session-id", "A server generated session id with limited life span"}, - {"token", "A server generated token. Containing session id, other data and is cryptographically signed"}, - {"client-certificate", "A certificate file stored on the client identifying this specific client"}, - {"two-factor", "Credentials plus another factor like a physical object (card) or biometrics"}, - {"externalized", "Some external company handles authentication"}, -} - -func (what Authentication) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - //return [...]string{"none", "credentials", "session-id", "token", "client-certificate", "two-factor", "externalized"}[what] - return AuthenticationTypeDescription[what].Name -} - -func (what Authentication) Explain() string { - return AuthenticationTypeDescription[what].Description -} - -type Usage int - -const ( - Business Usage = iota - DevOps -) - -func UsageValues() []TypeEnum { - return []TypeEnum{ - Business, - DevOps, - } -} - -func ParseUsage(value string) (usage Usage, err error) { - value = strings.TrimSpace(value) - for _, candidate := range UsageValues() { - if candidate.String() == value { - return candidate.(Usage), err - } - } - 
return usage, errors.New("Unable to parse into type: " + value) -} - -var UsageTypeDescription = [...]TypeDescription{ - {"business", "This system is operational and does business tasks"}, - {"devops", "This system is for development and/or deployment or other operational tasks"}, -} - -func (what Usage) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - //return [...]string{"business", "devops"}[what] - return UsageTypeDescription[what].Name -} - -func (what Usage) Explain() string { - return UsageTypeDescription[what].Description -} - -func (what Usage) Title() string { - return [...]string{"Business", "DevOps"}[what] -} - -type EncryptionStyle int - -const ( - NoneEncryption EncryptionStyle = iota - Transparent - DataWithSymmetricSharedKey - DataWithAsymmetricSharedKey - DataWithEndUserIndividualKey -) - -func EncryptionStyleValues() []TypeEnum { - return []TypeEnum{ - NoneEncryption, - Transparent, - DataWithSymmetricSharedKey, - DataWithAsymmetricSharedKey, - DataWithEndUserIndividualKey, - } -} - -func ParseEncryptionStyle(value string) (encryptionStyle EncryptionStyle, err error) { - value = strings.TrimSpace(value) - for _, candidate := range EncryptionStyleValues() { - if candidate.String() == value { - return candidate.(EncryptionStyle), err - } - } - return encryptionStyle, errors.New("Unable to parse into type: " + value) -} - -var EncryptionStyleTypeDescription = [...]TypeDescription{ - {"none", "No encryption"}, - {"transparent", "Encrypted data at rest"}, - {"data-with-symmetric-shared-key", "Both communication partners have the same key. This must be kept secret"}, - {"data-with-asymmetric-shared-key", "The key is split into public and private. 
Those two are shared between partners"}, - {"data-with-enduser-individual-key", "The key is (managed) by the end user"}, -} - -func (what EncryptionStyle) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - return EncryptionStyleTypeDescription[what].Name -} - -func (what EncryptionStyle) Explain() string { - return EncryptionStyleTypeDescription[what].Description -} - -func (what EncryptionStyle) Title() string { - return [...]string{"None", "Transparent", "Data with Symmetric Shared Key", "Data with Asymmetric Shared Key", "Data with End-User Individual Key"}[what] -} - -type DataFormat int - -const ( - JSON DataFormat = iota - XML - Serialization - File - CSV -) - -func DataFormatValues() []TypeEnum { - return []TypeEnum{ - JSON, - XML, - Serialization, - File, - CSV, - } -} - -var DataFormatTypeDescription = [...]TypeDescription{ - {"json", "JSON"}, - {"xml", "XML"}, - {"serialization", "Serialized program objects"}, - {"file", "Specific file types for data"}, - {"csv", "CSV"}, -} - -func (what DataFormat) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - return DataFormatTypeDescription[what].Name -} - -func (what DataFormat) Explain() string { - return DataFormatTypeDescription[what].Description -} - -func (what DataFormat) Title() string { - return [...]string{"JSON", "XML", "Serialization", "File", "CSV"}[what] -} - -func (what DataFormat) Description() string { - return [...]string{"JSON marshalled object data", "XML structured data", "Serialization-based object graphs", - "File input/uploads", "CSV tabular data"}[what] -} - -type Protocol int - -const ( - UnknownProtocol Protocol = iota - HTTP - HTTPS - WS - WSS - ReverseProxyWebProtocol - ReverseProxyWebProtocolEncrypted - MQTT - JDBC - JdbcEncrypted - ODBC - OdbcEncrypted - SqlAccessProtocol - SqlAccessProtocolEncrypted - NosqlAccessProtocol - NosqlAccessProtocolEncrypted - BINARY - BinaryEncrypted - TEXT - TextEncrypted - SSH - 
SshTunnel - SMTP - SmtpEncrypted - POP3 - Pop3Encrypted - IMAP - ImapEncrypted - FTP - FTPS - SFTP - SCP - LDAP - LDAPS - JMS - NFS - SMB - SmbEncrypted - LocalFileAccess - NRPE - XMPP - IIOP - IiopEncrypted - JRMP - JrmpEncrypted - InProcessLibraryCall - ContainerSpawning -) - -func ProtocolValues() []TypeEnum { - return []TypeEnum{ - UnknownProtocol, - HTTP, - HTTPS, - WS, - WSS, - ReverseProxyWebProtocol, - ReverseProxyWebProtocolEncrypted, - MQTT, - JDBC, - JdbcEncrypted, - ODBC, - OdbcEncrypted, - SqlAccessProtocol, - SqlAccessProtocolEncrypted, - NosqlAccessProtocol, - NosqlAccessProtocolEncrypted, - BINARY, - BinaryEncrypted, - TEXT, - TextEncrypted, - SSH, - SshTunnel, - SMTP, - SmtpEncrypted, - POP3, - Pop3Encrypted, - IMAP, - ImapEncrypted, - FTP, - FTPS, - SFTP, - SCP, - LDAP, - LDAPS, - JMS, - NFS, - SMB, - SmbEncrypted, - LocalFileAccess, - NRPE, - XMPP, - IIOP, - IiopEncrypted, - JRMP, - JrmpEncrypted, - InProcessLibraryCall, - ContainerSpawning, - } -} - -var ProtocolTypeDescription = [...]TypeDescription{ - {"unknown-protocol", "Unknown protocol"}, - {"http", "HTTP protocol"}, - {"https", "HTTPS protocol (encrypted)"}, - {"ws", "WebSocket"}, - {"wss", "WebSocket but encrypted"}, - {"reverse-proxy-web-protocol", "Protocols used by reverse proxies"}, - {"reverse-proxy-web-protocol-encrypted", "Protocols used by reverse proxies but encrypted"}, - {"mqtt", "MQTT Message protocol. 
Encryption via TLS is optional"}, - {"jdbc", "Java Database Connectivity"}, - {"jdbc-encrypted", "Java Database Connectivity but encrypted"}, - {"odbc", "Open Database Connectivity"}, - {"odbc-encrypted", "Open Database Connectivity but encrypted"}, - {"sql-access-protocol", "SQL access protocol"}, - {"sql-access-protocol-encrypted", "SQL access protocol but encrypted"}, - {"nosql-access-protocol", "NOSQL access protocol"}, - {"nosql-access-protocol-encrypted", "NOSQL access protocol but encrypted"}, - {"binary", "Some other binary protocol"}, - {"binary-encrypted", "Some other binary protocol, encrypted"}, - {"text", "Some other text protocol"}, - {"text-encrypted", "Some other text protocol, encrypted"}, - {"ssh", "Secure Shell to execute commands"}, - {"ssh-tunnel", "Secure Shell as a tunnel"}, - {"smtp", "Mail transfer protocol (sending)"}, - {"smtp-encrypted", "Mail transfer protocol (sending), encrypted"}, - {"pop3", "POP 3 mail fetching"}, - {"pop3-encrypted", "POP 3 mail fetching, encrypted"}, - {"imap", "IMAP mail sync protocol"}, - {"imap-encrypted", "IMAP mail sync protocol, encrypted"}, - {"ftp", "File Transfer Protocol"}, - {"ftps", "FTP with TLS"}, - {"sftp", "FTP on SSH"}, - {"scp", "Secure Shell to copy files"}, - {"ldap", "Lightweight Directory Access Protocol - User directories"}, - {"ldaps", "Lightweight Directory Access Protocol - User directories on TLS"}, - {"jms", "Jakarta Messaging"}, - {"nfs", "Network File System"}, - {"smb", "Server Message Block"}, - {"smb-encrypted", "Server Message Block, but encrypted"}, - {"local-file-access", "Data files are on the local system"}, - {"nrpe", "Nagios Remote Plugin Executor"}, - {"xmpp", "Extensible Messaging and Presence Protocol"}, - {"iiop", "Internet Inter-ORB Protocol "}, - {"iiop-encrypted", "Internet Inter-ORB Protocol , encrypted"}, - {"jrmp", "Java Remote Method Protocol"}, - {"jrmp-encrypted", "Java Remote Method Protocol, encrypted"}, - {"in-process-library-call", "Call to local library"}, 
- {"container-spawning", "Spawn a container"}, -} - -func (what Protocol) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - return ProtocolTypeDescription[what].Name -} - -func (what Protocol) Explain() string { - return ProtocolTypeDescription[what].Description -} - -func (what Protocol) IsProcessLocal() bool { - return what == InProcessLibraryCall || what == LocalFileAccess || what == ContainerSpawning -} - -func (what Protocol) IsEncrypted() bool { - return what == HTTPS || what == WSS || what == JdbcEncrypted || what == OdbcEncrypted || - what == NosqlAccessProtocolEncrypted || what == SqlAccessProtocolEncrypted || what == BinaryEncrypted || what == TextEncrypted || what == SSH || what == SshTunnel || - what == FTPS || what == SFTP || what == SCP || what == LDAPS || what == ReverseProxyWebProtocolEncrypted || - what == IiopEncrypted || what == JrmpEncrypted || what == SmbEncrypted || what == SmtpEncrypted || what == Pop3Encrypted || what == ImapEncrypted -} - -func (what Protocol) IsPotentialDatabaseAccessProtocol(includingLaxDatabaseProtocols bool) bool { - strictlyDatabaseOnlyProtocol := what == JdbcEncrypted || what == OdbcEncrypted || - what == NosqlAccessProtocolEncrypted || what == SqlAccessProtocolEncrypted || what == JDBC || what == ODBC || what == NosqlAccessProtocol || what == SqlAccessProtocol - if includingLaxDatabaseProtocols { - // include HTTP for REST-based NoSQL-DBs as well as unknown binary - return strictlyDatabaseOnlyProtocol || what == HTTPS || what == HTTP || what == BINARY || what == BinaryEncrypted - } - return strictlyDatabaseOnlyProtocol -} - -func (what Protocol) IsPotentialWebAccessProtocol() bool { - return what == HTTP || what == HTTPS || what == WS || what == WSS || what == ReverseProxyWebProtocol || what == ReverseProxyWebProtocolEncrypted -} - -type TechnicalAssetTechnology int - -const ( - UnknownTechnology TechnicalAssetTechnology = iota - ClientSystem - Browser - Desktop - MobileApp - 
DevOpsClient - WebServer - WebApplication - ApplicationServer - Database - FileServer - LocalFileSystem - ERP - CMS - WebServiceREST - WebServiceSOAP - EJB - SearchIndex - SearchEngine - ServiceRegistry - ReverseProxy - LoadBalancer - BuildPipeline - SourcecodeRepository - ArtifactRegistry - CodeInspectionPlatform - Monitoring - LDAPServer - ContainerPlatform - BatchProcessing - EventListener - IdentityProvider - IdentityStoreLDAP - IdentityStoreDatabase - Tool - CLI - Task - Function - Gateway // TODO rename to API-Gateway to be more clear? - IoTDevice - MessageQueue - StreamProcessing - ServiceMesh - DataLake - BigDataPlatform - ReportEngine - AI - MailServer - Vault - HSM - WAF - IDS - IPS - Scheduler - Mainframe - BlockStorage - Library -) - -func TechnicalAssetTechnologyValues() []TypeEnum { - return []TypeEnum{ - UnknownTechnology, - ClientSystem, - Browser, - Desktop, - MobileApp, - DevOpsClient, - WebServer, - WebApplication, - ApplicationServer, - Database, - FileServer, - LocalFileSystem, - ERP, - CMS, - WebServiceREST, - WebServiceSOAP, - EJB, - SearchIndex, - SearchEngine, - ServiceRegistry, - ReverseProxy, - LoadBalancer, - BuildPipeline, - SourcecodeRepository, - ArtifactRegistry, - CodeInspectionPlatform, - Monitoring, - LDAPServer, - ContainerPlatform, - BatchProcessing, - EventListener, - IdentityProvider, - IdentityStoreLDAP, - IdentityStoreDatabase, - Tool, - CLI, - Task, - Function, - Gateway, - IoTDevice, - MessageQueue, - StreamProcessing, - ServiceMesh, - DataLake, - BigDataPlatform, - ReportEngine, - AI, - MailServer, - Vault, - HSM, - WAF, - IDS, - IPS, - Scheduler, - Mainframe, - BlockStorage, - Library, - } -} - -var TechnicalAssetTechnologyTypeDescription = [...]TypeDescription{ - {"unknown-technology", "Unknown technology"}, - {"client-system", "A client system"}, - {"browser", "A web browser"}, - {"desktop", "A desktop system (or laptop)"}, - {"mobile-app", "A mobile app (smartphone, tablet)"}, - {"devops-client", "A client used for 
DevOps"}, - {"web-server", "A web server"}, - {"web-application", "A web application"}, - {"application-server", "An application server (Apache Tomcat, ...)"}, - {"database", "A database"}, - {"file-server", "A file server"}, - {"local-file-system", "The local file system"}, - {"erp", "Enterprise-Resource-Planning"}, - {"cms", "Content Management System"}, - {"web-service-rest", "A REST web service (API)"}, - {"web-service-soap", "A SOAP web service (API)"}, - {"ejb", "Jakarta Enterprise Beans fka Enterprise JavaBeans"}, - {"search-index", "The index database of a search engine"}, - {"search-engine", "A search engine"}, - {"service-registry", "A central place where data schemas can be found and distributed"}, - {"reverse-proxy", "A proxy hiding internal infrastructure from caller making requests. Can also reduce load"}, - {"load-balancer", "A load balancer directing incoming requests to available internal infrastructure"}, - {"build-pipeline", "A software build pipeline"}, - {"sourcecode-repository", "Git or similar"}, - {"artifact-registry", "A registry to store build artifacts"}, - {"code-inspection-platform", "(Static) Code Analysis)"}, - {"monitoring", "A monitoring system (SIEM, logs)"}, - {"ldap-server", "A LDAP server"}, - {"container-platform", "A platform for hosting and executing containers"}, - {"batch-processing", "A set of tools automatically processing data"}, - {"event-listener", "An event listener waiting to be triggered and spring to action"}, - {"identity-provider", "A authentication provider"}, - {"identity-store-ldap", "Authentication data as LDAP"}, - {"identity-store-database", "Authentication data as database"}, - {"tool", "A specific tool"}, - {"cli", "A command line tool"}, - {"task", "A specific task"}, - {"function", "A specific function (maybe RPC ?)"}, - {"gateway", "A gateway connecting two systems or trust boundaries"}, - {"iot-device", "An IoT device"}, - {"message-queue", "A message queue (like MQTT)"}, - {"stream-processing", "Data 
stream processing"}, - {"service-mesh", "Infrastructure for service-to-service communication"}, - {"data-lake", "A huge database"}, - {"big-data-platform", "Storage for big data"}, - {"report-engine", "Software for report generation"}, - {"ai", "An Artificial Intelligence service"}, - {"mail-server", "A Mail server"}, - {"vault", "Encryption and key management"}, - {"hsm", "Hardware Security Module"}, - {"waf", "Web Application Firewall"}, - {"ids", "Intrusion Detection System"}, - {"ips", "Intrusion Prevention System"}, - {"scheduler", "Scheduled tasks"}, - {"mainframe", "A central, big computer"}, - {"block-storage", "SAN or similar central file storage"}, - {"library", "A software library"}, -} - -func (what TechnicalAssetTechnology) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - return TechnicalAssetTechnologyTypeDescription[what].Name -} - -func (what TechnicalAssetTechnology) Explain() string { - return TechnicalAssetTechnologyTypeDescription[what].Description -} - -func (what TechnicalAssetTechnology) IsWebApplication() bool { - return what == WebServer || what == WebApplication || what == ApplicationServer || what == ERP || what == CMS || what == IdentityProvider || what == ReportEngine -} - -func (what TechnicalAssetTechnology) IsWebService() bool { - return what == WebServiceREST || what == WebServiceSOAP -} - -func (what TechnicalAssetTechnology) IsIdentityRelated() bool { - return what == IdentityProvider || what == IdentityStoreLDAP || what == IdentityStoreDatabase -} - -func (what TechnicalAssetTechnology) IsSecurityControlRelated() bool { - return what == Vault || what == HSM || what == WAF || what == IDS || what == IPS -} - -func (what TechnicalAssetTechnology) IsUnprotectedCommunicationsTolerated() bool { - return what == Monitoring || what == IDS || what == IPS -} - -func (what TechnicalAssetTechnology) IsUnnecessaryDataTolerated() bool { - return what == Monitoring || what == IDS || what == IPS -} - -func 
(what TechnicalAssetTechnology) IsCloseToHighValueTargetsTolerated() bool { - return what == Monitoring || what == IDS || what == IPS || what == LoadBalancer || what == ReverseProxy -} - -func (what TechnicalAssetTechnology) IsClient() bool { - return what == ClientSystem || what == Browser || what == Desktop || what == MobileApp || what == DevOpsClient || what == IoTDevice -} - -func (what TechnicalAssetTechnology) IsUsuallyAbleToPropagateIdentityToOutgoingTargets() bool { - return what == ClientSystem || what == Browser || what == Desktop || what == MobileApp || - what == DevOpsClient || what == WebServer || what == WebApplication || what == ApplicationServer || what == ERP || - what == CMS || what == WebServiceREST || what == WebServiceSOAP || what == EJB || - what == SearchEngine || what == ReverseProxy || what == LoadBalancer || what == IdentityProvider || - what == Tool || what == CLI || what == Task || what == Function || what == Gateway || - what == IoTDevice || what == MessageQueue || what == ServiceMesh || what == ReportEngine || what == WAF || what == Library - -} - -func (what TechnicalAssetTechnology) IsLessProtectedType() bool { - return what == ClientSystem || what == Browser || what == Desktop || what == MobileApp || what == DevOpsClient || what == WebServer || what == WebApplication || what == ApplicationServer || what == CMS || - what == WebServiceREST || what == WebServiceSOAP || what == EJB || what == BuildPipeline || what == SourcecodeRepository || - what == ArtifactRegistry || what == CodeInspectionPlatform || what == Monitoring || what == IoTDevice || what == AI || what == MailServer || what == Scheduler || - what == Mainframe -} - -func (what TechnicalAssetTechnology) IsUsuallyProcessingEndUserRequests() bool { - return what == WebServer || what == WebApplication || what == ApplicationServer || what == ERP || what == WebServiceREST || what == WebServiceSOAP || what == EJB || what == ReportEngine -} - -func (what TechnicalAssetTechnology) 
IsUsuallyStoringEndUserData() bool { - return what == Database || what == ERP || what == FileServer || what == LocalFileSystem || what == BlockStorage || what == MailServer || what == StreamProcessing || what == MessageQueue -} - -func (what TechnicalAssetTechnology) IsExclusivelyFrontendRelated() bool { - return what == ClientSystem || what == Browser || what == Desktop || what == MobileApp || what == DevOpsClient || what == CMS || what == ReverseProxy || what == WAF || what == LoadBalancer || what == Gateway || what == IoTDevice -} - -func (what TechnicalAssetTechnology) IsExclusivelyBackendRelated() bool { - return what == Database || what == IdentityProvider || what == IdentityStoreLDAP || what == IdentityStoreDatabase || what == ERP || what == WebServiceREST || what == WebServiceSOAP || what == EJB || what == SearchIndex || - what == SearchEngine || what == ContainerPlatform || what == BatchProcessing || what == EventListener || what == DataLake || what == BigDataPlatform || what == MessageQueue || - what == StreamProcessing || what == ServiceMesh || what == Vault || what == HSM || what == Scheduler || what == Mainframe || what == FileServer || what == BlockStorage -} - -func (what TechnicalAssetTechnology) IsDevelopmentRelevant() bool { - return what == BuildPipeline || what == SourcecodeRepository || what == ArtifactRegistry || what == CodeInspectionPlatform || what == DevOpsClient -} - -func (what TechnicalAssetTechnology) IsTrafficForwarding() bool { - return what == LoadBalancer || what == ReverseProxy || what == WAF -} - -func (what TechnicalAssetTechnology) IsEmbeddedComponent() bool { - return what == Library -} - -type TechnicalAssetMachine int - -const ( - Physical TechnicalAssetMachine = iota - Virtual - Container - Serverless -) - -func TechnicalAssetMachineValues() []TypeEnum { - return []TypeEnum{ - Physical, - Virtual, - Container, - Serverless, - } -} - -var TechnicalAssetMachineTypeDescription = [...]TypeDescription{ - {"physical", "A physical 
machine"}, - {"virtual", "A virtual machine"}, - {"container", "A container"}, - {"serverless", "A serverless application"}, -} - -func (what TechnicalAssetMachine) String() string { - return TechnicalAssetMachineTypeDescription[what].Name -} - -func (what TechnicalAssetMachine) Explain() string { - return TechnicalAssetMachineTypeDescription[what].Description -} - -type TrustBoundaryType int - -const ( - NetworkOnPrem TrustBoundaryType = iota - NetworkDedicatedHoster - NetworkVirtualLAN - NetworkCloudProvider - NetworkCloudSecurityGroup - NetworkPolicyNamespaceIsolation - ExecutionEnvironment -) - -func TrustBoundaryTypeValues() []TypeEnum { - return []TypeEnum{ - NetworkOnPrem, - NetworkDedicatedHoster, - NetworkVirtualLAN, - NetworkCloudProvider, - NetworkCloudSecurityGroup, - NetworkPolicyNamespaceIsolation, - ExecutionEnvironment, - } -} - -var TrustBoundaryTypeDescription = [...]TypeDescription{ - {"network-on-prem", "The whole network is on prem"}, - {"network-dedicated-hoster", "The network is at a dedicated hoster"}, - {"network-virtual-lan", "Network is a VLAN"}, - {"network-cloud-provider", "Network is at a cloud provider"}, - {"network-cloud-security-group", "Cloud rules controlling network traffic"}, - {"network-policy-namespace-isolation", "Segregation in a Kubernetes cluster"}, - {"execution-environment", "Logical group of items (not a protective network boundary in that sense). 
More like a namespace or another logical group of items"}, -} - -func (what TrustBoundaryType) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - return TrustBoundaryTypeDescription[what].Name -} - -func (what TrustBoundaryType) Explain() string { - return TrustBoundaryTypeDescription[what].Description -} - -func (what TrustBoundaryType) IsNetworkBoundary() bool { - return what == NetworkOnPrem || what == NetworkDedicatedHoster || what == NetworkVirtualLAN || - what == NetworkCloudProvider || what == NetworkCloudSecurityGroup || what == NetworkPolicyNamespaceIsolation -} - -func (what TrustBoundaryType) IsWithinCloud() bool { - return what == NetworkCloudProvider || what == NetworkCloudSecurityGroup -} - -func (what TrustBoundary) RecursivelyAllTechnicalAssetIDsInside() []string { - result := make([]string, 0) - what.addAssetIDsRecursively(&result) - return result -} - -func (what TrustBoundary) addAssetIDsRecursively(result *[]string) { - *result = append(*result, what.TechnicalAssetsInside...) 
- for _, nestedBoundaryID := range what.TrustBoundariesNested { - ParsedModelRoot.TrustBoundaries[nestedBoundaryID].addAssetIDsRecursively(result) - } -} - -func (what TrustBoundary) AllParentTrustBoundaryIDs() []string { - result := make([]string, 0) - what.addTrustBoundaryIDsRecursively(&result) - return result -} - -func (what TrustBoundary) addTrustBoundaryIDsRecursively(result *[]string) { - *result = append(*result, what.Id) - parentID := what.ParentTrustBoundaryID() - if len(parentID) > 0 { - ParsedModelRoot.TrustBoundaries[parentID].addTrustBoundaryIDsRecursively(result) - } -} - -func IsSharingSameParentTrustBoundary(left, right TechnicalAsset) bool { - tbIDLeft, tbIDRight := left.GetTrustBoundaryId(), right.GetTrustBoundaryId() - if len(tbIDLeft) == 0 && len(tbIDRight) > 0 { - return false - } - if len(tbIDLeft) > 0 && len(tbIDRight) == 0 { - return false - } - if len(tbIDLeft) == 0 && len(tbIDRight) == 0 { - return true - } - if tbIDLeft == tbIDRight { - return true - } - tbLeft, tbRight := ParsedModelRoot.TrustBoundaries[tbIDLeft], ParsedModelRoot.TrustBoundaries[tbIDRight] - tbParentsLeft, tbParentsRight := tbLeft.AllParentTrustBoundaryIDs(), tbRight.AllParentTrustBoundaryIDs() - for _, parentLeft := range tbParentsLeft { - for _, parentRight := range tbParentsRight { - if parentLeft == parentRight { - return true - } - } - } - return false -} - -type DataAsset struct { - Id string `yaml:"id" json:"id"` // TODO: tag here still required? - Title string `yaml:"title" json:"title"` // TODO: tag here still required? - Description string `yaml:"description" json:"description"` // TODO: tag here still required? 
- Usage Usage `yaml:"usage" json:"usage"` - Tags []string `yaml:"tags" json:"tags"` - Origin string `yaml:"origin" json:"origin"` - Owner string `yaml:"owner" json:"owner"` - Quantity Quantity `yaml:"quantity" json:"quantity"` - Confidentiality Confidentiality `yaml:"confidentiality" json:"confidentiality"` - Integrity Criticality `yaml:"integrity" json:"integrity"` - Availability Criticality `yaml:"availability" json:"availability"` - JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` -} - -func (what DataAsset) IsTaggedWithAny(tags ...string) bool { - return ContainsCaseInsensitiveAny(what.Tags, tags...) -} - -func (what DataAsset) IsTaggedWithBaseTag(baseTag string) bool { - return IsTaggedWithBaseTag(what.Tags, baseTag) -} - -/* -func (what DataAsset) IsAtRisk() bool { - for _, techAsset := range what.ProcessedByTechnicalAssetsSorted() { - if len(ReduceToOnlyStillAtRisk(techAsset.GeneratedRisks())) > 0 { - return true - } - } - for _, techAsset := range what.StoredByTechnicalAssetsSorted() { - if len(ReduceToOnlyStillAtRisk(techAsset.GeneratedRisks())) > 0 { - return true - } - } - return false -} -*/ - -/* -func (what DataAsset) IdentifiedRiskSeverityStillAtRisk() RiskSeverity { - highestRiskSeverity := Low - for _, techAsset := range what.ProcessedByTechnicalAssetsSorted() { - candidateSeverity := HighestSeverityStillAtRisk(ReduceToOnlyStillAtRisk(techAsset.GeneratedRisks())) - if candidateSeverity > highestRiskSeverity { - highestRiskSeverity = candidateSeverity - } - } - for _, techAsset := range what.StoredByTechnicalAssetsSorted() { - candidateSeverity := HighestSeverityStillAtRisk(ReduceToOnlyStillAtRisk(techAsset.GeneratedRisks())) - if candidateSeverity > highestRiskSeverity { - highestRiskSeverity = candidateSeverity - } - } - return highestRiskSeverity -} -*/ - -func (what DataAsset) IdentifiedRisksByResponsibleTechnicalAssetId() map[string][]Risk { - uniqueTechAssetIDsResponsibleForThisDataAsset := 
make(map[string]interface{}) - for _, techAsset := range what.ProcessedByTechnicalAssetsSorted() { - if len(techAsset.GeneratedRisks()) > 0 { - uniqueTechAssetIDsResponsibleForThisDataAsset[techAsset.Id] = true - } - } - for _, techAsset := range what.StoredByTechnicalAssetsSorted() { - if len(techAsset.GeneratedRisks()) > 0 { - uniqueTechAssetIDsResponsibleForThisDataAsset[techAsset.Id] = true - } - } - - result := make(map[string][]Risk) - for techAssetId := range uniqueTechAssetIDsResponsibleForThisDataAsset { - result[techAssetId] = append(result[techAssetId], ParsedModelRoot.TechnicalAssets[techAssetId].GeneratedRisks()...) - } - return result -} - -func (what DataAsset) IsDataBreachPotentialStillAtRisk() bool { - for _, risk := range FilteredByStillAtRisk() { - for _, techAsset := range risk.DataBreachTechnicalAssetIDs { - if Contains(ParsedModelRoot.TechnicalAssets[techAsset].DataAssetsProcessed, what.Id) { - return true - } - if Contains(ParsedModelRoot.TechnicalAssets[techAsset].DataAssetsStored, what.Id) { - return true - } - } - } - return false -} - -func (what DataAsset) IdentifiedDataBreachProbability() DataBreachProbability { - highestProbability := Improbable - for _, risk := range AllRisks() { - for _, techAsset := range risk.DataBreachTechnicalAssetIDs { - if Contains(ParsedModelRoot.TechnicalAssets[techAsset].DataAssetsProcessed, what.Id) { - if risk.DataBreachProbability > highestProbability { - highestProbability = risk.DataBreachProbability - break - } - } - if Contains(ParsedModelRoot.TechnicalAssets[techAsset].DataAssetsStored, what.Id) { - if risk.DataBreachProbability > highestProbability { - highestProbability = risk.DataBreachProbability - break - } - } - } - } - return highestProbability -} - -func (what DataAsset) IdentifiedDataBreachProbabilityStillAtRisk() DataBreachProbability { - highestProbability := Improbable - for _, risk := range FilteredByStillAtRisk() { - for _, techAsset := range risk.DataBreachTechnicalAssetIDs { - if 
Contains(ParsedModelRoot.TechnicalAssets[techAsset].DataAssetsProcessed, what.Id) { - if risk.DataBreachProbability > highestProbability { - highestProbability = risk.DataBreachProbability - break - } - } - if Contains(ParsedModelRoot.TechnicalAssets[techAsset].DataAssetsStored, what.Id) { - if risk.DataBreachProbability > highestProbability { - highestProbability = risk.DataBreachProbability - break - } - } - } - } - return highestProbability -} - -func (what DataAsset) IdentifiedDataBreachProbabilityRisksStillAtRisk() []Risk { - result := make([]Risk, 0) - for _, risk := range FilteredByStillAtRisk() { - for _, techAsset := range risk.DataBreachTechnicalAssetIDs { - if Contains(ParsedModelRoot.TechnicalAssets[techAsset].DataAssetsProcessed, what.Id) { - result = append(result, risk) - break - } - if Contains(ParsedModelRoot.TechnicalAssets[techAsset].DataAssetsStored, what.Id) { - result = append(result, risk) - break - } - } - } - return result -} - -func (what DataAsset) IdentifiedDataBreachProbabilityRisks() []Risk { - result := make([]Risk, 0) - for _, risk := range AllRisks() { - for _, techAsset := range risk.DataBreachTechnicalAssetIDs { - if Contains(ParsedModelRoot.TechnicalAssets[techAsset].DataAssetsProcessed, what.Id) { - result = append(result, risk) - break - } - if Contains(ParsedModelRoot.TechnicalAssets[techAsset].DataAssetsStored, what.Id) { - result = append(result, risk) - break - } - } - } - return result -} - -func (what DataAsset) ProcessedByTechnicalAssetsSorted() []TechnicalAsset { - result := make([]TechnicalAsset, 0) - for _, technicalAsset := range ParsedModelRoot.TechnicalAssets { - for _, candidateID := range technicalAsset.DataAssetsProcessed { - if candidateID == what.Id { - result = append(result, technicalAsset) - } - } - } - sort.Sort(ByTechnicalAssetTitleSort(result)) - return result -} - -func (what DataAsset) StoredByTechnicalAssetsSorted() []TechnicalAsset { - result := make([]TechnicalAsset, 0) - for _, technicalAsset := 
range ParsedModelRoot.TechnicalAssets { - for _, candidateID := range technicalAsset.DataAssetsStored { - if candidateID == what.Id { - result = append(result, technicalAsset) - } - } - } - sort.Sort(ByTechnicalAssetTitleSort(result)) - return result -} - -func (what DataAsset) SentViaCommLinksSorted() []CommunicationLink { - result := make([]CommunicationLink, 0) - for _, technicalAsset := range ParsedModelRoot.TechnicalAssets { - for _, commLink := range technicalAsset.CommunicationLinks { - for _, candidateID := range commLink.DataAssetsSent { - if candidateID == what.Id { - result = append(result, commLink) - } - } - } - } - sort.Sort(ByTechnicalCommunicationLinkTitleSort(result)) - return result -} - -func (what DataAsset) ReceivedViaCommLinksSorted() []CommunicationLink { - result := make([]CommunicationLink, 0) - for _, technicalAsset := range ParsedModelRoot.TechnicalAssets { - for _, commLink := range technicalAsset.CommunicationLinks { - for _, candidateID := range commLink.DataAssetsReceived { - if candidateID == what.Id { - result = append(result, commLink) - } - } - } - } - sort.Sort(ByTechnicalCommunicationLinkTitleSort(result)) - return result -} - -func IsTaggedWithBaseTag(tags []string, baseTag string) bool { // base tags are before the colon ":" like in "aws:ec2" it's "aws". The subtag is after the colon. 
Also, a pure "aws" tag matches the base tag "aws" - baseTag = strings.ToLower(strings.TrimSpace(baseTag)) - for _, tag := range tags { - tag = strings.ToLower(strings.TrimSpace(tag)) - if tag == baseTag || strings.HasPrefix(tag, baseTag+":") { - return true - } - } - return false -} - -type TechnicalAsset struct { - Id, Title, Description string - Usage Usage - Type TechnicalAssetType - Size TechnicalAssetSize - Technology TechnicalAssetTechnology - Machine TechnicalAssetMachine - Internet, MultiTenant, Redundant, CustomDevelopedParts, OutOfScope, UsedAsClientByHuman bool - Encryption EncryptionStyle - JustificationOutOfScope string - Owner string - Confidentiality Confidentiality - Integrity, Availability Criticality - JustificationCiaRating string - Tags, DataAssetsProcessed, DataAssetsStored []string - DataFormatsAccepted []DataFormat - CommunicationLinks []CommunicationLink - DiagramTweakOrder int - // will be set by separate calculation step: - RAA float64 -} - -func (what TechnicalAsset) IsTaggedWithAny(tags ...string) bool { - return ContainsCaseInsensitiveAny(what.Tags, tags...) -} - -func (what TechnicalAsset) IsTaggedWithBaseTag(baseTag string) bool { - return IsTaggedWithBaseTag(what.Tags, baseTag) -} - -// first use the tag(s) of the asset itself, then their trust boundaries (recursively up) and then their shared runtime - -func (what TechnicalAsset) IsTaggedWithAnyTraversingUp(tags ...string) bool { - if ContainsCaseInsensitiveAny(what.Tags, tags...) { - return true - } - tbID := what.GetTrustBoundaryId() - if len(tbID) > 0 { - if ParsedModelRoot.TrustBoundaries[tbID].IsTaggedWithAnyTraversingUp(tags...) { - return true - } - } - for _, sr := range ParsedModelRoot.SharedRuntimes { - if Contains(sr.TechnicalAssetsRunning, what.Id) && sr.IsTaggedWithAny(tags...) 
{ - return true - } - } - return false -} - -func (what TechnicalAsset) IsSameTrustBoundary(otherAssetId string) bool { - trustBoundaryOfMyAsset := DirectContainingTrustBoundaryMappedByTechnicalAssetId[what.Id] - trustBoundaryOfOtherAsset := DirectContainingTrustBoundaryMappedByTechnicalAssetId[otherAssetId] - return trustBoundaryOfMyAsset.Id == trustBoundaryOfOtherAsset.Id -} - -func (what TechnicalAsset) IsSameExecutionEnvironment(otherAssetId string) bool { - trustBoundaryOfMyAsset := DirectContainingTrustBoundaryMappedByTechnicalAssetId[what.Id] - trustBoundaryOfOtherAsset := DirectContainingTrustBoundaryMappedByTechnicalAssetId[otherAssetId] - if trustBoundaryOfMyAsset.Type == ExecutionEnvironment && trustBoundaryOfOtherAsset.Type == ExecutionEnvironment { - return trustBoundaryOfMyAsset.Id == trustBoundaryOfOtherAsset.Id - } - return false -} - -func (what TechnicalAsset) IsSameTrustBoundaryNetworkOnly(otherAssetId string) bool { - trustBoundaryOfMyAsset := DirectContainingTrustBoundaryMappedByTechnicalAssetId[what.Id] - if !trustBoundaryOfMyAsset.Type.IsNetworkBoundary() { // find and use the parent boundary then - trustBoundaryOfMyAsset = ParsedModelRoot.TrustBoundaries[trustBoundaryOfMyAsset.ParentTrustBoundaryID()] - } - trustBoundaryOfOtherAsset := DirectContainingTrustBoundaryMappedByTechnicalAssetId[otherAssetId] - if !trustBoundaryOfOtherAsset.Type.IsNetworkBoundary() { // find and use the parent boundary then - trustBoundaryOfOtherAsset = ParsedModelRoot.TrustBoundaries[trustBoundaryOfOtherAsset.ParentTrustBoundaryID()] - } - return trustBoundaryOfMyAsset.Id == trustBoundaryOfOtherAsset.Id -} - -func (what TechnicalAsset) HighestSensitivityScore() float64 { - return what.Confidentiality.AttackerAttractivenessForAsset() + - what.Integrity.AttackerAttractivenessForAsset() + - what.Availability.AttackerAttractivenessForAsset() -} - -func (what TechnicalAsset) HighestConfidentiality() Confidentiality { - highest := what.Confidentiality - for _, dataId := 
range what.DataAssetsProcessed { - dataAsset := ParsedModelRoot.DataAssets[dataId] - if dataAsset.Confidentiality > highest { - highest = dataAsset.Confidentiality - } - } - for _, dataId := range what.DataAssetsStored { - dataAsset := ParsedModelRoot.DataAssets[dataId] - if dataAsset.Confidentiality > highest { - highest = dataAsset.Confidentiality - } - } - return highest -} - -func (what TechnicalAsset) DataAssetsProcessedSorted() []DataAsset { - result := make([]DataAsset, 0) - for _, assetID := range what.DataAssetsProcessed { - result = append(result, ParsedModelRoot.DataAssets[assetID]) - } - sort.Sort(ByDataAssetTitleSort(result)) - return result -} - -func (what TechnicalAsset) DataAssetsStoredSorted() []DataAsset { - result := make([]DataAsset, 0) - for _, assetID := range what.DataAssetsStored { - result = append(result, ParsedModelRoot.DataAssets[assetID]) - } - sort.Sort(ByDataAssetTitleSort(result)) - return result -} - -func (what TechnicalAsset) DataFormatsAcceptedSorted() []DataFormat { - result := make([]DataFormat, 0) - for _, format := range what.DataFormatsAccepted { - result = append(result, format) - } - sort.Sort(ByDataFormatAcceptedSort(result)) - return result -} - -func (what TechnicalAsset) CommunicationLinksSorted() []CommunicationLink { - result := make([]CommunicationLink, 0) - for _, format := range what.CommunicationLinks { - result = append(result, format) - } - sort.Sort(ByTechnicalCommunicationLinkTitleSort(result)) - return result -} - -func (what TechnicalAsset) HighestIntegrity() Criticality { - highest := what.Integrity - for _, dataId := range what.DataAssetsProcessed { - dataAsset := ParsedModelRoot.DataAssets[dataId] - if dataAsset.Integrity > highest { - highest = dataAsset.Integrity - } - } - for _, dataId := range what.DataAssetsStored { - dataAsset := ParsedModelRoot.DataAssets[dataId] - if dataAsset.Integrity > highest { - highest = dataAsset.Integrity - } - } - return highest -} - -func (what TechnicalAsset) 
HighestAvailability() Criticality { - highest := what.Availability - for _, dataId := range what.DataAssetsProcessed { - dataAsset := ParsedModelRoot.DataAssets[dataId] - if dataAsset.Availability > highest { - highest = dataAsset.Availability - } - } - for _, dataId := range what.DataAssetsStored { - dataAsset := ParsedModelRoot.DataAssets[dataId] - if dataAsset.Availability > highest { - highest = dataAsset.Availability - } - } - return highest -} - -func (what TechnicalAsset) HasDirectConnection(otherAssetId string) bool { - for _, dataFlow := range IncomingTechnicalCommunicationLinksMappedByTargetId[what.Id] { - if dataFlow.SourceId == otherAssetId { - return true - } - } - // check both directions, hence two times, just reversed - for _, dataFlow := range IncomingTechnicalCommunicationLinksMappedByTargetId[otherAssetId] { - if dataFlow.SourceId == what.Id { - return true - } - } - return false -} - -func (what TechnicalAsset) GeneratedRisks() []Risk { - resultingRisks := make([]Risk, 0) - if len(SortedRiskCategories()) == 0 { - fmt.Println("Uh, strange, no risks generated (yet?) 
and asking for them by tech asset...") - } - for _, category := range SortedRiskCategories() { - risks := SortedRisksOfCategory(category) - for _, risk := range risks { - if risk.MostRelevantTechnicalAssetId == what.Id { - resultingRisks = append(resultingRisks, risk) - } - } - } - sort.Sort(ByRiskSeveritySort(resultingRisks)) - return resultingRisks -} - -/* -func (what TechnicalAsset) HighestRiskSeverity() RiskSeverity { - highest := Low - for _, risk := range what.GeneratedRisks() { - if risk.Severity > highest { - highest = risk.Severity - } - } - return highest -} -*/ - -type ByDataAssetDataBreachProbabilityAndTitleSort []DataAsset - -func (what ByDataAssetDataBreachProbabilityAndTitleSort) Len() int { return len(what) } -func (what ByDataAssetDataBreachProbabilityAndTitleSort) Swap(i, j int) { - what[i], what[j] = what[j], what[i] -} -func (what ByDataAssetDataBreachProbabilityAndTitleSort) Less(i, j int) bool { - highestDataBreachProbabilityLeft := what[i].IdentifiedDataBreachProbability() - highestDataBreachProbabilityRight := what[j].IdentifiedDataBreachProbability() - if highestDataBreachProbabilityLeft == highestDataBreachProbabilityRight { - return what[i].Title < what[j].Title - } - return highestDataBreachProbabilityLeft > highestDataBreachProbabilityRight -} - -type ByDataAssetDataBreachProbabilityAndTitleSortStillAtRisk []DataAsset - -func (what ByDataAssetDataBreachProbabilityAndTitleSortStillAtRisk) Len() int { return len(what) } -func (what ByDataAssetDataBreachProbabilityAndTitleSortStillAtRisk) Swap(i, j int) { - what[i], what[j] = what[j], what[i] -} -func (what ByDataAssetDataBreachProbabilityAndTitleSortStillAtRisk) Less(i, j int) bool { - risksLeft := what[i].IdentifiedDataBreachProbabilityRisksStillAtRisk() - risksRight := what[j].IdentifiedDataBreachProbabilityRisksStillAtRisk() - highestDataBreachProbabilityLeft := what[i].IdentifiedDataBreachProbabilityStillAtRisk() - highestDataBreachProbabilityRight := 
what[j].IdentifiedDataBreachProbabilityStillAtRisk() - if highestDataBreachProbabilityLeft == highestDataBreachProbabilityRight { - if len(risksLeft) == 0 && len(risksRight) > 0 { - return false - } - if len(risksLeft) > 0 && len(risksRight) == 0 { - return true - } - return what[i].Title < what[j].Title - } - return highestDataBreachProbabilityLeft > highestDataBreachProbabilityRight -} - -type ByOrderAndIdSort []TechnicalAsset - -func (what ByOrderAndIdSort) Len() int { return len(what) } -func (what ByOrderAndIdSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } -func (what ByOrderAndIdSort) Less(i, j int) bool { - if what[i].DiagramTweakOrder == what[j].DiagramTweakOrder { - return what[i].Id > what[j].Id - } - return what[i].DiagramTweakOrder < what[j].DiagramTweakOrder -} - -type ByTechnicalAssetRiskSeverityAndTitleSortStillAtRisk []TechnicalAsset - -func (what ByTechnicalAssetRiskSeverityAndTitleSortStillAtRisk) Len() int { return len(what) } -func (what ByTechnicalAssetRiskSeverityAndTitleSortStillAtRisk) Swap(i, j int) { - what[i], what[j] = what[j], what[i] -} -func (what ByTechnicalAssetRiskSeverityAndTitleSortStillAtRisk) Less(i, j int) bool { - risksLeft := ReduceToOnlyStillAtRisk(what[i].GeneratedRisks()) - risksRight := ReduceToOnlyStillAtRisk(what[j].GeneratedRisks()) - highestSeverityLeft := HighestSeverityStillAtRisk(risksLeft) - highestSeverityRight := HighestSeverityStillAtRisk(risksRight) - var result bool - if highestSeverityLeft == highestSeverityRight { - if len(risksLeft) == 0 && len(risksRight) > 0 { - return false - } else if len(risksLeft) > 0 && len(risksRight) == 0 { - return true - } else { - result = what[i].Title < what[j].Title - } - } else { - result = highestSeverityLeft > highestSeverityRight - } - if what[i].OutOfScope && what[j].OutOfScope { - result = what[i].Title < what[j].Title - } else if what[i].OutOfScope { - result = false - } else if what[j].OutOfScope { - result = true - } - return result -} - -type 
ByTechnicalAssetRAAAndTitleSort []TechnicalAsset - -func (what ByTechnicalAssetRAAAndTitleSort) Len() int { return len(what) } -func (what ByTechnicalAssetRAAAndTitleSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } -func (what ByTechnicalAssetRAAAndTitleSort) Less(i, j int) bool { - raaLeft := what[i].RAA - raaRight := what[j].RAA - if raaLeft == raaRight { - return what[i].Title < what[j].Title - } - return raaLeft > raaRight -} - -/* -type ByTechnicalAssetQuickWinsAndTitleSort []TechnicalAsset - -func (what ByTechnicalAssetQuickWinsAndTitleSort) Len() int { return len(what) } -func (what ByTechnicalAssetQuickWinsAndTitleSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } -func (what ByTechnicalAssetQuickWinsAndTitleSort) Less(i, j int) bool { - qwLeft := what[i].QuickWins() - qwRight := what[j].QuickWins() - if qwLeft == qwRight { - return what[i].Title < what[j].Title - } - return qwLeft > qwRight -} -*/ - -type ByTechnicalAssetTitleSort []TechnicalAsset - -func (what ByTechnicalAssetTitleSort) Len() int { return len(what) } -func (what ByTechnicalAssetTitleSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } -func (what ByTechnicalAssetTitleSort) Less(i, j int) bool { - return what[i].Title < what[j].Title -} - -type ByTrustBoundaryTitleSort []TrustBoundary - -func (what ByTrustBoundaryTitleSort) Len() int { return len(what) } -func (what ByTrustBoundaryTitleSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } -func (what ByTrustBoundaryTitleSort) Less(i, j int) bool { - return what[i].Title < what[j].Title -} - -type BySharedRuntimeTitleSort []SharedRuntime - -func (what BySharedRuntimeTitleSort) Len() int { return len(what) } -func (what BySharedRuntimeTitleSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } -func (what BySharedRuntimeTitleSort) Less(i, j int) bool { - return what[i].Title < what[j].Title -} - -type ByDataAssetTitleSort []DataAsset - -func (what ByDataAssetTitleSort) Len() int { return len(what) } 
-func (what ByDataAssetTitleSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } -func (what ByDataAssetTitleSort) Less(i, j int) bool { - return what[i].Title < what[j].Title -} - -type ByDataFormatAcceptedSort []DataFormat - -func (what ByDataFormatAcceptedSort) Len() int { return len(what) } -func (what ByDataFormatAcceptedSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } -func (what ByDataFormatAcceptedSort) Less(i, j int) bool { - return what[i].String() < what[j].String() -} - -type CommunicationLink struct { - Id, SourceId, TargetId, Title, Description string - Protocol Protocol - Tags []string - VPN, IpFiltered, Readonly bool - Authentication Authentication - Authorization Authorization - Usage Usage - DataAssetsSent, DataAssetsReceived []string - DiagramTweakWeight int - DiagramTweakConstraint bool -} - -func (what CommunicationLink) IsTaggedWithAny(tags ...string) bool { - return ContainsCaseInsensitiveAny(what.Tags, tags...) -} - -func (what CommunicationLink) IsTaggedWithBaseTag(baseTag string) bool { - return IsTaggedWithBaseTag(what.Tags, baseTag) -} - -type ByTechnicalCommunicationLinkIdSort []CommunicationLink - -func (what ByTechnicalCommunicationLinkIdSort) Len() int { return len(what) } -func (what ByTechnicalCommunicationLinkIdSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } -func (what ByTechnicalCommunicationLinkIdSort) Less(i, j int) bool { - return what[i].Id > what[j].Id -} - -type ByTechnicalCommunicationLinkTitleSort []CommunicationLink - -func (what ByTechnicalCommunicationLinkTitleSort) Len() int { return len(what) } -func (what ByTechnicalCommunicationLinkTitleSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } -func (what ByTechnicalCommunicationLinkTitleSort) Less(i, j int) bool { - return what[i].Title > what[j].Title -} - -type TrustBoundary struct { - Id, Title, Description string - Type TrustBoundaryType - Tags []string - TechnicalAssetsInside []string - TrustBoundariesNested []string -} - 
-func (what TrustBoundary) IsTaggedWithAny(tags ...string) bool { - return ContainsCaseInsensitiveAny(what.Tags, tags...) -} - -func (what TrustBoundary) IsTaggedWithBaseTag(baseTag string) bool { - return IsTaggedWithBaseTag(what.Tags, baseTag) -} - -func (what TrustBoundary) IsTaggedWithAnyTraversingUp(tags ...string) bool { - if what.IsTaggedWithAny(tags...) { - return true - } - parentID := what.ParentTrustBoundaryID() - if len(parentID) > 0 && ParsedModelRoot.TrustBoundaries[parentID].IsTaggedWithAnyTraversingUp(tags...) { - return true - } - return false -} - -func (what TrustBoundary) ParentTrustBoundaryID() string { - var result string - for _, candidate := range ParsedModelRoot.TrustBoundaries { - if Contains(candidate.TrustBoundariesNested, what.Id) { - result = candidate.Id - return result - } - } - return result -} - -func (what TrustBoundary) HighestConfidentiality() Confidentiality { - highest := Public - for _, id := range what.RecursivelyAllTechnicalAssetIDsInside() { - techAsset := ParsedModelRoot.TechnicalAssets[id] - if techAsset.HighestConfidentiality() > highest { - highest = techAsset.HighestConfidentiality() - } - } - return highest -} - -func (what TrustBoundary) HighestIntegrity() Criticality { - highest := Archive - for _, id := range what.RecursivelyAllTechnicalAssetIDsInside() { - techAsset := ParsedModelRoot.TechnicalAssets[id] - if techAsset.HighestIntegrity() > highest { - highest = techAsset.HighestIntegrity() - } - } - return highest -} - -func (what TrustBoundary) HighestAvailability() Criticality { - highest := Archive - for _, id := range what.RecursivelyAllTechnicalAssetIDsInside() { - techAsset := ParsedModelRoot.TechnicalAssets[id] - if techAsset.HighestAvailability() > highest { - highest = techAsset.HighestAvailability() - } - } - return highest -} - -type SharedRuntime struct { - Id, Title, Description string - Tags []string - TechnicalAssetsRunning []string -} - -func (what SharedRuntime) IsTaggedWithAny(tags ...string) 
bool { - return ContainsCaseInsensitiveAny(what.Tags, tags...) -} - -func (what SharedRuntime) IsTaggedWithBaseTag(baseTag string) bool { - return IsTaggedWithBaseTag(what.Tags, baseTag) -} - -func (what SharedRuntime) HighestConfidentiality() Confidentiality { - highest := Public - for _, id := range what.TechnicalAssetsRunning { - techAsset := ParsedModelRoot.TechnicalAssets[id] - if techAsset.HighestConfidentiality() > highest { - highest = techAsset.HighestConfidentiality() - } - } - return highest -} - -func (what SharedRuntime) HighestIntegrity() Criticality { - highest := Archive - for _, id := range what.TechnicalAssetsRunning { - techAsset := ParsedModelRoot.TechnicalAssets[id] - if techAsset.HighestIntegrity() > highest { - highest = techAsset.HighestIntegrity() - } - } - return highest -} - -func (what SharedRuntime) HighestAvailability() Criticality { - highest := Archive - for _, id := range what.TechnicalAssetsRunning { - techAsset := ParsedModelRoot.TechnicalAssets[id] - if techAsset.HighestAvailability() > highest { - highest = techAsset.HighestAvailability() - } - } - return highest -} - -func (what SharedRuntime) TechnicalAssetWithHighestRAA() TechnicalAsset { - result := ParsedModelRoot.TechnicalAssets[what.TechnicalAssetsRunning[0]] - for _, asset := range what.TechnicalAssetsRunning { - candidate := ParsedModelRoot.TechnicalAssets[asset] - if candidate.RAA > result.RAA { - result = candidate - } - } - return result -} - -func (what CommunicationLink) IsAcrossTrustBoundary() bool { - trustBoundaryOfSourceAsset := DirectContainingTrustBoundaryMappedByTechnicalAssetId[what.SourceId] - trustBoundaryOfTargetAsset := DirectContainingTrustBoundaryMappedByTechnicalAssetId[what.TargetId] - return trustBoundaryOfSourceAsset.Id != trustBoundaryOfTargetAsset.Id -} - -func (what CommunicationLink) IsAcrossTrustBoundaryNetworkOnly() bool { - trustBoundaryOfSourceAsset := DirectContainingTrustBoundaryMappedByTechnicalAssetId[what.SourceId] - if 
!trustBoundaryOfSourceAsset.Type.IsNetworkBoundary() { // find and use the parent boundary then - trustBoundaryOfSourceAsset = ParsedModelRoot.TrustBoundaries[trustBoundaryOfSourceAsset.ParentTrustBoundaryID()] - } - trustBoundaryOfTargetAsset := DirectContainingTrustBoundaryMappedByTechnicalAssetId[what.TargetId] - if !trustBoundaryOfTargetAsset.Type.IsNetworkBoundary() { // find and use the parent boundary then - trustBoundaryOfTargetAsset = ParsedModelRoot.TrustBoundaries[trustBoundaryOfTargetAsset.ParentTrustBoundaryID()] - } - return trustBoundaryOfSourceAsset.Id != trustBoundaryOfTargetAsset.Id && trustBoundaryOfTargetAsset.Type.IsNetworkBoundary() -} - -func (what CommunicationLink) HighestConfidentiality() Confidentiality { - highest := Public - for _, dataId := range what.DataAssetsSent { - dataAsset := ParsedModelRoot.DataAssets[dataId] - if dataAsset.Confidentiality > highest { - highest = dataAsset.Confidentiality - } - } - for _, dataId := range what.DataAssetsReceived { - dataAsset := ParsedModelRoot.DataAssets[dataId] - if dataAsset.Confidentiality > highest { - highest = dataAsset.Confidentiality - } - } - return highest -} - -func (what CommunicationLink) HighestIntegrity() Criticality { - highest := Archive - for _, dataId := range what.DataAssetsSent { - dataAsset := ParsedModelRoot.DataAssets[dataId] - if dataAsset.Integrity > highest { - highest = dataAsset.Integrity - } - } - for _, dataId := range what.DataAssetsReceived { - dataAsset := ParsedModelRoot.DataAssets[dataId] - if dataAsset.Integrity > highest { - highest = dataAsset.Integrity - } - } - return highest -} - -func (what CommunicationLink) HighestAvailability() Criticality { - highest := Archive - for _, dataId := range what.DataAssetsSent { - dataAsset := ParsedModelRoot.DataAssets[dataId] - if dataAsset.Availability > highest { - highest = dataAsset.Availability - } - } - for _, dataId := range what.DataAssetsReceived { - dataAsset := ParsedModelRoot.DataAssets[dataId] - if 
dataAsset.Availability > highest { - highest = dataAsset.Availability - } - } - return highest -} - -func (what CommunicationLink) DataAssetsSentSorted() []DataAsset { - result := make([]DataAsset, 0) - for _, assetID := range what.DataAssetsSent { - result = append(result, ParsedModelRoot.DataAssets[assetID]) - } - sort.Sort(ByDataAssetTitleSort(result)) - return result -} - -func (what CommunicationLink) DataAssetsReceivedSorted() []DataAsset { - result := make([]DataAsset, 0) - for _, assetID := range what.DataAssetsReceived { - result = append(result, ParsedModelRoot.DataAssets[assetID]) - } - sort.Sort(ByDataAssetTitleSort(result)) - return result -} - -type Author struct { - Name string `yaml:"name" json:"name"` - Homepage string `yaml:"homepage" json:"homepage"` -} - -type Overview struct { - Description string `yaml:"description" json:"description"` - Images []map[string]string `yaml:"images" json:"images"` // yes, array of map here, as array keeps the order of the image keys -} - -type ParsedModel struct { - Author Author - Title string - Date time.Time - ManagementSummaryComment string - BusinessOverview Overview - TechnicalOverview Overview - BusinessCriticality Criticality - SecurityRequirements map[string]string - Questions map[string]string - AbuseCases map[string]string - TagsAvailable []string - DataAssets map[string]DataAsset - TechnicalAssets map[string]TechnicalAsset - TrustBoundaries map[string]TrustBoundary - SharedRuntimes map[string]SharedRuntime - IndividualRiskCategories map[string]RiskCategory - RiskTracking map[string]RiskTracking - DiagramTweakNodesep, DiagramTweakRanksep int - DiagramTweakEdgeLayout string - DiagramTweakSuppressEdgeLabels bool - DiagramTweakLayoutLeftToRight bool - DiagramTweakInvisibleConnectionsBetweenAssets []string - DiagramTweakSameRankAssets []string -} - -func SortedTechnicalAssetIDs() []string { - res := make([]string, 0) - for id := range ParsedModelRoot.TechnicalAssets { - res = append(res, id) - } - 
sort.Strings(res) - return res -} - -func TagsActuallyUsed() []string { - result := make([]string, 0) - for _, tag := range ParsedModelRoot.TagsAvailable { - if len(TechnicalAssetsTaggedWithAny(tag)) > 0 || - len(CommunicationLinksTaggedWithAny(tag)) > 0 || - len(DataAssetsTaggedWithAny(tag)) > 0 || - len(TrustBoundariesTaggedWithAny(tag)) > 0 || - len(SharedRuntimesTaggedWithAny(tag)) > 0 { - result = append(result, tag) - } - } - return result -} - -// === Sorting stuff ===================================== - -// as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - -func SortedKeysOfIndividualRiskCategories() []string { - keys := make([]string, 0) - for k := range ParsedModelRoot.IndividualRiskCategories { - keys = append(keys, k) - } - sort.Strings(keys) - return keys -} - -// as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - -func SortedKeysOfSecurityRequirements() []string { - keys := make([]string, 0) - for k := range ParsedModelRoot.SecurityRequirements { - keys = append(keys, k) - } - sort.Strings(keys) - return keys -} - -// as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - -func SortedKeysOfAbuseCases() []string { - keys := make([]string, 0) - for k := range ParsedModelRoot.AbuseCases { - keys = append(keys, k) - } - sort.Strings(keys) - return keys -} - -// as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - -func SortedKeysOfQuestions() []string { - keys := make([]string, 0) - for k := range ParsedModelRoot.Questions { - keys = append(keys, k) - } - sort.Strings(keys) - return keys -} - -// as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - -func SortedKeysOfDataAssets() []string { - keys := make([]string, 0) - for k := range ParsedModelRoot.DataAssets { - keys = append(keys, k) - } - sort.Strings(keys) - return keys -} - -// as 
in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - -func SortedKeysOfTechnicalAssets() []string { - keys := make([]string, 0) - for k := range ParsedModelRoot.TechnicalAssets { - keys = append(keys, k) - } - sort.Strings(keys) - return keys -} - -func TechnicalAssetsTaggedWithAny(tags ...string) []TechnicalAsset { - result := make([]TechnicalAsset, 0) - for _, candidate := range ParsedModelRoot.TechnicalAssets { - if candidate.IsTaggedWithAny(tags...) { - result = append(result, candidate) - } - } - return result -} - -func CommunicationLinksTaggedWithAny(tags ...string) []CommunicationLink { - result := make([]CommunicationLink, 0) - for _, asset := range ParsedModelRoot.TechnicalAssets { - for _, candidate := range asset.CommunicationLinks { - if candidate.IsTaggedWithAny(tags...) { - result = append(result, candidate) - } - } - } - return result -} - -func DataAssetsTaggedWithAny(tags ...string) []DataAsset { - result := make([]DataAsset, 0) - for _, candidate := range ParsedModelRoot.DataAssets { - if candidate.IsTaggedWithAny(tags...) { - result = append(result, candidate) - } - } - return result -} - -func TrustBoundariesTaggedWithAny(tags ...string) []TrustBoundary { - result := make([]TrustBoundary, 0) - for _, candidate := range ParsedModelRoot.TrustBoundaries { - if candidate.IsTaggedWithAny(tags...) { - result = append(result, candidate) - } - } - return result -} - -func SharedRuntimesTaggedWithAny(tags ...string) []SharedRuntime { - result := make([]SharedRuntime, 0) - for _, candidate := range ParsedModelRoot.SharedRuntimes { - if candidate.IsTaggedWithAny(tags...) 
{ - result = append(result, candidate) - } - } - return result -} - -// as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - -func SortedTechnicalAssetsByTitle() []TechnicalAsset { - assets := make([]TechnicalAsset, 0) - for _, asset := range ParsedModelRoot.TechnicalAssets { - assets = append(assets, asset) - } - sort.Sort(ByTechnicalAssetTitleSort(assets)) - return assets -} - -// as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - -func SortedDataAssetsByTitle() []DataAsset { - assets := make([]DataAsset, 0) - for _, asset := range ParsedModelRoot.DataAssets { - assets = append(assets, asset) - } - sort.Sort(ByDataAssetTitleSort(assets)) - return assets -} - -// as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - -func SortedDataAssetsByDataBreachProbabilityAndTitleStillAtRisk() []DataAsset { - assets := make([]DataAsset, 0) - for _, asset := range ParsedModelRoot.DataAssets { - assets = append(assets, asset) - } - sort.Sort(ByDataAssetDataBreachProbabilityAndTitleSortStillAtRisk(assets)) - return assets -} - -// as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - -func SortedDataAssetsByDataBreachProbabilityAndTitle() []DataAsset { - assets := make([]DataAsset, 0) - for _, asset := range ParsedModelRoot.DataAssets { - assets = append(assets, asset) - } - sort.Sort(ByDataAssetDataBreachProbabilityAndTitleSortStillAtRisk(assets)) - return assets -} - -// as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - -func SortedTechnicalAssetsByRiskSeverityAndTitle() []TechnicalAsset { - assets := make([]TechnicalAsset, 0) - for _, asset := range ParsedModelRoot.TechnicalAssets { - assets = append(assets, asset) - } - sort.Sort(ByTechnicalAssetRiskSeverityAndTitleSortStillAtRisk(assets)) - return assets -} - -// as in Go ranging over map is random order, 
range over them in sorted (hence reproducible) way: - -func SortedTechnicalAssetsByRAAAndTitle() []TechnicalAsset { - assets := make([]TechnicalAsset, 0) - for _, asset := range ParsedModelRoot.TechnicalAssets { - assets = append(assets, asset) - } - sort.Sort(ByTechnicalAssetRAAAndTitleSort(assets)) - return assets -} - -/* -// as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: -func SortedTechnicalAssetsByQuickWinsAndTitle() []TechnicalAsset { - assets := make([]TechnicalAsset, 0) - for _, asset := range ParsedModelRoot.TechnicalAssets { - if !asset.OutOfScope && asset.QuickWins() > 0 { - assets = append(assets, asset) - } - } - sort.Sort(ByTechnicalAssetQuickWinsAndTitleSort(assets)) - return assets -} -*/ - -func OutOfScopeTechnicalAssets() []TechnicalAsset { - assets := make([]TechnicalAsset, 0) - for _, asset := range ParsedModelRoot.TechnicalAssets { - if asset.OutOfScope { - assets = append(assets, asset) - } - } - sort.Sort(ByTechnicalAssetTitleSort(assets)) - return assets -} - -// as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - -func SortedKeysOfTrustBoundaries() []string { - keys := make([]string, 0) - for k := range ParsedModelRoot.TrustBoundaries { - keys = append(keys, k) - } - sort.Strings(keys) - return keys -} - -func SortedTrustBoundariesByTitle() []TrustBoundary { - boundaries := make([]TrustBoundary, 0) - for _, boundary := range ParsedModelRoot.TrustBoundaries { - boundaries = append(boundaries, boundary) - } - sort.Sort(ByTrustBoundaryTitleSort(boundaries)) - return boundaries -} - -// as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - -func SortedKeysOfSharedRuntime() []string { - keys := make([]string, 0) - for k := range ParsedModelRoot.SharedRuntimes { - keys = append(keys, k) - } - sort.Strings(keys) - return keys -} - -func SortedSharedRuntimesByTitle() []SharedRuntime { - result := 
make([]SharedRuntime, 0) - for _, runtime := range ParsedModelRoot.SharedRuntimes { - result = append(result, runtime) - } - sort.Sort(BySharedRuntimeTitleSort(result)) - return result -} - -func QuestionsUnanswered() int { - result := 0 - for _, answer := range ParsedModelRoot.Questions { - if len(strings.TrimSpace(answer)) == 0 { - result++ - } - } - return result -} - -// === Style stuff ======================================= - -// Line Styles: - -// dotted when model forgery attempt (i.e. nothing being sent and received) - -func (what CommunicationLink) DetermineArrowLineStyle() string { - if len(what.DataAssetsSent) == 0 && len(what.DataAssetsReceived) == 0 { - return "dotted" // dotted, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... - } - if what.Usage == DevOps { - return "dashed" - } - return "solid" -} - -// dotted when model forgery attempt (i.e. nothing being processed or stored) - -func (what TechnicalAsset) DetermineShapeBorderLineStyle() string { - if len(what.DataAssetsProcessed) == 0 && len(what.DataAssetsStored) == 0 || what.OutOfScope { - return "dotted" // dotted, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... 
- } - return "solid" -} - -// 3 when redundant - -func (what TechnicalAsset) DetermineShapePeripheries() int { - if what.Redundant { - return 2 - } - return 1 -} - -func (what TechnicalAsset) DetermineShapeStyle() string { - return "filled" -} - -func (what TechnicalAsset) GetTrustBoundaryId() string { - for _, trustBoundary := range ParsedModelRoot.TrustBoundaries { - for _, techAssetInside := range trustBoundary.TechnicalAssetsInside { - if techAssetInside == what.Id { - return trustBoundary.Id - } - } - } - return "" -} - -// Pen Widths: - -func (what CommunicationLink) DetermineArrowPenWidth() string { - if what.DetermineArrowColor() == colors.Pink { - return fmt.Sprintf("%f", 3.0) - } - if what.DetermineArrowColor() != colors.Black { - return fmt.Sprintf("%f", 2.5) - } - return fmt.Sprintf("%f", 1.5) -} - -func (what TechnicalAsset) DetermineShapeBorderPenWidth() string { - if what.DetermineShapeBorderColor() == colors.Pink { - return fmt.Sprintf("%f", 3.5) - } - if what.DetermineShapeBorderColor() != colors.Black { - return fmt.Sprintf("%f", 3.0) - } - return fmt.Sprintf("%f", 2.0) -} - -/* -// Loops over all data assets (stored and processed by this technical asset) and determines for each -// data asset, how many percentage of the data risk is reduced when this technical asset has all risks mitigated. -// Example: This means if the data asset is loosing a risk and thus getting from red to amber it counts as 1. -// Other example: When only one out of four lines (see data risk mapping) leading to red tech assets are removed by -// the mitigations, then this counts as 0.25. The overall sum is returned. 
-func (what TechnicalAsset) QuickWins() float64 { - result := 0.0 - uniqueDataAssetsStoredAndProcessed := make(map[string]interface{}) - for _, dataAssetId := range what.DataAssetsStored { - uniqueDataAssetsStoredAndProcessed[dataAssetId] = true - } - for _, dataAssetId := range what.DataAssetsProcessed { - uniqueDataAssetsStoredAndProcessed[dataAssetId] = true - } - highestSeverity := HighestSeverityStillAtRisk(what.GeneratedRisks()) - for dataAssetId, _ := range uniqueDataAssetsStoredAndProcessed { - dataAsset := ParsedModelRoot.DataAssets[dataAssetId] - if dataAsset.IdentifiedRiskSeverityStillAtRisk() <= highestSeverity { - howManySameLevelCausingUsagesOfThisData := 0.0 - for techAssetId, risks := range dataAsset.IdentifiedRisksByResponsibleTechnicalAssetId() { - if !ParsedModelRoot.TechnicalAssets[techAssetId].OutOfScope { - for _, risk := range risks { - if len(risk.MostRelevantTechnicalAssetId) > 0 { // T O D O caching of generated risks inside the method? - if HighestSeverityStillAtRisk(ParsedModelRoot.TechnicalAssets[risk.MostRelevantTechnicalAssetId].GeneratedRisks()) == highestSeverity { - howManySameLevelCausingUsagesOfThisData++ - break - } - } - } - } - } - if howManySameLevelCausingUsagesOfThisData > 0 { - result += 1.0 / howManySameLevelCausingUsagesOfThisData - } - } - } - return result -} -*/ - -func (what CommunicationLink) IsBidirectional() bool { - return len(what.DataAssetsSent) > 0 && len(what.DataAssetsReceived) > 0 -} - -// Contains tells whether a contains x (in an unsorted slice) -func Contains(a []string, x string) bool { - for _, n := range a { - if x == n { - return true - } - } - return false -} - -func ContainsCaseInsensitiveAny(a []string, x ...string) bool { - for _, n := range a { - for _, c := range x { - if strings.TrimSpace(strings.ToLower(c)) == strings.TrimSpace(strings.ToLower(n)) { - return true - } - } - } - return false -} - -func (what TechnicalAsset) IsZero() bool { - return len(what.Id) == 0 -} - -func (what 
TechnicalAsset) ProcessesOrStoresDataAsset(dataAssetId string) bool { - if Contains(what.DataAssetsProcessed, dataAssetId) { - return true - } - if Contains(what.DataAssetsStored, dataAssetId) { - return true - } - return false -} - -// red when >= confidential data stored in unencrypted technical asset - -func (what TechnicalAsset) DetermineLabelColor() string { - // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here - // Check for red - if what.Integrity == MissionCritical { - return colors.Red - } - for _, storedDataAsset := range what.DataAssetsStored { - if ParsedModelRoot.DataAssets[storedDataAsset].Integrity == MissionCritical { - return colors.Red - } - } - for _, processedDataAsset := range what.DataAssetsProcessed { - if ParsedModelRoot.DataAssets[processedDataAsset].Integrity == MissionCritical { - return colors.Red - } - } - // Check for amber - if what.Integrity == Critical { - return colors.Amber - } - for _, storedDataAsset := range what.DataAssetsStored { - if ParsedModelRoot.DataAssets[storedDataAsset].Integrity == Critical { - return colors.Amber - } - } - for _, processedDataAsset := range what.DataAssetsProcessed { - if ParsedModelRoot.DataAssets[processedDataAsset].Integrity == Critical { - return colors.Amber - } - } - return colors.Black - /* - if what.Encrypted { - return colors.Black - } else { - if what.Confidentiality == StrictlyConfidential { - return colors.Red - } - for _, storedDataAsset := range what.DataAssetsStored { - if ParsedModelRoot.DataAssets[storedDataAsset].Confidentiality == StrictlyConfidential { - return colors.Red - } - } - if what.Confidentiality == Confidential { - return colors.Amber - } - for _, storedDataAsset := range what.DataAssetsStored { - if ParsedModelRoot.DataAssets[storedDataAsset].Confidentiality == Confidential { - return colors.Amber - } - } - return colors.Black - } - */ -} - -// red when mission-critical integrity, but still unauthenticated 
(non-readonly) channels access it -// amber when critical integrity, but still unauthenticated (non-readonly) channels access it -// pink when model forgery attempt (i.e. nothing being processed or stored) - -func (what TechnicalAsset) DetermineShapeBorderColor() string { - // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here - // Check for red - if what.Confidentiality == StrictlyConfidential { - return colors.Red - } - for _, storedDataAsset := range what.DataAssetsStored { - if ParsedModelRoot.DataAssets[storedDataAsset].Confidentiality == StrictlyConfidential { - return colors.Red - } - } - for _, processedDataAsset := range what.DataAssetsProcessed { - if ParsedModelRoot.DataAssets[processedDataAsset].Confidentiality == StrictlyConfidential { - return colors.Red - } - } - // Check for amber - if what.Confidentiality == Confidential { - return colors.Amber - } - for _, storedDataAsset := range what.DataAssetsStored { - if ParsedModelRoot.DataAssets[storedDataAsset].Confidentiality == Confidential { - return colors.Amber - } - } - for _, processedDataAsset := range what.DataAssetsProcessed { - if ParsedModelRoot.DataAssets[processedDataAsset].Confidentiality == Confidential { - return colors.Amber - } - } - return colors.Black - /* - if what.Integrity == MissionCritical { - for _, dataFlow := range IncomingTechnicalCommunicationLinksMappedByTargetId[what.Id] { - if !dataFlow.Readonly && dataFlow.Authentication == NoneAuthentication { - return colors.Red - } - } - } - - if what.Integrity == Critical { - for _, dataFlow := range IncomingTechnicalCommunicationLinksMappedByTargetId[what.Id] { - if !dataFlow.Readonly && dataFlow.Authentication == NoneAuthentication { - return colors.Amber - } - } - } - - if len(what.DataAssetsProcessed) == 0 && len(what.DataAssetsStored) == 0 { - return colors.Pink // pink, because it's strange when too many technical assets process no data... 
some are ok, but many in a diagram is a sign of model forgery... - } - - return colors.Black - */ -} - -func (what CommunicationLink) DetermineLabelColor() string { - // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here - /* - if dataFlow.Protocol.IsEncrypted() { - return colors.Gray - } else {*/ - // check for red - for _, sentDataAsset := range what.DataAssetsSent { - if ParsedModelRoot.DataAssets[sentDataAsset].Integrity == MissionCritical { - return colors.Red - } - } - for _, receivedDataAsset := range what.DataAssetsReceived { - if ParsedModelRoot.DataAssets[receivedDataAsset].Integrity == MissionCritical { - return colors.Red - } - } - // check for amber - for _, sentDataAsset := range what.DataAssetsSent { - if ParsedModelRoot.DataAssets[sentDataAsset].Integrity == Critical { - return colors.Amber - } - } - for _, receivedDataAsset := range what.DataAssetsReceived { - if ParsedModelRoot.DataAssets[receivedDataAsset].Integrity == Critical { - return colors.Amber - } - } - // default - return colors.Gray - -} - -// pink when model forgery attempt (i.e. nothing being sent and received) - -func (what CommunicationLink) DetermineArrowColor() string { - // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here - if len(what.DataAssetsSent) == 0 && len(what.DataAssetsReceived) == 0 || - what.Protocol == UnknownProtocol { - return colors.Pink // pink, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... 
- } - if what.Usage == DevOps { - return colors.MiddleLightGray - } else if what.VPN { - return colors.DarkBlue - } else if what.IpFiltered { - return colors.Brown - } - // check for red - for _, sentDataAsset := range what.DataAssetsSent { - if ParsedModelRoot.DataAssets[sentDataAsset].Confidentiality == StrictlyConfidential { - return colors.Red - } - } - for _, receivedDataAsset := range what.DataAssetsReceived { - if ParsedModelRoot.DataAssets[receivedDataAsset].Confidentiality == StrictlyConfidential { - return colors.Red - } - } - // check for amber - for _, sentDataAsset := range what.DataAssetsSent { - if ParsedModelRoot.DataAssets[sentDataAsset].Confidentiality == Confidential { - return colors.Amber - } - } - for _, receivedDataAsset := range what.DataAssetsReceived { - if ParsedModelRoot.DataAssets[receivedDataAsset].Confidentiality == Confidential { - return colors.Amber - } - } - // default - return colors.Black - /* - } else if dataFlow.Authentication != NoneAuthentication { - return colors.Black - } else { - // check for red - for _, sentDataAsset := range dataFlow.DataAssetsSent { // first check if any red? - if ParsedModelRoot.DataAssets[sentDataAsset].Integrity == MissionCritical { - return colors.Red - } - } - for _, receivedDataAsset := range dataFlow.DataAssetsReceived { // first check if any red? - if ParsedModelRoot.DataAssets[receivedDataAsset].Integrity == MissionCritical { - return colors.Red - } - } - // check for amber - for _, sentDataAsset := range dataFlow.DataAssetsSent { // then check if any amber? - if ParsedModelRoot.DataAssets[sentDataAsset].Integrity == Critical { - return colors.Amber - } - } - for _, receivedDataAsset := range dataFlow.DataAssetsReceived { // then check if any amber? 
- if ParsedModelRoot.DataAssets[receivedDataAsset].Integrity == Critical { - return colors.Amber - } - } - return colors.Black - } - */ -} - -func (what TechnicalAsset) DetermineShapeFillColor() string { - fillColor := colors.VeryLightGray - if len(what.DataAssetsProcessed) == 0 && len(what.DataAssetsStored) == 0 || - what.Technology == UnknownTechnology { - fillColor = colors.LightPink // lightPink, because it's strange when too many technical assets process no data... some ok, but many in a diagram ist a sign of model forgery... - } else if len(what.CommunicationLinks) == 0 && len(IncomingTechnicalCommunicationLinksMappedByTargetId[what.Id]) == 0 { - fillColor = colors.LightPink - } else if what.Internet { - fillColor = colors.ExtremeLightBlue - } else if what.OutOfScope { - fillColor = colors.OutOfScopeFancy - } else if what.CustomDevelopedParts { - fillColor = colors.CustomDevelopedParts - } - switch what.Machine { - case Physical: - fillColor = colors.DarkenHexColor(fillColor) - case Container: - fillColor = colors.BrightenHexColor(fillColor) - case Serverless: - fillColor = colors.BrightenHexColor(colors.BrightenHexColor(fillColor)) - case Virtual: - } - return fillColor -} - -// === Risk stuff ======================================== - -type DataBreachProbability int - -const ( - Improbable DataBreachProbability = iota - Possible - Probable -) - -func DataBreachProbabilityValues() []TypeEnum { - return []TypeEnum{ - Improbable, - Possible, - Probable, - } -} - -var DataBreachProbabilityTypeDescription = [...]TypeDescription{ - {"improbable", "Improbable"}, - {"possible", "Possible"}, - {"probable", "Probable"}, -} - -func (what DataBreachProbability) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - return DataBreachProbabilityTypeDescription[what].Name -} - -func (what DataBreachProbability) Explain() string { - return DataBreachProbabilityTypeDescription[what].Description -} - -func (what DataBreachProbability) Title() 
string { - return [...]string{"Improbable", "Possible", "Probable"}[what] -} - -func (what DataBreachProbability) MarshalJSON() ([]byte, error) { - return json.Marshal(what.String()) -} - -func CalculateSeverity(likelihood RiskExploitationLikelihood, impact RiskExploitationImpact) RiskSeverity { - result := likelihood.Weight() * impact.Weight() - if result <= 1 { - return LowSeverity - } - if result <= 3 { - return MediumSeverity - } - if result <= 8 { - return ElevatedSeverity - } - if result <= 12 { - return HighSeverity - } - return CriticalSeverity -} - -type RiskSeverity int - -const ( - LowSeverity RiskSeverity = iota - MediumSeverity - ElevatedSeverity - HighSeverity - CriticalSeverity -) - -func RiskSeverityValues() []TypeEnum { - return []TypeEnum{ - LowSeverity, - MediumSeverity, - ElevatedSeverity, - HighSeverity, - CriticalSeverity, - } -} - -var RiskSeverityTypeDescription = [...]TypeDescription{ - {"low", "Low"}, - {"medium", "Medium"}, - {"elevated", "Elevated"}, - {"high", "High"}, - {"critical", "Critical"}, -} - -func (what RiskSeverity) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - return RiskSeverityTypeDescription[what].Name -} - -func (what RiskSeverity) Explain() string { - return RiskSeverityTypeDescription[what].Description -} - -func (what RiskSeverity) Title() string { - return [...]string{"Low", "Medium", "Elevated", "High", "Critical"}[what] -} - -func (what RiskSeverity) MarshalJSON() ([]byte, error) { - return json.Marshal(what.String()) -} - -type RiskExploitationLikelihood int - -const ( - Unlikely RiskExploitationLikelihood = iota - Likely - VeryLikely - Frequent -) - -func RiskExploitationLikelihoodValues() []TypeEnum { - return []TypeEnum{ - Unlikely, - Likely, - VeryLikely, - Frequent, - } -} - -var RiskExploitationLikelihoodTypeDescription = [...]TypeDescription{ - {"unlikely", "Unlikely"}, - {"likely", "Likely"}, - {"very-likely", "Very-Likely"}, - {"frequent", "Frequent"}, -} - -func 
(what RiskExploitationLikelihood) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - return RiskExploitationLikelihoodTypeDescription[what].Name -} - -func (what RiskExploitationLikelihood) Explain() string { - return RiskExploitationLikelihoodTypeDescription[what].Description -} - -func (what RiskExploitationLikelihood) Title() string { - return [...]string{"Unlikely", "Likely", "Very Likely", "Frequent"}[what] -} - -func (what RiskExploitationLikelihood) Weight() int { - return [...]int{1, 2, 3, 4}[what] -} - -func (what RiskExploitationLikelihood) MarshalJSON() ([]byte, error) { - return json.Marshal(what.String()) -} - -type RiskExploitationImpact int - -const ( - LowImpact RiskExploitationImpact = iota - MediumImpact - HighImpact - VeryHighImpact -) - -func RiskExploitationImpactValues() []TypeEnum { - return []TypeEnum{ - LowImpact, - MediumImpact, - HighImpact, - VeryHighImpact, - } -} - -var RiskExploitationImpactTypeDescription = [...]TypeDescription{ - {"low", "Low"}, - {"medium", "Medium"}, - {"high", "High"}, - {"very-high", "Very High"}, -} - -func (what RiskExploitationImpact) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - return RiskExploitationImpactTypeDescription[what].Name -} - -func (what RiskExploitationImpact) Explain() string { - return RiskExploitationImpactTypeDescription[what].Description -} - -func (what RiskExploitationImpact) Title() string { - return [...]string{"Low", "Medium", "High", "Very High"}[what] -} - -func (what RiskExploitationImpact) Weight() int { - return [...]int{1, 2, 3, 4}[what] -} - -func (what RiskExploitationImpact) MarshalJSON() ([]byte, error) { - return json.Marshal(what.String()) -} - -type RiskFunction int - -const ( - BusinessSide RiskFunction = iota - Architecture - Development - Operations -) - -func RiskFunctionValues() []TypeEnum { - return []TypeEnum{ - BusinessSide, - Architecture, - Development, - Operations, - } -} - -var 
RiskFunctionTypeDescription = [...]TypeDescription{ - {"business-side", "Business"}, - {"architecture", "Architecture"}, - {"development", "Development"}, - {"operations", "Operations"}, -} - -func (what RiskFunction) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - return RiskFunctionTypeDescription[what].Name -} - -func (what RiskFunction) Explain() string { - return RiskFunctionTypeDescription[what].Description -} - -func (what RiskFunction) Title() string { - return [...]string{"Business Side", "Architecture", "Development", "Operations"}[what] -} - -func (what RiskFunction) MarshalJSON() ([]byte, error) { - return json.Marshal(what.String()) -} - -func (what *RiskFunction) UnmarshalJSON(value []byte) error { - text := "" - unmarshalError := json.Unmarshal(value, &text) - if unmarshalError != nil { - return unmarshalError - } - - for n, v := range RiskFunctionTypeDescription { - if strings.ToLower(v.Name) == strings.ToLower(text) { - *what = RiskFunction(n) - return nil - } - } - - return fmt.Errorf("unknown value %q for risk function\n", text) -} - -type STRIDE int - -const ( - Spoofing STRIDE = iota - Tampering - Repudiation - InformationDisclosure - DenialOfService - ElevationOfPrivilege -) - -func STRIDEValues() []TypeEnum { - return []TypeEnum{ - Spoofing, - Tampering, - Repudiation, - InformationDisclosure, - DenialOfService, - ElevationOfPrivilege, - } -} - -var StrideTypeDescription = [...]TypeDescription{ - {"spoofing", "Spoofing - Authenticity"}, - {"tampering", "Tampering - Integrity"}, - {"repudiation", "Repudiation - Non-repudiability"}, - {"information-disclosure", "Information disclosure - Confidentiality"}, - {"denial-of-service", "Denial of service - Availability"}, - {"elevation-of-privilege", "Elevation of privilege - Authorization"}, -} - -func (what STRIDE) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - return StrideTypeDescription[what].Name -} - -func (what 
STRIDE) Explain() string { - return StrideTypeDescription[what].Description -} - -func (what STRIDE) Title() string { - return [...]string{"Spoofing", "Tampering", "Repudiation", "Information Disclosure", "Denial of Service", "Elevation of Privilege"}[what] -} - -func (what STRIDE) MarshalJSON() ([]byte, error) { - return json.Marshal(what.String()) -} - -func (what *STRIDE) UnmarshalJSON(value []byte) error { - text := "" - unmarshalError := json.Unmarshal(value, &text) - if unmarshalError != nil { - return unmarshalError - } - - for n, v := range StrideTypeDescription { - if strings.ToLower(v.Name) == strings.ToLower(text) { - *what = STRIDE(n) - return nil - } - } - - return fmt.Errorf("unknown value %q for STRIDE category\n", text) -} - -type MacroDetails struct { - ID, Title, Description string -} - -type MacroQuestion struct { - ID, Title, Description string - PossibleAnswers []string - MultiSelect bool - DefaultAnswer string -} - -const NoMoreQuestionsID = "" - -func NoMoreQuestions() MacroQuestion { - return MacroQuestion{ - ID: NoMoreQuestionsID, - Title: "", - Description: "", - PossibleAnswers: nil, - MultiSelect: false, - DefaultAnswer: "", - } -} - -func (what MacroQuestion) NoMoreQuestions() bool { - return what.ID == NoMoreQuestionsID -} - -func (what MacroQuestion) IsValueConstrained() bool { - return what.PossibleAnswers != nil && len(what.PossibleAnswers) > 0 -} - -func (what MacroQuestion) IsMatchingValueConstraint(answer string) bool { - if what.IsValueConstrained() { - for _, val := range what.PossibleAnswers { - if strings.ToLower(val) == strings.ToLower(answer) { - return true - } - } - return false - } - return true -} - -type RiskCategory struct { - // TODO: refactor all "Id" here and elsewhere to "ID" - Id string - Title string - Description string - Impact string - ASVS string - CheatSheet string - Action string - Mitigation string - Check string - DetectionLogic string - RiskAssessment string - FalsePositives string - Function 
RiskFunction - STRIDE STRIDE - ModelFailurePossibleReason bool - CWE int -} - -type ByRiskCategoryTitleSort []RiskCategory - -func (what ByRiskCategoryTitleSort) Len() int { return len(what) } -func (what ByRiskCategoryTitleSort) Swap(i, j int) { - what[i], what[j] = what[j], what[i] -} -func (what ByRiskCategoryTitleSort) Less(i, j int) bool { - return what[i].Title < what[j].Title -} - -type ByRiskCategoryHighestContainingRiskSeveritySortStillAtRisk []RiskCategory - -func (what ByRiskCategoryHighestContainingRiskSeveritySortStillAtRisk) Len() int { return len(what) } -func (what ByRiskCategoryHighestContainingRiskSeveritySortStillAtRisk) Swap(i, j int) { - what[i], what[j] = what[j], what[i] -} -func (what ByRiskCategoryHighestContainingRiskSeveritySortStillAtRisk) Less(i, j int) bool { - risksLeft := ReduceToOnlyStillAtRisk(GeneratedRisksByCategory[what[i]]) - risksRight := ReduceToOnlyStillAtRisk(GeneratedRisksByCategory[what[j]]) - highestLeft := HighestSeverityStillAtRisk(risksLeft) - highestRight := HighestSeverityStillAtRisk(risksRight) - if highestLeft == highestRight { - if len(risksLeft) == 0 && len(risksRight) > 0 { - return false - } - if len(risksLeft) > 0 && len(risksRight) == 0 { - return true - } - return what[i].Title < what[j].Title - } - return highestLeft > highestRight -} - -type RiskStatistics struct { - // TODO add also some more like before / after (i.e. with mitigation applied) - Risks map[string]map[string]int `yaml:"risks" json:"risks"` -} - -type Risk struct { - Category RiskCategory `yaml:"-" json:"-"` // just for navigational convenience... 
not JSON marshalled - CategoryId string `yaml:"category" json:"category"` // used for better JSON marshalling, is assigned in risk evaluation phase automatically - RiskStatus RiskStatus `yaml:"risk_status" json:"risk_status"` // used for better JSON marshalling, is assigned in risk evaluation phase automatically - Severity RiskSeverity `yaml:"severity" json:"severity"` - ExploitationLikelihood RiskExploitationLikelihood `yaml:"exploitation_likelihood" json:"exploitation_likelihood"` - ExploitationImpact RiskExploitationImpact `yaml:"exploitation_impact" json:"exploitation_impact"` - Title string `yaml:"title" json:"title"` - SyntheticId string `yaml:"synthetic_id" json:"synthetic_id"` - MostRelevantDataAssetId string `yaml:"most_relevant_data_asset" json:"most_relevant_data_asset"` - MostRelevantTechnicalAssetId string `yaml:"most_relevant_technical_asset" json:"most_relevant_technical_asset"` - MostRelevantTrustBoundaryId string `yaml:"most_relevant_trust_boundary" json:"most_relevant_trust_boundary"` - MostRelevantSharedRuntimeId string `yaml:"most_relevant_shared_runtime" json:"most_relevant_shared_runtime"` - MostRelevantCommunicationLinkId string `yaml:"most_relevant_communication_link" json:"most_relevant_communication_link"` - DataBreachProbability DataBreachProbability `yaml:"data_breach_probability" json:"data_breach_probability"` - DataBreachTechnicalAssetIDs []string `yaml:"data_breach_technical_assets" json:"data_breach_technical_assets"` - // TODO: refactor all "Id" here to "ID"? -} - -func (what Risk) GetRiskTracking() RiskTracking { // TODO: Unify function naming regarding Get etc. 
- var result RiskTracking - if riskTracking, ok := ParsedModelRoot.RiskTracking[what.SyntheticId]; ok { - result = riskTracking - } - return result -} - -func (what Risk) GetRiskTrackingStatusDefaultingUnchecked() RiskStatus { - if riskTracking, ok := ParsedModelRoot.RiskTracking[what.SyntheticId]; ok { - return riskTracking.Status - } - return Unchecked -} - -func (what Risk) IsRiskTracked() bool { - if _, ok := ParsedModelRoot.RiskTracking[what.SyntheticId]; ok { - return true - } - return false -} - -type ByRiskSeveritySort []Risk - -func (what ByRiskSeveritySort) Len() int { return len(what) } -func (what ByRiskSeveritySort) Swap(i, j int) { - what[i], what[j] = what[j], what[i] -} -func (what ByRiskSeveritySort) Less(i, j int) bool { - if what[i].Severity == what[j].Severity { - trackingStatusLeft := what[i].GetRiskTrackingStatusDefaultingUnchecked() - trackingStatusRight := what[j].GetRiskTrackingStatusDefaultingUnchecked() - if trackingStatusLeft == trackingStatusRight { - impactLeft := what[i].ExploitationImpact - impactRight := what[j].ExploitationImpact - if impactLeft == impactRight { - likelihoodLeft := what[i].ExploitationLikelihood - likelihoodRight := what[j].ExploitationLikelihood - if likelihoodLeft == likelihoodRight { - return what[i].Title < what[j].Title - } else { - return likelihoodLeft > likelihoodRight - } - } else { - return impactLeft > impactRight - } - } else { - return trackingStatusLeft < trackingStatusRight - } - } - return what[i].Severity > what[j].Severity -} - -type ByDataBreachProbabilitySort []Risk - -func (what ByDataBreachProbabilitySort) Len() int { return len(what) } -func (what ByDataBreachProbabilitySort) Swap(i, j int) { - what[i], what[j] = what[j], what[i] -} -func (what ByDataBreachProbabilitySort) Less(i, j int) bool { - if what[i].DataBreachProbability == what[j].DataBreachProbability { - trackingStatusLeft := what[i].GetRiskTrackingStatusDefaultingUnchecked() - trackingStatusRight := 
what[j].GetRiskTrackingStatusDefaultingUnchecked() - if trackingStatusLeft == trackingStatusRight { - return what[i].Title < what[j].Title - } else { - return trackingStatusLeft < trackingStatusRight - } - } - return what[i].DataBreachProbability > what[j].DataBreachProbability -} - -type RiskTracking struct { - SyntheticRiskId, Justification, Ticket, CheckedBy string - Status RiskStatus - Date time.Time -} - -type RiskStatus int - -const ( - Unchecked RiskStatus = iota - InDiscussion - Accepted - InProgress - Mitigated - FalsePositive -) - -func RiskStatusValues() []TypeEnum { - return []TypeEnum{ - Unchecked, - InDiscussion, - Accepted, - InProgress, - Mitigated, - FalsePositive, - } -} - -var RiskStatusTypeDescription = [...]TypeDescription{ - {"unchecked", "Risk has not yet been reviewed"}, - {"in-discussion", "Risk is currently being discussed (during review)"}, - {"accepted", "Risk has been accepted (as possibly a corporate risk acceptance process defines)"}, - {"in-progress", "Risk mitigation is currently in progress"}, - {"mitigated", "Risk has been mitigated"}, - {"false-positive", "Risk is a false positive (i.e. 
no risk at all or not applicable)"}, -} - -func (what RiskStatus) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - return RiskStatusTypeDescription[what].Name -} - -func (what RiskStatus) Explain() string { - return RiskStatusTypeDescription[what].Description -} - -func (what RiskStatus) Title() string { - return [...]string{"Unchecked", "in Discussion", "Accepted", "in Progress", "Mitigated", "False Positive"}[what] -} - -func (what RiskStatus) MarshalJSON() ([]byte, error) { - return json.Marshal(what.String()) -} - -func (what *RiskStatus) UnmarshalJSON(value []byte) error { - text := "" - unmarshalError := json.Unmarshal(value, &text) - if unmarshalError != nil { - return unmarshalError - } - - for n, v := range RiskStatusTypeDescription { - if strings.ToLower(v.Name) == strings.ToLower(text) { - *what = RiskStatus(n) - return nil - } - } - - return fmt.Errorf("unknown value %q for risk status\n", text) -} - -func (what RiskStatus) IsStillAtRisk() bool { - return what == Unchecked || what == InDiscussion || what == Accepted || what == InProgress -} - -type RiskRule interface { - Category() RiskCategory - GenerateRisks(parsedModel ParsedModel) []Risk -} - -// as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: - -func SortedRiskCategories() []RiskCategory { - categories := make([]RiskCategory, 0) - for k := range GeneratedRisksByCategory { - categories = append(categories, k) - } - sort.Sort(ByRiskCategoryHighestContainingRiskSeveritySortStillAtRisk(categories)) - return categories -} -func SortedRisksOfCategory(category RiskCategory) []Risk { - risks := GeneratedRisksByCategory[category] - sort.Sort(ByRiskSeveritySort(risks)) - return risks -} - -func CountRisks(risksByCategory map[RiskCategory][]Risk) int { - result := 0 - for _, risks := range risksByCategory { - result += len(risks) - } - return result -} - -func RisksOfOnlySTRIDESpoofing(risksByCategory 
map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) - for _, risks := range risksByCategory { - for _, risk := range risks { - if risk.Category.STRIDE == Spoofing { - result[risk.Category] = append(result[risk.Category], risk) - } - } - } - return result -} - -func RisksOfOnlySTRIDETampering(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) - for _, risks := range risksByCategory { - for _, risk := range risks { - if risk.Category.STRIDE == Tampering { - result[risk.Category] = append(result[risk.Category], risk) - } - } - } - return result -} - -func RisksOfOnlySTRIDERepudiation(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) - for _, risks := range risksByCategory { - for _, risk := range risks { - if risk.Category.STRIDE == Repudiation { - result[risk.Category] = append(result[risk.Category], risk) - } - } - } - return result -} - -func RisksOfOnlySTRIDEInformationDisclosure(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) - for _, risks := range risksByCategory { - for _, risk := range risks { - if risk.Category.STRIDE == InformationDisclosure { - result[risk.Category] = append(result[risk.Category], risk) - } - } - } - return result -} - -func RisksOfOnlySTRIDEDenialOfService(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) - for _, risks := range risksByCategory { - for _, risk := range risks { - if risk.Category.STRIDE == DenialOfService { - result[risk.Category] = append(result[risk.Category], risk) - } - } - } - return result -} - -func RisksOfOnlySTRIDEElevationOfPrivilege(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) - for _, risks := range risksByCategory { - for _, risk := range risks { - if risk.Category.STRIDE == 
ElevationOfPrivilege { - result[risk.Category] = append(result[risk.Category], risk) - } - } - } - return result -} - -func RisksOfOnlyBusinessSide(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) - for _, risks := range risksByCategory { - for _, risk := range risks { - if risk.Category.Function == BusinessSide { - result[risk.Category] = append(result[risk.Category], risk) - } - } - } - return result -} - -func RisksOfOnlyArchitecture(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) - for _, risks := range risksByCategory { - for _, risk := range risks { - if risk.Category.Function == Architecture { - result[risk.Category] = append(result[risk.Category], risk) - } - } - } - return result -} - -func RisksOfOnlyDevelopment(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) - for _, risks := range risksByCategory { - for _, risk := range risks { - if risk.Category.Function == Development { - result[risk.Category] = append(result[risk.Category], risk) - } - } - } - return result -} - -func RisksOfOnlyOperation(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) - for _, risks := range risksByCategory { - for _, risk := range risks { - if risk.Category.Function == Operations { - result[risk.Category] = append(result[risk.Category], risk) - } - } - } - return result -} - -func CategoriesOfOnlyRisksStillAtRisk(risksByCategory map[RiskCategory][]Risk) []RiskCategory { - categories := make(map[RiskCategory]struct{}) // Go's trick of unique elements is a map - for _, risks := range risksByCategory { - for _, risk := range risks { - if !risk.GetRiskTrackingStatusDefaultingUnchecked().IsStillAtRisk() { - continue - } - categories[risk.Category] = struct{}{} - } - } - // return as slice (of now unique values) - return keysAsSlice(categories) -} 
- -func CategoriesOfOnlyCriticalRisks(risksByCategory map[RiskCategory][]Risk, initialRisks bool) []RiskCategory { - categories := make(map[RiskCategory]struct{}) // Go's trick of unique elements is a map - for _, risks := range risksByCategory { - for _, risk := range risks { - if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked().IsStillAtRisk() { - continue - } - if risk.Severity == CriticalSeverity { - categories[risk.Category] = struct{}{} - } - } - } - // return as slice (of now unique values) - return keysAsSlice(categories) -} - -func CategoriesOfOnlyHighRisks(risksByCategory map[RiskCategory][]Risk, initialRisks bool) []RiskCategory { - categories := make(map[RiskCategory]struct{}) // Go's trick of unique elements is a map - for _, risks := range risksByCategory { - for _, risk := range risks { - if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked().IsStillAtRisk() { - continue - } - highest := HighestSeverity(GeneratedRisksByCategory[risk.Category]) - if !initialRisks { - highest = HighestSeverityStillAtRisk(GeneratedRisksByCategory[risk.Category]) - } - if risk.Severity == HighSeverity && highest < CriticalSeverity { - categories[risk.Category] = struct{}{} - } - } - } - // return as slice (of now unique values) - return keysAsSlice(categories) -} - -func CategoriesOfOnlyElevatedRisks(risksByCategory map[RiskCategory][]Risk, initialRisks bool) []RiskCategory { - categories := make(map[RiskCategory]struct{}) // Go's trick of unique elements is a map - for _, risks := range risksByCategory { - for _, risk := range risks { - if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked().IsStillAtRisk() { - continue - } - highest := HighestSeverity(GeneratedRisksByCategory[risk.Category]) - if !initialRisks { - highest = HighestSeverityStillAtRisk(GeneratedRisksByCategory[risk.Category]) - } - if risk.Severity == ElevatedSeverity && highest < HighSeverity { - categories[risk.Category] = struct{}{} - } - } - } - // return 
as slice (of now unique values) - return keysAsSlice(categories) -} - -func CategoriesOfOnlyMediumRisks(risksByCategory map[RiskCategory][]Risk, initialRisks bool) []RiskCategory { - categories := make(map[RiskCategory]struct{}) // Go's trick of unique elements is a map - for _, risks := range risksByCategory { - for _, risk := range risks { - if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked().IsStillAtRisk() { - continue - } - highest := HighestSeverity(GeneratedRisksByCategory[risk.Category]) - if !initialRisks { - highest = HighestSeverityStillAtRisk(GeneratedRisksByCategory[risk.Category]) - } - if risk.Severity == MediumSeverity && highest < ElevatedSeverity { - categories[risk.Category] = struct{}{} - } - } - } - // return as slice (of now unique values) - return keysAsSlice(categories) -} - -func CategoriesOfOnlyLowRisks(risksByCategory map[RiskCategory][]Risk, initialRisks bool) []RiskCategory { - categories := make(map[RiskCategory]struct{}) // Go's trick of unique elements is a map - for _, risks := range risksByCategory { - for _, risk := range risks { - if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked().IsStillAtRisk() { - continue - } - highest := HighestSeverity(GeneratedRisksByCategory[risk.Category]) - if !initialRisks { - highest = HighestSeverityStillAtRisk(GeneratedRisksByCategory[risk.Category]) - } - if risk.Severity == LowSeverity && highest < MediumSeverity { - categories[risk.Category] = struct{}{} - } - } - } - // return as slice (of now unique values) - return keysAsSlice(categories) -} - -func HighestSeverity(risks []Risk) RiskSeverity { - result := LowSeverity - for _, risk := range risks { - if risk.Severity > result { - result = risk.Severity - } - } - return result -} - -func HighestSeverityStillAtRisk(risks []Risk) RiskSeverity { - result := LowSeverity - for _, risk := range risks { - if risk.Severity > result && risk.GetRiskTrackingStatusDefaultingUnchecked().IsStillAtRisk() { - result = 
risk.Severity - } - } - return result -} - -func keysAsSlice(categories map[RiskCategory]struct{}) []RiskCategory { - result := make([]RiskCategory, 0, len(categories)) - for k := range categories { - result = append(result, k) - } - return result -} - -func FilteredByOnlyBusinessSide() []Risk { - filteredRisks := make([]Risk, 0) - for _, risks := range GeneratedRisksByCategory { - for _, risk := range risks { - if risk.Category.Function == BusinessSide { - filteredRisks = append(filteredRisks, risk) - } - } - } - return filteredRisks -} - -func FilteredByOnlyArchitecture() []Risk { - filteredRisks := make([]Risk, 0) - for _, risks := range GeneratedRisksByCategory { - for _, risk := range risks { - if risk.Category.Function == Architecture { - filteredRisks = append(filteredRisks, risk) - } - } - } - return filteredRisks -} - -func FilteredByOnlyDevelopment() []Risk { - filteredRisks := make([]Risk, 0) - for _, risks := range GeneratedRisksByCategory { - for _, risk := range risks { - if risk.Category.Function == Development { - filteredRisks = append(filteredRisks, risk) - } - } - } - return filteredRisks -} - -func FilteredByOnlyOperation() []Risk { - filteredRisks := make([]Risk, 0) - for _, risks := range GeneratedRisksByCategory { - for _, risk := range risks { - if risk.Category.Function == Operations { - filteredRisks = append(filteredRisks, risk) - } - } - } - return filteredRisks -} - -func FilteredByOnlyCriticalRisks() []Risk { - filteredRisks := make([]Risk, 0) - for _, risks := range GeneratedRisksByCategory { - for _, risk := range risks { - if risk.Severity == CriticalSeverity { - filteredRisks = append(filteredRisks, risk) - } - } - } - return filteredRisks -} - -func FilteredByOnlyHighRisks() []Risk { - filteredRisks := make([]Risk, 0) - for _, risks := range GeneratedRisksByCategory { - for _, risk := range risks { - if risk.Severity == HighSeverity { - filteredRisks = append(filteredRisks, risk) - } - } - } - return filteredRisks -} - -func 
FilteredByOnlyElevatedRisks() []Risk { - filteredRisks := make([]Risk, 0) - for _, risks := range GeneratedRisksByCategory { - for _, risk := range risks { - if risk.Severity == ElevatedSeverity { - filteredRisks = append(filteredRisks, risk) - } - } - } - return filteredRisks -} - -func FilteredByOnlyMediumRisks() []Risk { - filteredRisks := make([]Risk, 0) - for _, risks := range GeneratedRisksByCategory { - for _, risk := range risks { - if risk.Severity == MediumSeverity { - filteredRisks = append(filteredRisks, risk) - } - } - } - return filteredRisks -} - -func FilteredByOnlyLowRisks() []Risk { - filteredRisks := make([]Risk, 0) - for _, risks := range GeneratedRisksByCategory { - for _, risk := range risks { - if risk.Severity == LowSeverity { - filteredRisks = append(filteredRisks, risk) - } - } - } - return filteredRisks -} - -func FilterByModelFailures(risksByCat map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) - for riskCat, risks := range risksByCat { - if riskCat.ModelFailurePossibleReason { - result[riskCat] = risks - } - } - return result -} - -func FlattenRiskSlice(risksByCat map[RiskCategory][]Risk) []Risk { - result := make([]Risk, 0) - for _, risks := range risksByCat { - result = append(result, risks...) 
- } - return result -} - -func TotalRiskCount() int { - count := 0 - for _, risks := range GeneratedRisksByCategory { - count += len(risks) - } - return count -} - -func FilteredByRiskTrackingUnchecked() []Risk { - filteredRisks := make([]Risk, 0) - for _, risks := range GeneratedRisksByCategory { - for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked() == Unchecked { - filteredRisks = append(filteredRisks, risk) - } - } - } - return filteredRisks -} - -func FilteredByRiskTrackingInDiscussion() []Risk { - filteredRisks := make([]Risk, 0) - for _, risks := range GeneratedRisksByCategory { - for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked() == InDiscussion { - filteredRisks = append(filteredRisks, risk) - } - } - } - return filteredRisks -} - -func FilteredByRiskTrackingAccepted() []Risk { - filteredRisks := make([]Risk, 0) - for _, risks := range GeneratedRisksByCategory { - for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked() == Accepted { - filteredRisks = append(filteredRisks, risk) - } - } - } - return filteredRisks -} - -func FilteredByRiskTrackingInProgress() []Risk { - filteredRisks := make([]Risk, 0) - for _, risks := range GeneratedRisksByCategory { - for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked() == InProgress { - filteredRisks = append(filteredRisks, risk) - } - } - } - return filteredRisks -} - -func FilteredByRiskTrackingMitigated() []Risk { - filteredRisks := make([]Risk, 0) - for _, risks := range GeneratedRisksByCategory { - for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked() == Mitigated { - filteredRisks = append(filteredRisks, risk) - } - } - } - return filteredRisks -} - -func FilteredByRiskTrackingFalsePositive() []Risk { - filteredRisks := make([]Risk, 0) - for _, risks := range GeneratedRisksByCategory { - for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked() == 
FalsePositive { - filteredRisks = append(filteredRisks, risk) - } - } - } - return filteredRisks -} - -func ReduceToOnlyHighRisk(risks []Risk) []Risk { - filteredRisks := make([]Risk, 0) - for _, risk := range risks { - if risk.Severity == HighSeverity { - filteredRisks = append(filteredRisks, risk) - } - } - return filteredRisks -} - -func ReduceToOnlyMediumRisk(risks []Risk) []Risk { - filteredRisks := make([]Risk, 0) - for _, risk := range risks { - if risk.Severity == MediumSeverity { - filteredRisks = append(filteredRisks, risk) - } - } - return filteredRisks -} - -func ReduceToOnlyLowRisk(risks []Risk) []Risk { - filteredRisks := make([]Risk, 0) - for _, risk := range risks { - if risk.Severity == LowSeverity { - filteredRisks = append(filteredRisks, risk) - } - } - return filteredRisks -} - -func ReduceToOnlyRiskTrackingUnchecked(risks []Risk) []Risk { - filteredRisks := make([]Risk, 0) - for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked() == Unchecked { - filteredRisks = append(filteredRisks, risk) - } - } - return filteredRisks -} - -func ReduceToOnlyRiskTrackingInDiscussion(risks []Risk) []Risk { - filteredRisks := make([]Risk, 0) - for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked() == InDiscussion { - filteredRisks = append(filteredRisks, risk) - } - } - return filteredRisks -} - -func ReduceToOnlyRiskTrackingAccepted(risks []Risk) []Risk { - filteredRisks := make([]Risk, 0) - for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked() == Accepted { - filteredRisks = append(filteredRisks, risk) - } - } - return filteredRisks -} - -func ReduceToOnlyRiskTrackingInProgress(risks []Risk) []Risk { - filteredRisks := make([]Risk, 0) - for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked() == InProgress { - filteredRisks = append(filteredRisks, risk) - } - } - return filteredRisks -} - -func ReduceToOnlyRiskTrackingMitigated(risks []Risk) []Risk { - 
filteredRisks := make([]Risk, 0) - for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked() == Mitigated { - filteredRisks = append(filteredRisks, risk) - } - } - return filteredRisks -} - -func ReduceToOnlyRiskTrackingFalsePositive(risks []Risk) []Risk { - filteredRisks := make([]Risk, 0) - for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked() == FalsePositive { - filteredRisks = append(filteredRisks, risk) - } - } - return filteredRisks -} - -func FilteredByStillAtRisk() []Risk { - filteredRisks := make([]Risk, 0) - for _, risks := range GeneratedRisksByCategory { - for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked().IsStillAtRisk() { - filteredRisks = append(filteredRisks, risk) - } - } - } - return filteredRisks -} - -func OverallRiskStatistics() RiskStatistics { - result := RiskStatistics{} - result.Risks = make(map[string]map[string]int) - result.Risks[CriticalSeverity.String()] = make(map[string]int) - result.Risks[CriticalSeverity.String()][Unchecked.String()] = 0 - result.Risks[CriticalSeverity.String()][InDiscussion.String()] = 0 - result.Risks[CriticalSeverity.String()][Accepted.String()] = 0 - result.Risks[CriticalSeverity.String()][InProgress.String()] = 0 - result.Risks[CriticalSeverity.String()][Mitigated.String()] = 0 - result.Risks[CriticalSeverity.String()][FalsePositive.String()] = 0 - result.Risks[HighSeverity.String()] = make(map[string]int) - result.Risks[HighSeverity.String()][Unchecked.String()] = 0 - result.Risks[HighSeverity.String()][InDiscussion.String()] = 0 - result.Risks[HighSeverity.String()][Accepted.String()] = 0 - result.Risks[HighSeverity.String()][InProgress.String()] = 0 - result.Risks[HighSeverity.String()][Mitigated.String()] = 0 - result.Risks[HighSeverity.String()][FalsePositive.String()] = 0 - result.Risks[ElevatedSeverity.String()] = make(map[string]int) - result.Risks[ElevatedSeverity.String()][Unchecked.String()] = 0 - 
result.Risks[ElevatedSeverity.String()][InDiscussion.String()] = 0 - result.Risks[ElevatedSeverity.String()][Accepted.String()] = 0 - result.Risks[ElevatedSeverity.String()][InProgress.String()] = 0 - result.Risks[ElevatedSeverity.String()][Mitigated.String()] = 0 - result.Risks[ElevatedSeverity.String()][FalsePositive.String()] = 0 - result.Risks[MediumSeverity.String()] = make(map[string]int) - result.Risks[MediumSeverity.String()][Unchecked.String()] = 0 - result.Risks[MediumSeverity.String()][InDiscussion.String()] = 0 - result.Risks[MediumSeverity.String()][Accepted.String()] = 0 - result.Risks[MediumSeverity.String()][InProgress.String()] = 0 - result.Risks[MediumSeverity.String()][Mitigated.String()] = 0 - result.Risks[MediumSeverity.String()][FalsePositive.String()] = 0 - result.Risks[LowSeverity.String()] = make(map[string]int) - result.Risks[LowSeverity.String()][Unchecked.String()] = 0 - result.Risks[LowSeverity.String()][InDiscussion.String()] = 0 - result.Risks[LowSeverity.String()][Accepted.String()] = 0 - result.Risks[LowSeverity.String()][InProgress.String()] = 0 - result.Risks[LowSeverity.String()][Mitigated.String()] = 0 - result.Risks[LowSeverity.String()][FalsePositive.String()] = 0 - for _, risks := range GeneratedRisksByCategory { - for _, risk := range risks { - result.Risks[risk.Severity.String()][risk.GetRiskTrackingStatusDefaultingUnchecked().String()]++ - } - } - return result -} - -func AllRisks() []Risk { - result := make([]Risk, 0) - for _, risks := range GeneratedRisksByCategory { - for _, risk := range risks { - result = append(result, risk) - } - } - return result -} - -func ReduceToOnlyStillAtRisk(risks []Risk) []Risk { - filteredRisks := make([]Risk, 0) - for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked().IsStillAtRisk() { - filteredRisks = append(filteredRisks, risk) - } - } - return filteredRisks -} - -func HighestExploitationLikelihood(risks []Risk) RiskExploitationLikelihood { - result := 
Unlikely - for _, risk := range risks { - if risk.ExploitationLikelihood > result { - result = risk.ExploitationLikelihood - } - } - return result -} - -func HighestExploitationImpact(risks []Risk) RiskExploitationImpact { - result := LowImpact - for _, risk := range risks { - if risk.ExploitationImpact > result { - result = risk.ExploitationImpact - } - } - return result -} - -func InScopeTechnicalAssets() []TechnicalAsset { - result := make([]TechnicalAsset, 0) - for _, asset := range ParsedModelRoot.TechnicalAssets { - if !asset.OutOfScope { - result = append(result, asset) - } - } - return result -} diff --git a/colors/colors.go b/pkg/colors/colors.go similarity index 96% rename from colors/colors.go rename to pkg/colors/colors.go index cac70f6d..eb68c4f3 100644 --- a/colors/colors.go +++ b/pkg/colors/colors.go @@ -1,7 +1,9 @@ +// TODO: move content of this package to internal because it's only bunch of helper which is actually detail of implementation on how to generate package colors import ( "encoding/hex" + "github.com/jung-kurt/gofpdf" ) diff --git a/pkg/docs/constants.go b/pkg/docs/constants.go new file mode 100644 index 00000000..0a2fb994 --- /dev/null +++ b/pkg/docs/constants.go @@ -0,0 +1,32 @@ +/* +Copyright © 2023 NAME HERE +*/ +package docs + +const ( + ThreagileVersion = "1.0.0" // Also update into example and stub model files and openapi.yaml + Logo = " _____ _ _ _ \n |_ _| |__ _ __ ___ __ _ __ _(_) | ___ \n | | | '_ \\| '__/ _ \\/ _` |/ _` | | |/ _ \\\n | | | | | | | | __/ (_| | (_| | | | __/\n |_| |_| |_|_| \\___|\\__,_|\\__, |_|_|\\___|\n |___/ " + + "\nThreagile - Agile Threat Modeling" + VersionText = "Documentation: https://threagile.io\n" + + "Docker Images: https://hub.docker.com/r/threagile/threagile\n" + + "Sourcecode: https://github.com/threagile\n" + + "License: Open-Source (MIT License)" + + "Version: " + ThreagileVersion // TODO: add buildTimestamp + " (" + buildTimestamp + ")" + Examples = "Examples:\n\n" + + "If you want to create 
an example model (via docker) as a starting point to learn about Threagile just run: \n" + + " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile -create-example-model -output app/work \n\n" + + "If you want to create a minimal stub model (via docker) as a starting point for your own model just run: \n" + + " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile -create-stub-model -output app/work \n\n" + + "If you want to execute Threagile on a model yaml file (via docker): \n" + + " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile -verbose -model -output app/work \n\n" + + "If you want to run Threagile as a server (REST API) on some port (here 8080): \n" + + " docker run --rm -it --shm-size=256m -p 8080:8080 --name --mount 'type=volume,src=threagile-storage,dst=/data,readonly=false' threagile/threagile -server 8080 \n\n" + + "If you want to find out about the different enum values usable in the model yaml file: \n" + + " docker run --rm -it threagile/threagile -list-types\n\n" + + "If you want to use some nice editing help (syntax validation, autocompletion, and live templates) in your favourite IDE: " + + " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile -create-editing-support -output app/work\n\n" + + "If you want to list all available model macros (which are macros capable of reading a model yaml file, asking you questions in a wizard-style and then update the model yaml file accordingly): \n" + + " docker run --rm -it threagile/threagile -list-model-macros \n\n" + + "If you want to execute a certain model macro on the model yaml file (here the macro add-build-pipeline): \n" + + " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile -model app/work/threagile.yaml -output app/work -execute-model-macro add-build-pipeline" +) diff --git a/pkg/input/input.go b/pkg/input/input.go new file mode 100644 index 00000000..9e7d251c --- /dev/null +++ b/pkg/input/input.go @@ -0,0 +1,402 @@ +/* +Copyright © 2023 
NAME HERE +*/ +package input + +import ( + "fmt" + "log" + "os" + "path/filepath" + "strings" + + "gopkg.in/yaml.v3" +) + +// === Model Type Stuff ====================================== + +type Author struct { + Name string `yaml:"name" json:"name"` + Homepage string `yaml:"homepage" json:"homepage"` +} + +type Overview struct { + Description string `yaml:"description" json:"description"` + Images []map[string]string `yaml:"images" json:"images"` // yes, array of map here, as array keeps the order of the image keys +} + +type InputDataAsset struct { + ID string `yaml:"id" json:"id"` + Description string `yaml:"description" json:"description"` + Usage string `yaml:"usage" json:"usage"` + Tags []string `yaml:"tags" json:"tags"` + Origin string `yaml:"origin" json:"origin"` + Owner string `yaml:"owner" json:"owner"` + Quantity string `yaml:"quantity" json:"quantity"` + Confidentiality string `yaml:"confidentiality" json:"confidentiality"` + Integrity string `yaml:"integrity" json:"integrity"` + Availability string `yaml:"availability" json:"availability"` + JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` +} + +type InputTechnicalAsset struct { + ID string `yaml:"id" json:"id"` + Description string `yaml:"description" json:"description"` + Type string `yaml:"type" json:"type"` + Usage string `yaml:"usage" json:"usage"` + UsedAsClientByHuman bool `yaml:"used_as_client_by_human" json:"used_as_client_by_human"` + OutOfScope bool `yaml:"out_of_scope" json:"out_of_scope"` + JustificationOutOfScope string `yaml:"justification_out_of_scope" json:"justification_out_of_scope"` + Size string `yaml:"size" json:"size"` + Technology string `yaml:"technology" json:"technology"` + Tags []string `yaml:"tags" json:"tags"` + Internet bool `yaml:"internet" json:"internet"` + Machine string `yaml:"machine" json:"machine"` + Encryption string `yaml:"encryption" json:"encryption"` + Owner string `yaml:"owner" json:"owner"` + Confidentiality 
string `yaml:"confidentiality" json:"confidentiality"` + Integrity string `yaml:"integrity" json:"integrity"` + Availability string `yaml:"availability" json:"availability"` + JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` + MultiTenant bool `yaml:"multi_tenant" json:"multi_tenant"` + Redundant bool `yaml:"redundant" json:"redundant"` + CustomDevelopedParts bool `yaml:"custom_developed_parts" json:"custom_developed_parts"` + DataAssetsProcessed []string `yaml:"data_assets_processed" json:"data_assets_processed"` + DataAssetsStored []string `yaml:"data_assets_stored" json:"data_assets_stored"` + DataFormatsAccepted []string `yaml:"data_formats_accepted" json:"data_formats_accepted"` + DiagramTweakOrder int `yaml:"diagram_tweak_order" json:"diagram_tweak_order"` + CommunicationLinks map[string]InputCommunicationLink `yaml:"communication_links" json:"communication_links"` +} + +type InputCommunicationLink struct { + Target string `yaml:"target" json:"target"` + Description string `yaml:"description" json:"description"` + Protocol string `yaml:"protocol" json:"protocol"` + Authentication string `yaml:"authentication" json:"authentication"` + Authorization string `yaml:"authorization" json:"authorization"` + Tags []string `yaml:"tags" json:"tags"` + VPN bool `yaml:"vpn" json:"vpn"` + IpFiltered bool `yaml:"ip_filtered" json:"ip_filtered"` + Readonly bool `yaml:"readonly" json:"readonly"` + Usage string `yaml:"usage" json:"usage"` + DataAssetsSent []string `yaml:"data_assets_sent" json:"data_assets_sent"` + DataAssetsReceived []string `yaml:"data_assets_received" json:"data_assets_received"` + DiagramTweakWeight int `yaml:"diagram_tweak_weight" json:"diagram_tweak_weight"` + DiagramTweakConstraint bool `yaml:"diagram_tweak_constraint" json:"diagram_tweak_constraint"` +} + +type InputSharedRuntime struct { + ID string `yaml:"id" json:"id"` + Description string `yaml:"description" json:"description"` + Tags []string 
`yaml:"tags" json:"tags"` + TechnicalAssetsRunning []string `yaml:"technical_assets_running" json:"technical_assets_running"` +} + +type InputTrustBoundary struct { + ID string `yaml:"id" json:"id"` + Description string `yaml:"description" json:"description"` + Type string `yaml:"type" json:"type"` + Tags []string `yaml:"tags" json:"tags"` + TechnicalAssetsInside []string `yaml:"technical_assets_inside" json:"technical_assets_inside"` + TrustBoundariesNested []string `yaml:"trust_boundaries_nested" json:"trust_boundaries_nested"` +} + +type InputIndividualRiskCategory struct { + ID string `yaml:"id" json:"id"` + Description string `yaml:"description" json:"description"` + Impact string `yaml:"impact" json:"impact"` + ASVS string `yaml:"asvs" json:"asvs"` + CheatSheet string `yaml:"cheat_sheet" json:"cheat_sheet"` + Action string `yaml:"action" json:"action"` + Mitigation string `yaml:"mitigation" json:"mitigation"` + Check string `yaml:"check" json:"check"` + Function string `yaml:"function" json:"function"` + STRIDE string `yaml:"stride" json:"stride"` + DetectionLogic string `yaml:"detection_logic" json:"detection_logic"` + RiskAssessment string `yaml:"risk_assessment" json:"risk_assessment"` + FalsePositives string `yaml:"false_positives" json:"false_positives"` + ModelFailurePossibleReason bool `yaml:"model_failure_possible_reason" json:"model_failure_possible_reason"` + CWE int `yaml:"cwe" json:"cwe"` + RisksIdentified map[string]InputRiskIdentified `yaml:"risks_identified" json:"risks_identified"` +} + +type InputRiskIdentified struct { + Severity string `yaml:"severity" json:"severity"` + ExploitationLikelihood string `yaml:"exploitation_likelihood" json:"exploitation_likelihood"` + ExploitationImpact string `yaml:"exploitation_impact" json:"exploitation_impact"` + DataBreachProbability string `yaml:"data_breach_probability" json:"data_breach_probability"` + DataBreachTechnicalAssets []string `yaml:"data_breach_technical_assets" 
json:"data_breach_technical_assets"` + MostRelevantDataAsset string `yaml:"most_relevant_data_asset" json:"most_relevant_data_asset"` + MostRelevantTechnicalAsset string `yaml:"most_relevant_technical_asset" json:"most_relevant_technical_asset"` + MostRelevantCommunicationLink string `yaml:"most_relevant_communication_link" json:"most_relevant_communication_link"` + MostRelevantTrustBoundary string `yaml:"most_relevant_trust_boundary" json:"most_relevant_trust_boundary"` + MostRelevantSharedRuntime string `yaml:"most_relevant_shared_runtime" json:"most_relevant_shared_runtime"` +} + +type InputRiskTracking struct { + Status string `yaml:"status" json:"status"` + Justification string `yaml:"justification" json:"justification"` + Ticket string `yaml:"ticket" json:"ticket"` + Date string `yaml:"date" json:"date"` + CheckedBy string `yaml:"checked_by" json:"checked_by"` +} + +type ModelInput struct { // TODO: Eventually remove this and directly use ParsedModelRoot? But then the error messages for model errors are not quite as good anymore... 
+ Includes []string `yaml:"includes" json:"includes"` + ThreagileVersion string `yaml:"threagile_version" json:"threagile_version"` + Title string `yaml:"title" json:"title"` + Author Author `yaml:"author" json:"author"` + Date string `yaml:"date" json:"date"` + BusinessOverview Overview `yaml:"business_overview" json:"business_overview"` + TechnicalOverview Overview `yaml:"technical_overview" json:"technical_overview"` + BusinessCriticality string `yaml:"business_criticality" json:"business_criticality"` + ManagementSummaryComment string `yaml:"management_summary_comment" json:"management_summary_comment"` + Questions map[string]string `yaml:"questions" json:"questions"` + AbuseCases map[string]string `yaml:"abuse_cases" json:"abuse_cases"` + SecurityRequirements map[string]string `yaml:"security_requirements" json:"security_requirements"` + TagsAvailable []string `yaml:"tags_available" json:"tags_available"` + DataAssets map[string]InputDataAsset `yaml:"data_assets" json:"data_assets"` + TechnicalAssets map[string]InputTechnicalAsset `yaml:"technical_assets" json:"technical_assets"` + TrustBoundaries map[string]InputTrustBoundary `yaml:"trust_boundaries" json:"trust_boundaries"` + SharedRuntimes map[string]InputSharedRuntime `yaml:"shared_runtimes" json:"shared_runtimes"` + IndividualRiskCategories map[string]InputIndividualRiskCategory `yaml:"individual_risk_categories" json:"individual_risk_categories"` + RiskTracking map[string]InputRiskTracking `yaml:"risk_tracking" json:"risk_tracking"` + DiagramTweakNodesep int `yaml:"diagram_tweak_nodesep" json:"diagram_tweak_nodesep"` + DiagramTweakRanksep int `yaml:"diagram_tweak_ranksep" json:"diagram_tweak_ranksep"` + DiagramTweakEdgeLayout string `yaml:"diagram_tweak_edge_layout" json:"diagram_tweak_edge_layout"` + DiagramTweakSuppressEdgeLabels bool `yaml:"diagram_tweak_suppress_edge_labels" json:"diagram_tweak_suppress_edge_labels"` + DiagramTweakLayoutLeftToRight bool `yaml:"diagram_tweak_layout_left_to_right" 
json:"diagram_tweak_layout_left_to_right"` + DiagramTweakInvisibleConnectionsBetweenAssets []string `yaml:"diagram_tweak_invisible_connections_between_assets" json:"diagram_tweak_invisible_connections_between_assets"` + DiagramTweakSameRankAssets []string `yaml:"diagram_tweak_same_rank_assets" json:"diagram_tweak_same_rank_assets"` +} + +func (model *ModelInput) Defaults() *ModelInput { + *model = ModelInput{ + Questions: make(map[string]string), + AbuseCases: make(map[string]string), + SecurityRequirements: make(map[string]string), + DataAssets: make(map[string]InputDataAsset), + TechnicalAssets: make(map[string]InputTechnicalAsset), + TrustBoundaries: make(map[string]InputTrustBoundary), + SharedRuntimes: make(map[string]InputSharedRuntime), + IndividualRiskCategories: make(map[string]InputIndividualRiskCategory), + RiskTracking: make(map[string]InputRiskTracking), + } + + return model +} + +func (model *ModelInput) Load(inputFilename string) error { + modelYaml, readError := os.ReadFile(inputFilename) + if readError != nil { + log.Fatal("Unable to read model file: ", readError) + } + + unmarshalError := yaml.Unmarshal(modelYaml, &model) + if unmarshalError != nil { + log.Fatal("Unable to parse model yaml: ", unmarshalError) + } + + for _, includeFile := range model.Includes { + mergeError := model.Merge(filepath.Dir(inputFilename), includeFile) + if mergeError != nil { + log.Fatalf("Unable to merge model include %q: %v", includeFile, mergeError) + } + } + + return nil +} + +type UniqueStringSlice []string + +func (slice UniqueStringSlice) Merge(otherSlice []string) []string { + valueMap := make(map[string]bool) + for _, value := range slice { + valueMap[value] = true + } + + for _, value := range otherSlice { + valueMap[value] = true + } + + valueSlice := make(UniqueStringSlice, 0) + for key := range valueMap { + valueSlice = append(valueSlice, key) + } + + return valueSlice +} + +func (model *ModelInput) Merge(dir string, includeFilename string) error { + 
modelYaml, readError := os.ReadFile(filepath.Join(dir, includeFilename)) + if readError != nil { + return fmt.Errorf("unable to read model file: %v", readError) + } + + var fileStructure map[string]any + unmarshalStructureError := yaml.Unmarshal(modelYaml, &fileStructure) + if unmarshalStructureError != nil { + return fmt.Errorf("unable to parse model structure: %v", unmarshalStructureError) + } + + var includedModel ModelInput + unmarshalError := yaml.Unmarshal(modelYaml, &includedModel) + if unmarshalError != nil { + return fmt.Errorf("unable to parse model yaml: %v", unmarshalError) + } + + for item := range fileStructure { + switch strings.ToLower(item) { + case strings.ToLower("includes"): + for _, includeFile := range includedModel.Includes { + mergeError := model.Merge(filepath.Join(dir, filepath.Dir(includeFilename)), includeFile) + if mergeError != nil { + return fmt.Errorf("unable to merge model include %q: %v", includeFile, mergeError) + } + } + break + + case strings.ToLower("threagile_version"): + model.ThreagileVersion = includedModel.ThreagileVersion + break + + case strings.ToLower("title"): + model.Title = includedModel.Title + break + + case strings.ToLower("author"): + model.Author = includedModel.Author + break + + case strings.ToLower("date"): + model.Date = includedModel.Date + break + + case strings.ToLower("business_overview"): + model.BusinessOverview = includedModel.BusinessOverview + break + + case strings.ToLower("technical_overview"): + model.TechnicalOverview = includedModel.TechnicalOverview + break + + case strings.ToLower("business_criticality"): + model.BusinessCriticality = includedModel.BusinessCriticality + break + + case strings.ToLower("management_summary_comment"): + model.ManagementSummaryComment = includedModel.ManagementSummaryComment + break + + case strings.ToLower("questions"): + for mapKey, mapValue := range includedModel.Questions { + model.Questions[mapKey] = mapValue + } + break + + case 
strings.ToLower("abuse_cases"): + for mapKey, mapValue := range includedModel.AbuseCases { + model.AbuseCases[mapKey] = mapValue + } + break + + case strings.ToLower("security_requirements"): + for mapKey, mapValue := range includedModel.SecurityRequirements { + model.SecurityRequirements[mapKey] = mapValue + } + break + + case strings.ToLower("tags_available"): + model.TagsAvailable = UniqueStringSlice(model.TagsAvailable).Merge(includedModel.TagsAvailable) + break + + case strings.ToLower("data_assets"): + for mapKey, mapValue := range includedModel.DataAssets { + model.DataAssets[mapKey] = mapValue + } + break + + case strings.ToLower("technical_assets"): + for mapKey, mapValue := range includedModel.TechnicalAssets { + model.TechnicalAssets[mapKey] = mapValue + } + break + + case strings.ToLower("trust_boundaries"): + for mapKey, mapValue := range includedModel.TrustBoundaries { + model.TrustBoundaries[mapKey] = mapValue + } + break + + case strings.ToLower("shared_runtimes"): + for mapKey, mapValue := range includedModel.SharedRuntimes { + model.SharedRuntimes[mapKey] = mapValue + } + break + + case strings.ToLower("individual_risk_categories"): + for mapKey, mapValue := range includedModel.IndividualRiskCategories { + model.IndividualRiskCategories[mapKey] = mapValue + } + break + + case strings.ToLower("risk_tracking"): + for mapKey, mapValue := range includedModel.RiskTracking { + model.RiskTracking[mapKey] = mapValue + } + break + + case "diagram_tweak_nodesep": + model.DiagramTweakNodesep = includedModel.DiagramTweakNodesep + break + + case "diagram_tweak_ranksep": + model.DiagramTweakRanksep = includedModel.DiagramTweakRanksep + break + + case "diagram_tweak_edge_layout": + model.DiagramTweakEdgeLayout = includedModel.DiagramTweakEdgeLayout + break + + case "diagram_tweak_suppress_edge_labels": + model.DiagramTweakSuppressEdgeLabels = includedModel.DiagramTweakSuppressEdgeLabels + break + + case "diagram_tweak_layout_left_to_right": + 
model.DiagramTweakLayoutLeftToRight = includedModel.DiagramTweakLayoutLeftToRight + break + + case "diagram_tweak_invisible_connections_between_assets": + model.DiagramTweakInvisibleConnectionsBetweenAssets = append(model.DiagramTweakInvisibleConnectionsBetweenAssets, includedModel.DiagramTweakInvisibleConnectionsBetweenAssets...) + break + + case "diagram_tweak_same_rank_assets": + model.DiagramTweakSameRankAssets = append(model.DiagramTweakSameRankAssets, includedModel.DiagramTweakSameRankAssets...) + } + } + + return nil +} + +func AddTagToModelInput(modelInput *ModelInput, tag string, dryRun bool, changes *[]string) { + tag = NormalizeTag(tag) + if !contains(modelInput.TagsAvailable, tag) { + *changes = append(*changes, "adding tag: "+tag) + if !dryRun { + modelInput.TagsAvailable = append(modelInput.TagsAvailable, tag) + } + } +} + +func NormalizeTag(tag string) string { + return strings.TrimSpace(strings.ToLower(tag)) +} + +func contains(a []string, x string) bool { + for _, n := range a { + if x == n { + return true + } + } + return false +} diff --git a/pkg/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go b/pkg/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go index b6ec0514..e3e92dda 100644 --- a/pkg/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go +++ b/pkg/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go @@ -2,13 +2,17 @@ package add_build_pipeline import ( "fmt" - "github.com/threagile/threagile/model" "sort" "strings" + + "github.com/threagile/threagile/pkg/input" + "github.com/threagile/threagile/pkg/macros" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) -func GetMacroDetails() model.MacroDetails { - return model.MacroDetails{ +func GetMacroDetails() macros.MacroDetails { + return macros.MacroDetails{ ID: "add-build-pipeline", Title: "Add Build Pipeline", Description: "This model macro adds a build pipeline (development client, build pipeline, 
artifact registry, container image registry, " + @@ -29,7 +33,7 @@ var pushOrPull = []string{ // TODO add question for type of machine (either physical, virtual, container, etc.) -func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { +func GetNextQuestion(model *model.ParsedModel) (nextQuestion macros.MacroQuestion, err error) { counter := len(questionsAnswered) if counter > 3 && !codeInspectionUsed { counter++ @@ -45,7 +49,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { } switch counter { case 0: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "source-repository", Title: "What product is used as the sourcecode repository?", Description: "This name affects the technical asset's title and ID plus also the tags used.", @@ -54,7 +58,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { DefaultAnswer: "Git", }, nil case 1: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "build-pipeline", Title: "What product is used as the build pipeline?", Description: "This name affects the technical asset's title and ID plus also the tags used.", @@ -63,7 +67,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { DefaultAnswer: "Jenkins", }, nil case 2: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "artifact-registry", Title: "What product is used as the artifact registry?", Description: "This name affects the technical asset's title and ID plus also the tags used.", @@ -72,7 +76,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { DefaultAnswer: "Nexus", }, nil case 3: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "code-inspection-used", Title: "Are code inspection platforms (like SonarQube) used?", Description: "This affects whether code inspection platform are added.", @@ -81,7 +85,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { DefaultAnswer: "Yes", }, nil case 4: - 
return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "code-inspection-platform", Title: "What product is used as the code inspection platform?", Description: "This name affects the technical asset's title and ID plus also the tags used.", @@ -90,7 +94,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { DefaultAnswer: "SonarQube", }, nil case 5: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "container-technology-used", Title: "Is container technology (like Docker) used?", Description: "This affects whether container registries are added.", @@ -99,7 +103,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { DefaultAnswer: "Yes", }, nil case 6: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "container-registry", Title: "What product is used as the container registry?", Description: "This name affects the technical asset's title and ID plus also the tags used.", @@ -108,7 +112,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { DefaultAnswer: "Docker", }, nil case 7: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "container-platform", Title: "What product is used as the container platform (for orchestration and runtime)?", Description: "This name affects the technical asset's title and ID plus also the tags used.", @@ -117,7 +121,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { DefaultAnswer: "Kubernetes", }, nil case 8: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "internet", Title: "Are build pipeline components exposed on the internet?", Description: "", @@ -126,7 +130,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { DefaultAnswer: "No", }, nil case 9: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "multi-tenant", Title: "Are build pipeline components used by multiple tenants?", Description: "", @@ -135,7 +139,7 @@ func GetNextQuestion() 
(nextQuestion model.MacroQuestion, err error) { DefaultAnswer: "No", }, nil case 10: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "encryption", Title: "Are build pipeline components encrypted?", Description: "", @@ -145,12 +149,12 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { }, nil case 11: possibleAnswers := make([]string, 0) - for id := range model.ParsedModelRoot.TechnicalAssets { + for id := range model.TechnicalAssets { possibleAnswers = append(possibleAnswers, id) } sort.Strings(possibleAnswers) if len(possibleAnswers) > 0 { - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "deploy-targets", Title: "Select all technical assets where the build pipeline deploys to:", Description: "This affects the communication links being generated.", @@ -160,7 +164,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { }, nil } case 12: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "within-trust-boundary", Title: "Are the server-side components of the build pipeline components within a network trust boundary?", Description: "", @@ -170,13 +174,13 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { }, nil case 13: possibleAnswers := []string{createNewTrustBoundaryLabel} - for id, trustBoundary := range model.ParsedModelRoot.TrustBoundaries { + for id, trustBoundary := range model.TrustBoundaries { if trustBoundary.Type.IsNetworkBoundary() { possibleAnswers = append(possibleAnswers, id) } } sort.Strings(possibleAnswers) - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "selected-trust-boundary", Title: "Choose from the list of existing network trust boundaries or create a new one?", Description: "", @@ -185,21 +189,21 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { DefaultAnswer: "", }, nil case 14: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "new-trust-boundary-type", Title: "Of which type 
shall the new trust boundary be?", Description: "", - PossibleAnswers: []string{model.NetworkOnPrem.String(), - model.NetworkDedicatedHoster.String(), - model.NetworkVirtualLAN.String(), - model.NetworkCloudProvider.String(), - model.NetworkCloudSecurityGroup.String(), - model.NetworkPolicyNamespaceIsolation.String()}, + PossibleAnswers: []string{types.NetworkOnPrem.String(), + types.NetworkDedicatedHoster.String(), + types.NetworkVirtualLAN.String(), + types.NetworkCloudProvider.String(), + types.NetworkCloudSecurityGroup.String(), + types.NetworkPolicyNamespaceIsolation.String()}, MultiSelect: false, - DefaultAnswer: model.NetworkOnPrem.String(), + DefaultAnswer: types.NetworkOnPrem.String(), }, nil case 15: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "push-or-pull", Title: "What type of deployment strategy is used?", Description: "Push-based deployments are more classic ones and pull-based are more GitOps-like ones.", @@ -208,7 +212,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { DefaultAnswer: "", }, nil case 16: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "owner", Title: "Who is the owner of the build pipeline and runtime assets?", Description: "This name affects the technical asset's and data asset's owner.", @@ -217,7 +221,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { DefaultAnswer: "", }, nil } - return model.NoMoreQuestions(), nil + return macros.NoMoreQuestions(), nil } func ApplyAnswer(questionID string, answer ...string) (message string, validResult bool, err error) { @@ -245,30 +249,30 @@ func GoBack() (message string, validResult bool, err error) { return "Undo successful", true, nil } -func GetFinalChangeImpact(modelInput *model.ModelInput) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(modelInput *input.ModelInput, model *model.ParsedModel) (changes []string, message string, validResult bool, err error) { 
changeLogCollector := make([]string, 0) - message, validResult, err = applyChange(modelInput, &changeLogCollector, true) + message, validResult, err = applyChange(modelInput, model, &changeLogCollector, true) return changeLogCollector, message, validResult, err } -func Execute(modelInput *model.ModelInput) (message string, validResult bool, err error) { +func Execute(modelInput *input.ModelInput, model *model.ParsedModel) (message string, validResult bool, err error) { changeLogCollector := make([]string, 0) - message, validResult, err = applyChange(modelInput, &changeLogCollector, false) + message, validResult, err = applyChange(modelInput, model, &changeLogCollector, false) return message, validResult, err } -func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dryRun bool) (message string, validResult bool, err error) { +func applyChange(modelInput *input.ModelInput, parsedModel *model.ParsedModel, changeLogCollector *[]string, dryRun bool) (message string, validResult bool, err error) { var serverSideTechAssets = make([]string, 0) // ################################################ - model.AddTagToModelInput(modelInput, macroState["source-repository"][0], dryRun, changeLogCollector) - model.AddTagToModelInput(modelInput, macroState["build-pipeline"][0], dryRun, changeLogCollector) - model.AddTagToModelInput(modelInput, macroState["artifact-registry"][0], dryRun, changeLogCollector) + input.AddTagToModelInput(modelInput, macroState["source-repository"][0], dryRun, changeLogCollector) + input.AddTagToModelInput(modelInput, macroState["build-pipeline"][0], dryRun, changeLogCollector) + input.AddTagToModelInput(modelInput, macroState["artifact-registry"][0], dryRun, changeLogCollector) if containerTechUsed { - model.AddTagToModelInput(modelInput, macroState["container-registry"][0], dryRun, changeLogCollector) - model.AddTagToModelInput(modelInput, macroState["container-platform"][0], dryRun, changeLogCollector) + 
input.AddTagToModelInput(modelInput, macroState["container-registry"][0], dryRun, changeLogCollector) + input.AddTagToModelInput(modelInput, macroState["container-platform"][0], dryRun, changeLogCollector) } if codeInspectionUsed { - model.AddTagToModelInput(modelInput, macroState["code-inspection-platform"][0], dryRun, changeLogCollector) + input.AddTagToModelInput(modelInput, macroState["code-inspection-platform"][0], dryRun, changeLogCollector) } sourceRepoID := model.MakeID(macroState["source-repository"][0]) + "-sourcecode-repository" @@ -286,19 +290,19 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } owner := macroState["owner"][0] - if _, exists := model.ParsedModelRoot.DataAssets["Sourcecode"]; !exists { + if _, exists := parsedModel.DataAssets["Sourcecode"]; !exists { //fmt.Println("Adding data asset:", "sourcecode") // ################################################ - dataAsset := model.InputDataAsset{ + dataAsset := input.InputDataAsset{ ID: "sourcecode", Description: "Sourcecode to build the application components from", - Usage: model.DevOps.String(), + Usage: types.DevOps.String(), Tags: []string{}, Origin: "", Owner: owner, - Quantity: model.Few.String(), - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Important.String(), + Quantity: types.Few.String(), + Confidentiality: types.Confidential.String(), + Integrity: types.Critical.String(), + Availability: types.Important.String(), JustificationCiaRating: "Sourcecode is at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", } @@ -308,19 +312,19 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } } - if _, exists := model.ParsedModelRoot.DataAssets["Deployment"]; !exists { + if _, exists := parsedModel.DataAssets["Deployment"]; !exists { //fmt.Println("Adding data asset:", 
"deployment") // ################################################ - dataAsset := model.InputDataAsset{ + dataAsset := input.InputDataAsset{ ID: "deployment", Description: "Deployment unit being installed/shipped", - Usage: model.DevOps.String(), + Usage: types.DevOps.String(), Tags: []string{}, Origin: "", Owner: owner, - Quantity: model.VeryFew.String(), - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Important.String(), + Quantity: types.VeryFew.String(), + Confidentiality: types.Confidential.String(), + Integrity: types.Critical.String(), + Availability: types.Important.String(), JustificationCiaRating: "Deployment units are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", } @@ -331,90 +335,90 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } id := "development-client" - if _, exists := model.ParsedModelRoot.TechnicalAssets[id]; !exists { + if _, exists := parsedModel.TechnicalAssets[id]; !exists { //fmt.Println("Adding technical asset:", id) // ################################################ - encryption := model.NoneEncryption.String() + encryption := types.NoneEncryption.String() if strings.ToLower(macroState["encryption"][0]) == "yes" { - encryption = model.Transparent.String() + encryption = types.Transparent.String() } - commLinks := make(map[string]model.InputCommunicationLink) - commLinks["Sourcecode Repository Traffic"] = model.InputCommunicationLink{ + commLinks := make(map[string]input.InputCommunicationLink) + commLinks["Sourcecode Repository Traffic"] = input.InputCommunicationLink{ Target: sourceRepoID, Description: "Sourcecode Repository Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EndUserIdentityPropagation.String(), + Protocol: types.HTTPS.String(), + Authentication: 
types.Credentials.String(), + Authorization: types.EndUserIdentityPropagation.String(), Tags: []string{}, VPN: false, IpFiltered: false, Readonly: false, - Usage: model.DevOps.String(), + Usage: types.DevOps.String(), DataAssetsSent: []string{"sourcecode"}, DataAssetsReceived: []string{"sourcecode"}, DiagramTweakWeight: 0, DiagramTweakConstraint: false, } - commLinks["Build Pipeline Traffic"] = model.InputCommunicationLink{ + commLinks["Build Pipeline Traffic"] = input.InputCommunicationLink{ Target: buildPipelineID, Description: "Build Pipeline Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EndUserIdentityPropagation.String(), + Protocol: types.HTTPS.String(), + Authentication: types.Credentials.String(), + Authorization: types.EndUserIdentityPropagation.String(), Tags: []string{}, VPN: false, IpFiltered: false, Readonly: true, - Usage: model.DevOps.String(), + Usage: types.DevOps.String(), DataAssetsSent: nil, DataAssetsReceived: []string{"deployment"}, DiagramTweakWeight: 0, DiagramTweakConstraint: false, } - commLinks["Artifact Registry Traffic"] = model.InputCommunicationLink{ + commLinks["Artifact Registry Traffic"] = input.InputCommunicationLink{ Target: artifactRegistryID, Description: "Artifact Registry Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EndUserIdentityPropagation.String(), + Protocol: types.HTTPS.String(), + Authentication: types.Credentials.String(), + Authorization: types.EndUserIdentityPropagation.String(), Tags: []string{}, VPN: false, IpFiltered: false, Readonly: true, - Usage: model.DevOps.String(), + Usage: types.DevOps.String(), DataAssetsSent: nil, DataAssetsReceived: []string{"deployment"}, DiagramTweakWeight: 0, DiagramTweakConstraint: false, } if containerTechUsed { - commLinks["Container Registry Traffic"] = model.InputCommunicationLink{ + commLinks["Container Registry Traffic"] = 
input.InputCommunicationLink{ Target: containerRepoID, Description: "Container Registry Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EndUserIdentityPropagation.String(), + Protocol: types.HTTPS.String(), + Authentication: types.Credentials.String(), + Authorization: types.EndUserIdentityPropagation.String(), Tags: []string{}, VPN: false, IpFiltered: false, Readonly: false, - Usage: model.DevOps.String(), + Usage: types.DevOps.String(), DataAssetsSent: []string{"deployment"}, DataAssetsReceived: []string{"deployment"}, DiagramTweakWeight: 0, DiagramTweakConstraint: false, } - commLinks["Container Platform Traffic"] = model.InputCommunicationLink{ + commLinks["Container Platform Traffic"] = input.InputCommunicationLink{ Target: containerPlatformID, Description: "Container Platform Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EndUserIdentityPropagation.String(), + Protocol: types.HTTPS.String(), + Authentication: types.Credentials.String(), + Authorization: types.EndUserIdentityPropagation.String(), Tags: []string{}, VPN: false, IpFiltered: false, Readonly: false, - Usage: model.DevOps.String(), + Usage: types.DevOps.String(), DataAssetsSent: []string{"deployment"}, DataAssetsReceived: []string{"deployment"}, DiagramTweakWeight: 0, @@ -422,17 +426,17 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } } if codeInspectionUsed { - commLinks["Code Inspection Platform Traffic"] = model.InputCommunicationLink{ + commLinks["Code Inspection Platform Traffic"] = input.InputCommunicationLink{ Target: codeInspectionPlatformID, Description: "Code Inspection Platform Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.EndUserIdentityPropagation.String(), + Protocol: types.HTTPS.String(), + Authentication: types.Credentials.String(), + Authorization: 
types.EndUserIdentityPropagation.String(), Tags: []string{}, VPN: false, IpFiltered: false, Readonly: true, - Usage: model.DevOps.String(), + Usage: types.DevOps.String(), DataAssetsSent: nil, DataAssetsReceived: []string{"sourcecode"}, DiagramTweakWeight: 0, @@ -440,24 +444,24 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } } - techAsset := model.InputTechnicalAsset{ + techAsset := input.InputTechnicalAsset{ ID: id, Description: "Development Client", - Type: model.ExternalEntity.String(), - Usage: model.DevOps.String(), + Type: types.ExternalEntity.String(), + Usage: types.DevOps.String(), UsedAsClientByHuman: true, OutOfScope: true, JustificationOutOfScope: "Development client is not directly in-scope of the application.", - Size: model.System.String(), - Technology: model.DevOpsClient.String(), + Size: types.System.String(), + Technology: types.DevOpsClient.String(), Tags: []string{}, Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Physical.String(), + Machine: types.Physical.String(), Encryption: encryption, Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Important.String(), + Confidentiality: types.Confidential.String(), + Integrity: types.Critical.String(), + Availability: types.Important.String(), JustificationCiaRating: "Sourcecode processing components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", MultiTenant: false, @@ -475,31 +479,31 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } id = sourceRepoID - if _, exists := model.ParsedModelRoot.TechnicalAssets[id]; !exists { + if _, exists := parsedModel.TechnicalAssets[id]; !exists { //fmt.Println("Adding technical asset:", id) // ################################################ serverSideTechAssets = append(serverSideTechAssets, 
id) - encryption := model.NoneEncryption.String() + encryption := types.NoneEncryption.String() if strings.ToLower(macroState["encryption"][0]) == "yes" { - encryption = model.Transparent.String() + encryption = types.Transparent.String() } - techAsset := model.InputTechnicalAsset{ + techAsset := input.InputTechnicalAsset{ ID: id, Description: macroState["source-repository"][0] + " Sourcecode Repository", - Type: model.Process.String(), - Usage: model.DevOps.String(), + Type: types.Process.String(), + Usage: types.DevOps.String(), UsedAsClientByHuman: false, OutOfScope: false, JustificationOutOfScope: "", - Size: model.Service.String(), - Technology: model.SourcecodeRepository.String(), - Tags: []string{model.NormalizeTag(macroState["source-repository"][0])}, + Size: types.Service.String(), + Technology: types.SourcecodeRepository.String(), + Tags: []string{input.NormalizeTag(macroState["source-repository"][0])}, Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), + Machine: types.Virtual.String(), Encryption: encryption, Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Important.String(), + Confidentiality: types.Confidential.String(), + Integrity: types.Critical.String(), + Availability: types.Important.String(), JustificationCiaRating: "Sourcecode processing components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", @@ -518,31 +522,31 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry if containerTechUsed { id = containerRepoID - if _, exists := model.ParsedModelRoot.TechnicalAssets[id]; !exists { + if _, exists := parsedModel.TechnicalAssets[id]; !exists { //fmt.Println("Adding technical asset:", id) // 
################################################ serverSideTechAssets = append(serverSideTechAssets, id) - encryption := model.NoneEncryption.String() + encryption := types.NoneEncryption.String() if strings.ToLower(macroState["encryption"][0]) == "yes" { - encryption = model.Transparent.String() + encryption = types.Transparent.String() } - techAsset := model.InputTechnicalAsset{ + techAsset := input.InputTechnicalAsset{ ID: id, Description: macroState["container-registry"][0] + " Container Registry", - Type: model.Process.String(), - Usage: model.DevOps.String(), + Type: types.Process.String(), + Usage: types.DevOps.String(), UsedAsClientByHuman: false, OutOfScope: false, JustificationOutOfScope: "", - Size: model.Service.String(), - Technology: model.ArtifactRegistry.String(), - Tags: []string{model.NormalizeTag(macroState["container-registry"][0])}, + Size: types.Service.String(), + Technology: types.ArtifactRegistry.String(), + Tags: []string{input.NormalizeTag(macroState["container-registry"][0])}, Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), + Machine: types.Virtual.String(), Encryption: encryption, Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Important.String(), + Confidentiality: types.Confidential.String(), + Integrity: types.Critical.String(), + Availability: types.Important.String(), JustificationCiaRating: "Container registry components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", @@ -560,31 +564,31 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } id = containerPlatformID - if _, exists := model.ParsedModelRoot.TechnicalAssets[id]; !exists { + if _, exists := parsedModel.TechnicalAssets[id]; !exists { 
//fmt.Println("Adding technical asset:", id) // ################################################ serverSideTechAssets = append(serverSideTechAssets, id) - encryption := model.NoneEncryption.String() + encryption := types.NoneEncryption.String() if strings.ToLower(macroState["encryption"][0]) == "yes" { - encryption = model.Transparent.String() + encryption = types.Transparent.String() } - techAsset := model.InputTechnicalAsset{ + techAsset := input.InputTechnicalAsset{ ID: id, Description: macroState["container-platform"][0] + " Container Platform", - Type: model.Process.String(), - Usage: model.DevOps.String(), + Type: types.Process.String(), + Usage: types.DevOps.String(), UsedAsClientByHuman: false, OutOfScope: false, JustificationOutOfScope: "", - Size: model.System.String(), - Technology: model.ContainerPlatform.String(), - Tags: []string{model.NormalizeTag(macroState["container-platform"][0])}, + Size: types.System.String(), + Technology: types.ContainerPlatform.String(), + Tags: []string{input.NormalizeTag(macroState["container-platform"][0])}, Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), + Machine: types.Virtual.String(), Encryption: encryption, Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.MissionCritical.String(), - Availability: model.MissionCritical.String(), + Confidentiality: types.Confidential.String(), + Integrity: types.MissionCritical.String(), + Availability: types.MissionCritical.String(), JustificationCiaRating: "Container platform components are rated as 'mission-critical' in terms of integrity and availability, because any " + "malicious modification of it might lead to a backdoored production system.", MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", @@ -603,93 +607,93 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } id = buildPipelineID - if _, exists := model.ParsedModelRoot.TechnicalAssets[id]; 
!exists { + if _, exists := parsedModel.TechnicalAssets[id]; !exists { //fmt.Println("Adding technical asset:", id) // ################################################ serverSideTechAssets = append(serverSideTechAssets, id) - encryption := model.NoneEncryption.String() + encryption := types.NoneEncryption.String() if strings.ToLower(macroState["encryption"][0]) == "yes" { - encryption = model.Transparent.String() + encryption = types.Transparent.String() } - commLinks := make(map[string]model.InputCommunicationLink) - commLinks["Sourcecode Repository Traffic"] = model.InputCommunicationLink{ + commLinks := make(map[string]input.InputCommunicationLink) + commLinks["Sourcecode Repository Traffic"] = input.InputCommunicationLink{ Target: sourceRepoID, Description: "Sourcecode Repository Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), + Protocol: types.HTTPS.String(), + Authentication: types.Credentials.String(), + Authorization: types.TechnicalUser.String(), Tags: []string{}, VPN: false, IpFiltered: false, Readonly: true, - Usage: model.DevOps.String(), + Usage: types.DevOps.String(), DataAssetsSent: nil, DataAssetsReceived: []string{"sourcecode"}, DiagramTweakWeight: 0, DiagramTweakConstraint: false, } - commLinks["Artifact Registry Traffic"] = model.InputCommunicationLink{ + commLinks["Artifact Registry Traffic"] = input.InputCommunicationLink{ Target: artifactRegistryID, Description: "Artifact Registry Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), + Protocol: types.HTTPS.String(), + Authentication: types.Credentials.String(), + Authorization: types.TechnicalUser.String(), Tags: []string{}, VPN: false, IpFiltered: false, Readonly: false, - Usage: model.DevOps.String(), + Usage: types.DevOps.String(), DataAssetsSent: []string{"deployment"}, DataAssetsReceived: []string{"deployment"}, 
DiagramTweakWeight: 0, DiagramTweakConstraint: false, } if containerTechUsed { - commLinks["Container Registry Traffic"] = model.InputCommunicationLink{ + commLinks["Container Registry Traffic"] = input.InputCommunicationLink{ Target: containerRepoID, Description: "Container Registry Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), + Protocol: types.HTTPS.String(), + Authentication: types.Credentials.String(), + Authorization: types.TechnicalUser.String(), Tags: []string{}, VPN: false, IpFiltered: false, Readonly: false, - Usage: model.DevOps.String(), + Usage: types.DevOps.String(), DataAssetsSent: []string{"deployment"}, DataAssetsReceived: []string{"deployment"}, DiagramTweakWeight: 0, DiagramTweakConstraint: false, } if macroState["push-or-pull"][0] == pushOrPull[0] { // Push - commLinks["Container Platform Push"] = model.InputCommunicationLink{ + commLinks["Container Platform Push"] = input.InputCommunicationLink{ Target: containerPlatformID, Description: "Container Platform Push", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), + Protocol: types.HTTPS.String(), + Authentication: types.Credentials.String(), + Authorization: types.TechnicalUser.String(), Tags: []string{}, VPN: false, IpFiltered: false, Readonly: false, - Usage: model.DevOps.String(), + Usage: types.DevOps.String(), DataAssetsSent: []string{"deployment"}, DataAssetsReceived: []string{"deployment"}, DiagramTweakWeight: 0, DiagramTweakConstraint: false, } } else { // Pull - commLinkPull := model.InputCommunicationLink{ + commLinkPull := input.InputCommunicationLink{ Target: containerRepoID, Description: "Container Platform Pull", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), + Protocol: types.HTTPS.String(), + Authentication: types.Credentials.String(), 
+ Authorization: types.TechnicalUser.String(), Tags: []string{}, VPN: false, IpFiltered: false, Readonly: true, - Usage: model.DevOps.String(), + Usage: types.DevOps.String(), DataAssetsSent: nil, DataAssetsReceived: []string{"deployment"}, DiagramTweakWeight: 0, @@ -699,7 +703,7 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry titleOfTargetAsset := macroState["container-platform"][0] + " Container Platform" containerPlatform := modelInput.TechnicalAssets[titleOfTargetAsset] if containerPlatform.CommunicationLinks == nil { - containerPlatform.CommunicationLinks = make(map[string]model.InputCommunicationLink) + containerPlatform.CommunicationLinks = make(map[string]input.InputCommunicationLink) } containerPlatform.CommunicationLinks["Container Platform Pull"] = commLinkPull modelInput.TechnicalAssets[titleOfTargetAsset] = containerPlatform @@ -707,17 +711,17 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } } if codeInspectionUsed { - commLinks["Code Inspection Platform Traffic"] = model.InputCommunicationLink{ + commLinks["Code Inspection Platform Traffic"] = input.InputCommunicationLink{ Target: codeInspectionPlatformID, Description: "Code Inspection Platform Traffic", - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), + Protocol: types.HTTPS.String(), + Authentication: types.Credentials.String(), + Authorization: types.TechnicalUser.String(), Tags: []string{}, VPN: false, IpFiltered: false, Readonly: false, - Usage: model.DevOps.String(), + Usage: types.DevOps.String(), DataAssetsSent: []string{"sourcecode"}, DataAssetsReceived: []string{}, DiagramTweakWeight: 0, @@ -731,19 +735,19 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry if !dryRun { containerPlatform := modelInput.TechnicalAssets[macroState["container-platform"][0]+" Container Platform"] if 
containerPlatform.CommunicationLinks == nil { - containerPlatform.CommunicationLinks = make(map[string]model.InputCommunicationLink) + containerPlatform.CommunicationLinks = make(map[string]input.InputCommunicationLink) } - containerPlatform.CommunicationLinks["Container Spawning ("+deployTargetID+")"] = model.InputCommunicationLink{ + containerPlatform.CommunicationLinks["Container Spawning ("+deployTargetID+")"] = input.InputCommunicationLink{ Target: deployTargetID, Description: "Container Spawning " + deployTargetID, - Protocol: model.ContainerSpawning.String(), - Authentication: model.NoneAuthentication.String(), - Authorization: model.NoneAuthorization.String(), + Protocol: types.ContainerSpawning.String(), + Authentication: types.NoneAuthentication.String(), + Authorization: types.NoneAuthorization.String(), Tags: []string{}, VPN: false, IpFiltered: false, Readonly: false, - Usage: model.DevOps.String(), + Usage: types.DevOps.String(), DataAssetsSent: []string{"deployment"}, DataAssetsReceived: nil, DiagramTweakWeight: 0, @@ -753,17 +757,17 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } } else { // No Containers used if macroState["push-or-pull"][0] == pushOrPull[0] { // Push - commLinks["Deployment Push ("+deployTargetID+")"] = model.InputCommunicationLink{ + commLinks["Deployment Push ("+deployTargetID+")"] = input.InputCommunicationLink{ Target: deployTargetID, Description: "Deployment Push to " + deployTargetID, - Protocol: model.SSH.String(), - Authentication: model.ClientCertificate.String(), - Authorization: model.TechnicalUser.String(), + Protocol: types.SSH.String(), + Authentication: types.ClientCertificate.String(), + Authorization: types.TechnicalUser.String(), Tags: []string{}, VPN: false, IpFiltered: false, Readonly: false, - Usage: model.DevOps.String(), + Usage: types.DevOps.String(), DataAssetsSent: []string{"deployment"}, DataAssetsReceived: nil, DiagramTweakWeight: 0, @@ -771,17 +775,17 @@ func 
applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } } else { // Pull pullFromWhere := artifactRegistryID - commLinkPull := model.InputCommunicationLink{ + commLinkPull := input.InputCommunicationLink{ Target: pullFromWhere, Description: "Deployment Pull from " + deployTargetID, - Protocol: model.HTTPS.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), + Protocol: types.HTTPS.String(), + Authentication: types.Credentials.String(), + Authorization: types.TechnicalUser.String(), Tags: []string{}, VPN: false, IpFiltered: false, Readonly: true, - Usage: model.DevOps.String(), + Usage: types.DevOps.String(), DataAssetsSent: nil, DataAssetsReceived: []string{"deployment"}, DiagramTweakWeight: 0, @@ -789,20 +793,19 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } if !dryRun { // take care to lookup by title (as keyed in input YAML by title and only in parsed model representation by ID) - titleOfTargetAsset := model.ParsedModelRoot.TechnicalAssets[deployTargetID].Title + titleOfTargetAsset := parsedModel.TechnicalAssets[deployTargetID].Title x := modelInput.TechnicalAssets[titleOfTargetAsset] if x.CommunicationLinks == nil { - x.CommunicationLinks = make(map[string]model.InputCommunicationLink) + x.CommunicationLinks = make(map[string]input.InputCommunicationLink) } x.CommunicationLinks["Deployment Pull ("+deployTargetID+")"] = commLinkPull modelInput.TechnicalAssets[titleOfTargetAsset] = x } - } } // don't forget to also add the "deployment" data asset as stored on the target - targetAssetTitle := model.ParsedModelRoot.TechnicalAssets[deployTargetID].Title + targetAssetTitle := parsedModel.TechnicalAssets[deployTargetID].Title assetsStored := make([]string, 0) if modelInput.TechnicalAssets[targetAssetTitle].DataAssetsStored != nil { for _, val := range modelInput.TechnicalAssets[targetAssetTitle].DataAssetsStored { @@ -821,24 +824,24 @@ func 
applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } } - techAsset := model.InputTechnicalAsset{ + techAsset := input.InputTechnicalAsset{ ID: id, Description: macroState["build-pipeline"][0] + " Build Pipeline", - Type: model.Process.String(), - Usage: model.DevOps.String(), + Type: types.Process.String(), + Usage: types.DevOps.String(), UsedAsClientByHuman: false, OutOfScope: false, JustificationOutOfScope: "", - Size: model.Service.String(), - Technology: model.BuildPipeline.String(), - Tags: []string{model.NormalizeTag(macroState["build-pipeline"][0])}, + Size: types.Service.String(), + Technology: types.BuildPipeline.String(), + Tags: []string{input.NormalizeTag(macroState["build-pipeline"][0])}, Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), + Machine: types.Virtual.String(), Encryption: encryption, Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Important.String(), + Confidentiality: types.Confidential.String(), + Integrity: types.Critical.String(), + Availability: types.Important.String(), JustificationCiaRating: "Build pipeline components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", @@ -856,31 +859,31 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } id = artifactRegistryID - if _, exists := model.ParsedModelRoot.TechnicalAssets[id]; !exists { + if _, exists := parsedModel.TechnicalAssets[id]; !exists { //fmt.Println("Adding technical asset:", id) // ################################################ serverSideTechAssets = append(serverSideTechAssets, id) - encryption := model.NoneEncryption.String() + encryption := types.NoneEncryption.String() if strings.ToLower(macroState["encryption"][0]) == "yes" { 
- encryption = model.Transparent.String() + encryption = types.Transparent.String() } - techAsset := model.InputTechnicalAsset{ + techAsset := input.InputTechnicalAsset{ ID: id, Description: macroState["artifact-registry"][0] + " Artifact Registry", - Type: model.Process.String(), - Usage: model.DevOps.String(), + Type: types.Process.String(), + Usage: types.DevOps.String(), UsedAsClientByHuman: false, OutOfScope: false, JustificationOutOfScope: "", - Size: model.Service.String(), - Technology: model.ArtifactRegistry.String(), - Tags: []string{model.NormalizeTag(macroState["artifact-registry"][0])}, + Size: types.Service.String(), + Technology: types.ArtifactRegistry.String(), + Tags: []string{input.NormalizeTag(macroState["artifact-registry"][0])}, Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), + Machine: types.Virtual.String(), Encryption: encryption, Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Important.String(), + Confidentiality: types.Confidential.String(), + Integrity: types.Critical.String(), + Availability: types.Important.String(), JustificationCiaRating: "Artifact registry components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", @@ -899,31 +902,31 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry if codeInspectionUsed { id = codeInspectionPlatformID - if _, exists := model.ParsedModelRoot.TechnicalAssets[id]; !exists { + if _, exists := parsedModel.TechnicalAssets[id]; !exists { //fmt.Println("Adding technical asset:", id) // ################################################ serverSideTechAssets = append(serverSideTechAssets, id) - encryption := model.NoneEncryption.String() + encryption := types.NoneEncryption.String() if 
strings.ToLower(macroState["encryption"][0]) == "yes" { - encryption = model.Transparent.String() + encryption = types.Transparent.String() } - techAsset := model.InputTechnicalAsset{ + techAsset := input.InputTechnicalAsset{ ID: id, Description: macroState["code-inspection-platform"][0] + " Code Inspection Platform", - Type: model.Process.String(), - Usage: model.DevOps.String(), + Type: types.Process.String(), + Usage: types.DevOps.String(), UsedAsClientByHuman: false, OutOfScope: false, JustificationOutOfScope: "", - Size: model.Service.String(), - Technology: model.CodeInspectionPlatform.String(), - Tags: []string{model.NormalizeTag(macroState["code-inspection-platform"][0])}, + Size: types.Service.String(), + Technology: types.CodeInspectionPlatform.String(), + Tags: []string{input.NormalizeTag(macroState["code-inspection-platform"][0])}, Internet: strings.ToLower(macroState["internet"][0]) == "yes", - Machine: model.Virtual.String(), + Machine: types.Virtual.String(), Encryption: encryption, Owner: owner, - Confidentiality: model.Confidential.String(), - Integrity: model.Important.String(), - Availability: model.Operational.String(), + Confidentiality: types.Confidential.String(), + Integrity: types.Important.String(), + Availability: types.Operational.String(), JustificationCiaRating: "Sourcecode inspection platforms are rated at least 'important' in terms of integrity, because any " + "malicious modification of it might lead to vulnerabilities found by the scanner engine not being shown.", MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", @@ -946,7 +949,7 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry trustBoundaryType := macroState["new-trust-boundary-type"][0] //fmt.Println("Adding new trust boundary of type:", trustBoundaryType) title := "DevOps Network" - trustBoundary := model.InputTrustBoundary{ + trustBoundary := input.InputTrustBoundary{ ID: "devops-network", Description: "DevOps Network", 
Type: trustBoundaryType, @@ -961,7 +964,7 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } else { existingTrustBoundaryToAddTo := macroState["selected-trust-boundary"][0] //fmt.Println("Adding to existing trust boundary:", existingTrustBoundaryToAddTo) - title := model.ParsedModelRoot.TrustBoundaries[existingTrustBoundaryToAddTo].Title + title := parsedModel.TrustBoundaries[existingTrustBoundaryToAddTo].Title assetsInside := make([]string, 0) if modelInput.TrustBoundaries[title].TechnicalAssetsInside != nil { values := modelInput.TrustBoundaries[title].TechnicalAssetsInside @@ -977,7 +980,7 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry *changeLogCollector = append(*changeLogCollector, "filling existing trust boundary: "+existingTrustBoundaryToAddTo) if !dryRun { if modelInput.TrustBoundaries == nil { - modelInput.TrustBoundaries = make(map[string]model.InputTrustBoundary) + modelInput.TrustBoundaries = make(map[string]input.InputTrustBoundary) } tb := modelInput.TrustBoundaries[title] tb.TechnicalAssetsInside = mergedArrays @@ -993,16 +996,16 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry assetsRunning = append(assetsRunning, deployTargetID) } title := macroState["container-platform"][0] + " Runtime" - sharedRuntime := model.InputSharedRuntime{ + sharedRuntime := input.InputSharedRuntime{ ID: containerSharedRuntimeID, Description: title, - Tags: []string{model.NormalizeTag(macroState["container-platform"][0])}, + Tags: []string{input.NormalizeTag(macroState["container-platform"][0])}, TechnicalAssetsRunning: assetsRunning, } *changeLogCollector = append(*changeLogCollector, "adding shared runtime: "+containerSharedRuntimeID) if !dryRun { if modelInput.SharedRuntimes == nil { - modelInput.SharedRuntimes = make(map[string]model.InputSharedRuntime) + modelInput.SharedRuntimes = make(map[string]input.InputSharedRuntime) } modelInput.SharedRuntimes[title] = 
sharedRuntime } diff --git a/pkg/macros/built-in/add-vault/add-vault-macro.go b/pkg/macros/built-in/add-vault/add-vault-macro.go index 06fc5065..8add58cc 100644 --- a/pkg/macros/built-in/add-vault/add-vault-macro.go +++ b/pkg/macros/built-in/add-vault/add-vault-macro.go @@ -2,13 +2,17 @@ package add_vault import ( "fmt" - "github.com/threagile/threagile/model" "sort" "strings" + + "github.com/threagile/threagile/pkg/input" + "github.com/threagile/threagile/pkg/macros" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) -func GetMacroDetails() model.MacroDetails { - return model.MacroDetails{ +func GetMacroDetails() macros.MacroDetails { + return macros.MacroDetails{ ID: "add-vault", Title: "Add Vault", Description: "This model macro adds a vault (secret storage) to the model.", @@ -37,7 +41,7 @@ var authenticationTypes = []string{ "Credentials (username/password, API-key, secret token, etc.)", } -func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { +func GetNextQuestion(parsedModel *model.ParsedModel) (nextQuestion macros.MacroQuestion, err error) { counter := len(questionsAnswered) if counter > 5 && !withinTrustBoundary { counter++ @@ -47,7 +51,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { } switch counter { case 0: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "vault-name", Title: "What product is used as the vault?", Description: "This name affects the technical asset's title and ID plus also the tags used.", @@ -56,7 +60,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { DefaultAnswer: "", }, nil case 1: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "storage-type", Title: "What type of storage is used for the vault?", Description: "This selection affects the type of technical asset for the persistence.", @@ -65,7 +69,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { 
DefaultAnswer: "", }, nil case 2: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "authentication-type", Title: "What type of authentication is used for accessing the vault?", Description: "This selection affects the type of communication links.", @@ -74,7 +78,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { DefaultAnswer: "", }, nil case 3: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "multi-tenant", Title: "Is the vault used by multiple tenants?", Description: "", @@ -84,12 +88,12 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { }, nil case 4: possibleAnswers := make([]string, 0) - for id := range model.ParsedModelRoot.TechnicalAssets { + for id := range parsedModel.TechnicalAssets { possibleAnswers = append(possibleAnswers, id) } sort.Strings(possibleAnswers) if len(possibleAnswers) > 0 { - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "clients", Title: "Select all technical assets that make use of the vault and access it:", Description: "This affects the communication links being generated.", @@ -99,7 +103,7 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { }, nil } case 5: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "within-trust-boundary", Title: "Is the vault placed within a network trust boundary?", Description: "", @@ -109,13 +113,13 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { }, nil case 6: possibleAnswers := []string{createNewTrustBoundaryLabel} - for id, trustBoundary := range model.ParsedModelRoot.TrustBoundaries { + for id, trustBoundary := range parsedModel.TrustBoundaries { if trustBoundary.Type.IsNetworkBoundary() { possibleAnswers = append(possibleAnswers, id) } } sort.Strings(possibleAnswers) - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "selected-trust-boundary", Title: "Choose from the list of existing network trust boundaries or create a new 
one?", Description: "", @@ -124,21 +128,21 @@ func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { DefaultAnswer: "", }, nil case 7: - return model.MacroQuestion{ + return macros.MacroQuestion{ ID: "new-trust-boundary-type", Title: "Of which type shall the new trust boundary be?", Description: "", - PossibleAnswers: []string{model.NetworkOnPrem.String(), - model.NetworkDedicatedHoster.String(), - model.NetworkVirtualLAN.String(), - model.NetworkCloudProvider.String(), - model.NetworkCloudSecurityGroup.String(), - model.NetworkPolicyNamespaceIsolation.String()}, + PossibleAnswers: []string{types.NetworkOnPrem.String(), + types.NetworkDedicatedHoster.String(), + types.NetworkVirtualLAN.String(), + types.NetworkCloudProvider.String(), + types.NetworkCloudSecurityGroup.String(), + types.NetworkPolicyNamespaceIsolation.String()}, MultiSelect: false, - DefaultAnswer: model.NetworkOnPrem.String(), + DefaultAnswer: types.NetworkOnPrem.String(), }, nil } - return model.NoMoreQuestions(), nil + return macros.NoMoreQuestions(), nil } func ApplyAnswer(questionID string, answer ...string) (message string, validResult bool, err error) { @@ -162,35 +166,35 @@ func GoBack() (message string, validResult bool, err error) { return "Undo successful", true, nil } -func GetFinalChangeImpact(modelInput *model.ModelInput) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(modelInput *input.ModelInput, parsedModel *model.ParsedModel) (changes []string, message string, validResult bool, err error) { changeLogCollector := make([]string, 0) - message, validResult, err = applyChange(modelInput, &changeLogCollector, true) + message, validResult, err = applyChange(modelInput, parsedModel, &changeLogCollector, true) return changeLogCollector, message, validResult, err } -func Execute(modelInput *model.ModelInput) (message string, validResult bool, err error) { +func Execute(modelInput *input.ModelInput, parsedModel *model.ParsedModel) 
(message string, validResult bool, err error) { changeLogCollector := make([]string, 0) - message, validResult, err = applyChange(modelInput, &changeLogCollector, false) + message, validResult, err = applyChange(modelInput, parsedModel, &changeLogCollector, false) return message, validResult, err } -func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dryRun bool) (message string, validResult bool, err error) { - model.AddTagToModelInput(modelInput, macroState["vault-name"][0], dryRun, changeLogCollector) +func applyChange(modelInput *input.ModelInput, parsedModel *model.ParsedModel, changeLogCollector *[]string, dryRun bool) (message string, validResult bool, err error) { + input.AddTagToModelInput(modelInput, macroState["vault-name"][0], dryRun, changeLogCollector) var serverSideTechAssets = make([]string, 0) - if _, exists := model.ParsedModelRoot.DataAssets["Configuration Secrets"]; !exists { - dataAsset := model.InputDataAsset{ + if _, exists := parsedModel.DataAssets["Configuration Secrets"]; !exists { + dataAsset := input.InputDataAsset{ ID: "configuration-secrets", Description: "Configuration secrets (like credentials, keys, certificates, etc.) 
secured and managed by a vault", - Usage: model.DevOps.String(), + Usage: types.DevOps.String(), Tags: []string{}, Origin: "", Owner: "", - Quantity: model.VeryFew.String(), - Confidentiality: model.StrictlyConfidential.String(), - Integrity: model.Critical.String(), - Availability: model.Critical.String(), + Quantity: types.VeryFew.String(), + Confidentiality: types.StrictlyConfidential.String(), + Integrity: types.Critical.String(), + Availability: types.Critical.String(), JustificationCiaRating: "Configuration secrets are rated as being 'strictly-confidential'.", } *changeLogCollector = append(*changeLogCollector, "adding data asset: configuration-secrets") @@ -206,30 +210,30 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry storageID := "vault-storage" if databaseUsed || filesystemUsed { - tech := model.FileServer.String() // TODO ask for local or remote and only local use execution-environment (and add separate tech type LocalFilesystem?) + tech := types.FileServer.String() // TODO ask for local or remote and only local use execution-environment (and add separate tech type LocalFilesystem?) if databaseUsed { - tech = model.Database.String() + tech = types.Database.String() } - if _, exists := model.ParsedModelRoot.TechnicalAssets[storageID]; !exists { + if _, exists := parsedModel.TechnicalAssets[storageID]; !exists { serverSideTechAssets = append(serverSideTechAssets, storageID) - techAsset := model.InputTechnicalAsset{ + techAsset := input.InputTechnicalAsset{ ID: storageID, Description: "Vault Storage", - Type: model.Datastore.String(), - Usage: model.DevOps.String(), + Type: types.Datastore.String(), + Usage: types.DevOps.String(), UsedAsClientByHuman: false, OutOfScope: false, JustificationOutOfScope: "", - Size: model.Component.String(), + Size: types.Component.String(), Technology: tech, Tags: []string{}, // TODO: let user enter or too detailed for a wizard? 
Internet: false, - Machine: model.Virtual.String(), // TODO: let user enter or too detailed for a wizard? - Encryption: model.DataWithSymmetricSharedKey.String(), // can be assumed for a vault product as at least having some good encryption + Machine: types.Virtual.String(), // TODO: let user enter or too detailed for a wizard? + Encryption: types.DataWithSymmetricSharedKey.String(), // can be assumed for a vault product as at least having some good encryption Owner: "", - Confidentiality: model.Confidential.String(), - Integrity: model.Critical.String(), - Availability: model.Critical.String(), + Confidentiality: types.Confidential.String(), + Integrity: types.Critical.String(), + Availability: types.Critical.String(), JustificationCiaRating: "Vault components are only rated as 'confidential' as vaults usually apply a trust barrier to encrypt all data-at-rest with a vault key.", MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", Redundant: false, @@ -248,61 +252,61 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry vaultID := model.MakeID(macroState["vault-name"][0]) + "-vault" - if _, exists := model.ParsedModelRoot.TechnicalAssets[vaultID]; !exists { + if _, exists := parsedModel.TechnicalAssets[vaultID]; !exists { serverSideTechAssets = append(serverSideTechAssets, vaultID) - commLinks := make(map[string]model.InputCommunicationLink) + commLinks := make(map[string]input.InputCommunicationLink) if databaseUsed || filesystemUsed { - accessLink := model.InputCommunicationLink{ + accessLink := input.InputCommunicationLink{ Target: storageID, Description: "Vault Storage Access", - Protocol: model.LocalFileAccess.String(), - Authentication: model.Credentials.String(), - Authorization: model.TechnicalUser.String(), + Protocol: types.LocalFileAccess.String(), + Authentication: types.Credentials.String(), + Authorization: types.TechnicalUser.String(), Tags: []string{}, VPN: false, IpFiltered: false, Readonly: false, - 
Usage: model.DevOps.String(), + Usage: types.DevOps.String(), DataAssetsSent: []string{"configuration-secrets"}, DataAssetsReceived: []string{"configuration-secrets"}, DiagramTweakWeight: 0, DiagramTweakConstraint: false, } if databaseUsed { - accessLink.Protocol = model.SqlAccessProtocol.String() // TODO ask if encrypted and ask if NoSQL? or to detailed for a wizard? + accessLink.Protocol = types.SqlAccessProtocol.String() // TODO ask if encrypted and ask if NoSQL? or to detailed for a wizard? } commLinks["Vault Storage Access"] = accessLink } - authentication := model.NoneAuthentication.String() + authentication := types.NoneAuthentication.String() if macroState["authentication-type"][0] == authenticationTypes[0] { - authentication = model.ClientCertificate.String() + authentication = types.ClientCertificate.String() } else if macroState["authentication-type"][0] == authenticationTypes[1] { - authentication = model.Externalized.String() + authentication = types.Externalized.String() } else if macroState["authentication-type"][0] == authenticationTypes[2] { - authentication = model.Externalized.String() + authentication = types.Externalized.String() } else if macroState["authentication-type"][0] == authenticationTypes[3] { - authentication = model.Credentials.String() + authentication = types.Credentials.String() } for _, clientID := range macroState["clients"] { // add a connection from each client - clientAccessCommLink := model.InputCommunicationLink{ + clientAccessCommLink := input.InputCommunicationLink{ Target: vaultID, Description: "Vault Access Traffic (by " + clientID + ")", - Protocol: model.HTTPS.String(), + Protocol: types.HTTPS.String(), Authentication: authentication, - Authorization: model.TechnicalUser.String(), + Authorization: types.TechnicalUser.String(), Tags: []string{}, VPN: false, IpFiltered: false, Readonly: true, - Usage: model.DevOps.String(), + Usage: types.DevOps.String(), DataAssetsSent: nil, DataAssetsReceived: 
[]string{"configuration-secrets"}, DiagramTweakWeight: 0, DiagramTweakConstraint: false, } - clientAssetTitle := model.ParsedModelRoot.TechnicalAssets[clientID].Title + clientAssetTitle := parsedModel.TechnicalAssets[clientID].Title if !dryRun { client := modelInput.TechnicalAssets[clientAssetTitle] client.CommunicationLinks["Vault Access ("+clientID+")"] = clientAccessCommLink @@ -327,24 +331,24 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } } - techAsset := model.InputTechnicalAsset{ + techAsset := input.InputTechnicalAsset{ ID: vaultID, Description: macroState["vault-name"][0] + " Vault", - Type: model.Process.String(), - Usage: model.DevOps.String(), + Type: types.Process.String(), + Usage: types.DevOps.String(), UsedAsClientByHuman: false, OutOfScope: false, JustificationOutOfScope: "", - Size: model.Service.String(), - Technology: model.Vault.String(), - Tags: []string{model.NormalizeTag(macroState["vault-name"][0])}, + Size: types.Service.String(), + Technology: types.Vault.String(), + Tags: []string{input.NormalizeTag(macroState["vault-name"][0])}, Internet: false, - Machine: model.Virtual.String(), - Encryption: model.Transparent.String(), + Machine: types.Virtual.String(), + Encryption: types.Transparent.String(), Owner: "", - Confidentiality: model.StrictlyConfidential.String(), - Integrity: model.Critical.String(), - Availability: model.Critical.String(), + Confidentiality: types.StrictlyConfidential.String(), + Integrity: types.Critical.String(), + Availability: types.Critical.String(), JustificationCiaRating: "Vault components are rated as 'strictly-confidential'.", MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", Redundant: false, @@ -366,10 +370,10 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry vaultEnvID := "vault-environment" if filesystemUsed { title := "Vault Environment" - trustBoundary := model.InputTrustBoundary{ + trustBoundary := 
input.InputTrustBoundary{ ID: vaultEnvID, Description: "Vault Environment", - Type: model.ExecutionEnvironment.String(), + Type: types.ExecutionEnvironment.String(), Tags: []string{}, TechnicalAssetsInside: []string{vaultID, storageID}, TrustBoundariesNested: nil, @@ -384,7 +388,7 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry if createNewTrustBoundary { trustBoundaryType := macroState["new-trust-boundary-type"][0] title := "Vault Network" - trustBoundary := model.InputTrustBoundary{ + trustBoundary := input.InputTrustBoundary{ ID: "vault-network", Description: "Vault Network", Type: trustBoundaryType, @@ -401,7 +405,7 @@ func applyChange(modelInput *model.ModelInput, changeLogCollector *[]string, dry } } else { // adding to existing trust boundary existingTrustBoundaryToAddTo := macroState["selected-trust-boundary"][0] - title := model.ParsedModelRoot.TrustBoundaries[existingTrustBoundaryToAddTo].Title + title := parsedModel.TrustBoundaries[existingTrustBoundaryToAddTo].Title if filesystemUsed { // ---------------------- nest as execution-environment trust boundary ---------------------- boundariesNested := make([]string, 0) diff --git a/pkg/macros/built-in/built-in.go b/pkg/macros/built-in/built-in.go new file mode 100644 index 00000000..bbf0b632 --- /dev/null +++ b/pkg/macros/built-in/built-in.go @@ -0,0 +1,22 @@ +package builtin + +import ( + "github.com/threagile/threagile/pkg/macros" + addbuildpipeline "github.com/threagile/threagile/pkg/macros/built-in/add-build-pipeline" + addvault "github.com/threagile/threagile/pkg/macros/built-in/add-vault" + prettyprint "github.com/threagile/threagile/pkg/macros/built-in/pretty-print" + removeunusedtags "github.com/threagile/threagile/pkg/macros/built-in/remove-unused-tags" + seedrisktracking "github.com/threagile/threagile/pkg/macros/built-in/seed-risk-tracking" + seedtags "github.com/threagile/threagile/pkg/macros/built-in/seed-tags" +) + +func ListBuiltInMacros() 
[]macros.MacroDetails { + return []macros.MacroDetails{ + addbuildpipeline.GetMacroDetails(), + addvault.GetMacroDetails(), + prettyprint.GetMacroDetails(), + removeunusedtags.GetMacroDetails(), + seedrisktracking.GetMacroDetails(), + seedtags.GetMacroDetails(), + } +} diff --git a/pkg/macros/built-in/pretty-print/pretty-print-macro.go b/pkg/macros/built-in/pretty-print/pretty-print-macro.go index 51c05c05..93772caf 100644 --- a/pkg/macros/built-in/pretty-print/pretty-print-macro.go +++ b/pkg/macros/built-in/pretty-print/pretty-print-macro.go @@ -1,17 +1,20 @@ package pretty_print -import "github.com/threagile/threagile/model" +import ( + "github.com/threagile/threagile/pkg/input" + "github.com/threagile/threagile/pkg/macros" +) -func GetMacroDetails() model.MacroDetails { - return model.MacroDetails{ +func GetMacroDetails() macros.MacroDetails { + return macros.MacroDetails{ ID: "pretty-print", Title: "Pretty Print", Description: "This model macro simply reformats the model file in a pretty-print style.", } } -func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { - return model.NoMoreQuestions(), nil +func GetNextQuestion() (nextQuestion macros.MacroQuestion, err error) { + return macros.NoMoreQuestions(), nil } func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { @@ -22,10 +25,10 @@ func GoBack() (message string, validResult bool, err error) { return "Cannot go back further", false, nil } -func GetFinalChangeImpact(_ *model.ModelInput) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(_ *input.ModelInput) (changes []string, message string, validResult bool, err error) { return []string{"pretty-printing the model file"}, "Changeset valid", true, err } -func Execute(_ *model.ModelInput) (message string, validResult bool, err error) { +func Execute(_ *input.ModelInput) (message string, validResult bool, err error) { return "Model pretty printing successful", true, nil } 
diff --git a/pkg/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go b/pkg/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go index 478ce5a2..f8f3ec58 100644 --- a/pkg/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go +++ b/pkg/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go @@ -1,21 +1,24 @@ package remove_unused_tags import ( - "github.com/threagile/threagile/model" "sort" "strconv" + + "github.com/threagile/threagile/pkg/input" + "github.com/threagile/threagile/pkg/macros" + "github.com/threagile/threagile/pkg/model" ) -func GetMacroDetails() model.MacroDetails { - return model.MacroDetails{ +func GetMacroDetails() macros.MacroDetails { + return macros.MacroDetails{ ID: "remove-unused-tags", Title: "Remove Unused Tags", Description: "This model macro simply removes all unused tags from the model file.", } } -func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { - return model.NoMoreQuestions(), nil +func GetNextQuestion() (nextQuestion macros.MacroQuestion, err error) { + return macros.NoMoreQuestions(), nil } func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { @@ -26,21 +29,21 @@ func GoBack() (message string, validResult bool, err error) { return "Cannot go back further", false, nil } -func GetFinalChangeImpact(_ *model.ModelInput) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(_ *input.ModelInput) (changes []string, message string, validResult bool, err error) { return []string{"remove unused tags from the model file"}, "Changeset valid", true, err } -func Execute(modelInput *model.ModelInput) (message string, validResult bool, err error) { +func Execute(modelInput *input.ModelInput, parsedModel *model.ParsedModel) (message string, validResult bool, err error) { tagUsageMap := make(map[string]bool) - for _, tag := range model.ParsedModelRoot.TagsAvailable { + for _, tag := range parsedModel.TagsAvailable { 
tagUsageMap[tag] = false // false = tag is not used } - for _, dA := range model.ParsedModelRoot.DataAssets { + for _, dA := range parsedModel.DataAssets { for _, tag := range dA.Tags { tagUsageMap[tag] = true // true = tag is used } } - for _, tA := range model.ParsedModelRoot.TechnicalAssets { + for _, tA := range parsedModel.TechnicalAssets { for _, tag := range tA.Tags { tagUsageMap[tag] = true // true = tag is used } @@ -50,12 +53,12 @@ func Execute(modelInput *model.ModelInput) (message string, validResult bool, er } } } - for _, tB := range model.ParsedModelRoot.TrustBoundaries { + for _, tB := range parsedModel.TrustBoundaries { for _, tag := range tB.Tags { tagUsageMap[tag] = true // true = tag is used } } - for _, sR := range model.ParsedModelRoot.SharedRuntimes { + for _, sR := range parsedModel.SharedRuntimes { for _, tag := range sR.Tags { tagUsageMap[tag] = true // true = tag is used } diff --git a/pkg/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go b/pkg/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go index 8635c07b..2b6b45db 100644 --- a/pkg/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go +++ b/pkg/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go @@ -1,21 +1,25 @@ package seed_risk_tracking import ( - "github.com/threagile/threagile/model" "sort" "strconv" + + "github.com/threagile/threagile/pkg/input" + "github.com/threagile/threagile/pkg/macros" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) -func GetMacroDetails() model.MacroDetails { - return model.MacroDetails{ +func GetMacroDetails() macros.MacroDetails { + return macros.MacroDetails{ ID: "seed-risk-tracking", Title: "Seed Risk Tracking", Description: "This model macro simply seeds the model file with initial risk tracking entries for all untracked risks.", } } -func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { - return model.NoMoreQuestions(), nil +func 
GetNextQuestion() (nextQuestion macros.MacroQuestion, err error) { + return macros.NoMoreQuestions(), nil } func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { @@ -26,24 +30,24 @@ func GoBack() (message string, validResult bool, err error) { return "Cannot go back further", false, nil } -func GetFinalChangeImpact(_ *model.ModelInput) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(_ *input.ModelInput) (changes []string, message string, validResult bool, err error) { return []string{"seed the model file with with initial risk tracking entries for all untracked risks"}, "Changeset valid", true, err } -func Execute(modelInput *model.ModelInput) (message string, validResult bool, err error) { +func Execute(parsedModel *model.ParsedModel, modelInput *input.ModelInput) (message string, validResult bool, err error) { syntheticRiskIDsToCreateTrackingFor := make([]string, 0) - for id, risk := range model.GeneratedRisksBySyntheticId { - if !risk.IsRiskTracked() { + for id, risk := range parsedModel.GeneratedRisksBySyntheticId { + if !risk.IsRiskTracked(parsedModel) { syntheticRiskIDsToCreateTrackingFor = append(syntheticRiskIDsToCreateTrackingFor, id) } } sort.Strings(syntheticRiskIDsToCreateTrackingFor) if modelInput.RiskTracking == nil { - modelInput.RiskTracking = make(map[string]model.InputRiskTracking) + modelInput.RiskTracking = make(map[string]input.InputRiskTracking) } for _, id := range syntheticRiskIDsToCreateTrackingFor { - modelInput.RiskTracking[id] = model.InputRiskTracking{ - Status: model.Unchecked.String(), + modelInput.RiskTracking[id] = input.InputRiskTracking{ + Status: types.Unchecked.String(), Justification: "", Ticket: "", Date: "", diff --git a/pkg/macros/built-in/seed-tags/seed-tags-macro.go b/pkg/macros/built-in/seed-tags/seed-tags-macro.go index 427a5281..730b19f4 100644 --- a/pkg/macros/built-in/seed-tags/seed-tags-macro.go +++ 
b/pkg/macros/built-in/seed-tags/seed-tags-macro.go @@ -1,21 +1,24 @@ package seed_tags import ( - "github.com/threagile/threagile/model" "sort" "strconv" + + "github.com/threagile/threagile/pkg/input" + "github.com/threagile/threagile/pkg/macros" + "github.com/threagile/threagile/pkg/model" ) -func GetMacroDetails() model.MacroDetails { - return model.MacroDetails{ +func GetMacroDetails() macros.MacroDetails { + return macros.MacroDetails{ ID: "seed-tags", Title: "Seed Tags", Description: "This model macro simply seeds the model file with supported tags from all risk rules.", } } -func GetNextQuestion() (nextQuestion model.MacroQuestion, err error) { - return model.NoMoreQuestions(), nil +func GetNextQuestion() (nextQuestion macros.MacroQuestion, err error) { + return macros.NoMoreQuestions(), nil } func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { @@ -26,16 +29,16 @@ func GoBack() (message string, validResult bool, err error) { return "Cannot go back further", false, nil } -func GetFinalChangeImpact(_ *model.ModelInput) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(_ *input.ModelInput) (changes []string, message string, validResult bool, err error) { return []string{"seed the model file with supported tags from all risk rules"}, "Changeset valid", true, err } -func Execute(modelInput *model.ModelInput) (message string, validResult bool, err error) { +func Execute(modelInput *input.ModelInput, parsedModel *model.ParsedModel) (message string, validResult bool, err error) { tagMap := make(map[string]bool) - for k, v := range model.AllSupportedTags { + for k, v := range parsedModel.AllSupportedTags { tagMap[k] = v } - for _, tagFromModel := range model.ParsedModelRoot.TagsAvailable { + for _, tagFromModel := range parsedModel.TagsAvailable { tagMap[tagFromModel] = true } tagsSorted := make([]string, 0) @@ -44,5 +47,5 @@ func Execute(modelInput *model.ModelInput) (message string, 
validResult bool, er } sort.Strings(tagsSorted) modelInput.TagsAvailable = tagsSorted - return "Model file seeding with " + strconv.Itoa(len(model.AllSupportedTags)) + " tags successful", true, nil + return "Model file seeding with " + strconv.Itoa(len(parsedModel.AllSupportedTags)) + " tags successful", true, nil } diff --git a/pkg/macros/macros.go b/pkg/macros/macros.go new file mode 100644 index 00000000..64720344 --- /dev/null +++ b/pkg/macros/macros.go @@ -0,0 +1,57 @@ +/* +Copyright © 2023 NAME HERE +*/ +package macros + +import ( + "strings" +) + +func ListCustomMacros() []MacroDetails { + // TODO: implement + return []MacroDetails{} +} + +type MacroDetails struct { + ID, Title, Description string +} + +type MacroQuestion struct { + ID, Title, Description string + PossibleAnswers []string + MultiSelect bool + DefaultAnswer string +} + +const NoMoreQuestionsID = "" + +func NoMoreQuestions() MacroQuestion { + return MacroQuestion{ + ID: NoMoreQuestionsID, + Title: "", + Description: "", + PossibleAnswers: nil, + MultiSelect: false, + DefaultAnswer: "", + } +} + +func (what MacroQuestion) NoMoreQuestions() bool { + return what.ID == NoMoreQuestionsID +} + +func (what MacroQuestion) IsValueConstrained() bool { + return what.PossibleAnswers != nil && len(what.PossibleAnswers) > 0 +} + +func (what MacroQuestion) IsMatchingValueConstraint(answer string) bool { + if what.IsValueConstrained() { + for _, val := range what.PossibleAnswers { + if strings.ToLower(val) == strings.ToLower(answer) { + return true + } + } + return false + } + return true +} diff --git a/pkg/model/communication_link.go b/pkg/model/communication_link.go new file mode 100644 index 00000000..bcbb8853 --- /dev/null +++ b/pkg/model/communication_link.go @@ -0,0 +1,271 @@ +/* +Copyright © 2023 NAME HERE +*/ +package model + +import ( + "fmt" + "sort" + + "github.com/threagile/threagile/pkg/colors" + "github.com/threagile/threagile/pkg/security/types" +) + +type CommunicationLink struct { + Id, 
SourceId, TargetId, Title, Description string + Protocol types.Protocol + Tags []string + VPN, IpFiltered, Readonly bool + Authentication types.Authentication + Authorization types.Authorization + Usage types.Usage + DataAssetsSent, DataAssetsReceived []string + DiagramTweakWeight int + DiagramTweakConstraint bool +} + +func (what CommunicationLink) IsTaggedWithAny(tags ...string) bool { + return containsCaseInsensitiveAny(what.Tags, tags...) +} + +func (what CommunicationLink) IsTaggedWithBaseTag(baseTag string) bool { + return IsTaggedWithBaseTag(what.Tags, baseTag) +} + +func (what CommunicationLink) IsAcrossTrustBoundary(parsedModel *ParsedModel) bool { + trustBoundaryOfSourceAsset := parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId[what.SourceId] + trustBoundaryOfTargetAsset := parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId[what.TargetId] + return trustBoundaryOfSourceAsset.Id != trustBoundaryOfTargetAsset.Id +} + +func (what CommunicationLink) IsAcrossTrustBoundaryNetworkOnly(parsedModel *ParsedModel) bool { + trustBoundaryOfSourceAsset := parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId[what.SourceId] + if !trustBoundaryOfSourceAsset.Type.IsNetworkBoundary() { // find and use the parent boundary then + trustBoundaryOfSourceAsset = parsedModel.TrustBoundaries[trustBoundaryOfSourceAsset.ParentTrustBoundaryID(parsedModel)] + } + trustBoundaryOfTargetAsset := parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId[what.TargetId] + if !trustBoundaryOfTargetAsset.Type.IsNetworkBoundary() { // find and use the parent boundary then + trustBoundaryOfTargetAsset = parsedModel.TrustBoundaries[trustBoundaryOfTargetAsset.ParentTrustBoundaryID(parsedModel)] + } + return trustBoundaryOfSourceAsset.Id != trustBoundaryOfTargetAsset.Id && trustBoundaryOfTargetAsset.Type.IsNetworkBoundary() +} + +func (what CommunicationLink) HighestConfidentiality(parsedModel *ParsedModel) types.Confidentiality { + highest := 
types.Public + for _, dataId := range what.DataAssetsSent { + dataAsset := parsedModel.DataAssets[dataId] + if dataAsset.Confidentiality > highest { + highest = dataAsset.Confidentiality + } + } + for _, dataId := range what.DataAssetsReceived { + dataAsset := parsedModel.DataAssets[dataId] + if dataAsset.Confidentiality > highest { + highest = dataAsset.Confidentiality + } + } + return highest +} + +func (what CommunicationLink) HighestIntegrity(parsedModel *ParsedModel) types.Criticality { + highest := types.Archive + for _, dataId := range what.DataAssetsSent { + dataAsset := parsedModel.DataAssets[dataId] + if dataAsset.Integrity > highest { + highest = dataAsset.Integrity + } + } + for _, dataId := range what.DataAssetsReceived { + dataAsset := parsedModel.DataAssets[dataId] + if dataAsset.Integrity > highest { + highest = dataAsset.Integrity + } + } + return highest +} + +func (what CommunicationLink) HighestAvailability(parsedModel *ParsedModel) types.Criticality { + highest := types.Archive + for _, dataId := range what.DataAssetsSent { + dataAsset := parsedModel.DataAssets[dataId] + if dataAsset.Availability > highest { + highest = dataAsset.Availability + } + } + for _, dataId := range what.DataAssetsReceived { + dataAsset := parsedModel.DataAssets[dataId] + if dataAsset.Availability > highest { + highest = dataAsset.Availability + } + } + return highest +} + +func (what CommunicationLink) DataAssetsSentSorted(parsedModel *ParsedModel) []DataAsset { + result := make([]DataAsset, 0) + for _, assetID := range what.DataAssetsSent { + result = append(result, parsedModel.DataAssets[assetID]) + } + sort.Sort(byDataAssetTitleSort(result)) + return result +} + +func (what CommunicationLink) DataAssetsReceivedSorted(parsedModel *ParsedModel) []DataAsset { + result := make([]DataAsset, 0) + for _, assetID := range what.DataAssetsReceived { + result = append(result, parsedModel.DataAssets[assetID]) + } + sort.Sort(byDataAssetTitleSort(result)) + return result +} + 
+func (what CommunicationLink) IsBidirectional() bool { + return len(what.DataAssetsSent) > 0 && len(what.DataAssetsReceived) > 0 +} + +// === Style stuff ======================================= + +// Line Styles: + +// dotted when model forgery attempt (i.e. nothing being sent and received) + +func (what CommunicationLink) DetermineArrowLineStyle() string { + if len(what.DataAssetsSent) == 0 && len(what.DataAssetsReceived) == 0 { + return "dotted" // dotted, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... + } + if what.Usage == types.DevOps { + return "dashed" + } + return "solid" +} + +// Pen Widths: + +func (what CommunicationLink) DetermineArrowPenWidth(parsedModel *ParsedModel) string { + if what.DetermineArrowColor(parsedModel) == colors.Pink { + return fmt.Sprintf("%f", 3.0) + } + if what.DetermineArrowColor(parsedModel) != colors.Black { + return fmt.Sprintf("%f", 2.5) + } + return fmt.Sprintf("%f", 1.5) +} + +func (what CommunicationLink) DetermineLabelColor(parsedModel *ParsedModel) string { + // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here + /* + if dataFlow.Protocol.IsEncrypted() { + return colors.Gray + } else {*/ + // check for red + for _, sentDataAsset := range what.DataAssetsSent { + if parsedModel.DataAssets[sentDataAsset].Integrity == types.MissionCritical { + return colors.Red + } + } + for _, receivedDataAsset := range what.DataAssetsReceived { + if parsedModel.DataAssets[receivedDataAsset].Integrity == types.MissionCritical { + return colors.Red + } + } + // check for amber + for _, sentDataAsset := range what.DataAssetsSent { + if parsedModel.DataAssets[sentDataAsset].Integrity == types.Critical { + return colors.Amber + } + } + for _, receivedDataAsset := range what.DataAssetsReceived { + if parsedModel.DataAssets[receivedDataAsset].Integrity == types.Critical { + return 
colors.Amber + } + } + // default + return colors.Gray + +} + +// pink when model forgery attempt (i.e. nothing being sent and received) + +func (what CommunicationLink) DetermineArrowColor(parsedModel *ParsedModel) string { + // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here + if len(what.DataAssetsSent) == 0 && len(what.DataAssetsReceived) == 0 || + what.Protocol == types.UnknownProtocol { + return colors.Pink // pink, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... + } + if what.Usage == types.DevOps { + return colors.MiddleLightGray + } else if what.VPN { + return colors.DarkBlue + } else if what.IpFiltered { + return colors.Brown + } + // check for red + for _, sentDataAsset := range what.DataAssetsSent { + if parsedModel.DataAssets[sentDataAsset].Confidentiality == types.StrictlyConfidential { + return colors.Red + } + } + for _, receivedDataAsset := range what.DataAssetsReceived { + if parsedModel.DataAssets[receivedDataAsset].Confidentiality == types.StrictlyConfidential { + return colors.Red + } + } + // check for amber + for _, sentDataAsset := range what.DataAssetsSent { + if parsedModel.DataAssets[sentDataAsset].Confidentiality == types.Confidential { + return colors.Amber + } + } + for _, receivedDataAsset := range what.DataAssetsReceived { + if parsedModel.DataAssets[receivedDataAsset].Confidentiality == types.Confidential { + return colors.Amber + } + } + // default + return colors.Black + /* + } else if dataFlow.Authentication != NoneAuthentication { + return colors.Black + } else { + // check for red + for _, sentDataAsset := range dataFlow.DataAssetsSent { // first check if any red? + if ParsedModelRoot.DataAssets[sentDataAsset].Integrity == MissionCritical { + return colors.Red + } + } + for _, receivedDataAsset := range dataFlow.DataAssetsReceived { // first check if any red? 
+ if ParsedModelRoot.DataAssets[receivedDataAsset].Integrity == MissionCritical { + return colors.Red + } + } + // check for amber + for _, sentDataAsset := range dataFlow.DataAssetsSent { // then check if any amber? + if ParsedModelRoot.DataAssets[sentDataAsset].Integrity == Critical { + return colors.Amber + } + } + for _, receivedDataAsset := range dataFlow.DataAssetsReceived { // then check if any amber? + if ParsedModelRoot.DataAssets[receivedDataAsset].Integrity == Critical { + return colors.Amber + } + } + return colors.Black + } + */ +} + +type ByTechnicalCommunicationLinkIdSort []CommunicationLink + +func (what ByTechnicalCommunicationLinkIdSort) Len() int { return len(what) } +func (what ByTechnicalCommunicationLinkIdSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } +func (what ByTechnicalCommunicationLinkIdSort) Less(i, j int) bool { + return what[i].Id > what[j].Id +} + +type ByTechnicalCommunicationLinkTitleSort []CommunicationLink + +func (what ByTechnicalCommunicationLinkTitleSort) Len() int { return len(what) } +func (what ByTechnicalCommunicationLinkTitleSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } +func (what ByTechnicalCommunicationLinkTitleSort) Less(i, j int) bool { + return what[i].Title > what[j].Title +} diff --git a/pkg/model/data_asset.go b/pkg/model/data_asset.go new file mode 100644 index 00000000..eb468d85 --- /dev/null +++ b/pkg/model/data_asset.go @@ -0,0 +1,272 @@ +/* +Copyright © 2023 NAME HERE +*/ +package model + +import ( + "sort" + + "github.com/threagile/threagile/pkg/security/types" +) + +type DataAsset struct { + Id string `yaml:"id" json:"id"` // TODO: tag here still required? + Title string `yaml:"title" json:"title"` // TODO: tag here still required? + Description string `yaml:"description" json:"description"` // TODO: tag here still required? 
+ Usage types.Usage `yaml:"usage" json:"usage"` + Tags []string `yaml:"tags" json:"tags"` + Origin string `yaml:"origin" json:"origin"` + Owner string `yaml:"owner" json:"owner"` + Quantity types.Quantity `yaml:"quantity" json:"quantity"` + Confidentiality types.Confidentiality `yaml:"confidentiality" json:"confidentiality"` + Integrity types.Criticality `yaml:"integrity" json:"integrity"` + Availability types.Criticality `yaml:"availability" json:"availability"` + JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` +} + +func (what DataAsset) IsTaggedWithAny(tags ...string) bool { + return containsCaseInsensitiveAny(what.Tags, tags...) +} + +func (what DataAsset) IsTaggedWithBaseTag(baseTag string) bool { + return IsTaggedWithBaseTag(what.Tags, baseTag) +} + +/* +func (what DataAsset) IsAtRisk() bool { + for _, techAsset := range what.ProcessedByTechnicalAssetsSorted() { + if len(ReduceToOnlyStillAtRisk(techAsset.GeneratedRisks())) > 0 { + return true + } + } + for _, techAsset := range what.StoredByTechnicalAssetsSorted() { + if len(ReduceToOnlyStillAtRisk(techAsset.GeneratedRisks())) > 0 { + return true + } + } + return false +} +*/ + +/* +func (what DataAsset) IdentifiedRiskSeverityStillAtRisk() RiskSeverity { + highestRiskSeverity := Low + for _, techAsset := range what.ProcessedByTechnicalAssetsSorted() { + candidateSeverity := HighestSeverityStillAtRisk(ReduceToOnlyStillAtRisk(techAsset.GeneratedRisks())) + if candidateSeverity > highestRiskSeverity { + highestRiskSeverity = candidateSeverity + } + } + for _, techAsset := range what.StoredByTechnicalAssetsSorted() { + candidateSeverity := HighestSeverityStillAtRisk(ReduceToOnlyStillAtRisk(techAsset.GeneratedRisks())) + if candidateSeverity > highestRiskSeverity { + highestRiskSeverity = candidateSeverity + } + } + return highestRiskSeverity +} +*/ + +func (what DataAsset) IdentifiedRisksByResponsibleTechnicalAssetId(model *ParsedModel) map[string][]Risk { + 
uniqueTechAssetIDsResponsibleForThisDataAsset := make(map[string]interface{}) + for _, techAsset := range what.ProcessedByTechnicalAssetsSorted(model) { + if len(techAsset.GeneratedRisks(model)) > 0 { + uniqueTechAssetIDsResponsibleForThisDataAsset[techAsset.Id] = true + } + } + for _, techAsset := range what.StoredByTechnicalAssetsSorted(model) { + if len(techAsset.GeneratedRisks(model)) > 0 { + uniqueTechAssetIDsResponsibleForThisDataAsset[techAsset.Id] = true + } + } + + result := make(map[string][]Risk) + for techAssetId := range uniqueTechAssetIDsResponsibleForThisDataAsset { + result[techAssetId] = append(result[techAssetId], model.TechnicalAssets[techAssetId].GeneratedRisks(model)...) + } + return result +} + +func (what DataAsset) IsDataBreachPotentialStillAtRisk(parsedModel *ParsedModel) bool { + for _, risk := range FilteredByStillAtRisk(parsedModel) { + for _, techAsset := range risk.DataBreachTechnicalAssetIDs { + if contains(parsedModel.TechnicalAssets[techAsset].DataAssetsProcessed, what.Id) { + return true + } + if contains(parsedModel.TechnicalAssets[techAsset].DataAssetsStored, what.Id) { + return true + } + } + } + return false +} + +func (what DataAsset) IdentifiedDataBreachProbability(parsedModel *ParsedModel) types.DataBreachProbability { + highestProbability := types.Improbable + for _, risk := range AllRisks(parsedModel) { + for _, techAsset := range risk.DataBreachTechnicalAssetIDs { + if contains(parsedModel.TechnicalAssets[techAsset].DataAssetsProcessed, what.Id) { + if risk.DataBreachProbability > highestProbability { + highestProbability = risk.DataBreachProbability + break + } + } + if contains(parsedModel.TechnicalAssets[techAsset].DataAssetsStored, what.Id) { + if risk.DataBreachProbability > highestProbability { + highestProbability = risk.DataBreachProbability + break + } + } + } + } + return highestProbability +} + +func (what DataAsset) IdentifiedDataBreachProbabilityStillAtRisk(parsedModel *ParsedModel) 
types.DataBreachProbability { + highestProbability := types.Improbable + for _, risk := range FilteredByStillAtRisk(parsedModel) { + for _, techAsset := range risk.DataBreachTechnicalAssetIDs { + if contains(parsedModel.TechnicalAssets[techAsset].DataAssetsProcessed, what.Id) { + if risk.DataBreachProbability > highestProbability { + highestProbability = risk.DataBreachProbability + break + } + } + if contains(parsedModel.TechnicalAssets[techAsset].DataAssetsStored, what.Id) { + if risk.DataBreachProbability > highestProbability { + highestProbability = risk.DataBreachProbability + break + } + } + } + } + return highestProbability +} + +func (what DataAsset) IdentifiedDataBreachProbabilityRisksStillAtRisk(parsedModel *ParsedModel) []Risk { + result := make([]Risk, 0) + for _, risk := range FilteredByStillAtRisk(parsedModel) { + for _, techAsset := range risk.DataBreachTechnicalAssetIDs { + if contains(parsedModel.TechnicalAssets[techAsset].DataAssetsProcessed, what.Id) { + result = append(result, risk) + break + } + if contains(parsedModel.TechnicalAssets[techAsset].DataAssetsStored, what.Id) { + result = append(result, risk) + break + } + } + } + return result +} + +func (what DataAsset) IdentifiedDataBreachProbabilityRisks(parsedModel *ParsedModel) []Risk { + result := make([]Risk, 0) + for _, risk := range AllRisks(parsedModel) { + for _, techAsset := range risk.DataBreachTechnicalAssetIDs { + if contains(parsedModel.TechnicalAssets[techAsset].DataAssetsProcessed, what.Id) { + result = append(result, risk) + break + } + if contains(parsedModel.TechnicalAssets[techAsset].DataAssetsStored, what.Id) { + result = append(result, risk) + break + } + } + } + return result +} + +func (what DataAsset) ProcessedByTechnicalAssetsSorted(parsedModel *ParsedModel) []TechnicalAsset { + result := make([]TechnicalAsset, 0) + for _, technicalAsset := range parsedModel.TechnicalAssets { + for _, candidateID := range technicalAsset.DataAssetsProcessed { + if candidateID == what.Id 
{ + result = append(result, technicalAsset) + } + } + } + sort.Sort(ByTechnicalAssetTitleSort(result)) + return result +} + +func (what DataAsset) StoredByTechnicalAssetsSorted(parsedModel *ParsedModel) []TechnicalAsset { + result := make([]TechnicalAsset, 0) + for _, technicalAsset := range parsedModel.TechnicalAssets { + for _, candidateID := range technicalAsset.DataAssetsStored { + if candidateID == what.Id { + result = append(result, technicalAsset) + } + } + } + sort.Sort(ByTechnicalAssetTitleSort(result)) + return result +} + +func (what DataAsset) SentViaCommLinksSorted(parsedModel *ParsedModel) []CommunicationLink { + result := make([]CommunicationLink, 0) + for _, technicalAsset := range parsedModel.TechnicalAssets { + for _, commLink := range technicalAsset.CommunicationLinks { + for _, candidateID := range commLink.DataAssetsSent { + if candidateID == what.Id { + result = append(result, commLink) + } + } + } + } + sort.Sort(ByTechnicalCommunicationLinkTitleSort(result)) + return result +} + +func (what DataAsset) ReceivedViaCommLinksSorted(parsedModel *ParsedModel) []CommunicationLink { + result := make([]CommunicationLink, 0) + for _, technicalAsset := range parsedModel.TechnicalAssets { + for _, commLink := range technicalAsset.CommunicationLinks { + for _, candidateID := range commLink.DataAssetsReceived { + if candidateID == what.Id { + result = append(result, commLink) + } + } + } + } + sort.Sort(ByTechnicalCommunicationLinkTitleSort(result)) + return result +} + +func SortByDataAssetDataBreachProbabilityAndTitle(parsedModel *ParsedModel, assets []DataAsset) { + sort.Slice(assets, func(i, j int) bool { + highestDataBreachProbabilityLeft := assets[i].IdentifiedDataBreachProbability(parsedModel) + highestDataBreachProbabilityRight := assets[j].IdentifiedDataBreachProbability(parsedModel) + if highestDataBreachProbabilityLeft == highestDataBreachProbabilityRight { + return assets[i].Title < assets[j].Title + } + return highestDataBreachProbabilityLeft 
> highestDataBreachProbabilityRight + }) +} + +func SortByDataAssetDataBreachProbabilityAndTitleStillAtRisk(parsedModel *ParsedModel, assets []DataAsset) { + sort.Slice(assets, func(i, j int) bool { + risksLeft := assets[i].IdentifiedDataBreachProbabilityRisksStillAtRisk(parsedModel) + risksRight := assets[j].IdentifiedDataBreachProbabilityRisksStillAtRisk(parsedModel) + highestDataBreachProbabilityLeft := assets[i].IdentifiedDataBreachProbabilityStillAtRisk(parsedModel) + highestDataBreachProbabilityRight := assets[j].IdentifiedDataBreachProbabilityStillAtRisk(parsedModel) + if highestDataBreachProbabilityLeft == highestDataBreachProbabilityRight { + if len(risksLeft) == 0 && len(risksRight) > 0 { + return false + } + if len(risksLeft) > 0 && len(risksRight) == 0 { + return true + } + return assets[i].Title < assets[j].Title + } + return highestDataBreachProbabilityLeft > highestDataBreachProbabilityRight + }) +} + +type ByDataAssetTitleSort []DataAsset + +func (what ByDataAssetTitleSort) Len() int { return len(what) } +func (what ByDataAssetTitleSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } +func (what ByDataAssetTitleSort) Less(i, j int) bool { + return what[i].Title < what[j].Title +} diff --git a/pkg/model/helpers.go b/pkg/model/helpers.go new file mode 100644 index 00000000..4c21e5de --- /dev/null +++ b/pkg/model/helpers.go @@ -0,0 +1,53 @@ +/* +Copyright © 2023 NAME HERE +*/ +package model + +import ( + "regexp" + "strings" +) + +func MakeID(val string) string { + reg, _ := regexp.Compile("[^A-Za-z0-9]+") + return strings.Trim(reg.ReplaceAllString(strings.ToLower(val), "-"), "- ") +} + +func contains(a []string, x string) bool { + for _, n := range a { + if x == n { + return true + } + } + return false +} + +func containsCaseInsensitiveAny(a []string, x ...string) bool { + for _, n := range a { + for _, c := range x { + if strings.TrimSpace(strings.ToLower(c)) == strings.TrimSpace(strings.ToLower(n)) { + return true + } + } + } + return false 
+} + +func IsTaggedWithBaseTag(tags []string, baseTag string) bool { // base tags are before the colon ":" like in "aws:ec2" it's "aws". The subtag is after the colon. Also, a pure "aws" tag matches the base tag "aws" + baseTag = strings.ToLower(strings.TrimSpace(baseTag)) + for _, tag := range tags { + tag = strings.ToLower(strings.TrimSpace(tag)) + if tag == baseTag || strings.HasPrefix(tag, baseTag+":") { + return true + } + } + return false +} + +type byDataAssetTitleSort []DataAsset + +func (what byDataAssetTitleSort) Len() int { return len(what) } +func (what byDataAssetTitleSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } +func (what byDataAssetTitleSort) Less(i, j int) bool { + return what[i].Title < what[j].Title +} diff --git a/pkg/model/model.go b/pkg/model/model.go new file mode 100644 index 00000000..a929a73f --- /dev/null +++ b/pkg/model/model.go @@ -0,0 +1,159 @@ +/* +Copyright © 2023 NAME HERE +*/ +package model + +import ( + "sort" + "time" + + "github.com/threagile/threagile/pkg/input" + "github.com/threagile/threagile/pkg/security/types" +) + +type ParsedModel struct { + Author input.Author + Title string + Date time.Time + ManagementSummaryComment string + BusinessOverview input.Overview + TechnicalOverview input.Overview + BusinessCriticality types.Criticality + SecurityRequirements map[string]string + Questions map[string]string + AbuseCases map[string]string + TagsAvailable []string + DataAssets map[string]DataAsset + TechnicalAssets map[string]TechnicalAsset + TrustBoundaries map[string]TrustBoundary + SharedRuntimes map[string]SharedRuntime + IndividualRiskCategories map[string]RiskCategory + RiskTracking map[string]RiskTracking + CommunicationLinks map[string]CommunicationLink + AllSupportedTags map[string]bool + DiagramTweakNodesep, DiagramTweakRanksep int + DiagramTweakEdgeLayout string + DiagramTweakSuppressEdgeLabels bool + DiagramTweakLayoutLeftToRight bool + DiagramTweakInvisibleConnectionsBetweenAssets []string + 
DiagramTweakSameRankAssets []string + + // TODO: those are generated based on items above and needs to be private + IncomingTechnicalCommunicationLinksMappedByTargetId map[string][]CommunicationLink + DirectContainingTrustBoundaryMappedByTechnicalAssetId map[string]TrustBoundary + GeneratedRisksByCategory map[RiskCategory][]Risk + GeneratedRisksBySyntheticId map[string]Risk +} + +func CalculateSeverity(likelihood types.RiskExploitationLikelihood, impact types.RiskExploitationImpact) types.RiskSeverity { + result := likelihood.Weight() * impact.Weight() + if result <= 1 { + return types.LowSeverity + } + if result <= 3 { + return types.MediumSeverity + } + if result <= 8 { + return types.ElevatedSeverity + } + if result <= 12 { + return types.HighSeverity + } + return types.CriticalSeverity +} + +func (model *ParsedModel) InScopeTechnicalAssets() []TechnicalAsset { + result := make([]TechnicalAsset, 0) + for _, asset := range model.TechnicalAssets { + if !asset.OutOfScope { + result = append(result, asset) + } + } + return result +} + +func (what *ParsedModel) SortedTechnicalAssetIDs() []string { + res := make([]string, 0) + for id := range what.TechnicalAssets { + res = append(res, id) + } + sort.Strings(res) + return res +} + +func (what *ParsedModel) TagsActuallyUsed() []string { + result := make([]string, 0) + for _, tag := range what.TagsAvailable { + if len(what.TechnicalAssetsTaggedWithAny(tag)) > 0 || + len(what.CommunicationLinksTaggedWithAny(tag)) > 0 || + len(what.DataAssetsTaggedWithAny(tag)) > 0 || + len(what.TrustBoundariesTaggedWithAny(tag)) > 0 || + len(what.SharedRuntimesTaggedWithAny(tag)) > 0 { + result = append(result, tag) + } + } + return result +} + +func (what *ParsedModel) TechnicalAssetsTaggedWithAny(tags ...string) []TechnicalAsset { + result := make([]TechnicalAsset, 0) + for _, candidate := range what.TechnicalAssets { + if candidate.IsTaggedWithAny(tags...) 
{ + result = append(result, candidate) + } + } + return result +} + +func (what *ParsedModel) CommunicationLinksTaggedWithAny(tags ...string) []CommunicationLink { + result := make([]CommunicationLink, 0) + for _, asset := range what.TechnicalAssets { + for _, candidate := range asset.CommunicationLinks { + if candidate.IsTaggedWithAny(tags...) { + result = append(result, candidate) + } + } + } + return result +} + +func (what *ParsedModel) DataAssetsTaggedWithAny(tags ...string) []DataAsset { + result := make([]DataAsset, 0) + for _, candidate := range what.DataAssets { + if candidate.IsTaggedWithAny(tags...) { + result = append(result, candidate) + } + } + return result +} + +func (what *ParsedModel) TrustBoundariesTaggedWithAny(tags ...string) []TrustBoundary { + result := make([]TrustBoundary, 0) + for _, candidate := range what.TrustBoundaries { + if candidate.IsTaggedWithAny(tags...) { + result = append(result, candidate) + } + } + return result +} + +func (what *ParsedModel) SharedRuntimesTaggedWithAny(tags ...string) []SharedRuntime { + result := make([]SharedRuntime, 0) + for _, candidate := range what.SharedRuntimes { + if candidate.IsTaggedWithAny(tags...) 
{ + result = append(result, candidate) + } + } + return result +} + +func (what *ParsedModel) OutOfScopeTechnicalAssets() []TechnicalAsset { + assets := make([]TechnicalAsset, 0) + for _, asset := range what.TechnicalAssets { + if asset.OutOfScope { + assets = append(assets, asset) + } + } + sort.Sort(ByTechnicalAssetTitleSort(assets)) + return assets +} diff --git a/pkg/model/risks.go b/pkg/model/risks.go new file mode 100644 index 00000000..c8f1849b --- /dev/null +++ b/pkg/model/risks.go @@ -0,0 +1,873 @@ +/* +Copyright © 2023 NAME HERE +*/ +package model + +import ( + "log" + "sort" + "time" + + "github.com/threagile/threagile/pkg/run" + "github.com/threagile/threagile/pkg/security/types" +) + +type RiskCategory struct { + // TODO: refactor all "Id" here and elsewhere to "ID" + Id string + Title string + Description string + Impact string + ASVS string + CheatSheet string + Action string + Mitigation string + Check string + DetectionLogic string + RiskAssessment string + FalsePositives string + Function types.RiskFunction + STRIDE types.STRIDE + ModelFailurePossibleReason bool + CWE int +} + +type BuiltInRisk struct { + Category func() RiskCategory + SupportedTags func() []string + GenerateRisks func(m *ParsedModel) []Risk +} + +type CustomRisk struct { + ID string + Category RiskCategory + Tags []string + Runner *run.Runner +} + +func (r *CustomRisk) GenerateRisks(m *ParsedModel) []Risk { + if r.Runner == nil { + return nil + } + + risks := make([]Risk, 0) + runError := r.Runner.Run(m, &risks, "-generate-risks") + if runError != nil { + log.Fatalf("Failed to generate risks for custom risk rule %q: %v\n", r.Runner.Filename, runError) + } + + return risks +} + +type RiskTracking struct { + SyntheticRiskId, Justification, Ticket, CheckedBy string + Status types.RiskStatus + Date time.Time +} + +type Risk struct { + Category RiskCategory `yaml:"-" json:"-"` // just for navigational convenience... 
not JSON marshalled + CategoryId string `yaml:"category" json:"category"` // used for better JSON marshalling, is assigned in risk evaluation phase automatically + RiskStatus types.RiskStatus `yaml:"risk_status" json:"risk_status"` // used for better JSON marshalling, is assigned in risk evaluation phase automatically + Severity types.RiskSeverity `yaml:"severity" json:"severity"` + ExploitationLikelihood types.RiskExploitationLikelihood `yaml:"exploitation_likelihood" json:"exploitation_likelihood"` + ExploitationImpact types.RiskExploitationImpact `yaml:"exploitation_impact" json:"exploitation_impact"` + Title string `yaml:"title" json:"title"` + SyntheticId string `yaml:"synthetic_id" json:"synthetic_id"` + MostRelevantDataAssetId string `yaml:"most_relevant_data_asset" json:"most_relevant_data_asset"` + MostRelevantTechnicalAssetId string `yaml:"most_relevant_technical_asset" json:"most_relevant_technical_asset"` + MostRelevantTrustBoundaryId string `yaml:"most_relevant_trust_boundary" json:"most_relevant_trust_boundary"` + MostRelevantSharedRuntimeId string `yaml:"most_relevant_shared_runtime" json:"most_relevant_shared_runtime"` + MostRelevantCommunicationLinkId string `yaml:"most_relevant_communication_link" json:"most_relevant_communication_link"` + DataBreachProbability types.DataBreachProbability `yaml:"data_breach_probability" json:"data_breach_probability"` + DataBreachTechnicalAssetIDs []string `yaml:"data_breach_technical_assets" json:"data_breach_technical_assets"` + // TODO: refactor all "Id" here to "ID"? +} + +func (what Risk) GetRiskTracking(model *ParsedModel) RiskTracking { // TODO: Unify function naming regarding Get etc. 
+ var result RiskTracking + if riskTracking, ok := model.RiskTracking[what.SyntheticId]; ok { + result = riskTracking + } + return result +} + +func (what Risk) GetRiskTrackingStatusDefaultingUnchecked(model *ParsedModel) types.RiskStatus { + if riskTracking, ok := model.RiskTracking[what.SyntheticId]; ok { + return riskTracking.Status + } + return types.Unchecked +} + +func (what Risk) IsRiskTracked(model *ParsedModel) bool { + if _, ok := model.RiskTracking[what.SyntheticId]; ok { + return true + } + return false +} + +func AllRisks(parsedModel *ParsedModel) []Risk { + result := make([]Risk, 0) + for _, risks := range parsedModel.GeneratedRisksByCategory { + for _, risk := range risks { + result = append(result, risk) + } + } + return result +} + +func ReduceToOnlyStillAtRisk(parsedModel *ParsedModel, risks []Risk) []Risk { + filteredRisks := make([]Risk, 0) + for _, risk := range risks { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { + filteredRisks = append(filteredRisks, risk) + } + } + return filteredRisks +} + +func HighestExploitationLikelihood(risks []Risk) types.RiskExploitationLikelihood { + result := types.Unlikely + for _, risk := range risks { + if risk.ExploitationLikelihood > result { + result = risk.ExploitationLikelihood + } + } + return result +} + +func HighestExploitationImpact(risks []Risk) types.RiskExploitationImpact { + result := types.LowImpact + for _, risk := range risks { + if risk.ExploitationImpact > result { + result = risk.ExploitationImpact + } + } + return result +} + +type CustomRiskRule struct { + Category func() RiskCategory + SupportedTags func() []string + GenerateRisks func(input *ParsedModel) []Risk +} + +func HighestSeverityStillAtRisk(model *ParsedModel, risks []Risk) types.RiskSeverity { + result := types.LowSeverity + for _, risk := range risks { + if risk.Severity > result && risk.GetRiskTrackingStatusDefaultingUnchecked(model).IsStillAtRisk() { + result = risk.Severity + } + } + 
return result +} + +type ByRiskCategoryTitleSort []RiskCategory + +func (what ByRiskCategoryTitleSort) Len() int { return len(what) } +func (what ByRiskCategoryTitleSort) Swap(i, j int) { + what[i], what[j] = what[j], what[i] +} +func (what ByRiskCategoryTitleSort) Less(i, j int) bool { + return what[i].Title < what[j].Title +} + +func SortByRiskCategoryHighestContainingRiskSeveritySortStillAtRisk(parsedModel *ParsedModel, riskCategories []RiskCategory) { + sort.Slice(riskCategories, func(i, j int) bool { + risksLeft := ReduceToOnlyStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[riskCategories[i]]) + risksRight := ReduceToOnlyStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[riskCategories[j]]) + highestLeft := HighestSeverityStillAtRisk(parsedModel, risksLeft) + highestRight := HighestSeverityStillAtRisk(parsedModel, risksRight) + if highestLeft == highestRight { + if len(risksLeft) == 0 && len(risksRight) > 0 { + return false + } + if len(risksLeft) > 0 && len(risksRight) == 0 { + return true + } + return riskCategories[i].Title < riskCategories[j].Title + } + return highestLeft > highestRight + }) +} + +type RiskStatistics struct { + // TODO add also some more like before / after (i.e. 
with mitigation applied) + Risks map[string]map[string]int `yaml:"risks" json:"risks"` +} + +func SortByRiskSeverity(risks []Risk, parsedModel *ParsedModel) { + sort.Slice(risks, func(i, j int) bool { + if risks[i].Severity == risks[j].Severity { + trackingStatusLeft := risks[i].GetRiskTrackingStatusDefaultingUnchecked(parsedModel) + trackingStatusRight := risks[j].GetRiskTrackingStatusDefaultingUnchecked(parsedModel) + if trackingStatusLeft == trackingStatusRight { + impactLeft := risks[i].ExploitationImpact + impactRight := risks[j].ExploitationImpact + if impactLeft == impactRight { + likelihoodLeft := risks[i].ExploitationLikelihood + likelihoodRight := risks[j].ExploitationLikelihood + if likelihoodLeft == likelihoodRight { + return risks[i].Title < risks[j].Title + } else { + return likelihoodLeft > likelihoodRight + } + } else { + return impactLeft > impactRight + } + } else { + return trackingStatusLeft < trackingStatusRight + } + } + return risks[i].Severity > risks[j].Severity + + }) +} + +func SortByDataBreachProbability(risks []Risk, parsedModel *ParsedModel) { + sort.Slice(risks, func(i, j int) bool { + + if risks[i].DataBreachProbability == risks[j].DataBreachProbability { + trackingStatusLeft := risks[i].GetRiskTrackingStatusDefaultingUnchecked(parsedModel) + trackingStatusRight := risks[j].GetRiskTrackingStatusDefaultingUnchecked(parsedModel) + if trackingStatusLeft == trackingStatusRight { + return risks[i].Title < risks[j].Title + } else { + return trackingStatusLeft < trackingStatusRight + } + } + return risks[i].DataBreachProbability > risks[j].DataBreachProbability + }) +} + +type RiskRule interface { + Category() RiskCategory + GenerateRisks(parsedModel *ParsedModel) []Risk +} + +// as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + +func SortedRiskCategories(parsedModel *ParsedModel) []RiskCategory { + categories := make([]RiskCategory, 0) + for k := range parsedModel.GeneratedRisksByCategory { + 
categories = append(categories, k) + } + SortByRiskCategoryHighestContainingRiskSeveritySortStillAtRisk(parsedModel, categories) + return categories +} + +func SortedRisksOfCategory(parsedModel *ParsedModel, category RiskCategory) []Risk { + risks := parsedModel.GeneratedRisksByCategory[category] + SortByRiskSeverity(risks, parsedModel) + return risks +} + +func CountRisks(risksByCategory map[RiskCategory][]Risk) int { + result := 0 + for _, risks := range risksByCategory { + result += len(risks) + } + return result +} + +func RisksOfOnlySTRIDESpoofing(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { + result := make(map[RiskCategory][]Risk) + for _, risks := range risksByCategory { + for _, risk := range risks { + if risk.Category.STRIDE == types.Spoofing { + result[risk.Category] = append(result[risk.Category], risk) + } + } + } + return result +} + +func RisksOfOnlySTRIDETampering(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { + result := make(map[RiskCategory][]Risk) + for _, risks := range risksByCategory { + for _, risk := range risks { + if risk.Category.STRIDE == types.Tampering { + result[risk.Category] = append(result[risk.Category], risk) + } + } + } + return result +} + +func RisksOfOnlySTRIDERepudiation(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { + result := make(map[RiskCategory][]Risk) + for _, risks := range risksByCategory { + for _, risk := range risks { + if risk.Category.STRIDE == types.Repudiation { + result[risk.Category] = append(result[risk.Category], risk) + } + } + } + return result +} + +func RisksOfOnlySTRIDEInformationDisclosure(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { + result := make(map[RiskCategory][]Risk) + for _, risks := range risksByCategory { + for _, risk := range risks { + if risk.Category.STRIDE == types.InformationDisclosure { + result[risk.Category] = append(result[risk.Category], risk) + } + } + } + return result +} + +func 
RisksOfOnlySTRIDEDenialOfService(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { + result := make(map[RiskCategory][]Risk) + for _, risks := range risksByCategory { + for _, risk := range risks { + if risk.Category.STRIDE == types.DenialOfService { + result[risk.Category] = append(result[risk.Category], risk) + } + } + } + return result +} + +func RisksOfOnlySTRIDEElevationOfPrivilege(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { + result := make(map[RiskCategory][]Risk) + for _, risks := range risksByCategory { + for _, risk := range risks { + if risk.Category.STRIDE == types.ElevationOfPrivilege { + result[risk.Category] = append(result[risk.Category], risk) + } + } + } + return result +} + +func RisksOfOnlyBusinessSide(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { + result := make(map[RiskCategory][]Risk) + for _, risks := range risksByCategory { + for _, risk := range risks { + if risk.Category.Function == types.BusinessSide { + result[risk.Category] = append(result[risk.Category], risk) + } + } + } + return result +} + +func RisksOfOnlyArchitecture(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { + result := make(map[RiskCategory][]Risk) + for _, risks := range risksByCategory { + for _, risk := range risks { + if risk.Category.Function == types.Architecture { + result[risk.Category] = append(result[risk.Category], risk) + } + } + } + return result +} + +func RisksOfOnlyDevelopment(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { + result := make(map[RiskCategory][]Risk) + for _, risks := range risksByCategory { + for _, risk := range risks { + if risk.Category.Function == types.Development { + result[risk.Category] = append(result[risk.Category], risk) + } + } + } + return result +} + +func RisksOfOnlyOperation(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { + result := make(map[RiskCategory][]Risk) + for _, risks := range risksByCategory { + 
for _, risk := range risks { + if risk.Category.Function == types.Operations { + result[risk.Category] = append(result[risk.Category], risk) + } + } + } + return result +} + +func CategoriesOfOnlyRisksStillAtRisk(parsedModel *ParsedModel, risksByCategory map[RiskCategory][]Risk) []RiskCategory { + categories := make(map[RiskCategory]struct{}) // Go's trick of unique elements is a map + for _, risks := range risksByCategory { + for _, risk := range risks { + if !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { + continue + } + categories[risk.Category] = struct{}{} + } + } + // return as slice (of now unique values) + return keysAsSlice(categories) +} + +func CategoriesOfOnlyCriticalRisks(parsedModel *ParsedModel, risksByCategory map[RiskCategory][]Risk, initialRisks bool) []RiskCategory { + categories := make(map[RiskCategory]struct{}) // Go's trick of unique elements is a map + for _, risks := range risksByCategory { + for _, risk := range risks { + if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { + continue + } + if risk.Severity == types.CriticalSeverity { + categories[risk.Category] = struct{}{} + } + } + } + // return as slice (of now unique values) + return keysAsSlice(categories) +} + +func CategoriesOfOnlyHighRisks(parsedModel *ParsedModel, risksByCategory map[RiskCategory][]Risk, initialRisks bool) []RiskCategory { + categories := make(map[RiskCategory]struct{}) // Go's trick of unique elements is a map + for _, risks := range risksByCategory { + for _, risk := range risks { + if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { + continue + } + highest := HighestSeverity(parsedModel.GeneratedRisksByCategory[risk.Category]) + if !initialRisks { + highest = HighestSeverityStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[risk.Category]) + } + if risk.Severity == types.HighSeverity && highest < types.CriticalSeverity { + 
categories[risk.Category] = struct{}{} + } + } + } + // return as slice (of now unique values) + return keysAsSlice(categories) +} + +func CategoriesOfOnlyElevatedRisks(parsedModel *ParsedModel, risksByCategory map[RiskCategory][]Risk, initialRisks bool) []RiskCategory { + categories := make(map[RiskCategory]struct{}) // Go's trick of unique elements is a map + for _, risks := range risksByCategory { + for _, risk := range risks { + if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { + continue + } + highest := HighestSeverity(parsedModel.GeneratedRisksByCategory[risk.Category]) + if !initialRisks { + highest = HighestSeverityStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[risk.Category]) + } + if risk.Severity == types.ElevatedSeverity && highest < types.HighSeverity { + categories[risk.Category] = struct{}{} + } + } + } + // return as slice (of now unique values) + return keysAsSlice(categories) +} + +func CategoriesOfOnlyMediumRisks(parsedModel *ParsedModel, risksByCategory map[RiskCategory][]Risk, initialRisks bool) []RiskCategory { + categories := make(map[RiskCategory]struct{}) // Go's trick of unique elements is a map + for _, risks := range risksByCategory { + for _, risk := range risks { + if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { + continue + } + highest := HighestSeverity(parsedModel.GeneratedRisksByCategory[risk.Category]) + if !initialRisks { + highest = HighestSeverityStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[risk.Category]) + } + if risk.Severity == types.MediumSeverity && highest < types.ElevatedSeverity { + categories[risk.Category] = struct{}{} + } + } + } + // return as slice (of now unique values) + return keysAsSlice(categories) +} + +func CategoriesOfOnlyLowRisks(parsedModel *ParsedModel, risksByCategory map[RiskCategory][]Risk, initialRisks bool) []RiskCategory { + categories := 
make(map[RiskCategory]struct{}) // Go's trick of unique elements is a map + for _, risks := range risksByCategory { + for _, risk := range risks { + if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { + continue + } + highest := HighestSeverity(parsedModel.GeneratedRisksByCategory[risk.Category]) + if !initialRisks { + highest = HighestSeverityStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[risk.Category]) + } + if risk.Severity == types.LowSeverity && highest < types.MediumSeverity { + categories[risk.Category] = struct{}{} + } + } + } + // return as slice (of now unique values) + return keysAsSlice(categories) +} + +func HighestSeverity(risks []Risk) types.RiskSeverity { + result := types.LowSeverity + for _, risk := range risks { + if risk.Severity > result { + result = risk.Severity + } + } + return result +} + +func keysAsSlice(categories map[RiskCategory]struct{}) []RiskCategory { + result := make([]RiskCategory, 0, len(categories)) + for k := range categories { + result = append(result, k) + } + return result +} + +func FilteredByOnlyBusinessSide(parsedModel *ParsedModel) []Risk { + filteredRisks := make([]Risk, 0) + for _, risks := range parsedModel.GeneratedRisksByCategory { + for _, risk := range risks { + if risk.Category.Function == types.BusinessSide { + filteredRisks = append(filteredRisks, risk) + } + } + } + return filteredRisks +} + +func FilteredByOnlyArchitecture(parsedModel *ParsedModel) []Risk { + filteredRisks := make([]Risk, 0) + for _, risks := range parsedModel.GeneratedRisksByCategory { + for _, risk := range risks { + if risk.Category.Function == types.Architecture { + filteredRisks = append(filteredRisks, risk) + } + } + } + return filteredRisks +} + +func FilteredByOnlyDevelopment(parsedModel *ParsedModel) []Risk { + filteredRisks := make([]Risk, 0) + for _, risks := range parsedModel.GeneratedRisksByCategory { + for _, risk := range risks { + if risk.Category.Function == 
types.Development { + filteredRisks = append(filteredRisks, risk) + } + } + } + return filteredRisks +} + +func FilteredByOnlyOperation(parsedModel *ParsedModel) []Risk { + filteredRisks := make([]Risk, 0) + for _, risks := range parsedModel.GeneratedRisksByCategory { + for _, risk := range risks { + if risk.Category.Function == types.Operations { + filteredRisks = append(filteredRisks, risk) + } + } + } + return filteredRisks +} + +func FilteredByOnlyCriticalRisks(parsedModel *ParsedModel) []Risk { + filteredRisks := make([]Risk, 0) + for _, risks := range parsedModel.GeneratedRisksByCategory { + for _, risk := range risks { + if risk.Severity == types.CriticalSeverity { + filteredRisks = append(filteredRisks, risk) + } + } + } + return filteredRisks +} + +func FilteredByOnlyHighRisks(parsedModel *ParsedModel) []Risk { + filteredRisks := make([]Risk, 0) + for _, risks := range parsedModel.GeneratedRisksByCategory { + for _, risk := range risks { + if risk.Severity == types.HighSeverity { + filteredRisks = append(filteredRisks, risk) + } + } + } + return filteredRisks +} + +func FilteredByOnlyElevatedRisks(parsedModel *ParsedModel) []Risk { + filteredRisks := make([]Risk, 0) + for _, risks := range parsedModel.GeneratedRisksByCategory { + for _, risk := range risks { + if risk.Severity == types.ElevatedSeverity { + filteredRisks = append(filteredRisks, risk) + } + } + } + return filteredRisks +} + +func FilteredByOnlyMediumRisks(parsedModel *ParsedModel) []Risk { + filteredRisks := make([]Risk, 0) + for _, risks := range parsedModel.GeneratedRisksByCategory { + for _, risk := range risks { + if risk.Severity == types.MediumSeverity { + filteredRisks = append(filteredRisks, risk) + } + } + } + return filteredRisks +} + +func FilteredByOnlyLowRisks(parsedModel *ParsedModel) []Risk { + filteredRisks := make([]Risk, 0) + for _, risks := range parsedModel.GeneratedRisksByCategory { + for _, risk := range risks { + if risk.Severity == types.LowSeverity { + filteredRisks 
= append(filteredRisks, risk) + } + } + } + return filteredRisks +} + +func FilterByModelFailures(risksByCat map[RiskCategory][]Risk) map[RiskCategory][]Risk { + result := make(map[RiskCategory][]Risk) + for riskCat, risks := range risksByCat { + if riskCat.ModelFailurePossibleReason { + result[riskCat] = risks + } + } + return result +} + +func FlattenRiskSlice(risksByCat map[RiskCategory][]Risk) []Risk { + result := make([]Risk, 0) + for _, risks := range risksByCat { + result = append(result, risks...) + } + return result +} + +func TotalRiskCount(parsedModel *ParsedModel) int { + count := 0 + for _, risks := range parsedModel.GeneratedRisksByCategory { + count += len(risks) + } + return count +} + +func FilteredByRiskTrackingUnchecked(parsedModel *ParsedModel) []Risk { + filteredRisks := make([]Risk, 0) + for _, risks := range parsedModel.GeneratedRisksByCategory { + for _, risk := range risks { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.Unchecked { + filteredRisks = append(filteredRisks, risk) + } + } + } + return filteredRisks +} + +func FilteredByRiskTrackingInDiscussion(parsedModel *ParsedModel) []Risk { + filteredRisks := make([]Risk, 0) + for _, risks := range parsedModel.GeneratedRisksByCategory { + for _, risk := range risks { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.InDiscussion { + filteredRisks = append(filteredRisks, risk) + } + } + } + return filteredRisks +} + +func FilteredByRiskTrackingAccepted(parsedModel *ParsedModel) []Risk { + filteredRisks := make([]Risk, 0) + for _, risks := range parsedModel.GeneratedRisksByCategory { + for _, risk := range risks { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.Accepted { + filteredRisks = append(filteredRisks, risk) + } + } + } + return filteredRisks +} + +func FilteredByRiskTrackingInProgress(parsedModel *ParsedModel) []Risk { + filteredRisks := make([]Risk, 0) + for _, risks := range 
parsedModel.GeneratedRisksByCategory { + for _, risk := range risks { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.InProgress { + filteredRisks = append(filteredRisks, risk) + } + } + } + return filteredRisks +} + +func FilteredByRiskTrackingMitigated(parsedModel *ParsedModel) []Risk { + filteredRisks := make([]Risk, 0) + for _, risks := range parsedModel.GeneratedRisksByCategory { + for _, risk := range risks { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.Mitigated { + filteredRisks = append(filteredRisks, risk) + } + } + } + return filteredRisks +} + +func FilteredByRiskTrackingFalsePositive(parsedModel *ParsedModel) []Risk { + filteredRisks := make([]Risk, 0) + for _, risks := range parsedModel.GeneratedRisksByCategory { + for _, risk := range risks { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.FalsePositive { + filteredRisks = append(filteredRisks, risk) + } + } + } + return filteredRisks +} + +func ReduceToOnlyHighRisk(risks []Risk) []Risk { + filteredRisks := make([]Risk, 0) + for _, risk := range risks { + if risk.Severity == types.HighSeverity { + filteredRisks = append(filteredRisks, risk) + } + } + return filteredRisks +} + +func ReduceToOnlyMediumRisk(risks []Risk) []Risk { + filteredRisks := make([]Risk, 0) + for _, risk := range risks { + if risk.Severity == types.MediumSeverity { + filteredRisks = append(filteredRisks, risk) + } + } + return filteredRisks +} + +func ReduceToOnlyLowRisk(risks []Risk) []Risk { + filteredRisks := make([]Risk, 0) + for _, risk := range risks { + if risk.Severity == types.LowSeverity { + filteredRisks = append(filteredRisks, risk) + } + } + return filteredRisks +} + +func ReduceToOnlyRiskTrackingUnchecked(parsedModel *ParsedModel, risks []Risk) []Risk { + filteredRisks := make([]Risk, 0) + for _, risk := range risks { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.Unchecked { + filteredRisks = 
append(filteredRisks, risk) + } + } + return filteredRisks +} + +func ReduceToOnlyRiskTrackingInDiscussion(parsedModel *ParsedModel, risks []Risk) []Risk { + filteredRisks := make([]Risk, 0) + for _, risk := range risks { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.InDiscussion { + filteredRisks = append(filteredRisks, risk) + } + } + return filteredRisks +} + +func ReduceToOnlyRiskTrackingAccepted(parsedModel *ParsedModel, risks []Risk) []Risk { + filteredRisks := make([]Risk, 0) + for _, risk := range risks { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.Accepted { + filteredRisks = append(filteredRisks, risk) + } + } + return filteredRisks +} + +func ReduceToOnlyRiskTrackingInProgress(parsedModel *ParsedModel, risks []Risk) []Risk { + filteredRisks := make([]Risk, 0) + for _, risk := range risks { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.InProgress { + filteredRisks = append(filteredRisks, risk) + } + } + return filteredRisks +} + +func ReduceToOnlyRiskTrackingMitigated(parsedModel *ParsedModel, risks []Risk) []Risk { + filteredRisks := make([]Risk, 0) + for _, risk := range risks { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.Mitigated { + filteredRisks = append(filteredRisks, risk) + } + } + return filteredRisks +} + +func ReduceToOnlyRiskTrackingFalsePositive(parsedModel *ParsedModel, risks []Risk) []Risk { + filteredRisks := make([]Risk, 0) + for _, risk := range risks { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.FalsePositive { + filteredRisks = append(filteredRisks, risk) + } + } + return filteredRisks +} + +func FilteredByStillAtRisk(parsedModel *ParsedModel) []Risk { + filteredRisks := make([]Risk, 0) + for _, risks := range parsedModel.GeneratedRisksByCategory { + for _, risk := range risks { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { + filteredRisks = 
append(filteredRisks, risk) + } + } + } + return filteredRisks +} + +func OverallRiskStatistics(parsedModel *ParsedModel) RiskStatistics { + result := RiskStatistics{} + result.Risks = make(map[string]map[string]int) + result.Risks[types.CriticalSeverity.String()] = make(map[string]int) + result.Risks[types.CriticalSeverity.String()][types.Unchecked.String()] = 0 + result.Risks[types.CriticalSeverity.String()][types.InDiscussion.String()] = 0 + result.Risks[types.CriticalSeverity.String()][types.Accepted.String()] = 0 + result.Risks[types.CriticalSeverity.String()][types.InProgress.String()] = 0 + result.Risks[types.CriticalSeverity.String()][types.Mitigated.String()] = 0 + result.Risks[types.CriticalSeverity.String()][types.FalsePositive.String()] = 0 + result.Risks[types.HighSeverity.String()] = make(map[string]int) + result.Risks[types.HighSeverity.String()][types.Unchecked.String()] = 0 + result.Risks[types.HighSeverity.String()][types.InDiscussion.String()] = 0 + result.Risks[types.HighSeverity.String()][types.Accepted.String()] = 0 + result.Risks[types.HighSeverity.String()][types.InProgress.String()] = 0 + result.Risks[types.HighSeverity.String()][types.Mitigated.String()] = 0 + result.Risks[types.HighSeverity.String()][types.FalsePositive.String()] = 0 + result.Risks[types.ElevatedSeverity.String()] = make(map[string]int) + result.Risks[types.ElevatedSeverity.String()][types.Unchecked.String()] = 0 + result.Risks[types.ElevatedSeverity.String()][types.InDiscussion.String()] = 0 + result.Risks[types.ElevatedSeverity.String()][types.Accepted.String()] = 0 + result.Risks[types.ElevatedSeverity.String()][types.InProgress.String()] = 0 + result.Risks[types.ElevatedSeverity.String()][types.Mitigated.String()] = 0 + result.Risks[types.ElevatedSeverity.String()][types.FalsePositive.String()] = 0 + result.Risks[types.MediumSeverity.String()] = make(map[string]int) + result.Risks[types.MediumSeverity.String()][types.Unchecked.String()] = 0 + 
result.Risks[types.MediumSeverity.String()][types.InDiscussion.String()] = 0 + result.Risks[types.MediumSeverity.String()][types.Accepted.String()] = 0 + result.Risks[types.MediumSeverity.String()][types.InProgress.String()] = 0 + result.Risks[types.MediumSeverity.String()][types.Mitigated.String()] = 0 + result.Risks[types.MediumSeverity.String()][types.FalsePositive.String()] = 0 + result.Risks[types.LowSeverity.String()] = make(map[string]int) + result.Risks[types.LowSeverity.String()][types.Unchecked.String()] = 0 + result.Risks[types.LowSeverity.String()][types.InDiscussion.String()] = 0 + result.Risks[types.LowSeverity.String()][types.Accepted.String()] = 0 + result.Risks[types.LowSeverity.String()][types.InProgress.String()] = 0 + result.Risks[types.LowSeverity.String()][types.Mitigated.String()] = 0 + result.Risks[types.LowSeverity.String()][types.FalsePositive.String()] = 0 + for _, risks := range parsedModel.GeneratedRisksByCategory { + for _, risk := range risks { + result.Risks[risk.Severity.String()][risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).String()]++ + } + } + return result +} diff --git a/pkg/model/shared_runtime.go b/pkg/model/shared_runtime.go new file mode 100644 index 00000000..c2231b6d --- /dev/null +++ b/pkg/model/shared_runtime.go @@ -0,0 +1,87 @@ +/* +Copyright © 2023 NAME HERE +*/ +package model + +import ( + "sort" + + "github.com/threagile/threagile/pkg/security/types" +) + +type SharedRuntime struct { + Id, Title, Description string + Tags []string + TechnicalAssetsRunning []string +} + +func (what SharedRuntime) IsTaggedWithAny(tags ...string) bool { + return containsCaseInsensitiveAny(what.Tags, tags...) 
+} + +func (what SharedRuntime) IsTaggedWithBaseTag(baseTag string) bool { + return IsTaggedWithBaseTag(what.Tags, baseTag) +} + +func (what SharedRuntime) HighestConfidentiality(model *ParsedModel) types.Confidentiality { + highest := types.Public + for _, id := range what.TechnicalAssetsRunning { + techAsset := model.TechnicalAssets[id] + if techAsset.HighestConfidentiality(model) > highest { + highest = techAsset.HighestConfidentiality(model) + } + } + return highest +} + +func (what SharedRuntime) HighestIntegrity(model *ParsedModel) types.Criticality { + highest := types.Archive + for _, id := range what.TechnicalAssetsRunning { + techAsset := model.TechnicalAssets[id] + if techAsset.HighestIntegrity(model) > highest { + highest = techAsset.HighestIntegrity(model) + } + } + return highest +} + +func (what SharedRuntime) HighestAvailability(model *ParsedModel) types.Criticality { + highest := types.Archive + for _, id := range what.TechnicalAssetsRunning { + techAsset := model.TechnicalAssets[id] + if techAsset.HighestAvailability(model) > highest { + highest = techAsset.HighestAvailability(model) + } + } + return highest +} + +func (what SharedRuntime) TechnicalAssetWithHighestRAA(model *ParsedModel) TechnicalAsset { + result := model.TechnicalAssets[what.TechnicalAssetsRunning[0]] + for _, asset := range what.TechnicalAssetsRunning { + candidate := model.TechnicalAssets[asset] + if candidate.RAA > result.RAA { + result = candidate + } + } + return result +} + +// as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: + +func SortedKeysOfSharedRuntime(model *ParsedModel) []string { + keys := make([]string, 0) + for k := range model.SharedRuntimes { + keys = append(keys, k) + } + sort.Strings(keys) + return keys +} + +type BySharedRuntimeTitleSort []SharedRuntime + +func (what BySharedRuntimeTitleSort) Len() int { return len(what) } +func (what BySharedRuntimeTitleSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] 
} +func (what BySharedRuntimeTitleSort) Less(i, j int) bool { + return what[i].Title < what[j].Title +} diff --git a/pkg/model/technical_asset.go b/pkg/model/technical_asset.go new file mode 100644 index 00000000..3dbdf504 --- /dev/null +++ b/pkg/model/technical_asset.go @@ -0,0 +1,544 @@ +/* +Copyright © 2023 NAME HERE +*/ +package model + +import ( + "fmt" + "sort" + + "github.com/threagile/threagile/pkg/colors" + "github.com/threagile/threagile/pkg/security/types" +) + +type TechnicalAsset struct { + Id, Title, Description string + Usage types.Usage + Type types.TechnicalAssetType + Size types.TechnicalAssetSize + Technology types.TechnicalAssetTechnology + Machine types.TechnicalAssetMachine + Internet, MultiTenant, Redundant, CustomDevelopedParts, OutOfScope, UsedAsClientByHuman bool + Encryption types.EncryptionStyle + JustificationOutOfScope string + Owner string + Confidentiality types.Confidentiality + Integrity, Availability types.Criticality + JustificationCiaRating string + Tags, DataAssetsProcessed, DataAssetsStored []string + DataFormatsAccepted []types.DataFormat + CommunicationLinks []CommunicationLink + DiagramTweakOrder int + // will be set by separate calculation step: + RAA float64 +} + +func (what TechnicalAsset) IsTaggedWithAny(tags ...string) bool { + return containsCaseInsensitiveAny(what.Tags, tags...) +} + +func (what TechnicalAsset) IsTaggedWithBaseTag(baseTag string) bool { + return IsTaggedWithBaseTag(what.Tags, baseTag) +} + +// first use the tag(s) of the asset itself, then their trust boundaries (recursively up) and then their shared runtime + +func (what TechnicalAsset) IsTaggedWithAnyTraversingUp(model *ParsedModel, tags ...string) bool { + if containsCaseInsensitiveAny(what.Tags, tags...) { + return true + } + tbID := what.GetTrustBoundaryId(model) + if len(tbID) > 0 { + if model.TrustBoundaries[tbID].IsTaggedWithAnyTraversingUp(model, tags...) 
{ + return true + } + } + for _, sr := range model.SharedRuntimes { + if contains(sr.TechnicalAssetsRunning, what.Id) && sr.IsTaggedWithAny(tags...) { + return true + } + } + return false +} + +func (what TechnicalAsset) IsSameTrustBoundary(parsedModel *ParsedModel, otherAssetId string) bool { + trustBoundaryOfMyAsset := parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId[what.Id] + trustBoundaryOfOtherAsset := parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId[otherAssetId] + return trustBoundaryOfMyAsset.Id == trustBoundaryOfOtherAsset.Id +} + +func (what TechnicalAsset) IsSameExecutionEnvironment(parsedModel *ParsedModel, otherAssetId string) bool { + trustBoundaryOfMyAsset := parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId[what.Id] + trustBoundaryOfOtherAsset := parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId[otherAssetId] + if trustBoundaryOfMyAsset.Type == types.ExecutionEnvironment && trustBoundaryOfOtherAsset.Type == types.ExecutionEnvironment { + return trustBoundaryOfMyAsset.Id == trustBoundaryOfOtherAsset.Id + } + return false +} + +func (what TechnicalAsset) IsSameTrustBoundaryNetworkOnly(parsedModel *ParsedModel, otherAssetId string) bool { + trustBoundaryOfMyAsset := parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId[what.Id] + if !trustBoundaryOfMyAsset.Type.IsNetworkBoundary() { // find and use the parent boundary then + trustBoundaryOfMyAsset = parsedModel.TrustBoundaries[trustBoundaryOfMyAsset.ParentTrustBoundaryID(parsedModel)] + } + trustBoundaryOfOtherAsset := parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId[otherAssetId] + if !trustBoundaryOfOtherAsset.Type.IsNetworkBoundary() { // find and use the parent boundary then + trustBoundaryOfOtherAsset = parsedModel.TrustBoundaries[trustBoundaryOfOtherAsset.ParentTrustBoundaryID(parsedModel)] + } + return trustBoundaryOfMyAsset.Id == trustBoundaryOfOtherAsset.Id +} + +func (what TechnicalAsset) 
HighestSensitivityScore() float64 { + return what.Confidentiality.AttackerAttractivenessForAsset() + + what.Integrity.AttackerAttractivenessForAsset() + + what.Availability.AttackerAttractivenessForAsset() +} + +func (what TechnicalAsset) HighestConfidentiality(parsedModel *ParsedModel) types.Confidentiality { + highest := what.Confidentiality + for _, dataId := range what.DataAssetsProcessed { + dataAsset := parsedModel.DataAssets[dataId] + if dataAsset.Confidentiality > highest { + highest = dataAsset.Confidentiality + } + } + for _, dataId := range what.DataAssetsStored { + dataAsset := parsedModel.DataAssets[dataId] + if dataAsset.Confidentiality > highest { + highest = dataAsset.Confidentiality + } + } + return highest +} + +func (what TechnicalAsset) DataAssetsProcessedSorted(parsedModel *ParsedModel) []DataAsset { + result := make([]DataAsset, 0) + for _, assetID := range what.DataAssetsProcessed { + result = append(result, parsedModel.DataAssets[assetID]) + } + sort.Sort(ByDataAssetTitleSort(result)) + return result +} + +func (what TechnicalAsset) DataAssetsStoredSorted(parsedModel *ParsedModel) []DataAsset { + result := make([]DataAsset, 0) + for _, assetID := range what.DataAssetsStored { + result = append(result, parsedModel.DataAssets[assetID]) + } + sort.Sort(ByDataAssetTitleSort(result)) + return result +} + +func (what TechnicalAsset) DataFormatsAcceptedSorted() []types.DataFormat { + result := make([]types.DataFormat, 0) + for _, format := range what.DataFormatsAccepted { + result = append(result, format) + } + sort.Sort(types.ByDataFormatAcceptedSort(result)) + return result +} + +func (what TechnicalAsset) CommunicationLinksSorted() []CommunicationLink { + result := make([]CommunicationLink, 0) + for _, format := range what.CommunicationLinks { + result = append(result, format) + } + sort.Sort(ByTechnicalCommunicationLinkTitleSort(result)) + return result +} + +func (what TechnicalAsset) HighestIntegrity(model *ParsedModel) types.Criticality { + 
highest := what.Integrity + for _, dataId := range what.DataAssetsProcessed { + dataAsset := model.DataAssets[dataId] + if dataAsset.Integrity > highest { + highest = dataAsset.Integrity + } + } + for _, dataId := range what.DataAssetsStored { + dataAsset := model.DataAssets[dataId] + if dataAsset.Integrity > highest { + highest = dataAsset.Integrity + } + } + return highest +} + +func (what TechnicalAsset) HighestAvailability(model *ParsedModel) types.Criticality { + highest := what.Availability + for _, dataId := range what.DataAssetsProcessed { + dataAsset := model.DataAssets[dataId] + if dataAsset.Availability > highest { + highest = dataAsset.Availability + } + } + for _, dataId := range what.DataAssetsStored { + dataAsset := model.DataAssets[dataId] + if dataAsset.Availability > highest { + highest = dataAsset.Availability + } + } + return highest +} + +func (what TechnicalAsset) HasDirectConnection(parsedModel *ParsedModel, otherAssetId string) bool { + for _, dataFlow := range parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId[what.Id] { + if dataFlow.SourceId == otherAssetId { + return true + } + } + // check both directions, hence two times, just reversed + for _, dataFlow := range parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId[otherAssetId] { + if dataFlow.SourceId == what.Id { + return true + } + } + return false +} + +func (what TechnicalAsset) GeneratedRisks(parsedModel *ParsedModel) []Risk { + resultingRisks := make([]Risk, 0) + if len(SortedRiskCategories(parsedModel)) == 0 { + fmt.Println("Uh, strange, no risks generated (yet?) 
and asking for them by tech asset...") + } + for _, category := range SortedRiskCategories(parsedModel) { + risks := SortedRisksOfCategory(parsedModel, category) + for _, risk := range risks { + if risk.MostRelevantTechnicalAssetId == what.Id { + resultingRisks = append(resultingRisks, risk) + } + } + } + SortByRiskSeverity(resultingRisks, parsedModel) + return resultingRisks +} + +/* +func (what TechnicalAsset) HighestRiskSeverity() RiskSeverity { + highest := Low + for _, risk := range what.GeneratedRisks() { + if risk.Severity > highest { + highest = risk.Severity + } + } + return highest +} +*/ + +func (what TechnicalAsset) IsZero() bool { + return len(what.Id) == 0 +} + +func (what TechnicalAsset) ProcessesOrStoresDataAsset(dataAssetId string) bool { + if contains(what.DataAssetsProcessed, dataAssetId) { + return true + } + if contains(what.DataAssetsStored, dataAssetId) { + return true + } + return false +} + +// red when >= confidential data stored in unencrypted technical asset + +func (what TechnicalAsset) DetermineLabelColor(model *ParsedModel) string { + // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here + // Check for red + if what.Integrity == types.MissionCritical { + return colors.Red + } + for _, storedDataAsset := range what.DataAssetsStored { + if model.DataAssets[storedDataAsset].Integrity == types.MissionCritical { + return colors.Red + } + } + for _, processedDataAsset := range what.DataAssetsProcessed { + if model.DataAssets[processedDataAsset].Integrity == types.MissionCritical { + return colors.Red + } + } + // Check for amber + if what.Integrity == types.Critical { + return colors.Amber + } + for _, storedDataAsset := range what.DataAssetsStored { + if model.DataAssets[storedDataAsset].Integrity == types.Critical { + return colors.Amber + } + } + for _, processedDataAsset := range what.DataAssetsProcessed { + if model.DataAssets[processedDataAsset].Integrity == types.Critical { + 
return colors.Amber + } + } + return colors.Black + /* + if what.Encrypted { + return colors.Black + } else { + if what.Confidentiality == StrictlyConfidential { + return colors.Red + } + for _, storedDataAsset := range what.DataAssetsStored { + if ParsedModelRoot.DataAssets[storedDataAsset].Confidentiality == StrictlyConfidential { + return colors.Red + } + } + if what.Confidentiality == Confidential { + return colors.Amber + } + for _, storedDataAsset := range what.DataAssetsStored { + if ParsedModelRoot.DataAssets[storedDataAsset].Confidentiality == Confidential { + return colors.Amber + } + } + return colors.Black + } + */ +} + +// red when mission-critical integrity, but still unauthenticated (non-readonly) channels access it +// amber when critical integrity, but still unauthenticated (non-readonly) channels access it +// pink when model forgery attempt (i.e. nothing being processed or stored) + +func (what TechnicalAsset) DetermineShapeBorderColor(parsedModel *ParsedModel) string { + // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here + // Check for red + if what.Confidentiality == types.StrictlyConfidential { + return colors.Red + } + for _, storedDataAsset := range what.DataAssetsStored { + if parsedModel.DataAssets[storedDataAsset].Confidentiality == types.StrictlyConfidential { + return colors.Red + } + } + for _, processedDataAsset := range what.DataAssetsProcessed { + if parsedModel.DataAssets[processedDataAsset].Confidentiality == types.StrictlyConfidential { + return colors.Red + } + } + // Check for amber + if what.Confidentiality == types.Confidential { + return colors.Amber + } + for _, storedDataAsset := range what.DataAssetsStored { + if parsedModel.DataAssets[storedDataAsset].Confidentiality == types.Confidential { + return colors.Amber + } + } + for _, processedDataAsset := range what.DataAssetsProcessed { + if parsedModel.DataAssets[processedDataAsset].Confidentiality == 
types.Confidential { + return colors.Amber + } + } + return colors.Black + /* + if what.Integrity == MissionCritical { + for _, dataFlow := range IncomingTechnicalCommunicationLinksMappedByTargetId[what.Id] { + if !dataFlow.Readonly && dataFlow.Authentication == NoneAuthentication { + return colors.Red + } + } + } + + if what.Integrity == Critical { + for _, dataFlow := range IncomingTechnicalCommunicationLinksMappedByTargetId[what.Id] { + if !dataFlow.Readonly && dataFlow.Authentication == NoneAuthentication { + return colors.Amber + } + } + } + + if len(what.DataAssetsProcessed) == 0 && len(what.DataAssetsStored) == 0 { + return colors.Pink // pink, because it's strange when too many technical assets process no data... some are ok, but many in a diagram is a sign of model forgery... + } + + return colors.Black + */ +} + +/* +// Loops over all data assets (stored and processed by this technical asset) and determines for each +// data asset, how many percentage of the data risk is reduced when this technical asset has all risks mitigated. +// Example: This means if the data asset is loosing a risk and thus getting from red to amber it counts as 1. +// Other example: When only one out of four lines (see data risk mapping) leading to red tech assets are removed by +// the mitigations, then this counts as 0.25. The overall sum is returned. 
+func (what TechnicalAsset) QuickWins() float64 { + result := 0.0 + uniqueDataAssetsStoredAndProcessed := make(map[string]interface{}) + for _, dataAssetId := range what.DataAssetsStored { + uniqueDataAssetsStoredAndProcessed[dataAssetId] = true + } + for _, dataAssetId := range what.DataAssetsProcessed { + uniqueDataAssetsStoredAndProcessed[dataAssetId] = true + } + highestSeverity := HighestSeverityStillAtRisk(what.GeneratedRisks()) + for dataAssetId, _ := range uniqueDataAssetsStoredAndProcessed { + dataAsset := ParsedModelRoot.DataAssets[dataAssetId] + if dataAsset.IdentifiedRiskSeverityStillAtRisk() <= highestSeverity { + howManySameLevelCausingUsagesOfThisData := 0.0 + for techAssetId, risks := range dataAsset.IdentifiedRisksByResponsibleTechnicalAssetId() { + if !ParsedModelRoot.TechnicalAssets[techAssetId].OutOfScope { + for _, risk := range risks { + if len(risk.MostRelevantTechnicalAssetId) > 0 { // T O D O caching of generated risks inside the method? + if HighestSeverityStillAtRisk(ParsedModelRoot.TechnicalAssets[risk.MostRelevantTechnicalAssetId].GeneratedRisks()) == highestSeverity { + howManySameLevelCausingUsagesOfThisData++ + break + } + } + } + } + } + if howManySameLevelCausingUsagesOfThisData > 0 { + result += 1.0 / howManySameLevelCausingUsagesOfThisData + } + } + } + return result +} +*/ + +// dotted when model forgery attempt (i.e. nothing being processed or stored) + +func (what TechnicalAsset) DetermineShapeBorderLineStyle() string { + if len(what.DataAssetsProcessed) == 0 && len(what.DataAssetsStored) == 0 || what.OutOfScope { + return "dotted" // dotted, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... 
+ } + return "solid" +} + +// 3 when redundant + +func (what TechnicalAsset) DetermineShapePeripheries() int { + if what.Redundant { + return 2 + } + return 1 +} + +func (what TechnicalAsset) DetermineShapeStyle() string { + return "filled" +} + +func (what TechnicalAsset) GetTrustBoundaryId(model *ParsedModel) string { + for _, trustBoundary := range model.TrustBoundaries { + for _, techAssetInside := range trustBoundary.TechnicalAssetsInside { + if techAssetInside == what.Id { + return trustBoundary.Id + } + } + } + return "" +} + +func (what TechnicalAsset) DetermineShapeFillColor(parsedModel *ParsedModel) string { + fillColor := colors.VeryLightGray + if len(what.DataAssetsProcessed) == 0 && len(what.DataAssetsStored) == 0 || + what.Technology == types.UnknownTechnology { + fillColor = colors.LightPink // lightPink, because it's strange when too many technical assets process no data... some ok, but many in a diagram ist a sign of model forgery... + } else if len(what.CommunicationLinks) == 0 && len(parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId[what.Id]) == 0 { + fillColor = colors.LightPink + } else if what.Internet { + fillColor = colors.ExtremeLightBlue + } else if what.OutOfScope { + fillColor = colors.OutOfScopeFancy + } else if what.CustomDevelopedParts { + fillColor = colors.CustomDevelopedParts + } + switch what.Machine { + case types.Physical: + fillColor = colors.DarkenHexColor(fillColor) + case types.Container: + fillColor = colors.BrightenHexColor(fillColor) + case types.Serverless: + fillColor = colors.BrightenHexColor(colors.BrightenHexColor(fillColor)) + case types.Virtual: + } + return fillColor +} + +func SortByTechnicalAssetRiskSeverityAndTitleStillAtRisk(assets []TechnicalAsset, parsedModel *ParsedModel) { + sort.Slice(assets, func(i, j int) bool { + risksLeft := ReduceToOnlyStillAtRisk(parsedModel, assets[i].GeneratedRisks(parsedModel)) + risksRight := ReduceToOnlyStillAtRisk(parsedModel, 
assets[j].GeneratedRisks(parsedModel)) + highestSeverityLeft := HighestSeverityStillAtRisk(parsedModel, risksLeft) + highestSeverityRight := HighestSeverityStillAtRisk(parsedModel, risksRight) + var result bool + if highestSeverityLeft == highestSeverityRight { + if len(risksLeft) == 0 && len(risksRight) > 0 { + return false + } else if len(risksLeft) > 0 && len(risksRight) == 0 { + return true + } else { + result = assets[i].Title < assets[j].Title + } + } else { + result = highestSeverityLeft > highestSeverityRight + } + if assets[i].OutOfScope && assets[j].OutOfScope { + result = assets[i].Title < assets[j].Title + } else if assets[i].OutOfScope { + result = false + } else if assets[j].OutOfScope { + result = true + } + return result + }) +} + +func (what TechnicalAsset) DetermineShapeBorderPenWidth(parsedModel *ParsedModel) string { + if what.DetermineShapeBorderColor(parsedModel) == colors.Pink { + return fmt.Sprintf("%f", 3.5) + } + if what.DetermineShapeBorderColor(parsedModel) != colors.Black { + return fmt.Sprintf("%f", 3.0) + } + return fmt.Sprintf("%f", 2.0) +} + +type ByTechnicalAssetRAAAndTitleSort []TechnicalAsset + +func (what ByTechnicalAssetRAAAndTitleSort) Len() int { return len(what) } +func (what ByTechnicalAssetRAAAndTitleSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } +func (what ByTechnicalAssetRAAAndTitleSort) Less(i, j int) bool { + raaLeft := what[i].RAA + raaRight := what[j].RAA + if raaLeft == raaRight { + return what[i].Title < what[j].Title + } + return raaLeft > raaRight +} + +/* +type ByTechnicalAssetQuickWinsAndTitleSort []TechnicalAsset + +func (what ByTechnicalAssetQuickWinsAndTitleSort) Len() int { return len(what) } +func (what ByTechnicalAssetQuickWinsAndTitleSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } +func (what ByTechnicalAssetQuickWinsAndTitleSort) Less(i, j int) bool { + qwLeft := what[i].QuickWins() + qwRight := what[j].QuickWins() + if qwLeft == qwRight { + return what[i].Title < 
what[j].Title + } + return qwLeft > qwRight +} +*/ + +type ByTechnicalAssetTitleSort []TechnicalAsset + +func (what ByTechnicalAssetTitleSort) Len() int { return len(what) } +func (what ByTechnicalAssetTitleSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } +func (what ByTechnicalAssetTitleSort) Less(i, j int) bool { + return what[i].Title < what[j].Title +} + +type ByOrderAndIdSort []TechnicalAsset + +func (what ByOrderAndIdSort) Len() int { return len(what) } +func (what ByOrderAndIdSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } +func (what ByOrderAndIdSort) Less(i, j int) bool { + if what[i].DiagramTweakOrder == what[j].DiagramTweakOrder { + return what[i].Id > what[j].Id + } + return what[i].DiagramTweakOrder < what[j].DiagramTweakOrder +} diff --git a/pkg/model/trust_boundary.go b/pkg/model/trust_boundary.go new file mode 100644 index 00000000..376b18a7 --- /dev/null +++ b/pkg/model/trust_boundary.go @@ -0,0 +1,127 @@ +/* +Copyright © 2023 NAME HERE +*/ +package model + +import ( + "sort" + + "github.com/threagile/threagile/pkg/security/types" +) + +type TrustBoundary struct { + Id, Title, Description string + Type types.TrustBoundaryType + Tags []string + TechnicalAssetsInside []string + TrustBoundariesNested []string +} + +func (what TrustBoundary) RecursivelyAllTechnicalAssetIDsInside(model *ParsedModel) []string { + result := make([]string, 0) + what.addAssetIDsRecursively(model, &result) + return result +} + +func (what TrustBoundary) IsTaggedWithAny(tags ...string) bool { + return containsCaseInsensitiveAny(what.Tags, tags...) +} + +func (what TrustBoundary) IsTaggedWithBaseTag(baseTag string) bool { + return IsTaggedWithBaseTag(what.Tags, baseTag) +} + +func (what TrustBoundary) IsTaggedWithAnyTraversingUp(model *ParsedModel, tags ...string) bool { + if what.IsTaggedWithAny(tags...) 
{ + return true + } + parentID := what.ParentTrustBoundaryID(model) + if len(parentID) > 0 && model.TrustBoundaries[parentID].IsTaggedWithAnyTraversingUp(model, tags...) { + return true + } + return false +} + +func (what TrustBoundary) ParentTrustBoundaryID(model *ParsedModel) string { + var result string + for _, candidate := range model.TrustBoundaries { + if contains(candidate.TrustBoundariesNested, what.Id) { + result = candidate.Id + return result + } + } + return result +} + +func (what TrustBoundary) HighestConfidentiality(model *ParsedModel) types.Confidentiality { + highest := types.Public + for _, id := range what.RecursivelyAllTechnicalAssetIDsInside(model) { + techAsset := model.TechnicalAssets[id] + if techAsset.HighestConfidentiality(model) > highest { + highest = techAsset.HighestConfidentiality(model) + } + } + return highest +} + +func (what TrustBoundary) HighestIntegrity(model *ParsedModel) types.Criticality { + highest := types.Archive + for _, id := range what.RecursivelyAllTechnicalAssetIDsInside(model) { + techAsset := model.TechnicalAssets[id] + if techAsset.HighestIntegrity(model) > highest { + highest = techAsset.HighestIntegrity(model) + } + } + return highest +} + +func (what TrustBoundary) HighestAvailability(model *ParsedModel) types.Criticality { + highest := types.Archive + for _, id := range what.RecursivelyAllTechnicalAssetIDsInside(model) { + techAsset := model.TechnicalAssets[id] + if techAsset.HighestAvailability(model) > highest { + highest = techAsset.HighestAvailability(model) + } + } + return highest +} + +func (what TrustBoundary) AllParentTrustBoundaryIDs(model *ParsedModel) []string { + result := make([]string, 0) + what.addTrustBoundaryIDsRecursively(model, &result) + return result +} + +func (what TrustBoundary) addAssetIDsRecursively(model *ParsedModel, result *[]string) { + *result = append(*result, what.TechnicalAssetsInside...) 
+ for _, nestedBoundaryID := range what.TrustBoundariesNested { + model.TrustBoundaries[nestedBoundaryID].addAssetIDsRecursively(model, result) + } +} + +// TODO: pass ParsedModelRoot as parameter instead of using global variable +func (what TrustBoundary) addTrustBoundaryIDsRecursively(model *ParsedModel, result *[]string) { + *result = append(*result, what.Id) + parentID := what.ParentTrustBoundaryID(model) + if len(parentID) > 0 { + model.TrustBoundaries[parentID].addTrustBoundaryIDsRecursively(model, result) + } +} + +// as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: +func SortedKeysOfTrustBoundaries(model *ParsedModel) []string { + keys := make([]string, 0) + for k := range model.TrustBoundaries { + keys = append(keys, k) + } + sort.Strings(keys) + return keys +} + +type ByTrustBoundaryTitleSort []TrustBoundary + +func (what ByTrustBoundaryTitleSort) Len() int { return len(what) } +func (what ByTrustBoundaryTitleSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } +func (what ByTrustBoundaryTitleSort) Less(i, j int) bool { + return what[i].Title < what[j].Title +} diff --git a/pkg/report/excel.go b/pkg/report/excel.go index 1c1e414f..2dde8d91 100644 --- a/pkg/report/excel.go +++ b/pkg/report/excel.go @@ -1,28 +1,30 @@ package report import ( - "github.com/threagile/threagile/colors" - "github.com/threagile/threagile/model" - "github.com/xuri/excelize/v2" "sort" "strconv" "strings" + + "github.com/threagile/threagile/pkg/colors" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" + "github.com/xuri/excelize/v2" ) var excelRow int -func WriteRisksExcelToFile(filename string) { +func WriteRisksExcelToFile(parsedModel *model.ParsedModel, filename string) { excelRow = 0 excel := excelize.NewFile() - sheetName := model.ParsedModelRoot.Title + sheetName := parsedModel.Title err := excel.SetDocProps(&excelize.DocProperties{ Category: "Threat Model Risks Summary", 
ContentStatus: "Final", - Creator: model.ParsedModelRoot.Author.Name, + Creator: parsedModel.Author.Name, Description: sheetName + " via Threagile", Identifier: "xlsx", Keywords: "Threat Model", - LastModifiedBy: model.ParsedModelRoot.Author.Name, + LastModifiedBy: parsedModel.Author.Name, Revision: "0", Subject: sheetName, Title: sheetName, @@ -45,7 +47,7 @@ func WriteRisksExcelToFile(filename string) { OddFooter: "&C&F", EvenHeader: "&L&P", EvenFooter: "&L&D&R&T", - FirstHeader: `&Threat Model &"-,` + model.ParsedModelRoot.Title + `"Bold&"-,Regular"Risks Summary+000A&D`, + FirstHeader: `&Threat Model &"-,` + parsedModel.Title + `"Bold&"-,Regular"Risks Summary+000A&D`, }) checkErr(err) @@ -337,13 +339,13 @@ func WriteRisksExcelToFile(filename string) { }) excelRow++ // as we have a header line - for _, category := range model.SortedRiskCategories() { - risks := model.SortedRisksOfCategory(category) + for _, category := range model.SortedRiskCategories(parsedModel) { + risks := model.SortedRisksOfCategory(parsedModel, category) for _, risk := range risks { excelRow++ - techAsset := model.ParsedModelRoot.TechnicalAssets[risk.MostRelevantTechnicalAssetId] - commLink := model.CommunicationLinks[risk.MostRelevantCommunicationLinkId] - riskTrackingStatus := risk.GetRiskTrackingStatusDefaultingUnchecked() + techAsset := parsedModel.TechnicalAssets[risk.MostRelevantTechnicalAssetId] + commLink := parsedModel.CommunicationLinks[risk.MostRelevantCommunicationLinkId] + riskTrackingStatus := risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) // content err := excel.SetCellValue(sheetName, "A"+strconv.Itoa(excelRow), risk.Severity.Title()) err = excel.SetCellValue(sheetName, "B"+strconv.Itoa(excelRow), risk.ExploitationLikelihood.Title()) @@ -361,8 +363,8 @@ func WriteRisksExcelToFile(filename string) { err = excel.SetCellValue(sheetName, "N"+strconv.Itoa(excelRow), risk.Category.Check) err = excel.SetCellValue(sheetName, "O"+strconv.Itoa(excelRow), risk.SyntheticId) 
err = excel.SetCellValue(sheetName, "P"+strconv.Itoa(excelRow), riskTrackingStatus.Title()) - if riskTrackingStatus != model.Unchecked { - riskTracking := risk.GetRiskTracking() + if riskTrackingStatus != types.Unchecked { + riskTracking := risk.GetRiskTracking(parsedModel) err = excel.SetCellValue(sheetName, "Q"+strconv.Itoa(excelRow), riskTracking.Justification) if !riskTracking.Date.IsZero() { err = excel.SetCellValue(sheetName, "R"+strconv.Itoa(excelRow), riskTracking.Date.Format("2006-01-02")) @@ -373,19 +375,19 @@ func WriteRisksExcelToFile(filename string) { // styles if riskTrackingStatus.IsStillAtRisk() { switch risk.Severity { - case model.CriticalSeverity: + case types.CriticalSeverity: err = excel.SetCellStyle(sheetName, "A"+strconv.Itoa(excelRow), "F"+strconv.Itoa(excelRow), styleSeverityCriticalCenter) err = excel.SetCellStyle(sheetName, "G"+strconv.Itoa(excelRow), "I"+strconv.Itoa(excelRow), styleSeverityCriticalBold) - case model.HighSeverity: + case types.HighSeverity: err = excel.SetCellStyle(sheetName, "A"+strconv.Itoa(excelRow), "F"+strconv.Itoa(excelRow), styleSeverityHighCenter) err = excel.SetCellStyle(sheetName, "G"+strconv.Itoa(excelRow), "I"+strconv.Itoa(excelRow), styleSeverityHighBold) - case model.ElevatedSeverity: + case types.ElevatedSeverity: err = excel.SetCellStyle(sheetName, "A"+strconv.Itoa(excelRow), "F"+strconv.Itoa(excelRow), styleSeverityElevatedCenter) err = excel.SetCellStyle(sheetName, "G"+strconv.Itoa(excelRow), "I"+strconv.Itoa(excelRow), styleSeverityElevatedBold) - case model.MediumSeverity: + case types.MediumSeverity: err = excel.SetCellStyle(sheetName, "A"+strconv.Itoa(excelRow), "F"+strconv.Itoa(excelRow), styleSeverityMediumCenter) err = excel.SetCellStyle(sheetName, "G"+strconv.Itoa(excelRow), "I"+strconv.Itoa(excelRow), styleSeverityMediumBold) - case model.LowSeverity: + case types.LowSeverity: err = excel.SetCellStyle(sheetName, "A"+strconv.Itoa(excelRow), "F"+strconv.Itoa(excelRow), styleSeverityLowCenter) 
err = excel.SetCellStyle(sheetName, "G"+strconv.Itoa(excelRow), "I"+strconv.Itoa(excelRow), styleSeverityLowBold) } @@ -395,17 +397,17 @@ func WriteRisksExcelToFile(filename string) { } styleFromRiskTracking := styleBlackCenter switch riskTrackingStatus { - case model.Unchecked: + case types.Unchecked: styleFromRiskTracking = styleRedCenter - case model.Mitigated: + case types.Mitigated: styleFromRiskTracking = styleGreenCenter - case model.InProgress: + case types.InProgress: styleFromRiskTracking = styleBlueCenter - case model.Accepted: + case types.Accepted: styleFromRiskTracking = styleYellowCenter - case model.InDiscussion: + case types.InDiscussion: styleFromRiskTracking = styleOrangeCenter - case model.FalsePositive: + case types.FalsePositive: styleFromRiskTracking = styleGrayCenter default: styleFromRiskTracking = styleBlackCenter @@ -454,18 +456,18 @@ func WriteRisksExcelToFile(filename string) { checkErr(err) } -func WriteTagsExcelToFile(filename string) { // TODO: eventually when len(sortedTagsAvailable) == 0 is: write a hint in the Excel that no tags are used +func WriteTagsExcelToFile(parsedModel *model.ParsedModel, filename string) { // TODO: eventually when len(sortedTagsAvailable) == 0 is: write a hint in the Excel that no tags are used excelRow = 0 excel := excelize.NewFile() - sheetName := model.ParsedModelRoot.Title + sheetName := parsedModel.Title err := excel.SetDocProps(&excelize.DocProperties{ Category: "Tag Matrix", ContentStatus: "Final", - Creator: model.ParsedModelRoot.Author.Name, + Creator: parsedModel.Author.Name, Description: sheetName + " via Threagile", Identifier: "xlsx", Keywords: "Tag Matrix", - LastModifiedBy: model.ParsedModelRoot.Author.Name, + LastModifiedBy: parsedModel.Author.Name, Revision: "0", Subject: sheetName, Title: sheetName, @@ -488,12 +490,12 @@ func WriteTagsExcelToFile(filename string) { // TODO: eventually when len(sorted OddFooter: "&C&F", EvenHeader: "&L&P", EvenFooter: "&L&D&R&T", - FirstHeader: `&Tag 
Matrix &"-,` + model.ParsedModelRoot.Title + `"Bold&"-,Regular"Summary+000A&D`, + FirstHeader: `&Tag Matrix &"-,` + parsedModel.Title + `"Bold&"-,Regular"Summary+000A&D`, }) checkErr(err) err = excel.SetCellValue(sheetName, "A1", "Element") // TODO is "Element" the correct generic name when referencing assets, links, trust boundaries etc.? Eventually add separate column "type of element" like "technical asset" or "data asset"? - sortedTagsAvailable := model.TagsActuallyUsed() + sortedTagsAvailable := parsedModel.TagsActuallyUsed() sort.Strings(sortedTagsAvailable) axis := "" for i, tag := range sortedTagsAvailable { @@ -535,19 +537,19 @@ func WriteTagsExcelToFile(filename string) { // TODO: eventually when len(sorted excelRow++ // as we have a header line if len(sortedTagsAvailable) > 0 { - for _, techAsset := range model.SortedTechnicalAssetsByTitle() { + for _, techAsset := range sortedTechnicalAssetsByTitle(parsedModel) { writeRow(excel, sheetName, axis, styleBlackLeftBold, styleBlackCenter, sortedTagsAvailable, techAsset.Title, techAsset.Tags) for _, commLink := range techAsset.CommunicationLinksSorted() { writeRow(excel, sheetName, axis, styleBlackLeftBold, styleBlackCenter, sortedTagsAvailable, commLink.Title, commLink.Tags) } } - for _, dataAsset := range model.SortedDataAssetsByTitle() { + for _, dataAsset := range sortedDataAssetsByTitle(parsedModel) { writeRow(excel, sheetName, axis, styleBlackLeftBold, styleBlackCenter, sortedTagsAvailable, dataAsset.Title, dataAsset.Tags) } - for _, trustBoundary := range model.SortedTrustBoundariesByTitle() { + for _, trustBoundary := range sortedTrustBoundariesByTitle(parsedModel) { writeRow(excel, sheetName, axis, styleBlackLeftBold, styleBlackCenter, sortedTagsAvailable, trustBoundary.Title, trustBoundary.Tags) } - for _, sharedRuntime := range model.SortedSharedRuntimesByTitle() { + for _, sharedRuntime := range sortedSharedRuntimesByTitle(parsedModel) { writeRow(excel, sheetName, axis, styleBlackLeftBold, 
styleBlackCenter, sortedTagsAvailable, sharedRuntime.Title, sharedRuntime.Tags) } } @@ -598,12 +600,30 @@ func WriteTagsExcelToFile(filename string) { // TODO: eventually when len(sorted checkErr(err) } +func sortedTrustBoundariesByTitle(parsedModel *model.ParsedModel) []model.TrustBoundary { + boundaries := make([]model.TrustBoundary, 0) + for _, boundary := range parsedModel.TrustBoundaries { + boundaries = append(boundaries, boundary) + } + sort.Sort(model.ByTrustBoundaryTitleSort(boundaries)) + return boundaries +} + +func sortedDataAssetsByTitle(parsedModel *model.ParsedModel) []model.DataAsset { + assets := make([]model.DataAsset, 0) + for _, asset := range parsedModel.DataAssets { + assets = append(assets, asset) + } + sort.Sort(model.ByDataAssetTitleSort(assets)) + return assets +} + func writeRow(excel *excelize.File, sheetName string, axis string, styleBlackLeftBold int, styleBlackCenter int, sortedTags []string, assetTitle string, tagsUsed []string) { excelRow++ err := excel.SetCellValue(sheetName, "A"+strconv.Itoa(excelRow), assetTitle) for i, tag := range sortedTags { - if model.Contains(tagsUsed, tag) { + if contains(tagsUsed, tag) { err = excel.SetCellValue(sheetName, determineColumnLetter(i)+strconv.Itoa(excelRow), "X") } } diff --git a/pkg/report/json.go b/pkg/report/json.go index a1456dab..4442643e 100644 --- a/pkg/report/json.go +++ b/pkg/report/json.go @@ -2,11 +2,12 @@ package report import ( "encoding/json" - "github.com/threagile/threagile/model" "os" + + "github.com/threagile/threagile/pkg/model" ) -func WriteRisksJSON(filename string) { +func WriteRisksJSON(parsedModel *model.ParsedModel, filename string) { /* remainingRisks := make([]model.Risk, 0) for _, category := range model.SortedRiskCategories() { @@ -16,7 +17,7 @@ func WriteRisksJSON(filename string) { } } */ - jsonBytes, err := json.Marshal(model.AllRisks()) + jsonBytes, err := json.Marshal(model.AllRisks(parsedModel)) if err != nil { panic(err) } @@ -28,8 +29,8 @@ func 
WriteRisksJSON(filename string) { // TODO: also a "data assets" json? -func WriteTechnicalAssetsJSON(filename string) { - jsonBytes, err := json.Marshal(model.ParsedModelRoot.TechnicalAssets) +func WriteTechnicalAssetsJSON(parsedModel *model.ParsedModel, filename string) { + jsonBytes, err := json.Marshal(parsedModel.TechnicalAssets) if err != nil { panic(err) } @@ -39,8 +40,8 @@ func WriteTechnicalAssetsJSON(filename string) { } } -func WriteStatsJSON(filename string) { - jsonBytes, err := json.Marshal(model.OverallRiskStatistics()) +func WriteStatsJSON(parsedModel *model.ParsedModel, filename string) { + jsonBytes, err := json.Marshal(model.OverallRiskStatistics(parsedModel)) if err != nil { panic(err) } diff --git a/pkg/report/report.go b/pkg/report/report.go index 76dc44fe..511360d6 100644 --- a/pkg/report/report.go +++ b/pkg/report/report.go @@ -3,55 +3,6 @@ package report import ( "errors" "fmt" - "github.com/jung-kurt/gofpdf" - "github.com/jung-kurt/gofpdf/contrib/gofpdi" - "github.com/threagile/threagile/colors" - "github.com/threagile/threagile/model" - "github.com/threagile/threagile/pkg/risks" - "github.com/threagile/threagile/pkg/risks/built-in/accidental-secret-leak" - "github.com/threagile/threagile/pkg/risks/built-in/code-backdooring" - "github.com/threagile/threagile/pkg/risks/built-in/container-baseimage-backdooring" - "github.com/threagile/threagile/pkg/risks/built-in/container-platform-escape" - "github.com/threagile/threagile/pkg/risks/built-in/cross-site-request-forgery" - "github.com/threagile/threagile/pkg/risks/built-in/cross-site-scripting" - "github.com/threagile/threagile/pkg/risks/built-in/dos-risky-access-across-trust-boundary" - "github.com/threagile/threagile/pkg/risks/built-in/incomplete-model" - "github.com/threagile/threagile/pkg/risks/built-in/ldap-injection" - "github.com/threagile/threagile/pkg/risks/built-in/missing-authentication" - "github.com/threagile/threagile/pkg/risks/built-in/missing-authentication-second-factor" - 
"github.com/threagile/threagile/pkg/risks/built-in/missing-build-infrastructure" - "github.com/threagile/threagile/pkg/risks/built-in/missing-cloud-hardening" - "github.com/threagile/threagile/pkg/risks/built-in/missing-file-validation" - "github.com/threagile/threagile/pkg/risks/built-in/missing-hardening" - "github.com/threagile/threagile/pkg/risks/built-in/missing-identity-propagation" - "github.com/threagile/threagile/pkg/risks/built-in/missing-identity-provider-isolation" - "github.com/threagile/threagile/pkg/risks/built-in/missing-identity-store" - "github.com/threagile/threagile/pkg/risks/built-in/missing-network-segmentation" - "github.com/threagile/threagile/pkg/risks/built-in/missing-vault" - "github.com/threagile/threagile/pkg/risks/built-in/missing-vault-isolation" - "github.com/threagile/threagile/pkg/risks/built-in/missing-waf" - "github.com/threagile/threagile/pkg/risks/built-in/mixed-targets-on-shared-runtime" - "github.com/threagile/threagile/pkg/risks/built-in/path-traversal" - "github.com/threagile/threagile/pkg/risks/built-in/push-instead-of-pull-deployment" - "github.com/threagile/threagile/pkg/risks/built-in/search-query-injection" - "github.com/threagile/threagile/pkg/risks/built-in/server-side-request-forgery" - "github.com/threagile/threagile/pkg/risks/built-in/service-registry-poisoning" - "github.com/threagile/threagile/pkg/risks/built-in/sql-nosql-injection" - "github.com/threagile/threagile/pkg/risks/built-in/unchecked-deployment" - "github.com/threagile/threagile/pkg/risks/built-in/unencrypted-asset" - "github.com/threagile/threagile/pkg/risks/built-in/unencrypted-communication" - "github.com/threagile/threagile/pkg/risks/built-in/unguarded-access-from-internet" - "github.com/threagile/threagile/pkg/risks/built-in/unguarded-direct-datastore-access" - "github.com/threagile/threagile/pkg/risks/built-in/unnecessary-communication-link" - "github.com/threagile/threagile/pkg/risks/built-in/unnecessary-data-asset" - 
"github.com/threagile/threagile/pkg/risks/built-in/unnecessary-data-transfer" - "github.com/threagile/threagile/pkg/risks/built-in/unnecessary-technical-asset" - "github.com/threagile/threagile/pkg/risks/built-in/untrusted-deserialization" - "github.com/threagile/threagile/pkg/risks/built-in/wrong-communication-link-content" - "github.com/threagile/threagile/pkg/risks/built-in/wrong-trust-boundary-content" - "github.com/threagile/threagile/pkg/risks/built-in/xml-external-entity" - "github.com/wcharczuk/go-chart" - "github.com/wcharczuk/go-chart/drawing" "image" "log" "os" @@ -62,6 +13,57 @@ import ( "strings" "time" "unicode/utf8" + + "github.com/jung-kurt/gofpdf" + "github.com/jung-kurt/gofpdf/contrib/gofpdi" + "github.com/threagile/threagile/pkg/colors" + "github.com/threagile/threagile/pkg/docs" + "github.com/threagile/threagile/pkg/model" + accidental_secret_leak "github.com/threagile/threagile/pkg/security/risks/built-in/accidental-secret-leak" + code_backdooring "github.com/threagile/threagile/pkg/security/risks/built-in/code-backdooring" + container_baseimage_backdooring "github.com/threagile/threagile/pkg/security/risks/built-in/container-baseimage-backdooring" + container_platform_escape "github.com/threagile/threagile/pkg/security/risks/built-in/container-platform-escape" + cross_site_request_forgery "github.com/threagile/threagile/pkg/security/risks/built-in/cross-site-request-forgery" + cross_site_scripting "github.com/threagile/threagile/pkg/security/risks/built-in/cross-site-scripting" + dos_risky_access_across_trust_boundary "github.com/threagile/threagile/pkg/security/risks/built-in/dos-risky-access-across-trust-boundary" + incomplete_model "github.com/threagile/threagile/pkg/security/risks/built-in/incomplete-model" + ldap_injection "github.com/threagile/threagile/pkg/security/risks/built-in/ldap-injection" + missing_authentication "github.com/threagile/threagile/pkg/security/risks/built-in/missing-authentication" + 
missing_authentication_second_factor "github.com/threagile/threagile/pkg/security/risks/built-in/missing-authentication-second-factor" + missing_build_infrastructure "github.com/threagile/threagile/pkg/security/risks/built-in/missing-build-infrastructure" + missing_cloud_hardening "github.com/threagile/threagile/pkg/security/risks/built-in/missing-cloud-hardening" + missing_file_validation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-file-validation" + missing_hardening "github.com/threagile/threagile/pkg/security/risks/built-in/missing-hardening" + missing_identity_propagation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-identity-propagation" + missing_identity_provider_isolation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-identity-provider-isolation" + missing_identity_store "github.com/threagile/threagile/pkg/security/risks/built-in/missing-identity-store" + missing_network_segmentation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-network-segmentation" + missing_vault "github.com/threagile/threagile/pkg/security/risks/built-in/missing-vault" + missing_vault_isolation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-vault-isolation" + missing_waf "github.com/threagile/threagile/pkg/security/risks/built-in/missing-waf" + mixed_targets_on_shared_runtime "github.com/threagile/threagile/pkg/security/risks/built-in/mixed-targets-on-shared-runtime" + path_traversal "github.com/threagile/threagile/pkg/security/risks/built-in/path-traversal" + push_instead_of_pull_deployment "github.com/threagile/threagile/pkg/security/risks/built-in/push-instead-of-pull-deployment" + search_query_injection "github.com/threagile/threagile/pkg/security/risks/built-in/search-query-injection" + server_side_request_forgery "github.com/threagile/threagile/pkg/security/risks/built-in/server-side-request-forgery" + service_registry_poisoning 
"github.com/threagile/threagile/pkg/security/risks/built-in/service-registry-poisoning" + sql_nosql_injection "github.com/threagile/threagile/pkg/security/risks/built-in/sql-nosql-injection" + unchecked_deployment "github.com/threagile/threagile/pkg/security/risks/built-in/unchecked-deployment" + unencrypted_asset "github.com/threagile/threagile/pkg/security/risks/built-in/unencrypted-asset" + unencrypted_communication "github.com/threagile/threagile/pkg/security/risks/built-in/unencrypted-communication" + unguarded_access_from_internet "github.com/threagile/threagile/pkg/security/risks/built-in/unguarded-access-from-internet" + unguarded_direct_datastore_access "github.com/threagile/threagile/pkg/security/risks/built-in/unguarded-direct-datastore-access" + unnecessary_communication_link "github.com/threagile/threagile/pkg/security/risks/built-in/unnecessary-communication-link" + unnecessary_data_asset "github.com/threagile/threagile/pkg/security/risks/built-in/unnecessary-data-asset" + unnecessary_data_transfer "github.com/threagile/threagile/pkg/security/risks/built-in/unnecessary-data-transfer" + unnecessary_technical_asset "github.com/threagile/threagile/pkg/security/risks/built-in/unnecessary-technical-asset" + untrusted_deserialization "github.com/threagile/threagile/pkg/security/risks/built-in/untrusted-deserialization" + wrong_communication_link_content "github.com/threagile/threagile/pkg/security/risks/built-in/wrong-communication-link-content" + wrong_trust_boundary_content "github.com/threagile/threagile/pkg/security/risks/built-in/wrong-trust-boundary-content" + xml_external_entity "github.com/threagile/threagile/pkg/security/risks/built-in/xml-external-entity" + "github.com/threagile/threagile/pkg/security/types" + "github.com/wcharczuk/go-chart" + "github.com/wcharczuk/go-chart/drawing" ) const fontSizeHeadline, fontSizeHeadlineSmall, fontSizeBody, fontSizeSmall, fontSizeVerySmall = 20, 16, 12, 9, 7 @@ -80,25 +82,6 @@ var homeLink int var 
currentChapterTitleBreadcrumb string var firstParagraphRegEx = regexp.MustCompile(`(.*?)((
)|(

))`) -var ( - _ = pdfColorDataAssets - _ = rgbHexColorDataAssets - _ = pdfColorTechnicalAssets - _ = rgbHexColorTechnicalAssets - _ = pdfColorTrustBoundaries - _ = pdfColorSharedRuntime - _ = rgbHexColorTrustBoundaries - _ = rgbHexColorSharedRuntime - _ = pdfColorRiskFindings - _ = rgbHexColorRiskFindings - _ = rgbHexColorDisclaimer - _ = rgbHexColorGray - _ = rgbHexColorLightGray - _ = rgbHexColorOutOfScope - _ = rgbHexColorBlack - _ = pdfColorRed - _ = rgbHexColorRed -) func initReport() { pdf = nil @@ -119,37 +102,38 @@ func WriteReportPDF(reportFilename string, buildTimestamp string, modelHash string, introTextRAA string, - customRiskRules map[string]*risks.CustomRisk, - tempFolder string) { + customRiskRules map[string]*model.CustomRisk, + tempFolder string, + model *model.ParsedModel) { initReport() - createPdfAndInitMetadata() + createPdfAndInitMetadata(model) parseBackgroundTemplate(templateFilename) - createCover() - createTableOfContents() - createManagementSummary(tempFolder) - createImpactInitialRisks() - createRiskMitigationStatus(tempFolder) - createImpactRemainingRisks() - createTargetDescription(filepath.Dir(modelFilename)) + createCover(model) + createTableOfContents(model) + createManagementSummary(model, tempFolder) + createImpactInitialRisks(model) + createRiskMitigationStatus(model, tempFolder) + createImpactRemainingRisks(model) + createTargetDescription(model, filepath.Dir(modelFilename)) embedDataFlowDiagram(dataFlowDiagramFilenamePNG, tempFolder) - createSecurityRequirements() - createAbuseCases() - createTagListing() - createSTRIDE() - createAssignmentByFunction() - createRAA(introTextRAA) + createSecurityRequirements(model) + createAbuseCases(model) + createTagListing(model) + createSTRIDE(model) + createAssignmentByFunction(model) + createRAA(model, introTextRAA) embedDataRiskMapping(dataAssetDiagramFilenamePNG, tempFolder) //createDataRiskQuickWins() - createOutOfScopeAssets() - createModelFailures() - createQuestions() - 
createRiskCategories() - createTechnicalAssets() - createDataAssets() - createTrustBoundaries() - createSharedRuntimes() - createRiskRulesChecked(modelFilename, skipRiskRules, buildTimestamp, modelHash, customRiskRules) - createDisclaimer() + createOutOfScopeAssets(model) + createModelFailures(model) + createQuestions(model) + createRiskCategories(model) + createTechnicalAssets(model) + createDataAssets(model) + createTrustBoundaries(model) + createSharedRuntimes(model) + createRiskRulesChecked(model, modelFilename, skipRiskRules, buildTimestamp, modelHash, customRiskRules) + createDisclaimer(model) writeReportToFile(reportFilename) } @@ -159,15 +143,31 @@ func checkErr(err error) { } } -func createPdfAndInitMetadata() { +func createPdfAndInitMetadata(model *model.ParsedModel) { pdf = gofpdf.New("P", "mm", "A4", "") - pdf.SetCreator(model.ParsedModelRoot.Author.Homepage, true) - pdf.SetAuthor(model.ParsedModelRoot.Author.Name, true) - pdf.SetTitle("Threat Model Report: "+model.ParsedModelRoot.Title, true) - pdf.SetSubject("Threat Model Report: "+model.ParsedModelRoot.Title, true) + pdf.SetCreator(model.Author.Homepage, true) + pdf.SetAuthor(model.Author.Name, true) + pdf.SetTitle("Threat Model Report: "+model.Title, true) + pdf.SetSubject("Threat Model Report: "+model.Title, true) // pdf.SetPageBox("crop", 0, 0, 100, 010) pdf.SetHeaderFunc(headerFunc) - pdf.SetFooterFunc(footerFunc) + pdf.SetFooterFunc(func() { + addBreadcrumb(model) + pdf.SetFont("Helvetica", "", 10) + pdf.SetTextColor(127, 127, 127) + pdf.Text(8.6, 284, "Threat Model Report via Threagile") //: "+parsedModel.Title) + pdf.Link(8.4, 281, 54.6, 4, homeLink) + pageNo++ + text := "Page " + strconv.Itoa(pageNo) + if pageNo < 10 { + text = " " + text + } else if pageNo < 100 { + text = " " + text + } + if pageNo > 1 { + pdf.Text(186, 284, text) + } + }) linkCounter = 1 // link counting starts at 1 via pdf.AddLink } @@ -178,30 +178,12 @@ func headerFunc() { } } -func footerFunc() { - addBreadcrumb() - 
pdf.SetFont("Helvetica", "", 10) - pdf.SetTextColor(127, 127, 127) - pdf.Text(8.6, 284, "Threat Model Report via Threagile") //: "+model.ParsedModelRoot.Title) - pdf.Link(8.4, 281, 54.6, 4, homeLink) - pageNo++ - text := "Page " + strconv.Itoa(pageNo) - if pageNo < 10 { - text = " " + text - } else if pageNo < 100 { - text = " " + text - } - if pageNo > 1 { - pdf.Text(186, 284, text) - } -} - -func addBreadcrumb() { +func addBreadcrumb(parsedModel *model.ParsedModel) { if len(currentChapterTitleBreadcrumb) > 0 { uni := pdf.UnicodeTranslatorFromDescriptor("") pdf.SetFont("Helvetica", "", 10) pdf.SetTextColor(127, 127, 127) - pdf.Text(46.7, 24.5, uni(currentChapterTitleBreadcrumb+" - "+model.ParsedModelRoot.Title)) + pdf.Text(46.7, 24.5, uni(currentChapterTitleBreadcrumb+" - "+parsedModel.Title)) } } @@ -221,29 +203,29 @@ func parseBackgroundTemplate(templateFilename string) { diagramLegendTemplateId = gofpdi.ImportPage(pdf, templateFilename, 3, "/MediaBox") } -func createCover() { +func createCover(parsedModel *model.ParsedModel) { uni := pdf.UnicodeTranslatorFromDescriptor("") pdf.AddPage() gofpdi.UseImportedTemplate(pdf, coverTemplateId, 0, 0, 0, 300) pdf.SetFont("Helvetica", "B", 28) pdf.SetTextColor(0, 0, 0) pdf.Text(40, 110, "Threat Model Report") - pdf.Text(40, 125, uni(model.ParsedModelRoot.Title)) + pdf.Text(40, 125, uni(parsedModel.Title)) pdf.SetFont("Helvetica", "", 12) - reportDate := model.ParsedModelRoot.Date + reportDate := parsedModel.Date if reportDate.IsZero() { reportDate = time.Now() } pdf.Text(40.7, 145, reportDate.Format("2 January 2006")) - pdf.Text(40.7, 153, uni(model.ParsedModelRoot.Author.Name)) + pdf.Text(40.7, 153, uni(parsedModel.Author.Name)) pdf.SetFont("Helvetica", "", 10) pdf.SetTextColor(80, 80, 80) - pdf.Text(8.6, 275, model.ParsedModelRoot.Author.Homepage) + pdf.Text(8.6, 275, parsedModel.Author.Homepage) pdf.SetFont("Helvetica", "", 12) pdf.SetTextColor(0, 0, 0) } -func createTableOfContents() { +func 
createTableOfContents(parsedModel *model.ParsedModel) { uni := pdf.UnicodeTranslatorFromDescriptor("") pdf.AddPage() currentChapterTitleBreadcrumb = "Table of Contents" @@ -274,7 +256,7 @@ func createTableOfContents() { risksStr := "Risks" catStr := "Categories" - count, catCount := model.TotalRiskCount(), len(model.GeneratedRisksByCategory) + count, catCount := model.TotalRiskCount(parsedModel), len(parsedModel.GeneratedRisksByCategory) if count == 1 { risksStr = "Risk" } @@ -296,7 +278,7 @@ func createTableOfContents() { y += 6 risksStr = "Risks" catStr = "Categories" - count, catCount = len(model.FilteredByStillAtRisk()), len(model.CategoriesOfOnlyRisksStillAtRisk(model.GeneratedRisksByCategory)) + count, catCount = len(model.FilteredByStillAtRisk(parsedModel)), len(model.CategoriesOfOnlyRisksStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory)) if count == 1 { risksStr = "Risk" } @@ -377,7 +359,7 @@ func createTableOfContents() { y += 6 assets := "Assets" - count = len(model.OutOfScopeTechnicalAssets()) + count = len(parsedModel.OutOfScopeTechnicalAssets()) if count == 1 { assets = "Asset" } @@ -387,13 +369,13 @@ func createTableOfContents() { pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) y += 6 - modelFailures := model.FlattenRiskSlice(model.FilterByModelFailures(model.GeneratedRisksByCategory)) + modelFailures := model.FlattenRiskSlice(model.FilterByModelFailures(parsedModel.GeneratedRisksByCategory)) risksStr = "Risks" count = len(modelFailures) if count == 1 { risksStr = "Risk" } - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(modelFailures)) + countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(parsedModel, modelFailures)) if countStillAtRisk > 0 { colors.ColorModelFailure(pdf) } @@ -405,14 +387,14 @@ func createTableOfContents() { y += 6 questions := "Questions" - count = len(model.ParsedModelRoot.Questions) + count = len(parsedModel.Questions) if count == 1 { questions = "Question" } - if model.QuestionsUnanswered() > 0 { + if 
questionsUnanswered(parsedModel) > 0 { colors.ColorModelFailure(pdf) } - pdf.Text(11, y, " "+"Questions: "+strconv.Itoa(model.QuestionsUnanswered())+" / "+strconv.Itoa(count)+" "+questions) + pdf.Text(11, y, " "+"Questions: "+strconv.Itoa(questionsUnanswered(parsedModel))+" / "+strconv.Itoa(count)+" "+questions) pdf.Text(175, y, "{questions}") pdfColorBlack() pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) @@ -420,7 +402,7 @@ func createTableOfContents() { // =============== - if len(model.GeneratedRisksByCategory) > 0 { + if len(parsedModel.GeneratedRisksByCategory) > 0 { y += 6 y += 6 if y > 260 { // 260 instead of 275 for major group headlines to avoid "Schusterjungen" @@ -436,23 +418,23 @@ func createTableOfContents() { pdf.Text(175, y, "{intro-risks-by-vulnerability-category}") pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) - for _, category := range model.SortedRiskCategories() { - newRisksStr := model.SortedRisksOfCategory(category) - switch model.HighestSeverityStillAtRisk(newRisksStr) { - case model.CriticalSeverity: + for _, category := range model.SortedRiskCategories(parsedModel) { + newRisksStr := model.SortedRisksOfCategory(parsedModel, category) + switch model.HighestSeverityStillAtRisk(parsedModel, newRisksStr) { + case types.CriticalSeverity: colors.ColorCriticalRisk(pdf) - case model.HighSeverity: + case types.HighSeverity: colors.ColorHighRisk(pdf) - case model.ElevatedSeverity: + case types.ElevatedSeverity: colors.ColorElevatedRisk(pdf) - case model.MediumSeverity: + case types.MediumSeverity: colors.ColorMediumRisk(pdf) - case model.LowSeverity: + case types.LowSeverity: colors.ColorLowRisk(pdf) default: pdfColorBlack() } - if len(model.ReduceToOnlyStillAtRisk(newRisksStr)) == 0 { + if len(model.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) == 0 { pdfColorBlack() } y += 6 @@ -460,7 +442,7 @@ func createTableOfContents() { pageBreakInLists() y = 40 } - countStillAtRisk := 
len(model.ReduceToOnlyStillAtRisk(newRisksStr)) + countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(newRisksStr)) + " Risk" if len(newRisksStr) != 1 { suffix += "s" @@ -475,7 +457,7 @@ func createTableOfContents() { // =============== - if len(model.ParsedModelRoot.TechnicalAssets) > 0 { + if len(parsedModel.TechnicalAssets) > 0 { y += 6 y += 6 if y > 260 { // 260 instead of 275 for major group headlines to avoid "Schusterjungen" @@ -491,14 +473,14 @@ func createTableOfContents() { pdf.Text(175, y, "{intro-risks-by-technical-asset}") pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) - for _, technicalAsset := range model.SortedTechnicalAssetsByRiskSeverityAndTitle() { - newRisksStr := technicalAsset.GeneratedRisks() + for _, technicalAsset := range sortedTechnicalAssetsByRiskSeverityAndTitle(parsedModel) { + newRisksStr := technicalAsset.GeneratedRisks(parsedModel) y += 6 if y > 275 { pageBreakInLists() y = 40 } - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(newRisksStr)) + countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(newRisksStr)) + " Risk" if len(newRisksStr) != 1 { suffix += "s" @@ -507,21 +489,21 @@ func createTableOfContents() { pdfColorOutOfScope() suffix = "out-of-scope" } else { - switch model.HighestSeverityStillAtRisk(newRisksStr) { - case model.CriticalSeverity: + switch model.HighestSeverityStillAtRisk(parsedModel, newRisksStr) { + case types.CriticalSeverity: colors.ColorCriticalRisk(pdf) - case model.HighSeverity: + case types.HighSeverity: colors.ColorHighRisk(pdf) - case model.ElevatedSeverity: + case types.ElevatedSeverity: colors.ColorElevatedRisk(pdf) - case model.MediumSeverity: + case types.MediumSeverity: colors.ColorMediumRisk(pdf) - case model.LowSeverity: + case types.LowSeverity: 
colors.ColorLowRisk(pdf) default: pdfColorBlack() } - if len(model.ReduceToOnlyStillAtRisk(newRisksStr)) == 0 { + if len(model.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) == 0 { pdfColorBlack() } } @@ -535,7 +517,7 @@ func createTableOfContents() { // =============== - if len(model.ParsedModelRoot.DataAssets) > 0 { + if len(parsedModel.DataAssets) > 0 { y += 6 y += 6 if y > 260 { // 260 instead of 275 for major group headlines to avoid "Schusterjungen" @@ -551,29 +533,29 @@ func createTableOfContents() { pdf.Text(175, y, "{intro-risks-by-data-asset}") pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) - for _, dataAsset := range model.SortedDataAssetsByDataBreachProbabilityAndTitle() { + for _, dataAsset := range sortedDataAssetsByDataBreachProbabilityAndTitle(parsedModel) { y += 6 if y > 275 { pageBreakInLists() y = 40 } - newRisksStr := dataAsset.IdentifiedDataBreachProbabilityRisks() - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(newRisksStr)) + newRisksStr := dataAsset.IdentifiedDataBreachProbabilityRisks(parsedModel) + countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(newRisksStr)) + " Risk" if len(newRisksStr) != 1 { suffix += "s" } - switch dataAsset.IdentifiedDataBreachProbabilityStillAtRisk() { - case model.Probable: + switch dataAsset.IdentifiedDataBreachProbabilityStillAtRisk(parsedModel) { + case types.Probable: colors.ColorHighRisk(pdf) - case model.Possible: + case types.Possible: colors.ColorMediumRisk(pdf) - case model.Improbable: + case types.Improbable: colors.ColorLowRisk(pdf) default: pdfColorBlack() } - if !dataAsset.IsDataBreachPotentialStillAtRisk() { + if !dataAsset.IsDataBreachPotentialStillAtRisk(parsedModel) { pdfColorBlack() } pdf.Text(11, y, " "+uni(dataAsset.Title)+": "+suffix) @@ -586,7 +568,7 @@ func createTableOfContents() { // =============== - if 
len(model.ParsedModelRoot.TrustBoundaries) > 0 { + if len(parsedModel.TrustBoundaries) > 0 { y += 6 y += 6 if y > 260 { // 260 instead of 275 for major group headlines to avoid "Schusterjungen" @@ -597,8 +579,8 @@ func createTableOfContents() { pdfColorBlack() pdf.Text(11, y, "Trust Boundaries") pdf.SetFont("Helvetica", "", fontSizeBody) - for _, key := range model.SortedKeysOfTrustBoundaries() { - trustBoundary := model.ParsedModelRoot.TrustBoundaries[key] + for _, key := range model.SortedKeysOfTrustBoundaries(parsedModel) { + trustBoundary := parsedModel.TrustBoundaries[key] y += 6 if y > 275 { pageBreakInLists() @@ -619,7 +601,7 @@ func createTableOfContents() { // =============== - if len(model.ParsedModelRoot.SharedRuntimes) > 0 { + if len(parsedModel.SharedRuntimes) > 0 { y += 6 y += 6 if y > 260 { // 260 instead of 275 for major group headlines to avoid "Schusterjungen" @@ -630,8 +612,8 @@ func createTableOfContents() { pdfColorBlack() pdf.Text(11, y, "Shared Runtime") pdf.SetFont("Helvetica", "", fontSizeBody) - for _, key := range model.SortedKeysOfSharedRuntime() { - sharedRuntime := model.ParsedModelRoot.SharedRuntimes[key] + for _, key := range model.SortedKeysOfSharedRuntime(parsedModel) { + sharedRuntime := parsedModel.SharedRuntimes[key] y += 6 if y > 275 { pageBreakInLists() @@ -686,6 +668,25 @@ func createTableOfContents() { // by the current page number. 
--> See the "pdf.RegisterAlias()" calls during the PDF creation in this file } +func sortedTechnicalAssetsByRiskSeverityAndTitle(parsedModel *model.ParsedModel) []model.TechnicalAsset { + assets := make([]model.TechnicalAsset, 0) + for _, asset := range parsedModel.TechnicalAssets { + assets = append(assets, asset) + } + model.SortByTechnicalAssetRiskSeverityAndTitleStillAtRisk(assets, parsedModel) + return assets +} + +func sortedDataAssetsByDataBreachProbabilityAndTitle(parsedModel *model.ParsedModel) []model.DataAsset { + assets := make([]model.DataAsset, 0) + for _, asset := range parsedModel.DataAssets { + assets = append(assets, asset) + } + + model.SortByDataAssetDataBreachProbabilityAndTitleStillAtRisk(parsedModel, assets) + return assets +} + func defineLinkTarget(alias string) { pageNumbStr := strconv.Itoa(pdf.PageNo()) if len(pageNumbStr) == 1 { @@ -698,7 +699,7 @@ func defineLinkTarget(alias string) { linkCounter++ } -func createDisclaimer() { +func createDisclaimer(parsedModel *model.ParsedModel) { pdf.AddPage() currentChapterTitleBreadcrumb = "Disclaimer" defineLinkTarget("{disclaimer}") @@ -710,7 +711,7 @@ func createDisclaimer() { pdf.SetY(46) var disclaimer strings.Builder - disclaimer.WriteString(model.ParsedModelRoot.Author.Name + " conducted this threat analysis using the open-source Threagile toolkit " + + disclaimer.WriteString(parsedModel.Author.Name + " conducted this threat analysis using the open-source Threagile toolkit " + "on the applications and systems that were modeled as of this report's date. " + "Information security threats are continually changing, with new " + "vulnerabilities discovered on a daily basis, and no application can ever be 100% secure no matter how much " + @@ -718,7 +719,7 @@ func createDisclaimer() { "(for example yearly) to ensure a high ongoing level of security and constantly check for new attack vectors. " + "

" + "This report cannot and does not protect against personal or business loss as the result of use of the " + - "applications or systems described. " + model.ParsedModelRoot.Author.Name + " and the Threagile toolkit offers no warranties, representations or " + + "applications or systems described. " + parsedModel.Author.Name + " and the Threagile toolkit offers no warranties, representations or " + "legal certifications concerning the applications or systems it tests. All software includes defects: nothing " + "in this document is intended to represent or warrant that threat modeling was complete and without error, " + "nor does this document represent or warrant that the architecture analyzed is suitable to task, free of other " + @@ -728,7 +729,7 @@ func createDisclaimer() { "These kinds of checks would only be possible with a separate code review and penetration test against " + "a working system and not via a threat model." + "

" + - "By using the resulting information you agree that " + model.ParsedModelRoot.Author.Name + " and the Threagile toolkit " + + "By using the resulting information you agree that " + parsedModel.Author.Name + " and the Threagile toolkit " + "shall be held harmless in any event." + "

" + "This report is confidential and intended for internal, confidential use by the client. The recipient " + @@ -751,33 +752,33 @@ func createDisclaimer() { pdfColorBlack() } -func createManagementSummary(tempFolder string) { +func createManagementSummary(parsedModel *model.ParsedModel, tempFolder string) { uni := pdf.UnicodeTranslatorFromDescriptor("") pdf.SetTextColor(0, 0, 0) title := "Management Summary" addHeadline(title, false) defineLinkTarget("{management-summary}") currentChapterTitleBreadcrumb = title - countCritical := len(model.FilteredByOnlyCriticalRisks()) - countHigh := len(model.FilteredByOnlyHighRisks()) - countElevated := len(model.FilteredByOnlyElevatedRisks()) - countMedium := len(model.FilteredByOnlyMediumRisks()) - countLow := len(model.FilteredByOnlyLowRisks()) - - countStatusUnchecked := len(model.FilteredByRiskTrackingUnchecked()) - countStatusInDiscussion := len(model.FilteredByRiskTrackingInDiscussion()) - countStatusAccepted := len(model.FilteredByRiskTrackingAccepted()) - countStatusInProgress := len(model.FilteredByRiskTrackingInProgress()) - countStatusMitigated := len(model.FilteredByRiskTrackingMitigated()) - countStatusFalsePositive := len(model.FilteredByRiskTrackingFalsePositive()) + countCritical := len(model.FilteredByOnlyCriticalRisks(parsedModel)) + countHigh := len(model.FilteredByOnlyHighRisks(parsedModel)) + countElevated := len(model.FilteredByOnlyElevatedRisks(parsedModel)) + countMedium := len(model.FilteredByOnlyMediumRisks(parsedModel)) + countLow := len(model.FilteredByOnlyLowRisks(parsedModel)) + + countStatusUnchecked := len(model.FilteredByRiskTrackingUnchecked(parsedModel)) + countStatusInDiscussion := len(model.FilteredByRiskTrackingInDiscussion(parsedModel)) + countStatusAccepted := len(model.FilteredByRiskTrackingAccepted(parsedModel)) + countStatusInProgress := len(model.FilteredByRiskTrackingInProgress(parsedModel)) + countStatusMitigated := len(model.FilteredByRiskTrackingMitigated(parsedModel)) + 
countStatusFalsePositive := len(model.FilteredByRiskTrackingFalsePositive(parsedModel)) html := pdf.HTMLBasicNew() - html.Write(5, "Threagile toolkit was used to model the architecture of \""+uni(model.ParsedModelRoot.Title)+"\" "+ + html.Write(5, "Threagile toolkit was used to model the architecture of \""+uni(parsedModel.Title)+"\" "+ "and derive risks by analyzing the components and data flows. The risks identified during this analysis are shown "+ "in the following chapters. Identified risks during threat modeling do not necessarily mean that the "+ "vulnerability associated with this risk actually exists: it is more to be seen as a list of potential risks and "+ "threats, which should be individually reviewed and reduced by removing false positives. For the remaining risks it should "+ - "be checked in the design and implementation of \""+uni(model.ParsedModelRoot.Title)+"\" whether the mitigation advices "+ + "be checked in the design and implementation of \""+uni(parsedModel.Title)+"\" whether the mitigation advices "+ "have been applied or not."+ "

"+ "Each risk finding references a chapter of the OWASP ASVS (Application Security Verification Standard) audit checklist. "+ @@ -785,7 +786,7 @@ func createManagementSummary(tempFolder string) { "the application in a Defense-in-Depth approach. Additionally, for each risk finding a "+ "link towards a matching OWASP Cheat Sheet or similar with technical details about how to implement a mitigation is given."+ "

"+ - "In total "+strconv.Itoa(model.TotalRiskCount())+" initial risks in "+strconv.Itoa(len(model.GeneratedRisksByCategory))+" categories have "+ + "In total "+strconv.Itoa(model.TotalRiskCount(parsedModel))+" initial risks in "+strconv.Itoa(len(parsedModel.GeneratedRisksByCategory))+" categories have "+ "been identified during the threat modeling process:

") // TODO plural singular stuff risk/s category/ies has/have pdf.SetFont("Helvetica", "B", fontSizeBody) @@ -932,15 +933,15 @@ func createManagementSummary(tempFolder string) { // individual management summary comment pdfColorBlack() - if len(model.ParsedModelRoot.ManagementSummaryComment) > 0 { + if len(parsedModel.ManagementSummaryComment) > 0 { html.Write(5, "















"+ - model.ParsedModelRoot.ManagementSummaryComment) + parsedModel.ManagementSummaryComment) } } -func createRiskMitigationStatus(tempFolder string) { +func createRiskMitigationStatus(parsedModel *model.ParsedModel, tempFolder string) { pdf.SetTextColor(0, 0, 0) - stillAtRisk := model.FilteredByStillAtRisk() + stillAtRisk := model.FilteredByStillAtRisk(parsedModel) count := len(stillAtRisk) title := "Risk Mitigation" addHeadline(title, false) @@ -950,18 +951,18 @@ func createRiskMitigationStatus(tempFolder string) { html := pdf.HTMLBasicNew() html.Write(5, "The following chart gives a high-level overview of the risk tracking status (including mitigated risks):") - risksCritical := model.FilteredByOnlyCriticalRisks() - risksHigh := model.FilteredByOnlyHighRisks() - risksElevated := model.FilteredByOnlyElevatedRisks() - risksMedium := model.FilteredByOnlyMediumRisks() - risksLow := model.FilteredByOnlyLowRisks() + risksCritical := model.FilteredByOnlyCriticalRisks(parsedModel) + risksHigh := model.FilteredByOnlyHighRisks(parsedModel) + risksElevated := model.FilteredByOnlyElevatedRisks(parsedModel) + risksMedium := model.FilteredByOnlyMediumRisks(parsedModel) + risksLow := model.FilteredByOnlyLowRisks(parsedModel) - countStatusUnchecked := len(model.FilteredByRiskTrackingUnchecked()) - countStatusInDiscussion := len(model.FilteredByRiskTrackingInDiscussion()) - countStatusAccepted := len(model.FilteredByRiskTrackingAccepted()) - countStatusInProgress := len(model.FilteredByRiskTrackingInProgress()) - countStatusMitigated := len(model.FilteredByRiskTrackingMitigated()) - countStatusFalsePositive := len(model.FilteredByRiskTrackingFalsePositive()) + countStatusUnchecked := len(model.FilteredByRiskTrackingUnchecked(parsedModel)) + countStatusInDiscussion := len(model.FilteredByRiskTrackingInDiscussion(parsedModel)) + countStatusAccepted := len(model.FilteredByRiskTrackingAccepted(parsedModel)) + countStatusInProgress := 
len(model.FilteredByRiskTrackingInProgress(parsedModel)) + countStatusMitigated := len(model.FilteredByRiskTrackingMitigated(parsedModel)) + countStatusFalsePositive := len(model.FilteredByRiskTrackingFalsePositive(parsedModel)) stackedBarChartRiskTracking := chart.StackedBarChart{ Width: 4000, @@ -970,92 +971,92 @@ func createRiskMitigationStatus(tempFolder string) { YAxis: chart.Style{Show: true, FontSize: 26, TextVerticalAlign: chart.TextVerticalAlignBottom}, Bars: []chart.StackedBar{ { - Name: model.LowSeverity.Title(), + Name: types.LowSeverity.Title(), Width: 130, Values: []chart.Value{ - {Value: float64(len(model.ReduceToOnlyRiskTrackingUnchecked(risksLow))), Label: model.Unchecked.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingUnchecked(parsedModel, risksLow))), Label: types.Unchecked.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInDiscussion(risksLow))), Label: model.InDiscussion.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksLow))), Label: types.InDiscussion.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingAccepted(risksLow))), Label: model.Accepted.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksLow))), Label: types.Accepted.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInProgress(risksLow))), Label: model.InProgress.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksLow))), Label: types.InProgress.Title(), Style: chart.Style{FillColor: 
makeColor(colors.RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingMitigated(risksLow))), Label: model.Mitigated.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksLow))), Label: types.Mitigated.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingFalsePositive(risksLow))), Label: model.FalsePositive.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksLow))), Label: types.FalsePositive.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, }, }, { - Name: model.MediumSeverity.Title(), + Name: types.MediumSeverity.Title(), Width: 130, Values: []chart.Value{ - {Value: float64(len(model.ReduceToOnlyRiskTrackingUnchecked(risksMedium))), Label: model.Unchecked.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingUnchecked(parsedModel, risksMedium))), Label: types.Unchecked.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInDiscussion(risksMedium))), Label: model.InDiscussion.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksMedium))), Label: types.InDiscussion.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingAccepted(risksMedium))), Label: model.Accepted.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksMedium))), Label: types.Accepted.Title(), Style: 
chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInProgress(risksMedium))), Label: model.InProgress.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksMedium))), Label: types.InProgress.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingMitigated(risksMedium))), Label: model.Mitigated.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksMedium))), Label: types.Mitigated.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingFalsePositive(risksMedium))), Label: model.FalsePositive.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksMedium))), Label: types.FalsePositive.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, }, }, { - Name: model.ElevatedSeverity.Title(), + Name: types.ElevatedSeverity.Title(), Width: 130, Values: []chart.Value{ - {Value: float64(len(model.ReduceToOnlyRiskTrackingUnchecked(risksElevated))), Label: model.Unchecked.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingUnchecked(parsedModel, risksElevated))), Label: types.Unchecked.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInDiscussion(risksElevated))), Label: model.InDiscussion.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksElevated))), Label: 
types.InDiscussion.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingAccepted(risksElevated))), Label: model.Accepted.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksElevated))), Label: types.Accepted.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInProgress(risksElevated))), Label: model.InProgress.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksElevated))), Label: types.InProgress.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingMitigated(risksElevated))), Label: model.Mitigated.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksElevated))), Label: types.Mitigated.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingFalsePositive(risksElevated))), Label: model.FalsePositive.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksElevated))), Label: types.FalsePositive.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, }, }, { - Name: model.HighSeverity.Title(), + Name: types.HighSeverity.Title(), Width: 130, Values: []chart.Value{ - {Value: float64(len(model.ReduceToOnlyRiskTrackingUnchecked(risksHigh))), Label: model.Unchecked.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingUnchecked(parsedModel, 
risksHigh))), Label: types.Unchecked.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInDiscussion(risksHigh))), Label: model.InDiscussion.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksHigh))), Label: types.InDiscussion.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingAccepted(risksHigh))), Label: model.Accepted.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksHigh))), Label: types.Accepted.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInProgress(risksHigh))), Label: model.InProgress.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksHigh))), Label: types.InProgress.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingMitigated(risksHigh))), Label: model.Mitigated.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksHigh))), Label: types.Mitigated.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingFalsePositive(risksHigh))), Label: model.FalsePositive.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksHigh))), Label: types.FalsePositive.Title(), Style: chart.Style{FillColor: 
makeColor(colors.RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, }, }, { - Name: model.CriticalSeverity.Title(), + Name: types.CriticalSeverity.Title(), Width: 130, Values: []chart.Value{ - {Value: float64(len(model.ReduceToOnlyRiskTrackingUnchecked(risksCritical))), Label: model.Unchecked.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingUnchecked(parsedModel, risksCritical))), Label: types.Unchecked.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInDiscussion(risksCritical))), Label: model.InDiscussion.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksCritical))), Label: types.InDiscussion.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingAccepted(risksCritical))), Label: model.Accepted.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksCritical))), Label: types.Accepted.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInProgress(risksCritical))), Label: model.InProgress.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksCritical))), Label: types.InProgress.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingMitigated(risksCritical))), Label: model.Mitigated.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksCritical))), Label: types.Mitigated.Title(), 
Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingFalsePositive(risksCritical))), Label: model.FalsePositive.Title(), + {Value: float64(len(model.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksCritical))), Label: types.FalsePositive.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, }, }, @@ -1124,16 +1125,16 @@ func createRiskMitigationStatus(tempFolder string) { "After removal of risks with status mitigated and false positive "+ "the following "+strconv.Itoa(count)+" remain unmitigated:") - countCritical := len(model.ReduceToOnlyStillAtRisk(model.FilteredByOnlyCriticalRisks())) - countHigh := len(model.ReduceToOnlyStillAtRisk(model.FilteredByOnlyHighRisks())) - countElevated := len(model.ReduceToOnlyStillAtRisk(model.FilteredByOnlyElevatedRisks())) - countMedium := len(model.ReduceToOnlyStillAtRisk(model.FilteredByOnlyMediumRisks())) - countLow := len(model.ReduceToOnlyStillAtRisk(model.FilteredByOnlyLowRisks())) + countCritical := len(model.ReduceToOnlyStillAtRisk(parsedModel, model.FilteredByOnlyCriticalRisks(parsedModel))) + countHigh := len(model.ReduceToOnlyStillAtRisk(parsedModel, model.FilteredByOnlyHighRisks(parsedModel))) + countElevated := len(model.ReduceToOnlyStillAtRisk(parsedModel, model.FilteredByOnlyElevatedRisks(parsedModel))) + countMedium := len(model.ReduceToOnlyStillAtRisk(parsedModel, model.FilteredByOnlyMediumRisks(parsedModel))) + countLow := len(model.ReduceToOnlyStillAtRisk(parsedModel, model.FilteredByOnlyLowRisks(parsedModel))) - countBusinessSide := len(model.ReduceToOnlyStillAtRisk(model.FilteredByOnlyBusinessSide())) - countArchitecture := len(model.ReduceToOnlyStillAtRisk(model.FilteredByOnlyArchitecture())) - countDevelopment := 
len(model.ReduceToOnlyStillAtRisk(model.FilteredByOnlyDevelopment())) - countOperation := len(model.ReduceToOnlyStillAtRisk(model.FilteredByOnlyOperation())) + countBusinessSide := len(model.ReduceToOnlyStillAtRisk(parsedModel, model.FilteredByOnlyBusinessSide(parsedModel))) + countArchitecture := len(model.ReduceToOnlyStillAtRisk(parsedModel, model.FilteredByOnlyArchitecture(parsedModel))) + countDevelopment := len(model.ReduceToOnlyStillAtRisk(parsedModel, model.FilteredByOnlyDevelopment(parsedModel))) + countOperation := len(model.ReduceToOnlyStillAtRisk(parsedModel, model.FilteredByOnlyOperation(parsedModel))) pieChartRemainingRiskSeverity := chart.PieChart{ Width: 1500, @@ -1279,19 +1280,19 @@ func makeColor(hexColor string) drawing.Color { return drawing.ColorFromHex(hexColor[i:]) // = remove first char, which is # in rgb hex here } -func createImpactInitialRisks() { - renderImpactAnalysis(true) +func createImpactInitialRisks(parsedModel *model.ParsedModel) { + renderImpactAnalysis(parsedModel, true) } -func createImpactRemainingRisks() { - renderImpactAnalysis(false) +func createImpactRemainingRisks(parsedModel *model.ParsedModel) { + renderImpactAnalysis(parsedModel, false) } -func renderImpactAnalysis(initialRisks bool) { +func renderImpactAnalysis(parsedModel *model.ParsedModel, initialRisks bool) { pdf.SetTextColor(0, 0, 0) - count, catCount := model.TotalRiskCount(), len(model.GeneratedRisksByCategory) + count, catCount := model.TotalRiskCount(parsedModel), len(parsedModel.GeneratedRisksByCategory) if !initialRisks { - count, catCount = len(model.FilteredByStillAtRisk()), len(model.CategoriesOfOnlyRisksStillAtRisk(model.GeneratedRisksByCategory)) + count, catCount = len(model.FilteredByStillAtRisk(parsedModel)), len(model.CategoriesOfOnlyRisksStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory)) } riskStr, catStr := "Risks", "Categories" if count == 1 { @@ -1332,26 +1333,26 @@ func renderImpactAnalysis(initialRisks bool) { html.Write(5, "Risk 
finding paragraphs are clickable and link to the corresponding chapter.") pdf.SetFont("Helvetica", "", fontSizeBody) - addCategories(model.CategoriesOfOnlyCriticalRisks(model.GeneratedRisksByCategory, initialRisks), - model.CriticalSeverity, false, initialRisks, true, false) - addCategories(model.CategoriesOfOnlyHighRisks(model.GeneratedRisksByCategory, initialRisks), - model.HighSeverity, false, initialRisks, true, false) - addCategories(model.CategoriesOfOnlyElevatedRisks(model.GeneratedRisksByCategory, initialRisks), - model.ElevatedSeverity, false, initialRisks, true, false) - addCategories(model.CategoriesOfOnlyMediumRisks(model.GeneratedRisksByCategory, initialRisks), - model.MediumSeverity, false, initialRisks, true, false) - addCategories(model.CategoriesOfOnlyLowRisks(model.GeneratedRisksByCategory, initialRisks), - model.LowSeverity, false, initialRisks, true, false) + addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks), + types.CriticalSeverity, false, initialRisks, true, false) + addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks), + types.HighSeverity, false, initialRisks, true, false) + addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks), + types.ElevatedSeverity, false, initialRisks, true, false) + addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks), + types.MediumSeverity, false, initialRisks, true, false) + addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks), + types.LowSeverity, false, initialRisks, true, false) pdf.SetDrawColor(0, 0, 0) pdf.SetDashPattern([]float64{}, 0) } -func createOutOfScopeAssets() { +func createOutOfScopeAssets(parsedModel *model.ParsedModel) { uni := 
pdf.UnicodeTranslatorFromDescriptor("") pdf.SetTextColor(0, 0, 0) assets := "Assets" - count := len(model.OutOfScopeTechnicalAssets()) + count := len(parsedModel.OutOfScopeTechnicalAssets()) if count == 1 { assets = "Asset" } @@ -1373,7 +1374,7 @@ func createOutOfScopeAssets() { pdf.SetFont("Helvetica", "", fontSizeBody) outOfScopeAssetCount := 0 - for _, technicalAsset := range model.SortedTechnicalAssetsByRAAAndTitle() { + for _, technicalAsset := range sortedTechnicalAssetsByRAAAndTitle(parsedModel) { if technicalAsset.OutOfScope { outOfScopeAssetCount++ if pdf.GetY() > 250 { @@ -1410,15 +1411,24 @@ func createOutOfScopeAssets() { pdf.SetDashPattern([]float64{}, 0) } -func createModelFailures() { +func sortedTechnicalAssetsByRAAAndTitle(parsedModel *model.ParsedModel) []model.TechnicalAsset { + assets := make([]model.TechnicalAsset, 0) + for _, asset := range parsedModel.TechnicalAssets { + assets = append(assets, asset) + } + sort.Sort(model.ByTechnicalAssetRAAAndTitleSort(assets)) + return assets +} + +func createModelFailures(parsedModel *model.ParsedModel) { pdf.SetTextColor(0, 0, 0) - modelFailures := model.FlattenRiskSlice(model.FilterByModelFailures(model.GeneratedRisksByCategory)) + modelFailures := model.FlattenRiskSlice(model.FilterByModelFailures(parsedModel.GeneratedRisksByCategory)) risksStr := "Risks" count := len(modelFailures) if count == 1 { risksStr = "Risk" } - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(modelFailures)) + countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(parsedModel, modelFailures)) if countStillAtRisk > 0 { colors.ColorModelFailure(pdf) } @@ -1440,28 +1450,28 @@ func createModelFailures() { html.Write(5, "Risk finding paragraphs are clickable and link to the corresponding chapter.") pdf.SetFont("Helvetica", "", fontSizeBody) - modelFailuresByCategory := model.FilterByModelFailures(model.GeneratedRisksByCategory) + modelFailuresByCategory := model.FilterByModelFailures(parsedModel.GeneratedRisksByCategory) if 
len(modelFailuresByCategory) == 0 { pdfColorGray() html.Write(5, "

No potential model failures have been identified.") } else { - addCategories(model.CategoriesOfOnlyCriticalRisks(modelFailuresByCategory, true), - model.CriticalSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyHighRisks(modelFailuresByCategory, true), - model.HighSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyElevatedRisks(modelFailuresByCategory, true), - model.ElevatedSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyMediumRisks(modelFailuresByCategory, true), - model.MediumSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyLowRisks(modelFailuresByCategory, true), - model.LowSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, modelFailuresByCategory, true), + types.CriticalSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, modelFailuresByCategory, true), + types.HighSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, modelFailuresByCategory, true), + types.ElevatedSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, modelFailuresByCategory, true), + types.MediumSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, modelFailuresByCategory, true), + types.LowSeverity, true, true, false, true) } pdf.SetDrawColor(0, 0, 0) pdf.SetDashPattern([]float64{}, 0) } -func createRAA(introTextRAA string) { +func createRAA(parsedModel *model.ParsedModel, introTextRAA string) { uni := pdf.UnicodeTranslatorFromDescriptor("") pdf.SetTextColor(0, 0, 0) chapTitle := "RAA Analysis" @@ -1480,7 +1490,7 @@ func createRAA(introTextRAA string) { html.Write(5, "Technical asset paragraphs are clickable and link to the corresponding chapter.") pdf.SetFont("Helvetica", "", fontSizeBody) - for _, technicalAsset := 
range model.SortedTechnicalAssetsByRAAAndTitle() { + for _, technicalAsset := range sortedTechnicalAssetsByRAAAndTitle(parsedModel) { if technicalAsset.OutOfScope { continue } @@ -1490,18 +1500,18 @@ func createRAA(introTextRAA string) { } else { strBuilder.WriteString("

") } - newRisksStr := technicalAsset.GeneratedRisks() - switch model.HighestSeverityStillAtRisk(newRisksStr) { - case model.HighSeverity: + newRisksStr := technicalAsset.GeneratedRisks(parsedModel) + switch model.HighestSeverityStillAtRisk(parsedModel, newRisksStr) { + case types.HighSeverity: colors.ColorHighRisk(pdf) - case model.MediumSeverity: + case types.MediumSeverity: colors.ColorMediumRisk(pdf) - case model.LowSeverity: + case types.LowSeverity: colors.ColorLowRisk(pdf) default: pdfColorBlack() } - if len(model.ReduceToOnlyStillAtRisk(newRisksStr)) == 0 { + if len(model.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) == 0 { pdfColorBlack() } @@ -1607,14 +1617,14 @@ func createDataRiskQuickWins() { } */ -func addCategories(riskCategories []model.RiskCategory, severity model.RiskSeverity, bothInitialAndRemainingRisks bool, initialRisks bool, describeImpact bool, describeDescription bool) { +func addCategories(parsedModel *model.ParsedModel, riskCategories []model.RiskCategory, severity types.RiskSeverity, bothInitialAndRemainingRisks bool, initialRisks bool, describeImpact bool, describeDescription bool) { html := pdf.HTMLBasicNew() var strBuilder strings.Builder sort.Sort(model.ByRiskCategoryTitleSort(riskCategories)) for _, riskCategory := range riskCategories { - risksStr := model.GeneratedRisksByCategory[riskCategory] + risksStr := parsedModel.GeneratedRisksByCategory[riskCategory] if !initialRisks { - risksStr = model.ReduceToOnlyStillAtRisk(risksStr) + risksStr = model.ReduceToOnlyStillAtRisk(parsedModel, risksStr) } if len(risksStr) == 0 { continue @@ -1627,38 +1637,38 @@ func addCategories(riskCategories []model.RiskCategory, severity model.RiskSever } var prefix string switch severity { - case model.CriticalSeverity: + case types.CriticalSeverity: colors.ColorCriticalRisk(pdf) prefix = "Critical: " - case model.HighSeverity: + case types.HighSeverity: colors.ColorHighRisk(pdf) prefix = "High: " - case model.ElevatedSeverity: + case 
types.ElevatedSeverity: colors.ColorElevatedRisk(pdf) prefix = "Elevated: " - case model.MediumSeverity: + case types.MediumSeverity: colors.ColorMediumRisk(pdf) prefix = "Medium: " - case model.LowSeverity: + case types.LowSeverity: colors.ColorLowRisk(pdf) prefix = "Low: " default: pdfColorBlack() prefix = "" } - switch model.HighestSeverityStillAtRisk(risksStr) { - case model.CriticalSeverity: + switch model.HighestSeverityStillAtRisk(parsedModel, risksStr) { + case types.CriticalSeverity: colors.ColorCriticalRisk(pdf) - case model.HighSeverity: + case types.HighSeverity: colors.ColorHighRisk(pdf) - case model.ElevatedSeverity: + case types.ElevatedSeverity: colors.ColorElevatedRisk(pdf) - case model.MediumSeverity: + case types.MediumSeverity: colors.ColorMediumRisk(pdf) - case model.LowSeverity: + case types.LowSeverity: colors.ColorLowRisk(pdf) } - if len(model.ReduceToOnlyStillAtRisk(risksStr)) == 0 { + if len(model.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) == 0 { pdfColorBlack() } html.Write(5, strBuilder.String()) @@ -1673,7 +1683,7 @@ func addCategories(riskCategories []model.RiskCategory, severity model.RiskSever if !initialRisks { initialStr = "Remaining" } - remainingRisks := model.ReduceToOnlyStillAtRisk(risksStr) + remainingRisks := model.ReduceToOnlyStillAtRisk(parsedModel, risksStr) suffix := strconv.Itoa(count) + " " + initialStr + " Risk" if bothInitialAndRemainingRisks { suffix = strconv.Itoa(len(remainingRisks)) + " / " + strconv.Itoa(count) + " Risk" @@ -1712,17 +1722,17 @@ func firstParagraph(text string) string { return match[1] } -func createAssignmentByFunction() { +func createAssignmentByFunction(parsedModel *model.ParsedModel) { pdf.SetTextColor(0, 0, 0) title := "Assignment by Function" addHeadline(title, false) defineLinkTarget("{function-assignment}") currentChapterTitleBreadcrumb = title - risksBusinessSideFunction := model.RisksOfOnlyBusinessSide(model.GeneratedRisksByCategory) - risksArchitectureFunction := 
model.RisksOfOnlyArchitecture(model.GeneratedRisksByCategory) - risksDevelopmentFunction := model.RisksOfOnlyDevelopment(model.GeneratedRisksByCategory) - risksOperationFunction := model.RisksOfOnlyOperation(model.GeneratedRisksByCategory) + risksBusinessSideFunction := model.RisksOfOnlyBusinessSide(parsedModel.GeneratedRisksByCategory) + risksArchitectureFunction := model.RisksOfOnlyArchitecture(parsedModel.GeneratedRisksByCategory) + risksDevelopmentFunction := model.RisksOfOnlyDevelopment(parsedModel.GeneratedRisksByCategory) + risksOperationFunction := model.RisksOfOnlyOperation(parsedModel.GeneratedRisksByCategory) countBusinessSideFunction := model.CountRisks(risksBusinessSideFunction) countArchitectureFunction := model.CountRisks(risksArchitectureFunction) @@ -1731,11 +1741,11 @@ func createAssignmentByFunction() { var intro strings.Builder intro.WriteString("This chapter clusters and assigns the risks by functions which are most likely able to " + "check and mitigate them: " + - "In total " + strconv.Itoa(model.TotalRiskCount()) + " potential risks have been identified during the threat modeling process " + - "of which " + strconv.Itoa(countBusinessSideFunction) + " should be checked by " + model.BusinessSide.Title() + ", " + - "" + strconv.Itoa(countArchitectureFunction) + " should be checked by " + model.Architecture.Title() + ", " + - "" + strconv.Itoa(countDevelopmentFunction) + " should be checked by " + model.Development.Title() + ", " + - "and " + strconv.Itoa(countOperationFunction) + " should be checked by " + model.Operations.Title() + ".
") + "In total " + strconv.Itoa(model.TotalRiskCount(parsedModel)) + " potential risks have been identified during the threat modeling process " + + "of which " + strconv.Itoa(countBusinessSideFunction) + " should be checked by " + types.BusinessSide.Title() + ", " + + "" + strconv.Itoa(countArchitectureFunction) + " should be checked by " + types.Architecture.Title() + ", " + + "" + strconv.Itoa(countDevelopmentFunction) + " should be checked by " + types.Development.Title() + ", " + + "and " + strconv.Itoa(countOperationFunction) + " should be checked by " + types.Operations.Title() + ".
") html := pdf.HTMLBasicNew() html.Write(5, intro.String()) intro.Reset() @@ -1754,22 +1764,22 @@ func createAssignmentByFunction() { } pdf.SetFont("Helvetica", "", fontSizeBody) pdf.SetTextColor(0, 0, 0) - html.Write(5, ""+model.BusinessSide.Title()+"") + html.Write(5, ""+types.BusinessSide.Title()+"") pdf.SetLeftMargin(15) if len(risksBusinessSideFunction) == 0 { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(model.CategoriesOfOnlyCriticalRisks(risksBusinessSideFunction, true), - model.CriticalSeverity, true, true, false, false) - addCategories(model.CategoriesOfOnlyHighRisks(risksBusinessSideFunction, true), - model.HighSeverity, true, true, false, false) - addCategories(model.CategoriesOfOnlyElevatedRisks(risksBusinessSideFunction, true), - model.ElevatedSeverity, true, true, false, false) - addCategories(model.CategoriesOfOnlyMediumRisks(risksBusinessSideFunction, true), - model.MediumSeverity, true, true, false, false) - addCategories(model.CategoriesOfOnlyLowRisks(risksBusinessSideFunction, true), - model.LowSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksBusinessSideFunction, true), + types.CriticalSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksBusinessSideFunction, true), + types.HighSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksBusinessSideFunction, true), + types.ElevatedSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksBusinessSideFunction, true), + types.MediumSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksBusinessSideFunction, true), + types.LowSeverity, true, true, false, false) } pdf.SetLeftMargin(oldLeft) @@ -1781,22 +1791,22 @@ func createAssignmentByFunction() { } pdf.SetFont("Helvetica", "", fontSizeBody) pdf.SetTextColor(0, 0, 0) - html.Write(5, ""+model.Architecture.Title()+"") + html.Write(5, ""+types.Architecture.Title()+"") pdf.SetLeftMargin(15) if len(risksArchitectureFunction) == 0 { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(model.CategoriesOfOnlyCriticalRisks(risksArchitectureFunction, true), - model.CriticalSeverity, true, true, false, false) - addCategories(model.CategoriesOfOnlyHighRisks(risksArchitectureFunction, true), - model.HighSeverity, true, true, false, false) - addCategories(model.CategoriesOfOnlyElevatedRisks(risksArchitectureFunction, true), - model.ElevatedSeverity, true, true, false, false) - addCategories(model.CategoriesOfOnlyMediumRisks(risksArchitectureFunction, true), - model.MediumSeverity, true, true, false, false) - addCategories(model.CategoriesOfOnlyLowRisks(risksArchitectureFunction, true), - model.LowSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksArchitectureFunction, true), + types.CriticalSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksArchitectureFunction, true), + types.HighSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksArchitectureFunction, true), + types.ElevatedSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksArchitectureFunction, true), + types.MediumSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksArchitectureFunction, true), + types.LowSeverity, true, true, false, false) } pdf.SetLeftMargin(oldLeft) @@ -1808,22 +1818,22 @@ func createAssignmentByFunction() { } pdf.SetFont("Helvetica", "", fontSizeBody) pdf.SetTextColor(0, 0, 0) - html.Write(5, ""+model.Development.Title()+"") + html.Write(5, ""+types.Development.Title()+"") pdf.SetLeftMargin(15) if len(risksDevelopmentFunction) == 0 { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(model.CategoriesOfOnlyCriticalRisks(risksDevelopmentFunction, true), - model.CriticalSeverity, true, true, false, false) - addCategories(model.CategoriesOfOnlyHighRisks(risksDevelopmentFunction, true), - model.HighSeverity, true, true, false, false) - addCategories(model.CategoriesOfOnlyElevatedRisks(risksDevelopmentFunction, true), - model.ElevatedSeverity, true, true, false, false) - addCategories(model.CategoriesOfOnlyMediumRisks(risksDevelopmentFunction, true), - model.MediumSeverity, true, true, false, false) - addCategories(model.CategoriesOfOnlyLowRisks(risksDevelopmentFunction, true), - model.LowSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksDevelopmentFunction, true), + types.CriticalSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksDevelopmentFunction, true), + types.HighSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksDevelopmentFunction, true), + types.ElevatedSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksDevelopmentFunction, true), + types.MediumSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksDevelopmentFunction, true), + types.LowSeverity, true, true, false, false) } pdf.SetLeftMargin(oldLeft) @@ -1835,22 +1845,22 @@ func createAssignmentByFunction() { } pdf.SetFont("Helvetica", "", fontSizeBody) pdf.SetTextColor(0, 0, 0) - html.Write(5, ""+model.Operations.Title()+"") + html.Write(5, ""+types.Operations.Title()+"") pdf.SetLeftMargin(15) if len(risksOperationFunction) == 0 { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(model.CategoriesOfOnlyCriticalRisks(risksOperationFunction, true), - model.CriticalSeverity, true, true, false, false) - addCategories(model.CategoriesOfOnlyHighRisks(risksOperationFunction, true), - model.HighSeverity, true, true, false, false) - addCategories(model.CategoriesOfOnlyElevatedRisks(risksOperationFunction, true), - model.ElevatedSeverity, true, true, false, false) - addCategories(model.CategoriesOfOnlyMediumRisks(risksOperationFunction, true), - model.MediumSeverity, true, true, false, false) - addCategories(model.CategoriesOfOnlyLowRisks(risksOperationFunction, true), - model.LowSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksOperationFunction, true), + types.CriticalSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksOperationFunction, true), + types.HighSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksOperationFunction, true), + types.ElevatedSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksOperationFunction, true), + types.MediumSeverity, true, true, false, false) + addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksOperationFunction, true), + types.LowSeverity, true, true, false, false) } pdf.SetLeftMargin(oldLeft) @@ -1858,19 +1868,19 @@ func createAssignmentByFunction() { pdf.SetDashPattern([]float64{}, 0) } -func createSTRIDE() { +func createSTRIDE(parsedModel *model.ParsedModel) { pdf.SetTextColor(0, 0, 0) title := "STRIDE Classification of Identified Risks" addHeadline(title, false) defineLinkTarget("{stride}") currentChapterTitleBreadcrumb = title - risksSTRIDESpoofing := model.RisksOfOnlySTRIDESpoofing(model.GeneratedRisksByCategory) - risksSTRIDETampering := 
model.RisksOfOnlySTRIDETampering(model.GeneratedRisksByCategory) - risksSTRIDERepudiation := model.RisksOfOnlySTRIDERepudiation(model.GeneratedRisksByCategory) - risksSTRIDEInformationDisclosure := model.RisksOfOnlySTRIDEInformationDisclosure(model.GeneratedRisksByCategory) - risksSTRIDEDenialOfService := model.RisksOfOnlySTRIDEDenialOfService(model.GeneratedRisksByCategory) - risksSTRIDEElevationOfPrivilege := model.RisksOfOnlySTRIDEElevationOfPrivilege(model.GeneratedRisksByCategory) + risksSTRIDESpoofing := model.RisksOfOnlySTRIDESpoofing(parsedModel.GeneratedRisksByCategory) + risksSTRIDETampering := model.RisksOfOnlySTRIDETampering(parsedModel.GeneratedRisksByCategory) + risksSTRIDERepudiation := model.RisksOfOnlySTRIDERepudiation(parsedModel.GeneratedRisksByCategory) + risksSTRIDEInformationDisclosure := model.RisksOfOnlySTRIDEInformationDisclosure(parsedModel.GeneratedRisksByCategory) + risksSTRIDEDenialOfService := model.RisksOfOnlySTRIDEDenialOfService(parsedModel.GeneratedRisksByCategory) + risksSTRIDEElevationOfPrivilege := model.RisksOfOnlySTRIDEElevationOfPrivilege(parsedModel.GeneratedRisksByCategory) countSTRIDESpoofing := model.CountRisks(risksSTRIDESpoofing) countSTRIDETampering := model.CountRisks(risksSTRIDETampering) @@ -1880,13 +1890,13 @@ func createSTRIDE() { countSTRIDEElevationOfPrivilege := model.CountRisks(risksSTRIDEElevationOfPrivilege) var intro strings.Builder intro.WriteString("This chapter clusters and classifies the risks by STRIDE categories: " + - "In total " + strconv.Itoa(model.TotalRiskCount()) + " potential risks have been identified during the threat modeling process " + - "of which " + strconv.Itoa(countSTRIDESpoofing) + " in the " + model.Spoofing.Title() + " category, " + - "" + strconv.Itoa(countSTRIDETampering) + " in the " + model.Tampering.Title() + " category, " + - "" + strconv.Itoa(countSTRIDERepudiation) + " in the " + model.Repudiation.Title() + " category, " + - "" + 
strconv.Itoa(countSTRIDEInformationDisclosure) + " in the " + model.InformationDisclosure.Title() + " category, " + - "" + strconv.Itoa(countSTRIDEDenialOfService) + " in the " + model.DenialOfService.Title() + " category, " + - "and " + strconv.Itoa(countSTRIDEElevationOfPrivilege) + " in the " + model.ElevationOfPrivilege.Title() + " category.
") + "In total " + strconv.Itoa(model.TotalRiskCount(parsedModel)) + " potential risks have been identified during the threat modeling process " + + "of which " + strconv.Itoa(countSTRIDESpoofing) + " in the " + types.Spoofing.Title() + " category, " + + "" + strconv.Itoa(countSTRIDETampering) + " in the " + types.Tampering.Title() + " category, " + + "" + strconv.Itoa(countSTRIDERepudiation) + " in the " + types.Repudiation.Title() + " category, " + + "" + strconv.Itoa(countSTRIDEInformationDisclosure) + " in the " + types.InformationDisclosure.Title() + " category, " + + "" + strconv.Itoa(countSTRIDEDenialOfService) + " in the " + types.DenialOfService.Title() + " category, " + + "and " + strconv.Itoa(countSTRIDEElevationOfPrivilege) + " in the " + types.ElevationOfPrivilege.Title() + " category.
") html := pdf.HTMLBasicNew() html.Write(5, intro.String()) intro.Reset() @@ -1905,22 +1915,22 @@ func createSTRIDE() { } pdf.SetFont("Helvetica", "", fontSizeBody) pdf.SetTextColor(0, 0, 0) - html.Write(5, ""+model.Spoofing.Title()+"") + html.Write(5, ""+types.Spoofing.Title()+"") pdf.SetLeftMargin(15) if len(risksSTRIDESpoofing) == 0 { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(model.CategoriesOfOnlyCriticalRisks(risksSTRIDESpoofing, true), - model.CriticalSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyHighRisks(risksSTRIDESpoofing, true), - model.HighSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyElevatedRisks(risksSTRIDESpoofing, true), - model.ElevatedSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyMediumRisks(risksSTRIDESpoofing, true), - model.MediumSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyLowRisks(risksSTRIDESpoofing, true), - model.LowSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDESpoofing, true), + types.CriticalSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDESpoofing, true), + types.HighSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDESpoofing, true), + types.ElevatedSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDESpoofing, true), + types.MediumSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDESpoofing, true), + types.LowSeverity, true, true, false, true) } pdf.SetLeftMargin(oldLeft) @@ -1932,22 +1942,22 @@ func createSTRIDE() { } pdf.SetFont("Helvetica", "", fontSizeBody) pdf.SetTextColor(0, 0, 0) - html.Write(5, ""+model.Tampering.Title()+"") + html.Write(5, ""+types.Tampering.Title()+"") pdf.SetLeftMargin(15) if len(risksSTRIDETampering) == 0 { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(model.CategoriesOfOnlyCriticalRisks(risksSTRIDETampering, true), - model.CriticalSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyHighRisks(risksSTRIDETampering, true), - model.HighSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyElevatedRisks(risksSTRIDETampering, true), - model.ElevatedSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyMediumRisks(risksSTRIDETampering, true), - model.MediumSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyLowRisks(risksSTRIDETampering, true), - model.LowSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDETampering, true), + types.CriticalSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDETampering, true), + types.HighSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDETampering, true), + types.ElevatedSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDETampering, true), + types.MediumSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDETampering, true), + types.LowSeverity, true, true, false, true) } pdf.SetLeftMargin(oldLeft) @@ -1959,22 +1969,22 @@ func createSTRIDE() { } pdf.SetFont("Helvetica", "", fontSizeBody) pdf.SetTextColor(0, 0, 0) - html.Write(5, ""+model.Repudiation.Title()+"") + html.Write(5, ""+types.Repudiation.Title()+"") pdf.SetLeftMargin(15) if len(risksSTRIDERepudiation) == 0 { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(model.CategoriesOfOnlyCriticalRisks(risksSTRIDERepudiation, true), - model.CriticalSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyHighRisks(risksSTRIDERepudiation, true), - model.HighSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyElevatedRisks(risksSTRIDERepudiation, true), - model.ElevatedSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyMediumRisks(risksSTRIDERepudiation, true), - model.MediumSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyLowRisks(risksSTRIDERepudiation, true), - model.LowSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDERepudiation, true), + types.CriticalSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDERepudiation, true), + types.HighSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDERepudiation, true), + types.ElevatedSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDERepudiation, true), + types.MediumSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDERepudiation, true), + types.LowSeverity, true, true, false, true) } pdf.SetLeftMargin(oldLeft) @@ -1986,22 +1996,22 @@ func createSTRIDE() { } pdf.SetFont("Helvetica", "", fontSizeBody) pdf.SetTextColor(0, 0, 0) - html.Write(5, ""+model.InformationDisclosure.Title()+"") + html.Write(5, ""+types.InformationDisclosure.Title()+"") pdf.SetLeftMargin(15) if len(risksSTRIDEInformationDisclosure) == 0 { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(model.CategoriesOfOnlyCriticalRisks(risksSTRIDEInformationDisclosure, true), - model.CriticalSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyHighRisks(risksSTRIDEInformationDisclosure, true), - model.HighSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyElevatedRisks(risksSTRIDEInformationDisclosure, true), - model.ElevatedSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyMediumRisks(risksSTRIDEInformationDisclosure, true), - model.MediumSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyLowRisks(risksSTRIDEInformationDisclosure, true), - model.LowSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEInformationDisclosure, true), + types.CriticalSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEInformationDisclosure, true), + types.HighSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEInformationDisclosure, true), + types.ElevatedSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEInformationDisclosure, true), + types.MediumSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEInformationDisclosure, true), + types.LowSeverity, true, true, false, true) } pdf.SetLeftMargin(oldLeft) @@ -2013,22 +2023,22 @@ func createSTRIDE() { } pdf.SetFont("Helvetica", "", fontSizeBody) pdf.SetTextColor(0, 0, 0) - html.Write(5, ""+model.DenialOfService.Title()+"") + html.Write(5, ""+types.DenialOfService.Title()+"") pdf.SetLeftMargin(15) if len(risksSTRIDEDenialOfService) == 0 { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(model.CategoriesOfOnlyCriticalRisks(risksSTRIDEDenialOfService, true), - model.CriticalSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyHighRisks(risksSTRIDEDenialOfService, true), - model.HighSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyElevatedRisks(risksSTRIDEDenialOfService, true), - model.ElevatedSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyMediumRisks(risksSTRIDEDenialOfService, true), - model.MediumSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyLowRisks(risksSTRIDEDenialOfService, true), - model.LowSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEDenialOfService, true), + types.CriticalSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEDenialOfService, true), + types.HighSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEDenialOfService, true), + types.ElevatedSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEDenialOfService, true), + types.MediumSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEDenialOfService, true), + types.LowSeverity, true, true, false, true) } pdf.SetLeftMargin(oldLeft) @@ -2040,22 +2050,22 @@ func createSTRIDE() { } pdf.SetFont("Helvetica", "", fontSizeBody) pdf.SetTextColor(0, 0, 0) - html.Write(5, ""+model.ElevationOfPrivilege.Title()+"") + html.Write(5, ""+types.ElevationOfPrivilege.Title()+"") pdf.SetLeftMargin(15) if len(risksSTRIDEElevationOfPrivilege) == 0 { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(model.CategoriesOfOnlyCriticalRisks(risksSTRIDEElevationOfPrivilege, true), - model.CriticalSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyHighRisks(risksSTRIDEElevationOfPrivilege, true), - model.HighSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyElevatedRisks(risksSTRIDEElevationOfPrivilege, true), - model.ElevatedSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyMediumRisks(risksSTRIDEElevationOfPrivilege, true), - model.MediumSeverity, true, true, false, true) - addCategories(model.CategoriesOfOnlyLowRisks(risksSTRIDEElevationOfPrivilege, true), - model.LowSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true), + types.CriticalSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true), + types.HighSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true), + types.ElevatedSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true), + types.MediumSeverity, true, true, false, true) + addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true), + types.LowSeverity, true, true, false, true) } pdf.SetLeftMargin(oldLeft) @@ -2063,7 +2073,7 @@ func createSTRIDE() { pdf.SetDashPattern([]float64{}, 0) } -func createSecurityRequirements() { +func createSecurityRequirements(parsedModel *model.ParsedModel) { uni := pdf.UnicodeTranslatorFromDescriptor("") pdf.SetTextColor(0, 0, 0) chapTitle := "Security Requirements" @@ -2074,8 +2084,8 @@ func createSecurityRequirements() { html := pdf.HTMLBasicNew() html.Write(5, "This chapter lists the custom security 
requirements which have been defined for the modeled target.") pdfColorBlack() - for _, title := range model.SortedKeysOfSecurityRequirements() { - description := model.ParsedModelRoot.SecurityRequirements[title] + for _, title := range sortedKeysOfSecurityRequirements(parsedModel) { + description := parsedModel.SecurityRequirements[title] if pdf.GetY() > 250 { pageBreak() pdf.SetY(36) @@ -2095,7 +2105,16 @@ func createSecurityRequirements() { "taken into account as well. Also custom individual security requirements might exist for the project.") } -func createAbuseCases() { +func sortedKeysOfSecurityRequirements(parsedModel *model.ParsedModel) []string { + keys := make([]string, 0) + for k := range parsedModel.SecurityRequirements { + keys = append(keys, k) + } + sort.Strings(keys) + return keys +} + +func createAbuseCases(parsedModel *model.ParsedModel) { pdf.SetTextColor(0, 0, 0) chapTitle := "Abuse Cases" addHeadline(chapTitle, false) @@ -2105,8 +2124,8 @@ func createAbuseCases() { html := pdf.HTMLBasicNew() html.Write(5, "This chapter lists the custom abuse cases which have been defined for the modeled target.") pdfColorBlack() - for _, title := range model.SortedKeysOfAbuseCases() { - description := model.ParsedModelRoot.AbuseCases[title] + for _, title := range sortedKeysOfAbuseCases(parsedModel) { + description := parsedModel.AbuseCases[title] if pdf.GetY() > 250 { pageBreak() pdf.SetY(36) @@ -2126,18 +2145,27 @@ func createAbuseCases() { "taken into account as well. 
Also custom individual abuse cases might exist for the project.") } -func createQuestions() { +func sortedKeysOfAbuseCases(parsedModel *model.ParsedModel) []string { + keys := make([]string, 0) + for k := range parsedModel.AbuseCases { + keys = append(keys, k) + } + sort.Strings(keys) + return keys +} + +func createQuestions(parsedModel *model.ParsedModel) { uni := pdf.UnicodeTranslatorFromDescriptor("") pdf.SetTextColor(0, 0, 0) questions := "Questions" - count := len(model.ParsedModelRoot.Questions) + count := len(parsedModel.Questions) if count == 1 { questions = "Question" } - if model.QuestionsUnanswered() > 0 { + if questionsUnanswered(parsedModel) > 0 { colors.ColorModelFailure(pdf) } - chapTitle := "Questions: " + strconv.Itoa(model.QuestionsUnanswered()) + " / " + strconv.Itoa(count) + " " + questions + chapTitle := "Questions: " + strconv.Itoa(questionsUnanswered(parsedModel)) + " / " + strconv.Itoa(count) + " " + questions addHeadline(chapTitle, false) defineLinkTarget("{questions}") currentChapterTitleBreadcrumb = chapTitle @@ -2146,14 +2174,14 @@ func createQuestions() { html := pdf.HTMLBasicNew() html.Write(5, "This chapter lists custom questions that arose during the threat modeling process.") - if len(model.ParsedModelRoot.Questions) == 0 { + if len(parsedModel.Questions) == 0 { pdfColorLightGray() html.Write(5, "


") html.Write(5, "No custom questions arose during the threat modeling process.") } pdfColorBlack() - for _, question := range model.SortedKeysOfQuestions() { - answer := model.ParsedModelRoot.Questions[question] + for _, question := range sortedKeysOfQuestions(parsedModel) { + answer := parsedModel.Questions[question] if pdf.GetY() > 250 { pageBreak() pdf.SetY(36) @@ -2174,7 +2202,16 @@ func createQuestions() { } } -func createTagListing() { +func sortedKeysOfQuestions(parsedModel *model.ParsedModel) []string { + keys := make([]string, 0) + for k := range parsedModel.Questions { + keys = append(keys, k) + } + sort.Strings(keys) + return keys +} + +func createTagListing(parsedModel *model.ParsedModel) { pdf.SetTextColor(0, 0, 0) chapTitle := "Tag Listing" addHeadline(chapTitle, false) @@ -2184,19 +2221,19 @@ func createTagListing() { html := pdf.HTMLBasicNew() html.Write(5, "This chapter lists what tags are used by which elements.") pdfColorBlack() - sorted := model.ParsedModelRoot.TagsAvailable + sorted := parsedModel.TagsAvailable sort.Strings(sorted) for _, tag := range sorted { description := "" // TODO: add some separation texts to distinguish between technical assets and data assets etc. for example? 
- for _, techAsset := range model.SortedTechnicalAssetsByTitle() { - if model.Contains(techAsset.Tags, tag) { + for _, techAsset := range sortedTechnicalAssetsByTitle(parsedModel) { + if contains(techAsset.Tags, tag) { if len(description) > 0 { description += ", " } description += techAsset.Title } for _, commLink := range techAsset.CommunicationLinksSorted() { - if model.Contains(commLink.Tags, tag) { + if contains(commLink.Tags, tag) { if len(description) > 0 { description += ", " } @@ -2204,24 +2241,24 @@ func createTagListing() { } } } - for _, dataAsset := range model.SortedDataAssetsByTitle() { - if model.Contains(dataAsset.Tags, tag) { + for _, dataAsset := range sortedDataAssetsByTitle(parsedModel) { + if contains(dataAsset.Tags, tag) { if len(description) > 0 { description += ", " } description += dataAsset.Title } } - for _, trustBoundary := range model.SortedTrustBoundariesByTitle() { - if model.Contains(trustBoundary.Tags, tag) { + for _, trustBoundary := range sortedTrustBoundariesByTitle(parsedModel) { + if contains(trustBoundary.Tags, tag) { if len(description) > 0 { description += ", " } description += trustBoundary.Title } } - for _, sharedRuntime := range model.SortedSharedRuntimesByTitle() { - if model.Contains(sharedRuntime.Tags, tag) { + for _, sharedRuntime := range sortedSharedRuntimesByTitle(parsedModel) { + if contains(sharedRuntime.Tags, tag) { if len(description) > 0 { description += ", " } @@ -2242,7 +2279,25 @@ func createTagListing() { } } -func createRiskCategories() { +func sortedSharedRuntimesByTitle(parsedModel *model.ParsedModel) []model.SharedRuntime { + result := make([]model.SharedRuntime, 0) + for _, runtime := range parsedModel.SharedRuntimes { + result = append(result, runtime) + } + sort.Sort(model.BySharedRuntimeTitleSort(result)) + return result +} + +func sortedTechnicalAssetsByTitle(parsedModel *model.ParsedModel) []model.TechnicalAsset { + assets := make([]model.TechnicalAsset, 0) + for _, asset := range 
parsedModel.TechnicalAssets { + assets = append(assets, asset) + } + sort.Sort(model.ByTechnicalAssetTitleSort(assets)) + return assets +} + +func createRiskCategories(parsedModel *model.ParsedModel) { uni := pdf.UnicodeTranslatorFromDescriptor("") // category title title := "Identified Risks by Vulnerability Category" @@ -2251,42 +2306,42 @@ func createRiskCategories() { defineLinkTarget("{intro-risks-by-vulnerability-category}") html := pdf.HTMLBasicNew() var text strings.Builder - text.WriteString("In total " + strconv.Itoa(model.TotalRiskCount()) + " potential risks have been identified during the threat modeling process " + + text.WriteString("In total " + strconv.Itoa(model.TotalRiskCount(parsedModel)) + " potential risks have been identified during the threat modeling process " + "of which " + - "" + strconv.Itoa(len(model.FilteredByOnlyCriticalRisks())) + " are rated as critical, " + - "" + strconv.Itoa(len(model.FilteredByOnlyHighRisks())) + " as high, " + - "" + strconv.Itoa(len(model.FilteredByOnlyElevatedRisks())) + " as elevated, " + - "" + strconv.Itoa(len(model.FilteredByOnlyMediumRisks())) + " as medium, " + - "and " + strconv.Itoa(len(model.FilteredByOnlyLowRisks())) + " as low. " + - "

These risks are distributed across " + strconv.Itoa(len(model.GeneratedRisksByCategory)) + " vulnerability categories. ") + "" + strconv.Itoa(len(model.FilteredByOnlyCriticalRisks(parsedModel))) + " are rated as critical, " + + "" + strconv.Itoa(len(model.FilteredByOnlyHighRisks(parsedModel))) + " as high, " + + "" + strconv.Itoa(len(model.FilteredByOnlyElevatedRisks(parsedModel))) + " as elevated, " + + "" + strconv.Itoa(len(model.FilteredByOnlyMediumRisks(parsedModel))) + " as medium, " + + "and " + strconv.Itoa(len(model.FilteredByOnlyLowRisks(parsedModel))) + " as low. " + + "

These risks are distributed across " + strconv.Itoa(len(parsedModel.GeneratedRisksByCategory)) + " vulnerability categories. ") text.WriteString("The following sub-chapters of this section describe each identified risk category.") // TODO more explanation text html.Write(5, text.String()) text.Reset() currentChapterTitleBreadcrumb = title - for _, category := range model.SortedRiskCategories() { - risksStr := model.SortedRisksOfCategory(category) + for _, category := range model.SortedRiskCategories(parsedModel) { + risksStr := model.SortedRisksOfCategory(parsedModel, category) // category color - switch model.HighestSeverityStillAtRisk(risksStr) { - case model.CriticalSeverity: + switch model.HighestSeverityStillAtRisk(parsedModel, risksStr) { + case types.CriticalSeverity: colors.ColorCriticalRisk(pdf) - case model.HighSeverity: + case types.HighSeverity: colors.ColorHighRisk(pdf) - case model.ElevatedSeverity: + case types.ElevatedSeverity: colors.ColorElevatedRisk(pdf) - case model.MediumSeverity: + case types.MediumSeverity: colors.ColorMediumRisk(pdf) - case model.LowSeverity: + case types.LowSeverity: colors.ColorLowRisk(pdf) default: pdfColorBlack() } - if len(model.ReduceToOnlyStillAtRisk(risksStr)) == 0 { + if len(model.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) == 0 { pdfColorBlack() } // category title - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(risksStr)) + countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(risksStr)) + " Risk" if len(risksStr) != 1 { suffix += "s" @@ -2379,7 +2434,7 @@ func createRiskCategories() { pdf.SetY(36) } switch risk.Severity { - case model.CriticalSeverity: + case types.CriticalSeverity: colors.ColorCriticalRisk(pdf) if !headlineCriticalWritten { pdf.SetFont("Helvetica", "", fontSizeBody) @@ -2389,7 +2444,7 @@ func createRiskCategories() { text.Reset() headlineCriticalWritten = true } - case model.HighSeverity: + 
case types.HighSeverity: colors.ColorHighRisk(pdf) if !headlineHighWritten { pdf.SetFont("Helvetica", "", fontSizeBody) @@ -2399,7 +2454,7 @@ func createRiskCategories() { text.Reset() headlineHighWritten = true } - case model.ElevatedSeverity: + case types.ElevatedSeverity: colors.ColorElevatedRisk(pdf) if !headlineElevatedWritten { pdf.SetFont("Helvetica", "", fontSizeBody) @@ -2409,7 +2464,7 @@ func createRiskCategories() { text.Reset() headlineElevatedWritten = true } - case model.MediumSeverity: + case types.MediumSeverity: colors.ColorMediumRisk(pdf) if !headlineMediumWritten { pdf.SetFont("Helvetica", "", fontSizeBody) @@ -2419,7 +2474,7 @@ func createRiskCategories() { text.Reset() headlineMediumWritten = true } - case model.LowSeverity: + case types.LowSeverity: colors.ColorLowRisk(pdf) if !headlineLowWritten { pdf.SetFont("Helvetica", "", fontSizeBody) @@ -2432,7 +2487,7 @@ func createRiskCategories() { default: pdfColorBlack() } - if !risk.GetRiskTrackingStatusDefaultingUnchecked().IsStillAtRisk() { + if !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { pdfColorBlack() } posY := pdf.GetY() @@ -2453,7 +2508,7 @@ func createRiskCategories() { } else if len(risk.MostRelevantTechnicalAssetId) > 0 { pdf.Link(20, posY, 180, pdf.GetY()-posY, tocLinkIdByAssetId[risk.MostRelevantTechnicalAssetId]) } - writeRiskTrackingStatus(risk) + writeRiskTrackingStatus(parsedModel, risk) pdf.SetLeftMargin(oldLeft) html.Write(5, text.String()) text.Reset() @@ -2462,33 +2517,33 @@ func createRiskCategories() { } } -func writeRiskTrackingStatus(risk model.Risk) { +func writeRiskTrackingStatus(parsedModel *model.ParsedModel, risk model.Risk) { uni := pdf.UnicodeTranslatorFromDescriptor("") - tracking := risk.GetRiskTracking() + tracking := risk.GetRiskTracking(parsedModel) pdfColorBlack() pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") switch tracking.Status { - case model.Unchecked: + case types.Unchecked: colors.ColorRiskStatusUnchecked(pdf) - 
case model.InDiscussion: + case types.InDiscussion: colors.ColorRiskStatusInDiscussion(pdf) - case model.Accepted: + case types.Accepted: colors.ColorRiskStatusAccepted(pdf) - case model.InProgress: + case types.InProgress: colors.ColorRiskStatusInProgress(pdf) - case model.Mitigated: + case types.Mitigated: colors.ColorRiskStatusMitigated(pdf) - case model.FalsePositive: + case types.FalsePositive: colors.ColorRiskStatusFalsePositive(pdf) default: pdfColorBlack() } pdf.SetFont("Helvetica", "", fontSizeSmall) - if tracking.Status == model.Unchecked { + if tracking.Status == types.Unchecked { pdf.SetFont("Helvetica", "B", fontSizeSmall) } pdf.CellFormat(25, 4, tracking.Status.Title(), "0", 0, "B", false, 0, "") - if tracking.Status != model.Unchecked { + if tracking.Status != types.Unchecked { dateStr := tracking.Date.Format("2006-01-02") if dateStr == "0001-01-01" { dateStr = "" @@ -2509,7 +2564,7 @@ func writeRiskTrackingStatus(risk model.Risk) { pdfColorBlack() } -func createTechnicalAssets() { +func createTechnicalAssets(parsedModel *model.ParsedModel) { uni := pdf.UnicodeTranslatorFromDescriptor("") // category title title := "Identified Risks by Technical Asset" @@ -2518,22 +2573,22 @@ func createTechnicalAssets() { defineLinkTarget("{intro-risks-by-technical-asset}") html := pdf.HTMLBasicNew() var text strings.Builder - text.WriteString("In total " + strconv.Itoa(model.TotalRiskCount()) + " potential risks have been identified during the threat modeling process " + + text.WriteString("In total " + strconv.Itoa(model.TotalRiskCount(parsedModel)) + " potential risks have been identified during the threat modeling process " + "of which " + - "" + strconv.Itoa(len(model.FilteredByOnlyCriticalRisks())) + " are rated as critical, " + - "" + strconv.Itoa(len(model.FilteredByOnlyHighRisks())) + " as high, " + - "" + strconv.Itoa(len(model.FilteredByOnlyElevatedRisks())) + " as elevated, " + - "" + strconv.Itoa(len(model.FilteredByOnlyMediumRisks())) + " as medium, " 
+ - "and " + strconv.Itoa(len(model.FilteredByOnlyLowRisks())) + " as low. " + - "

These risks are distributed across " + strconv.Itoa(len(model.InScopeTechnicalAssets())) + " in-scope technical assets. ") + "" + strconv.Itoa(len(model.FilteredByOnlyCriticalRisks(parsedModel))) + " are rated as critical, " + + "" + strconv.Itoa(len(model.FilteredByOnlyHighRisks(parsedModel))) + " as high, " + + "" + strconv.Itoa(len(model.FilteredByOnlyElevatedRisks(parsedModel))) + " as elevated, " + + "" + strconv.Itoa(len(model.FilteredByOnlyMediumRisks(parsedModel))) + " as medium, " + + "and " + strconv.Itoa(len(model.FilteredByOnlyLowRisks(parsedModel))) + " as low. " + + "

These risks are distributed across " + strconv.Itoa(len(parsedModel.InScopeTechnicalAssets())) + " in-scope technical assets. ") text.WriteString("The following sub-chapters of this section describe each identified risk grouped by technical asset. ") // TODO more explanation text text.WriteString("The RAA value of a technical asset is the calculated \"Relative Attacker Attractiveness\" value in percent.") html.Write(5, text.String()) text.Reset() currentChapterTitleBreadcrumb = title - for _, technicalAsset := range model.SortedTechnicalAssetsByRiskSeverityAndTitle() { - risksStr := technicalAsset.GeneratedRisks() - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(risksStr)) + for _, technicalAsset := range sortedTechnicalAssetsByRiskSeverityAndTitle(parsedModel) { + risksStr := technicalAsset.GeneratedRisks(parsedModel) + countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(risksStr)) + " Risk" if len(risksStr) != 1 { suffix += "s" @@ -2542,21 +2597,21 @@ func createTechnicalAssets() { pdfColorOutOfScope() suffix = "out-of-scope" } else { - switch model.HighestSeverityStillAtRisk(risksStr) { - case model.CriticalSeverity: + switch model.HighestSeverityStillAtRisk(parsedModel, risksStr) { + case types.CriticalSeverity: colors.ColorCriticalRisk(pdf) - case model.HighSeverity: + case types.HighSeverity: colors.ColorHighRisk(pdf) - case model.ElevatedSeverity: + case types.ElevatedSeverity: colors.ColorElevatedRisk(pdf) - case model.MediumSeverity: + case types.MediumSeverity: colors.ColorMediumRisk(pdf) - case model.LowSeverity: + case types.LowSeverity: colors.ColorLowRisk(pdf) default: pdfColorBlack() } - if len(model.ReduceToOnlyStillAtRisk(risksStr)) == 0 { + if len(model.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) == 0 { pdfColorBlack() } } @@ -2613,7 +2668,7 @@ func createTechnicalAssets() { pdf.SetY(36) } switch risk.Severity { - case model.CriticalSeverity: + 
case types.CriticalSeverity: colors.ColorCriticalRisk(pdf) if !headlineCriticalWritten { pdf.SetFont("Helvetica", "", fontSizeBody) @@ -2621,7 +2676,7 @@ func createTechnicalAssets() { html.Write(5, "
Critical Risk Severity

") headlineCriticalWritten = true } - case model.HighSeverity: + case types.HighSeverity: colors.ColorHighRisk(pdf) if !headlineHighWritten { pdf.SetFont("Helvetica", "", fontSizeBody) @@ -2629,7 +2684,7 @@ func createTechnicalAssets() { html.Write(5, "
High Risk Severity

") headlineHighWritten = true } - case model.ElevatedSeverity: + case types.ElevatedSeverity: colors.ColorElevatedRisk(pdf) if !headlineElevatedWritten { pdf.SetFont("Helvetica", "", fontSizeBody) @@ -2637,7 +2692,7 @@ func createTechnicalAssets() { html.Write(5, "
Elevated Risk Severity

") headlineElevatedWritten = true } - case model.MediumSeverity: + case types.MediumSeverity: colors.ColorMediumRisk(pdf) if !headlineMediumWritten { pdf.SetFont("Helvetica", "", fontSizeBody) @@ -2645,7 +2700,7 @@ func createTechnicalAssets() { html.Write(5, "
Medium Risk Severity

") headlineMediumWritten = true } - case model.LowSeverity: + case types.LowSeverity: colors.ColorLowRisk(pdf) if !headlineLowWritten { pdf.SetFont("Helvetica", "", fontSizeBody) @@ -2656,7 +2711,7 @@ func createTechnicalAssets() { default: pdfColorBlack() } - if !risk.GetRiskTrackingStatusDefaultingUnchecked().IsStillAtRisk() { + if !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { pdfColorBlack() } posY := pdf.GetY() @@ -2672,7 +2727,7 @@ func createTechnicalAssets() { pdf.MultiCell(215, 5, uni(risk.SyntheticId), "0", "0", false) pdf.Link(20, posY, 180, pdf.GetY()-posY, tocLinkIdByAssetId[risk.Category.Id]) pdf.SetFont("Helvetica", "", fontSizeBody) - writeRiskTrackingStatus(risk) + writeRiskTrackingStatus(parsedModel, risk) pdf.SetLeftMargin(oldLeft) } } else { @@ -2848,7 +2903,7 @@ func createTechnicalAssets() { pdf.CellFormat(40, 6, "Data Processed:", "0", 0, "", false, 0, "") pdfColorBlack() dataAssetsProcessedText := "" - for _, dataAsset := range technicalAsset.DataAssetsProcessedSorted() { + for _, dataAsset := range technicalAsset.DataAssetsProcessedSorted(parsedModel) { if len(dataAssetsProcessedText) > 0 { dataAssetsProcessedText += ", " } @@ -2865,7 +2920,7 @@ func createTechnicalAssets() { pdf.CellFormat(40, 6, "Data Stored:", "0", 0, "", false, 0, "") pdfColorBlack() dataAssetsStoredText := "" - for _, dataAsset := range technicalAsset.DataAssetsStoredSorted() { + for _, dataAsset := range technicalAsset.DataAssetsStoredSorted(parsedModel) { if len(dataAssetsStoredText) > 0 { dataAssetsStoredText += ", " } @@ -3015,7 +3070,7 @@ func createTechnicalAssets() { pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") pdf.CellFormat(35, 6, "Target:", "0", 0, "", false, 0, "") pdfColorBlack() - pdf.MultiCell(125, 6, uni(model.ParsedModelRoot.TechnicalAssets[outgoingCommLink.TargetId].Title), "0", "0", false) + pdf.MultiCell(125, 6, uni(parsedModel.TechnicalAssets[outgoingCommLink.TargetId].Title), "0", "0", false) pdf.Link(60, 
pdf.GetY()-5, 70, 5, tocLinkIdByAssetId[outgoingCommLink.TargetId]) if pdf.GetY() > 270 { pageBreak() @@ -3116,7 +3171,7 @@ func createTechnicalAssets() { pdf.CellFormat(35, 6, "Data Sent:", "0", 0, "", false, 0, "") pdfColorBlack() dataAssetsSentText := "" - for _, dataAsset := range outgoingCommLink.DataAssetsSentSorted() { + for _, dataAsset := range outgoingCommLink.DataAssetsSentSorted(parsedModel) { if len(dataAssetsSentText) > 0 { dataAssetsSentText += ", " } @@ -3132,7 +3187,7 @@ func createTechnicalAssets() { pdf.CellFormat(35, 6, "Data Received:", "0", 0, "", false, 0, "") pdfColorBlack() dataAssetsReceivedText := "" - for _, dataAsset := range outgoingCommLink.DataAssetsReceivedSorted() { + for _, dataAsset := range outgoingCommLink.DataAssetsReceivedSorted(parsedModel) { if len(dataAssetsReceivedText) > 0 { dataAssetsReceivedText += ", " } @@ -3147,7 +3202,7 @@ func createTechnicalAssets() { } } - incomingCommLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] + incomingCommLinks := parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] if len(incomingCommLinks) > 0 { pdf.Ln(-1) if pdf.GetY() > 260 { // 260 only for major titles (to avoid "Schusterjungen"), for the rest attributes 270 @@ -3185,7 +3240,7 @@ func createTechnicalAssets() { pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") pdf.CellFormat(35, 6, "Source:", "0", 0, "", false, 0, "") pdfColorBlack() - pdf.MultiCell(140, 6, uni(model.ParsedModelRoot.TechnicalAssets[incomingCommLink.SourceId].Title), "0", "0", false) + pdf.MultiCell(140, 6, uni(parsedModel.TechnicalAssets[incomingCommLink.SourceId].Title), "0", "0", false) pdf.Link(60, pdf.GetY()-5, 70, 5, tocLinkIdByAssetId[incomingCommLink.SourceId]) if pdf.GetY() > 270 { pageBreak() @@ -3287,7 +3342,7 @@ func createTechnicalAssets() { pdfColorBlack() dataAssetsSentText := "" // yep, here we reverse the sent/received direction, as it's the incoming stuff - for _, dataAsset := 
range incomingCommLink.DataAssetsSentSorted() { + for _, dataAsset := range incomingCommLink.DataAssetsSentSorted(parsedModel) { if len(dataAssetsSentText) > 0 { dataAssetsSentText += ", " } @@ -3304,7 +3359,7 @@ func createTechnicalAssets() { pdfColorBlack() dataAssetsReceivedText := "" // yep, here we reverse the sent/received direction, as it's the incoming stuff - for _, dataAsset := range incomingCommLink.DataAssetsReceivedSorted() { + for _, dataAsset := range incomingCommLink.DataAssetsReceivedSorted(parsedModel) { if len(dataAssetsReceivedText) > 0 { dataAssetsReceivedText += ", " } @@ -3321,28 +3376,28 @@ func createTechnicalAssets() { } } -func createDataAssets() { +func createDataAssets(parsedModel *model.ParsedModel) { uni := pdf.UnicodeTranslatorFromDescriptor("") title := "Identified Data Breach Probabilities by Data Asset" pdfColorBlack() addHeadline(title, false) defineLinkTarget("{intro-risks-by-data-asset}") html := pdf.HTMLBasicNew() - html.Write(5, "In total "+strconv.Itoa(model.TotalRiskCount())+" potential risks have been identified during the threat modeling process "+ + html.Write(5, "In total "+strconv.Itoa(model.TotalRiskCount(parsedModel))+" potential risks have been identified during the threat modeling process "+ "of which "+ - ""+strconv.Itoa(len(model.FilteredByOnlyCriticalRisks()))+" are rated as critical, "+ - ""+strconv.Itoa(len(model.FilteredByOnlyHighRisks()))+" as high, "+ - ""+strconv.Itoa(len(model.FilteredByOnlyElevatedRisks()))+" as elevated, "+ - ""+strconv.Itoa(len(model.FilteredByOnlyMediumRisks()))+" as medium, "+ - "and "+strconv.Itoa(len(model.FilteredByOnlyLowRisks()))+" as low. "+ - "

These risks are distributed across "+strconv.Itoa(len(model.ParsedModelRoot.DataAssets))+" data assets. ") + ""+strconv.Itoa(len(model.FilteredByOnlyCriticalRisks(parsedModel)))+" are rated as critical, "+ + ""+strconv.Itoa(len(model.FilteredByOnlyHighRisks(parsedModel)))+" as high, "+ + ""+strconv.Itoa(len(model.FilteredByOnlyElevatedRisks(parsedModel)))+" as elevated, "+ + ""+strconv.Itoa(len(model.FilteredByOnlyMediumRisks(parsedModel)))+" as medium, "+ + "and "+strconv.Itoa(len(model.FilteredByOnlyLowRisks(parsedModel)))+" as low. "+ + "

These risks are distributed across "+strconv.Itoa(len(parsedModel.DataAssets))+" data assets. ") html.Write(5, "The following sub-chapters of this section describe the derived data breach probabilities grouped by data asset.
") // TODO more explanation text pdf.SetFont("Helvetica", "", fontSizeSmall) pdfColorGray() html.Write(5, "Technical asset names and risk IDs are clickable and link to the corresponding chapter.") pdf.SetFont("Helvetica", "", fontSizeBody) currentChapterTitleBreadcrumb = title - for _, dataAsset := range model.SortedDataAssetsByDataBreachProbabilityAndTitle() { + for _, dataAsset := range sortedDataAssetsByDataBreachProbabilityAndTitle(parsedModel) { if pdf.GetY() > 280 { // 280 as only small font previously (not 250) pageBreak() pdf.SetY(36) @@ -3350,21 +3405,21 @@ func createDataAssets() { html.Write(5, "


") } pdfColorBlack() - switch dataAsset.IdentifiedDataBreachProbabilityStillAtRisk() { - case model.Probable: + switch dataAsset.IdentifiedDataBreachProbabilityStillAtRisk(parsedModel) { + case types.Probable: colors.ColorHighRisk(pdf) - case model.Possible: + case types.Possible: colors.ColorMediumRisk(pdf) - case model.Improbable: + case types.Improbable: colors.ColorLowRisk(pdf) default: pdfColorBlack() } - if !dataAsset.IsDataBreachPotentialStillAtRisk() { + if !dataAsset.IsDataBreachPotentialStillAtRisk(parsedModel) { pdfColorBlack() } - risksStr := dataAsset.IdentifiedDataBreachProbabilityRisks() - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(risksStr)) + risksStr := dataAsset.IdentifiedDataBreachProbabilityRisks(parsedModel) + countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(risksStr)) + " Risk" if len(risksStr) != 1 { suffix += "s" @@ -3527,7 +3582,7 @@ func createDataAssets() { pdf.CellFormat(40, 6, "Processed by:", "0", 0, "", false, 0, "") pdfColorBlack() processedByText := "" - for _, dataAsset := range dataAsset.ProcessedByTechnicalAssetsSorted() { + for _, dataAsset := range dataAsset.ProcessedByTechnicalAssetsSorted(parsedModel) { if len(processedByText) > 0 { processedByText += ", " } @@ -3548,7 +3603,7 @@ func createDataAssets() { pdf.CellFormat(40, 6, "Stored by:", "0", 0, "", false, 0, "") pdfColorBlack() storedByText := "" - for _, dataAsset := range dataAsset.StoredByTechnicalAssetsSorted() { + for _, dataAsset := range dataAsset.StoredByTechnicalAssetsSorted(parsedModel) { if len(storedByText) > 0 { storedByText += ", " } @@ -3569,7 +3624,7 @@ func createDataAssets() { pdf.CellFormat(40, 6, "Sent via:", "0", 0, "", false, 0, "") pdfColorBlack() sentViaText := "" - for _, commLink := range dataAsset.SentViaCommLinksSorted() { + for _, commLink := range dataAsset.SentViaCommLinksSorted(parsedModel) { if len(sentViaText) > 0 { sentViaText += 
", " } @@ -3590,7 +3645,7 @@ func createDataAssets() { pdf.CellFormat(40, 6, "Received via:", "0", 0, "", false, 0, "") pdfColorBlack() receivedViaText := "" - for _, commLink := range dataAsset.ReceivedViaCommLinksSorted() { + for _, commLink := range dataAsset.ReceivedViaCommLinksSorted(parsedModel) { if len(receivedViaText) > 0 { receivedViaText += ", " } @@ -3607,7 +3662,7 @@ func createDataAssets() { risksByTechAssetId := dataAsset.IdentifiedRisksByResponsibleTechnicalAssetId() techAssetsResponsible := make([]model.TechnicalAsset, 0) for techAssetId, _ := range risksByTechAssetId { - techAssetsResponsible = append(techAssetsResponsible, model.ParsedModelRoot.TechnicalAssets[techAssetId]) + techAssetsResponsible = append(techAssetsResponsible, parsedModel.TechnicalAssets[techAssetId]) } sort.Sort(model.ByTechnicalAssetRiskSeverityAndTitleSortStillAtRisk(techAssetsResponsible)) assetStr := "assets" @@ -3668,19 +3723,19 @@ func createDataAssets() { pdf.CellFormat(40, 6, "Data Breach:", "0", 0, "", false, 0, "") pdfColorBlack() pdf.SetFont("Helvetica", "B", fontSizeBody) - dataBreachProbability := dataAsset.IdentifiedDataBreachProbabilityStillAtRisk() + dataBreachProbability := dataAsset.IdentifiedDataBreachProbabilityStillAtRisk(parsedModel) riskText := dataBreachProbability.String() switch dataBreachProbability { - case model.Probable: + case types.Probable: colors.ColorHighRisk(pdf) - case model.Possible: + case types.Possible: colors.ColorMediumRisk(pdf) - case model.Improbable: + case types.Improbable: colors.ColorLowRisk(pdf) default: pdfColorBlack() } - if !dataAsset.IsDataBreachPotentialStillAtRisk() { + if !dataAsset.IsDataBreachPotentialStillAtRisk(parsedModel) { pdfColorBlack() riskText = "none" } @@ -3692,8 +3747,8 @@ func createDataAssets() { } // how can is this data asset be indirectly lost (i.e. 
why) - dataBreachRisksStillAtRisk := dataAsset.IdentifiedDataBreachProbabilityRisksStillAtRisk() - sort.Sort(model.ByDataBreachProbabilitySort(dataBreachRisksStillAtRisk)) + dataBreachRisksStillAtRisk := dataAsset.IdentifiedDataBreachProbabilityRisksStillAtRisk(parsedModel) + model.SortByDataBreachProbability(dataBreachRisksStillAtRisk, parsedModel) if pdf.GetY() > 265 { pageBreak() pdf.SetY(36) @@ -3718,16 +3773,16 @@ func createDataAssets() { pdf.SetY(36) } switch dataBreachRisk.DataBreachProbability { - case model.Probable: + case types.Probable: colors.ColorHighRisk(pdf) - case model.Possible: + case types.Possible: colors.ColorMediumRisk(pdf) - case model.Improbable: + case types.Improbable: colors.ColorLowRisk(pdf) default: pdfColorBlack() } - if !dataBreachRisk.GetRiskTrackingStatusDefaultingUnchecked().IsStillAtRisk() { + if !dataBreachRisk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { pdfColorBlack() } pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") @@ -3742,7 +3797,7 @@ func createDataAssets() { } } -func createTrustBoundaries() { +func createTrustBoundaries(parsedModel *model.ParsedModel) { uni := pdf.UnicodeTranslatorFromDescriptor("") title := "Trust Boundaries" pdfColorBlack() @@ -3750,13 +3805,13 @@ func createTrustBoundaries() { html := pdf.HTMLBasicNew() word := "has" - if len(model.ParsedModelRoot.TrustBoundaries) > 1 { + if len(parsedModel.TrustBoundaries) > 1 { word = "have" } - html.Write(5, "In total "+strconv.Itoa(len(model.ParsedModelRoot.TrustBoundaries))+" trust boundaries "+word+" been "+ + html.Write(5, "In total "+strconv.Itoa(len(parsedModel.TrustBoundaries))+" trust boundaries "+word+" been "+ "modeled during the threat modeling process.") currentChapterTitleBreadcrumb = title - for _, trustBoundary := range model.SortedTrustBoundariesByTitle() { + for _, trustBoundary := range sortedTrustBoundariesByTitle(parsedModel) { if pdf.GetY() > 250 { pageBreak() pdf.SetY(36) @@ -3830,7 +3885,7 @@ func 
createTrustBoundaries() { if len(assetsInsideText) > 0 { assetsInsideText += ", " } - assetsInsideText += model.ParsedModelRoot.TechnicalAssets[assetKey].Title // TODO add link to technical asset detail chapter and back + assetsInsideText += parsedModel.TechnicalAssets[assetKey].Title // TODO add link to technical asset detail chapter and back } if len(assetsInsideText) == 0 { pdfColorGray() @@ -3851,7 +3906,7 @@ func createTrustBoundaries() { if len(boundariesNestedText) > 0 { boundariesNestedText += ", " } - boundariesNestedText += model.ParsedModelRoot.TrustBoundaries[assetKey].Title + boundariesNestedText += parsedModel.TrustBoundaries[assetKey].Title } if len(boundariesNestedText) == 0 { pdfColorGray() @@ -3861,7 +3916,17 @@ func createTrustBoundaries() { } } -func createSharedRuntimes() { +func questionsUnanswered(parsedModel *model.ParsedModel) int { + result := 0 + for _, answer := range parsedModel.Questions { + if len(strings.TrimSpace(answer)) == 0 { + result++ + } + } + return result +} + +func createSharedRuntimes(parsedModel *model.ParsedModel) { uni := pdf.UnicodeTranslatorFromDescriptor("") title := "Shared Runtimes" pdfColorBlack() @@ -3869,13 +3934,13 @@ func createSharedRuntimes() { html := pdf.HTMLBasicNew() word, runtime := "has", "runtime" - if len(model.ParsedModelRoot.SharedRuntimes) > 1 { + if len(parsedModel.SharedRuntimes) > 1 { word, runtime = "have", "runtimes" } - html.Write(5, "In total "+strconv.Itoa(len(model.ParsedModelRoot.SharedRuntimes))+" shared "+runtime+" "+word+" been "+ + html.Write(5, "In total "+strconv.Itoa(len(parsedModel.SharedRuntimes))+" shared "+runtime+" "+word+" been "+ "modeled during the threat modeling process.") currentChapterTitleBreadcrumb = title - for _, sharedRuntime := range model.SortedSharedRuntimesByTitle() { + for _, sharedRuntime := range sortedSharedRuntimesByTitle(parsedModel) { pdfColorBlack() if pdf.GetY() > 250 { pageBreak() @@ -3932,7 +3997,7 @@ func createSharedRuntimes() { if 
len(assetsInsideText) > 0 { assetsInsideText += ", " } - assetsInsideText += model.ParsedModelRoot.TechnicalAssets[assetKey].Title // TODO add link to technical asset detail chapter and back + assetsInsideText += parsedModel.TechnicalAssets[assetKey].Title // TODO add link to technical asset detail chapter and back } if len(assetsInsideText) == 0 { pdfColorGray() @@ -3942,7 +4007,7 @@ func createSharedRuntimes() { } } -func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTimestamp string, modelHash string, customRiskRules map[string]*risks.CustomRisk) { +func createRiskRulesChecked(parsedModel *model.ParsedModel, modelFilename string, skipRiskRules string, buildTimestamp string, modelHash string, customRiskRules map[string]*model.CustomRisk) { pdf.SetTextColor(0, 0, 0) title := "Risk Rules Checked by Threagile" addHeadline(title, false) @@ -3954,7 +4019,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdfColorGray() pdf.SetFont("Helvetica", "", fontSizeSmall) timestamp := time.Now() - strBuilder.WriteString("Threagile Version: " + model.ThreagileVersion) + strBuilder.WriteString("Threagile Version: " + docs.ThreagileVersion) strBuilder.WriteString("
Threagile Build Timestamp: " + buildTimestamp) strBuilder.WriteString("
Threagile Execution Timestamp: " + timestamp.Format("20060102150405")) strBuilder.WriteString("
Model Filename: " + modelFilename) @@ -3978,7 +4043,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim for id, customRule := range customRiskRules { pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, id) { + if contains(skippedRules, id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4014,8 +4079,8 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.MultiCell(160, 6, customRule.Category.RiskAssessment, "0", "0", false) } - for _, key := range model.SortedKeysOfIndividualRiskCategories() { - individualRiskCategory := model.ParsedModelRoot.IndividualRiskCategories[key] + for _, key := range sortedKeysOfIndividualRiskCategories(parsedModel) { + individualRiskCategory := parsedModel.IndividualRiskCategories[key] pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) pdf.CellFormat(190, 3, individualRiskCategory.Title, "0", 0, "", false, 0, "") @@ -4051,7 +4116,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, accidental_secret_leak.Category().Id) { + if contains(skippedRules, accidental_secret_leak.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4085,7 +4150,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, code_backdooring.Category().Id) { + if contains(skippedRules, code_backdooring.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4119,7 +4184,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, container_baseimage_backdooring.Category().Id) { + if contains(skippedRules, container_baseimage_backdooring.Category().Id) { skipped = "SKIPPED - " } else { 
skipped = "" @@ -4153,7 +4218,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, container_platform_escape.Category().Id) { + if contains(skippedRules, container_platform_escape.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4187,7 +4252,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, cross_site_request_forgery.Category().Id) { + if contains(skippedRules, cross_site_request_forgery.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4221,7 +4286,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, cross_site_scripting.Category().Id) { + if contains(skippedRules, cross_site_scripting.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4255,7 +4320,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, dos_risky_access_across_trust_boundary.Category().Id) { + if contains(skippedRules, dos_risky_access_across_trust_boundary.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4289,7 +4354,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, incomplete_model.Category().Id) { + if contains(skippedRules, incomplete_model.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4323,7 +4388,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, ldap_injection.Category().Id) { + if 
contains(skippedRules, ldap_injection.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4357,7 +4422,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, missing_authentication.Category().Id) { + if contains(skippedRules, missing_authentication.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4391,7 +4456,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, missing_authentication_second_factor.Category().Id) { + if contains(skippedRules, missing_authentication_second_factor.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4425,7 +4490,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, missing_build_infrastructure.Category().Id) { + if contains(skippedRules, missing_build_infrastructure.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4459,7 +4524,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, missing_cloud_hardening.Category().Id) { + if contains(skippedRules, missing_cloud_hardening.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4493,7 +4558,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, missing_file_validation.Category().Id) { + if contains(skippedRules, missing_file_validation.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4527,7 +4592,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) 
pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, missing_hardening.Category().Id) { + if contains(skippedRules, missing_hardening.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4561,7 +4626,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, missing_identity_propagation.Category().Id) { + if contains(skippedRules, missing_identity_propagation.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4595,7 +4660,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, missing_identity_provider_isolation.Category().Id) { + if contains(skippedRules, missing_identity_provider_isolation.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4629,7 +4694,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, missing_identity_store.Category().Id) { + if contains(skippedRules, missing_identity_store.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4663,7 +4728,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, missing_network_segmentation.Category().Id) { + if contains(skippedRules, missing_network_segmentation.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4697,7 +4762,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, missing_vault.Category().Id) { + if contains(skippedRules, missing_vault.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4731,7 +4796,7 @@ 
func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, missing_vault_isolation.Category().Id) { + if contains(skippedRules, missing_vault_isolation.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4765,7 +4830,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, missing_waf.Category().Id) { + if contains(skippedRules, missing_waf.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4799,7 +4864,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, mixed_targets_on_shared_runtime.Category().Id) { + if contains(skippedRules, mixed_targets_on_shared_runtime.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4833,7 +4898,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, path_traversal.Category().Id) { + if contains(skippedRules, path_traversal.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4867,7 +4932,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, push_instead_of_pull_deployment.Category().Id) { + if contains(skippedRules, push_instead_of_pull_deployment.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4901,7 +4966,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, search_query_injection.Category().Id) { + if contains(skippedRules, 
search_query_injection.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4935,7 +5000,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, server_side_request_forgery.Category().Id) { + if contains(skippedRules, server_side_request_forgery.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -4969,7 +5034,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, service_registry_poisoning.Category().Id) { + if contains(skippedRules, service_registry_poisoning.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -5003,7 +5068,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, sql_nosql_injection.Category().Id) { + if contains(skippedRules, sql_nosql_injection.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -5037,7 +5102,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, unchecked_deployment.Category().Id) { + if contains(skippedRules, unchecked_deployment.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -5071,7 +5136,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, unencrypted_asset.Category().Id) { + if contains(skippedRules, unencrypted_asset.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -5105,7 +5170,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, 
unencrypted_communication.Category().Id) { + if contains(skippedRules, unencrypted_communication.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -5139,7 +5204,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, unguarded_access_from_internet.Category().Id) { + if contains(skippedRules, unguarded_access_from_internet.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -5173,7 +5238,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, unguarded_direct_datastore_access.Category().Id) { + if contains(skippedRules, unguarded_direct_datastore_access.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -5207,7 +5272,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, unnecessary_communication_link.Category().Id) { + if contains(skippedRules, unnecessary_communication_link.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -5241,7 +5306,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, unnecessary_data_asset.Category().Id) { + if contains(skippedRules, unnecessary_data_asset.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -5275,7 +5340,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, unnecessary_data_transfer.Category().Id) { + if contains(skippedRules, unnecessary_data_transfer.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -5309,7 +5374,7 @@ func 
createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, unnecessary_technical_asset.Category().Id) { + if contains(skippedRules, unnecessary_technical_asset.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -5343,7 +5408,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, untrusted_deserialization.Category().Id) { + if contains(skippedRules, untrusted_deserialization.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -5377,7 +5442,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, wrong_communication_link_content.Category().Id) { + if contains(skippedRules, wrong_communication_link_content.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -5411,7 +5476,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, wrong_trust_boundary_content.Category().Id) { + if contains(skippedRules, wrong_trust_boundary_content.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -5445,7 +5510,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.Ln(-1) pdf.SetFont("Helvetica", "B", fontSizeBody) - if model.Contains(skippedRules, xml_external_entity.Category().Id) { + if contains(skippedRules, xml_external_entity.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" @@ -5478,7 +5543,7 @@ func createRiskRulesChecked(modelFilename string, skipRiskRules string, buildTim pdf.MultiCell(160, 6, xml_external_entity.Category().RiskAssessment, "0", "0", false) } -func createTargetDescription(baseFolder string) { +func 
createTargetDescription(parsedModel *model.ParsedModel, baseFolder string) { uni := pdf.UnicodeTranslatorFromDescriptor("") pdf.SetTextColor(0, 0, 0) title := "Application Overview" @@ -5490,70 +5555,70 @@ func createTargetDescription(baseFolder string) { html := pdf.HTMLBasicNew() intro.WriteString("Business Criticality

") - intro.WriteString("The overall business criticality of \"" + uni(model.ParsedModelRoot.Title) + "\" was rated as:

") + intro.WriteString("The overall business criticality of \"" + uni(parsedModel.Title) + "\" was rated as:

") html.Write(5, intro.String()) - criticality := model.ParsedModelRoot.BusinessCriticality + criticality := parsedModel.BusinessCriticality intro.Reset() pdfColorGray() intro.WriteString("( ") - if criticality == model.Archive { + if criticality == types.Archive { html.Write(5, intro.String()) intro.Reset() pdfColorBlack() - intro.WriteString("" + strings.ToUpper(model.Archive.String()) + "") + intro.WriteString("" + strings.ToUpper(types.Archive.String()) + "") html.Write(5, intro.String()) intro.Reset() pdfColorGray() } else { - intro.WriteString(model.Archive.String()) + intro.WriteString(types.Archive.String()) } intro.WriteString(" | ") - if criticality == model.Operational { + if criticality == types.Operational { html.Write(5, intro.String()) intro.Reset() pdfColorBlack() - intro.WriteString("" + strings.ToUpper(model.Operational.String()) + "") + intro.WriteString("" + strings.ToUpper(types.Operational.String()) + "") html.Write(5, intro.String()) intro.Reset() pdfColorGray() } else { - intro.WriteString(model.Operational.String()) + intro.WriteString(types.Operational.String()) } intro.WriteString(" | ") - if criticality == model.Important { + if criticality == types.Important { html.Write(5, intro.String()) intro.Reset() pdfColorBlack() - intro.WriteString("" + strings.ToUpper(model.Important.String()) + "") + intro.WriteString("" + strings.ToUpper(types.Important.String()) + "") html.Write(5, intro.String()) intro.Reset() pdfColorGray() } else { - intro.WriteString(model.Important.String()) + intro.WriteString(types.Important.String()) } intro.WriteString(" | ") - if criticality == model.Critical { + if criticality == types.Critical { html.Write(5, intro.String()) intro.Reset() pdfColorBlack() - intro.WriteString("" + strings.ToUpper(model.Critical.String()) + "") + intro.WriteString("" + strings.ToUpper(types.Critical.String()) + "") html.Write(5, intro.String()) intro.Reset() pdfColorGray() } else { - intro.WriteString(model.Critical.String()) + 
intro.WriteString(types.Critical.String()) } intro.WriteString(" | ") - if criticality == model.MissionCritical { + if criticality == types.MissionCritical { html.Write(5, intro.String()) intro.Reset() pdfColorBlack() - intro.WriteString("" + strings.ToUpper(model.MissionCritical.String()) + "") + intro.WriteString("" + strings.ToUpper(types.MissionCritical.String()) + "") html.Write(5, intro.String()) intro.Reset() pdfColorGray() } else { - intro.WriteString(model.MissionCritical.String()) + intro.WriteString(types.MissionCritical.String()) } intro.WriteString(" )") html.Write(5, intro.String()) @@ -5561,16 +5626,16 @@ func createTargetDescription(baseFolder string) { pdfColorBlack() intro.WriteString("


Business Overview

") - intro.WriteString(uni(model.ParsedModelRoot.BusinessOverview.Description)) + intro.WriteString(uni(parsedModel.BusinessOverview.Description)) html.Write(5, intro.String()) intro.Reset() - addCustomImages(model.ParsedModelRoot.BusinessOverview.Images, baseFolder, html) + addCustomImages(parsedModel.BusinessOverview.Images, baseFolder, html) intro.WriteString("


Technical Overview

") - intro.WriteString(uni(model.ParsedModelRoot.TechnicalOverview.Description)) + intro.WriteString(uni(parsedModel.TechnicalOverview.Description)) html.Write(5, intro.String()) intro.Reset() - addCustomImages(model.ParsedModelRoot.TechnicalOverview.Images, baseFolder, html) + addCustomImages(parsedModel.TechnicalOverview.Images, baseFolder, html) } func addCustomImages(customImages []map[string]string, baseFolder string, html gofpdf.HTMLBasicType) { @@ -5714,6 +5779,15 @@ func embedDataFlowDiagram(diagramFilenamePNG string, tempFolder string) { } } +func sortedKeysOfIndividualRiskCategories(parsedModel *model.ParsedModel) []string { + keys := make([]string, 0) + for k := range parsedModel.IndividualRiskCategories { + keys = append(keys, k) + } + sort.Strings(keys) + return keys +} + func embedDataRiskMapping(diagramFilenamePNG string, tempFolder string) { pdf.SetTextColor(0, 0, 0) title := "Data Mapping" @@ -5817,6 +5891,7 @@ func pageBreak() { pdf.SetX(17) pdf.SetY(20) } + func pageBreakInLists() { pageBreak() pdf.SetLineWidth(0.25) @@ -5901,3 +5976,12 @@ func pdfColorRed() { func rgbHexColorRed() string { return "#FF0000" } + +func contains(a []string, x string) bool { + for _, n := range a { + if x == n { + return true + } + } + return false +} diff --git a/pkg/risks/risk.go b/pkg/risks/risk.go deleted file mode 100644 index 5b00981b..00000000 --- a/pkg/risks/risk.go +++ /dev/null @@ -1,34 +0,0 @@ -package risks - -import ( - "github.com/threagile/threagile/model" - "github.com/threagile/threagile/pkg/run" - "log" -) - -type BuiltInRisk struct { - Category func() model.RiskCategory - SupportedTags func() []string - GenerateRisks func(m *model.ParsedModel) []model.Risk -} - -type CustomRisk struct { - ID string - Category model.RiskCategory - Tags []string - Runner *run.Runner -} - -func (r *CustomRisk) GenerateRisks(m *model.ParsedModel) []model.Risk { - if r.Runner == nil { - return nil - } - - risks := make([]model.Risk, 0) - runError := r.Runner.Run(m, 
&risks, "-generate-risks") - if runError != nil { - log.Fatalf("Failed to generate risks for custom risk rule %q: %v\n", r.Runner.Filename, runError) - } - - return risks -} diff --git a/pkg/run/runner.go b/pkg/run/runner.go index 3d624b07..f2a4c152 100644 --- a/pkg/run/runner.go +++ b/pkg/run/runner.go @@ -1,3 +1,4 @@ +// TODO: consider moving to internal package run import ( diff --git a/pkg/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go b/pkg/security/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go similarity index 66% rename from pkg/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go rename to pkg/security/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go index fd1945fe..7df9371a 100644 --- a/pkg/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go +++ b/pkg/security/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go @@ -1,7 +1,8 @@ package accidental_secret_leak import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -28,8 +29,8 @@ func Category() model.RiskCategory { "See for example tools like \"git-secrets\" or \"Talisman\" to have check-in preventive measures for secrets. 
" + "Consider also to regularly scan your repositories for secrets accidentally checked-in using scanning tools like \"gitleaks\" or \"gitrob\".", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Operations, - STRIDE: model.InformationDisclosure, + Function: types.Operations, + STRIDE: types.InformationDisclosure, DetectionLogic: "In-scope sourcecode repositories and artifact registries.", RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", FalsePositives: "Usually no false positives.", @@ -42,17 +43,17 @@ func SupportedTags() []string { return []string{"git", "nexus"} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { +func GenerateRisks(parsedModel *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { - techAsset := input.TechnicalAssets[id] + for _, id := range parsedModel.SortedTechnicalAssetIDs() { + techAsset := parsedModel.TechnicalAssets[id] if !techAsset.OutOfScope && - (techAsset.Technology == model.SourcecodeRepository || techAsset.Technology == model.ArtifactRegistry) { + (techAsset.Technology == types.SourcecodeRepository || techAsset.Technology == types.ArtifactRegistry) { var risk model.Risk if techAsset.IsTaggedWithAny("git") { - risk = createRisk(techAsset, "Git", "Git Leak Prevention") + risk = createRisk(parsedModel, techAsset, "Git", "Git Leak Prevention") } else { - risk = createRisk(techAsset, "", "") + risk = createRisk(parsedModel, techAsset, "", "") } risks = append(risks, risk) } @@ -60,7 +61,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(technicalAsset model.TechnicalAsset, prefix, details string) model.Risk { +func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAsset, prefix, details string) model.Risk { if len(prefix) > 0 { prefix = " (" 
+ prefix + ")" } @@ -68,26 +69,26 @@ func createRisk(technicalAsset model.TechnicalAsset, prefix, details string) mod if len(details) > 0 { title += ": " + details + "" } - impact := model.LowImpact - if technicalAsset.HighestConfidentiality() >= model.Confidential || - technicalAsset.HighestIntegrity() >= model.Critical || - technicalAsset.HighestAvailability() >= model.Critical { - impact = model.MediumImpact + impact := types.LowImpact + if technicalAsset.HighestConfidentiality(parsedModel) >= types.Confidential || + technicalAsset.HighestIntegrity(parsedModel) >= types.Critical || + technicalAsset.HighestAvailability(parsedModel) >= types.Critical { + impact = types.MediumImpact } - if technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || - technicalAsset.HighestIntegrity() == model.MissionCritical || - technicalAsset.HighestAvailability() == model.MissionCritical { - impact = model.HighImpact + if technicalAsset.HighestConfidentiality(parsedModel) == types.StrictlyConfidential || + technicalAsset.HighestIntegrity(parsedModel) == types.MissionCritical || + technicalAsset.HighestAvailability(parsedModel) == types.MissionCritical { + impact = types.HighImpact } // create risk risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, impact), - ExploitationLikelihood: model.Unlikely, + Severity: model.CalculateSeverity(types.Unlikely, impact), + ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Probable, + DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id diff --git a/pkg/risks/built-in/code-backdooring/code-backdooring-rule.go b/pkg/security/risks/built-in/code-backdooring/code-backdooring-rule.go similarity index 78% rename from pkg/risks/built-in/code-backdooring/code-backdooring-rule.go 
rename to pkg/security/risks/built-in/code-backdooring/code-backdooring-rule.go index 72419c91..3cae4418 100644 --- a/pkg/risks/built-in/code-backdooring/code-backdooring-rule.go +++ b/pkg/security/risks/built-in/code-backdooring/code-backdooring-rule.go @@ -1,7 +1,8 @@ package code_backdooring import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -28,8 +29,8 @@ func Category() model.RiskCategory { "components on the public internet and also not exposing it in front of unmanaged (out-of-scope) developer clients." + "Also consider the use of code signing to prevent code modifications.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Operations, - STRIDE: model.Tampering, + Function: types.Operations, + STRIDE: types.Tampering, DetectionLogic: "In-scope development relevant technical assets which are either accessed by out-of-scope unmanaged " + "developer clients and/or are directly accessed by any kind of internet-located (non-VPN) component or are themselves directly located " + "on the internet.", @@ -48,23 +49,23 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { +func GenerateRisks(parsedModel *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := input.TechnicalAssets[id] + for _, id := range parsedModel.SortedTechnicalAssetIDs() { + technicalAsset := parsedModel.TechnicalAssets[id] if !technicalAsset.OutOfScope && technicalAsset.Technology.IsDevelopmentRelevant() { if technicalAsset.Internet { - risks = append(risks, createRisk(input, technicalAsset, true)) + risks = append(risks, createRisk(parsedModel, technicalAsset, true)) continue } // TODO: ensure that even internet or unmanaged clients coming over a 
reverse-proxy or load-balancer like component are treated as if it was directly accessed/exposed on the internet or towards unmanaged dev clients //riskByLinkAdded := false - for _, callerLink := range model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] { - caller := input.TechnicalAssets[callerLink.SourceId] + for _, callerLink := range parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] { + caller := parsedModel.TechnicalAssets[callerLink.SourceId] if (!callerLink.VPN && caller.Internet) || caller.OutOfScope { - risks = append(risks, createRisk(input, technicalAsset, true)) + risks = append(risks, createRisk(parsedModel, technicalAsset, true)) //riskByLinkAdded = true break } @@ -76,15 +77,15 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, elevatedRisk bool) model.Risk { title := "Code Backdooring risk at " + technicalAsset.Title + "" - impact := model.LowImpact - if technicalAsset.Technology != model.CodeInspectionPlatform { + impact := types.LowImpact + if technicalAsset.Technology != types.CodeInspectionPlatform { if elevatedRisk { - impact = model.MediumImpact + impact = types.MediumImpact } - if technicalAsset.HighestConfidentiality() >= model.Confidential || technicalAsset.HighestIntegrity() >= model.Critical { - impact = model.MediumImpact + if technicalAsset.HighestConfidentiality(input) >= types.Confidential || technicalAsset.HighestIntegrity(input) >= types.Critical { + impact = types.MediumImpact if elevatedRisk { - impact = model.HighImpact + impact = types.HighImpact } } } @@ -92,10 +93,10 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, e uniqueDataBreachTechnicalAssetIDs := make(map[string]interface{}) uniqueDataBreachTechnicalAssetIDs[technicalAsset.Id] = true for _, codeDeploymentTargetCommLink := range technicalAsset.CommunicationLinks { - if 
codeDeploymentTargetCommLink.Usage == model.DevOps { + if codeDeploymentTargetCommLink.Usage == types.DevOps { for _, dataAssetID := range codeDeploymentTargetCommLink.DataAssetsSent { // it appears to be code when elevated integrity rating of sent data asset - if input.DataAssets[dataAssetID].Integrity >= model.Important { + if input.DataAssets[dataAssetID].Integrity >= types.Important { // here we've got a deployment target which has its data assets at risk via deployment of backdoored code uniqueDataBreachTechnicalAssetIDs[codeDeploymentTargetCommLink.TargetId] = true break @@ -110,12 +111,12 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, e // create risk risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, impact), - ExploitationLikelihood: model.Unlikely, + Severity: model.CalculateSeverity(types.Unlikely, impact), + ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Probable, + DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: dataBreachTechnicalAssetIDs, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id diff --git a/pkg/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go b/pkg/security/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go similarity index 72% rename from pkg/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go rename to pkg/security/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go index 52f36d3b..65ee7058 100644 --- a/pkg/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go +++ b/pkg/security/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go @@ -1,7 +1,8 @@ package container_baseimage_backdooring import ( - 
"github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -28,8 +29,8 @@ func Category() model.RiskCategory { "Also consider using Google's Distroless base images or otherwise very small base images. " + "Regularly execute container image scans with tools checking the layers for vulnerable components.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS/CSVS applied?", - Function: model.Operations, - STRIDE: model.Tampering, + Function: types.Operations, + STRIDE: types.Tampering, DetectionLogic: "In-scope technical assets running as containers.", RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets.", FalsePositives: "Fully trusted (i.e. reviewed and cryptographically signed or similar) base images of containers can be considered " + @@ -43,33 +44,33 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { +func GenerateRisks(parsedModel *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := input.TechnicalAssets[id] - if !technicalAsset.OutOfScope && technicalAsset.Machine == model.Container { - risks = append(risks, createRisk(technicalAsset)) + for _, id := range parsedModel.SortedTechnicalAssetIDs() { + technicalAsset := parsedModel.TechnicalAssets[id] + if !technicalAsset.OutOfScope && technicalAsset.Machine == types.Container { + risks = append(risks, createRisk(parsedModel, technicalAsset)) } } return risks } -func createRisk(technicalAsset model.TechnicalAsset) model.Risk { +func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAsset) model.Risk { title := "Container Base Image Backdooring risk at " + technicalAsset.Title + "" - impact := model.MediumImpact - if 
technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || - technicalAsset.HighestIntegrity() == model.MissionCritical || - technicalAsset.HighestAvailability() == model.MissionCritical { - impact = model.HighImpact + impact := types.MediumImpact + if technicalAsset.HighestConfidentiality(parsedModel) == types.StrictlyConfidential || + technicalAsset.HighestIntegrity(parsedModel) == types.MissionCritical || + technicalAsset.HighestAvailability(parsedModel) == types.MissionCritical { + impact = types.HighImpact } risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, impact), - ExploitationLikelihood: model.Unlikely, + Severity: model.CalculateSeverity(types.Unlikely, impact), + ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Probable, + DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id diff --git a/pkg/risks/built-in/container-platform-escape/container-platform-escape-rule.go b/pkg/security/risks/built-in/container-platform-escape/container-platform-escape-rule.go similarity index 77% rename from pkg/risks/built-in/container-platform-escape/container-platform-escape-rule.go rename to pkg/security/risks/built-in/container-platform-escape/container-platform-escape-rule.go index 5daaed1e..b56de4ed 100644 --- a/pkg/risks/built-in/container-platform-escape/container-platform-escape-rule.go +++ b/pkg/security/risks/built-in/container-platform-escape/container-platform-escape-rule.go @@ -1,7 +1,8 @@ package container_platform_escape import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -33,8 +34,8 @@ func Category() model.RiskCategory { "Use only trusted base 
images, verify digital signatures and apply image creation best practices. Also consider using Google's Distroless base images or otherwise very small base images. " + "Apply namespace isolation and nod affinity to separate pods from each other in terms of access and nodes the same style as you separate data.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS or CSVS chapter applied?", - Function: model.Operations, - STRIDE: model.ElevationOfPrivilege, + Function: types.Operations, + STRIDE: types.ElevationOfPrivilege, DetectionLogic: "In-scope container platforms.", RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", FalsePositives: "Container platforms not running parts of the target architecture can be considered " + @@ -48,41 +49,41 @@ func SupportedTags() []string { return []string{"docker", "kubernetes", "openshift"} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { +func GenerateRisks(parsedModel *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := input.TechnicalAssets[id] - if !technicalAsset.OutOfScope && technicalAsset.Technology == model.ContainerPlatform { - risks = append(risks, createRisk(input, technicalAsset)) + for _, id := range parsedModel.SortedTechnicalAssetIDs() { + technicalAsset := parsedModel.TechnicalAssets[id] + if !technicalAsset.OutOfScope && technicalAsset.Technology == types.ContainerPlatform { + risks = append(risks, createRisk(parsedModel, technicalAsset)) } } return risks } -func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset) model.Risk { +func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAsset) model.Risk { title := "Container Platform Escape risk at " + technicalAsset.Title + "" - impact := model.MediumImpact - if technicalAsset.HighestConfidentiality() == 
model.StrictlyConfidential || - technicalAsset.HighestIntegrity() == model.MissionCritical || - technicalAsset.HighestAvailability() == model.MissionCritical { - impact = model.HighImpact + impact := types.MediumImpact + if technicalAsset.HighestConfidentiality(parsedModel) == types.StrictlyConfidential || + technicalAsset.HighestIntegrity(parsedModel) == types.MissionCritical || + technicalAsset.HighestAvailability(parsedModel) == types.MissionCritical { + impact = types.HighImpact } // data breach at all container assets dataBreachTechnicalAssetIDs := make([]string, 0) - for id, techAsset := range input.TechnicalAssets { - if techAsset.Machine == model.Container { + for id, techAsset := range parsedModel.TechnicalAssets { + if techAsset.Machine == types.Container { dataBreachTechnicalAssetIDs = append(dataBreachTechnicalAssetIDs, id) } } // create risk risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, impact), - ExploitationLikelihood: model.Unlikely, + Severity: model.CalculateSeverity(types.Unlikely, impact), + ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Probable, + DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: dataBreachTechnicalAssetIDs, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id diff --git a/pkg/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go b/pkg/security/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go similarity index 74% rename from pkg/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go rename to pkg/security/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go index f0f2c007..6c05f755 100644 --- a/pkg/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go +++ 
b/pkg/security/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go @@ -1,7 +1,8 @@ package cross_site_request_forgery import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -27,8 +28,8 @@ func Category() model.RiskCategory { "the same-site flag. " + "When a third-party product is used instead of custom developed software, check if the product applies the proper mitigation and ensure a reasonable patch-level.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Development, - STRIDE: model.Spoofing, + Function: types.Development, + STRIDE: types.Spoofing, DetectionLogic: "In-scope web applications accessed via typical web access protocols.", RiskAssessment: "The risk rating depends on the integrity rating of the data sent across the communication link.", FalsePositives: "Web applications passing the authentication sate via custom headers instead of cookies can " + @@ -44,33 +45,33 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { +func GenerateRisks(parsedModel *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { - technicalAsset := input.TechnicalAssets[id] + for _, id := range parsedModel.SortedTechnicalAssetIDs() { + technicalAsset := parsedModel.TechnicalAssets[id] if technicalAsset.OutOfScope || !technicalAsset.Technology.IsWebApplication() { continue } - incomingFlows := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] + incomingFlows := parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, incomingFlow := range incomingFlows { if incomingFlow.Protocol.IsPotentialWebAccessProtocol() { - likelihood := model.VeryLikely - if incomingFlow.Usage == 
model.DevOps { - likelihood = model.Likely + likelihood := types.VeryLikely + if incomingFlow.Usage == types.DevOps { + likelihood = types.Likely } - risks = append(risks, createRisk(input, technicalAsset, incomingFlow, likelihood)) + risks = append(risks, createRisk(parsedModel, technicalAsset, incomingFlow, likelihood)) } } } return risks } -func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood model.RiskExploitationLikelihood) model.Risk { - sourceAsset := input.TechnicalAssets[incomingFlow.SourceId] +func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood types.RiskExploitationLikelihood) model.Risk { + sourceAsset := parsedModel.TechnicalAssets[incomingFlow.SourceId] title := "Cross-Site Request Forgery (CSRF) risk at " + technicalAsset.Title + " via " + incomingFlow.Title + " from " + sourceAsset.Title + "" - impact := model.LowImpact - if incomingFlow.HighestIntegrity() == model.MissionCritical { - impact = model.MediumImpact + impact := types.LowImpact + if incomingFlow.HighestIntegrity(parsedModel) == types.MissionCritical { + impact = types.MediumImpact } risk := model.Risk{ Category: Category(), @@ -80,7 +81,7 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, MostRelevantCommunicationLinkId: incomingFlow.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + "@" + incomingFlow.Id diff --git a/pkg/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go b/pkg/security/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go similarity index 78% rename from pkg/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go rename to 
pkg/security/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go index c058122c..836ef20a 100644 --- a/pkg/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go +++ b/pkg/security/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go @@ -1,7 +1,8 @@ package cross_site_scripting import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -26,8 +27,8 @@ func Category() model.RiskCategory { "to avoid DOM-based XSS. " + "When a third-party product is used instead of custom developed software, check if the product applies the proper mitigation and ensure a reasonable patch-level.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Development, - STRIDE: model.Tampering, + Function: types.Development, + STRIDE: types.Tampering, DetectionLogic: "In-scope web applications.", RiskAssessment: "The risk rating depends on the sensitivity of the data processed or stored in the web application.", FalsePositives: "When the technical asset " + @@ -44,30 +45,30 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope || !technicalAsset.Technology.IsWebApplication() { // TODO: also mobile clients or rich-clients as long as they use web-view... 
continue } - risks = append(risks, createRisk(technicalAsset)) + risks = append(risks, createRisk(input, technicalAsset)) } return risks } -func createRisk(technicalAsset model.TechnicalAsset) model.Risk { +func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAsset) model.Risk { title := "Cross-Site Scripting (XSS) risk at " + technicalAsset.Title + "" - impact := model.MediumImpact - if technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || technicalAsset.HighestIntegrity() == model.MissionCritical { - impact = model.HighImpact + impact := types.MediumImpact + if technicalAsset.HighestConfidentiality(parsedModel) == types.StrictlyConfidential || technicalAsset.HighestIntegrity(parsedModel) == types.MissionCritical { + impact = types.HighImpact } risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Likely, impact), - ExploitationLikelihood: model.Likely, + Severity: model.CalculateSeverity(types.Likely, impact), + ExploitationLikelihood: types.Likely, ExploitationImpact: impact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Possible, + DataBreachProbability: types.Possible, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id diff --git a/pkg/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go b/pkg/security/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go similarity index 75% rename from pkg/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go rename to pkg/security/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go index 2d350ce6..d31d5597 100644 --- a/pkg/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go +++ 
b/pkg/security/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go @@ -1,7 +1,8 @@ package dos_risky_access_across_trust_boundary import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -26,16 +27,16 @@ func Category() model.RiskCategory { "Also for maintenance access routes consider applying a VPN instead of public reachable interfaces. " + "Generally applying redundancy on the targeted technical asset reduces the risk of DoS.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Operations, - STRIDE: model.DenialOfService, - DetectionLogic: "In-scope technical assets (excluding " + model.LoadBalancer.String() + ") with " + - "availability rating of " + model.Critical.String() + " or higher which have incoming data-flows across a " + - "network trust-boundary (excluding " + model.DevOps.String() + " usage).", + Function: types.Operations, + STRIDE: types.DenialOfService, + DetectionLogic: "In-scope technical assets (excluding " + types.LoadBalancer.String() + ") with " + + "availability rating of " + types.Critical.String() + " or higher which have incoming data-flows across a " + + "network trust-boundary (excluding " + types.DevOps.String() + " usage).", RiskAssessment: "Matching technical assets with availability rating " + - "of " + model.Critical.String() + " or higher are " + - "at " + model.LowSeverity.String() + " risk. When the availability rating is " + - model.MissionCritical.String() + " and neither a VPN nor IP filter for the incoming data-flow nor redundancy " + - "for the asset is applied, the risk-rating is considered " + model.MediumSeverity.String() + ".", // TODO reduce also, when data-flow authenticated and encrypted? 
+ "of " + types.Critical.String() + " or higher are " + + "at " + types.LowSeverity.String() + " risk. When the availability rating is " + + types.MissionCritical.String() + " and neither a VPN nor IP filter for the incoming data-flow nor redundancy " + + "for the asset is applied, the risk-rating is considered " + types.MediumSeverity.String() + ".", // TODO reduce also, when data-flow authenticated and encrypted? FalsePositives: "When the accessed target operations are not time- or resource-consuming.", ModelFailurePossibleReason: false, CWE: 400, @@ -48,15 +49,15 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] - if !technicalAsset.OutOfScope && technicalAsset.Technology != model.LoadBalancer && - technicalAsset.Availability >= model.Critical { - for _, incomingAccess := range model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] { + if !technicalAsset.OutOfScope && technicalAsset.Technology != types.LoadBalancer && + technicalAsset.Availability >= types.Critical { + for _, incomingAccess := range input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] { sourceAsset := input.TechnicalAssets[incomingAccess.SourceId] if sourceAsset.Technology.IsTrafficForwarding() { // Now try to walk a call chain up (1 hop only) to find a caller's caller used by human - callersCommLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[sourceAsset.Id] + callersCommLinks := input.IncomingTechnicalCommunicationLinksMappedByTargetId[sourceAsset.Id] for _, callersCommLink := range callersCommLinks { risks = checkRisk(input, technicalAsset, callersCommLink, sourceAsset.Title, risks) } @@ -70,9 +71,9 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { } func checkRisk(input *model.ParsedModel, 
technicalAsset model.TechnicalAsset, incomingAccess model.CommunicationLink, hopBetween string, risks []model.Risk) []model.Risk { - if incomingAccess.IsAcrossTrustBoundaryNetworkOnly() && - !incomingAccess.Protocol.IsProcessLocal() && incomingAccess.Usage != model.DevOps { - highRisk := technicalAsset.Availability == model.MissionCritical && + if incomingAccess.IsAcrossTrustBoundaryNetworkOnly(input) && + !incomingAccess.Protocol.IsProcessLocal() && incomingAccess.Usage != types.DevOps { + highRisk := technicalAsset.Availability == types.MissionCritical && !incomingAccess.VPN && !incomingAccess.IpFiltered && !technicalAsset.Redundant risks = append(risks, createRisk(technicalAsset, incomingAccess, hopBetween, input.TechnicalAssets[incomingAccess.SourceId], highRisk)) @@ -82,23 +83,23 @@ func checkRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, in func createRisk(techAsset model.TechnicalAsset, dataFlow model.CommunicationLink, hopBetween string, clientOutsideTrustBoundary model.TechnicalAsset, moreRisky bool) model.Risk { - impact := model.LowImpact + impact := types.LowImpact if moreRisky { - impact = model.MediumImpact + impact = types.MediumImpact } if len(hopBetween) > 0 { hopBetween = " forwarded via " + hopBetween + "" } risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, impact), - ExploitationLikelihood: model.Unlikely, + Severity: model.CalculateSeverity(types.Unlikely, impact), + ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: "Denial-of-Service risky access of " + techAsset.Title + " by " + clientOutsideTrustBoundary.Title + " via " + dataFlow.Title + "" + hopBetween, MostRelevantTechnicalAssetId: techAsset.Id, MostRelevantCommunicationLinkId: dataFlow.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{}, } risk.SyntheticId = risk.Category.Id + "@" + techAsset.Id + "@" + 
clientOutsideTrustBoundary.Id + "@" + dataFlow.Id diff --git a/pkg/risks/built-in/incomplete-model/incomplete-model-rule.go b/pkg/security/risks/built-in/incomplete-model/incomplete-model-rule.go similarity index 76% rename from pkg/risks/built-in/incomplete-model/incomplete-model-rule.go rename to pkg/security/risks/built-in/incomplete-model/incomplete-model-rule.go index 18e2621a..5b51c5ee 100644 --- a/pkg/risks/built-in/incomplete-model/incomplete-model-rule.go +++ b/pkg/security/risks/built-in/incomplete-model/incomplete-model-rule.go @@ -1,7 +1,8 @@ package incomplete_model import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -24,10 +25,10 @@ func Category() model.RiskCategory { Action: "Threat Modeling Completeness", Mitigation: "Try to find out what technology or protocol is used instead of specifying that it is unknown.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Architecture, - STRIDE: model.InformationDisclosure, + Function: types.Architecture, + STRIDE: types.InformationDisclosure, DetectionLogic: "All technical assets and communication links with technology type or protocol type specified as unknown.", - RiskAssessment: model.LowSeverity.String(), + RiskAssessment: types.LowSeverity.String(), FalsePositives: "Usually no false positives as this looks like an incomplete model.", ModelFailurePossibleReason: true, CWE: 1008, @@ -40,14 +41,14 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope { - if technicalAsset.Technology == model.UnknownTechnology { + if technicalAsset.Technology == 
types.UnknownTechnology { risks = append(risks, createRiskTechAsset(technicalAsset)) } for _, commLink := range technicalAsset.CommunicationLinks { - if commLink.Protocol == model.UnknownProtocol { + if commLink.Protocol == types.UnknownProtocol { risks = append(risks, createRiskCommLink(technicalAsset, commLink)) } } @@ -60,12 +61,12 @@ func createRiskTechAsset(technicalAsset model.TechnicalAsset) model.Risk { title := "Unknown Technology specified at technical asset " + technicalAsset.Title + "" risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, model.LowImpact), - ExploitationLikelihood: model.Unlikely, - ExploitationImpact: model.LowImpact, + Severity: model.CalculateSeverity(types.Unlikely, types.LowImpact), + ExploitationLikelihood: types.Unlikely, + ExploitationImpact: types.LowImpact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id @@ -76,13 +77,13 @@ func createRiskCommLink(technicalAsset model.TechnicalAsset, commLink model.Comm title := "Unknown Protocol specified for communication link " + commLink.Title + " at technical asset " + technicalAsset.Title + "" risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, model.LowImpact), - ExploitationLikelihood: model.Unlikely, - ExploitationImpact: model.LowImpact, + Severity: model.CalculateSeverity(types.Unlikely, types.LowImpact), + ExploitationLikelihood: types.Unlikely, + ExploitationImpact: types.LowImpact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, MostRelevantCommunicationLinkId: commLink.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + 
commLink.Id + "@" + technicalAsset.Id diff --git a/pkg/risks/built-in/ldap-injection/ldap-injection-rule.go b/pkg/security/risks/built-in/ldap-injection/ldap-injection-rule.go similarity index 81% rename from pkg/risks/built-in/ldap-injection/ldap-injection-rule.go rename to pkg/security/risks/built-in/ldap-injection/ldap-injection-rule.go index 1deabfbc..2b17e9e7 100644 --- a/pkg/risks/built-in/ldap-injection/ldap-injection-rule.go +++ b/pkg/security/risks/built-in/ldap-injection/ldap-injection-rule.go @@ -1,7 +1,8 @@ package ldap_injection import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -26,8 +27,8 @@ func Category() model.RiskCategory { "the LDAP sever in order to stay safe from LDAP-Injection vulnerabilities. " + "When a third-party product is used instead of custom developed software, check if the product applies the proper mitigation and ensure a reasonable patch-level.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Development, - STRIDE: model.Tampering, + Function: types.Development, + STRIDE: types.Tampering, DetectionLogic: "In-scope clients accessing LDAP servers via typical LDAP access protocols.", RiskAssessment: "The risk rating depends on the sensitivity of the LDAP server itself and of the data assets processed or stored.", FalsePositives: "LDAP server queries by search values not consisting of parts controllable by the caller can be considered " + @@ -40,15 +41,15 @@ func Category() model.RiskCategory { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, technicalAsset := range input.TechnicalAssets { - incomingFlows := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] + incomingFlows := input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, 
incomingFlow := range incomingFlows { if input.TechnicalAssets[incomingFlow.SourceId].OutOfScope { continue } - if incomingFlow.Protocol == model.LDAP || incomingFlow.Protocol == model.LDAPS { - likelihood := model.Likely - if incomingFlow.Usage == model.DevOps { - likelihood = model.Unlikely + if incomingFlow.Protocol == types.LDAP || incomingFlow.Protocol == types.LDAPS { + likelihood := types.Likely + if incomingFlow.Usage == types.DevOps { + likelihood = types.Unlikely } risks = append(risks, createRisk(input, technicalAsset, incomingFlow, likelihood)) } @@ -61,13 +62,13 @@ func SupportedTags() []string { return []string{} } -func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood model.RiskExploitationLikelihood) model.Risk { +func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood types.RiskExploitationLikelihood) model.Risk { caller := input.TechnicalAssets[incomingFlow.SourceId] title := "LDAP-Injection risk at " + caller.Title + " against LDAP server " + technicalAsset.Title + "" + " via " + incomingFlow.Title + "" - impact := model.MediumImpact - if technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || technicalAsset.HighestIntegrity() == model.MissionCritical { - impact = model.HighImpact + impact := types.MediumImpact + if technicalAsset.HighestConfidentiality(input) == types.StrictlyConfidential || technicalAsset.HighestIntegrity(input) == types.MissionCritical { + impact = types.HighImpact } risk := model.Risk{ Category: Category(), @@ -77,7 +78,7 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i Title: title, MostRelevantTechnicalAssetId: caller.Id, MostRelevantCommunicationLinkId: incomingFlow.Id, - DataBreachProbability: model.Probable, + DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = 
risk.Category.Id + "@" + caller.Id + "@" + technicalAsset.Id + "@" + incomingFlow.Id diff --git a/pkg/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go b/pkg/security/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go similarity index 66% rename from pkg/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go rename to pkg/security/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go index 49b3ef57..8a2a41d4 100644 --- a/pkg/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go +++ b/pkg/security/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go @@ -1,8 +1,9 @@ package missing_authentication_second_factor import ( - "github.com/threagile/threagile/model" - "github.com/threagile/threagile/pkg/risks/built-in/missing-authentication" + "github.com/threagile/threagile/pkg/model" + missing_authentication "github.com/threagile/threagile/pkg/security/risks/built-in/missing-authentication" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -26,11 +27,11 @@ func Category() model.RiskCategory { Mitigation: "Apply an authentication method to the technical asset protecting highly sensitive data via " + "two-factor authentication for human users.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.BusinessSide, - STRIDE: model.ElevationOfPrivilege, - DetectionLogic: "In-scope technical assets (except " + model.LoadBalancer.String() + ", " + model.ReverseProxy.String() + ", " + model.WAF.String() + ", " + model.IDS.String() + ", and " + model.IPS.String() + ") should authenticate incoming requests via two-factor authentication (2FA) " + + Function: types.BusinessSide, + STRIDE: types.ElevationOfPrivilege, + 
DetectionLogic: "In-scope technical assets (except " + types.LoadBalancer.String() + ", " + types.ReverseProxy.String() + ", " + types.WAF.String() + ", " + types.IDS.String() + ", and " + types.IPS.String() + ") should authenticate incoming requests via two-factor authentication (2FA) " + "when the asset processes or stores highly sensitive data (in terms of confidentiality, integrity, and availability) and is accessed by a client used by a human user.", - RiskAssessment: model.MediumSeverity.String(), + RiskAssessment: types.MediumSeverity.String(), FalsePositives: "Technical assets which do not process requests regarding functionality or data linked to end-users (customers) " + "can be considered as false positives after individual review.", ModelFailurePossibleReason: false, @@ -44,43 +45,43 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope || technicalAsset.Technology.IsTrafficForwarding() || technicalAsset.Technology.IsUnprotectedCommunicationsTolerated() { continue } - if technicalAsset.HighestConfidentiality() >= model.Confidential || - technicalAsset.HighestIntegrity() >= model.Critical || - technicalAsset.HighestAvailability() >= model.Critical || + if technicalAsset.HighestConfidentiality(input) >= types.Confidential || + technicalAsset.HighestIntegrity(input) >= types.Critical || + technicalAsset.HighestAvailability(input) >= types.Critical || technicalAsset.MultiTenant { // check each incoming data flow - commLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] + commLinks := input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, commLink := range commLinks { caller := input.TechnicalAssets[commLink.SourceId] - if 
caller.Technology.IsUnprotectedCommunicationsTolerated() || caller.Type == model.Datastore { + if caller.Technology.IsUnprotectedCommunicationsTolerated() || caller.Type == types.Datastore { continue } if caller.UsedAsClientByHuman { - moreRisky := commLink.HighestConfidentiality() >= model.Confidential || - commLink.HighestIntegrity() >= model.Critical - if moreRisky && commLink.Authentication != model.TwoFactor { - risks = append(risks, missing_authentication.CreateRisk(input, technicalAsset, commLink, commLink, "", model.MediumImpact, model.Unlikely, true, Category())) + moreRisky := commLink.HighestConfidentiality(input) >= types.Confidential || + commLink.HighestIntegrity(input) >= types.Critical + if moreRisky && commLink.Authentication != types.TwoFactor { + risks = append(risks, missing_authentication.CreateRisk(input, technicalAsset, commLink, commLink, "", types.MediumImpact, types.Unlikely, true, Category())) } } else if caller.Technology.IsTrafficForwarding() { // Now try to walk a call chain up (1 hop only) to find a caller's caller used by human - callersCommLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[caller.Id] + callersCommLinks := input.IncomingTechnicalCommunicationLinksMappedByTargetId[caller.Id] for _, callersCommLink := range callersCommLinks { callersCaller := input.TechnicalAssets[callersCommLink.SourceId] - if callersCaller.Technology.IsUnprotectedCommunicationsTolerated() || callersCaller.Type == model.Datastore { + if callersCaller.Technology.IsUnprotectedCommunicationsTolerated() || callersCaller.Type == types.Datastore { continue } if callersCaller.UsedAsClientByHuman { - moreRisky := callersCommLink.HighestConfidentiality() >= model.Confidential || - callersCommLink.HighestIntegrity() >= model.Critical - if moreRisky && callersCommLink.Authentication != model.TwoFactor { - risks = append(risks, missing_authentication.CreateRisk(input, technicalAsset, commLink, callersCommLink, caller.Title, model.MediumImpact, 
model.Unlikely, true, Category())) + moreRisky := callersCommLink.HighestConfidentiality(input) >= types.Confidential || + callersCommLink.HighestIntegrity(input) >= types.Critical + if moreRisky && callersCommLink.Authentication != types.TwoFactor { + risks = append(risks, missing_authentication.CreateRisk(input, technicalAsset, commLink, callersCommLink, caller.Title, types.MediumImpact, types.Unlikely, true, Category())) } } } diff --git a/pkg/risks/built-in/missing-authentication/missing-authentication-rule.go b/pkg/security/risks/built-in/missing-authentication/missing-authentication-rule.go similarity index 68% rename from pkg/risks/built-in/missing-authentication/missing-authentication-rule.go rename to pkg/security/risks/built-in/missing-authentication/missing-authentication-rule.go index be1070a1..ddd8474b 100644 --- a/pkg/risks/built-in/missing-authentication/missing-authentication-rule.go +++ b/pkg/security/risks/built-in/missing-authentication/missing-authentication-rule.go @@ -1,7 +1,8 @@ package missing_authentication import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -24,9 +25,9 @@ func Category() model.RiskCategory { Mitigation: "Apply an authentication method to the technical asset. 
To protect highly sensitive data consider " + "the use of two-factor authentication for human users.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Architecture, - STRIDE: model.ElevationOfPrivilege, - DetectionLogic: "In-scope technical assets (except " + model.LoadBalancer.String() + ", " + model.ReverseProxy.String() + ", " + model.ServiceRegistry.String() + ", " + model.WAF.String() + ", " + model.IDS.String() + ", and " + model.IPS.String() + " and in-process calls) should authenticate incoming requests when the asset processes or stores " + + Function: types.Architecture, + STRIDE: types.ElevationOfPrivilege, + DetectionLogic: "In-scope technical assets (except " + types.LoadBalancer.String() + ", " + types.ReverseProxy.String() + ", " + types.ServiceRegistry.String() + ", " + types.WAF.String() + ", " + types.IDS.String() + ", and " + types.IPS.String() + " and in-process calls) should authenticate incoming requests when the asset processes or stores " + "sensitive data. This is especially the case for all multi-tenant assets (there even non-sensitive ones).", RiskAssessment: "The risk rating (medium or high) " + "depends on the sensitivity of the data sent across the communication link. 
Monitoring callers are exempted from this risk.", @@ -43,35 +44,35 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] - if technicalAsset.OutOfScope || technicalAsset.Technology == model.LoadBalancer || - technicalAsset.Technology == model.ReverseProxy || technicalAsset.Technology == model.ServiceRegistry || technicalAsset.Technology == model.WAF || technicalAsset.Technology == model.IDS || technicalAsset.Technology == model.IPS { + if technicalAsset.OutOfScope || technicalAsset.Technology == types.LoadBalancer || + technicalAsset.Technology == types.ReverseProxy || technicalAsset.Technology == types.ServiceRegistry || technicalAsset.Technology == types.WAF || technicalAsset.Technology == types.IDS || technicalAsset.Technology == types.IPS { continue } - if technicalAsset.HighestConfidentiality() >= model.Confidential || - technicalAsset.HighestIntegrity() >= model.Critical || - technicalAsset.HighestAvailability() >= model.Critical || + if technicalAsset.HighestConfidentiality(input) >= types.Confidential || + technicalAsset.HighestIntegrity(input) >= types.Critical || + technicalAsset.HighestAvailability(input) >= types.Critical || technicalAsset.MultiTenant { // check each incoming data flow - commLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] + commLinks := input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, commLink := range commLinks { caller := input.TechnicalAssets[commLink.SourceId] - if caller.Technology.IsUnprotectedCommunicationsTolerated() || caller.Type == model.Datastore { + if caller.Technology.IsUnprotectedCommunicationsTolerated() || caller.Type == types.Datastore { continue } - highRisk := commLink.HighestConfidentiality() == model.StrictlyConfidential 
|| - commLink.HighestIntegrity() == model.MissionCritical - lowRisk := commLink.HighestConfidentiality() <= model.Internal && - commLink.HighestIntegrity() == model.Operational - impact := model.MediumImpact + highRisk := commLink.HighestConfidentiality(input) == types.StrictlyConfidential || + commLink.HighestIntegrity(input) == types.MissionCritical + lowRisk := commLink.HighestConfidentiality(input) <= types.Internal && + commLink.HighestIntegrity(input) == types.Operational + impact := types.MediumImpact if highRisk { - impact = model.HighImpact + impact = types.HighImpact } else if lowRisk { - impact = model.LowImpact + impact = types.LowImpact } - if commLink.Authentication == model.NoneAuthentication && !commLink.Protocol.IsProcessLocal() { - risks = append(risks, CreateRisk(input, technicalAsset, commLink, commLink, "", impact, model.Likely, false, Category())) + if commLink.Authentication == types.NoneAuthentication && !commLink.Protocol.IsProcessLocal() { + risks = append(risks, CreateRisk(input, technicalAsset, commLink, commLink, "", impact, types.Likely, false, Category())) } } } @@ -80,7 +81,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { } func CreateRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingAccess, incomingAccessOrigin model.CommunicationLink, hopBetween string, - impact model.RiskExploitationImpact, likelihood model.RiskExploitationLikelihood, twoFactor bool, category model.RiskCategory) model.Risk { + impact types.RiskExploitationImpact, likelihood types.RiskExploitationLikelihood, twoFactor bool, category model.RiskCategory) model.Risk { factorString := "" if twoFactor { factorString = "Two-Factor " @@ -98,7 +99,7 @@ func CreateRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i "to " + technicalAsset.Title + "", MostRelevantTechnicalAssetId: technicalAsset.Id, MostRelevantCommunicationLinkId: incomingAccess.Id, - DataBreachProbability: model.Possible, + DataBreachProbability: 
types.Possible, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + incomingAccess.Id + "@" + input.TechnicalAssets[incomingAccess.SourceId].Id + "@" + technicalAsset.Id diff --git a/pkg/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go b/pkg/security/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go similarity index 74% rename from pkg/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go rename to pkg/security/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go index 02319c75..e6d0447c 100644 --- a/pkg/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go +++ b/pkg/security/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go @@ -1,7 +1,8 @@ package missing_build_infrastructure import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -27,8 +28,8 @@ func Category() model.RiskCategory { Action: "Build Pipeline Hardening", Mitigation: "Include the build infrastructure in the model.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Architecture, - STRIDE: model.Tampering, + Function: types.Architecture, + STRIDE: types.Tampering, DetectionLogic: "Models with in-scope custom-developed parts missing in-scope development (code creation) and build infrastructure " + "components (devops-client, sourcecode-repo, build-pipeline, etc.).", RiskAssessment: "The risk rating depends on the highest sensitivity of the in-scope assets running custom-developed parts.", @@ -46,37 +47,37 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) hasCustomDevelopedParts, hasBuildPipeline, 
hasSourcecodeRepo, hasDevOpsClient := false, false, false, false - impact := model.LowImpact + impact := types.LowImpact var mostRelevantAsset model.TechnicalAsset - for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset + for _, id := range input.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset technicalAsset := input.TechnicalAssets[id] if technicalAsset.CustomDevelopedParts && !technicalAsset.OutOfScope { hasCustomDevelopedParts = true - if impact == model.LowImpact { + if impact == types.LowImpact { mostRelevantAsset = technicalAsset - if technicalAsset.HighestConfidentiality() >= model.Confidential || - technicalAsset.HighestIntegrity() >= model.Critical || - technicalAsset.HighestAvailability() >= model.Critical { - impact = model.MediumImpact + if technicalAsset.HighestConfidentiality(input) >= types.Confidential || + technicalAsset.HighestIntegrity(input) >= types.Critical || + technicalAsset.HighestAvailability(input) >= types.Critical { + impact = types.MediumImpact } } - if technicalAsset.Confidentiality >= model.Confidential || - technicalAsset.Integrity >= model.Critical || - technicalAsset.Availability >= model.Critical { - impact = model.MediumImpact + if technicalAsset.Confidentiality >= types.Confidential || + technicalAsset.Integrity >= types.Critical || + technicalAsset.Availability >= types.Critical { + impact = types.MediumImpact } // just for referencing the most interesting asset if technicalAsset.HighestSensitivityScore() > mostRelevantAsset.HighestSensitivityScore() { mostRelevantAsset = technicalAsset } } - if technicalAsset.Technology == model.BuildPipeline { + if technicalAsset.Technology == types.BuildPipeline { hasBuildPipeline = true } - if technicalAsset.Technology == model.SourcecodeRepository { + if technicalAsset.Technology == types.SourcecodeRepository 
{ hasSourcecodeRepo = true } - if technicalAsset.Technology == model.DevOpsClient { + if technicalAsset.Technology == types.DevOpsClient { hasDevOpsClient = true } } @@ -87,16 +88,16 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(technicalAsset model.TechnicalAsset, impact model.RiskExploitationImpact) model.Risk { +func createRisk(technicalAsset model.TechnicalAsset, impact types.RiskExploitationImpact) model.Risk { title := "Missing Build Infrastructure in the threat model (referencing asset " + technicalAsset.Title + " as an example)" risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, impact), - ExploitationLikelihood: model.Unlikely, + Severity: model.CalculateSeverity(types.Unlikely, impact), + ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{}, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id diff --git a/pkg/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go b/pkg/security/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go similarity index 79% rename from pkg/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go rename to pkg/security/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go index 6f800a9d..00339cfa 100644 --- a/pkg/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go +++ b/pkg/security/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go @@ -1,8 +1,10 @@ package missing_cloud_hardening import ( - "github.com/threagile/threagile/model" "sort" + + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -33,8 +35,8 @@ func Category() model.RiskCategory { 
"

For Google Cloud Platform: Follow the CIS Benchmark for Google Cloud Computing Platform (see also the automated checks of cloud audit tools like \"CloudSploit\" or \"ScoutSuite\"). " + "

For Oracle Cloud Platform: Follow the hardening best practices (see also the automated checks of cloud audit tools like \"CloudSploit\").", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Operations, - STRIDE: model.Tampering, + Function: types.Operations, + STRIDE: types.Tampering, DetectionLogic: "In-scope cloud components (either residing in cloud trust boundaries or more specifically tagged with cloud provider types).", RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", FalsePositives: "Cloud components not running parts of the target architecture can be considered " + @@ -87,7 +89,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { if taggedOuterTB || trustBoundary.Type.IsWithinCloud() { addTrustBoundaryAccordingToBaseTag(trustBoundary, trustBoundariesWithUnspecificCloudRisks, trustBoundaryIDsAWS, trustBoundaryIDsAzure, trustBoundaryIDsGCP, trustBoundaryIDsOCP) - for _, techAssetID := range trustBoundary.RecursivelyAllTechnicalAssetIDsInside() { + for _, techAssetID := range trustBoundary.RecursivelyAllTechnicalAssetIDsInside(input) { added := false tA := input.TechnicalAssets[techAssetID] if tA.IsTaggedWithAny(SupportedTags()...) { @@ -109,13 +111,13 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { } // now loop over all technical assets, trust boundaries, and shared runtimes model-wide by tag - for _, tA := range model.TechnicalAssetsTaggedWithAny(SupportedTags()...) { + for _, tA := range input.TechnicalAssetsTaggedWithAny(SupportedTags()...) { addAccordingToBaseTag(tA, tA.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) } - for _, tB := range model.TrustBoundariesTaggedWithAny(SupportedTags()...) 
{ - for _, candidateID := range tB.RecursivelyAllTechnicalAssetIDsInside() { + for _, tB := range input.TrustBoundariesTaggedWithAny(SupportedTags()...) { + for _, candidateID := range tB.RecursivelyAllTechnicalAssetIDsInside(input) { tA := input.TechnicalAssets[candidateID] if tA.IsTaggedWithAny(SupportedTags()...) { addAccordingToBaseTag(tA, tA.Tags, @@ -128,7 +130,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { } } } - for _, sR := range model.SharedRuntimesTaggedWithAny(SupportedTags()...) { + for _, sR := range input.SharedRuntimesTaggedWithAny(SupportedTags()...) { addSharedRuntimeAccordingToBaseTag(sR, sharedRuntimesWithUnspecificCloudRisks, sharedRuntimeIDsAWS, sharedRuntimeIDsAzure, sharedRuntimeIDsGCP, sharedRuntimeIDsOCP) for _, candidateID := range sR.TechnicalAssetsRunning { @@ -210,30 +212,30 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { // ... followed by trust boundaries for the generic risks for id := range trustBoundaryIDsAWS { - risks = append(risks, createRiskForTrustBoundary(input.TrustBoundaries[id], "AWS", "CIS Benchmark for AWS")) + risks = append(risks, createRiskForTrustBoundary(input, input.TrustBoundaries[id], "AWS", "CIS Benchmark for AWS")) addedAWS = true } for id := range trustBoundaryIDsAzure { - risks = append(risks, createRiskForTrustBoundary(input.TrustBoundaries[id], "Azure", "CIS Benchmark for Microsoft Azure")) + risks = append(risks, createRiskForTrustBoundary(input, input.TrustBoundaries[id], "Azure", "CIS Benchmark for Microsoft Azure")) addedAzure = true } for id := range trustBoundaryIDsGCP { - risks = append(risks, createRiskForTrustBoundary(input.TrustBoundaries[id], "GCP", "CIS Benchmark for Google Cloud Computing Platform")) + risks = append(risks, createRiskForTrustBoundary(input, input.TrustBoundaries[id], "GCP", "CIS Benchmark for Google Cloud Computing Platform")) addedGCP = true } for id := range trustBoundaryIDsOCP { - risks = append(risks, 
createRiskForTrustBoundary(input.TrustBoundaries[id], "OCP", "Vendor Best Practices for Oracle Cloud Platform")) + risks = append(risks, createRiskForTrustBoundary(input, input.TrustBoundaries[id], "OCP", "Vendor Best Practices for Oracle Cloud Platform")) addedOCP = true } for id := range trustBoundariesWithUnspecificCloudRisks { - risks = append(risks, createRiskForTrustBoundary(input.TrustBoundaries[id], "", "")) + risks = append(risks, createRiskForTrustBoundary(input, input.TrustBoundaries[id], "", "")) } // just use the most sensitive asset as an example - to only create one general "AWS cloud hardening" risk, not many if !addedAWS { mostRelevantAsset := findMostSensitiveTechnicalAsset(input, techAssetIDsAWS) if !mostRelevantAsset.IsZero() { - risks = append(risks, createRiskForTechnicalAsset(mostRelevantAsset, "AWS", "CIS Benchmark for AWS")) + risks = append(risks, createRiskForTechnicalAsset(input, mostRelevantAsset, "AWS", "CIS Benchmark for AWS")) addedAWS = true } } @@ -241,7 +243,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { if !addedAzure { mostRelevantAsset := findMostSensitiveTechnicalAsset(input, techAssetIDsAzure) if !mostRelevantAsset.IsZero() { - risks = append(risks, createRiskForTechnicalAsset(mostRelevantAsset, "Azure", "CIS Benchmark for Microsoft Azure")) + risks = append(risks, createRiskForTechnicalAsset(input, mostRelevantAsset, "Azure", "CIS Benchmark for Microsoft Azure")) addedAzure = true } } @@ -249,7 +251,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { if !addedGCP { mostRelevantAsset := findMostSensitiveTechnicalAsset(input, techAssetIDsGCP) if !mostRelevantAsset.IsZero() { - risks = append(risks, createRiskForTechnicalAsset(mostRelevantAsset, "GCP", "CIS Benchmark for Google Cloud Computing Platform")) + risks = append(risks, createRiskForTechnicalAsset(input, mostRelevantAsset, "GCP", "CIS Benchmark for Google Cloud Computing Platform")) addedGCP = true } } @@ -257,7 +259,7 @@ func 
GenerateRisks(input *model.ParsedModel) []model.Risk { if !addedOCP { mostRelevantAsset := findMostSensitiveTechnicalAsset(input, techAssetIDsOCP) if !mostRelevantAsset.IsZero() { - risks = append(risks, createRiskForTechnicalAsset(mostRelevantAsset, "OCP", "Vendor Best Practices for Oracle Cloud Platform")) + risks = append(risks, createRiskForTechnicalAsset(input, mostRelevantAsset, "OCP", "Vendor Best Practices for Oracle Cloud Platform")) addedOCP = true } } @@ -265,11 +267,11 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { // now also add all tech asset specific tag-specific risks, as they are specific to the asset anyway (therefore don't set added to true here) for id := range techAssetIDsWithSubtagSpecificCloudRisks { tA := input.TechnicalAssets[id] - if tA.IsTaggedWithAnyTraversingUp("aws:ec2") { - risks = append(risks, createRiskForTechnicalAsset(tA, "EC2", "CIS Benchmark for Amazon Linux")) + if tA.IsTaggedWithAnyTraversingUp(input, "aws:ec2") { + risks = append(risks, createRiskForTechnicalAsset(input, tA, "EC2", "CIS Benchmark for Amazon Linux")) } - if tA.IsTaggedWithAnyTraversingUp("aws:s3") { - risks = append(risks, createRiskForTechnicalAsset(tA, "S3", "Security Best Practices for AWS S3")) + if tA.IsTaggedWithAnyTraversingUp(input, "aws:s3") { + risks = append(risks, createRiskForTechnicalAsset(input, tA, "S3", "Security Best Practices for AWS S3")) } // TODO add more tag-specific risks like also for aws:lambda etc. 
here } @@ -372,33 +374,33 @@ func createRiskForSharedRuntime(input *model.ParsedModel, sharedRuntime model.Sh if len(details) > 0 { title += ": " + details + "" } - impact := model.MediumImpact - if sharedRuntime.HighestConfidentiality() >= model.Confidential || - sharedRuntime.HighestIntegrity() >= model.Critical || - sharedRuntime.HighestAvailability() >= model.Critical { - impact = model.HighImpact + impact := types.MediumImpact + if sharedRuntime.HighestConfidentiality(input) >= types.Confidential || + sharedRuntime.HighestIntegrity(input) >= types.Critical || + sharedRuntime.HighestAvailability(input) >= types.Critical { + impact = types.HighImpact } - if sharedRuntime.HighestConfidentiality() == model.StrictlyConfidential || - sharedRuntime.HighestIntegrity() == model.MissionCritical || - sharedRuntime.HighestAvailability() == model.MissionCritical { - impact = model.VeryHighImpact + if sharedRuntime.HighestConfidentiality(input) == types.StrictlyConfidential || + sharedRuntime.HighestIntegrity(input) == types.MissionCritical || + sharedRuntime.HighestAvailability(input) == types.MissionCritical { + impact = types.VeryHighImpact } // create risk risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, impact), - ExploitationLikelihood: model.Unlikely, + Severity: model.CalculateSeverity(types.Unlikely, impact), + ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, MostRelevantSharedRuntimeId: sharedRuntime.Id, - DataBreachProbability: model.Probable, + DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: sharedRuntime.TechnicalAssetsRunning, } risk.SyntheticId = risk.Category.Id + "@" + sharedRuntime.Id return risk } -func createRiskForTrustBoundary(trustBoundary model.TrustBoundary, prefix, details string) model.Risk { +func createRiskForTrustBoundary(parsedModel *model.ParsedModel, trustBoundary model.TrustBoundary, prefix, details string) model.Risk { if len(prefix) > 0 { 
prefix = " (" + prefix + ")" } @@ -406,33 +408,33 @@ func createRiskForTrustBoundary(trustBoundary model.TrustBoundary, prefix, detai if len(details) > 0 { title += ": " + details + "" } - impact := model.MediumImpact - if trustBoundary.HighestConfidentiality() >= model.Confidential || - trustBoundary.HighestIntegrity() >= model.Critical || - trustBoundary.HighestAvailability() >= model.Critical { - impact = model.HighImpact + impact := types.MediumImpact + if trustBoundary.HighestConfidentiality(parsedModel) >= types.Confidential || + trustBoundary.HighestIntegrity(parsedModel) >= types.Critical || + trustBoundary.HighestAvailability(parsedModel) >= types.Critical { + impact = types.HighImpact } - if trustBoundary.HighestConfidentiality() == model.StrictlyConfidential || - trustBoundary.HighestIntegrity() == model.MissionCritical || - trustBoundary.HighestAvailability() == model.MissionCritical { - impact = model.VeryHighImpact + if trustBoundary.HighestConfidentiality(parsedModel) == types.StrictlyConfidential || + trustBoundary.HighestIntegrity(parsedModel) == types.MissionCritical || + trustBoundary.HighestAvailability(parsedModel) == types.MissionCritical { + impact = types.VeryHighImpact } // create risk risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, impact), - ExploitationLikelihood: model.Unlikely, + Severity: model.CalculateSeverity(types.Unlikely, impact), + ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, MostRelevantTrustBoundaryId: trustBoundary.Id, - DataBreachProbability: model.Probable, - DataBreachTechnicalAssetIDs: trustBoundary.RecursivelyAllTechnicalAssetIDsInside(), + DataBreachProbability: types.Probable, + DataBreachTechnicalAssetIDs: trustBoundary.RecursivelyAllTechnicalAssetIDsInside(parsedModel), } risk.SyntheticId = risk.Category.Id + "@" + trustBoundary.Id return risk } -func createRiskForTechnicalAsset(technicalAsset model.TechnicalAsset, prefix, details 
string) model.Risk { +func createRiskForTechnicalAsset(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAsset, prefix, details string) model.Risk { if len(prefix) > 0 { prefix = " (" + prefix + ")" } @@ -440,26 +442,26 @@ func createRiskForTechnicalAsset(technicalAsset model.TechnicalAsset, prefix, de if len(details) > 0 { title += ": " + details + "" } - impact := model.MediumImpact - if technicalAsset.HighestConfidentiality() >= model.Confidential || - technicalAsset.HighestIntegrity() >= model.Critical || - technicalAsset.HighestAvailability() >= model.Critical { - impact = model.HighImpact + impact := types.MediumImpact + if technicalAsset.HighestConfidentiality(parsedModel) >= types.Confidential || + technicalAsset.HighestIntegrity(parsedModel) >= types.Critical || + technicalAsset.HighestAvailability(parsedModel) >= types.Critical { + impact = types.HighImpact } - if technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || - technicalAsset.HighestIntegrity() == model.MissionCritical || - technicalAsset.HighestAvailability() == model.MissionCritical { - impact = model.VeryHighImpact + if technicalAsset.HighestConfidentiality(parsedModel) == types.StrictlyConfidential || + technicalAsset.HighestIntegrity(parsedModel) == types.MissionCritical || + technicalAsset.HighestAvailability(parsedModel) == types.MissionCritical { + impact = types.VeryHighImpact } // create risk risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, impact), - ExploitationLikelihood: model.Unlikely, + Severity: model.CalculateSeverity(types.Unlikely, impact), + ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Probable, + DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id diff --git 
a/pkg/risks/built-in/missing-file-validation/missing-file-validation-rule.go b/pkg/security/risks/built-in/missing-file-validation/missing-file-validation-rule.go similarity index 76% rename from pkg/risks/built-in/missing-file-validation/missing-file-validation-rule.go rename to pkg/security/risks/built-in/missing-file-validation/missing-file-validation-rule.go index 01797023..342c7fcc 100644 --- a/pkg/risks/built-in/missing-file-validation/missing-file-validation-rule.go +++ b/pkg/security/risks/built-in/missing-file-validation/missing-file-validation-rule.go @@ -1,7 +1,8 @@ package missing_file_validation import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -27,8 +28,8 @@ func Category() model.RiskCategory { "were uploaded, also apply a fresh malware scan during retrieval to scan with newer signatures of popular malware). Also enforce " + "limits on maximum file size to avoid denial-of-service like scenarios.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Development, - STRIDE: model.Spoofing, + Function: types.Development, + STRIDE: types.Spoofing, DetectionLogic: "In-scope technical assets with custom-developed code accepting file data formats.", RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", FalsePositives: "Fully trusted (i.e. 
cryptographically signed or similar) files can be considered " + @@ -44,36 +45,36 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope || !technicalAsset.CustomDevelopedParts { continue } for _, format := range technicalAsset.DataFormatsAccepted { - if format == model.File { - risks = append(risks, createRisk(technicalAsset)) + if format == types.File { + risks = append(risks, createRisk(input, technicalAsset)) } } } return risks } -func createRisk(technicalAsset model.TechnicalAsset) model.Risk { +func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset) model.Risk { title := "Missing File Validation risk at " + technicalAsset.Title + "" - impact := model.LowImpact - if technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || - technicalAsset.HighestIntegrity() == model.MissionCritical || - technicalAsset.HighestAvailability() == model.MissionCritical { - impact = model.MediumImpact + impact := types.LowImpact + if technicalAsset.HighestConfidentiality(input) == types.StrictlyConfidential || + technicalAsset.HighestIntegrity(input) == types.MissionCritical || + technicalAsset.HighestAvailability(input) == types.MissionCritical { + impact = types.MediumImpact } risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.VeryLikely, impact), - ExploitationLikelihood: model.VeryLikely, + Severity: model.CalculateSeverity(types.VeryLikely, impact), + ExploitationLikelihood: types.VeryLikely, ExploitationImpact: impact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Probable, + DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + 
"@" + technicalAsset.Id diff --git a/pkg/risks/built-in/missing-hardening/missing-hardening-rule.go b/pkg/security/risks/built-in/missing-hardening/missing-hardening-rule.go similarity index 73% rename from pkg/risks/built-in/missing-hardening/missing-hardening-rule.go rename to pkg/security/risks/built-in/missing-hardening/missing-hardening-rule.go index 04a8ae10..9cf57bab 100644 --- a/pkg/risks/built-in/missing-hardening/missing-hardening-rule.go +++ b/pkg/security/risks/built-in/missing-hardening/missing-hardening-rule.go @@ -1,8 +1,10 @@ package missing_hardening import ( - "github.com/threagile/threagile/model" "strconv" + + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) const raaLimit = 55 @@ -29,8 +31,8 @@ func Category() model.RiskCategory { Mitigation: "Try to apply all hardening best practices (like CIS benchmarks, OWASP recommendations, vendor " + "recommendations, DevSec Hardening Framework, DBSAT for Oracle databases, and others).", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Operations, - STRIDE: model.Tampering, + Function: types.Operations, + STRIDE: types.Tampering, DetectionLogic: "In-scope technical assets with RAA values of " + strconv.Itoa(raaLimit) + " % or higher. 
" + "Generally for high-value targets like data stores, application servers, identity providers and ERP systems this limit is reduced to " + strconv.Itoa(raaLimitReduced) + " %", RiskAssessment: "The risk rating depends on the sensitivity of the data processed or stored in the technical asset.", @@ -46,32 +48,32 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope { if technicalAsset.RAA >= raaLimit || (technicalAsset.RAA >= raaLimitReduced && - (technicalAsset.Type == model.Datastore || technicalAsset.Technology == model.ApplicationServer || technicalAsset.Technology == model.IdentityProvider || technicalAsset.Technology == model.ERP)) { - risks = append(risks, createRisk(technicalAsset)) + (technicalAsset.Type == types.Datastore || technicalAsset.Technology == types.ApplicationServer || technicalAsset.Technology == types.IdentityProvider || technicalAsset.Technology == types.ERP)) { + risks = append(risks, createRisk(input, technicalAsset)) } } } return risks } -func createRisk(technicalAsset model.TechnicalAsset) model.Risk { +func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset) model.Risk { title := "Missing Hardening risk at " + technicalAsset.Title + "" - impact := model.LowImpact - if technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || technicalAsset.HighestIntegrity() == model.MissionCritical { - impact = model.MediumImpact + impact := types.LowImpact + if technicalAsset.HighestConfidentiality(input) == types.StrictlyConfidential || technicalAsset.HighestIntegrity(input) == types.MissionCritical { + impact = types.MediumImpact } risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Likely, impact), - ExploitationLikelihood: 
model.Likely, + Severity: model.CalculateSeverity(types.Likely, impact), + ExploitationLikelihood: types.Likely, ExploitationImpact: impact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id diff --git a/pkg/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go b/pkg/security/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go similarity index 75% rename from pkg/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go rename to pkg/security/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go index 68ede20a..4213938a 100644 --- a/pkg/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go +++ b/pkg/security/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go @@ -1,7 +1,8 @@ package missing_identity_propagation import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -28,8 +29,8 @@ func Category() model.RiskCategory { "identity of the end user. This can be achieved in passing JWTs or similar tokens and checking them in the backend " + "services. For DevOps usages apply at least a technical-user authorization.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Architecture, - STRIDE: model.ElevationOfPrivilege, + Function: types.Architecture, + STRIDE: types.ElevationOfPrivilege, DetectionLogic: "In-scope service-like technical assets which usually process data based on end user requests, if authenticated " + "(i.e. 
non-public), should authorize incoming requests based on the propagated end user identity when their rating is sensitive. " + "This is especially the case for all multi-tenant assets (there even less-sensitive rated ones). " + @@ -49,34 +50,34 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope { continue } if technicalAsset.Technology.IsUsuallyProcessingEndUserRequests() && - (technicalAsset.Confidentiality >= model.Confidential || - technicalAsset.Integrity >= model.Critical || - technicalAsset.Availability >= model.Critical || + (technicalAsset.Confidentiality >= types.Confidential || + technicalAsset.Integrity >= types.Critical || + technicalAsset.Availability >= types.Critical || (technicalAsset.MultiTenant && - (technicalAsset.Confidentiality >= model.Restricted || - technicalAsset.Integrity >= model.Important || - technicalAsset.Availability >= model.Important))) { + (technicalAsset.Confidentiality >= types.Restricted || + technicalAsset.Integrity >= types.Important || + technicalAsset.Availability >= types.Important))) { // check each incoming authenticated data flow - commLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] + commLinks := input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, commLink := range commLinks { caller := input.TechnicalAssets[commLink.SourceId] - if !caller.Technology.IsUsuallyAbleToPropagateIdentityToOutgoingTargets() || caller.Type == model.Datastore { + if !caller.Technology.IsUsuallyAbleToPropagateIdentityToOutgoingTargets() || caller.Type == types.Datastore { continue } - if commLink.Authentication != model.NoneAuthentication && - commLink.Authorization != model.EndUserIdentityPropagation { - if commLink.Usage 
== model.DevOps && commLink.Authorization != model.NoneAuthorization { + if commLink.Authentication != types.NoneAuthentication && + commLink.Authorization != types.EndUserIdentityPropagation { + if commLink.Usage == types.DevOps && commLink.Authorization != types.NoneAuthorization { continue } - highRisk := technicalAsset.Confidentiality == model.StrictlyConfidential || - technicalAsset.Integrity == model.MissionCritical || - technicalAsset.Availability == model.MissionCritical + highRisk := technicalAsset.Confidentiality == types.StrictlyConfidential || + technicalAsset.Integrity == types.MissionCritical || + technicalAsset.Availability == types.MissionCritical risks = append(risks, createRisk(input, technicalAsset, commLink, highRisk)) } } @@ -86,21 +87,21 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { } func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingAccess model.CommunicationLink, moreRisky bool) model.Risk { - impact := model.LowImpact + impact := types.LowImpact if moreRisky { - impact = model.MediumImpact + impact = types.MediumImpact } risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, impact), - ExploitationLikelihood: model.Unlikely, + Severity: model.CalculateSeverity(types.Unlikely, impact), + ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: "Missing End User Identity Propagation over communication link " + incomingAccess.Title + " " + "from " + input.TechnicalAssets[incomingAccess.SourceId].Title + " " + "to " + technicalAsset.Title + "", MostRelevantTechnicalAssetId: technicalAsset.Id, MostRelevantCommunicationLinkId: incomingAccess.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + incomingAccess.Id + "@" + input.TechnicalAssets[incomingAccess.SourceId].Id + "@" + technicalAsset.Id diff 
--git a/pkg/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go b/pkg/security/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go similarity index 79% rename from pkg/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go rename to pkg/security/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go index bd58e297..7e1db5f8 100644 --- a/pkg/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go +++ b/pkg/security/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go @@ -1,7 +1,8 @@ package missing_identity_provider_isolation import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -17,7 +18,7 @@ func Category() model.RiskCategory { Id: "missing-identity-provider-isolation", Title: "Missing Identity Provider Isolation", Description: "Highly sensitive identity provider assets and their identity data stores should be isolated from other assets " + - "by their own network segmentation trust-boundary (" + model.ExecutionEnvironment.String() + " boundaries do not count as network isolation).", + "by their own network segmentation trust-boundary (" + types.ExecutionEnvironment.String() + " boundaries do not count as network isolation).", Impact: "If this risk is unmitigated, attackers successfully attacking other components of the system might have an easy path towards " + "highly sensitive identity provider assets and their identity data stores, as they are not separated by network segmentation.", ASVS: "V1 - Architecture, Design and Threat Modeling Requirements", @@ -25,13 +26,13 @@ func Category() model.RiskCategory { Action: "Network Segmentation", Mitigation: "Apply a network segmentation 
trust-boundary around the highly sensitive identity provider assets and their identity data stores.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Operations, - STRIDE: model.ElevationOfPrivilege, + Function: types.Operations, + STRIDE: types.ElevationOfPrivilege, DetectionLogic: "In-scope identity provider assets and their identity data stores " + "when surrounded by other (not identity-related) assets (without a network trust-boundary in-between). " + "This risk is especially prevalent when other non-identity related assets are within the same execution environment (i.e. same database or same application server).", - RiskAssessment: "Default is " + model.HighImpact.String() + " impact. The impact is increased to " + model.VeryHighImpact.String() + " when the asset missing the " + - "trust-boundary protection is rated as " + model.StrictlyConfidential.String() + " or " + model.MissionCritical.String() + ".", + RiskAssessment: "Default is " + types.HighImpact.String() + " impact. 
The impact is increased to " + types.VeryHighImpact.String() + " when the asset missing the " + + "trust-boundary protection is rated as " + types.StrictlyConfidential.String() + " or " + types.MissionCritical.String() + ".", FalsePositives: "When all assets within the network segmentation trust-boundary are hardened and protected to the same extend as if all were " + "identity providers with data of highest sensitivity.", ModelFailurePossibleReason: false, @@ -47,9 +48,9 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, technicalAsset := range input.TechnicalAssets { if !technicalAsset.OutOfScope && technicalAsset.Technology.IsIdentityRelated() { - moreImpact := technicalAsset.Confidentiality == model.StrictlyConfidential || - technicalAsset.Integrity == model.MissionCritical || - technicalAsset.Availability == model.MissionCritical + moreImpact := technicalAsset.Confidentiality == types.StrictlyConfidential || + technicalAsset.Integrity == types.MissionCritical || + technicalAsset.Availability == types.MissionCritical sameExecutionEnv := false createRiskEntry := false // now check for any other same-network assets of non-identity-related types @@ -57,10 +58,10 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { if technicalAsset.Id != sparringAssetCandidateId { sparringAssetCandidate := input.TechnicalAssets[sparringAssetCandidateId] if !sparringAssetCandidate.Technology.IsIdentityRelated() && !sparringAssetCandidate.Technology.IsCloseToHighValueTargetsTolerated() { - if technicalAsset.IsSameExecutionEnvironment(sparringAssetCandidateId) { + if technicalAsset.IsSameExecutionEnvironment(input, sparringAssetCandidateId) { createRiskEntry = true sameExecutionEnv = true - } else if technicalAsset.IsSameTrustBoundaryNetworkOnly(sparringAssetCandidateId) { + } else if technicalAsset.IsSameTrustBoundaryNetworkOnly(input, sparringAssetCandidateId) { createRiskEntry = true } } @@ -75,14 +76,14 @@ func 
GenerateRisks(input *model.ParsedModel) []model.Risk { } func createRisk(techAsset model.TechnicalAsset, moreImpact bool, sameExecutionEnv bool) model.Risk { - impact := model.HighImpact - likelihood := model.Unlikely + impact := types.HighImpact + likelihood := types.Unlikely others := "in the same network segment" if moreImpact { - impact = model.VeryHighImpact + impact = types.VeryHighImpact } if sameExecutionEnv { - likelihood = model.Likely + likelihood = types.Likely others = "in the same execution environment" } risk := model.Risk{ @@ -93,7 +94,7 @@ func createRisk(techAsset model.TechnicalAsset, moreImpact bool, sameExecutionEn Title: "Missing Identity Provider Isolation to further encapsulate and protect identity-related asset " + techAsset.Title + " against unrelated " + "lower protected assets " + others + ", which might be easier to compromise by attackers", MostRelevantTechnicalAssetId: techAsset.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{techAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + techAsset.Id diff --git a/pkg/risks/built-in/missing-identity-store/missing-identity-store-rule.go b/pkg/security/risks/built-in/missing-identity-store/missing-identity-store-rule.go similarity index 74% rename from pkg/risks/built-in/missing-identity-store/missing-identity-store-rule.go rename to pkg/security/risks/built-in/missing-identity-store/missing-identity-store-rule.go index 61e54450..c470079e 100644 --- a/pkg/risks/built-in/missing-identity-store/missing-identity-store-rule.go +++ b/pkg/security/risks/built-in/missing-identity-store/missing-identity-store-rule.go @@ -1,7 +1,8 @@ package missing_identity_store import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -25,8 +26,8 @@ func Category() model.RiskCategory { Action: "Identity 
Store", Mitigation: "Include an identity store in the model if the application has a login.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Architecture, - STRIDE: model.Spoofing, + Function: types.Architecture, + STRIDE: types.Spoofing, DetectionLogic: "Models with authenticated data-flows authorized via end user identity missing an in-scope identity store.", RiskAssessment: "The risk rating depends on the sensitivity of the end user-identity authorized technical assets and " + "their data assets processed and stored.", @@ -45,7 +46,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, technicalAsset := range input.TechnicalAssets { if !technicalAsset.OutOfScope && - (technicalAsset.Technology == model.IdentityStoreLDAP || technicalAsset.Technology == model.IdentityStoreDatabase) { + (technicalAsset.Technology == types.IdentityStoreLDAP || technicalAsset.Technology == types.IdentityStoreDatabase) { // everything fine, no risk, as we have an in-scope identity store in the model return risks } @@ -53,25 +54,25 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { // now check if we have end user identity authorized communication links, then it's a risk riskIdentified := false var mostRelevantAsset model.TechnicalAsset - impact := model.LowImpact - for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset + impact := types.LowImpact + for _, id := range input.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset technicalAsset := input.TechnicalAssets[id] for _, commLink := range technicalAsset.CommunicationLinksSorted() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset - if commLink.Authorization == 
model.EndUserIdentityPropagation { + if commLink.Authorization == types.EndUserIdentityPropagation { riskIdentified = true targetAsset := input.TechnicalAssets[commLink.TargetId] - if impact == model.LowImpact { + if impact == types.LowImpact { mostRelevantAsset = targetAsset - if targetAsset.HighestConfidentiality() >= model.Confidential || - targetAsset.HighestIntegrity() >= model.Critical || - targetAsset.HighestAvailability() >= model.Critical { - impact = model.MediumImpact + if targetAsset.HighestConfidentiality(input) >= types.Confidential || + targetAsset.HighestIntegrity(input) >= types.Critical || + targetAsset.HighestAvailability(input) >= types.Critical { + impact = types.MediumImpact } } - if targetAsset.Confidentiality >= model.Confidential || - targetAsset.Integrity >= model.Critical || - targetAsset.Availability >= model.Critical { - impact = model.MediumImpact + if targetAsset.Confidentiality >= types.Confidential || + targetAsset.Integrity >= types.Critical || + targetAsset.Availability >= types.Critical { + impact = types.MediumImpact } // just for referencing the most interesting asset if technicalAsset.HighestSensitivityScore() > mostRelevantAsset.HighestSensitivityScore() { @@ -86,16 +87,16 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(technicalAsset model.TechnicalAsset, impact model.RiskExploitationImpact) model.Risk { +func createRisk(technicalAsset model.TechnicalAsset, impact types.RiskExploitationImpact) model.Risk { title := "Missing Identity Store in the threat model (referencing asset " + technicalAsset.Title + " as an example)" risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, impact), - ExploitationLikelihood: model.Unlikely, + Severity: model.CalculateSeverity(types.Unlikely, impact), + ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: 
model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{}, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id diff --git a/pkg/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go b/pkg/security/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go similarity index 69% rename from pkg/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go rename to pkg/security/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go index 459fdd51..d0db7a29 100644 --- a/pkg/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go +++ b/pkg/security/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go @@ -1,8 +1,10 @@ package missing_network_segmentation import ( - "github.com/threagile/threagile/model" "sort" + + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) const raaLimit = 50 @@ -29,15 +31,15 @@ func Category() model.RiskCategory { Action: "Network Segmentation", Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive assets and/or data stores.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Operations, - STRIDE: model.ElevationOfPrivilege, + Function: types.Operations, + STRIDE: types.ElevationOfPrivilege, DetectionLogic: "In-scope technical assets with high sensitivity and RAA values as well as data stores " + - "when surrounded by assets (without a network trust-boundary in-between) which are of type " + model.ClientSystem.String() + ", " + - model.WebServer.String() + ", " + model.WebApplication.String() + ", " + model.CMS.String() + ", " + model.WebServiceREST.String() + ", " + model.WebServiceSOAP.String() + ", " + - model.BuildPipeline.String() + ", " + model.SourcecodeRepository.String() + ", " + 
model.Monitoring.String() + ", or similar and there is no direct connection between these " + + "when surrounded by assets (without a network trust-boundary in-between) which are of type " + types.ClientSystem.String() + ", " + + types.WebServer.String() + ", " + types.WebApplication.String() + ", " + types.CMS.String() + ", " + types.WebServiceREST.String() + ", " + types.WebServiceSOAP.String() + ", " + + types.BuildPipeline.String() + ", " + types.SourcecodeRepository.String() + ", " + types.Monitoring.String() + ", or similar and there is no direct connection between these " + "(hence no requirement to be so close to each other).", - RiskAssessment: "Default is " + model.LowSeverity.String() + " risk. The risk is increased to " + model.MediumSeverity.String() + " when the asset missing the " + - "trust-boundary protection is rated as " + model.StrictlyConfidential.String() + " or " + model.MissionCritical.String() + ".", + RiskAssessment: "Default is " + types.LowSeverity.String() + " risk. 
The risk is increased to " + types.MediumSeverity.String() + " when the asset missing the " + + "trust-boundary protection is rated as " + types.StrictlyConfidential.String() + " or " + types.MissionCritical.String() + ".", FalsePositives: "When all assets within the network segmentation trust-boundary are hardened and protected to the same extend as if all were " + "containing/processing highly sensitive data.", ModelFailurePossibleReason: false, @@ -60,19 +62,19 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { sort.Strings(keys) for _, key := range keys { technicalAsset := input.TechnicalAssets[key] - if !technicalAsset.OutOfScope && technicalAsset.Technology != model.ReverseProxy && technicalAsset.Technology != model.WAF && technicalAsset.Technology != model.IDS && technicalAsset.Technology != model.IPS && technicalAsset.Technology != model.ServiceRegistry { - if technicalAsset.RAA >= raaLimit && (technicalAsset.Type == model.Datastore || technicalAsset.Confidentiality >= model.Confidential || - technicalAsset.Integrity >= model.Critical || technicalAsset.Availability >= model.Critical) { + if !technicalAsset.OutOfScope && technicalAsset.Technology != types.ReverseProxy && technicalAsset.Technology != types.WAF && technicalAsset.Technology != types.IDS && technicalAsset.Technology != types.IPS && technicalAsset.Technology != types.ServiceRegistry { + if technicalAsset.RAA >= raaLimit && (technicalAsset.Type == types.Datastore || technicalAsset.Confidentiality >= types.Confidential || + technicalAsset.Integrity >= types.Critical || technicalAsset.Availability >= types.Critical) { // now check for any other same-network assets of certain types which have no direct connection for _, sparringAssetCandidateId := range keys { // so inner loop again over all assets if technicalAsset.Id != sparringAssetCandidateId { sparringAssetCandidate := input.TechnicalAssets[sparringAssetCandidateId] if sparringAssetCandidate.Technology.IsLessProtectedType() && - 
technicalAsset.IsSameTrustBoundaryNetworkOnly(sparringAssetCandidateId) && - !technicalAsset.HasDirectConnection(sparringAssetCandidateId) && + technicalAsset.IsSameTrustBoundaryNetworkOnly(input, sparringAssetCandidateId) && + !technicalAsset.HasDirectConnection(input, sparringAssetCandidateId) && !sparringAssetCandidate.Technology.IsCloseToHighValueTargetsTolerated() { - highRisk := technicalAsset.Confidentiality == model.StrictlyConfidential || - technicalAsset.Integrity == model.MissionCritical || technicalAsset.Availability == model.MissionCritical + highRisk := technicalAsset.Confidentiality == types.StrictlyConfidential || + technicalAsset.Integrity == types.MissionCritical || technicalAsset.Availability == types.MissionCritical risks = append(risks, createRisk(technicalAsset, highRisk)) break } @@ -85,19 +87,19 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { } func createRisk(techAsset model.TechnicalAsset, moreRisky bool) model.Risk { - impact := model.LowImpact + impact := types.LowImpact if moreRisky { - impact = model.MediumImpact + impact = types.MediumImpact } risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, impact), - ExploitationLikelihood: model.Unlikely, + Severity: model.CalculateSeverity(types.Unlikely, impact), + ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: "Missing Network Segmentation to further encapsulate and protect " + techAsset.Title + " against unrelated " + "lower protected assets in the same network segment, which might be easier to compromise by attackers", MostRelevantTechnicalAssetId: techAsset.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{techAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + techAsset.Id diff --git a/pkg/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go 
b/pkg/security/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go similarity index 72% rename from pkg/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go rename to pkg/security/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go index 62e2f1d8..1bb54e82 100644 --- a/pkg/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go +++ b/pkg/security/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go @@ -1,7 +1,8 @@ package missing_vault_isolation import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -17,7 +18,7 @@ func Category() model.RiskCategory { Id: "missing-vault-isolation", Title: "Missing Vault Isolation", Description: "Highly sensitive vault assets and their data stores should be isolated from other assets " + - "by their own network segmentation trust-boundary (" + model.ExecutionEnvironment.String() + " boundaries do not count as network isolation).", + "by their own network segmentation trust-boundary (" + types.ExecutionEnvironment.String() + " boundaries do not count as network isolation).", Impact: "If this risk is unmitigated, attackers successfully attacking other components of the system might have an easy path towards " + "highly sensitive vault assets and their data stores, as they are not separated by network segmentation.", ASVS: "V1 - Architecture, Design and Threat Modeling Requirements", @@ -25,13 +26,13 @@ func Category() model.RiskCategory { Action: "Network Segmentation", Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive vault assets and their data stores.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Operations, - STRIDE: model.ElevationOfPrivilege, + Function: types.Operations, + STRIDE: types.ElevationOfPrivilege, 
DetectionLogic: "In-scope vault assets " + "when surrounded by other (not vault-related) assets (without a network trust-boundary in-between). " + "This risk is especially prevalent when other non-vault related assets are within the same execution environment (i.e. same database or same application server).", - RiskAssessment: "Default is " + model.MediumImpact.String() + " impact. The impact is increased to " + model.HighImpact.String() + " when the asset missing the " + - "trust-boundary protection is rated as " + model.StrictlyConfidential.String() + " or " + model.MissionCritical.String() + ".", + RiskAssessment: "Default is " + types.MediumImpact.String() + " impact. The impact is increased to " + types.HighImpact.String() + " when the asset missing the " + + "trust-boundary protection is rated as " + types.StrictlyConfidential.String() + " or " + types.MissionCritical.String() + ".", FalsePositives: "When all assets within the network segmentation trust-boundary are hardened and protected to the same extend as if all were " + "vaults with data of highest sensitivity.", ModelFailurePossibleReason: false, @@ -46,21 +47,21 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, technicalAsset := range input.TechnicalAssets { - if !technicalAsset.OutOfScope && technicalAsset.Technology == model.Vault { - moreImpact := technicalAsset.Confidentiality == model.StrictlyConfidential || - technicalAsset.Integrity == model.MissionCritical || - technicalAsset.Availability == model.MissionCritical + if !technicalAsset.OutOfScope && technicalAsset.Technology == types.Vault { + moreImpact := technicalAsset.Confidentiality == types.StrictlyConfidential || + technicalAsset.Integrity == types.MissionCritical || + technicalAsset.Availability == types.MissionCritical sameExecutionEnv := false createRiskEntry := false // now check for any other same-network assets of non-vault-related types for 
sparringAssetCandidateId := range input.TechnicalAssets { // so inner loop again over all assets if technicalAsset.Id != sparringAssetCandidateId { sparringAssetCandidate := input.TechnicalAssets[sparringAssetCandidateId] - if sparringAssetCandidate.Technology != model.Vault && !isVaultStorage(technicalAsset, sparringAssetCandidate) { - if technicalAsset.IsSameExecutionEnvironment(sparringAssetCandidateId) { + if sparringAssetCandidate.Technology != types.Vault && !isVaultStorage(input, technicalAsset, sparringAssetCandidate) { + if technicalAsset.IsSameExecutionEnvironment(input, sparringAssetCandidateId) { createRiskEntry = true sameExecutionEnv = true - } else if technicalAsset.IsSameTrustBoundaryNetworkOnly(sparringAssetCandidateId) { + } else if technicalAsset.IsSameTrustBoundaryNetworkOnly(input, sparringAssetCandidateId) { createRiskEntry = true } } @@ -74,19 +75,19 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func isVaultStorage(vault model.TechnicalAsset, storage model.TechnicalAsset) bool { - return storage.Type == model.Datastore && vault.HasDirectConnection(storage.Id) +func isVaultStorage(parsedModel *model.ParsedModel, vault model.TechnicalAsset, storage model.TechnicalAsset) bool { + return storage.Type == types.Datastore && vault.HasDirectConnection(parsedModel, storage.Id) } func createRisk(techAsset model.TechnicalAsset, moreImpact bool, sameExecutionEnv bool) model.Risk { - impact := model.MediumImpact - likelihood := model.Unlikely + impact := types.MediumImpact + likelihood := types.Unlikely others := "in the same network segment" if moreImpact { - impact = model.HighImpact + impact = types.HighImpact } if sameExecutionEnv { - likelihood = model.Likely + likelihood = types.Likely others = "in the same execution environment" } risk := model.Risk{ @@ -97,7 +98,7 @@ func createRisk(techAsset model.TechnicalAsset, moreImpact bool, sameExecutionEn Title: "Missing Vault Isolation to further encapsulate and protect 
vault-related asset " + techAsset.Title + " against unrelated " + "lower protected assets " + others + ", which might be easier to compromise by attackers", MostRelevantTechnicalAssetId: techAsset.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{techAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + techAsset.Id diff --git a/pkg/risks/built-in/missing-vault/missing-vault-rule.go b/pkg/security/risks/built-in/missing-vault/missing-vault-rule.go similarity index 75% rename from pkg/risks/built-in/missing-vault/missing-vault-rule.go rename to pkg/security/risks/built-in/missing-vault/missing-vault-rule.go index b104739c..dbb596be 100644 --- a/pkg/risks/built-in/missing-vault/missing-vault-rule.go +++ b/pkg/security/risks/built-in/missing-vault/missing-vault-rule.go @@ -1,7 +1,8 @@ package missing_vault import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -27,8 +28,8 @@ func Category() model.RiskCategory { Action: "Vault (Secret Storage)", Mitigation: "Consider using a Vault (Secret Storage) to securely store and access config secrets (like credentials, private keys, client certificates, etc.).", Check: "Is a Vault (Secret Storage) in place?", - Function: model.Architecture, - STRIDE: model.InformationDisclosure, + Function: types.Architecture, + STRIDE: types.InformationDisclosure, DetectionLogic: "Models without a Vault (Secret Storage).", RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", FalsePositives: "Models where no technical assets have any kind of sensitive config data to protect " + @@ -46,21 +47,21 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) hasVault := false var mostRelevantAsset model.TechnicalAsset - 
impact := model.LowImpact - for _, id := range model.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset + impact := types.LowImpact + for _, id := range input.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset techAsset := input.TechnicalAssets[id] - if techAsset.Technology == model.Vault { + if techAsset.Technology == types.Vault { hasVault = true } - if techAsset.HighestConfidentiality() >= model.Confidential || - techAsset.HighestIntegrity() >= model.Critical || - techAsset.HighestAvailability() >= model.Critical { - impact = model.MediumImpact + if techAsset.HighestConfidentiality(input) >= types.Confidential || + techAsset.HighestIntegrity(input) >= types.Critical || + techAsset.HighestAvailability(input) >= types.Critical { + impact = types.MediumImpact } - if techAsset.Confidentiality >= model.Confidential || - techAsset.Integrity >= model.Critical || - techAsset.Availability >= model.Critical { - impact = model.MediumImpact + if techAsset.Confidentiality >= types.Confidential || + techAsset.Integrity >= types.Critical || + techAsset.Availability >= types.Critical { + impact = types.MediumImpact } // just for referencing the most interesting asset if techAsset.HighestSensitivityScore() > mostRelevantAsset.HighestSensitivityScore() { @@ -73,16 +74,16 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(technicalAsset model.TechnicalAsset, impact model.RiskExploitationImpact) model.Risk { +func createRisk(technicalAsset model.TechnicalAsset, impact types.RiskExploitationImpact) model.Risk { title := "Missing Vault (Secret Storage) in the threat model (referencing asset " + technicalAsset.Title + " as an example)" risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, impact), - ExploitationLikelihood: model.Unlikely, + 
Severity: model.CalculateSeverity(types.Unlikely, impact), + ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{}, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id diff --git a/pkg/risks/built-in/missing-waf/missing-waf-rule.go b/pkg/security/risks/built-in/missing-waf/missing-waf-rule.go similarity index 79% rename from pkg/risks/built-in/missing-waf/missing-waf-rule.go rename to pkg/security/risks/built-in/missing-waf/missing-waf-rule.go index 3905fbaf..67166f51 100644 --- a/pkg/risks/built-in/missing-waf/missing-waf-rule.go +++ b/pkg/security/risks/built-in/missing-waf/missing-waf-rule.go @@ -1,7 +1,8 @@ package missing_waf import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -26,8 +27,8 @@ func Category() model.RiskCategory { Mitigation: "Consider placing a Web Application Firewall (WAF) in front of the web-services and/or web-applications. For cloud environments many cloud providers offer " + "pre-configured WAFs. 
Even reverse proxies can be enhances by a WAF component via ModSecurity plugins.", Check: "Is a Web Application Firewall (WAF) in place?", - Function: model.Operations, - STRIDE: model.Tampering, + Function: types.Operations, + STRIDE: types.Tampering, DetectionLogic: "In-scope web-services and/or web-applications accessed across a network trust boundary not having a Web Application Firewall (WAF) in front of them.", RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", FalsePositives: "Targets only accessible via WAFs or reverse proxies containing a WAF component (like ModSecurity) can be considered " + @@ -46,11 +47,11 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { for _, technicalAsset := range input.TechnicalAssets { if !technicalAsset.OutOfScope && (technicalAsset.Technology.IsWebApplication() || technicalAsset.Technology.IsWebService()) { - for _, incomingAccess := range model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] { - if incomingAccess.IsAcrossTrustBoundaryNetworkOnly() && + for _, incomingAccess := range input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] { + if incomingAccess.IsAcrossTrustBoundaryNetworkOnly(input) && incomingAccess.Protocol.IsPotentialWebAccessProtocol() && - input.TechnicalAssets[incomingAccess.SourceId].Technology != model.WAF { - risks = append(risks, createRisk(technicalAsset)) + input.TechnicalAssets[incomingAccess.SourceId].Technology != types.WAF { + risks = append(risks, createRisk(input, technicalAsset)) break } } @@ -59,14 +60,14 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(technicalAsset model.TechnicalAsset) model.Risk { +func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset) model.Risk { title := "Missing Web Application Firewall (WAF) risk at " + technicalAsset.Title + "" - likelihood := 
model.Unlikely - impact := model.LowImpact - if technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || - technicalAsset.HighestIntegrity() == model.MissionCritical || - technicalAsset.HighestAvailability() == model.MissionCritical { - impact = model.MediumImpact + likelihood := types.Unlikely + impact := types.LowImpact + if technicalAsset.HighestConfidentiality(input) == types.StrictlyConfidential || + technicalAsset.HighestIntegrity(input) == types.MissionCritical || + technicalAsset.HighestAvailability(input) == types.MissionCritical { + impact = types.MediumImpact } risk := model.Risk{ Category: Category(), @@ -75,7 +76,7 @@ func createRisk(technicalAsset model.TechnicalAsset) model.Risk { ExploitationImpact: impact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id diff --git a/pkg/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go b/pkg/security/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go similarity index 86% rename from pkg/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go rename to pkg/security/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go index 0c9292ef..5460af2d 100644 --- a/pkg/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go +++ b/pkg/security/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go @@ -1,8 +1,10 @@ package mixed_targets_on_shared_runtime import ( - "github.com/threagile/threagile/model" "sort" + + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -28,8 +30,8 @@ func Category() model.RiskCategory 
{ "prevent load- or breach-related problems originating from one more attacker-facing asset impacts also the " + "other more critical rated backend/datastore assets.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Operations, - STRIDE: model.ElevationOfPrivilege, + Function: types.Operations, + STRIDE: types.ElevationOfPrivilege, DetectionLogic: "Shared runtime running technical assets of different trust-boundaries is at risk. " + "Also mixing backend/datastore with frontend components on the same shared runtime is considered a risk.", RiskAssessment: "The risk rating (low or medium) depends on the confidentiality, integrity, and availability rating of " + @@ -60,12 +62,12 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { riskAdded := false for _, technicalAssetId := range sharedRuntime.TechnicalAssetsRunning { technicalAsset := input.TechnicalAssets[technicalAssetId] - if len(currentTrustBoundaryId) > 0 && currentTrustBoundaryId != technicalAsset.GetTrustBoundaryId() { + if len(currentTrustBoundaryId) > 0 && currentTrustBoundaryId != technicalAsset.GetTrustBoundaryId(input) { risks = append(risks, createRisk(input, sharedRuntime)) riskAdded = true break } - currentTrustBoundaryId = technicalAsset.GetTrustBoundaryId() + currentTrustBoundaryId = technicalAsset.GetTrustBoundaryId(input) if technicalAsset.Technology.IsExclusivelyFrontendRelated() { hasFrontend = true } @@ -81,19 +83,19 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { } func createRisk(input *model.ParsedModel, sharedRuntime model.SharedRuntime) model.Risk { - impact := model.LowImpact + impact := types.LowImpact if isMoreRisky(input, sharedRuntime) { - impact = model.MediumImpact + impact = types.MediumImpact } risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, impact), - ExploitationLikelihood: model.Unlikely, + Severity: model.CalculateSeverity(types.Unlikely, 
impact), + ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: "Mixed Targets on Shared Runtime named " + sharedRuntime.Title + " might enable attackers moving from one less " + "valuable target to a more valuable one", // TODO list at least the assets in the text which are running on the shared HW MostRelevantSharedRuntimeId: sharedRuntime.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: sharedRuntime.TechnicalAssetsRunning, } risk.SyntheticId = risk.Category.Id + "@" + sharedRuntime.Id @@ -103,8 +105,8 @@ func createRisk(input *model.ParsedModel, sharedRuntime model.SharedRuntime) mod func isMoreRisky(input *model.ParsedModel, sharedRuntime model.SharedRuntime) bool { for _, techAssetId := range sharedRuntime.TechnicalAssetsRunning { techAsset := input.TechnicalAssets[techAssetId] - if techAsset.Confidentiality == model.StrictlyConfidential || techAsset.Integrity == model.MissionCritical || - techAsset.Availability == model.MissionCritical { + if techAsset.Confidentiality == types.StrictlyConfidential || techAsset.Integrity == types.MissionCritical || + techAsset.Availability == types.MissionCritical { return true } } diff --git a/pkg/risks/built-in/path-traversal/path-traversal-rule.go b/pkg/security/risks/built-in/path-traversal/path-traversal-rule.go similarity index 80% rename from pkg/risks/built-in/path-traversal/path-traversal-rule.go rename to pkg/security/risks/built-in/path-traversal/path-traversal-rule.go index 482fd725..6aaf85cd 100644 --- a/pkg/risks/built-in/path-traversal/path-traversal-rule.go +++ b/pkg/security/risks/built-in/path-traversal/path-traversal-rule.go @@ -1,7 +1,8 @@ package path_traversal import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -28,8 +29,8 @@ func Category() model.RiskCategory { "(partly or 
fully) provided by the caller. " + "When a third-party product is used instead of custom developed software, check if the product applies the proper mitigation and ensure a reasonable patch-level.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Development, - STRIDE: model.InformationDisclosure, + Function: types.Development, + STRIDE: types.InformationDisclosure, DetectionLogic: "Filesystems accessed by in-scope callers.", RiskAssessment: "The risk rating depends on the sensitivity of the data stored inside the technical asset.", FalsePositives: "File accesses by filenames not consisting of parts controllable by the caller can be considered " + @@ -41,19 +42,19 @@ func Category() model.RiskCategory { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] - if technicalAsset.Technology != model.FileServer && technicalAsset.Technology != model.LocalFileSystem { + if technicalAsset.Technology != types.FileServer && technicalAsset.Technology != types.LocalFileSystem { continue } - incomingFlows := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] + incomingFlows := input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, incomingFlow := range incomingFlows { if input.TechnicalAssets[incomingFlow.SourceId].OutOfScope { continue } - likelihood := model.VeryLikely - if incomingFlow.Usage == model.DevOps { - likelihood = model.Likely + likelihood := types.VeryLikely + if incomingFlow.Usage == types.DevOps { + likelihood = types.Likely } risks = append(risks, createRisk(input, technicalAsset, incomingFlow, likelihood)) } @@ -65,13 +66,13 @@ func SupportedTags() []string { return []string{} } -func createRisk(input *model.ParsedModel, technicalAsset 
model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood model.RiskExploitationLikelihood) model.Risk { +func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood types.RiskExploitationLikelihood) model.Risk { caller := input.TechnicalAssets[incomingFlow.SourceId] title := "Path-Traversal risk at " + caller.Title + " against filesystem " + technicalAsset.Title + "" + " via " + incomingFlow.Title + "" - impact := model.MediumImpact - if technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || technicalAsset.HighestIntegrity() == model.MissionCritical { - impact = model.HighImpact + impact := types.MediumImpact + if technicalAsset.HighestConfidentiality(input) == types.StrictlyConfidential || technicalAsset.HighestIntegrity(input) == types.MissionCritical { + impact = types.HighImpact } risk := model.Risk{ Category: Category(), @@ -81,7 +82,7 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i Title: title, MostRelevantTechnicalAssetId: caller.Id, MostRelevantCommunicationLinkId: incomingFlow.Id, - DataBreachProbability: model.Probable, + DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + caller.Id + "@" + technicalAsset.Id + "@" + incomingFlow.Id diff --git a/pkg/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go b/pkg/security/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go similarity index 79% rename from pkg/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go rename to pkg/security/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go index e7306a1b..76a2dee8 100644 --- a/pkg/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go +++ 
b/pkg/security/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go @@ -1,7 +1,8 @@ package push_instead_of_pull_deployment import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -27,8 +28,8 @@ func Category() model.RiskCategory { Action: "Build Pipeline Hardening", Mitigation: "Try to prefer pull-based deployments (like GitOps scenarios offer) over push-based deployments to reduce the attack surface of the production system.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Architecture, - STRIDE: model.Tampering, + Function: types.Architecture, + STRIDE: types.Tampering, DetectionLogic: "Models with build pipeline components accessing in-scope targets of deployment (in a non-readonly way) which " + "are not build-related components themselves.", RiskAssessment: "The risk rating depends on the highest sensitivity of the deployment targets running custom-developed parts.", @@ -45,17 +46,17 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - impact := model.LowImpact + impact := types.LowImpact for _, buildPipeline := range input.TechnicalAssets { - if buildPipeline.Technology == model.BuildPipeline { + if buildPipeline.Technology == types.BuildPipeline { for _, deploymentLink := range buildPipeline.CommunicationLinks { targetAsset := input.TechnicalAssets[deploymentLink.TargetId] - if !deploymentLink.Readonly && deploymentLink.Usage == model.DevOps && - !targetAsset.OutOfScope && !targetAsset.Technology.IsDevelopmentRelevant() && targetAsset.Usage == model.Business { - if targetAsset.HighestConfidentiality() >= model.Confidential || - targetAsset.HighestIntegrity() >= model.Critical || - targetAsset.HighestAvailability() >= model.Critical { - impact = 
model.MediumImpact + if !deploymentLink.Readonly && deploymentLink.Usage == types.DevOps && + !targetAsset.OutOfScope && !targetAsset.Technology.IsDevelopmentRelevant() && targetAsset.Usage == types.Business { + if targetAsset.HighestConfidentiality(input) >= types.Confidential || + targetAsset.HighestIntegrity(input) >= types.Critical || + targetAsset.HighestAvailability(input) >= types.Critical { + impact = types.MediumImpact } risks = append(risks, createRisk(buildPipeline, targetAsset, deploymentLink, impact)) } @@ -65,17 +66,17 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(buildPipeline model.TechnicalAsset, deploymentTarget model.TechnicalAsset, deploymentCommLink model.CommunicationLink, impact model.RiskExploitationImpact) model.Risk { +func createRisk(buildPipeline model.TechnicalAsset, deploymentTarget model.TechnicalAsset, deploymentCommLink model.CommunicationLink, impact types.RiskExploitationImpact) model.Risk { title := "Push instead of Pull Deployment at " + deploymentTarget.Title + " via build pipeline asset " + buildPipeline.Title + "" risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, impact), - ExploitationLikelihood: model.Unlikely, + Severity: model.CalculateSeverity(types.Unlikely, impact), + ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, MostRelevantTechnicalAssetId: deploymentTarget.Id, MostRelevantCommunicationLinkId: deploymentCommLink.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{deploymentTarget.Id}, } risk.SyntheticId = risk.Category.Id + "@" + buildPipeline.Id diff --git a/pkg/risks/built-in/search-query-injection/search-query-injection-rule.go b/pkg/security/risks/built-in/search-query-injection/search-query-injection-rule.go similarity index 76% rename from 
pkg/risks/built-in/search-query-injection/search-query-injection-rule.go rename to pkg/security/risks/built-in/search-query-injection/search-query-injection-rule.go index 9a3a3415..4cc24fb7 100644 --- a/pkg/risks/built-in/search-query-injection/search-query-injection-rule.go +++ b/pkg/security/risks/built-in/search-query-injection/search-query-injection-rule.go @@ -1,7 +1,8 @@ package search_query_injection import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -29,8 +30,8 @@ func Category() model.RiskCategory { "query unfiltered to the caller. " + "When a third-party product is used instead of custom developed software, check if the product applies the proper mitigation and ensure a reasonable patch-level.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Development, - STRIDE: model.Tampering, + Function: types.Development, + STRIDE: types.Tampering, DetectionLogic: "In-scope clients accessing search engine servers via typical search access protocols.", RiskAssessment: "The risk rating depends on the sensitivity of the search engine server itself and of the data assets processed or stored.", FalsePositives: "Server engine queries by search values not consisting of parts controllable by the caller can be considered " + @@ -42,19 +43,19 @@ func Category() model.RiskCategory { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] - if technicalAsset.Technology == model.SearchEngine || technicalAsset.Technology == model.SearchIndex { - incomingFlows := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] + if technicalAsset.Technology == types.SearchEngine || 
technicalAsset.Technology == types.SearchIndex { + incomingFlows := input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, incomingFlow := range incomingFlows { if input.TechnicalAssets[incomingFlow.SourceId].OutOfScope { continue } - if incomingFlow.Protocol == model.HTTP || incomingFlow.Protocol == model.HTTPS || - incomingFlow.Protocol == model.BINARY || incomingFlow.Protocol == model.BinaryEncrypted { - likelihood := model.VeryLikely - if incomingFlow.Usage == model.DevOps { - likelihood = model.Likely + if incomingFlow.Protocol == types.HTTP || incomingFlow.Protocol == types.HTTPS || + incomingFlow.Protocol == types.BINARY || incomingFlow.Protocol == types.BinaryEncrypted { + likelihood := types.VeryLikely + if incomingFlow.Usage == types.DevOps { + likelihood = types.Likely } risks = append(risks, createRisk(input, technicalAsset, incomingFlow, likelihood)) } @@ -68,15 +69,15 @@ func SupportedTags() []string { return []string{} } -func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood model.RiskExploitationLikelihood) model.Risk { +func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood types.RiskExploitationLikelihood) model.Risk { caller := input.TechnicalAssets[incomingFlow.SourceId] title := "Search Query Injection risk at " + caller.Title + " against search engine server " + technicalAsset.Title + "" + " via " + incomingFlow.Title + "" - impact := model.MediumImpact - if technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || technicalAsset.HighestIntegrity() == model.MissionCritical { - impact = model.HighImpact - } else if technicalAsset.HighestConfidentiality() <= model.Internal && technicalAsset.HighestIntegrity() == model.Operational { - impact = model.LowImpact + impact := types.MediumImpact + if technicalAsset.HighestConfidentiality(input) == 
types.StrictlyConfidential || technicalAsset.HighestIntegrity(input) == types.MissionCritical { + impact = types.HighImpact + } else if technicalAsset.HighestConfidentiality(input) <= types.Internal && technicalAsset.HighestIntegrity(input) == types.Operational { + impact = types.LowImpact } risk := model.Risk{ Category: Category(), @@ -86,7 +87,7 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i Title: title, MostRelevantTechnicalAssetId: caller.Id, MostRelevantCommunicationLinkId: incomingFlow.Id, - DataBreachProbability: model.Probable, + DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + caller.Id + "@" + technicalAsset.Id + "@" + incomingFlow.Id diff --git a/pkg/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go b/pkg/security/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go similarity index 82% rename from pkg/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go rename to pkg/security/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go index db5974cb..85b53452 100644 --- a/pkg/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go +++ b/pkg/security/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go @@ -1,7 +1,8 @@ package server_side_request_forgery import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -26,8 +27,8 @@ func Category() model.RiskCategory { "controllable values. 
" + "When a third-party product is used instead of custom developed software, check if the product applies the proper mitigation and ensure a reasonable patch-level.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Development, - STRIDE: model.InformationDisclosure, + Function: types.Development, + STRIDE: types.InformationDisclosure, DetectionLogic: "In-scope non-client systems accessing (using outgoing communication links) targets with either HTTP or HTTPS protocol.", RiskAssessment: "The risk rating (low or medium) depends on the sensitivity of the data assets receivable via web protocols from " + "targets within the same network trust-boundary as well on the sensitivity of the data assets receivable via web protocols from the target asset itself. " + @@ -45,9 +46,9 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] - if technicalAsset.OutOfScope || technicalAsset.Technology.IsClient() || technicalAsset.Technology == model.LoadBalancer { + if technicalAsset.OutOfScope || technicalAsset.Technology.IsClient() || technicalAsset.Technology == types.LoadBalancer { continue } for _, outgoingFlow := range technicalAsset.CommunicationLinks { @@ -63,37 +64,37 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, o target := input.TechnicalAssets[outgoingFlow.TargetId] title := "Server-Side Request Forgery (SSRF) risk at " + technicalAsset.Title + " server-side web-requesting " + "the target " + target.Title + " via " + outgoingFlow.Title + "" - impact := model.LowImpact + impact := types.LowImpact // check by the target itself (can be in another trust-boundary) - if target.HighestConfidentiality() == model.StrictlyConfidential { - impact = 
model.MediumImpact + if target.HighestConfidentiality(input) == types.StrictlyConfidential { + impact = types.MediumImpact } // check all potential attack targets within the same trust boundary (accessible via web protocols) uniqueDataBreachTechnicalAssetIDs := make(map[string]interface{}) uniqueDataBreachTechnicalAssetIDs[technicalAsset.Id] = true for _, potentialTargetAsset := range input.TechnicalAssets { - if technicalAsset.IsSameTrustBoundaryNetworkOnly(potentialTargetAsset.Id) { - for _, commLinkIncoming := range model.IncomingTechnicalCommunicationLinksMappedByTargetId[potentialTargetAsset.Id] { + if technicalAsset.IsSameTrustBoundaryNetworkOnly(input, potentialTargetAsset.Id) { + for _, commLinkIncoming := range input.IncomingTechnicalCommunicationLinksMappedByTargetId[potentialTargetAsset.Id] { if commLinkIncoming.Protocol.IsPotentialWebAccessProtocol() { uniqueDataBreachTechnicalAssetIDs[potentialTargetAsset.Id] = true - if potentialTargetAsset.HighestConfidentiality() == model.StrictlyConfidential { - impact = model.MediumImpact + if potentialTargetAsset.HighestConfidentiality(input) == types.StrictlyConfidential { + impact = types.MediumImpact } } } } } // adjust for cloud-based special risks - if impact == model.LowImpact && input.TrustBoundaries[technicalAsset.GetTrustBoundaryId()].Type.IsWithinCloud() { - impact = model.MediumImpact + if impact == types.LowImpact && input.TrustBoundaries[technicalAsset.GetTrustBoundaryId(input)].Type.IsWithinCloud() { + impact = types.MediumImpact } dataBreachTechnicalAssetIDs := make([]string, 0) for key := range uniqueDataBreachTechnicalAssetIDs { dataBreachTechnicalAssetIDs = append(dataBreachTechnicalAssetIDs, key) } - likelihood := model.Likely - if outgoingFlow.Usage == model.DevOps { - likelihood = model.Unlikely + likelihood := types.Likely + if outgoingFlow.Usage == types.DevOps { + likelihood = types.Unlikely } risk := model.Risk{ Category: Category(), @@ -103,7 +104,7 @@ func createRisk(input 
*model.ParsedModel, technicalAsset model.TechnicalAsset, o Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, MostRelevantCommunicationLinkId: outgoingFlow.Id, - DataBreachProbability: model.Possible, + DataBreachProbability: types.Possible, DataBreachTechnicalAssetIDs: dataBreachTechnicalAssetIDs, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + "@" + target.Id + "@" + outgoingFlow.Id diff --git a/pkg/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go b/pkg/security/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go similarity index 70% rename from pkg/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go rename to pkg/security/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go index 41e7f894..e9d286ef 100644 --- a/pkg/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go +++ b/pkg/security/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go @@ -1,7 +1,8 @@ package service_registry_poisoning import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -24,8 +25,8 @@ func Category() model.RiskCategory { Action: "Service Registry Integrity Check", Mitigation: "Try to strengthen the access control of the service registry and apply cross-checks to detect maliciously poisoned lookup data.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Architecture, - STRIDE: model.Spoofing, + Function: types.Architecture, + STRIDE: types.Spoofing, DetectionLogic: "In-scope service registries.", RiskAssessment: "The risk rating depends on the sensitivity of the technical assets accessing the service registry " + "as well as the data assets processed or stored.", @@ -42,10 +43,10 @@ func SupportedTags() []string { func 
GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] - if !technicalAsset.OutOfScope && technicalAsset.Technology == model.ServiceRegistry { - incomingFlows := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] + if !technicalAsset.OutOfScope && technicalAsset.Technology == types.ServiceRegistry { + incomingFlows := input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] risks = append(risks, createRisk(input, technicalAsset, incomingFlows)) } } @@ -54,26 +55,26 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlows []model.CommunicationLink) model.Risk { title := "Service Registry Poisoning risk at " + technicalAsset.Title + "" - impact := model.LowImpact + impact := types.LowImpact for _, incomingFlow := range incomingFlows { caller := input.TechnicalAssets[incomingFlow.SourceId] - if technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || technicalAsset.HighestIntegrity() == model.MissionCritical || technicalAsset.HighestAvailability() == model.MissionCritical || - caller.HighestConfidentiality() == model.StrictlyConfidential || caller.HighestIntegrity() == model.MissionCritical || caller.HighestAvailability() == model.MissionCritical || - incomingFlow.HighestConfidentiality() == model.StrictlyConfidential || incomingFlow.HighestIntegrity() == model.MissionCritical || incomingFlow.HighestAvailability() == model.MissionCritical { - impact = model.MediumImpact + if technicalAsset.HighestConfidentiality(input) == types.StrictlyConfidential || technicalAsset.HighestIntegrity(input) == types.MissionCritical || technicalAsset.HighestAvailability(input) == types.MissionCritical || + caller.HighestConfidentiality(input) == 
types.StrictlyConfidential || caller.HighestIntegrity(input) == types.MissionCritical || caller.HighestAvailability(input) == types.MissionCritical || + incomingFlow.HighestConfidentiality(input) == types.StrictlyConfidential || incomingFlow.HighestIntegrity(input) == types.MissionCritical || incomingFlow.HighestAvailability(input) == types.MissionCritical { + impact = types.MediumImpact break } } risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, impact), - ExploitationLikelihood: model.Unlikely, + Severity: model.CalculateSeverity(types.Unlikely, impact), + ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, // TODO: find all service-lookup-using tech assets, which then might use spoofed lookups? } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id diff --git a/pkg/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go b/pkg/security/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go similarity index 81% rename from pkg/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go rename to pkg/security/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go index c9100737..5fe4cfd9 100644 --- a/pkg/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go +++ b/pkg/security/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go @@ -1,7 +1,8 @@ package sql_nosql_injection import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -25,8 +26,8 @@ func Category() model.RiskCategory { Mitigation: "Try to use parameter binding to be safe from injection vulnerabilities. 
" + "When a third-party product is used instead of custom developed software, check if the product applies the proper mitigation and ensure a reasonable patch-level.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Development, - STRIDE: model.Tampering, + Function: types.Development, + STRIDE: types.Tampering, DetectionLogic: "Database accessed via typical database access protocols by in-scope clients.", RiskAssessment: "The risk rating depends on the sensitivity of the data stored inside the database.", FalsePositives: "Database accesses by queries not consisting of parts controllable by the caller can be considered " + @@ -42,14 +43,14 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] - incomingFlows := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] + incomingFlows := input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, incomingFlow := range incomingFlows { if input.TechnicalAssets[incomingFlow.SourceId].OutOfScope { continue } - if incomingFlow.Protocol.IsPotentialDatabaseAccessProtocol(true) && (technicalAsset.Technology == model.Database || technicalAsset.Technology == model.IdentityStoreDatabase) || + if incomingFlow.Protocol.IsPotentialDatabaseAccessProtocol(true) && (technicalAsset.Technology == types.Database || technicalAsset.Technology == types.IdentityStoreDatabase) || (incomingFlow.Protocol.IsPotentialDatabaseAccessProtocol(false)) { risks = append(risks, createRisk(input, technicalAsset, incomingFlow)) } @@ -62,13 +63,13 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i caller := input.TechnicalAssets[incomingFlow.SourceId] title := "SQL/NoSQL-Injection risk at " + 
caller.Title + " against database " + technicalAsset.Title + "" + " via " + incomingFlow.Title + "" - impact := model.MediumImpact - if technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || technicalAsset.HighestIntegrity() == model.MissionCritical { - impact = model.HighImpact + impact := types.MediumImpact + if technicalAsset.HighestConfidentiality(input) == types.StrictlyConfidential || technicalAsset.HighestIntegrity(input) == types.MissionCritical { + impact = types.HighImpact } - likelihood := model.VeryLikely - if incomingFlow.Usage == model.DevOps { - likelihood = model.Likely + likelihood := types.VeryLikely + if incomingFlow.Usage == types.DevOps { + likelihood = types.Likely } risk := model.Risk{ Category: Category(), @@ -78,7 +79,7 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i Title: title, MostRelevantTechnicalAssetId: caller.Id, MostRelevantCommunicationLinkId: incomingFlow.Id, - DataBreachProbability: model.Probable, + DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + caller.Id + "@" + technicalAsset.Id + "@" + incomingFlow.Id diff --git a/pkg/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go b/pkg/security/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go similarity index 84% rename from pkg/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go rename to pkg/security/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go index 975840b2..e57a0d91 100644 --- a/pkg/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go +++ b/pkg/security/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go @@ -1,7 +1,8 @@ package unchecked_deployment import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -28,8 +29,8 @@ 
func Category() model.RiskCategory { Mitigation: "Apply DevSecOps best-practices and use scanning tools to identify vulnerabilities in source- or byte-code," + "dependencies, container layers, and optionally also via dynamic scans against running test systems.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Architecture, - STRIDE: model.Tampering, + Function: types.Architecture, + STRIDE: types.Tampering, DetectionLogic: "All development-relevant technical assets.", RiskAssessment: "The risk rating depends on the highest rating of the technical assets and data assets processed by deployment-receiving targets.", FalsePositives: "When the build-pipeline does not build any software components it can be considered a false positive " + @@ -56,22 +57,22 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset) model.Risk { title := "Unchecked Deployment risk at " + technicalAsset.Title + "" // impact is depending on highest rating - impact := model.LowImpact + impact := types.LowImpact // data breach at all deployment targets uniqueDataBreachTechnicalAssetIDs := make(map[string]interface{}) uniqueDataBreachTechnicalAssetIDs[technicalAsset.Id] = true for _, codeDeploymentTargetCommLink := range technicalAsset.CommunicationLinks { - if codeDeploymentTargetCommLink.Usage == model.DevOps { + if codeDeploymentTargetCommLink.Usage == types.DevOps { for _, dataAssetID := range codeDeploymentTargetCommLink.DataAssetsSent { // it appears to be code when elevated integrity rating of sent data asset - if input.DataAssets[dataAssetID].Integrity >= model.Important { + if input.DataAssets[dataAssetID].Integrity >= types.Important { // here we've got a deployment target which has its data assets at risk via deployment of backdoored code uniqueDataBreachTechnicalAssetIDs[codeDeploymentTargetCommLink.TargetId] = true targetTechAsset := 
input.TechnicalAssets[codeDeploymentTargetCommLink.TargetId] - if targetTechAsset.HighestConfidentiality() >= model.Confidential || - targetTechAsset.HighestIntegrity() >= model.Critical || - targetTechAsset.HighestAvailability() >= model.Critical { - impact = model.MediumImpact + if targetTechAsset.HighestConfidentiality(input) >= types.Confidential || + targetTechAsset.HighestIntegrity(input) >= types.Critical || + targetTechAsset.HighestAvailability(input) >= types.Critical { + impact = types.MediumImpact } break } @@ -85,12 +86,12 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset) m // create risk risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, impact), - ExploitationLikelihood: model.Unlikely, + Severity: model.CalculateSeverity(types.Unlikely, impact), + ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Possible, + DataBreachProbability: types.Possible, DataBreachTechnicalAssetIDs: dataBreachTechnicalAssetIDs, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id diff --git a/pkg/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go b/pkg/security/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go similarity index 59% rename from pkg/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go rename to pkg/security/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go index 4383e531..d43b88b9 100644 --- a/pkg/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go +++ b/pkg/security/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go @@ -1,7 +1,8 @@ package unencrypted_asset import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -24,14 +25,14 @@ func Category() model.RiskCategory { Action: "Encryption of 
Technical Asset", Mitigation: "Apply encryption to the technical asset.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Operations, - STRIDE: model.InformationDisclosure, - DetectionLogic: "In-scope unencrypted technical assets (excluding " + model.ReverseProxy.String() + - ", " + model.LoadBalancer.String() + ", " + model.WAF.String() + ", " + model.IDS.String() + - ", " + model.IPS.String() + " and embedded components like " + model.Library.String() + ") " + - "storing data assets rated at least as " + model.Confidential.String() + " or " + model.Critical.String() + ". " + - "For technical assets storing data assets rated as " + model.StrictlyConfidential.String() + " or " + model.MissionCritical.String() + " the " + - "encryption must be of type " + model.DataWithEndUserIndividualKey.String() + ".", + Function: types.Operations, + STRIDE: types.InformationDisclosure, + DetectionLogic: "In-scope unencrypted technical assets (excluding " + types.ReverseProxy.String() + + ", " + types.LoadBalancer.String() + ", " + types.WAF.String() + ", " + types.IDS.String() + + ", " + types.IPS.String() + " and embedded components like " + types.Library.String() + ") " + + "storing data assets rated at least as " + types.Confidential.String() + " or " + types.Critical.String() + ". 
" + + "For technical assets storing data assets rated as " + types.StrictlyConfidential.String() + " or " + types.MissionCritical.String() + " the " + + "encryption must be of type " + types.DataWithEndUserIndividualKey.String() + ".", RiskAssessment: "Depending on the confidentiality rating of the stored data-assets either medium or high risk.", FalsePositives: "When all sensitive data stored within the asset is already fully encrypted on document or data level.", ModelFailurePossibleReason: false, @@ -47,23 +48,23 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope && !IsEncryptionWaiver(technicalAsset) && - (technicalAsset.HighestConfidentiality() >= model.Confidential || - technicalAsset.HighestIntegrity() >= model.Critical) { - verySensitive := technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || - technicalAsset.HighestIntegrity() == model.MissionCritical + (technicalAsset.HighestConfidentiality(input) >= types.Confidential || + technicalAsset.HighestIntegrity(input) >= types.Critical) { + verySensitive := technicalAsset.HighestConfidentiality(input) == types.StrictlyConfidential || + technicalAsset.HighestIntegrity(input) == types.MissionCritical requiresEndUserKey := verySensitive && technicalAsset.Technology.IsUsuallyStoringEndUserData() - if technicalAsset.Encryption == model.NoneEncryption { - impact := model.MediumImpact + if technicalAsset.Encryption == types.NoneEncryption { + impact := types.MediumImpact if verySensitive { - impact = model.HighImpact + impact = types.HighImpact } risks = append(risks, createRisk(technicalAsset, impact, requiresEndUserKey)) } else if requiresEndUserKey && - (technicalAsset.Encryption == model.Transparent || technicalAsset.Encryption == 
model.DataWithSymmetricSharedKey || technicalAsset.Encryption == model.DataWithAsymmetricSharedKey) { - risks = append(risks, createRisk(technicalAsset, model.MediumImpact, requiresEndUserKey)) + (technicalAsset.Encryption == types.Transparent || technicalAsset.Encryption == types.DataWithSymmetricSharedKey || technicalAsset.Encryption == types.DataWithAsymmetricSharedKey) { + risks = append(risks, createRisk(technicalAsset, types.MediumImpact, requiresEndUserKey)) } } } @@ -74,24 +75,24 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { // encryption requirement for the asset itself (though for the communication, but that's a different rule) func IsEncryptionWaiver(asset model.TechnicalAsset) bool { - return asset.Technology == model.ReverseProxy || asset.Technology == model.LoadBalancer || - asset.Technology == model.WAF || asset.Technology == model.IDS || asset.Technology == model.IPS || + return asset.Technology == types.ReverseProxy || asset.Technology == types.LoadBalancer || + asset.Technology == types.WAF || asset.Technology == types.IDS || asset.Technology == types.IPS || asset.Technology.IsEmbeddedComponent() } -func createRisk(technicalAsset model.TechnicalAsset, impact model.RiskExploitationImpact, requiresEndUserKey bool) model.Risk { +func createRisk(technicalAsset model.TechnicalAsset, impact types.RiskExploitationImpact, requiresEndUserKey bool) model.Risk { title := "Unencrypted Technical Asset named " + technicalAsset.Title + "" if requiresEndUserKey { - title += " missing end user individual encryption with " + model.DataWithEndUserIndividualKey.String() + title += " missing end user individual encryption with " + types.DataWithEndUserIndividualKey.String() } risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, impact), - ExploitationLikelihood: model.Unlikely, + Severity: model.CalculateSeverity(types.Unlikely, impact), + ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, 
Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id diff --git a/pkg/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go b/pkg/security/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go similarity index 83% rename from pkg/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go rename to pkg/security/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go index 33c5da63..2f66e243 100644 --- a/pkg/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go +++ b/pkg/security/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go @@ -1,7 +1,8 @@ package unencrypted_communication import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -24,9 +25,9 @@ func Category() model.RiskCategory { Action: "Encryption of Communication Links", Mitigation: "Apply transport layer encryption to the communication link.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Operations, - STRIDE: model.InformationDisclosure, - DetectionLogic: "Unencrypted technical communication links of in-scope technical assets (excluding " + model.Monitoring.String() + " traffic as well as " + model.LocalFileAccess.String() + " and " + model.InProcessLibraryCall.String() + ") " + + Function: types.Operations, + STRIDE: types.InformationDisclosure, + DetectionLogic: "Unencrypted technical communication links of in-scope technical assets (excluding " + types.Monitoring.String() + " traffic as well as " + types.LocalFileAccess.String() + " and " + 
types.InProcessLibraryCall.String() + ") " + "transferring sensitive data.", // TODO more detailed text required here RiskAssessment: "Depending on the confidentiality rating of the transferred data-assets either medium or high risk.", FalsePositives: "When all sensitive data sent over the communication link is already fully encrypted on document or data level. " + @@ -46,7 +47,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) for _, technicalAsset := range input.TechnicalAssets { for _, dataFlow := range technicalAsset.CommunicationLinks { - transferringAuthData := dataFlow.Authentication != model.NoneAuthentication + transferringAuthData := dataFlow.Authentication != types.NoneAuthentication sourceAsset := input.TechnicalAssets[dataFlow.SourceId] targetAsset := input.TechnicalAssets[dataFlow.TargetId] if !technicalAsset.OutOfScope || !sourceAsset.OutOfScope { @@ -86,9 +87,9 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { } func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, dataFlow model.CommunicationLink, highRisk bool, transferringAuthData bool) model.Risk { - impact := model.MediumImpact + impact := types.MediumImpact if highRisk { - impact = model.HighImpact + impact = types.HighImpact } target := input.TechnicalAssets[dataFlow.TargetId] title := "Unencrypted Communication named " + dataFlow.Title + " between " + technicalAsset.Title + " and " + target.Title + "" @@ -97,11 +98,11 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, d } if dataFlow.VPN { title += " (even VPN-protected connections need to encrypt their data in-transit when confidentiality is " + - "rated " + model.StrictlyConfidential.String() + " or integrity is rated " + model.MissionCritical.String() + ")" + "rated " + types.StrictlyConfidential.String() + " or integrity is rated " + types.MissionCritical.String() + ")" } - likelihood := model.Unlikely - if 
dataFlow.IsAcrossTrustBoundaryNetworkOnly() { - likelihood = model.Likely + likelihood := types.Unlikely + if dataFlow.IsAcrossTrustBoundaryNetworkOnly(input) { + likelihood = types.Likely } risk := model.Risk{ Category: Category(), @@ -111,7 +112,7 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, d Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, MostRelevantCommunicationLinkId: dataFlow.Id, - DataBreachProbability: model.Possible, + DataBreachProbability: types.Possible, DataBreachTechnicalAssetIDs: []string{target.Id}, } risk.SyntheticId = risk.Category.Id + "@" + dataFlow.Id + "@" + technicalAsset.Id + "@" + target.Id @@ -119,9 +120,9 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, d } func isHighSensitivity(dataAsset model.DataAsset) bool { - return dataAsset.Confidentiality == model.StrictlyConfidential || dataAsset.Integrity == model.MissionCritical + return dataAsset.Confidentiality == types.StrictlyConfidential || dataAsset.Integrity == types.MissionCritical } func isMediumSensitivity(dataAsset model.DataAsset) bool { - return dataAsset.Confidentiality == model.Confidential || dataAsset.Integrity == model.Critical + return dataAsset.Confidentiality == types.Confidential || dataAsset.Integrity == types.Critical } diff --git a/pkg/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go b/pkg/security/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go similarity index 60% rename from pkg/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go rename to pkg/security/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go index a156479c..5996199c 100644 --- a/pkg/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go +++ b/pkg/security/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go @@ -1,8 +1,10 
@@ package unguarded_access_from_internet import ( - "github.com/threagile/threagile/model" "sort" + + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -28,20 +30,20 @@ func Category() model.RiskCategory { "For admin maintenance a bastion-host should be used as a jump-server. " + "For file transfer a store-and-forward-host should be used as an indirect file exchange platform.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Architecture, - STRIDE: model.ElevationOfPrivilege, - DetectionLogic: "In-scope technical assets (excluding " + model.LoadBalancer.String() + ") with confidentiality rating " + - "of " + model.Confidential.String() + " (or higher) or with integrity rating of " + model.Critical.String() + " (or higher) when " + + Function: types.Architecture, + STRIDE: types.ElevationOfPrivilege, + DetectionLogic: "In-scope technical assets (excluding " + types.LoadBalancer.String() + ") with confidentiality rating " + + "of " + types.Confidential.String() + " (or higher) or with integrity rating of " + types.Critical.String() + " (or higher) when " + "accessed directly from the internet. All " + - model.WebServer.String() + ", " + model.WebApplication.String() + ", " + model.ReverseProxy.String() + ", " + model.WAF.String() + ", and " + model.Gateway.String() + " assets are exempted from this risk when " + + types.WebServer.String() + ", " + types.WebApplication.String() + ", " + types.ReverseProxy.String() + ", " + types.WAF.String() + ", and " + types.Gateway.String() + " assets are exempted from this risk when " + "they do not consist of custom developed code and " + - "the data-flow only consists of HTTP or FTP protocols. Access from " + model.Monitoring.String() + " systems " + + "the data-flow only consists of HTTP or FTP protocols. 
Access from " + types.Monitoring.String() + " systems " + "as well as VPN-protected connections are exempted.", - RiskAssessment: "The matching technical assets are at " + model.LowSeverity.String() + " risk. When either the " + - "confidentiality rating is " + model.StrictlyConfidential.String() + " or the integrity rating " + - "is " + model.MissionCritical.String() + ", the risk-rating is considered " + model.MediumSeverity.String() + ". " + + RiskAssessment: "The matching technical assets are at " + types.LowSeverity.String() + " risk. When either the " + + "confidentiality rating is " + types.StrictlyConfidential.String() + " or the integrity rating " + + "is " + types.MissionCritical.String() + ", the risk-rating is considered " + types.MediumSeverity.String() + ". " + "For assets with RAA values higher than 40 % the risk-rating increases.", - FalsePositives: "When other means of filtering client requests are applied equivalent of " + model.ReverseProxy.String() + ", " + model.WAF.String() + ", or " + model.Gateway.String() + " components.", + FalsePositives: "When other means of filtering client requests are applied equivalent of " + types.ReverseProxy.String() + ", " + types.WAF.String() + ", or " + types.Gateway.String() + " components.", ModelFailurePossibleReason: false, CWE: 501, } @@ -53,32 +55,32 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope { - commLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] + commLinks := input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] sort.Sort(model.ByTechnicalCommunicationLinkIdSort(commLinks)) for _, incomingAccess := range commLinks { - if technicalAsset.Technology != model.LoadBalancer { + if 
technicalAsset.Technology != types.LoadBalancer { if !technicalAsset.CustomDevelopedParts { - if (technicalAsset.Technology == model.WebServer || technicalAsset.Technology == model.WebApplication || technicalAsset.Technology == model.ReverseProxy || technicalAsset.Technology == model.WAF || technicalAsset.Technology == model.Gateway) && - (incomingAccess.Protocol == model.HTTP || incomingAccess.Protocol == model.HTTPS) { + if (technicalAsset.Technology == types.WebServer || technicalAsset.Technology == types.WebApplication || technicalAsset.Technology == types.ReverseProxy || technicalAsset.Technology == types.WAF || technicalAsset.Technology == types.Gateway) && + (incomingAccess.Protocol == types.HTTP || incomingAccess.Protocol == types.HTTPS) { continue } - if technicalAsset.Technology == model.Gateway && - (incomingAccess.Protocol == model.FTP || incomingAccess.Protocol == model.FTPS || incomingAccess.Protocol == model.SFTP) { + if technicalAsset.Technology == types.Gateway && + (incomingAccess.Protocol == types.FTP || incomingAccess.Protocol == types.FTPS || incomingAccess.Protocol == types.SFTP) { continue } } - if input.TechnicalAssets[incomingAccess.SourceId].Technology == model.Monitoring || + if input.TechnicalAssets[incomingAccess.SourceId].Technology == types.Monitoring || incomingAccess.VPN { continue } - if technicalAsset.Confidentiality >= model.Confidential || technicalAsset.Integrity >= model.Critical { + if technicalAsset.Confidentiality >= types.Confidential || technicalAsset.Integrity >= types.Critical { sourceAsset := input.TechnicalAssets[incomingAccess.SourceId] if sourceAsset.Internet { - highRisk := technicalAsset.Confidentiality == model.StrictlyConfidential || - technicalAsset.Integrity == model.MissionCritical + highRisk := technicalAsset.Confidentiality == types.StrictlyConfidential || + technicalAsset.Integrity == types.MissionCritical risks = append(risks, createRisk(technicalAsset, incomingAccess, 
input.TechnicalAssets[incomingAccess.SourceId], highRisk)) } @@ -92,20 +94,20 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { func createRisk(dataStore model.TechnicalAsset, dataFlow model.CommunicationLink, clientFromInternet model.TechnicalAsset, moreRisky bool) model.Risk { - impact := model.LowImpact + impact := types.LowImpact if moreRisky || dataStore.RAA > 40 { - impact = model.MediumImpact + impact = types.MediumImpact } risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.VeryLikely, impact), - ExploitationLikelihood: model.VeryLikely, + Severity: model.CalculateSeverity(types.VeryLikely, impact), + ExploitationLikelihood: types.VeryLikely, ExploitationImpact: impact, Title: "Unguarded Access from Internet of " + dataStore.Title + " by " + clientFromInternet.Title + "" + " via " + dataFlow.Title + "", MostRelevantTechnicalAssetId: dataStore.Id, MostRelevantCommunicationLinkId: dataFlow.Id, - DataBreachProbability: model.Possible, + DataBreachProbability: types.Possible, DataBreachTechnicalAssetIDs: []string{dataStore.Id}, } risk.SyntheticId = risk.Category.Id + "@" + dataStore.Id + "@" + clientFromInternet.Id + "@" + dataFlow.Id diff --git a/pkg/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go b/pkg/security/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go similarity index 52% rename from pkg/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go rename to pkg/security/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go index 1e1adcc1..fbd3cef3 100644 --- a/pkg/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go +++ b/pkg/security/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go @@ -1,7 +1,8 @@ package unguarded_direct_datastore_access import ( - 
"github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -23,14 +24,14 @@ func Category() model.RiskCategory { Action: "Encapsulation of Datastore", Mitigation: "Encapsulate the datastore access behind a guarding service or application.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Architecture, - STRIDE: model.ElevationOfPrivilege, - DetectionLogic: "In-scope technical assets of type " + model.Datastore.String() + " (except " + model.IdentityStoreLDAP.String() + " when accessed from " + model.IdentityProvider.String() + " and " + model.FileServer.String() + " when accessed via file transfer protocols) with confidentiality rating " + - "of " + model.Confidential.String() + " (or higher) or with integrity rating of " + model.Critical.String() + " (or higher) " + + Function: types.Architecture, + STRIDE: types.ElevationOfPrivilege, + DetectionLogic: "In-scope technical assets of type " + types.Datastore.String() + " (except " + types.IdentityStoreLDAP.String() + " when accessed from " + types.IdentityProvider.String() + " and " + types.FileServer.String() + " when accessed via file transfer protocols) with confidentiality rating " + + "of " + types.Confidential.String() + " (or higher) or with integrity rating of " + types.Critical.String() + " (or higher) " + "which have incoming data-flows from assets outside across a network trust-boundary. DevOps config and deployment access is excluded from this risk.", // TODO new rule "missing bastion host"? - RiskAssessment: "The matching technical assets are at " + model.LowSeverity.String() + " risk. When either the " + - "confidentiality rating is " + model.StrictlyConfidential.String() + " or the integrity rating " + - "is " + model.MissionCritical.String() + ", the risk-rating is considered " + model.MediumSeverity.String() + ". 
" + + RiskAssessment: "The matching technical assets are at " + types.LowSeverity.String() + " risk. When either the " + + "confidentiality rating is " + types.StrictlyConfidential.String() + " or the integrity rating " + + "is " + types.MissionCritical.String() + ", the risk-rating is considered " + types.MediumSeverity.String() + ". " + "For assets with RAA values higher than 40 % the risk-rating increases.", FalsePositives: "When the caller is considered fully trusted as if it was part of the datastore itself.", ModelFailurePossibleReason: false, @@ -46,20 +47,20 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] - if !technicalAsset.OutOfScope && technicalAsset.Type == model.Datastore { - for _, incomingAccess := range model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] { + if !technicalAsset.OutOfScope && technicalAsset.Type == types.Datastore { + for _, incomingAccess := range input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] { sourceAsset := input.TechnicalAssets[incomingAccess.SourceId] - if (technicalAsset.Technology == model.IdentityStoreLDAP || technicalAsset.Technology == model.IdentityStoreDatabase) && - sourceAsset.Technology == model.IdentityProvider { + if (technicalAsset.Technology == types.IdentityStoreLDAP || technicalAsset.Technology == types.IdentityStoreDatabase) && + sourceAsset.Technology == types.IdentityProvider { continue } - if technicalAsset.Confidentiality >= model.Confidential || technicalAsset.Integrity >= model.Critical { - if incomingAccess.IsAcrossTrustBoundaryNetworkOnly() && !FileServerAccessViaFTP(technicalAsset, incomingAccess) && - incomingAccess.Usage != model.DevOps && !model.IsSharingSameParentTrustBoundary(technicalAsset, sourceAsset) { - 
highRisk := technicalAsset.Confidentiality == model.StrictlyConfidential || - technicalAsset.Integrity == model.MissionCritical + if technicalAsset.Confidentiality >= types.Confidential || technicalAsset.Integrity >= types.Critical { + if incomingAccess.IsAcrossTrustBoundaryNetworkOnly(input) && !FileServerAccessViaFTP(technicalAsset, incomingAccess) && + incomingAccess.Usage != types.DevOps && !isSharingSameParentTrustBoundary(input, technicalAsset, sourceAsset) { + highRisk := technicalAsset.Confidentiality == types.StrictlyConfidential || + technicalAsset.Integrity == types.MissionCritical risks = append(risks, createRisk(technicalAsset, incomingAccess, input.TechnicalAssets[incomingAccess.SourceId], highRisk)) } @@ -70,26 +71,52 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } +func isSharingSameParentTrustBoundary(input *model.ParsedModel, left, right model.TechnicalAsset) bool { + tbIDLeft, tbIDRight := left.GetTrustBoundaryId(input), right.GetTrustBoundaryId(input) + if len(tbIDLeft) == 0 && len(tbIDRight) > 0 { + return false + } + if len(tbIDLeft) > 0 && len(tbIDRight) == 0 { + return false + } + if len(tbIDLeft) == 0 && len(tbIDRight) == 0 { + return true + } + if tbIDLeft == tbIDRight { + return true + } + tbLeft, tbRight := input.TrustBoundaries[tbIDLeft], input.TrustBoundaries[tbIDRight] + tbParentsLeft, tbParentsRight := tbLeft.AllParentTrustBoundaryIDs(input), tbRight.AllParentTrustBoundaryIDs(input) + for _, parentLeft := range tbParentsLeft { + for _, parentRight := range tbParentsRight { + if parentLeft == parentRight { + return true + } + } + } + return false +} + func FileServerAccessViaFTP(technicalAsset model.TechnicalAsset, incomingAccess model.CommunicationLink) bool { - return technicalAsset.Technology == model.FileServer && - (incomingAccess.Protocol == model.FTP || incomingAccess.Protocol == model.FTPS || incomingAccess.Protocol == model.SFTP) + return technicalAsset.Technology == types.FileServer && + 
(incomingAccess.Protocol == types.FTP || incomingAccess.Protocol == types.FTPS || incomingAccess.Protocol == types.SFTP) } func createRisk(dataStore model.TechnicalAsset, dataFlow model.CommunicationLink, clientOutsideTrustBoundary model.TechnicalAsset, moreRisky bool) model.Risk { - impact := model.LowImpact + impact := types.LowImpact if moreRisky || dataStore.RAA > 40 { - impact = model.MediumImpact + impact = types.MediumImpact } risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Likely, impact), - ExploitationLikelihood: model.Likely, + Severity: model.CalculateSeverity(types.Likely, impact), + ExploitationLikelihood: types.Likely, ExploitationImpact: impact, Title: "Unguarded Direct Datastore Access of " + dataStore.Title + " by " + clientOutsideTrustBoundary.Title + " via " + dataFlow.Title + "", MostRelevantTechnicalAssetId: dataStore.Id, MostRelevantCommunicationLinkId: dataFlow.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{dataStore.Id}, } risk.SyntheticId = risk.Category.Id + "@" + dataFlow.Id + "@" + clientOutsideTrustBoundary.Id + "@" + dataStore.Id diff --git a/pkg/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go b/pkg/security/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go similarity index 82% rename from pkg/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go rename to pkg/security/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go index 6ad9a275..2e158a87 100644 --- a/pkg/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go +++ b/pkg/security/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go @@ -1,7 +1,8 @@ package unnecessary_communication_link import ( - "github.com/threagile/threagile/model" + 
"github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -24,10 +25,10 @@ func Category() model.RiskCategory { Action: "Attack Surface Reduction", Mitigation: "Try to avoid using technical communication links that do not send or receive anything.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Architecture, - STRIDE: model.ElevationOfPrivilege, + Function: types.Architecture, + STRIDE: types.ElevationOfPrivilege, DetectionLogic: "In-scope technical assets' technical communication links not sending or receiving any data assets.", - RiskAssessment: model.LowSeverity.String(), + RiskAssessment: types.LowSeverity.String(), FalsePositives: "Usually no false positives as this looks like an incomplete model.", ModelFailurePossibleReason: true, CWE: 1008, @@ -40,7 +41,7 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] for _, commLink := range technicalAsset.CommunicationLinks { if len(commLink.DataAssetsSent) == 0 && len(commLink.DataAssetsReceived) == 0 { @@ -57,13 +58,13 @@ func createRisk(technicalAsset model.TechnicalAsset, commLink model.Communicatio title := "Unnecessary Communication Link titled " + commLink.Title + " at technical asset " + technicalAsset.Title + "" risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, model.LowImpact), - ExploitationLikelihood: model.Unlikely, - ExploitationImpact: model.LowImpact, + Severity: model.CalculateSeverity(types.Unlikely, types.LowImpact), + ExploitationLikelihood: types.Unlikely, + ExploitationImpact: types.LowImpact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, 
MostRelevantCommunicationLinkId: commLink.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + commLink.Id + "@" + technicalAsset.Id diff --git a/pkg/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go b/pkg/security/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go similarity index 87% rename from pkg/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go rename to pkg/security/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go index 627d7b41..76084df7 100644 --- a/pkg/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go +++ b/pkg/security/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go @@ -1,8 +1,10 @@ package unnecessary_data_asset import ( - "github.com/threagile/threagile/model" "sort" + + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -26,11 +28,11 @@ func Category() model.RiskCategory { Action: "Attack Surface Reduction", Mitigation: "Try to avoid having data assets that are not required/used.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Architecture, - STRIDE: model.ElevationOfPrivilege, + Function: types.Architecture, + STRIDE: types.ElevationOfPrivilege, DetectionLogic: "Modelled data assets not processed or stored by any data assets and also not transferred by any " + "communication links.", - RiskAssessment: model.LowSeverity.String(), + RiskAssessment: types.LowSeverity.String(), FalsePositives: "Usually no false positives as this looks like an incomplete model.", ModelFailurePossibleReason: true, CWE: 1008, @@ -81,12 +83,12 @@ func createRisk(input *model.ParsedModel, unusedDataAssetID string) model.Risk { title := "Unnecessary Data Asset named " + 
unusedDataAsset.Title + "" risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, model.LowImpact), - ExploitationLikelihood: model.Unlikely, - ExploitationImpact: model.LowImpact, + Severity: model.CalculateSeverity(types.Unlikely, types.LowImpact), + ExploitationLikelihood: types.Unlikely, + ExploitationImpact: types.LowImpact, Title: title, MostRelevantDataAssetId: unusedDataAsset.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{unusedDataAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + unusedDataAsset.Id diff --git a/pkg/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go b/pkg/security/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go similarity index 86% rename from pkg/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go rename to pkg/security/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go index 96929a74..a4de4a95 100644 --- a/pkg/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go +++ b/pkg/security/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go @@ -1,8 +1,10 @@ package unnecessary_data_transfer import ( - "github.com/threagile/threagile/model" "sort" + + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -27,13 +29,13 @@ func Category() model.RiskCategory { Mitigation: "Try to avoid sending or receiving sensitive data assets which are not required (i.e. 
neither " + "processed or stored) by the involved technical asset.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Architecture, - STRIDE: model.ElevationOfPrivilege, + Function: types.Architecture, + STRIDE: types.ElevationOfPrivilege, DetectionLogic: "In-scope technical assets sending or receiving sensitive data assets which are neither processed nor " + "stored by the technical asset are flagged with this risk. The risk rating (low or medium) depends on the " + "confidentiality, integrity, and availability rating of the technical asset. Monitoring data is exempted from this risk.", RiskAssessment: "The risk assessment is depending on the confidentiality and integrity rating of the transferred data asset " + - "either " + model.LowSeverity.String() + " or " + model.MediumSeverity.String() + ".", + "either " + types.LowSeverity.String() + " or " + types.MediumSeverity.String() + ".", FalsePositives: "Technical assets missing the model entries of either processing or storing the mentioned data assets " + "can be considered as false positives (incomplete models) after individual review. 
These should then be addressed by " + "completing the model so that all necessary data assets are processed and/or stored by the technical asset involved.", @@ -48,7 +50,7 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope { continue @@ -62,7 +64,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { risks = checkRisksAgainstTechnicalAsset(input, risks, technicalAsset, outgoingDataFlow, false) } // incoming data flows - commLinks := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] + commLinks := input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] sort.Sort(model.ByTechnicalCommunicationLinkIdSort(commLinks)) for _, incomingDataFlow := range commLinks { targetAsset := input.TechnicalAssets[incomingDataFlow.SourceId] @@ -81,7 +83,7 @@ func checkRisksAgainstTechnicalAsset(input *model.ParsedModel, risks []model.Ris if !technicalAsset.ProcessesOrStoresDataAsset(transferredDataAssetId) { transferredDataAsset := input.DataAssets[transferredDataAssetId] //fmt.Print("--->>> Checking "+technicalAsset.Id+": "+transferredDataAsset.Id+" sent via "+dataFlow.Id+"\n") - if transferredDataAsset.Confidentiality >= model.Confidential || transferredDataAsset.Integrity >= model.Critical { + if transferredDataAsset.Confidentiality >= types.Confidential || transferredDataAsset.Integrity >= types.Critical { commPartnerId := dataFlow.TargetId if inverseDirection { commPartnerId = dataFlow.SourceId @@ -98,7 +100,7 @@ func checkRisksAgainstTechnicalAsset(input *model.ParsedModel, risks []model.Ris if !technicalAsset.ProcessesOrStoresDataAsset(transferredDataAssetId) { transferredDataAsset := input.DataAssets[transferredDataAssetId] //fmt.Print("--->>> Checking 
"+technicalAsset.Id+": "+transferredDataAsset.Id+" received via "+dataFlow.Id+"\n") - if transferredDataAsset.Confidentiality >= model.Confidential || transferredDataAsset.Integrity >= model.Critical { + if transferredDataAsset.Confidentiality >= types.Confidential || transferredDataAsset.Integrity >= types.Critical { commPartnerId := dataFlow.TargetId if inverseDirection { commPartnerId = dataFlow.SourceId @@ -124,24 +126,24 @@ func isNewRisk(risks []model.Risk, risk model.Risk) bool { } func createRisk(technicalAsset model.TechnicalAsset, dataAssetTransferred model.DataAsset, commPartnerAsset model.TechnicalAsset) model.Risk { - moreRisky := dataAssetTransferred.Confidentiality == model.StrictlyConfidential || dataAssetTransferred.Integrity == model.MissionCritical + moreRisky := dataAssetTransferred.Confidentiality == types.StrictlyConfidential || dataAssetTransferred.Integrity == types.MissionCritical - impact := model.LowImpact + impact := types.LowImpact if moreRisky { - impact = model.MediumImpact + impact = types.MediumImpact } title := "Unnecessary Data Transfer of " + dataAssetTransferred.Title + " data at " + technicalAsset.Title + " " + "from/to " + commPartnerAsset.Title + "" risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, impact), - ExploitationLikelihood: model.Unlikely, + Severity: model.CalculateSeverity(types.Unlikely, impact), + ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, MostRelevantDataAssetId: dataAssetTransferred.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + dataAssetTransferred.Id + "@" + technicalAsset.Id + "@" + commPartnerAsset.Id diff --git a/pkg/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go 
b/pkg/security/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go similarity index 79% rename from pkg/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go rename to pkg/security/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go index ec66f500..9ee98791 100644 --- a/pkg/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go +++ b/pkg/security/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go @@ -1,7 +1,8 @@ package unnecessary_technical_asset import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -25,10 +26,10 @@ func Category() model.RiskCategory { Action: "Attack Surface Reduction", Mitigation: "Try to avoid using technical assets that do not process or store anything.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Architecture, - STRIDE: model.ElevationOfPrivilege, + Function: types.Architecture, + STRIDE: types.ElevationOfPrivilege, DetectionLogic: "Technical assets not processing or storing any data assets.", - RiskAssessment: model.LowSeverity.String(), + RiskAssessment: types.LowSeverity.String(), FalsePositives: "Usually no false positives as this looks like an incomplete model.", ModelFailurePossibleReason: true, CWE: 1008, @@ -41,10 +42,10 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if len(technicalAsset.DataAssetsProcessed) == 0 && len(technicalAsset.DataAssetsStored) == 0 || - (len(technicalAsset.CommunicationLinks) == 0 && 
len(model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id]) == 0) { + (len(technicalAsset.CommunicationLinks) == 0 && len(input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id]) == 0) { risks = append(risks, createRisk(technicalAsset)) } } @@ -55,12 +56,12 @@ func createRisk(technicalAsset model.TechnicalAsset) model.Risk { title := "Unnecessary Technical Asset named " + technicalAsset.Title + "" risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, model.LowImpact), - ExploitationLikelihood: model.Unlikely, - ExploitationImpact: model.LowImpact, + Severity: model.CalculateSeverity(types.Unlikely, types.LowImpact), + ExploitationLikelihood: types.Unlikely, + ExploitationImpact: types.LowImpact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id diff --git a/pkg/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go b/pkg/security/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go similarity index 74% rename from pkg/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go rename to pkg/security/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go index 3b9e838d..31357a78 100644 --- a/pkg/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go +++ b/pkg/security/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go @@ -1,7 +1,8 @@ package untrusted_deserialization import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -29,8 +30,8 @@ func Category() model.RiskCategory { "Alternatively a strict whitelisting approach 
of the classes/types/values to deserialize might help as well. " + "When a third-party product is used instead of custom developed software, check if the product applies the proper mitigation and ensure a reasonable patch-level.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Architecture, - STRIDE: model.Tampering, + Function: types.Architecture, + STRIDE: types.Tampering, DetectionLogic: "In-scope technical assets accepting serialization data formats (including EJB and RMI protocols).", RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", FalsePositives: "Fully trusted (i.e. cryptographically signed or similar) data deserialized can be considered " + @@ -46,7 +47,7 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope { continue @@ -54,43 +55,43 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { hasOne, acrossTrustBoundary := false, false commLinkTitle := "" for _, format := range technicalAsset.DataFormatsAccepted { - if format == model.Serialization { + if format == types.Serialization { hasOne = true } } - if technicalAsset.Technology == model.EJB { + if technicalAsset.Technology == types.EJB { hasOne = true } // check for any incoming IIOP and JRMP protocols - for _, commLink := range model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] { - if commLink.Protocol == model.IIOP || commLink.Protocol == model.IiopEncrypted || - commLink.Protocol == model.JRMP || commLink.Protocol == model.JrmpEncrypted { + for _, commLink := range input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] { + if commLink.Protocol 
== types.IIOP || commLink.Protocol == types.IiopEncrypted || + commLink.Protocol == types.JRMP || commLink.Protocol == types.JrmpEncrypted { hasOne = true - if commLink.IsAcrossTrustBoundaryNetworkOnly() { + if commLink.IsAcrossTrustBoundaryNetworkOnly(input) { acrossTrustBoundary = true commLinkTitle = commLink.Title } } } if hasOne { - risks = append(risks, createRisk(technicalAsset, acrossTrustBoundary, commLinkTitle)) + risks = append(risks, createRisk(input, technicalAsset, acrossTrustBoundary, commLinkTitle)) } } return risks } -func createRisk(technicalAsset model.TechnicalAsset, acrossTrustBoundary bool, commLinkTitle string) model.Risk { +func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAsset, acrossTrustBoundary bool, commLinkTitle string) model.Risk { title := "Untrusted Deserialization risk at " + technicalAsset.Title + "" - impact := model.HighImpact - likelihood := model.Likely + impact := types.HighImpact + likelihood := types.Likely if acrossTrustBoundary { - likelihood = model.VeryLikely + likelihood = types.VeryLikely title += " across a trust boundary (at least via communication link " + commLinkTitle + ")" } - if technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || - technicalAsset.HighestIntegrity() == model.MissionCritical || - technicalAsset.HighestAvailability() == model.MissionCritical { - impact = model.VeryHighImpact + if technicalAsset.HighestConfidentiality(parsedModel) == types.StrictlyConfidential || + technicalAsset.HighestIntegrity(parsedModel) == types.MissionCritical || + technicalAsset.HighestAvailability(parsedModel) == types.MissionCritical { + impact = types.VeryHighImpact } risk := model.Risk{ Category: Category(), @@ -99,7 +100,7 @@ func createRisk(technicalAsset model.TechnicalAsset, acrossTrustBoundary bool, c ExploitationImpact: impact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Probable, + DataBreachProbability: 
types.Probable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id diff --git a/pkg/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go b/pkg/security/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go similarity index 74% rename from pkg/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go rename to pkg/security/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go index 47f0007a..dabd4219 100644 --- a/pkg/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go +++ b/pkg/security/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go @@ -1,7 +1,8 @@ package wrong_communication_link_content import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -25,10 +26,10 @@ func Category() model.RiskCategory { Mitigation: "Try to model the correct readonly flag and/or data sent/received of communication links. 
" + "Also try to use communication link types matching the target technology/machine types.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Architecture, - STRIDE: model.InformationDisclosure, + Function: types.Architecture, + STRIDE: types.InformationDisclosure, DetectionLogic: "Communication links with inconsistent data assets being sent/received not matching their readonly flag or otherwise inconsistent protocols not matching the target technology type.", - RiskAssessment: model.LowSeverity.String(), + RiskAssessment: types.LowSeverity.String(), FalsePositives: "Usually no false positives as this looks like an incomplete model.", ModelFailurePossibleReason: true, CWE: 1008, @@ -57,17 +58,17 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { } // check for protocol inconsistencies targetAsset := input.TechnicalAssets[commLink.TargetId] - if commLink.Protocol == model.InProcessLibraryCall && targetAsset.Technology != model.Library { + if commLink.Protocol == types.InProcessLibraryCall && targetAsset.Technology != types.Library { risks = append(risks, createRisk(techAsset, commLink, - "(protocol type \""+model.InProcessLibraryCall.String()+"\" does not match target technology type \""+targetAsset.Technology.String()+"\": expected \""+model.Library.String()+"\")")) + "(protocol type \""+types.InProcessLibraryCall.String()+"\" does not match target technology type \""+targetAsset.Technology.String()+"\": expected \""+types.Library.String()+"\")")) } - if commLink.Protocol == model.LocalFileAccess && targetAsset.Technology != model.LocalFileSystem { + if commLink.Protocol == types.LocalFileAccess && targetAsset.Technology != types.LocalFileSystem { risks = append(risks, createRisk(techAsset, commLink, - "(protocol type \""+model.LocalFileAccess.String()+"\" does not match target technology type \""+targetAsset.Technology.String()+"\": expected \""+model.LocalFileSystem.String()+"\")")) + 
"(protocol type \""+types.LocalFileAccess.String()+"\" does not match target technology type \""+targetAsset.Technology.String()+"\": expected \""+types.LocalFileSystem.String()+"\")")) } - if commLink.Protocol == model.ContainerSpawning && targetAsset.Machine != model.Container { + if commLink.Protocol == types.ContainerSpawning && targetAsset.Machine != types.Container { risks = append(risks, createRisk(techAsset, commLink, - "(protocol type \""+model.ContainerSpawning.String()+"\" does not match target machine type \""+targetAsset.Machine.String()+"\": expected \""+model.Container.String()+"\")")) + "(protocol type \""+types.ContainerSpawning.String()+"\" does not match target machine type \""+targetAsset.Machine.String()+"\": expected \""+types.Container.String()+"\")")) } } } @@ -79,13 +80,13 @@ func createRisk(technicalAsset model.TechnicalAsset, commLink model.Communicatio "regarding communication link " + commLink.Title + "" risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, model.LowImpact), - ExploitationLikelihood: model.Unlikely, - ExploitationImpact: model.LowImpact, + Severity: model.CalculateSeverity(types.Unlikely, types.LowImpact), + ExploitationLikelihood: types.Unlikely, + ExploitationImpact: types.LowImpact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, MostRelevantCommunicationLinkId: commLink.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{}, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + "@" + commLink.Id diff --git a/pkg/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go b/pkg/security/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go similarity index 76% rename from pkg/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go rename to 
pkg/security/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go index 42af4d18..2f7b3d76 100644 --- a/pkg/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go +++ b/pkg/security/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go @@ -1,7 +1,8 @@ package wrong_trust_boundary_content import ( - "github.com/threagile/threagile/model" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -16,7 +17,7 @@ func Category() model.RiskCategory { return model.RiskCategory{ Id: "wrong-trust-boundary-content", Title: "Wrong Trust Boundary Content", - Description: "When a trust boundary of type " + model.NetworkPolicyNamespaceIsolation.String() + " contains " + + Description: "When a trust boundary of type " + types.NetworkPolicyNamespaceIsolation.String() + " contains " + "non-container assets it is likely to be a model failure.", Impact: "If this potential model error is not fixed, some risks might not be visible.", ASVS: "V1 - Architecture, Design and Threat Modeling Requirements", @@ -24,10 +25,10 @@ func Category() model.RiskCategory { Action: "Model Consistency", Mitigation: "Try to model the correct types of trust boundaries and data assets.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Architecture, - STRIDE: model.ElevationOfPrivilege, + Function: types.Architecture, + STRIDE: types.ElevationOfPrivilege, DetectionLogic: "Trust boundaries which should only contain containers, but have different assets inside.", - RiskAssessment: model.LowSeverity.String(), + RiskAssessment: types.LowSeverity.String(), FalsePositives: "Usually no false positives as this looks like an incomplete model.", ModelFailurePossibleReason: true, CWE: 1008, @@ -41,10 +42,10 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := 
make([]model.Risk, 0) for _, trustBoundary := range input.TrustBoundaries { - if trustBoundary.Type == model.NetworkPolicyNamespaceIsolation { + if trustBoundary.Type == types.NetworkPolicyNamespaceIsolation { for _, techAssetID := range trustBoundary.TechnicalAssetsInside { techAsset := input.TechnicalAssets[techAssetID] - if techAsset.Machine != model.Container && techAsset.Machine != model.Serverless { + if techAsset.Machine != types.Container && techAsset.Machine != types.Serverless { risks = append(risks, createRisk(techAsset)) } } @@ -57,12 +58,12 @@ func createRisk(technicalAsset model.TechnicalAsset) model.Risk { title := "Wrong Trust Boundary Content (non-container asset inside container trust boundary) at " + technicalAsset.Title + "" risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.Unlikely, model.LowImpact), - ExploitationLikelihood: model.Unlikely, - ExploitationImpact: model.LowImpact, + Severity: model.CalculateSeverity(types.Unlikely, types.LowImpact), + ExploitationLikelihood: types.Unlikely, + ExploitationImpact: types.LowImpact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Improbable, + DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id diff --git a/pkg/risks/built-in/xml-external-entity/xml-external-entity-rule.go b/pkg/security/risks/built-in/xml-external-entity/xml-external-entity-rule.go similarity index 76% rename from pkg/risks/built-in/xml-external-entity/xml-external-entity-rule.go rename to pkg/security/risks/built-in/xml-external-entity/xml-external-entity-rule.go index daca3f8e..1dbee6ac 100644 --- a/pkg/risks/built-in/xml-external-entity/xml-external-entity-rule.go +++ b/pkg/security/risks/built-in/xml-external-entity/xml-external-entity-rule.go @@ -1,7 +1,8 @@ package xml_external_entity import ( - "github.com/threagile/threagile/model" + 
"github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) func Rule() model.CustomRiskRule { @@ -26,8 +27,8 @@ func Category() model.RiskCategory { Mitigation: "Apply hardening of all XML parser instances in order to stay safe from XML External Entity (XXE) vulnerabilities. " + "When a third-party product is used instead of custom developed software, check if the product applies the proper mitigation and ensure a reasonable patch-level.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", - Function: model.Development, - STRIDE: model.InformationDisclosure, + Function: types.Development, + STRIDE: types.InformationDisclosure, DetectionLogic: "In-scope technical assets accepting XML data formats.", RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored. " + "Also for cloud-based environments the exploitation impact is at least medium, as cloud backend services can be attacked via SSRF (and XXE vulnerabilities are often also SSRF vulnerabilities).", @@ -44,36 +45,36 @@ func SupportedTags() []string { func GenerateRisks(input *model.ParsedModel) []model.Risk { risks := make([]model.Risk, 0) - for _, id := range model.SortedTechnicalAssetIDs() { + for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope { continue } for _, format := range technicalAsset.DataFormatsAccepted { - if format == model.XML { - risks = append(risks, createRisk(technicalAsset)) + if format == types.XML { + risks = append(risks, createRisk(input, technicalAsset)) } } } return risks } -func createRisk(technicalAsset model.TechnicalAsset) model.Risk { +func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAsset) model.Risk { title := "XML External Entity (XXE) risk at " + technicalAsset.Title + "" - impact := model.MediumImpact - if 
technicalAsset.HighestConfidentiality() == model.StrictlyConfidential || - technicalAsset.HighestIntegrity() == model.MissionCritical || - technicalAsset.HighestAvailability() == model.MissionCritical { - impact = model.HighImpact + impact := types.MediumImpact + if technicalAsset.HighestConfidentiality(parsedModel) == types.StrictlyConfidential || + technicalAsset.HighestIntegrity(parsedModel) == types.MissionCritical || + technicalAsset.HighestAvailability(parsedModel) == types.MissionCritical { + impact = types.HighImpact } risk := model.Risk{ Category: Category(), - Severity: model.CalculateSeverity(model.VeryLikely, impact), - ExploitationLikelihood: model.VeryLikely, + Severity: model.CalculateSeverity(types.VeryLikely, impact), + ExploitationLikelihood: types.VeryLikely, ExploitationImpact: impact, Title: title, MostRelevantTechnicalAssetId: technicalAsset.Id, - DataBreachProbability: model.Probable, + DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, // TODO: use the same logic here as for SSRF rule, as XXE is also SSRF ;) } risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id diff --git a/pkg/security/risks/rules.go b/pkg/security/risks/rules.go new file mode 100644 index 00000000..1177a6d6 --- /dev/null +++ b/pkg/security/risks/rules.go @@ -0,0 +1,134 @@ +/* +Copyright © 2023 NAME HERE +*/ +package risks + +import ( + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/run" + + accidentalsecretleak "github.com/threagile/threagile/pkg/security/risks/built-in/accidental-secret-leak" + codebackdooring "github.com/threagile/threagile/pkg/security/risks/built-in/code-backdooring" + containerbaseimagebackdooring "github.com/threagile/threagile/pkg/security/risks/built-in/container-baseimage-backdooring" + containerplatformescape "github.com/threagile/threagile/pkg/security/risks/built-in/container-platform-escape" + crosssiterequestforgery 
"github.com/threagile/threagile/pkg/security/risks/built-in/cross-site-request-forgery" + crosssitescripting "github.com/threagile/threagile/pkg/security/risks/built-in/cross-site-scripting" + dosriskyaccessacrosstrustboundary "github.com/threagile/threagile/pkg/security/risks/built-in/dos-risky-access-across-trust-boundary" + incompletemodel "github.com/threagile/threagile/pkg/security/risks/built-in/incomplete-model" + ldapinjection "github.com/threagile/threagile/pkg/security/risks/built-in/ldap-injection" + missingauthentication "github.com/threagile/threagile/pkg/security/risks/built-in/missing-authentication" + missingauthenticationsecondfactor "github.com/threagile/threagile/pkg/security/risks/built-in/missing-authentication-second-factor" + missingbuildinfrastructure "github.com/threagile/threagile/pkg/security/risks/built-in/missing-build-infrastructure" + missingcloudhardening "github.com/threagile/threagile/pkg/security/risks/built-in/missing-cloud-hardening" + missingfilevalidation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-file-validation" + missinghardening "github.com/threagile/threagile/pkg/security/risks/built-in/missing-hardening" + missingidentitypropagation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-identity-propagation" + missingidentityproviderisolation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-identity-provider-isolation" + missingidentitystore "github.com/threagile/threagile/pkg/security/risks/built-in/missing-identity-store" + missingnetworksegmentation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-network-segmentation" + missingvault "github.com/threagile/threagile/pkg/security/risks/built-in/missing-vault" + missingvaultisolation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-vault-isolation" + missingwaf "github.com/threagile/threagile/pkg/security/risks/built-in/missing-waf" + mixedtargetsonsharedruntime 
"github.com/threagile/threagile/pkg/security/risks/built-in/mixed-targets-on-shared-runtime" + pathtraversal "github.com/threagile/threagile/pkg/security/risks/built-in/path-traversal" + pushinsteadofpulldeployment "github.com/threagile/threagile/pkg/security/risks/built-in/push-instead-of-pull-deployment" + searchqueryinjection "github.com/threagile/threagile/pkg/security/risks/built-in/search-query-injection" + serversiderequestforgery "github.com/threagile/threagile/pkg/security/risks/built-in/server-side-request-forgery" + serviceregistrypoisoning "github.com/threagile/threagile/pkg/security/risks/built-in/service-registry-poisoning" + sqlnosqlinjection "github.com/threagile/threagile/pkg/security/risks/built-in/sql-nosql-injection" + uncheckeddeployment "github.com/threagile/threagile/pkg/security/risks/built-in/unchecked-deployment" + unencryptedasset "github.com/threagile/threagile/pkg/security/risks/built-in/unencrypted-asset" + unencryptedcommunication "github.com/threagile/threagile/pkg/security/risks/built-in/unencrypted-communication" + unguardedaccessfrominternet "github.com/threagile/threagile/pkg/security/risks/built-in/unguarded-access-from-internet" + unguardeddirectdatastoreaccess "github.com/threagile/threagile/pkg/security/risks/built-in/unguarded-direct-datastore-access" + unnecessarycommunicationlink "github.com/threagile/threagile/pkg/security/risks/built-in/unnecessary-communication-link" + unnecessarydataasset "github.com/threagile/threagile/pkg/security/risks/built-in/unnecessary-data-asset" + unnecessarydatatransfer "github.com/threagile/threagile/pkg/security/risks/built-in/unnecessary-data-transfer" + unnecessarytechnicalasset "github.com/threagile/threagile/pkg/security/risks/built-in/unnecessary-technical-asset" + untrusteddeserialization "github.com/threagile/threagile/pkg/security/risks/built-in/untrusted-deserialization" + wrongcommunicationlinkcontent 
"github.com/threagile/threagile/pkg/security/risks/built-in/wrong-communication-link-content" + wrongtrustboundarycontent "github.com/threagile/threagile/pkg/security/risks/built-in/wrong-trust-boundary-content" + xmlexternalentity "github.com/threagile/threagile/pkg/security/risks/built-in/xml-external-entity" +) + +type progressReporter interface { + Println(a ...any) (n int, err error) + Fatalf(format string, v ...any) +} + +func LoadCustomRiskRules(pluginFiles []string, reporter progressReporter) map[string]*model.CustomRisk { + customRiskRules := make(map[string]*model.CustomRisk) + if len(pluginFiles) > 0 { + reporter.Println("Loading custom risk rules:", pluginFiles) + + for _, pluginFile := range pluginFiles { + if len(pluginFile) > 0 { + runner, loadError := new(run.Runner).Load(pluginFile) + if loadError != nil { + reporter.Fatalf("WARNING: Custom risk rule %q not loaded: %v\n", pluginFile, loadError) + } + + risk := new(model.CustomRisk) + runError := runner.Run(nil, &risk, "-get-info") + if runError != nil { + reporter.Fatalf("WARNING: Failed to get ID for custom risk rule %q: %v\n", pluginFile, runError) + } + + risk.Runner = runner + customRiskRules[risk.ID] = risk + reporter.Println("Custom risk rule loaded:", risk.ID) + } + } + + reporter.Println("Loaded custom risk rules:", customRiskRules) + } + + return customRiskRules +} + +func GetBuiltInRiskRules() []model.CustomRiskRule { + return []model.CustomRiskRule{ + accidentalsecretleak.Rule(), + codebackdooring.Rule(), + containerbaseimagebackdooring.Rule(), + containerplatformescape.Rule(), + crosssiterequestforgery.Rule(), + crosssitescripting.Rule(), + dosriskyaccessacrosstrustboundary.Rule(), + incompletemodel.Rule(), + ldapinjection.Rule(), + missingauthentication.Rule(), + missingauthenticationsecondfactor.Rule(), + missingbuildinfrastructure.Rule(), + missingcloudhardening.Rule(), + missingfilevalidation.Rule(), + missinghardening.Rule(), + missingidentitypropagation.Rule(), + 
missingidentityproviderisolation.Rule(), + missingidentitystore.Rule(), + missingnetworksegmentation.Rule(), + missingvault.Rule(), + missingvaultisolation.Rule(), + missingwaf.Rule(), + mixedtargetsonsharedruntime.Rule(), + pathtraversal.Rule(), + pushinsteadofpulldeployment.Rule(), + searchqueryinjection.Rule(), + serversiderequestforgery.Rule(), + serviceregistrypoisoning.Rule(), + sqlnosqlinjection.Rule(), + uncheckeddeployment.Rule(), + unencryptedasset.Rule(), + unencryptedcommunication.Rule(), + unguardedaccessfrominternet.Rule(), + unguardeddirectdatastoreaccess.Rule(), + unnecessarycommunicationlink.Rule(), + unnecessarydataasset.Rule(), + unnecessarydatatransfer.Rule(), + unnecessarytechnicalasset.Rule(), + untrusteddeserialization.Rule(), + wrongcommunicationlinkcontent.Rule(), + wrongtrustboundarycontent.Rule(), + xmlexternalentity.Rule(), + } +} diff --git a/pkg/security/types/authentication.go b/pkg/security/types/authentication.go new file mode 100644 index 00000000..04157129 --- /dev/null +++ b/pkg/security/types/authentication.go @@ -0,0 +1,48 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +type Authentication int + +const ( + NoneAuthentication Authentication = iota + Credentials + SessionId + Token + ClientCertificate + TwoFactor + Externalized +) + +func AuthenticationValues() []TypeEnum { + return []TypeEnum{ + NoneAuthentication, + Credentials, + SessionId, + Token, + ClientCertificate, + TwoFactor, + Externalized, + } +} + +var AuthenticationTypeDescription = [...]TypeDescription{ + {"none", "No authentication"}, + {"credentials", "Username and password, pin or passphrase"}, + {"session-id", "A server generated session id with limited life span"}, + {"token", "A server generated token. 
Containing session id, other data and is cryptographically signed"}, + {"client-certificate", "A certificate file stored on the client identifying this specific client"}, + {"two-factor", "Credentials plus another factor like a physical object (card) or biometrics"}, + {"externalized", "Some external company handles authentication"}, +} + +func (what Authentication) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + //return [...]string{"none", "credentials", "session-id", "token", "client-certificate", "two-factor", "externalized"}[what] + return AuthenticationTypeDescription[what].Name +} + +func (what Authentication) Explain() string { + return AuthenticationTypeDescription[what].Description +} diff --git a/pkg/security/types/authorization.go b/pkg/security/types/authorization.go new file mode 100644 index 00000000..2ccc467d --- /dev/null +++ b/pkg/security/types/authorization.go @@ -0,0 +1,35 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +type Authorization int + +const ( + NoneAuthorization Authorization = iota + TechnicalUser + EndUserIdentityPropagation +) + +func AuthorizationValues() []TypeEnum { + return []TypeEnum{ + NoneAuthorization, + TechnicalUser, + EndUserIdentityPropagation, + } +} + +var AuthorizationTypeDescription = [...]TypeDescription{ + {"none", "No authorization"}, + {"technical-user", "Technical user (service-to-service) like DB user credentials"}, + {"enduser-identity-propagation", "Identity of end user propagates to this service"}, +} + +func (what Authorization) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + return AuthorizationTypeDescription[what].Name +} + +func (what Authorization) Explain() string { + return AuthorizationTypeDescription[what].Description +} diff --git a/pkg/security/types/confidentiality.go b/pkg/security/types/confidentiality.go new file mode 100644 index 00000000..2230c0b8 --- /dev/null +++ 
b/pkg/security/types/confidentiality.go @@ -0,0 +1,90 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "strings" +) + +type Confidentiality int + +const ( + Public Confidentiality = iota + Internal + Restricted + Confidential + StrictlyConfidential +) + +func ConfidentialityValues() []TypeEnum { + return []TypeEnum{ + Public, + Internal, + Restricted, + Confidential, + StrictlyConfidential, + } +} + +func ParseConfidentiality(value string) (confidentiality Confidentiality, err error) { + value = strings.TrimSpace(value) + for _, candidate := range ConfidentialityValues() { + if candidate.String() == value { + return candidate.(Confidentiality), err + } + } + return confidentiality, errors.New("Unable to parse into type: " + value) +} + +var ConfidentialityTypeDescription = [...]TypeDescription{ + {"public", "Public available information"}, + {"internal", "(Company) internal information - but all people in the institution can access it"}, + {"restricted", "Internal and with restricted access"}, + {"confidential", "Only a few selected people have access"}, + {"strictly-confidential", "Highest secrecy level"}, +} + +func (what Confidentiality) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + return ConfidentialityTypeDescription[what].Name +} + +func (what Confidentiality) Explain() string { + return ConfidentialityTypeDescription[what].Description +} + +func (what Confidentiality) AttackerAttractivenessForAsset() float64 { + // fibonacci starting at 8 + return [...]float64{8, 13, 21, 34, 55}[what] +} +func (what Confidentiality) AttackerAttractivenessForProcessedOrStoredData() float64 { + // fibonacci starting at 5 + return [...]float64{5, 8, 13, 21, 34}[what] +} +func (what Confidentiality) AttackerAttractivenessForInOutTransferredData() float64 { + // fibonacci starting at 2 + return [...]float64{2, 3, 5, 8, 13}[what] +} + +func (what Confidentiality) RatingStringInScale() string { + result := 
"(rated " + if what == Public { + result += "1" + } + if what == Internal { + result += "2" + } + if what == Restricted { + result += "3" + } + if what == Confidential { + result += "4" + } + if what == StrictlyConfidential { + result += "5" + } + result += " in scale of 5)" + return result +} diff --git a/pkg/security/types/criticality.go b/pkg/security/types/criticality.go new file mode 100644 index 00000000..778ac665 --- /dev/null +++ b/pkg/security/types/criticality.go @@ -0,0 +1,90 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "strings" +) + +type Criticality int + +const ( + Archive Criticality = iota + Operational + Important + Critical + MissionCritical +) + +func CriticalityValues() []TypeEnum { + return []TypeEnum{ + Archive, + Operational, + Important, + Critical, + MissionCritical, + } +} + +func ParseCriticality(value string) (criticality Criticality, err error) { + value = strings.TrimSpace(value) + for _, candidate := range CriticalityValues() { + if candidate.String() == value { + return candidate.(Criticality), err + } + } + return criticality, errors.New("Unable to parse into type: " + value) +} + +var CriticalityTypeDescription = [...]TypeDescription{ + {"archive", "Stored, not active"}, + {"operational", "If this fails, people will just have an ad-hoc coffee break until it is back"}, + {"important", "Issues here results in angry people"}, + {"critical", "Failure is really expensive or crippling"}, + {"mission-critical", "This must not fail"}, +} + +func (what Criticality) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + return CriticalityTypeDescription[what].Name +} + +func (what Criticality) Explain() string { + return CriticalityTypeDescription[what].Description +} + +func (what Criticality) AttackerAttractivenessForAsset() float64 { + // fibonacci starting at 5 + return [...]float64{5, 8, 13, 21, 34}[what] +} +func (what Criticality) 
AttackerAttractivenessForProcessedOrStoredData() float64 { + // fibonacci starting at 3 + return [...]float64{3, 5, 8, 13, 21}[what] +} +func (what Criticality) AttackerAttractivenessForInOutTransferredData() float64 { + // fibonacci starting at 2 + return [...]float64{2, 3, 5, 8, 13}[what] +} + +func (what Criticality) RatingStringInScale() string { + result := "(rated " + if what == Archive { + result += "1" + } + if what == Operational { + result += "2" + } + if what == Important { + result += "3" + } + if what == Critical { + result += "4" + } + if what == MissionCritical { + result += "5" + } + result += " in scale of 5)" + return result +} diff --git a/pkg/security/types/data_breach_probability.go b/pkg/security/types/data_breach_probability.go new file mode 100644 index 00000000..b972c9f0 --- /dev/null +++ b/pkg/security/types/data_breach_probability.go @@ -0,0 +1,45 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import "encoding/json" + +type DataBreachProbability int + +const ( + Improbable DataBreachProbability = iota + Possible + Probable +) + +func DataBreachProbabilityValues() []TypeEnum { + return []TypeEnum{ + Improbable, + Possible, + Probable, + } +} + +var DataBreachProbabilityTypeDescription = [...]TypeDescription{ + {"improbable", "Improbable"}, + {"possible", "Possible"}, + {"probable", "Probable"}, +} + +func (what DataBreachProbability) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + return DataBreachProbabilityTypeDescription[what].Name +} + +func (what DataBreachProbability) Explain() string { + return DataBreachProbabilityTypeDescription[what].Description +} + +func (what DataBreachProbability) Title() string { + return [...]string{"Improbable", "Possible", "Probable"}[what] +} + +func (what DataBreachProbability) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} diff --git a/pkg/security/types/data_format.go b/pkg/security/types/data_format.go new file mode 100644 
index 00000000..0e9dd618 --- /dev/null +++ b/pkg/security/types/data_format.go @@ -0,0 +1,58 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +type DataFormat int + +const ( + JSON DataFormat = iota + XML + Serialization + File + CSV +) + +func DataFormatValues() []TypeEnum { + return []TypeEnum{ + JSON, + XML, + Serialization, + File, + CSV, + } +} + +var DataFormatTypeDescription = [...]TypeDescription{ + {"json", "JSON"}, + {"xml", "XML"}, + {"serialization", "Serialized program objects"}, + {"file", "Specific file types for data"}, + {"csv", "CSV"}, +} + +func (what DataFormat) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + return DataFormatTypeDescription[what].Name +} + +func (what DataFormat) Explain() string { + return DataFormatTypeDescription[what].Description +} + +func (what DataFormat) Title() string { + return [...]string{"JSON", "XML", "Serialization", "File", "CSV"}[what] +} + +func (what DataFormat) Description() string { + return [...]string{"JSON marshalled object data", "XML structured data", "Serialization-based object graphs", + "File input/uploads", "CSV tabular data"}[what] +} + +type ByDataFormatAcceptedSort []DataFormat + +func (what ByDataFormatAcceptedSort) Len() int { return len(what) } +func (what ByDataFormatAcceptedSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } +func (what ByDataFormatAcceptedSort) Less(i, j int) bool { + return what[i].String() < what[j].String() +} diff --git a/pkg/security/types/encryption_style.go b/pkg/security/types/encryption_style.go new file mode 100644 index 00000000..87957684 --- /dev/null +++ b/pkg/security/types/encryption_style.go @@ -0,0 +1,60 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "strings" +) + +type EncryptionStyle int + +const ( + NoneEncryption EncryptionStyle = iota + Transparent + DataWithSymmetricSharedKey + DataWithAsymmetricSharedKey + DataWithEndUserIndividualKey +) + +func 
EncryptionStyleValues() []TypeEnum { + return []TypeEnum{ + NoneEncryption, + Transparent, + DataWithSymmetricSharedKey, + DataWithAsymmetricSharedKey, + DataWithEndUserIndividualKey, + } +} + +func ParseEncryptionStyle(value string) (encryptionStyle EncryptionStyle, err error) { + value = strings.TrimSpace(value) + for _, candidate := range EncryptionStyleValues() { + if candidate.String() == value { + return candidate.(EncryptionStyle), err + } + } + return encryptionStyle, errors.New("Unable to parse into type: " + value) +} + +var EncryptionStyleTypeDescription = [...]TypeDescription{ + {"none", "No encryption"}, + {"transparent", "Encrypted data at rest"}, + {"data-with-symmetric-shared-key", "Both communication partners have the same key. This must be kept secret"}, + {"data-with-asymmetric-shared-key", "The key is split into public and private. Those two are shared between partners"}, + {"data-with-enduser-individual-key", "The key is (managed) by the end user"}, +} + +func (what EncryptionStyle) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + return EncryptionStyleTypeDescription[what].Name +} + +func (what EncryptionStyle) Explain() string { + return EncryptionStyleTypeDescription[what].Description +} + +func (what EncryptionStyle) Title() string { + return [...]string{"None", "Transparent", "Data with Symmetric Shared Key", "Data with Asymmetric Shared Key", "Data with End-User Individual Key"}[what] +} diff --git a/pkg/security/types/protocol.go b/pkg/security/types/protocol.go new file mode 100644 index 00000000..0b02d36d --- /dev/null +++ b/pkg/security/types/protocol.go @@ -0,0 +1,192 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +type Protocol int + +const ( + UnknownProtocol Protocol = iota + HTTP + HTTPS + WS + WSS + ReverseProxyWebProtocol + ReverseProxyWebProtocolEncrypted + MQTT + JDBC + JdbcEncrypted + ODBC + OdbcEncrypted + SqlAccessProtocol + SqlAccessProtocolEncrypted + NosqlAccessProtocol + 
NosqlAccessProtocolEncrypted + BINARY + BinaryEncrypted + TEXT + TextEncrypted + SSH + SshTunnel + SMTP + SmtpEncrypted + POP3 + Pop3Encrypted + IMAP + ImapEncrypted + FTP + FTPS + SFTP + SCP + LDAP + LDAPS + JMS + NFS + SMB + SmbEncrypted + LocalFileAccess + NRPE + XMPP + IIOP + IiopEncrypted + JRMP + JrmpEncrypted + InProcessLibraryCall + ContainerSpawning +) + +func ProtocolValues() []TypeEnum { + return []TypeEnum{ + UnknownProtocol, + HTTP, + HTTPS, + WS, + WSS, + ReverseProxyWebProtocol, + ReverseProxyWebProtocolEncrypted, + MQTT, + JDBC, + JdbcEncrypted, + ODBC, + OdbcEncrypted, + SqlAccessProtocol, + SqlAccessProtocolEncrypted, + NosqlAccessProtocol, + NosqlAccessProtocolEncrypted, + BINARY, + BinaryEncrypted, + TEXT, + TextEncrypted, + SSH, + SshTunnel, + SMTP, + SmtpEncrypted, + POP3, + Pop3Encrypted, + IMAP, + ImapEncrypted, + FTP, + FTPS, + SFTP, + SCP, + LDAP, + LDAPS, + JMS, + NFS, + SMB, + SmbEncrypted, + LocalFileAccess, + NRPE, + XMPP, + IIOP, + IiopEncrypted, + JRMP, + JrmpEncrypted, + InProcessLibraryCall, + ContainerSpawning, + } +} + +var ProtocolTypeDescription = [...]TypeDescription{ + {"unknown-protocol", "Unknown protocol"}, + {"http", "HTTP protocol"}, + {"https", "HTTPS protocol (encrypted)"}, + {"ws", "WebSocket"}, + {"wss", "WebSocket but encrypted"}, + {"reverse-proxy-web-protocol", "Protocols used by reverse proxies"}, + {"reverse-proxy-web-protocol-encrypted", "Protocols used by reverse proxies but encrypted"}, + {"mqtt", "MQTT Message protocol. 
Encryption via TLS is optional"}, + {"jdbc", "Java Database Connectivity"}, + {"jdbc-encrypted", "Java Database Connectivity but encrypted"}, + {"odbc", "Open Database Connectivity"}, + {"odbc-encrypted", "Open Database Connectivity but encrypted"}, + {"sql-access-protocol", "SQL access protocol"}, + {"sql-access-protocol-encrypted", "SQL access protocol but encrypted"}, + {"nosql-access-protocol", "NOSQL access protocol"}, + {"nosql-access-protocol-encrypted", "NOSQL access protocol but encrypted"}, + {"binary", "Some other binary protocol"}, + {"binary-encrypted", "Some other binary protocol, encrypted"}, + {"text", "Some other text protocol"}, + {"text-encrypted", "Some other text protocol, encrypted"}, + {"ssh", "Secure Shell to execute commands"}, + {"ssh-tunnel", "Secure Shell as a tunnel"}, + {"smtp", "Mail transfer protocol (sending)"}, + {"smtp-encrypted", "Mail transfer protocol (sending), encrypted"}, + {"pop3", "POP 3 mail fetching"}, + {"pop3-encrypted", "POP 3 mail fetching, encrypted"}, + {"imap", "IMAP mail sync protocol"}, + {"imap-encrypted", "IMAP mail sync protocol, encrypted"}, + {"ftp", "File Transfer Protocol"}, + {"ftps", "FTP with TLS"}, + {"sftp", "FTP on SSH"}, + {"scp", "Secure Shell to copy files"}, + {"ldap", "Lightweight Directory Access Protocol - User directories"}, + {"ldaps", "Lightweight Directory Access Protocol - User directories on TLS"}, + {"jms", "Jakarta Messaging"}, + {"nfs", "Network File System"}, + {"smb", "Server Message Block"}, + {"smb-encrypted", "Server Message Block, but encrypted"}, + {"local-file-access", "Data files are on the local system"}, + {"nrpe", "Nagios Remote Plugin Executor"}, + {"xmpp", "Extensible Messaging and Presence Protocol"}, + {"iiop", "Internet Inter-ORB Protocol "}, + {"iiop-encrypted", "Internet Inter-ORB Protocol , encrypted"}, + {"jrmp", "Java Remote Method Protocol"}, + {"jrmp-encrypted", "Java Remote Method Protocol, encrypted"}, + {"in-process-library-call", "Call to local library"}, 
+ {"container-spawning", "Spawn a container"}, +} + +func (what Protocol) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + return ProtocolTypeDescription[what].Name +} + +func (what Protocol) Explain() string { + return ProtocolTypeDescription[what].Description +} + +func (what Protocol) IsProcessLocal() bool { + return what == InProcessLibraryCall || what == LocalFileAccess || what == ContainerSpawning +} + +func (what Protocol) IsEncrypted() bool { + return what == HTTPS || what == WSS || what == JdbcEncrypted || what == OdbcEncrypted || + what == NosqlAccessProtocolEncrypted || what == SqlAccessProtocolEncrypted || what == BinaryEncrypted || what == TextEncrypted || what == SSH || what == SshTunnel || + what == FTPS || what == SFTP || what == SCP || what == LDAPS || what == ReverseProxyWebProtocolEncrypted || + what == IiopEncrypted || what == JrmpEncrypted || what == SmbEncrypted || what == SmtpEncrypted || what == Pop3Encrypted || what == ImapEncrypted +} + +func (what Protocol) IsPotentialDatabaseAccessProtocol(includingLaxDatabaseProtocols bool) bool { + strictlyDatabaseOnlyProtocol := what == JdbcEncrypted || what == OdbcEncrypted || + what == NosqlAccessProtocolEncrypted || what == SqlAccessProtocolEncrypted || what == JDBC || what == ODBC || what == NosqlAccessProtocol || what == SqlAccessProtocol + if includingLaxDatabaseProtocols { + // include HTTP for REST-based NoSQL-DBs as well as unknown binary + return strictlyDatabaseOnlyProtocol || what == HTTPS || what == HTTP || what == BINARY || what == BinaryEncrypted + } + return strictlyDatabaseOnlyProtocol +} + +func (what Protocol) IsPotentialWebAccessProtocol() bool { + return what == HTTP || what == HTTPS || what == WS || what == WSS || what == ReverseProxyWebProtocol || what == ReverseProxyWebProtocolEncrypted +} diff --git a/pkg/security/types/quantity.go b/pkg/security/types/quantity.go new file mode 100644 index 00000000..86563db7 --- /dev/null +++ 
b/pkg/security/types/quantity.go @@ -0,0 +1,62 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "strings" +) + +type Quantity int + +const ( + VeryFew Quantity = iota + Few + Many + VeryMany +) + +func QuantityValues() []TypeEnum { + return []TypeEnum{ + VeryFew, + Few, + Many, + VeryMany, + } +} + +func ParseQuantity(value string) (quantity Quantity, err error) { + value = strings.TrimSpace(value) + for _, candidate := range QuantityValues() { + if candidate.String() == value { + return candidate.(Quantity), err + } + } + return quantity, errors.New("Unable to parse into type: " + value) +} + +var QuantityTypeDescription = [...]TypeDescription{ + {"very-few", "Very few"}, + {"few", "Few"}, + {"many", "Many"}, + {"very-many", "Very many"}, +} + +func (what Quantity) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + return QuantityTypeDescription[what].Name +} + +func (what Quantity) Explain() string { + return QuantityTypeDescription[what].Description +} + +func (what Quantity) Title() string { + return [...]string{"very few", "few", "many", "very many"}[what] +} + +func (what Quantity) QuantityFactor() float64 { + // fibonacci starting at 1 + return [...]float64{1, 2, 3, 5}[what] +} diff --git a/pkg/security/types/risk_explotation_impact.go b/pkg/security/types/risk_explotation_impact.go new file mode 100644 index 00000000..6480e6ea --- /dev/null +++ b/pkg/security/types/risk_explotation_impact.go @@ -0,0 +1,52 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import "encoding/json" + +type RiskExploitationImpact int + +const ( + LowImpact RiskExploitationImpact = iota + MediumImpact + HighImpact + VeryHighImpact +) + +func RiskExploitationImpactValues() []TypeEnum { + return []TypeEnum{ + LowImpact, + MediumImpact, + HighImpact, + VeryHighImpact, + } +} + +var RiskExploitationImpactTypeDescription = [...]TypeDescription{ + {"low", "Low"}, + {"medium", "Medium"}, + {"high", "High"}, + 
{"very-high", "Very High"}, +} + +func (what RiskExploitationImpact) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + return RiskExploitationImpactTypeDescription[what].Name +} + +func (what RiskExploitationImpact) Explain() string { + return RiskExploitationImpactTypeDescription[what].Description +} + +func (what RiskExploitationImpact) Title() string { + return [...]string{"Low", "Medium", "High", "Very High"}[what] +} + +func (what RiskExploitationImpact) Weight() int { + return [...]int{1, 2, 3, 4}[what] +} + +func (what RiskExploitationImpact) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} diff --git a/pkg/security/types/risk_explotation_likelihood.go b/pkg/security/types/risk_explotation_likelihood.go new file mode 100644 index 00000000..9ae0f112 --- /dev/null +++ b/pkg/security/types/risk_explotation_likelihood.go @@ -0,0 +1,52 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import "encoding/json" + +type RiskExploitationLikelihood int + +const ( + Unlikely RiskExploitationLikelihood = iota + Likely + VeryLikely + Frequent +) + +func RiskExploitationLikelihoodValues() []TypeEnum { + return []TypeEnum{ + Unlikely, + Likely, + VeryLikely, + Frequent, + } +} + +var RiskExploitationLikelihoodTypeDescription = [...]TypeDescription{ + {"unlikely", "Unlikely"}, + {"likely", "Likely"}, + {"very-likely", "Very-Likely"}, + {"frequent", "Frequent"}, +} + +func (what RiskExploitationLikelihood) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + return RiskExploitationLikelihoodTypeDescription[what].Name +} + +func (what RiskExploitationLikelihood) Explain() string { + return RiskExploitationLikelihoodTypeDescription[what].Description +} + +func (what RiskExploitationLikelihood) Title() string { + return [...]string{"Unlikely", "Likely", "Very Likely", "Frequent"}[what] +} + +func (what RiskExploitationLikelihood) Weight() int { + return [...]int{1, 2, 3, 
4}[what] +} + +func (what RiskExploitationLikelihood) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} diff --git a/pkg/security/types/risk_function.go b/pkg/security/types/risk_function.go new file mode 100644 index 00000000..0273c5d6 --- /dev/null +++ b/pkg/security/types/risk_function.go @@ -0,0 +1,48 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import "encoding/json" + +type RiskFunction int + +const ( + BusinessSide RiskFunction = iota + Architecture + Development + Operations +) + +func RiskFunctionValues() []TypeEnum { + return []TypeEnum{ + BusinessSide, + Architecture, + Development, + Operations, + } +} + +var RiskFunctionTypeDescription = [...]TypeDescription{ + {"business-side", "Business"}, + {"architecture", "Architecture"}, + {"development", "Development"}, + {"operations", "Operations"}, +} + +func (what RiskFunction) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + return RiskFunctionTypeDescription[what].Name +} + +func (what RiskFunction) Explain() string { + return RiskFunctionTypeDescription[what].Description +} + +func (what RiskFunction) Title() string { + return [...]string{"Business Side", "Architecture", "Development", "Operations"}[what] +} + +func (what RiskFunction) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} diff --git a/pkg/security/types/risk_severity.go b/pkg/security/types/risk_severity.go new file mode 100644 index 00000000..be684f7b --- /dev/null +++ b/pkg/security/types/risk_severity.go @@ -0,0 +1,51 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import "encoding/json" + +type RiskSeverity int + +const ( + LowSeverity RiskSeverity = iota + MediumSeverity + ElevatedSeverity + HighSeverity + CriticalSeverity +) + +func RiskSeverityValues() []TypeEnum { + return []TypeEnum{ + LowSeverity, + MediumSeverity, + ElevatedSeverity, + HighSeverity, + CriticalSeverity, + } +} + +var RiskSeverityTypeDescription = 
[...]TypeDescription{ + {"low", "Low"}, + {"medium", "Medium"}, + {"elevated", "Elevated"}, + {"high", "High"}, + {"critical", "Critical"}, +} + +func (what RiskSeverity) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + return RiskSeverityTypeDescription[what].Name +} + +func (what RiskSeverity) Explain() string { + return RiskSeverityTypeDescription[what].Description +} + +func (what RiskSeverity) Title() string { + return [...]string{"Low", "Medium", "Elevated", "High", "Critical"}[what] +} + +func (what RiskSeverity) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} diff --git a/pkg/security/types/risk_status.go b/pkg/security/types/risk_status.go new file mode 100644 index 00000000..32278dcd --- /dev/null +++ b/pkg/security/types/risk_status.go @@ -0,0 +1,58 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import "encoding/json" + +type RiskStatus int + +const ( + Unchecked RiskStatus = iota + InDiscussion + Accepted + InProgress + Mitigated + FalsePositive +) + +func RiskStatusValues() []TypeEnum { + return []TypeEnum{ + Unchecked, + InDiscussion, + Accepted, + InProgress, + Mitigated, + FalsePositive, + } +} + +var RiskStatusTypeDescription = [...]TypeDescription{ + {"unchecked", "Risk has not yet been reviewed"}, + {"in-discussion", "Risk is currently being discussed (during review)"}, + {"accepted", "Risk has been accepted (as possibly a corporate risk acceptance process defines)"}, + {"in-progress", "Risk mitigation is currently in progress"}, + {"mitigated", "Risk has been mitigated"}, + {"false-positive", "Risk is a false positive (i.e. 
no risk at all or not applicable)"}, +} + +func (what RiskStatus) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + return RiskStatusTypeDescription[what].Name +} + +func (what RiskStatus) Explain() string { + return RiskStatusTypeDescription[what].Description +} + +func (what RiskStatus) Title() string { + return [...]string{"Unchecked", "in Discussion", "Accepted", "in Progress", "Mitigated", "False Positive"}[what] +} + +func (what RiskStatus) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} + +func (what RiskStatus) IsStillAtRisk() bool { + return what == Unchecked || what == InDiscussion || what == Accepted || what == InProgress +} diff --git a/pkg/security/types/stride.go b/pkg/security/types/stride.go new file mode 100644 index 00000000..7625db04 --- /dev/null +++ b/pkg/security/types/stride.go @@ -0,0 +1,54 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import "encoding/json" + +type STRIDE int + +const ( + Spoofing STRIDE = iota + Tampering + Repudiation + InformationDisclosure + DenialOfService + ElevationOfPrivilege +) + +func STRIDEValues() []TypeEnum { + return []TypeEnum{ + Spoofing, + Tampering, + Repudiation, + InformationDisclosure, + DenialOfService, + ElevationOfPrivilege, + } +} + +var StrideTypeDescription = [...]TypeDescription{ + {"spoofing", "Spoofing - Authenticity"}, + {"tampering", "Tampering - Integrity"}, + {"repudiation", "Repudiation - Non-repudiability"}, + {"information-disclosure", "Information disclosure - Confidentiality"}, + {"denial-of-service", "Denial of service - Availability"}, + {"elevation-of-privilege", "Elevation of privilege - Authorization"}, +} + +func (what STRIDE) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + return StrideTypeDescription[what].Name +} + +func (what STRIDE) Explain() string { + return StrideTypeDescription[what].Description +} + +func (what STRIDE) Title() string { + return 
[...]string{"Spoofing", "Tampering", "Repudiation", "Information Disclosure", "Denial of Service", "Elevation of Privilege"}[what] +} + +func (what STRIDE) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} diff --git a/pkg/security/types/technical_asset_machine.go b/pkg/security/types/technical_asset_machine.go new file mode 100644 index 00000000..816b17a2 --- /dev/null +++ b/pkg/security/types/technical_asset_machine.go @@ -0,0 +1,37 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +type TechnicalAssetMachine int + +const ( + Physical TechnicalAssetMachine = iota + Virtual + Container + Serverless +) + +func TechnicalAssetMachineValues() []TypeEnum { + return []TypeEnum{ + Physical, + Virtual, + Container, + Serverless, + } +} + +var TechnicalAssetMachineTypeDescription = [...]TypeDescription{ + {"physical", "A physical machine"}, + {"virtual", "A virtual machine"}, + {"container", "A container"}, + {"serverless", "A serverless application"}, +} + +func (what TechnicalAssetMachine) String() string { + return TechnicalAssetMachineTypeDescription[what].Name +} + +func (what TechnicalAssetMachine) Explain() string { + return TechnicalAssetMachineTypeDescription[what].Description +} diff --git a/pkg/security/types/technical_asset_size.go b/pkg/security/types/technical_asset_size.go new file mode 100644 index 00000000..cc658bff --- /dev/null +++ b/pkg/security/types/technical_asset_size.go @@ -0,0 +1,38 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +type TechnicalAssetSize int + +const ( + System TechnicalAssetSize = iota + Service + Application + Component +) + +func TechnicalAssetSizeValues() []TypeEnum { + return []TypeEnum{ + System, + Service, + Application, + Component, + } +} + +var TechnicalAssetSizeDescription = [...]TypeDescription{ + {"system", "A system consists of several services"}, + {"service", "A specific service (web, mail, ...)"}, + {"application", "A single application"}, + {"component", "A component of an 
application (smaller unit like a microservice)"}, +} + +func (what TechnicalAssetSize) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + return TechnicalAssetSizeDescription[what].Name +} + +func (what TechnicalAssetSize) Explain() string { + return TechnicalAssetSizeDescription[what].Description +} diff --git a/pkg/security/types/technical_asset_technology.go b/pkg/security/types/technical_asset_technology.go new file mode 100644 index 00000000..73ea7899 --- /dev/null +++ b/pkg/security/types/technical_asset_technology.go @@ -0,0 +1,276 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +type TechnicalAssetTechnology int + +const ( + UnknownTechnology TechnicalAssetTechnology = iota + ClientSystem + Browser + Desktop + MobileApp + DevOpsClient + WebServer + WebApplication + ApplicationServer + Database + FileServer + LocalFileSystem + ERP + CMS + WebServiceREST + WebServiceSOAP + EJB + SearchIndex + SearchEngine + ServiceRegistry + ReverseProxy + LoadBalancer + BuildPipeline + SourcecodeRepository + ArtifactRegistry + CodeInspectionPlatform + Monitoring + LDAPServer + ContainerPlatform + BatchProcessing + EventListener + IdentityProvider + IdentityStoreLDAP + IdentityStoreDatabase + Tool + CLI + Task + Function + Gateway // TODO rename to API-Gateway to be more clear? 
+ IoTDevice + MessageQueue + StreamProcessing + ServiceMesh + DataLake + BigDataPlatform + ReportEngine + AI + MailServer + Vault + HSM + WAF + IDS + IPS + Scheduler + Mainframe + BlockStorage + Library +) + +func TechnicalAssetTechnologyValues() []TypeEnum { + return []TypeEnum{ + UnknownTechnology, + ClientSystem, + Browser, + Desktop, + MobileApp, + DevOpsClient, + WebServer, + WebApplication, + ApplicationServer, + Database, + FileServer, + LocalFileSystem, + ERP, + CMS, + WebServiceREST, + WebServiceSOAP, + EJB, + SearchIndex, + SearchEngine, + ServiceRegistry, + ReverseProxy, + LoadBalancer, + BuildPipeline, + SourcecodeRepository, + ArtifactRegistry, + CodeInspectionPlatform, + Monitoring, + LDAPServer, + ContainerPlatform, + BatchProcessing, + EventListener, + IdentityProvider, + IdentityStoreLDAP, + IdentityStoreDatabase, + Tool, + CLI, + Task, + Function, + Gateway, + IoTDevice, + MessageQueue, + StreamProcessing, + ServiceMesh, + DataLake, + BigDataPlatform, + ReportEngine, + AI, + MailServer, + Vault, + HSM, + WAF, + IDS, + IPS, + Scheduler, + Mainframe, + BlockStorage, + Library, + } +} + +var TechnicalAssetTechnologyTypeDescription = [...]TypeDescription{ + {"unknown-technology", "Unknown technology"}, + {"client-system", "A client system"}, + {"browser", "A web browser"}, + {"desktop", "A desktop system (or laptop)"}, + {"mobile-app", "A mobile app (smartphone, tablet)"}, + {"devops-client", "A client used for DevOps"}, + {"web-server", "A web server"}, + {"web-application", "A web application"}, + {"application-server", "An application server (Apache Tomcat, ...)"}, + {"database", "A database"}, + {"file-server", "A file server"}, + {"local-file-system", "The local file system"}, + {"erp", "Enterprise-Resource-Planning"}, + {"cms", "Content Management System"}, + {"web-service-rest", "A REST web service (API)"}, + {"web-service-soap", "A SOAP web service (API)"}, + {"ejb", "Jakarta Enterprise Beans fka Enterprise JavaBeans"}, + {"search-index", "The 
index database of a search engine"}, + {"search-engine", "A search engine"}, + {"service-registry", "A central place where data schemas can be found and distributed"}, + {"reverse-proxy", "A proxy hiding internal infrastructure from caller making requests. Can also reduce load"}, + {"load-balancer", "A load balancer directing incoming requests to available internal infrastructure"}, + {"build-pipeline", "A software build pipeline"}, + {"sourcecode-repository", "Git or similar"}, + {"artifact-registry", "A registry to store build artifacts"}, + {"code-inspection-platform", "(Static) Code Analysis)"}, + {"monitoring", "A monitoring system (SIEM, logs)"}, + {"ldap-server", "A LDAP server"}, + {"container-platform", "A platform for hosting and executing containers"}, + {"batch-processing", "A set of tools automatically processing data"}, + {"event-listener", "An event listener waiting to be triggered and spring to action"}, + {"identity-provider", "A authentication provider"}, + {"identity-store-ldap", "Authentication data as LDAP"}, + {"identity-store-database", "Authentication data as database"}, + {"tool", "A specific tool"}, + {"threagile", "A command line tool"}, + {"task", "A specific task"}, + {"function", "A specific function (maybe RPC ?)"}, + {"gateway", "A gateway connecting two systems or trust boundaries"}, + {"iot-device", "An IoT device"}, + {"message-queue", "A message queue (like MQTT)"}, + {"stream-processing", "Data stream processing"}, + {"service-mesh", "Infrastructure for service-to-service communication"}, + {"data-lake", "A huge database"}, + {"big-data-platform", "Storage for big data"}, + {"report-engine", "Software for report generation"}, + {"ai", "An Artificial Intelligence service"}, + {"mail-server", "A Mail server"}, + {"vault", "Encryption and key management"}, + {"hsm", "Hardware Security Module"}, + {"waf", "Web Application Firewall"}, + {"ids", "Intrusion Detection System"}, + {"ips", "Intrusion Prevention System"}, + {"scheduler", 
"Scheduled tasks"}, + {"mainframe", "A central, big computer"}, + {"block-storage", "SAN or similar central file storage"}, + {"library", "A software library"}, +} + +func (what TechnicalAssetTechnology) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + return TechnicalAssetTechnologyTypeDescription[what].Name +} + +func (what TechnicalAssetTechnology) Explain() string { + return TechnicalAssetTechnologyTypeDescription[what].Description +} + +func (what TechnicalAssetTechnology) IsWebApplication() bool { + return what == WebServer || what == WebApplication || what == ApplicationServer || what == ERP || what == CMS || what == IdentityProvider || what == ReportEngine +} + +func (what TechnicalAssetTechnology) IsWebService() bool { + return what == WebServiceREST || what == WebServiceSOAP +} + +func (what TechnicalAssetTechnology) IsIdentityRelated() bool { + return what == IdentityProvider || what == IdentityStoreLDAP || what == IdentityStoreDatabase +} + +func (what TechnicalAssetTechnology) IsSecurityControlRelated() bool { + return what == Vault || what == HSM || what == WAF || what == IDS || what == IPS +} + +func (what TechnicalAssetTechnology) IsUnprotectedCommunicationsTolerated() bool { + return what == Monitoring || what == IDS || what == IPS +} + +func (what TechnicalAssetTechnology) IsUnnecessaryDataTolerated() bool { + return what == Monitoring || what == IDS || what == IPS +} + +func (what TechnicalAssetTechnology) IsCloseToHighValueTargetsTolerated() bool { + return what == Monitoring || what == IDS || what == IPS || what == LoadBalancer || what == ReverseProxy +} + +func (what TechnicalAssetTechnology) IsClient() bool { + return what == ClientSystem || what == Browser || what == Desktop || what == MobileApp || what == DevOpsClient || what == IoTDevice +} + +func (what TechnicalAssetTechnology) IsUsuallyAbleToPropagateIdentityToOutgoingTargets() bool { + return what == ClientSystem || what == Browser || what == 
Desktop || what == MobileApp || + what == DevOpsClient || what == WebServer || what == WebApplication || what == ApplicationServer || what == ERP || + what == CMS || what == WebServiceREST || what == WebServiceSOAP || what == EJB || + what == SearchEngine || what == ReverseProxy || what == LoadBalancer || what == IdentityProvider || + what == Tool || what == CLI || what == Task || what == Function || what == Gateway || + what == IoTDevice || what == MessageQueue || what == ServiceMesh || what == ReportEngine || what == WAF || what == Library + +} + +func (what TechnicalAssetTechnology) IsLessProtectedType() bool { + return what == ClientSystem || what == Browser || what == Desktop || what == MobileApp || what == DevOpsClient || what == WebServer || what == WebApplication || what == ApplicationServer || what == CMS || + what == WebServiceREST || what == WebServiceSOAP || what == EJB || what == BuildPipeline || what == SourcecodeRepository || + what == ArtifactRegistry || what == CodeInspectionPlatform || what == Monitoring || what == IoTDevice || what == AI || what == MailServer || what == Scheduler || + what == Mainframe +} + +func (what TechnicalAssetTechnology) IsUsuallyProcessingEndUserRequests() bool { + return what == WebServer || what == WebApplication || what == ApplicationServer || what == ERP || what == WebServiceREST || what == WebServiceSOAP || what == EJB || what == ReportEngine +} + +func (what TechnicalAssetTechnology) IsUsuallyStoringEndUserData() bool { + return what == Database || what == ERP || what == FileServer || what == LocalFileSystem || what == BlockStorage || what == MailServer || what == StreamProcessing || what == MessageQueue +} + +func (what TechnicalAssetTechnology) IsExclusivelyFrontendRelated() bool { + return what == ClientSystem || what == Browser || what == Desktop || what == MobileApp || what == DevOpsClient || what == CMS || what == ReverseProxy || what == WAF || what == LoadBalancer || what == Gateway || what == IoTDevice +} + 
+func (what TechnicalAssetTechnology) IsExclusivelyBackendRelated() bool { + return what == Database || what == IdentityProvider || what == IdentityStoreLDAP || what == IdentityStoreDatabase || what == ERP || what == WebServiceREST || what == WebServiceSOAP || what == EJB || what == SearchIndex || + what == SearchEngine || what == ContainerPlatform || what == BatchProcessing || what == EventListener || what == DataLake || what == BigDataPlatform || what == MessageQueue || + what == StreamProcessing || what == ServiceMesh || what == Vault || what == HSM || what == Scheduler || what == Mainframe || what == FileServer || what == BlockStorage +} + +func (what TechnicalAssetTechnology) IsDevelopmentRelevant() bool { + return what == BuildPipeline || what == SourcecodeRepository || what == ArtifactRegistry || what == CodeInspectionPlatform || what == DevOpsClient +} + +func (what TechnicalAssetTechnology) IsTrafficForwarding() bool { + return what == LoadBalancer || what == ReverseProxy || what == WAF +} + +func (what TechnicalAssetTechnology) IsEmbeddedComponent() bool { + return what == Library +} diff --git a/pkg/security/types/technical_asset_type.go b/pkg/security/types/technical_asset_type.go new file mode 100644 index 00000000..14173e72 --- /dev/null +++ b/pkg/security/types/technical_asset_type.go @@ -0,0 +1,35 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +type TechnicalAssetType int + +const ( + ExternalEntity TechnicalAssetType = iota + Process + Datastore +) + +func TechnicalAssetTypeValues() []TypeEnum { + return []TypeEnum{ + ExternalEntity, + Process, + Datastore, + } +} + +var TechnicalAssetTypeDescription = [...]TypeDescription{ + {"external-entity", "This asset is hosted and managed by a third party"}, + {"process", "A software process"}, + {"datastore", "This asset stores data"}, +} + +func (what TechnicalAssetType) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + return 
TechnicalAssetTypeDescription[what].Name +} + +func (what TechnicalAssetType) Explain() string { + return TechnicalAssetTypeDescription[what].Description +} diff --git a/pkg/security/types/trust_boundary.go b/pkg/security/types/trust_boundary.go new file mode 100644 index 00000000..de2b5df5 --- /dev/null +++ b/pkg/security/types/trust_boundary.go @@ -0,0 +1,56 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +type TrustBoundaryType int + +const ( + NetworkOnPrem TrustBoundaryType = iota + NetworkDedicatedHoster + NetworkVirtualLAN + NetworkCloudProvider + NetworkCloudSecurityGroup + NetworkPolicyNamespaceIsolation + ExecutionEnvironment +) + +func TrustBoundaryTypeValues() []TypeEnum { + return []TypeEnum{ + NetworkOnPrem, + NetworkDedicatedHoster, + NetworkVirtualLAN, + NetworkCloudProvider, + NetworkCloudSecurityGroup, + NetworkPolicyNamespaceIsolation, + ExecutionEnvironment, + } +} + +var TrustBoundaryTypeDescription = [...]TypeDescription{ + {"network-on-prem", "The whole network is on prem"}, + {"network-dedicated-hoster", "The network is at a dedicated hoster"}, + {"network-virtual-lan", "Network is a VLAN"}, + {"network-cloud-provider", "Network is at a cloud provider"}, + {"network-cloud-security-group", "Cloud rules controlling network traffic"}, + {"network-policy-namespace-isolation", "Segregation in a Kubernetes cluster"}, + {"execution-environment", "Logical group of items (not a protective network boundary in that sense). 
More like a namespace or another logical group of items"}, +} + +func (what TrustBoundaryType) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + return TrustBoundaryTypeDescription[what].Name +} + +func (what TrustBoundaryType) Explain() string { + return TrustBoundaryTypeDescription[what].Description +} + +func (what TrustBoundaryType) IsNetworkBoundary() bool { + return what == NetworkOnPrem || what == NetworkDedicatedHoster || what == NetworkVirtualLAN || + what == NetworkCloudProvider || what == NetworkCloudSecurityGroup || what == NetworkPolicyNamespaceIsolation +} + +func (what TrustBoundaryType) IsWithinCloud() bool { + return what == NetworkCloudProvider || what == NetworkCloudSecurityGroup +} diff --git a/pkg/security/types/types.go b/pkg/security/types/types.go new file mode 100644 index 00000000..62e76535 --- /dev/null +++ b/pkg/security/types/types.go @@ -0,0 +1,41 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +// TypeDescription contains a name for a type and its description +type TypeDescription struct { + Name string + Description string +} + +type TypeEnum interface { + String() string + Explain() string +} + +func GetBuiltinTypeValues() map[string][]TypeEnum { + return map[string][]TypeEnum{ + "Authentication": AuthenticationValues(), + "Authorization": AuthorizationValues(), + "Confidentiality": ConfidentialityValues(), + "Criticality (for integrity and availability)": CriticalityValues(), + "Data Breach Probability": DataBreachProbabilityValues(), + "Data Format": DataFormatValues(), + "Encryption": EncryptionStyleValues(), + "Protocol": ProtocolValues(), + "Quantity": QuantityValues(), + "Risk Exploitation Impact": RiskExploitationImpactValues(), + "Risk Exploitation Likelihood": RiskExploitationLikelihoodValues(), + "Risk Function": RiskFunctionValues(), + "Risk Severity": RiskSeverityValues(), + "Risk Status": RiskStatusValues(), + "STRIDE": STRIDEValues(), + "Technical Asset Machine": 
TechnicalAssetMachineValues(), + "Technical Asset Size": TechnicalAssetSizeValues(), + "Technical Asset Technology": TechnicalAssetTechnologyValues(), + "Technical Asset Type": TechnicalAssetTypeValues(), + "Trust Boundary Type": TrustBoundaryTypeValues(), + "Usage": UsageValues(), + } +} diff --git a/pkg/security/types/usage.go b/pkg/security/types/usage.go new file mode 100644 index 00000000..c9771082 --- /dev/null +++ b/pkg/security/types/usage.go @@ -0,0 +1,52 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "strings" +) + +type Usage int + +const ( + Business Usage = iota + DevOps +) + +func UsageValues() []TypeEnum { + return []TypeEnum{ + Business, + DevOps, + } +} + +func ParseUsage(value string) (usage Usage, err error) { + value = strings.TrimSpace(value) + for _, candidate := range UsageValues() { + if candidate.String() == value { + return candidate.(Usage), err + } + } + return usage, errors.New("Unable to parse into type: " + value) +} + +var UsageTypeDescription = [...]TypeDescription{ + {"business", "This system is operational and does business tasks"}, + {"devops", "This system is for development and/or deployment or other operational tasks"}, +} + +func (what Usage) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + //return [...]string{"business", "devops"}[what] + return UsageTypeDescription[what].Name +} + +func (what Usage) Explain() string { + return UsageTypeDescription[what].Description +} + +func (what Usage) Title() string { + return [...]string{"Business", "DevOps"}[what] +} diff --git a/pkg/report/template/background.pdf b/report/template/background.pdf similarity index 100% rename from pkg/report/template/background.pdf rename to report/template/background.pdf From 2ffb181692e97618b3065c55d35310d1f4b3db21 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Fri, 22 Dec 2023 15:10:13 +0000 Subject: [PATCH 13/68] Fix comment --- cmd/threagile/main.go | 3 +-- 1 file 
changed, 1 insertion(+), 2 deletions(-) diff --git a/cmd/threagile/main.go b/cmd/threagile/main.go index db9b007c..8c384bbd 100644 --- a/cmd/threagile/main.go +++ b/cmd/threagile/main.go @@ -11,9 +11,8 @@ const ( // === Error handling stuff ======================================== func main() { - // TODO: uncomment below as soon as refactoring is finished - everything will go through rootCmd.Execute - // cmd.Execute() + // threagile.Execute() // TODO: remove below as soon as refactoring is finished - everything will go through rootCmd.Execute // for now it's fine to have as frequently uncommented to see the actual behaviour From 2b634f1fc73ff2df3db9cc0a70cdbded74d07b66 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Fri, 22 Dec 2023 22:09:18 +0000 Subject: [PATCH 14/68] Move create-stub-model, create-example-model, create-editing-support to cobra implementation; add flags for already ported commands --- internal/threagile/context.go | 137 +++++------------------- internal/threagile/examples.go | 114 ++++++++++++++++++++ internal/threagile/flags.go | 12 +++ internal/threagile/macros.go | 5 + internal/threagile/progress-reporter.go | 12 +++ internal/threagile/root.go | 12 +-- internal/threagile/rules.go | 36 +++++-- internal/threagile/types.go | 5 + pkg/examples/examples.go | 74 +++++++++++++ 9 files changed, 284 insertions(+), 123 deletions(-) create mode 100644 internal/threagile/examples.go create mode 100644 internal/threagile/flags.go create mode 100644 pkg/examples/examples.go diff --git a/internal/threagile/context.go b/internal/threagile/context.go index 5ab59e7d..9b52e124 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -101,7 +101,7 @@ type Context struct { parsedModel model.ParsedModel modelFilename, templateFilename *string - createExampleModel, createStubModel, createEditingSupport, verbose, ignoreOrphanedRiskTracking *bool + verbose, ignoreOrphanedRiskTracking *bool generateDataFlowDiagram, 
generateDataAssetDiagram, generateRisksJSON, generateTechnicalAssetsJSON *bool generateStatsJSON, generateRisksExcel, generateTagsExcel, generateReportPDF *bool outputDir, raaPlugin, skipRiskRules, riskRulesPlugins, executeModelMacro *string @@ -2295,6 +2295,31 @@ func (context *Context) DoIt() { } } +func copyFile(src, dst string) (int64, error) { + sourceFileStat, err := os.Stat(src) + if err != nil { + return 0, err + } + + if !sourceFileStat.Mode().IsRegular() { + return 0, fmt.Errorf("%s is not a regular file", src) + } + + source, err := os.Open(src) + if err != nil { + return 0, err + } + defer func() { _ = source.Close() }() + + destination, err := os.Create(dst) + if err != nil { + return 0, err + } + defer func() { _ = destination.Close() }() + nBytes, err := io.Copy(destination, source) + return nBytes, err +} + func (context *Context) printBorder(length int, bold bool) { char := "-" if bold { @@ -4513,9 +4538,6 @@ func (context *Context) ParseCommandlineArgs() { // folders // commands context.serverPort = flag.Int("server", 0, "start a server (instead of commandline execution) on the given port") context.executeModelMacro = flag.String("execute-model-macro", "", "Execute model macro (by ID)") - context.createExampleModel = flag.Bool("create-example-model", false, "just create an example model named threagile-example-model.yaml in the output directory") - context.createStubModel = flag.Bool("create-stub-model", false, "just create a minimal stub model named threagile-stub-model.yaml in the output directory") - context.createEditingSupport = flag.Bool("create-editing-support", false, "just create some editing support stuff in the output directory") context.templateFilename = flag.String("background", "background.pdf", "background pdf file") context.generateDataFlowDiagram = flag.Bool("generate-data-flow-diagram", true, "generate data-flow diagram") context.generateDataAssetDiagram = flag.Bool("generate-data-asset-diagram", true, "generate data asset 
diagram") @@ -4579,117 +4601,10 @@ func (context *Context) ParseCommandlineArgs() { // folders fmt.Println() os.Exit(0) } - if *context.createExampleModel { - exampleError := context.createExampleModelFile() - if exampleError != nil { - log.Fatalf("Unable to copy example model: %v", exampleError) - return - } - fmt.Println(docs.Logo + "\n\n" + docs.VersionText) - fmt.Println("An example model was created named threagile-example-model.yaml in the output directory.") - fmt.Println() - fmt.Println(docs.Examples) - fmt.Println() - os.Exit(0) - } - if *context.createStubModel { - stubError := context.createStubModelFile() - if stubError != nil { - log.Fatalf("Unable to copy stub model: %v", stubError) - return - } - fmt.Println(docs.Logo + "\n\n" + docs.VersionText) - fmt.Println("A minimal stub model was created named threagile-stub-model.yaml in the output directory.") - fmt.Println() - fmt.Println(docs.Examples) - fmt.Println() - os.Exit(0) - } - if *context.createEditingSupport { - supportError := context.createEditingSupportFiles() - if supportError != nil { - log.Fatalf("Unable to copy editing support files: %v", supportError) - return - } - fmt.Println(docs.Logo + "\n\n" + docs.VersionText) - fmt.Println("The following files were created in the output directory:") - fmt.Println(" - schema.json") - fmt.Println(" - live-templates.txt") - fmt.Println() - fmt.Println("For a perfect editing experience within your IDE of choice you can easily get " + - "model syntax validation and autocompletion (very handy for enum values) as well as live templates: " + - "Just import the schema.json into your IDE and assign it as \"schema\" to each Threagile YAML file. 
" + - "Also try to import individual parts from the live-templates.txt file into your IDE as live editing templates.") - fmt.Println() - os.Exit(0) - } context.ServerMode = (*context.serverPort > 0) } -func (context *Context) createExampleModelFile() error { - _, err := copyFile(filepath.Join(*context.appFolder, "threagile-example-model.yaml"), filepath.Join(*context.outputDir, "threagile-example-model.yaml")) - if err == nil { - return nil - } - - _, altError := copyFile(filepath.Join(*context.appFolder, "threagile.yaml"), filepath.Join(*context.outputDir, "threagile-example-model.yaml")) - if altError != nil { - return err - } - - return nil -} - -func (context *Context) createStubModelFile() error { - _, err := copyFile(filepath.Join(*context.appFolder, "threagile-stub-model.yaml"), filepath.Join(*context.outputDir, "threagile-stub-model.yaml")) - if err == nil { - return nil - } - - _, altError := copyFile(filepath.Join(*context.appFolder, "threagile.yaml"), filepath.Join(*context.outputDir, "threagile-stub-model.yaml")) - if altError != nil { - return err - } - - return nil -} - -func (context *Context) createEditingSupportFiles() error { - _, schemaError := copyFile(filepath.Join(*context.appFolder, "schema.json"), filepath.Join(*context.outputDir, "schema.json")) - if schemaError != nil { - return schemaError - } - - _, templateError := copyFile(filepath.Join(*context.appFolder, "live-templates.txt"), filepath.Join(*context.outputDir, "live-templates.txt")) - return templateError -} - -func copyFile(src, dst string) (int64, error) { - sourceFileStat, err := os.Stat(src) - if err != nil { - return 0, err - } - - if !sourceFileStat.Mode().IsRegular() { - return 0, fmt.Errorf("%s is not a regular file", src) - } - - source, err := os.Open(src) - if err != nil { - return 0, err - } - defer func() { _ = source.Close() }() - - destination, err := os.Create(dst) - if err != nil { - return 0, err - } - defer func() { _ = destination.Close() }() - nBytes, err := 
io.Copy(destination, source) - return nBytes, err -} - func (context *Context) applyWildcardRiskTrackingEvaluation() { if *context.verbose { fmt.Println("Executing risk tracking evaluation") diff --git a/internal/threagile/examples.go b/internal/threagile/examples.go new file mode 100644 index 00000000..9aa371f3 --- /dev/null +++ b/internal/threagile/examples.go @@ -0,0 +1,114 @@ +/* +Copyright © 2023 NAME HERE +*/ +package threagile + +import ( + "github.com/spf13/cobra" + "github.com/threagile/threagile/pkg/docs" + "github.com/threagile/threagile/pkg/examples" +) + +var createExampleModelCmd = &cobra.Command{ + Use: "create-example-model", + Short: "Create example threagile model", + Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\njust create an example model named threagile-example-model.yaml in the output directory", + RunE: func(cmd *cobra.Command, args []string) error { + appDir, err := cmd.Flags().GetString(appDirFlagName) + if err != nil { + cmd.Printf("Unable to read app-dir flag: %v", err) + return err + } + outDir, err := cmd.Flags().GetString(outputFlagName) + if err != nil { + cmd.Printf("Unable to read output flag: %v", err) + return err + } + + err = examples.CreateExampleModelFile(appDir, outDir) + if err != nil { + cmd.Printf("Unable to copy example model: %v", err) + return err + } + + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println("An example model was created named threagile-example-model.yaml in the output directory.") + cmd.Println() + cmd.Println(docs.Examples) + cmd.Println() + return nil + }, +} + +var createStubModelCmd = &cobra.Command{ + Use: "create-stub-model", + Short: "Create stub threagile model", + Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\njust create a minimal stub model named threagile-stub-model.yaml in the output directory", + RunE: func(cmd *cobra.Command, args []string) error { + appDir, err := cmd.Flags().GetString(appDirFlagName) + if err != nil { + cmd.Printf("Unable to read 
app-dir flag: %v", err) + return err + } + outDir, err := cmd.Flags().GetString(outputFlagName) + if err != nil { + cmd.Printf("Unable to read output flag: %v", err) + return err + } + + err = examples.CreateStubModelFile(appDir, outDir) + if err != nil { + cmd.Printf("Unable to copy stub model: %v", err) + return err + } + + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println("A minimal stub model was created named threagile-stub-model.yaml in the output directory.") + cmd.Println() + cmd.Println(docs.Examples) + cmd.Println() + return nil + }, +} + +var createEditingSupportCmd = &cobra.Command{ + Use: "create-editing-support", + Short: "Create editing support", + Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\njust create some editing support stuff in the output directory", + RunE: func(cmd *cobra.Command, args []string) error { + appDir, err := cmd.Flags().GetString(appDirFlagName) + if err != nil { + cmd.Printf("Unable to read app-dir flag: %v", err) + return err + } + outDir, err := cmd.Flags().GetString(outputFlagName) + if err != nil { + cmd.Printf("Unable to read output flag: %v", err) + return err + } + + err = examples.CreateEditingSupportFiles(appDir, outDir) + if err != nil { + cmd.Printf("Unable to copy editing support files: %v", err) + return err + } + + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println("The following files were created in the output directory:") + cmd.Println(" - schema.json") + cmd.Println(" - live-templates.txt") + cmd.Println() + cmd.Println("For a perfect editing experience within your IDE of choice you can easily get " + + "model syntax validation and autocompletion (very handy for enum values) as well as live templates: " + + "Just import the schema.json into your IDE and assign it as \"schema\" to each Threagile YAML file. 
" + + "Also try to import individual parts from the live-templates.txt file into your IDE as live editing templates.") + cmd.Println() + return nil + }, +} + +func init() { + rootCmd.AddCommand(createExampleModelCmd) + rootCmd.AddCommand(createStubModelCmd) + rootCmd.AddCommand(createEditingSupportCmd) +} diff --git a/internal/threagile/flags.go b/internal/threagile/flags.go new file mode 100644 index 00000000..d04c4cde --- /dev/null +++ b/internal/threagile/flags.go @@ -0,0 +1,12 @@ +/* +Copyright © 2023 NAME HERE +*/ +package threagile + +const verboseFlagName = "verbose" +const verboseFlagShorthand = "v" +const appDirFlagName = "app-dir" +const binDirFlagName = "bin-dir" +const outputFlagName = "output" +const tempDirFlagName = "temp-dir" +const customRiskRulesPluginFlagName = "custom-risk-rules-plugin" diff --git a/internal/threagile/macros.go b/internal/threagile/macros.go index 422b4a5a..c4e6494f 100644 --- a/internal/threagile/macros.go +++ b/internal/threagile/macros.go @@ -58,3 +58,8 @@ var explainMacrosCmd = &cobra.Command{ cmd.Println() }, } + +func init() { + rootCmd.AddCommand(listMacrosCmd) + rootCmd.AddCommand(explainMacrosCmd) +} diff --git a/internal/threagile/progress-reporter.go b/internal/threagile/progress-reporter.go index 4d5e9585..266bd5fa 100644 --- a/internal/threagile/progress-reporter.go +++ b/internal/threagile/progress-reporter.go @@ -7,6 +7,8 @@ package threagile import ( "fmt" "log" + + "github.com/spf13/cobra" ) type ProgressReporter interface { @@ -31,3 +33,13 @@ func (CommandLineProgressReporter) Println(a ...any) (n int, err error) { func (CommandLineProgressReporter) Fatalf(format string, v ...any) { log.Fatalf(format, v...) 
} + +func getProgressReporter(cobraCmd *cobra.Command) ProgressReporter { + if cobraCmd == nil { + return CommandLineProgressReporter{} + } + if cobraCmd.Flags().Lookup("verbose") != nil && cobraCmd.Flags().Lookup("verbose").Changed { + return SilentProgressReporter{} + } + return CommandLineProgressReporter{} +} diff --git a/internal/threagile/root.go b/internal/threagile/root.go index 3c871182..9fc9fdf7 100644 --- a/internal/threagile/root.go +++ b/internal/threagile/root.go @@ -27,11 +27,11 @@ func Execute() { } func init() { + rootCmd.PersistentFlags().BoolP(verboseFlagName, verboseFlagShorthand, false, "verbose output") + rootCmd.PersistentFlags().String(appDirFlagName, "/app", "app folder (default: /app)") + rootCmd.PersistentFlags().String(binDirFlagName, "/app", "binary folder location") + rootCmd.PersistentFlags().String(outputFlagName, ".", "output directory") + rootCmd.PersistentFlags().String(tempDirFlagName, "/tmp", "output directory") + rootCmd.AddCommand(versionCmd) - rootCmd.AddCommand(listMacrosCmd) - rootCmd.AddCommand(explainMacrosCmd) - rootCmd.AddCommand(listTypesCmd) - rootCmd.AddCommand(explainTypesCmd) - rootCmd.AddCommand(listRiskRules) - rootCmd.AddCommand(explainRiskRules) } diff --git a/internal/threagile/rules.go b/internal/threagile/rules.go index a20f412f..9ce5f60c 100644 --- a/internal/threagile/rules.go +++ b/internal/threagile/rules.go @@ -4,6 +4,8 @@ Copyright © 2023 NAME HERE package threagile import ( + "strings" + "github.com/spf13/cobra" "github.com/threagile/threagile/pkg/docs" @@ -13,14 +15,20 @@ import ( var listRiskRules = &cobra.Command{ Use: "list-risk-rules", Short: "Print available risk rules", - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { + plugins, err := cmd.Flags().GetString("custom-risk-rules-plugin") + if err != nil { + cmd.Printf("Unable to read custom-risk-rules-plugin flag: %v", err) + return err + } + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) 
cmd.Println("The following risk rules are available (can be extended via custom risk rules):") cmd.Println() cmd.Println("----------------------") cmd.Println("Custom risk rules:") cmd.Println("----------------------") - customRiskRules := risks.LoadCustomRiskRules([]string{""}, CommandLineProgressReporter{}) + customRiskRules := risks.LoadCustomRiskRules(strings.Split(plugins, ","), getProgressReporter(cmd)) for id, customRule := range customRiskRules { cmd.Println(id, "-->", customRule.Category.Title, "--> with tags:", customRule.Tags) } @@ -32,23 +40,29 @@ var listRiskRules = &cobra.Command{ for _, rule := range risks.GetBuiltInRiskRules() { cmd.Println(rule.Category().Id, "-->", rule.Category().Title, "--> with tags:", rule.SupportedTags()) } + + return nil }, } var explainRiskRules = &cobra.Command{ Use: "explain-risk-rules", Short: "Detailed explanation of all the risk rules", - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { + plugins, err := cmd.Flags().GetString("custom-risk-rules-plugin") + if err != nil { + cmd.Printf("Unable to read custom-risk-rules-plugin flag: %v", err) + return err + } + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) cmd.Println("Explanation for risk rules:") cmd.Println() cmd.Println("----------------------") cmd.Println("Custom risk rules:") cmd.Println("----------------------") - // fmt.Printf("%v: %v\n", accidental_secret_leak.Category().Id, accidental_secret_leak.Category().Description) - // TODO: parse custom risk rules and print them - customRiskRules := risks.LoadCustomRiskRules([]string{""}, CommandLineProgressReporter{}) + customRiskRules := risks.LoadCustomRiskRules(strings.Split(plugins, ","), getProgressReporter(cmd)) for _, customRule := range customRiskRules { cmd.Printf("%v: %v\n", customRule.Category.Id, customRule.Category.Description) } @@ -61,5 +75,15 @@ var explainRiskRules = &cobra.Command{ cmd.Printf("%v: %v\n", rule.Category().Id, 
rule.Category().Description) } cmd.Println() + + return nil }, } + +func init() { + listRiskRules.PersistentFlags().String(customRiskRulesPluginFlagName, "", "custom risk rules plugin (default: none)") + rootCmd.AddCommand(listRiskRules) + + explainRiskRules.PersistentFlags().String(customRiskRulesPluginFlagName, "", "custom risk rules plugin (default: none)") + rootCmd.AddCommand(explainRiskRules) +} diff --git a/internal/threagile/types.go b/internal/threagile/types.go index 05840ff8..aa4c1a9a 100644 --- a/internal/threagile/types.go +++ b/internal/threagile/types.go @@ -44,3 +44,8 @@ var explainTypesCmd = &cobra.Command{ } }, } + +func init() { + rootCmd.AddCommand(listTypesCmd) + rootCmd.AddCommand(explainTypesCmd) +} diff --git a/pkg/examples/examples.go b/pkg/examples/examples.go new file mode 100644 index 00000000..98b3b6e8 --- /dev/null +++ b/pkg/examples/examples.go @@ -0,0 +1,74 @@ +/* +Copyright © 2023 NAME HERE +*/ +package examples + +import ( + "fmt" + "io" + "os" + "path/filepath" +) + +func CreateExampleModelFile(appFolder, outputDir string) error { + _, err := copyFile(filepath.Join(appFolder, "threagile-example-model.yaml"), filepath.Join(outputDir, "threagile-example-model.yaml")) + if err == nil { + return nil + } + + _, altError := copyFile(filepath.Join(appFolder, "threagile.yaml"), filepath.Join(outputDir, "threagile-example-model.yaml")) + if altError != nil { + return err + } + + return nil +} + +func CreateStubModelFile(appFolder, outputDir string) error { + _, err := copyFile(filepath.Join(appFolder, "threagile-stub-model.yaml"), filepath.Join(outputDir, "threagile-stub-model.yaml")) + if err == nil { + return nil + } + + _, altError := copyFile(filepath.Join(appFolder, "threagile.yaml"), filepath.Join(outputDir, "threagile-stub-model.yaml")) + if altError != nil { + return err + } + + return nil +} + +func CreateEditingSupportFiles(appFolder, outputDir string) error { + _, schemaError := copyFile(filepath.Join(appFolder, "schema.json"), 
filepath.Join(outputDir, "schema.json")) + if schemaError != nil { + return schemaError + } + + _, templateError := copyFile(filepath.Join(appFolder, "live-templates.txt"), filepath.Join(outputDir, "live-templates.txt")) + return templateError +} + +func copyFile(src, dst string) (int64, error) { + sourceFileStat, err := os.Stat(src) + if err != nil { + return 0, err + } + + if !sourceFileStat.Mode().IsRegular() { + return 0, fmt.Errorf("%s is not a regular file", src) + } + + source, err := os.Open(src) + if err != nil { + return 0, err + } + defer func() { _ = source.Close() }() + + destination, err := os.Create(dst) + if err != nil { + return 0, err + } + defer func() { _ = destination.Close() }() + nBytes, err := io.Copy(destination, source) + return nBytes, err +} From 9619c244cae56a20aac0242780695cfa43547002 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Fri, 22 Dec 2023 22:09:52 +0000 Subject: [PATCH 15/68] Small cleanup --- internal/threagile/rules.go | 1 - 1 file changed, 1 deletion(-) diff --git a/internal/threagile/rules.go b/internal/threagile/rules.go index 9ce5f60c..67d6956d 100644 --- a/internal/threagile/rules.go +++ b/internal/threagile/rules.go @@ -61,7 +61,6 @@ var explainRiskRules = &cobra.Command{ cmd.Println("----------------------") cmd.Println("Custom risk rules:") cmd.Println("----------------------") - // TODO: parse custom risk rules and print them customRiskRules := risks.LoadCustomRiskRules(strings.Split(plugins, ","), getProgressReporter(cmd)) for _, customRule := range customRiskRules { cmd.Printf("%v: %v\n", customRule.Category.Id, customRule.Category.Description) From 86a28df252641c6ff634e9678196468b357b791c Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Fri, 22 Dec 2023 23:16:51 +0000 Subject: [PATCH 16/68] Migrate print license and print 3rd party to cobra --- internal/threagile/about.go | 42 +++++++++++++++++++++++++++++++++++ internal/threagile/context.go | 32 -------------------------- 
internal/threagile/root.go | 2 -- pkg/docs/constants.go | 11 +++++++++ 4 files changed, 53 insertions(+), 34 deletions(-) diff --git a/internal/threagile/about.go b/internal/threagile/about.go index e2fb735e..ab90336f 100644 --- a/internal/threagile/about.go +++ b/internal/threagile/about.go @@ -4,6 +4,10 @@ Copyright © 2023 NAME HERE package threagile import ( + "errors" + "os" + "path/filepath" + "github.com/spf13/cobra" "github.com/threagile/threagile/pkg/docs" @@ -14,3 +18,41 @@ var versionCmd = &cobra.Command{ Short: "Get version information", Long: "\n" + docs.Logo + "\n\n" + docs.VersionText, } + +var print3rdPartyCmd = &cobra.Command{ + Use: "print-3rd-party-licenses", + Short: "Print 3rd-party license information", + Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\n" + docs.ThirdPartyLicenses, +} + +var printLicenseCmd = &cobra.Command{ + Use: "print-license", + Short: "Print license information", + RunE: func(cmd *cobra.Command, args []string) error { + appDir, err := cmd.Flags().GetString(appDirFlagName) + if err != nil { + cmd.Printf("Unable to read app-dir flag: %v", err) + return err + } + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + if appDir != filepath.Clean(appDir) { + // TODO: do we need this check here? 
+ cmd.Printf("weird app folder %v", appDir) + return errors.New("weird app folder") + } + content, err := os.ReadFile(filepath.Join(appDir, "LICENSE.txt")) + if err != nil { + cmd.Printf("Unable to read license file: %v", err) + return err + } + cmd.Print(string(content)) + cmd.Println() + return nil + }, +} + +func init() { + rootCmd.AddCommand(versionCmd) + rootCmd.AddCommand(print3rdPartyCmd) + rootCmd.AddCommand(printLicenseCmd) +} diff --git a/internal/threagile/context.go b/internal/threagile/context.go index 9b52e124..f127ec6f 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -4545,10 +4545,6 @@ func (context *Context) ParseCommandlineArgs() { // folders context.generateTagsExcel = flag.Bool("generate-tags-excel", true, "generate tags excel") context.generateReportPDF = flag.Bool("generate-report-pdf", true, "generate report pdf, including diagrams") - // more commands - print3rdParty := flag.Bool("print-3rd-party-licenses", false, "print 3rd-party license information") - license := flag.Bool("print-license", false, "print license information") - flag.Usage = func() { fmt.Println(docs.Logo + "\n\n" + docs.VersionText) _, _ = fmt.Fprintf(os.Stderr, "Usage: threagile [options]") @@ -4574,34 +4570,6 @@ func (context *Context) ParseCommandlineArgs() { // folders context.progressReporter = CommandLineProgressReporter{} } - if *print3rdParty { - fmt.Println(docs.Logo + "\n\n" + docs.VersionText) - fmt.Println("Kudos & Credits to the following open-source projects:") - fmt.Println(" - golang (Google Go License): https://golang.org/LICENSE") - fmt.Println(" - go-yaml (MIT License): https://github.com/go-yaml/yaml/blob/v3/LICENSE") - fmt.Println(" - graphviz (CPL License): https://graphviz.gitlab.io/license/") - fmt.Println(" - gofpdf (MIT License): https://github.com/jung-kurt/gofpdf/blob/master/LICENSE") - fmt.Println(" - go-chart (MIT License): https://github.com/wcharczuk/go-chart/blob/master/LICENSE") - fmt.Println(" - excelize 
(BSD License): https://github.com/qax-os/excelize/blob/master/LICENSE") - fmt.Println(" - graphics-go (BSD License): https://github.com/BurntSushi/graphics-go/blob/master/LICENSE") - fmt.Println(" - google-uuid (BSD License): https://github.com/google/uuid/blob/master/LICENSE") - fmt.Println(" - gin-gonic (MIT License): https://github.com/gin-gonic/gin/blob/master/LICENSE") - fmt.Println(" - swagger-ui (Apache License): https://swagger.io/license/") - fmt.Println() - os.Exit(0) - } - if *license { - fmt.Println(docs.Logo + "\n\n" + docs.VersionText) - if *context.appFolder != filepath.Clean(*context.appFolder) { - log.Fatalf("weird app folder %v", *context.appFolder) - } - content, err := os.ReadFile(filepath.Join(*context.appFolder, "LICENSE.txt")) - checkErr(err) - fmt.Print(string(content)) - fmt.Println() - os.Exit(0) - } - context.ServerMode = (*context.serverPort > 0) } diff --git a/internal/threagile/root.go b/internal/threagile/root.go index 9fc9fdf7..f11716dc 100644 --- a/internal/threagile/root.go +++ b/internal/threagile/root.go @@ -32,6 +32,4 @@ func init() { rootCmd.PersistentFlags().String(binDirFlagName, "/app", "binary folder location") rootCmd.PersistentFlags().String(outputFlagName, ".", "output directory") rootCmd.PersistentFlags().String(tempDirFlagName, "/tmp", "output directory") - - rootCmd.AddCommand(versionCmd) } diff --git a/pkg/docs/constants.go b/pkg/docs/constants.go index 0a2fb994..28e1af39 100644 --- a/pkg/docs/constants.go +++ b/pkg/docs/constants.go @@ -29,4 +29,15 @@ const ( " docker run --rm -it threagile/threagile -list-model-macros \n\n" + "If you want to execute a certain model macro on the model yaml file (here the macro add-build-pipeline): \n" + " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile -model app/work/threagile.yaml -output app/work -execute-model-macro add-build-pipeline" + ThirdPartyLicenses = " - golang (Google Go License): https://golang.org/LICENSE\n" + + " - go-yaml (MIT License): 
https://github.com/go-yaml/yaml/blob/v3/LICENSE\n" + + " - graphviz (CPL License): https://graphviz.gitlab.io/license/\n" + + " - gofpdf (MIT License): https://github.com/jung-kurt/gofpdf/blob/master/LICENSE\n" + + " - go-chart (MIT License): https://github.com/wcharczuk/go-chart/blob/master/LICENSE\n" + + " - excelize (BSD License): https://github.com/qax-os/excelize/blob/master/LICENSE\n" + + " - graphics-go (BSD License): https://github.com/BurntSushi/graphics-go/blob/master/LICENSE\n" + + " - google-uuid (BSD License): https://github.com/google/uuid/blob/master/LICENSE\n" + + " - gin-gonic (MIT License): https://github.com/gin-gonic/gin/blob/master/LICENSE\n" + + " - swagger-ui (Apache License): https://swagger.io/license/\n" + + " - cobra-cli (Apache License): https://github.com/spf13/cobra-cli/blob/main/LICENSE.txt\n" ) From 7af854042a5c194a997ce6ed34d4f8ff7748a4a5 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Sat, 23 Dec 2023 12:13:31 -0800 Subject: [PATCH 17/68] introduced config --- internal/threagile/context.go | 128 ++++++++++++++-------------------- pkg/common/attacker-focus.go | 7 ++ pkg/common/attractiveness.go | 8 +++ pkg/common/config.go | 33 +++++++++ pkg/common/consts.go | 46 ++++++++++++ pkg/common/plugin-input.go | 10 +++ pkg/examples/examples.go | 5 +- 7 files changed, 160 insertions(+), 77 deletions(-) create mode 100644 pkg/common/attacker-focus.go create mode 100644 pkg/common/attractiveness.go create mode 100644 pkg/common/config.go create mode 100644 pkg/common/consts.go create mode 100644 pkg/common/plugin-input.go diff --git a/internal/threagile/context.go b/internal/threagile/context.go index f127ec6f..52f0ea5e 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -15,6 +15,7 @@ import ( "errors" "flag" "fmt" // TODO: no fmt here + "github.com/threagile/threagile/pkg/common" "hash/fnv" "io" "log" @@ -49,43 +50,17 @@ import ( "github.com/threagile/threagile/pkg/model" 
"github.com/threagile/threagile/pkg/report" "github.com/threagile/threagile/pkg/run" - risks "github.com/threagile/threagile/pkg/security/risks" + "github.com/threagile/threagile/pkg/security/risks" "github.com/threagile/threagile/pkg/security/types" ) -const ( - keepDiagramSourceFiles = false - addModelTitle = false -) - const ( defaultGraphvizDPI, maxGraphvizDPI = 120, 240 - backupHistoryFilesToKeep = 50 -) - -const ( - buildTimestamp = "" - tempDir = "/dev/shm" // TODO: make configurable via cmdline arg? - binDir = "/app" - appDir = "/app" - dataDir = "/data" - keyDir = "keys" - reportFilename = "report.pdf" - excelRisksFilename = "risks.xlsx" - excelTagsFilename = "tags.xlsx" - jsonRisksFilename = "risks.json" - jsonTechnicalAssetsFilename = "technical-assets.json" - jsonStatsFilename = "stats.json" - dataFlowDiagramFilenameDOT = "data-flow-diagram.gv" - dataFlowDiagramFilenamePNG = "data-flow-diagram.png" - dataAssetDiagramFilenameDOT = "data-asset-diagram.gv" - dataAssetDiagramFilenamePNG = "data-asset-diagram.png" - graphvizDataFlowDiagramConversionCall = "render-data-flow-diagram.sh" - graphvizDataAssetDiagramConversionCall = "render-data-asset-diagram.sh" - inputFile = "threagile.yaml" ) type Context struct { + common.Config + ServerMode bool successCount int @@ -115,8 +90,6 @@ type Context struct { serverFolder *string tempFolder *string - defaultGraphvizDPI int - maxGraphvizDPI int backupHistoryFilesToKeep int tempDir string @@ -178,7 +151,7 @@ func (context *Context) checkRiskTracking() { } } -func (context *Context) Defaults(buildTimestamp string) *Context { +func (context *Context) Init(buildTimestamp string) *Context { *context = Context{ keepDiagramSourceFiles: false, addModelTitle: false, @@ -186,29 +159,34 @@ func (context *Context) Defaults(buildTimestamp string) *Context { customRiskRules: make(map[string]*model.CustomRisk), deferredRiskTrackingDueToWildcardMatching: make(map[string]model.RiskTracking), 
drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks: true, - defaultGraphvizDPI: 120, - maxGraphvizDPI: 240, - backupHistoryFilesToKeep: 50, - } - - context.tempDir = "/dev/shm" // TODO: make configurable via cmdline arg? - context.binDir = "/app" - context.appDir = "/app" - context.dataDir = "/data" - context.keyDir = "keys" - context.reportFilename = "report.pdf" - context.excelRisksFilename = "risks.xlsx" - context.excelTagsFilename = "tags.xlsx" - context.jsonRisksFilename = "risks.json" - context.jsonTechnicalAssetsFilename = "technical-assets.json" - context.jsonStatsFilename = "stats.json" - context.dataFlowDiagramFilenameDOT = "data-flow-diagram.gv" - context.dataFlowDiagramFilenamePNG = "data-flow-diagram.png" - context.dataAssetDiagramFilenameDOT = "data-asset-diagram.gv" - context.dataAssetDiagramFilenamePNG = "data-asset-diagram.png" - context.graphvizDataFlowDiagramConversionCall = "render-data-flow-diagram.sh" - context.graphvizDataAssetDiagramConversionCall = "render-data-asset-diagram.sh" - context.inputFile = "threagile.yaml" + } + + return context +} + +func (context *Context) Defaults(buildTimestamp string) *Context { + *context = *new(Context).Init(buildTimestamp) + context.backupHistoryFilesToKeep = 50 + context.tempDir = common.TempDir + context.binDir = common.BinDir + context.appDir = common.AppDir + context.dataDir = common.DataDir + context.keyDir = common.KeyDir + context.reportFilename = common.ReportFilename + context.excelRisksFilename = common.ExcelRisksFilename + context.excelTagsFilename = common.ExcelTagsFilename + context.jsonRisksFilename = common.JsonRisksFilename + context.jsonTechnicalAssetsFilename = common.JsonTechnicalAssetsFilename + context.jsonStatsFilename = common.JsonStatsFilename + context.dataFlowDiagramFilenameDOT = common.DataFlowDiagramFilenameDOT + context.dataFlowDiagramFilenamePNG = common.DataFlowDiagramFilenamePNG + context.dataAssetDiagramFilenameDOT = common.DataAssetDiagramFilenameDOT + 
context.dataAssetDiagramFilenamePNG = common.DataAssetDiagramFilenamePNG + context.graphvizDataFlowDiagramConversionCall = common.GraphvizDataFlowDiagramConversionCall + context.graphvizDataAssetDiagramConversionCall = common.GraphvizDataAssetDiagramConversionCall + context.inputFile = common.InputFile + + context.Config.Defaults() return context } @@ -420,7 +398,7 @@ func (context *Context) analyzeModelOnServerDirectly(ginContext *gin.Context) { } }() - dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(context.defaultGraphvizDPI))) + dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(context.DefaultGraphvizDPI))) if err != nil { context.handleErrorInServiceCall(err, ginContext) return @@ -2385,7 +2363,7 @@ func (context *Context) execute(ginContext *gin.Context, dryRun bool) (yamlConte } }() - dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(context.defaultGraphvizDPI))) + dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(context.DefaultGraphvizDPI))) checkErr(err) fileUploaded, header, err := ginContext.Request.FormFile("file") @@ -2856,7 +2834,7 @@ func (context *Context) streamResponse(ginContext *gin.Context, responseType res ok = false } }() - dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(context.defaultGraphvizDPI))) + dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(context.DefaultGraphvizDPI))) if err != nil { context.handleErrorInServiceCall(err, ginContext) return @@ -4172,12 +4150,12 @@ func (context *Context) backupModelToHistory(modelFolder string, changeReasonFor return err } } - input, err := os.ReadFile(filepath.Join(modelFolder, context.inputFile)) + inputModel, err := os.ReadFile(filepath.Join(modelFolder, context.inputFile)) if err != nil { return err } historyFile := filepath.Join(historyFolder, time.Now().Format("2006-01-02 15:04:05")+" "+changeReasonForHistory+".backup") - err = os.WriteFile(historyFile, input, 0400) + err = 
os.WriteFile(historyFile, inputModel, 0400) if err != nil { return err } @@ -4518,14 +4496,14 @@ func (context *Context) expandPath(path string) *string { } func (context *Context) ParseCommandlineArgs() { // folders - context.appFolder = flag.String("app-dir", appDir, "app folder (default: "+appDir+")") - context.serverFolder = flag.String("server-dir", dataDir, "base folder for server mode (default: "+dataDir+")") - context.tempFolder = flag.String("temp-dir", tempDir, "temporary folder location") - context.binFolder = flag.String("bin-dir", binDir, "binary folder location") + context.appFolder = flag.String("app-dir", common.AppDir, "app folder (default: "+common.AppDir+")") + context.serverFolder = flag.String("server-dir", common.DataDir, "base folder for server mode (default: "+common.DataDir+")") + context.tempFolder = flag.String("temp-dir", common.TempDir, "temporary folder location") + context.binFolder = flag.String("bin-dir", common.BinDir, "binary folder location") context.outputDir = flag.String("output", ".", "output directory") // files - context.modelFilename = flag.String("model", inputFile, "input model yaml file") + context.modelFilename = flag.String("model", common.InputFile, "input model yaml file") context.raaPlugin = flag.String("raa-run", "raa_calc", "RAA calculation run file name") // flags @@ -4561,7 +4539,7 @@ func (context *Context) ParseCommandlineArgs() { // folders if *context.diagramDPI < 20 { *context.diagramDPI = 20 - } else if *context.diagramDPI > context.maxGraphvizDPI { + } else if *context.diagramDPI > context.MaxGraphvizDPI { *context.diagramDPI = 300 } @@ -4570,7 +4548,7 @@ func (context *Context) ParseCommandlineArgs() { // folders context.progressReporter = CommandLineProgressReporter{} } - context.ServerMode = (*context.serverPort > 0) + context.ServerMode = *context.serverPort > 0 } func (context *Context) applyWildcardRiskTrackingEvaluation() { @@ -4804,12 +4782,12 @@ func (context *Context) 
renderDataFlowDiagramGraphvizImage(dotFile *os.File, tar defer func() { _ = os.Remove(tmpFilePNG.Name()) }() // copy into tmp file as input - input, err := os.ReadFile(dotFile.Name()) + inputDOT, err := os.ReadFile(dotFile.Name()) if err != nil { fmt.Println(err) return } - err = os.WriteFile(tmpFileDOT.Name(), input, 0644) + err = os.WriteFile(tmpFileDOT.Name(), inputDOT, 0644) if err != nil { fmt.Println("Error creating", tmpFileDOT.Name()) fmt.Println(err) @@ -4825,12 +4803,12 @@ func (context *Context) renderDataFlowDiagramGraphvizImage(dotFile *os.File, tar panic(errors.New("graph rendering call failed with error:" + err.Error())) } // copy into resulting file - input, err = os.ReadFile(tmpFilePNG.Name()) + inputPNG, err := os.ReadFile(tmpFilePNG.Name()) if err != nil { fmt.Println(err) return } - err = os.WriteFile(filepath.Join(targetDir, context.dataFlowDiagramFilenamePNG), input, 0644) + err = os.WriteFile(filepath.Join(targetDir, context.dataFlowDiagramFilenamePNG), inputPNG, 0644) if err != nil { fmt.Println("Error creating", context.dataFlowDiagramFilenamePNG) fmt.Println(err) @@ -4852,12 +4830,12 @@ func (context *Context) renderDataAssetDiagramGraphvizImage(dotFile *os.File, ta defer func() { _ = os.Remove(tmpFilePNG.Name()) }() // copy into tmp file as input - input, err := os.ReadFile(dotFile.Name()) + inputDOT, err := os.ReadFile(dotFile.Name()) if err != nil { fmt.Println(err) return } - err = os.WriteFile(tmpFileDOT.Name(), input, 0644) + err = os.WriteFile(tmpFileDOT.Name(), inputDOT, 0644) if err != nil { fmt.Println("Error creating", tmpFileDOT.Name()) fmt.Println(err) @@ -4873,12 +4851,12 @@ func (context *Context) renderDataAssetDiagramGraphvizImage(dotFile *os.File, ta panic(errors.New("graph rendering call failed with error: " + err.Error())) } // copy into resulting file - input, err = os.ReadFile(tmpFilePNG.Name()) + inputPNG, err := os.ReadFile(tmpFilePNG.Name()) if err != nil { fmt.Println(err) return } - err = 
os.WriteFile(filepath.Join(targetDir, context.dataAssetDiagramFilenamePNG), input, 0644) + err = os.WriteFile(filepath.Join(targetDir, context.dataAssetDiagramFilenamePNG), inputPNG, 0644) if err != nil { fmt.Println("Error creating", context.dataAssetDiagramFilenamePNG) fmt.Println(err) diff --git a/pkg/common/attacker-focus.go b/pkg/common/attacker-focus.go new file mode 100644 index 00000000..931d2861 --- /dev/null +++ b/pkg/common/attacker-focus.go @@ -0,0 +1,7 @@ +package common + +type AttackerFocus struct { + Asset int // fibonacci sequence base index + ProcessedOrStoredData int // fibonacci sequence base index + TransferredData int // fibonacci sequence base index +} diff --git a/pkg/common/attractiveness.go b/pkg/common/attractiveness.go new file mode 100644 index 00000000..1e31b8c1 --- /dev/null +++ b/pkg/common/attractiveness.go @@ -0,0 +1,8 @@ +package common + +type Attractiveness struct { + Quantity int + Confidentiality AttackerFocus + Integrity AttackerFocus + Availability AttackerFocus +} diff --git a/pkg/common/config.go b/pkg/common/config.go new file mode 100644 index 00000000..c6c44fae --- /dev/null +++ b/pkg/common/config.go @@ -0,0 +1,33 @@ +package common + +type Config struct { + Verbose bool + IgnoreOrphanedRiskTracking bool + OutputDir string + RAAPlugin string + SkipRiskRules string + RiskRulesPlugins string + ModelFilename string + TemplateFilename string + ExecuteModelMacro string + DiagramDPI int + ServerPort int + AddModelTitle bool + KeepDiagramSourceFiles bool + AppFolder string + BinFolder string + ServerFolder string + TempFolder string + DefaultGraphvizDPI int + MaxGraphvizDPI int + Attractiveness Attractiveness +} + +func (c *Config) Defaults() *Config { + *c = Config{ + DefaultGraphvizDPI: 120, + MaxGraphvizDPI: 240, + } + + return c +} diff --git a/pkg/common/consts.go b/pkg/common/consts.go new file mode 100644 index 00000000..5a45f897 --- /dev/null +++ b/pkg/common/consts.go @@ -0,0 +1,46 @@ +package common + +const ( + 
TempDir = "/dev/shm" // TODO: make configurable via cmdline arg? + BinDir = "/app" + AppDir = "/app" + DataDir = "/data" + KeyDir = "keys" + ReportFilename = "report.pdf" + ExcelRisksFilename = "risks.xlsx" + ExcelTagsFilename = "tags.xlsx" + JsonRisksFilename = "risks.json" + JsonTechnicalAssetsFilename = "technical-assets.json" + JsonStatsFilename = "stats.json" + DataFlowDiagramFilenameDOT = "data-flow-diagram.gv" + DataFlowDiagramFilenamePNG = "data-flow-diagram.png" + DataAssetDiagramFilenameDOT = "data-asset-diagram.gv" + DataAssetDiagramFilenamePNG = "data-asset-diagram.png" + GraphvizDataFlowDiagramConversionCall = "render-data-flow-diagram.sh" + GraphvizDataAssetDiagramConversionCall = "render-data-asset-diagram.sh" + InputFile = "threagile.yaml" +) + +const ( + ServerPortCommand = "server-port" + CreateExampleModelCommand = "create-example-model" + CreateStubModelCommand = "create-stub-model" + CreateEditingSupportCommand = "create-editing-support" + GenerateDataFlowDiagramCommand = "generate-data-flow-diagram" + GenerateDataAssetDiagramCommand = "generate-data-asset-diagram" + GenerateRisksJSONCommand = "generate-risks-json" + GenerateTechnicalAssetsJSONCommand = "generate-technical-assets-json" + GenerateStatsJSONCommand = "generate-stats-json" + GenerateRisksExcelCommand = "generate-risks-excel" + GenerateTagsExcelCommand = "generate-tags-excel" + GenerateReportPDFCommand = "generate-report-pdf" + PrintVersionCommand = "version" + ListTypesCommand = "list-types" + ListRiskRulesCommand = "list-risk-rules" + ListModelMacrosCommand = "list-model-macros" + ExplainTypesCommand = "explain-types" + ExplainRiskRulesCommand = "explain-risk-rules" + ExplainModelMacrosCommand = "explain-model-macros" + Print3rdPartyCommand = "print-3rd-party-licenses" + PrintLicenseCommand = "print-license" +) diff --git a/pkg/common/plugin-input.go b/pkg/common/plugin-input.go new file mode 100644 index 00000000..7207bbb4 --- /dev/null +++ b/pkg/common/plugin-input.go @@ -0,0 
+1,10 @@ +package common + +import ( + "github.com/threagile/threagile/pkg/model" +) + +type PluginInput struct { + Config + model.ParsedModel +} diff --git a/pkg/examples/examples.go b/pkg/examples/examples.go index 98b3b6e8..5d42d6ec 100644 --- a/pkg/examples/examples.go +++ b/pkg/examples/examples.go @@ -5,6 +5,7 @@ package examples import ( "fmt" + "github.com/threagile/threagile/pkg/common" "io" "os" "path/filepath" @@ -16,7 +17,7 @@ func CreateExampleModelFile(appFolder, outputDir string) error { return nil } - _, altError := copyFile(filepath.Join(appFolder, "threagile.yaml"), filepath.Join(outputDir, "threagile-example-model.yaml")) + _, altError := copyFile(filepath.Join(appFolder, common.InputFile), filepath.Join(outputDir, "threagile-example-model.yaml")) if altError != nil { return err } @@ -30,7 +31,7 @@ func CreateStubModelFile(appFolder, outputDir string) error { return nil } - _, altError := copyFile(filepath.Join(appFolder, "threagile.yaml"), filepath.Join(outputDir, "threagile-stub-model.yaml")) + _, altError := copyFile(filepath.Join(appFolder, common.InputFile), filepath.Join(outputDir, "threagile-stub-model.yaml")) if altError != nil { return err } From 49776646b51f458f7ebb284ab3f34c4a67d5974c Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Mon, 25 Dec 2023 11:31:41 +0000 Subject: [PATCH 18/68] Move parsing types closer to type definition --- go.mod | 52 +- go.sum | 63 +- internal/threagile/context.go | 608 ++---------------- pkg/security/types/authentication.go | 15 + pkg/security/types/authentication_test.go | 63 ++ pkg/security/types/authorization.go | 15 + pkg/security/types/authorization_test.go | 47 ++ pkg/security/types/confidentiality_test.go | 55 ++ pkg/security/types/criticality_test.go | 55 ++ pkg/security/types/data_breach_probability.go | 20 +- .../types/data_breach_probability_test.go | 51 ++ pkg/security/types/data_format.go | 15 + pkg/security/types/data_format_test.go | 55 ++ pkg/security/types/encryption_style_test.go 
| 55 ++ pkg/security/types/protocol.go | 15 + pkg/security/types/protocol_test.go | 223 +++++++ pkg/security/types/quantity_test.go | 51 ++ .../types/risk_exploitation_impact_test.go | 55 ++ pkg/security/types/risk_explotation_impact.go | 19 +- .../types/risk_explotation_likelihood.go | 19 +- .../types/risk_explotation_likelihood_test.go | 55 ++ pkg/security/types/risk_function.go | 16 +- pkg/security/types/risk_function_test.go | 51 ++ pkg/security/types/risk_severity.go | 19 +- pkg/security/types/risk_severity_test.go | 59 ++ pkg/security/types/risk_status.go | 16 +- pkg/security/types/risk_status_test.go | 59 ++ pkg/security/types/stride.go | 16 +- pkg/security/types/stride_test.go | 59 ++ pkg/security/types/technical_asset_machine.go | 15 + .../types/technical_asset_machine_test.go | 51 ++ pkg/security/types/technical_asset_size.go | 15 + .../types/technical_asset_size_test.go | 51 ++ .../types/technical_asset_technology.go | 15 + .../types/technical_asset_technology_test.go | 263 ++++++++ pkg/security/types/technical_asset_type.go | 15 + .../types/technical_asset_type_test.go | 47 ++ pkg/security/types/trust_boundary.go | 15 + pkg/security/types/trust_boundary_test.go | 63 ++ pkg/security/types/usage_test.go | 43 ++ 40 files changed, 1880 insertions(+), 614 deletions(-) create mode 100644 pkg/security/types/authentication_test.go create mode 100644 pkg/security/types/authorization_test.go create mode 100644 pkg/security/types/confidentiality_test.go create mode 100644 pkg/security/types/criticality_test.go create mode 100644 pkg/security/types/data_breach_probability_test.go create mode 100644 pkg/security/types/data_format_test.go create mode 100644 pkg/security/types/encryption_style_test.go create mode 100644 pkg/security/types/protocol_test.go create mode 100644 pkg/security/types/quantity_test.go create mode 100644 pkg/security/types/risk_exploitation_impact_test.go create mode 100644 pkg/security/types/risk_explotation_likelihood_test.go create mode 
100644 pkg/security/types/risk_function_test.go create mode 100644 pkg/security/types/risk_severity_test.go create mode 100644 pkg/security/types/risk_status_test.go create mode 100644 pkg/security/types/stride_test.go create mode 100644 pkg/security/types/technical_asset_machine_test.go create mode 100644 pkg/security/types/technical_asset_size_test.go create mode 100644 pkg/security/types/technical_asset_technology_test.go create mode 100644 pkg/security/types/technical_asset_type_test.go create mode 100644 pkg/security/types/trust_boundary_test.go create mode 100644 pkg/security/types/usage_test.go diff --git a/go.mod b/go.mod index 0d650038..39558d98 100644 --- a/go.mod +++ b/go.mod @@ -1,53 +1,59 @@ module github.com/threagile/threagile -go 1.21 +go 1.20 require ( github.com/gin-gonic/gin v1.9.1 - github.com/google/uuid v1.4.0 + github.com/google/uuid v1.5.0 github.com/jung-kurt/gofpdf v1.16.2 github.com/wcharczuk/go-chart v2.0.1+incompatible github.com/xuri/excelize/v2 v2.8.0 - golang.org/x/crypto v0.14.0 + golang.org/x/crypto v0.17.0 gopkg.in/yaml.v3 v3.0.1 ) require ( - github.com/akedrou/textdiff v0.0.0-20230423230343-2ebdcebdccc1 // indirect - github.com/blend/go-sdk v1.20220411.3 // indirect - github.com/bytedance/sonic v1.10.2 // indirect github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect - github.com/chenzhuoyu/iasm v0.9.1 // indirect - github.com/gabriel-vasile/mimetype v1.4.3 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect github.com/gin-contrib/sse v0.1.0 // indirect github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect - github.com/go-playground/validator/v10 v10.15.5 // indirect github.com/goccy/go-json v0.10.2 // indirect - github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/json-iterator/go v1.1.12 // indirect - github.com/klauspost/cpuid/v2 v2.2.5 // 
indirect github.com/leodido/go-urn v1.2.4 // indirect - github.com/mattn/go-isatty v0.0.20 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect - github.com/pelletier/go-toml/v2 v2.1.0 // indirect - github.com/phpdave11/gofpdi v1.0.13 // indirect - github.com/pkg/errors v0.9.1 // indirect + github.com/pkg/errors v0.8.1 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect github.com/richardlehane/mscfb v1.0.4 // indirect github.com/richardlehane/msoleps v1.0.3 // indirect - github.com/spf13/cobra v1.8.0 // indirect github.com/spf13/pflag v1.0.5 // indirect github.com/twitchyliquid64/golang-asm v0.15.1 // indirect - github.com/ugorji/go/codec v1.2.11 // indirect + golang.org/x/sys v0.15.0 // indirect + golang.org/x/text v0.14.0 // indirect +) + +require ( + github.com/akedrou/textdiff v0.0.0-20230423230343-2ebdcebdccc1 + github.com/blend/go-sdk v1.20220411.3 // indirect + github.com/bytedance/sonic v1.10.2 // indirect + github.com/chenzhuoyu/iasm v0.9.1 // indirect + github.com/gabriel-vasile/mimetype v1.4.3 // indirect + github.com/go-playground/validator/v10 v10.16.0 // indirect + github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect + github.com/klauspost/cpuid/v2 v2.2.6 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/pelletier/go-toml/v2 v2.1.1 // indirect + github.com/phpdave11/gofpdi v1.0.13 // indirect + github.com/spf13/cobra v1.8.0 + github.com/stretchr/testify v1.8.4 + github.com/ugorji/go/codec v1.2.12 // indirect github.com/xuri/efp v0.0.0-20231025114914-d1ff6096ae53 // indirect github.com/xuri/nfp v0.0.0-20230919160717-d98342af3f05 // indirect - golang.org/x/arch v0.5.0 // indirect - golang.org/x/image v0.13.0 // indirect - golang.org/x/net v0.17.0 // indirect - golang.org/x/sys v0.13.0 // indirect - golang.org/x/text v0.13.0 // indirect - 
google.golang.org/protobuf v1.31.0 // indirect + golang.org/x/arch v0.6.0 // indirect + golang.org/x/image v0.14.0 // indirect + golang.org/x/net v0.19.0 // indirect + google.golang.org/protobuf v1.32.0 // indirect ) diff --git a/go.sum b/go.sum index cc4471a6..fa9a4f97 100644 --- a/go.sum +++ b/go.sum @@ -1,4 +1,3 @@ -cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= github.com/akedrou/textdiff v0.0.0-20230423230343-2ebdcebdccc1 h1:XfKKiQL7irIGI7nfu4a6IKhrgUHvKwhH/AnuHgZy/+U= github.com/akedrou/textdiff v0.0.0-20230423230343-2ebdcebdccc1/go.mod h1:PJwvxBpzqjdeomc0r8Hgc+xJC7k6z+k371tffCGXR2M= github.com/blend/go-sdk v1.20220411.3 h1:GFV4/FQX5UzXLPwWV03gP811pj7B8J2sbuq+GJQofXc= @@ -26,24 +25,20 @@ github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= -github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= -github.com/go-playground/validator/v10 v10.15.5 h1:LEBecTWb/1j5TNY1YYG2RcOUN3R7NLylN+x8TTueE24= -github.com/go-playground/validator/v10 v10.15.5/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= +github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE= +github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= github.com/goccy/go-json 
v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g= github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4= -github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU= +github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= @@ -52,8 +47,8 @@ github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+ github.com/jung-kurt/gofpdf v1.16.2 h1:jgbatWHfRlPYiK85qgevsZTHviWXKwB1TTiKdz5PtRc= github.com/jung-kurt/gofpdf v1.16.2/go.mod h1:1hl7y57EsiPAkLbOwzpzqgx1A30nQCk/YmFV8S2vmK0= github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= -github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= -github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= +github.com/klauspost/cpuid/v2 v2.2.6 
h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= +github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= @@ -66,14 +61,13 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= -github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4= -github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= +github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI= +github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= github.com/phpdave11/gofpdi v1.0.7/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= github.com/phpdave11/gofpdi v1.0.13 h1:o61duiW8M9sMlkVXWlvP92sZJtGKENvW3VExs6dZukQ= github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= +github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/richardlehane/mscfb v1.0.4 
h1:WULscsljNPConisD5hR0+OyZjwK46Pfyr6mPu5ZawpM= @@ -101,8 +95,8 @@ github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcU github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= -github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU= -github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= +github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= +github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= github.com/wcharczuk/go-chart v2.0.1+incompatible h1:0pz39ZAycJFF7ju/1mepnk26RLVLBCWz1STcD3doU0A= github.com/wcharczuk/go-chart v2.0.1+incompatible/go.mod h1:PF5tmL4EIx/7Wf+hEkpCqYi5He4u90sw+0+6FhrryuE= github.com/xuri/efp v0.0.0-20230802181842-ad255f2331ca/go.mod h1:ybY/Jr0T0GTCnYjKqmdwxyxn2BQf2RcQIIvex5QldPI= @@ -115,17 +109,17 @@ github.com/xuri/nfp v0.0.0-20230919160717-d98342af3f05 h1:qhbILQo1K3mphbwKh1vNm4 github.com/xuri/nfp v0.0.0-20230919160717-d98342af3f05/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= -golang.org/x/arch v0.5.0 h1:jpGode6huXQxcskEIpOCvrU+tzo81b6+oFLUYXWtH/Y= -golang.org/x/arch v0.5.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= +golang.org/x/arch v0.6.0 h1:S0JTfE48HbRj80+4tbvZDYsJ3tGv6BUU3XxyZ7CirAc= +golang.org/x/arch v0.6.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod 
h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= -golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= -golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= +golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= +golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.11.0/go.mod h1:bglhjqbqVuEb9e9+eNR45Jfu7D+T4Qan+NhQk8Ck2P8= -golang.org/x/image v0.13.0 h1:3cge/F/QTkNLauhf2QoE9zp+7sr+ZcL4HnoZmdwg9sg= -golang.org/x/image v0.13.0/go.mod h1:6mmbMOeV28HuMTgA6OSRkdXKYw/t5W9Uwn2Yv1r3Yxk= +golang.org/x/image v0.14.0 h1:tNgSxAFe3jC4uYqvZdTr84SZoM1KfwdC9SKIFrLjFn4= +golang.org/x/image v0.14.0/go.mod h1:HUYqC05R2ZcZ3ejNQsIHQDQiwWM4JBqmm6MKANTp4LE= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -134,9 +128,8 @@ golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= -golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= -golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= -golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= +golang.org/x/net v0.19.0/go.mod 
h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -149,8 +142,8 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= -golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= @@ -162,22 +155,16 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= -golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= +golang.org/x/text 
v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= -google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= +google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/internal/threagile/context.go b/internal/threagile/context.go index 52f0ea5e..a66b0415 100644 --- a/internal/threagile/context.go 
+++ b/internal/threagile/context.go @@ -14,8 +14,7 @@ import ( "encoding/hex" "errors" "flag" - "fmt" // TODO: no fmt here - "github.com/threagile/threagile/pkg/common" + "fmt" // TODO: no fmt.Println here "hash/fnv" "io" "log" @@ -31,6 +30,8 @@ import ( "sync" "time" + "github.com/threagile/threagile/pkg/common" + addbuildpipeline "github.com/threagile/threagile/pkg/macros/built-in/add-build-pipeline" addvault "github.com/threagile/threagile/pkg/macros/built-in/add-vault" prettyprint "github.com/threagile/threagile/pkg/macros/built-in/pretty-print" @@ -474,19 +475,8 @@ func (context *Context) parseModel() { // data, _ := json.MarshalIndent(context.modelInput, "", " ") // fmt.Printf("%v\n", string(data)) - var businessCriticality types.Criticality - switch context.modelInput.BusinessCriticality { - case types.Archive.String(): - businessCriticality = types.Archive - case types.Operational.String(): - businessCriticality = types.Operational - case types.Important.String(): - businessCriticality = types.Important - case types.Critical.String(): - businessCriticality = types.Critical - case types.MissionCritical.String(): - businessCriticality = types.MissionCritical - default: + businessCriticality, err := types.ParseCriticality(context.modelInput.BusinessCriticality) + if err != nil { panic(errors.New("unknown 'business_criticality' value of application: " + context.modelInput.BusinessCriticality)) } @@ -539,75 +529,24 @@ func (context *Context) parseModel() { for title, asset := range context.modelInput.DataAssets { id := fmt.Sprintf("%v", asset.ID) - var usage types.Usage - switch asset.Usage { - case types.Business.String(): - usage = types.Business - case types.DevOps.String(): - usage = types.DevOps - default: + usage, err := types.ParseUsage(asset.Usage) + if err != nil { panic(errors.New("unknown 'usage' value of data asset '" + title + "': " + asset.Usage)) } - - var quantity types.Quantity - switch asset.Quantity { - case types.VeryFew.String(): - quantity 
= types.VeryFew - case types.Few.String(): - quantity = types.Few - case types.Many.String(): - quantity = types.Many - case types.VeryMany.String(): - quantity = types.VeryMany - default: + quantity, err := types.ParseQuantity(asset.Quantity) + if err != nil { panic(errors.New("unknown 'quantity' value of data asset '" + title + "': " + asset.Quantity)) } - - var confidentiality types.Confidentiality - switch asset.Confidentiality { - case types.Public.String(): - confidentiality = types.Public - case types.Internal.String(): - confidentiality = types.Internal - case types.Restricted.String(): - confidentiality = types.Restricted - case types.Confidential.String(): - confidentiality = types.Confidential - case types.StrictlyConfidential.String(): - confidentiality = types.StrictlyConfidential - default: + confidentiality, err := types.ParseConfidentiality(asset.Confidentiality) + if err != nil { panic(errors.New("unknown 'confidentiality' value of data asset '" + title + "': " + asset.Confidentiality)) } - - var integrity types.Criticality - switch asset.Integrity { - case types.Archive.String(): - integrity = types.Archive - case types.Operational.String(): - integrity = types.Operational - case types.Important.String(): - integrity = types.Important - case types.Critical.String(): - integrity = types.Critical - case types.MissionCritical.String(): - integrity = types.MissionCritical - default: + integrity, err := types.ParseCriticality(asset.Integrity) + if err != nil { panic(errors.New("unknown 'integrity' value of data asset '" + title + "': " + asset.Integrity)) } - - var availability types.Criticality - switch asset.Availability { - case types.Archive.String(): - availability = types.Archive - case types.Operational.String(): - availability = types.Operational - case types.Important.String(): - availability = types.Important - case types.Critical.String(): - availability = types.Critical - case types.MissionCritical.String(): - availability = 
types.MissionCritical - default: + availability, err := types.ParseCriticality(asset.Availability) + if err != nil { panic(errors.New("unknown 'availability' value of data asset '" + title + "': " + asset.Availability)) } @@ -636,14 +575,9 @@ func (context *Context) parseModel() { for title, asset := range context.modelInput.TechnicalAssets { id := fmt.Sprintf("%v", asset.ID) - var usage types.Usage - switch asset.Usage { - case types.Business.String(): - usage = types.Business - case types.DevOps.String(): - usage = types.DevOps - default: - panic(errors.New("unknown 'usage' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Usage))) + usage, err := types.ParseUsage(asset.Usage) + if err != nil { + panic(errors.New("unknown 'usage' value of technical asset '" + title + "': " + asset.Usage)) } var dataAssetsProcessed = make([]string, 0) @@ -666,247 +600,47 @@ func (context *Context) parseModel() { } } - var technicalAssetType types.TechnicalAssetType - switch asset.Type { - case types.ExternalEntity.String(): - technicalAssetType = types.ExternalEntity - case types.Process.String(): - technicalAssetType = types.Process - case types.Datastore.String(): - technicalAssetType = types.Datastore - default: + technicalAssetType, err := types.ParseTechnicalAssetType(asset.Type) + if err != nil { panic(errors.New("unknown 'type' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Type))) } - - var technicalAssetSize types.TechnicalAssetSize - switch asset.Size { - case types.Service.String(): - technicalAssetSize = types.Service - case types.System.String(): - technicalAssetSize = types.System - case types.Application.String(): - technicalAssetSize = types.Application - case types.Component.String(): - technicalAssetSize = types.Component - default: + technicalAssetSize, err := types.ParseTechnicalAssetSize(asset.Size) + if err != nil { panic(errors.New("unknown 'size' value of technical asset '" + title + "': " + fmt.Sprintf("%v", 
asset.Size))) } - - var technicalAssetTechnology types.TechnicalAssetTechnology - switch asset.Technology { - case types.UnknownTechnology.String(): - technicalAssetTechnology = types.UnknownTechnology - case types.ClientSystem.String(): - technicalAssetTechnology = types.ClientSystem - case types.Browser.String(): - technicalAssetTechnology = types.Browser - case types.Desktop.String(): - technicalAssetTechnology = types.Desktop - case types.MobileApp.String(): - technicalAssetTechnology = types.MobileApp - case types.DevOpsClient.String(): - technicalAssetTechnology = types.DevOpsClient - case types.WebServer.String(): - technicalAssetTechnology = types.WebServer - case types.WebApplication.String(): - technicalAssetTechnology = types.WebApplication - case types.ApplicationServer.String(): - technicalAssetTechnology = types.ApplicationServer - case types.Database.String(): - technicalAssetTechnology = types.Database - case types.FileServer.String(): - technicalAssetTechnology = types.FileServer - case types.LocalFileSystem.String(): - technicalAssetTechnology = types.LocalFileSystem - case types.ERP.String(): - technicalAssetTechnology = types.ERP - case types.CMS.String(): - technicalAssetTechnology = types.CMS - case types.WebServiceREST.String(): - technicalAssetTechnology = types.WebServiceREST - case types.WebServiceSOAP.String(): - technicalAssetTechnology = types.WebServiceSOAP - case types.EJB.String(): - technicalAssetTechnology = types.EJB - case types.SearchIndex.String(): - technicalAssetTechnology = types.SearchIndex - case types.SearchEngine.String(): - technicalAssetTechnology = types.SearchEngine - case types.ServiceRegistry.String(): - technicalAssetTechnology = types.ServiceRegistry - case types.ReverseProxy.String(): - technicalAssetTechnology = types.ReverseProxy - case types.LoadBalancer.String(): - technicalAssetTechnology = types.LoadBalancer - case types.BuildPipeline.String(): - technicalAssetTechnology = types.BuildPipeline - case 
types.SourcecodeRepository.String(): - technicalAssetTechnology = types.SourcecodeRepository - case types.ArtifactRegistry.String(): - technicalAssetTechnology = types.ArtifactRegistry - case types.CodeInspectionPlatform.String(): - technicalAssetTechnology = types.CodeInspectionPlatform - case types.Monitoring.String(): - technicalAssetTechnology = types.Monitoring - case types.LDAPServer.String(): - technicalAssetTechnology = types.LDAPServer - case types.ContainerPlatform.String(): - technicalAssetTechnology = types.ContainerPlatform - case types.BatchProcessing.String(): - technicalAssetTechnology = types.BatchProcessing - case types.EventListener.String(): - technicalAssetTechnology = types.EventListener - case types.IdentityProvider.String(): - technicalAssetTechnology = types.IdentityProvider - case types.IdentityStoreLDAP.String(): - technicalAssetTechnology = types.IdentityStoreLDAP - case types.IdentityStoreDatabase.String(): - technicalAssetTechnology = types.IdentityStoreDatabase - case types.Tool.String(): - technicalAssetTechnology = types.Tool - case types.CLI.String(): - technicalAssetTechnology = types.CLI - case types.Task.String(): - technicalAssetTechnology = types.Task - case types.Function.String(): - technicalAssetTechnology = types.Function - case types.Gateway.String(): - technicalAssetTechnology = types.Gateway - case types.IoTDevice.String(): - technicalAssetTechnology = types.IoTDevice - case types.MessageQueue.String(): - technicalAssetTechnology = types.MessageQueue - case types.StreamProcessing.String(): - technicalAssetTechnology = types.StreamProcessing - case types.ServiceMesh.String(): - technicalAssetTechnology = types.ServiceMesh - case types.DataLake.String(): - technicalAssetTechnology = types.DataLake - case types.BigDataPlatform.String(): - technicalAssetTechnology = types.BigDataPlatform - case types.ReportEngine.String(): - technicalAssetTechnology = types.ReportEngine - case types.AI.String(): - technicalAssetTechnology = 
types.AI - case types.MailServer.String(): - technicalAssetTechnology = types.MailServer - case types.Vault.String(): - technicalAssetTechnology = types.Vault - case types.HSM.String(): - technicalAssetTechnology = types.HSM - case types.WAF.String(): - technicalAssetTechnology = types.WAF - case types.IDS.String(): - technicalAssetTechnology = types.IDS - case types.IPS.String(): - technicalAssetTechnology = types.IPS - case types.Scheduler.String(): - technicalAssetTechnology = types.Scheduler - case types.Mainframe.String(): - technicalAssetTechnology = types.Mainframe - case types.BlockStorage.String(): - technicalAssetTechnology = types.BlockStorage - case types.Library.String(): - technicalAssetTechnology = types.Library - default: + technicalAssetTechnology, err := types.ParseTechnicalAssetTechnology(asset.Technology) + if err != nil { panic(errors.New("unknown 'technology' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Technology))) } - - var encryption types.EncryptionStyle - switch asset.Encryption { - case types.NoneEncryption.String(): - encryption = types.NoneEncryption - case types.Transparent.String(): - encryption = types.Transparent - case types.DataWithSymmetricSharedKey.String(): - encryption = types.DataWithSymmetricSharedKey - case types.DataWithAsymmetricSharedKey.String(): - encryption = types.DataWithAsymmetricSharedKey - case types.DataWithEndUserIndividualKey.String(): - encryption = types.DataWithEndUserIndividualKey - default: + encryption, err := types.ParseEncryptionStyle(asset.Encryption) + if err != nil { panic(errors.New("unknown 'encryption' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Encryption))) } - - var technicalAssetMachine types.TechnicalAssetMachine - switch asset.Machine { - case types.Physical.String(): - technicalAssetMachine = types.Physical - case types.Virtual.String(): - technicalAssetMachine = types.Virtual - case types.Container.String(): - technicalAssetMachine = 
types.Container - case types.Serverless.String(): - technicalAssetMachine = types.Serverless - default: + technicalAssetMachine, err := types.ParseTechnicalAssetMachine(asset.Machine) + if err != nil { panic(errors.New("unknown 'machine' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Machine))) } - - var confidentiality types.Confidentiality - switch asset.Confidentiality { - case types.Public.String(): - confidentiality = types.Public - case types.Internal.String(): - confidentiality = types.Internal - case types.Restricted.String(): - confidentiality = types.Restricted - case types.Confidential.String(): - confidentiality = types.Confidential - case types.StrictlyConfidential.String(): - confidentiality = types.StrictlyConfidential - default: + confidentiality, err := types.ParseConfidentiality(asset.Confidentiality) + if err != nil { panic(errors.New("unknown 'confidentiality' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Confidentiality))) } - - var integrity types.Criticality - switch asset.Integrity { - case types.Archive.String(): - integrity = types.Archive - case types.Operational.String(): - integrity = types.Operational - case types.Important.String(): - integrity = types.Important - case types.Critical.String(): - integrity = types.Critical - case types.MissionCritical.String(): - integrity = types.MissionCritical - default: + integrity, err := types.ParseCriticality(asset.Integrity) + if err != nil { panic(errors.New("unknown 'integrity' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Integrity))) } - - var availability types.Criticality - switch asset.Availability { - case types.Archive.String(): - availability = types.Archive - case types.Operational.String(): - availability = types.Operational - case types.Important.String(): - availability = types.Important - case types.Critical.String(): - availability = types.Critical - case types.MissionCritical.String(): - availability = 
types.MissionCritical - default: + availability, err := types.ParseCriticality(asset.Availability) + if err != nil { panic(errors.New("unknown 'availability' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Availability))) } dataFormatsAccepted := make([]types.DataFormat, 0) if asset.DataFormatsAccepted != nil { for _, dataFormatName := range asset.DataFormatsAccepted { - switch dataFormatName { - case types.JSON.String(): - dataFormatsAccepted = append(dataFormatsAccepted, types.JSON) - case types.XML.String(): - dataFormatsAccepted = append(dataFormatsAccepted, types.XML) - case types.Serialization.String(): - dataFormatsAccepted = append(dataFormatsAccepted, types.Serialization) - case types.File.String(): - dataFormatsAccepted = append(dataFormatsAccepted, types.File) - case types.CSV.String(): - dataFormatsAccepted = append(dataFormatsAccepted, types.CSV) - default: + dataFormat, err := types.ParseDataFormat(dataFormatName) + if err != nil { panic(errors.New("unknown 'data_formats_accepted' value of technical asset '" + title + "': " + fmt.Sprintf("%v", dataFormatName))) } + dataFormatsAccepted = append(dataFormatsAccepted, dataFormat) } } @@ -915,145 +649,24 @@ func (context *Context) parseModel() { for commLinkTitle, commLink := range asset.CommunicationLinks { constraint := true weight := 1 - var protocol types.Protocol - var authentication types.Authentication - var authorization types.Authorization - var usage types.Usage var dataAssetsSent []string var dataAssetsReceived []string - switch commLink.Authentication { - case types.NoneAuthentication.String(): - authentication = types.NoneAuthentication - case types.Credentials.String(): - authentication = types.Credentials - case types.SessionId.String(): - authentication = types.SessionId - case types.Token.String(): - authentication = types.Token - case types.ClientCertificate.String(): - authentication = types.ClientCertificate - case types.TwoFactor.String(): - authentication = 
types.TwoFactor - case types.Externalized.String(): - authentication = types.Externalized - default: + authentication, err := types.ParseAuthentication(commLink.Authentication) + if err != nil { panic(errors.New("unknown 'authentication' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Authentication))) } - - switch commLink.Authorization { - case types.NoneAuthorization.String(): - authorization = types.NoneAuthorization - case types.TechnicalUser.String(): - authorization = types.TechnicalUser - case types.EndUserIdentityPropagation.String(): - authorization = types.EndUserIdentityPropagation - default: + authorization, err := types.ParseAuthorization(commLink.Authorization) + if err != nil { panic(errors.New("unknown 'authorization' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Authorization))) } - - switch commLink.Usage { - case types.Business.String(): - usage = types.Business - case types.DevOps.String(): - usage = types.DevOps - default: + usage, err := types.ParseUsage(commLink.Usage) + if err != nil { panic(errors.New("unknown 'usage' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Usage))) } - - switch commLink.Protocol { - case types.UnknownProtocol.String(): - protocol = types.UnknownProtocol - case types.HTTP.String(): - protocol = types.HTTP - case types.HTTPS.String(): - protocol = types.HTTPS - case types.WS.String(): - protocol = types.WS - case types.WSS.String(): - protocol = types.WSS - case types.MQTT.String(): - protocol = types.MQTT - case types.JDBC.String(): - protocol = types.JDBC - case types.JdbcEncrypted.String(): - protocol = types.JdbcEncrypted - case types.ODBC.String(): - protocol = types.ODBC - case types.OdbcEncrypted.String(): - protocol = types.OdbcEncrypted - case types.SqlAccessProtocol.String(): - protocol = 
types.SqlAccessProtocol - case types.SqlAccessProtocolEncrypted.String(): - protocol = types.SqlAccessProtocolEncrypted - case types.NosqlAccessProtocol.String(): - protocol = types.NosqlAccessProtocol - case types.NosqlAccessProtocolEncrypted.String(): - protocol = types.NosqlAccessProtocolEncrypted - case types.TEXT.String(): - protocol = types.TEXT - case types.TextEncrypted.String(): - protocol = types.TextEncrypted - case types.BINARY.String(): - protocol = types.BINARY - case types.BinaryEncrypted.String(): - protocol = types.BinaryEncrypted - case types.SSH.String(): - protocol = types.SSH - case types.SshTunnel.String(): - protocol = types.SshTunnel - case types.SMTP.String(): - protocol = types.SMTP - case types.SmtpEncrypted.String(): - protocol = types.SmtpEncrypted - case types.POP3.String(): - protocol = types.POP3 - case types.Pop3Encrypted.String(): - protocol = types.Pop3Encrypted - case types.IMAP.String(): - protocol = types.IMAP - case types.ImapEncrypted.String(): - protocol = types.ImapEncrypted - case types.FTP.String(): - protocol = types.FTP - case types.FTPS.String(): - protocol = types.FTPS - case types.SFTP.String(): - protocol = types.SFTP - case types.SCP.String(): - protocol = types.SCP - case types.LDAP.String(): - protocol = types.LDAP - case types.LDAPS.String(): - protocol = types.LDAPS - case types.JMS.String(): - protocol = types.JMS - case types.NFS.String(): - protocol = types.NFS - case types.SMB.String(): - protocol = types.SMB - case types.SmbEncrypted.String(): - protocol = types.SmbEncrypted - case types.LocalFileAccess.String(): - protocol = types.LocalFileAccess - case types.NRPE.String(): - protocol = types.NRPE - case types.XMPP.String(): - protocol = types.XMPP - case types.IIOP.String(): - protocol = types.IIOP - case types.IiopEncrypted.String(): - protocol = types.IiopEncrypted - case types.JRMP.String(): - protocol = types.JRMP - case types.JrmpEncrypted.String(): - protocol = types.JrmpEncrypted - case 
types.InProcessLibraryCall.String(): - protocol = types.InProcessLibraryCall - case types.ContainerSpawning.String(): - protocol = types.ContainerSpawning - default: - panic(errors.New("unknown 'protocol' of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Protocol))) + protocol, err := types.ParseProtocol(commLink.Protocol) + if err != nil { + panic(errors.New("unknown 'protocol' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Protocol))) } if commLink.DataAssetsSent != nil { @@ -1175,23 +788,8 @@ func (context *Context) parseModel() { } } - var trustBoundaryType types.TrustBoundaryType - switch boundary.Type { - case types.NetworkOnPrem.String(): - trustBoundaryType = types.NetworkOnPrem - case types.NetworkDedicatedHoster.String(): - trustBoundaryType = types.NetworkDedicatedHoster - case types.NetworkVirtualLAN.String(): - trustBoundaryType = types.NetworkVirtualLAN - case types.NetworkCloudProvider.String(): - trustBoundaryType = types.NetworkCloudProvider - case types.NetworkCloudSecurityGroup.String(): - trustBoundaryType = types.NetworkCloudSecurityGroup - case types.NetworkPolicyNamespaceIsolation.String(): - trustBoundaryType = types.NetworkPolicyNamespaceIsolation - case types.ExecutionEnvironment.String(): - trustBoundaryType = types.ExecutionEnvironment - default: + trustBoundaryType, err := types.ParseTrustBoundary(boundary.Type) + if err != nil { panic(errors.New("unknown 'type' of trust boundary '" + title + "': " + fmt.Sprintf("%v", boundary.Type))) } @@ -1251,35 +849,12 @@ func (context *Context) parseModel() { for title, individualCategory := range context.modelInput.IndividualRiskCategories { id := fmt.Sprintf("%v", individualCategory.ID) - var function types.RiskFunction - switch individualCategory.Function { - case types.BusinessSide.String(): - function = types.BusinessSide - case types.Architecture.String(): - 
function = types.Architecture - case types.Development.String(): - function = types.Development - case types.Operations.String(): - function = types.Operations - default: + function, err := types.ParseRiskFunction(individualCategory.Function) + if err != nil { panic(errors.New("unknown 'function' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.Function))) } - - var stride types.STRIDE - switch individualCategory.STRIDE { - case types.Spoofing.String(): - stride = types.Spoofing - case types.Tampering.String(): - stride = types.Tampering - case types.Repudiation.String(): - stride = types.Repudiation - case types.InformationDisclosure.String(): - stride = types.InformationDisclosure - case types.DenialOfService.String(): - stride = types.DenialOfService - case types.ElevationOfPrivilege.String(): - stride = types.ElevationOfPrivilege - default: + stride, err := types.ParseSTRIDE(individualCategory.STRIDE) + if err != nil { panic(errors.New("unknown 'stride' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.STRIDE))) } @@ -1311,57 +886,19 @@ func (context *Context) parseModel() { //individualRiskInstances := make([]model.Risk, 0) if individualCategory.RisksIdentified != nil { // TODO: also add syntax checks of input YAML when linked asset is not found or when synthetic-id is already used... 
for title, individualRiskInstance := range individualCategory.RisksIdentified { - var severity types.RiskSeverity - var exploitationLikelihood types.RiskExploitationLikelihood - var exploitationImpact types.RiskExploitationImpact var mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId string var dataBreachProbability types.DataBreachProbability var dataBreachTechnicalAssetIDs []string - - switch individualRiskInstance.Severity { - case types.LowSeverity.String(): - severity = types.LowSeverity - case types.MediumSeverity.String(): - severity = types.MediumSeverity - case types.ElevatedSeverity.String(): - severity = types.ElevatedSeverity - case types.HighSeverity.String(): - severity = types.HighSeverity - case types.CriticalSeverity.String(): - severity = types.CriticalSeverity - case "": // added default - severity = types.MediumSeverity - default: + severity, err := types.ParseRiskSeverity(individualRiskInstance.Severity) + if err != nil { panic(errors.New("unknown 'severity' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.Severity))) } - - switch individualRiskInstance.ExploitationLikelihood { - case types.Unlikely.String(): - exploitationLikelihood = types.Unlikely - case types.Likely.String(): - exploitationLikelihood = types.Likely - case types.VeryLikely.String(): - exploitationLikelihood = types.VeryLikely - case types.Frequent.String(): - exploitationLikelihood = types.Frequent - case "": // added default - exploitationLikelihood = types.Likely - default: + exploitationLikelihood, err := types.ParseRiskExploitationLikelihood(individualRiskInstance.ExploitationLikelihood) + if err != nil { panic(errors.New("unknown 'exploitation_likelihood' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationLikelihood))) } - - switch individualRiskInstance.ExploitationImpact { 
- case types.LowImpact.String(): - exploitationImpact = types.LowImpact - case types.MediumImpact.String(): - exploitationImpact = types.MediumImpact - case types.HighImpact.String(): - exploitationImpact = types.HighImpact - case types.VeryHighImpact.String(): - exploitationImpact = types.VeryHighImpact - case "": // added default - exploitationImpact = types.MediumImpact - default: + exploitationImpact, err := types.ParseRiskExploitationImpact(individualRiskInstance.ExploitationImpact) + if err != nil { panic(errors.New("unknown 'exploitation_impact' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationImpact))) } @@ -1390,16 +927,8 @@ func (context *Context) parseModel() { context.checkSharedRuntimeExists(mostRelevantSharedRuntimeId, "individual risk '"+title+"'") } - switch individualRiskInstance.DataBreachProbability { - case types.Improbable.String(): - dataBreachProbability = types.Improbable - case types.Possible.String(): - dataBreachProbability = types.Possible - case types.Probable.String(): - dataBreachProbability = types.Probable - case "": // added default - dataBreachProbability = types.Possible - default: + dataBreachProbability, err = types.ParseDataBreachProbability(individualRiskInstance.DataBreachProbability) + if err != nil { panic(errors.New("unknown 'data_breach_probability' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.DataBreachProbability))) } @@ -1447,21 +976,8 @@ func (context *Context) parseModel() { } } - var status types.RiskStatus - switch riskTracking.Status { - case types.Unchecked.String(): - status = types.Unchecked - case types.Mitigated.String(): - status = types.Mitigated - case types.InProgress.String(): - status = types.InProgress - case types.Accepted.String(): - status = types.Accepted - case types.InDiscussion.String(): - status = types.InDiscussion - case types.FalsePositive.String(): - status = types.FalsePositive - 
default: + status, err := types.ParseRiskStatus(riskTracking.Status) + if err != nil { panic(errors.New("unknown 'status' value of risk tracking '" + syntheticRiskId + "': " + riskTracking.Status)) } diff --git a/pkg/security/types/authentication.go b/pkg/security/types/authentication.go index 04157129..16659518 100644 --- a/pkg/security/types/authentication.go +++ b/pkg/security/types/authentication.go @@ -3,6 +3,11 @@ Copyright © 2023 NAME HERE */ package types +import ( + "errors" + "strings" +) + type Authentication int const ( @@ -37,6 +42,16 @@ var AuthenticationTypeDescription = [...]TypeDescription{ {"externalized", "Some external company handles authentication"}, } +func ParseAuthentication(value string) (authentication Authentication, err error) { + value = strings.TrimSpace(value) + for _, candidate := range AuthenticationValues() { + if candidate.String() == value { + return candidate.(Authentication), err + } + } + return authentication, errors.New("Unable to parse into type: " + value) +} + func (what Authentication) String() string { // NOTE: maintain list also in schema.json for validation in IDEs //return [...]string{"none", "credentials", "session-id", "token", "client-certificate", "two-factor", "externalized"}[what] diff --git a/pkg/security/types/authentication_test.go b/pkg/security/types/authentication_test.go new file mode 100644 index 00000000..ac212ba3 --- /dev/null +++ b/pkg/security/types/authentication_test.go @@ -0,0 +1,63 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type ParseAuthenticationTest struct { + input string + expected Authentication + expectedError error +} + +func TestParseAuthentication(t *testing.T) { + testCases := map[string]ParseAuthenticationTest{ + "none": { + input: "none", + expected: NoneAuthentication, + }, + "credentials": { + input: "credentials", + expected: Credentials, + }, + "session-id": { + input: "session-id", + 
expected: SessionId, + }, + "token": { + input: "token", + expected: Token, + }, + "client-certificate": { + input: "client-certificate", + expected: ClientCertificate, + }, + "two-factor": { + input: "two-factor", + expected: TwoFactor, + }, + "externalized": { + input: "externalized", + expected: Externalized, + }, + "unknown": { + input: "unknown", + expectedError: errors.New("Unable to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseAuthentication(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/authorization.go b/pkg/security/types/authorization.go index 2ccc467d..660b0a7a 100644 --- a/pkg/security/types/authorization.go +++ b/pkg/security/types/authorization.go @@ -3,6 +3,11 @@ Copyright © 2023 NAME HERE */ package types +import ( + "errors" + "strings" +) + type Authorization int const ( @@ -25,6 +30,16 @@ var AuthorizationTypeDescription = [...]TypeDescription{ {"enduser-identity-propagation", "Identity of end user propagates to this service"}, } +func ParseAuthorization(value string) (authorization Authorization, err error) { + value = strings.TrimSpace(value) + for _, candidate := range AuthorizationValues() { + if candidate.String() == value { + return candidate.(Authorization), err + } + } + return authorization, errors.New("Unable to parse into type: " + value) +} + func (what Authorization) String() string { // NOTE: maintain list also in schema.json for validation in IDEs return AuthorizationTypeDescription[what].Name diff --git a/pkg/security/types/authorization_test.go b/pkg/security/types/authorization_test.go new file mode 100644 index 00000000..dd60ad60 --- /dev/null +++ b/pkg/security/types/authorization_test.go @@ -0,0 +1,47 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" 
+) + +type ParseAuthorizationTest struct { + input string + expected Authorization + expectedError error +} + +func TestParseAuthorization(t *testing.T) { + testCases := map[string]ParseAuthorizationTest{ + "none": { + input: "none", + expected: NoneAuthorization, + }, + "technical-user": { + input: "technical-user", + expected: TechnicalUser, + }, + "enduser-identity-propagation": { + input: "enduser-identity-propagation", + expected: EndUserIdentityPropagation, + }, + "unknown": { + input: "unknown", + expectedError: errors.New("Unable to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseAuthorization(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/confidentiality_test.go b/pkg/security/types/confidentiality_test.go new file mode 100644 index 00000000..e13f92ff --- /dev/null +++ b/pkg/security/types/confidentiality_test.go @@ -0,0 +1,55 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type ParseConfidentialityTest struct { + input string + expected Confidentiality + expectedError error +} + +func TestParseConfidenitality(t *testing.T) { + testCases := map[string]ParseConfidentialityTest{ + "public": { + input: "public", + expected: Public, + }, + "internal": { + input: "internal", + expected: Internal, + }, + "restricted": { + input: "restricted", + expected: Restricted, + }, + "confidential": { + input: "confidential", + expected: Confidential, + }, + "strictly-confidential": { + input: "strictly-confidential", + expected: StrictlyConfidential, + }, + "unknown": { + input: "unknown", + expectedError: errors.New("Unable to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := 
ParseConfidentiality(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/criticality_test.go b/pkg/security/types/criticality_test.go new file mode 100644 index 00000000..937d1c26 --- /dev/null +++ b/pkg/security/types/criticality_test.go @@ -0,0 +1,55 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type ParseCriticalityTest struct { + input string + expected Criticality + expectedError error +} + +func TestParseCriticality(t *testing.T) { + testCases := map[string]ParseCriticalityTest{ + "archive": { + input: "archive", + expected: Archive, + }, + "operational": { + input: "operational", + expected: Operational, + }, + "important": { + input: "important", + expected: Important, + }, + "critical": { + input: "critical", + expected: Critical, + }, + "mission-critical": { + input: "mission-critical", + expected: MissionCritical, + }, + "unknown": { + input: "unknown", + expectedError: errors.New("Unable to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseCriticality(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/data_breach_probability.go b/pkg/security/types/data_breach_probability.go index b972c9f0..b66902af 100644 --- a/pkg/security/types/data_breach_probability.go +++ b/pkg/security/types/data_breach_probability.go @@ -3,7 +3,11 @@ Copyright © 2023 NAME HERE */ package types -import "encoding/json" +import ( + "encoding/json" + "errors" + "strings" +) type DataBreachProbability int @@ -27,6 +31,20 @@ var DataBreachProbabilityTypeDescription = [...]TypeDescription{ {"probable", "Probable"}, } +func ParseDataBreachProbability(value string) (dataBreachProbability DataBreachProbability, 
err error) { + value = strings.TrimSpace(value) + if value == "" { + return Possible, err + } + + for _, candidate := range DataBreachProbabilityValues() { + if candidate.String() == value { + return candidate.(DataBreachProbability), err + } + } + return dataBreachProbability, errors.New("Unable to parse into type: " + value) +} + func (what DataBreachProbability) String() string { // NOTE: maintain list also in schema.json for validation in IDEs return DataBreachProbabilityTypeDescription[what].Name diff --git a/pkg/security/types/data_breach_probability_test.go b/pkg/security/types/data_breach_probability_test.go new file mode 100644 index 00000000..b041c887 --- /dev/null +++ b/pkg/security/types/data_breach_probability_test.go @@ -0,0 +1,51 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type ParseDataBreachProbabilityTest struct { + input string + expected DataBreachProbability + expectedError error +} + +func TestParseDataBreachProbability(t *testing.T) { + testCases := map[string]ParseDataBreachProbabilityTest{ + "improbable": { + input: "improbable", + expected: Improbable, + }, + "possible": { + input: "possible", + expected: Possible, + }, + "probable": { + input: "probable", + expected: Probable, + }, + "default": { + input: "", + expected: Possible, + }, + "unknown": { + input: "unknown", + expectedError: errors.New("Unable to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseDataBreachProbability(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/data_format.go b/pkg/security/types/data_format.go index 0e9dd618..2ef473eb 100644 --- a/pkg/security/types/data_format.go +++ b/pkg/security/types/data_format.go @@ -3,6 +3,11 @@ Copyright © 2023 NAME HERE */ package types +import ( + 
"errors" + "strings" +) + type DataFormat int const ( @@ -31,6 +36,16 @@ var DataFormatTypeDescription = [...]TypeDescription{ {"csv", "CSV"}, } +func ParseDataFormat(value string) (dataFormat DataFormat, err error) { + value = strings.TrimSpace(value) + for _, candidate := range DataFormatValues() { + if candidate.String() == value { + return candidate.(DataFormat), err + } + } + return dataFormat, errors.New("Unable to parse into type: " + value) +} + func (what DataFormat) String() string { // NOTE: maintain list also in schema.json for validation in IDEs return DataFormatTypeDescription[what].Name diff --git a/pkg/security/types/data_format_test.go b/pkg/security/types/data_format_test.go new file mode 100644 index 00000000..03a759c2 --- /dev/null +++ b/pkg/security/types/data_format_test.go @@ -0,0 +1,55 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type ParseDataFormatTest struct { + input string + expected DataFormat + expectedError error +} + +func TestParseDataFormat(t *testing.T) { + testCases := map[string]ParseDataFormatTest{ + "json": { + input: "json", + expected: JSON, + }, + "xml": { + input: "xml", + expected: XML, + }, + "serialization": { + input: "serialization", + expected: Serialization, + }, + "file": { + input: "file", + expected: File, + }, + "csv": { + input: "csv", + expected: CSV, + }, + "unknown": { + input: "unknown", + expectedError: errors.New("Unable to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseDataFormat(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/encryption_style_test.go b/pkg/security/types/encryption_style_test.go new file mode 100644 index 00000000..80187fa5 --- /dev/null +++ b/pkg/security/types/encryption_style_test.go @@ -0,0 +1,55 @@ 
+/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type ParseEncryptionStyleTest struct { + input string + expected EncryptionStyle + expectedError error +} + +func TestParseEncryptionStyle(t *testing.T) { + testCases := map[string]ParseEncryptionStyleTest{ + "none": { + input: "none", + expected: NoneEncryption, + }, + "transparent": { + input: "transparent", + expected: Transparent, + }, + "data-with-symmetric-shared-key": { + input: "data-with-symmetric-shared-key", + expected: DataWithSymmetricSharedKey, + }, + "data-with-asymmetric-shared-key": { + input: "data-with-asymmetric-shared-key", + expected: DataWithAsymmetricSharedKey, + }, + "data-with-enduser-individual-key": { + input: "data-with-enduser-individual-key", + expected: DataWithEndUserIndividualKey, + }, + "unknown": { + input: "unknown", + expectedError: errors.New("Unable to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseEncryptionStyle(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/protocol.go b/pkg/security/types/protocol.go index 0b02d36d..245c5a1a 100644 --- a/pkg/security/types/protocol.go +++ b/pkg/security/types/protocol.go @@ -3,6 +3,11 @@ Copyright © 2023 NAME HERE */ package types +import ( + "errors" + "strings" +) + type Protocol int const ( @@ -157,6 +162,16 @@ var ProtocolTypeDescription = [...]TypeDescription{ {"container-spawning", "Spawn a container"}, } +func ParseProtocol(value string) (protocol Protocol, err error) { + value = strings.TrimSpace(value) + for _, candidate := range ProtocolValues() { + if candidate.String() == value { + return candidate.(Protocol), err + } + } + return protocol, errors.New("Unable to parse into type: " + value) +} + func (what Protocol) String() string { // NOTE: maintain 
list also in schema.json for validation in IDEs return ProtocolTypeDescription[what].Name diff --git a/pkg/security/types/protocol_test.go b/pkg/security/types/protocol_test.go new file mode 100644 index 00000000..07981a7a --- /dev/null +++ b/pkg/security/types/protocol_test.go @@ -0,0 +1,223 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type ParseProtocolTest struct { + input string + expected Protocol + expectedError error +} + +func TestParseProtocol(t *testing.T) { + testCases := map[string]ParseProtocolTest{ + "unknown-protocol": { + input: "unknown-protocol", + expected: UnknownProtocol, + }, + "http": { + input: "http", + expected: HTTP, + }, + "https": { + input: "https", + expected: HTTPS, + }, + "ws": { + input: "ws", + expected: WS, + }, + "wss": { + input: "wss", + expected: WSS, + }, + "reverse-proxy-web-protocol": { + input: "reverse-proxy-web-protocol", + expected: ReverseProxyWebProtocol, + }, + "reverse-proxy-web-protocol-encrypted": { + input: "reverse-proxy-web-protocol-encrypted", + expected: ReverseProxyWebProtocolEncrypted, + }, + "mqtt": { + input: "mqtt", + expected: MQTT, + }, + "jdbc": { + input: "jdbc", + expected: JDBC, + }, + "jdbc-encrypted": { + input: "jdbc-encrypted", + expected: JdbcEncrypted, + }, + "odbc": { + input: "odbc", + expected: ODBC, + }, + "odbc-encrypted": { + input: "odbc-encrypted", + expected: OdbcEncrypted, + }, + "sql-access-protocol": { + input: "sql-access-protocol", + expected: SqlAccessProtocol, + }, + "sql-access-protocol-encrypted": { + input: "sql-access-protocol-encrypted", + expected: SqlAccessProtocolEncrypted, + }, + "nosql-access-protocol": { + input: "nosql-access-protocol", + expected: NosqlAccessProtocol, + }, + "nosql-access-protocol-encrypted": { + input: "nosql-access-protocol-encrypted", + expected: NosqlAccessProtocolEncrypted, + }, + "binary": { + input: "binary", + expected: BINARY, + }, + 
"binary-encrypted": { + input: "binary-encrypted", + expected: BinaryEncrypted, + }, + "text": { + input: "text", + expected: TEXT, + }, + "text-encrypted": { + input: "text-encrypted", + expected: TextEncrypted, + }, + "ssh": { + input: "ssh", + expected: SSH, + }, + "ssh-tunnel": { + input: "ssh-tunnel", + expected: SshTunnel, + }, + "smtp": { + input: "smtp", + expected: SMTP, + }, + "smtp-encrypted": { + input: "smtp-encrypted", + expected: SmtpEncrypted, + }, + "pop3": { + input: "pop3", + expected: POP3, + }, + "pop3-encrypted": { + input: "pop3-encrypted", + expected: Pop3Encrypted, + }, + "imap": { + input: "imap", + expected: IMAP, + }, + "imap-encrypted": { + input: "imap-encrypted", + expected: ImapEncrypted, + }, + "ftp": { + input: "ftp", + expected: FTP, + }, + "ftps": { + input: "ftps", + expected: FTPS, + }, + "sftp": { + input: "sftp", + expected: SFTP, + }, + "scp": { + input: "scp", + expected: SCP, + }, + "ldap": { + input: "ldap", + expected: LDAP, + }, + "ldaps": { + input: "ldaps", + expected: LDAPS, + }, + "jms": { + input: "jms", + expected: JMS, + }, + "nfs": { + input: "nfs", + expected: NFS, + }, + "smb": { + input: "smb", + expected: SMB, + }, + "smb-encrypted": { + input: "smb-encrypted", + expected: SmbEncrypted, + }, + "local-file-access": { + input: "local-file-access", + expected: LocalFileAccess, + }, + "nrpe": { + input: "nrpe", + expected: NRPE, + }, + "xmpp": { + input: "xmpp", + expected: XMPP, + }, + "iiop": { + input: "iiop", + expected: IIOP, + }, + "iiop-encrypted": { + input: "iiop-encrypted", + expected: IiopEncrypted, + }, + "jrmp": { + input: "jrmp", + expected: JRMP, + }, + "jrmp-encrypted": { + input: "jrmp-encrypted", + expected: JrmpEncrypted, + }, + "in-process-library-call": { + input: "in-process-library-call", + expected: InProcessLibraryCall, + }, + "container-spawning": { + input: "container-spawning", + expected: ContainerSpawning, + }, + "unknown": { + input: "unknown", + expectedError: errors.New("Unable 
to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseProtocol(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/quantity_test.go b/pkg/security/types/quantity_test.go new file mode 100644 index 00000000..82fbf306 --- /dev/null +++ b/pkg/security/types/quantity_test.go @@ -0,0 +1,51 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type ParseQuantityTest struct { + input string + expected Quantity + expectedError error +} + +func TestParseQuantity(t *testing.T) { + testCases := map[string]ParseQuantityTest{ + "very-few": { + input: "very-few", + expected: VeryFew, + }, + "few": { + input: "few", + expected: Few, + }, + "many": { + input: "many", + expected: Many, + }, + "very-many": { + input: "very-many", + expected: VeryMany, + }, + "unknown": { + input: "unknown", + expectedError: errors.New("Unable to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseQuantity(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/risk_exploitation_impact_test.go b/pkg/security/types/risk_exploitation_impact_test.go new file mode 100644 index 00000000..ffb77f29 --- /dev/null +++ b/pkg/security/types/risk_exploitation_impact_test.go @@ -0,0 +1,55 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type ParseRiskExploitationImpactTest struct { + input string + expected RiskExploitationImpact + expectedError error +} + +func TestParseRiskExploitationImpact(t *testing.T) { + testCases := map[string]ParseRiskExploitationImpactTest{ + "low": { + 
input: "low", + expected: LowImpact, + }, + "medium": { + input: "medium", + expected: MediumImpact, + }, + "high": { + input: "high", + expected: HighImpact, + }, + "very-high": { + input: "very-high", + expected: VeryHighImpact, + }, + "default": { + input: "", + expected: MediumImpact, + }, + "unknown": { + input: "unknown", + expectedError: errors.New("Unable to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseRiskExploitationImpact(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/risk_explotation_impact.go b/pkg/security/types/risk_explotation_impact.go index 6480e6ea..6ac93cfb 100644 --- a/pkg/security/types/risk_explotation_impact.go +++ b/pkg/security/types/risk_explotation_impact.go @@ -3,7 +3,11 @@ Copyright © 2023 NAME HERE */ package types -import "encoding/json" +import ( + "encoding/json" + "errors" + "strings" +) type RiskExploitationImpact int @@ -30,6 +34,19 @@ var RiskExploitationImpactTypeDescription = [...]TypeDescription{ {"very-high", "Very High"}, } +func ParseRiskExploitationImpact(value string) (riskExploitationImpact RiskExploitationImpact, err error) { + value = strings.TrimSpace(value) + if value == "" { + return MediumImpact, nil + } + for _, candidate := range RiskExploitationImpactValues() { + if candidate.String() == value { + return candidate.(RiskExploitationImpact), err + } + } + return riskExploitationImpact, errors.New("Unable to parse into type: " + value) +} + func (what RiskExploitationImpact) String() string { // NOTE: maintain list also in schema.json for validation in IDEs return RiskExploitationImpactTypeDescription[what].Name diff --git a/pkg/security/types/risk_explotation_likelihood.go b/pkg/security/types/risk_explotation_likelihood.go index 9ae0f112..b56c5d71 100644 --- a/pkg/security/types/risk_explotation_likelihood.go +++ 
b/pkg/security/types/risk_explotation_likelihood.go @@ -3,7 +3,11 @@ Copyright © 2023 NAME HERE */ package types -import "encoding/json" +import ( + "encoding/json" + "errors" + "strings" +) type RiskExploitationLikelihood int @@ -30,6 +34,19 @@ var RiskExploitationLikelihoodTypeDescription = [...]TypeDescription{ {"frequent", "Frequent"}, } +func ParseRiskExploitationLikelihood(value string) (riskExploitationLikelihood RiskExploitationLikelihood, err error) { + value = strings.TrimSpace(value) + if value == "" { + return Likely, nil + } + for _, candidate := range RiskExploitationLikelihoodValues() { + if candidate.String() == value { + return candidate.(RiskExploitationLikelihood), err + } + } + return riskExploitationLikelihood, errors.New("Unable to parse into type: " + value) +} + func (what RiskExploitationLikelihood) String() string { // NOTE: maintain list also in schema.json for validation in IDEs return RiskExploitationLikelihoodTypeDescription[what].Name diff --git a/pkg/security/types/risk_explotation_likelihood_test.go b/pkg/security/types/risk_explotation_likelihood_test.go new file mode 100644 index 00000000..cb981818 --- /dev/null +++ b/pkg/security/types/risk_explotation_likelihood_test.go @@ -0,0 +1,55 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type ParseRiskExploitationLikelihoodTest struct { + input string + expected RiskExploitationLikelihood + expectedError error +} + +func TestParseRiskExploitationLikelihood(t *testing.T) { + testCases := map[string]ParseRiskExploitationLikelihoodTest{ + "unlikely": { + input: "unlikely", + expected: Unlikely, + }, + "likely": { + input: "likely", + expected: Likely, + }, + "very-likely": { + input: "very-likely", + expected: VeryLikely, + }, + "frequent": { + input: "frequent", + expected: Frequent, + }, + "default": { + input: "", + expected: Likely, + }, + "unknown": { + input: "unknown", + expectedError: 
errors.New("Unable to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseRiskExploitationLikelihood(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/risk_function.go b/pkg/security/types/risk_function.go index 0273c5d6..f7a7cdb4 100644 --- a/pkg/security/types/risk_function.go +++ b/pkg/security/types/risk_function.go @@ -3,7 +3,11 @@ Copyright © 2023 NAME HERE */ package types -import "encoding/json" +import ( + "encoding/json" + "errors" + "strings" +) type RiskFunction int @@ -30,6 +34,16 @@ var RiskFunctionTypeDescription = [...]TypeDescription{ {"operations", "Operations"}, } +func ParseRiskFunction(value string) (riskFunction RiskFunction, err error) { + value = strings.TrimSpace(value) + for _, candidate := range RiskFunctionValues() { + if candidate.String() == value { + return candidate.(RiskFunction), err + } + } + return riskFunction, errors.New("Unable to parse into type: " + value) +} + func (what RiskFunction) String() string { // NOTE: maintain list also in schema.json for validation in IDEs return RiskFunctionTypeDescription[what].Name diff --git a/pkg/security/types/risk_function_test.go b/pkg/security/types/risk_function_test.go new file mode 100644 index 00000000..375dae50 --- /dev/null +++ b/pkg/security/types/risk_function_test.go @@ -0,0 +1,51 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type ParseRiskFunctionTest struct { + input string + expected RiskFunction + expectedError error +} + +func TestParseRiskFunction(t *testing.T) { + testCases := map[string]ParseRiskFunctionTest{ + "business-side": { + input: "business-side", + expected: BusinessSide, + }, + "architecture": { + input: "architecture", + expected: Architecture, + }, + "development": { + input: 
"development", + expected: Development, + }, + "operations": { + input: "operations", + expected: Operations, + }, + "unknown": { + input: "unknown", + expectedError: errors.New("Unable to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseRiskFunction(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/risk_severity.go b/pkg/security/types/risk_severity.go index be684f7b..b978eae9 100644 --- a/pkg/security/types/risk_severity.go +++ b/pkg/security/types/risk_severity.go @@ -3,7 +3,11 @@ Copyright © 2023 NAME HERE */ package types -import "encoding/json" +import ( + "encoding/json" + "errors" + "strings" +) type RiskSeverity int @@ -33,6 +37,19 @@ var RiskSeverityTypeDescription = [...]TypeDescription{ {"critical", "Critical"}, } +func ParseRiskSeverity(value string) (riskSeverity RiskSeverity, err error) { + value = strings.TrimSpace(value) + if value == "" { + return MediumSeverity, nil + } + for _, candidate := range RiskSeverityValues() { + if candidate.String() == value { + return candidate.(RiskSeverity), err + } + } + return riskSeverity, errors.New("Unable to parse into type: " + value) +} + func (what RiskSeverity) String() string { // NOTE: maintain list also in schema.json for validation in IDEs return RiskSeverityTypeDescription[what].Name diff --git a/pkg/security/types/risk_severity_test.go b/pkg/security/types/risk_severity_test.go new file mode 100644 index 00000000..1d50ac12 --- /dev/null +++ b/pkg/security/types/risk_severity_test.go @@ -0,0 +1,59 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type ParseRiskSeverityTest struct { + input string + expected RiskSeverity + expectedError error +} + +func TestParseRiskSeverity(t *testing.T) { + testCases := 
map[string]ParseRiskSeverityTest{ + "low": { + input: "low", + expected: LowSeverity, + }, + "medium": { + input: "medium", + expected: MediumSeverity, + }, + "elevated": { + input: "elevated", + expected: ElevatedSeverity, + }, + "high": { + input: "high", + expected: HighSeverity, + }, + "critical": { + input: "critical", + expected: CriticalSeverity, + }, + "default": { + input: "", + expected: MediumSeverity, + }, + "unknown": { + input: "unknown", + expectedError: errors.New("Unable to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseRiskSeverity(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/risk_status.go b/pkg/security/types/risk_status.go index 32278dcd..c47d4274 100644 --- a/pkg/security/types/risk_status.go +++ b/pkg/security/types/risk_status.go @@ -3,7 +3,11 @@ Copyright © 2023 NAME HERE */ package types -import "encoding/json" +import ( + "encoding/json" + "errors" + "strings" +) type RiskStatus int @@ -36,6 +40,16 @@ var RiskStatusTypeDescription = [...]TypeDescription{ {"false-positive", "Risk is a false positive (i.e. 
no risk at all or not applicable)"}, } +func ParseRiskStatus(value string) (riskStatus RiskStatus, err error) { + value = strings.TrimSpace(value) + for _, candidate := range RiskStatusValues() { + if candidate.String() == value { + return candidate.(RiskStatus), err + } + } + return riskStatus, errors.New("Unable to parse into type: " + value) +} + func (what RiskStatus) String() string { // NOTE: maintain list also in schema.json for validation in IDEs return RiskStatusTypeDescription[what].Name diff --git a/pkg/security/types/risk_status_test.go b/pkg/security/types/risk_status_test.go new file mode 100644 index 00000000..2ef16816 --- /dev/null +++ b/pkg/security/types/risk_status_test.go @@ -0,0 +1,59 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type ParseRiskStatusTest struct { + input string + expected RiskStatus + expectedError error +} + +func TestParseRiskStatus(t *testing.T) { + testCases := map[string]ParseRiskStatusTest{ + "unchecked": { + input: "unchecked", + expected: Unchecked, + }, + "in-discussion": { + input: "in-discussion", + expected: InDiscussion, + }, + "accepted": { + input: "accepted", + expected: Accepted, + }, + "in-progress": { + input: "in-progress", + expected: InProgress, + }, + "mitigated": { + input: "mitigated", + expected: Mitigated, + }, + "false-positive": { + input: "false-positive", + expected: FalsePositive, + }, + "unknown": { + input: "unknown", + expectedError: errors.New("Unable to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseRiskStatus(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/stride.go b/pkg/security/types/stride.go index 7625db04..d0345882 100644 --- a/pkg/security/types/stride.go +++ b/pkg/security/types/stride.go @@ -3,7 
+3,11 @@ Copyright © 2023 NAME HERE */ package types -import "encoding/json" +import ( + "encoding/json" + "errors" + "strings" +) type STRIDE int @@ -36,6 +40,16 @@ var StrideTypeDescription = [...]TypeDescription{ {"elevation-of-privilege", "Elevation of privilege - Authorization"}, } +func ParseSTRIDE(value string) (stride STRIDE, err error) { + value = strings.TrimSpace(value) + for _, candidate := range STRIDEValues() { + if candidate.String() == value { + return candidate.(STRIDE), err + } + } + return stride, errors.New("Unable to parse into type: " + value) +} + func (what STRIDE) String() string { // NOTE: maintain list also in schema.json for validation in IDEs return StrideTypeDescription[what].Name diff --git a/pkg/security/types/stride_test.go b/pkg/security/types/stride_test.go new file mode 100644 index 00000000..7bb5ae5d --- /dev/null +++ b/pkg/security/types/stride_test.go @@ -0,0 +1,59 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type ParseStrideTest struct { + input string + expected STRIDE + expectedError error +} + +func TestParseStride(t *testing.T) { + testCases := map[string]ParseStrideTest{ + "spoofing": { + input: "spoofing", + expected: Spoofing, + }, + "tampering": { + input: "tampering", + expected: Tampering, + }, + "repudiation": { + input: "repudiation", + expected: Repudiation, + }, + "information-disclosure": { + input: "information-disclosure", + expected: InformationDisclosure, + }, + "denial-of-service": { + input: "denial-of-service", + expected: DenialOfService, + }, + "elevation-of-privilege": { + input: "elevation-of-privilege", + expected: ElevationOfPrivilege, + }, + "unknown": { + input: "unknown", + expectedError: errors.New("Unable to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseSTRIDE(testCase.input) + + assert.Equal(t, testCase.expected, 
actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/technical_asset_machine.go b/pkg/security/types/technical_asset_machine.go index 816b17a2..7308e0df 100644 --- a/pkg/security/types/technical_asset_machine.go +++ b/pkg/security/types/technical_asset_machine.go @@ -3,6 +3,11 @@ Copyright © 2023 NAME HERE */ package types +import ( + "errors" + "strings" +) + type TechnicalAssetMachine int const ( @@ -28,6 +33,16 @@ var TechnicalAssetMachineTypeDescription = [...]TypeDescription{ {"serverless", "A serverless application"}, } +func ParseTechnicalAssetMachine(value string) (technicalAssetMachine TechnicalAssetMachine, err error) { + value = strings.TrimSpace(value) + for _, candidate := range TechnicalAssetMachineValues() { + if candidate.String() == value { + return candidate.(TechnicalAssetMachine), err + } + } + return technicalAssetMachine, errors.New("Unable to parse into type: " + value) +} + func (what TechnicalAssetMachine) String() string { return TechnicalAssetMachineTypeDescription[what].Name } diff --git a/pkg/security/types/technical_asset_machine_test.go b/pkg/security/types/technical_asset_machine_test.go new file mode 100644 index 00000000..b2af5e52 --- /dev/null +++ b/pkg/security/types/technical_asset_machine_test.go @@ -0,0 +1,51 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type ParseTechnicalAssetMachineTest struct { + input string + expected TechnicalAssetMachine + expectedError error +} + +func TestParseTechnicalAssetMachine(t *testing.T) { + testCases := map[string]ParseTechnicalAssetMachineTest{ + "physical": { + input: "physical", + expected: Physical, + }, + "virtual": { + input: "virtual", + expected: Virtual, + }, + "container": { + input: "container", + expected: Container, + }, + "serverless": { + input: "serverless", + expected: Serverless, + }, + "unknown": { + input: "unknown", + expectedError: 
errors.New("Unable to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseTechnicalAssetMachine(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/technical_asset_size.go b/pkg/security/types/technical_asset_size.go index cc658bff..f4aabec7 100644 --- a/pkg/security/types/technical_asset_size.go +++ b/pkg/security/types/technical_asset_size.go @@ -3,6 +3,11 @@ Copyright © 2023 NAME HERE */ package types +import ( + "errors" + "strings" +) + type TechnicalAssetSize int const ( @@ -36,3 +41,13 @@ func (what TechnicalAssetSize) String() string { func (what TechnicalAssetSize) Explain() string { return TechnicalAssetSizeDescription[what].Description } + +func ParseTechnicalAssetSize(value string) (technicalAssetSize TechnicalAssetSize, err error) { + value = strings.TrimSpace(value) + for _, candidate := range TechnicalAssetSizeValues() { + if candidate.String() == value { + return candidate.(TechnicalAssetSize), err + } + } + return technicalAssetSize, errors.New("Unable to parse into type: " + value) +} diff --git a/pkg/security/types/technical_asset_size_test.go b/pkg/security/types/technical_asset_size_test.go new file mode 100644 index 00000000..0c5d9190 --- /dev/null +++ b/pkg/security/types/technical_asset_size_test.go @@ -0,0 +1,51 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type ParseTechnicalAssetSizeTest struct { + input string + expected TechnicalAssetSize + expectedError error +} + +func TestParseTechnicalAssetSize(t *testing.T) { + testCases := map[string]ParseTechnicalAssetSizeTest{ + "service": { + input: "service", + expected: Service, + }, + "system": { + input: "system", + expected: System, + }, + "application": { + input: "application", + expected: Application, + }, + 
"component": { + input: "component", + expected: Component, + }, + "unknown": { + input: "unknown", + expectedError: errors.New("Unable to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseTechnicalAssetSize(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/technical_asset_technology.go b/pkg/security/types/technical_asset_technology.go index 73ea7899..5b2269da 100644 --- a/pkg/security/types/technical_asset_technology.go +++ b/pkg/security/types/technical_asset_technology.go @@ -3,6 +3,11 @@ Copyright © 2023 NAME HERE */ package types +import ( + "errors" + "strings" +) + type TechnicalAssetTechnology int const ( @@ -196,6 +201,16 @@ func (what TechnicalAssetTechnology) Explain() string { return TechnicalAssetTechnologyTypeDescription[what].Description } +func ParseTechnicalAssetTechnology(value string) (technicalAssetTechnology TechnicalAssetTechnology, err error) { + value = strings.TrimSpace(value) + for _, candidate := range TechnicalAssetTechnologyValues() { + if candidate.String() == value { + return candidate.(TechnicalAssetTechnology), err + } + } + return technicalAssetTechnology, errors.New("Unable to parse into type: " + value) +} + func (what TechnicalAssetTechnology) IsWebApplication() bool { return what == WebServer || what == WebApplication || what == ApplicationServer || what == ERP || what == CMS || what == IdentityProvider || what == ReportEngine } diff --git a/pkg/security/types/technical_asset_technology_test.go b/pkg/security/types/technical_asset_technology_test.go new file mode 100644 index 00000000..b40dd355 --- /dev/null +++ b/pkg/security/types/technical_asset_technology_test.go @@ -0,0 +1,263 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type 
ParseTechnicalAssetTechnologyTest struct { + input string + expected TechnicalAssetTechnology + expectedError error +} + +func TestParseTechnicalAssetTechnology(t *testing.T) { + testCases := map[string]ParseTechnicalAssetTechnologyTest{ + "unknown-technology": { + input: "unknown-technology", + expected: UnknownTechnology, + }, + "client-system": { + input: "client-system", + expected: ClientSystem, + }, + "browser": { + input: "browser", + expected: Browser, + }, + "desktop": { + input: "desktop", + expected: Desktop, + }, + "mobile-app": { + input: "mobile-app", + expected: MobileApp, + }, + "devops-client": { + input: "devops-client", + expected: DevOpsClient, + }, + "web-server": { + input: "web-server", + expected: WebServer, + }, + "web-application": { + input: "web-application", + expected: WebApplication, + }, + "application-server": { + input: "application-server", + expected: ApplicationServer, + }, + "database": { + input: "database", + expected: Database, + }, + "file-server": { + input: "file-server", + expected: FileServer, + }, + "local-file-system": { + input: "local-file-system", + expected: LocalFileSystem, + }, + "erp": { + input: "erp", + expected: ERP, + }, + "cms": { + input: "cms", + expected: CMS, + }, + "web-service-rest": { + input: "web-service-rest", + expected: WebServiceREST, + }, + "web-service-soap": { + input: "web-service-soap", + expected: WebServiceSOAP, + }, + "ejb": { + input: "ejb", + expected: EJB, + }, + "search-index": { + input: "search-index", + expected: SearchIndex, + }, + "search-engine": { + input: "search-engine", + expected: SearchEngine, + }, + "service-registry": { + input: "service-registry", + expected: ServiceRegistry, + }, + "reverse-proxy": { + input: "reverse-proxy", + expected: ReverseProxy, + }, + "load-balancer": { + input: "load-balancer", + expected: LoadBalancer, + }, + "build-pipeline": { + input: "build-pipeline", + expected: BuildPipeline, + }, + "sourcecode-repository": { + input: 
"sourcecode-repository", + expected: SourcecodeRepository, + }, + "artifact-registry": { + input: "artifact-registry", + expected: ArtifactRegistry, + }, + "code-inspection-platform": { + input: "code-inspection-platform", + expected: CodeInspectionPlatform, + }, + "monitoring": { + input: "monitoring", + expected: Monitoring, + }, + "ldap-server": { + input: "ldap-server", + expected: LDAPServer, + }, + "container-platform": { + input: "container-platform", + expected: ContainerPlatform, + }, + "batch-processing": { + input: "batch-processing", + expected: BatchProcessing, + }, + "event-listener": { + input: "event-listener", + expected: EventListener, + }, + "identity-provider": { + input: "identity-provider", + expected: IdentityProvider, + }, + "identity-store-ldap": { + input: "identity-store-ldap", + expected: IdentityStoreLDAP, + }, + "identity-store-database": { + input: "identity-store-database", + expected: IdentityStoreDatabase, + }, + "tool": { + input: "tool", + expected: Tool, + }, + "threagile": { + input: "threagile", + expected: CLI, + }, + "task": { + input: "task", + expected: Task, + }, + "function": { + input: "function", + expected: Function, + }, + "gateway": { + input: "gateway", + expected: Gateway, + }, + "iot-device": { + input: "iot-device", + expected: IoTDevice, + }, + "message-queue": { + input: "message-queue", + expected: MessageQueue, + }, + "stream-processing": { + input: "stream-processing", + expected: StreamProcessing, + }, + "service-mesh": { + input: "service-mesh", + expected: ServiceMesh, + }, + "data-lake": { + input: "data-lake", + expected: DataLake, + }, + "big-data-platform": { + input: "big-data-platform", + expected: BigDataPlatform, + }, + "report-engine": { + input: "report-engine", + expected: ReportEngine, + }, + "ai": { + input: "ai", + expected: AI, + }, + "mail-server": { + input: "mail-server", + expected: MailServer, + }, + "vault": { + input: "vault", + expected: Vault, + }, + "hsm": { + input: "hsm", + 
expected: HSM, + }, + "waf": { + input: "waf", + expected: WAF, + }, + "ids": { + input: "ids", + expected: IDS, + }, + "ips": { + input: "ips", + expected: IPS, + }, + "scheduler": { + input: "scheduler", + expected: Scheduler, + }, + "mainframe": { + input: "mainframe", + expected: Mainframe, + }, + "block-storage": { + input: "block-storage", + expected: BlockStorage, + }, + "library": { + input: "library", + expected: Library, + }, + "unknown": { + input: "unknown", + expectedError: errors.New("Unable to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseTechnicalAssetTechnology(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/technical_asset_type.go b/pkg/security/types/technical_asset_type.go index 14173e72..ffabb7cc 100644 --- a/pkg/security/types/technical_asset_type.go +++ b/pkg/security/types/technical_asset_type.go @@ -3,6 +3,11 @@ Copyright © 2023 NAME HERE */ package types +import ( + "errors" + "strings" +) + type TechnicalAssetType int const ( @@ -33,3 +38,13 @@ func (what TechnicalAssetType) String() string { func (what TechnicalAssetType) Explain() string { return TechnicalAssetTypeDescription[what].Description } + +func ParseTechnicalAssetType(value string) (technicalAssetType TechnicalAssetType, err error) { + value = strings.TrimSpace(value) + for _, candidate := range TechnicalAssetTypeValues() { + if candidate.String() == value { + return candidate.(TechnicalAssetType), err + } + } + return technicalAssetType, errors.New("Unable to parse into type: " + value) +} diff --git a/pkg/security/types/technical_asset_type_test.go b/pkg/security/types/technical_asset_type_test.go new file mode 100644 index 00000000..d04ac6be --- /dev/null +++ b/pkg/security/types/technical_asset_type_test.go @@ -0,0 +1,47 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + 
+import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type ParseTechnicalAssetTypeTest struct { + input string + expected TechnicalAssetType + expectedError error +} + +func TestParseTechnicalAssetType(t *testing.T) { + testCases := map[string]ParseTechnicalAssetTypeTest{ + "external-entity": { + input: "external-entity", + expected: ExternalEntity, + }, + "process": { + input: "process", + expected: Process, + }, + "datastore": { + input: "datastore", + expected: Datastore, + }, + "unknown": { + input: "unknown", + expectedError: errors.New("Unable to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseTechnicalAssetType(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/trust_boundary.go b/pkg/security/types/trust_boundary.go index de2b5df5..8ba6217b 100644 --- a/pkg/security/types/trust_boundary.go +++ b/pkg/security/types/trust_boundary.go @@ -3,6 +3,11 @@ Copyright © 2023 NAME HERE */ package types +import ( + "errors" + "strings" +) + type TrustBoundaryType int const ( @@ -37,6 +42,16 @@ var TrustBoundaryTypeDescription = [...]TypeDescription{ {"execution-environment", "Logical group of items (not a protective network boundary in that sense). 
More like a namespace or another logical group of items"}, } +func ParseTrustBoundary(value string) (trustBoundary TrustBoundaryType, err error) { + value = strings.TrimSpace(value) + for _, candidate := range TrustBoundaryTypeValues() { + if candidate.String() == value { + return candidate.(TrustBoundaryType), err + } + } + return trustBoundary, errors.New("Unable to parse into type: " + value) +} + func (what TrustBoundaryType) String() string { // NOTE: maintain list also in schema.json for validation in IDEs return TrustBoundaryTypeDescription[what].Name diff --git a/pkg/security/types/trust_boundary_test.go b/pkg/security/types/trust_boundary_test.go new file mode 100644 index 00000000..d93d00a1 --- /dev/null +++ b/pkg/security/types/trust_boundary_test.go @@ -0,0 +1,63 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type ParseTrustBoundaryTest struct { + input string + expected TrustBoundaryType + expectedError error +} + +func TestParseTrustBoundaryType(t *testing.T) { + testCases := map[string]ParseTrustBoundaryTest{ + "network-on-prem": { + input: "network-on-prem", + expected: NetworkOnPrem, + }, + "network-dedicated-hoster": { + input: "network-dedicated-hoster", + expected: NetworkDedicatedHoster, + }, + "network-virtual-lan": { + input: "network-virtual-lan", + expected: NetworkVirtualLAN, + }, + "network-cloud-provider": { + input: "network-cloud-provider", + expected: NetworkCloudProvider, + }, + "network-cloud-security-group": { + input: "network-cloud-security-group", + expected: NetworkCloudSecurityGroup, + }, + "network-policy-namespace-isolation": { + input: "network-policy-namespace-isolation", + expected: NetworkPolicyNamespaceIsolation, + }, + "execution-environment": { + input: "execution-environment", + expected: ExecutionEnvironment, + }, + "unknown": { + input: "unknown", + expectedError: errors.New("Unable to parse into type: unknown"), + }, + } + + 
for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseTrustBoundary(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} diff --git a/pkg/security/types/usage_test.go b/pkg/security/types/usage_test.go new file mode 100644 index 00000000..bad781e3 --- /dev/null +++ b/pkg/security/types/usage_test.go @@ -0,0 +1,43 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type ParseUsageTest struct { + input string + expected Usage + expectedError error +} + +func TestParseUsage(t *testing.T) { + testCases := map[string]ParseUsageTest{ + "business": { + input: "business", + expected: Business, + }, + "devops": { + input: "devops", + expected: DevOps, + }, + "unknown": { + input: "unknown", + expectedError: errors.New("Unable to parse into type: unknown"), + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + actual, err := ParseUsage(testCase.input) + + assert.Equal(t, testCase.expected, actual) + assert.Equal(t, testCase.expectedError, err) + }) + } +} From b72a2260222e7e125181bdd54e3fe8c83143ec66 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Mon, 25 Dec 2023 14:01:46 +0000 Subject: [PATCH 19/68] Move out parsing model code closer to model definition --- internal/threagile/context.go | 773 ++++------------------------------ pkg/model/model.go | 712 +++++++++++++++++++++++++++++++ 2 files changed, 784 insertions(+), 701 deletions(-) diff --git a/internal/threagile/context.go b/internal/threagile/context.go index a66b0415..874870ca 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -83,7 +83,6 @@ type Context struct { outputDir, raaPlugin, skipRiskRules, riskRulesPlugins, executeModelMacro *string customRiskRules map[string]*model.CustomRisk diagramDPI, serverPort *int - 
deferredRiskTrackingDueToWildcardMatching map[string]model.RiskTracking addModelTitle bool keepDiagramSourceFiles bool appFolder *string @@ -158,7 +157,6 @@ func (context *Context) Init(buildTimestamp string) *Context { addModelTitle: false, buildTimestamp: buildTimestamp, customRiskRules: make(map[string]*model.CustomRisk), - deferredRiskTrackingDueToWildcardMatching: make(map[string]model.RiskTracking), drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks: true, } @@ -401,7 +399,7 @@ func (context *Context) analyzeModelOnServerDirectly(ginContext *gin.Context) { dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(context.DefaultGraphvizDPI))) if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return } @@ -411,13 +409,13 @@ func (context *Context) analyzeModelOnServerDirectly(ginContext *gin.Context) { } tmpModelFile, err := os.CreateTemp(*context.tempFolder, "threagile-direct-analyze-*") if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return } defer func() { _ = os.Remove(tmpModelFile.Name()) }() tmpOutputDir, err := os.MkdirTemp(*context.tempFolder, "threagile-direct-analyze-") if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return } defer func() { _ = os.RemoveAll(tmpOutputDir) }() @@ -429,12 +427,12 @@ func (context *Context) analyzeModelOnServerDirectly(ginContext *gin.Context) { context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, true, true, true, true, true, true, true, true, dpi) if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return } err = os.WriteFile(filepath.Join(tmpOutputDir, context.inputFile), []byte(yamlText), 0400) if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return } @@ -461,632 +459,6 @@ func 
(context *Context) analyzeModelOnServerDirectly(ginContext *gin.Context) { ginContext.FileAttachment(tmpResultFile.Name(), "threagile-result.zip") } -func (context *Context) parseModel() { - if *context.verbose { - fmt.Println("Parsing model:", *context.modelFilename) - } - - context.modelInput = *new(input.ModelInput).Defaults() - loadError := context.modelInput.Load(*context.modelFilename) - if loadError != nil { - log.Fatal("Unable to parse model yaml: ", loadError) - } - - // data, _ := json.MarshalIndent(context.modelInput, "", " ") - // fmt.Printf("%v\n", string(data)) - - businessCriticality, err := types.ParseCriticality(context.modelInput.BusinessCriticality) - if err != nil { - panic(errors.New("unknown 'business_criticality' value of application: " + context.modelInput.BusinessCriticality)) - } - - reportDate := time.Now() - if len(context.modelInput.Date) > 0 { - var parseError error - reportDate, parseError = time.Parse("2006-01-02", context.modelInput.Date) - if parseError != nil { - panic(errors.New("unable to parse 'date' value of model file")) - } - } - - context.parsedModel = model.ParsedModel{ - Author: context.modelInput.Author, - Title: context.modelInput.Title, - Date: reportDate, - ManagementSummaryComment: context.modelInput.ManagementSummaryComment, - BusinessCriticality: businessCriticality, - BusinessOverview: removePathElementsFromImageFiles(context.modelInput.BusinessOverview), - TechnicalOverview: removePathElementsFromImageFiles(context.modelInput.TechnicalOverview), - Questions: context.modelInput.Questions, - AbuseCases: context.modelInput.AbuseCases, - SecurityRequirements: context.modelInput.SecurityRequirements, - TagsAvailable: lowerCaseAndTrim(context.modelInput.TagsAvailable), - DiagramTweakNodesep: context.modelInput.DiagramTweakNodesep, - DiagramTweakRanksep: context.modelInput.DiagramTweakRanksep, - DiagramTweakEdgeLayout: context.modelInput.DiagramTweakEdgeLayout, - DiagramTweakSuppressEdgeLabels: 
context.modelInput.DiagramTweakSuppressEdgeLabels, - DiagramTweakLayoutLeftToRight: context.modelInput.DiagramTweakLayoutLeftToRight, - DiagramTweakInvisibleConnectionsBetweenAssets: context.modelInput.DiagramTweakInvisibleConnectionsBetweenAssets, - DiagramTweakSameRankAssets: context.modelInput.DiagramTweakSameRankAssets, - } - - context.parsedModel.CommunicationLinks = make(map[string]model.CommunicationLink) - context.parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId = make(map[string][]model.CommunicationLink) - context.parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId = make(map[string]model.TrustBoundary) - context.parsedModel.GeneratedRisksByCategory = make(map[model.RiskCategory][]model.Risk) - context.parsedModel.GeneratedRisksBySyntheticId = make(map[string]model.Risk) - context.parsedModel.AllSupportedTags = make(map[string]bool) - - if context.parsedModel.DiagramTweakNodesep == 0 { - context.parsedModel.DiagramTweakNodesep = 2 - } - if context.parsedModel.DiagramTweakRanksep == 0 { - context.parsedModel.DiagramTweakRanksep = 2 - } - - // Data Assets =============================================================================== - context.parsedModel.DataAssets = make(map[string]model.DataAsset) - for title, asset := range context.modelInput.DataAssets { - id := fmt.Sprintf("%v", asset.ID) - - usage, err := types.ParseUsage(asset.Usage) - if err != nil { - panic(errors.New("unknown 'usage' value of data asset '" + title + "': " + asset.Usage)) - } - quantity, err := types.ParseQuantity(asset.Quantity) - if err != nil { - panic(errors.New("unknown 'quantity' value of data asset '" + title + "': " + asset.Quantity)) - } - confidentiality, err := types.ParseConfidentiality(asset.Confidentiality) - if err != nil { - panic(errors.New("unknown 'confidentiality' value of data asset '" + title + "': " + asset.Confidentiality)) - } - integrity, err := types.ParseCriticality(asset.Integrity) - if err != nil { - 
panic(errors.New("unknown 'integrity' value of data asset '" + title + "': " + asset.Integrity)) - } - availability, err := types.ParseCriticality(asset.Availability) - if err != nil { - panic(errors.New("unknown 'availability' value of data asset '" + title + "': " + asset.Availability)) - } - - context.checkIdSyntax(id) - if _, exists := context.parsedModel.DataAssets[id]; exists { - panic(errors.New("duplicate id used: " + id)) - } - context.parsedModel.DataAssets[id] = model.DataAsset{ - Id: id, - Title: title, - Usage: usage, - Description: withDefault(fmt.Sprintf("%v", asset.Description), title), - Quantity: quantity, - Tags: context.checkTags(lowerCaseAndTrim(asset.Tags), "data asset '"+title+"'"), - Origin: fmt.Sprintf("%v", asset.Origin), - Owner: fmt.Sprintf("%v", asset.Owner), - Confidentiality: confidentiality, - Integrity: integrity, - Availability: availability, - JustificationCiaRating: fmt.Sprintf("%v", asset.JustificationCiaRating), - } - } - - // Technical Assets =============================================================================== - context.parsedModel.TechnicalAssets = make(map[string]model.TechnicalAsset) - for title, asset := range context.modelInput.TechnicalAssets { - id := fmt.Sprintf("%v", asset.ID) - - usage, err := types.ParseUsage(asset.Usage) - if err != nil { - panic(errors.New("unknown 'usage' value of technical asset '" + title + "': " + asset.Usage)) - } - - var dataAssetsProcessed = make([]string, 0) - if asset.DataAssetsProcessed != nil { - dataAssetsProcessed = make([]string, len(asset.DataAssetsProcessed)) - for i, parsedProcessedAsset := range asset.DataAssetsProcessed { - referencedAsset := fmt.Sprintf("%v", parsedProcessedAsset) - context.checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") - dataAssetsProcessed[i] = referencedAsset - } - } - - var dataAssetsStored = make([]string, 0) - if asset.DataAssetsStored != nil { - dataAssetsStored = make([]string, len(asset.DataAssetsStored)) - for i, 
parsedStoredAssets := range asset.DataAssetsStored { - referencedAsset := fmt.Sprintf("%v", parsedStoredAssets) - context.checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") - dataAssetsStored[i] = referencedAsset - } - } - - technicalAssetType, err := types.ParseTechnicalAssetType(asset.Type) - if err != nil { - panic(errors.New("unknown 'type' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Type))) - } - technicalAssetSize, err := types.ParseTechnicalAssetSize(asset.Size) - if err != nil { - panic(errors.New("unknown 'size' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Size))) - } - technicalAssetTechnology, err := types.ParseTechnicalAssetTechnology(asset.Technology) - if err != nil { - panic(errors.New("unknown 'technology' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Technology))) - } - encryption, err := types.ParseEncryptionStyle(asset.Encryption) - if err != nil { - panic(errors.New("unknown 'encryption' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Encryption))) - } - technicalAssetMachine, err := types.ParseTechnicalAssetMachine(asset.Machine) - if err != nil { - panic(errors.New("unknown 'machine' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Machine))) - } - confidentiality, err := types.ParseConfidentiality(asset.Confidentiality) - if err != nil { - panic(errors.New("unknown 'confidentiality' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Confidentiality))) - } - integrity, err := types.ParseCriticality(asset.Integrity) - if err != nil { - panic(errors.New("unknown 'integrity' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Integrity))) - } - availability, err := types.ParseCriticality(asset.Availability) - if err != nil { - panic(errors.New("unknown 'availability' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Availability))) - } - - 
dataFormatsAccepted := make([]types.DataFormat, 0) - if asset.DataFormatsAccepted != nil { - for _, dataFormatName := range asset.DataFormatsAccepted { - dataFormat, err := types.ParseDataFormat(dataFormatName) - if err != nil { - panic(errors.New("unknown 'data_formats_accepted' value of technical asset '" + title + "': " + fmt.Sprintf("%v", dataFormatName))) - } - dataFormatsAccepted = append(dataFormatsAccepted, dataFormat) - } - } - - communicationLinks := make([]model.CommunicationLink, 0) - if asset.CommunicationLinks != nil { - for commLinkTitle, commLink := range asset.CommunicationLinks { - constraint := true - weight := 1 - var dataAssetsSent []string - var dataAssetsReceived []string - - authentication, err := types.ParseAuthentication(commLink.Authentication) - if err != nil { - panic(errors.New("unknown 'authentication' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Authentication))) - } - authorization, err := types.ParseAuthorization(commLink.Authorization) - if err != nil { - panic(errors.New("unknown 'authorization' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Authorization))) - } - usage, err := types.ParseUsage(commLink.Usage) - if err != nil { - panic(errors.New("unknown 'usage' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Usage))) - } - protocol, err := types.ParseProtocol(commLink.Protocol) - if err != nil { - panic(errors.New("unknown 'protocol' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Protocol))) - } - - if commLink.DataAssetsSent != nil { - for _, dataAssetSent := range commLink.DataAssetsSent { - referencedAsset := fmt.Sprintf("%v", dataAssetSent) - context.checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset 
'"+title+"'") - dataAssetsSent = append(dataAssetsSent, referencedAsset) - } - } - - if commLink.DataAssetsReceived != nil { - for _, dataAssetReceived := range commLink.DataAssetsReceived { - referencedAsset := fmt.Sprintf("%v", dataAssetReceived) - context.checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") - dataAssetsReceived = append(dataAssetsReceived, referencedAsset) - } - } - - if commLink.DiagramTweakWeight > 0 { - weight = commLink.DiagramTweakWeight - } - - constraint = !commLink.DiagramTweakConstraint - - dataFlowTitle := fmt.Sprintf("%v", commLinkTitle) - commLink := model.CommunicationLink{ - Id: createDataFlowId(id, dataFlowTitle), - SourceId: id, - TargetId: commLink.Target, - Title: dataFlowTitle, - Description: withDefault(commLink.Description, dataFlowTitle), - Protocol: protocol, - Authentication: authentication, - Authorization: authorization, - Usage: usage, - Tags: context.checkTags(lowerCaseAndTrim(commLink.Tags), "communication link '"+commLinkTitle+"' of technical asset '"+title+"'"), - VPN: commLink.VPN, - IpFiltered: commLink.IpFiltered, - Readonly: commLink.Readonly, - DataAssetsSent: dataAssetsSent, - DataAssetsReceived: dataAssetsReceived, - DiagramTweakWeight: weight, - DiagramTweakConstraint: constraint, - } - communicationLinks = append(communicationLinks, commLink) - // track all comm links - context.parsedModel.CommunicationLinks[commLink.Id] = commLink - // keep track of map of *all* comm links mapped by target-id (to be able to look up "who is calling me" kind of things) - context.parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId[commLink.TargetId] = append( - context.parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId[commLink.TargetId], commLink) - } - } - - context.checkIdSyntax(id) - if _, exists := context.parsedModel.TechnicalAssets[id]; exists { - panic(errors.New("duplicate id used: " + id)) - } - 
context.parsedModel.TechnicalAssets[id] = model.TechnicalAsset{ - Id: id, - Usage: usage, - Title: title, //fmt.Sprintf("%v", asset["title"]), - Description: withDefault(fmt.Sprintf("%v", asset.Description), title), - Type: technicalAssetType, - Size: technicalAssetSize, - Technology: technicalAssetTechnology, - Tags: context.checkTags(lowerCaseAndTrim(asset.Tags), "technical asset '"+title+"'"), - Machine: technicalAssetMachine, - Internet: asset.Internet, - Encryption: encryption, - MultiTenant: asset.MultiTenant, - Redundant: asset.Redundant, - CustomDevelopedParts: asset.CustomDevelopedParts, - UsedAsClientByHuman: asset.UsedAsClientByHuman, - OutOfScope: asset.OutOfScope, - JustificationOutOfScope: fmt.Sprintf("%v", asset.JustificationOutOfScope), - Owner: fmt.Sprintf("%v", asset.Owner), - Confidentiality: confidentiality, - Integrity: integrity, - Availability: availability, - JustificationCiaRating: fmt.Sprintf("%v", asset.JustificationCiaRating), - DataAssetsProcessed: dataAssetsProcessed, - DataAssetsStored: dataAssetsStored, - DataFormatsAccepted: dataFormatsAccepted, - CommunicationLinks: communicationLinks, - DiagramTweakOrder: asset.DiagramTweakOrder, - } - } - - // Trust Boundaries =============================================================================== - checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries := make(map[string]bool) - context.parsedModel.TrustBoundaries = make(map[string]model.TrustBoundary) - for title, boundary := range context.modelInput.TrustBoundaries { - id := fmt.Sprintf("%v", boundary.ID) - - var technicalAssetsInside = make([]string, 0) - if boundary.TechnicalAssetsInside != nil { - parsedInsideAssets := boundary.TechnicalAssetsInside - technicalAssetsInside = make([]string, len(parsedInsideAssets)) - for i, parsedInsideAsset := range parsedInsideAssets { - technicalAssetsInside[i] = fmt.Sprintf("%v", parsedInsideAsset) - _, found := context.parsedModel.TechnicalAssets[technicalAssetsInside[i]] - if !found { - 
panic(errors.New("missing referenced technical asset " + technicalAssetsInside[i] + " at trust boundary '" + title + "'")) - } - if checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries[technicalAssetsInside[i]] == true { - panic(errors.New("referenced technical asset " + technicalAssetsInside[i] + " at trust boundary '" + title + "' is modeled in multiple trust boundaries")) - } - checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries[technicalAssetsInside[i]] = true - //fmt.Println("asset "+technicalAssetsInside[i]+" at i="+strconv.Itoa(i)) - } - } - - var trustBoundariesNested = make([]string, 0) - if boundary.TrustBoundariesNested != nil { - parsedNestedBoundaries := boundary.TrustBoundariesNested - trustBoundariesNested = make([]string, len(parsedNestedBoundaries)) - for i, parsedNestedBoundary := range parsedNestedBoundaries { - trustBoundariesNested[i] = fmt.Sprintf("%v", parsedNestedBoundary) - } - } - - trustBoundaryType, err := types.ParseTrustBoundary(boundary.Type) - if err != nil { - panic(errors.New("unknown 'type' of trust boundary '" + title + "': " + fmt.Sprintf("%v", boundary.Type))) - } - - trustBoundary := model.TrustBoundary{ - Id: id, - Title: title, //fmt.Sprintf("%v", boundary["title"]), - Description: withDefault(fmt.Sprintf("%v", boundary.Description), title), - Type: trustBoundaryType, - Tags: context.checkTags(lowerCaseAndTrim(boundary.Tags), "trust boundary '"+title+"'"), - TechnicalAssetsInside: technicalAssetsInside, - TrustBoundariesNested: trustBoundariesNested, - } - context.checkIdSyntax(id) - if _, exists := context.parsedModel.TrustBoundaries[id]; exists { - panic(errors.New("duplicate id used: " + id)) - } - context.parsedModel.TrustBoundaries[id] = trustBoundary - for _, technicalAsset := range trustBoundary.TechnicalAssetsInside { - context.parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId[technicalAsset] = trustBoundary - //fmt.Println("Asset "+technicalAsset+" is directly in trust boundary 
"+trustBoundary.Id) - } - } - context.checkNestedTrustBoundariesExisting() - - // Shared Runtime =============================================================================== - context.parsedModel.SharedRuntimes = make(map[string]model.SharedRuntime) - for title, inputRuntime := range context.modelInput.SharedRuntimes { - id := fmt.Sprintf("%v", inputRuntime.ID) - - var technicalAssetsRunning = make([]string, 0) - if inputRuntime.TechnicalAssetsRunning != nil { - parsedRunningAssets := inputRuntime.TechnicalAssetsRunning - technicalAssetsRunning = make([]string, len(parsedRunningAssets)) - for i, parsedRunningAsset := range parsedRunningAssets { - assetId := fmt.Sprintf("%v", parsedRunningAsset) - context.checkTechnicalAssetExists(assetId, "shared runtime '"+title+"'", false) - technicalAssetsRunning[i] = assetId - } - } - - sharedRuntime := model.SharedRuntime{ - Id: id, - Title: title, //fmt.Sprintf("%v", boundary["title"]), - Description: withDefault(fmt.Sprintf("%v", inputRuntime.Description), title), - Tags: context.checkTags(inputRuntime.Tags, "shared runtime '"+title+"'"), - TechnicalAssetsRunning: technicalAssetsRunning, - } - context.checkIdSyntax(id) - if _, exists := context.parsedModel.SharedRuntimes[id]; exists { - panic(errors.New("duplicate id used: " + id)) - } - context.parsedModel.SharedRuntimes[id] = sharedRuntime - } - - // Individual Risk Categories (just used as regular risk categories) =============================================================================== - context.parsedModel.IndividualRiskCategories = make(map[string]model.RiskCategory) - for title, individualCategory := range context.modelInput.IndividualRiskCategories { - id := fmt.Sprintf("%v", individualCategory.ID) - - function, err := types.ParseRiskFunction(individualCategory.Function) - if err != nil { - panic(errors.New("unknown 'function' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.Function))) - } - stride, err := 
types.ParseSTRIDE(individualCategory.STRIDE) - if err != nil { - panic(errors.New("unknown 'stride' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.STRIDE))) - } - - cat := model.RiskCategory{ - Id: id, - Title: title, - Description: withDefault(fmt.Sprintf("%v", individualCategory.Description), title), - Impact: fmt.Sprintf("%v", individualCategory.Impact), - ASVS: fmt.Sprintf("%v", individualCategory.ASVS), - CheatSheet: fmt.Sprintf("%v", individualCategory.CheatSheet), - Action: fmt.Sprintf("%v", individualCategory.Action), - Mitigation: fmt.Sprintf("%v", individualCategory.Mitigation), - Check: fmt.Sprintf("%v", individualCategory.Check), - DetectionLogic: fmt.Sprintf("%v", individualCategory.DetectionLogic), - RiskAssessment: fmt.Sprintf("%v", individualCategory.RiskAssessment), - FalsePositives: fmt.Sprintf("%v", individualCategory.FalsePositives), - Function: function, - STRIDE: stride, - ModelFailurePossibleReason: individualCategory.ModelFailurePossibleReason, - CWE: individualCategory.CWE, - } - context.checkIdSyntax(id) - if _, exists := context.parsedModel.IndividualRiskCategories[id]; exists { - panic(errors.New("duplicate id used: " + id)) - } - context.parsedModel.IndividualRiskCategories[id] = cat - - // NOW THE INDIVIDUAL RISK INSTANCES: - //individualRiskInstances := make([]model.Risk, 0) - if individualCategory.RisksIdentified != nil { // TODO: also add syntax checks of input YAML when linked asset is not found or when synthetic-id is already used... 
- for title, individualRiskInstance := range individualCategory.RisksIdentified { - var mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId string - var dataBreachProbability types.DataBreachProbability - var dataBreachTechnicalAssetIDs []string - severity, err := types.ParseRiskSeverity(individualRiskInstance.Severity) - if err != nil { - panic(errors.New("unknown 'severity' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.Severity))) - } - exploitationLikelihood, err := types.ParseRiskExploitationLikelihood(individualRiskInstance.ExploitationLikelihood) - if err != nil { - panic(errors.New("unknown 'exploitation_likelihood' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationLikelihood))) - } - exploitationImpact, err := types.ParseRiskExploitationImpact(individualRiskInstance.ExploitationImpact) - if err != nil { - panic(errors.New("unknown 'exploitation_impact' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationImpact))) - } - - if len(individualRiskInstance.MostRelevantDataAsset) > 0 { - mostRelevantDataAssetId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantDataAsset) - context.checkDataAssetTargetExists(mostRelevantDataAssetId, "individual risk '"+title+"'") - } - - if len(individualRiskInstance.MostRelevantTechnicalAsset) > 0 { - mostRelevantTechnicalAssetId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantTechnicalAsset) - context.checkTechnicalAssetExists(mostRelevantTechnicalAssetId, "individual risk '"+title+"'", false) - } - - if len(individualRiskInstance.MostRelevantCommunicationLink) > 0 { - mostRelevantCommunicationLinkId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantCommunicationLink) - context.checkCommunicationLinkExists(mostRelevantCommunicationLinkId, "individual risk 
'"+title+"'") - } - - if len(individualRiskInstance.MostRelevantTrustBoundary) > 0 { - mostRelevantTrustBoundaryId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantTrustBoundary) - context.checkTrustBoundaryExists(mostRelevantTrustBoundaryId, "individual risk '"+title+"'") - } - - if len(individualRiskInstance.MostRelevantSharedRuntime) > 0 { - mostRelevantSharedRuntimeId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantSharedRuntime) - context.checkSharedRuntimeExists(mostRelevantSharedRuntimeId, "individual risk '"+title+"'") - } - - dataBreachProbability, err = types.ParseDataBreachProbability(individualRiskInstance.DataBreachProbability) - if err != nil { - panic(errors.New("unknown 'data_breach_probability' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.DataBreachProbability))) - } - - if individualRiskInstance.DataBreachTechnicalAssets != nil { - dataBreachTechnicalAssetIDs = make([]string, len(individualRiskInstance.DataBreachTechnicalAssets)) - for i, parsedReferencedAsset := range individualRiskInstance.DataBreachTechnicalAssets { - assetId := fmt.Sprintf("%v", parsedReferencedAsset) - context.checkTechnicalAssetExists(assetId, "data breach technical assets of individual risk '"+title+"'", false) - dataBreachTechnicalAssetIDs[i] = assetId - } - } - - individualRiskInstance := model.Risk{ - SyntheticId: createSyntheticId(cat.Id, mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId), - Title: fmt.Sprintf("%v", title), - Category: cat, - Severity: severity, - ExploitationLikelihood: exploitationLikelihood, - ExploitationImpact: exploitationImpact, - MostRelevantDataAssetId: mostRelevantDataAssetId, - MostRelevantTechnicalAssetId: mostRelevantTechnicalAssetId, - MostRelevantCommunicationLinkId: mostRelevantCommunicationLinkId, - MostRelevantTrustBoundaryId: mostRelevantTrustBoundaryId, - 
MostRelevantSharedRuntimeId: mostRelevantSharedRuntimeId, - DataBreachProbability: dataBreachProbability, - DataBreachTechnicalAssetIDs: dataBreachTechnicalAssetIDs, - } - context.parsedModel.GeneratedRisksByCategory[cat] = append(context.parsedModel.GeneratedRisksByCategory[cat], individualRiskInstance) - } - } - } - - // Risk Tracking =============================================================================== - context.parsedModel.RiskTracking = make(map[string]model.RiskTracking) - for syntheticRiskId, riskTracking := range context.modelInput.RiskTracking { - justification := fmt.Sprintf("%v", riskTracking.Justification) - checkedBy := fmt.Sprintf("%v", riskTracking.CheckedBy) - ticket := fmt.Sprintf("%v", riskTracking.Ticket) - var date time.Time - if len(riskTracking.Date) > 0 { - var parseError error - date, parseError = time.Parse("2006-01-02", riskTracking.Date) - if parseError != nil { - panic(errors.New("unable to parse 'date' of risk tracking '" + syntheticRiskId + "': " + riskTracking.Date)) - } - } - - status, err := types.ParseRiskStatus(riskTracking.Status) - if err != nil { - panic(errors.New("unknown 'status' value of risk tracking '" + syntheticRiskId + "': " + riskTracking.Status)) - } - - tracking := model.RiskTracking{ - SyntheticRiskId: strings.TrimSpace(syntheticRiskId), - Justification: justification, - CheckedBy: checkedBy, - Ticket: ticket, - Date: date, - Status: status, - } - if strings.Contains(syntheticRiskId, "*") { // contains a wildcard char - context.deferredRiskTrackingDueToWildcardMatching[syntheticRiskId] = tracking - } else { - context.parsedModel.RiskTracking[syntheticRiskId] = tracking - } - } - - // ====================== model consistency check (linking) - for _, technicalAsset := range context.parsedModel.TechnicalAssets { - for _, commLink := range technicalAsset.CommunicationLinks { - context.checkTechnicalAssetExists(commLink.TargetId, "communication link '"+commLink.Title+"' of technical asset 
'"+technicalAsset.Title+"'", false) - } - } -} - -func createSyntheticId(categoryId string, - mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId string) string { - result := categoryId - if len(mostRelevantTechnicalAssetId) > 0 { - result += "@" + mostRelevantTechnicalAssetId - } - if len(mostRelevantCommunicationLinkId) > 0 { - result += "@" + mostRelevantCommunicationLinkId - } - if len(mostRelevantTrustBoundaryId) > 0 { - result += "@" + mostRelevantTrustBoundaryId - } - if len(mostRelevantSharedRuntimeId) > 0 { - result += "@" + mostRelevantSharedRuntimeId - } - if len(mostRelevantDataAssetId) > 0 { - result += "@" + mostRelevantDataAssetId - } - return result -} - -func createDataFlowId(sourceAssetId, title string) string { - reg, err := regexp.Compile("[^A-Za-z0-9]+") - checkErr(err) - return sourceAssetId + ">" + strings.Trim(reg.ReplaceAllString(strings.ToLower(title), "-"), "- ") -} - -func (context *Context) checkDataAssetTargetExists(referencedAsset, where string) { - if _, ok := context.parsedModel.DataAssets[referencedAsset]; !ok { - panic(errors.New("missing referenced data asset target at " + where + ": " + referencedAsset)) - } -} - -func (context *Context) checkTrustBoundaryExists(referencedId, where string) { - if _, ok := context.parsedModel.TrustBoundaries[referencedId]; !ok { - panic(errors.New("missing referenced trust boundary at " + where + ": " + referencedId)) - } -} - -func (context *Context) checkSharedRuntimeExists(referencedId, where string) { - if _, ok := context.parsedModel.SharedRuntimes[referencedId]; !ok { - panic(errors.New("missing referenced shared runtime at " + where + ": " + referencedId)) - } -} - -func (context *Context) checkCommunicationLinkExists(referencedId, where string) { - if _, ok := context.parsedModel.CommunicationLinks[referencedId]; !ok { - panic(errors.New("missing referenced communication link at " + where + ": " + 
referencedId)) - } -} - -func (context *Context) checkTechnicalAssetExists(referencedAsset, where string, onlyForTweak bool) { - if _, ok := context.parsedModel.TechnicalAssets[referencedAsset]; !ok { - suffix := "" - if onlyForTweak { - suffix = " (only referenced in diagram tweak)" - } - panic(errors.New("missing referenced technical asset target" + suffix + " at " + where + ": " + referencedAsset)) - } -} - -func (context *Context) checkNestedTrustBoundariesExisting() { - for _, trustBoundary := range context.parsedModel.TrustBoundaries { - for _, nestedId := range trustBoundary.TrustBoundariesNested { - if _, ok := context.parsedModel.TrustBoundaries[nestedId]; !ok { - panic(errors.New("missing referenced nested trust boundary: " + nestedId)) - } - } - } -} - -// in order to prevent Path-Traversal like stuff... -func removePathElementsFromImageFiles(overview input.Overview) input.Overview { - for i := range overview.Images { - newValue := make(map[string]string) - for file, desc := range overview.Images[i] { - newValue[filepath.Base(file)] = desc - } - overview.Images[i] = newValue - } - return overview -} - func (context *Context) writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.File { if *context.verbose { fmt.Println("Writing data flow diagram input") @@ -1338,7 +710,7 @@ func (context *Context) makeDiagramSameRankNodeTweaks() string { if len(assetIDs) > 0 { tweak += "{ rank=same; " for _, id := range assetIDs { - context.checkTechnicalAssetExists(id, "diagram tweak same-rank", true) + checkErr(context.parsedModel.CheckTechnicalAssetExists(id, "diagram tweak same-rank", true)) if len(context.parsedModel.TechnicalAssets[id].GetTrustBoundaryId(&context.parsedModel)) > 0 { panic(errors.New("technical assets (referenced in same rank diagram tweak) are inside trust boundaries: " + fmt.Sprintf("%v", context.parsedModel.DiagramTweakSameRankAssets))) @@ -1359,8 +731,8 @@ func (context *Context) makeDiagramInvisibleConnectionsTweaks() string { 
for _, invisibleConnections := range context.parsedModel.DiagramTweakInvisibleConnectionsBetweenAssets { assetIDs := strings.Split(invisibleConnections, ":") if len(assetIDs) == 2 { - context.checkTechnicalAssetExists(assetIDs[0], "diagram tweak connections", true) - context.checkTechnicalAssetExists(assetIDs[1], "diagram tweak connections", true) + checkErr(context.parsedModel.CheckTechnicalAssetExists(assetIDs[0], "diagram tweak connections", true)) + checkErr(context.parsedModel.CheckTechnicalAssetExists(assetIDs[1], "diagram tweak connections", true)) tweak += "\n" + hash(assetIDs[0]) + " -> " + hash(assetIDs[1]) + " [style=invis]; \n" } } @@ -1389,7 +761,26 @@ func (context *Context) DoIt() { } } - context.parseModel() + if *context.verbose { + fmt.Println("Parsing model:", *context.modelFilename) + } + + context.modelInput = *new(input.ModelInput).Defaults() + loadError := context.modelInput.Load(*context.modelFilename) + if loadError != nil { + log.Fatal("Unable to parse model yaml: ", loadError) + } + + // data, _ := json.MarshalIndent(context.modelInput, "", " ") + // fmt.Printf("%v\n", string(data)) + + parsedModel, err := model.ParseModel(&context.modelInput) + if err != nil { + panic(err) + } + + context.parsedModel = *parsedModel + introTextRAA := context.applyRAA() context.customRiskRules = risks.LoadCustomRiskRules(strings.Split(*context.riskRulesPlugins, ","), context.progressReporter) @@ -1845,13 +1236,6 @@ func (context *Context) applyRAA() string { return runner.ErrorOutput } -func (context *Context) checkIdSyntax(id string) { - validIdSyntax := regexp.MustCompile(`^[a-zA-Z0-9\-]+$`) - if !validIdSyntax.MatchString(id) { - panic(errors.New("invalid id syntax used (only letters, numbers, and hyphen allowed): " + id)) - } -} - func (context *Context) analyze(ginContext *gin.Context) { context.execute(ginContext, false) } @@ -2352,7 +1736,7 @@ func (context *Context) streamResponse(ginContext *gin.Context, responseType res }() dpi, err := 
strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(context.DefaultGraphvizDPI))) if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return } _, yamlText, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) @@ -2361,13 +1745,13 @@ func (context *Context) streamResponse(ginContext *gin.Context, responseType res } tmpModelFile, err := os.CreateTemp(*context.tempFolder, "threagile-render-*") if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return } defer func() { _ = os.Remove(tmpModelFile.Name()) }() tmpOutputDir, err := os.MkdirTemp(*context.tempFolder, "threagile-render-") if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return } defer func() { _ = os.RemoveAll(tmpOutputDir) }() @@ -2375,71 +1759,71 @@ func (context *Context) streamResponse(ginContext *gin.Context, responseType res if responseType == dataFlowDiagram { context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, true, false, false, false, false, false, false, false, dpi) if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return } ginContext.File(filepath.Join(tmpOutputDir, context.dataFlowDiagramFilenamePNG)) } else if responseType == dataAssetDiagram { context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, true, false, false, false, false, false, false, dpi) if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return } ginContext.File(filepath.Join(tmpOutputDir, context.dataAssetDiagramFilenamePNG)) } else if responseType == reportPDF { context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, true, false, false, false, false, false, dpi) if err != nil { - context.handleErrorInServiceCall(err, ginContext) + 
handleErrorInServiceCall(err, ginContext) return } ginContext.FileAttachment(filepath.Join(tmpOutputDir, context.reportFilename), context.reportFilename) } else if responseType == risksExcel { context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, true, false, false, false, false, dpi) if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return } ginContext.FileAttachment(filepath.Join(tmpOutputDir, context.excelRisksFilename), context.excelRisksFilename) } else if responseType == tagsExcel { context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, true, false, false, false, dpi) if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return } ginContext.FileAttachment(filepath.Join(tmpOutputDir, context.excelTagsFilename), context.excelTagsFilename) } else if responseType == risksJSON { context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, true, false, false, dpi) if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return } jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, context.jsonRisksFilename)) if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return } ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download } else if responseType == technicalAssetsJSON { context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, true, true, false, dpi) if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return } jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, context.jsonTechnicalAssetsFilename)) if err != nil { - 
context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return } ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download } else if responseType == statsJSON { context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, false, false, true, dpi) if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return } jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, context.jsonStatsFilename)) if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return } ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download @@ -2462,7 +1846,7 @@ func (context *Context) importModel(ginContext *gin.Context) { yamlContent, ok := context.execute(ginContext, true) if ok { // if we're here, then no problem was raised, so ok to proceed - ok = context.writeModelYAML(ginContext, string(yamlContent), key, context.folderNameForModel(folderNameOfKey, aUuid), "Model Import", false) + ok = context.writeModelYAML(ginContext, string(yamlContent), key, folderNameForModel(folderNameOfKey, aUuid), "Model Import", false) if ok { ginContext.JSON(http.StatusCreated, gin.H{ "message": "model imported", @@ -2671,7 +2055,7 @@ func (context *Context) setSharedRuntime(ginContext *gin.Context) { }) return } - sharedRuntimeInput, ok := context.populateSharedRuntime(ginContext, payload) + sharedRuntimeInput, ok := populateSharedRuntime(ginContext, payload) if !ok { return } @@ -2882,7 +2266,7 @@ func (context *Context) createNewSharedRuntime(ginContext *gin.Context) { }) return } - sharedRuntimeInput, ok := context.populateSharedRuntime(ginContext, payload) + sharedRuntimeInput, ok := populateSharedRuntime(ginContext, payload) if !ok { return } @@ -2916,7 
+2300,7 @@ func (context *Context) checkTechnicalAssetsExisting(modelInput input.ModelInput return true } -func (context *Context) populateSharedRuntime(_ *gin.Context, payload payloadSharedRuntime) (sharedRuntimeInput input.InputSharedRuntime, ok bool) { +func populateSharedRuntime(_ *gin.Context, payload payloadSharedRuntime) (sharedRuntimeInput input.InputSharedRuntime, ok bool) { sharedRuntimeInput = input.InputSharedRuntime{ ID: payload.Id, Description: payload.Description, @@ -3026,27 +2410,27 @@ func (context *Context) createNewDataAsset(ginContext *gin.Context) { func (context *Context) populateDataAsset(ginContext *gin.Context, payload payloadDataAsset) (dataAssetInput input.InputDataAsset, ok bool) { usage, err := types.ParseUsage(payload.Usage) if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return dataAssetInput, false } quantity, err := types.ParseQuantity(payload.Quantity) if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return dataAssetInput, false } confidentiality, err := types.ParseConfidentiality(payload.Confidentiality) if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return dataAssetInput, false } integrity, err := types.ParseCriticality(payload.Integrity) if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return dataAssetInput, false } availability, err := types.ParseCriticality(payload.Availability) if err != nil { - context.handleErrorInServiceCall(err, ginContext) + handleErrorInServiceCall(err, ginContext) return dataAssetInput, false } dataAssetInput = input.InputDataAsset{ @@ -3249,7 +2633,7 @@ func (context *Context) setOverview(ginContext *gin.Context) { } criticality, err := types.ParseCriticality(payload.BusinessCriticality) if err != nil { - context.handleErrorInServiceCall(err, 
ginContext) + handleErrorInServiceCall(err, ginContext) return } modelInput.ManagementSummaryComment = payload.ManagementSummaryComment @@ -3267,7 +2651,7 @@ func (context *Context) setOverview(ginContext *gin.Context) { } } -func (context *Context) handleErrorInServiceCall(err error, ginContext *gin.Context) { +func handleErrorInServiceCall(err error, ginContext *gin.Context) { log.Println(err) ginContext.JSON(http.StatusBadRequest, gin.H{ "error": strings.TrimSpace(err.Error()), @@ -3361,7 +2745,7 @@ func (context *Context) createNewModel(ginContext *gin.Context) { defer context.unlockFolder(folderNameOfKey) aUuid := uuid.New().String() - err := os.Mkdir(context.folderNameForModel(folderNameOfKey, aUuid), 0700) + err := os.Mkdir(folderNameForModel(folderNameOfKey, aUuid), 0700) if err != nil { ginContext.JSON(http.StatusInternalServerError, gin.H{ "error": "unable to create model", @@ -3400,7 +2784,7 @@ diagram_tweak_suppress_edge_labels: false diagram_tweak_invisible_connections_between_assets: [] diagram_tweak_same_rank_assets: []` - ok = context.writeModelYAML(ginContext, aYaml, key, context.folderNameForModel(folderNameOfKey, aUuid), "New Model Creation", true) + ok = context.writeModelYAML(ginContext, aYaml, key, folderNameForModel(folderNameOfKey, aUuid), "New Model Creation", true) if ok { ginContext.JSON(http.StatusCreated, gin.H{ "message": "model created", @@ -3502,7 +2886,7 @@ func (context *Context) checkModelFolder(ginContext *gin.Context, modelUUID stri }) return modelFolder, false } - modelFolder = context.folderNameForModel(folderNameOfKey, uuidParsed.String()) + modelFolder = folderNameForModel(folderNameOfKey, uuidParsed.String()) if _, err := os.Stat(modelFolder); os.IsNotExist(err) { ginContext.JSON(http.StatusNotFound, gin.H{ "error": "model not found", @@ -3717,7 +3101,7 @@ func (context *Context) generateKeyFromAlreadyStrongRandomInput(alreadyRandomInp return hash } -func (context *Context) folderNameForModel(folderNameOfKey string, uuid 
string) string { +func folderNameForModel(folderNameOfKey string, uuid string) string { return filepath.Join(folderNameOfKey, uuid) } @@ -4035,6 +3419,9 @@ func (context *Context) ParseCommandlineArgs() { // folders context.templateFilename = flag.String("background", "background.pdf", "background pdf file") context.generateDataFlowDiagram = flag.Bool("generate-data-flow-diagram", true, "generate data-flow diagram") context.generateDataAssetDiagram = flag.Bool("generate-data-asset-diagram", true, "generate data asset diagram") + context.generateRisksJSON = flag.Bool("generate-risks-json", true, "generate risks json") + context.generateStatsJSON = flag.Bool("generate-stats-json", true, "generate stats json") + context.generateTechnicalAssetsJSON = flag.Bool("generate-technical-assets-json", true, "generate technical assets json") context.generateRisksExcel = flag.Bool("generate-risks-excel", true, "generate risks excel") context.generateTagsExcel = flag.Bool("generate-tags-excel", true, "generate tags excel") context.generateReportPDF = flag.Bool("generate-report-pdf", true, "generate report pdf, including diagrams") @@ -4071,7 +3458,7 @@ func (context *Context) applyWildcardRiskTrackingEvaluation() { if *context.verbose { fmt.Println("Executing risk tracking evaluation") } - for syntheticRiskIdPattern, riskTracking := range context.deferredRiskTrackingDueToWildcardMatching { + for syntheticRiskIdPattern, riskTracking := range context.getDeferredRiskTrackingDueToWildcardMatching() { if *context.verbose { fmt.Println("Applying wildcard risk tracking for risk id: " + syntheticRiskIdPattern) } @@ -4102,6 +3489,17 @@ func (context *Context) applyWildcardRiskTrackingEvaluation() { } } +func (context *Context) getDeferredRiskTrackingDueToWildcardMatching() map[string]model.RiskTracking { + deferredRiskTrackingDueToWildcardMatching := make(map[string]model.RiskTracking) + for syntheticRiskId, riskTracking := range context.parsedModel.RiskTracking { + if 
strings.Contains(syntheticRiskId, "*") { // contains a wildcard char + deferredRiskTrackingDueToWildcardMatching[syntheticRiskId] = riskTracking + } + } + + return deferredRiskTrackingDueToWildcardMatching +} + func (context *Context) hasNotYetAnyDirectNonWildcardRiskTracking(syntheticRiskId string) bool { if _, ok := context.parsedModel.RiskTracking[syntheticRiskId]; ok { return false @@ -4393,25 +3791,6 @@ func lowerCaseAndTrim(tags []string) []string { return tags } -func (context *Context) checkTags(tags []string, where string) []string { - var tagsUsed = make([]string, 0) - if tags != nil { - tagsUsed = make([]string, len(tags)) - for i, parsedEntry := range tags { - referencedTag := fmt.Sprintf("%v", parsedEntry) - context.checkTagExists(referencedTag, where) - tagsUsed[i] = referencedTag - } - } - return tagsUsed -} - -func (context *Context) checkTagExists(referencedTag, where string) { - if !contains(context.parsedModel.TagsAvailable, referencedTag) { - panic(errors.New("missing referenced tag in overall tag list at " + where + ": " + referencedTag)) - } -} - func contains(a []string, x string) bool { for _, n := range a { if x == n { @@ -4421,14 +3800,6 @@ func contains(a []string, x string) bool { return false } -func withDefault(value string, defaultWhenEmpty string) string { - trimmed := strings.TrimSpace(value) - if len(trimmed) > 0 && trimmed != "" { - return trimmed - } - return strings.TrimSpace(defaultWhenEmpty) -} - func hash(s string) string { h := fnv.New32a() _, _ = h.Write([]byte(s)) diff --git a/pkg/model/model.go b/pkg/model/model.go index a929a73f..c4529986 100644 --- a/pkg/model/model.go +++ b/pkg/model/model.go @@ -4,7 +4,12 @@ Copyright © 2023 NAME HERE package model import ( + "errors" + "fmt" + "path/filepath" + "regexp" "sort" + "strings" "time" "github.com/threagile/threagile/pkg/input" @@ -45,6 +50,713 @@ type ParsedModel struct { GeneratedRisksBySyntheticId map[string]Risk } +func ParseModel(modelInput *input.ModelInput) 
(*ParsedModel, error) { + businessCriticality, err := types.ParseCriticality(modelInput.BusinessCriticality) + if err != nil { + panic(errors.New("unknown 'business_criticality' value of application: " + modelInput.BusinessCriticality)) + } + + reportDate := time.Now() + if len(modelInput.Date) > 0 { + var parseError error + reportDate, parseError = time.Parse("2006-01-02", modelInput.Date) + if parseError != nil { + panic(errors.New("unable to parse 'date' value of model file")) + } + } + + parsedModel := ParsedModel{ + Author: modelInput.Author, + Title: modelInput.Title, + Date: reportDate, + ManagementSummaryComment: modelInput.ManagementSummaryComment, + BusinessCriticality: businessCriticality, + BusinessOverview: removePathElementsFromImageFiles(modelInput.BusinessOverview), + TechnicalOverview: removePathElementsFromImageFiles(modelInput.TechnicalOverview), + Questions: modelInput.Questions, + AbuseCases: modelInput.AbuseCases, + SecurityRequirements: modelInput.SecurityRequirements, + TagsAvailable: lowerCaseAndTrim(modelInput.TagsAvailable), + DiagramTweakNodesep: modelInput.DiagramTweakNodesep, + DiagramTweakRanksep: modelInput.DiagramTweakRanksep, + DiagramTweakEdgeLayout: modelInput.DiagramTweakEdgeLayout, + DiagramTweakSuppressEdgeLabels: modelInput.DiagramTweakSuppressEdgeLabels, + DiagramTweakLayoutLeftToRight: modelInput.DiagramTweakLayoutLeftToRight, + DiagramTweakInvisibleConnectionsBetweenAssets: modelInput.DiagramTweakInvisibleConnectionsBetweenAssets, + DiagramTweakSameRankAssets: modelInput.DiagramTweakSameRankAssets, + } + + parsedModel.CommunicationLinks = make(map[string]CommunicationLink) + parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId = make(map[string][]CommunicationLink) + parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId = make(map[string]TrustBoundary) + parsedModel.GeneratedRisksByCategory = make(map[RiskCategory][]Risk) + parsedModel.GeneratedRisksBySyntheticId = make(map[string]Risk) + 
parsedModel.AllSupportedTags = make(map[string]bool) + + if parsedModel.DiagramTweakNodesep == 0 { + parsedModel.DiagramTweakNodesep = 2 + } + if parsedModel.DiagramTweakRanksep == 0 { + parsedModel.DiagramTweakRanksep = 2 + } + + // Data Assets =============================================================================== + parsedModel.DataAssets = make(map[string]DataAsset) + for title, asset := range modelInput.DataAssets { + id := fmt.Sprintf("%v", asset.ID) + + usage, err := types.ParseUsage(asset.Usage) + if err != nil { + panic(errors.New("unknown 'usage' value of data asset '" + title + "': " + asset.Usage)) + } + quantity, err := types.ParseQuantity(asset.Quantity) + if err != nil { + panic(errors.New("unknown 'quantity' value of data asset '" + title + "': " + asset.Quantity)) + } + confidentiality, err := types.ParseConfidentiality(asset.Confidentiality) + if err != nil { + panic(errors.New("unknown 'confidentiality' value of data asset '" + title + "': " + asset.Confidentiality)) + } + integrity, err := types.ParseCriticality(asset.Integrity) + if err != nil { + panic(errors.New("unknown 'integrity' value of data asset '" + title + "': " + asset.Integrity)) + } + availability, err := types.ParseCriticality(asset.Availability) + if err != nil { + panic(errors.New("unknown 'availability' value of data asset '" + title + "': " + asset.Availability)) + } + + checkIdSyntax(id) + if _, exists := parsedModel.DataAssets[id]; exists { + panic(errors.New("duplicate id used: " + id)) + } + parsedModel.DataAssets[id] = DataAsset{ + Id: id, + Title: title, + Usage: usage, + Description: withDefault(fmt.Sprintf("%v", asset.Description), title), + Quantity: quantity, + Tags: parsedModel.checkTags(lowerCaseAndTrim(asset.Tags), "data asset '"+title+"'"), + Origin: fmt.Sprintf("%v", asset.Origin), + Owner: fmt.Sprintf("%v", asset.Owner), + Confidentiality: confidentiality, + Integrity: integrity, + Availability: availability, + JustificationCiaRating: fmt.Sprintf("%v", 
asset.JustificationCiaRating), + } + } + + // Technical Assets =============================================================================== + parsedModel.TechnicalAssets = make(map[string]TechnicalAsset) + for title, asset := range modelInput.TechnicalAssets { + id := fmt.Sprintf("%v", asset.ID) + + usage, err := types.ParseUsage(asset.Usage) + if err != nil { + panic(errors.New("unknown 'usage' value of technical asset '" + title + "': " + asset.Usage)) + } + + var dataAssetsProcessed = make([]string, 0) + if asset.DataAssetsProcessed != nil { + dataAssetsProcessed = make([]string, len(asset.DataAssetsProcessed)) + for i, parsedProcessedAsset := range asset.DataAssetsProcessed { + referencedAsset := fmt.Sprintf("%v", parsedProcessedAsset) + err := parsedModel.checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") + if err != nil { + return nil, err + } + dataAssetsProcessed[i] = referencedAsset + } + } + + var dataAssetsStored = make([]string, 0) + if asset.DataAssetsStored != nil { + dataAssetsStored = make([]string, len(asset.DataAssetsStored)) + for i, parsedStoredAssets := range asset.DataAssetsStored { + referencedAsset := fmt.Sprintf("%v", parsedStoredAssets) + err := parsedModel.checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") + if err != nil { + return nil, err + } + dataAssetsStored[i] = referencedAsset + } + } + + technicalAssetType, err := types.ParseTechnicalAssetType(asset.Type) + if err != nil { + return nil, errors.New("unknown 'type' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Type)) + } + technicalAssetSize, err := types.ParseTechnicalAssetSize(asset.Size) + if err != nil { + return nil, errors.New("unknown 'size' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Size)) + } + technicalAssetTechnology, err := types.ParseTechnicalAssetTechnology(asset.Technology) + if err != nil { + return nil, errors.New("unknown 'technology' value of technical asset '" + 
title + "': " + fmt.Sprintf("%v", asset.Technology)) + } + encryption, err := types.ParseEncryptionStyle(asset.Encryption) + if err != nil { + return nil, errors.New("unknown 'encryption' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Encryption)) + } + technicalAssetMachine, err := types.ParseTechnicalAssetMachine(asset.Machine) + if err != nil { + return nil, errors.New("unknown 'machine' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Machine)) + } + confidentiality, err := types.ParseConfidentiality(asset.Confidentiality) + if err != nil { + return nil, errors.New("unknown 'confidentiality' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Confidentiality)) + } + integrity, err := types.ParseCriticality(asset.Integrity) + if err != nil { + return nil, errors.New("unknown 'integrity' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Integrity)) + } + availability, err := types.ParseCriticality(asset.Availability) + if err != nil { + return nil, errors.New("unknown 'availability' value of technical asset '" + title + "': " + fmt.Sprintf("%v", asset.Availability)) + } + + dataFormatsAccepted := make([]types.DataFormat, 0) + if asset.DataFormatsAccepted != nil { + for _, dataFormatName := range asset.DataFormatsAccepted { + dataFormat, err := types.ParseDataFormat(dataFormatName) + if err != nil { + panic(errors.New("unknown 'data_formats_accepted' value of technical asset '" + title + "': " + fmt.Sprintf("%v", dataFormatName))) + } + dataFormatsAccepted = append(dataFormatsAccepted, dataFormat) + } + } + + communicationLinks := make([]CommunicationLink, 0) + if asset.CommunicationLinks != nil { + for commLinkTitle, commLink := range asset.CommunicationLinks { + constraint := true + weight := 1 + var dataAssetsSent []string + var dataAssetsReceived []string + + authentication, err := types.ParseAuthentication(commLink.Authentication) + if err != nil { + return nil, 
errors.New("unknown 'authentication' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Authentication)) + } + authorization, err := types.ParseAuthorization(commLink.Authorization) + if err != nil { + return nil, errors.New("unknown 'authorization' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Authorization)) + } + usage, err := types.ParseUsage(commLink.Usage) + if err != nil { + return nil, errors.New("unknown 'usage' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Usage)) + } + protocol, err := types.ParseProtocol(commLink.Protocol) + if err != nil { + return nil, errors.New("unknown 'protocol' value of technical asset '" + title + "' communication link '" + commLinkTitle + "': " + fmt.Sprintf("%v", commLink.Protocol)) + } + + if commLink.DataAssetsSent != nil { + for _, dataAssetSent := range commLink.DataAssetsSent { + referencedAsset := fmt.Sprintf("%v", dataAssetSent) + err := parsedModel.checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") + if err != nil { + return nil, err + } + dataAssetsSent = append(dataAssetsSent, referencedAsset) + } + } + + if commLink.DataAssetsReceived != nil { + for _, dataAssetReceived := range commLink.DataAssetsReceived { + referencedAsset := fmt.Sprintf("%v", dataAssetReceived) + err := parsedModel.checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") + if err != nil { + return nil, err + } + dataAssetsReceived = append(dataAssetsReceived, referencedAsset) + } + } + + if commLink.DiagramTweakWeight > 0 { + weight = commLink.DiagramTweakWeight + } + + constraint = !commLink.DiagramTweakConstraint + + dataFlowTitle := fmt.Sprintf("%v", commLinkTitle) + if err != nil { + return nil, err + } + commLinkId, 
err := createDataFlowId(id, dataFlowTitle) + if err != nil { + return nil, err + } + commLink := CommunicationLink{ + Id: commLinkId, + SourceId: id, + TargetId: commLink.Target, + Title: dataFlowTitle, + Description: withDefault(commLink.Description, dataFlowTitle), + Protocol: protocol, + Authentication: authentication, + Authorization: authorization, + Usage: usage, + Tags: parsedModel.checkTags(lowerCaseAndTrim(commLink.Tags), "communication link '"+commLinkTitle+"' of technical asset '"+title+"'"), + VPN: commLink.VPN, + IpFiltered: commLink.IpFiltered, + Readonly: commLink.Readonly, + DataAssetsSent: dataAssetsSent, + DataAssetsReceived: dataAssetsReceived, + DiagramTweakWeight: weight, + DiagramTweakConstraint: constraint, + } + communicationLinks = append(communicationLinks, commLink) + // track all comm links + parsedModel.CommunicationLinks[commLink.Id] = commLink + // keep track of map of *all* comm links mapped by target-id (to be able to look up "who is calling me" kind of things) + parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId[commLink.TargetId] = append( + parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId[commLink.TargetId], commLink) + } + } + + checkIdSyntax(id) + if _, exists := parsedModel.TechnicalAssets[id]; exists { + panic(errors.New("duplicate id used: " + id)) + } + parsedModel.TechnicalAssets[id] = TechnicalAsset{ + Id: id, + Usage: usage, + Title: title, //fmt.Sprintf("%v", asset["title"]), + Description: withDefault(fmt.Sprintf("%v", asset.Description), title), + Type: technicalAssetType, + Size: technicalAssetSize, + Technology: technicalAssetTechnology, + Tags: parsedModel.checkTags(lowerCaseAndTrim(asset.Tags), "technical asset '"+title+"'"), + Machine: technicalAssetMachine, + Internet: asset.Internet, + Encryption: encryption, + MultiTenant: asset.MultiTenant, + Redundant: asset.Redundant, + CustomDevelopedParts: asset.CustomDevelopedParts, + UsedAsClientByHuman: asset.UsedAsClientByHuman, + 
OutOfScope: asset.OutOfScope, + JustificationOutOfScope: fmt.Sprintf("%v", asset.JustificationOutOfScope), + Owner: fmt.Sprintf("%v", asset.Owner), + Confidentiality: confidentiality, + Integrity: integrity, + Availability: availability, + JustificationCiaRating: fmt.Sprintf("%v", asset.JustificationCiaRating), + DataAssetsProcessed: dataAssetsProcessed, + DataAssetsStored: dataAssetsStored, + DataFormatsAccepted: dataFormatsAccepted, + CommunicationLinks: communicationLinks, + DiagramTweakOrder: asset.DiagramTweakOrder, + } + } + + // Trust Boundaries =============================================================================== + checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries := make(map[string]bool) + parsedModel.TrustBoundaries = make(map[string]TrustBoundary) + for title, boundary := range modelInput.TrustBoundaries { + id := fmt.Sprintf("%v", boundary.ID) + + var technicalAssetsInside = make([]string, 0) + if boundary.TechnicalAssetsInside != nil { + parsedInsideAssets := boundary.TechnicalAssetsInside + technicalAssetsInside = make([]string, len(parsedInsideAssets)) + for i, parsedInsideAsset := range parsedInsideAssets { + technicalAssetsInside[i] = fmt.Sprintf("%v", parsedInsideAsset) + _, found := parsedModel.TechnicalAssets[technicalAssetsInside[i]] + if !found { + panic(errors.New("missing referenced technical asset " + technicalAssetsInside[i] + " at trust boundary '" + title + "'")) + } + if checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries[technicalAssetsInside[i]] == true { + panic(errors.New("referenced technical asset " + technicalAssetsInside[i] + " at trust boundary '" + title + "' is modeled in multiple trust boundaries")) + } + checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries[technicalAssetsInside[i]] = true + //fmt.Println("asset "+technicalAssetsInside[i]+" at i="+strconv.Itoa(i)) + } + } + + var trustBoundariesNested = make([]string, 0) + if boundary.TrustBoundariesNested != nil { + parsedNestedBoundaries := 
boundary.TrustBoundariesNested + trustBoundariesNested = make([]string, len(parsedNestedBoundaries)) + for i, parsedNestedBoundary := range parsedNestedBoundaries { + trustBoundariesNested[i] = fmt.Sprintf("%v", parsedNestedBoundary) + } + } + + trustBoundaryType, err := types.ParseTrustBoundary(boundary.Type) + if err != nil { + panic(errors.New("unknown 'type' of trust boundary '" + title + "': " + fmt.Sprintf("%v", boundary.Type))) + } + + trustBoundary := TrustBoundary{ + Id: id, + Title: title, //fmt.Sprintf("%v", boundary["title"]), + Description: withDefault(fmt.Sprintf("%v", boundary.Description), title), + Type: trustBoundaryType, + Tags: parsedModel.checkTags(lowerCaseAndTrim(boundary.Tags), "trust boundary '"+title+"'"), + TechnicalAssetsInside: technicalAssetsInside, + TrustBoundariesNested: trustBoundariesNested, + } + checkIdSyntax(id) + if _, exists := parsedModel.TrustBoundaries[id]; exists { + panic(errors.New("duplicate id used: " + id)) + } + parsedModel.TrustBoundaries[id] = trustBoundary + for _, technicalAsset := range trustBoundary.TechnicalAssetsInside { + parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId[technicalAsset] = trustBoundary + //fmt.Println("Asset "+technicalAsset+" is directly in trust boundary "+trustBoundary.Id) + } + } + err = parsedModel.checkNestedTrustBoundariesExisting() + if err != nil { + return nil, err + } + + // Shared Runtime =============================================================================== + parsedModel.SharedRuntimes = make(map[string]SharedRuntime) + for title, inputRuntime := range modelInput.SharedRuntimes { + id := fmt.Sprintf("%v", inputRuntime.ID) + + var technicalAssetsRunning = make([]string, 0) + if inputRuntime.TechnicalAssetsRunning != nil { + parsedRunningAssets := inputRuntime.TechnicalAssetsRunning + technicalAssetsRunning = make([]string, len(parsedRunningAssets)) + for i, parsedRunningAsset := range parsedRunningAssets { + assetId := fmt.Sprintf("%v", 
parsedRunningAsset) + err := parsedModel.CheckTechnicalAssetExists(assetId, "shared runtime '"+title+"'", false) + if err != nil { + return nil, err + } + technicalAssetsRunning[i] = assetId + } + } + + sharedRuntime := SharedRuntime{ + Id: id, + Title: title, //fmt.Sprintf("%v", boundary["title"]), + Description: withDefault(fmt.Sprintf("%v", inputRuntime.Description), title), + Tags: parsedModel.checkTags(inputRuntime.Tags, "shared runtime '"+title+"'"), + TechnicalAssetsRunning: technicalAssetsRunning, + } + checkIdSyntax(id) + if _, exists := parsedModel.SharedRuntimes[id]; exists { + panic(errors.New("duplicate id used: " + id)) + } + parsedModel.SharedRuntimes[id] = sharedRuntime + } + + // Individual Risk Categories (just used as regular risk categories) =============================================================================== + parsedModel.IndividualRiskCategories = make(map[string]RiskCategory) + for title, individualCategory := range modelInput.IndividualRiskCategories { + id := fmt.Sprintf("%v", individualCategory.ID) + + function, err := types.ParseRiskFunction(individualCategory.Function) + if err != nil { + panic(errors.New("unknown 'function' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.Function))) + } + stride, err := types.ParseSTRIDE(individualCategory.STRIDE) + if err != nil { + panic(errors.New("unknown 'stride' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.STRIDE))) + } + + cat := RiskCategory{ + Id: id, + Title: title, + Description: withDefault(fmt.Sprintf("%v", individualCategory.Description), title), + Impact: fmt.Sprintf("%v", individualCategory.Impact), + ASVS: fmt.Sprintf("%v", individualCategory.ASVS), + CheatSheet: fmt.Sprintf("%v", individualCategory.CheatSheet), + Action: fmt.Sprintf("%v", individualCategory.Action), + Mitigation: fmt.Sprintf("%v", individualCategory.Mitigation), + Check: fmt.Sprintf("%v", individualCategory.Check), + 
DetectionLogic: fmt.Sprintf("%v", individualCategory.DetectionLogic), + RiskAssessment: fmt.Sprintf("%v", individualCategory.RiskAssessment), + FalsePositives: fmt.Sprintf("%v", individualCategory.FalsePositives), + Function: function, + STRIDE: stride, + ModelFailurePossibleReason: individualCategory.ModelFailurePossibleReason, + CWE: individualCategory.CWE, + } + checkIdSyntax(id) + if _, exists := parsedModel.IndividualRiskCategories[id]; exists { + panic(errors.New("duplicate id used: " + id)) + } + parsedModel.IndividualRiskCategories[id] = cat + + // NOW THE INDIVIDUAL RISK INSTANCES: + //individualRiskInstances := make([]model.Risk, 0) + if individualCategory.RisksIdentified != nil { // TODO: also add syntax checks of input YAML when linked asset is not found or when synthetic-id is already used... + for title, individualRiskInstance := range individualCategory.RisksIdentified { + var mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId string + var dataBreachProbability types.DataBreachProbability + var dataBreachTechnicalAssetIDs []string + severity, err := types.ParseRiskSeverity(individualRiskInstance.Severity) + if err != nil { + panic(errors.New("unknown 'severity' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.Severity))) + } + exploitationLikelihood, err := types.ParseRiskExploitationLikelihood(individualRiskInstance.ExploitationLikelihood) + if err != nil { + panic(errors.New("unknown 'exploitation_likelihood' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationLikelihood))) + } + exploitationImpact, err := types.ParseRiskExploitationImpact(individualRiskInstance.ExploitationImpact) + if err != nil { + panic(errors.New("unknown 'exploitation_impact' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", 
individualRiskInstance.ExploitationImpact))) + } + + if len(individualRiskInstance.MostRelevantDataAsset) > 0 { + mostRelevantDataAssetId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantDataAsset) + err := parsedModel.checkDataAssetTargetExists(mostRelevantDataAssetId, "individual risk '"+title+"'") + if err != nil { + return nil, err + } + } + + if len(individualRiskInstance.MostRelevantTechnicalAsset) > 0 { + mostRelevantTechnicalAssetId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantTechnicalAsset) + err := parsedModel.CheckTechnicalAssetExists(mostRelevantTechnicalAssetId, "individual risk '"+title+"'", false) + if err != nil { + return nil, err + } + } + + if len(individualRiskInstance.MostRelevantCommunicationLink) > 0 { + mostRelevantCommunicationLinkId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantCommunicationLink) + err := parsedModel.checkCommunicationLinkExists(mostRelevantCommunicationLinkId, "individual risk '"+title+"'") + if err != nil { + return nil, err + } + } + + if len(individualRiskInstance.MostRelevantTrustBoundary) > 0 { + mostRelevantTrustBoundaryId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantTrustBoundary) + err := parsedModel.checkTrustBoundaryExists(mostRelevantTrustBoundaryId, "individual risk '"+title+"'") + if err != nil { + return nil, err + } + } + + if len(individualRiskInstance.MostRelevantSharedRuntime) > 0 { + mostRelevantSharedRuntimeId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantSharedRuntime) + err := parsedModel.checkSharedRuntimeExists(mostRelevantSharedRuntimeId, "individual risk '"+title+"'") + if err != nil { + return nil, err + } + } + + dataBreachProbability, err = types.ParseDataBreachProbability(individualRiskInstance.DataBreachProbability) + if err != nil { + return nil, errors.New("unknown 'data_breach_probability' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.DataBreachProbability)) + } + + if 
individualRiskInstance.DataBreachTechnicalAssets != nil { + dataBreachTechnicalAssetIDs = make([]string, len(individualRiskInstance.DataBreachTechnicalAssets)) + for i, parsedReferencedAsset := range individualRiskInstance.DataBreachTechnicalAssets { + assetId := fmt.Sprintf("%v", parsedReferencedAsset) + err := parsedModel.CheckTechnicalAssetExists(assetId, "data breach technical assets of individual risk '"+title+"'", false) + if err != nil { + return nil, err + } + dataBreachTechnicalAssetIDs[i] = assetId + } + } + + individualRiskInstance := Risk{ + SyntheticId: createSyntheticId(cat.Id, mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId), + Title: fmt.Sprintf("%v", title), + Category: cat, + Severity: severity, + ExploitationLikelihood: exploitationLikelihood, + ExploitationImpact: exploitationImpact, + MostRelevantDataAssetId: mostRelevantDataAssetId, + MostRelevantTechnicalAssetId: mostRelevantTechnicalAssetId, + MostRelevantCommunicationLinkId: mostRelevantCommunicationLinkId, + MostRelevantTrustBoundaryId: mostRelevantTrustBoundaryId, + MostRelevantSharedRuntimeId: mostRelevantSharedRuntimeId, + DataBreachProbability: dataBreachProbability, + DataBreachTechnicalAssetIDs: dataBreachTechnicalAssetIDs, + } + parsedModel.GeneratedRisksByCategory[cat] = append(parsedModel.GeneratedRisksByCategory[cat], individualRiskInstance) + } + } + } + + // Risk Tracking =============================================================================== + parsedModel.RiskTracking = make(map[string]RiskTracking) + for syntheticRiskId, riskTracking := range modelInput.RiskTracking { + justification := fmt.Sprintf("%v", riskTracking.Justification) + checkedBy := fmt.Sprintf("%v", riskTracking.CheckedBy) + ticket := fmt.Sprintf("%v", riskTracking.Ticket) + var date time.Time + if len(riskTracking.Date) > 0 { + var parseError error + date, parseError = time.Parse("2006-01-02", 
riskTracking.Date) + if parseError != nil { + panic(errors.New("unable to parse 'date' of risk tracking '" + syntheticRiskId + "': " + riskTracking.Date)) + } + } + + status, err := types.ParseRiskStatus(riskTracking.Status) + if err != nil { + panic(errors.New("unknown 'status' value of risk tracking '" + syntheticRiskId + "': " + riskTracking.Status)) + } + + tracking := RiskTracking{ + SyntheticRiskId: strings.TrimSpace(syntheticRiskId), + Justification: justification, + CheckedBy: checkedBy, + Ticket: ticket, + Date: date, + Status: status, + } + + parsedModel.RiskTracking[syntheticRiskId] = tracking + } + + // ====================== model consistency check (linking) + for _, technicalAsset := range parsedModel.TechnicalAssets { + for _, commLink := range technicalAsset.CommunicationLinks { + err := parsedModel.CheckTechnicalAssetExists(commLink.TargetId, "communication link '"+commLink.Title+"' of technical asset '"+technicalAsset.Title+"'", false) + if err != nil { + return nil, err + } + } + } + + return &parsedModel, nil +} + +func checkIdSyntax(id string) { + validIdSyntax := regexp.MustCompile(`^[a-zA-Z0-9\-]+$`) + if !validIdSyntax.MatchString(id) { + panic(errors.New("invalid id syntax used (only letters, numbers, and hyphen allowed): " + id)) + } +} + +func createSyntheticId(categoryId string, + mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId string) string { + result := categoryId + if len(mostRelevantTechnicalAssetId) > 0 { + result += "@" + mostRelevantTechnicalAssetId + } + if len(mostRelevantCommunicationLinkId) > 0 { + result += "@" + mostRelevantCommunicationLinkId + } + if len(mostRelevantTrustBoundaryId) > 0 { + result += "@" + mostRelevantTrustBoundaryId + } + if len(mostRelevantSharedRuntimeId) > 0 { + result += "@" + mostRelevantSharedRuntimeId + } + if len(mostRelevantDataAssetId) > 0 { + result += "@" + mostRelevantDataAssetId + } + return 
result +} + +// in order to prevent Path-Traversal like stuff... +func removePathElementsFromImageFiles(overview input.Overview) input.Overview { + for i := range overview.Images { + newValue := make(map[string]string) + for file, desc := range overview.Images[i] { + newValue[filepath.Base(file)] = desc + } + overview.Images[i] = newValue + } + return overview +} + +func withDefault(value string, defaultWhenEmpty string) string { + trimmed := strings.TrimSpace(value) + if len(trimmed) > 0 && trimmed != "" { + return trimmed + } + return strings.TrimSpace(defaultWhenEmpty) +} + +func lowerCaseAndTrim(tags []string) []string { + for i := range tags { + tags[i] = strings.ToLower(strings.TrimSpace(tags[i])) + } + return tags +} + +func (parsedModel *ParsedModel) checkTags(tags []string, where string) []string { + var tagsUsed = make([]string, 0) + if tags != nil { + tagsUsed = make([]string, len(tags)) + for i, parsedEntry := range tags { + referencedTag := fmt.Sprintf("%v", parsedEntry) + parsedModel.checkTagExists(referencedTag, where) + tagsUsed[i] = referencedTag + } + } + return tagsUsed +} + +func (parsedModel *ParsedModel) checkTagExists(referencedTag, where string) { + if !contains(parsedModel.TagsAvailable, referencedTag) { + panic(errors.New("missing referenced tag in overall tag list at " + where + ": " + referencedTag)) + } +} + +func createDataFlowId(sourceAssetId, title string) (string, error) { + reg, err := regexp.Compile("[^A-Za-z0-9]+") + if err != nil { + return "", err + } + return sourceAssetId + ">" + strings.Trim(reg.ReplaceAllString(strings.ToLower(title), "-"), "- "), nil +} + +func (parsedModel *ParsedModel) checkDataAssetTargetExists(referencedAsset, where string) error { + if _, ok := parsedModel.DataAssets[referencedAsset]; !ok { + panic(errors.New("missing referenced data asset target at " + where + ": " + referencedAsset)) + } + return nil +} + +func (parsedModel *ParsedModel) checkTrustBoundaryExists(referencedId, where string) error { + 
if _, ok := parsedModel.TrustBoundaries[referencedId]; !ok { + return errors.New("missing referenced trust boundary at " + where + ": " + referencedId) + } + return nil +} + +func (parsedModel *ParsedModel) checkSharedRuntimeExists(referencedId, where string) error { + if _, ok := parsedModel.SharedRuntimes[referencedId]; !ok { + return errors.New("missing referenced shared runtime at " + where + ": " + referencedId) + } + return nil +} + +func (parsedModel *ParsedModel) checkCommunicationLinkExists(referencedId, where string) error { + if _, ok := parsedModel.CommunicationLinks[referencedId]; !ok { + return errors.New("missing referenced communication link at " + where + ": " + referencedId) + } + return nil +} + +func (parsedModel *ParsedModel) CheckTechnicalAssetExists(referencedAsset, where string, onlyForTweak bool) error { + if _, ok := parsedModel.TechnicalAssets[referencedAsset]; !ok { + suffix := "" + if onlyForTweak { + suffix = " (only referenced in diagram tweak)" + } + return errors.New("missing referenced technical asset target" + suffix + " at " + where + ": " + referencedAsset) + } + return nil +} + +func (parsedModel *ParsedModel) checkNestedTrustBoundariesExisting() error { + for _, trustBoundary := range parsedModel.TrustBoundaries { + for _, nestedId := range trustBoundary.TrustBoundariesNested { + if _, ok := parsedModel.TrustBoundaries[nestedId]; !ok { + return errors.New("missing referenced nested trust boundary: " + nestedId) + } + } + } + return nil +} + func CalculateSeverity(likelihood types.RiskExploitationLikelihood, impact types.RiskExploitationImpact) types.RiskSeverity { result := likelihood.Weight() * impact.Weight() if result <= 1 { From 4d8b398ddc70287f585f5887f59c58febba8ce58 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Mon, 25 Dec 2023 20:31:40 +0000 Subject: [PATCH 20/68] Return error instead of panicking --- pkg/model/model.go | 123 +++++++++++++++++++++++++++++---------------- 1 file changed, 79 insertions(+), 44 
deletions(-) diff --git a/pkg/model/model.go b/pkg/model/model.go index c4529986..6fd142d8 100644 --- a/pkg/model/model.go +++ b/pkg/model/model.go @@ -53,7 +53,7 @@ type ParsedModel struct { func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { businessCriticality, err := types.ParseCriticality(modelInput.BusinessCriticality) if err != nil { - panic(errors.New("unknown 'business_criticality' value of application: " + modelInput.BusinessCriticality)) + return nil, errors.New("unknown 'business_criticality' value of application: " + modelInput.BusinessCriticality) } reportDate := time.Now() @@ -61,7 +61,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { var parseError error reportDate, parseError = time.Parse("2006-01-02", modelInput.Date) if parseError != nil { - panic(errors.New("unable to parse 'date' value of model file")) + return nil, errors.New("unable to parse 'date' value of model file") } } @@ -107,28 +107,35 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { usage, err := types.ParseUsage(asset.Usage) if err != nil { - panic(errors.New("unknown 'usage' value of data asset '" + title + "': " + asset.Usage)) + return nil, errors.New("unknown 'usage' value of data asset '" + title + "': " + asset.Usage) } quantity, err := types.ParseQuantity(asset.Quantity) if err != nil { - panic(errors.New("unknown 'quantity' value of data asset '" + title + "': " + asset.Quantity)) + return nil, errors.New("unknown 'quantity' value of data asset '" + title + "': " + asset.Quantity) } confidentiality, err := types.ParseConfidentiality(asset.Confidentiality) if err != nil { - panic(errors.New("unknown 'confidentiality' value of data asset '" + title + "': " + asset.Confidentiality)) + return nil, errors.New("unknown 'confidentiality' value of data asset '" + title + "': " + asset.Confidentiality) } integrity, err := types.ParseCriticality(asset.Integrity) if err != nil { - panic(errors.New("unknown 'integrity' 
value of data asset '" + title + "': " + asset.Integrity)) + return nil, errors.New("unknown 'integrity' value of data asset '" + title + "': " + asset.Integrity) } availability, err := types.ParseCriticality(asset.Availability) if err != nil { - panic(errors.New("unknown 'availability' value of data asset '" + title + "': " + asset.Availability)) + return nil, errors.New("unknown 'availability' value of data asset '" + title + "': " + asset.Availability) } - checkIdSyntax(id) + err = checkIdSyntax(id) + if err != nil { + return nil, err + } if _, exists := parsedModel.DataAssets[id]; exists { - panic(errors.New("duplicate id used: " + id)) + return nil, errors.New("duplicate id used: " + id) + } + tags, err := parsedModel.checkTags(lowerCaseAndTrim(asset.Tags), "data asset '"+title+"'") + if err != nil { + return nil, err } parsedModel.DataAssets[id] = DataAsset{ Id: id, @@ -136,7 +143,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { Usage: usage, Description: withDefault(fmt.Sprintf("%v", asset.Description), title), Quantity: quantity, - Tags: parsedModel.checkTags(lowerCaseAndTrim(asset.Tags), "data asset '"+title+"'"), + Tags: tags, Origin: fmt.Sprintf("%v", asset.Origin), Owner: fmt.Sprintf("%v", asset.Owner), Confidentiality: confidentiality, @@ -153,7 +160,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { usage, err := types.ParseUsage(asset.Usage) if err != nil { - panic(errors.New("unknown 'usage' value of technical asset '" + title + "': " + asset.Usage)) + return nil, errors.New("unknown 'usage' value of technical asset '" + title + "': " + asset.Usage) } var dataAssetsProcessed = make([]string, 0) @@ -220,7 +227,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { for _, dataFormatName := range asset.DataFormatsAccepted { dataFormat, err := types.ParseDataFormat(dataFormatName) if err != nil { - panic(errors.New("unknown 'data_formats_accepted' value of technical asset '" + title + 
"': " + fmt.Sprintf("%v", dataFormatName))) + return nil, errors.New("unknown 'data_formats_accepted' value of technical asset '" + title + "': " + fmt.Sprintf("%v", dataFormatName)) } dataFormatsAccepted = append(dataFormatsAccepted, dataFormat) } @@ -287,6 +294,10 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { if err != nil { return nil, err } + tags, err := parsedModel.checkTags(lowerCaseAndTrim(commLink.Tags), "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") + if err != nil { + return nil, err + } commLink := CommunicationLink{ Id: commLinkId, SourceId: id, @@ -297,7 +308,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { Authentication: authentication, Authorization: authorization, Usage: usage, - Tags: parsedModel.checkTags(lowerCaseAndTrim(commLink.Tags), "communication link '"+commLinkTitle+"' of technical asset '"+title+"'"), + Tags: tags, VPN: commLink.VPN, IpFiltered: commLink.IpFiltered, Readonly: commLink.Readonly, @@ -315,9 +326,16 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { } } - checkIdSyntax(id) + err = checkIdSyntax(id) + if err != nil { + return nil, err + } if _, exists := parsedModel.TechnicalAssets[id]; exists { - panic(errors.New("duplicate id used: " + id)) + return nil, errors.New("duplicate id used: " + id) + } + tags, err := parsedModel.checkTags(lowerCaseAndTrim(asset.Tags), "technical asset '"+title+"'") + if err != nil { + return nil, err } parsedModel.TechnicalAssets[id] = TechnicalAsset{ Id: id, @@ -327,7 +345,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { Type: technicalAssetType, Size: technicalAssetSize, Technology: technicalAssetTechnology, - Tags: parsedModel.checkTags(lowerCaseAndTrim(asset.Tags), "technical asset '"+title+"'"), + Tags: tags, Machine: technicalAssetMachine, Internet: asset.Internet, Encryption: encryption, @@ -364,10 +382,10 @@ func ParseModel(modelInput *input.ModelInput) 
(*ParsedModel, error) { technicalAssetsInside[i] = fmt.Sprintf("%v", parsedInsideAsset) _, found := parsedModel.TechnicalAssets[technicalAssetsInside[i]] if !found { - panic(errors.New("missing referenced technical asset " + technicalAssetsInside[i] + " at trust boundary '" + title + "'")) + return nil, errors.New("missing referenced technical asset " + technicalAssetsInside[i] + " at trust boundary '" + title + "'") } if checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries[technicalAssetsInside[i]] == true { - panic(errors.New("referenced technical asset " + technicalAssetsInside[i] + " at trust boundary '" + title + "' is modeled in multiple trust boundaries")) + return nil, errors.New("referenced technical asset " + technicalAssetsInside[i] + " at trust boundary '" + title + "' is modeled in multiple trust boundaries") } checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries[technicalAssetsInside[i]] = true //fmt.Println("asset "+technicalAssetsInside[i]+" at i="+strconv.Itoa(i)) @@ -385,21 +403,24 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { trustBoundaryType, err := types.ParseTrustBoundary(boundary.Type) if err != nil { - panic(errors.New("unknown 'type' of trust boundary '" + title + "': " + fmt.Sprintf("%v", boundary.Type))) + return nil, errors.New("unknown 'type' of trust boundary '" + title + "': " + fmt.Sprintf("%v", boundary.Type)) } - + tags, err := parsedModel.checkTags(lowerCaseAndTrim(boundary.Tags), "trust boundary '"+title+"'") trustBoundary := TrustBoundary{ Id: id, Title: title, //fmt.Sprintf("%v", boundary["title"]), Description: withDefault(fmt.Sprintf("%v", boundary.Description), title), Type: trustBoundaryType, - Tags: parsedModel.checkTags(lowerCaseAndTrim(boundary.Tags), "trust boundary '"+title+"'"), + Tags: tags, TechnicalAssetsInside: technicalAssetsInside, TrustBoundariesNested: trustBoundariesNested, } - checkIdSyntax(id) + err = checkIdSyntax(id) + if err != nil { + return nil, err + } if _, 
exists := parsedModel.TrustBoundaries[id]; exists { - panic(errors.New("duplicate id used: " + id)) + return nil, errors.New("duplicate id used: " + id) } parsedModel.TrustBoundaries[id] = trustBoundary for _, technicalAsset := range trustBoundary.TechnicalAssetsInside { @@ -430,17 +451,23 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { technicalAssetsRunning[i] = assetId } } - + tags, err := parsedModel.checkTags(lowerCaseAndTrim(inputRuntime.Tags), "shared runtime '"+title+"'") + if err != nil { + return nil, err + } sharedRuntime := SharedRuntime{ Id: id, Title: title, //fmt.Sprintf("%v", boundary["title"]), Description: withDefault(fmt.Sprintf("%v", inputRuntime.Description), title), - Tags: parsedModel.checkTags(inputRuntime.Tags, "shared runtime '"+title+"'"), + Tags: tags, TechnicalAssetsRunning: technicalAssetsRunning, } - checkIdSyntax(id) + err = checkIdSyntax(id) + if err != nil { + return nil, err + } if _, exists := parsedModel.SharedRuntimes[id]; exists { - panic(errors.New("duplicate id used: " + id)) + return nil, errors.New("duplicate id used: " + id) } parsedModel.SharedRuntimes[id] = sharedRuntime } @@ -452,11 +479,11 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { function, err := types.ParseRiskFunction(individualCategory.Function) if err != nil { - panic(errors.New("unknown 'function' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.Function))) + return nil, errors.New("unknown 'function' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.Function)) } stride, err := types.ParseSTRIDE(individualCategory.STRIDE) if err != nil { - panic(errors.New("unknown 'stride' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.STRIDE))) + return nil, errors.New("unknown 'stride' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.STRIDE)) } cat := 
RiskCategory{ @@ -477,9 +504,12 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { ModelFailurePossibleReason: individualCategory.ModelFailurePossibleReason, CWE: individualCategory.CWE, } - checkIdSyntax(id) + err = checkIdSyntax(id) + if err != nil { + return nil, err + } if _, exists := parsedModel.IndividualRiskCategories[id]; exists { - panic(errors.New("duplicate id used: " + id)) + return nil, errors.New("duplicate id used: " + id) } parsedModel.IndividualRiskCategories[id] = cat @@ -492,15 +522,15 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { var dataBreachTechnicalAssetIDs []string severity, err := types.ParseRiskSeverity(individualRiskInstance.Severity) if err != nil { - panic(errors.New("unknown 'severity' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.Severity))) + return nil, errors.New("unknown 'severity' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.Severity)) } exploitationLikelihood, err := types.ParseRiskExploitationLikelihood(individualRiskInstance.ExploitationLikelihood) if err != nil { - panic(errors.New("unknown 'exploitation_likelihood' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationLikelihood))) + return nil, errors.New("unknown 'exploitation_likelihood' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationLikelihood)) } exploitationImpact, err := types.ParseRiskExploitationImpact(individualRiskInstance.ExploitationImpact) if err != nil { - panic(errors.New("unknown 'exploitation_impact' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationImpact))) + return nil, errors.New("unknown 'exploitation_impact' value of individual risk instance '" + title + "': " + fmt.Sprintf("%v", individualRiskInstance.ExploitationImpact)) } if 
len(individualRiskInstance.MostRelevantDataAsset) > 0 { @@ -591,13 +621,13 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { var parseError error date, parseError = time.Parse("2006-01-02", riskTracking.Date) if parseError != nil { - panic(errors.New("unable to parse 'date' of risk tracking '" + syntheticRiskId + "': " + riskTracking.Date)) + return nil, errors.New("unable to parse 'date' of risk tracking '" + syntheticRiskId + "': " + riskTracking.Date) } } status, err := types.ParseRiskStatus(riskTracking.Status) if err != nil { - panic(errors.New("unknown 'status' value of risk tracking '" + syntheticRiskId + "': " + riskTracking.Status)) + return nil, errors.New("unknown 'status' value of risk tracking '" + syntheticRiskId + "': " + riskTracking.Status) } tracking := RiskTracking{ @@ -625,11 +655,12 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { return &parsedModel, nil } -func checkIdSyntax(id string) { +func checkIdSyntax(id string) error { validIdSyntax := regexp.MustCompile(`^[a-zA-Z0-9\-]+$`) if !validIdSyntax.MatchString(id) { - panic(errors.New("invalid id syntax used (only letters, numbers, and hyphen allowed): " + id)) + return errors.New("invalid id syntax used (only letters, numbers, and hyphen allowed): " + id) } + return nil } func createSyntheticId(categoryId string, @@ -680,23 +711,27 @@ func lowerCaseAndTrim(tags []string) []string { return tags } -func (parsedModel *ParsedModel) checkTags(tags []string, where string) []string { +func (parsedModel *ParsedModel) checkTags(tags []string, where string) ([]string, error) { var tagsUsed = make([]string, 0) if tags != nil { tagsUsed = make([]string, len(tags)) for i, parsedEntry := range tags { referencedTag := fmt.Sprintf("%v", parsedEntry) - parsedModel.checkTagExists(referencedTag, where) + err := parsedModel.checkTagExists(referencedTag, where) + if err != nil { + return nil, err + } tagsUsed[i] = referencedTag } } - return tagsUsed + return 
tagsUsed, nil } -func (parsedModel *ParsedModel) checkTagExists(referencedTag, where string) { +func (parsedModel *ParsedModel) checkTagExists(referencedTag, where string) error { if !contains(parsedModel.TagsAvailable, referencedTag) { - panic(errors.New("missing referenced tag in overall tag list at " + where + ": " + referencedTag)) + return errors.New("missing referenced tag in overall tag list at " + where + ": " + referencedTag) } + return nil } func createDataFlowId(sourceAssetId, title string) (string, error) { @@ -709,7 +744,7 @@ func createDataFlowId(sourceAssetId, title string) (string, error) { func (parsedModel *ParsedModel) checkDataAssetTargetExists(referencedAsset, where string) error { if _, ok := parsedModel.DataAssets[referencedAsset]; !ok { - panic(errors.New("missing referenced data asset target at " + where + ": " + referencedAsset)) + return errors.New("missing referenced data asset target at " + where + ": " + referencedAsset) } return nil } From f4b8aefaa447974b7a0d42d6090d780f7e66c968 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Tue, 26 Dec 2023 11:48:57 -0800 Subject: [PATCH 21/68] converting maps using RiskCategory as index to use RiskCategory.Id instead to allow marshaling the data to pass to plugins --- Dockerfile.local | 2 +- internal/threagile/context.go | 6 +- pkg/model/model.go | 2 +- pkg/model/risks.go | 167 ++++++++++++++++++++-------- pkg/report/report.go | 122 ++++++++++++------------- 5 files changed, 163 insertions(+), 136 deletions(-) diff --git a/Dockerfile.local b/Dockerfile.local index 39662a3c..b4dcee26 100644 --- a/Dockerfile.local +++ b/Dockerfile.local @@ -65,7 +65,7 @@ COPY --from=build --chown=1000:1000 /app/raa_calc /app/ COPY --from=build --chown=1000:1000 /app/raa_dummy /app/ COPY --from=build --chown=1000:1000 /app/risk_demo_rule /app/ COPY --from=build --chown=1000:1000 /app/LICENSE.txt /app/ -COPY --from=build --chown=1000:1000 /app/pkg/report/template/background.pdf /app/ +COPY --from=build 
--chown=1000:1000 /app/report/template/background.pdf /app/ COPY --from=build --chown=1000:1000 /app/support/openapi.yaml /app/ COPY --from=build --chown=1000:1000 /app/support/schema.json /app/ COPY --from=build --chown=1000:1000 /app/support/live-templates.txt /app/ diff --git a/internal/threagile/context.go b/internal/threagile/context.go index 874870ca..069a42bd 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -145,7 +145,7 @@ func (context *Context) checkRiskTracking() { // save also the risk-category-id and risk-status directly in the risk for better JSON marshalling for category := range context.parsedModel.GeneratedRisksByCategory { for i := range context.parsedModel.GeneratedRisksByCategory[category] { - context.parsedModel.GeneratedRisksByCategory[category][i].CategoryId = category.Id + context.parsedModel.GeneratedRisksByCategory[category][i].CategoryId = category context.parsedModel.GeneratedRisksByCategory[category][i].RiskStatus = context.parsedModel.GeneratedRisksByCategory[category][i].GetRiskTrackingStatusDefaultingUnchecked(&context.parsedModel) } } @@ -202,7 +202,7 @@ func (context *Context) applyRisk(rule model.CustomRiskRule, skippedRules *map[s generatedRisks := rule.GenerateRisks(&context.parsedModel) if generatedRisks != nil { if len(generatedRisks) > 0 { - context.parsedModel.GeneratedRisksByCategory[rule.Category()] = generatedRisks + context.parsedModel.GeneratedRisksByCategory[rule.Category().Id] = generatedRisks } } else { fmt.Printf("Failed to generate risks for %q\n", id) @@ -241,7 +241,7 @@ func (context *Context) applyRiskGeneration() { context.addToListOfSupportedTags(customRule.Tags) customRisks := customRule.GenerateRisks(&context.parsedModel) if len(customRisks) > 0 { - context.parsedModel.GeneratedRisksByCategory[customRule.Category] = customRisks + context.parsedModel.GeneratedRisksByCategory[customRule.Category.Id] = customRisks } if *context.verbose { diff --git a/pkg/model/model.go 
b/pkg/model/model.go index 6fd142d8..2617f192 100644 --- a/pkg/model/model.go +++ b/pkg/model/model.go @@ -46,7 +46,7 @@ type ParsedModel struct { // TODO: those are generated based on items above and needs to be private IncomingTechnicalCommunicationLinksMappedByTargetId map[string][]CommunicationLink DirectContainingTrustBoundaryMappedByTechnicalAssetId map[string]TrustBoundary - GeneratedRisksByCategory map[RiskCategory][]Risk + GeneratedRisksByCategory map[string][]Risk GeneratedRisksBySyntheticId map[string]Risk } diff --git a/pkg/model/risks.go b/pkg/model/risks.go index c8f1849b..407a2798 100644 --- a/pkg/model/risks.go +++ b/pkg/model/risks.go @@ -106,6 +106,22 @@ func (what Risk) IsRiskTracked(model *ParsedModel) bool { return false } +func GetRiskCategories(parsedModel *ParsedModel, categoryIDs []string) []RiskCategory { + categoryMap := make(map[string]RiskCategory) + for _, categoryId := range categoryIDs { + if len(parsedModel.GeneratedRisksByCategory[categoryId]) > 0 { + categoryMap[categoryId] = parsedModel.GeneratedRisksByCategory[categoryId][0].Category + } + } + + categories := make([]RiskCategory, 0) + for categoryId := range categoryMap { + categories = append(categories, categoryMap[categoryId]) + } + + return categories +} + func AllRisks(parsedModel *ParsedModel) []Risk { result := make([]Risk, 0) for _, risks := range parsedModel.GeneratedRisksByCategory { @@ -174,8 +190,8 @@ func (what ByRiskCategoryTitleSort) Less(i, j int) bool { func SortByRiskCategoryHighestContainingRiskSeveritySortStillAtRisk(parsedModel *ParsedModel, riskCategories []RiskCategory) { sort.Slice(riskCategories, func(i, j int) bool { - risksLeft := ReduceToOnlyStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[riskCategories[i]]) - risksRight := ReduceToOnlyStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[riskCategories[j]]) + risksLeft := ReduceToOnlyStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[riskCategories[i].Id]) + 
risksRight := ReduceToOnlyStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[riskCategories[j].Id]) highestLeft := HighestSeverityStillAtRisk(parsedModel, risksLeft) highestRight := HighestSeverityStillAtRisk(parsedModel, risksRight) if highestLeft == highestRight { @@ -248,21 +264,29 @@ type RiskRule interface { // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: func SortedRiskCategories(parsedModel *ParsedModel) []RiskCategory { + categoryMap := make(map[string]RiskCategory) + for categoryId, risks := range parsedModel.GeneratedRisksByCategory { + for _, risk := range risks { + categoryMap[categoryId] = risk.Category + } + } + categories := make([]RiskCategory, 0) - for k := range parsedModel.GeneratedRisksByCategory { - categories = append(categories, k) + for categoryId := range categoryMap { + categories = append(categories, categoryMap[categoryId]) } + SortByRiskCategoryHighestContainingRiskSeveritySortStillAtRisk(parsedModel, categories) return categories } func SortedRisksOfCategory(parsedModel *ParsedModel, category RiskCategory) []Risk { - risks := parsedModel.GeneratedRisksByCategory[category] + risks := parsedModel.GeneratedRisksByCategory[category.Id] SortByRiskSeverity(risks, parsedModel) return risks } -func CountRisks(risksByCategory map[RiskCategory][]Risk) int { +func CountRisks(risksByCategory map[string][]Risk) int { result := 0 for _, risks := range risksByCategory { result += len(risks) @@ -270,149 +294,149 @@ func CountRisks(risksByCategory map[RiskCategory][]Risk) int { return result } -func RisksOfOnlySTRIDESpoofing(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) +func RisksOfOnlySTRIDESpoofing(risksByCategory map[string][]Risk) map[string][]Risk { + result := make(map[string][]Risk) for _, risks := range risksByCategory { for _, risk := range risks { if risk.Category.STRIDE == types.Spoofing { - result[risk.Category] = 
append(result[risk.Category], risk) + result[risk.Category.Id] = append(result[risk.Category.Id], risk) } } } return result } -func RisksOfOnlySTRIDETampering(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) +func RisksOfOnlySTRIDETampering(risksByCategory map[string][]Risk) map[string][]Risk { + result := make(map[string][]Risk) for _, risks := range risksByCategory { for _, risk := range risks { if risk.Category.STRIDE == types.Tampering { - result[risk.Category] = append(result[risk.Category], risk) + result[risk.Category.Id] = append(result[risk.Category.Id], risk) } } } return result } -func RisksOfOnlySTRIDERepudiation(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) +func RisksOfOnlySTRIDERepudiation(risksByCategory map[string][]Risk) map[string][]Risk { + result := make(map[string][]Risk) for _, risks := range risksByCategory { for _, risk := range risks { if risk.Category.STRIDE == types.Repudiation { - result[risk.Category] = append(result[risk.Category], risk) + result[risk.Category.Id] = append(result[risk.Category.Id], risk) } } } return result } -func RisksOfOnlySTRIDEInformationDisclosure(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) +func RisksOfOnlySTRIDEInformationDisclosure(risksByCategory map[string][]Risk) map[string][]Risk { + result := make(map[string][]Risk) for _, risks := range risksByCategory { for _, risk := range risks { if risk.Category.STRIDE == types.InformationDisclosure { - result[risk.Category] = append(result[risk.Category], risk) + result[risk.Category.Id] = append(result[risk.Category.Id], risk) } } } return result } -func RisksOfOnlySTRIDEDenialOfService(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) +func RisksOfOnlySTRIDEDenialOfService(risksByCategory map[string][]Risk) 
map[string][]Risk { + result := make(map[string][]Risk) for _, risks := range risksByCategory { for _, risk := range risks { if risk.Category.STRIDE == types.DenialOfService { - result[risk.Category] = append(result[risk.Category], risk) + result[risk.Category.Id] = append(result[risk.Category.Id], risk) } } } return result } -func RisksOfOnlySTRIDEElevationOfPrivilege(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) +func RisksOfOnlySTRIDEElevationOfPrivilege(risksByCategory map[string][]Risk) map[string][]Risk { + result := make(map[string][]Risk) for _, risks := range risksByCategory { for _, risk := range risks { if risk.Category.STRIDE == types.ElevationOfPrivilege { - result[risk.Category] = append(result[risk.Category], risk) + result[risk.Category.Id] = append(result[risk.Category.Id], risk) } } } return result } -func RisksOfOnlyBusinessSide(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) +func RisksOfOnlyBusinessSide(risksByCategory map[string][]Risk) map[string][]Risk { + result := make(map[string][]Risk) for _, risks := range risksByCategory { for _, risk := range risks { if risk.Category.Function == types.BusinessSide { - result[risk.Category] = append(result[risk.Category], risk) + result[risk.Category.Id] = append(result[risk.Category.Id], risk) } } } return result } -func RisksOfOnlyArchitecture(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) +func RisksOfOnlyArchitecture(risksByCategory map[string][]Risk) map[string][]Risk { + result := make(map[string][]Risk) for _, risks := range risksByCategory { for _, risk := range risks { if risk.Category.Function == types.Architecture { - result[risk.Category] = append(result[risk.Category], risk) + result[risk.Category.Id] = append(result[risk.Category.Id], risk) } } } return result } -func RisksOfOnlyDevelopment(risksByCategory 
map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) +func RisksOfOnlyDevelopment(risksByCategory map[string][]Risk) map[string][]Risk { + result := make(map[string][]Risk) for _, risks := range risksByCategory { for _, risk := range risks { if risk.Category.Function == types.Development { - result[risk.Category] = append(result[risk.Category], risk) + result[risk.Category.Id] = append(result[risk.Category.Id], risk) } } } return result } -func RisksOfOnlyOperation(risksByCategory map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) +func RisksOfOnlyOperation(risksByCategory map[string][]Risk) map[string][]Risk { + result := make(map[string][]Risk) for _, risks := range risksByCategory { for _, risk := range risks { if risk.Category.Function == types.Operations { - result[risk.Category] = append(result[risk.Category], risk) + result[risk.Category.Id] = append(result[risk.Category.Id], risk) } } } return result } -func CategoriesOfOnlyRisksStillAtRisk(parsedModel *ParsedModel, risksByCategory map[RiskCategory][]Risk) []RiskCategory { - categories := make(map[RiskCategory]struct{}) // Go's trick of unique elements is a map +func CategoriesOfOnlyRisksStillAtRisk(parsedModel *ParsedModel, risksByCategory map[string][]Risk) []string { + categories := make(map[string]struct{}) // Go's trick of unique elements is a map for _, risks := range risksByCategory { for _, risk := range risks { if !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { continue } - categories[risk.Category] = struct{}{} + categories[risk.Category.Id] = struct{}{} } } // return as slice (of now unique values) return keysAsSlice(categories) } -func CategoriesOfOnlyCriticalRisks(parsedModel *ParsedModel, risksByCategory map[RiskCategory][]Risk, initialRisks bool) []RiskCategory { - categories := make(map[RiskCategory]struct{}) // Go's trick of unique elements is a map +func 
CategoriesOfOnlyCriticalRisks(parsedModel *ParsedModel, risksByCategory map[string][]Risk, initialRisks bool) []string { + categories := make(map[string]struct{}) // Go's trick of unique elements is a map for _, risks := range risksByCategory { for _, risk := range risks { if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { continue } if risk.Severity == types.CriticalSeverity { - categories[risk.Category] = struct{}{} + categories[risk.Category.Id] = struct{}{} } } } @@ -420,19 +444,19 @@ func CategoriesOfOnlyCriticalRisks(parsedModel *ParsedModel, risksByCategory map return keysAsSlice(categories) } -func CategoriesOfOnlyHighRisks(parsedModel *ParsedModel, risksByCategory map[RiskCategory][]Risk, initialRisks bool) []RiskCategory { - categories := make(map[RiskCategory]struct{}) // Go's trick of unique elements is a map +func CategoriesOfOnlyHighRisks(parsedModel *ParsedModel, risksByCategory map[string][]Risk, initialRisks bool) []string { + categories := make(map[string]struct{}) // Go's trick of unique elements is a map for _, risks := range risksByCategory { for _, risk := range risks { if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { continue } - highest := HighestSeverity(parsedModel.GeneratedRisksByCategory[risk.Category]) + highest := HighestSeverity(parsedModel.GeneratedRisksByCategory[risk.Category.Id]) if !initialRisks { - highest = HighestSeverityStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[risk.Category]) + highest = HighestSeverityStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[risk.Category.Id]) } if risk.Severity == types.HighSeverity && highest < types.CriticalSeverity { - categories[risk.Category] = struct{}{} + categories[risk.Category.Id] = struct{}{} } } } @@ -440,19 +464,19 @@ func CategoriesOfOnlyHighRisks(parsedModel *ParsedModel, risksByCategory map[Ris return keysAsSlice(categories) } -func 
CategoriesOfOnlyElevatedRisks(parsedModel *ParsedModel, risksByCategory map[RiskCategory][]Risk, initialRisks bool) []RiskCategory { - categories := make(map[RiskCategory]struct{}) // Go's trick of unique elements is a map +func CategoriesOfOnlyElevatedRisks(parsedModel *ParsedModel, risksByCategory map[string][]Risk, initialRisks bool) []string { + categories := make(map[string]struct{}) // Go's trick of unique elements is a map for _, risks := range risksByCategory { for _, risk := range risks { if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { continue } - highest := HighestSeverity(parsedModel.GeneratedRisksByCategory[risk.Category]) + highest := HighestSeverity(parsedModel.GeneratedRisksByCategory[risk.Category.Id]) if !initialRisks { - highest = HighestSeverityStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[risk.Category]) + highest = HighestSeverityStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[risk.Category.Id]) } if risk.Severity == types.ElevatedSeverity && highest < types.HighSeverity { - categories[risk.Category] = struct{}{} + categories[risk.Category.Id] = struct{}{} } } } @@ -460,19 +484,19 @@ func CategoriesOfOnlyElevatedRisks(parsedModel *ParsedModel, risksByCategory map return keysAsSlice(categories) } -func CategoriesOfOnlyMediumRisks(parsedModel *ParsedModel, risksByCategory map[RiskCategory][]Risk, initialRisks bool) []RiskCategory { - categories := make(map[RiskCategory]struct{}) // Go's trick of unique elements is a map +func CategoriesOfOnlyMediumRisks(parsedModel *ParsedModel, risksByCategory map[string][]Risk, initialRisks bool) []string { + categories := make(map[string]struct{}) // Go's trick of unique elements is a map for _, risks := range risksByCategory { for _, risk := range risks { if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { continue } - highest := 
HighestSeverity(parsedModel.GeneratedRisksByCategory[risk.Category]) + highest := HighestSeverity(parsedModel.GeneratedRisksByCategory[risk.Category.Id]) if !initialRisks { - highest = HighestSeverityStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[risk.Category]) + highest = HighestSeverityStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[risk.Category.Id]) } if risk.Severity == types.MediumSeverity && highest < types.ElevatedSeverity { - categories[risk.Category] = struct{}{} + categories[risk.Category.Id] = struct{}{} } } } @@ -480,19 +504,19 @@ func CategoriesOfOnlyMediumRisks(parsedModel *ParsedModel, risksByCategory map[R return keysAsSlice(categories) } -func CategoriesOfOnlyLowRisks(parsedModel *ParsedModel, risksByCategory map[RiskCategory][]Risk, initialRisks bool) []RiskCategory { - categories := make(map[RiskCategory]struct{}) // Go's trick of unique elements is a map +func CategoriesOfOnlyLowRisks(parsedModel *ParsedModel, risksByCategory map[string][]Risk, initialRisks bool) []string { + categories := make(map[string]struct{}) // Go's trick of unique elements is a map for _, risks := range risksByCategory { for _, risk := range risks { if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { continue } - highest := HighestSeverity(parsedModel.GeneratedRisksByCategory[risk.Category]) + highest := HighestSeverity(parsedModel.GeneratedRisksByCategory[risk.Category.Id]) if !initialRisks { - highest = HighestSeverityStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[risk.Category]) + highest = HighestSeverityStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[risk.Category.Id]) } if risk.Severity == types.LowSeverity && highest < types.MediumSeverity { - categories[risk.Category] = struct{}{} + categories[risk.Category.Id] = struct{}{} } } } @@ -510,8 +534,8 @@ func HighestSeverity(risks []Risk) types.RiskSeverity { return result } -func keysAsSlice(categories 
map[RiskCategory]struct{}) []RiskCategory { - result := make([]RiskCategory, 0, len(categories)) +func keysAsSlice(categories map[string]struct{}) []string { + result := make([]string, 0, len(categories)) for k := range categories { result = append(result, k) } @@ -626,17 +650,20 @@ func FilteredByOnlyLowRisks(parsedModel *ParsedModel) []Risk { return filteredRisks } -func FilterByModelFailures(risksByCat map[RiskCategory][]Risk) map[RiskCategory][]Risk { - result := make(map[RiskCategory][]Risk) - for riskCat, risks := range risksByCat { - if riskCat.ModelFailurePossibleReason { - result[riskCat] = risks +func FilterByModelFailures(risksByCat map[string][]Risk) map[string][]Risk { + result := make(map[string][]Risk) + for categoryId, risks := range risksByCat { + for _, risk := range risks { + if risk.Category.ModelFailurePossibleReason { + result[categoryId] = risks + } } } + return result } -func FlattenRiskSlice(risksByCat map[RiskCategory][]Risk) []Risk { +func FlattenRiskSlice(risksByCat map[string][]Risk) []Risk { result := make([]Risk, 0) for _, risks := range risksByCat { result = append(result, risks...) 
diff --git a/pkg/report/report.go b/pkg/report/report.go index 511360d6..57b01cbd 100644 --- a/pkg/report/report.go +++ b/pkg/report/report.go @@ -1333,15 +1333,15 @@ func renderImpactAnalysis(parsedModel *model.ParsedModel, initialRisks bool) { html.Write(5, "Risk finding paragraphs are clickable and link to the corresponding chapter.") pdf.SetFont("Helvetica", "", fontSizeBody) - addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), types.CriticalSeverity, false, initialRisks, true, false) - addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), types.HighSeverity, false, initialRisks, true, false) - addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), types.ElevatedSeverity, false, initialRisks, true, false) - addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), types.MediumSeverity, false, initialRisks, true, false) - addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, 
model.CategoriesOfOnlyLowRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), types.LowSeverity, false, initialRisks, true, false) pdf.SetDrawColor(0, 0, 0) @@ -1455,15 +1455,15 @@ func createModelFailures(parsedModel *model.ParsedModel) { pdfColorGray() html.Write(5, "

No potential model failures have been identified.") } else { - addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, modelFailuresByCategory, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, modelFailuresByCategory, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, modelFailuresByCategory, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, modelFailuresByCategory, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, modelFailuresByCategory, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, modelFailuresByCategory, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, modelFailuresByCategory, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, modelFailuresByCategory, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, modelFailuresByCategory, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, modelFailuresByCategory, true)), types.LowSeverity, true, true, false, true) } @@ -1622,7 +1622,7 @@ func addCategories(parsedModel *model.ParsedModel, riskCategories []model.RiskCa var strBuilder strings.Builder sort.Sort(model.ByRiskCategoryTitleSort(riskCategories)) for _, riskCategory := range riskCategories { - risksStr := parsedModel.GeneratedRisksByCategory[riskCategory] + risksStr := parsedModel.GeneratedRisksByCategory[riskCategory.Id] if !initialRisks { risksStr = 
model.ReduceToOnlyStillAtRisk(parsedModel, risksStr) } @@ -1770,15 +1770,15 @@ func createAssignmentByFunction(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksBusinessSideFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksBusinessSideFunction, true)), types.CriticalSeverity, true, true, false, false) - addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksBusinessSideFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksBusinessSideFunction, true)), types.HighSeverity, true, true, false, false) - addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksBusinessSideFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksBusinessSideFunction, true)), types.ElevatedSeverity, true, true, false, false) - addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksBusinessSideFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksBusinessSideFunction, true)), types.MediumSeverity, true, true, false, false) - addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksBusinessSideFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksBusinessSideFunction, true)), types.LowSeverity, true, true, false, false) } pdf.SetLeftMargin(oldLeft) @@ -1797,15 +1797,15 @@ func createAssignmentByFunction(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksArchitectureFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksArchitectureFunction, true)), types.CriticalSeverity, true, true, false, false) - addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksArchitectureFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksArchitectureFunction, true)), types.HighSeverity, true, true, false, false) - addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksArchitectureFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksArchitectureFunction, true)), types.ElevatedSeverity, true, true, false, false) - addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksArchitectureFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksArchitectureFunction, true)), types.MediumSeverity, true, true, false, false) - addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksArchitectureFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksArchitectureFunction, true)), types.LowSeverity, true, true, false, false) } pdf.SetLeftMargin(oldLeft) @@ -1824,15 +1824,15 @@ func createAssignmentByFunction(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksDevelopmentFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksDevelopmentFunction, true)), types.CriticalSeverity, true, true, false, false) - addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksDevelopmentFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksDevelopmentFunction, true)), types.HighSeverity, true, true, false, false) - addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksDevelopmentFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksDevelopmentFunction, true)), types.ElevatedSeverity, true, true, false, false) - addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksDevelopmentFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksDevelopmentFunction, true)), types.MediumSeverity, true, true, false, false) - addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksDevelopmentFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksDevelopmentFunction, true)), types.LowSeverity, true, true, false, false) } pdf.SetLeftMargin(oldLeft) @@ -1851,15 +1851,15 @@ func createAssignmentByFunction(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksOperationFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksOperationFunction, true)), types.CriticalSeverity, true, true, false, false) - addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksOperationFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksOperationFunction, true)), types.HighSeverity, true, true, false, false) - addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksOperationFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksOperationFunction, true)), types.ElevatedSeverity, true, true, false, false) - addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksOperationFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksOperationFunction, true)), types.MediumSeverity, true, true, false, false) - addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksOperationFunction, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksOperationFunction, true)), types.LowSeverity, true, true, false, false) } pdf.SetLeftMargin(oldLeft) @@ -1921,15 +1921,15 @@ func createSTRIDE(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDESpoofing, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDESpoofing, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDESpoofing, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDESpoofing, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDESpoofing, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDESpoofing, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDESpoofing, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDESpoofing, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDESpoofing, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDESpoofing, true)), types.LowSeverity, true, true, false, true) } pdf.SetLeftMargin(oldLeft) @@ -1948,15 +1948,15 @@ func createSTRIDE(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDETampering, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDETampering, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDETampering, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDETampering, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDETampering, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDETampering, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDETampering, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDETampering, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDETampering, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDETampering, true)), types.LowSeverity, true, true, false, true) } pdf.SetLeftMargin(oldLeft) @@ -1975,15 +1975,15 @@ func createSTRIDE(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDERepudiation, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDERepudiation, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDERepudiation, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDERepudiation, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDERepudiation, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDERepudiation, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDERepudiation, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDERepudiation, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDERepudiation, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDERepudiation, true)), types.LowSeverity, true, true, false, true) } pdf.SetLeftMargin(oldLeft) @@ -2002,15 +2002,15 @@ func createSTRIDE(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEInformationDisclosure, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEInformationDisclosure, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEInformationDisclosure, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEInformationDisclosure, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEInformationDisclosure, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), types.LowSeverity, true, true, false, true) } pdf.SetLeftMargin(oldLeft) @@ -2029,15 +2029,15 @@ func createSTRIDE(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEDenialOfService, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEDenialOfService, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEDenialOfService, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEDenialOfService, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEDenialOfService, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEDenialOfService, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEDenialOfService, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEDenialOfService, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEDenialOfService, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEDenialOfService, true)), types.LowSeverity, true, true, false, true) } pdf.SetLeftMargin(oldLeft) @@ -2056,15 +2056,15 @@ func createSTRIDE(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true), + addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), types.LowSeverity, true, true, false, true) } pdf.SetLeftMargin(oldLeft) From 5ce843a7a5601f9dcc221c4c812eb59bb770cbec Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Tue, 26 Dec 2023 11:51:01 -0800 Subject: [PATCH 22/68] committing mising file for last commit --- pkg/model/model.go | 51 +++++++++++++++++++++++----------------------- 1 file changed, 26 insertions(+), 25 deletions(-) diff --git a/pkg/model/model.go b/pkg/model/model.go 
index 2617f192..5f9007a8 100644 --- a/pkg/model/model.go +++ b/pkg/model/model.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package model import ( @@ -89,7 +90,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { parsedModel.CommunicationLinks = make(map[string]CommunicationLink) parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId = make(map[string][]CommunicationLink) parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId = make(map[string]TrustBoundary) - parsedModel.GeneratedRisksByCategory = make(map[RiskCategory][]Risk) + parsedModel.GeneratedRisksByCategory = make(map[string][]Risk) parsedModel.GeneratedRisksBySyntheticId = make(map[string]Risk) parsedModel.AllSupportedTags = make(map[string]bool) @@ -605,7 +606,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { DataBreachProbability: dataBreachProbability, DataBreachTechnicalAssetIDs: dataBreachTechnicalAssetIDs, } - parsedModel.GeneratedRisksByCategory[cat] = append(parsedModel.GeneratedRisksByCategory[cat], individualRiskInstance) + parsedModel.GeneratedRisksByCategory[cat.Id] = append(parsedModel.GeneratedRisksByCategory[cat.Id], individualRiskInstance) } } } @@ -809,9 +810,9 @@ func CalculateSeverity(likelihood types.RiskExploitationLikelihood, impact types return types.CriticalSeverity } -func (model *ParsedModel) InScopeTechnicalAssets() []TechnicalAsset { +func (parsedModel *ParsedModel) InScopeTechnicalAssets() []TechnicalAsset { result := make([]TechnicalAsset, 0) - for _, asset := range model.TechnicalAssets { + for _, asset := range parsedModel.TechnicalAssets { if !asset.OutOfScope { result = append(result, asset) } @@ -819,32 +820,32 @@ func (model *ParsedModel) InScopeTechnicalAssets() []TechnicalAsset { return result } -func (what *ParsedModel) SortedTechnicalAssetIDs() []string { +func (parsedModel *ParsedModel) SortedTechnicalAssetIDs() []string { res := make([]string, 0) - for id := range what.TechnicalAssets { + 
for id := range parsedModel.TechnicalAssets { res = append(res, id) } sort.Strings(res) return res } -func (what *ParsedModel) TagsActuallyUsed() []string { +func (parsedModel *ParsedModel) TagsActuallyUsed() []string { result := make([]string, 0) - for _, tag := range what.TagsAvailable { - if len(what.TechnicalAssetsTaggedWithAny(tag)) > 0 || - len(what.CommunicationLinksTaggedWithAny(tag)) > 0 || - len(what.DataAssetsTaggedWithAny(tag)) > 0 || - len(what.TrustBoundariesTaggedWithAny(tag)) > 0 || - len(what.SharedRuntimesTaggedWithAny(tag)) > 0 { + for _, tag := range parsedModel.TagsAvailable { + if len(parsedModel.TechnicalAssetsTaggedWithAny(tag)) > 0 || + len(parsedModel.CommunicationLinksTaggedWithAny(tag)) > 0 || + len(parsedModel.DataAssetsTaggedWithAny(tag)) > 0 || + len(parsedModel.TrustBoundariesTaggedWithAny(tag)) > 0 || + len(parsedModel.SharedRuntimesTaggedWithAny(tag)) > 0 { result = append(result, tag) } } return result } -func (what *ParsedModel) TechnicalAssetsTaggedWithAny(tags ...string) []TechnicalAsset { +func (parsedModel *ParsedModel) TechnicalAssetsTaggedWithAny(tags ...string) []TechnicalAsset { result := make([]TechnicalAsset, 0) - for _, candidate := range what.TechnicalAssets { + for _, candidate := range parsedModel.TechnicalAssets { if candidate.IsTaggedWithAny(tags...) { result = append(result, candidate) } @@ -852,9 +853,9 @@ func (what *ParsedModel) TechnicalAssetsTaggedWithAny(tags ...string) []Technica return result } -func (what *ParsedModel) CommunicationLinksTaggedWithAny(tags ...string) []CommunicationLink { +func (parsedModel *ParsedModel) CommunicationLinksTaggedWithAny(tags ...string) []CommunicationLink { result := make([]CommunicationLink, 0) - for _, asset := range what.TechnicalAssets { + for _, asset := range parsedModel.TechnicalAssets { for _, candidate := range asset.CommunicationLinks { if candidate.IsTaggedWithAny(tags...) 
{ result = append(result, candidate) @@ -864,9 +865,9 @@ func (what *ParsedModel) CommunicationLinksTaggedWithAny(tags ...string) []Commu return result } -func (what *ParsedModel) DataAssetsTaggedWithAny(tags ...string) []DataAsset { +func (parsedModel *ParsedModel) DataAssetsTaggedWithAny(tags ...string) []DataAsset { result := make([]DataAsset, 0) - for _, candidate := range what.DataAssets { + for _, candidate := range parsedModel.DataAssets { if candidate.IsTaggedWithAny(tags...) { result = append(result, candidate) } @@ -874,9 +875,9 @@ func (what *ParsedModel) DataAssetsTaggedWithAny(tags ...string) []DataAsset { return result } -func (what *ParsedModel) TrustBoundariesTaggedWithAny(tags ...string) []TrustBoundary { +func (parsedModel *ParsedModel) TrustBoundariesTaggedWithAny(tags ...string) []TrustBoundary { result := make([]TrustBoundary, 0) - for _, candidate := range what.TrustBoundaries { + for _, candidate := range parsedModel.TrustBoundaries { if candidate.IsTaggedWithAny(tags...) { result = append(result, candidate) } @@ -884,9 +885,9 @@ func (what *ParsedModel) TrustBoundariesTaggedWithAny(tags ...string) []TrustBou return result } -func (what *ParsedModel) SharedRuntimesTaggedWithAny(tags ...string) []SharedRuntime { +func (parsedModel *ParsedModel) SharedRuntimesTaggedWithAny(tags ...string) []SharedRuntime { result := make([]SharedRuntime, 0) - for _, candidate := range what.SharedRuntimes { + for _, candidate := range parsedModel.SharedRuntimes { if candidate.IsTaggedWithAny(tags...) 
{ result = append(result, candidate) } @@ -894,9 +895,9 @@ func (what *ParsedModel) SharedRuntimesTaggedWithAny(tags ...string) []SharedRun return result } -func (what *ParsedModel) OutOfScopeTechnicalAssets() []TechnicalAsset { +func (parsedModel *ParsedModel) OutOfScopeTechnicalAssets() []TechnicalAsset { assets := make([]TechnicalAsset, 0) - for _, asset := range what.TechnicalAssets { + for _, asset := range parsedModel.TechnicalAssets { if asset.OutOfScope { assets = append(assets, asset) } From 01186a9a3d2c2b5797eeeba81757706c0dabade6 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Tue, 26 Dec 2023 14:27:53 -0800 Subject: [PATCH 23/68] fixed marshaling/unmarshaling parsed model struct from/to json fixed running raa plugin --- all.json | 1 + cmd/threagile/main_test.go | 27 +- pkg/common/consts.go | 2 +- pkg/input/input.go | 8 +- pkg/model/model.go | 59 +- pkg/run/runner.go | 2 + pkg/security/types/authentication.go | 17 + pkg/security/types/authorization.go | 17 + pkg/security/types/confidentiality.go | 17 + pkg/security/types/criticality.go | 17 + pkg/security/types/data_breach_probability.go | 12 + pkg/security/types/data_format.go | 17 + pkg/security/types/encryption_style.go | 17 + pkg/security/types/protocol.go | 17 + pkg/security/types/quantity.go | 17 + ..._impact.go => risk_exploitation_impact.go} | 12 + ...ood.go => risk_exploitation_likelihood.go} | 12 + ...o => risk_exploitation_likelihood_test.go} | 0 pkg/security/types/risk_function.go | 12 + pkg/security/types/risk_severity.go | 12 + pkg/security/types/risk_status.go | 12 + pkg/security/types/stride.go | 12 + pkg/security/types/technical_asset_machine.go | 17 + pkg/security/types/technical_asset_size.go | 17 + .../types/technical_asset_technology.go | 17 + pkg/security/types/technical_asset_type.go | 17 + pkg/security/types/trust_boundary.go | 17 + pkg/security/types/usage.go | 17 + test/all.json | 2608 +++++++++++++++++ 29 files changed, 2994 insertions(+), 35 deletions(-) create mode 100644 
all.json rename pkg/security/types/{risk_explotation_impact.go => risk_exploitation_impact.go} (81%) rename pkg/security/types/{risk_explotation_likelihood.go => risk_exploitation_likelihood.go} (81%) rename pkg/security/types/{risk_explotation_likelihood_test.go => risk_exploitation_likelihood_test.go} (100%) create mode 100644 test/all.json diff --git a/all.json b/all.json new file mode 100644 index 00000000..3b2844b2 --- /dev/null +++ b/all.json @@ -0,0 +1 @@ +[{"category":"something-strange","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eExample Individual Risk\u003c/b\u003e at \u003cb\u003eDatabase\u003c/b\u003e","synthetic_id":"something-strange@sql-database","most_relevant_data_asset":"","most_relevant_technical_asset":"sql-database","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["sql-database"]},{"category":"something-strange","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eExample Individual Risk\u003c/b\u003e at \u003cb\u003eContract Filesystem\u003c/b\u003e","synthetic_id":"something-strange@contract-fileserver","most_relevant_data_asset":"","most_relevant_technical_asset":"contract-fileserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":null},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eApplication Deployment\u003c/b\u003e at technical asset \u003cb\u003eJenkins 
Buildserver\u003c/b\u003e","synthetic_id":"incomplete-model@jenkins-buildserver\u003eapplication-deployment@jenkins-buildserver","most_relevant_data_asset":"","most_relevant_technical_asset":"jenkins-buildserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"jenkins-buildserver\u003eapplication-deployment","data_breach_probability":"improbable","data_breach_technical_assets":["jenkins-buildserver"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eAuth Credential Check Traffic\u003c/b\u003e at technical asset \u003cb\u003eApache Webserver\u003c/b\u003e","synthetic_id":"incomplete-model@apache-webserver\u003eauth-credential-check-traffic@apache-webserver","most_relevant_data_asset":"","most_relevant_technical_asset":"apache-webserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"apache-webserver\u003eauth-credential-check-traffic","data_breach_probability":"improbable","data_breach_technical_assets":["apache-webserver"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eAuth Traffic\u003c/b\u003e at technical asset \u003cb\u003eMarketing 
CMS\u003c/b\u003e","synthetic_id":"incomplete-model@marketing-cms\u003eauth-traffic@marketing-cms","most_relevant_data_asset":"","most_relevant_technical_asset":"marketing-cms","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"marketing-cms\u003eauth-traffic","data_breach_probability":"improbable","data_breach_technical_assets":["marketing-cms"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eCMS Content Traffic\u003c/b\u003e at technical asset \u003cb\u003eLoad Balancer\u003c/b\u003e","synthetic_id":"incomplete-model@load-balancer\u003ecms-content-traffic@load-balancer","most_relevant_data_asset":"","most_relevant_technical_asset":"load-balancer","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"load-balancer\u003ecms-content-traffic","data_breach_probability":"improbable","data_breach_technical_assets":["load-balancer"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eCMS Updates\u003c/b\u003e at technical asset \u003cb\u003eJenkins 
Buildserver\u003c/b\u003e","synthetic_id":"incomplete-model@jenkins-buildserver\u003ecms-updates@jenkins-buildserver","most_relevant_data_asset":"","most_relevant_technical_asset":"jenkins-buildserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"jenkins-buildserver\u003ecms-updates","data_breach_probability":"improbable","data_breach_technical_assets":["jenkins-buildserver"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eDatabase Traffic\u003c/b\u003e at technical asset \u003cb\u003eBackoffice ERP System\u003c/b\u003e","synthetic_id":"incomplete-model@erp-system\u003edatabase-traffic@erp-system","most_relevant_data_asset":"","most_relevant_technical_asset":"erp-system","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"erp-system\u003edatabase-traffic","data_breach_probability":"improbable","data_breach_technical_assets":["erp-system"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eERP System Traffic\u003c/b\u003e at technical asset \u003cb\u003eApache 
Webserver\u003c/b\u003e","synthetic_id":"incomplete-model@apache-webserver\u003eerp-system-traffic@apache-webserver","most_relevant_data_asset":"","most_relevant_technical_asset":"apache-webserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"apache-webserver\u003eerp-system-traffic","data_breach_probability":"improbable","data_breach_technical_assets":["apache-webserver"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eGit Repo Code Read Access\u003c/b\u003e at technical asset \u003cb\u003eJenkins Buildserver\u003c/b\u003e","synthetic_id":"incomplete-model@jenkins-buildserver\u003egit-repo-code-read-access@jenkins-buildserver","most_relevant_data_asset":"","most_relevant_technical_asset":"jenkins-buildserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"jenkins-buildserver\u003egit-repo-code-read-access","data_breach_probability":"improbable","data_breach_technical_assets":["jenkins-buildserver"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eLDAP Credential Check Traffic\u003c/b\u003e at technical asset \u003cb\u003eIdentity 
Provider\u003c/b\u003e","synthetic_id":"incomplete-model@identity-provider\u003eldap-credential-check-traffic@identity-provider","most_relevant_data_asset":"","most_relevant_technical_asset":"identity-provider","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"identity-provider\u003eldap-credential-check-traffic","data_breach_probability":"improbable","data_breach_technical_assets":["identity-provider"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eNFS Filesystem Access\u003c/b\u003e at technical asset \u003cb\u003eBackoffice ERP System\u003c/b\u003e","synthetic_id":"incomplete-model@erp-system\u003enfs-filesystem-access@erp-system","most_relevant_data_asset":"","most_relevant_technical_asset":"erp-system","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"erp-system\u003enfs-filesystem-access","data_breach_probability":"improbable","data_breach_technical_assets":["erp-system"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eWeb Application Traffic\u003c/b\u003e at technical asset \u003cb\u003eLoad 
Balancer\u003c/b\u003e","synthetic_id":"incomplete-model@load-balancer\u003eweb-application-traffic@load-balancer","most_relevant_data_asset":"","most_relevant_technical_asset":"load-balancer","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"load-balancer\u003eweb-application-traffic","data_breach_probability":"improbable","data_breach_technical_assets":["load-balancer"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eApache Webserver\u003c/b\u003e","synthetic_id":"incomplete-model@apache-webserver","most_relevant_data_asset":"","most_relevant_technical_asset":"apache-webserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["apache-webserver"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eBackoffice ERP System\u003c/b\u003e","synthetic_id":"incomplete-model@erp-system","most_relevant_data_asset":"","most_relevant_technical_asset":"erp-system","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["erp-system"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eContract 
Fileserver\u003c/b\u003e","synthetic_id":"incomplete-model@contract-fileserver","most_relevant_data_asset":"","most_relevant_technical_asset":"contract-fileserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["contract-fileserver"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eCustomer Contract Database\u003c/b\u003e","synthetic_id":"incomplete-model@sql-database","most_relevant_data_asset":"","most_relevant_technical_asset":"sql-database","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["sql-database"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eGit Repository\u003c/b\u003e","synthetic_id":"incomplete-model@git-repo","most_relevant_data_asset":"","most_relevant_technical_asset":"git-repo","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["git-repo"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eIdentity 
Provider\u003c/b\u003e","synthetic_id":"incomplete-model@identity-provider","most_relevant_data_asset":"","most_relevant_technical_asset":"identity-provider","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["identity-provider"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eJenkins Buildserver\u003c/b\u003e","synthetic_id":"incomplete-model@jenkins-buildserver","most_relevant_data_asset":"","most_relevant_technical_asset":"jenkins-buildserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["jenkins-buildserver"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eLDAP Auth Server\u003c/b\u003e","synthetic_id":"incomplete-model@ldap-auth-server","most_relevant_data_asset":"","most_relevant_technical_asset":"ldap-auth-server","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["ldap-auth-server"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eLoad 
Balancer\u003c/b\u003e","synthetic_id":"incomplete-model@load-balancer","most_relevant_data_asset":"","most_relevant_technical_asset":"load-balancer","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["load-balancer"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eMarketing CMS\u003c/b\u003e","synthetic_id":"incomplete-model@marketing-cms","most_relevant_data_asset":"","most_relevant_technical_asset":"marketing-cms","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["marketing-cms"]},{"category":"missing-authentication","risk_status":"unchecked","severity":"elevated","exploitation_likelihood":"likely","exploitation_impact":"medium","title":"\u003cb\u003eMissing Authentication\u003c/b\u003e covering communication link \u003cb\u003eGit Repo Code Read Access\u003c/b\u003e from \u003cb\u003eJenkins Buildserver\u003c/b\u003e to \u003cb\u003eGit Repository\u003c/b\u003e","synthetic_id":"missing-authentication@jenkins-buildserver\u003egit-repo-code-read-access@jenkins-buildserver@git-repo","most_relevant_data_asset":"","most_relevant_technical_asset":"git-repo","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"jenkins-buildserver\u003egit-repo-code-read-access","data_breach_probability":"possible","data_breach_technical_assets":["git-repo"]},{"category":"missing-authentication","risk_status":"unchecked","severity":"elevated","exploitation_likelihood":"likely","exploitation_impact":"medium","title":"\u003cb\u003eMissing Authentication\u003c/b\u003e covering communication link 
\u003cb\u003eGit-Repo Code Write Access\u003c/b\u003e from \u003cb\u003eExternal Development Client\u003c/b\u003e to \u003cb\u003eGit Repository\u003c/b\u003e","synthetic_id":"missing-authentication@external-dev-client\u003egit-repo-code-write-access@external-dev-client@git-repo","most_relevant_data_asset":"","most_relevant_technical_asset":"git-repo","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"external-dev-client\u003egit-repo-code-write-access","data_breach_probability":"possible","data_breach_technical_assets":["git-repo"]},{"category":"missing-authentication","risk_status":"unchecked","severity":"elevated","exploitation_likelihood":"likely","exploitation_impact":"medium","title":"\u003cb\u003eMissing Authentication\u003c/b\u003e covering communication link \u003cb\u003eGit-Repo Web-UI Access\u003c/b\u003e from \u003cb\u003eExternal Development Client\u003c/b\u003e to \u003cb\u003eGit Repository\u003c/b\u003e","synthetic_id":"missing-authentication@external-dev-client\u003egit-repo-web-ui-access@external-dev-client@git-repo","most_relevant_data_asset":"","most_relevant_technical_asset":"git-repo","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"external-dev-client\u003egit-repo-web-ui-access","data_breach_probability":"possible","data_breach_technical_assets":["git-repo"]},{"category":"missing-authentication","risk_status":"unchecked","severity":"elevated","exploitation_likelihood":"likely","exploitation_impact":"medium","title":"\u003cb\u003eMissing Authentication\u003c/b\u003e covering communication link \u003cb\u003eJenkins Web-UI Access\u003c/b\u003e from \u003cb\u003eExternal Development Client\u003c/b\u003e to \u003cb\u003eJenkins 
Buildserver\u003c/b\u003e","synthetic_id":"missing-authentication@external-dev-client\u003ejenkins-web-ui-access@external-dev-client@jenkins-buildserver","most_relevant_data_asset":"","most_relevant_technical_asset":"jenkins-buildserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"external-dev-client\u003ejenkins-web-ui-access","data_breach_probability":"possible","data_breach_technical_assets":["jenkins-buildserver"]},{"category":"missing-build-infrastructure","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eMissing Build Infrastructure\u003c/b\u003e in the threat model (referencing asset \u003cb\u003eMarketing CMS\u003c/b\u003e as an example)","synthetic_id":"missing-build-infrastructure@marketing-cms","most_relevant_data_asset":"","most_relevant_technical_asset":"marketing-cms","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":[]},{"category":"missing-cloud-hardening","risk_status":"unchecked","severity":"medium","exploitation_likelihood":"unlikely","exploitation_impact":"medium","title":"\u003cb\u003eMissing Cloud Hardening (AWS)\u003c/b\u003e risk at \u003cb\u003eApplication Network\u003c/b\u003e: \u003cu\u003eCIS Benchmark for 
AWS\u003c/u\u003e","synthetic_id":"missing-cloud-hardening@application-network","most_relevant_data_asset":"","most_relevant_technical_asset":"","most_relevant_trust_boundary":"application-network","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"probable","data_breach_technical_assets":["load-balancer","apache-webserver","marketing-cms","erp-system","contract-fileserver","sql-database","identity-provider","ldap-auth-server"]},{"category":"missing-cloud-hardening","risk_status":"unchecked","severity":"medium","exploitation_likelihood":"unlikely","exploitation_impact":"medium","title":"\u003cb\u003eMissing Cloud Hardening (EC2)\u003c/b\u003e risk at \u003cb\u003eApache Webserver\u003c/b\u003e: \u003cu\u003eCIS Benchmark for Amazon Linux\u003c/u\u003e","synthetic_id":"missing-cloud-hardening@apache-webserver","most_relevant_data_asset":"","most_relevant_technical_asset":"apache-webserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"probable","data_breach_technical_assets":["apache-webserver"]},{"category":"missing-cloud-hardening","risk_status":"unchecked","severity":"medium","exploitation_likelihood":"unlikely","exploitation_impact":"medium","title":"\u003cb\u003eMissing Cloud Hardening (S3)\u003c/b\u003e risk at \u003cb\u003eContract Fileserver\u003c/b\u003e: \u003cu\u003eSecurity Best Practices for AWS S3\u003c/u\u003e","synthetic_id":"missing-cloud-hardening@contract-fileserver","most_relevant_data_asset":"","most_relevant_technical_asset":"contract-fileserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"probable","data_breach_technical_assets":["contract-fileserver"]},{"category":"missing-hardening","risk_status":"unchecked","severity":"medium","exploitation_likelihood":"likely","exploitation_impact":"low","title":"\u003cb\u003eMissing 
Hardening\u003c/b\u003e risk at \u003cb\u003eApache Webserver\u003c/b\u003e","synthetic_id":"missing-hardening@apache-webserver","most_relevant_data_asset":"","most_relevant_technical_asset":"apache-webserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["apache-webserver"]},{"category":"missing-hardening","risk_status":"unchecked","severity":"medium","exploitation_likelihood":"likely","exploitation_impact":"low","title":"\u003cb\u003eMissing Hardening\u003c/b\u003e risk at \u003cb\u003eBackoffice ERP System\u003c/b\u003e","synthetic_id":"missing-hardening@erp-system","most_relevant_data_asset":"","most_relevant_technical_asset":"erp-system","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["erp-system"]},{"category":"missing-hardening","risk_status":"unchecked","severity":"medium","exploitation_likelihood":"likely","exploitation_impact":"low","title":"\u003cb\u003eMissing Hardening\u003c/b\u003e risk at \u003cb\u003eJenkins Buildserver\u003c/b\u003e","synthetic_id":"missing-hardening@jenkins-buildserver","most_relevant_data_asset":"","most_relevant_technical_asset":"jenkins-buildserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["jenkins-buildserver"]},{"category":"missing-hardening","risk_status":"unchecked","severity":"medium","exploitation_likelihood":"likely","exploitation_impact":"low","title":"\u003cb\u003eMissing Hardening\u003c/b\u003e risk at \u003cb\u003eLoad 
Balancer\u003c/b\u003e","synthetic_id":"missing-hardening@load-balancer","most_relevant_data_asset":"","most_relevant_technical_asset":"load-balancer","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["load-balancer"]},{"category":"missing-vault","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eMissing Vault (Secret Storage)\u003c/b\u003e in the threat model (referencing asset \u003cb\u003e\u003c/b\u003e as an example)","synthetic_id":"missing-vault@","most_relevant_data_asset":"","most_relevant_technical_asset":"","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":[]},{"category":"mixed-targets-on-shared-runtime","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eMixed Targets on Shared Runtime\u003c/b\u003e named \u003cb\u003eWebApp and Backoffice Virtualization\u003c/b\u003e might enable attackers moving from one less valuable target to a more valuable one","synthetic_id":"mixed-targets-on-shared-runtime@webapp-virtualization","most_relevant_data_asset":"","most_relevant_technical_asset":"","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"webapp-virtualization","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["apache-webserver","marketing-cms","erp-system","contract-fileserver","sql-database"]}] \ No newline at end of file diff --git a/cmd/threagile/main_test.go b/cmd/threagile/main_test.go index dd1b235f..2ce7a9ac 100644 --- a/cmd/threagile/main_test.go +++ b/cmd/threagile/main_test.go @@ -4,13 +4,16 @@ import ( "encoding/json" "github.com/akedrou/textdiff" 
"github.com/threagile/threagile/pkg/input" + "github.com/threagile/threagile/pkg/model" + "log" + "os" "path/filepath" "sort" "strings" "testing" ) -func TestParseModel(t *testing.T) { +func TestParseModelYaml(t *testing.T) { flatModelFile := filepath.Join("..", "..", "test", "all.yaml") flatModel := *new(input.ModelInput).Defaults() flatLoadError := flatModel.Load(flatModelFile) @@ -51,3 +54,25 @@ func TestParseModel(t *testing.T) { return } } + +func TestParseModelJson(t *testing.T) { + modelFile := filepath.Join("..", "..", "test", "all.json") + modelJson, readError := os.ReadFile(modelFile) + if readError != nil { + t.Error("Unable to read model file: ", readError) + return + } + + var modelStruct model.ParsedModel + unmarshalError := json.Unmarshal(modelJson, &modelStruct) + if unmarshalError != nil { + log.Fatal("Unable to parse model json: ", unmarshalError) + return + } + + _, marshalError := json.Marshal(&modelStruct) + if marshalError != nil { + log.Fatal("Unable to print model json: ", marshalError) + return + } +} diff --git a/pkg/common/consts.go b/pkg/common/consts.go index 5a45f897..43c06591 100644 --- a/pkg/common/consts.go +++ b/pkg/common/consts.go @@ -9,7 +9,7 @@ const ( ReportFilename = "report.pdf" ExcelRisksFilename = "risks.xlsx" ExcelTagsFilename = "tags.xlsx" - JsonRisksFilename = "risks.json" + JsonRisksFilename = "all.json" JsonTechnicalAssetsFilename = "technical-assets.json" JsonStatsFilename = "stats.json" DataFlowDiagramFilenameDOT = "data-flow-diagram.gv" diff --git a/pkg/input/input.go b/pkg/input/input.go index 9e7d251c..27c598f1 100644 --- a/pkg/input/input.go +++ b/pkg/input/input.go @@ -142,7 +142,7 @@ type InputRiskTracking struct { } type ModelInput struct { // TODO: Eventually remove this and directly use ParsedModelRoot? But then the error messages for model errors are not quite as good anymore... 
- Includes []string `yaml:"includes" json:"includes"` + Includes []string `yaml:"includes,omitempty" json:"includes,omitempty"` ThreagileVersion string `yaml:"threagile_version" json:"threagile_version"` Title string `yaml:"title" json:"title"` Author Author `yaml:"author" json:"author"` @@ -154,7 +154,7 @@ type ModelInput struct { // TODO: Eventually remove this and directly use Parsed Questions map[string]string `yaml:"questions" json:"questions"` AbuseCases map[string]string `yaml:"abuse_cases" json:"abuse_cases"` SecurityRequirements map[string]string `yaml:"security_requirements" json:"security_requirements"` - TagsAvailable []string `yaml:"tags_available" json:"tags_available"` + TagsAvailable []string `yaml:"tags_available,omitempty" json:"tags_available,omitempty"` DataAssets map[string]InputDataAsset `yaml:"data_assets" json:"data_assets"` TechnicalAssets map[string]InputTechnicalAsset `yaml:"technical_assets" json:"technical_assets"` TrustBoundaries map[string]InputTrustBoundary `yaml:"trust_boundaries" json:"trust_boundaries"` @@ -166,8 +166,8 @@ type ModelInput struct { // TODO: Eventually remove this and directly use Parsed DiagramTweakEdgeLayout string `yaml:"diagram_tweak_edge_layout" json:"diagram_tweak_edge_layout"` DiagramTweakSuppressEdgeLabels bool `yaml:"diagram_tweak_suppress_edge_labels" json:"diagram_tweak_suppress_edge_labels"` DiagramTweakLayoutLeftToRight bool `yaml:"diagram_tweak_layout_left_to_right" json:"diagram_tweak_layout_left_to_right"` - DiagramTweakInvisibleConnectionsBetweenAssets []string `yaml:"diagram_tweak_invisible_connections_between_assets" json:"diagram_tweak_invisible_connections_between_assets"` - DiagramTweakSameRankAssets []string `yaml:"diagram_tweak_same_rank_assets" json:"diagram_tweak_same_rank_assets"` + DiagramTweakInvisibleConnectionsBetweenAssets []string `yaml:"diagram_tweak_invisible_connections_between_assets,omitempty" json:"diagram_tweak_invisible_connections_between_assets,omitempty"` + 
DiagramTweakSameRankAssets []string `yaml:"diagram_tweak_same_rank_assets,omitempty" json:"diagram_tweak_same_rank_assets,omitempty"` } func (model *ModelInput) Defaults() *ModelInput { diff --git a/pkg/model/model.go b/pkg/model/model.go index 5f9007a8..3f9626cb 100644 --- a/pkg/model/model.go +++ b/pkg/model/model.go @@ -18,37 +18,38 @@ import ( ) type ParsedModel struct { - Author input.Author - Title string - Date time.Time - ManagementSummaryComment string - BusinessOverview input.Overview - TechnicalOverview input.Overview - BusinessCriticality types.Criticality - SecurityRequirements map[string]string - Questions map[string]string - AbuseCases map[string]string - TagsAvailable []string - DataAssets map[string]DataAsset - TechnicalAssets map[string]TechnicalAsset - TrustBoundaries map[string]TrustBoundary - SharedRuntimes map[string]SharedRuntime - IndividualRiskCategories map[string]RiskCategory - RiskTracking map[string]RiskTracking - CommunicationLinks map[string]CommunicationLink - AllSupportedTags map[string]bool - DiagramTweakNodesep, DiagramTweakRanksep int - DiagramTweakEdgeLayout string - DiagramTweakSuppressEdgeLabels bool - DiagramTweakLayoutLeftToRight bool - DiagramTweakInvisibleConnectionsBetweenAssets []string - DiagramTweakSameRankAssets []string + Author input.Author `json:"author" yaml:"author"` + Title string `json:"title,omitempty" yaml:"title"` + Date time.Time `json:"date" yaml:"date"` + ManagementSummaryComment string `json:"management_summary_comment,omitempty" yaml:"management_summary_comment"` + BusinessOverview input.Overview `json:"business_overview" yaml:"business_overview"` + TechnicalOverview input.Overview `json:"technical_overview" yaml:"technical_overview"` + BusinessCriticality types.Criticality `json:"business_criticality,omitempty" yaml:"business_criticality"` + SecurityRequirements map[string]string `json:"security_requirements,omitempty" yaml:"security_requirements"` + Questions map[string]string 
`json:"questions,omitempty" yaml:"questions"` + AbuseCases map[string]string `json:"abuse_cases,omitempty" yaml:"abuse_cases"` + TagsAvailable []string `json:"tags_available,omitempty" yaml:"tags_available"` + DataAssets map[string]DataAsset `json:"data_assets,omitempty" yaml:"data_assets"` + TechnicalAssets map[string]TechnicalAsset `json:"technical_assets,omitempty" yaml:"technical_assets"` + TrustBoundaries map[string]TrustBoundary `json:"trust_boundaries,omitempty" yaml:"trust_boundaries"` + SharedRuntimes map[string]SharedRuntime `json:"shared_runtimes,omitempty" yaml:"shared_runtimes"` + IndividualRiskCategories map[string]RiskCategory `json:"individual_risk_categories,omitempty" yaml:"individual_risk_categories"` + RiskTracking map[string]RiskTracking `json:"risk_tracking,omitempty" yaml:"risk_tracking"` + CommunicationLinks map[string]CommunicationLink `json:"communication_links,omitempty" yaml:"communication_links"` + AllSupportedTags map[string]bool `json:"all_supported_tags,omitempty" yaml:"all_supported_tags"` + DiagramTweakNodesep int `json:"diagram_tweak_nodesep,omitempty" yaml:"diagram_tweak_nodesep"` + DiagramTweakRanksep int `json:"diagram_tweak_ranksep,omitempty" yaml:"diagram_tweak_ranksep"` + DiagramTweakEdgeLayout string `json:"diagram_tweak_edge_layout,omitempty" yaml:"diagram_tweak_edge_layout"` + DiagramTweakSuppressEdgeLabels bool `json:"diagram_tweak_suppress_edge_labels,omitempty" yaml:"diagram_tweak_suppress_edge_labels"` + DiagramTweakLayoutLeftToRight bool `json:"diagram_tweak_layout_left_to_right,omitempty" yaml:"diagram_tweak_layout_left_to_right"` + DiagramTweakInvisibleConnectionsBetweenAssets []string `json:"diagram_tweak_invisible_connections_between_assets,omitempty" yaml:"diagram_tweak_invisible_connections_between_assets"` + DiagramTweakSameRankAssets []string `json:"diagram_tweak_same_rank_assets,omitempty" yaml:"diagram_tweak_same_rank_assets"` // TODO: those are generated based on items above and needs to be private - 
IncomingTechnicalCommunicationLinksMappedByTargetId map[string][]CommunicationLink - DirectContainingTrustBoundaryMappedByTechnicalAssetId map[string]TrustBoundary - GeneratedRisksByCategory map[string][]Risk - GeneratedRisksBySyntheticId map[string]Risk + IncomingTechnicalCommunicationLinksMappedByTargetId map[string][]CommunicationLink `json:"incoming_technical_communication_links_mapped_by_target_id,omitempty" yaml:"incoming_technical_communication_links_mapped_by_target_id"` + DirectContainingTrustBoundaryMappedByTechnicalAssetId map[string]TrustBoundary `json:"direct_containing_trust_boundary_mapped_by_technical_asset_id,omitempty" yaml:"direct_containing_trust_boundary_mapped_by_technical_asset_id"` + GeneratedRisksByCategory map[string][]Risk `json:"generated_risks_by_category,omitempty" yaml:"generated_risks_by_category"` + GeneratedRisksBySyntheticId map[string]Risk `json:"generated_risks_by_synthetic_id,omitempty" yaml:"generated_risks_by_synthetic_id"` } func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { diff --git a/pkg/run/runner.go b/pkg/run/runner.go index f2a4c152..08f1cb4b 100644 --- a/pkg/run/runner.go +++ b/pkg/run/runner.go @@ -71,6 +71,8 @@ func (p *Runner) Run(in any, out any, parameters ...string) error { return inError } + _ = os.WriteFile("../../all.json", inData, 0644) + _, writeError := stdin.Write(inData) if writeError != nil { return writeError diff --git a/pkg/security/types/authentication.go b/pkg/security/types/authentication.go index 16659518..6595347f 100644 --- a/pkg/security/types/authentication.go +++ b/pkg/security/types/authentication.go @@ -4,7 +4,9 @@ Copyright © 2023 NAME HERE package types import ( + "encoding/json" "errors" + "fmt" "strings" ) @@ -61,3 +63,18 @@ func (what Authentication) String() string { func (what Authentication) Explain() string { return AuthenticationTypeDescription[what].Description } + +func (what Authentication) MarshalJSON() ([]byte, error) { + return 
json.Marshal(what.String()) +} + +func (what *Authentication) UnmarshalJSON([]byte) error { + for index, description := range AuthenticationTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = Authentication(index) + return nil + } + } + + return fmt.Errorf("unknown authentication value %q", int(*what)) +} diff --git a/pkg/security/types/authorization.go b/pkg/security/types/authorization.go index 660b0a7a..80c595b3 100644 --- a/pkg/security/types/authorization.go +++ b/pkg/security/types/authorization.go @@ -4,7 +4,9 @@ Copyright © 2023 NAME HERE package types import ( + "encoding/json" "errors" + "fmt" "strings" ) @@ -48,3 +50,18 @@ func (what Authorization) String() string { func (what Authorization) Explain() string { return AuthorizationTypeDescription[what].Description } + +func (what Authorization) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} + +func (what *Authorization) UnmarshalJSON([]byte) error { + for index, description := range AuthorizationTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = Authorization(index) + return nil + } + } + + return fmt.Errorf("unknown authorization value %q", int(*what)) +} diff --git a/pkg/security/types/confidentiality.go b/pkg/security/types/confidentiality.go index 2230c0b8..bc3ed6ef 100644 --- a/pkg/security/types/confidentiality.go +++ b/pkg/security/types/confidentiality.go @@ -4,7 +4,9 @@ Copyright © 2023 NAME HERE package types import ( + "encoding/json" "errors" + "fmt" "strings" ) @@ -88,3 +90,18 @@ func (what Confidentiality) RatingStringInScale() string { result += " in scale of 5)" return result } + +func (what Confidentiality) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} + +func (what *Confidentiality) UnmarshalJSON([]byte) error { + for index, description := range ConfidentialityTypeDescription { + if strings.ToLower(what.String()) == 
strings.ToLower(description.Name) { + *what = Confidentiality(index) + return nil + } + } + + return fmt.Errorf("unknown confidentiality value %q", int(*what)) +} diff --git a/pkg/security/types/criticality.go b/pkg/security/types/criticality.go index 778ac665..7b8ef539 100644 --- a/pkg/security/types/criticality.go +++ b/pkg/security/types/criticality.go @@ -4,7 +4,9 @@ Copyright © 2023 NAME HERE package types import ( + "encoding/json" "errors" + "fmt" "strings" ) @@ -88,3 +90,18 @@ func (what Criticality) RatingStringInScale() string { result += " in scale of 5)" return result } + +func (what Criticality) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} + +func (what *Criticality) UnmarshalJSON([]byte) error { + for index, description := range CriticalityTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = Criticality(index) + return nil + } + } + + return fmt.Errorf("unknown criticality value %q", int(*what)) +} diff --git a/pkg/security/types/data_breach_probability.go b/pkg/security/types/data_breach_probability.go index b66902af..7adb6c50 100644 --- a/pkg/security/types/data_breach_probability.go +++ b/pkg/security/types/data_breach_probability.go @@ -6,6 +6,7 @@ package types import ( "encoding/json" "errors" + "fmt" "strings" ) @@ -61,3 +62,14 @@ func (what DataBreachProbability) Title() string { func (what DataBreachProbability) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } + +func (what *DataBreachProbability) UnmarshalJSON([]byte) error { + for index, description := range DataBreachProbabilityTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = DataBreachProbability(index) + return nil + } + } + + return fmt.Errorf("unknown data breach probability value %q", int(*what)) +} diff --git a/pkg/security/types/data_format.go b/pkg/security/types/data_format.go index 2ef473eb..ad2571f6 100644 --- 
a/pkg/security/types/data_format.go +++ b/pkg/security/types/data_format.go @@ -4,7 +4,9 @@ Copyright © 2023 NAME HERE package types import ( + "encoding/json" "errors" + "fmt" "strings" ) @@ -64,6 +66,21 @@ func (what DataFormat) Description() string { "File input/uploads", "CSV tabular data"}[what] } +func (what DataFormat) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} + +func (what *DataFormat) UnmarshalJSON([]byte) error { + for index, description := range DataFormatTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = DataFormat(index) + return nil + } + } + + return fmt.Errorf("unknown data format value %q", int(*what)) +} + type ByDataFormatAcceptedSort []DataFormat func (what ByDataFormatAcceptedSort) Len() int { return len(what) } diff --git a/pkg/security/types/encryption_style.go b/pkg/security/types/encryption_style.go index 87957684..01ce909a 100644 --- a/pkg/security/types/encryption_style.go +++ b/pkg/security/types/encryption_style.go @@ -4,7 +4,9 @@ Copyright © 2023 NAME HERE package types import ( + "encoding/json" "errors" + "fmt" "strings" ) @@ -58,3 +60,18 @@ func (what EncryptionStyle) Explain() string { func (what EncryptionStyle) Title() string { return [...]string{"None", "Transparent", "Data with Symmetric Shared Key", "Data with Asymmetric Shared Key", "Data with End-User Individual Key"}[what] } + +func (what EncryptionStyle) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} + +func (what *EncryptionStyle) UnmarshalJSON([]byte) error { + for index, description := range EncryptionStyleTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = EncryptionStyle(index) + return nil + } + } + + return fmt.Errorf("unknown encryption style value %q", int(*what)) +} diff --git a/pkg/security/types/protocol.go b/pkg/security/types/protocol.go index 245c5a1a..89e08f33 100644 --- 
a/pkg/security/types/protocol.go +++ b/pkg/security/types/protocol.go @@ -4,7 +4,9 @@ Copyright © 2023 NAME HERE package types import ( + "encoding/json" "errors" + "fmt" "strings" ) @@ -205,3 +207,18 @@ func (what Protocol) IsPotentialDatabaseAccessProtocol(includingLaxDatabaseProto func (what Protocol) IsPotentialWebAccessProtocol() bool { return what == HTTP || what == HTTPS || what == WS || what == WSS || what == ReverseProxyWebProtocol || what == ReverseProxyWebProtocolEncrypted } + +func (what Protocol) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} + +func (what *Protocol) UnmarshalJSON([]byte) error { + for index, description := range ProtocolTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = Protocol(index) + return nil + } + } + + return fmt.Errorf("unknown protocol value %q", int(*what)) +} diff --git a/pkg/security/types/quantity.go b/pkg/security/types/quantity.go index 86563db7..d410440c 100644 --- a/pkg/security/types/quantity.go +++ b/pkg/security/types/quantity.go @@ -4,7 +4,9 @@ Copyright © 2023 NAME HERE package types import ( + "encoding/json" "errors" + "fmt" "strings" ) @@ -60,3 +62,18 @@ func (what Quantity) QuantityFactor() float64 { // fibonacci starting at 1 return [...]float64{1, 2, 3, 5}[what] } + +func (what Quantity) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} + +func (what *Quantity) UnmarshalJSON([]byte) error { + for index, description := range QuantityTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = Quantity(index) + return nil + } + } + + return fmt.Errorf("unknown quantity value %q", int(*what)) +} diff --git a/pkg/security/types/risk_explotation_impact.go b/pkg/security/types/risk_exploitation_impact.go similarity index 81% rename from pkg/security/types/risk_explotation_impact.go rename to pkg/security/types/risk_exploitation_impact.go index 6ac93cfb..3c01038b 100644 --- 
a/pkg/security/types/risk_explotation_impact.go +++ b/pkg/security/types/risk_exploitation_impact.go @@ -6,6 +6,7 @@ package types import ( "encoding/json" "errors" + "fmt" "strings" ) @@ -67,3 +68,14 @@ func (what RiskExploitationImpact) Weight() int { func (what RiskExploitationImpact) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } + +func (what *RiskExploitationImpact) UnmarshalJSON([]byte) error { + for index, description := range RiskExploitationImpactTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = RiskExploitationImpact(index) + return nil + } + } + + return fmt.Errorf("unknown risk exploitation impact value %q", int(*what)) +} diff --git a/pkg/security/types/risk_explotation_likelihood.go b/pkg/security/types/risk_exploitation_likelihood.go similarity index 81% rename from pkg/security/types/risk_explotation_likelihood.go rename to pkg/security/types/risk_exploitation_likelihood.go index b56c5d71..2b0ef292 100644 --- a/pkg/security/types/risk_explotation_likelihood.go +++ b/pkg/security/types/risk_exploitation_likelihood.go @@ -6,6 +6,7 @@ package types import ( "encoding/json" "errors" + "fmt" "strings" ) @@ -67,3 +68,14 @@ func (what RiskExploitationLikelihood) Weight() int { func (what RiskExploitationLikelihood) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } + +func (what *RiskExploitationLikelihood) UnmarshalJSON([]byte) error { + for index, description := range RiskExploitationLikelihoodTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = RiskExploitationLikelihood(index) + return nil + } + } + + return fmt.Errorf("unknown risk exploration likelihood value %q", int(*what)) +} diff --git a/pkg/security/types/risk_explotation_likelihood_test.go b/pkg/security/types/risk_exploitation_likelihood_test.go similarity index 100% rename from pkg/security/types/risk_explotation_likelihood_test.go rename to 
pkg/security/types/risk_exploitation_likelihood_test.go diff --git a/pkg/security/types/risk_function.go b/pkg/security/types/risk_function.go index f7a7cdb4..853d1d02 100644 --- a/pkg/security/types/risk_function.go +++ b/pkg/security/types/risk_function.go @@ -6,6 +6,7 @@ package types import ( "encoding/json" "errors" + "fmt" "strings" ) @@ -60,3 +61,14 @@ func (what RiskFunction) Title() string { func (what RiskFunction) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } + +func (what *RiskFunction) UnmarshalJSON([]byte) error { + for index, description := range RiskFunctionTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = RiskFunction(index) + return nil + } + } + + return fmt.Errorf("unknown risk function %q", int(*what)) +} diff --git a/pkg/security/types/risk_severity.go b/pkg/security/types/risk_severity.go index b978eae9..6535d865 100644 --- a/pkg/security/types/risk_severity.go +++ b/pkg/security/types/risk_severity.go @@ -6,6 +6,7 @@ package types import ( "encoding/json" "errors" + "fmt" "strings" ) @@ -66,3 +67,14 @@ func (what RiskSeverity) Title() string { func (what RiskSeverity) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } + +func (what *RiskSeverity) UnmarshalJSON([]byte) error { + for index, description := range RiskSeverityTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = RiskSeverity(index) + return nil + } + } + + return fmt.Errorf("unknown risk severity value %q", int(*what)) +} diff --git a/pkg/security/types/risk_status.go b/pkg/security/types/risk_status.go index c47d4274..803f1043 100644 --- a/pkg/security/types/risk_status.go +++ b/pkg/security/types/risk_status.go @@ -6,6 +6,7 @@ package types import ( "encoding/json" "errors" + "fmt" "strings" ) @@ -70,3 +71,14 @@ func (what RiskStatus) MarshalJSON() ([]byte, error) { func (what RiskStatus) IsStillAtRisk() bool { return what == Unchecked 
|| what == InDiscussion || what == Accepted || what == InProgress } + +func (what *RiskStatus) UnmarshalJSON([]byte) error { + for index, description := range RiskStatusTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = RiskStatus(index) + return nil + } + } + + return fmt.Errorf("unknown risk status value %q", int(*what)) +} diff --git a/pkg/security/types/stride.go b/pkg/security/types/stride.go index d0345882..a068d00d 100644 --- a/pkg/security/types/stride.go +++ b/pkg/security/types/stride.go @@ -6,6 +6,7 @@ package types import ( "encoding/json" "errors" + "fmt" "strings" ) @@ -66,3 +67,14 @@ func (what STRIDE) Title() string { func (what STRIDE) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } + +func (what *STRIDE) UnmarshalJSON([]byte) error { + for index, description := range StrideTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = STRIDE(index) + return nil + } + } + + return fmt.Errorf("unknown STRIDE value %q", int(*what)) +} diff --git a/pkg/security/types/technical_asset_machine.go b/pkg/security/types/technical_asset_machine.go index 7308e0df..ccd1ba04 100644 --- a/pkg/security/types/technical_asset_machine.go +++ b/pkg/security/types/technical_asset_machine.go @@ -4,7 +4,9 @@ Copyright © 2023 NAME HERE package types import ( + "encoding/json" "errors" + "fmt" "strings" ) @@ -50,3 +52,18 @@ func (what TechnicalAssetMachine) String() string { func (what TechnicalAssetMachine) Explain() string { return TechnicalAssetMachineTypeDescription[what].Description } + +func (what TechnicalAssetMachine) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} + +func (what *TechnicalAssetMachine) UnmarshalJSON([]byte) error { + for index, description := range TechnicalAssetMachineTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = TechnicalAssetMachine(index) + return nil + 
} + } + + return fmt.Errorf("unknown technical asset machine value %q", int(*what)) +} diff --git a/pkg/security/types/technical_asset_size.go b/pkg/security/types/technical_asset_size.go index f4aabec7..7effc122 100644 --- a/pkg/security/types/technical_asset_size.go +++ b/pkg/security/types/technical_asset_size.go @@ -4,7 +4,9 @@ Copyright © 2023 NAME HERE package types import ( + "encoding/json" "errors" + "fmt" "strings" ) @@ -51,3 +53,18 @@ func ParseTechnicalAssetSize(value string) (technicalAssetSize TechnicalAssetSiz } return technicalAssetSize, errors.New("Unable to parse into type: " + value) } + +func (what TechnicalAssetSize) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} + +func (what *TechnicalAssetSize) UnmarshalJSON([]byte) error { + for index, description := range TechnicalAssetSizeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = TechnicalAssetSize(index) + return nil + } + } + + return fmt.Errorf("unknown technical asset size value %q", int(*what)) +} diff --git a/pkg/security/types/technical_asset_technology.go b/pkg/security/types/technical_asset_technology.go index 5b2269da..8daaba10 100644 --- a/pkg/security/types/technical_asset_technology.go +++ b/pkg/security/types/technical_asset_technology.go @@ -4,7 +4,9 @@ Copyright © 2023 NAME HERE package types import ( + "encoding/json" "errors" + "fmt" "strings" ) @@ -289,3 +291,18 @@ func (what TechnicalAssetTechnology) IsTrafficForwarding() bool { func (what TechnicalAssetTechnology) IsEmbeddedComponent() bool { return what == Library } + +func (what TechnicalAssetTechnology) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} + +func (what *TechnicalAssetTechnology) UnmarshalJSON([]byte) error { + for index, description := range TechnicalAssetTechnologyTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = TechnicalAssetTechnology(index) + return nil + } + 
} + + return fmt.Errorf("unknown technical asset technology value %q", int(*what)) +} diff --git a/pkg/security/types/technical_asset_type.go b/pkg/security/types/technical_asset_type.go index ffabb7cc..5a105a42 100644 --- a/pkg/security/types/technical_asset_type.go +++ b/pkg/security/types/technical_asset_type.go @@ -4,7 +4,9 @@ Copyright © 2023 NAME HERE package types import ( + "encoding/json" "errors" + "fmt" "strings" ) @@ -48,3 +50,18 @@ func ParseTechnicalAssetType(value string) (technicalAssetType TechnicalAssetTyp } return technicalAssetType, errors.New("Unable to parse into type: " + value) } + +func (what TechnicalAssetType) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} + +func (what *TechnicalAssetType) UnmarshalJSON([]byte) error { + for index, description := range TechnicalAssetTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = TechnicalAssetType(index) + return nil + } + } + + return fmt.Errorf("unknown technical asset type value %q", int(*what)) +} diff --git a/pkg/security/types/trust_boundary.go b/pkg/security/types/trust_boundary.go index 8ba6217b..d08da6e8 100644 --- a/pkg/security/types/trust_boundary.go +++ b/pkg/security/types/trust_boundary.go @@ -4,7 +4,9 @@ Copyright © 2023 NAME HERE package types import ( + "encoding/json" "errors" + "fmt" "strings" ) @@ -69,3 +71,18 @@ func (what TrustBoundaryType) IsNetworkBoundary() bool { func (what TrustBoundaryType) IsWithinCloud() bool { return what == NetworkCloudProvider || what == NetworkCloudSecurityGroup } + +func (what TrustBoundaryType) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} + +func (what *TrustBoundaryType) UnmarshalJSON([]byte) error { + for index, description := range TrustBoundaryTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = TrustBoundaryType(index) + return nil + } + } + + return fmt.Errorf("unknown trust boundary type 
value %q", int(*what)) +} diff --git a/pkg/security/types/usage.go b/pkg/security/types/usage.go index c9771082..86653106 100644 --- a/pkg/security/types/usage.go +++ b/pkg/security/types/usage.go @@ -4,7 +4,9 @@ Copyright © 2023 NAME HERE package types import ( + "encoding/json" "errors" + "fmt" "strings" ) @@ -50,3 +52,18 @@ func (what Usage) Explain() string { func (what Usage) Title() string { return [...]string{"Business", "DevOps"}[what] } + +func (what Usage) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} + +func (what *Usage) UnmarshalJSON([]byte) error { + for index, description := range UsageTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = Usage(index) + return nil + } + } + + return fmt.Errorf("unknown usage type value %q", int(*what)) +} diff --git a/test/all.json b/test/all.json new file mode 100644 index 00000000..f1b92a8c --- /dev/null +++ b/test/all.json @@ -0,0 +1,2608 @@ +{ + "author": { + "name": "John Doe", + "homepage": "www.example.com" + }, + "title": "Some Example Application", + "date": "2020-07-01T00:00:00Z", + "management_summary_comment": "Just some \u003cb\u003emore\u003c/b\u003e custom summary possible here...\n", + "business_overview": { + "description": "Some more \u003ci\u003edemo text\u003c/i\u003e here and even images...", + "images": null + }, + "technical_overview": { + "description": "Some more \u003ci\u003edemo text\u003c/i\u003e here and even images...", + "images": null + }, + "business_criticality": 2, + "security_requirements": { + "EU-DSGVO": "Mandatory EU-Datenschutzgrundverordnung", + "Input Validation": "Strict input validation is required to reduce the overall attack surface.", + "Securing Administrative Access": "Administrative access must be secured with strong encryption and multi-factor authentication." 
+ }, + "questions": { + "How are the admin clients managed/protected against compromise?": "", + "How are the build pipeline components managed/protected against compromise?": "Managed by XYZ\n", + "How are the development clients managed/protected against compromise?": "Managed by XYZ\n" + }, + "abuse_cases": { + "CPU-Cycle Theft": "As a hacker I want to steal CPU cycles in order to transform them into money via installed crypto currency miners.\n", + "Contract Filesystem Compromise": "As a hacker I want to access the filesystem storing the contract PDFs in order to steal/modify contract data.\n", + "Cross-Site Scripting Attacks": "As a hacker I want to execute Cross-Site Scripting (XSS) and similar attacks in order to takeover victim sessions and cause reputational damage.\n", + "Database Compromise": "As a hacker I want to access the database backend of the ERP-System in order to steal/modify sensitive business data.\n", + "Denial-of-Service": "As a hacker I want to disturb the functionality of the backend system in order to cause indirect financial damage via unusable features.\n", + "Denial-of-Service of ERP/DB Functionality": "As a hacker I want to disturb the functionality of the ERP system and/or it's database in order to cause indirect financial damage via unusable internal ERP features (not related to customer portal).\n", + "Denial-of-Service of Enduser Functionality": "As a hacker I want to disturb the functionality of the enduser parts of the application in order to cause direct financial damage (lower sales).\n", + "ERP-System Compromise": "As a hacker I want to access the ERP-System in order to steal/modify sensitive business data.\n", + "Identity Theft": "As a hacker I want to steal identity data in order to reuse credentials and/or keys on other targets of the same company or outside.\n", + "PII Theft": "As a hacker I want to steal PII (Personally Identifiable Information) data in order to blackmail the company and/or damage their repudiation by 
publishing them.\n", + "Ransomware": "As a hacker I want to encrypt the storage and file systems in order to demand ransom.\n" + }, + "tags_available": [ + "linux", + "apache", + "mysql", + "jboss", + "keycloak", + "jenkins", + "git", + "oracle", + "some-erp", + "vmware", + "aws", + "aws:ec2", + "aws:s3" + ], + "data_assets": { + "build-job-config": { + "id": "build-job-config", + "title": "Build Job Config", + "description": "Data for customizing of the build job system.", + "usage": 1, + "tags": [], + "origin": "Company XYZ", + "owner": "Company XYZ", + "quantity": 0, + "confidentiality": 2, + "integrity": 3, + "availability": 1, + "justification_cia_rating": "Data for customizing of the build job system.\n" + }, + "client-application-code": { + "id": "client-application-code", + "title": "Client Application Code", + "description": "Angular and other client-side code delivered by the application.", + "usage": 1, + "tags": [], + "origin": "Company ABC", + "owner": "Company ABC", + "quantity": 0, + "confidentiality": 0, + "integrity": 3, + "availability": 2, + "justification_cia_rating": "The integrity of the public data is critical to avoid reputational damage and the availability is important on the long-term scale (but not critical) to keep the growth rate of the customer base steady.\n" + }, + "contract-summaries": { + "id": "contract-summaries", + "title": "Customer Contract Summaries", + "description": "Customer Contract Summaries", + "usage": 0, + "tags": [], + "origin": "Customer", + "owner": "Company XYZ", + "quantity": 0, + "confidentiality": 2, + "integrity": 1, + "availability": 1, + "justification_cia_rating": "Just some summaries.\n" + }, + "customer-accounts": { + "id": "customer-accounts", + "title": "Customer Accounts", + "description": "Customer Accounts (including transient credentials when entered for checking them)", + "usage": 0, + "tags": [], + "origin": "Customer", + "owner": "Company XYZ", + "quantity": 2, + "confidentiality": 4, + 
"integrity": 3, + "availability": 3, + "justification_cia_rating": "Customer account data for using the portal are required to be available to offer the portal functionality.\n" + }, + "customer-contracts": { + "id": "customer-contracts", + "title": "Customer Contracts", + "description": "Customer Contracts (PDF)", + "usage": 0, + "tags": [], + "origin": "Customer", + "owner": "Company XYZ", + "quantity": 2, + "confidentiality": 3, + "integrity": 3, + "availability": 1, + "justification_cia_rating": "Contract data might contain financial data as well as personally identifiable information (PII). The integrity and availability of contract data is required for clearing payment disputes.\n" + }, + "customer-operational-data": { + "id": "customer-operational-data", + "title": "Customer Operational Data", + "description": "Customer Operational Data", + "usage": 0, + "tags": [], + "origin": "Customer", + "owner": "Company XYZ", + "quantity": 2, + "confidentiality": 3, + "integrity": 3, + "availability": 3, + "justification_cia_rating": "Customer operational data for using the portal are required to be available to offer the portal functionality and are used in the backend transactions.\n" + }, + "db-dumps": { + "id": "db-dumps", + "title": "Database Customizing and Dumps", + "description": "Data for customizing of the DB system, which might include full database dumps.", + "usage": 1, + "tags": [ + "oracle" + ], + "origin": "Company XYZ", + "owner": "Company XYZ", + "quantity": 0, + "confidentiality": 4, + "integrity": 3, + "availability": 3, + "justification_cia_rating": "Data for customizing of the DB system, which might include full database dumps.\n" + }, + "erp-customizing": { + "id": "erp-customizing", + "title": "ERP Customizing Data", + "description": "Data for customizing of the ERP system.", + "usage": 1, + "tags": [], + "origin": "Company XYZ", + "owner": "Company XYZ", + "quantity": 0, + "confidentiality": 3, + "integrity": 3, + "availability": 3, + 
"justification_cia_rating": "Data for customizing of the ERP system.\n" + }, + "erp-logs": { + "id": "erp-logs", + "title": "ERP Logs", + "description": "Logs generated by the ERP system.", + "usage": 1, + "tags": [], + "origin": "Company XYZ", + "owner": "Company XYZ", + "quantity": 2, + "confidentiality": 2, + "integrity": 0, + "availability": 0, + "justification_cia_rating": "Logs should not contain PII data and are only required for failure analysis, i.e. they are not considered as hard transactional logs.\n" + }, + "internal-business-data": { + "id": "internal-business-data", + "title": "Some Internal Business Data", + "description": "Internal business data of the ERP system used unrelated to the customer-facing processes.", + "usage": 0, + "tags": [], + "origin": "Company XYZ", + "owner": "Company XYZ", + "quantity": 1, + "confidentiality": 4, + "integrity": 3, + "availability": 3, + "justification_cia_rating": "Data used and/or generated during unrelated other usecases of the ERP-system (when used also by Company XYZ for internal non-customer-portal-related stuff).\n" + }, + "marketing-material": { + "id": "marketing-material", + "title": "Marketing Material", + "description": "Website and marketing data to inform potential customers and generate new leads.", + "usage": 1, + "tags": [], + "origin": "Company ABC", + "owner": "Company ABC", + "quantity": 0, + "confidentiality": 0, + "integrity": 2, + "availability": 2, + "justification_cia_rating": "The integrity of the public data is critical to avoid reputational damage and the availability is important on the long-term scale (but not critical) to keep the growth rate of the customer base steady.\n" + }, + "server-application-code": { + "id": "server-application-code", + "title": "Server Application Code", + "description": "API and other server-side code of the application.", + "usage": 1, + "tags": [], + "origin": "Company ABC", + "owner": "Company ABC", + "quantity": 0, + "confidentiality": 1, + 
"integrity": 4, + "availability": 2, + "justification_cia_rating": "The integrity of the API code is critical to avoid reputational damage and the availability is important on the long-term scale (but not critical) to keep the growth rate of the customer base steady.\n" + } + }, + "technical_assets": { + "apache-webserver": { + "Id": "apache-webserver", + "Title": "Apache Webserver", + "Description": "Apache Webserver hosting the API code and client-side code", + "Usage": 0, + "Type": 1, + "Size": 2, + "Technology": 6, + "Machine": 2, + "Internet": false, + "MultiTenant": false, + "Redundant": false, + "CustomDevelopedParts": true, + "OutOfScope": false, + "UsedAsClientByHuman": false, + "Encryption": 0, + "JustificationOutOfScope": "", + "Owner": "Company ABC", + "Confidentiality": 1, + "Integrity": 3, + "Availability": 3, + "JustificationCiaRating": "The correct configuration and reachability of the web server is mandatory for all customer usages of the portal.\n", + "Tags": [ + "linux", + "apache", + "aws:ec2" + ], + "DataAssetsProcessed": [ + "customer-accounts", + "customer-operational-data", + "customer-contracts", + "internal-business-data", + "client-application-code", + "server-application-code" + ], + "DataAssetsStored": [ + "client-application-code", + "server-application-code" + ], + "DataFormatsAccepted": [ + 0, + 3 + ], + "CommunicationLinks": [ + { + "Id": "apache-webserver\u003eerp-system-traffic", + "SourceId": "apache-webserver", + "TargetId": "erp-system", + "Title": "ERP System Traffic", + "Description": "Link to the ERP system", + "Protocol": 2, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 3, + "Authorization": 1, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts", + "customer-operational-data", + "internal-business-data" + ], + "DataAssetsReceived": [ + "customer-accounts", + "customer-operational-data", + "customer-contracts", + "internal-business-data" + ], + "DiagramTweakWeight": 1, + 
"DiagramTweakConstraint": true + }, + { + "Id": "apache-webserver\u003eauth-credential-check-traffic", + "SourceId": "apache-webserver", + "TargetId": "identity-provider", + "Title": "Auth Credential Check Traffic", + "Description": "Link to the identity provider server", + "Protocol": 2, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 1, + "Authorization": 1, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts" + ], + "DataAssetsReceived": null, + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } + ], + "DiagramTweakOrder": 0, + "RAA": 0 + }, + "backend-admin-client": { + "Id": "backend-admin-client", + "Title": "Backend Admin Client", + "Description": "Backend admin client", + "Usage": 1, + "Type": 0, + "Size": 3, + "Technology": 2, + "Machine": 0, + "Internet": false, + "MultiTenant": false, + "Redundant": false, + "CustomDevelopedParts": false, + "OutOfScope": true, + "UsedAsClientByHuman": true, + "Encryption": 0, + "JustificationOutOfScope": "Owned and managed by ops provider", + "Owner": "Company XYZ", + "Confidentiality": 1, + "Integrity": 1, + "Availability": 1, + "JustificationCiaRating": "The client used by Company XYZ to administer the system.\n", + "Tags": [], + "DataAssetsProcessed": [ + "erp-logs" + ], + "DataAssetsStored": [], + "DataFormatsAccepted": [], + "CommunicationLinks": [ + { + "Id": "backend-admin-client\u003edb-update-access", + "SourceId": "backend-admin-client", + "TargetId": "sql-database", + "Title": "DB Update Access", + "Description": "Link to the database (JDBC tunneled via SSH)", + "Protocol": 20, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 4, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "db-dumps" + ], + "DataAssetsReceived": [ + "db-dumps", + "erp-logs", + "customer-accounts", + "customer-operational-data" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + { + "Id": 
"backend-admin-client\u003euser-management-access", + "SourceId": "backend-admin-client", + "TargetId": "ldap-auth-server", + "Title": "User Management Access", + "Description": "Link to the LDAP auth server for managing users", + "Protocol": 33, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 1, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "customer-accounts" + ], + "DataAssetsReceived": [ + "customer-accounts" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + { + "Id": "backend-admin-client\u003eerp-web-access", + "SourceId": "backend-admin-client", + "TargetId": "erp-system", + "Title": "ERP Web Access", + "Description": "Link to the ERP system (Web)", + "Protocol": 2, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 3, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "erp-customizing" + ], + "DataAssetsReceived": [ + "erp-logs" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } + ], + "DiagramTweakOrder": 0, + "RAA": 0 + }, + "backoffice-client": { + "Id": "backoffice-client", + "Title": "Backoffice Client", + "Description": "Backoffice client", + "Usage": 0, + "Type": 0, + "Size": 3, + "Technology": 3, + "Machine": 0, + "Internet": false, + "MultiTenant": false, + "Redundant": false, + "CustomDevelopedParts": false, + "OutOfScope": true, + "UsedAsClientByHuman": true, + "Encryption": 0, + "JustificationOutOfScope": "Owned and managed by Company XYZ company", + "Owner": "Company XYZ", + "Confidentiality": 3, + "Integrity": 2, + "Availability": 2, + "JustificationCiaRating": "The client used by Company XYZ to administer and use the system.\n", + "Tags": [], + "DataAssetsProcessed": [ + "customer-contracts", + "internal-business-data", + "erp-logs" + ], + "DataAssetsStored": [], + "DataFormatsAccepted": [], + "CommunicationLinks": [ + { + "Id": "backoffice-client\u003eerp-internal-access", + "SourceId": 
"backoffice-client", + "TargetId": "erp-system", + "Title": "ERP Internal Access", + "Description": "Link to the ERP system", + "Protocol": 2, + "Tags": [ + "some-erp" + ], + "VPN": true, + "IpFiltered": false, + "Readonly": false, + "Authentication": 3, + "Authorization": 2, + "Usage": 0, + "DataAssetsSent": [ + "internal-business-data" + ], + "DataAssetsReceived": [ + "customer-contracts", + "internal-business-data" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + { + "Id": "backoffice-client\u003emarketing-cms-editing", + "SourceId": "backoffice-client", + "TargetId": "marketing-cms", + "Title": "Marketing CMS Editing", + "Description": "Link to the CMS for editing content", + "Protocol": 2, + "Tags": [], + "VPN": true, + "IpFiltered": false, + "Readonly": false, + "Authentication": 3, + "Authorization": 2, + "Usage": 0, + "DataAssetsSent": [ + "marketing-material" + ], + "DataAssetsReceived": [ + "marketing-material" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } + ], + "DiagramTweakOrder": 0, + "RAA": 0 + }, + "contract-fileserver": { + "Id": "contract-fileserver", + "Title": "Contract Fileserver", + "Description": "NFS Filesystem for storing the contract PDFs", + "Usage": 0, + "Type": 2, + "Size": 3, + "Technology": 10, + "Machine": 1, + "Internet": false, + "MultiTenant": false, + "Redundant": false, + "CustomDevelopedParts": false, + "OutOfScope": false, + "UsedAsClientByHuman": false, + "Encryption": 0, + "JustificationOutOfScope": "", + "Owner": "Company ABC", + "Confidentiality": 3, + "Integrity": 3, + "Availability": 2, + "JustificationCiaRating": "Contract data might contain financial data as well as personally identifiable information (PII). The integrity and availability of contract data is required for clearing payment disputes. 
The filesystem is also required to be available for storing new contracts of freshly generated customers.\n", + "Tags": [ + "linux", + "aws:s3" + ], + "DataAssetsProcessed": [], + "DataAssetsStored": [ + "customer-contracts", + "contract-summaries" + ], + "DataFormatsAccepted": [ + 3 + ], + "CommunicationLinks": [], + "DiagramTweakOrder": 0, + "RAA": 0 + }, + "customer-client": { + "Id": "customer-client", + "Title": "Customer Web Client", + "Description": "Customer Web Client", + "Usage": 0, + "Type": 0, + "Size": 3, + "Technology": 2, + "Machine": 0, + "Internet": true, + "MultiTenant": false, + "Redundant": false, + "CustomDevelopedParts": false, + "OutOfScope": true, + "UsedAsClientByHuman": true, + "Encryption": 0, + "JustificationOutOfScope": "Owned and managed by enduser customer", + "Owner": "Customer", + "Confidentiality": 1, + "Integrity": 1, + "Availability": 1, + "JustificationCiaRating": "The client used by the customer to access the system.\n", + "Tags": [], + "DataAssetsProcessed": [ + "customer-accounts", + "customer-operational-data", + "customer-contracts", + "client-application-code", + "marketing-material" + ], + "DataAssetsStored": [], + "DataFormatsAccepted": [], + "CommunicationLinks": [ + { + "Id": "customer-client\u003ecustomer-traffic", + "SourceId": "customer-client", + "TargetId": "load-balancer", + "Title": "Customer Traffic", + "Description": "Link to the load balancer", + "Protocol": 2, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 2, + "Authorization": 2, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts", + "customer-operational-data" + ], + "DataAssetsReceived": [ + "customer-accounts", + "customer-operational-data", + "customer-contracts", + "client-application-code", + "marketing-material" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } + ], + "DiagramTweakOrder": 0, + "RAA": 0 + }, + "erp-system": { + "Id": "erp-system", + "Title": "Backoffice ERP 
System", + "Description": "ERP system", + "Usage": 0, + "Type": 1, + "Size": 0, + "Technology": 12, + "Machine": 1, + "Internet": false, + "MultiTenant": false, + "Redundant": true, + "CustomDevelopedParts": false, + "OutOfScope": false, + "UsedAsClientByHuman": false, + "Encryption": 0, + "JustificationOutOfScope": "", + "Owner": "Company ABC", + "Confidentiality": 4, + "Integrity": 4, + "Availability": 4, + "JustificationCiaRating": "The ERP system contains business-relevant sensitive data for the leasing processes and eventually also for other Company XYZ internal processes.\n", + "Tags": [ + "linux" + ], + "DataAssetsProcessed": [ + "customer-accounts", + "customer-operational-data", + "customer-contracts", + "internal-business-data", + "erp-customizing" + ], + "DataAssetsStored": [ + "erp-logs" + ], + "DataFormatsAccepted": [ + 1, + 3, + 2 + ], + "CommunicationLinks": [ + { + "Id": "erp-system\u003enfs-filesystem-access", + "SourceId": "erp-system", + "TargetId": "contract-fileserver", + "Title": "NFS Filesystem Access", + "Description": "Link to the file system", + "Protocol": 35, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 0, + "Authorization": 0, + "Usage": 0, + "DataAssetsSent": [ + "customer-contracts" + ], + "DataAssetsReceived": [ + "customer-contracts" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + { + "Id": "erp-system\u003edatabase-traffic", + "SourceId": "erp-system", + "TargetId": "sql-database", + "Title": "Database Traffic", + "Description": "Link to the DB system", + "Protocol": 8, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 1, + "Authorization": 1, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts", + "customer-operational-data", + "internal-business-data" + ], + "DataAssetsReceived": [ + "customer-accounts", + "customer-operational-data", + "internal-business-data" + ], + "DiagramTweakWeight": 1, + 
"DiagramTweakConstraint": true + } + ], + "DiagramTweakOrder": 0, + "RAA": 0 + }, + "external-dev-client": { + "Id": "external-dev-client", + "Title": "External Development Client", + "Description": "External developer client", + "Usage": 1, + "Type": 0, + "Size": 0, + "Technology": 5, + "Machine": 0, + "Internet": true, + "MultiTenant": true, + "Redundant": false, + "CustomDevelopedParts": false, + "OutOfScope": true, + "UsedAsClientByHuman": true, + "Encryption": 0, + "JustificationOutOfScope": "Owned and managed by external developers", + "Owner": "External Developers", + "Confidentiality": 3, + "Integrity": 3, + "Availability": 1, + "JustificationCiaRating": "The clients used by external developers to create parts of the application code.\n", + "Tags": [ + "linux" + ], + "DataAssetsProcessed": [ + "client-application-code", + "server-application-code" + ], + "DataAssetsStored": [ + "client-application-code", + "server-application-code" + ], + "DataFormatsAccepted": [ + 3 + ], + "CommunicationLinks": [ + { + "Id": "external-dev-client\u003ejenkins-web-ui-access", + "SourceId": "external-dev-client", + "TargetId": "jenkins-buildserver", + "Title": "Jenkins Web-UI Access", + "Description": "Link to the Jenkins build server", + "Protocol": 2, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 1, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "build-job-config" + ], + "DataAssetsReceived": [ + "build-job-config" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + { + "Id": "external-dev-client\u003egit-repo-code-write-access", + "SourceId": "external-dev-client", + "TargetId": "git-repo", + "Title": "Git-Repo Code Write Access", + "Description": "Link to the Git repo", + "Protocol": 20, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 4, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "client-application-code", + 
"server-application-code" + ], + "DataAssetsReceived": [ + "client-application-code", + "server-application-code" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + { + "Id": "external-dev-client\u003egit-repo-web-ui-access", + "SourceId": "external-dev-client", + "TargetId": "git-repo", + "Title": "Git-Repo Web-UI Access", + "Description": "Link to the Git repo", + "Protocol": 2, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 3, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "client-application-code", + "server-application-code" + ], + "DataAssetsReceived": [ + "client-application-code", + "server-application-code" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } + ], + "DiagramTweakOrder": 0, + "RAA": 0 + }, + "git-repo": { + "Id": "git-repo", + "Title": "Git Repository", + "Description": "Git repository server", + "Usage": 1, + "Type": 1, + "Size": 0, + "Technology": 23, + "Machine": 1, + "Internet": false, + "MultiTenant": true, + "Redundant": false, + "CustomDevelopedParts": false, + "OutOfScope": false, + "UsedAsClientByHuman": false, + "Encryption": 0, + "JustificationOutOfScope": "", + "Owner": "Company ABC", + "Confidentiality": 3, + "Integrity": 2, + "Availability": 2, + "JustificationCiaRating": "The code repo pipeline might contain sensitive configuration values like backend credentials, certificates etc. 
and is therefore rated as confidential.\n", + "Tags": [ + "linux", + "git" + ], + "DataAssetsProcessed": [ + "client-application-code", + "server-application-code" + ], + "DataAssetsStored": [ + "client-application-code", + "server-application-code" + ], + "DataFormatsAccepted": [ + 3 + ], + "CommunicationLinks": [], + "DiagramTweakOrder": 0, + "RAA": 0 + }, + "identity-provider": { + "Id": "identity-provider", + "Title": "Identity Provider", + "Description": "Identity provider server", + "Usage": 0, + "Type": 1, + "Size": 3, + "Technology": 31, + "Machine": 1, + "Internet": false, + "MultiTenant": false, + "Redundant": false, + "CustomDevelopedParts": false, + "OutOfScope": false, + "UsedAsClientByHuman": false, + "Encryption": 0, + "JustificationOutOfScope": "", + "Owner": "Company ABC", + "Confidentiality": 3, + "Integrity": 3, + "Availability": 3, + "JustificationCiaRating": "The auth data of the application\n", + "Tags": [ + "linux", + "jboss", + "keycloak" + ], + "DataAssetsProcessed": [ + "customer-accounts" + ], + "DataAssetsStored": [], + "DataFormatsAccepted": [], + "CommunicationLinks": [ + { + "Id": "identity-provider\u003eldap-credential-check-traffic", + "SourceId": "identity-provider", + "TargetId": "ldap-auth-server", + "Title": "LDAP Credential Check Traffic", + "Description": "Link to the LDAP server", + "Protocol": 33, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 1, + "Authorization": 1, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts" + ], + "DataAssetsReceived": null, + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } + ], + "DiagramTweakOrder": 0, + "RAA": 0 + }, + "jenkins-buildserver": { + "Id": "jenkins-buildserver", + "Title": "Jenkins Buildserver", + "Description": "Jenkins buildserver", + "Usage": 1, + "Type": 1, + "Size": 0, + "Technology": 22, + "Machine": 1, + "Internet": false, + "MultiTenant": true, + "Redundant": false, + "CustomDevelopedParts": false, + 
"OutOfScope": false, + "UsedAsClientByHuman": false, + "Encryption": 0, + "JustificationOutOfScope": "", + "Owner": "Company ABC", + "Confidentiality": 3, + "Integrity": 3, + "Availability": 2, + "JustificationCiaRating": "The build pipeline might contain sensitive configuration values like backend credentials, certificates etc. and is therefore rated as confidential. The integrity and availability is rated as critical and important due to the risk of reputation damage and application update unavailability when the build pipeline is compromised.\n", + "Tags": [ + "linux", + "jenkins" + ], + "DataAssetsProcessed": [ + "build-job-config", + "client-application-code", + "server-application-code", + "marketing-material" + ], + "DataAssetsStored": [ + "build-job-config", + "client-application-code", + "server-application-code", + "marketing-material" + ], + "DataFormatsAccepted": [ + 3, + 2 + ], + "CommunicationLinks": [ + { + "Id": "jenkins-buildserver\u003egit-repo-code-read-access", + "SourceId": "jenkins-buildserver", + "TargetId": "git-repo", + "Title": "Git Repo Code Read Access", + "Description": "Link to the Git repository server", + "Protocol": 20, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": true, + "Authentication": 4, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": null, + "DataAssetsReceived": [ + "client-application-code", + "server-application-code" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + { + "Id": "jenkins-buildserver\u003eapplication-deployment", + "SourceId": "jenkins-buildserver", + "TargetId": "apache-webserver", + "Title": "Application Deployment", + "Description": "Link to the Apache webserver", + "Protocol": 20, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 4, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "client-application-code", + "server-application-code" + ], + "DataAssetsReceived": null, + "DiagramTweakWeight": 1, + 
"DiagramTweakConstraint": true + }, + { + "Id": "jenkins-buildserver\u003ecms-updates", + "SourceId": "jenkins-buildserver", + "TargetId": "marketing-cms", + "Title": "CMS Updates", + "Description": "Link to the CMS", + "Protocol": 20, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 4, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "marketing-material" + ], + "DataAssetsReceived": null, + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } + ], + "DiagramTweakOrder": 0, + "RAA": 0 + }, + "ldap-auth-server": { + "Id": "ldap-auth-server", + "Title": "LDAP Auth Server", + "Description": "LDAP authentication server", + "Usage": 0, + "Type": 2, + "Size": 3, + "Technology": 32, + "Machine": 0, + "Internet": false, + "MultiTenant": false, + "Redundant": false, + "CustomDevelopedParts": false, + "OutOfScope": false, + "UsedAsClientByHuman": false, + "Encryption": 1, + "JustificationOutOfScope": "", + "Owner": "Company ABC", + "Confidentiality": 3, + "Integrity": 3, + "Availability": 3, + "JustificationCiaRating": "The auth data of the application\n", + "Tags": [ + "linux" + ], + "DataAssetsProcessed": [ + "customer-accounts" + ], + "DataAssetsStored": [ + "customer-accounts" + ], + "DataFormatsAccepted": [], + "CommunicationLinks": [], + "DiagramTweakOrder": 0, + "RAA": 0 + }, + "load-balancer": { + "Id": "load-balancer", + "Title": "Load Balancer", + "Description": "Load Balancer (HA-Proxy)", + "Usage": 0, + "Type": 1, + "Size": 3, + "Technology": 21, + "Machine": 0, + "Internet": false, + "MultiTenant": false, + "Redundant": false, + "CustomDevelopedParts": false, + "OutOfScope": false, + "UsedAsClientByHuman": false, + "Encryption": 0, + "JustificationOutOfScope": "", + "Owner": "Company ABC", + "Confidentiality": 1, + "Integrity": 4, + "Availability": 4, + "JustificationCiaRating": "The correct configuration and reachability of the load balancer is mandatory for all customer and Company XYZ usages of 
the portal and ERP system.\n", + "Tags": [], + "DataAssetsProcessed": [ + "customer-accounts", + "customer-operational-data", + "customer-contracts", + "internal-business-data", + "client-application-code", + "marketing-material" + ], + "DataAssetsStored": [], + "DataFormatsAccepted": [], + "CommunicationLinks": [ + { + "Id": "load-balancer\u003ecms-content-traffic", + "SourceId": "load-balancer", + "TargetId": "marketing-cms", + "Title": "CMS Content Traffic", + "Description": "Link to the CMS server", + "Protocol": 1, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": true, + "Authentication": 0, + "Authorization": 0, + "Usage": 0, + "DataAssetsSent": null, + "DataAssetsReceived": [ + "marketing-material" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + { + "Id": "load-balancer\u003eweb-application-traffic", + "SourceId": "load-balancer", + "TargetId": "apache-webserver", + "Title": "Web Application Traffic", + "Description": "Link to the web server", + "Protocol": 1, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 2, + "Authorization": 2, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts", + "customer-operational-data" + ], + "DataAssetsReceived": [ + "customer-accounts", + "customer-operational-data", + "customer-contracts", + "client-application-code" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } + ], + "DiagramTweakOrder": 0, + "RAA": 0 + }, + "marketing-cms": { + "Id": "marketing-cms", + "Title": "Marketing CMS", + "Description": "CMS for the marketing content", + "Usage": 0, + "Type": 1, + "Size": 2, + "Technology": 13, + "Machine": 2, + "Internet": false, + "MultiTenant": false, + "Redundant": false, + "CustomDevelopedParts": true, + "OutOfScope": false, + "UsedAsClientByHuman": false, + "Encryption": 0, + "JustificationOutOfScope": "", + "Owner": "Company ABC", + "Confidentiality": 1, + "Integrity": 2, + "Availability": 2, + 
"JustificationCiaRating": "The correct configuration and reachability of the web server is mandatory for all customer usages of the portal.\n", + "Tags": [ + "linux" + ], + "DataAssetsProcessed": [ + "marketing-material", + "customer-accounts" + ], + "DataAssetsStored": [ + "marketing-material" + ], + "DataFormatsAccepted": [], + "CommunicationLinks": [ + { + "Id": "marketing-cms\u003eauth-traffic", + "SourceId": "marketing-cms", + "TargetId": "ldap-auth-server", + "Title": "Auth Traffic", + "Description": "Link to the LDAP auth server", + "Protocol": 32, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": true, + "Authentication": 1, + "Authorization": 1, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts" + ], + "DataAssetsReceived": [ + "customer-accounts" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } + ], + "DiagramTweakOrder": 0, + "RAA": 0 + }, + "sql-database": { + "Id": "sql-database", + "Title": "Customer Contract Database", + "Description": "The database behind the ERP system", + "Usage": 0, + "Type": 2, + "Size": 3, + "Technology": 9, + "Machine": 1, + "Internet": false, + "MultiTenant": false, + "Redundant": false, + "CustomDevelopedParts": false, + "OutOfScope": false, + "UsedAsClientByHuman": false, + "Encryption": 2, + "JustificationOutOfScope": "", + "Owner": "Company ABC", + "Confidentiality": 4, + "Integrity": 4, + "Availability": 4, + "JustificationCiaRating": "The ERP system's database contains business-relevant sensitive data for the leasing processes and eventually also for other Company XYZ internal processes.\n", + "Tags": [ + "linux", + "mysql" + ], + "DataAssetsProcessed": [ + "db-dumps" + ], + "DataAssetsStored": [ + "customer-accounts", + "customer-operational-data", + "internal-business-data" + ], + "DataFormatsAccepted": [], + "CommunicationLinks": [], + "DiagramTweakOrder": 0, + "RAA": 0 + } + }, + "trust_boundaries": { + "application-network": { + "Id": "application-network", + "Title": 
"Application Network", + "Description": "Application Network", + "Type": 3, + "Tags": [ + "aws" + ], + "TechnicalAssetsInside": [ + "load-balancer" + ], + "TrustBoundariesNested": [ + "web-dmz", + "erp-dmz", + "auth-env" + ] + }, + "auth-env": { + "Id": "auth-env", + "Title": "Auth Handling Environment", + "Description": "Auth Handling Environment", + "Type": 6, + "Tags": [], + "TechnicalAssetsInside": [ + "identity-provider", + "ldap-auth-server" + ], + "TrustBoundariesNested": [] + }, + "dev-network": { + "Id": "dev-network", + "Title": "Dev Network", + "Description": "Development Network", + "Type": 0, + "Tags": [], + "TechnicalAssetsInside": [ + "jenkins-buildserver", + "git-repo", + "backend-admin-client", + "backoffice-client" + ], + "TrustBoundariesNested": [] + }, + "erp-dmz": { + "Id": "erp-dmz", + "Title": "ERP DMZ", + "Description": "ERP DMZ", + "Type": 4, + "Tags": [ + "some-erp" + ], + "TechnicalAssetsInside": [ + "erp-system", + "contract-fileserver", + "sql-database" + ], + "TrustBoundariesNested": [] + }, + "web-dmz": { + "Id": "web-dmz", + "Title": "Web DMZ", + "Description": "Web DMZ", + "Type": 4, + "Tags": [], + "TechnicalAssetsInside": [ + "apache-webserver", + "marketing-cms" + ], + "TrustBoundariesNested": [] + } + }, + "shared_runtimes": { + "webapp-virtualization": { + "Id": "webapp-virtualization", + "Title": "WebApp and Backoffice Virtualization", + "Description": "WebApp Virtualization", + "Tags": [ + "vmware" + ], + "TechnicalAssetsRunning": [ + "apache-webserver", + "marketing-cms", + "erp-system", + "contract-fileserver", + "sql-database" + ] + } + }, + "individual_risk_categories": { + "something-strange": { + "Id": "something-strange", + "Title": "Some Individual Risk Example", + "Description": "Some text describing the risk category...", + "Impact": "Some text describing the impact...", + "ASVS": "V0 - Something Strange", + "CheatSheet": "https://example.com", + "Action": "Some text describing the action...", + "Mitigation": "Some 
text describing the mitigation...", + "Check": "Check if XYZ...", + "DetectionLogic": "Some text describing the detection logic...", + "RiskAssessment": "Some text describing the risk assessment...", + "FalsePositives": "Some text describing the most common types of false positives...", + "Function": "business-side", + "STRIDE": "repudiation", + "ModelFailurePossibleReason": false, + "CWE": 693 + } + }, + "risk_tracking": { + "dos-risky-access-across-trust-boundary@*@*@*": { + "SyntheticRiskId": "dos-risky-access-across-trust-boundary@*@*@*", + "Justification": "The hardening measures are being implemented and checked", + "Ticket": "XYZ-1234", + "CheckedBy": "John Doe", + "Status": "in-progress", + "Date": "2020-01-04T00:00:00Z" + }, + "ldap-injection@*@ldap-auth-server@*": { + "SyntheticRiskId": "ldap-injection@*@ldap-auth-server@*", + "Justification": "The hardening measures were implemented and checked", + "Ticket": "XYZ-5678", + "CheckedBy": "John Doe", + "Status": "mitigated", + "Date": "2020-01-05T00:00:00Z" + }, + "missing-authentication-second-factor@*@*@*": { + "SyntheticRiskId": "missing-authentication-second-factor@*@*@*", + "Justification": "The hardening measures were implemented and checked", + "Ticket": "XYZ-1234", + "CheckedBy": "John Doe", + "Status": "mitigated", + "Date": "2020-01-04T00:00:00Z" + }, + "missing-hardening@*": { + "SyntheticRiskId": "missing-hardening@*", + "Justification": "The hardening measures were implemented and checked", + "Ticket": "XYZ-1234", + "CheckedBy": "John Doe", + "Status": "mitigated", + "Date": "2020-01-04T00:00:00Z" + }, + "unencrypted-asset@*": { + "SyntheticRiskId": "unencrypted-asset@*", + "Justification": "The hardening measures were implemented and checked", + "Ticket": "XYZ-1234", + "CheckedBy": "John Doe", + "Status": "mitigated", + "Date": "2020-01-04T00:00:00Z" + }, + "untrusted-deserialization@erp-system": { + "SyntheticRiskId": "untrusted-deserialization@erp-system", + "Justification": "Risk accepted as 
tolerable", + "Ticket": "XYZ-1234", + "CheckedBy": "John Doe", + "Status": "accepted", + "Date": "2020-01-04T00:00:00Z" + } + }, + "communication_links": { + "apache-webserver\u003eauth-credential-check-traffic": { + "Id": "apache-webserver\u003eauth-credential-check-traffic", + "SourceId": "apache-webserver", + "TargetId": "identity-provider", + "Title": "Auth Credential Check Traffic", + "Description": "Link to the identity provider server", + "Protocol": 2, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 1, + "Authorization": 1, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts" + ], + "DataAssetsReceived": null, + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + "apache-webserver\u003eerp-system-traffic": { + "Id": "apache-webserver\u003eerp-system-traffic", + "SourceId": "apache-webserver", + "TargetId": "erp-system", + "Title": "ERP System Traffic", + "Description": "Link to the ERP system", + "Protocol": 2, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 3, + "Authorization": 1, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts", + "customer-operational-data", + "internal-business-data" + ], + "DataAssetsReceived": [ + "customer-accounts", + "customer-operational-data", + "customer-contracts", + "internal-business-data" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + "backend-admin-client\u003edb-update-access": { + "Id": "backend-admin-client\u003edb-update-access", + "SourceId": "backend-admin-client", + "TargetId": "sql-database", + "Title": "DB Update Access", + "Description": "Link to the database (JDBC tunneled via SSH)", + "Protocol": 20, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 4, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "db-dumps" + ], + "DataAssetsReceived": [ + "db-dumps", + "erp-logs", + "customer-accounts", + 
"customer-operational-data" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + "backend-admin-client\u003eerp-web-access": { + "Id": "backend-admin-client\u003eerp-web-access", + "SourceId": "backend-admin-client", + "TargetId": "erp-system", + "Title": "ERP Web Access", + "Description": "Link to the ERP system (Web)", + "Protocol": 2, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 3, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "erp-customizing" + ], + "DataAssetsReceived": [ + "erp-logs" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + "backend-admin-client\u003euser-management-access": { + "Id": "backend-admin-client\u003euser-management-access", + "SourceId": "backend-admin-client", + "TargetId": "ldap-auth-server", + "Title": "User Management Access", + "Description": "Link to the LDAP auth server for managing users", + "Protocol": 33, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 1, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "customer-accounts" + ], + "DataAssetsReceived": [ + "customer-accounts" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + "backoffice-client\u003eerp-internal-access": { + "Id": "backoffice-client\u003eerp-internal-access", + "SourceId": "backoffice-client", + "TargetId": "erp-system", + "Title": "ERP Internal Access", + "Description": "Link to the ERP system", + "Protocol": 2, + "Tags": [ + "some-erp" + ], + "VPN": true, + "IpFiltered": false, + "Readonly": false, + "Authentication": 3, + "Authorization": 2, + "Usage": 0, + "DataAssetsSent": [ + "internal-business-data" + ], + "DataAssetsReceived": [ + "customer-contracts", + "internal-business-data" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + "backoffice-client\u003emarketing-cms-editing": { + "Id": "backoffice-client\u003emarketing-cms-editing", + "SourceId": 
"backoffice-client", + "TargetId": "marketing-cms", + "Title": "Marketing CMS Editing", + "Description": "Link to the CMS for editing content", + "Protocol": 2, + "Tags": [], + "VPN": true, + "IpFiltered": false, + "Readonly": false, + "Authentication": 3, + "Authorization": 2, + "Usage": 0, + "DataAssetsSent": [ + "marketing-material" + ], + "DataAssetsReceived": [ + "marketing-material" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + "customer-client\u003ecustomer-traffic": { + "Id": "customer-client\u003ecustomer-traffic", + "SourceId": "customer-client", + "TargetId": "load-balancer", + "Title": "Customer Traffic", + "Description": "Link to the load balancer", + "Protocol": 2, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 2, + "Authorization": 2, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts", + "customer-operational-data" + ], + "DataAssetsReceived": [ + "customer-accounts", + "customer-operational-data", + "customer-contracts", + "client-application-code", + "marketing-material" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + "erp-system\u003edatabase-traffic": { + "Id": "erp-system\u003edatabase-traffic", + "SourceId": "erp-system", + "TargetId": "sql-database", + "Title": "Database Traffic", + "Description": "Link to the DB system", + "Protocol": 8, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 1, + "Authorization": 1, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts", + "customer-operational-data", + "internal-business-data" + ], + "DataAssetsReceived": [ + "customer-accounts", + "customer-operational-data", + "internal-business-data" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + "erp-system\u003enfs-filesystem-access": { + "Id": "erp-system\u003enfs-filesystem-access", + "SourceId": "erp-system", + "TargetId": "contract-fileserver", + "Title": "NFS Filesystem Access", + 
"Description": "Link to the file system", + "Protocol": 35, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 0, + "Authorization": 0, + "Usage": 0, + "DataAssetsSent": [ + "customer-contracts" + ], + "DataAssetsReceived": [ + "customer-contracts" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + "external-dev-client\u003egit-repo-code-write-access": { + "Id": "external-dev-client\u003egit-repo-code-write-access", + "SourceId": "external-dev-client", + "TargetId": "git-repo", + "Title": "Git-Repo Code Write Access", + "Description": "Link to the Git repo", + "Protocol": 20, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 4, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "client-application-code", + "server-application-code" + ], + "DataAssetsReceived": [ + "client-application-code", + "server-application-code" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + "external-dev-client\u003egit-repo-web-ui-access": { + "Id": "external-dev-client\u003egit-repo-web-ui-access", + "SourceId": "external-dev-client", + "TargetId": "git-repo", + "Title": "Git-Repo Web-UI Access", + "Description": "Link to the Git repo", + "Protocol": 2, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 3, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "client-application-code", + "server-application-code" + ], + "DataAssetsReceived": [ + "client-application-code", + "server-application-code" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + "external-dev-client\u003ejenkins-web-ui-access": { + "Id": "external-dev-client\u003ejenkins-web-ui-access", + "SourceId": "external-dev-client", + "TargetId": "jenkins-buildserver", + "Title": "Jenkins Web-UI Access", + "Description": "Link to the Jenkins build server", + "Protocol": 2, + "Tags": [], + "VPN": false, + "IpFiltered": 
false, + "Readonly": false, + "Authentication": 1, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "build-job-config" + ], + "DataAssetsReceived": [ + "build-job-config" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + "identity-provider\u003eldap-credential-check-traffic": { + "Id": "identity-provider\u003eldap-credential-check-traffic", + "SourceId": "identity-provider", + "TargetId": "ldap-auth-server", + "Title": "LDAP Credential Check Traffic", + "Description": "Link to the LDAP server", + "Protocol": 33, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 1, + "Authorization": 1, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts" + ], + "DataAssetsReceived": null, + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + "jenkins-buildserver\u003eapplication-deployment": { + "Id": "jenkins-buildserver\u003eapplication-deployment", + "SourceId": "jenkins-buildserver", + "TargetId": "apache-webserver", + "Title": "Application Deployment", + "Description": "Link to the Apache webserver", + "Protocol": 20, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 4, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "client-application-code", + "server-application-code" + ], + "DataAssetsReceived": null, + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + "jenkins-buildserver\u003ecms-updates": { + "Id": "jenkins-buildserver\u003ecms-updates", + "SourceId": "jenkins-buildserver", + "TargetId": "marketing-cms", + "Title": "CMS Updates", + "Description": "Link to the CMS", + "Protocol": 20, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 4, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "marketing-material" + ], + "DataAssetsReceived": null, + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + 
"jenkins-buildserver\u003egit-repo-code-read-access": { + "Id": "jenkins-buildserver\u003egit-repo-code-read-access", + "SourceId": "jenkins-buildserver", + "TargetId": "git-repo", + "Title": "Git Repo Code Read Access", + "Description": "Link to the Git repository server", + "Protocol": 20, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": true, + "Authentication": 4, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": null, + "DataAssetsReceived": [ + "client-application-code", + "server-application-code" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + "load-balancer\u003ecms-content-traffic": { + "Id": "load-balancer\u003ecms-content-traffic", + "SourceId": "load-balancer", + "TargetId": "marketing-cms", + "Title": "CMS Content Traffic", + "Description": "Link to the CMS server", + "Protocol": 1, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": true, + "Authentication": 0, + "Authorization": 0, + "Usage": 0, + "DataAssetsSent": null, + "DataAssetsReceived": [ + "marketing-material" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + "load-balancer\u003eweb-application-traffic": { + "Id": "load-balancer\u003eweb-application-traffic", + "SourceId": "load-balancer", + "TargetId": "apache-webserver", + "Title": "Web Application Traffic", + "Description": "Link to the web server", + "Protocol": 1, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 2, + "Authorization": 2, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts", + "customer-operational-data" + ], + "DataAssetsReceived": [ + "customer-accounts", + "customer-operational-data", + "customer-contracts", + "client-application-code" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + "marketing-cms\u003eauth-traffic": { + "Id": "marketing-cms\u003eauth-traffic", + "SourceId": "marketing-cms", + "TargetId": "ldap-auth-server", + "Title": "Auth Traffic", + 
"Description": "Link to the LDAP auth server", + "Protocol": 32, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": true, + "Authentication": 1, + "Authorization": 1, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts" + ], + "DataAssetsReceived": [ + "customer-accounts" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } + }, + "diagram_tweak_nodesep": 2, + "diagram_tweak_ranksep": 2, + "incoming_technical_communication_links_mapped_by_target_id": { + "apache-webserver": [ + { + "Id": "jenkins-buildserver\u003eapplication-deployment", + "SourceId": "jenkins-buildserver", + "TargetId": "apache-webserver", + "Title": "Application Deployment", + "Description": "Link to the Apache webserver", + "Protocol": 20, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 4, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "client-application-code", + "server-application-code" + ], + "DataAssetsReceived": null, + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + { + "Id": "load-balancer\u003eweb-application-traffic", + "SourceId": "load-balancer", + "TargetId": "apache-webserver", + "Title": "Web Application Traffic", + "Description": "Link to the web server", + "Protocol": 1, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 2, + "Authorization": 2, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts", + "customer-operational-data" + ], + "DataAssetsReceived": [ + "customer-accounts", + "customer-operational-data", + "customer-contracts", + "client-application-code" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } + ], + "contract-fileserver": [ + { + "Id": "erp-system\u003enfs-filesystem-access", + "SourceId": "erp-system", + "TargetId": "contract-fileserver", + "Title": "NFS Filesystem Access", + "Description": "Link to the file system", + "Protocol": 35, + "Tags": [], + "VPN": false, + "IpFiltered": 
false, + "Readonly": false, + "Authentication": 0, + "Authorization": 0, + "Usage": 0, + "DataAssetsSent": [ + "customer-contracts" + ], + "DataAssetsReceived": [ + "customer-contracts" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } + ], + "erp-system": [ + { + "Id": "backoffice-client\u003eerp-internal-access", + "SourceId": "backoffice-client", + "TargetId": "erp-system", + "Title": "ERP Internal Access", + "Description": "Link to the ERP system", + "Protocol": 2, + "Tags": [ + "some-erp" + ], + "VPN": true, + "IpFiltered": false, + "Readonly": false, + "Authentication": 3, + "Authorization": 2, + "Usage": 0, + "DataAssetsSent": [ + "internal-business-data" + ], + "DataAssetsReceived": [ + "customer-contracts", + "internal-business-data" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + { + "Id": "backend-admin-client\u003eerp-web-access", + "SourceId": "backend-admin-client", + "TargetId": "erp-system", + "Title": "ERP Web Access", + "Description": "Link to the ERP system (Web)", + "Protocol": 2, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 3, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "erp-customizing" + ], + "DataAssetsReceived": [ + "erp-logs" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + { + "Id": "apache-webserver\u003eerp-system-traffic", + "SourceId": "apache-webserver", + "TargetId": "erp-system", + "Title": "ERP System Traffic", + "Description": "Link to the ERP system", + "Protocol": 2, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 3, + "Authorization": 1, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts", + "customer-operational-data", + "internal-business-data" + ], + "DataAssetsReceived": [ + "customer-accounts", + "customer-operational-data", + "customer-contracts", + "internal-business-data" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } 
+ ], + "git-repo": [ + { + "Id": "external-dev-client\u003egit-repo-code-write-access", + "SourceId": "external-dev-client", + "TargetId": "git-repo", + "Title": "Git-Repo Code Write Access", + "Description": "Link to the Git repo", + "Protocol": 20, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 4, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "client-application-code", + "server-application-code" + ], + "DataAssetsReceived": [ + "client-application-code", + "server-application-code" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + { + "Id": "external-dev-client\u003egit-repo-web-ui-access", + "SourceId": "external-dev-client", + "TargetId": "git-repo", + "Title": "Git-Repo Web-UI Access", + "Description": "Link to the Git repo", + "Protocol": 2, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 3, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "client-application-code", + "server-application-code" + ], + "DataAssetsReceived": [ + "client-application-code", + "server-application-code" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + { + "Id": "jenkins-buildserver\u003egit-repo-code-read-access", + "SourceId": "jenkins-buildserver", + "TargetId": "git-repo", + "Title": "Git Repo Code Read Access", + "Description": "Link to the Git repository server", + "Protocol": 20, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": true, + "Authentication": 4, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": null, + "DataAssetsReceived": [ + "client-application-code", + "server-application-code" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } + ], + "identity-provider": [ + { + "Id": "apache-webserver\u003eauth-credential-check-traffic", + "SourceId": "apache-webserver", + "TargetId": "identity-provider", + "Title": "Auth Credential Check Traffic", + "Description": "Link to 
the identity provider server", + "Protocol": 2, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 1, + "Authorization": 1, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts" + ], + "DataAssetsReceived": null, + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } + ], + "jenkins-buildserver": [ + { + "Id": "external-dev-client\u003ejenkins-web-ui-access", + "SourceId": "external-dev-client", + "TargetId": "jenkins-buildserver", + "Title": "Jenkins Web-UI Access", + "Description": "Link to the Jenkins build server", + "Protocol": 2, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 1, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "build-job-config" + ], + "DataAssetsReceived": [ + "build-job-config" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } + ], + "ldap-auth-server": [ + { + "Id": "marketing-cms\u003eauth-traffic", + "SourceId": "marketing-cms", + "TargetId": "ldap-auth-server", + "Title": "Auth Traffic", + "Description": "Link to the LDAP auth server", + "Protocol": 32, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": true, + "Authentication": 1, + "Authorization": 1, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts" + ], + "DataAssetsReceived": [ + "customer-accounts" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + { + "Id": "backend-admin-client\u003euser-management-access", + "SourceId": "backend-admin-client", + "TargetId": "ldap-auth-server", + "Title": "User Management Access", + "Description": "Link to the LDAP auth server for managing users", + "Protocol": 33, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 1, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "customer-accounts" + ], + "DataAssetsReceived": [ + "customer-accounts" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + { + "Id": 
"identity-provider\u003eldap-credential-check-traffic", + "SourceId": "identity-provider", + "TargetId": "ldap-auth-server", + "Title": "LDAP Credential Check Traffic", + "Description": "Link to the LDAP server", + "Protocol": 33, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 1, + "Authorization": 1, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts" + ], + "DataAssetsReceived": null, + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } + ], + "load-balancer": [ + { + "Id": "customer-client\u003ecustomer-traffic", + "SourceId": "customer-client", + "TargetId": "load-balancer", + "Title": "Customer Traffic", + "Description": "Link to the load balancer", + "Protocol": 2, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 2, + "Authorization": 2, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts", + "customer-operational-data" + ], + "DataAssetsReceived": [ + "customer-accounts", + "customer-operational-data", + "customer-contracts", + "client-application-code", + "marketing-material" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } + ], + "marketing-cms": [ + { + "Id": "jenkins-buildserver\u003ecms-updates", + "SourceId": "jenkins-buildserver", + "TargetId": "marketing-cms", + "Title": "CMS Updates", + "Description": "Link to the CMS", + "Protocol": 20, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 4, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "marketing-material" + ], + "DataAssetsReceived": null, + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + { + "Id": "backoffice-client\u003emarketing-cms-editing", + "SourceId": "backoffice-client", + "TargetId": "marketing-cms", + "Title": "Marketing CMS Editing", + "Description": "Link to the CMS for editing content", + "Protocol": 2, + "Tags": [], + "VPN": true, + "IpFiltered": false, + "Readonly": false, + 
"Authentication": 3, + "Authorization": 2, + "Usage": 0, + "DataAssetsSent": [ + "marketing-material" + ], + "DataAssetsReceived": [ + "marketing-material" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + { + "Id": "load-balancer\u003ecms-content-traffic", + "SourceId": "load-balancer", + "TargetId": "marketing-cms", + "Title": "CMS Content Traffic", + "Description": "Link to the CMS server", + "Protocol": 1, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": true, + "Authentication": 0, + "Authorization": 0, + "Usage": 0, + "DataAssetsSent": null, + "DataAssetsReceived": [ + "marketing-material" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } + ], + "sql-database": [ + { + "Id": "backend-admin-client\u003edb-update-access", + "SourceId": "backend-admin-client", + "TargetId": "sql-database", + "Title": "DB Update Access", + "Description": "Link to the database (JDBC tunneled via SSH)", + "Protocol": 20, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 4, + "Authorization": 1, + "Usage": 1, + "DataAssetsSent": [ + "db-dumps" + ], + "DataAssetsReceived": [ + "db-dumps", + "erp-logs", + "customer-accounts", + "customer-operational-data" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + }, + { + "Id": "erp-system\u003edatabase-traffic", + "SourceId": "erp-system", + "TargetId": "sql-database", + "Title": "Database Traffic", + "Description": "Link to the DB system", + "Protocol": 8, + "Tags": [], + "VPN": false, + "IpFiltered": false, + "Readonly": false, + "Authentication": 1, + "Authorization": 1, + "Usage": 0, + "DataAssetsSent": [ + "customer-accounts", + "customer-operational-data", + "internal-business-data" + ], + "DataAssetsReceived": [ + "customer-accounts", + "customer-operational-data", + "internal-business-data" + ], + "DiagramTweakWeight": 1, + "DiagramTweakConstraint": true + } + ] + }, + 
"direct_containing_trust_boundary_mapped_by_technical_asset_id": { + "apache-webserver": { + "Id": "web-dmz", + "Title": "Web DMZ", + "Description": "Web DMZ", + "Type": 4, + "Tags": [], + "TechnicalAssetsInside": [ + "apache-webserver", + "marketing-cms" + ], + "TrustBoundariesNested": [] + }, + "backend-admin-client": { + "Id": "dev-network", + "Title": "Dev Network", + "Description": "Development Network", + "Type": 0, + "Tags": [], + "TechnicalAssetsInside": [ + "jenkins-buildserver", + "git-repo", + "backend-admin-client", + "backoffice-client" + ], + "TrustBoundariesNested": [] + }, + "backoffice-client": { + "Id": "dev-network", + "Title": "Dev Network", + "Description": "Development Network", + "Type": 0, + "Tags": [], + "TechnicalAssetsInside": [ + "jenkins-buildserver", + "git-repo", + "backend-admin-client", + "backoffice-client" + ], + "TrustBoundariesNested": [] + }, + "contract-fileserver": { + "Id": "erp-dmz", + "Title": "ERP DMZ", + "Description": "ERP DMZ", + "Type": 4, + "Tags": [ + "some-erp" + ], + "TechnicalAssetsInside": [ + "erp-system", + "contract-fileserver", + "sql-database" + ], + "TrustBoundariesNested": [] + }, + "erp-system": { + "Id": "erp-dmz", + "Title": "ERP DMZ", + "Description": "ERP DMZ", + "Type": 4, + "Tags": [ + "some-erp" + ], + "TechnicalAssetsInside": [ + "erp-system", + "contract-fileserver", + "sql-database" + ], + "TrustBoundariesNested": [] + }, + "git-repo": { + "Id": "dev-network", + "Title": "Dev Network", + "Description": "Development Network", + "Type": 0, + "Tags": [], + "TechnicalAssetsInside": [ + "jenkins-buildserver", + "git-repo", + "backend-admin-client", + "backoffice-client" + ], + "TrustBoundariesNested": [] + }, + "identity-provider": { + "Id": "auth-env", + "Title": "Auth Handling Environment", + "Description": "Auth Handling Environment", + "Type": 6, + "Tags": [], + "TechnicalAssetsInside": [ + "identity-provider", + "ldap-auth-server" + ], + "TrustBoundariesNested": [] + }, + "jenkins-buildserver": 
{ + "Id": "dev-network", + "Title": "Dev Network", + "Description": "Development Network", + "Type": 0, + "Tags": [], + "TechnicalAssetsInside": [ + "jenkins-buildserver", + "git-repo", + "backend-admin-client", + "backoffice-client" + ], + "TrustBoundariesNested": [] + }, + "ldap-auth-server": { + "Id": "auth-env", + "Title": "Auth Handling Environment", + "Description": "Auth Handling Environment", + "Type": 6, + "Tags": [], + "TechnicalAssetsInside": [ + "identity-provider", + "ldap-auth-server" + ], + "TrustBoundariesNested": [] + }, + "load-balancer": { + "Id": "application-network", + "Title": "Application Network", + "Description": "Application Network", + "Type": 3, + "Tags": [ + "aws" + ], + "TechnicalAssetsInside": [ + "load-balancer" + ], + "TrustBoundariesNested": [ + "web-dmz", + "erp-dmz", + "auth-env" + ] + }, + "marketing-cms": { + "Id": "web-dmz", + "Title": "Web DMZ", + "Description": "Web DMZ", + "Type": 4, + "Tags": [], + "TechnicalAssetsInside": [ + "apache-webserver", + "marketing-cms" + ], + "TrustBoundariesNested": [] + }, + "sql-database": { + "Id": "erp-dmz", + "Title": "ERP DMZ", + "Description": "ERP DMZ", + "Type": 4, + "Tags": [ + "some-erp" + ], + "TechnicalAssetsInside": [ + "erp-system", + "contract-fileserver", + "sql-database" + ], + "TrustBoundariesNested": [] + } + }, + "generated_risks_by_category": { + "something-strange": [ + { + "category": "", + "risk_status": "unchecked", + "severity": "critical", + "exploitation_likelihood": "likely", + "exploitation_impact": "medium", + "title": "\u003cb\u003eExample Individual Risk\u003c/b\u003e at \u003cb\u003eDatabase\u003c/b\u003e", + "synthetic_id": "something-strange@sql-database", + "most_relevant_data_asset": "", + "most_relevant_technical_asset": "sql-database", + "most_relevant_trust_boundary": "", + "most_relevant_shared_runtime": "", + "most_relevant_communication_link": "", + "data_breach_probability": "probable", + "data_breach_technical_assets": [ + "sql-database" + ] + }, 
+ { + "category": "", + "risk_status": "unchecked", + "severity": "medium", + "exploitation_likelihood": "frequent", + "exploitation_impact": "very-high", + "title": "\u003cb\u003eExample Individual Risk\u003c/b\u003e at \u003cb\u003eContract Filesystem\u003c/b\u003e", + "synthetic_id": "something-strange@contract-fileserver", + "most_relevant_data_asset": "", + "most_relevant_technical_asset": "contract-fileserver", + "most_relevant_trust_boundary": "", + "most_relevant_shared_runtime": "", + "most_relevant_communication_link": "", + "data_breach_probability": "improbable", + "data_breach_technical_assets": null + } + ] + } +} \ No newline at end of file From 324c6398e60e3f92b725e81495d2b9ac81cae7b9 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Wed, 27 Dec 2023 00:36:54 +0000 Subject: [PATCH 24/68] Move server related files into package --- internal/threagile/context.go | 2362 +-------------------------------- pkg/server/execute.go | 228 ++++ pkg/server/hash.go | 35 + pkg/server/model.go | 1374 +++++++++++++++++++ pkg/server/report.go | 177 +++ pkg/server/server.go | 322 +++++ pkg/server/token.go | 297 +++++ pkg/server/zip.go | 119 ++ 8 files changed, 2581 insertions(+), 2333 deletions(-) create mode 100644 pkg/server/execute.go create mode 100644 pkg/server/hash.go create mode 100644 pkg/server/model.go create mode 100644 pkg/server/report.go create mode 100644 pkg/server/server.go create mode 100644 pkg/server/token.go create mode 100644 pkg/server/zip.go diff --git a/internal/threagile/context.go b/internal/threagile/context.go index 069a42bd..20e57f6e 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -1,16 +1,8 @@ package threagile import ( - "archive/zip" "bufio" - "bytes" - "compress/gzip" - "crypto/aes" - "crypto/cipher" - "crypto/rand" "crypto/sha256" - "crypto/sha512" - "encoding/base64" "encoding/hex" "errors" "flag" @@ -18,7 +10,6 @@ import ( "hash/fnv" "io" "log" - "net/http" "os" "os/exec" "path/filepath" @@ 
-27,10 +18,9 @@ import ( "sort" "strconv" "strings" - "sync" - "time" "github.com/threagile/threagile/pkg/common" + "github.com/threagile/threagile/pkg/server" addbuildpipeline "github.com/threagile/threagile/pkg/macros/built-in/add-build-pipeline" addvault "github.com/threagile/threagile/pkg/macros/built-in/add-vault" @@ -39,11 +29,8 @@ import ( seedrisktracking "github.com/threagile/threagile/pkg/macros/built-in/seed-risk-tracking" seedtags "github.com/threagile/threagile/pkg/macros/built-in/seed-tags" - "golang.org/x/crypto/argon2" "gopkg.in/yaml.v3" - "github.com/gin-gonic/gin" - "github.com/google/uuid" "github.com/threagile/threagile/pkg/colors" "github.com/threagile/threagile/pkg/docs" "github.com/threagile/threagile/pkg/input" @@ -69,7 +56,6 @@ type Context struct { drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks bool buildTimestamp string - globalLock sync.Mutex modelInput input.ModelInput // TODO: remove refactoring note below @@ -269,196 +255,6 @@ func (context *Context) applyRiskGeneration() { } } -// Unzip will decompress a zip archive, moving all files and folders -// within the zip file (parameter 1) to an output directory (parameter 2). -func (context *Context) unzip(src string, dest string) ([]string, error) { - var filenames []string - - r, err := zip.OpenReader(src) - if err != nil { - return filenames, err - } - defer func() { _ = r.Close() }() - - for _, f := range r.File { - // Store filename/path for returning and using later on - path := filepath.Join(dest, f.Name) - // Check for ZipSlip. 
More Info: http://bit.ly/2MsjAWE - if !strings.HasPrefix(path, filepath.Clean(dest)+string(os.PathSeparator)) { - return filenames, fmt.Errorf("%s: illegal file path", path) - } - filenames = append(filenames, path) - if f.FileInfo().IsDir() { - // Make Folder - _ = os.MkdirAll(path, os.ModePerm) - continue - } - // Make File - if err = os.MkdirAll(filepath.Dir(path), os.ModePerm); err != nil { - return filenames, err - } - if path != filepath.Clean(path) { - return filenames, fmt.Errorf("weird file path %v", path) - } - outFile, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode()) - if err != nil { - return filenames, err - } - rc, err := f.Open() - if err != nil { - return filenames, err - } - _, err = io.Copy(outFile, rc) - // Close the file without defer to close before next iteration of loop - _ = outFile.Close() - _ = rc.Close() - if err != nil { - return filenames, err - } - } - return filenames, nil -} - -// ZipFiles compresses one or many files into a single zip archive file. -// Param 1: filename is the output zip file's name. -// Param 2: files is a list of files to add to the zip. 
-func (context *Context) zipFiles(filename string, files []string) error { - newZipFile, err := os.Create(filename) - if err != nil { - return err - } - defer func() { _ = newZipFile.Close() }() - - zipWriter := zip.NewWriter(newZipFile) - defer func() { _ = zipWriter.Close() }() - - // Add files to zip - for _, file := range files { - if err = context.addFileToZip(zipWriter, file); err != nil { - return err - } - } - return nil -} - -func (context *Context) addFileToZip(zipWriter *zip.Writer, filename string) error { - fileToZip, err := os.Open(filename) - if err != nil { - return err - } - defer func() { _ = fileToZip.Close() }() - - // Get the file information - info, err := fileToZip.Stat() - if err != nil { - return err - } - - header, err := zip.FileInfoHeader(info) - if err != nil { - return err - } - - // Using FileInfoHeader() above only uses the basename of the file. If we want - // to preserve the folder structure we can overwrite this with the full path. - //header.Name = filename - - // Change to deflate to gain better compression - // see http://golang.org/pkg/archive/zip/#pkg-constants - header.Method = zip.Deflate - - writer, err := zipWriter.CreateHeader(header) - if err != nil { - return err - } - _, err = io.Copy(writer, fileToZip) - return err -} - -func (context *Context) analyzeModelOnServerDirectly(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer func() { - context.unlockFolder(folderNameOfKey) - var err error - if r := recover(); r != nil { - err = r.(error) - if *context.verbose { - log.Println(err) - } - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": strings.TrimSpace(err.Error()), - }) - ok = false - } - }() - - dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(context.DefaultGraphvizDPI))) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - - _, 
yamlText, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if !ok { - return - } - tmpModelFile, err := os.CreateTemp(*context.tempFolder, "threagile-direct-analyze-*") - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - defer func() { _ = os.Remove(tmpModelFile.Name()) }() - tmpOutputDir, err := os.MkdirTemp(*context.tempFolder, "threagile-direct-analyze-") - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - defer func() { _ = os.RemoveAll(tmpOutputDir) }() - tmpResultFile, err := os.CreateTemp(*context.tempFolder, "threagile-result-*.zip") - checkErr(err) - defer func() { _ = os.Remove(tmpResultFile.Name()) }() - - err = os.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) - - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, true, true, true, true, true, true, true, true, dpi) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - err = os.WriteFile(filepath.Join(tmpOutputDir, context.inputFile), []byte(yamlText), 0400) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - - files := []string{ - filepath.Join(tmpOutputDir, context.inputFile), - filepath.Join(tmpOutputDir, context.dataFlowDiagramFilenamePNG), - filepath.Join(tmpOutputDir, context.dataAssetDiagramFilenamePNG), - filepath.Join(tmpOutputDir, context.reportFilename), - filepath.Join(tmpOutputDir, context.excelRisksFilename), - filepath.Join(tmpOutputDir, context.excelTagsFilename), - filepath.Join(tmpOutputDir, context.jsonRisksFilename), - filepath.Join(tmpOutputDir, context.jsonTechnicalAssetsFilename), - filepath.Join(tmpOutputDir, context.jsonStatsFilename), - } - if context.keepDiagramSourceFiles { - files = append(files, filepath.Join(tmpOutputDir, context.dataFlowDiagramFilenameDOT)) - files = append(files, filepath.Join(tmpOutputDir, context.dataAssetDiagramFilenameDOT)) - } - err = context.zipFiles(tmpResultFile.Name(), files) - 
checkErr(err) - if *context.verbose { - fmt.Println("Streaming back result file: " + tmpResultFile.Name()) - } - ginContext.FileAttachment(tmpResultFile.Name(), "threagile-result.zip") -} - func (context *Context) writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.File { if *context.verbose { fmt.Println("Writing data flow diagram input") @@ -1236,2135 +1032,35 @@ func (context *Context) applyRAA() string { return runner.ErrorOutput } -func (context *Context) analyze(ginContext *gin.Context) { - context.execute(ginContext, false) -} - -func (context *Context) check(ginContext *gin.Context) { - _, ok := context.execute(ginContext, true) - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "model is ok", - }) - } -} - -func (context *Context) execute(ginContext *gin.Context, dryRun bool) (yamlContent []byte, ok bool) { - defer func() { - var err error - if r := recover(); r != nil { - context.errorCount++ - err = r.(error) - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": strings.TrimSpace(err.Error()), - }) - ok = false - } - }() - - dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(context.DefaultGraphvizDPI))) - checkErr(err) - - fileUploaded, header, err := ginContext.Request.FormFile("file") - checkErr(err) - - if header.Size > 50000000 { - msg := "maximum model upload file size exceeded (denial-of-service protection)" - log.Println(msg) - ginContext.JSON(http.StatusRequestEntityTooLarge, gin.H{ - "error": msg, - }) - return yamlContent, false - } - - filenameUploaded := strings.TrimSpace(header.Filename) - - tmpInputDir, err := os.MkdirTemp(*context.tempFolder, "threagile-input-") - checkErr(err) - defer func() { _ = os.RemoveAll(tmpInputDir) }() - - tmpModelFile, err := os.CreateTemp(tmpInputDir, "threagile-model-*") - checkErr(err) - defer func() { _ = os.Remove(tmpModelFile.Name()) }() - _, err = io.Copy(tmpModelFile, fileUploaded) - checkErr(err) - - yamlFile := 
tmpModelFile.Name() - - if strings.ToLower(filepath.Ext(filenameUploaded)) == ".zip" { - // unzip first (including the resources like images etc.) - if *context.verbose { - fmt.Println("Decompressing uploaded archive") - } - filenamesUnzipped, err := context.unzip(tmpModelFile.Name(), tmpInputDir) - checkErr(err) - found := false - for _, name := range filenamesUnzipped { - if strings.ToLower(filepath.Ext(name)) == ".yaml" { - yamlFile = name - found = true - break - } - } - if !found { - panic(errors.New("no yaml file found in uploaded archive")) - } - } - - tmpOutputDir, err := os.MkdirTemp(*context.tempFolder, "threagile-output-") - checkErr(err) - defer func() { _ = os.RemoveAll(tmpOutputDir) }() - - tmpResultFile, err := os.CreateTemp(*context.tempFolder, "threagile-result-*.zip") - checkErr(err) - defer func() { _ = os.Remove(tmpResultFile.Name()) }() - - if dryRun { - context.doItViaRuntimeCall(yamlFile, tmpOutputDir, false, false, false, false, false, true, true, true, 40) - } else { - context.doItViaRuntimeCall(yamlFile, tmpOutputDir, true, true, true, true, true, true, true, true, dpi) - } - checkErr(err) - - yamlContent, err = os.ReadFile(yamlFile) - checkErr(err) - err = os.WriteFile(filepath.Join(tmpOutputDir, context.inputFile), yamlContent, 0400) - checkErr(err) - - if !dryRun { - files := []string{ - filepath.Join(tmpOutputDir, context.inputFile), - filepath.Join(tmpOutputDir, context.dataFlowDiagramFilenamePNG), - filepath.Join(tmpOutputDir, context.dataAssetDiagramFilenamePNG), - filepath.Join(tmpOutputDir, context.reportFilename), - filepath.Join(tmpOutputDir, context.excelRisksFilename), - filepath.Join(tmpOutputDir, context.excelTagsFilename), - filepath.Join(tmpOutputDir, context.jsonRisksFilename), - filepath.Join(tmpOutputDir, context.jsonTechnicalAssetsFilename), - filepath.Join(tmpOutputDir, context.jsonStatsFilename), - } - if context.keepDiagramSourceFiles { - files = append(files, filepath.Join(tmpOutputDir, 
context.dataFlowDiagramFilenameDOT)) - files = append(files, filepath.Join(tmpOutputDir, context.dataAssetDiagramFilenameDOT)) - } - err = context.zipFiles(tmpResultFile.Name(), files) - checkErr(err) - if *context.verbose { - log.Println("Streaming back result file: " + tmpResultFile.Name()) - } - ginContext.FileAttachment(tmpResultFile.Name(), "threagile-result.zip") - } - context.successCount++ - return yamlContent, true -} - -// ultimately to avoid any in-process memory and/or data leaks by the used third party libs like PDF generation: exec and quit -func (context *Context) doItViaRuntimeCall(modelFile string, outputDir string, - generateDataFlowDiagram, generateDataAssetDiagram, generateReportPdf, generateRisksExcel, generateTagsExcel, generateRisksJSON, generateTechnicalAssetsJSON, generateStatsJSON bool, - dpi int) { - // Remember to also add the same args to the exec based sub-process calls! - var cmd *exec.Cmd - args := []string{"-model", modelFile, "-output", outputDir, "-execute-model-macro", *context.executeModelMacro, "-raa-run", *context.raaPlugin, "-custom-risk-rules-plugins", *context.riskRulesPlugins, "-skip-risk-rules", *context.skipRiskRules, "-diagram-dpi", strconv.Itoa(dpi)} - if *context.verbose { - args = append(args, "-verbose") - } - if *context.ignoreOrphanedRiskTracking { // TODO why add all them as arguments, when they are also variables on outer level? 
- args = append(args, "-ignore-orphaned-risk-tracking") - } - if generateDataFlowDiagram { - args = append(args, "-generate-data-flow-diagram") - } - if generateDataAssetDiagram { - args = append(args, "-generate-data-asset-diagram") - } - if generateReportPdf { - args = append(args, "-generate-report-pdf") - } - if generateRisksExcel { - args = append(args, "-generate-risks-excel") - } - if generateTagsExcel { - args = append(args, "-generate-tags-excel") - } - if generateRisksJSON { - args = append(args, "-generate-risks-json") - } - if generateTechnicalAssetsJSON { - args = append(args, "-generate-technical-assets-json") - } - if generateStatsJSON { - args = append(args, "-generate-stats-json") - } - self, nameError := os.Executable() - if nameError != nil { - panic(nameError) - } - cmd = exec.Command(self, args...) - out, err := cmd.CombinedOutput() - if err != nil { - panic(errors.New(string(out))) - } else { - if *context.verbose && len(out) > 0 { - fmt.Println("---") - fmt.Print(string(out)) - fmt.Println("---") - } - } -} - func (context *Context) StartServer() { - router := gin.Default() - router.LoadHTMLGlob("server/static/*.html") // <== - router.GET("/", func(c *gin.Context) { - c.HTML(http.StatusOK, "index.html", gin.H{}) - }) - router.HEAD("/", func(c *gin.Context) { - c.HTML(http.StatusOK, "index.html", gin.H{}) - }) - router.StaticFile("/threagile.png", "server/static/threagile.png") // <== - router.StaticFile("/site.webmanifest", "server/static/site.webmanifest") - router.StaticFile("/favicon.ico", "server/static/favicon.ico") - router.StaticFile("/favicon-32x32.png", "server/static/favicon-32x32.png") - router.StaticFile("/favicon-16x16.png", "server/static/favicon-16x16.png") - router.StaticFile("/apple-touch-icon.png", "server/static/apple-touch-icon.png") - router.StaticFile("/android-chrome-512x512.png", "server/static/android-chrome-512x512.png") - router.StaticFile("/android-chrome-192x192.png", "server/static/android-chrome-192x192.png") - 
- router.StaticFile("/schema.json", "schema.json") - router.StaticFile("/live-templates.txt", "live-templates.txt") - router.StaticFile("/openapi.yaml", "openapi.yaml") - router.StaticFile("/swagger-ui/", "server/static/swagger-ui/index.html") - router.StaticFile("/swagger-ui/index.html", "server/static/swagger-ui/index.html") - router.StaticFile("/swagger-ui/oauth2-redirect.html", "server/static/swagger-ui/oauth2-redirect.html") - router.StaticFile("/swagger-ui/swagger-ui.css", "server/static/swagger-ui/swagger-ui.css") - router.StaticFile("/swagger-ui/swagger-ui.js", "server/static/swagger-ui/swagger-ui.js") - router.StaticFile("/swagger-ui/swagger-ui-bundle.js", "server/static/swagger-ui/swagger-ui-bundle.js") - router.StaticFile("/swagger-ui/swagger-ui-standalone-preset.js", "server/static/swagger-ui/swagger-ui-standalone-preset.js") // <== - - router.GET("/threagile-example-model.yaml", context.exampleFile) - router.GET("/threagile-stub-model.yaml", context.stubFile) - - router.GET("/meta/ping", func(c *gin.Context) { - c.JSON(200, gin.H{ - "message": "pong", - }) - }) - router.GET("/meta/version", func(c *gin.Context) { - c.JSON(200, gin.H{ - "version": docs.ThreagileVersion, - "build_timestamp": context.buildTimestamp, - }) - }) - router.GET("/meta/types", func(c *gin.Context) { - c.JSON(200, gin.H{ - "quantity": context.arrayOfStringValues(types.QuantityValues()), - "confidentiality": context.arrayOfStringValues(types.ConfidentialityValues()), - "criticality": context.arrayOfStringValues(types.CriticalityValues()), - "technical_asset_type": context.arrayOfStringValues(types.TechnicalAssetTypeValues()), - "technical_asset_size": context.arrayOfStringValues(types.TechnicalAssetSizeValues()), - "authorization": context.arrayOfStringValues(types.AuthorizationValues()), - "authentication": context.arrayOfStringValues(types.AuthenticationValues()), - "usage": context.arrayOfStringValues(types.UsageValues()), - "encryption": 
context.arrayOfStringValues(types.EncryptionStyleValues()), - "data_format": context.arrayOfStringValues(types.DataFormatValues()), - "protocol": context.arrayOfStringValues(types.ProtocolValues()), - "technical_asset_technology": context.arrayOfStringValues(types.TechnicalAssetTechnologyValues()), - "technical_asset_machine": context.arrayOfStringValues(types.TechnicalAssetMachineValues()), - "trust_boundary_type": context.arrayOfStringValues(types.TrustBoundaryTypeValues()), - "data_breach_probability": context.arrayOfStringValues(types.DataBreachProbabilityValues()), - "risk_severity": context.arrayOfStringValues(types.RiskSeverityValues()), - "risk_exploitation_likelihood": context.arrayOfStringValues(types.RiskExploitationLikelihoodValues()), - "risk_exploitation_impact": context.arrayOfStringValues(types.RiskExploitationImpactValues()), - "risk_function": context.arrayOfStringValues(types.RiskFunctionValues()), - "risk_status": context.arrayOfStringValues(types.RiskStatusValues()), - "stride": context.arrayOfStringValues(types.STRIDEValues()), - }) - }) - - // TODO router.GET("/meta/risk-rules", listRiskRules) - // TODO router.GET("/meta/model-macros", listModelMacros) - - router.GET("/meta/stats", context.stats) - - router.POST("/direct/analyze", context.analyze) - router.POST("/direct/check", context.check) - router.GET("/direct/stub", context.stubFile) - - router.POST("/auth/keys", context.createKey) - router.DELETE("/auth/keys", context.deleteKey) - router.POST("/auth/tokens", context.createToken) - router.DELETE("/auth/tokens", context.deleteToken) - - router.POST("/models", context.createNewModel) - router.GET("/models", context.listModels) - router.DELETE("/models/:model-id", context.deleteModel) - router.GET("/models/:model-id", context.getModel) - router.PUT("/models/:model-id", context.importModel) - router.GET("/models/:model-id/data-flow-diagram", context.streamDataFlowDiagram) - router.GET("/models/:model-id/data-asset-diagram", 
context.streamDataAssetDiagram) - router.GET("/models/:model-id/report-pdf", context.streamReportPDF) - router.GET("/models/:model-id/risks-excel", context.streamRisksExcel) - router.GET("/models/:model-id/tags-excel", context.streamTagsExcel) - router.GET("/models/:model-id/risks", context.streamRisksJSON) - router.GET("/models/:model-id/technical-assets", context.streamTechnicalAssetsJSON) - router.GET("/models/:model-id/stats", context.streamStatsJSON) - router.GET("/models/:model-id/analysis", context.analyzeModelOnServerDirectly) - - router.GET("/models/:model-id/cover", context.getCover) - router.PUT("/models/:model-id/cover", context.setCover) - router.GET("/models/:model-id/overview", context.getOverview) - router.PUT("/models/:model-id/overview", context.setOverview) - //router.GET("/models/:model-id/questions", getQuestions) - //router.PUT("/models/:model-id/questions", setQuestions) - router.GET("/models/:model-id/abuse-cases", context.getAbuseCases) - router.PUT("/models/:model-id/abuse-cases", context.setAbuseCases) - router.GET("/models/:model-id/security-requirements", context.getSecurityRequirements) - router.PUT("/models/:model-id/security-requirements", context.setSecurityRequirements) - //router.GET("/models/:model-id/tags", getTags) - //router.PUT("/models/:model-id/tags", setTags) - - router.GET("/models/:model-id/data-assets", context.getDataAssets) - router.POST("/models/:model-id/data-assets", context.createNewDataAsset) - router.GET("/models/:model-id/data-assets/:data-asset-id", context.getDataAsset) - router.PUT("/models/:model-id/data-assets/:data-asset-id", context.setDataAsset) - router.DELETE("/models/:model-id/data-assets/:data-asset-id", context.deleteDataAsset) - - router.GET("/models/:model-id/trust-boundaries", context.getTrustBoundaries) - // router.POST("/models/:model-id/trust-boundaries", createNewTrustBoundary) - // router.GET("/models/:model-id/trust-boundaries/:trust-boundary-id", getTrustBoundary) - // 
router.PUT("/models/:model-id/trust-boundaries/:trust-boundary-id", setTrustBoundary) - // router.DELETE("/models/:model-id/trust-boundaries/:trust-boundary-id", deleteTrustBoundary) - - router.GET("/models/:model-id/shared-runtimes", context.getSharedRuntimes) - router.POST("/models/:model-id/shared-runtimes", context.createNewSharedRuntime) - router.GET("/models/:model-id/shared-runtimes/:shared-runtime-id", context.getSharedRuntime) - router.PUT("/models/:model-id/shared-runtimes/:shared-runtime-id", context.setSharedRuntime) - router.DELETE("/models/:model-id/shared-runtimes/:shared-runtime-id", context.deleteSharedRuntime) - - fmt.Println("Threagile server running...") - _ = router.Run(":" + strconv.Itoa(*context.serverPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified -} - -func (context *Context) exampleFile(ginContext *gin.Context) { - example, err := os.ReadFile(filepath.Join(*context.appFolder, "threagile-example-model.yaml")) - checkErr(err) - ginContext.Data(http.StatusOK, gin.MIMEYAML, example) -} - -func (context *Context) stubFile(ginContext *gin.Context) { - stub, err := os.ReadFile(filepath.Join(*context.appFolder, "threagile-stub-model.yaml")) - checkErr(err) - ginContext.Data(http.StatusOK, gin.MIMEYAML, context.addSupportedTags(stub)) // TODO use also the MIMEYAML way of serving YAML in model export? 
-} - -func (context *Context) addSupportedTags(input []byte) []byte { - // add distinct tags as "tags_available" - supportedTags := make(map[string]bool) - for _, customRule := range context.customRiskRules { - for _, tag := range customRule.Tags { - supportedTags[strings.ToLower(tag)] = true - } - } - - for _, rule := range risks.GetBuiltInRiskRules() { - for _, tag := range rule.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - } - - tags := make([]string, 0, len(supportedTags)) - for t := range supportedTags { - tags = append(tags, t) - } - if len(tags) == 0 { - return input - } - sort.Strings(tags) - if *context.verbose { - fmt.Print("Supported tags of all risk rules: ") - for i, tag := range tags { - if i > 0 { - fmt.Print(", ") - } - fmt.Print(tag) - } - fmt.Println() - } - replacement := "tags_available:" - for _, tag := range tags { - replacement += "\n - " + tag - } - return []byte(strings.Replace(string(input), "tags_available:", replacement, 1)) -} - -var mapFolderNameToTokenHash = make(map[string]string) - -const keySize = 32 - -func (context *Context) createToken(ginContext *gin.Context) { - folderName, key, ok := context.checkKeyToFolderName(ginContext) - if !ok { - return - } - context.globalLock.Lock() - defer context.globalLock.Unlock() - if tokenHash, exists := mapFolderNameToTokenHash[folderName]; exists { - // invalidate previous token - delete(mapTokenHashToTimeoutStruct, tokenHash) - } - // create a strong random 256 bit value (used to xor) - xorBytesArr := make([]byte, keySize) - n, err := rand.Read(xorBytesArr[:]) - if n != keySize || err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to create token", - }) - return - } - now := time.Now().UnixNano() - token := xor(key, xorBytesArr) - tokenHash := hashSHA256(token) - housekeepingTokenMaps() - mapTokenHashToTimeoutStruct[tokenHash] = timeoutStruct{ - xorRand: xorBytesArr, - createdNanoTime: now, - 
lastAccessedNanoTime: now, - } - mapFolderNameToTokenHash[folderName] = tokenHash - ginContext.JSON(http.StatusCreated, gin.H{ - "token": base64.RawURLEncoding.EncodeToString(token[:]), - }) -} - -type tokenHeader struct { - Token string `header:"token"` -} - -func (context *Context) deleteToken(ginContext *gin.Context) { - header := tokenHeader{} - if err := ginContext.ShouldBindHeader(&header); err != nil { - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", - }) - return - } - token, err := base64.RawURLEncoding.DecodeString(strings.TrimSpace(header.Token)) - if len(token) == 0 || err != nil { - if err != nil { - log.Println(err) - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", - }) - return - } - context.globalLock.Lock() - defer context.globalLock.Unlock() - deleteTokenHashFromMaps(hashSHA256(token)) - ginContext.JSON(http.StatusOK, gin.H{ - "message": "token deleted", - }) -} - -type responseType int - -const ( - dataFlowDiagram responseType = iota - dataAssetDiagram - reportPDF - risksExcel - tagsExcel - risksJSON - technicalAssetsJSON - statsJSON -) - -func (context *Context) streamDataFlowDiagram(ginContext *gin.Context) { - context.streamResponse(ginContext, dataFlowDiagram) -} - -func (context *Context) streamDataAssetDiagram(ginContext *gin.Context) { - context.streamResponse(ginContext, dataAssetDiagram) -} - -func (context *Context) streamReportPDF(ginContext *gin.Context) { - context.streamResponse(ginContext, reportPDF) -} - -func (context *Context) streamRisksExcel(ginContext *gin.Context) { - context.streamResponse(ginContext, risksExcel) -} - -func (context *Context) streamTagsExcel(ginContext *gin.Context) { - context.streamResponse(ginContext, tagsExcel) -} - -func (context *Context) streamRisksJSON(ginContext *gin.Context) { - context.streamResponse(ginContext, risksJSON) -} - -func (context *Context) streamTechnicalAssetsJSON(ginContext *gin.Context) { - context.streamResponse(ginContext, 
technicalAssetsJSON) -} - -func (context *Context) streamStatsJSON(ginContext *gin.Context) { - context.streamResponse(ginContext, statsJSON) -} - -func (context *Context) streamResponse(ginContext *gin.Context, responseType responseType) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer func() { - context.unlockFolder(folderNameOfKey) - var err error - if r := recover(); r != nil { - err = r.(error) - if *context.verbose { - log.Println(err) - } - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": strings.TrimSpace(err.Error()), - }) - ok = false - } - }() - dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(context.DefaultGraphvizDPI))) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - _, yamlText, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if !ok { - return - } - tmpModelFile, err := os.CreateTemp(*context.tempFolder, "threagile-render-*") - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - defer func() { _ = os.Remove(tmpModelFile.Name()) }() - tmpOutputDir, err := os.MkdirTemp(*context.tempFolder, "threagile-render-") - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - defer func() { _ = os.RemoveAll(tmpOutputDir) }() - err = os.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) - if responseType == dataFlowDiagram { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, true, false, false, false, false, false, false, false, dpi) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - ginContext.File(filepath.Join(tmpOutputDir, context.dataFlowDiagramFilenamePNG)) - } else if responseType == dataAssetDiagram { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, true, false, false, false, false, false, false, dpi) - if err != nil { - 
handleErrorInServiceCall(err, ginContext) - return - } - ginContext.File(filepath.Join(tmpOutputDir, context.dataAssetDiagramFilenamePNG)) - } else if responseType == reportPDF { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, true, false, false, false, false, false, dpi) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - ginContext.FileAttachment(filepath.Join(tmpOutputDir, context.reportFilename), context.reportFilename) - } else if responseType == risksExcel { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, true, false, false, false, false, dpi) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - ginContext.FileAttachment(filepath.Join(tmpOutputDir, context.excelRisksFilename), context.excelRisksFilename) - } else if responseType == tagsExcel { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, true, false, false, false, dpi) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - ginContext.FileAttachment(filepath.Join(tmpOutputDir, context.excelTagsFilename), context.excelTagsFilename) - } else if responseType == risksJSON { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, true, false, false, dpi) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, context.jsonRisksFilename)) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download - } else if responseType == technicalAssetsJSON { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, true, true, false, dpi) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - jsonData, err := 
os.ReadFile(filepath.Join(tmpOutputDir, context.jsonTechnicalAssetsFilename)) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download - } else if responseType == statsJSON { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, false, false, true, dpi) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, context.jsonStatsFilename)) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download - } -} - -// fully replaces threagile.yaml in sub-folder given by UUID -func (context *Context) importModel(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - - aUuid := ginContext.Param("model-id") // UUID is syntactically validated in readModel+checkModelFolder (next line) via uuid.Parse(modelUUID) - _, _, ok = context.readModel(ginContext, aUuid, key, folderNameOfKey) - if ok { - // first analyze it simply by executing the full risk process (just discard the result) to ensure that everything would work - yamlContent, ok := context.execute(ginContext, true) - if ok { - // if we're here, then no problem was raised, so ok to proceed - ok = context.writeModelYAML(ginContext, string(yamlContent), key, folderNameForModel(folderNameOfKey, aUuid), "Model Import", false) - if ok { - ginContext.JSON(http.StatusCreated, gin.H{ - "message": "model imported", - }) - } - } - } -} - -func (context *Context) stats(ginContext *gin.Context) { - keyCount, modelCount := 0, 0 - keyFolders, err := 
os.ReadDir(filepath.Join(*context.serverFolder, context.keyDir)) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to collect stats", - }) - return - } - for _, keyFolder := range keyFolders { - if len(keyFolder.Name()) == 128 { // it's a sha512 token hash probably, so count it as token folder for the stats - keyCount++ - if keyFolder.Name() != filepath.Clean(keyFolder.Name()) { - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "weird file path", - }) - return - } - modelFolders, err := os.ReadDir(filepath.Join(*context.serverFolder, context.keyDir, keyFolder.Name())) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to collect stats", - }) - return - } - for _, modelFolder := range modelFolders { - if len(modelFolder.Name()) == 36 { // it's a uuid model folder probably, so count it as model folder for the stats - modelCount++ - } - } - } - } - // TODO collect and deliver more stats (old model count?) 
and health info - ginContext.JSON(http.StatusOK, gin.H{ - "key_count": keyCount, - "model_count": modelCount, - "success_count": context.successCount, - "error_count": context.errorCount, - }) -} - -func (context *Context) getDataAsset(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, dataAsset := range modelInput.DataAssets { - if dataAsset.ID == ginContext.Param("data-asset-id") { - ginContext.JSON(http.StatusOK, gin.H{ - title: dataAsset, - }) - return - } - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "data asset not found", - }) - } -} - -func (context *Context) deleteDataAsset(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - referencesDeleted := false - // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, dataAsset := range modelInput.DataAssets { - if dataAsset.ID == ginContext.Param("data-asset-id") { - // also remove all usages of this data asset !! 
- for _, techAsset := range modelInput.TechnicalAssets { - if techAsset.DataAssetsProcessed != nil { - for i, parsedChangeCandidateAsset := range techAsset.DataAssetsProcessed { - referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) - if referencedAsset == dataAsset.ID { // apply the removal - referencesDeleted = true - // Remove the element at index i - // TODO needs more testing - copy(techAsset.DataAssetsProcessed[i:], techAsset.DataAssetsProcessed[i+1:]) // Shift a[i+1:] left one index. - techAsset.DataAssetsProcessed[len(techAsset.DataAssetsProcessed)-1] = "" // Erase last element (write zero value). - techAsset.DataAssetsProcessed = techAsset.DataAssetsProcessed[:len(techAsset.DataAssetsProcessed)-1] // Truncate slice. - } - } - } - if techAsset.DataAssetsStored != nil { - for i, parsedChangeCandidateAsset := range techAsset.DataAssetsStored { - referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) - if referencedAsset == dataAsset.ID { // apply the removal - referencesDeleted = true - // Remove the element at index i - // TODO needs more testing - copy(techAsset.DataAssetsStored[i:], techAsset.DataAssetsStored[i+1:]) // Shift a[i+1:] left one index. - techAsset.DataAssetsStored[len(techAsset.DataAssetsStored)-1] = "" // Erase last element (write zero value). - techAsset.DataAssetsStored = techAsset.DataAssetsStored[:len(techAsset.DataAssetsStored)-1] // Truncate slice. - } - } - } - if techAsset.CommunicationLinks != nil { - for title, commLink := range techAsset.CommunicationLinks { - for i, dataAssetSent := range commLink.DataAssetsSent { - referencedAsset := fmt.Sprintf("%v", dataAssetSent) - if referencedAsset == dataAsset.ID { // apply the removal - referencesDeleted = true - // Remove the element at index i - // TODO needs more testing - copy(techAsset.CommunicationLinks[title].DataAssetsSent[i:], techAsset.CommunicationLinks[title].DataAssetsSent[i+1:]) // Shift a[i+1:] left one index. 
- techAsset.CommunicationLinks[title].DataAssetsSent[len(techAsset.CommunicationLinks[title].DataAssetsSent)-1] = "" // Erase last element (write zero value). - x := techAsset.CommunicationLinks[title] - x.DataAssetsSent = techAsset.CommunicationLinks[title].DataAssetsSent[:len(techAsset.CommunicationLinks[title].DataAssetsSent)-1] // Truncate slice. - techAsset.CommunicationLinks[title] = x - } - } - for i, dataAssetReceived := range commLink.DataAssetsReceived { - referencedAsset := fmt.Sprintf("%v", dataAssetReceived) - if referencedAsset == dataAsset.ID { // apply the removal - referencesDeleted = true - // Remove the element at index i - // TODO needs more testing - copy(techAsset.CommunicationLinks[title].DataAssetsReceived[i:], techAsset.CommunicationLinks[title].DataAssetsReceived[i+1:]) // Shift a[i+1:] left one index. - techAsset.CommunicationLinks[title].DataAssetsReceived[len(techAsset.CommunicationLinks[title].DataAssetsReceived)-1] = "" // Erase last element (write zero value). - x := techAsset.CommunicationLinks[title] - x.DataAssetsReceived = techAsset.CommunicationLinks[title].DataAssetsReceived[:len(techAsset.CommunicationLinks[title].DataAssetsReceived)-1] // Truncate slice. 
- techAsset.CommunicationLinks[title] = x - } - } - } - } - } - for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { - if individualRiskCat.RisksIdentified != nil { - for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { - if individualRiskInstance.MostRelevantDataAsset == dataAsset.ID { // apply the removal - referencesDeleted = true - x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] - x.MostRelevantDataAsset = "" // TODO needs more testing - modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x - } - } - } - } - // remove it itself - delete(modelInput.DataAssets, title) - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Data Asset Deletion") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "data asset deleted", - "id": dataAsset.ID, - "references_deleted": referencesDeleted, // in order to signal to clients, that other model parts might've been deleted as well - }) - } - return - } - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "data asset not found", - }) - } -} - -type payloadSharedRuntime struct { - Title string `yaml:"title" json:"title"` - Id string `yaml:"id" json:"id"` - Description string `yaml:"description" json:"description"` - Tags []string `yaml:"tags" json:"tags"` - TechnicalAssetsRunning []string `yaml:"technical_assets_running" json:"technical_assets_running"` -} - -func (context *Context) setSharedRuntime(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - // yes, here keyed by title in YAML for better readability in the YAML file 
itself - for title, sharedRuntime := range modelInput.SharedRuntimes { - if sharedRuntime.ID == ginContext.Param("shared-runtime-id") { - payload := payloadSharedRuntime{} - err := ginContext.BindJSON(&payload) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - sharedRuntimeInput, ok := populateSharedRuntime(ginContext, payload) - if !ok { - return - } - // in order to also update the title, remove the shared runtime from the map and re-insert it (with new key) - delete(modelInput.SharedRuntimes, title) - modelInput.SharedRuntimes[payload.Title] = sharedRuntimeInput - idChanged := sharedRuntimeInput.ID != sharedRuntime.ID - if idChanged { // ID-CHANGE-PROPAGATION - for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { - if individualRiskCat.RisksIdentified != nil { - for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { - if individualRiskInstance.MostRelevantSharedRuntime == sharedRuntime.ID { // apply the ID change - x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] - x.MostRelevantSharedRuntime = sharedRuntimeInput.ID // TODO needs more testing - modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x - } - } - } - } - } - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Shared Runtime Update") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "shared runtime updated", - "id": sharedRuntimeInput.ID, - "id_changed": idChanged, // in order to signal to clients, that other model parts might've received updates as well and should be reloaded - }) - } - return - } - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "shared runtime not found", - }) - } -} - -type payloadDataAsset struct { - Title string `yaml:"title" json:"title"` 
- Id string `yaml:"id" json:"id"` - Description string `yaml:"description" json:"description"` - Usage string `yaml:"usage" json:"usage"` - Tags []string `yaml:"tags" json:"tags"` - Origin string `yaml:"origin" json:"origin"` - Owner string `yaml:"owner" json:"owner"` - Quantity string `yaml:"quantity" json:"quantity"` - Confidentiality string `yaml:"confidentiality" json:"confidentiality"` - Integrity string `yaml:"integrity" json:"integrity"` - Availability string `yaml:"availability" json:"availability"` - JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` -} - -func (context *Context) setDataAsset(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, dataAsset := range modelInput.DataAssets { - if dataAsset.ID == ginContext.Param("data-asset-id") { - payload := payloadDataAsset{} - err := ginContext.BindJSON(&payload) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - dataAssetInput, ok := context.populateDataAsset(ginContext, payload) - if !ok { - return - } - // in order to also update the title, remove the asset from the map and re-insert it (with new key) - delete(modelInput.DataAssets, title) - modelInput.DataAssets[payload.Title] = dataAssetInput - idChanged := dataAssetInput.ID != dataAsset.ID - if idChanged { // ID-CHANGE-PROPAGATION - // also update all usages to point to the new (changed) ID !! 
- for techAssetTitle, techAsset := range modelInput.TechnicalAssets { - if techAsset.DataAssetsProcessed != nil { - for i, parsedChangeCandidateAsset := range techAsset.DataAssetsProcessed { - referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) - if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.TechnicalAssets[techAssetTitle].DataAssetsProcessed[i] = dataAssetInput.ID - } - } - } - if techAsset.DataAssetsStored != nil { - for i, parsedChangeCandidateAsset := range techAsset.DataAssetsStored { - referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) - if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.TechnicalAssets[techAssetTitle].DataAssetsStored[i] = dataAssetInput.ID - } - } - } - if techAsset.CommunicationLinks != nil { - for title, commLink := range techAsset.CommunicationLinks { - for i, dataAssetSent := range commLink.DataAssetsSent { - referencedAsset := fmt.Sprintf("%v", dataAssetSent) - if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.TechnicalAssets[techAssetTitle].CommunicationLinks[title].DataAssetsSent[i] = dataAssetInput.ID - } - } - for i, dataAssetReceived := range commLink.DataAssetsReceived { - referencedAsset := fmt.Sprintf("%v", dataAssetReceived) - if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.TechnicalAssets[techAssetTitle].CommunicationLinks[title].DataAssetsReceived[i] = dataAssetInput.ID - } - } - } - } - } - for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { - if individualRiskCat.RisksIdentified != nil { - for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { - if individualRiskInstance.MostRelevantDataAsset == dataAsset.ID { // apply the ID change - x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] - x.MostRelevantDataAsset = dataAssetInput.ID // TODO needs more testing - 
modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x - } - } - } - } - } - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Data Asset Update") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "data asset updated", - "id": dataAssetInput.ID, - "id_changed": idChanged, // in order to signal to clients, that other model parts might've received updates as well and should be reloaded - }) - } - return - } - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "data asset not found", - }) - } -} - -func (context *Context) getSharedRuntime(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, sharedRuntime := range modelInput.SharedRuntimes { - if sharedRuntime.ID == ginContext.Param("shared-runtime-id") { - ginContext.JSON(http.StatusOK, gin.H{ - title: sharedRuntime, - }) - return - } - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "shared runtime not found", - }) - } -} - -func (context *Context) createNewSharedRuntime(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - payload := payloadSharedRuntime{} - err := ginContext.BindJSON(&payload) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - // yes, here keyed by title in YAML for 
better readability in the YAML file itself - if _, exists := modelInput.SharedRuntimes[payload.Title]; exists { - ginContext.JSON(http.StatusConflict, gin.H{ - "error": "shared runtime with this title already exists", - }) - return - } - // but later it will in memory keyed by its "id", so do this uniqueness check also - for _, sharedRuntime := range modelInput.SharedRuntimes { - if sharedRuntime.ID == payload.Id { - ginContext.JSON(http.StatusConflict, gin.H{ - "error": "shared runtime with this id already exists", - }) - return - } - } - if !context.checkTechnicalAssetsExisting(modelInput, payload.TechnicalAssetsRunning) { - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "referenced technical asset does not exist", - }) - return - } - sharedRuntimeInput, ok := populateSharedRuntime(ginContext, payload) - if !ok { - return - } - if modelInput.SharedRuntimes == nil { - modelInput.SharedRuntimes = make(map[string]input.InputSharedRuntime) - } - modelInput.SharedRuntimes[payload.Title] = sharedRuntimeInput - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Shared Runtime Creation") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "shared runtime created", - "id": sharedRuntimeInput.ID, - }) - } - } -} - -func (context *Context) checkTechnicalAssetsExisting(modelInput input.ModelInput, techAssetIDs []string) (ok bool) { - for _, techAssetID := range techAssetIDs { - exists := false - for _, val := range modelInput.TechnicalAssets { - if val.ID == techAssetID { - exists = true - break - } - } - if !exists { - return false - } - } - return true -} - -func populateSharedRuntime(_ *gin.Context, payload payloadSharedRuntime) (sharedRuntimeInput input.InputSharedRuntime, ok bool) { - sharedRuntimeInput = input.InputSharedRuntime{ - ID: payload.Id, - Description: payload.Description, - Tags: lowerCaseAndTrim(payload.Tags), - TechnicalAssetsRunning: payload.TechnicalAssetsRunning, - } - return sharedRuntimeInput, true -} - -func 
(context *Context) deleteSharedRuntime(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - referencesDeleted := false - // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, sharedRuntime := range modelInput.SharedRuntimes { - if sharedRuntime.ID == ginContext.Param("shared-runtime-id") { - // also remove all usages of this shared runtime !! - for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { - if individualRiskCat.RisksIdentified != nil { - for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { - if individualRiskInstance.MostRelevantSharedRuntime == sharedRuntime.ID { // apply the removal - referencesDeleted = true - x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] - x.MostRelevantSharedRuntime = "" // TODO needs more testing - modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x - } - } - } - } - // remove it itself - delete(modelInput.SharedRuntimes, title) - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Shared Runtime Deletion") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "shared runtime deleted", - "id": sharedRuntime.ID, - "references_deleted": referencesDeleted, // in order to signal to clients, that other model parts might've been deleted as well - }) - } - return - } - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "shared runtime not found", - }) - } -} - -func (context *Context) createNewDataAsset(ginContext *gin.Context) { - folderNameOfKey, key, ok := 
context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - payload := payloadDataAsset{} - err := ginContext.BindJSON(&payload) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - // yes, here keyed by title in YAML for better readability in the YAML file itself - if _, exists := modelInput.DataAssets[payload.Title]; exists { - ginContext.JSON(http.StatusConflict, gin.H{ - "error": "data asset with this title already exists", - }) - return - } - // but later it will in memory keyed by its "id", so do this uniqueness check also - for _, asset := range modelInput.DataAssets { - if asset.ID == payload.Id { - ginContext.JSON(http.StatusConflict, gin.H{ - "error": "data asset with this id already exists", - }) - return - } - } - dataAssetInput, ok := context.populateDataAsset(ginContext, payload) - if !ok { - return - } - if modelInput.DataAssets == nil { - modelInput.DataAssets = make(map[string]input.InputDataAsset) - } - modelInput.DataAssets[payload.Title] = dataAssetInput - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Data Asset Creation") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "data asset created", - "id": dataAssetInput.ID, - }) - } - } -} - -func (context *Context) populateDataAsset(ginContext *gin.Context, payload payloadDataAsset) (dataAssetInput input.InputDataAsset, ok bool) { - usage, err := types.ParseUsage(payload.Usage) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return dataAssetInput, false - } - quantity, err := types.ParseQuantity(payload.Quantity) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return dataAssetInput, false - } - confidentiality, err := 
types.ParseConfidentiality(payload.Confidentiality) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return dataAssetInput, false - } - integrity, err := types.ParseCriticality(payload.Integrity) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return dataAssetInput, false - } - availability, err := types.ParseCriticality(payload.Availability) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return dataAssetInput, false - } - dataAssetInput = input.InputDataAsset{ - ID: payload.Id, - Description: payload.Description, - Usage: usage.String(), - Tags: lowerCaseAndTrim(payload.Tags), - Origin: payload.Origin, - Owner: payload.Owner, - Quantity: quantity.String(), - Confidentiality: confidentiality.String(), - Integrity: integrity.String(), - Availability: availability.String(), - JustificationCiaRating: payload.JustificationCiaRating, - } - return dataAssetInput, true -} - -func (context *Context) getDataAssets(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, aModel.DataAssets) - } -} - -func (context *Context) getTrustBoundaries(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, aModel.TrustBoundaries) - } -} - -func (context *Context) getSharedRuntimes(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer 
context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, aModel.SharedRuntimes) - } -} - -func (context *Context) arrayOfStringValues(values []types.TypeEnum) []string { - result := make([]string, 0) - for _, value := range values { - result = append(result, value.String()) - } - return result -} - -func (context *Context) getModel(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - _, yamlText, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - tmpResultFile, err := os.CreateTemp(*context.tempFolder, "threagile-*.yaml") - checkErr(err) - err = os.WriteFile(tmpResultFile.Name(), []byte(yamlText), 0400) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to stream model file", - }) - return - } - defer func() { _ = os.Remove(tmpResultFile.Name()) }() - ginContext.FileAttachment(tmpResultFile.Name(), context.inputFile) - } -} - -type payloadSecurityRequirements map[string]string - -func (context *Context) setSecurityRequirements(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - payload := payloadSecurityRequirements{} - err := ginContext.BindJSON(&payload) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - modelInput.SecurityRequirements = payload - ok = context.writeModel(ginContext, key, folderNameOfKey, 
&modelInput, "Security Requirements Update") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "model updated", - }) - } - } -} - -func (context *Context) getSecurityRequirements(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, aModel.SecurityRequirements) - } -} - -type payloadAbuseCases map[string]string - -func (context *Context) setAbuseCases(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - payload := payloadAbuseCases{} - err := ginContext.BindJSON(&payload) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - modelInput.AbuseCases = payload - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Abuse Cases Update") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "model updated", - }) - } - } -} - -func (context *Context) getAbuseCases(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, aModel.AbuseCases) - } -} - -type payloadOverview struct { - ManagementSummaryComment string `yaml:"management_summary_comment" json:"management_summary_comment"` - 
BusinessCriticality string `yaml:"business_criticality" json:"business_criticality"` - BusinessOverview input.Overview `yaml:"business_overview" json:"business_overview"` - TechnicalOverview input.Overview `yaml:"technical_overview" json:"technical_overview"` -} - -func (context *Context) setOverview(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - payload := payloadOverview{} - err := ginContext.BindJSON(&payload) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - criticality, err := types.ParseCriticality(payload.BusinessCriticality) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - modelInput.ManagementSummaryComment = payload.ManagementSummaryComment - modelInput.BusinessCriticality = criticality.String() - modelInput.BusinessOverview.Description = payload.BusinessOverview.Description - modelInput.BusinessOverview.Images = payload.BusinessOverview.Images - modelInput.TechnicalOverview.Description = payload.TechnicalOverview.Description - modelInput.TechnicalOverview.Images = payload.TechnicalOverview.Images - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Overview Update") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "model updated", - }) - } - } -} - -func handleErrorInServiceCall(err error, ginContext *gin.Context) { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": strings.TrimSpace(err.Error()), - }) -} - -func (context *Context) getOverview(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - 
context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "management_summary_comment": aModel.ManagementSummaryComment, - "business_criticality": aModel.BusinessCriticality, - "business_overview": aModel.BusinessOverview, - "technical_overview": aModel.TechnicalOverview, - }) - } -} - -type payloadCover struct { - Title string `yaml:"title" json:"title"` - Date time.Time `yaml:"date" json:"date"` - Author input.Author `yaml:"author" json:"author"` -} - -func (context *Context) setCover(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - payload := payloadCover{} - err := ginContext.BindJSON(&payload) - if err != nil { - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - modelInput.Title = payload.Title - if !payload.Date.IsZero() { - modelInput.Date = payload.Date.Format("2006-01-02") - } - modelInput.Author.Name = payload.Author.Name - modelInput.Author.Homepage = payload.Author.Homepage - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Cover Update") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "model updated", - }) - } - } -} - -func (context *Context) getCover(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - 
"title": aModel.Title, - "date": aModel.Date, - "author": aModel.Author, - }) - } -} - -// creates a sub-folder (named by a new UUID) inside the token folder -func (context *Context) createNewModel(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - ok = context.checkObjectCreationThrottler(ginContext, "MODEL") - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - - aUuid := uuid.New().String() - err := os.Mkdir(folderNameForModel(folderNameOfKey, aUuid), 0700) - if err != nil { - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to create model", - }) - return - } - - aYaml := `title: New Threat Model -threagile_version: ` + docs.ThreagileVersion + ` -author: - name: "" - homepage: "" -date: -business_overview: - description: "" - images: [] -technical_overview: - description: "" - images: [] -business_criticality: "" -management_summary_comment: "" -questions: {} -abuse_cases: {} -security_requirements: {} -tags_available: [] -data_assets: {} -technical_assets: {} -trust_boundaries: {} -shared_runtimes: {} -individual_risk_categories: {} -risk_tracking: {} -diagram_tweak_nodesep: "" -diagram_tweak_ranksep: "" -diagram_tweak_edge_layout: "" -diagram_tweak_suppress_edge_labels: false -diagram_tweak_invisible_connections_between_assets: [] -diagram_tweak_same_rank_assets: []` - - ok = context.writeModelYAML(ginContext, aYaml, key, folderNameForModel(folderNameOfKey, aUuid), "New Model Creation", true) - if ok { - ginContext.JSON(http.StatusCreated, gin.H{ - "message": "model created", - "id": aUuid, - }) - } -} - -type payloadModels struct { - ID string `yaml:"id" json:"id"` - Title string `yaml:"title" json:"title"` - TimestampCreated time.Time `yaml:"timestamp_created" json:"timestamp_created"` - TimestampModified time.Time `yaml:"timestamp_modified" json:"timestamp_modified"` -} - -func (context *Context) 
listModels(ginContext *gin.Context) { // TODO currently returns error when any model is no longer valid in syntax, so eventually have some fallback to not just bark on an invalid model... - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - - result := make([]payloadModels, 0) - modelFolders, err := os.ReadDir(folderNameOfKey) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", - }) - return - } - for _, dirEntry := range modelFolders { - if dirEntry.IsDir() { - modelStat, err := os.Stat(filepath.Join(folderNameOfKey, dirEntry.Name(), context.inputFile)) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "unable to list model", - }) - return - } - aModel, _, ok := context.readModel(ginContext, dirEntry.Name(), key, folderNameOfKey) - if !ok { - return - } - fileInfo, err := dirEntry.Info() - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "unable to get file info", - }) - return - } - result = append(result, payloadModels{ - ID: dirEntry.Name(), - Title: aModel.Title, - TimestampCreated: fileInfo.ModTime(), - TimestampModified: modelStat.ModTime(), - }) - } - } - ginContext.JSON(http.StatusOK, result) -} - -func (context *Context) deleteModel(ginContext *gin.Context) { - folderNameOfKey, _, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - folder, ok := context.checkModelFolder(ginContext, ginContext.Param("model-id"), folderNameOfKey) - if ok { - if folder != filepath.Clean(folder) { - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "model-id is weird", - }) - return - } - err := os.RemoveAll(folder) - if err != nil { - ginContext.JSON(http.StatusNotFound, gin.H{ - 
"error": "model not found", - }) - return - } - ginContext.JSON(http.StatusOK, gin.H{ - "message": "model deleted", - }) - } -} - -func (context *Context) checkModelFolder(ginContext *gin.Context, modelUUID string, folderNameOfKey string) (modelFolder string, ok bool) { - uuidParsed, err := uuid.Parse(modelUUID) - if err != nil { - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "model not found", - }) - return modelFolder, false - } - modelFolder = folderNameForModel(folderNameOfKey, uuidParsed.String()) - if _, err := os.Stat(modelFolder); os.IsNotExist(err) { - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "model not found", - }) - return modelFolder, false - } - return modelFolder, true -} - -func (context *Context) readModel(ginContext *gin.Context, modelUUID string, key []byte, folderNameOfKey string) (modelInputResult input.ModelInput, yamlText string, ok bool) { - modelFolder, ok := context.checkModelFolder(ginContext, modelUUID, folderNameOfKey) - if !ok { - return modelInputResult, yamlText, false - } - cryptoKey := context.generateKeyFromAlreadyStrongRandomInput(key) - block, err := aes.NewCipher(cryptoKey) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to open model", - }) - return modelInputResult, yamlText, false - } - aesGcm, err := cipher.NewGCM(block) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to open model", - }) - return modelInputResult, yamlText, false - } - - fileBytes, err := os.ReadFile(filepath.Join(modelFolder, context.inputFile)) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to open model", - }) - return modelInputResult, yamlText, false - } - - nonce := fileBytes[0:12] - ciphertext := fileBytes[12:] - plaintext, err := aesGcm.Open(nil, nonce, ciphertext, nil) - if err != nil { - log.Println(err) - 
ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to open model", - }) - return modelInputResult, yamlText, false - } - - r, err := gzip.NewReader(bytes.NewReader(plaintext)) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to open model", - }) - return modelInputResult, yamlText, false - } - buf := new(bytes.Buffer) - _, _ = buf.ReadFrom(r) - modelInput := new(input.ModelInput).Defaults() - yamlBytes := buf.Bytes() - err = yaml.Unmarshal(yamlBytes, &modelInput) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to open model", - }) - return modelInputResult, yamlText, false - } - return *modelInput, string(yamlBytes), true -} - -func (context *Context) writeModel(ginContext *gin.Context, key []byte, folderNameOfKey string, modelInput *input.ModelInput, changeReasonForHistory string) (ok bool) { - modelFolder, ok := context.checkModelFolder(ginContext, ginContext.Param("model-id"), folderNameOfKey) - if ok { - modelInput.ThreagileVersion = docs.ThreagileVersion - yamlBytes, err := yaml.Marshal(modelInput) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to write model", - }) - return false - } - /* - yamlBytes = model.ReformatYAML(yamlBytes) - */ - return context.writeModelYAML(ginContext, string(yamlBytes), key, modelFolder, changeReasonForHistory, false) - } - return false -} - -func (context *Context) writeModelYAML(ginContext *gin.Context, yaml string, key []byte, modelFolder string, changeReasonForHistory string, skipBackup bool) (ok bool) { - if *context.verbose { - fmt.Println("about to write " + strconv.Itoa(len(yaml)) + " bytes of yaml into model folder: " + modelFolder) - } - var b bytes.Buffer - w := gzip.NewWriter(&b) - _, _ = w.Write([]byte(yaml)) - _ = w.Close() - plaintext := b.Bytes() - cryptoKey := 
context.generateKeyFromAlreadyStrongRandomInput(key) - block, err := aes.NewCipher(cryptoKey) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to write model", - }) - return false - } - // Never use more than 2^32 random nonces with a given key because of the risk of a repeat. - nonce := make([]byte, 12) - if _, err := io.ReadFull(rand.Reader, nonce); err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to write model", - }) - return false - } - aesGcm, err := cipher.NewGCM(block) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to write model", - }) - return false - } - ciphertext := aesGcm.Seal(nil, nonce, plaintext, nil) - if !skipBackup { - err = context.backupModelToHistory(modelFolder, changeReasonForHistory) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to write model", - }) - return false - } - } - f, err := os.Create(filepath.Join(modelFolder, context.inputFile)) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to write model", - }) - return false - } - _, _ = f.Write(nonce) - _, _ = f.Write(ciphertext) - _ = f.Close() - return true -} - -func (context *Context) backupModelToHistory(modelFolder string, changeReasonForHistory string) (err error) { - historyFolder := filepath.Join(modelFolder, "history") - if _, err := os.Stat(historyFolder); os.IsNotExist(err) { - err = os.Mkdir(historyFolder, 0700) - if err != nil { - return err - } - } - inputModel, err := os.ReadFile(filepath.Join(modelFolder, context.inputFile)) - if err != nil { - return err - } - historyFile := filepath.Join(historyFolder, time.Now().Format("2006-01-02 15:04:05")+" "+changeReasonForHistory+".backup") - err = os.WriteFile(historyFile, inputModel, 0400) - if err != nil { - 
return err - } - // now delete any old files if over limit to keep - files, err := os.ReadDir(historyFolder) - if err != nil { - return err - } - if len(files) > context.backupHistoryFilesToKeep { - requiredToDelete := len(files) - context.backupHistoryFilesToKeep - sort.Slice(files, func(i, j int) bool { - return files[i].Name() < files[j].Name() - }) - for _, file := range files { - requiredToDelete-- - if file.Name() != filepath.Clean(file.Name()) { - return fmt.Errorf("weird file name %v", file.Name()) - } - err = os.Remove(filepath.Join(historyFolder, file.Name())) - if err != nil { - return err - } - if requiredToDelete <= 0 { - break - } - } - } - return -} - -func (context *Context) generateKeyFromAlreadyStrongRandomInput(alreadyRandomInput []byte) []byte { - // Establish the parameters to use for Argon2. - p := &argon2Params{ - memory: 64 * 1024, - iterations: 3, - parallelism: 2, - saltLength: 16, - keyLength: keySize, - } - // As the input is already cryptographically secure random, the salt is simply the first n bytes - salt := alreadyRandomInput[0:p.saltLength] - hash := argon2.IDKey(alreadyRandomInput[p.saltLength:], salt, p.iterations, p.memory, p.parallelism, p.keyLength) - return hash -} - -func folderNameForModel(folderNameOfKey string, uuid string) string { - return filepath.Join(folderNameOfKey, uuid) -} - -type argon2Params struct { - memory uint32 - iterations uint32 - parallelism uint8 - saltLength uint32 - keyLength uint32 -} - -var throttlerLock sync.Mutex - -var createdObjectsThrottler = make(map[string][]int64) - -func (context *Context) checkObjectCreationThrottler(ginContext *gin.Context, typeName string) bool { - throttlerLock.Lock() - defer throttlerLock.Unlock() - - // remove all elements older than 3 minutes (= 180000000000 ns) - now := time.Now().UnixNano() - cutoff := now - 180000000000 - for keyCheck := range createdObjectsThrottler { - for i := 0; i < len(createdObjectsThrottler[keyCheck]); i++ { - if 
createdObjectsThrottler[keyCheck][i] < cutoff { - // Remove the element at index i from slice (safe while looping using i as iterator) - createdObjectsThrottler[keyCheck] = append(createdObjectsThrottler[keyCheck][:i], createdObjectsThrottler[keyCheck][i+1:]...) - i-- // Since we just deleted a[i], we must redo that index - } - } - length := len(createdObjectsThrottler[keyCheck]) - if length == 0 { - delete(createdObjectsThrottler, keyCheck) - } - /* - if *verbose { - log.Println("Throttling count: "+strconv.Itoa(length)) - } - */ - } - - // check current request - keyHash := hash(typeName) // getting the real client ip is not easy inside fully encapsulated containerized runtime - if _, ok := createdObjectsThrottler[keyHash]; !ok { - createdObjectsThrottler[keyHash] = make([]int64, 0) - } - // check the limit of 20 creations for this type per 3 minutes - withinLimit := len(createdObjectsThrottler[keyHash]) < 20 - if withinLimit { - createdObjectsThrottler[keyHash] = append(createdObjectsThrottler[keyHash], now) - return true - } - ginContext.JSON(http.StatusTooManyRequests, gin.H{ - "error": "object creation throttling exceeded (denial-of-service protection): please wait some time and try again", - }) - return false -} - -var locksByFolderName = make(map[string]*sync.Mutex) - -func (context *Context) lockFolder(folderName string) { - context.globalLock.Lock() - defer context.globalLock.Unlock() - _, exists := locksByFolderName[folderName] - if !exists { - locksByFolderName[folderName] = &sync.Mutex{} - } - locksByFolderName[folderName].Lock() -} - -func (context *Context) unlockFolder(folderName string) { - if _, exists := locksByFolderName[folderName]; exists { - locksByFolderName[folderName].Unlock() - delete(locksByFolderName, folderName) - } -} - -func (context *Context) folderNameFromKey(key []byte) string { - sha512Hash := hashSHA256(key) - return filepath.Join(*context.serverFolder, context.keyDir, sha512Hash) -} - -func hashSHA256(key []byte) string { - 
hasher := sha512.New() - hasher.Write(key) - return hex.EncodeToString(hasher.Sum(nil)) -} - -func (context *Context) createKey(ginContext *gin.Context) { - ok := context.checkObjectCreationThrottler(ginContext, "KEY") - if !ok { - return - } - context.globalLock.Lock() - defer context.globalLock.Unlock() - - keyBytesArr := make([]byte, keySize) - n, err := rand.Read(keyBytesArr[:]) - if n != keySize || err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to create key", - }) - return - } - err = os.MkdirAll(context.folderNameFromKey(keyBytesArr), 0700) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to create key", - }) - return - } - ginContext.JSON(http.StatusCreated, gin.H{ - "key": base64.RawURLEncoding.EncodeToString(keyBytesArr[:]), - }) -} - -func (context *Context) checkTokenToFolderName(ginContext *gin.Context) (folderNameOfKey string, key []byte, ok bool) { - header := tokenHeader{} - if err := ginContext.ShouldBindHeader(&header); err != nil { - log.Println(err) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", - }) - return folderNameOfKey, key, false - } - token, err := base64.RawURLEncoding.DecodeString(strings.TrimSpace(header.Token)) - if len(token) == 0 || err != nil { - if err != nil { - log.Println(err) - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", - }) - return folderNameOfKey, key, false - } - context.globalLock.Lock() - defer context.globalLock.Unlock() - housekeepingTokenMaps() // to remove timed-out ones - tokenHash := hashSHA256(token) - if timeoutStruct, exists := mapTokenHashToTimeoutStruct[tokenHash]; exists { - // re-create the key from token - key := xor(token, timeoutStruct.xorRand) - folderNameOfKey := context.folderNameFromKey(key) - if _, err := os.Stat(folderNameOfKey); os.IsNotExist(err) { - log.Println(err) - ginContext.JSON(http.StatusNotFound, 
gin.H{ - "error": "token not found", - }) - return folderNameOfKey, key, false - } - timeoutStruct.lastAccessedNanoTime = time.Now().UnixNano() - return folderNameOfKey, key, true - } else { - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", - }) - return folderNameOfKey, key, false - } -} - -func xor(key []byte, xor []byte) []byte { - if len(key) != len(xor) { - panic(errors.New("key length not matching XOR length")) - } - result := make([]byte, len(xor)) - for i, b := range key { - result[i] = b ^ xor[i] - } - return result -} - -type timeoutStruct struct { - xorRand []byte - createdNanoTime, lastAccessedNanoTime int64 -} - -var mapTokenHashToTimeoutStruct = make(map[string]timeoutStruct) - -const extremeShortTimeoutsForTesting = false - -func housekeepingTokenMaps() { - now := time.Now().UnixNano() - for tokenHash, val := range mapTokenHashToTimeoutStruct { - if extremeShortTimeoutsForTesting { - // remove all elements older than 1 minute (= 60000000000 ns) soft - // and all elements older than 3 minutes (= 180000000000 ns) hard - if now-val.lastAccessedNanoTime > 60000000000 || now-val.createdNanoTime > 180000000000 { - fmt.Println("About to remove a token hash from maps") - deleteTokenHashFromMaps(tokenHash) - } - } else { - // remove all elements older than 30 minutes (= 1800000000000 ns) soft - // and all elements older than 10 hours (= 36000000000000 ns) hard - if now-val.lastAccessedNanoTime > 1800000000000 || now-val.createdNanoTime > 36000000000000 { - deleteTokenHashFromMaps(tokenHash) - } - } - } -} - -func deleteTokenHashFromMaps(tokenHash string) { - delete(mapTokenHashToTimeoutStruct, tokenHash) - for folderName, check := range mapFolderNameToTokenHash { - if check == tokenHash { - delete(mapFolderNameToTokenHash, folderName) - break - } - } -} - -type keyHeader struct { - Key string `header:"key"` -} - -func (context *Context) checkKeyToFolderName(ginContext *gin.Context) (folderNameOfKey string, key []byte, ok bool) { - 
header := keyHeader{} - if err := ginContext.ShouldBindHeader(&header); err != nil { - log.Println(err) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "key not found", - }) - return folderNameOfKey, key, false - } - key, err := base64.RawURLEncoding.DecodeString(strings.TrimSpace(header.Key)) - if len(key) == 0 || err != nil { - if err != nil { - log.Println(err) - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "key not found", - }) - return folderNameOfKey, key, false - } - folderNameOfKey = context.folderNameFromKey(key) - if _, err := os.Stat(folderNameOfKey); os.IsNotExist(err) { - log.Println(err) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "key not found", - }) - return folderNameOfKey, key, false - } - return folderNameOfKey, key, true -} - -func (context *Context) deleteKey(ginContext *gin.Context) { - folderName, _, ok := context.checkKeyToFolderName(ginContext) - if !ok { - return - } - context.globalLock.Lock() - defer context.globalLock.Unlock() - err := os.RemoveAll(folderName) - if err != nil { - log.Println("error during key delete: " + err.Error()) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "key not found", - }) - return - } - ginContext.JSON(http.StatusOK, gin.H{ - "message": "key deleted", + server.RunServer(server.ServerConfiguration{ + ServerPort: *context.serverPort, + ServerFolder: *context.serverFolder, + AppDir: context.appDir, + BuildTimestamp: context.buildTimestamp, + KeyDir: context.keyDir, + Verbose: *context.verbose, + CustomRiskRules: context.customRiskRules, + DefaultGraphvizDPI: context.DefaultGraphvizDPI, + TempFolder: *context.tempFolder, + ExecuteModelMacro: *context.executeModelMacro, + InputFile: context.inputFile, + IgnoreOrphanedRiskTracking: *context.ignoreOrphanedRiskTracking, + KeepDiagramSourceFiles: context.keepDiagramSourceFiles, + DataFlowDiagramFilenamePNG: context.dataFlowDiagramFilenamePNG, + DataFlowDiagramFilenameDOT: context.dataFlowDiagramFilenameDOT, + 
DataAssetDiagramFilenamePNG: context.dataAssetDiagramFilenamePNG, + DataAssetDiagramFilenameDOT: context.dataAssetDiagramFilenameDOT, + ReportFilename: context.reportFilename, + ExcelRisksFilename: context.excelRisksFilename, + ExcelTagsFilename: context.excelTagsFilename, + JsonRisksFilename: context.jsonRisksFilename, + JsonTechnicalAssetsFilename: context.jsonTechnicalAssetsFilename, + JsonStatsFilename: context.jsonStatsFilename, + RaaPlugin: *context.raaPlugin, + CustomRiskRulesPlugins: *context.riskRulesPlugins, + SkipRiskRules: *context.skipRiskRules, + BackupHistoryFilesToKeep: context.backupHistoryFilesToKeep, }) } diff --git a/pkg/server/execute.go b/pkg/server/execute.go new file mode 100644 index 00000000..08daf765 --- /dev/null +++ b/pkg/server/execute.go @@ -0,0 +1,228 @@ +/* +Copyright © 2023 NAME HERE +*/ +package server + +import ( + "errors" + "fmt" + "io" + "log" + "net/http" + "os" + "os/exec" + "path/filepath" + "strconv" + "strings" + + "github.com/gin-gonic/gin" +) + +func (s *server) analyze(ginContext *gin.Context) { + s.execute(ginContext, false) +} + +func (s *server) check(ginContext *gin.Context) { + _, ok := s.execute(ginContext, true) + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "model is ok", + }) + } +} + +func (s *server) execute(ginContext *gin.Context, dryRun bool) (yamlContent []byte, ok bool) { + defer func() { + var err error + if r := recover(); r != nil { + s.errorCount++ + err = r.(error) + log.Println(err) + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": strings.TrimSpace(err.Error()), + }) + ok = false + } + }() + + dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(s.configuration.DefaultGraphvizDPI))) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return yamlContent, false + } + + fileUploaded, header, err := ginContext.Request.FormFile("file") + if err != nil { + handleErrorInServiceCall(err, ginContext) + return yamlContent, false + } + + if header.Size > 
50000000 { + msg := "maximum model upload file size exceeded (denial-of-service protection)" + log.Println(msg) + ginContext.JSON(http.StatusRequestEntityTooLarge, gin.H{ + "error": msg, + }) + return yamlContent, false + } + + filenameUploaded := strings.TrimSpace(header.Filename) + + tmpInputDir, err := os.MkdirTemp(s.configuration.TempFolder, "threagile-input-") + if err != nil { + handleErrorInServiceCall(err, ginContext) + return yamlContent, false + } + defer func() { _ = os.RemoveAll(tmpInputDir) }() + + tmpModelFile, err := os.CreateTemp(tmpInputDir, "threagile-model-*") + if err != nil { + handleErrorInServiceCall(err, ginContext) + return yamlContent, false + } + defer func() { _ = os.Remove(tmpModelFile.Name()) }() + _, err = io.Copy(tmpModelFile, fileUploaded) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return yamlContent, false + } + + yamlFile := tmpModelFile.Name() + + if strings.ToLower(filepath.Ext(filenameUploaded)) == ".zip" { + // unzip first (including the resources like images etc.) 
+ if s.configuration.Verbose { + fmt.Println("Decompressing uploaded archive") + } + filenamesUnzipped, err := unzip(tmpModelFile.Name(), tmpInputDir) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return yamlContent, false + } + found := false + for _, name := range filenamesUnzipped { + if strings.ToLower(filepath.Ext(name)) == ".yaml" { + yamlFile = name + found = true + break + } + } + if !found { + panic(errors.New("no yaml file found in uploaded archive")) + } + } + + tmpOutputDir, err := os.MkdirTemp(s.configuration.TempFolder, "threagile-output-") + if err != nil { + handleErrorInServiceCall(err, ginContext) + return yamlContent, false + } + defer func() { _ = os.RemoveAll(tmpOutputDir) }() + + tmpResultFile, err := os.CreateTemp(s.configuration.TempFolder, "threagile-result-*.zip") + if err != nil { + handleErrorInServiceCall(err, ginContext) + return yamlContent, false + } + defer func() { _ = os.Remove(tmpResultFile.Name()) }() + + if dryRun { + s.doItViaRuntimeCall(yamlFile, tmpOutputDir, false, false, false, false, false, true, true, true, 40) + } else { + s.doItViaRuntimeCall(yamlFile, tmpOutputDir, true, true, true, true, true, true, true, true, dpi) + } + + yamlContent, err = os.ReadFile(yamlFile) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return yamlContent, false + } + err = os.WriteFile(filepath.Join(tmpOutputDir, s.configuration.InputFile), yamlContent, 0400) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return yamlContent, false + } + + if !dryRun { + files := []string{ + filepath.Join(tmpOutputDir, s.configuration.InputFile), + filepath.Join(tmpOutputDir, s.configuration.DataFlowDiagramFilenamePNG), + filepath.Join(tmpOutputDir, s.configuration.DataAssetDiagramFilenamePNG), + filepath.Join(tmpOutputDir, s.configuration.ReportFilename), + filepath.Join(tmpOutputDir, s.configuration.ExcelRisksFilename), + filepath.Join(tmpOutputDir, s.configuration.ExcelTagsFilename), + 
filepath.Join(tmpOutputDir, s.configuration.JsonRisksFilename), + filepath.Join(tmpOutputDir, s.configuration.JsonTechnicalAssetsFilename), + filepath.Join(tmpOutputDir, s.configuration.JsonStatsFilename), + } + if s.configuration.KeepDiagramSourceFiles { + files = append(files, filepath.Join(tmpOutputDir, s.configuration.DataAssetDiagramFilenamePNG)) + files = append(files, filepath.Join(tmpOutputDir, s.configuration.DataAssetDiagramFilenameDOT)) + } + err = zipFiles(tmpResultFile.Name(), files) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return yamlContent, false + } + if s.configuration.Verbose { + log.Println("Streaming back result file: " + tmpResultFile.Name()) + } + ginContext.FileAttachment(tmpResultFile.Name(), "threagile-result.zip") + } + s.successCount++ + return yamlContent, true +} + +// ultimately to avoid any in-process memory and/or data leaks by the used third party libs like PDF generation: exec and quit +func (s *server) doItViaRuntimeCall(modelFile string, outputDir string, + generateDataFlowDiagram, generateDataAssetDiagram, generateReportPdf, generateRisksExcel, generateTagsExcel, generateRisksJSON, generateTechnicalAssetsJSON, generateStatsJSON bool, + dpi int) { + // Remember to also add the same args to the exec based sub-process calls! + var cmd *exec.Cmd + args := []string{"-model", modelFile, "-output", outputDir, "-execute-model-macro", s.configuration.ExecuteModelMacro, "-raa-run", s.configuration.RaaPlugin, "-custom-risk-rules-plugins", s.configuration.CustomRiskRulesPlugins, "-skip-risk-rules", s.configuration.SkipRiskRules, "-diagram-dpi", strconv.Itoa(dpi)} + if s.configuration.Verbose { + args = append(args, "-verbose") + } + if s.configuration.IgnoreOrphanedRiskTracking { // TODO why add all them as arguments, when they are also variables on outer level? 
+ args = append(args, "-ignore-orphaned-risk-tracking") + } + if generateDataFlowDiagram { + args = append(args, "-generate-data-flow-diagram") + } + if generateDataAssetDiagram { + args = append(args, "-generate-data-asset-diagram") + } + if generateReportPdf { + args = append(args, "-generate-report-pdf") + } + if generateRisksExcel { + args = append(args, "-generate-risks-excel") + } + if generateTagsExcel { + args = append(args, "-generate-tags-excel") + } + if generateRisksJSON { + args = append(args, "-generate-risks-json") + } + if generateTechnicalAssetsJSON { + args = append(args, "-generate-technical-assets-json") + } + if generateStatsJSON { + args = append(args, "-generate-stats-json") + } + self, nameError := os.Executable() + if nameError != nil { + panic(nameError) + } + cmd = exec.Command(self, args...) + out, err := cmd.CombinedOutput() + if err != nil { + panic(errors.New(string(out))) + } else { + if s.configuration.Verbose && len(out) > 0 { + fmt.Println("---") + fmt.Print(string(out)) + fmt.Println("---") + } + } +} diff --git a/pkg/server/hash.go b/pkg/server/hash.go new file mode 100644 index 00000000..89072380 --- /dev/null +++ b/pkg/server/hash.go @@ -0,0 +1,35 @@ +/* +Copyright © 2023 NAME HERE +*/ +package server + +import ( + "crypto/sha512" + "encoding/hex" + "errors" + "fmt" + "hash/fnv" +) + +func xor(key []byte, xor []byte) []byte { + if len(key) != len(xor) { + panic(errors.New("key length not matching XOR length")) + } + result := make([]byte, len(xor)) + for i, b := range key { + result[i] = b ^ xor[i] + } + return result +} + +func hashSHA256(key []byte) string { + hasher := sha512.New() + hasher.Write(key) + return hex.EncodeToString(hasher.Sum(nil)) +} + +func hash(s string) string { + h := fnv.New32a() + _, _ = h.Write([]byte(s)) + return fmt.Sprintf("%v", h.Sum32()) +} diff --git a/pkg/server/model.go b/pkg/server/model.go new file mode 100644 index 00000000..84066a6d --- /dev/null +++ b/pkg/server/model.go @@ -0,0 +1,1374 @@ 
+/* +Copyright © 2023 NAME HERE +*/ +package server + +import ( + "bytes" + "compress/gzip" + "crypto/aes" + "crypto/cipher" + "crypto/rand" + "fmt" + "io" + "log" + "net/http" + "os" + "path/filepath" + "sort" + "strconv" + "strings" + "sync" + "time" + + "gopkg.in/yaml.v3" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/threagile/threagile/pkg/docs" + "github.com/threagile/threagile/pkg/input" + "github.com/threagile/threagile/pkg/security/types" + "golang.org/x/crypto/argon2" +) + +// creates a sub-folder (named by a new UUID) inside the token folder +func (s *server) createNewModel(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + ok = s.checkObjectCreationThrottler(ginContext, "MODEL") + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + + aUuid := uuid.New().String() + err := os.Mkdir(folderNameForModel(folderNameOfKey, aUuid), 0700) + if err != nil { + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to create model", + }) + return + } + + aYaml := `title: New Threat Model +threagile_version: ` + docs.ThreagileVersion + ` +author: + name: "" + homepage: "" +date: +business_overview: + description: "" + images: [] +technical_overview: + description: "" + images: [] +business_criticality: "" +management_summary_comment: "" +questions: {} +abuse_cases: {} +security_requirements: {} +tags_available: [] +data_assets: {} +technical_assets: {} +trust_boundaries: {} +shared_runtimes: {} +individual_risk_categories: {} +risk_tracking: {} +diagram_tweak_nodesep: "" +diagram_tweak_ranksep: "" +diagram_tweak_edge_layout: "" +diagram_tweak_suppress_edge_labels: false +diagram_tweak_invisible_connections_between_assets: [] +diagram_tweak_same_rank_assets: []` + + ok = s.writeModelYAML(ginContext, aYaml, key, folderNameForModel(folderNameOfKey, aUuid), "New Model Creation", true) + if ok { + 
ginContext.JSON(http.StatusCreated, gin.H{ + "message": "model created", + "id": aUuid, + }) + } +} + +type payloadModels struct { + ID string `yaml:"id" json:"id"` + Title string `yaml:"title" json:"title"` + TimestampCreated time.Time `yaml:"timestamp_created" json:"timestamp_created"` + TimestampModified time.Time `yaml:"timestamp_modified" json:"timestamp_modified"` +} + +func (s *server) listModels(ginContext *gin.Context) { // TODO currently returns error when any model is no longer valid in syntax, so eventually have some fallback to not just bark on an invalid model... + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + + result := make([]payloadModels, 0) + modelFolders, err := os.ReadDir(folderNameOfKey) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "token not found", + }) + return + } + for _, dirEntry := range modelFolders { + if dirEntry.IsDir() { + modelStat, err := os.Stat(filepath.Join(folderNameOfKey, dirEntry.Name(), s.configuration.InputFile)) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "unable to list model", + }) + return + } + aModel, _, ok := s.readModel(ginContext, dirEntry.Name(), key, folderNameOfKey) + if !ok { + return + } + fileInfo, err := dirEntry.Info() + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "unable to get file info", + }) + return + } + result = append(result, payloadModels{ + ID: dirEntry.Name(), + Title: aModel.Title, + TimestampCreated: fileInfo.ModTime(), + TimestampModified: modelStat.ModTime(), + }) + } + } + ginContext.JSON(http.StatusOK, result) +} + +func (s *server) deleteModel(ginContext *gin.Context) { + folderNameOfKey, _, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) 
+ folder, ok := s.checkModelFolder(ginContext, ginContext.Param("model-id"), folderNameOfKey) + if ok { + if folder != filepath.Clean(folder) { + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "model-id is weird", + }) + return + } + err := os.RemoveAll(folder) + if err != nil { + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "model not found", + }) + return + } + ginContext.JSON(http.StatusOK, gin.H{ + "message": "model deleted", + }) + } +} + +type payloadCover struct { + Title string `yaml:"title" json:"title"` + Date time.Time `yaml:"date" json:"date"` + Author input.Author `yaml:"author" json:"author"` +} + +func (s *server) setCover(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + modelInput, _, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + payload := payloadCover{} + err := ginContext.BindJSON(&payload) + if err != nil { + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": "unable to parse request payload", + }) + return + } + modelInput.Title = payload.Title + if !payload.Date.IsZero() { + modelInput.Date = payload.Date.Format("2006-01-02") + } + modelInput.Author.Name = payload.Author.Name + modelInput.Author.Homepage = payload.Author.Homepage + ok = s.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Cover Update") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "model updated", + }) + } + } +} + +func (s *server) getCover(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + aModel, _, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "title": aModel.Title, + "date": aModel.Date, + "author": 
aModel.Author, + }) + } +} + +type payloadOverview struct { + ManagementSummaryComment string `yaml:"management_summary_comment" json:"management_summary_comment"` + BusinessCriticality string `yaml:"business_criticality" json:"business_criticality"` + BusinessOverview input.Overview `yaml:"business_overview" json:"business_overview"` + TechnicalOverview input.Overview `yaml:"technical_overview" json:"technical_overview"` +} + +func (s *server) setOverview(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + modelInput, _, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + payload := payloadOverview{} + err := ginContext.BindJSON(&payload) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": "unable to parse request payload", + }) + return + } + criticality, err := types.ParseCriticality(payload.BusinessCriticality) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + modelInput.ManagementSummaryComment = payload.ManagementSummaryComment + modelInput.BusinessCriticality = criticality.String() + modelInput.BusinessOverview.Description = payload.BusinessOverview.Description + modelInput.BusinessOverview.Images = payload.BusinessOverview.Images + modelInput.TechnicalOverview.Description = payload.TechnicalOverview.Description + modelInput.TechnicalOverview.Images = payload.TechnicalOverview.Images + ok = s.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Overview Update") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "model updated", + }) + } + } +} + +func (s *server) getOverview(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + aModel, _, ok := 
s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "management_summary_comment": aModel.ManagementSummaryComment, + "business_criticality": aModel.BusinessCriticality, + "business_overview": aModel.BusinessOverview, + "technical_overview": aModel.TechnicalOverview, + }) + } +} + +type payloadAbuseCases map[string]string + +func (s *server) setAbuseCases(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + modelInput, _, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + payload := payloadAbuseCases{} + err := ginContext.BindJSON(&payload) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": "unable to parse request payload", + }) + return + } + modelInput.AbuseCases = payload + ok = s.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Abuse Cases Update") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "model updated", + }) + } + } +} + +func (s *server) getAbuseCases(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + aModel, _, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + ginContext.JSON(http.StatusOK, aModel.AbuseCases) + } +} + +type payloadSecurityRequirements map[string]string + +func (s *server) setSecurityRequirements(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + modelInput, _, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + payload := payloadSecurityRequirements{} 
+ err := ginContext.BindJSON(&payload) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": "unable to parse request payload", + }) + return + } + modelInput.SecurityRequirements = payload + ok = s.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Security Requirements Update") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "model updated", + }) + } + } +} + +func (s *server) getSecurityRequirements(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + aModel, _, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + ginContext.JSON(http.StatusOK, aModel.SecurityRequirements) + } +} + +type payloadDataAsset struct { + Title string `yaml:"title" json:"title"` + Id string `yaml:"id" json:"id"` + Description string `yaml:"description" json:"description"` + Usage string `yaml:"usage" json:"usage"` + Tags []string `yaml:"tags" json:"tags"` + Origin string `yaml:"origin" json:"origin"` + Owner string `yaml:"owner" json:"owner"` + Quantity string `yaml:"quantity" json:"quantity"` + Confidentiality string `yaml:"confidentiality" json:"confidentiality"` + Integrity string `yaml:"integrity" json:"integrity"` + Availability string `yaml:"availability" json:"availability"` + JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` +} + +func (s *server) getDataAssets(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + aModel, _, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + ginContext.JSON(http.StatusOK, aModel.DataAssets) + } +} + +func (s *server) getDataAsset(ginContext *gin.Context) { + folderNameOfKey, key, 
ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + modelInput, _, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + // yes, here keyed by title in YAML for better readability in the YAML file itself + for title, dataAsset := range modelInput.DataAssets { + if dataAsset.ID == ginContext.Param("data-asset-id") { + ginContext.JSON(http.StatusOK, gin.H{ + title: dataAsset, + }) + return + } + } + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "data asset not found", + }) + } +} + +func (s *server) deleteDataAsset(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + modelInput, _, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + referencesDeleted := false + // yes, here keyed by title in YAML for better readability in the YAML file itself + for title, dataAsset := range modelInput.DataAssets { + if dataAsset.ID == ginContext.Param("data-asset-id") { + // also remove all usages of this data asset !! + for _, techAsset := range modelInput.TechnicalAssets { + if techAsset.DataAssetsProcessed != nil { + for i, parsedChangeCandidateAsset := range techAsset.DataAssetsProcessed { + referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) + if referencedAsset == dataAsset.ID { // apply the removal + referencesDeleted = true + // Remove the element at index i + // TODO needs more testing + copy(techAsset.DataAssetsProcessed[i:], techAsset.DataAssetsProcessed[i+1:]) // Shift a[i+1:] left one index. + techAsset.DataAssetsProcessed[len(techAsset.DataAssetsProcessed)-1] = "" // Erase last element (write zero value). + techAsset.DataAssetsProcessed = techAsset.DataAssetsProcessed[:len(techAsset.DataAssetsProcessed)-1] // Truncate slice. 
+ } + } + } + if techAsset.DataAssetsStored != nil { + for i, parsedChangeCandidateAsset := range techAsset.DataAssetsStored { + referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) + if referencedAsset == dataAsset.ID { // apply the removal + referencesDeleted = true + // Remove the element at index i + // TODO needs more testing + copy(techAsset.DataAssetsStored[i:], techAsset.DataAssetsStored[i+1:]) // Shift a[i+1:] left one index. + techAsset.DataAssetsStored[len(techAsset.DataAssetsStored)-1] = "" // Erase last element (write zero value). + techAsset.DataAssetsStored = techAsset.DataAssetsStored[:len(techAsset.DataAssetsStored)-1] // Truncate slice. + } + } + } + if techAsset.CommunicationLinks != nil { + for title, commLink := range techAsset.CommunicationLinks { + for i, dataAssetSent := range commLink.DataAssetsSent { + referencedAsset := fmt.Sprintf("%v", dataAssetSent) + if referencedAsset == dataAsset.ID { // apply the removal + referencesDeleted = true + // Remove the element at index i + // TODO needs more testing + copy(techAsset.CommunicationLinks[title].DataAssetsSent[i:], techAsset.CommunicationLinks[title].DataAssetsSent[i+1:]) // Shift a[i+1:] left one index. + techAsset.CommunicationLinks[title].DataAssetsSent[len(techAsset.CommunicationLinks[title].DataAssetsSent)-1] = "" // Erase last element (write zero value). + x := techAsset.CommunicationLinks[title] + x.DataAssetsSent = techAsset.CommunicationLinks[title].DataAssetsSent[:len(techAsset.CommunicationLinks[title].DataAssetsSent)-1] // Truncate slice. 
+ techAsset.CommunicationLinks[title] = x + } + } + for i, dataAssetReceived := range commLink.DataAssetsReceived { + referencedAsset := fmt.Sprintf("%v", dataAssetReceived) + if referencedAsset == dataAsset.ID { // apply the removal + referencesDeleted = true + // Remove the element at index i + // TODO needs more testing + copy(techAsset.CommunicationLinks[title].DataAssetsReceived[i:], techAsset.CommunicationLinks[title].DataAssetsReceived[i+1:]) // Shift a[i+1:] left one index. + techAsset.CommunicationLinks[title].DataAssetsReceived[len(techAsset.CommunicationLinks[title].DataAssetsReceived)-1] = "" // Erase last element (write zero value). + x := techAsset.CommunicationLinks[title] + x.DataAssetsReceived = techAsset.CommunicationLinks[title].DataAssetsReceived[:len(techAsset.CommunicationLinks[title].DataAssetsReceived)-1] // Truncate slice. + techAsset.CommunicationLinks[title] = x + } + } + } + } + } + for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { + if individualRiskCat.RisksIdentified != nil { + for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { + if individualRiskInstance.MostRelevantDataAsset == dataAsset.ID { // apply the removal + referencesDeleted = true + x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] + x.MostRelevantDataAsset = "" // TODO needs more testing + modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x + } + } + } + } + // remove it itself + delete(modelInput.DataAssets, title) + ok = s.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Data Asset Deletion") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "data asset deleted", + "id": dataAsset.ID, + "references_deleted": referencesDeleted, // in order to signal to clients, that other model parts might've been deleted as well + }) + } + return + } + } + 
ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "data asset not found", + }) + } +} + +func (s *server) setDataAsset(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + modelInput, _, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + // yes, here keyed by title in YAML for better readability in the YAML file itself + for title, dataAsset := range modelInput.DataAssets { + if dataAsset.ID == ginContext.Param("data-asset-id") { + payload := payloadDataAsset{} + err := ginContext.BindJSON(&payload) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": "unable to parse request payload", + }) + return + } + dataAssetInput, ok := s.populateDataAsset(ginContext, payload) + if !ok { + return + } + // in order to also update the title, remove the asset from the map and re-insert it (with new key) + delete(modelInput.DataAssets, title) + modelInput.DataAssets[payload.Title] = dataAssetInput + idChanged := dataAssetInput.ID != dataAsset.ID + if idChanged { // ID-CHANGE-PROPAGATION + // also update all usages to point to the new (changed) ID !! 
+ for techAssetTitle, techAsset := range modelInput.TechnicalAssets { + if techAsset.DataAssetsProcessed != nil { + for i, parsedChangeCandidateAsset := range techAsset.DataAssetsProcessed { + referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) + if referencedAsset == dataAsset.ID { // apply the ID change + modelInput.TechnicalAssets[techAssetTitle].DataAssetsProcessed[i] = dataAssetInput.ID + } + } + } + if techAsset.DataAssetsStored != nil { + for i, parsedChangeCandidateAsset := range techAsset.DataAssetsStored { + referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) + if referencedAsset == dataAsset.ID { // apply the ID change + modelInput.TechnicalAssets[techAssetTitle].DataAssetsStored[i] = dataAssetInput.ID + } + } + } + if techAsset.CommunicationLinks != nil { + for title, commLink := range techAsset.CommunicationLinks { + for i, dataAssetSent := range commLink.DataAssetsSent { + referencedAsset := fmt.Sprintf("%v", dataAssetSent) + if referencedAsset == dataAsset.ID { // apply the ID change + modelInput.TechnicalAssets[techAssetTitle].CommunicationLinks[title].DataAssetsSent[i] = dataAssetInput.ID + } + } + for i, dataAssetReceived := range commLink.DataAssetsReceived { + referencedAsset := fmt.Sprintf("%v", dataAssetReceived) + if referencedAsset == dataAsset.ID { // apply the ID change + modelInput.TechnicalAssets[techAssetTitle].CommunicationLinks[title].DataAssetsReceived[i] = dataAssetInput.ID + } + } + } + } + } + for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { + if individualRiskCat.RisksIdentified != nil { + for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { + if individualRiskInstance.MostRelevantDataAsset == dataAsset.ID { // apply the ID change + x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] + x.MostRelevantDataAsset = dataAssetInput.ID // TODO needs more testing + 
modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x + } + } + } + } + } + ok = s.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Data Asset Update") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "data asset updated", + "id": dataAssetInput.ID, + "id_changed": idChanged, // in order to signal to clients, that other model parts might've received updates as well and should be reloaded + }) + } + return + } + } + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "data asset not found", + }) + } +} + +func (s *server) createNewDataAsset(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + modelInput, _, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + payload := payloadDataAsset{} + err := ginContext.BindJSON(&payload) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": "unable to parse request payload", + }) + return + } + // yes, here keyed by title in YAML for better readability in the YAML file itself + if _, exists := modelInput.DataAssets[payload.Title]; exists { + ginContext.JSON(http.StatusConflict, gin.H{ + "error": "data asset with this title already exists", + }) + return + } + // but later it will in memory keyed by its "id", so do this uniqueness check also + for _, asset := range modelInput.DataAssets { + if asset.ID == payload.Id { + ginContext.JSON(http.StatusConflict, gin.H{ + "error": "data asset with this id already exists", + }) + return + } + } + dataAssetInput, ok := s.populateDataAsset(ginContext, payload) + if !ok { + return + } + if modelInput.DataAssets == nil { + modelInput.DataAssets = make(map[string]input.InputDataAsset) + } + modelInput.DataAssets[payload.Title] = dataAssetInput + ok = s.writeModel(ginContext, key, 
folderNameOfKey, &modelInput, "Data Asset Creation") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "data asset created", + "id": dataAssetInput.ID, + }) + } + } +} + +func (s *server) populateDataAsset(ginContext *gin.Context, payload payloadDataAsset) (dataAssetInput input.InputDataAsset, ok bool) { + usage, err := types.ParseUsage(payload.Usage) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return dataAssetInput, false + } + quantity, err := types.ParseQuantity(payload.Quantity) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return dataAssetInput, false + } + confidentiality, err := types.ParseConfidentiality(payload.Confidentiality) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return dataAssetInput, false + } + integrity, err := types.ParseCriticality(payload.Integrity) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return dataAssetInput, false + } + availability, err := types.ParseCriticality(payload.Availability) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return dataAssetInput, false + } + dataAssetInput = input.InputDataAsset{ + ID: payload.Id, + Description: payload.Description, + Usage: usage.String(), + Tags: lowerCaseAndTrim(payload.Tags), + Origin: payload.Origin, + Owner: payload.Owner, + Quantity: quantity.String(), + Confidentiality: confidentiality.String(), + Integrity: integrity.String(), + Availability: availability.String(), + JustificationCiaRating: payload.JustificationCiaRating, + } + return dataAssetInput, true +} + +func (s *server) getTrustBoundaries(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + aModel, _, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + ginContext.JSON(http.StatusOK, aModel.TrustBoundaries) + } +} + +type payloadSharedRuntime 
struct { + Title string `yaml:"title" json:"title"` + Id string `yaml:"id" json:"id"` + Description string `yaml:"description" json:"description"` + Tags []string `yaml:"tags" json:"tags"` + TechnicalAssetsRunning []string `yaml:"technical_assets_running" json:"technical_assets_running"` +} + +func (s *server) setSharedRuntime(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + modelInput, _, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + // yes, here keyed by title in YAML for better readability in the YAML file itself + for title, sharedRuntime := range modelInput.SharedRuntimes { + if sharedRuntime.ID == ginContext.Param("shared-runtime-id") { + payload := payloadSharedRuntime{} + err := ginContext.BindJSON(&payload) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": "unable to parse request payload", + }) + return + } + sharedRuntimeInput, ok := populateSharedRuntime(ginContext, payload) + if !ok { + return + } + // in order to also update the title, remove the shared runtime from the map and re-insert it (with new key) + delete(modelInput.SharedRuntimes, title) + modelInput.SharedRuntimes[payload.Title] = sharedRuntimeInput + idChanged := sharedRuntimeInput.ID != sharedRuntime.ID + if idChanged { // ID-CHANGE-PROPAGATION + for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { + if individualRiskCat.RisksIdentified != nil { + for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { + if individualRiskInstance.MostRelevantSharedRuntime == sharedRuntime.ID { // apply the ID change + x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] + x.MostRelevantSharedRuntime = sharedRuntimeInput.ID // TODO 
needs more testing + modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x + } + } + } + } + } + ok = s.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Shared Runtime Update") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "shared runtime updated", + "id": sharedRuntimeInput.ID, + "id_changed": idChanged, // in order to signal to clients, that other model parts might've received updates as well and should be reloaded + }) + } + return + } + } + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "shared runtime not found", + }) + } +} + +func (s *server) getSharedRuntime(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + modelInput, _, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + // yes, here keyed by title in YAML for better readability in the YAML file itself + for title, sharedRuntime := range modelInput.SharedRuntimes { + if sharedRuntime.ID == ginContext.Param("shared-runtime-id") { + ginContext.JSON(http.StatusOK, gin.H{ + title: sharedRuntime, + }) + return + } + } + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "shared runtime not found", + }) + } +} + +func (s *server) createNewSharedRuntime(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + modelInput, _, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + payload := payloadSharedRuntime{} + err := ginContext.BindJSON(&payload) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": "unable to parse request payload", + }) + return + } + // yes, here keyed by title in YAML for better readability in the YAML 
file itself + if _, exists := modelInput.SharedRuntimes[payload.Title]; exists { + ginContext.JSON(http.StatusConflict, gin.H{ + "error": "shared runtime with this title already exists", + }) + return + } + // but later it will in memory keyed by its "id", so do this uniqueness check also + for _, sharedRuntime := range modelInput.SharedRuntimes { + if sharedRuntime.ID == payload.Id { + ginContext.JSON(http.StatusConflict, gin.H{ + "error": "shared runtime with this id already exists", + }) + return + } + } + if !checkTechnicalAssetsExisting(modelInput, payload.TechnicalAssetsRunning) { + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": "referenced technical asset does not exist", + }) + return + } + sharedRuntimeInput, ok := populateSharedRuntime(ginContext, payload) + if !ok { + return + } + if modelInput.SharedRuntimes == nil { + modelInput.SharedRuntimes = make(map[string]input.InputSharedRuntime) + } + modelInput.SharedRuntimes[payload.Title] = sharedRuntimeInput + ok = s.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Shared Runtime Creation") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "shared runtime created", + "id": sharedRuntimeInput.ID, + }) + } + } +} + +func checkTechnicalAssetsExisting(modelInput input.ModelInput, techAssetIDs []string) (ok bool) { + for _, techAssetID := range techAssetIDs { + exists := false + for _, val := range modelInput.TechnicalAssets { + if val.ID == techAssetID { + exists = true + break + } + } + if !exists { + return false + } + } + return true +} + +func populateSharedRuntime(_ *gin.Context, payload payloadSharedRuntime) (sharedRuntimeInput input.InputSharedRuntime, ok bool) { + sharedRuntimeInput = input.InputSharedRuntime{ + ID: payload.Id, + Description: payload.Description, + Tags: lowerCaseAndTrim(payload.Tags), + TechnicalAssetsRunning: payload.TechnicalAssetsRunning, + } + return sharedRuntimeInput, true +} + +func (s *server) deleteSharedRuntime(ginContext *gin.Context) { + 
folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + modelInput, _, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + referencesDeleted := false + // yes, here keyed by title in YAML for better readability in the YAML file itself + for title, sharedRuntime := range modelInput.SharedRuntimes { + if sharedRuntime.ID == ginContext.Param("shared-runtime-id") { + // also remove all usages of this shared runtime !! + for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { + if individualRiskCat.RisksIdentified != nil { + for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { + if individualRiskInstance.MostRelevantSharedRuntime == sharedRuntime.ID { // apply the removal + referencesDeleted = true + x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] + x.MostRelevantSharedRuntime = "" // TODO needs more testing + modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x + } + } + } + } + // remove it itself + delete(modelInput.SharedRuntimes, title) + ok = s.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Shared Runtime Deletion") + if ok { + ginContext.JSON(http.StatusOK, gin.H{ + "message": "shared runtime deleted", + "id": sharedRuntime.ID, + "references_deleted": referencesDeleted, // in order to signal to clients, that other model parts might've been deleted as well + }) + } + return + } + } + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "shared runtime not found", + }) + } +} + +func (s *server) getSharedRuntimes(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + 
aModel, _, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + ginContext.JSON(http.StatusOK, aModel.SharedRuntimes) + } +} + +func (s *server) readModel(ginContext *gin.Context, modelUUID string, key []byte, folderNameOfKey string) (modelInputResult input.ModelInput, yamlText string, ok bool) { + modelFolder, ok := s.checkModelFolder(ginContext, modelUUID, folderNameOfKey) + if !ok { + return modelInputResult, yamlText, false + } + cryptoKey := generateKeyFromAlreadyStrongRandomInput(key) + block, err := aes.NewCipher(cryptoKey) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to open model", + }) + return modelInputResult, yamlText, false + } + aesGcm, err := cipher.NewGCM(block) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to open model", + }) + return modelInputResult, yamlText, false + } + + fileBytes, err := os.ReadFile(filepath.Join(modelFolder, s.configuration.InputFile)) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to open model", + }) + return modelInputResult, yamlText, false + } + + nonce := fileBytes[0:12] + ciphertext := fileBytes[12:] + plaintext, err := aesGcm.Open(nil, nonce, ciphertext, nil) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to open model", + }) + return modelInputResult, yamlText, false + } + + r, err := gzip.NewReader(bytes.NewReader(plaintext)) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to open model", + }) + return modelInputResult, yamlText, false + } + buf := new(bytes.Buffer) + _, _ = buf.ReadFrom(r) + modelInput := new(input.ModelInput).Defaults() + yamlBytes := buf.Bytes() + err = yaml.Unmarshal(yamlBytes, &modelInput) + if err != nil { + log.Println(err) 
+ ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to open model", + }) + return modelInputResult, yamlText, false + } + return *modelInput, string(yamlBytes), true +} + +func (s *server) writeModel(ginContext *gin.Context, key []byte, folderNameOfKey string, modelInput *input.ModelInput, changeReasonForHistory string) (ok bool) { + modelFolder, ok := s.checkModelFolder(ginContext, ginContext.Param("model-id"), folderNameOfKey) + if ok { + modelInput.ThreagileVersion = docs.ThreagileVersion + yamlBytes, err := yaml.Marshal(modelInput) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to write model", + }) + return false + } + /* + yamlBytes = model.ReformatYAML(yamlBytes) + */ + return s.writeModelYAML(ginContext, string(yamlBytes), key, modelFolder, changeReasonForHistory, false) + } + return false +} + +func (s *server) checkModelFolder(ginContext *gin.Context, modelUUID string, folderNameOfKey string) (modelFolder string, ok bool) { + uuidParsed, err := uuid.Parse(modelUUID) + if err != nil { + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "model not found", + }) + return modelFolder, false + } + modelFolder = folderNameForModel(folderNameOfKey, uuidParsed.String()) + if _, err := os.Stat(modelFolder); os.IsNotExist(err) { + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "model not found", + }) + return modelFolder, false + } + return modelFolder, true +} + +func (s *server) getModel(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + _, yamlText, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if ok { + tmpResultFile, err := os.CreateTemp(s.configuration.TempFolder, "threagile-*.yaml") + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + err = 
os.WriteFile(tmpResultFile.Name(), []byte(yamlText), 0400) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to stream model file", + }) + return + } + defer func() { _ = os.Remove(tmpResultFile.Name()) }() + ginContext.FileAttachment(tmpResultFile.Name(), s.configuration.InputFile) + } +} + +// fully replaces threagile.yaml in sub-folder given by UUID +func (s *server) importModel(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer s.unlockFolder(folderNameOfKey) + + aUuid := ginContext.Param("model-id") // UUID is syntactically validated in readModel+checkModelFolder (next line) via uuid.Parse(modelUUID) + _, _, ok = s.readModel(ginContext, aUuid, key, folderNameOfKey) + if ok { + // first analyze it simply by executing the full risk process (just discard the result) to ensure that everything would work + yamlContent, ok := s.execute(ginContext, true) + if ok { + // if we're here, then no problem was raised, so ok to proceed + ok = s.writeModelYAML(ginContext, string(yamlContent), key, folderNameForModel(folderNameOfKey, aUuid), "Model Import", false) + if ok { + ginContext.JSON(http.StatusCreated, gin.H{ + "message": "model imported", + }) + } + } + } +} + +func (s *server) analyzeModelOnServerDirectly(ginContext *gin.Context) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer func() { + s.unlockFolder(folderNameOfKey) + var err error + if r := recover(); r != nil { + err = r.(error) + if s.configuration.Verbose { + log.Println(err) + } + log.Println(err) + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": strings.TrimSpace(err.Error()), + }) + ok = false + } + }() + + dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(s.configuration.DefaultGraphvizDPI))) + if err != nil { + 
handleErrorInServiceCall(err, ginContext) + return + } + + _, yamlText, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if !ok { + return + } + tmpModelFile, err := os.CreateTemp(s.configuration.TempFolder, "threagile-direct-analyze-*") + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + defer func() { _ = os.Remove(tmpModelFile.Name()) }() + tmpOutputDir, err := os.MkdirTemp(s.configuration.TempFolder, "threagile-direct-analyze-") + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + defer func() { _ = os.RemoveAll(tmpOutputDir) }() + tmpResultFile, err := os.CreateTemp(s.configuration.TempFolder, "threagile-result-*.zip") + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + defer func() { _ = os.Remove(tmpResultFile.Name()) }() + + err = os.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) + + s.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, true, true, true, true, true, true, true, true, dpi) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + err = os.WriteFile(filepath.Join(tmpOutputDir, s.configuration.InputFile), []byte(yamlText), 0400) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + + files := []string{ + filepath.Join(tmpOutputDir, s.configuration.InputFile), + filepath.Join(tmpOutputDir, s.configuration.DataFlowDiagramFilenamePNG), + filepath.Join(tmpOutputDir, s.configuration.DataAssetDiagramFilenamePNG), + filepath.Join(tmpOutputDir, s.configuration.ReportFilename), + filepath.Join(tmpOutputDir, s.configuration.ExcelRisksFilename), + filepath.Join(tmpOutputDir, s.configuration.ExcelTagsFilename), + filepath.Join(tmpOutputDir, s.configuration.JsonRisksFilename), + filepath.Join(tmpOutputDir, s.configuration.JsonTechnicalAssetsFilename), + filepath.Join(tmpOutputDir, s.configuration.JsonStatsFilename), + } + if s.configuration.KeepDiagramSourceFiles { + files = append(files, 
filepath.Join(tmpOutputDir, s.configuration.DataFlowDiagramFilenameDOT)) + files = append(files, filepath.Join(tmpOutputDir, s.configuration.DataAssetDiagramFilenameDOT)) + } + err = zipFiles(tmpResultFile.Name(), files) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + if s.configuration.Verbose { + fmt.Println("Streaming back result file: " + tmpResultFile.Name()) + } + ginContext.FileAttachment(tmpResultFile.Name(), "threagile-result.zip") +} + +func (s *server) writeModelYAML(ginContext *gin.Context, yaml string, key []byte, modelFolder string, changeReasonForHistory string, skipBackup bool) (ok bool) { + if s.configuration.Verbose { + fmt.Println("about to write " + strconv.Itoa(len(yaml)) + " bytes of yaml into model folder: " + modelFolder) + } + var b bytes.Buffer + w := gzip.NewWriter(&b) + _, _ = w.Write([]byte(yaml)) + _ = w.Close() + plaintext := b.Bytes() + cryptoKey := generateKeyFromAlreadyStrongRandomInput(key) + block, err := aes.NewCipher(cryptoKey) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to write model", + }) + return false + } + // Never use more than 2^32 random nonces with a given key because of the risk of a repeat. 
+ nonce := make([]byte, 12) + if _, err := io.ReadFull(rand.Reader, nonce); err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to write model", + }) + return false + } + aesGcm, err := cipher.NewGCM(block) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to write model", + }) + return false + } + ciphertext := aesGcm.Seal(nil, nonce, plaintext, nil) + if !skipBackup { + err = s.backupModelToHistory(modelFolder, changeReasonForHistory) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to write model", + }) + return false + } + } + f, err := os.Create(filepath.Join(modelFolder, s.configuration.InputFile)) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to write model", + }) + return false + } + _, _ = f.Write(nonce) + _, _ = f.Write(ciphertext) + _ = f.Close() + return true +} + +func (s *server) lockFolder(folderName string) { + s.globalLock.Lock() + defer s.globalLock.Unlock() + _, exists := s.locksByFolderName[folderName] + if !exists { + s.locksByFolderName[folderName] = &sync.Mutex{} + } + s.locksByFolderName[folderName].Lock() +} + +func (s *server) unlockFolder(folderName string) { + if _, exists := s.locksByFolderName[folderName]; exists { + s.locksByFolderName[folderName].Unlock() + delete(s.locksByFolderName, folderName) + } +} + +func (s *server) backupModelToHistory(modelFolder string, changeReasonForHistory string) (err error) { + historyFolder := filepath.Join(modelFolder, "history") + if _, err := os.Stat(historyFolder); os.IsNotExist(err) { + err = os.Mkdir(historyFolder, 0700) + if err != nil { + return err + } + } + inputModel, err := os.ReadFile(filepath.Join(modelFolder, s.configuration.InputFile)) + if err != nil { + return err + } + historyFile := filepath.Join(historyFolder, 
time.Now().Format("2006-01-02 15:04:05")+" "+changeReasonForHistory+".backup") + err = os.WriteFile(historyFile, inputModel, 0400) + if err != nil { + return err + } + // now delete any old files if over limit to keep + files, err := os.ReadDir(historyFolder) + if err != nil { + return err + } + if len(files) > s.configuration.BackupHistoryFilesToKeep { + requiredToDelete := len(files) - s.configuration.BackupHistoryFilesToKeep + sort.Slice(files, func(i, j int) bool { + return files[i].Name() < files[j].Name() + }) + for _, file := range files { + requiredToDelete-- + if file.Name() != filepath.Clean(file.Name()) { + return fmt.Errorf("weird file name %v", file.Name()) + } + err = os.Remove(filepath.Join(historyFolder, file.Name())) + if err != nil { + return err + } + if requiredToDelete <= 0 { + break + } + } + } + return +} + +func folderNameForModel(folderNameOfKey string, uuid string) string { + return filepath.Join(folderNameOfKey, uuid) +} + +type argon2Params struct { + memory uint32 + iterations uint32 + parallelism uint8 + saltLength uint32 + keyLength uint32 +} + +func generateKeyFromAlreadyStrongRandomInput(alreadyRandomInput []byte) []byte { + // Establish the parameters to use for Argon2. 
+ p := &argon2Params{ + memory: 64 * 1024, + iterations: 3, + parallelism: 2, + saltLength: 16, + keyLength: keySize, + } + // As the input is already cryptographically secure random, the salt is simply the first n bytes + salt := alreadyRandomInput[0:p.saltLength] + hash := argon2.IDKey(alreadyRandomInput[p.saltLength:], salt, p.iterations, p.memory, p.parallelism, p.keyLength) + return hash +} + +func lowerCaseAndTrim(tags []string) []string { + for i := range tags { + tags[i] = strings.ToLower(strings.TrimSpace(tags[i])) + } + return tags +} diff --git a/pkg/server/report.go b/pkg/server/report.go new file mode 100644 index 00000000..b24b2bc0 --- /dev/null +++ b/pkg/server/report.go @@ -0,0 +1,177 @@ +/* +Copyright © 2023 NAME HERE +*/ +package server + +import ( + "log" + "net/http" + "os" + "path/filepath" + "strconv" + "strings" + + "github.com/gin-gonic/gin" +) + +type responseType int + +const ( + dataFlowDiagram responseType = iota + dataAssetDiagram + reportPDF + risksExcel + tagsExcel + risksJSON + technicalAssetsJSON + statsJSON +) + +func (s *server) streamDataFlowDiagram(ginContext *gin.Context) { + s.streamResponse(ginContext, dataFlowDiagram) +} + +func (s *server) streamDataAssetDiagram(ginContext *gin.Context) { + s.streamResponse(ginContext, dataAssetDiagram) +} + +func (s *server) streamReportPDF(ginContext *gin.Context) { + s.streamResponse(ginContext, reportPDF) +} + +func (s *server) streamRisksExcel(ginContext *gin.Context) { + s.streamResponse(ginContext, risksExcel) +} + +func (s *server) streamTagsExcel(ginContext *gin.Context) { + s.streamResponse(ginContext, tagsExcel) +} + +func (s *server) streamRisksJSON(ginContext *gin.Context) { + s.streamResponse(ginContext, risksJSON) +} + +func (s *server) streamTechnicalAssetsJSON(ginContext *gin.Context) { + s.streamResponse(ginContext, technicalAssetsJSON) +} + +func (s *server) streamStatsJSON(ginContext *gin.Context) { + s.streamResponse(ginContext, statsJSON) +} + +func (s *server) 
streamResponse(ginContext *gin.Context, responseType responseType) { + folderNameOfKey, key, ok := s.checkTokenToFolderName(ginContext) + if !ok { + return + } + s.lockFolder(folderNameOfKey) + defer func() { + s.unlockFolder(folderNameOfKey) + var err error + if r := recover(); r != nil { + err = r.(error) + if s.configuration.Verbose { + log.Println(err) + } + log.Println(err) + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": strings.TrimSpace(err.Error()), + }) + ok = false + } + }() + dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(s.configuration.DefaultGraphvizDPI))) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + _, yamlText, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) + if !ok { + return + } + tmpModelFile, err := os.CreateTemp(s.configuration.TempFolder, "threagile-render-*") + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + defer func() { _ = os.Remove(tmpModelFile.Name()) }() + tmpOutputDir, err := os.MkdirTemp(s.configuration.TempFolder, "threagile-render-") + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + defer func() { _ = os.RemoveAll(tmpOutputDir) }() + err = os.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) + if responseType == dataFlowDiagram { + s.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, true, false, false, false, false, false, false, false, dpi) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + ginContext.File(filepath.Join(tmpOutputDir, s.configuration.DataFlowDiagramFilenamePNG)) + } else if responseType == dataAssetDiagram { + s.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, true, false, false, false, false, false, false, dpi) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + ginContext.File(filepath.Join(tmpOutputDir, s.configuration.DataAssetDiagramFilenamePNG)) + } else if responseType == reportPDF { + 
s.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, true, false, false, false, false, false, dpi) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + ginContext.FileAttachment(filepath.Join(tmpOutputDir, s.configuration.ReportFilename), s.configuration.ReportFilename) + } else if responseType == risksExcel { + s.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, true, false, false, false, false, dpi) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + ginContext.FileAttachment(filepath.Join(tmpOutputDir, s.configuration.ExcelRisksFilename), s.configuration.ExcelRisksFilename) + } else if responseType == tagsExcel { + s.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, true, false, false, false, dpi) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + ginContext.FileAttachment(filepath.Join(tmpOutputDir, s.configuration.ExcelTagsFilename), s.configuration.ExcelTagsFilename) + } else if responseType == risksJSON { + s.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, true, false, false, dpi) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, s.configuration.JsonRisksFilename)) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download + } else if responseType == technicalAssetsJSON { + s.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, true, true, false, dpi) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, s.configuration.JsonTechnicalAssetsFilename)) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + 
} + ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download + } else if responseType == statsJSON { + s.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, false, false, true, dpi) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, s.configuration.JsonStatsFilename)) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download + } +} diff --git a/pkg/server/server.go b/pkg/server/server.go new file mode 100644 index 00000000..1c92efe4 --- /dev/null +++ b/pkg/server/server.go @@ -0,0 +1,322 @@ +/* +Copyright © 2023 NAME HERE +*/ +package server + +import ( + "fmt" + "log" + "net/http" + "os" + "path/filepath" + "sort" + "strconv" + "strings" + "sync" + + "github.com/gin-gonic/gin" + "github.com/threagile/threagile/pkg/docs" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/risks" + "github.com/threagile/threagile/pkg/security/types" +) + +type server struct { + configuration ServerConfiguration + successCount int + errorCount int + globalLock sync.Mutex + throttlerLock sync.Mutex + createdObjectsThrottler map[string][]int64 + mapTokenHashToTimeoutStruct map[string]timeoutStruct + mapFolderNameToTokenHash map[string]string + extremeShortTimeoutsForTesting bool + locksByFolderName map[string]*sync.Mutex +} + +type ServerConfiguration struct { + ServerFolder string + AppDir string + BuildTimestamp string + KeyDir string + InputFile string + ExecuteModelMacro string + ServerPort int + Verbose bool + IgnoreOrphanedRiskTracking bool + KeepDiagramSourceFiles bool + CustomRiskRules map[string]*model.CustomRisk + DefaultGraphvizDPI int + TempFolder string + 
DataFlowDiagramFilenamePNG string + DataAssetDiagramFilenamePNG string + DataFlowDiagramFilenameDOT string + DataAssetDiagramFilenameDOT string + ReportFilename string + ExcelRisksFilename string + ExcelTagsFilename string + JsonRisksFilename string + JsonTechnicalAssetsFilename string + JsonStatsFilename string + CustomRiskRulesPlugins string + RaaPlugin string + SkipRiskRules string + BackupHistoryFilesToKeep int +} + +func RunServer(serverConfiguration ServerConfiguration) { + server := &server{ + configuration: serverConfiguration, + createdObjectsThrottler: make(map[string][]int64), + mapTokenHashToTimeoutStruct: make(map[string]timeoutStruct), + mapFolderNameToTokenHash: make(map[string]string), + extremeShortTimeoutsForTesting: false, + locksByFolderName: make(map[string]*sync.Mutex), + } + router := gin.Default() + router.LoadHTMLGlob(filepath.Join(server.configuration.ServerFolder, "server/static/*.html")) // <== + router.GET("/", func(c *gin.Context) { + c.HTML(http.StatusOK, "index.html", gin.H{}) + }) + router.HEAD("/", func(c *gin.Context) { + c.HTML(http.StatusOK, "index.html", gin.H{}) + }) + router.StaticFile("/threagile.png", filepath.Join(server.configuration.ServerFolder, "server/static/threagile.png")) // <== + router.StaticFile("/site.webmanifest", filepath.Join(server.configuration.ServerFolder, "server/static/site.webmanifest")) + router.StaticFile("/favicon.ico", filepath.Join(server.configuration.ServerFolder, "server/static/favicon.ico")) + router.StaticFile("/favicon-32x32.png", filepath.Join(server.configuration.ServerFolder, "server/static/favicon-32x32.png")) + router.StaticFile("/favicon-16x16.png", filepath.Join(server.configuration.ServerFolder, "server/static/favicon-16x16.png")) + router.StaticFile("/apple-touch-icon.png", filepath.Join(server.configuration.ServerFolder, "server/static/apple-touch-icon.png")) + router.StaticFile("/android-chrome-512x512.png", filepath.Join(server.configuration.ServerFolder, 
"server/static/android-chrome-512x512.png")) + router.StaticFile("/android-chrome-192x192.png", filepath.Join(server.configuration.ServerFolder, "server/static/android-chrome-192x192.png")) + + router.StaticFile("/schema.json", filepath.Join(server.configuration.AppDir, "schema.json")) + router.StaticFile("/live-templates.txt", filepath.Join(server.configuration.AppDir, "live-templates.txt")) + router.StaticFile("/openapi.yaml", filepath.Join(server.configuration.AppDir, "openapi.yaml")) + router.StaticFile("/swagger-ui/", filepath.Join(server.configuration.ServerFolder, "server/static/swagger-ui/index.html")) + router.StaticFile("/swagger-ui/index.html", filepath.Join(server.configuration.ServerFolder, "server/static/swagger-ui/index.html")) + router.StaticFile("/swagger-ui/oauth2-redirect.html", filepath.Join(server.configuration.ServerFolder, "server/static/swagger-ui/oauth2-redirect.html")) + router.StaticFile("/swagger-ui/swagger-ui.css", filepath.Join(server.configuration.ServerFolder, "server/static/swagger-ui/swagger-ui.css")) + router.StaticFile("/swagger-ui/swagger-ui.js", filepath.Join(server.configuration.ServerFolder, "server/static/swagger-ui/swagger-ui.js")) + router.StaticFile("/swagger-ui/swagger-ui-bundle.js", filepath.Join(server.configuration.ServerFolder, "server/static/swagger-ui/swagger-ui-bundle.js")) + router.StaticFile("/swagger-ui/swagger-ui-standalone-preset.js", filepath.Join(server.configuration.ServerFolder, "server/static/swagger-ui/swagger-ui-standalone-preset.js")) // <== + + router.GET("/threagile-example-model.yaml", server.exampleFile) + router.GET("/threagile-stub-model.yaml", server.stubFile) + + router.GET("/meta/ping", func(c *gin.Context) { + c.JSON(200, gin.H{ + "message": "pong", + }) + }) + router.GET("/meta/version", func(c *gin.Context) { + c.JSON(200, gin.H{ + "version": docs.ThreagileVersion, + "build_timestamp": server.configuration.BuildTimestamp, + }) + }) + router.GET("/meta/types", func(c *gin.Context) { + 
c.JSON(200, gin.H{ + "quantity": arrayOfStringValues(types.QuantityValues()), + "confidentiality": arrayOfStringValues(types.ConfidentialityValues()), + "criticality": arrayOfStringValues(types.CriticalityValues()), + "technical_asset_type": arrayOfStringValues(types.TechnicalAssetTypeValues()), + "technical_asset_size": arrayOfStringValues(types.TechnicalAssetSizeValues()), + "authorization": arrayOfStringValues(types.AuthorizationValues()), + "authentication": arrayOfStringValues(types.AuthenticationValues()), + "usage": arrayOfStringValues(types.UsageValues()), + "encryption": arrayOfStringValues(types.EncryptionStyleValues()), + "data_format": arrayOfStringValues(types.DataFormatValues()), + "protocol": arrayOfStringValues(types.ProtocolValues()), + "technical_asset_technology": arrayOfStringValues(types.TechnicalAssetTechnologyValues()), + "technical_asset_machine": arrayOfStringValues(types.TechnicalAssetMachineValues()), + "trust_boundary_type": arrayOfStringValues(types.TrustBoundaryTypeValues()), + "data_breach_probability": arrayOfStringValues(types.DataBreachProbabilityValues()), + "risk_severity": arrayOfStringValues(types.RiskSeverityValues()), + "risk_exploitation_likelihood": arrayOfStringValues(types.RiskExploitationLikelihoodValues()), + "risk_exploitation_impact": arrayOfStringValues(types.RiskExploitationImpactValues()), + "risk_function": arrayOfStringValues(types.RiskFunctionValues()), + "risk_status": arrayOfStringValues(types.RiskStatusValues()), + "stride": arrayOfStringValues(types.STRIDEValues()), + }) + }) + + // TODO router.GET("/meta/risk-rules", listRiskRules) + // TODO router.GET("/meta/model-macros", listModelMacros) + + router.GET("/meta/stats", server.stats) + + router.POST("/direct/analyze", server.analyze) + router.POST("/direct/check", server.check) + router.GET("/direct/stub", server.stubFile) + + router.POST("/auth/keys", server.createKey) + router.DELETE("/auth/keys", server.deleteKey) + router.POST("/auth/tokens", 
server.createToken) + router.DELETE("/auth/tokens", server.deleteToken) + + router.POST("/models", server.createNewModel) + router.GET("/models", server.listModels) + router.DELETE("/models/:model-id", server.deleteModel) + router.GET("/models/:model-id", server.getModel) + router.PUT("/models/:model-id", server.importModel) + router.GET("/models/:model-id/data-flow-diagram", server.streamDataFlowDiagram) + router.GET("/models/:model-id/data-asset-diagram", server.streamDataAssetDiagram) + router.GET("/models/:model-id/report-pdf", server.streamReportPDF) + router.GET("/models/:model-id/risks-excel", server.streamRisksExcel) + router.GET("/models/:model-id/tags-excel", server.streamTagsExcel) + router.GET("/models/:model-id/risks", server.streamRisksJSON) + router.GET("/models/:model-id/technical-assets", server.streamTechnicalAssetsJSON) + router.GET("/models/:model-id/stats", server.streamStatsJSON) + router.GET("/models/:model-id/analysis", server.analyzeModelOnServerDirectly) + + router.GET("/models/:model-id/cover", server.getCover) + router.PUT("/models/:model-id/cover", server.setCover) + router.GET("/models/:model-id/overview", server.getOverview) + router.PUT("/models/:model-id/overview", server.setOverview) + //router.GET("/models/:model-id/questions", getQuestions) + //router.PUT("/models/:model-id/questions", setQuestions) + router.GET("/models/:model-id/abuse-cases", server.getAbuseCases) + router.PUT("/models/:model-id/abuse-cases", server.setAbuseCases) + router.GET("/models/:model-id/security-requirements", server.getSecurityRequirements) + router.PUT("/models/:model-id/security-requirements", server.setSecurityRequirements) + //router.GET("/models/:model-id/tags", getTags) + //router.PUT("/models/:model-id/tags", setTags) + + router.GET("/models/:model-id/data-assets", server.getDataAssets) + router.POST("/models/:model-id/data-assets", server.createNewDataAsset) + router.GET("/models/:model-id/data-assets/:data-asset-id", server.getDataAsset) + 
router.PUT("/models/:model-id/data-assets/:data-asset-id", server.setDataAsset) + router.DELETE("/models/:model-id/data-assets/:data-asset-id", server.deleteDataAsset) + + router.GET("/models/:model-id/trust-boundaries", server.getTrustBoundaries) + // router.POST("/models/:model-id/trust-boundaries", createNewTrustBoundary) + // router.GET("/models/:model-id/trust-boundaries/:trust-boundary-id", getTrustBoundary) + // router.PUT("/models/:model-id/trust-boundaries/:trust-boundary-id", setTrustBoundary) + // router.DELETE("/models/:model-id/trust-boundaries/:trust-boundary-id", deleteTrustBoundary) + + router.GET("/models/:model-id/shared-runtimes", server.getSharedRuntimes) + router.POST("/models/:model-id/shared-runtimes", server.createNewSharedRuntime) + router.GET("/models/:model-id/shared-runtimes/:shared-runtime-id", server.getSharedRuntime) + router.PUT("/models/:model-id/shared-runtimes/:shared-runtime-id", server.setSharedRuntime) + router.DELETE("/models/:model-id/shared-runtimes/:shared-runtime-id", server.deleteSharedRuntime) + + fmt.Println("Threagile server running...") + _ = router.Run(":" + strconv.Itoa(server.configuration.ServerPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified +} + +func (s *server) exampleFile(ginContext *gin.Context) { + example, err := os.ReadFile(filepath.Join(s.configuration.AppDir, "threagile-example-model.yaml")) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + ginContext.Data(http.StatusOK, gin.MIMEYAML, example) +} + +func (s *server) stubFile(ginContext *gin.Context) { + stub, err := os.ReadFile(filepath.Join(s.configuration.AppDir, "threagile-stub-model.yaml")) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return + } + ginContext.Data(http.StatusOK, gin.MIMEYAML, s.addSupportedTags(stub)) // TODO use also the MIMEYAML way of serving YAML in model export? 
+} + +func (s *server) addSupportedTags(input []byte) []byte { + // add distinct tags as "tags_available" + supportedTags := make(map[string]bool) + for _, customRule := range s.configuration.CustomRiskRules { + for _, tag := range customRule.Tags { + supportedTags[strings.ToLower(tag)] = true + } + } + + for _, rule := range risks.GetBuiltInRiskRules() { + for _, tag := range rule.SupportedTags() { + supportedTags[strings.ToLower(tag)] = true + } + } + + tags := make([]string, 0, len(supportedTags)) + for t := range supportedTags { + tags = append(tags, t) + } + if len(tags) == 0 { + return input + } + sort.Strings(tags) + if s.configuration.Verbose { + fmt.Print("Supported tags of all risk rules: ") + for i, tag := range tags { + if i > 0 { + fmt.Print(", ") + } + fmt.Print(tag) + } + fmt.Println() + } + replacement := "tags_available:" + for _, tag := range tags { + replacement += "\n - " + tag + } + return []byte(strings.Replace(string(input), "tags_available:", replacement, 1)) +} + +func arrayOfStringValues(values []types.TypeEnum) []string { + result := make([]string, 0) + for _, value := range values { + result = append(result, value.String()) + } + return result +} + +func (s *server) stats(ginContext *gin.Context) { + keyCount, modelCount := 0, 0 + keyFolders, err := os.ReadDir(filepath.Join(s.configuration.ServerFolder, s.configuration.KeyDir)) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to collect stats", + }) + return + } + for _, keyFolder := range keyFolders { + if len(keyFolder.Name()) == 128 { // it's a sha512 token hash probably, so count it as token folder for the stats + keyCount++ + if keyFolder.Name() != filepath.Clean(keyFolder.Name()) { + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "weird file path", + }) + return + } + modelFolders, err := os.ReadDir(filepath.Join(s.configuration.ServerFolder, s.configuration.KeyDir, keyFolder.Name())) + if err != nil 
{ + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to collect stats", + }) + return + } + for _, modelFolder := range modelFolders { + if len(modelFolder.Name()) == 36 { // it's a uuid model folder probably, so count it as model folder for the stats + modelCount++ + } + } + } + } + // TODO collect and deliver more stats (old model count?) and health info + ginContext.JSON(http.StatusOK, gin.H{ + "key_count": keyCount, + "model_count": modelCount, + "success_count": s.successCount, + "error_count": s.errorCount, + }) +} + +func handleErrorInServiceCall(err error, ginContext *gin.Context) { + log.Println(err) + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": strings.TrimSpace(err.Error()), + }) +} diff --git a/pkg/server/token.go b/pkg/server/token.go new file mode 100644 index 00000000..d77fed28 --- /dev/null +++ b/pkg/server/token.go @@ -0,0 +1,297 @@ +/* +Copyright © 2023 NAME HERE +*/ +package server + +import ( + "crypto/rand" + "encoding/base64" + "fmt" + "log" + "net/http" + "os" + "path/filepath" + "strings" + "time" + + "github.com/gin-gonic/gin" +) + +const keySize = 32 + +type keyHeader struct { + Key string `header:"key"` +} + +type timeoutStruct struct { + xorRand []byte + createdNanoTime, lastAccessedNanoTime int64 +} + +func (s *server) createKey(ginContext *gin.Context) { + ok := s.checkObjectCreationThrottler(ginContext, "KEY") + if !ok { + return + } + s.globalLock.Lock() + defer s.globalLock.Unlock() + + keyBytesArr := make([]byte, keySize) + n, err := rand.Read(keyBytesArr[:]) + if n != keySize || err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to create key", + }) + return + } + err = os.MkdirAll(s.folderNameFromKey(keyBytesArr), 0700) + if err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to create key", + }) + return + } + ginContext.JSON(http.StatusCreated, gin.H{ + "key": 
base64.RawURLEncoding.EncodeToString(keyBytesArr[:]), + }) +} + +func (s *server) checkObjectCreationThrottler(ginContext *gin.Context, typeName string) bool { + s.throttlerLock.Lock() + defer s.throttlerLock.Unlock() + + // remove all elements older than 3 minutes (= 180000000000 ns) + now := time.Now().UnixNano() + cutoff := now - 180000000000 + for keyCheck := range s.createdObjectsThrottler { + for i := 0; i < len(s.createdObjectsThrottler[keyCheck]); i++ { + if s.createdObjectsThrottler[keyCheck][i] < cutoff { + // Remove the element at index i from slice (safe while looping using i as iterator) + s.createdObjectsThrottler[keyCheck] = append(s.createdObjectsThrottler[keyCheck][:i], s.createdObjectsThrottler[keyCheck][i+1:]...) + i-- // Since we just deleted a[i], we must redo that index + } + } + length := len(s.createdObjectsThrottler[keyCheck]) + if length == 0 { + delete(s.createdObjectsThrottler, keyCheck) + } + /* + if *verbose { + log.Println("Throttling count: "+strconv.Itoa(length)) + } + */ + } + + // check current request + keyHash := hash(typeName) // getting the real client ip is not easy inside fully encapsulated containerized runtime + if _, ok := s.createdObjectsThrottler[keyHash]; !ok { + s.createdObjectsThrottler[keyHash] = make([]int64, 0) + } + // check the limit of 20 creations for this type per 3 minutes + withinLimit := len(s.createdObjectsThrottler[keyHash]) < 20 + if withinLimit { + s.createdObjectsThrottler[keyHash] = append(s.createdObjectsThrottler[keyHash], now) + return true + } + ginContext.JSON(http.StatusTooManyRequests, gin.H{ + "error": "object creation throttling exceeded (denial-of-service protection): please wait some time and try again", + }) + return false +} + +func (s *server) deleteKey(ginContext *gin.Context) { + folderName, _, ok := s.checkKeyToFolderName(ginContext) + if !ok { + return + } + s.globalLock.Lock() + defer s.globalLock.Unlock() + err := os.RemoveAll(folderName) + if err != nil { + log.Println("error 
during key delete: " + err.Error()) + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "key not found", + }) + return + } + ginContext.JSON(http.StatusOK, gin.H{ + "message": "key deleted", + }) +} + +func (s *server) createToken(ginContext *gin.Context) { + folderName, key, ok := s.checkKeyToFolderName(ginContext) + if !ok { + return + } + s.globalLock.Lock() + defer s.globalLock.Unlock() + if tokenHash, exists := s.mapFolderNameToTokenHash[folderName]; exists { + // invalidate previous token + delete(s.mapTokenHashToTimeoutStruct, tokenHash) + } + // create a strong random 256 bit value (used to xor) + xorBytesArr := make([]byte, keySize) + n, err := rand.Read(xorBytesArr[:]) + if n != keySize || err != nil { + log.Println(err) + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to create token", + }) + return + } + now := time.Now().UnixNano() + token := xor(key, xorBytesArr) + tokenHash := hashSHA256(token) + s.housekeepingTokenMaps() + s.mapTokenHashToTimeoutStruct[tokenHash] = timeoutStruct{ + xorRand: xorBytesArr, + createdNanoTime: now, + lastAccessedNanoTime: now, + } + s.mapFolderNameToTokenHash[folderName] = tokenHash + ginContext.JSON(http.StatusCreated, gin.H{ + "token": base64.RawURLEncoding.EncodeToString(token[:]), + }) +} + +type tokenHeader struct { + Token string `header:"token"` +} + +func (s *server) deleteToken(ginContext *gin.Context) { + header := tokenHeader{} + if err := ginContext.ShouldBindHeader(&header); err != nil { + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "token not found", + }) + return + } + token, err := base64.RawURLEncoding.DecodeString(strings.TrimSpace(header.Token)) + if len(token) == 0 || err != nil { + if err != nil { + log.Println(err) + } + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "token not found", + }) + return + } + s.globalLock.Lock() + defer s.globalLock.Unlock() + s.deleteTokenHashFromMaps(hashSHA256(token)) + ginContext.JSON(http.StatusOK, gin.H{ + 
"message": "token deleted", + }) +} + +func (s *server) checkKeyToFolderName(ginContext *gin.Context) (folderNameOfKey string, key []byte, ok bool) { + header := keyHeader{} + if err := ginContext.ShouldBindHeader(&header); err != nil { + log.Println(err) + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "key not found", + }) + return folderNameOfKey, key, false + } + key, err := base64.RawURLEncoding.DecodeString(strings.TrimSpace(header.Key)) + if len(key) == 0 || err != nil { + if err != nil { + log.Println(err) + } + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "key not found", + }) + return folderNameOfKey, key, false + } + folderNameOfKey = s.folderNameFromKey(key) + if _, err := os.Stat(folderNameOfKey); os.IsNotExist(err) { + log.Println(err) + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "key not found", + }) + return folderNameOfKey, key, false + } + return folderNameOfKey, key, true +} + +func (s *server) checkTokenToFolderName(ginContext *gin.Context) (folderNameOfKey string, key []byte, ok bool) { + header := tokenHeader{} + if err := ginContext.ShouldBindHeader(&header); err != nil { + log.Println(err) + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "token not found", + }) + return folderNameOfKey, key, false + } + token, err := base64.RawURLEncoding.DecodeString(strings.TrimSpace(header.Token)) + if len(token) == 0 || err != nil { + if err != nil { + log.Println(err) + } + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "token not found", + }) + return folderNameOfKey, key, false + } + s.globalLock.Lock() + defer s.globalLock.Unlock() + s.housekeepingTokenMaps() // to remove timed-out ones + tokenHash := hashSHA256(token) + if timeoutStruct, exists := s.mapTokenHashToTimeoutStruct[tokenHash]; exists { + // re-create the key from token + key := xor(token, timeoutStruct.xorRand) + folderNameOfKey := s.folderNameFromKey(key) + if _, err := os.Stat(folderNameOfKey); os.IsNotExist(err) { + log.Println(err) + 
ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "token not found", + }) + return folderNameOfKey, key, false + } + timeoutStruct.lastAccessedNanoTime = time.Now().UnixNano() + return folderNameOfKey, key, true + } else { + ginContext.JSON(http.StatusNotFound, gin.H{ + "error": "token not found", + }) + return folderNameOfKey, key, false + } +} + +func (s *server) folderNameFromKey(key []byte) string { + sha512Hash := hashSHA256(key) + return filepath.Join(s.configuration.ServerFolder, s.configuration.KeyDir, sha512Hash) +} + +func (s *server) housekeepingTokenMaps() { + now := time.Now().UnixNano() + for tokenHash, val := range s.mapTokenHashToTimeoutStruct { + if s.extremeShortTimeoutsForTesting { + // remove all elements older than 1 minute (= 60000000000 ns) soft + // and all elements older than 3 minutes (= 180000000000 ns) hard + if now-val.lastAccessedNanoTime > 60000000000 || now-val.createdNanoTime > 180000000000 { + fmt.Println("About to remove a token hash from maps") + s.deleteTokenHashFromMaps(tokenHash) + } + } else { + // remove all elements older than 30 minutes (= 1800000000000 ns) soft + // and all elements older than 10 hours (= 36000000000000 ns) hard + if now-val.lastAccessedNanoTime > 1800000000000 || now-val.createdNanoTime > 36000000000000 { + s.deleteTokenHashFromMaps(tokenHash) + } + } + } +} + +func (s *server) deleteTokenHashFromMaps(tokenHash string) { + delete(s.mapTokenHashToTimeoutStruct, tokenHash) + for folderName, check := range s.mapFolderNameToTokenHash { + if check == tokenHash { + delete(s.mapFolderNameToTokenHash, folderName) + break + } + } +} diff --git a/pkg/server/zip.go b/pkg/server/zip.go new file mode 100644 index 00000000..93278b2d --- /dev/null +++ b/pkg/server/zip.go @@ -0,0 +1,119 @@ +/* +Copyright © 2023 NAME HERE +*/ +package server + +import ( + "archive/zip" + "fmt" + "io" + "os" + "path/filepath" + "strings" +) + +// ZipFiles compresses one or many files into a single zip archive file. 
+// Param 1: filename is the output zip file's name. +// Param 2: files is a list of files to add to the zip. +func zipFiles(filename string, files []string) error { + newZipFile, err := os.Create(filename) + if err != nil { + return err + } + defer func() { _ = newZipFile.Close() }() + + zipWriter := zip.NewWriter(newZipFile) + defer func() { _ = zipWriter.Close() }() + + // Add files to zip + for _, file := range files { + if err = addFileToZip(zipWriter, file); err != nil { + return err + } + } + return nil +} + +// Unzip will decompress a zip archive, moving all files and folders +// within the zip file (parameter 1) to an output directory (parameter 2). +func unzip(src string, dest string) ([]string, error) { + var filenames []string + + r, err := zip.OpenReader(src) + if err != nil { + return filenames, err + } + defer func() { _ = r.Close() }() + + for _, f := range r.File { + // Store filename/path for returning and using later on + path := filepath.Join(dest, f.Name) + // Check for ZipSlip. 
More Info: http://bit.ly/2MsjAWE + if !strings.HasPrefix(path, filepath.Clean(dest)+string(os.PathSeparator)) { + return filenames, fmt.Errorf("%s: illegal file path", path) + } + filenames = append(filenames, path) + if f.FileInfo().IsDir() { + // Make Folder + _ = os.MkdirAll(path, os.ModePerm) + continue + } + // Make File + if err = os.MkdirAll(filepath.Dir(path), os.ModePerm); err != nil { + return filenames, err + } + if path != filepath.Clean(path) { + return filenames, fmt.Errorf("weird file path %v", path) + } + outFile, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode()) + if err != nil { + return filenames, err + } + rc, err := f.Open() + if err != nil { + return filenames, err + } + _, err = io.Copy(outFile, rc) + // Close the file without defer to close before next iteration of loop + _ = outFile.Close() + _ = rc.Close() + if err != nil { + return filenames, err + } + } + return filenames, nil +} + +func addFileToZip(zipWriter *zip.Writer, filename string) error { + fileToZip, err := os.Open(filename) + if err != nil { + return err + } + defer func() { _ = fileToZip.Close() }() + + // Get the file information + info, err := fileToZip.Stat() + if err != nil { + return err + } + + header, err := zip.FileInfoHeader(info) + if err != nil { + return err + } + + // Using FileInfoHeader() above only uses the basename of the file. If we want + // to preserve the folder structure we can overwrite this with the full path. 
+ //header.Name = filename + + // Change to deflate to gain better compression + // see http://golang.org/pkg/archive/zip/#pkg-constants + header.Method = zip.Deflate + + writer, err := zipWriter.CreateHeader(header) + if err != nil { + return err + } + _, err = io.Copy(writer, fileToZip) + return err +} From 50023fcf9775bf002fea820bb6018c5276becc12 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Wed, 27 Dec 2023 01:07:11 -0800 Subject: [PATCH 25/68] resolved circular dependencies --- all.json | 1 - cmd/raa/main.go | 29 +- cmd/raa_dummy/main.go | 7 +- cmd/risk_demo/main.go | 25 +- cmd/threagile/main_test.go | 4 +- go.mod | 11 + go.sum | 30 + internal/threagile/context.go | 67 +-- internal/threagile/rules.go | 8 +- pkg/common/consts.go | 2 +- pkg/common/plugin-input.go | 4 +- .../add-build-pipeline-macro.go | 23 +- .../built-in/add-vault/add-vault-macro.go | 11 +- .../remove-unused-tags-macro.go | 4 +- .../seed-risk-tracking-macro.go | 3 +- .../built-in/seed-tags/seed-tags-macro.go | 4 +- pkg/model/{model.go => parse.go} | 332 ++---------- pkg/model/trust_boundary.go | 127 ----- pkg/report/excel.go | 35 +- pkg/report/json.go | 13 +- pkg/report/report.go | 513 +++++++++--------- pkg/run/runner.go | 34 +- .../accidental-secret-leak-rule.go | 25 +- .../code-backdooring/code-backdooring-rule.go | 23 +- .../container-baseimage-backdooring-rule.go | 23 +- .../container-platform-escape-rule.go | 23 +- .../cross-site-request-forgery-rule.go | 23 +- .../cross-site-scripting-rule.go | 23 +- ...risky-access-across-trust-boundary-rule.go | 27 +- .../incomplete-model/incomplete-model-rule.go | 33 +- .../ldap-injection/ldap-injection-rule.go | 23 +- ...ssing-authentication-second-factor-rule.go | 13 +- .../missing-authentication-rule.go | 25 +- .../missing-build-infrastructure-rule.go | 25 +- .../missing-cloud-hardening-rule.go | 61 +-- .../missing-file-validation-rule.go | 23 +- .../missing-hardening-rule.go | 23 +- .../missing-identity-propagation-rule.go | 23 +- 
...issing-identity-provider-isolation-rule.go | 23 +- .../missing-identity-store-rule.go | 25 +- .../missing-network-segmentation-rule.go | 23 +- .../missing-vault-isolation-rule.go | 25 +- .../missing-vault/missing-vault-rule.go | 25 +- .../built-in/missing-waf/missing-waf-rule.go | 23 +- .../mixed-targets-on-shared-runtime-rule.go | 25 +- .../path-traversal/path-traversal-rule.go | 23 +- .../push-instead-of-pull-deployment-rule.go | 23 +- .../search-query-injection-rule.go | 23 +- .../server-side-request-forgery-rule.go | 23 +- .../service-registry-poisoning-rule.go | 23 +- .../sql-nosql-injection-rule.go | 23 +- .../unchecked-deployment-rule.go | 23 +- .../unencrypted-asset-rule.go | 25 +- .../unencrypted-communication-rule.go | 27 +- .../unguarded-access-from-internet-rule.go | 27 +- .../unguarded-direct-datastore-access-rule.go | 27 +- .../unnecessary-communication-link-rule.go | 23 +- .../unnecessary-data-asset-rule.go | 23 +- .../unnecessary-data-transfer-rule.go | 31 +- .../unnecessary-technical-asset-rule.go | 23 +- .../untrusted-deserialization-rule.go | 23 +- .../wrong-communication-link-content-rule.go | 23 +- .../wrong-trust-boundary-content.go | 23 +- .../xml-external-entity-rule.go | 23 +- pkg/security/risks/{rules.go => risks.go} | 46 +- pkg/security/types/authentication.go | 1 + pkg/security/types/authentication_test.go | 1 + pkg/security/types/authorization.go | 1 + pkg/security/types/authorization_test.go | 1 + .../types}/communication_link.go | 65 ++- pkg/security/types/confidentiality.go | 1 + pkg/security/types/confidentiality_test.go | 1 + pkg/security/types/criticality.go | 1 + pkg/security/types/criticality_test.go | 1 + pkg/security/types/custom-risk.go | 27 + pkg/{model => security/types}/data_asset.go | 37 +- pkg/security/types/data_breach_probability.go | 1 + .../types/data_breach_probability_test.go | 1 + pkg/security/types/data_format.go | 1 + pkg/security/types/data_format_test.go | 1 + pkg/security/types/encryption_style.go | 1 + 
pkg/security/types/encryption_style_test.go | 1 + pkg/{model => security/types}/helpers.go | 3 +- pkg/security/types/model.go | 327 +++++++++++ pkg/security/types/protocol.go | 1 + pkg/security/types/protocol_test.go | 1 + pkg/security/types/quantity.go | 1 + pkg/security/types/quantity_test.go | 1 + pkg/security/types/risk-category.go | 21 + pkg/security/types/risk-rule.go | 7 + pkg/security/types/risk-tracking.go | 14 + pkg/security/types/risk.go | 41 ++ .../types/risk_exploitation_impact.go | 1 + .../types/risk_exploitation_impact_test.go | 1 + .../types/risk_exploitation_likelihood.go | 1 + .../risk_exploitation_likelihood_test.go | 1 + pkg/security/types/risk_function.go | 1 + pkg/security/types/risk_function_test.go | 1 + pkg/security/types/risk_severity.go | 1 + pkg/security/types/risk_severity_test.go | 1 + pkg/security/types/risk_status.go | 1 + pkg/security/types/risk_status_test.go | 1 + pkg/{model => security/types}/risks.go | 430 ++++++--------- pkg/security/types/rules.go | 47 ++ .../types}/shared_runtime.go | 25 +- pkg/security/types/stride.go | 1 + pkg/security/types/stride_test.go | 1 + .../types}/technical_asset.go | 98 ++-- pkg/security/types/technical_asset_machine.go | 1 + .../types/technical_asset_machine_test.go | 1 + pkg/security/types/technical_asset_size.go | 1 + .../types/technical_asset_size_test.go | 1 + .../types/technical_asset_technology.go | 1 + .../types/technical_asset_technology_test.go | 1 + pkg/security/types/technical_asset_type.go | 1 + .../types/technical_asset_type_test.go | 1 + pkg/security/types/trust_boundary.go | 152 ++++-- pkg/security/types/trust_boundary_type.go | 88 +++ ...ry_test.go => trust_boundary_type_test.go} | 0 pkg/security/types/types.go | 1 + pkg/security/types/usage_test.go | 1 + 121 files changed, 1992 insertions(+), 1807 deletions(-) delete mode 100644 all.json rename pkg/model/{model.go => parse.go} (69%) delete mode 100644 pkg/model/trust_boundary.go rename pkg/security/risks/{rules.go => risks.go} 
(82%) rename pkg/{model => security/types}/communication_link.go (80%) create mode 100644 pkg/security/types/custom-risk.go rename pkg/{model => security/types}/data_asset.go (86%) rename pkg/{model => security/types}/helpers.go (98%) create mode 100644 pkg/security/types/model.go create mode 100644 pkg/security/types/risk-category.go create mode 100644 pkg/security/types/risk-rule.go create mode 100644 pkg/security/types/risk-tracking.go create mode 100644 pkg/security/types/risk.go rename pkg/{model => security/types}/risks.go (58%) create mode 100644 pkg/security/types/rules.go rename pkg/{model => security/types}/shared_runtime.go (80%) rename pkg/{model => security/types}/technical_asset.go (84%) create mode 100644 pkg/security/types/trust_boundary_type.go rename pkg/security/types/{trust_boundary_test.go => trust_boundary_type_test.go} (100%) diff --git a/all.json b/all.json deleted file mode 100644 index 3b2844b2..00000000 --- a/all.json +++ /dev/null @@ -1 +0,0 @@ -[{"category":"something-strange","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eExample Individual Risk\u003c/b\u003e at \u003cb\u003eDatabase\u003c/b\u003e","synthetic_id":"something-strange@sql-database","most_relevant_data_asset":"","most_relevant_technical_asset":"sql-database","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["sql-database"]},{"category":"something-strange","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eExample Individual Risk\u003c/b\u003e at \u003cb\u003eContract 
Filesystem\u003c/b\u003e","synthetic_id":"something-strange@contract-fileserver","most_relevant_data_asset":"","most_relevant_technical_asset":"contract-fileserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":null},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eApplication Deployment\u003c/b\u003e at technical asset \u003cb\u003eJenkins Buildserver\u003c/b\u003e","synthetic_id":"incomplete-model@jenkins-buildserver\u003eapplication-deployment@jenkins-buildserver","most_relevant_data_asset":"","most_relevant_technical_asset":"jenkins-buildserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"jenkins-buildserver\u003eapplication-deployment","data_breach_probability":"improbable","data_breach_technical_assets":["jenkins-buildserver"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eAuth Credential Check Traffic\u003c/b\u003e at technical asset \u003cb\u003eApache 
Webserver\u003c/b\u003e","synthetic_id":"incomplete-model@apache-webserver\u003eauth-credential-check-traffic@apache-webserver","most_relevant_data_asset":"","most_relevant_technical_asset":"apache-webserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"apache-webserver\u003eauth-credential-check-traffic","data_breach_probability":"improbable","data_breach_technical_assets":["apache-webserver"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eAuth Traffic\u003c/b\u003e at technical asset \u003cb\u003eMarketing CMS\u003c/b\u003e","synthetic_id":"incomplete-model@marketing-cms\u003eauth-traffic@marketing-cms","most_relevant_data_asset":"","most_relevant_technical_asset":"marketing-cms","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"marketing-cms\u003eauth-traffic","data_breach_probability":"improbable","data_breach_technical_assets":["marketing-cms"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eCMS Content Traffic\u003c/b\u003e at technical asset \u003cb\u003eLoad 
Balancer\u003c/b\u003e","synthetic_id":"incomplete-model@load-balancer\u003ecms-content-traffic@load-balancer","most_relevant_data_asset":"","most_relevant_technical_asset":"load-balancer","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"load-balancer\u003ecms-content-traffic","data_breach_probability":"improbable","data_breach_technical_assets":["load-balancer"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eCMS Updates\u003c/b\u003e at technical asset \u003cb\u003eJenkins Buildserver\u003c/b\u003e","synthetic_id":"incomplete-model@jenkins-buildserver\u003ecms-updates@jenkins-buildserver","most_relevant_data_asset":"","most_relevant_technical_asset":"jenkins-buildserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"jenkins-buildserver\u003ecms-updates","data_breach_probability":"improbable","data_breach_technical_assets":["jenkins-buildserver"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eDatabase Traffic\u003c/b\u003e at technical asset \u003cb\u003eBackoffice ERP 
System\u003c/b\u003e","synthetic_id":"incomplete-model@erp-system\u003edatabase-traffic@erp-system","most_relevant_data_asset":"","most_relevant_technical_asset":"erp-system","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"erp-system\u003edatabase-traffic","data_breach_probability":"improbable","data_breach_technical_assets":["erp-system"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eERP System Traffic\u003c/b\u003e at technical asset \u003cb\u003eApache Webserver\u003c/b\u003e","synthetic_id":"incomplete-model@apache-webserver\u003eerp-system-traffic@apache-webserver","most_relevant_data_asset":"","most_relevant_technical_asset":"apache-webserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"apache-webserver\u003eerp-system-traffic","data_breach_probability":"improbable","data_breach_technical_assets":["apache-webserver"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eGit Repo Code Read Access\u003c/b\u003e at technical asset \u003cb\u003eJenkins 
Buildserver\u003c/b\u003e","synthetic_id":"incomplete-model@jenkins-buildserver\u003egit-repo-code-read-access@jenkins-buildserver","most_relevant_data_asset":"","most_relevant_technical_asset":"jenkins-buildserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"jenkins-buildserver\u003egit-repo-code-read-access","data_breach_probability":"improbable","data_breach_technical_assets":["jenkins-buildserver"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eLDAP Credential Check Traffic\u003c/b\u003e at technical asset \u003cb\u003eIdentity Provider\u003c/b\u003e","synthetic_id":"incomplete-model@identity-provider\u003eldap-credential-check-traffic@identity-provider","most_relevant_data_asset":"","most_relevant_technical_asset":"identity-provider","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"identity-provider\u003eldap-credential-check-traffic","data_breach_probability":"improbable","data_breach_technical_assets":["identity-provider"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eNFS Filesystem Access\u003c/b\u003e at technical asset \u003cb\u003eBackoffice ERP 
System\u003c/b\u003e","synthetic_id":"incomplete-model@erp-system\u003enfs-filesystem-access@erp-system","most_relevant_data_asset":"","most_relevant_technical_asset":"erp-system","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"erp-system\u003enfs-filesystem-access","data_breach_probability":"improbable","data_breach_technical_assets":["erp-system"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Protocol\u003c/b\u003e specified for communication link \u003cb\u003eWeb Application Traffic\u003c/b\u003e at technical asset \u003cb\u003eLoad Balancer\u003c/b\u003e","synthetic_id":"incomplete-model@load-balancer\u003eweb-application-traffic@load-balancer","most_relevant_data_asset":"","most_relevant_technical_asset":"load-balancer","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"load-balancer\u003eweb-application-traffic","data_breach_probability":"improbable","data_breach_technical_assets":["load-balancer"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eApache Webserver\u003c/b\u003e","synthetic_id":"incomplete-model@apache-webserver","most_relevant_data_asset":"","most_relevant_technical_asset":"apache-webserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["apache-webserver"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eBackoffice 
ERP System\u003c/b\u003e","synthetic_id":"incomplete-model@erp-system","most_relevant_data_asset":"","most_relevant_technical_asset":"erp-system","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["erp-system"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eContract Fileserver\u003c/b\u003e","synthetic_id":"incomplete-model@contract-fileserver","most_relevant_data_asset":"","most_relevant_technical_asset":"contract-fileserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["contract-fileserver"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eCustomer Contract Database\u003c/b\u003e","synthetic_id":"incomplete-model@sql-database","most_relevant_data_asset":"","most_relevant_technical_asset":"sql-database","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["sql-database"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eGit 
Repository\u003c/b\u003e","synthetic_id":"incomplete-model@git-repo","most_relevant_data_asset":"","most_relevant_technical_asset":"git-repo","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["git-repo"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eIdentity Provider\u003c/b\u003e","synthetic_id":"incomplete-model@identity-provider","most_relevant_data_asset":"","most_relevant_technical_asset":"identity-provider","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["identity-provider"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eJenkins Buildserver\u003c/b\u003e","synthetic_id":"incomplete-model@jenkins-buildserver","most_relevant_data_asset":"","most_relevant_technical_asset":"jenkins-buildserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["jenkins-buildserver"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eLDAP Auth 
Server\u003c/b\u003e","synthetic_id":"incomplete-model@ldap-auth-server","most_relevant_data_asset":"","most_relevant_technical_asset":"ldap-auth-server","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["ldap-auth-server"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eLoad Balancer\u003c/b\u003e","synthetic_id":"incomplete-model@load-balancer","most_relevant_data_asset":"","most_relevant_technical_asset":"load-balancer","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["load-balancer"]},{"category":"incomplete-model","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eUnknown Technology\u003c/b\u003e specified at technical asset \u003cb\u003eMarketing CMS\u003c/b\u003e","synthetic_id":"incomplete-model@marketing-cms","most_relevant_data_asset":"","most_relevant_technical_asset":"marketing-cms","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["marketing-cms"]},{"category":"missing-authentication","risk_status":"unchecked","severity":"elevated","exploitation_likelihood":"likely","exploitation_impact":"medium","title":"\u003cb\u003eMissing Authentication\u003c/b\u003e covering communication link \u003cb\u003eGit Repo Code Read Access\u003c/b\u003e from \u003cb\u003eJenkins Buildserver\u003c/b\u003e to \u003cb\u003eGit 
Repository\u003c/b\u003e","synthetic_id":"missing-authentication@jenkins-buildserver\u003egit-repo-code-read-access@jenkins-buildserver@git-repo","most_relevant_data_asset":"","most_relevant_technical_asset":"git-repo","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"jenkins-buildserver\u003egit-repo-code-read-access","data_breach_probability":"possible","data_breach_technical_assets":["git-repo"]},{"category":"missing-authentication","risk_status":"unchecked","severity":"elevated","exploitation_likelihood":"likely","exploitation_impact":"medium","title":"\u003cb\u003eMissing Authentication\u003c/b\u003e covering communication link \u003cb\u003eGit-Repo Code Write Access\u003c/b\u003e from \u003cb\u003eExternal Development Client\u003c/b\u003e to \u003cb\u003eGit Repository\u003c/b\u003e","synthetic_id":"missing-authentication@external-dev-client\u003egit-repo-code-write-access@external-dev-client@git-repo","most_relevant_data_asset":"","most_relevant_technical_asset":"git-repo","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"external-dev-client\u003egit-repo-code-write-access","data_breach_probability":"possible","data_breach_technical_assets":["git-repo"]},{"category":"missing-authentication","risk_status":"unchecked","severity":"elevated","exploitation_likelihood":"likely","exploitation_impact":"medium","title":"\u003cb\u003eMissing Authentication\u003c/b\u003e covering communication link \u003cb\u003eGit-Repo Web-UI Access\u003c/b\u003e from \u003cb\u003eExternal Development Client\u003c/b\u003e to \u003cb\u003eGit 
Repository\u003c/b\u003e","synthetic_id":"missing-authentication@external-dev-client\u003egit-repo-web-ui-access@external-dev-client@git-repo","most_relevant_data_asset":"","most_relevant_technical_asset":"git-repo","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"external-dev-client\u003egit-repo-web-ui-access","data_breach_probability":"possible","data_breach_technical_assets":["git-repo"]},{"category":"missing-authentication","risk_status":"unchecked","severity":"elevated","exploitation_likelihood":"likely","exploitation_impact":"medium","title":"\u003cb\u003eMissing Authentication\u003c/b\u003e covering communication link \u003cb\u003eJenkins Web-UI Access\u003c/b\u003e from \u003cb\u003eExternal Development Client\u003c/b\u003e to \u003cb\u003eJenkins Buildserver\u003c/b\u003e","synthetic_id":"missing-authentication@external-dev-client\u003ejenkins-web-ui-access@external-dev-client@jenkins-buildserver","most_relevant_data_asset":"","most_relevant_technical_asset":"jenkins-buildserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"external-dev-client\u003ejenkins-web-ui-access","data_breach_probability":"possible","data_breach_technical_assets":["jenkins-buildserver"]},{"category":"missing-build-infrastructure","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eMissing Build Infrastructure\u003c/b\u003e in the threat model (referencing asset \u003cb\u003eMarketing CMS\u003c/b\u003e as an 
example)","synthetic_id":"missing-build-infrastructure@marketing-cms","most_relevant_data_asset":"","most_relevant_technical_asset":"marketing-cms","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":[]},{"category":"missing-cloud-hardening","risk_status":"unchecked","severity":"medium","exploitation_likelihood":"unlikely","exploitation_impact":"medium","title":"\u003cb\u003eMissing Cloud Hardening (AWS)\u003c/b\u003e risk at \u003cb\u003eApplication Network\u003c/b\u003e: \u003cu\u003eCIS Benchmark for AWS\u003c/u\u003e","synthetic_id":"missing-cloud-hardening@application-network","most_relevant_data_asset":"","most_relevant_technical_asset":"","most_relevant_trust_boundary":"application-network","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"probable","data_breach_technical_assets":["load-balancer","apache-webserver","marketing-cms","erp-system","contract-fileserver","sql-database","identity-provider","ldap-auth-server"]},{"category":"missing-cloud-hardening","risk_status":"unchecked","severity":"medium","exploitation_likelihood":"unlikely","exploitation_impact":"medium","title":"\u003cb\u003eMissing Cloud Hardening (EC2)\u003c/b\u003e risk at \u003cb\u003eApache Webserver\u003c/b\u003e: \u003cu\u003eCIS Benchmark for Amazon Linux\u003c/u\u003e","synthetic_id":"missing-cloud-hardening@apache-webserver","most_relevant_data_asset":"","most_relevant_technical_asset":"apache-webserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"probable","data_breach_technical_assets":["apache-webserver"]},{"category":"missing-cloud-hardening","risk_status":"unchecked","severity":"medium","exploitation_likelihood":"unlikely","exploitation_impact":"medium","title":"\u003cb\u003eMissing Cloud Hardening (S3)\u003c/b\u003e 
risk at \u003cb\u003eContract Fileserver\u003c/b\u003e: \u003cu\u003eSecurity Best Practices for AWS S3\u003c/u\u003e","synthetic_id":"missing-cloud-hardening@contract-fileserver","most_relevant_data_asset":"","most_relevant_technical_asset":"contract-fileserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"probable","data_breach_technical_assets":["contract-fileserver"]},{"category":"missing-hardening","risk_status":"unchecked","severity":"medium","exploitation_likelihood":"likely","exploitation_impact":"low","title":"\u003cb\u003eMissing Hardening\u003c/b\u003e risk at \u003cb\u003eApache Webserver\u003c/b\u003e","synthetic_id":"missing-hardening@apache-webserver","most_relevant_data_asset":"","most_relevant_technical_asset":"apache-webserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["apache-webserver"]},{"category":"missing-hardening","risk_status":"unchecked","severity":"medium","exploitation_likelihood":"likely","exploitation_impact":"low","title":"\u003cb\u003eMissing Hardening\u003c/b\u003e risk at \u003cb\u003eBackoffice ERP System\u003c/b\u003e","synthetic_id":"missing-hardening@erp-system","most_relevant_data_asset":"","most_relevant_technical_asset":"erp-system","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["erp-system"]},{"category":"missing-hardening","risk_status":"unchecked","severity":"medium","exploitation_likelihood":"likely","exploitation_impact":"low","title":"\u003cb\u003eMissing Hardening\u003c/b\u003e risk at \u003cb\u003eJenkins 
Buildserver\u003c/b\u003e","synthetic_id":"missing-hardening@jenkins-buildserver","most_relevant_data_asset":"","most_relevant_technical_asset":"jenkins-buildserver","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["jenkins-buildserver"]},{"category":"missing-hardening","risk_status":"unchecked","severity":"medium","exploitation_likelihood":"likely","exploitation_impact":"low","title":"\u003cb\u003eMissing Hardening\u003c/b\u003e risk at \u003cb\u003eLoad Balancer\u003c/b\u003e","synthetic_id":"missing-hardening@load-balancer","most_relevant_data_asset":"","most_relevant_technical_asset":"load-balancer","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["load-balancer"]},{"category":"missing-vault","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eMissing Vault (Secret Storage)\u003c/b\u003e in the threat model (referencing asset \u003cb\u003e\u003c/b\u003e as an example)","synthetic_id":"missing-vault@","most_relevant_data_asset":"","most_relevant_technical_asset":"","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":[]},{"category":"mixed-targets-on-shared-runtime","risk_status":"unchecked","severity":"low","exploitation_likelihood":"unlikely","exploitation_impact":"low","title":"\u003cb\u003eMixed Targets on Shared Runtime\u003c/b\u003e named \u003cb\u003eWebApp and Backoffice Virtualization\u003c/b\u003e might enable attackers moving from one less valuable target to a more valuable 
one","synthetic_id":"mixed-targets-on-shared-runtime@webapp-virtualization","most_relevant_data_asset":"","most_relevant_technical_asset":"","most_relevant_trust_boundary":"","most_relevant_shared_runtime":"webapp-virtualization","most_relevant_communication_link":"","data_breach_probability":"improbable","data_breach_technical_assets":["apache-webserver","marketing-cms","erp-system","contract-fileserver","sql-database"]}] \ No newline at end of file diff --git a/cmd/raa/main.go b/cmd/raa/main.go index 5d5ba4c6..90b7d5f4 100644 --- a/cmd/raa/main.go +++ b/cmd/raa/main.go @@ -1,49 +1,40 @@ package main import ( - "bufio" "encoding/json" "fmt" - "io" "os" "sort" - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) // used from run caller: func main() { - reader := bufio.NewReader(os.Stdin) - inData, outError := io.ReadAll(reader) - if outError != nil { - _, _ = fmt.Fprintf(os.Stderr, "failed to read model data from stdin\n") - os.Exit(-2) - } - - var input model.ParsedModel - inError := json.Unmarshal(inData, &input) + var input types.ParsedModel + decoder := json.NewDecoder(os.Stdin) + inError := decoder.Decode(&input) if inError != nil { _, _ = fmt.Fprintf(os.Stderr, "failed to parse model: %v\n", inError) - _, _ = fmt.Fprint(os.Stderr, string(inData)) _, _ = fmt.Fprintf(os.Stderr, "\n") os.Exit(-2) } text := CalculateRAA(&input) - outData, marshalError := json.Marshal(input) + outData, marshalError := json.MarshalIndent(input, "", " ") if marshalError != nil { _, _ = fmt.Fprintf(os.Stderr, "failed to print model: %v\n", marshalError) os.Exit(-2) } _, _ = fmt.Fprint(os.Stdout, string(outData)) - _, _ = fmt.Fprint(os.Stderr, text) + _ = text + // _, _ = fmt.Fprint(os.Stderr, text) os.Exit(0) } -func CalculateRAA(input *model.ParsedModel) string { +func CalculateRAA(input *types.ParsedModel) string { for techAssetID, techAsset := range input.TechnicalAssets { aa := calculateAttackerAttractiveness(input, techAsset) aa += 
calculatePivotingNeighbourEffectAdjustment(input, techAsset) @@ -63,7 +54,7 @@ func CalculateRAA(input *model.ParsedModel) string { var attackerAttractivenessMinimum, attackerAttractivenessMaximum, spread float64 = 0, 0, 0 // set the concrete value in relation to the minimum and maximum of all -func calculateRelativeAttackerAttractiveness(input *model.ParsedModel, attractiveness float64) float64 { +func calculateRelativeAttackerAttractiveness(input *types.ParsedModel, attractiveness float64) float64 { if attackerAttractivenessMinimum == 0 || attackerAttractivenessMaximum == 0 { attackerAttractivenessMinimum, attackerAttractivenessMaximum = 9223372036854775807, -9223372036854775808 // determine (only one time required) the min/max of all @@ -98,7 +89,7 @@ func calculateRelativeAttackerAttractiveness(input *model.ParsedModel, attractiv } // increase the RAA (relative attacker attractiveness) by one third (1/3) of the delta to the highest outgoing neighbour (if positive delta) -func calculatePivotingNeighbourEffectAdjustment(input *model.ParsedModel, techAsset model.TechnicalAsset) float64 { +func calculatePivotingNeighbourEffectAdjustment(input *types.ParsedModel, techAsset types.TechnicalAsset) float64 { if techAsset.OutOfScope { return 0 } @@ -121,7 +112,7 @@ func calculatePivotingNeighbourEffectAdjustment(input *model.ParsedModel, techAs // The sum of all CIAs of the asset itself (fibonacci scale) plus the sum of the comm-links' transferred CIAs // Multiplied by the quantity values of the data asset for C and I (not A) -func calculateAttackerAttractiveness(input *model.ParsedModel, techAsset model.TechnicalAsset) float64 { +func calculateAttackerAttractiveness(input *types.ParsedModel, techAsset types.TechnicalAsset) float64 { if techAsset.OutOfScope { return 0 } diff --git a/cmd/raa_dummy/main.go b/cmd/raa_dummy/main.go index f4aa20b1..ff50c92e 100644 --- a/cmd/raa_dummy/main.go +++ b/cmd/raa_dummy/main.go @@ -4,11 +4,10 @@ import ( "bufio" "encoding/json" "fmt" 
+ "github.com/threagile/threagile/pkg/security/types" "io" "math/rand" "os" - - "github.com/threagile/threagile/pkg/model" ) // JUST A DUMMY TO HAVE AN ALTERNATIVE PLUGIN TO USE/TEST @@ -22,7 +21,7 @@ func main() { os.Exit(-2) } - var input model.ParsedModel + var input types.ParsedModel inError := json.Unmarshal(inData, &input) if inError != nil { _, _ = fmt.Fprintf(os.Stderr, "failed to parse model: %v\n", inError) @@ -43,7 +42,7 @@ func main() { // used from run caller: -func CalculateRAA(input *model.ParsedModel) string { +func CalculateRAA(input *types.ParsedModel) string { for techAssetID, techAsset := range input.TechnicalAssets { techAsset.RAA = float64(rand.Intn(100)) fmt.Println("Using dummy RAA random calculation (just to test the usage of other shared object files as plugins)") diff --git a/cmd/risk_demo/main.go b/cmd/risk_demo/main.go index 15cc23a9..2cbc89c5 100644 --- a/cmd/risk_demo/main.go +++ b/cmd/risk_demo/main.go @@ -8,13 +8,12 @@ import ( "io" "os" - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) type customRiskRule string -// exported as symbol (here simply as variable to interface to bundle many functions under one symbol) named "CustomRiskRule" +// exported as symbol (here simply as variable to interface to bundle many functions under one symbol) named "RiskRule" var CustomRiskRule customRiskRule @@ -26,7 +25,7 @@ func main() { if *getInfo { rule := new(customRiskRule) category := rule.Category() - riskData, marshalError := json.Marshal(model.CustomRisk{ + riskData, marshalError := json.Marshal(types.CustomRisk{ ID: category.Id, Category: category, Tags: rule.SupportedTags(), @@ -49,7 +48,7 @@ func main() { os.Exit(-2) } - var input model.ParsedModel + var input types.ParsedModel inError := json.Unmarshal(inData, &input) if inError != nil { _, _ = fmt.Fprintf(os.Stderr, "failed to parse model: %v\n", inError) @@ -71,8 +70,8 @@ func main() { os.Exit(-2) } -func (r customRiskRule) Category() 
model.RiskCategory { - return model.RiskCategory{ +func (r customRiskRule) Category() types.RiskCategory { + return types.RiskCategory{ Id: "demo", Title: "Just a Demo", Description: "Demo Description", @@ -96,18 +95,18 @@ func (r customRiskRule) SupportedTags() []string { return []string{"demo tag"} } -func (r customRiskRule) GenerateRisks(parsedModel *model.ParsedModel) []model.Risk { - generatedRisks := make([]model.Risk, 0) +func (r customRiskRule) GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { + generatedRisks := make([]types.Risk, 0) for _, techAsset := range parsedModel.TechnicalAssets { generatedRisks = append(generatedRisks, createRisk(techAsset)) } return generatedRisks } -func createRisk(technicalAsset model.TechnicalAsset) model.Risk { - risk := model.Risk{ - Category: CustomRiskRule.Category(), - Severity: model.CalculateSeverity(types.VeryLikely, types.MediumImpact), +func createRisk(technicalAsset types.TechnicalAsset) types.Risk { + risk := types.Risk{ + CategoryId: CustomRiskRule.Category().Id, + Severity: types.CalculateSeverity(types.VeryLikely, types.MediumImpact), ExploitationLikelihood: types.VeryLikely, ExploitationImpact: types.MediumImpact, Title: "Demo risk at " + technicalAsset.Title + "", @@ -115,6 +114,6 @@ func createRisk(technicalAsset model.TechnicalAsset) model.Risk { DataBreachProbability: types.Possible, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id return risk } diff --git a/cmd/threagile/main_test.go b/cmd/threagile/main_test.go index 2ce7a9ac..1df0f50d 100644 --- a/cmd/threagile/main_test.go +++ b/cmd/threagile/main_test.go @@ -4,7 +4,7 @@ import ( "encoding/json" "github.com/akedrou/textdiff" "github.com/threagile/threagile/pkg/input" - "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" "log" "os" "path/filepath" @@ -63,7 +63,7 
@@ func TestParseModelJson(t *testing.T) { return } - var modelStruct model.ParsedModel + var modelStruct types.ParsedModel unmarshalError := json.Unmarshal(modelJson, &modelStruct) if unmarshalError != nil { log.Fatal("Unable to parse model json: ", unmarshalError) diff --git a/go.mod b/go.mod index 39558d98..a165e526 100644 --- a/go.mod +++ b/go.mod @@ -16,23 +16,34 @@ require ( github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/gin-contrib/sse v0.1.0 // indirect + github.com/go-ole/go-ole v1.2.6 // indirect github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect github.com/goccy/go-json v0.10.2 // indirect + github.com/google/gops v0.3.28 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/leodido/go-urn v1.2.4 // indirect + github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect github.com/pkg/errors v0.8.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect github.com/richardlehane/mscfb v1.0.4 // indirect github.com/richardlehane/msoleps v1.0.3 // indirect + github.com/shirou/gopsutil/v3 v3.23.7 // indirect + github.com/shoenig/go-m1cpu v0.1.6 // indirect github.com/spf13/pflag v1.0.5 // indirect + github.com/tklauser/go-sysconf v0.3.11 // indirect + github.com/tklauser/numcpus v0.6.0 // indirect github.com/twitchyliquid64/golang-asm v0.15.1 // indirect + github.com/xlab/treeprint v1.2.0 // indirect + github.com/yusufpapurcu/wmi v1.2.3 // indirect golang.org/x/sys v0.15.0 // indirect golang.org/x/text v0.14.0 // indirect + 
rsc.io/goversion v1.2.0 // indirect ) require ( diff --git a/go.sum b/go.sum index fa9a4f97..278ce0b6 100644 --- a/go.sum +++ b/go.sum @@ -24,6 +24,8 @@ github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= +github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= @@ -36,7 +38,11 @@ github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MG github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g= github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/gops v0.3.28 h1:2Xr57tqKAmQYRAfG12E+yLcoa2Y42UJo2lOrUFL9ark= +github.com/google/gops v0.3.28/go.mod h1:6f6+Nl8LcHrzJwi8+p0ii+vmBFSlB4f8cOOkTJ7sk4c= github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU= github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/inconshreveable/mousetrap v1.1.0 
h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= @@ -52,6 +58,8 @@ github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZY github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -70,6 +78,8 @@ github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/richardlehane/mscfb v1.0.4 h1:WULscsljNPConisD5hR0+OyZjwK46Pfyr6mPu5ZawpM= github.com/richardlehane/mscfb v1.0.4/go.mod h1:YzVpcZg9czvAuhk9T+a3avCpcFPMUWm7gK3DypaEsUk= github.com/richardlehane/msoleps v1.0.1/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg= @@ -77,6 +87,11 @@ github.com/richardlehane/msoleps v1.0.3 h1:aznSZzrwYRl3rLKRT3gUk9am7T/mLNSnJINvN github.com/richardlehane/msoleps v1.0.3/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg= 
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w= +github.com/shirou/gopsutil/v3 v3.23.7 h1:C+fHO8hfIppoJ1WdsVm1RoI0RwXoNdfTK7yWXV0wVj4= +github.com/shirou/gopsutil/v3 v3.23.7/go.mod h1:c4gnmoRC0hQuaLqvxnx1//VXQ0Ms/X9UnJF8pddY5z4= +github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= +github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= +github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k= github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= @@ -93,12 +108,18 @@ github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/tklauser/go-sysconf v0.3.11 h1:89WgdJhk5SNwJfu+GKyYveZ4IaJ7xAkecBo+KdJV0CM= +github.com/tklauser/go-sysconf v0.3.11/go.mod h1:GqXfhXY3kiPa0nAXPDIQIWzJbMCB7AmcWpGR8lSZfqI= +github.com/tklauser/numcpus v0.6.0 h1:kebhY2Qt+3U6RNK7UqpYNA+tJ23IBEGKkB7JQBfDYms= +github.com/tklauser/numcpus v0.6.0/go.mod h1:FEZLMke0lhOUG6w2JadTzp0a+Nl8PF/GFkQ5UVIcaL4= github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= 
github.com/wcharczuk/go-chart v2.0.1+incompatible h1:0pz39ZAycJFF7ju/1mepnk26RLVLBCWz1STcD3doU0A= github.com/wcharczuk/go-chart v2.0.1+incompatible/go.mod h1:PF5tmL4EIx/7Wf+hEkpCqYi5He4u90sw+0+6FhrryuE= +github.com/xlab/treeprint v1.2.0 h1:HzHnuAF1plUN2zGlAFHbSQP2qJ0ZAD3XF5XD7OesXRQ= +github.com/xlab/treeprint v1.2.0/go.mod h1:gj5Gd3gPdKtR1ikdDK6fnFLdmIS0X30kTTuNd/WEJu0= github.com/xuri/efp v0.0.0-20230802181842-ad255f2331ca/go.mod h1:ybY/Jr0T0GTCnYjKqmdwxyxn2BQf2RcQIIvex5QldPI= github.com/xuri/efp v0.0.0-20231025114914-d1ff6096ae53 h1:Chd9DkqERQQuHpXjR/HSV1jLZA6uaoiwwH3vSuF3IW0= github.com/xuri/efp v0.0.0-20231025114914-d1ff6096ae53/go.mod h1:ybY/Jr0T0GTCnYjKqmdwxyxn2BQf2RcQIIvex5QldPI= @@ -108,6 +129,8 @@ github.com/xuri/nfp v0.0.0-20230819163627-dc951e3ffe1a/go.mod h1:WwHg+CVyzlv/TX9 github.com/xuri/nfp v0.0.0-20230919160717-d98342af3f05 h1:qhbILQo1K3mphbwKh1vNm4oGezE1eF9fQWmNiIpSfI4= github.com/xuri/nfp v0.0.0-20230919160717-d98342af3f05/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= +github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= golang.org/x/arch v0.6.0 h1:S0JTfE48HbRj80+4tbvZDYsJ3tGv6BUU3XxyZ7CirAc= golang.org/x/arch v0.6.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= @@ -134,13 +157,17 @@ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= @@ -162,6 +189,7 @@ golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= @@ -171,4 +199,6 @@ 
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50= +rsc.io/goversion v1.2.0 h1:SPn+NLTiAG7w30IRK/DKp1BjvpWabYgxlLp/+kx5J8w= +rsc.io/goversion v1.2.0/go.mod h1:Eih9y/uIBS3ulggl7KNJ09xGSLcuNaLgmvvqa07sgfo= rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= diff --git a/internal/threagile/context.go b/internal/threagile/context.go index 069a42bd..1a41d13f 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -15,6 +15,8 @@ import ( "errors" "flag" "fmt" // TODO: no fmt.Println here + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/risks" "hash/fnv" "io" "log" @@ -48,10 +50,8 @@ import ( "github.com/threagile/threagile/pkg/docs" "github.com/threagile/threagile/pkg/input" "github.com/threagile/threagile/pkg/macros" - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/report" "github.com/threagile/threagile/pkg/run" - "github.com/threagile/threagile/pkg/security/risks" "github.com/threagile/threagile/pkg/security/types" ) @@ -74,14 +74,15 @@ type Context struct { // TODO: remove refactoring note below // moved from types.go - parsedModel model.ParsedModel + parsedModel types.ParsedModel modelFilename, templateFilename *string verbose, ignoreOrphanedRiskTracking *bool generateDataFlowDiagram, generateDataAssetDiagram, generateRisksJSON, generateTechnicalAssetsJSON *bool generateStatsJSON, generateRisksExcel, generateTagsExcel, generateReportPDF *bool outputDir, raaPlugin, skipRiskRules, riskRulesPlugins, executeModelMacro *string - customRiskRules map[string]*model.CustomRisk + customRiskRules map[string]*types.CustomRisk + builtinRiskRules map[string]types.RiskRule diagramDPI, serverPort 
*int addModelTitle bool keepDiagramSourceFiles bool @@ -145,7 +146,7 @@ func (context *Context) checkRiskTracking() { // save also the risk-category-id and risk-status directly in the risk for better JSON marshalling for category := range context.parsedModel.GeneratedRisksByCategory { for i := range context.parsedModel.GeneratedRisksByCategory[category] { - context.parsedModel.GeneratedRisksByCategory[category][i].CategoryId = category + // context.parsedModel.GeneratedRisksByCategory[category][i].CategoryId = category context.parsedModel.GeneratedRisksByCategory[category][i].RiskStatus = context.parsedModel.GeneratedRisksByCategory[category][i].GetRiskTrackingStatusDefaultingUnchecked(&context.parsedModel) } } @@ -156,7 +157,7 @@ func (context *Context) Init(buildTimestamp string) *Context { keepDiagramSourceFiles: false, addModelTitle: false, buildTimestamp: buildTimestamp, - customRiskRules: make(map[string]*model.CustomRisk), + customRiskRules: make(map[string]*types.CustomRisk), drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks: true, } @@ -190,7 +191,7 @@ func (context *Context) Defaults(buildTimestamp string) *Context { return context } -func (context *Context) applyRisk(rule model.CustomRiskRule, skippedRules *map[string]bool) { +func (context *Context) applyRisk(rule types.RiskRule, skippedRules *map[string]bool) { id := rule.Category().Id _, ok := (*skippedRules)[id] @@ -222,13 +223,13 @@ func (context *Context) applyRiskGeneration() { } } - for _, rule := range risks.GetBuiltInRiskRules() { + for _, rule := range context.builtinRiskRules { context.applyRisk(rule, &skippedRules) } // NOW THE CUSTOM RISK RULES (if any) for id, customRule := range context.customRiskRules { - _, ok := skippedRules[customRule.ID] + _, ok := skippedRules[id] if ok { if *context.verbose { fmt.Println("Skipping custom risk rule:", id) @@ -261,8 +262,8 @@ func (context *Context) applyRiskGeneration() { } // save also in map keyed by synthetic risk-id - for _, category 
:= range model.SortedRiskCategories(&context.parsedModel) { - someRisks := model.SortedRisksOfCategory(&context.parsedModel, category) + for _, category := range types.SortedRiskCategories(&context.parsedModel) { + someRisks := types.SortedRisksOfCategory(&context.parsedModel, category) for _, risk := range someRisks { context.parsedModel.GeneratedRisksBySyntheticId[strings.ToLower(risk.SyntheticId)] = risk } @@ -638,11 +639,11 @@ func (context *Context) writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT strin // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: // Convert map to slice of values: - var techAssets []model.TechnicalAsset + var techAssets []types.TechnicalAsset for _, techAsset := range context.parsedModel.TechnicalAssets { techAssets = append(techAssets, techAsset) } - sort.Sort(model.ByOrderAndIdSort(techAssets)) + sort.Sort(types.ByOrderAndIdSort(techAssets)) for _, technicalAsset := range techAssets { dotContent.WriteString(context.makeTechAssetNode(technicalAsset, false)) dotContent.WriteString("\n") @@ -768,22 +769,24 @@ func (context *Context) DoIt() { context.modelInput = *new(input.ModelInput).Defaults() loadError := context.modelInput.Load(*context.modelFilename) if loadError != nil { - log.Fatal("Unable to parse model yaml: ", loadError) + log.Fatal("Unable to load model yaml: ", loadError) } - // data, _ := json.MarshalIndent(context.modelInput, "", " ") - // fmt.Printf("%v\n", string(data)) + context.builtinRiskRules = make(map[string]types.RiskRule) + for _, rule := range risks.GetBuiltInRiskRules() { + context.builtinRiskRules[rule.Category().Id] = rule + } + context.customRiskRules = types.LoadCustomRiskRules(strings.Split(*context.riskRulesPlugins, ","), context.progressReporter) - parsedModel, err := model.ParseModel(&context.modelInput) - if err != nil { - panic(err) + parsedModel, 
parseError := model.ParseModel(&context.modelInput, context.builtinRiskRules, context.customRiskRules) + if parseError != nil { + log.Fatal("Unable to parse model yaml: ", parseError) } context.parsedModel = *parsedModel introTextRAA := context.applyRAA() - context.customRiskRules = risks.LoadCustomRiskRules(strings.Split(*context.riskRulesPlugins, ","), context.progressReporter) context.applyRiskGeneration() context.applyWildcardRiskTrackingEvaluation() context.checkRiskTracking() @@ -1567,7 +1570,7 @@ func (context *Context) addSupportedTags(input []byte) []byte { } } - for _, rule := range risks.GetBuiltInRiskRules() { + for _, rule := range context.builtinRiskRules { for _, tag := range rule.SupportedTags() { supportedTags[strings.ToLower(tag)] = true } @@ -3468,7 +3471,7 @@ func (context *Context) applyWildcardRiskTrackingEvaluation() { for syntheticRiskId := range context.parsedModel.GeneratedRisksBySyntheticId { if matchingRiskIdExpression.Match([]byte(syntheticRiskId)) && context.hasNotYetAnyDirectNonWildcardRiskTracking(syntheticRiskId) { foundSome = true - context.parsedModel.RiskTracking[syntheticRiskId] = model.RiskTracking{ + context.parsedModel.RiskTracking[syntheticRiskId] = types.RiskTracking{ SyntheticRiskId: strings.TrimSpace(syntheticRiskId), Justification: riskTracking.Justification, CheckedBy: riskTracking.CheckedBy, @@ -3489,8 +3492,8 @@ func (context *Context) applyWildcardRiskTrackingEvaluation() { } } -func (context *Context) getDeferredRiskTrackingDueToWildcardMatching() map[string]model.RiskTracking { - deferredRiskTrackingDueToWildcardMatching := make(map[string]model.RiskTracking) +func (context *Context) getDeferredRiskTrackingDueToWildcardMatching() map[string]types.RiskTracking { + deferredRiskTrackingDueToWildcardMatching := make(map[string]types.RiskTracking) for syntheticRiskId, riskTracking := range context.parsedModel.RiskTracking { if strings.Contains(syntheticRiskId, "*") { // contains a wildcard char 
deferredRiskTrackingDueToWildcardMatching[syntheticRiskId] = riskTracking @@ -3539,11 +3542,11 @@ func (context *Context) writeDataAssetDiagramGraphvizDOT(diagramFilenameDOT stri `) // Technical Assets =============================================================================== - techAssets := make([]model.TechnicalAsset, 0) + techAssets := make([]types.TechnicalAsset, 0) for _, techAsset := range context.parsedModel.TechnicalAssets { techAssets = append(techAssets, techAsset) } - sort.Sort(model.ByOrderAndIdSort(techAssets)) + sort.Sort(types.ByOrderAndIdSort(techAssets)) for _, technicalAsset := range techAssets { if len(technicalAsset.DataAssetsStored) > 0 || len(technicalAsset.DataAssetsProcessed) > 0 { dotContent.WriteString(context.makeTechAssetNode(technicalAsset, true)) @@ -3552,12 +3555,12 @@ func (context *Context) writeDataAssetDiagramGraphvizDOT(diagramFilenameDOT stri } // Data Assets =============================================================================== - dataAssets := make([]model.DataAsset, 0) + dataAssets := make([]types.DataAsset, 0) for _, dataAsset := range context.parsedModel.DataAssets { dataAssets = append(dataAssets, dataAsset) } - model.SortByDataAssetDataBreachProbabilityAndTitle(&context.parsedModel, dataAssets) + types.SortByDataAssetDataBreachProbabilityAndTitle(&context.parsedModel, dataAssets) for _, dataAsset := range dataAssets { dotContent.WriteString(context.makeDataAssetNode(dataAsset)) dotContent.WriteString("\n") @@ -3594,12 +3597,12 @@ func (context *Context) writeDataAssetDiagramGraphvizDOT(diagramFilenameDOT stri return file } -func (context *Context) makeTechAssetNode(technicalAsset model.TechnicalAsset, simplified bool) string { +func (context *Context) makeTechAssetNode(technicalAsset types.TechnicalAsset, simplified bool) string { if simplified { color := colors.RgbHexColorOutOfScope() if !technicalAsset.OutOfScope { generatedRisks := technicalAsset.GeneratedRisks(&context.parsedModel) - switch 
model.HighestSeverityStillAtRisk(&context.parsedModel, generatedRisks) { + switch types.HighestSeverityStillAtRisk(&context.parsedModel, generatedRisks) { case types.CriticalSeverity: color = colors.RgbHexColorCriticalRisk() case types.HighSeverity: @@ -3613,7 +3616,7 @@ func (context *Context) makeTechAssetNode(technicalAsset model.TechnicalAsset, s default: color = "#444444" // since black is too dark here as fill color } - if len(model.ReduceToOnlyStillAtRisk(&context.parsedModel, generatedRisks)) == 0 { + if len(types.ReduceToOnlyStillAtRisk(&context.parsedModel, generatedRisks)) == 0 { color = "#444444" // since black is too dark here as fill color } } @@ -3664,7 +3667,7 @@ func (context *Context) makeTechAssetNode(technicalAsset model.TechnicalAsset, s } } -func (context *Context) makeDataAssetNode(dataAsset model.DataAsset) string { +func (context *Context) makeDataAssetNode(dataAsset types.DataAsset) string { var color string switch dataAsset.IdentifiedDataBreachProbabilityStillAtRisk(&context.parsedModel) { case types.Probable: diff --git a/internal/threagile/rules.go b/internal/threagile/rules.go index 67d6956d..7c9f6cf7 100644 --- a/internal/threagile/rules.go +++ b/internal/threagile/rules.go @@ -1,15 +1,17 @@ /* Copyright © 2023 NAME HERE */ + package threagile import ( + "github.com/threagile/threagile/pkg/security/risks" + "github.com/threagile/threagile/pkg/security/types" "strings" "github.com/spf13/cobra" "github.com/threagile/threagile/pkg/docs" - "github.com/threagile/threagile/pkg/security/risks" ) var listRiskRules = &cobra.Command{ @@ -28,7 +30,7 @@ var listRiskRules = &cobra.Command{ cmd.Println("----------------------") cmd.Println("Custom risk rules:") cmd.Println("----------------------") - customRiskRules := risks.LoadCustomRiskRules(strings.Split(plugins, ","), getProgressReporter(cmd)) + customRiskRules := types.LoadCustomRiskRules(strings.Split(plugins, ","), getProgressReporter(cmd)) for id, customRule := range customRiskRules { 
cmd.Println(id, "-->", customRule.Category.Title, "--> with tags:", customRule.Tags) } @@ -61,7 +63,7 @@ var explainRiskRules = &cobra.Command{ cmd.Println("----------------------") cmd.Println("Custom risk rules:") cmd.Println("----------------------") - customRiskRules := risks.LoadCustomRiskRules(strings.Split(plugins, ","), getProgressReporter(cmd)) + customRiskRules := types.LoadCustomRiskRules(strings.Split(plugins, ","), getProgressReporter(cmd)) for _, customRule := range customRiskRules { cmd.Printf("%v: %v\n", customRule.Category.Id, customRule.Category.Description) } diff --git a/pkg/common/consts.go b/pkg/common/consts.go index 43c06591..5a45f897 100644 --- a/pkg/common/consts.go +++ b/pkg/common/consts.go @@ -9,7 +9,7 @@ const ( ReportFilename = "report.pdf" ExcelRisksFilename = "risks.xlsx" ExcelTagsFilename = "tags.xlsx" - JsonRisksFilename = "all.json" + JsonRisksFilename = "risks.json" JsonTechnicalAssetsFilename = "technical-assets.json" JsonStatsFilename = "stats.json" DataFlowDiagramFilenameDOT = "data-flow-diagram.gv" diff --git a/pkg/common/plugin-input.go b/pkg/common/plugin-input.go index 7207bbb4..3a3cfb5f 100644 --- a/pkg/common/plugin-input.go +++ b/pkg/common/plugin-input.go @@ -1,10 +1,10 @@ package common import ( - "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/types" ) type PluginInput struct { Config - model.ParsedModel + types.ParsedModel } diff --git a/pkg/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go b/pkg/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go index e3e92dda..41fbb0fa 100644 --- a/pkg/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go +++ b/pkg/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go @@ -7,7 +7,6 @@ import ( "github.com/threagile/threagile/pkg/input" "github.com/threagile/threagile/pkg/macros" - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) @@ -33,7 +32,7 @@ var 
pushOrPull = []string{ // TODO add question for type of machine (either physical, virtual, container, etc.) -func GetNextQuestion(model *model.ParsedModel) (nextQuestion macros.MacroQuestion, err error) { +func GetNextQuestion(model *types.ParsedModel) (nextQuestion macros.MacroQuestion, err error) { counter := len(questionsAnswered) if counter > 3 && !codeInspectionUsed { counter++ @@ -249,19 +248,19 @@ func GoBack() (message string, validResult bool, err error) { return "Undo successful", true, nil } -func GetFinalChangeImpact(modelInput *input.ModelInput, model *model.ParsedModel) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(modelInput *input.ModelInput, model *types.ParsedModel) (changes []string, message string, validResult bool, err error) { changeLogCollector := make([]string, 0) message, validResult, err = applyChange(modelInput, model, &changeLogCollector, true) return changeLogCollector, message, validResult, err } -func Execute(modelInput *input.ModelInput, model *model.ParsedModel) (message string, validResult bool, err error) { +func Execute(modelInput *input.ModelInput, model *types.ParsedModel) (message string, validResult bool, err error) { changeLogCollector := make([]string, 0) message, validResult, err = applyChange(modelInput, model, &changeLogCollector, false) return message, validResult, err } -func applyChange(modelInput *input.ModelInput, parsedModel *model.ParsedModel, changeLogCollector *[]string, dryRun bool) (message string, validResult bool, err error) { +func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, changeLogCollector *[]string, dryRun bool) (message string, validResult bool, err error) { var serverSideTechAssets = make([]string, 0) // ################################################ input.AddTagToModelInput(modelInput, macroState["source-repository"][0], dryRun, changeLogCollector) @@ -275,18 +274,18 @@ func applyChange(modelInput *input.ModelInput, 
parsedModel *model.ParsedModel, c input.AddTagToModelInput(modelInput, macroState["code-inspection-platform"][0], dryRun, changeLogCollector) } - sourceRepoID := model.MakeID(macroState["source-repository"][0]) + "-sourcecode-repository" - buildPipelineID := model.MakeID(macroState["build-pipeline"][0]) + "-build-pipeline" - artifactRegistryID := model.MakeID(macroState["artifact-registry"][0]) + "-artifact-registry" + sourceRepoID := types.MakeID(macroState["source-repository"][0]) + "-sourcecode-repository" + buildPipelineID := types.MakeID(macroState["build-pipeline"][0]) + "-build-pipeline" + artifactRegistryID := types.MakeID(macroState["artifact-registry"][0]) + "-artifact-registry" containerRepoID, containerPlatformID, containerSharedRuntimeID := "", "", "" if containerTechUsed { - containerRepoID = model.MakeID(macroState["container-registry"][0]) + "-container-registry" - containerPlatformID = model.MakeID(macroState["container-platform"][0]) + "-container-platform" - containerSharedRuntimeID = model.MakeID(macroState["container-platform"][0]) + "-container-runtime" + containerRepoID = types.MakeID(macroState["container-registry"][0]) + "-container-registry" + containerPlatformID = types.MakeID(macroState["container-platform"][0]) + "-container-platform" + containerSharedRuntimeID = types.MakeID(macroState["container-platform"][0]) + "-container-runtime" } codeInspectionPlatformID := "" if codeInspectionUsed { - codeInspectionPlatformID = model.MakeID(macroState["code-inspection-platform"][0]) + "-code-inspection-platform" + codeInspectionPlatformID = types.MakeID(macroState["code-inspection-platform"][0]) + "-code-inspection-platform" } owner := macroState["owner"][0] diff --git a/pkg/macros/built-in/add-vault/add-vault-macro.go b/pkg/macros/built-in/add-vault/add-vault-macro.go index 8add58cc..70fc04a0 100644 --- a/pkg/macros/built-in/add-vault/add-vault-macro.go +++ b/pkg/macros/built-in/add-vault/add-vault-macro.go @@ -7,7 +7,6 @@ import ( 
"github.com/threagile/threagile/pkg/input" "github.com/threagile/threagile/pkg/macros" - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) @@ -41,7 +40,7 @@ var authenticationTypes = []string{ "Credentials (username/password, API-key, secret token, etc.)", } -func GetNextQuestion(parsedModel *model.ParsedModel) (nextQuestion macros.MacroQuestion, err error) { +func GetNextQuestion(parsedModel *types.ParsedModel) (nextQuestion macros.MacroQuestion, err error) { counter := len(questionsAnswered) if counter > 5 && !withinTrustBoundary { counter++ @@ -166,19 +165,19 @@ func GoBack() (message string, validResult bool, err error) { return "Undo successful", true, nil } -func GetFinalChangeImpact(modelInput *input.ModelInput, parsedModel *model.ParsedModel) (changes []string, message string, validResult bool, err error) { +func GetFinalChangeImpact(modelInput *input.ModelInput, parsedModel *types.ParsedModel) (changes []string, message string, validResult bool, err error) { changeLogCollector := make([]string, 0) message, validResult, err = applyChange(modelInput, parsedModel, &changeLogCollector, true) return changeLogCollector, message, validResult, err } -func Execute(modelInput *input.ModelInput, parsedModel *model.ParsedModel) (message string, validResult bool, err error) { +func Execute(modelInput *input.ModelInput, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { changeLogCollector := make([]string, 0) message, validResult, err = applyChange(modelInput, parsedModel, &changeLogCollector, false) return message, validResult, err } -func applyChange(modelInput *input.ModelInput, parsedModel *model.ParsedModel, changeLogCollector *[]string, dryRun bool) (message string, validResult bool, err error) { +func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, changeLogCollector *[]string, dryRun bool) (message string, validResult bool, err error) { 
input.AddTagToModelInput(modelInput, macroState["vault-name"][0], dryRun, changeLogCollector) var serverSideTechAssets = make([]string, 0) @@ -250,7 +249,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *model.ParsedModel, c } } - vaultID := model.MakeID(macroState["vault-name"][0]) + "-vault" + vaultID := types.MakeID(macroState["vault-name"][0]) + "-vault" if _, exists := parsedModel.TechnicalAssets[vaultID]; !exists { serverSideTechAssets = append(serverSideTechAssets, vaultID) diff --git a/pkg/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go b/pkg/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go index f8f3ec58..2c06a83d 100644 --- a/pkg/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go +++ b/pkg/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go @@ -1,12 +1,12 @@ package remove_unused_tags import ( + "github.com/threagile/threagile/pkg/security/types" "sort" "strconv" "github.com/threagile/threagile/pkg/input" "github.com/threagile/threagile/pkg/macros" - "github.com/threagile/threagile/pkg/model" ) func GetMacroDetails() macros.MacroDetails { @@ -33,7 +33,7 @@ func GetFinalChangeImpact(_ *input.ModelInput) (changes []string, message string return []string{"remove unused tags from the model file"}, "Changeset valid", true, err } -func Execute(modelInput *input.ModelInput, parsedModel *model.ParsedModel) (message string, validResult bool, err error) { +func Execute(modelInput *input.ModelInput, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { tagUsageMap := make(map[string]bool) for _, tag := range parsedModel.TagsAvailable { tagUsageMap[tag] = false // false = tag is not used diff --git a/pkg/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go b/pkg/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go index 2b6b45db..4210119e 100644 --- a/pkg/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go +++ 
b/pkg/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go @@ -6,7 +6,6 @@ import ( "github.com/threagile/threagile/pkg/input" "github.com/threagile/threagile/pkg/macros" - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) @@ -34,7 +33,7 @@ func GetFinalChangeImpact(_ *input.ModelInput) (changes []string, message string return []string{"seed the model file with with initial risk tracking entries for all untracked risks"}, "Changeset valid", true, err } -func Execute(parsedModel *model.ParsedModel, modelInput *input.ModelInput) (message string, validResult bool, err error) { +func Execute(parsedModel *types.ParsedModel, modelInput *input.ModelInput) (message string, validResult bool, err error) { syntheticRiskIDsToCreateTrackingFor := make([]string, 0) for id, risk := range parsedModel.GeneratedRisksBySyntheticId { if !risk.IsRiskTracked(parsedModel) { diff --git a/pkg/macros/built-in/seed-tags/seed-tags-macro.go b/pkg/macros/built-in/seed-tags/seed-tags-macro.go index 730b19f4..0aac3b97 100644 --- a/pkg/macros/built-in/seed-tags/seed-tags-macro.go +++ b/pkg/macros/built-in/seed-tags/seed-tags-macro.go @@ -1,12 +1,12 @@ package seed_tags import ( + "github.com/threagile/threagile/pkg/security/types" "sort" "strconv" "github.com/threagile/threagile/pkg/input" "github.com/threagile/threagile/pkg/macros" - "github.com/threagile/threagile/pkg/model" ) func GetMacroDetails() macros.MacroDetails { @@ -33,7 +33,7 @@ func GetFinalChangeImpact(_ *input.ModelInput) (changes []string, message string return []string{"seed the model file with supported tags from all risk rules"}, "Changeset valid", true, err } -func Execute(modelInput *input.ModelInput, parsedModel *model.ParsedModel) (message string, validResult bool, err error) { +func Execute(modelInput *input.ModelInput, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { tagMap := make(map[string]bool) for k, v := range 
parsedModel.AllSupportedTags { tagMap[k] = v diff --git a/pkg/model/model.go b/pkg/model/parse.go similarity index 69% rename from pkg/model/model.go rename to pkg/model/parse.go index 3f9626cb..9874106e 100644 --- a/pkg/model/model.go +++ b/pkg/model/parse.go @@ -1,58 +1,17 @@ -/* -Copyright © 2023 NAME HERE -*/ - package model import ( "errors" "fmt" + "github.com/threagile/threagile/pkg/input" + "github.com/threagile/threagile/pkg/security/types" "path/filepath" "regexp" - "sort" "strings" "time" - - "github.com/threagile/threagile/pkg/input" - "github.com/threagile/threagile/pkg/security/types" ) -type ParsedModel struct { - Author input.Author `json:"author" yaml:"author"` - Title string `json:"title,omitempty" yaml:"title"` - Date time.Time `json:"date" yaml:"date"` - ManagementSummaryComment string `json:"management_summary_comment,omitempty" yaml:"management_summary_comment"` - BusinessOverview input.Overview `json:"business_overview" yaml:"business_overview"` - TechnicalOverview input.Overview `json:"technical_overview" yaml:"technical_overview"` - BusinessCriticality types.Criticality `json:"business_criticality,omitempty" yaml:"business_criticality"` - SecurityRequirements map[string]string `json:"security_requirements,omitempty" yaml:"security_requirements"` - Questions map[string]string `json:"questions,omitempty" yaml:"questions"` - AbuseCases map[string]string `json:"abuse_cases,omitempty" yaml:"abuse_cases"` - TagsAvailable []string `json:"tags_available,omitempty" yaml:"tags_available"` - DataAssets map[string]DataAsset `json:"data_assets,omitempty" yaml:"data_assets"` - TechnicalAssets map[string]TechnicalAsset `json:"technical_assets,omitempty" yaml:"technical_assets"` - TrustBoundaries map[string]TrustBoundary `json:"trust_boundaries,omitempty" yaml:"trust_boundaries"` - SharedRuntimes map[string]SharedRuntime `json:"shared_runtimes,omitempty" yaml:"shared_runtimes"` - IndividualRiskCategories map[string]RiskCategory 
`json:"individual_risk_categories,omitempty" yaml:"individual_risk_categories"` - RiskTracking map[string]RiskTracking `json:"risk_tracking,omitempty" yaml:"risk_tracking"` - CommunicationLinks map[string]CommunicationLink `json:"communication_links,omitempty" yaml:"communication_links"` - AllSupportedTags map[string]bool `json:"all_supported_tags,omitempty" yaml:"all_supported_tags"` - DiagramTweakNodesep int `json:"diagram_tweak_nodesep,omitempty" yaml:"diagram_tweak_nodesep"` - DiagramTweakRanksep int `json:"diagram_tweak_ranksep,omitempty" yaml:"diagram_tweak_ranksep"` - DiagramTweakEdgeLayout string `json:"diagram_tweak_edge_layout,omitempty" yaml:"diagram_tweak_edge_layout"` - DiagramTweakSuppressEdgeLabels bool `json:"diagram_tweak_suppress_edge_labels,omitempty" yaml:"diagram_tweak_suppress_edge_labels"` - DiagramTweakLayoutLeftToRight bool `json:"diagram_tweak_layout_left_to_right,omitempty" yaml:"diagram_tweak_layout_left_to_right"` - DiagramTweakInvisibleConnectionsBetweenAssets []string `json:"diagram_tweak_invisible_connections_between_assets,omitempty" yaml:"diagram_tweak_invisible_connections_between_assets"` - DiagramTweakSameRankAssets []string `json:"diagram_tweak_same_rank_assets,omitempty" yaml:"diagram_tweak_same_rank_assets"` - - // TODO: those are generated based on items above and needs to be private - IncomingTechnicalCommunicationLinksMappedByTargetId map[string][]CommunicationLink `json:"incoming_technical_communication_links_mapped_by_target_id,omitempty" yaml:"incoming_technical_communication_links_mapped_by_target_id"` - DirectContainingTrustBoundaryMappedByTechnicalAssetId map[string]TrustBoundary `json:"direct_containing_trust_boundary_mapped_by_technical_asset_id,omitempty" yaml:"direct_containing_trust_boundary_mapped_by_technical_asset_id"` - GeneratedRisksByCategory map[string][]Risk `json:"generated_risks_by_category,omitempty" yaml:"generated_risks_by_category"` - GeneratedRisksBySyntheticId map[string]Risk 
`json:"generated_risks_by_synthetic_id,omitempty" yaml:"generated_risks_by_synthetic_id"` -} - -func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { +func ParseModel(modelInput *input.ModelInput, builtinRiskRules map[string]types.RiskRule, customRiskRules map[string]*types.CustomRisk) (*types.ParsedModel, error) { businessCriticality, err := types.ParseCriticality(modelInput.BusinessCriticality) if err != nil { return nil, errors.New("unknown 'business_criticality' value of application: " + modelInput.BusinessCriticality) @@ -67,7 +26,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { } } - parsedModel := ParsedModel{ + parsedModel := types.ParsedModel{ Author: modelInput.Author, Title: modelInput.Title, Date: reportDate, @@ -88,11 +47,11 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { DiagramTweakSameRankAssets: modelInput.DiagramTweakSameRankAssets, } - parsedModel.CommunicationLinks = make(map[string]CommunicationLink) - parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId = make(map[string][]CommunicationLink) - parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId = make(map[string]TrustBoundary) - parsedModel.GeneratedRisksByCategory = make(map[string][]Risk) - parsedModel.GeneratedRisksBySyntheticId = make(map[string]Risk) + parsedModel.CommunicationLinks = make(map[string]types.CommunicationLink) + parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId = make(map[string][]types.CommunicationLink) + parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId = make(map[string]types.TrustBoundary) + parsedModel.GeneratedRisksByCategory = make(map[string][]types.Risk) + parsedModel.GeneratedRisksBySyntheticId = make(map[string]types.Risk) parsedModel.AllSupportedTags = make(map[string]bool) if parsedModel.DiagramTweakNodesep == 0 { @@ -103,7 +62,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { } // Data Assets 
=============================================================================== - parsedModel.DataAssets = make(map[string]DataAsset) + parsedModel.DataAssets = make(map[string]types.DataAsset) for title, asset := range modelInput.DataAssets { id := fmt.Sprintf("%v", asset.ID) @@ -135,11 +94,11 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { if _, exists := parsedModel.DataAssets[id]; exists { return nil, errors.New("duplicate id used: " + id) } - tags, err := parsedModel.checkTags(lowerCaseAndTrim(asset.Tags), "data asset '"+title+"'") + tags, err := parsedModel.CheckTags(lowerCaseAndTrim(asset.Tags), "data asset '"+title+"'") if err != nil { return nil, err } - parsedModel.DataAssets[id] = DataAsset{ + parsedModel.DataAssets[id] = types.DataAsset{ Id: id, Title: title, Usage: usage, @@ -156,7 +115,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { } // Technical Assets =============================================================================== - parsedModel.TechnicalAssets = make(map[string]TechnicalAsset) + parsedModel.TechnicalAssets = make(map[string]types.TechnicalAsset) for title, asset := range modelInput.TechnicalAssets { id := fmt.Sprintf("%v", asset.ID) @@ -170,7 +129,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { dataAssetsProcessed = make([]string, len(asset.DataAssetsProcessed)) for i, parsedProcessedAsset := range asset.DataAssetsProcessed { referencedAsset := fmt.Sprintf("%v", parsedProcessedAsset) - err := parsedModel.checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") + err := parsedModel.CheckDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") if err != nil { return nil, err } @@ -183,7 +142,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { dataAssetsStored = make([]string, len(asset.DataAssetsStored)) for i, parsedStoredAssets := range asset.DataAssetsStored { referencedAsset := fmt.Sprintf("%v", 
parsedStoredAssets) - err := parsedModel.checkDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") + err := parsedModel.CheckDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") if err != nil { return nil, err } @@ -235,7 +194,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { } } - communicationLinks := make([]CommunicationLink, 0) + communicationLinks := make([]types.CommunicationLink, 0) if asset.CommunicationLinks != nil { for commLinkTitle, commLink := range asset.CommunicationLinks { constraint := true @@ -263,7 +222,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { if commLink.DataAssetsSent != nil { for _, dataAssetSent := range commLink.DataAssetsSent { referencedAsset := fmt.Sprintf("%v", dataAssetSent) - err := parsedModel.checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") + err := parsedModel.CheckDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") if err != nil { return nil, err } @@ -274,7 +233,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { if commLink.DataAssetsReceived != nil { for _, dataAssetReceived := range commLink.DataAssetsReceived { referencedAsset := fmt.Sprintf("%v", dataAssetReceived) - err := parsedModel.checkDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") + err := parsedModel.CheckDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") if err != nil { return nil, err } @@ -296,11 +255,11 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { if err != nil { return nil, err } - tags, err := parsedModel.checkTags(lowerCaseAndTrim(commLink.Tags), "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") + tags, err := 
parsedModel.CheckTags(lowerCaseAndTrim(commLink.Tags), "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") if err != nil { return nil, err } - commLink := CommunicationLink{ + commLink := types.CommunicationLink{ Id: commLinkId, SourceId: id, TargetId: commLink.Target, @@ -335,11 +294,11 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { if _, exists := parsedModel.TechnicalAssets[id]; exists { return nil, errors.New("duplicate id used: " + id) } - tags, err := parsedModel.checkTags(lowerCaseAndTrim(asset.Tags), "technical asset '"+title+"'") + tags, err := parsedModel.CheckTags(lowerCaseAndTrim(asset.Tags), "technical asset '"+title+"'") if err != nil { return nil, err } - parsedModel.TechnicalAssets[id] = TechnicalAsset{ + parsedModel.TechnicalAssets[id] = types.TechnicalAsset{ Id: id, Usage: usage, Title: title, //fmt.Sprintf("%v", asset["title"]), @@ -372,7 +331,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { // Trust Boundaries =============================================================================== checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries := make(map[string]bool) - parsedModel.TrustBoundaries = make(map[string]TrustBoundary) + parsedModel.TrustBoundaries = make(map[string]types.TrustBoundary) for title, boundary := range modelInput.TrustBoundaries { id := fmt.Sprintf("%v", boundary.ID) @@ -407,8 +366,8 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { if err != nil { return nil, errors.New("unknown 'type' of trust boundary '" + title + "': " + fmt.Sprintf("%v", boundary.Type)) } - tags, err := parsedModel.checkTags(lowerCaseAndTrim(boundary.Tags), "trust boundary '"+title+"'") - trustBoundary := TrustBoundary{ + tags, err := parsedModel.CheckTags(lowerCaseAndTrim(boundary.Tags), "trust boundary '"+title+"'") + trustBoundary := types.TrustBoundary{ Id: id, Title: title, //fmt.Sprintf("%v", boundary["title"]), Description: 
withDefault(fmt.Sprintf("%v", boundary.Description), title), @@ -430,13 +389,13 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { //fmt.Println("Asset "+technicalAsset+" is directly in trust boundary "+trustBoundary.Id) } } - err = parsedModel.checkNestedTrustBoundariesExisting() + err = parsedModel.CheckNestedTrustBoundariesExisting() if err != nil { return nil, err } // Shared Runtime =============================================================================== - parsedModel.SharedRuntimes = make(map[string]SharedRuntime) + parsedModel.SharedRuntimes = make(map[string]types.SharedRuntime) for title, inputRuntime := range modelInput.SharedRuntimes { id := fmt.Sprintf("%v", inputRuntime.ID) @@ -453,11 +412,11 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { technicalAssetsRunning[i] = assetId } } - tags, err := parsedModel.checkTags(lowerCaseAndTrim(inputRuntime.Tags), "shared runtime '"+title+"'") + tags, err := parsedModel.CheckTags(lowerCaseAndTrim(inputRuntime.Tags), "shared runtime '"+title+"'") if err != nil { return nil, err } - sharedRuntime := SharedRuntime{ + sharedRuntime := types.SharedRuntime{ Id: id, Title: title, //fmt.Sprintf("%v", boundary["title"]), Description: withDefault(fmt.Sprintf("%v", inputRuntime.Description), title), @@ -474,8 +433,19 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { parsedModel.SharedRuntimes[id] = sharedRuntime } + parsedModel.BuiltInRiskCategories = make(map[string]types.RiskCategory) + for _, rule := range builtinRiskRules { + category := rule.Category() + parsedModel.BuiltInRiskCategories[category.Id] = category + } + + parsedModel.IndividualRiskCategories = make(map[string]types.RiskCategory) + for _, rule := range customRiskRules { + parsedModel.IndividualRiskCategories[rule.Category.Id] = rule.Category + } + // Individual Risk Categories (just used as regular risk categories) 
=============================================================================== - parsedModel.IndividualRiskCategories = make(map[string]RiskCategory) + parsedModel.IndividualRiskCategories = make(map[string]types.RiskCategory) for title, individualCategory := range modelInput.IndividualRiskCategories { id := fmt.Sprintf("%v", individualCategory.ID) @@ -488,7 +458,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { return nil, errors.New("unknown 'stride' value of individual risk category '" + title + "': " + fmt.Sprintf("%v", individualCategory.STRIDE)) } - cat := RiskCategory{ + cat := types.RiskCategory{ Id: id, Title: title, Description: withDefault(fmt.Sprintf("%v", individualCategory.Description), title), @@ -537,7 +507,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { if len(individualRiskInstance.MostRelevantDataAsset) > 0 { mostRelevantDataAssetId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantDataAsset) - err := parsedModel.checkDataAssetTargetExists(mostRelevantDataAssetId, "individual risk '"+title+"'") + err := parsedModel.CheckDataAssetTargetExists(mostRelevantDataAssetId, "individual risk '"+title+"'") if err != nil { return nil, err } @@ -553,7 +523,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { if len(individualRiskInstance.MostRelevantCommunicationLink) > 0 { mostRelevantCommunicationLinkId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantCommunicationLink) - err := parsedModel.checkCommunicationLinkExists(mostRelevantCommunicationLinkId, "individual risk '"+title+"'") + err := parsedModel.CheckCommunicationLinkExists(mostRelevantCommunicationLinkId, "individual risk '"+title+"'") if err != nil { return nil, err } @@ -561,7 +531,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { if len(individualRiskInstance.MostRelevantTrustBoundary) > 0 { mostRelevantTrustBoundaryId = fmt.Sprintf("%v", 
individualRiskInstance.MostRelevantTrustBoundary) - err := parsedModel.checkTrustBoundaryExists(mostRelevantTrustBoundaryId, "individual risk '"+title+"'") + err := parsedModel.CheckTrustBoundaryExists(mostRelevantTrustBoundaryId, "individual risk '"+title+"'") if err != nil { return nil, err } @@ -569,7 +539,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { if len(individualRiskInstance.MostRelevantSharedRuntime) > 0 { mostRelevantSharedRuntimeId = fmt.Sprintf("%v", individualRiskInstance.MostRelevantSharedRuntime) - err := parsedModel.checkSharedRuntimeExists(mostRelevantSharedRuntimeId, "individual risk '"+title+"'") + err := parsedModel.CheckSharedRuntimeExists(mostRelevantSharedRuntimeId, "individual risk '"+title+"'") if err != nil { return nil, err } @@ -592,10 +562,10 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { } } - individualRiskInstance := Risk{ + individualRiskInstance := types.Risk{ SyntheticId: createSyntheticId(cat.Id, mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId), Title: fmt.Sprintf("%v", title), - Category: cat, + CategoryId: cat.Id, Severity: severity, ExploitationLikelihood: exploitationLikelihood, ExploitationImpact: exploitationImpact, @@ -613,7 +583,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { } // Risk Tracking =============================================================================== - parsedModel.RiskTracking = make(map[string]RiskTracking) + parsedModel.RiskTracking = make(map[string]types.RiskTracking) for syntheticRiskId, riskTracking := range modelInput.RiskTracking { justification := fmt.Sprintf("%v", riskTracking.Justification) checkedBy := fmt.Sprintf("%v", riskTracking.CheckedBy) @@ -632,7 +602,7 @@ func ParseModel(modelInput *input.ModelInput) (*ParsedModel, error) { return nil, errors.New("unknown 'status' value of risk tracking '" + 
syntheticRiskId + "': " + riskTracking.Status) } - tracking := RiskTracking{ + tracking := types.RiskTracking{ SyntheticRiskId: strings.TrimSpace(syntheticRiskId), Justification: justification, CheckedBy: checkedBy, @@ -665,6 +635,14 @@ func checkIdSyntax(id string) error { return nil } +func createDataFlowId(sourceAssetId, title string) (string, error) { + reg, err := regexp.Compile("[^A-Za-z0-9]+") + if err != nil { + return "", err + } + return sourceAssetId + ">" + strings.Trim(reg.ReplaceAllString(strings.ToLower(title), "-"), "- "), nil +} + func createSyntheticId(categoryId string, mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId string) string { result := categoryId @@ -712,197 +690,3 @@ func lowerCaseAndTrim(tags []string) []string { } return tags } - -func (parsedModel *ParsedModel) checkTags(tags []string, where string) ([]string, error) { - var tagsUsed = make([]string, 0) - if tags != nil { - tagsUsed = make([]string, len(tags)) - for i, parsedEntry := range tags { - referencedTag := fmt.Sprintf("%v", parsedEntry) - err := parsedModel.checkTagExists(referencedTag, where) - if err != nil { - return nil, err - } - tagsUsed[i] = referencedTag - } - } - return tagsUsed, nil -} - -func (parsedModel *ParsedModel) checkTagExists(referencedTag, where string) error { - if !contains(parsedModel.TagsAvailable, referencedTag) { - return errors.New("missing referenced tag in overall tag list at " + where + ": " + referencedTag) - } - return nil -} - -func createDataFlowId(sourceAssetId, title string) (string, error) { - reg, err := regexp.Compile("[^A-Za-z0-9]+") - if err != nil { - return "", err - } - return sourceAssetId + ">" + strings.Trim(reg.ReplaceAllString(strings.ToLower(title), "-"), "- "), nil -} - -func (parsedModel *ParsedModel) checkDataAssetTargetExists(referencedAsset, where string) error { - if _, ok := parsedModel.DataAssets[referencedAsset]; !ok { - 
return errors.New("missing referenced data asset target at " + where + ": " + referencedAsset) - } - return nil -} - -func (parsedModel *ParsedModel) checkTrustBoundaryExists(referencedId, where string) error { - if _, ok := parsedModel.TrustBoundaries[referencedId]; !ok { - return errors.New("missing referenced trust boundary at " + where + ": " + referencedId) - } - return nil -} - -func (parsedModel *ParsedModel) checkSharedRuntimeExists(referencedId, where string) error { - if _, ok := parsedModel.SharedRuntimes[referencedId]; !ok { - return errors.New("missing referenced shared runtime at " + where + ": " + referencedId) - } - return nil -} - -func (parsedModel *ParsedModel) checkCommunicationLinkExists(referencedId, where string) error { - if _, ok := parsedModel.CommunicationLinks[referencedId]; !ok { - return errors.New("missing referenced communication link at " + where + ": " + referencedId) - } - return nil -} - -func (parsedModel *ParsedModel) CheckTechnicalAssetExists(referencedAsset, where string, onlyForTweak bool) error { - if _, ok := parsedModel.TechnicalAssets[referencedAsset]; !ok { - suffix := "" - if onlyForTweak { - suffix = " (only referenced in diagram tweak)" - } - return errors.New("missing referenced technical asset target" + suffix + " at " + where + ": " + referencedAsset) - } - return nil -} - -func (parsedModel *ParsedModel) checkNestedTrustBoundariesExisting() error { - for _, trustBoundary := range parsedModel.TrustBoundaries { - for _, nestedId := range trustBoundary.TrustBoundariesNested { - if _, ok := parsedModel.TrustBoundaries[nestedId]; !ok { - return errors.New("missing referenced nested trust boundary: " + nestedId) - } - } - } - return nil -} - -func CalculateSeverity(likelihood types.RiskExploitationLikelihood, impact types.RiskExploitationImpact) types.RiskSeverity { - result := likelihood.Weight() * impact.Weight() - if result <= 1 { - return types.LowSeverity - } - if result <= 3 { - return types.MediumSeverity - } - 
if result <= 8 { - return types.ElevatedSeverity - } - if result <= 12 { - return types.HighSeverity - } - return types.CriticalSeverity -} - -func (parsedModel *ParsedModel) InScopeTechnicalAssets() []TechnicalAsset { - result := make([]TechnicalAsset, 0) - for _, asset := range parsedModel.TechnicalAssets { - if !asset.OutOfScope { - result = append(result, asset) - } - } - return result -} - -func (parsedModel *ParsedModel) SortedTechnicalAssetIDs() []string { - res := make([]string, 0) - for id := range parsedModel.TechnicalAssets { - res = append(res, id) - } - sort.Strings(res) - return res -} - -func (parsedModel *ParsedModel) TagsActuallyUsed() []string { - result := make([]string, 0) - for _, tag := range parsedModel.TagsAvailable { - if len(parsedModel.TechnicalAssetsTaggedWithAny(tag)) > 0 || - len(parsedModel.CommunicationLinksTaggedWithAny(tag)) > 0 || - len(parsedModel.DataAssetsTaggedWithAny(tag)) > 0 || - len(parsedModel.TrustBoundariesTaggedWithAny(tag)) > 0 || - len(parsedModel.SharedRuntimesTaggedWithAny(tag)) > 0 { - result = append(result, tag) - } - } - return result -} - -func (parsedModel *ParsedModel) TechnicalAssetsTaggedWithAny(tags ...string) []TechnicalAsset { - result := make([]TechnicalAsset, 0) - for _, candidate := range parsedModel.TechnicalAssets { - if candidate.IsTaggedWithAny(tags...) { - result = append(result, candidate) - } - } - return result -} - -func (parsedModel *ParsedModel) CommunicationLinksTaggedWithAny(tags ...string) []CommunicationLink { - result := make([]CommunicationLink, 0) - for _, asset := range parsedModel.TechnicalAssets { - for _, candidate := range asset.CommunicationLinks { - if candidate.IsTaggedWithAny(tags...) { - result = append(result, candidate) - } - } - } - return result -} - -func (parsedModel *ParsedModel) DataAssetsTaggedWithAny(tags ...string) []DataAsset { - result := make([]DataAsset, 0) - for _, candidate := range parsedModel.DataAssets { - if candidate.IsTaggedWithAny(tags...) 
{ - result = append(result, candidate) - } - } - return result -} - -func (parsedModel *ParsedModel) TrustBoundariesTaggedWithAny(tags ...string) []TrustBoundary { - result := make([]TrustBoundary, 0) - for _, candidate := range parsedModel.TrustBoundaries { - if candidate.IsTaggedWithAny(tags...) { - result = append(result, candidate) - } - } - return result -} - -func (parsedModel *ParsedModel) SharedRuntimesTaggedWithAny(tags ...string) []SharedRuntime { - result := make([]SharedRuntime, 0) - for _, candidate := range parsedModel.SharedRuntimes { - if candidate.IsTaggedWithAny(tags...) { - result = append(result, candidate) - } - } - return result -} - -func (parsedModel *ParsedModel) OutOfScopeTechnicalAssets() []TechnicalAsset { - assets := make([]TechnicalAsset, 0) - for _, asset := range parsedModel.TechnicalAssets { - if asset.OutOfScope { - assets = append(assets, asset) - } - } - sort.Sort(ByTechnicalAssetTitleSort(assets)) - return assets -} diff --git a/pkg/model/trust_boundary.go b/pkg/model/trust_boundary.go deleted file mode 100644 index 376b18a7..00000000 --- a/pkg/model/trust_boundary.go +++ /dev/null @@ -1,127 +0,0 @@ -/* -Copyright © 2023 NAME HERE -*/ -package model - -import ( - "sort" - - "github.com/threagile/threagile/pkg/security/types" -) - -type TrustBoundary struct { - Id, Title, Description string - Type types.TrustBoundaryType - Tags []string - TechnicalAssetsInside []string - TrustBoundariesNested []string -} - -func (what TrustBoundary) RecursivelyAllTechnicalAssetIDsInside(model *ParsedModel) []string { - result := make([]string, 0) - what.addAssetIDsRecursively(model, &result) - return result -} - -func (what TrustBoundary) IsTaggedWithAny(tags ...string) bool { - return containsCaseInsensitiveAny(what.Tags, tags...) 
-} - -func (what TrustBoundary) IsTaggedWithBaseTag(baseTag string) bool { - return IsTaggedWithBaseTag(what.Tags, baseTag) -} - -func (what TrustBoundary) IsTaggedWithAnyTraversingUp(model *ParsedModel, tags ...string) bool { - if what.IsTaggedWithAny(tags...) { - return true - } - parentID := what.ParentTrustBoundaryID(model) - if len(parentID) > 0 && model.TrustBoundaries[parentID].IsTaggedWithAnyTraversingUp(model, tags...) { - return true - } - return false -} - -func (what TrustBoundary) ParentTrustBoundaryID(model *ParsedModel) string { - var result string - for _, candidate := range model.TrustBoundaries { - if contains(candidate.TrustBoundariesNested, what.Id) { - result = candidate.Id - return result - } - } - return result -} - -func (what TrustBoundary) HighestConfidentiality(model *ParsedModel) types.Confidentiality { - highest := types.Public - for _, id := range what.RecursivelyAllTechnicalAssetIDsInside(model) { - techAsset := model.TechnicalAssets[id] - if techAsset.HighestConfidentiality(model) > highest { - highest = techAsset.HighestConfidentiality(model) - } - } - return highest -} - -func (what TrustBoundary) HighestIntegrity(model *ParsedModel) types.Criticality { - highest := types.Archive - for _, id := range what.RecursivelyAllTechnicalAssetIDsInside(model) { - techAsset := model.TechnicalAssets[id] - if techAsset.HighestIntegrity(model) > highest { - highest = techAsset.HighestIntegrity(model) - } - } - return highest -} - -func (what TrustBoundary) HighestAvailability(model *ParsedModel) types.Criticality { - highest := types.Archive - for _, id := range what.RecursivelyAllTechnicalAssetIDsInside(model) { - techAsset := model.TechnicalAssets[id] - if techAsset.HighestAvailability(model) > highest { - highest = techAsset.HighestAvailability(model) - } - } - return highest -} - -func (what TrustBoundary) AllParentTrustBoundaryIDs(model *ParsedModel) []string { - result := make([]string, 0) - what.addTrustBoundaryIDsRecursively(model, 
&result) - return result -} - -func (what TrustBoundary) addAssetIDsRecursively(model *ParsedModel, result *[]string) { - *result = append(*result, what.TechnicalAssetsInside...) - for _, nestedBoundaryID := range what.TrustBoundariesNested { - model.TrustBoundaries[nestedBoundaryID].addAssetIDsRecursively(model, result) - } -} - -// TODO: pass ParsedModelRoot as parameter instead of using global variable -func (what TrustBoundary) addTrustBoundaryIDsRecursively(model *ParsedModel, result *[]string) { - *result = append(*result, what.Id) - parentID := what.ParentTrustBoundaryID(model) - if len(parentID) > 0 { - model.TrustBoundaries[parentID].addTrustBoundaryIDsRecursively(model, result) - } -} - -// as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: -func SortedKeysOfTrustBoundaries(model *ParsedModel) []string { - keys := make([]string, 0) - for k := range model.TrustBoundaries { - keys = append(keys, k) - } - sort.Strings(keys) - return keys -} - -type ByTrustBoundaryTitleSort []TrustBoundary - -func (what ByTrustBoundaryTitleSort) Len() int { return len(what) } -func (what ByTrustBoundaryTitleSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } -func (what ByTrustBoundaryTitleSort) Less(i, j int) bool { - return what[i].Title < what[j].Title -} diff --git a/pkg/report/excel.go b/pkg/report/excel.go index 2dde8d91..9470db8d 100644 --- a/pkg/report/excel.go +++ b/pkg/report/excel.go @@ -6,14 +6,13 @@ import ( "strings" "github.com/threagile/threagile/pkg/colors" - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" "github.com/xuri/excelize/v2" ) var excelRow int -func WriteRisksExcelToFile(parsedModel *model.ParsedModel, filename string) { +func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) { excelRow = 0 excel := excelize.NewFile() sheetName := parsedModel.Title @@ -339,8 +338,8 @@ func WriteRisksExcelToFile(parsedModel *model.ParsedModel, 
filename string) { }) excelRow++ // as we have a header line - for _, category := range model.SortedRiskCategories(parsedModel) { - risks := model.SortedRisksOfCategory(parsedModel, category) + for _, category := range types.SortedRiskCategories(parsedModel) { + risks := types.SortedRisksOfCategory(parsedModel, category) for _, risk := range risks { excelRow++ techAsset := parsedModel.TechnicalAssets[risk.MostRelevantTechnicalAssetId] @@ -350,17 +349,17 @@ func WriteRisksExcelToFile(parsedModel *model.ParsedModel, filename string) { err := excel.SetCellValue(sheetName, "A"+strconv.Itoa(excelRow), risk.Severity.Title()) err = excel.SetCellValue(sheetName, "B"+strconv.Itoa(excelRow), risk.ExploitationLikelihood.Title()) err = excel.SetCellValue(sheetName, "C"+strconv.Itoa(excelRow), risk.ExploitationImpact.Title()) - err = excel.SetCellValue(sheetName, "D"+strconv.Itoa(excelRow), risk.Category.STRIDE.Title()) - err = excel.SetCellValue(sheetName, "E"+strconv.Itoa(excelRow), risk.Category.Function.Title()) - err = excel.SetCellValue(sheetName, "F"+strconv.Itoa(excelRow), "CWE-"+strconv.Itoa(risk.Category.CWE)) - err = excel.SetCellValue(sheetName, "G"+strconv.Itoa(excelRow), risk.Category.Title) + err = excel.SetCellValue(sheetName, "D"+strconv.Itoa(excelRow), category.STRIDE.Title()) + err = excel.SetCellValue(sheetName, "E"+strconv.Itoa(excelRow), category.Function.Title()) + err = excel.SetCellValue(sheetName, "F"+strconv.Itoa(excelRow), "CWE-"+strconv.Itoa(category.CWE)) + err = excel.SetCellValue(sheetName, "G"+strconv.Itoa(excelRow), category.Title) err = excel.SetCellValue(sheetName, "H"+strconv.Itoa(excelRow), techAsset.Title) err = excel.SetCellValue(sheetName, "I"+strconv.Itoa(excelRow), commLink.Title) err = excel.SetCellFloat(sheetName, "J"+strconv.Itoa(excelRow), techAsset.RAA, 0, 32) err = excel.SetCellValue(sheetName, "K"+strconv.Itoa(excelRow), removeFormattingTags(risk.Title)) - err = excel.SetCellValue(sheetName, "L"+strconv.Itoa(excelRow), 
risk.Category.Action) - err = excel.SetCellValue(sheetName, "M"+strconv.Itoa(excelRow), risk.Category.Mitigation) - err = excel.SetCellValue(sheetName, "N"+strconv.Itoa(excelRow), risk.Category.Check) + err = excel.SetCellValue(sheetName, "L"+strconv.Itoa(excelRow), category.Action) + err = excel.SetCellValue(sheetName, "M"+strconv.Itoa(excelRow), category.Mitigation) + err = excel.SetCellValue(sheetName, "N"+strconv.Itoa(excelRow), category.Check) err = excel.SetCellValue(sheetName, "O"+strconv.Itoa(excelRow), risk.SyntheticId) err = excel.SetCellValue(sheetName, "P"+strconv.Itoa(excelRow), riskTrackingStatus.Title()) if riskTrackingStatus != types.Unchecked { @@ -456,7 +455,7 @@ func WriteRisksExcelToFile(parsedModel *model.ParsedModel, filename string) { checkErr(err) } -func WriteTagsExcelToFile(parsedModel *model.ParsedModel, filename string) { // TODO: eventually when len(sortedTagsAvailable) == 0 is: write a hint in the Excel that no tags are used +func WriteTagsExcelToFile(parsedModel *types.ParsedModel, filename string) { // TODO: eventually when len(sortedTagsAvailable) == 0 is: write a hint in the Excel that no tags are used excelRow = 0 excel := excelize.NewFile() sheetName := parsedModel.Title @@ -600,21 +599,21 @@ func WriteTagsExcelToFile(parsedModel *model.ParsedModel, filename string) { // checkErr(err) } -func sortedTrustBoundariesByTitle(parsedModel *model.ParsedModel) []model.TrustBoundary { - boundaries := make([]model.TrustBoundary, 0) +func sortedTrustBoundariesByTitle(parsedModel *types.ParsedModel) []types.TrustBoundary { + boundaries := make([]types.TrustBoundary, 0) for _, boundary := range parsedModel.TrustBoundaries { boundaries = append(boundaries, boundary) } - sort.Sort(model.ByTrustBoundaryTitleSort(boundaries)) + sort.Sort(types.ByTrustBoundaryTitleSort(boundaries)) return boundaries } -func sortedDataAssetsByTitle(parsedModel *model.ParsedModel) []model.DataAsset { - assets := make([]model.DataAsset, 0) +func 
sortedDataAssetsByTitle(parsedModel *types.ParsedModel) []types.DataAsset { + assets := make([]types.DataAsset, 0) for _, asset := range parsedModel.DataAssets { assets = append(assets, asset) } - sort.Sort(model.ByDataAssetTitleSort(assets)) + sort.Sort(types.ByDataAssetTitleSort(assets)) return assets } diff --git a/pkg/report/json.go b/pkg/report/json.go index 4442643e..9b104633 100644 --- a/pkg/report/json.go +++ b/pkg/report/json.go @@ -2,12 +2,11 @@ package report import ( "encoding/json" + "github.com/threagile/threagile/pkg/security/types" "os" - - "github.com/threagile/threagile/pkg/model" ) -func WriteRisksJSON(parsedModel *model.ParsedModel, filename string) { +func WriteRisksJSON(parsedModel *types.ParsedModel, filename string) { /* remainingRisks := make([]model.Risk, 0) for _, category := range model.SortedRiskCategories() { @@ -17,7 +16,7 @@ func WriteRisksJSON(parsedModel *model.ParsedModel, filename string) { } } */ - jsonBytes, err := json.Marshal(model.AllRisks(parsedModel)) + jsonBytes, err := json.Marshal(types.AllRisks(parsedModel)) if err != nil { panic(err) } @@ -29,7 +28,7 @@ func WriteRisksJSON(parsedModel *model.ParsedModel, filename string) { // TODO: also a "data assets" json? 
-func WriteTechnicalAssetsJSON(parsedModel *model.ParsedModel, filename string) { +func WriteTechnicalAssetsJSON(parsedModel *types.ParsedModel, filename string) { jsonBytes, err := json.Marshal(parsedModel.TechnicalAssets) if err != nil { panic(err) @@ -40,8 +39,8 @@ func WriteTechnicalAssetsJSON(parsedModel *model.ParsedModel, filename string) { } } -func WriteStatsJSON(parsedModel *model.ParsedModel, filename string) { - jsonBytes, err := json.Marshal(model.OverallRiskStatistics(parsedModel)) +func WriteStatsJSON(parsedModel *types.ParsedModel, filename string) { + jsonBytes, err := json.Marshal(types.OverallRiskStatistics(parsedModel)) if err != nil { panic(err) } diff --git a/pkg/report/report.go b/pkg/report/report.go index 57b01cbd..4b3ed80f 100644 --- a/pkg/report/report.go +++ b/pkg/report/report.go @@ -18,7 +18,6 @@ import ( "github.com/jung-kurt/gofpdf/contrib/gofpdi" "github.com/threagile/threagile/pkg/colors" "github.com/threagile/threagile/pkg/docs" - "github.com/threagile/threagile/pkg/model" accidental_secret_leak "github.com/threagile/threagile/pkg/security/risks/built-in/accidental-secret-leak" code_backdooring "github.com/threagile/threagile/pkg/security/risks/built-in/code-backdooring" container_baseimage_backdooring "github.com/threagile/threagile/pkg/security/risks/built-in/container-baseimage-backdooring" @@ -102,9 +101,9 @@ func WriteReportPDF(reportFilename string, buildTimestamp string, modelHash string, introTextRAA string, - customRiskRules map[string]*model.CustomRisk, + customRiskRules map[string]*types.CustomRisk, tempFolder string, - model *model.ParsedModel) { + model *types.ParsedModel) { initReport() createPdfAndInitMetadata(model) parseBackgroundTemplate(templateFilename) @@ -143,7 +142,7 @@ func checkErr(err error) { } } -func createPdfAndInitMetadata(model *model.ParsedModel) { +func createPdfAndInitMetadata(model *types.ParsedModel) { pdf = gofpdf.New("P", "mm", "A4", "") pdf.SetCreator(model.Author.Homepage, true) 
pdf.SetAuthor(model.Author.Name, true) @@ -178,7 +177,7 @@ func headerFunc() { } } -func addBreadcrumb(parsedModel *model.ParsedModel) { +func addBreadcrumb(parsedModel *types.ParsedModel) { if len(currentChapterTitleBreadcrumb) > 0 { uni := pdf.UnicodeTranslatorFromDescriptor("") pdf.SetFont("Helvetica", "", 10) @@ -203,7 +202,7 @@ func parseBackgroundTemplate(templateFilename string) { diagramLegendTemplateId = gofpdi.ImportPage(pdf, templateFilename, 3, "/MediaBox") } -func createCover(parsedModel *model.ParsedModel) { +func createCover(parsedModel *types.ParsedModel) { uni := pdf.UnicodeTranslatorFromDescriptor("") pdf.AddPage() gofpdi.UseImportedTemplate(pdf, coverTemplateId, 0, 0, 0, 300) @@ -225,7 +224,7 @@ func createCover(parsedModel *model.ParsedModel) { pdf.SetTextColor(0, 0, 0) } -func createTableOfContents(parsedModel *model.ParsedModel) { +func createTableOfContents(parsedModel *types.ParsedModel) { uni := pdf.UnicodeTranslatorFromDescriptor("") pdf.AddPage() currentChapterTitleBreadcrumb = "Table of Contents" @@ -256,7 +255,7 @@ func createTableOfContents(parsedModel *model.ParsedModel) { risksStr := "Risks" catStr := "Categories" - count, catCount := model.TotalRiskCount(parsedModel), len(parsedModel.GeneratedRisksByCategory) + count, catCount := types.TotalRiskCount(parsedModel), len(parsedModel.GeneratedRisksByCategory) if count == 1 { risksStr = "Risk" } @@ -278,7 +277,7 @@ func createTableOfContents(parsedModel *model.ParsedModel) { y += 6 risksStr = "Risks" catStr = "Categories" - count, catCount = len(model.FilteredByStillAtRisk(parsedModel)), len(model.CategoriesOfOnlyRisksStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory)) + count, catCount = len(types.FilteredByStillAtRisk(parsedModel)), len(types.CategoriesOfOnlyRisksStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory)) if count == 1 { risksStr = "Risk" } @@ -369,13 +368,13 @@ func createTableOfContents(parsedModel *model.ParsedModel) { pdf.Link(10, y-5, 172.5, 6.5, 
pdf.AddLink()) y += 6 - modelFailures := model.FlattenRiskSlice(model.FilterByModelFailures(parsedModel.GeneratedRisksByCategory)) + modelFailures := types.FlattenRiskSlice(types.FilterByModelFailures(parsedModel, parsedModel.GeneratedRisksByCategory)) risksStr = "Risks" count = len(modelFailures) if count == 1 { risksStr = "Risk" } - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(parsedModel, modelFailures)) + countStillAtRisk := len(types.ReduceToOnlyStillAtRisk(parsedModel, modelFailures)) if countStillAtRisk > 0 { colors.ColorModelFailure(pdf) } @@ -418,9 +417,9 @@ func createTableOfContents(parsedModel *model.ParsedModel) { pdf.Text(175, y, "{intro-risks-by-vulnerability-category}") pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) - for _, category := range model.SortedRiskCategories(parsedModel) { - newRisksStr := model.SortedRisksOfCategory(parsedModel, category) - switch model.HighestSeverityStillAtRisk(parsedModel, newRisksStr) { + for _, category := range types.SortedRiskCategories(parsedModel) { + newRisksStr := types.SortedRisksOfCategory(parsedModel, category) + switch types.HighestSeverityStillAtRisk(parsedModel, newRisksStr) { case types.CriticalSeverity: colors.ColorCriticalRisk(pdf) case types.HighSeverity: @@ -434,7 +433,7 @@ func createTableOfContents(parsedModel *model.ParsedModel) { default: pdfColorBlack() } - if len(model.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) == 0 { + if len(types.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) == 0 { pdfColorBlack() } y += 6 @@ -442,7 +441,7 @@ func createTableOfContents(parsedModel *model.ParsedModel) { pageBreakInLists() y = 40 } - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) + countStillAtRisk := len(types.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(newRisksStr)) + " Risk" if len(newRisksStr) != 1 { suffix += "s" @@ -480,7 +479,7 @@ func 
createTableOfContents(parsedModel *model.ParsedModel) { pageBreakInLists() y = 40 } - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) + countStillAtRisk := len(types.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(newRisksStr)) + " Risk" if len(newRisksStr) != 1 { suffix += "s" @@ -489,7 +488,7 @@ func createTableOfContents(parsedModel *model.ParsedModel) { pdfColorOutOfScope() suffix = "out-of-scope" } else { - switch model.HighestSeverityStillAtRisk(parsedModel, newRisksStr) { + switch types.HighestSeverityStillAtRisk(parsedModel, newRisksStr) { case types.CriticalSeverity: colors.ColorCriticalRisk(pdf) case types.HighSeverity: @@ -503,7 +502,7 @@ func createTableOfContents(parsedModel *model.ParsedModel) { default: pdfColorBlack() } - if len(model.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) == 0 { + if len(types.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) == 0 { pdfColorBlack() } } @@ -540,7 +539,7 @@ func createTableOfContents(parsedModel *model.ParsedModel) { y = 40 } newRisksStr := dataAsset.IdentifiedDataBreachProbabilityRisks(parsedModel) - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) + countStillAtRisk := len(types.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(newRisksStr)) + " Risk" if len(newRisksStr) != 1 { suffix += "s" @@ -579,7 +578,7 @@ func createTableOfContents(parsedModel *model.ParsedModel) { pdfColorBlack() pdf.Text(11, y, "Trust Boundaries") pdf.SetFont("Helvetica", "", fontSizeBody) - for _, key := range model.SortedKeysOfTrustBoundaries(parsedModel) { + for _, key := range types.SortedKeysOfTrustBoundaries(parsedModel) { trustBoundary := parsedModel.TrustBoundaries[key] y += 6 if y > 275 { @@ -612,7 +611,7 @@ func createTableOfContents(parsedModel *model.ParsedModel) { pdfColorBlack() pdf.Text(11, y, "Shared Runtime") 
pdf.SetFont("Helvetica", "", fontSizeBody) - for _, key := range model.SortedKeysOfSharedRuntime(parsedModel) { + for _, key := range types.SortedKeysOfSharedRuntime(parsedModel) { sharedRuntime := parsedModel.SharedRuntimes[key] y += 6 if y > 275 { @@ -668,22 +667,22 @@ func createTableOfContents(parsedModel *model.ParsedModel) { // by the current page number. --> See the "pdf.RegisterAlias()" calls during the PDF creation in this file } -func sortedTechnicalAssetsByRiskSeverityAndTitle(parsedModel *model.ParsedModel) []model.TechnicalAsset { - assets := make([]model.TechnicalAsset, 0) +func sortedTechnicalAssetsByRiskSeverityAndTitle(parsedModel *types.ParsedModel) []types.TechnicalAsset { + assets := make([]types.TechnicalAsset, 0) for _, asset := range parsedModel.TechnicalAssets { assets = append(assets, asset) } - model.SortByTechnicalAssetRiskSeverityAndTitleStillAtRisk(assets, parsedModel) + types.SortByTechnicalAssetRiskSeverityAndTitleStillAtRisk(assets, parsedModel) return assets } -func sortedDataAssetsByDataBreachProbabilityAndTitle(parsedModel *model.ParsedModel) []model.DataAsset { - assets := make([]model.DataAsset, 0) +func sortedDataAssetsByDataBreachProbabilityAndTitle(parsedModel *types.ParsedModel) []types.DataAsset { + assets := make([]types.DataAsset, 0) for _, asset := range parsedModel.DataAssets { assets = append(assets, asset) } - model.SortByDataAssetDataBreachProbabilityAndTitleStillAtRisk(parsedModel, assets) + types.SortByDataAssetDataBreachProbabilityAndTitleStillAtRisk(parsedModel, assets) return assets } @@ -699,7 +698,7 @@ func defineLinkTarget(alias string) { linkCounter++ } -func createDisclaimer(parsedModel *model.ParsedModel) { +func createDisclaimer(parsedModel *types.ParsedModel) { pdf.AddPage() currentChapterTitleBreadcrumb = "Disclaimer" defineLinkTarget("{disclaimer}") @@ -752,25 +751,25 @@ func createDisclaimer(parsedModel *model.ParsedModel) { pdfColorBlack() } -func createManagementSummary(parsedModel 
*model.ParsedModel, tempFolder string) { +func createManagementSummary(parsedModel *types.ParsedModel, tempFolder string) { uni := pdf.UnicodeTranslatorFromDescriptor("") pdf.SetTextColor(0, 0, 0) title := "Management Summary" addHeadline(title, false) defineLinkTarget("{management-summary}") currentChapterTitleBreadcrumb = title - countCritical := len(model.FilteredByOnlyCriticalRisks(parsedModel)) - countHigh := len(model.FilteredByOnlyHighRisks(parsedModel)) - countElevated := len(model.FilteredByOnlyElevatedRisks(parsedModel)) - countMedium := len(model.FilteredByOnlyMediumRisks(parsedModel)) - countLow := len(model.FilteredByOnlyLowRisks(parsedModel)) - - countStatusUnchecked := len(model.FilteredByRiskTrackingUnchecked(parsedModel)) - countStatusInDiscussion := len(model.FilteredByRiskTrackingInDiscussion(parsedModel)) - countStatusAccepted := len(model.FilteredByRiskTrackingAccepted(parsedModel)) - countStatusInProgress := len(model.FilteredByRiskTrackingInProgress(parsedModel)) - countStatusMitigated := len(model.FilteredByRiskTrackingMitigated(parsedModel)) - countStatusFalsePositive := len(model.FilteredByRiskTrackingFalsePositive(parsedModel)) + countCritical := len(types.FilteredByOnlyCriticalRisks(parsedModel)) + countHigh := len(types.FilteredByOnlyHighRisks(parsedModel)) + countElevated := len(types.FilteredByOnlyElevatedRisks(parsedModel)) + countMedium := len(types.FilteredByOnlyMediumRisks(parsedModel)) + countLow := len(types.FilteredByOnlyLowRisks(parsedModel)) + + countStatusUnchecked := len(types.FilteredByRiskTrackingUnchecked(parsedModel)) + countStatusInDiscussion := len(types.FilteredByRiskTrackingInDiscussion(parsedModel)) + countStatusAccepted := len(types.FilteredByRiskTrackingAccepted(parsedModel)) + countStatusInProgress := len(types.FilteredByRiskTrackingInProgress(parsedModel)) + countStatusMitigated := len(types.FilteredByRiskTrackingMitigated(parsedModel)) + countStatusFalsePositive := 
len(types.FilteredByRiskTrackingFalsePositive(parsedModel)) html := pdf.HTMLBasicNew() html.Write(5, "Threagile toolkit was used to model the architecture of \""+uni(parsedModel.Title)+"\" "+ @@ -786,7 +785,7 @@ func createManagementSummary(parsedModel *model.ParsedModel, tempFolder string) "the application in a Defense-in-Depth approach. Additionally, for each risk finding a "+ "link towards a matching OWASP Cheat Sheet or similar with technical details about how to implement a mitigation is given."+ "

"+ - "In total "+strconv.Itoa(model.TotalRiskCount(parsedModel))+" initial risks in "+strconv.Itoa(len(parsedModel.GeneratedRisksByCategory))+" categories have "+ + "In total "+strconv.Itoa(types.TotalRiskCount(parsedModel))+" initial risks in "+strconv.Itoa(len(parsedModel.GeneratedRisksByCategory))+" categories have "+ "been identified during the threat modeling process:

") // TODO plural singular stuff risk/s category/ies has/have pdf.SetFont("Helvetica", "B", fontSizeBody) @@ -939,9 +938,9 @@ func createManagementSummary(parsedModel *model.ParsedModel, tempFolder string) } } -func createRiskMitigationStatus(parsedModel *model.ParsedModel, tempFolder string) { +func createRiskMitigationStatus(parsedModel *types.ParsedModel, tempFolder string) { pdf.SetTextColor(0, 0, 0) - stillAtRisk := model.FilteredByStillAtRisk(parsedModel) + stillAtRisk := types.FilteredByStillAtRisk(parsedModel) count := len(stillAtRisk) title := "Risk Mitigation" addHeadline(title, false) @@ -951,18 +950,18 @@ func createRiskMitigationStatus(parsedModel *model.ParsedModel, tempFolder strin html := pdf.HTMLBasicNew() html.Write(5, "The following chart gives a high-level overview of the risk tracking status (including mitigated risks):") - risksCritical := model.FilteredByOnlyCriticalRisks(parsedModel) - risksHigh := model.FilteredByOnlyHighRisks(parsedModel) - risksElevated := model.FilteredByOnlyElevatedRisks(parsedModel) - risksMedium := model.FilteredByOnlyMediumRisks(parsedModel) - risksLow := model.FilteredByOnlyLowRisks(parsedModel) + risksCritical := types.FilteredByOnlyCriticalRisks(parsedModel) + risksHigh := types.FilteredByOnlyHighRisks(parsedModel) + risksElevated := types.FilteredByOnlyElevatedRisks(parsedModel) + risksMedium := types.FilteredByOnlyMediumRisks(parsedModel) + risksLow := types.FilteredByOnlyLowRisks(parsedModel) - countStatusUnchecked := len(model.FilteredByRiskTrackingUnchecked(parsedModel)) - countStatusInDiscussion := len(model.FilteredByRiskTrackingInDiscussion(parsedModel)) - countStatusAccepted := len(model.FilteredByRiskTrackingAccepted(parsedModel)) - countStatusInProgress := len(model.FilteredByRiskTrackingInProgress(parsedModel)) - countStatusMitigated := len(model.FilteredByRiskTrackingMitigated(parsedModel)) - countStatusFalsePositive := len(model.FilteredByRiskTrackingFalsePositive(parsedModel)) + countStatusUnchecked 
:= len(types.FilteredByRiskTrackingUnchecked(parsedModel)) + countStatusInDiscussion := len(types.FilteredByRiskTrackingInDiscussion(parsedModel)) + countStatusAccepted := len(types.FilteredByRiskTrackingAccepted(parsedModel)) + countStatusInProgress := len(types.FilteredByRiskTrackingInProgress(parsedModel)) + countStatusMitigated := len(types.FilteredByRiskTrackingMitigated(parsedModel)) + countStatusFalsePositive := len(types.FilteredByRiskTrackingFalsePositive(parsedModel)) stackedBarChartRiskTracking := chart.StackedBarChart{ Width: 4000, @@ -974,17 +973,17 @@ func createRiskMitigationStatus(parsedModel *model.ParsedModel, tempFolder strin Name: types.LowSeverity.Title(), Width: 130, Values: []chart.Value{ - {Value: float64(len(model.ReduceToOnlyRiskTrackingUnchecked(parsedModel, risksLow))), Label: types.Unchecked.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingUnchecked(parsedModel, risksLow))), Label: types.Unchecked.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksLow))), Label: types.InDiscussion.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksLow))), Label: types.InDiscussion.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksLow))), Label: types.Accepted.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksLow))), Label: types.Accepted.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksLow))), Label: types.InProgress.Title(), + 
{Value: float64(len(types.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksLow))), Label: types.InProgress.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksLow))), Label: types.Mitigated.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksLow))), Label: types.Mitigated.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksLow))), Label: types.FalsePositive.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksLow))), Label: types.FalsePositive.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, }, }, @@ -992,17 +991,17 @@ func createRiskMitigationStatus(parsedModel *model.ParsedModel, tempFolder strin Name: types.MediumSeverity.Title(), Width: 130, Values: []chart.Value{ - {Value: float64(len(model.ReduceToOnlyRiskTrackingUnchecked(parsedModel, risksMedium))), Label: types.Unchecked.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingUnchecked(parsedModel, risksMedium))), Label: types.Unchecked.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksMedium))), Label: types.InDiscussion.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksMedium))), Label: types.InDiscussion.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: 
drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksMedium))), Label: types.Accepted.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksMedium))), Label: types.Accepted.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksMedium))), Label: types.InProgress.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksMedium))), Label: types.InProgress.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksMedium))), Label: types.Mitigated.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksMedium))), Label: types.Mitigated.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksMedium))), Label: types.FalsePositive.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksMedium))), Label: types.FalsePositive.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, }, }, @@ -1010,17 +1009,17 @@ func createRiskMitigationStatus(parsedModel *model.ParsedModel, tempFolder strin Name: types.ElevatedSeverity.Title(), Width: 130, Values: []chart.Value{ - {Value: float64(len(model.ReduceToOnlyRiskTrackingUnchecked(parsedModel, risksElevated))), Label: types.Unchecked.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingUnchecked(parsedModel, risksElevated))), 
Label: types.Unchecked.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksElevated))), Label: types.InDiscussion.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksElevated))), Label: types.InDiscussion.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksElevated))), Label: types.Accepted.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksElevated))), Label: types.Accepted.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksElevated))), Label: types.InProgress.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksElevated))), Label: types.InProgress.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksElevated))), Label: types.Mitigated.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksElevated))), Label: types.Mitigated.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksElevated))), Label: types.FalsePositive.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksElevated))), Label: 
types.FalsePositive.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, }, }, @@ -1028,17 +1027,17 @@ func createRiskMitigationStatus(parsedModel *model.ParsedModel, tempFolder strin Name: types.HighSeverity.Title(), Width: 130, Values: []chart.Value{ - {Value: float64(len(model.ReduceToOnlyRiskTrackingUnchecked(parsedModel, risksHigh))), Label: types.Unchecked.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingUnchecked(parsedModel, risksHigh))), Label: types.Unchecked.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksHigh))), Label: types.InDiscussion.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksHigh))), Label: types.InDiscussion.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksHigh))), Label: types.Accepted.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksHigh))), Label: types.Accepted.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksHigh))), Label: types.InProgress.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksHigh))), Label: types.InProgress.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksHigh))), Label: 
types.Mitigated.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksHigh))), Label: types.Mitigated.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksHigh))), Label: types.FalsePositive.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksHigh))), Label: types.FalsePositive.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, }, }, @@ -1046,17 +1045,17 @@ func createRiskMitigationStatus(parsedModel *model.ParsedModel, tempFolder strin Name: types.CriticalSeverity.Title(), Width: 130, Values: []chart.Value{ - {Value: float64(len(model.ReduceToOnlyRiskTrackingUnchecked(parsedModel, risksCritical))), Label: types.Unchecked.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingUnchecked(parsedModel, risksCritical))), Label: types.Unchecked.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksCritical))), Label: types.InDiscussion.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksCritical))), Label: types.InDiscussion.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksCritical))), Label: types.Accepted.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksCritical))), Label: types.Accepted.Title(), Style: chart.Style{FillColor: 
makeColor(colors.RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksCritical))), Label: types.InProgress.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksCritical))), Label: types.InProgress.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksCritical))), Label: types.Mitigated.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksCritical))), Label: types.Mitigated.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, - {Value: float64(len(model.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksCritical))), Label: types.FalsePositive.Title(), + {Value: float64(len(types.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksCritical))), Label: types.FalsePositive.Title(), Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, }, }, @@ -1125,16 +1124,16 @@ func createRiskMitigationStatus(parsedModel *model.ParsedModel, tempFolder strin "After removal of risks with status mitigated and false positive "+ "the following "+strconv.Itoa(count)+" remain unmitigated:") - countCritical := len(model.ReduceToOnlyStillAtRisk(parsedModel, model.FilteredByOnlyCriticalRisks(parsedModel))) - countHigh := len(model.ReduceToOnlyStillAtRisk(parsedModel, model.FilteredByOnlyHighRisks(parsedModel))) - countElevated := len(model.ReduceToOnlyStillAtRisk(parsedModel, model.FilteredByOnlyElevatedRisks(parsedModel))) - countMedium := len(model.ReduceToOnlyStillAtRisk(parsedModel, model.FilteredByOnlyMediumRisks(parsedModel))) - 
countLow := len(model.ReduceToOnlyStillAtRisk(parsedModel, model.FilteredByOnlyLowRisks(parsedModel))) + countCritical := len(types.ReduceToOnlyStillAtRisk(parsedModel, types.FilteredByOnlyCriticalRisks(parsedModel))) + countHigh := len(types.ReduceToOnlyStillAtRisk(parsedModel, types.FilteredByOnlyHighRisks(parsedModel))) + countElevated := len(types.ReduceToOnlyStillAtRisk(parsedModel, types.FilteredByOnlyElevatedRisks(parsedModel))) + countMedium := len(types.ReduceToOnlyStillAtRisk(parsedModel, types.FilteredByOnlyMediumRisks(parsedModel))) + countLow := len(types.ReduceToOnlyStillAtRisk(parsedModel, types.FilteredByOnlyLowRisks(parsedModel))) - countBusinessSide := len(model.ReduceToOnlyStillAtRisk(parsedModel, model.FilteredByOnlyBusinessSide(parsedModel))) - countArchitecture := len(model.ReduceToOnlyStillAtRisk(parsedModel, model.FilteredByOnlyArchitecture(parsedModel))) - countDevelopment := len(model.ReduceToOnlyStillAtRisk(parsedModel, model.FilteredByOnlyDevelopment(parsedModel))) - countOperation := len(model.ReduceToOnlyStillAtRisk(parsedModel, model.FilteredByOnlyOperation(parsedModel))) + countBusinessSide := len(types.ReduceToOnlyStillAtRisk(parsedModel, types.FilteredByOnlyBusinessSide(parsedModel))) + countArchitecture := len(types.ReduceToOnlyStillAtRisk(parsedModel, types.FilteredByOnlyArchitecture(parsedModel))) + countDevelopment := len(types.ReduceToOnlyStillAtRisk(parsedModel, types.FilteredByOnlyDevelopment(parsedModel))) + countOperation := len(types.ReduceToOnlyStillAtRisk(parsedModel, types.FilteredByOnlyOperation(parsedModel))) pieChartRemainingRiskSeverity := chart.PieChart{ Width: 1500, @@ -1280,19 +1279,19 @@ func makeColor(hexColor string) drawing.Color { return drawing.ColorFromHex(hexColor[i:]) // = remove first char, which is # in rgb hex here } -func createImpactInitialRisks(parsedModel *model.ParsedModel) { +func createImpactInitialRisks(parsedModel *types.ParsedModel) { renderImpactAnalysis(parsedModel, true) } -func 
createImpactRemainingRisks(parsedModel *model.ParsedModel) { +func createImpactRemainingRisks(parsedModel *types.ParsedModel) { renderImpactAnalysis(parsedModel, false) } -func renderImpactAnalysis(parsedModel *model.ParsedModel, initialRisks bool) { +func renderImpactAnalysis(parsedModel *types.ParsedModel, initialRisks bool) { pdf.SetTextColor(0, 0, 0) - count, catCount := model.TotalRiskCount(parsedModel), len(parsedModel.GeneratedRisksByCategory) + count, catCount := types.TotalRiskCount(parsedModel), len(parsedModel.GeneratedRisksByCategory) if !initialRisks { - count, catCount = len(model.FilteredByStillAtRisk(parsedModel)), len(model.CategoriesOfOnlyRisksStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory)) + count, catCount = len(types.FilteredByStillAtRisk(parsedModel)), len(types.CategoriesOfOnlyRisksStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory)) } riskStr, catStr := "Risks", "Categories" if count == 1 { @@ -1333,22 +1332,22 @@ func renderImpactAnalysis(parsedModel *model.ParsedModel, initialRisks bool) { html.Write(5, "Risk finding paragraphs are clickable and link to the corresponding chapter.") pdf.SetFont("Helvetica", "", fontSizeBody) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), types.CriticalSeverity, false, initialRisks, true, false) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), types.HighSeverity, false, initialRisks, true, false) - 
addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), types.ElevatedSeverity, false, initialRisks, true, false) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), types.MediumSeverity, false, initialRisks, true, false) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), types.LowSeverity, false, initialRisks, true, false) pdf.SetDrawColor(0, 0, 0) pdf.SetDashPattern([]float64{}, 0) } -func createOutOfScopeAssets(parsedModel *model.ParsedModel) { +func createOutOfScopeAssets(parsedModel *types.ParsedModel) { uni := pdf.UnicodeTranslatorFromDescriptor("") pdf.SetTextColor(0, 0, 0) assets := "Assets" @@ -1411,24 +1410,24 @@ func createOutOfScopeAssets(parsedModel *model.ParsedModel) { pdf.SetDashPattern([]float64{}, 0) } -func sortedTechnicalAssetsByRAAAndTitle(parsedModel *model.ParsedModel) []model.TechnicalAsset { - assets := make([]model.TechnicalAsset, 0) +func sortedTechnicalAssetsByRAAAndTitle(parsedModel *types.ParsedModel) []types.TechnicalAsset { + assets := make([]types.TechnicalAsset, 0) for _, asset := range parsedModel.TechnicalAssets { assets = append(assets, asset) } - sort.Sort(model.ByTechnicalAssetRAAAndTitleSort(assets)) + 
sort.Sort(types.ByTechnicalAssetRAAAndTitleSort(assets)) return assets } -func createModelFailures(parsedModel *model.ParsedModel) { +func createModelFailures(parsedModel *types.ParsedModel) { pdf.SetTextColor(0, 0, 0) - modelFailures := model.FlattenRiskSlice(model.FilterByModelFailures(parsedModel.GeneratedRisksByCategory)) + modelFailures := types.FlattenRiskSlice(types.FilterByModelFailures(parsedModel, parsedModel.GeneratedRisksByCategory)) risksStr := "Risks" count := len(modelFailures) if count == 1 { risksStr = "Risk" } - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(parsedModel, modelFailures)) + countStillAtRisk := len(types.ReduceToOnlyStillAtRisk(parsedModel, modelFailures)) if countStillAtRisk > 0 { colors.ColorModelFailure(pdf) } @@ -1450,20 +1449,20 @@ func createModelFailures(parsedModel *model.ParsedModel) { html.Write(5, "Risk finding paragraphs are clickable and link to the corresponding chapter.") pdf.SetFont("Helvetica", "", fontSizeBody) - modelFailuresByCategory := model.FilterByModelFailures(parsedModel.GeneratedRisksByCategory) + modelFailuresByCategory := types.FilterByModelFailures(parsedModel, parsedModel.GeneratedRisksByCategory) if len(modelFailuresByCategory) == 0 { pdfColorGray() html.Write(5, "

No potential model failures have been identified.") } else { - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, modelFailuresByCategory, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, modelFailuresByCategory, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, modelFailuresByCategory, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, modelFailuresByCategory, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, modelFailuresByCategory, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, modelFailuresByCategory, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, modelFailuresByCategory, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, modelFailuresByCategory, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, modelFailuresByCategory, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, modelFailuresByCategory, true)), types.LowSeverity, true, true, false, true) } @@ -1471,7 +1470,7 @@ func createModelFailures(parsedModel *model.ParsedModel) { pdf.SetDashPattern([]float64{}, 0) } -func createRAA(parsedModel *model.ParsedModel, introTextRAA string) { +func createRAA(parsedModel *types.ParsedModel, 
introTextRAA string) { uni := pdf.UnicodeTranslatorFromDescriptor("") pdf.SetTextColor(0, 0, 0) chapTitle := "RAA Analysis" @@ -1501,7 +1500,7 @@ func createRAA(parsedModel *model.ParsedModel, introTextRAA string) { strBuilder.WriteString("

") } newRisksStr := technicalAsset.GeneratedRisks(parsedModel) - switch model.HighestSeverityStillAtRisk(parsedModel, newRisksStr) { + switch types.HighestSeverityStillAtRisk(parsedModel, newRisksStr) { case types.HighSeverity: colors.ColorHighRisk(pdf) case types.MediumSeverity: @@ -1511,7 +1510,7 @@ func createRAA(parsedModel *model.ParsedModel, introTextRAA string) { default: pdfColorBlack() } - if len(model.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) == 0 { + if len(types.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) == 0 { pdfColorBlack() } @@ -1617,14 +1616,14 @@ func createDataRiskQuickWins() { } */ -func addCategories(parsedModel *model.ParsedModel, riskCategories []model.RiskCategory, severity types.RiskSeverity, bothInitialAndRemainingRisks bool, initialRisks bool, describeImpact bool, describeDescription bool) { +func addCategories(parsedModel *types.ParsedModel, riskCategories []types.RiskCategory, severity types.RiskSeverity, bothInitialAndRemainingRisks bool, initialRisks bool, describeImpact bool, describeDescription bool) { html := pdf.HTMLBasicNew() var strBuilder strings.Builder - sort.Sort(model.ByRiskCategoryTitleSort(riskCategories)) + sort.Sort(types.ByRiskCategoryTitleSort(riskCategories)) for _, riskCategory := range riskCategories { risksStr := parsedModel.GeneratedRisksByCategory[riskCategory.Id] if !initialRisks { - risksStr = model.ReduceToOnlyStillAtRisk(parsedModel, risksStr) + risksStr = types.ReduceToOnlyStillAtRisk(parsedModel, risksStr) } if len(risksStr) == 0 { continue @@ -1656,7 +1655,7 @@ func addCategories(parsedModel *model.ParsedModel, riskCategories []model.RiskCa pdfColorBlack() prefix = "" } - switch model.HighestSeverityStillAtRisk(parsedModel, risksStr) { + switch types.HighestSeverityStillAtRisk(parsedModel, risksStr) { case types.CriticalSeverity: colors.ColorCriticalRisk(pdf) case types.HighSeverity: @@ -1668,7 +1667,7 @@ func addCategories(parsedModel *model.ParsedModel, riskCategories []model.RiskCa 
case types.LowSeverity: colors.ColorLowRisk(pdf) } - if len(model.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) == 0 { + if len(types.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) == 0 { pdfColorBlack() } html.Write(5, strBuilder.String()) @@ -1683,7 +1682,7 @@ func addCategories(parsedModel *model.ParsedModel, riskCategories []model.RiskCa if !initialRisks { initialStr = "Remaining" } - remainingRisks := model.ReduceToOnlyStillAtRisk(parsedModel, risksStr) + remainingRisks := types.ReduceToOnlyStillAtRisk(parsedModel, risksStr) suffix := strconv.Itoa(count) + " " + initialStr + " Risk" if bothInitialAndRemainingRisks { suffix = strconv.Itoa(len(remainingRisks)) + " / " + strconv.Itoa(count) + " Risk" @@ -1693,9 +1692,9 @@ func addCategories(parsedModel *model.ParsedModel, riskCategories []model.RiskCa } suffix += " - Exploitation likelihood is " if initialRisks { - suffix += model.HighestExploitationLikelihood(risksStr).Title() + " with " + model.HighestExploitationImpact(risksStr).Title() + " impact." + suffix += types.HighestExploitationLikelihood(risksStr).Title() + " with " + types.HighestExploitationImpact(risksStr).Title() + " impact." } else { - suffix += model.HighestExploitationLikelihood(remainingRisks).Title() + " with " + model.HighestExploitationImpact(remainingRisks).Title() + " impact." + suffix += types.HighestExploitationLikelihood(remainingRisks).Title() + " with " + types.HighestExploitationImpact(remainingRisks).Title() + " impact." } strBuilder.WriteString(suffix + "
") html.Write(5, strBuilder.String()) @@ -1722,26 +1721,26 @@ func firstParagraph(text string) string { return match[1] } -func createAssignmentByFunction(parsedModel *model.ParsedModel) { +func createAssignmentByFunction(parsedModel *types.ParsedModel) { pdf.SetTextColor(0, 0, 0) title := "Assignment by Function" addHeadline(title, false) defineLinkTarget("{function-assignment}") currentChapterTitleBreadcrumb = title - risksBusinessSideFunction := model.RisksOfOnlyBusinessSide(parsedModel.GeneratedRisksByCategory) - risksArchitectureFunction := model.RisksOfOnlyArchitecture(parsedModel.GeneratedRisksByCategory) - risksDevelopmentFunction := model.RisksOfOnlyDevelopment(parsedModel.GeneratedRisksByCategory) - risksOperationFunction := model.RisksOfOnlyOperation(parsedModel.GeneratedRisksByCategory) + risksBusinessSideFunction := types.RisksOfOnlyBusinessSide(parsedModel, parsedModel.GeneratedRisksByCategory) + risksArchitectureFunction := types.RisksOfOnlyArchitecture(parsedModel, parsedModel.GeneratedRisksByCategory) + risksDevelopmentFunction := types.RisksOfOnlyDevelopment(parsedModel, parsedModel.GeneratedRisksByCategory) + risksOperationFunction := types.RisksOfOnlyOperation(parsedModel, parsedModel.GeneratedRisksByCategory) - countBusinessSideFunction := model.CountRisks(risksBusinessSideFunction) - countArchitectureFunction := model.CountRisks(risksArchitectureFunction) - countDevelopmentFunction := model.CountRisks(risksDevelopmentFunction) - countOperationFunction := model.CountRisks(risksOperationFunction) + countBusinessSideFunction := types.CountRisks(risksBusinessSideFunction) + countArchitectureFunction := types.CountRisks(risksArchitectureFunction) + countDevelopmentFunction := types.CountRisks(risksDevelopmentFunction) + countOperationFunction := types.CountRisks(risksOperationFunction) var intro strings.Builder intro.WriteString("This chapter clusters and assigns the risks by functions which are most likely able to " + "check and mitigate them: " + 
- "In total " + strconv.Itoa(model.TotalRiskCount(parsedModel)) + " potential risks have been identified during the threat modeling process " + + "In total " + strconv.Itoa(types.TotalRiskCount(parsedModel)) + " potential risks have been identified during the threat modeling process " + "of which " + strconv.Itoa(countBusinessSideFunction) + " should be checked by " + types.BusinessSide.Title() + ", " + "" + strconv.Itoa(countArchitectureFunction) + " should be checked by " + types.Architecture.Title() + ", " + "" + strconv.Itoa(countDevelopmentFunction) + " should be checked by " + types.Development.Title() + ", " + @@ -1770,15 +1769,15 @@ func createAssignmentByFunction(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksBusinessSideFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksBusinessSideFunction, true)), types.CriticalSeverity, true, true, false, false) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksBusinessSideFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksBusinessSideFunction, true)), types.HighSeverity, true, true, false, false) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksBusinessSideFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksBusinessSideFunction, true)), types.ElevatedSeverity, true, true, false, false) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksBusinessSideFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksBusinessSideFunction, true)), types.MediumSeverity, true, true, false, false) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksBusinessSideFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksBusinessSideFunction, true)), types.LowSeverity, true, true, false, false) } pdf.SetLeftMargin(oldLeft) @@ -1797,15 +1796,15 @@ func createAssignmentByFunction(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksArchitectureFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksArchitectureFunction, true)), types.CriticalSeverity, true, true, false, false) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksArchitectureFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksArchitectureFunction, true)), types.HighSeverity, true, true, false, false) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksArchitectureFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksArchitectureFunction, true)), types.ElevatedSeverity, true, true, false, false) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksArchitectureFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksArchitectureFunction, true)), types.MediumSeverity, true, true, false, false) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksArchitectureFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksArchitectureFunction, true)), types.LowSeverity, true, true, false, false) } pdf.SetLeftMargin(oldLeft) @@ -1824,15 +1823,15 @@ func createAssignmentByFunction(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksDevelopmentFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksDevelopmentFunction, true)), types.CriticalSeverity, true, true, false, false) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksDevelopmentFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksDevelopmentFunction, true)), types.HighSeverity, true, true, false, false) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksDevelopmentFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksDevelopmentFunction, true)), types.ElevatedSeverity, true, true, false, false) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksDevelopmentFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksDevelopmentFunction, true)), types.MediumSeverity, true, true, false, false) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksDevelopmentFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksDevelopmentFunction, true)), types.LowSeverity, true, true, false, false) } pdf.SetLeftMargin(oldLeft) @@ -1851,15 +1850,15 @@ func createAssignmentByFunction(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksOperationFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksOperationFunction, true)), types.CriticalSeverity, true, true, false, false) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksOperationFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksOperationFunction, true)), types.HighSeverity, true, true, false, false) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksOperationFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksOperationFunction, true)), types.ElevatedSeverity, true, true, false, false) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksOperationFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksOperationFunction, true)), types.MediumSeverity, true, true, false, false) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksOperationFunction, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksOperationFunction, true)), types.LowSeverity, true, true, false, false) } pdf.SetLeftMargin(oldLeft) @@ -1868,29 +1867,29 @@ func createAssignmentByFunction(parsedModel *model.ParsedModel) { pdf.SetDashPattern([]float64{}, 0) } -func createSTRIDE(parsedModel *model.ParsedModel) { +func createSTRIDE(parsedModel *types.ParsedModel) { pdf.SetTextColor(0, 0, 0) title := 
"STRIDE Classification of Identified Risks" addHeadline(title, false) defineLinkTarget("{stride}") currentChapterTitleBreadcrumb = title - risksSTRIDESpoofing := model.RisksOfOnlySTRIDESpoofing(parsedModel.GeneratedRisksByCategory) - risksSTRIDETampering := model.RisksOfOnlySTRIDETampering(parsedModel.GeneratedRisksByCategory) - risksSTRIDERepudiation := model.RisksOfOnlySTRIDERepudiation(parsedModel.GeneratedRisksByCategory) - risksSTRIDEInformationDisclosure := model.RisksOfOnlySTRIDEInformationDisclosure(parsedModel.GeneratedRisksByCategory) - risksSTRIDEDenialOfService := model.RisksOfOnlySTRIDEDenialOfService(parsedModel.GeneratedRisksByCategory) - risksSTRIDEElevationOfPrivilege := model.RisksOfOnlySTRIDEElevationOfPrivilege(parsedModel.GeneratedRisksByCategory) - - countSTRIDESpoofing := model.CountRisks(risksSTRIDESpoofing) - countSTRIDETampering := model.CountRisks(risksSTRIDETampering) - countSTRIDERepudiation := model.CountRisks(risksSTRIDERepudiation) - countSTRIDEInformationDisclosure := model.CountRisks(risksSTRIDEInformationDisclosure) - countSTRIDEDenialOfService := model.CountRisks(risksSTRIDEDenialOfService) - countSTRIDEElevationOfPrivilege := model.CountRisks(risksSTRIDEElevationOfPrivilege) + risksSTRIDESpoofing := types.RisksOfOnlySTRIDESpoofing(parsedModel, parsedModel.GeneratedRisksByCategory) + risksSTRIDETampering := types.RisksOfOnlySTRIDETampering(parsedModel, parsedModel.GeneratedRisksByCategory) + risksSTRIDERepudiation := types.RisksOfOnlySTRIDERepudiation(parsedModel, parsedModel.GeneratedRisksByCategory) + risksSTRIDEInformationDisclosure := types.RisksOfOnlySTRIDEInformationDisclosure(parsedModel, parsedModel.GeneratedRisksByCategory) + risksSTRIDEDenialOfService := types.RisksOfOnlySTRIDEDenialOfService(parsedModel, parsedModel.GeneratedRisksByCategory) + risksSTRIDEElevationOfPrivilege := types.RisksOfOnlySTRIDEElevationOfPrivilege(parsedModel, parsedModel.GeneratedRisksByCategory) + + countSTRIDESpoofing := 
types.CountRisks(risksSTRIDESpoofing) + countSTRIDETampering := types.CountRisks(risksSTRIDETampering) + countSTRIDERepudiation := types.CountRisks(risksSTRIDERepudiation) + countSTRIDEInformationDisclosure := types.CountRisks(risksSTRIDEInformationDisclosure) + countSTRIDEDenialOfService := types.CountRisks(risksSTRIDEDenialOfService) + countSTRIDEElevationOfPrivilege := types.CountRisks(risksSTRIDEElevationOfPrivilege) var intro strings.Builder intro.WriteString("This chapter clusters and classifies the risks by STRIDE categories: " + - "In total " + strconv.Itoa(model.TotalRiskCount(parsedModel)) + " potential risks have been identified during the threat modeling process " + + "In total " + strconv.Itoa(types.TotalRiskCount(parsedModel)) + " potential risks have been identified during the threat modeling process " + "of which " + strconv.Itoa(countSTRIDESpoofing) + " in the " + types.Spoofing.Title() + " category, " + "" + strconv.Itoa(countSTRIDETampering) + " in the " + types.Tampering.Title() + " category, " + "" + strconv.Itoa(countSTRIDERepudiation) + " in the " + types.Repudiation.Title() + " category, " + @@ -1921,15 +1920,15 @@ func createSTRIDE(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDESpoofing, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDESpoofing, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDESpoofing, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDESpoofing, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDESpoofing, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDESpoofing, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDESpoofing, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDESpoofing, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDESpoofing, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDESpoofing, true)), types.LowSeverity, true, true, false, true) } pdf.SetLeftMargin(oldLeft) @@ -1948,15 +1947,15 @@ func createSTRIDE(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDETampering, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDETampering, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDETampering, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDETampering, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDETampering, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDETampering, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDETampering, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDETampering, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDETampering, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDETampering, true)), types.LowSeverity, true, true, false, true) } pdf.SetLeftMargin(oldLeft) @@ -1975,15 +1974,15 @@ func createSTRIDE(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDERepudiation, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDERepudiation, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDERepudiation, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDERepudiation, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDERepudiation, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDERepudiation, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDERepudiation, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDERepudiation, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDERepudiation, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDERepudiation, true)), types.LowSeverity, true, true, false, true) } pdf.SetLeftMargin(oldLeft) @@ -2002,15 +2001,15 @@ func createSTRIDE(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), types.LowSeverity, true, true, false, true) } pdf.SetLeftMargin(oldLeft) @@ -2029,15 +2028,15 @@ func createSTRIDE(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEDenialOfService, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEDenialOfService, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEDenialOfService, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEDenialOfService, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEDenialOfService, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEDenialOfService, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEDenialOfService, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEDenialOfService, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEDenialOfService, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEDenialOfService, true)), types.LowSeverity, true, true, false, true) } pdf.SetLeftMargin(oldLeft) @@ -2056,15 +2055,15 @@ func createSTRIDE(parsedModel *model.ParsedModel) { pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, model.GetRiskCategories(parsedModel, model.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), + addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), types.LowSeverity, true, true, false, true) } pdf.SetLeftMargin(oldLeft) @@ -2073,7 +2072,7 @@ func createSTRIDE(parsedModel *model.ParsedModel) { pdf.SetDashPattern([]float64{}, 0) } -func createSecurityRequirements(parsedModel *model.ParsedModel) { +func 
createSecurityRequirements(parsedModel *types.ParsedModel) { uni := pdf.UnicodeTranslatorFromDescriptor("") pdf.SetTextColor(0, 0, 0) chapTitle := "Security Requirements" @@ -2105,7 +2104,7 @@ func createSecurityRequirements(parsedModel *model.ParsedModel) { "taken into account as well. Also custom individual security requirements might exist for the project.") } -func sortedKeysOfSecurityRequirements(parsedModel *model.ParsedModel) []string { +func sortedKeysOfSecurityRequirements(parsedModel *types.ParsedModel) []string { keys := make([]string, 0) for k := range parsedModel.SecurityRequirements { keys = append(keys, k) @@ -2114,7 +2113,7 @@ func sortedKeysOfSecurityRequirements(parsedModel *model.ParsedModel) []string { return keys } -func createAbuseCases(parsedModel *model.ParsedModel) { +func createAbuseCases(parsedModel *types.ParsedModel) { pdf.SetTextColor(0, 0, 0) chapTitle := "Abuse Cases" addHeadline(chapTitle, false) @@ -2145,7 +2144,7 @@ func createAbuseCases(parsedModel *model.ParsedModel) { "taken into account as well. 
Also custom individual abuse cases might exist for the project.") } -func sortedKeysOfAbuseCases(parsedModel *model.ParsedModel) []string { +func sortedKeysOfAbuseCases(parsedModel *types.ParsedModel) []string { keys := make([]string, 0) for k := range parsedModel.AbuseCases { keys = append(keys, k) @@ -2154,7 +2153,7 @@ func sortedKeysOfAbuseCases(parsedModel *model.ParsedModel) []string { return keys } -func createQuestions(parsedModel *model.ParsedModel) { +func createQuestions(parsedModel *types.ParsedModel) { uni := pdf.UnicodeTranslatorFromDescriptor("") pdf.SetTextColor(0, 0, 0) questions := "Questions" @@ -2202,7 +2201,7 @@ func createQuestions(parsedModel *model.ParsedModel) { } } -func sortedKeysOfQuestions(parsedModel *model.ParsedModel) []string { +func sortedKeysOfQuestions(parsedModel *types.ParsedModel) []string { keys := make([]string, 0) for k := range parsedModel.Questions { keys = append(keys, k) @@ -2211,7 +2210,7 @@ func sortedKeysOfQuestions(parsedModel *model.ParsedModel) []string { return keys } -func createTagListing(parsedModel *model.ParsedModel) { +func createTagListing(parsedModel *types.ParsedModel) { pdf.SetTextColor(0, 0, 0) chapTitle := "Tag Listing" addHeadline(chapTitle, false) @@ -2279,25 +2278,25 @@ func createTagListing(parsedModel *model.ParsedModel) { } } -func sortedSharedRuntimesByTitle(parsedModel *model.ParsedModel) []model.SharedRuntime { - result := make([]model.SharedRuntime, 0) +func sortedSharedRuntimesByTitle(parsedModel *types.ParsedModel) []types.SharedRuntime { + result := make([]types.SharedRuntime, 0) for _, runtime := range parsedModel.SharedRuntimes { result = append(result, runtime) } - sort.Sort(model.BySharedRuntimeTitleSort(result)) + sort.Sort(types.BySharedRuntimeTitleSort(result)) return result } -func sortedTechnicalAssetsByTitle(parsedModel *model.ParsedModel) []model.TechnicalAsset { - assets := make([]model.TechnicalAsset, 0) +func sortedTechnicalAssetsByTitle(parsedModel *types.ParsedModel) 
[]types.TechnicalAsset { + assets := make([]types.TechnicalAsset, 0) for _, asset := range parsedModel.TechnicalAssets { assets = append(assets, asset) } - sort.Sort(model.ByTechnicalAssetTitleSort(assets)) + sort.Sort(types.ByTechnicalAssetTitleSort(assets)) return assets } -func createRiskCategories(parsedModel *model.ParsedModel) { +func createRiskCategories(parsedModel *types.ParsedModel) { uni := pdf.UnicodeTranslatorFromDescriptor("") // category title title := "Identified Risks by Vulnerability Category" @@ -2306,23 +2305,23 @@ func createRiskCategories(parsedModel *model.ParsedModel) { defineLinkTarget("{intro-risks-by-vulnerability-category}") html := pdf.HTMLBasicNew() var text strings.Builder - text.WriteString("In total " + strconv.Itoa(model.TotalRiskCount(parsedModel)) + " potential risks have been identified during the threat modeling process " + + text.WriteString("In total " + strconv.Itoa(types.TotalRiskCount(parsedModel)) + " potential risks have been identified during the threat modeling process " + "of which " + - "" + strconv.Itoa(len(model.FilteredByOnlyCriticalRisks(parsedModel))) + " are rated as critical, " + - "" + strconv.Itoa(len(model.FilteredByOnlyHighRisks(parsedModel))) + " as high, " + - "" + strconv.Itoa(len(model.FilteredByOnlyElevatedRisks(parsedModel))) + " as elevated, " + - "" + strconv.Itoa(len(model.FilteredByOnlyMediumRisks(parsedModel))) + " as medium, " + - "and " + strconv.Itoa(len(model.FilteredByOnlyLowRisks(parsedModel))) + " as low. " + + "" + strconv.Itoa(len(types.FilteredByOnlyCriticalRisks(parsedModel))) + " are rated as critical, " + + "" + strconv.Itoa(len(types.FilteredByOnlyHighRisks(parsedModel))) + " as high, " + + "" + strconv.Itoa(len(types.FilteredByOnlyElevatedRisks(parsedModel))) + " as elevated, " + + "" + strconv.Itoa(len(types.FilteredByOnlyMediumRisks(parsedModel))) + " as medium, " + + "and " + strconv.Itoa(len(types.FilteredByOnlyLowRisks(parsedModel))) + " as low. " + "

These risks are distributed across " + strconv.Itoa(len(parsedModel.GeneratedRisksByCategory)) + " vulnerability categories. ") text.WriteString("The following sub-chapters of this section describe each identified risk category.") // TODO more explanation text html.Write(5, text.String()) text.Reset() currentChapterTitleBreadcrumb = title - for _, category := range model.SortedRiskCategories(parsedModel) { - risksStr := model.SortedRisksOfCategory(parsedModel, category) + for _, category := range types.SortedRiskCategories(parsedModel) { + risksStr := types.SortedRisksOfCategory(parsedModel, category) // category color - switch model.HighestSeverityStillAtRisk(parsedModel, risksStr) { + switch types.HighestSeverityStillAtRisk(parsedModel, risksStr) { case types.CriticalSeverity: colors.ColorCriticalRisk(pdf) case types.HighSeverity: @@ -2336,12 +2335,12 @@ func createRiskCategories(parsedModel *model.ParsedModel) { default: pdfColorBlack() } - if len(model.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) == 0 { + if len(types.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) == 0 { pdfColorBlack() } // category title - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) + countStillAtRisk := len(types.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(risksStr)) + " Risk" if len(risksStr) != 1 { suffix += "s" @@ -2517,7 +2516,7 @@ func createRiskCategories(parsedModel *model.ParsedModel) { } } -func writeRiskTrackingStatus(parsedModel *model.ParsedModel, risk model.Risk) { +func writeRiskTrackingStatus(parsedModel *types.ParsedModel, risk types.Risk) { uni := pdf.UnicodeTranslatorFromDescriptor("") tracking := risk.GetRiskTracking(parsedModel) pdfColorBlack() @@ -2564,7 +2563,7 @@ func writeRiskTrackingStatus(parsedModel *model.ParsedModel, risk model.Risk) { pdfColorBlack() } -func createTechnicalAssets(parsedModel *model.ParsedModel) { +func createTechnicalAssets(parsedModel 
*types.ParsedModel) { uni := pdf.UnicodeTranslatorFromDescriptor("") // category title title := "Identified Risks by Technical Asset" @@ -2573,13 +2572,13 @@ func createTechnicalAssets(parsedModel *model.ParsedModel) { defineLinkTarget("{intro-risks-by-technical-asset}") html := pdf.HTMLBasicNew() var text strings.Builder - text.WriteString("In total " + strconv.Itoa(model.TotalRiskCount(parsedModel)) + " potential risks have been identified during the threat modeling process " + + text.WriteString("In total " + strconv.Itoa(types.TotalRiskCount(parsedModel)) + " potential risks have been identified during the threat modeling process " + "of which " + - "" + strconv.Itoa(len(model.FilteredByOnlyCriticalRisks(parsedModel))) + " are rated as critical, " + - "" + strconv.Itoa(len(model.FilteredByOnlyHighRisks(parsedModel))) + " as high, " + - "" + strconv.Itoa(len(model.FilteredByOnlyElevatedRisks(parsedModel))) + " as elevated, " + - "" + strconv.Itoa(len(model.FilteredByOnlyMediumRisks(parsedModel))) + " as medium, " + - "and " + strconv.Itoa(len(model.FilteredByOnlyLowRisks(parsedModel))) + " as low. " + + "" + strconv.Itoa(len(types.FilteredByOnlyCriticalRisks(parsedModel))) + " are rated as critical, " + + "" + strconv.Itoa(len(types.FilteredByOnlyHighRisks(parsedModel))) + " as high, " + + "" + strconv.Itoa(len(types.FilteredByOnlyElevatedRisks(parsedModel))) + " as elevated, " + + "" + strconv.Itoa(len(types.FilteredByOnlyMediumRisks(parsedModel))) + " as medium, " + + "and " + strconv.Itoa(len(types.FilteredByOnlyLowRisks(parsedModel))) + " as low. " + "

These risks are distributed across " + strconv.Itoa(len(parsedModel.InScopeTechnicalAssets())) + " in-scope technical assets. ") text.WriteString("The following sub-chapters of this section describe each identified risk grouped by technical asset. ") // TODO more explanation text text.WriteString("The RAA value of a technical asset is the calculated \"Relative Attacker Attractiveness\" value in percent.") @@ -2588,7 +2587,7 @@ func createTechnicalAssets(parsedModel *model.ParsedModel) { currentChapterTitleBreadcrumb = title for _, technicalAsset := range sortedTechnicalAssetsByRiskSeverityAndTitle(parsedModel) { risksStr := technicalAsset.GeneratedRisks(parsedModel) - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) + countStillAtRisk := len(types.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(risksStr)) + " Risk" if len(risksStr) != 1 { suffix += "s" @@ -2597,7 +2596,7 @@ func createTechnicalAssets(parsedModel *model.ParsedModel) { pdfColorOutOfScope() suffix = "out-of-scope" } else { - switch model.HighestSeverityStillAtRisk(parsedModel, risksStr) { + switch types.HighestSeverityStillAtRisk(parsedModel, risksStr) { case types.CriticalSeverity: colors.ColorCriticalRisk(pdf) case types.HighSeverity: @@ -2611,7 +2610,7 @@ func createTechnicalAssets(parsedModel *model.ParsedModel) { default: pdfColorBlack() } - if len(model.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) == 0 { + if len(types.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) == 0 { pdfColorBlack() } } @@ -2725,7 +2724,7 @@ func createTechnicalAssets(parsedModel *model.ParsedModel) { pdf.SetFont("Helvetica", "", fontSizeVerySmall) pdfColorGray() pdf.MultiCell(215, 5, uni(risk.SyntheticId), "0", "0", false) - pdf.Link(20, posY, 180, pdf.GetY()-posY, tocLinkIdByAssetId[risk.Category.Id]) + pdf.Link(20, posY, 180, pdf.GetY()-posY, tocLinkIdByAssetId[risk.CategoryId]) pdf.SetFont("Helvetica", "", fontSizeBody) 
writeRiskTrackingStatus(parsedModel, risk) pdf.SetLeftMargin(oldLeft) @@ -3376,20 +3375,20 @@ func createTechnicalAssets(parsedModel *model.ParsedModel) { } } -func createDataAssets(parsedModel *model.ParsedModel) { +func createDataAssets(parsedModel *types.ParsedModel) { uni := pdf.UnicodeTranslatorFromDescriptor("") title := "Identified Data Breach Probabilities by Data Asset" pdfColorBlack() addHeadline(title, false) defineLinkTarget("{intro-risks-by-data-asset}") html := pdf.HTMLBasicNew() - html.Write(5, "In total "+strconv.Itoa(model.TotalRiskCount(parsedModel))+" potential risks have been identified during the threat modeling process "+ + html.Write(5, "In total "+strconv.Itoa(types.TotalRiskCount(parsedModel))+" potential risks have been identified during the threat modeling process "+ "of which "+ - ""+strconv.Itoa(len(model.FilteredByOnlyCriticalRisks(parsedModel)))+" are rated as critical, "+ - ""+strconv.Itoa(len(model.FilteredByOnlyHighRisks(parsedModel)))+" as high, "+ - ""+strconv.Itoa(len(model.FilteredByOnlyElevatedRisks(parsedModel)))+" as elevated, "+ - ""+strconv.Itoa(len(model.FilteredByOnlyMediumRisks(parsedModel)))+" as medium, "+ - "and "+strconv.Itoa(len(model.FilteredByOnlyLowRisks(parsedModel)))+" as low. "+ + ""+strconv.Itoa(len(types.FilteredByOnlyCriticalRisks(parsedModel)))+" are rated as critical, "+ + ""+strconv.Itoa(len(types.FilteredByOnlyHighRisks(parsedModel)))+" as high, "+ + ""+strconv.Itoa(len(types.FilteredByOnlyElevatedRisks(parsedModel)))+" as elevated, "+ + ""+strconv.Itoa(len(types.FilteredByOnlyMediumRisks(parsedModel)))+" as medium, "+ + "and "+strconv.Itoa(len(types.FilteredByOnlyLowRisks(parsedModel)))+" as low. "+ "

These risks are distributed across "+strconv.Itoa(len(parsedModel.DataAssets))+" data assets. ") html.Write(5, "The following sub-chapters of this section describe the derived data breach probabilities grouped by data asset.
") // TODO more explanation text pdf.SetFont("Helvetica", "", fontSizeSmall) @@ -3419,7 +3418,7 @@ func createDataAssets(parsedModel *model.ParsedModel) { pdfColorBlack() } risksStr := dataAsset.IdentifiedDataBreachProbabilityRisks(parsedModel) - countStillAtRisk := len(model.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) + countStillAtRisk := len(types.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) suffix := strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(len(risksStr)) + " Risk" if len(risksStr) != 1 { suffix += "s" @@ -3748,7 +3747,7 @@ func createDataAssets(parsedModel *model.ParsedModel) { // how can is this data asset be indirectly lost (i.e. why) dataBreachRisksStillAtRisk := dataAsset.IdentifiedDataBreachProbabilityRisksStillAtRisk(parsedModel) - model.SortByDataBreachProbability(dataBreachRisksStillAtRisk, parsedModel) + types.SortByDataBreachProbability(dataBreachRisksStillAtRisk, parsedModel) if pdf.GetY() > 265 { pageBreak() pdf.SetY(36) @@ -3790,14 +3789,14 @@ func createDataAssets(parsedModel *model.ParsedModel) { pdf.SetFont("Helvetica", "", fontSizeVerySmall) pdf.MultiCell(185, 5, dataBreachRisk.DataBreachProbability.Title()+": "+uni(dataBreachRisk.SyntheticId), "0", "0", false) pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.Link(20, posY, 180, pdf.GetY()-posY, tocLinkIdByAssetId[dataBreachRisk.Category.Id]) + pdf.Link(20, posY, 180, pdf.GetY()-posY, tocLinkIdByAssetId[dataBreachRisk.CategoryId]) } pdfColorBlack() } } } -func createTrustBoundaries(parsedModel *model.ParsedModel) { +func createTrustBoundaries(parsedModel *types.ParsedModel) { uni := pdf.UnicodeTranslatorFromDescriptor("") title := "Trust Boundaries" pdfColorBlack() @@ -3916,7 +3915,7 @@ func createTrustBoundaries(parsedModel *model.ParsedModel) { } } -func questionsUnanswered(parsedModel *model.ParsedModel) int { +func questionsUnanswered(parsedModel *types.ParsedModel) int { result := 0 for _, answer := range parsedModel.Questions { if len(strings.TrimSpace(answer)) == 0 { 
@@ -3926,7 +3925,7 @@ func questionsUnanswered(parsedModel *model.ParsedModel) int { return result } -func createSharedRuntimes(parsedModel *model.ParsedModel) { +func createSharedRuntimes(parsedModel *types.ParsedModel) { uni := pdf.UnicodeTranslatorFromDescriptor("") title := "Shared Runtimes" pdfColorBlack() @@ -4007,7 +4006,7 @@ func createSharedRuntimes(parsedModel *model.ParsedModel) { } } -func createRiskRulesChecked(parsedModel *model.ParsedModel, modelFilename string, skipRiskRules string, buildTimestamp string, modelHash string, customRiskRules map[string]*model.CustomRisk) { +func createRiskRulesChecked(parsedModel *types.ParsedModel, modelFilename string, skipRiskRules string, buildTimestamp string, modelHash string, customRiskRules map[string]*types.CustomRisk) { pdf.SetTextColor(0, 0, 0) title := "Risk Rules Checked by Threagile" addHeadline(title, false) @@ -5543,7 +5542,7 @@ func createRiskRulesChecked(parsedModel *model.ParsedModel, modelFilename string pdf.MultiCell(160, 6, xml_external_entity.Category().RiskAssessment, "0", "0", false) } -func createTargetDescription(parsedModel *model.ParsedModel, baseFolder string) { +func createTargetDescription(parsedModel *types.ParsedModel, baseFolder string) { uni := pdf.UnicodeTranslatorFromDescriptor("") pdf.SetTextColor(0, 0, 0) title := "Application Overview" @@ -5779,7 +5778,7 @@ func embedDataFlowDiagram(diagramFilenamePNG string, tempFolder string) { } } -func sortedKeysOfIndividualRiskCategories(parsedModel *model.ParsedModel) []string { +func sortedKeysOfIndividualRiskCategories(parsedModel *types.ParsedModel) []string { keys := make([]string, 0) for k := range parsedModel.IndividualRiskCategories { keys = append(keys, k) diff --git a/pkg/run/runner.go b/pkg/run/runner.go index 08f1cb4b..c0c53538 100644 --- a/pkg/run/runner.go +++ b/pkg/run/runner.go @@ -2,9 +2,9 @@ package run import ( + "bytes" "encoding/json" "fmt" - "io" "os" "os/exec" ) @@ -49,17 +49,11 @@ func (p *Runner) Run(in any, out 
any, parameters ...string) error { } defer func() { _ = stdin.Close() }() - stdout, stdoutError := plugin.StdoutPipe() - if stdoutError != nil { - return stdoutError - } - defer func() { _ = stdout.Close() }() + var stdoutBuf bytes.Buffer + plugin.Stdout = &stdoutBuf - stderr, stderrError := plugin.StderrPipe() - if stderrError != nil { - return stderrError - } - defer func() { _ = stderr.Close() }() + var stderrBuf bytes.Buffer + plugin.Stderr = &stderrBuf startError := plugin.Start() if startError != nil { @@ -71,8 +65,6 @@ func (p *Runner) Run(in any, out any, parameters ...string) error { return inError } - _ = os.WriteFile("../../all.json", inData, 0644) - _, writeError := stdin.Write(inData) if writeError != nil { return writeError @@ -83,23 +75,15 @@ func (p *Runner) Run(in any, out any, parameters ...string) error { return inCloseError } - errData, errError := io.ReadAll(stderr) - if errError != nil { - return errError - } - p.ErrorOutput = string(errData) - - outData, outError := io.ReadAll(stdout) - if outError != nil { - return outError - } - waitError := plugin.Wait() if waitError != nil { return fmt.Errorf("%v: %v", waitError, p.ErrorOutput) } - unmarshalError := json.Unmarshal(outData, &p.Out) + p.ErrorOutput = stderrBuf.String() + stdout := stdoutBuf.Bytes() + + unmarshalError := json.Unmarshal(stdout, &p.Out) if unmarshalError != nil { return unmarshalError } diff --git a/pkg/security/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go b/pkg/security/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go index 7df9371a..8303a3d4 100644 --- a/pkg/security/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go +++ b/pkg/security/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go @@ -1,20 +1,19 @@ package accidental_secret_leak import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return 
model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "accidental-secret-leak", Title: "Accidental Secret Leak", Description: "Sourcecode repositories (including their histories) as well as artifact registries can accidentally contain secrets like " + @@ -43,13 +42,13 @@ func SupportedTags() []string { return []string{"git", "nexus"} } -func GenerateRisks(parsedModel *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range parsedModel.SortedTechnicalAssetIDs() { techAsset := parsedModel.TechnicalAssets[id] if !techAsset.OutOfScope && (techAsset.Technology == types.SourcecodeRepository || techAsset.Technology == types.ArtifactRegistry) { - var risk model.Risk + var risk types.Risk if techAsset.IsTaggedWithAny("git") { risk = createRisk(parsedModel, techAsset, "Git", "Git Leak Prevention") } else { @@ -61,7 +60,7 @@ func GenerateRisks(parsedModel *model.ParsedModel) []model.Risk { return risks } -func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAsset, prefix, details string) model.Risk { +func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset, prefix, details string) types.Risk { if len(prefix) > 0 { prefix = " (" + prefix + ")" } @@ -81,9 +80,9 @@ func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAs impact = types.HighImpact } // create risk - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, 
ExploitationImpact: impact, Title: title, @@ -91,6 +90,6 @@ func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAs DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/code-backdooring/code-backdooring-rule.go b/pkg/security/risks/built-in/code-backdooring/code-backdooring-rule.go index 3cae4418..0cee94b6 100644 --- a/pkg/security/risks/built-in/code-backdooring/code-backdooring-rule.go +++ b/pkg/security/risks/built-in/code-backdooring/code-backdooring-rule.go @@ -1,20 +1,19 @@ package code_backdooring import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "code-backdooring", Title: "Code Backdooring", Description: "For each build-pipeline component Code Backdooring risks might arise where attackers compromise the build-pipeline " + @@ -49,8 +48,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(parsedModel *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range parsedModel.SortedTechnicalAssetIDs() { technicalAsset := parsedModel.TechnicalAssets[id] if !technicalAsset.OutOfScope && technicalAsset.Technology.IsDevelopmentRelevant() { @@ -75,7 +74,7 @@ func GenerateRisks(parsedModel *model.ParsedModel) []model.Risk { return risks } -func createRisk(input 
*model.ParsedModel, technicalAsset model.TechnicalAsset, elevatedRisk bool) model.Risk { +func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, elevatedRisk bool) types.Risk { title := "Code Backdooring risk at " + technicalAsset.Title + "" impact := types.LowImpact if technicalAsset.Technology != types.CodeInspectionPlatform { @@ -109,9 +108,9 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, e dataBreachTechnicalAssetIDs = append(dataBreachTechnicalAssetIDs, key) } // create risk - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, @@ -119,6 +118,6 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, e DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: dataBreachTechnicalAssetIDs, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go b/pkg/security/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go index 65ee7058..cae9fb6d 100644 --- a/pkg/security/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go +++ b/pkg/security/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go @@ -1,20 +1,19 @@ package container_baseimage_backdooring import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: 
GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "container-baseimage-backdooring", Title: "Container Base Image Backdooring", Description: "When a technical asset is built using container technologies, Base Image Backdooring risks might arise where " + @@ -44,8 +43,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(parsedModel *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range parsedModel.SortedTechnicalAssetIDs() { technicalAsset := parsedModel.TechnicalAssets[id] if !technicalAsset.OutOfScope && technicalAsset.Machine == types.Container { @@ -55,7 +54,7 @@ func GenerateRisks(parsedModel *model.ParsedModel) []model.Risk { return risks } -func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAsset) model.Risk { +func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { title := "Container Base Image Backdooring risk at " + technicalAsset.Title + "" impact := types.MediumImpact if technicalAsset.HighestConfidentiality(parsedModel) == types.StrictlyConfidential || @@ -63,9 +62,9 @@ func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAs technicalAsset.HighestAvailability(parsedModel) == types.MissionCritical { impact = types.HighImpact } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, @@ -73,6 +72,6 @@ func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAs DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: 
[]string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/container-platform-escape/container-platform-escape-rule.go b/pkg/security/risks/built-in/container-platform-escape/container-platform-escape-rule.go index b56de4ed..bf4a2cf2 100644 --- a/pkg/security/risks/built-in/container-platform-escape/container-platform-escape-rule.go +++ b/pkg/security/risks/built-in/container-platform-escape/container-platform-escape-rule.go @@ -1,20 +1,19 @@ package container_platform_escape import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "container-platform-escape", Title: "Container Platform Escape", Description: "Container platforms are especially interesting targets for attackers as they host big parts of a containerized runtime infrastructure. 
" + @@ -49,8 +48,8 @@ func SupportedTags() []string { return []string{"docker", "kubernetes", "openshift"} } -func GenerateRisks(parsedModel *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range parsedModel.SortedTechnicalAssetIDs() { technicalAsset := parsedModel.TechnicalAssets[id] if !technicalAsset.OutOfScope && technicalAsset.Technology == types.ContainerPlatform { @@ -60,7 +59,7 @@ func GenerateRisks(parsedModel *model.ParsedModel) []model.Risk { return risks } -func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAsset) model.Risk { +func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { title := "Container Platform Escape risk at " + technicalAsset.Title + "" impact := types.MediumImpact if technicalAsset.HighestConfidentiality(parsedModel) == types.StrictlyConfidential || @@ -76,9 +75,9 @@ func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAs } } // create risk - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, @@ -86,6 +85,6 @@ func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAs DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: dataBreachTechnicalAssetIDs, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go b/pkg/security/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go index 6c05f755..83f5317b 100644 --- 
a/pkg/security/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go +++ b/pkg/security/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go @@ -1,20 +1,19 @@ package cross_site_request_forgery import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "cross-site-request-forgery", Title: "Cross-Site Request Forgery (CSRF)", Description: "When a web application is accessed via web protocols Cross-Site Request Forgery (CSRF) risks might arise.", @@ -45,8 +44,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(parsedModel *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range parsedModel.SortedTechnicalAssetIDs() { technicalAsset := parsedModel.TechnicalAssets[id] if technicalAsset.OutOfScope || !technicalAsset.Technology.IsWebApplication() { @@ -66,16 +65,16 @@ func GenerateRisks(parsedModel *model.ParsedModel) []model.Risk { return risks } -func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood types.RiskExploitationLikelihood) model.Risk { +func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingFlow types.CommunicationLink, likelihood types.RiskExploitationLikelihood) types.Risk { sourceAsset := parsedModel.TechnicalAssets[incomingFlow.SourceId] title := "Cross-Site Request Forgery (CSRF) risk at " + technicalAsset.Title + " via " + incomingFlow.Title + " from " + 
sourceAsset.Title + "" impact := types.LowImpact if incomingFlow.HighestIntegrity(parsedModel) == types.MissionCritical { impact = types.MediumImpact } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(likelihood, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, Title: title, @@ -84,6 +83,6 @@ func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAs DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + "@" + incomingFlow.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id + "@" + incomingFlow.Id return risk } diff --git a/pkg/security/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go b/pkg/security/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go index 836ef20a..02d4483c 100644 --- a/pkg/security/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go +++ b/pkg/security/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go @@ -1,20 +1,19 @@ package cross_site_scripting import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "cross-site-scripting", Title: "Cross-Site Scripting (XSS)", Description: "For each web application Cross-Site Scripting (XSS) risks might arise. 
In terms " + @@ -43,8 +42,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope || !technicalAsset.Technology.IsWebApplication() { // TODO: also mobile clients or rich-clients as long as they use web-view... @@ -55,15 +54,15 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAsset) model.Risk { +func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { title := "Cross-Site Scripting (XSS) risk at " + technicalAsset.Title + "" impact := types.MediumImpact if technicalAsset.HighestConfidentiality(parsedModel) == types.StrictlyConfidential || technicalAsset.HighestIntegrity(parsedModel) == types.MissionCritical { impact = types.HighImpact } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Likely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Likely, impact), ExploitationLikelihood: types.Likely, ExploitationImpact: impact, Title: title, @@ -71,6 +70,6 @@ func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAs DataBreachProbability: types.Possible, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go b/pkg/security/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go index 
d31d5597..83a8775f 100644 --- a/pkg/security/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go +++ b/pkg/security/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go @@ -1,20 +1,19 @@ package dos_risky_access_across_trust_boundary import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "dos-risky-access-across-trust-boundary", Title: "DoS-risky Access Across Trust-Boundary", Description: "Assets accessed across trust boundaries with critical or mission-critical availability rating " + @@ -47,8 +46,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope && technicalAsset.Technology != types.LoadBalancer && @@ -70,7 +69,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func checkRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingAccess model.CommunicationLink, hopBetween string, risks []model.Risk) []model.Risk { +func checkRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingAccess types.CommunicationLink, hopBetween string, risks []types.Risk) []types.Risk { if incomingAccess.IsAcrossTrustBoundaryNetworkOnly(input) && !incomingAccess.Protocol.IsProcessLocal() && incomingAccess.Usage != 
types.DevOps { highRisk := technicalAsset.Availability == types.MissionCritical && @@ -81,8 +80,8 @@ func checkRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, in return risks } -func createRisk(techAsset model.TechnicalAsset, dataFlow model.CommunicationLink, hopBetween string, - clientOutsideTrustBoundary model.TechnicalAsset, moreRisky bool) model.Risk { +func createRisk(techAsset types.TechnicalAsset, dataFlow types.CommunicationLink, hopBetween string, + clientOutsideTrustBoundary types.TechnicalAsset, moreRisky bool) types.Risk { impact := types.LowImpact if moreRisky { impact = types.MediumImpact @@ -90,9 +89,9 @@ func createRisk(techAsset model.TechnicalAsset, dataFlow model.CommunicationLink if len(hopBetween) > 0 { hopBetween = " forwarded via " + hopBetween + "" } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: "Denial-of-Service risky access of " + techAsset.Title + " by " + clientOutsideTrustBoundary.Title + @@ -102,6 +101,6 @@ func createRisk(techAsset model.TechnicalAsset, dataFlow model.CommunicationLink DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{}, } - risk.SyntheticId = risk.Category.Id + "@" + techAsset.Id + "@" + clientOutsideTrustBoundary.Id + "@" + dataFlow.Id + risk.SyntheticId = risk.CategoryId + "@" + techAsset.Id + "@" + clientOutsideTrustBoundary.Id + "@" + dataFlow.Id return risk } diff --git a/pkg/security/risks/built-in/incomplete-model/incomplete-model-rule.go b/pkg/security/risks/built-in/incomplete-model/incomplete-model-rule.go index 5b51c5ee..9ec239ff 100644 --- a/pkg/security/risks/built-in/incomplete-model/incomplete-model-rule.go +++ b/pkg/security/risks/built-in/incomplete-model/incomplete-model-rule.go @@ -1,20 +1,19 @@ package 
incomplete_model import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "incomplete-model", Title: "Incomplete Model", Description: "When the threat model contains unknown technologies or transfers data over unknown protocols, this is " + @@ -39,8 +38,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope { @@ -57,11 +56,11 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRiskTechAsset(technicalAsset model.TechnicalAsset) model.Risk { +func createRiskTechAsset(technicalAsset types.TechnicalAsset) types.Risk { title := "Unknown Technology specified at technical asset " + technicalAsset.Title + "" - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, types.LowImpact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, types.LowImpact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: types.LowImpact, Title: title, @@ -69,15 +68,15 @@ func createRiskTechAsset(technicalAsset model.TechnicalAsset) model.Risk { DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + 
technicalAsset.Id return risk } -func createRiskCommLink(technicalAsset model.TechnicalAsset, commLink model.CommunicationLink) model.Risk { +func createRiskCommLink(technicalAsset types.TechnicalAsset, commLink types.CommunicationLink) types.Risk { title := "Unknown Protocol specified for communication link " + commLink.Title + " at technical asset " + technicalAsset.Title + "" - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, types.LowImpact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, types.LowImpact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: types.LowImpact, Title: title, @@ -86,6 +85,6 @@ func createRiskCommLink(technicalAsset model.TechnicalAsset, commLink model.Comm DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + commLink.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + commLink.Id + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/ldap-injection/ldap-injection-rule.go b/pkg/security/risks/built-in/ldap-injection/ldap-injection-rule.go index 2b17e9e7..16ccfced 100644 --- a/pkg/security/risks/built-in/ldap-injection/ldap-injection-rule.go +++ b/pkg/security/risks/built-in/ldap-injection/ldap-injection-rule.go @@ -1,20 +1,19 @@ package ldap_injection import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "ldap-injection", Title: "LDAP-Injection", Description: "When an LDAP server is accessed 
LDAP-Injection risks might arise. " + @@ -38,8 +37,8 @@ func Category() model.RiskCategory { } } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, technicalAsset := range input.TechnicalAssets { incomingFlows := input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] for _, incomingFlow := range incomingFlows { @@ -62,7 +61,7 @@ func SupportedTags() []string { return []string{} } -func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood types.RiskExploitationLikelihood) model.Risk { +func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingFlow types.CommunicationLink, likelihood types.RiskExploitationLikelihood) types.Risk { caller := input.TechnicalAssets[incomingFlow.SourceId] title := "LDAP-Injection risk at " + caller.Title + " against LDAP server " + technicalAsset.Title + "" + " via " + incomingFlow.Title + "" @@ -70,9 +69,9 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i if technicalAsset.HighestConfidentiality(input) == types.StrictlyConfidential || technicalAsset.HighestIntegrity(input) == types.MissionCritical { impact = types.HighImpact } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(likelihood, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, Title: title, @@ -81,6 +80,6 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + caller.Id + "@" + technicalAsset.Id + "@" + incomingFlow.Id + risk.SyntheticId = risk.CategoryId 
+ "@" + caller.Id + "@" + technicalAsset.Id + "@" + incomingFlow.Id return risk } diff --git a/pkg/security/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go b/pkg/security/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go index 8a2a41d4..4b1d7e05 100644 --- a/pkg/security/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go +++ b/pkg/security/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go @@ -1,21 +1,20 @@ package missing_authentication_second_factor import ( - "github.com/threagile/threagile/pkg/model" missing_authentication "github.com/threagile/threagile/pkg/security/risks/built-in/missing-authentication" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "missing-authentication-second-factor", Title: "Missing Two-Factor Authentication (2FA)", Description: "Technical assets (especially multi-tenant systems) should authenticate incoming requests with " + @@ -43,8 +42,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope || diff --git a/pkg/security/risks/built-in/missing-authentication/missing-authentication-rule.go b/pkg/security/risks/built-in/missing-authentication/missing-authentication-rule.go index ddd8474b..c5e919bc 
100644 --- a/pkg/security/risks/built-in/missing-authentication/missing-authentication-rule.go +++ b/pkg/security/risks/built-in/missing-authentication/missing-authentication-rule.go @@ -1,20 +1,19 @@ package missing_authentication import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "missing-authentication", Title: "Missing Authentication", Description: "Technical assets (especially multi-tenant systems) should authenticate incoming requests when the asset processes or stores sensitive data. ", @@ -42,8 +41,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope || technicalAsset.Technology == types.LoadBalancer || @@ -80,8 +79,8 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func CreateRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingAccess, incomingAccessOrigin model.CommunicationLink, hopBetween string, - impact types.RiskExploitationImpact, likelihood types.RiskExploitationLikelihood, twoFactor bool, category model.RiskCategory) model.Risk { +func CreateRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingAccess, incomingAccessOrigin types.CommunicationLink, hopBetween string, + impact types.RiskExploitationImpact, likelihood types.RiskExploitationLikelihood, twoFactor bool, 
category types.RiskCategory) types.Risk { factorString := "" if twoFactor { factorString = "Two-Factor " @@ -89,9 +88,9 @@ func CreateRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i if len(hopBetween) > 0 { hopBetween = "forwarded via " + hopBetween + " " } - risk := model.Risk{ - Category: category, - Severity: model.CalculateSeverity(likelihood, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, Title: "Missing " + factorString + "Authentication covering communication link " + incomingAccess.Title + " " + @@ -102,6 +101,6 @@ func CreateRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i DataBreachProbability: types.Possible, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + incomingAccess.Id + "@" + input.TechnicalAssets[incomingAccess.SourceId].Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + incomingAccess.Id + "@" + input.TechnicalAssets[incomingAccess.SourceId].Id + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go b/pkg/security/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go index e6d0447c..759d9bea 100644 --- a/pkg/security/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go +++ b/pkg/security/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go @@ -1,20 +1,19 @@ package missing_build_infrastructure import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() 
model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "missing-build-infrastructure", Title: "Missing Build Infrastructure", Description: "The modeled architecture does not contain a build infrastructure (devops-client, sourcecode-repo, build-pipeline, etc.), " + @@ -44,11 +43,11 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) hasCustomDevelopedParts, hasBuildPipeline, hasSourcecodeRepo, hasDevOpsClient := false, false, false, false impact := types.LowImpact - var mostRelevantAsset model.TechnicalAsset + var mostRelevantAsset types.TechnicalAsset for _, id := range input.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset technicalAsset := input.TechnicalAssets[id] if technicalAsset.CustomDevelopedParts && !technicalAsset.OutOfScope { @@ -88,11 +87,11 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(technicalAsset model.TechnicalAsset, impact types.RiskExploitationImpact) model.Risk { +func createRisk(technicalAsset types.TechnicalAsset, impact types.RiskExploitationImpact) types.Risk { title := "Missing Build Infrastructure in the threat model (referencing asset " + technicalAsset.Title + " as an example)" - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, @@ -100,6 +99,6 @@ func createRisk(technicalAsset model.TechnicalAsset, impact types.RiskExploitati DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{}, } - 
risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go b/pkg/security/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go index 00339cfa..a3ff56e6 100644 --- a/pkg/security/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go +++ b/pkg/security/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go @@ -3,20 +3,19 @@ package missing_cloud_hardening import ( "sort" - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "missing-cloud-hardening", Title: "Missing Cloud Hardening", Description: "Cloud components should be hardened according to the cloud vendor best practices. 
This affects their " + @@ -59,8 +58,8 @@ func SupportedTags() []string { return res } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) sharedRuntimesWithUnspecificCloudRisks := make(map[string]bool) trustBoundariesWithUnspecificCloudRisks := make(map[string]bool) @@ -279,7 +278,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func addTrustBoundaryAccordingToBaseTag(trustBoundary model.TrustBoundary, +func addTrustBoundaryAccordingToBaseTag(trustBoundary types.TrustBoundary, trustBoundariesWithUnspecificCloudRisks map[string]bool, trustBoundaryIDsAWS map[string]bool, trustBoundaryIDsAzure map[string]bool, @@ -303,7 +302,7 @@ func addTrustBoundaryAccordingToBaseTag(trustBoundary model.TrustBoundary, } } -func addSharedRuntimeAccordingToBaseTag(sharedRuntime model.SharedRuntime, +func addSharedRuntimeAccordingToBaseTag(sharedRuntime types.SharedRuntime, sharedRuntimesWithUnspecificCloudRisks map[string]bool, sharedRuntimeIDsAWS map[string]bool, sharedRuntimeIDsAzure map[string]bool, @@ -327,7 +326,7 @@ func addSharedRuntimeAccordingToBaseTag(sharedRuntime model.SharedRuntime, } } -func addAccordingToBaseTag(techAsset model.TechnicalAsset, tags []string, +func addAccordingToBaseTag(techAsset types.TechnicalAsset, tags []string, techAssetIDsWithTagSpecificCloudRisks map[string]bool, techAssetIDsAWS map[string]bool, techAssetIDsAzure map[string]bool, @@ -336,22 +335,22 @@ func addAccordingToBaseTag(techAsset model.TechnicalAsset, tags []string, if techAsset.IsTaggedWithAny(specificSubTagsAWS...) 
{ techAssetIDsWithTagSpecificCloudRisks[techAsset.Id] = true } - if model.IsTaggedWithBaseTag(tags, "aws") { + if types.IsTaggedWithBaseTag(tags, "aws") { techAssetIDsAWS[techAsset.Id] = true } - if model.IsTaggedWithBaseTag(tags, "azure") { + if types.IsTaggedWithBaseTag(tags, "azure") { techAssetIDsAzure[techAsset.Id] = true } - if model.IsTaggedWithBaseTag(tags, "gcp") { + if types.IsTaggedWithBaseTag(tags, "gcp") { techAssetIDsGCP[techAsset.Id] = true } - if model.IsTaggedWithBaseTag(tags, "ocp") { + if types.IsTaggedWithBaseTag(tags, "ocp") { techAssetIDsOCP[techAsset.Id] = true } } -func findMostSensitiveTechnicalAsset(input *model.ParsedModel, techAssets map[string]bool) model.TechnicalAsset { - var mostRelevantAsset model.TechnicalAsset +func findMostSensitiveTechnicalAsset(input *types.ParsedModel, techAssets map[string]bool) types.TechnicalAsset { + var mostRelevantAsset types.TechnicalAsset keys := make([]string, 0, len(techAssets)) for k := range techAssets { keys = append(keys, k) @@ -366,7 +365,7 @@ func findMostSensitiveTechnicalAsset(input *model.ParsedModel, techAssets map[st return mostRelevantAsset } -func createRiskForSharedRuntime(input *model.ParsedModel, sharedRuntime model.SharedRuntime, prefix, details string) model.Risk { +func createRiskForSharedRuntime(input *types.ParsedModel, sharedRuntime types.SharedRuntime, prefix, details string) types.Risk { if len(prefix) > 0 { prefix = " (" + prefix + ")" } @@ -386,9 +385,9 @@ func createRiskForSharedRuntime(input *model.ParsedModel, sharedRuntime model.Sh impact = types.VeryHighImpact } // create risk - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, @@ -396,11 +395,11 @@ func createRiskForSharedRuntime(input *model.ParsedModel, sharedRuntime 
model.Sh DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: sharedRuntime.TechnicalAssetsRunning, } - risk.SyntheticId = risk.Category.Id + "@" + sharedRuntime.Id + risk.SyntheticId = risk.CategoryId + "@" + sharedRuntime.Id return risk } -func createRiskForTrustBoundary(parsedModel *model.ParsedModel, trustBoundary model.TrustBoundary, prefix, details string) model.Risk { +func createRiskForTrustBoundary(parsedModel *types.ParsedModel, trustBoundary types.TrustBoundary, prefix, details string) types.Risk { if len(prefix) > 0 { prefix = " (" + prefix + ")" } @@ -420,9 +419,9 @@ func createRiskForTrustBoundary(parsedModel *model.ParsedModel, trustBoundary mo impact = types.VeryHighImpact } // create risk - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, @@ -430,11 +429,11 @@ func createRiskForTrustBoundary(parsedModel *model.ParsedModel, trustBoundary mo DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: trustBoundary.RecursivelyAllTechnicalAssetIDsInside(parsedModel), } - risk.SyntheticId = risk.Category.Id + "@" + trustBoundary.Id + risk.SyntheticId = risk.CategoryId + "@" + trustBoundary.Id return risk } -func createRiskForTechnicalAsset(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAsset, prefix, details string) model.Risk { +func createRiskForTechnicalAsset(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset, prefix, details string) types.Risk { if len(prefix) > 0 { prefix = " (" + prefix + ")" } @@ -454,9 +453,9 @@ func createRiskForTechnicalAsset(parsedModel *model.ParsedModel, technicalAsset impact = types.VeryHighImpact } // create risk - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, impact), + 
risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, @@ -464,6 +463,6 @@ func createRiskForTechnicalAsset(parsedModel *model.ParsedModel, technicalAsset DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/missing-file-validation/missing-file-validation-rule.go b/pkg/security/risks/built-in/missing-file-validation/missing-file-validation-rule.go index 342c7fcc..c7b64a8e 100644 --- a/pkg/security/risks/built-in/missing-file-validation/missing-file-validation-rule.go +++ b/pkg/security/risks/built-in/missing-file-validation/missing-file-validation-rule.go @@ -1,20 +1,19 @@ package missing_file_validation import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "missing-file-validation", Title: "Missing File Validation", Description: "When a technical asset accepts files, these input files should be strictly validated about filename and type.", @@ -43,8 +42,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope 
|| !technicalAsset.CustomDevelopedParts { @@ -59,7 +58,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset) model.Risk { +func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { title := "Missing File Validation risk at " + technicalAsset.Title + "" impact := types.LowImpact if technicalAsset.HighestConfidentiality(input) == types.StrictlyConfidential || @@ -67,9 +66,9 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset) m technicalAsset.HighestAvailability(input) == types.MissionCritical { impact = types.MediumImpact } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.VeryLikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.VeryLikely, impact), ExploitationLikelihood: types.VeryLikely, ExploitationImpact: impact, Title: title, @@ -77,6 +76,6 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset) m DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/missing-hardening/missing-hardening-rule.go b/pkg/security/risks/built-in/missing-hardening/missing-hardening-rule.go index 9cf57bab..29323a75 100644 --- a/pkg/security/risks/built-in/missing-hardening/missing-hardening-rule.go +++ b/pkg/security/risks/built-in/missing-hardening/missing-hardening-rule.go @@ -3,23 +3,22 @@ package missing_hardening import ( "strconv" - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) const raaLimit = 55 const raaLimitReduced = 40 -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + 
return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "missing-hardening", Title: "Missing Hardening", Description: "Technical assets with a Relative Attacker Attractiveness (RAA) value of " + strconv.Itoa(raaLimit) + " % or higher should be " + @@ -46,8 +45,8 @@ func SupportedTags() []string { return []string{"tomcat"} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope { @@ -60,15 +59,15 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset) model.Risk { +func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { title := "Missing Hardening risk at " + technicalAsset.Title + "" impact := types.LowImpact if technicalAsset.HighestConfidentiality(input) == types.StrictlyConfidential || technicalAsset.HighestIntegrity(input) == types.MissionCritical { impact = types.MediumImpact } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Likely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Likely, impact), ExploitationLikelihood: types.Likely, ExploitationImpact: impact, Title: title, @@ -76,6 +75,6 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset) m DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + 
technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go b/pkg/security/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go index 4213938a..5af2c24e 100644 --- a/pkg/security/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go +++ b/pkg/security/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go @@ -1,20 +1,19 @@ package missing_identity_propagation import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "missing-identity-propagation", Title: "Missing Identity Propagation", Description: "Technical assets (especially multi-tenant systems), which usually process data for end users should " + @@ -48,8 +47,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope { @@ -86,14 +85,14 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingAccess model.CommunicationLink, moreRisky bool) model.Risk { +func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingAccess types.CommunicationLink, moreRisky bool) types.Risk { impact := types.LowImpact if moreRisky { impact = 
types.MediumImpact } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: "Missing End User Identity Propagation over communication link " + incomingAccess.Title + " " + @@ -104,6 +103,6 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + incomingAccess.Id + "@" + input.TechnicalAssets[incomingAccess.SourceId].Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + incomingAccess.Id + "@" + input.TechnicalAssets[incomingAccess.SourceId].Id + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go b/pkg/security/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go index 7e1db5f8..9c064cb0 100644 --- a/pkg/security/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go +++ b/pkg/security/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go @@ -1,20 +1,19 @@ package missing_identity_provider_isolation import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "missing-identity-provider-isolation", Title: "Missing Identity Provider 
Isolation", Description: "Highly sensitive identity provider assets and their identity data stores should be isolated from other assets " + @@ -44,8 +43,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, technicalAsset := range input.TechnicalAssets { if !technicalAsset.OutOfScope && technicalAsset.Technology.IsIdentityRelated() { moreImpact := technicalAsset.Confidentiality == types.StrictlyConfidential || @@ -75,7 +74,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(techAsset model.TechnicalAsset, moreImpact bool, sameExecutionEnv bool) model.Risk { +func createRisk(techAsset types.TechnicalAsset, moreImpact bool, sameExecutionEnv bool) types.Risk { impact := types.HighImpact likelihood := types.Unlikely others := "in the same network segment" @@ -86,9 +85,9 @@ func createRisk(techAsset model.TechnicalAsset, moreImpact bool, sameExecutionEn likelihood = types.Likely others = "in the same execution environment" } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(likelihood, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, Title: "Missing Identity Provider Isolation to further encapsulate and protect identity-related asset " + techAsset.Title + " against unrelated " + @@ -97,6 +96,6 @@ func createRisk(techAsset model.TechnicalAsset, moreImpact bool, sameExecutionEn DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{techAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + techAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + techAsset.Id return risk } diff --git 
a/pkg/security/risks/built-in/missing-identity-store/missing-identity-store-rule.go b/pkg/security/risks/built-in/missing-identity-store/missing-identity-store-rule.go index c470079e..2e6767dd 100644 --- a/pkg/security/risks/built-in/missing-identity-store/missing-identity-store-rule.go +++ b/pkg/security/risks/built-in/missing-identity-store/missing-identity-store-rule.go @@ -1,20 +1,19 @@ package missing_identity_store import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "missing-identity-store", Title: "Missing Identity Store", Description: "The modeled architecture does not contain an identity store, which might be the risk of a model missing " + @@ -42,8 +41,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, technicalAsset := range input.TechnicalAssets { if !technicalAsset.OutOfScope && (technicalAsset.Technology == types.IdentityStoreLDAP || technicalAsset.Technology == types.IdentityStoreDatabase) { @@ -53,7 +52,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { } // now check if we have end user identity authorized communication links, then it's a risk riskIdentified := false - var mostRelevantAsset model.TechnicalAsset + var mostRelevantAsset types.TechnicalAsset impact := types.LowImpact for _, id := range input.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset 
technicalAsset := input.TechnicalAssets[id] @@ -87,11 +86,11 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(technicalAsset model.TechnicalAsset, impact types.RiskExploitationImpact) model.Risk { +func createRisk(technicalAsset types.TechnicalAsset, impact types.RiskExploitationImpact) types.Risk { title := "Missing Identity Store in the threat model (referencing asset " + technicalAsset.Title + " as an example)" - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, @@ -99,6 +98,6 @@ func createRisk(technicalAsset model.TechnicalAsset, impact types.RiskExploitati DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{}, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go b/pkg/security/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go index d0db7a29..ab11b08a 100644 --- a/pkg/security/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go +++ b/pkg/security/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go @@ -3,22 +3,21 @@ package missing_network_segmentation import ( "sort" - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) const raaLimit = 50 -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func 
Category() types.RiskCategory { + return types.RiskCategory{ Id: "missing-network-segmentation", Title: "Missing Network Segmentation", Description: "Highly sensitive assets and/or data stores residing in the same network segment than other " + @@ -51,8 +50,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: keys := make([]string, 0) @@ -86,14 +85,14 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(techAsset model.TechnicalAsset, moreRisky bool) model.Risk { +func createRisk(techAsset types.TechnicalAsset, moreRisky bool) types.Risk { impact := types.LowImpact if moreRisky { impact = types.MediumImpact } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: "Missing Network Segmentation to further encapsulate and protect " + techAsset.Title + " against unrelated " + @@ -102,6 +101,6 @@ func createRisk(techAsset model.TechnicalAsset, moreRisky bool) model.Risk { DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{techAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + techAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + techAsset.Id return risk } diff --git a/pkg/security/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go b/pkg/security/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go index 1bb54e82..5080663f 100644 --- 
a/pkg/security/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go +++ b/pkg/security/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go @@ -1,20 +1,19 @@ package missing_vault_isolation import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "missing-vault-isolation", Title: "Missing Vault Isolation", Description: "Highly sensitive vault assets and their data stores should be isolated from other assets " + @@ -44,8 +43,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, technicalAsset := range input.TechnicalAssets { if !technicalAsset.OutOfScope && technicalAsset.Technology == types.Vault { moreImpact := technicalAsset.Confidentiality == types.StrictlyConfidential || @@ -75,11 +74,11 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func isVaultStorage(parsedModel *model.ParsedModel, vault model.TechnicalAsset, storage model.TechnicalAsset) bool { +func isVaultStorage(parsedModel *types.ParsedModel, vault types.TechnicalAsset, storage types.TechnicalAsset) bool { return storage.Type == types.Datastore && vault.HasDirectConnection(parsedModel, storage.Id) } -func createRisk(techAsset model.TechnicalAsset, moreImpact bool, sameExecutionEnv bool) model.Risk { +func createRisk(techAsset types.TechnicalAsset, moreImpact bool, sameExecutionEnv bool) types.Risk { impact := types.MediumImpact likelihood := 
types.Unlikely others := "in the same network segment" @@ -90,9 +89,9 @@ func createRisk(techAsset model.TechnicalAsset, moreImpact bool, sameExecutionEn likelihood = types.Likely others = "in the same execution environment" } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(likelihood, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, Title: "Missing Vault Isolation to further encapsulate and protect vault-related asset " + techAsset.Title + " against unrelated " + @@ -101,6 +100,6 @@ func createRisk(techAsset model.TechnicalAsset, moreImpact bool, sameExecutionEn DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{techAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + techAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + techAsset.Id return risk } diff --git a/pkg/security/risks/built-in/missing-vault/missing-vault-rule.go b/pkg/security/risks/built-in/missing-vault/missing-vault-rule.go index dbb596be..7cd26ca1 100644 --- a/pkg/security/risks/built-in/missing-vault/missing-vault-rule.go +++ b/pkg/security/risks/built-in/missing-vault/missing-vault-rule.go @@ -1,20 +1,19 @@ package missing_vault import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "missing-vault", Title: "Missing Vault (Secret Storage)", Description: "In order to avoid the risk of secret leakage via config files (when attacked through vulnerabilities being able to " + @@ -43,10 +42,10 @@ func 
SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) hasVault := false - var mostRelevantAsset model.TechnicalAsset + var mostRelevantAsset types.TechnicalAsset impact := types.LowImpact for _, id := range input.SortedTechnicalAssetIDs() { // use the sorted one to always get the same tech asset with the highest sensitivity as example asset techAsset := input.TechnicalAssets[id] @@ -74,11 +73,11 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(technicalAsset model.TechnicalAsset, impact types.RiskExploitationImpact) model.Risk { +func createRisk(technicalAsset types.TechnicalAsset, impact types.RiskExploitationImpact) types.Risk { title := "Missing Vault (Secret Storage) in the threat model (referencing asset " + technicalAsset.Title + " as an example)" - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, @@ -86,6 +85,6 @@ func createRisk(technicalAsset model.TechnicalAsset, impact types.RiskExploitati DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{}, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/missing-waf/missing-waf-rule.go b/pkg/security/risks/built-in/missing-waf/missing-waf-rule.go index 67166f51..8ffcc3b3 100644 --- a/pkg/security/risks/built-in/missing-waf/missing-waf-rule.go +++ b/pkg/security/risks/built-in/missing-waf/missing-waf-rule.go @@ -1,20 +1,19 @@ package missing_waf import ( - 
"github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "missing-waf", Title: "Missing Web Application Firewall (WAF)", Description: "To have a first line of filtering defense, security architectures with web-services or web-applications should include a WAF in front of them. " + @@ -42,8 +41,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, technicalAsset := range input.TechnicalAssets { if !technicalAsset.OutOfScope && (technicalAsset.Technology.IsWebApplication() || technicalAsset.Technology.IsWebService()) { @@ -60,7 +59,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset) model.Risk { +func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { title := "Missing Web Application Firewall (WAF) risk at " + technicalAsset.Title + "" likelihood := types.Unlikely impact := types.LowImpact @@ -69,9 +68,9 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset) m technicalAsset.HighestAvailability(input) == types.MissionCritical { impact = types.MediumImpact } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(likelihood, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, 
Title: title, @@ -79,6 +78,6 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset) m DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go b/pkg/security/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go index 5460af2d..6ab3f365 100644 --- a/pkg/security/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go +++ b/pkg/security/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go @@ -3,20 +3,19 @@ package mixed_targets_on_shared_runtime import ( "sort" - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "mixed-targets-on-shared-runtime", Title: "Mixed Targets on Shared Runtime", Description: "Different attacker targets (like frontend and backend/datastore components) should not be running on the same " + @@ -47,8 +46,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: keys := make([]string, 0) for k := range input.SharedRuntimes { @@ -82,14 +81,14 @@ func 
GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(input *model.ParsedModel, sharedRuntime model.SharedRuntime) model.Risk { +func createRisk(input *types.ParsedModel, sharedRuntime types.SharedRuntime) types.Risk { impact := types.LowImpact if isMoreRisky(input, sharedRuntime) { impact = types.MediumImpact } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: "Mixed Targets on Shared Runtime named " + sharedRuntime.Title + " might enable attackers moving from one less " + @@ -98,11 +97,11 @@ func createRisk(input *model.ParsedModel, sharedRuntime model.SharedRuntime) mod DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: sharedRuntime.TechnicalAssetsRunning, } - risk.SyntheticId = risk.Category.Id + "@" + sharedRuntime.Id + risk.SyntheticId = risk.CategoryId + "@" + sharedRuntime.Id return risk } -func isMoreRisky(input *model.ParsedModel, sharedRuntime model.SharedRuntime) bool { +func isMoreRisky(input *types.ParsedModel, sharedRuntime types.SharedRuntime) bool { for _, techAssetId := range sharedRuntime.TechnicalAssetsRunning { techAsset := input.TechnicalAssets[techAssetId] if techAsset.Confidentiality == types.StrictlyConfidential || techAsset.Integrity == types.MissionCritical || diff --git a/pkg/security/risks/built-in/path-traversal/path-traversal-rule.go b/pkg/security/risks/built-in/path-traversal/path-traversal-rule.go index 6aaf85cd..fd6b6e45 100644 --- a/pkg/security/risks/built-in/path-traversal/path-traversal-rule.go +++ b/pkg/security/risks/built-in/path-traversal/path-traversal-rule.go @@ -1,20 +1,19 @@ package path_traversal import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() 
model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "path-traversal", Title: "Path-Traversal", Description: "When a filesystem is accessed Path-Traversal or Local-File-Inclusion (LFI) risks might arise. " + @@ -40,8 +39,8 @@ func Category() model.RiskCategory { } } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if technicalAsset.Technology != types.FileServer && technicalAsset.Technology != types.LocalFileSystem { @@ -66,7 +65,7 @@ func SupportedTags() []string { return []string{} } -func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood types.RiskExploitationLikelihood) model.Risk { +func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingFlow types.CommunicationLink, likelihood types.RiskExploitationLikelihood) types.Risk { caller := input.TechnicalAssets[incomingFlow.SourceId] title := "Path-Traversal risk at " + caller.Title + " against filesystem " + technicalAsset.Title + "" + " via " + incomingFlow.Title + "" @@ -74,9 +73,9 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i if technicalAsset.HighestConfidentiality(input) == types.StrictlyConfidential || technicalAsset.HighestIntegrity(input) == types.MissionCritical { impact = types.HighImpact } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(likelihood, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: 
types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, Title: title, @@ -85,6 +84,6 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + caller.Id + "@" + technicalAsset.Id + "@" + incomingFlow.Id + risk.SyntheticId = risk.CategoryId + "@" + caller.Id + "@" + technicalAsset.Id + "@" + incomingFlow.Id return risk } diff --git a/pkg/security/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go b/pkg/security/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go index 76a2dee8..93bb296a 100644 --- a/pkg/security/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go +++ b/pkg/security/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go @@ -1,20 +1,19 @@ package push_instead_of_pull_deployment import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "push-instead-of-pull-deployment", Title: "Push instead of Pull Deployment", Description: "When comparing push-based vs. 
pull-based deployments from a security perspective, pull-based " + @@ -44,8 +43,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) impact := types.LowImpact for _, buildPipeline := range input.TechnicalAssets { if buildPipeline.Technology == types.BuildPipeline { @@ -66,11 +65,11 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(buildPipeline model.TechnicalAsset, deploymentTarget model.TechnicalAsset, deploymentCommLink model.CommunicationLink, impact types.RiskExploitationImpact) model.Risk { +func createRisk(buildPipeline types.TechnicalAsset, deploymentTarget types.TechnicalAsset, deploymentCommLink types.CommunicationLink, impact types.RiskExploitationImpact) types.Risk { title := "Push instead of Pull Deployment at " + deploymentTarget.Title + " via build pipeline asset " + buildPipeline.Title + "" - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, @@ -79,6 +78,6 @@ func createRisk(buildPipeline model.TechnicalAsset, deploymentTarget model.Techn DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{deploymentTarget.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + buildPipeline.Id + risk.SyntheticId = risk.CategoryId + "@" + buildPipeline.Id return risk } diff --git a/pkg/security/risks/built-in/search-query-injection/search-query-injection-rule.go b/pkg/security/risks/built-in/search-query-injection/search-query-injection-rule.go index 4cc24fb7..96c1e621 100644 --- a/pkg/security/risks/built-in/search-query-injection/search-query-injection-rule.go +++ 
b/pkg/security/risks/built-in/search-query-injection/search-query-injection-rule.go @@ -1,20 +1,19 @@ package search_query_injection import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "search-query-injection", Title: "Search-Query Injection", Description: "When a search engine server is accessed Search-Query Injection risks might arise." + @@ -41,8 +40,8 @@ func Category() model.RiskCategory { } } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if technicalAsset.Technology == types.SearchEngine || technicalAsset.Technology == types.SearchIndex { @@ -69,7 +68,7 @@ func SupportedTags() []string { return []string{} } -func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink, likelihood types.RiskExploitationLikelihood) model.Risk { +func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingFlow types.CommunicationLink, likelihood types.RiskExploitationLikelihood) types.Risk { caller := input.TechnicalAssets[incomingFlow.SourceId] title := "Search Query Injection risk at " + caller.Title + " against search engine server " + technicalAsset.Title + "" + " via " + incomingFlow.Title + "" @@ -79,9 +78,9 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i } else if technicalAsset.HighestConfidentiality(input) <= types.Internal && 
technicalAsset.HighestIntegrity(input) == types.Operational { impact = types.LowImpact } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(likelihood, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, Title: title, @@ -90,6 +89,6 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + caller.Id + "@" + technicalAsset.Id + "@" + incomingFlow.Id + risk.SyntheticId = risk.CategoryId + "@" + caller.Id + "@" + technicalAsset.Id + "@" + incomingFlow.Id return risk } diff --git a/pkg/security/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go b/pkg/security/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go index 85b53452..c0106916 100644 --- a/pkg/security/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go +++ b/pkg/security/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go @@ -1,20 +1,19 @@ package server_side_request_forgery import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "server-side-request-forgery", Title: "Server-Side Request Forgery (SSRF)", Description: "When a server system (i.e. 
not a client) is accessing other server systems via typical web protocols " + @@ -44,8 +43,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope || technicalAsset.Technology.IsClient() || technicalAsset.Technology == types.LoadBalancer { @@ -60,7 +59,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, outgoingFlow model.CommunicationLink) model.Risk { +func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, outgoingFlow types.CommunicationLink) types.Risk { target := input.TechnicalAssets[outgoingFlow.TargetId] title := "Server-Side Request Forgery (SSRF) risk at " + technicalAsset.Title + " server-side web-requesting " + "the target " + target.Title + " via " + outgoingFlow.Title + "" @@ -96,9 +95,9 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, o if outgoingFlow.Usage == types.DevOps { likelihood = types.Unlikely } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(likelihood, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, Title: title, @@ -107,6 +106,6 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, o DataBreachProbability: types.Possible, DataBreachTechnicalAssetIDs: dataBreachTechnicalAssetIDs, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + "@" + target.Id + "@" + outgoingFlow.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id + "@" + target.Id + "@" + 
outgoingFlow.Id return risk } diff --git a/pkg/security/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go b/pkg/security/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go index e9d286ef..d0727f67 100644 --- a/pkg/security/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go +++ b/pkg/security/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go @@ -1,20 +1,19 @@ package service_registry_poisoning import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "service-registry-poisoning", Title: "Service Registry Poisoning", Description: "When a service registry used for discovery of trusted service endpoints Service Registry Poisoning risks might arise.", @@ -41,8 +40,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope && technicalAsset.Technology == types.ServiceRegistry { @@ -53,7 +52,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlows []model.CommunicationLink) model.Risk { +func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingFlows []types.CommunicationLink) types.Risk { title := "Service Registry Poisoning risk 
at " + technicalAsset.Title + "" impact := types.LowImpact @@ -67,9 +66,9 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i } } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, @@ -77,6 +76,6 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, // TODO: find all service-lookup-using tech assets, which then might use spoofed lookups? } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go b/pkg/security/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go index 5fe4cfd9..48f8bd46 100644 --- a/pkg/security/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go +++ b/pkg/security/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go @@ -1,20 +1,19 @@ package sql_nosql_injection import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "sql-nosql-injection", Title: "SQL/NoSQL-Injection", Description: "When a database is accessed via database access protocols SQL/NoSQL-Injection risks might arise. 
" + @@ -41,8 +40,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] incomingFlows := input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] @@ -59,7 +58,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink) model.Risk { +func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingFlow types.CommunicationLink) types.Risk { caller := input.TechnicalAssets[incomingFlow.SourceId] title := "SQL/NoSQL-Injection risk at " + caller.Title + " against database " + technicalAsset.Title + "" + " via " + incomingFlow.Title + "" @@ -71,9 +70,9 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i if incomingFlow.Usage == types.DevOps { likelihood = types.Likely } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(likelihood, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, Title: title, @@ -82,6 +81,6 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, i DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + caller.Id + "@" + technicalAsset.Id + "@" + incomingFlow.Id + risk.SyntheticId = risk.CategoryId + "@" + caller.Id + "@" + technicalAsset.Id + "@" + incomingFlow.Id return risk } diff --git a/pkg/security/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go 
b/pkg/security/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go index e57a0d91..82c175cc 100644 --- a/pkg/security/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go +++ b/pkg/security/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go @@ -1,20 +1,19 @@ package unchecked_deployment import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "unchecked-deployment", Title: "Unchecked Deployment", Description: "For each build-pipeline component Unchecked Deployment risks might arise when the build-pipeline " + @@ -44,8 +43,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, technicalAsset := range input.TechnicalAssets { if technicalAsset.Technology.IsDevelopmentRelevant() { risks = append(risks, createRisk(input, technicalAsset)) @@ -54,7 +53,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset) model.Risk { +func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { title := "Unchecked Deployment risk at " + technicalAsset.Title + "" // impact is depending on highest rating impact := types.LowImpact @@ -84,9 +83,9 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset) m dataBreachTechnicalAssetIDs = append(dataBreachTechnicalAssetIDs, key) } // create risk - risk := model.Risk{ - 
Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, @@ -94,6 +93,6 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset) m DataBreachProbability: types.Possible, DataBreachTechnicalAssetIDs: dataBreachTechnicalAssetIDs, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go b/pkg/security/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go index d43b88b9..cc8bd777 100644 --- a/pkg/security/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go +++ b/pkg/security/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go @@ -1,20 +1,19 @@ package unencrypted_asset import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "unencrypted-asset", Title: "Unencrypted Technical Assets", Description: "Due to the confidentiality rating of the technical asset itself and/or the processed data assets " + @@ -46,8 +45,8 @@ func SupportedTags() []string { // check for technical assets that should be encrypted due to their confidentiality -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range 
input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope && !IsEncryptionWaiver(technicalAsset) && @@ -74,20 +73,20 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { // Simple routing assets like 'Reverse Proxy' or 'Load Balancer' usually don't have their own storage and thus have no // encryption requirement for the asset itself (though for the communication, but that's a different rule) -func IsEncryptionWaiver(asset model.TechnicalAsset) bool { +func IsEncryptionWaiver(asset types.TechnicalAsset) bool { return asset.Technology == types.ReverseProxy || asset.Technology == types.LoadBalancer || asset.Technology == types.WAF || asset.Technology == types.IDS || asset.Technology == types.IPS || asset.Technology.IsEmbeddedComponent() } -func createRisk(technicalAsset model.TechnicalAsset, impact types.RiskExploitationImpact, requiresEndUserKey bool) model.Risk { +func createRisk(technicalAsset types.TechnicalAsset, impact types.RiskExploitationImpact, requiresEndUserKey bool) types.Risk { title := "Unencrypted Technical Asset named " + technicalAsset.Title + "" if requiresEndUserKey { title += " missing end user individual encryption with " + types.DataWithEndUserIndividualKey.String() } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, @@ -95,6 +94,6 @@ func createRisk(technicalAsset model.TechnicalAsset, impact types.RiskExploitati DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id return risk } diff --git 
a/pkg/security/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go b/pkg/security/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go index 2f66e243..96a71c2b 100644 --- a/pkg/security/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go +++ b/pkg/security/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go @@ -1,20 +1,19 @@ package unencrypted_communication import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "unencrypted-communication", Title: "Unencrypted Communication", Description: "Due to the confidentiality and/or integrity rating of the data assets transferred over the " + @@ -43,8 +42,8 @@ func SupportedTags() []string { // check for communication links that should be encrypted due to their confidentiality and/or integrity -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, technicalAsset := range input.TechnicalAssets { for _, dataFlow := range technicalAsset.CommunicationLinks { transferringAuthData := dataFlow.Authentication != types.NoneAuthentication @@ -86,7 +85,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, dataFlow model.CommunicationLink, highRisk bool, transferringAuthData bool) model.Risk { +func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, dataFlow types.CommunicationLink, highRisk 
bool, transferringAuthData bool) types.Risk { impact := types.MediumImpact if highRisk { impact = types.HighImpact @@ -104,9 +103,9 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, d if dataFlow.IsAcrossTrustBoundaryNetworkOnly(input) { likelihood = types.Likely } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(likelihood, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, Title: title, @@ -115,14 +114,14 @@ func createRisk(input *model.ParsedModel, technicalAsset model.TechnicalAsset, d DataBreachProbability: types.Possible, DataBreachTechnicalAssetIDs: []string{target.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + dataFlow.Id + "@" + technicalAsset.Id + "@" + target.Id + risk.SyntheticId = risk.CategoryId + "@" + dataFlow.Id + "@" + technicalAsset.Id + "@" + target.Id return risk } -func isHighSensitivity(dataAsset model.DataAsset) bool { +func isHighSensitivity(dataAsset types.DataAsset) bool { return dataAsset.Confidentiality == types.StrictlyConfidential || dataAsset.Integrity == types.MissionCritical } -func isMediumSensitivity(dataAsset model.DataAsset) bool { +func isMediumSensitivity(dataAsset types.DataAsset) bool { return dataAsset.Confidentiality == types.Confidential || dataAsset.Integrity == types.Critical } diff --git a/pkg/security/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go b/pkg/security/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go index 5996199c..2c0e240d 100644 --- a/pkg/security/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go +++ b/pkg/security/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go @@ -3,20 +3,19 @@ package unguarded_access_from_internet import ( "sort" - 
"github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "unguarded-access-from-internet", Title: "Unguarded Access From Internet", Description: "Internet-exposed assets must be guarded by a protecting service, application, " + @@ -53,13 +52,13 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope { commLinks := input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] - sort.Sort(model.ByTechnicalCommunicationLinkIdSort(commLinks)) + sort.Sort(types.ByTechnicalCommunicationLinkIdSort(commLinks)) for _, incomingAccess := range commLinks { if technicalAsset.Technology != types.LoadBalancer { if !technicalAsset.CustomDevelopedParts { @@ -92,15 +91,15 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(dataStore model.TechnicalAsset, dataFlow model.CommunicationLink, - clientFromInternet model.TechnicalAsset, moreRisky bool) model.Risk { +func createRisk(dataStore types.TechnicalAsset, dataFlow types.CommunicationLink, + clientFromInternet types.TechnicalAsset, moreRisky bool) types.Risk { impact := types.LowImpact if moreRisky || dataStore.RAA > 40 { impact = types.MediumImpact } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.VeryLikely, impact), + risk := types.Risk{ + 
CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.VeryLikely, impact), ExploitationLikelihood: types.VeryLikely, ExploitationImpact: impact, Title: "Unguarded Access from Internet of " + dataStore.Title + " by " + @@ -110,6 +109,6 @@ func createRisk(dataStore model.TechnicalAsset, dataFlow model.CommunicationLink DataBreachProbability: types.Possible, DataBreachTechnicalAssetIDs: []string{dataStore.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + dataStore.Id + "@" + clientFromInternet.Id + "@" + dataFlow.Id + risk.SyntheticId = risk.CategoryId + "@" + dataStore.Id + "@" + clientFromInternet.Id + "@" + dataFlow.Id return risk } diff --git a/pkg/security/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go b/pkg/security/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go index fbd3cef3..2ab941ef 100644 --- a/pkg/security/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go +++ b/pkg/security/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go @@ -1,20 +1,19 @@ package unguarded_direct_datastore_access import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "unguarded-direct-datastore-access", Title: "Unguarded Direct Datastore Access", Description: "Data stores accessed across trust boundaries must be guarded by some protecting service or application.", @@ -45,8 +44,8 @@ func SupportedTags() []string { // check for data stores that should not be accessed directly across trust boundaries -func 
GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope && technicalAsset.Type == types.Datastore { @@ -71,7 +70,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func isSharingSameParentTrustBoundary(input *model.ParsedModel, left, right model.TechnicalAsset) bool { +func isSharingSameParentTrustBoundary(input *types.ParsedModel, left, right types.TechnicalAsset) bool { tbIDLeft, tbIDRight := left.GetTrustBoundaryId(input), right.GetTrustBoundaryId(input) if len(tbIDLeft) == 0 && len(tbIDRight) > 0 { return false @@ -97,19 +96,19 @@ func isSharingSameParentTrustBoundary(input *model.ParsedModel, left, right mode return false } -func FileServerAccessViaFTP(technicalAsset model.TechnicalAsset, incomingAccess model.CommunicationLink) bool { +func FileServerAccessViaFTP(technicalAsset types.TechnicalAsset, incomingAccess types.CommunicationLink) bool { return technicalAsset.Technology == types.FileServer && (incomingAccess.Protocol == types.FTP || incomingAccess.Protocol == types.FTPS || incomingAccess.Protocol == types.SFTP) } -func createRisk(dataStore model.TechnicalAsset, dataFlow model.CommunicationLink, clientOutsideTrustBoundary model.TechnicalAsset, moreRisky bool) model.Risk { +func createRisk(dataStore types.TechnicalAsset, dataFlow types.CommunicationLink, clientOutsideTrustBoundary types.TechnicalAsset, moreRisky bool) types.Risk { impact := types.LowImpact if moreRisky || dataStore.RAA > 40 { impact = types.MediumImpact } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Likely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Likely, impact), ExploitationLikelihood: types.Likely, 
ExploitationImpact: impact, Title: "Unguarded Direct Datastore Access of " + dataStore.Title + " by " + @@ -119,6 +118,6 @@ func createRisk(dataStore model.TechnicalAsset, dataFlow model.CommunicationLink DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{dataStore.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + dataFlow.Id + "@" + clientOutsideTrustBoundary.Id + "@" + dataStore.Id + risk.SyntheticId = risk.CategoryId + "@" + dataFlow.Id + "@" + clientOutsideTrustBoundary.Id + "@" + dataStore.Id return risk } diff --git a/pkg/security/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go b/pkg/security/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go index 2e158a87..38646c2b 100644 --- a/pkg/security/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go +++ b/pkg/security/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go @@ -1,20 +1,19 @@ package unnecessary_communication_link import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "unnecessary-communication-link", Title: "Unnecessary Communication Link", Description: "When a technical communication link does not send or receive any data assets, this is " + @@ -39,8 +38,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { 
technicalAsset := input.TechnicalAssets[id] for _, commLink := range technicalAsset.CommunicationLinks { @@ -54,11 +53,11 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(technicalAsset model.TechnicalAsset, commLink model.CommunicationLink) model.Risk { +func createRisk(technicalAsset types.TechnicalAsset, commLink types.CommunicationLink) types.Risk { title := "Unnecessary Communication Link titled " + commLink.Title + " at technical asset " + technicalAsset.Title + "" - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, types.LowImpact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, types.LowImpact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: types.LowImpact, Title: title, @@ -67,6 +66,6 @@ func createRisk(technicalAsset model.TechnicalAsset, commLink model.Communicatio DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + commLink.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + commLink.Id + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go b/pkg/security/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go index 76084df7..4c9d6a5b 100644 --- a/pkg/security/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go +++ b/pkg/security/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go @@ -3,20 +3,19 @@ package unnecessary_data_asset import ( "sort" - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func 
Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "unnecessary-data-asset", Title: "Unnecessary Data Asset", Description: "When a data asset is not processed or stored by any data assets and also not transferred by any " + @@ -43,8 +42,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) // first create them in memory - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: unusedDataAssetIDs := make(map[string]bool) @@ -78,12 +77,12 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(input *model.ParsedModel, unusedDataAssetID string) model.Risk { +func createRisk(input *types.ParsedModel, unusedDataAssetID string) types.Risk { unusedDataAsset := input.DataAssets[unusedDataAssetID] title := "Unnecessary Data Asset named " + unusedDataAsset.Title + "" - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, types.LowImpact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, types.LowImpact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: types.LowImpact, Title: title, @@ -91,6 +90,6 @@ func createRisk(input *model.ParsedModel, unusedDataAssetID string) model.Risk { DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{unusedDataAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + unusedDataAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + unusedDataAsset.Id return risk } diff --git a/pkg/security/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go b/pkg/security/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go index 
a4de4a95..3b18d8b2 100644 --- a/pkg/security/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go +++ b/pkg/security/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go @@ -3,20 +3,19 @@ package unnecessary_data_transfer import ( "sort" - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "unnecessary-data-transfer", Title: "Unnecessary Data Transfer", Description: "When a technical asset sends or receives data assets, which it neither processes or stores this is " + @@ -48,8 +47,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope { @@ -65,7 +64,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { } // incoming data flows commLinks := input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] - sort.Sort(model.ByTechnicalCommunicationLinkIdSort(commLinks)) + sort.Sort(types.ByTechnicalCommunicationLinkIdSort(commLinks)) for _, incomingDataFlow := range commLinks { targetAsset := input.TechnicalAssets[incomingDataFlow.SourceId] if targetAsset.Technology.IsUnnecessaryDataTolerated() { @@ -77,8 +76,8 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func checkRisksAgainstTechnicalAsset(input *model.ParsedModel, risks []model.Risk, technicalAsset model.TechnicalAsset, - 
dataFlow model.CommunicationLink, inverseDirection bool) []model.Risk { +func checkRisksAgainstTechnicalAsset(input *types.ParsedModel, risks []types.Risk, technicalAsset types.TechnicalAsset, + dataFlow types.CommunicationLink, inverseDirection bool) []types.Risk { for _, transferredDataAssetId := range dataFlow.DataAssetsSent { if !technicalAsset.ProcessesOrStoresDataAsset(transferredDataAssetId) { transferredDataAsset := input.DataAssets[transferredDataAssetId] @@ -116,7 +115,7 @@ func checkRisksAgainstTechnicalAsset(input *model.ParsedModel, risks []model.Ris return risks } -func isNewRisk(risks []model.Risk, risk model.Risk) bool { +func isNewRisk(risks []types.Risk, risk types.Risk) bool { for _, check := range risks { if check.SyntheticId == risk.SyntheticId { return false @@ -125,7 +124,7 @@ func isNewRisk(risks []model.Risk, risk model.Risk) bool { return true } -func createRisk(technicalAsset model.TechnicalAsset, dataAssetTransferred model.DataAsset, commPartnerAsset model.TechnicalAsset) model.Risk { +func createRisk(technicalAsset types.TechnicalAsset, dataAssetTransferred types.DataAsset, commPartnerAsset types.TechnicalAsset) types.Risk { moreRisky := dataAssetTransferred.Confidentiality == types.StrictlyConfidential || dataAssetTransferred.Integrity == types.MissionCritical impact := types.LowImpact @@ -135,9 +134,9 @@ func createRisk(technicalAsset model.TechnicalAsset, dataAssetTransferred model. title := "Unnecessary Data Transfer of " + dataAssetTransferred.Title + " data at " + technicalAsset.Title + " " + "from/to " + commPartnerAsset.Title + "" - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, Title: title, @@ -146,6 +145,6 @@ func createRisk(technicalAsset model.TechnicalAsset, dataAssetTransferred model. 
DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + dataAssetTransferred.Id + "@" + technicalAsset.Id + "@" + commPartnerAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + dataAssetTransferred.Id + "@" + technicalAsset.Id + "@" + commPartnerAsset.Id return risk } diff --git a/pkg/security/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go b/pkg/security/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go index 9ee98791..e84e7602 100644 --- a/pkg/security/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go +++ b/pkg/security/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go @@ -1,20 +1,19 @@ package unnecessary_technical_asset import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "unnecessary-technical-asset", Title: "Unnecessary Technical Asset", Description: "When a technical asset does not process or store any data assets, this is " + @@ -40,8 +39,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if len(technicalAsset.DataAssetsProcessed) == 0 && len(technicalAsset.DataAssetsStored) == 0 || @@ -52,11 +51,11 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks 
} -func createRisk(technicalAsset model.TechnicalAsset) model.Risk { +func createRisk(technicalAsset types.TechnicalAsset) types.Risk { title := "Unnecessary Technical Asset named " + technicalAsset.Title + "" - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, types.LowImpact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, types.LowImpact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: types.LowImpact, Title: title, @@ -64,6 +63,6 @@ func createRisk(technicalAsset model.TechnicalAsset) model.Risk { DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go b/pkg/security/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go index 31357a78..528c1637 100644 --- a/pkg/security/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go +++ b/pkg/security/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go @@ -1,20 +1,19 @@ package untrusted_deserialization import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "untrusted-deserialization", Title: "Untrusted Deserialization", Description: "When a technical asset accepts data in a specific serialized form (like Java or .NET serialization), " + @@ -45,8 +44,8 @@ func 
SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope { @@ -80,7 +79,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAsset, acrossTrustBoundary bool, commLinkTitle string) model.Risk { +func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset, acrossTrustBoundary bool, commLinkTitle string) types.Risk { title := "Untrusted Deserialization risk at " + technicalAsset.Title + "" impact := types.HighImpact likelihood := types.Likely @@ -93,9 +92,9 @@ func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAs technicalAsset.HighestAvailability(parsedModel) == types.MissionCritical { impact = types.VeryHighImpact } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(likelihood, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, Title: title, @@ -103,6 +102,6 @@ func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAs DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go b/pkg/security/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go index dabd4219..dabdfb9f 100644 --- 
a/pkg/security/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go +++ b/pkg/security/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go @@ -1,20 +1,19 @@ package wrong_communication_link_content import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "wrong-communication-link-content", Title: "Wrong Communication Link Content", Description: "When a communication link is defined as readonly, but does not receive any data asset, " + @@ -40,8 +39,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, techAsset := range input.TechnicalAssets { for _, commLink := range techAsset.CommunicationLinks { // check readonly consistency @@ -75,12 +74,12 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(technicalAsset model.TechnicalAsset, commLink model.CommunicationLink, reason string) model.Risk { +func createRisk(technicalAsset types.TechnicalAsset, commLink types.CommunicationLink, reason string) types.Risk { title := "Wrong Communication Link Content " + reason + " at " + technicalAsset.Title + " " + "regarding communication link " + commLink.Title + "" - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, types.LowImpact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, 
types.LowImpact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: types.LowImpact, Title: title, @@ -89,6 +88,6 @@ func createRisk(technicalAsset model.TechnicalAsset, commLink model.Communicatio DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{}, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + "@" + commLink.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id + "@" + commLink.Id return risk } diff --git a/pkg/security/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go b/pkg/security/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go index 2f7b3d76..bd68e8a2 100644 --- a/pkg/security/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go +++ b/pkg/security/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go @@ -1,20 +1,19 @@ package wrong_trust_boundary_content import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "wrong-trust-boundary-content", Title: "Wrong Trust Boundary Content", Description: "When a trust boundary of type " + types.NetworkPolicyNamespaceIsolation.String() + " contains " + @@ -39,8 +38,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, trustBoundary := range input.TrustBoundaries { if trustBoundary.Type == types.NetworkPolicyNamespaceIsolation { for _, techAssetID := range 
trustBoundary.TechnicalAssetsInside { @@ -54,11 +53,11 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(technicalAsset model.TechnicalAsset) model.Risk { +func createRisk(technicalAsset types.TechnicalAsset) types.Risk { title := "Wrong Trust Boundary Content (non-container asset inside container trust boundary) at " + technicalAsset.Title + "" - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.Unlikely, types.LowImpact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.Unlikely, types.LowImpact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: types.LowImpact, Title: title, @@ -66,6 +65,6 @@ func createRisk(technicalAsset model.TechnicalAsset) model.Risk { DataBreachProbability: types.Improbable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/built-in/xml-external-entity/xml-external-entity-rule.go b/pkg/security/risks/built-in/xml-external-entity/xml-external-entity-rule.go index 1dbee6ac..01356e2d 100644 --- a/pkg/security/risks/built-in/xml-external-entity/xml-external-entity-rule.go +++ b/pkg/security/risks/built-in/xml-external-entity/xml-external-entity-rule.go @@ -1,20 +1,19 @@ package xml_external_entity import ( - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() model.CustomRiskRule { - return model.CustomRiskRule{ +func Rule() types.RiskRule { + return types.RiskRule{ Category: Category, SupportedTags: SupportedTags, GenerateRisks: GenerateRisks, } } -func Category() model.RiskCategory { - return model.RiskCategory{ +func Category() types.RiskCategory { + return types.RiskCategory{ Id: "xml-external-entity", Title: "XML External Entity (XXE)", Description: "When a technical 
asset accepts data in XML format, XML External Entity (XXE) risks might arise.", @@ -43,8 +42,8 @@ func SupportedTags() []string { return []string{} } -func GenerateRisks(input *model.ParsedModel) []model.Risk { - risks := make([]model.Risk, 0) +func GenerateRisks(input *types.ParsedModel) []types.Risk { + risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope { @@ -59,7 +58,7 @@ func GenerateRisks(input *model.ParsedModel) []model.Risk { return risks } -func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAsset) model.Risk { +func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { title := "XML External Entity (XXE) risk at " + technicalAsset.Title + "" impact := types.MediumImpact if technicalAsset.HighestConfidentiality(parsedModel) == types.StrictlyConfidential || @@ -67,9 +66,9 @@ func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAs technicalAsset.HighestAvailability(parsedModel) == types.MissionCritical { impact = types.HighImpact } - risk := model.Risk{ - Category: Category(), - Severity: model.CalculateSeverity(types.VeryLikely, impact), + risk := types.Risk{ + CategoryId: Category().Id, + Severity: types.CalculateSeverity(types.VeryLikely, impact), ExploitationLikelihood: types.VeryLikely, ExploitationImpact: impact, Title: title, @@ -77,6 +76,6 @@ func createRisk(parsedModel *model.ParsedModel, technicalAsset model.TechnicalAs DataBreachProbability: types.Probable, DataBreachTechnicalAssetIDs: []string{technicalAsset.Id}, // TODO: use the same logic here as for SSRF rule, as XXE is also SSRF ;) } - risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id + risk.SyntheticId = risk.CategoryId + "@" + technicalAsset.Id return risk } diff --git a/pkg/security/risks/rules.go b/pkg/security/risks/risks.go similarity index 82% rename from 
pkg/security/risks/rules.go rename to pkg/security/risks/risks.go index 1177a6d6..3eb521ac 100644 --- a/pkg/security/risks/rules.go +++ b/pkg/security/risks/risks.go @@ -1,12 +1,6 @@ -/* -Copyright © 2023 NAME HERE -*/ package risks import ( - "github.com/threagile/threagile/pkg/model" - "github.com/threagile/threagile/pkg/run" - accidentalsecretleak "github.com/threagile/threagile/pkg/security/risks/built-in/accidental-secret-leak" codebackdooring "github.com/threagile/threagile/pkg/security/risks/built-in/code-backdooring" containerbaseimagebackdooring "github.com/threagile/threagile/pkg/security/risks/built-in/container-baseimage-backdooring" @@ -49,45 +43,11 @@ import ( wrongcommunicationlinkcontent "github.com/threagile/threagile/pkg/security/risks/built-in/wrong-communication-link-content" wrongtrustboundarycontent "github.com/threagile/threagile/pkg/security/risks/built-in/wrong-trust-boundary-content" xmlexternalentity "github.com/threagile/threagile/pkg/security/risks/built-in/xml-external-entity" + "github.com/threagile/threagile/pkg/security/types" ) -type progressReporter interface { - Println(a ...any) (n int, err error) - Fatalf(format string, v ...any) -} - -func LoadCustomRiskRules(pluginFiles []string, reporter progressReporter) map[string]*model.CustomRisk { - customRiskRules := make(map[string]*model.CustomRisk) - if len(pluginFiles) > 0 { - reporter.Println("Loading custom risk rules:", pluginFiles) - - for _, pluginFile := range pluginFiles { - if len(pluginFile) > 0 { - runner, loadError := new(run.Runner).Load(pluginFile) - if loadError != nil { - reporter.Fatalf("WARNING: Custom risk rule %q not loaded: %v\n", pluginFile, loadError) - } - - risk := new(model.CustomRisk) - runError := runner.Run(nil, &risk, "-get-info") - if runError != nil { - reporter.Fatalf("WARNING: Failed to get ID for custom risk rule %q: %v\n", pluginFile, runError) - } - - risk.Runner = runner - customRiskRules[risk.ID] = risk - reporter.Println("Custom risk rule 
loaded:", risk.ID) - } - } - - reporter.Println("Loaded custom risk rules:", customRiskRules) - } - - return customRiskRules -} - -func GetBuiltInRiskRules() []model.CustomRiskRule { - return []model.CustomRiskRule{ +func GetBuiltInRiskRules() []types.RiskRule { + return []types.RiskRule{ accidentalsecretleak.Rule(), codebackdooring.Rule(), containerbaseimagebackdooring.Rule(), diff --git a/pkg/security/types/authentication.go b/pkg/security/types/authentication.go index 6595347f..adba1e13 100644 --- a/pkg/security/types/authentication.go +++ b/pkg/security/types/authentication.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/authentication_test.go b/pkg/security/types/authentication_test.go index ac212ba3..d0849790 100644 --- a/pkg/security/types/authentication_test.go +++ b/pkg/security/types/authentication_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/authorization.go b/pkg/security/types/authorization.go index 80c595b3..3e376e88 100644 --- a/pkg/security/types/authorization.go +++ b/pkg/security/types/authorization.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/authorization_test.go b/pkg/security/types/authorization_test.go index dd60ad60..c4d274b4 100644 --- a/pkg/security/types/authorization_test.go +++ b/pkg/security/types/authorization_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/model/communication_link.go b/pkg/security/types/communication_link.go similarity index 80% rename from pkg/model/communication_link.go rename to pkg/security/types/communication_link.go index bcbb8853..9adae059 100644 --- a/pkg/model/communication_link.go +++ b/pkg/security/types/communication_link.go @@ -1,27 +1,34 @@ /* Copyright © 2023 NAME HERE */ -package model + +package types import ( "fmt" "sort" 
"github.com/threagile/threagile/pkg/colors" - "github.com/threagile/threagile/pkg/security/types" ) type CommunicationLink struct { - Id, SourceId, TargetId, Title, Description string - Protocol types.Protocol - Tags []string - VPN, IpFiltered, Readonly bool - Authentication types.Authentication - Authorization types.Authorization - Usage types.Usage - DataAssetsSent, DataAssetsReceived []string - DiagramTweakWeight int - DiagramTweakConstraint bool + Id string `json:"id,omitempty"` + SourceId string `json:"source_id,omitempty"` + TargetId string `json:"target_id,omitempty"` + Title string `json:"title,omitempty"` + Description string `json:"description,omitempty"` + Protocol Protocol `json:"protocol,omitempty"` + Tags []string `json:"tags,omitempty"` + VPN bool `json:"vpn,omitempty"` + IpFiltered bool `json:"ip_filtered,omitempty"` + Readonly bool `json:"readonly,omitempty"` + Authentication Authentication `json:"authentication,omitempty"` + Authorization Authorization `json:"authorization,omitempty"` + Usage Usage `json:"usage,omitempty"` + DataAssetsSent []string `json:"data_assets_sent,omitempty"` + DataAssetsReceived []string `json:"data_assets_received,omitempty"` + DiagramTweakWeight int `json:"diagram_tweak_weight,omitempty"` + DiagramTweakConstraint bool `json:"diagram_tweak_constraint,omitempty"` } func (what CommunicationLink) IsTaggedWithAny(tags ...string) bool { @@ -50,8 +57,8 @@ func (what CommunicationLink) IsAcrossTrustBoundaryNetworkOnly(parsedModel *Pars return trustBoundaryOfSourceAsset.Id != trustBoundaryOfTargetAsset.Id && trustBoundaryOfTargetAsset.Type.IsNetworkBoundary() } -func (what CommunicationLink) HighestConfidentiality(parsedModel *ParsedModel) types.Confidentiality { - highest := types.Public +func (what CommunicationLink) HighestConfidentiality(parsedModel *ParsedModel) Confidentiality { + highest := Public for _, dataId := range what.DataAssetsSent { dataAsset := parsedModel.DataAssets[dataId] if dataAsset.Confidentiality > 
highest { @@ -67,8 +74,8 @@ func (what CommunicationLink) HighestConfidentiality(parsedModel *ParsedModel) t return highest } -func (what CommunicationLink) HighestIntegrity(parsedModel *ParsedModel) types.Criticality { - highest := types.Archive +func (what CommunicationLink) HighestIntegrity(parsedModel *ParsedModel) Criticality { + highest := Archive for _, dataId := range what.DataAssetsSent { dataAsset := parsedModel.DataAssets[dataId] if dataAsset.Integrity > highest { @@ -84,8 +91,8 @@ func (what CommunicationLink) HighestIntegrity(parsedModel *ParsedModel) types.C return highest } -func (what CommunicationLink) HighestAvailability(parsedModel *ParsedModel) types.Criticality { - highest := types.Archive +func (what CommunicationLink) HighestAvailability(parsedModel *ParsedModel) Criticality { + highest := Archive for _, dataId := range what.DataAssetsSent { dataAsset := parsedModel.DataAssets[dataId] if dataAsset.Availability > highest { @@ -133,7 +140,7 @@ func (what CommunicationLink) DetermineArrowLineStyle() string { if len(what.DataAssetsSent) == 0 && len(what.DataAssetsReceived) == 0 { return "dotted" // dotted, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... 
} - if what.Usage == types.DevOps { + if what.Usage == DevOps { return "dashed" } return "solid" @@ -159,23 +166,23 @@ func (what CommunicationLink) DetermineLabelColor(parsedModel *ParsedModel) stri } else {*/ // check for red for _, sentDataAsset := range what.DataAssetsSent { - if parsedModel.DataAssets[sentDataAsset].Integrity == types.MissionCritical { + if parsedModel.DataAssets[sentDataAsset].Integrity == MissionCritical { return colors.Red } } for _, receivedDataAsset := range what.DataAssetsReceived { - if parsedModel.DataAssets[receivedDataAsset].Integrity == types.MissionCritical { + if parsedModel.DataAssets[receivedDataAsset].Integrity == MissionCritical { return colors.Red } } // check for amber for _, sentDataAsset := range what.DataAssetsSent { - if parsedModel.DataAssets[sentDataAsset].Integrity == types.Critical { + if parsedModel.DataAssets[sentDataAsset].Integrity == Critical { return colors.Amber } } for _, receivedDataAsset := range what.DataAssetsReceived { - if parsedModel.DataAssets[receivedDataAsset].Integrity == types.Critical { + if parsedModel.DataAssets[receivedDataAsset].Integrity == Critical { return colors.Amber } } @@ -189,10 +196,10 @@ func (what CommunicationLink) DetermineLabelColor(parsedModel *ParsedModel) stri func (what CommunicationLink) DetermineArrowColor(parsedModel *ParsedModel) string { // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here if len(what.DataAssetsSent) == 0 && len(what.DataAssetsReceived) == 0 || - what.Protocol == types.UnknownProtocol { + what.Protocol == UnknownProtocol { return colors.Pink // pink, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... 
} - if what.Usage == types.DevOps { + if what.Usage == DevOps { return colors.MiddleLightGray } else if what.VPN { return colors.DarkBlue @@ -201,23 +208,23 @@ func (what CommunicationLink) DetermineArrowColor(parsedModel *ParsedModel) stri } // check for red for _, sentDataAsset := range what.DataAssetsSent { - if parsedModel.DataAssets[sentDataAsset].Confidentiality == types.StrictlyConfidential { + if parsedModel.DataAssets[sentDataAsset].Confidentiality == StrictlyConfidential { return colors.Red } } for _, receivedDataAsset := range what.DataAssetsReceived { - if parsedModel.DataAssets[receivedDataAsset].Confidentiality == types.StrictlyConfidential { + if parsedModel.DataAssets[receivedDataAsset].Confidentiality == StrictlyConfidential { return colors.Red } } // check for amber for _, sentDataAsset := range what.DataAssetsSent { - if parsedModel.DataAssets[sentDataAsset].Confidentiality == types.Confidential { + if parsedModel.DataAssets[sentDataAsset].Confidentiality == Confidential { return colors.Amber } } for _, receivedDataAsset := range what.DataAssetsReceived { - if parsedModel.DataAssets[receivedDataAsset].Confidentiality == types.Confidential { + if parsedModel.DataAssets[receivedDataAsset].Confidentiality == Confidential { return colors.Amber } } diff --git a/pkg/security/types/confidentiality.go b/pkg/security/types/confidentiality.go index bc3ed6ef..0be71d3d 100644 --- a/pkg/security/types/confidentiality.go +++ b/pkg/security/types/confidentiality.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/confidentiality_test.go b/pkg/security/types/confidentiality_test.go index e13f92ff..2edacf6a 100644 --- a/pkg/security/types/confidentiality_test.go +++ b/pkg/security/types/confidentiality_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/criticality.go b/pkg/security/types/criticality.go index 7b8ef539..a3a1511a 100644 --- 
a/pkg/security/types/criticality.go +++ b/pkg/security/types/criticality.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/criticality_test.go b/pkg/security/types/criticality_test.go index 937d1c26..55912084 100644 --- a/pkg/security/types/criticality_test.go +++ b/pkg/security/types/criticality_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/custom-risk.go b/pkg/security/types/custom-risk.go new file mode 100644 index 00000000..beb66ffe --- /dev/null +++ b/pkg/security/types/custom-risk.go @@ -0,0 +1,27 @@ +package types + +import ( + "github.com/threagile/threagile/pkg/run" + "log" +) + +type CustomRisk struct { + ID string + Category RiskCategory + Tags []string + Runner *run.Runner +} + +func (r *CustomRisk) GenerateRisks(m *ParsedModel) []Risk { + if r.Runner == nil { + return nil + } + + risks := make([]Risk, 0) + runError := r.Runner.Run(m, &risks, "-generate-risks") + if runError != nil { + log.Fatalf("Failed to generate risks for custom risk rule %q: %v\n", r.Runner.Filename, runError) + } + + return risks +} diff --git a/pkg/model/data_asset.go b/pkg/security/types/data_asset.go similarity index 86% rename from pkg/model/data_asset.go rename to pkg/security/types/data_asset.go index eb468d85..899a0c63 100644 --- a/pkg/model/data_asset.go +++ b/pkg/security/types/data_asset.go @@ -1,27 +1,26 @@ /* Copyright © 2023 NAME HERE */ -package model + +package types import ( "sort" - - "github.com/threagile/threagile/pkg/security/types" ) type DataAsset struct { - Id string `yaml:"id" json:"id"` // TODO: tag here still required? - Title string `yaml:"title" json:"title"` // TODO: tag here still required? - Description string `yaml:"description" json:"description"` // TODO: tag here still required? 
- Usage types.Usage `yaml:"usage" json:"usage"` - Tags []string `yaml:"tags" json:"tags"` - Origin string `yaml:"origin" json:"origin"` - Owner string `yaml:"owner" json:"owner"` - Quantity types.Quantity `yaml:"quantity" json:"quantity"` - Confidentiality types.Confidentiality `yaml:"confidentiality" json:"confidentiality"` - Integrity types.Criticality `yaml:"integrity" json:"integrity"` - Availability types.Criticality `yaml:"availability" json:"availability"` - JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` + Id string `yaml:"id" json:"id"` // TODO: tag here still required? + Title string `yaml:"title" json:"title"` // TODO: tag here still required? + Description string `yaml:"description" json:"description"` // TODO: tag here still required? + Usage Usage `yaml:"usage" json:"usage"` + Tags []string `yaml:"tags" json:"tags"` + Origin string `yaml:"origin" json:"origin"` + Owner string `yaml:"owner" json:"owner"` + Quantity Quantity `yaml:"quantity" json:"quantity"` + Confidentiality Confidentiality `yaml:"confidentiality" json:"confidentiality"` + Integrity Criticality `yaml:"integrity" json:"integrity"` + Availability Criticality `yaml:"availability" json:"availability"` + JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` } func (what DataAsset) IsTaggedWithAny(tags ...string) bool { @@ -101,8 +100,8 @@ func (what DataAsset) IsDataBreachPotentialStillAtRisk(parsedModel *ParsedModel) return false } -func (what DataAsset) IdentifiedDataBreachProbability(parsedModel *ParsedModel) types.DataBreachProbability { - highestProbability := types.Improbable +func (what DataAsset) IdentifiedDataBreachProbability(parsedModel *ParsedModel) DataBreachProbability { + highestProbability := Improbable for _, risk := range AllRisks(parsedModel) { for _, techAsset := range risk.DataBreachTechnicalAssetIDs { if contains(parsedModel.TechnicalAssets[techAsset].DataAssetsProcessed, what.Id) 
{ @@ -122,8 +121,8 @@ func (what DataAsset) IdentifiedDataBreachProbability(parsedModel *ParsedModel) return highestProbability } -func (what DataAsset) IdentifiedDataBreachProbabilityStillAtRisk(parsedModel *ParsedModel) types.DataBreachProbability { - highestProbability := types.Improbable +func (what DataAsset) IdentifiedDataBreachProbabilityStillAtRisk(parsedModel *ParsedModel) DataBreachProbability { + highestProbability := Improbable for _, risk := range FilteredByStillAtRisk(parsedModel) { for _, techAsset := range risk.DataBreachTechnicalAssetIDs { if contains(parsedModel.TechnicalAssets[techAsset].DataAssetsProcessed, what.Id) { diff --git a/pkg/security/types/data_breach_probability.go b/pkg/security/types/data_breach_probability.go index 7adb6c50..bf007a49 100644 --- a/pkg/security/types/data_breach_probability.go +++ b/pkg/security/types/data_breach_probability.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/data_breach_probability_test.go b/pkg/security/types/data_breach_probability_test.go index b041c887..38feaaba 100644 --- a/pkg/security/types/data_breach_probability_test.go +++ b/pkg/security/types/data_breach_probability_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/data_format.go b/pkg/security/types/data_format.go index ad2571f6..121c3c90 100644 --- a/pkg/security/types/data_format.go +++ b/pkg/security/types/data_format.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/data_format_test.go b/pkg/security/types/data_format_test.go index 03a759c2..df2ade98 100644 --- a/pkg/security/types/data_format_test.go +++ b/pkg/security/types/data_format_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/encryption_style.go b/pkg/security/types/encryption_style.go index 01ce909a..257b1080 100644 --- 
a/pkg/security/types/encryption_style.go +++ b/pkg/security/types/encryption_style.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/encryption_style_test.go b/pkg/security/types/encryption_style_test.go index 80187fa5..36bdf560 100644 --- a/pkg/security/types/encryption_style_test.go +++ b/pkg/security/types/encryption_style_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/model/helpers.go b/pkg/security/types/helpers.go similarity index 98% rename from pkg/model/helpers.go rename to pkg/security/types/helpers.go index 4c21e5de..0090195e 100644 --- a/pkg/model/helpers.go +++ b/pkg/security/types/helpers.go @@ -1,7 +1,8 @@ /* Copyright © 2023 NAME HERE */ -package model + +package types import ( "regexp" diff --git a/pkg/security/types/model.go b/pkg/security/types/model.go new file mode 100644 index 00000000..48e983bd --- /dev/null +++ b/pkg/security/types/model.go @@ -0,0 +1,327 @@ +/* +Copyright © 2023 NAME HERE +*/ + +package types + +import ( + "errors" + "fmt" + "sort" + "time" + + "github.com/threagile/threagile/pkg/input" +) + +type ParsedModel struct { + Author input.Author `json:"author" yaml:"author"` + Title string `json:"title,omitempty" yaml:"title"` + Date time.Time `json:"date" yaml:"date"` + ManagementSummaryComment string `json:"management_summary_comment,omitempty" yaml:"management_summary_comment"` + BusinessOverview input.Overview `json:"business_overview" yaml:"business_overview"` + TechnicalOverview input.Overview `json:"technical_overview" yaml:"technical_overview"` + BusinessCriticality Criticality `json:"business_criticality,omitempty" yaml:"business_criticality"` + SecurityRequirements map[string]string `json:"security_requirements,omitempty" yaml:"security_requirements"` + Questions map[string]string `json:"questions,omitempty" yaml:"questions"` + AbuseCases map[string]string `json:"abuse_cases,omitempty" yaml:"abuse_cases"` + 
TagsAvailable []string `json:"tags_available,omitempty" yaml:"tags_available"` + DataAssets map[string]DataAsset `json:"data_assets,omitempty" yaml:"data_assets"` + TechnicalAssets map[string]TechnicalAsset `json:"technical_assets,omitempty" yaml:"technical_assets"` + TrustBoundaries map[string]TrustBoundary `json:"trust_boundaries,omitempty" yaml:"trust_boundaries"` + SharedRuntimes map[string]SharedRuntime `json:"shared_runtimes,omitempty" yaml:"shared_runtimes"` + IndividualRiskCategories map[string]RiskCategory `json:"individual_risk_categories,omitempty" yaml:"individual_risk_categories"` + BuiltInRiskCategories map[string]RiskCategory `json:"built_in_risk_categories,omitempty" yaml:"built_in_risk_categories"` + RiskTracking map[string]RiskTracking `json:"risk_tracking,omitempty" yaml:"risk_tracking"` + CommunicationLinks map[string]CommunicationLink `json:"communication_links,omitempty" yaml:"communication_links"` + AllSupportedTags map[string]bool `json:"all_supported_tags,omitempty" yaml:"all_supported_tags"` + DiagramTweakNodesep int `json:"diagram_tweak_nodesep,omitempty" yaml:"diagram_tweak_nodesep"` + DiagramTweakRanksep int `json:"diagram_tweak_ranksep,omitempty" yaml:"diagram_tweak_ranksep"` + DiagramTweakEdgeLayout string `json:"diagram_tweak_edge_layout,omitempty" yaml:"diagram_tweak_edge_layout"` + DiagramTweakSuppressEdgeLabels bool `json:"diagram_tweak_suppress_edge_labels,omitempty" yaml:"diagram_tweak_suppress_edge_labels"` + DiagramTweakLayoutLeftToRight bool `json:"diagram_tweak_layout_left_to_right,omitempty" yaml:"diagram_tweak_layout_left_to_right"` + DiagramTweakInvisibleConnectionsBetweenAssets []string `json:"diagram_tweak_invisible_connections_between_assets,omitempty" yaml:"diagram_tweak_invisible_connections_between_assets"` + DiagramTweakSameRankAssets []string `json:"diagram_tweak_same_rank_assets,omitempty" yaml:"diagram_tweak_same_rank_assets"` + + // TODO: those are generated based on items above and needs to be private + 
IncomingTechnicalCommunicationLinksMappedByTargetId map[string][]CommunicationLink `json:"incoming_technical_communication_links_mapped_by_target_id,omitempty" yaml:"incoming_technical_communication_links_mapped_by_target_id"` + DirectContainingTrustBoundaryMappedByTechnicalAssetId map[string]TrustBoundary `json:"direct_containing_trust_boundary_mapped_by_technical_asset_id,omitempty" yaml:"direct_containing_trust_boundary_mapped_by_technical_asset_id"` + GeneratedRisksByCategory map[string][]Risk `json:"generated_risks_by_category,omitempty" yaml:"generated_risks_by_category"` + GeneratedRisksBySyntheticId map[string]Risk `json:"generated_risks_by_synthetic_id,omitempty" yaml:"generated_risks_by_synthetic_id"` +} + +func (parsedModel *ParsedModel) CheckTags(tags []string, where string) ([]string, error) { + var tagsUsed = make([]string, 0) + if tags != nil { + tagsUsed = make([]string, len(tags)) + for i, parsedEntry := range tags { + referencedTag := fmt.Sprintf("%v", parsedEntry) + err := parsedModel.CheckTagExists(referencedTag, where) + if err != nil { + return nil, err + } + tagsUsed[i] = referencedTag + } + } + return tagsUsed, nil +} + +func (parsedModel *ParsedModel) CheckTagExists(referencedTag, where string) error { + if !contains(parsedModel.TagsAvailable, referencedTag) { + return errors.New("missing referenced tag in overall tag list at " + where + ": " + referencedTag) + } + return nil +} + +func (parsedModel *ParsedModel) CheckDataAssetTargetExists(referencedAsset, where string) error { + if _, ok := parsedModel.DataAssets[referencedAsset]; !ok { + return errors.New("missing referenced data asset target at " + where + ": " + referencedAsset) + } + return nil +} + +func (parsedModel *ParsedModel) CheckTrustBoundaryExists(referencedId, where string) error { + if _, ok := parsedModel.TrustBoundaries[referencedId]; !ok { + return errors.New("missing referenced trust boundary at " + where + ": " + referencedId) + } + return nil +} + +func (parsedModel 
*ParsedModel) CheckSharedRuntimeExists(referencedId, where string) error { + if _, ok := parsedModel.SharedRuntimes[referencedId]; !ok { + return errors.New("missing referenced shared runtime at " + where + ": " + referencedId) + } + return nil +} + +func (parsedModel *ParsedModel) CheckCommunicationLinkExists(referencedId, where string) error { + if _, ok := parsedModel.CommunicationLinks[referencedId]; !ok { + return errors.New("missing referenced communication link at " + where + ": " + referencedId) + } + return nil +} + +func (parsedModel *ParsedModel) CheckTechnicalAssetExists(referencedAsset, where string, onlyForTweak bool) error { + if _, ok := parsedModel.TechnicalAssets[referencedAsset]; !ok { + suffix := "" + if onlyForTweak { + suffix = " (only referenced in diagram tweak)" + } + return errors.New("missing referenced technical asset target" + suffix + " at " + where + ": " + referencedAsset) + } + return nil +} + +func (parsedModel *ParsedModel) CheckNestedTrustBoundariesExisting() error { + for _, trustBoundary := range parsedModel.TrustBoundaries { + for _, nestedId := range trustBoundary.TrustBoundariesNested { + if _, ok := parsedModel.TrustBoundaries[nestedId]; !ok { + return errors.New("missing referenced nested trust boundary: " + nestedId) + } + } + } + return nil +} + +func CalculateSeverity(likelihood RiskExploitationLikelihood, impact RiskExploitationImpact) RiskSeverity { + result := likelihood.Weight() * impact.Weight() + if result <= 1 { + return LowSeverity + } + if result <= 3 { + return MediumSeverity + } + if result <= 8 { + return ElevatedSeverity + } + if result <= 12 { + return HighSeverity + } + return CriticalSeverity +} + +func (parsedModel *ParsedModel) InScopeTechnicalAssets() []TechnicalAsset { + result := make([]TechnicalAsset, 0) + for _, asset := range parsedModel.TechnicalAssets { + if !asset.OutOfScope { + result = append(result, asset) + } + } + return result +} + +func (parsedModel *ParsedModel) 
SortedTechnicalAssetIDs() []string { + res := make([]string, 0) + for id := range parsedModel.TechnicalAssets { + res = append(res, id) + } + sort.Strings(res) + return res +} + +func (parsedModel *ParsedModel) TagsActuallyUsed() []string { + result := make([]string, 0) + for _, tag := range parsedModel.TagsAvailable { + if len(parsedModel.TechnicalAssetsTaggedWithAny(tag)) > 0 || + len(parsedModel.CommunicationLinksTaggedWithAny(tag)) > 0 || + len(parsedModel.DataAssetsTaggedWithAny(tag)) > 0 || + len(parsedModel.TrustBoundariesTaggedWithAny(tag)) > 0 || + len(parsedModel.SharedRuntimesTaggedWithAny(tag)) > 0 { + result = append(result, tag) + } + } + return result +} + +func (parsedModel *ParsedModel) TechnicalAssetsTaggedWithAny(tags ...string) []TechnicalAsset { + result := make([]TechnicalAsset, 0) + for _, candidate := range parsedModel.TechnicalAssets { + if candidate.IsTaggedWithAny(tags...) { + result = append(result, candidate) + } + } + return result +} + +func (parsedModel *ParsedModel) CommunicationLinksTaggedWithAny(tags ...string) []CommunicationLink { + result := make([]CommunicationLink, 0) + for _, asset := range parsedModel.TechnicalAssets { + for _, candidate := range asset.CommunicationLinks { + if candidate.IsTaggedWithAny(tags...) { + result = append(result, candidate) + } + } + } + return result +} + +func (parsedModel *ParsedModel) DataAssetsTaggedWithAny(tags ...string) []DataAsset { + result := make([]DataAsset, 0) + for _, candidate := range parsedModel.DataAssets { + if candidate.IsTaggedWithAny(tags...) { + result = append(result, candidate) + } + } + return result +} + +func (parsedModel *ParsedModel) TrustBoundariesTaggedWithAny(tags ...string) []TrustBoundary { + result := make([]TrustBoundary, 0) + for _, candidate := range parsedModel.TrustBoundaries { + if candidate.IsTaggedWithAny(tags...) 
{ + result = append(result, candidate) + } + } + return result +} + +func (parsedModel *ParsedModel) SharedRuntimesTaggedWithAny(tags ...string) []SharedRuntime { + result := make([]SharedRuntime, 0) + for _, candidate := range parsedModel.SharedRuntimes { + if candidate.IsTaggedWithAny(tags...) { + result = append(result, candidate) + } + } + return result +} + +func (parsedModel *ParsedModel) OutOfScopeTechnicalAssets() []TechnicalAsset { + assets := make([]TechnicalAsset, 0) + for _, asset := range parsedModel.TechnicalAssets { + if asset.OutOfScope { + assets = append(assets, asset) + } + } + sort.Sort(ByTechnicalAssetTitleSort(assets)) + return assets +} + +func (parsedModel *ParsedModel) RisksOfOnlySTRIDEInformationDisclosure(risksByCategory map[string][]Risk) map[string][]Risk { + result := make(map[string][]Risk) + for categoryId, categoryRisks := range risksByCategory { + for _, risk := range categoryRisks { + category := GetRiskCategory(parsedModel, categoryId) + if category.STRIDE == InformationDisclosure { + result[categoryId] = append(result[categoryId], risk) + } + } + } + return result +} + +func (parsedModel *ParsedModel) RisksOfOnlySTRIDEDenialOfService(risksByCategory map[string][]Risk) map[string][]Risk { + result := make(map[string][]Risk) + for categoryId, categoryRisks := range risksByCategory { + for _, risk := range categoryRisks { + category := GetRiskCategory(parsedModel, categoryId) + if category.STRIDE == DenialOfService { + result[categoryId] = append(result[categoryId], risk) + } + } + } + return result +} + +func (parsedModel *ParsedModel) RisksOfOnlySTRIDEElevationOfPrivilege(risksByCategory map[string][]Risk) map[string][]Risk { + result := make(map[string][]Risk) + for categoryId, categoryRisks := range risksByCategory { + for _, risk := range categoryRisks { + category := GetRiskCategory(parsedModel, categoryId) + if category.STRIDE == ElevationOfPrivilege { + result[categoryId] = append(result[categoryId], risk) + } + } + } + 
return result +} + +func (parsedModel *ParsedModel) RisksOfOnlyBusinessSide(risksByCategory map[string][]Risk) map[string][]Risk { + result := make(map[string][]Risk) + for categoryId, categoryRisks := range risksByCategory { + for _, risk := range categoryRisks { + category := GetRiskCategory(parsedModel, categoryId) + if category.Function == BusinessSide { + result[categoryId] = append(result[categoryId], risk) + } + } + } + return result +} + +func (parsedModel *ParsedModel) RisksOfOnlyArchitecture(risksByCategory map[string][]Risk) map[string][]Risk { + result := make(map[string][]Risk) + for categoryId, categoryRisks := range risksByCategory { + for _, risk := range categoryRisks { + category := GetRiskCategory(parsedModel, categoryId) + if category.Function == Architecture { + result[categoryId] = append(result[categoryId], risk) + } + } + } + return result +} + +func (parsedModel *ParsedModel) RisksOfOnlyDevelopment(risksByCategory map[string][]Risk) map[string][]Risk { + result := make(map[string][]Risk) + for categoryId, categoryRisks := range risksByCategory { + for _, risk := range categoryRisks { + category := GetRiskCategory(parsedModel, categoryId) + if category.Function == Development { + result[categoryId] = append(result[categoryId], risk) + } + } + } + return result +} + +func (parsedModel *ParsedModel) RisksOfOnlyOperation(risksByCategory map[string][]Risk) map[string][]Risk { + result := make(map[string][]Risk) + for categoryId, categoryRisks := range risksByCategory { + for _, risk := range categoryRisks { + category := GetRiskCategory(parsedModel, categoryId) + if category.Function == Operations { + result[categoryId] = append(result[categoryId], risk) + } + } + } + return result +} diff --git a/pkg/security/types/protocol.go b/pkg/security/types/protocol.go index 89e08f33..e4ebd9aa 100644 --- a/pkg/security/types/protocol.go +++ b/pkg/security/types/protocol.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff 
--git a/pkg/security/types/protocol_test.go b/pkg/security/types/protocol_test.go index 07981a7a..cd9cefd4 100644 --- a/pkg/security/types/protocol_test.go +++ b/pkg/security/types/protocol_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/quantity.go b/pkg/security/types/quantity.go index d410440c..d10ad872 100644 --- a/pkg/security/types/quantity.go +++ b/pkg/security/types/quantity.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/quantity_test.go b/pkg/security/types/quantity_test.go index 82fbf306..1ddfb52c 100644 --- a/pkg/security/types/quantity_test.go +++ b/pkg/security/types/quantity_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/risk-category.go b/pkg/security/types/risk-category.go new file mode 100644 index 00000000..232a7092 --- /dev/null +++ b/pkg/security/types/risk-category.go @@ -0,0 +1,21 @@ +package types + +type RiskCategory struct { + // TODO: refactor all "Id" here and elsewhere to "ID" + Id string `json:"id,omitempty"` + Title string `json:"title,omitempty"` + Description string `json:"description,omitempty"` + Impact string `json:"impact,omitempty"` + ASVS string `json:"asvs,omitempty"` + CheatSheet string `json:"cheat_sheet,omitempty"` + Action string `json:"action,omitempty"` + Mitigation string `json:"mitigation,omitempty"` + Check string `json:"check,omitempty"` + DetectionLogic string `json:"detection_logic,omitempty"` + RiskAssessment string `json:"risk_assessment,omitempty"` + FalsePositives string `json:"false_positives,omitempty"` + Function RiskFunction `json:"function,omitempty"` + STRIDE STRIDE `json:"stride,omitempty"` + ModelFailurePossibleReason bool `json:"model_failure_possible_reason,omitempty"` + CWE int `json:"cwe,omitempty"` +} diff --git a/pkg/security/types/risk-rule.go b/pkg/security/types/risk-rule.go new file mode 100644 index 
00000000..33fca296 --- /dev/null +++ b/pkg/security/types/risk-rule.go @@ -0,0 +1,7 @@ +package types + +type RiskRule struct { + Category func() RiskCategory + SupportedTags func() []string + GenerateRisks func(input *ParsedModel) []Risk +} diff --git a/pkg/security/types/risk-tracking.go b/pkg/security/types/risk-tracking.go new file mode 100644 index 00000000..526d462e --- /dev/null +++ b/pkg/security/types/risk-tracking.go @@ -0,0 +1,14 @@ +package types + +import ( + "time" +) + +type RiskTracking struct { + SyntheticRiskId string `json:"synthetic_risk_id,omitempty"` + Justification string `json:"justification,omitempty"` + Ticket string `json:"ticket,omitempty"` + CheckedBy string `json:"checked_by,omitempty"` + Status RiskStatus `json:"status,omitempty"` + Date time.Time `json:"date"` +} diff --git a/pkg/security/types/risk.go b/pkg/security/types/risk.go new file mode 100644 index 00000000..dba2f3c0 --- /dev/null +++ b/pkg/security/types/risk.go @@ -0,0 +1,41 @@ +package types + +type Risk struct { + CategoryId string `yaml:"category" json:"category"` // used for better JSON marshalling, is assigned in risk evaluation phase automatically + RiskStatus RiskStatus `yaml:"risk_status" json:"risk_status"` // used for better JSON marshalling, is assigned in risk evaluation phase automatically + Severity RiskSeverity `yaml:"severity" json:"severity"` + ExploitationLikelihood RiskExploitationLikelihood `yaml:"exploitation_likelihood" json:"exploitation_likelihood"` + ExploitationImpact RiskExploitationImpact `yaml:"exploitation_impact" json:"exploitation_impact"` + Title string `yaml:"title" json:"title"` + SyntheticId string `yaml:"synthetic_id" json:"synthetic_id"` + MostRelevantDataAssetId string `yaml:"most_relevant_data_asset" json:"most_relevant_data_asset"` + MostRelevantTechnicalAssetId string `yaml:"most_relevant_technical_asset" json:"most_relevant_technical_asset"` + MostRelevantTrustBoundaryId string `yaml:"most_relevant_trust_boundary" 
json:"most_relevant_trust_boundary"` + MostRelevantSharedRuntimeId string `yaml:"most_relevant_shared_runtime" json:"most_relevant_shared_runtime"` + MostRelevantCommunicationLinkId string `yaml:"most_relevant_communication_link" json:"most_relevant_communication_link"` + DataBreachProbability DataBreachProbability `yaml:"data_breach_probability" json:"data_breach_probability"` + DataBreachTechnicalAssetIDs []string `yaml:"data_breach_technical_assets" json:"data_breach_technical_assets"` + // TODO: refactor all "Id" here to "ID"? +} + +func (what Risk) GetRiskTracking(model *ParsedModel) RiskTracking { // TODO: Unify function naming regarding Get etc. + var result RiskTracking + if riskTracking, ok := model.RiskTracking[what.SyntheticId]; ok { + result = riskTracking + } + return result +} + +func (what Risk) GetRiskTrackingStatusDefaultingUnchecked(model *ParsedModel) RiskStatus { + if riskTracking, ok := model.RiskTracking[what.SyntheticId]; ok { + return riskTracking.Status + } + return Unchecked +} + +func (what Risk) IsRiskTracked(model *ParsedModel) bool { + if _, ok := model.RiskTracking[what.SyntheticId]; ok { + return true + } + return false +} diff --git a/pkg/security/types/risk_exploitation_impact.go b/pkg/security/types/risk_exploitation_impact.go index 3c01038b..c2a42cc9 100644 --- a/pkg/security/types/risk_exploitation_impact.go +++ b/pkg/security/types/risk_exploitation_impact.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/risk_exploitation_impact_test.go b/pkg/security/types/risk_exploitation_impact_test.go index ffb77f29..aa4a03cb 100644 --- a/pkg/security/types/risk_exploitation_impact_test.go +++ b/pkg/security/types/risk_exploitation_impact_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/risk_exploitation_likelihood.go b/pkg/security/types/risk_exploitation_likelihood.go index 2b0ef292..4357eb40 100644 --- 
a/pkg/security/types/risk_exploitation_likelihood.go +++ b/pkg/security/types/risk_exploitation_likelihood.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/risk_exploitation_likelihood_test.go b/pkg/security/types/risk_exploitation_likelihood_test.go index cb981818..0c5f060e 100644 --- a/pkg/security/types/risk_exploitation_likelihood_test.go +++ b/pkg/security/types/risk_exploitation_likelihood_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/risk_function.go b/pkg/security/types/risk_function.go index 853d1d02..55e5ba89 100644 --- a/pkg/security/types/risk_function.go +++ b/pkg/security/types/risk_function.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/risk_function_test.go b/pkg/security/types/risk_function_test.go index 375dae50..68703188 100644 --- a/pkg/security/types/risk_function_test.go +++ b/pkg/security/types/risk_function_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/risk_severity.go b/pkg/security/types/risk_severity.go index 6535d865..13d6396a 100644 --- a/pkg/security/types/risk_severity.go +++ b/pkg/security/types/risk_severity.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/risk_severity_test.go b/pkg/security/types/risk_severity_test.go index 1d50ac12..4a987eb8 100644 --- a/pkg/security/types/risk_severity_test.go +++ b/pkg/security/types/risk_severity_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/risk_status.go b/pkg/security/types/risk_status.go index 803f1043..012f156d 100644 --- a/pkg/security/types/risk_status.go +++ b/pkg/security/types/risk_status.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git 
a/pkg/security/types/risk_status_test.go b/pkg/security/types/risk_status_test.go index 2ef16816..989ef3f3 100644 --- a/pkg/security/types/risk_status_test.go +++ b/pkg/security/types/risk_status_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/model/risks.go b/pkg/security/types/risks.go similarity index 58% rename from pkg/model/risks.go rename to pkg/security/types/risks.go index 407a2798..72bb3569 100644 --- a/pkg/model/risks.go +++ b/pkg/security/types/risks.go @@ -1,116 +1,37 @@ /* Copyright © 2023 NAME HERE */ -package model + +package types import ( - "log" "sort" - "time" - - "github.com/threagile/threagile/pkg/run" - "github.com/threagile/threagile/pkg/security/types" ) -type RiskCategory struct { - // TODO: refactor all "Id" here and elsewhere to "ID" - Id string - Title string - Description string - Impact string - ASVS string - CheatSheet string - Action string - Mitigation string - Check string - DetectionLogic string - RiskAssessment string - FalsePositives string - Function types.RiskFunction - STRIDE types.STRIDE - ModelFailurePossibleReason bool - CWE int -} - -type BuiltInRisk struct { - Category func() RiskCategory - SupportedTags func() []string - GenerateRisks func(m *ParsedModel) []Risk -} - -type CustomRisk struct { - ID string - Category RiskCategory - Tags []string - Runner *run.Runner -} - -func (r *CustomRisk) GenerateRisks(m *ParsedModel) []Risk { - if r.Runner == nil { - return nil - } - - risks := make([]Risk, 0) - runError := r.Runner.Run(m, &risks, "-generate-risks") - if runError != nil { - log.Fatalf("Failed to generate risks for custom risk rule %q: %v\n", r.Runner.Filename, runError) - } - - return risks -} - -type RiskTracking struct { - SyntheticRiskId, Justification, Ticket, CheckedBy string - Status types.RiskStatus - Date time.Time -} - -type Risk struct { - Category RiskCategory `yaml:"-" json:"-"` // just for navigational convenience... 
not JSON marshalled - CategoryId string `yaml:"category" json:"category"` // used for better JSON marshalling, is assigned in risk evaluation phase automatically - RiskStatus types.RiskStatus `yaml:"risk_status" json:"risk_status"` // used for better JSON marshalling, is assigned in risk evaluation phase automatically - Severity types.RiskSeverity `yaml:"severity" json:"severity"` - ExploitationLikelihood types.RiskExploitationLikelihood `yaml:"exploitation_likelihood" json:"exploitation_likelihood"` - ExploitationImpact types.RiskExploitationImpact `yaml:"exploitation_impact" json:"exploitation_impact"` - Title string `yaml:"title" json:"title"` - SyntheticId string `yaml:"synthetic_id" json:"synthetic_id"` - MostRelevantDataAssetId string `yaml:"most_relevant_data_asset" json:"most_relevant_data_asset"` - MostRelevantTechnicalAssetId string `yaml:"most_relevant_technical_asset" json:"most_relevant_technical_asset"` - MostRelevantTrustBoundaryId string `yaml:"most_relevant_trust_boundary" json:"most_relevant_trust_boundary"` - MostRelevantSharedRuntimeId string `yaml:"most_relevant_shared_runtime" json:"most_relevant_shared_runtime"` - MostRelevantCommunicationLinkId string `yaml:"most_relevant_communication_link" json:"most_relevant_communication_link"` - DataBreachProbability types.DataBreachProbability `yaml:"data_breach_probability" json:"data_breach_probability"` - DataBreachTechnicalAssetIDs []string `yaml:"data_breach_technical_assets" json:"data_breach_technical_assets"` - // TODO: refactor all "Id" here to "ID"? -} - -func (what Risk) GetRiskTracking(model *ParsedModel) RiskTracking { // TODO: Unify function naming regarding Get etc. 
- var result RiskTracking - if riskTracking, ok := model.RiskTracking[what.SyntheticId]; ok { - result = riskTracking +func GetRiskCategory(parsedModel *ParsedModel, categoryID string) *RiskCategory { + if len(parsedModel.IndividualRiskCategories) > 0 { + custom, customOk := parsedModel.IndividualRiskCategories[categoryID] + if customOk { + return &custom + } } - return result -} -func (what Risk) GetRiskTrackingStatusDefaultingUnchecked(model *ParsedModel) types.RiskStatus { - if riskTracking, ok := model.RiskTracking[what.SyntheticId]; ok { - return riskTracking.Status + if len(parsedModel.BuiltInRiskCategories) > 0 { + builtIn, builtInOk := parsedModel.BuiltInRiskCategories[categoryID] + if builtInOk { + return &builtIn + } } - return types.Unchecked -} -func (what Risk) IsRiskTracked(model *ParsedModel) bool { - if _, ok := model.RiskTracking[what.SyntheticId]; ok { - return true - } - return false + return nil } func GetRiskCategories(parsedModel *ParsedModel, categoryIDs []string) []RiskCategory { categoryMap := make(map[string]RiskCategory) for _, categoryId := range categoryIDs { - if len(parsedModel.GeneratedRisksByCategory[categoryId]) > 0 { - categoryMap[categoryId] = parsedModel.GeneratedRisksByCategory[categoryId][0].Category + category := GetRiskCategory(parsedModel, categoryId) + if category != nil { + categoryMap[categoryId] = *category } } @@ -142,8 +63,8 @@ func ReduceToOnlyStillAtRisk(parsedModel *ParsedModel, risks []Risk) []Risk { return filteredRisks } -func HighestExploitationLikelihood(risks []Risk) types.RiskExploitationLikelihood { - result := types.Unlikely +func HighestExploitationLikelihood(risks []Risk) RiskExploitationLikelihood { + result := Unlikely for _, risk := range risks { if risk.ExploitationLikelihood > result { result = risk.ExploitationLikelihood @@ -152,8 +73,8 @@ func HighestExploitationLikelihood(risks []Risk) types.RiskExploitationLikelihoo return result } -func HighestExploitationImpact(risks []Risk) 
types.RiskExploitationImpact { - result := types.LowImpact +func HighestExploitationImpact(risks []Risk) RiskExploitationImpact { + result := LowImpact for _, risk := range risks { if risk.ExploitationImpact > result { result = risk.ExploitationImpact @@ -162,14 +83,8 @@ func HighestExploitationImpact(risks []Risk) types.RiskExploitationImpact { return result } -type CustomRiskRule struct { - Category func() RiskCategory - SupportedTags func() []string - GenerateRisks func(input *ParsedModel) []Risk -} - -func HighestSeverityStillAtRisk(model *ParsedModel, risks []Risk) types.RiskSeverity { - result := types.LowSeverity +func HighestSeverityStillAtRisk(model *ParsedModel, risks []Risk) RiskSeverity { + result := LowSeverity for _, risk := range risks { if risk.Severity > result && risk.GetRiskTrackingStatusDefaultingUnchecked(model).IsStillAtRisk() { result = risk.Severity @@ -256,18 +171,14 @@ func SortByDataBreachProbability(risks []Risk, parsedModel *ParsedModel) { }) } -type RiskRule interface { - Category() RiskCategory - GenerateRisks(parsedModel *ParsedModel) []Risk -} - // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: func SortedRiskCategories(parsedModel *ParsedModel) []RiskCategory { categoryMap := make(map[string]RiskCategory) - for categoryId, risks := range parsedModel.GeneratedRisksByCategory { - for _, risk := range risks { - categoryMap[categoryId] = risk.Category + for categoryId := range parsedModel.GeneratedRisksByCategory { + category := GetRiskCategory(parsedModel, categoryId) + if category != nil { + categoryMap[categoryId] = *category } } @@ -294,120 +205,134 @@ func CountRisks(risksByCategory map[string][]Risk) int { return result } -func RisksOfOnlySTRIDESpoofing(risksByCategory map[string][]Risk) map[string][]Risk { +func RisksOfOnlySTRIDESpoofing(parsedModel *ParsedModel, risksByCategory map[string][]Risk) map[string][]Risk { result := make(map[string][]Risk) - for _, risks := range 
risksByCategory { + for categoryId, risks := range risksByCategory { for _, risk := range risks { - if risk.Category.STRIDE == types.Spoofing { - result[risk.Category.Id] = append(result[risk.Category.Id], risk) + category := GetRiskCategory(parsedModel, categoryId) + if category != nil { + if category.STRIDE == Spoofing { + result[categoryId] = append(result[categoryId], risk) + } } } } return result } -func RisksOfOnlySTRIDETampering(risksByCategory map[string][]Risk) map[string][]Risk { +func RisksOfOnlySTRIDETampering(parsedModel *ParsedModel, risksByCategory map[string][]Risk) map[string][]Risk { result := make(map[string][]Risk) - for _, risks := range risksByCategory { + for categoryId, risks := range risksByCategory { for _, risk := range risks { - if risk.Category.STRIDE == types.Tampering { - result[risk.Category.Id] = append(result[risk.Category.Id], risk) + category := GetRiskCategory(parsedModel, categoryId) + if category != nil { + if category.STRIDE == Tampering { + result[categoryId] = append(result[categoryId], risk) + } } } } return result } -func RisksOfOnlySTRIDERepudiation(risksByCategory map[string][]Risk) map[string][]Risk { +func RisksOfOnlySTRIDERepudiation(parsedModel *ParsedModel, risksByCategory map[string][]Risk) map[string][]Risk { result := make(map[string][]Risk) - for _, risks := range risksByCategory { + for categoryId, risks := range risksByCategory { for _, risk := range risks { - if risk.Category.STRIDE == types.Repudiation { - result[risk.Category.Id] = append(result[risk.Category.Id], risk) + category := GetRiskCategory(parsedModel, categoryId) + if category.STRIDE == Repudiation { + result[categoryId] = append(result[categoryId], risk) } } } return result } -func RisksOfOnlySTRIDEInformationDisclosure(risksByCategory map[string][]Risk) map[string][]Risk { +func RisksOfOnlySTRIDEInformationDisclosure(parsedModel *ParsedModel, risksByCategory map[string][]Risk) map[string][]Risk { result := make(map[string][]Risk) - for _, 
risks := range risksByCategory { + for categoryId, risks := range risksByCategory { for _, risk := range risks { - if risk.Category.STRIDE == types.InformationDisclosure { - result[risk.Category.Id] = append(result[risk.Category.Id], risk) + category := GetRiskCategory(parsedModel, categoryId) + if category.STRIDE == InformationDisclosure { + result[categoryId] = append(result[categoryId], risk) } } } return result } -func RisksOfOnlySTRIDEDenialOfService(risksByCategory map[string][]Risk) map[string][]Risk { +func RisksOfOnlySTRIDEDenialOfService(parsedModel *ParsedModel, risksByCategory map[string][]Risk) map[string][]Risk { result := make(map[string][]Risk) - for _, risks := range risksByCategory { + for categoryId, risks := range risksByCategory { for _, risk := range risks { - if risk.Category.STRIDE == types.DenialOfService { - result[risk.Category.Id] = append(result[risk.Category.Id], risk) + category := GetRiskCategory(parsedModel, categoryId) + if category.STRIDE == DenialOfService { + result[categoryId] = append(result[categoryId], risk) } } } return result } -func RisksOfOnlySTRIDEElevationOfPrivilege(risksByCategory map[string][]Risk) map[string][]Risk { +func RisksOfOnlySTRIDEElevationOfPrivilege(parsedModel *ParsedModel, risksByCategory map[string][]Risk) map[string][]Risk { result := make(map[string][]Risk) - for _, risks := range risksByCategory { + for categoryId, risks := range risksByCategory { for _, risk := range risks { - if risk.Category.STRIDE == types.ElevationOfPrivilege { - result[risk.Category.Id] = append(result[risk.Category.Id], risk) + category := GetRiskCategory(parsedModel, categoryId) + if category.STRIDE == ElevationOfPrivilege { + result[categoryId] = append(result[categoryId], risk) } } } return result } -func RisksOfOnlyBusinessSide(risksByCategory map[string][]Risk) map[string][]Risk { +func RisksOfOnlyBusinessSide(parsedModel *ParsedModel, risksByCategory map[string][]Risk) map[string][]Risk { result := 
make(map[string][]Risk) - for _, risks := range risksByCategory { + for categoryId, risks := range risksByCategory { for _, risk := range risks { - if risk.Category.Function == types.BusinessSide { - result[risk.Category.Id] = append(result[risk.Category.Id], risk) + category := GetRiskCategory(parsedModel, categoryId) + if category.Function == BusinessSide { + result[categoryId] = append(result[categoryId], risk) } } } return result } -func RisksOfOnlyArchitecture(risksByCategory map[string][]Risk) map[string][]Risk { +func RisksOfOnlyArchitecture(parsedModel *ParsedModel, risksByCategory map[string][]Risk) map[string][]Risk { result := make(map[string][]Risk) - for _, risks := range risksByCategory { + for categoryId, risks := range risksByCategory { for _, risk := range risks { - if risk.Category.Function == types.Architecture { - result[risk.Category.Id] = append(result[risk.Category.Id], risk) + category := GetRiskCategory(parsedModel, categoryId) + if category.Function == Architecture { + result[categoryId] = append(result[categoryId], risk) } } } return result } -func RisksOfOnlyDevelopment(risksByCategory map[string][]Risk) map[string][]Risk { +func RisksOfOnlyDevelopment(parsedModel *ParsedModel, risksByCategory map[string][]Risk) map[string][]Risk { result := make(map[string][]Risk) - for _, risks := range risksByCategory { + for categoryId, risks := range risksByCategory { for _, risk := range risks { - if risk.Category.Function == types.Development { - result[risk.Category.Id] = append(result[risk.Category.Id], risk) + category := GetRiskCategory(parsedModel, categoryId) + if category.Function == Development { + result[categoryId] = append(result[categoryId], risk) } } } return result } -func RisksOfOnlyOperation(risksByCategory map[string][]Risk) map[string][]Risk { +func RisksOfOnlyOperation(parsedModel *ParsedModel, risksByCategory map[string][]Risk) map[string][]Risk { result := make(map[string][]Risk) - for _, risks := range risksByCategory { + for 
categoryId, risks := range risksByCategory { for _, risk := range risks { - if risk.Category.Function == types.Operations { - result[risk.Category.Id] = append(result[risk.Category.Id], risk) + category := GetRiskCategory(parsedModel, categoryId) + if category.Function == Operations { + result[categoryId] = append(result[categoryId], risk) } } } @@ -416,12 +341,12 @@ func RisksOfOnlyOperation(risksByCategory map[string][]Risk) map[string][]Risk { func CategoriesOfOnlyRisksStillAtRisk(parsedModel *ParsedModel, risksByCategory map[string][]Risk) []string { categories := make(map[string]struct{}) // Go's trick of unique elements is a map - for _, risks := range risksByCategory { + for categoryId, risks := range risksByCategory { for _, risk := range risks { if !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { continue } - categories[risk.Category.Id] = struct{}{} + categories[categoryId] = struct{}{} } } // return as slice (of now unique values) @@ -430,13 +355,13 @@ func CategoriesOfOnlyRisksStillAtRisk(parsedModel *ParsedModel, risksByCategory func CategoriesOfOnlyCriticalRisks(parsedModel *ParsedModel, risksByCategory map[string][]Risk, initialRisks bool) []string { categories := make(map[string]struct{}) // Go's trick of unique elements is a map - for _, risks := range risksByCategory { + for categoryId, risks := range risksByCategory { for _, risk := range risks { if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { continue } - if risk.Severity == types.CriticalSeverity { - categories[risk.Category.Id] = struct{}{} + if risk.Severity == CriticalSeverity { + categories[categoryId] = struct{}{} } } } @@ -446,17 +371,17 @@ func CategoriesOfOnlyCriticalRisks(parsedModel *ParsedModel, risksByCategory map func CategoriesOfOnlyHighRisks(parsedModel *ParsedModel, risksByCategory map[string][]Risk, initialRisks bool) []string { categories := make(map[string]struct{}) // Go's trick of unique elements 
is a map - for _, risks := range risksByCategory { + for categoryId, risks := range risksByCategory { for _, risk := range risks { if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { continue } - highest := HighestSeverity(parsedModel.GeneratedRisksByCategory[risk.Category.Id]) + highest := HighestSeverity(parsedModel.GeneratedRisksByCategory[categoryId]) if !initialRisks { - highest = HighestSeverityStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[risk.Category.Id]) + highest = HighestSeverityStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[categoryId]) } - if risk.Severity == types.HighSeverity && highest < types.CriticalSeverity { - categories[risk.Category.Id] = struct{}{} + if risk.Severity == HighSeverity && highest < CriticalSeverity { + categories[categoryId] = struct{}{} } } } @@ -466,17 +391,17 @@ func CategoriesOfOnlyHighRisks(parsedModel *ParsedModel, risksByCategory map[str func CategoriesOfOnlyElevatedRisks(parsedModel *ParsedModel, risksByCategory map[string][]Risk, initialRisks bool) []string { categories := make(map[string]struct{}) // Go's trick of unique elements is a map - for _, risks := range risksByCategory { + for categoryId, risks := range risksByCategory { for _, risk := range risks { if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { continue } - highest := HighestSeverity(parsedModel.GeneratedRisksByCategory[risk.Category.Id]) + highest := HighestSeverity(parsedModel.GeneratedRisksByCategory[categoryId]) if !initialRisks { - highest = HighestSeverityStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[risk.Category.Id]) + highest = HighestSeverityStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[categoryId]) } - if risk.Severity == types.ElevatedSeverity && highest < types.HighSeverity { - categories[risk.Category.Id] = struct{}{} + if risk.Severity == ElevatedSeverity && highest < HighSeverity { + 
categories[categoryId] = struct{}{} } } } @@ -486,17 +411,17 @@ func CategoriesOfOnlyElevatedRisks(parsedModel *ParsedModel, risksByCategory map func CategoriesOfOnlyMediumRisks(parsedModel *ParsedModel, risksByCategory map[string][]Risk, initialRisks bool) []string { categories := make(map[string]struct{}) // Go's trick of unique elements is a map - for _, risks := range risksByCategory { + for categoryId, risks := range risksByCategory { for _, risk := range risks { if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { continue } - highest := HighestSeverity(parsedModel.GeneratedRisksByCategory[risk.Category.Id]) + highest := HighestSeverity(parsedModel.GeneratedRisksByCategory[categoryId]) if !initialRisks { - highest = HighestSeverityStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[risk.Category.Id]) + highest = HighestSeverityStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[categoryId]) } - if risk.Severity == types.MediumSeverity && highest < types.ElevatedSeverity { - categories[risk.Category.Id] = struct{}{} + if risk.Severity == MediumSeverity && highest < ElevatedSeverity { + categories[categoryId] = struct{}{} } } } @@ -506,17 +431,17 @@ func CategoriesOfOnlyMediumRisks(parsedModel *ParsedModel, risksByCategory map[s func CategoriesOfOnlyLowRisks(parsedModel *ParsedModel, risksByCategory map[string][]Risk, initialRisks bool) []string { categories := make(map[string]struct{}) // Go's trick of unique elements is a map - for _, risks := range risksByCategory { + for categoryId, risks := range risksByCategory { for _, risk := range risks { if !initialRisks && !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { continue } - highest := HighestSeverity(parsedModel.GeneratedRisksByCategory[risk.Category.Id]) + highest := HighestSeverity(parsedModel.GeneratedRisksByCategory[categoryId]) if !initialRisks { - highest = HighestSeverityStillAtRisk(parsedModel, 
parsedModel.GeneratedRisksByCategory[risk.Category.Id]) + highest = HighestSeverityStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory[categoryId]) } - if risk.Severity == types.LowSeverity && highest < types.MediumSeverity { - categories[risk.Category.Id] = struct{}{} + if risk.Severity == LowSeverity && highest < MediumSeverity { + categories[categoryId] = struct{}{} } } } @@ -524,8 +449,8 @@ func CategoriesOfOnlyLowRisks(parsedModel *ParsedModel, risksByCategory map[stri return keysAsSlice(categories) } -func HighestSeverity(risks []Risk) types.RiskSeverity { - result := types.LowSeverity +func HighestSeverity(risks []Risk) RiskSeverity { + result := LowSeverity for _, risk := range risks { if risk.Severity > result { result = risk.Severity @@ -544,9 +469,10 @@ func keysAsSlice(categories map[string]struct{}) []string { func FilteredByOnlyBusinessSide(parsedModel *ParsedModel) []Risk { filteredRisks := make([]Risk, 0) - for _, risks := range parsedModel.GeneratedRisksByCategory { + for categoryId, risks := range parsedModel.GeneratedRisksByCategory { for _, risk := range risks { - if risk.Category.Function == types.BusinessSide { + category := GetRiskCategory(parsedModel, categoryId) + if category.Function == BusinessSide { filteredRisks = append(filteredRisks, risk) } } @@ -556,9 +482,10 @@ func FilteredByOnlyBusinessSide(parsedModel *ParsedModel) []Risk { func FilteredByOnlyArchitecture(parsedModel *ParsedModel) []Risk { filteredRisks := make([]Risk, 0) - for _, risks := range parsedModel.GeneratedRisksByCategory { + for categoryId, risks := range parsedModel.GeneratedRisksByCategory { for _, risk := range risks { - if risk.Category.Function == types.Architecture { + category := GetRiskCategory(parsedModel, categoryId) + if category.Function == Architecture { filteredRisks = append(filteredRisks, risk) } } @@ -568,9 +495,10 @@ func FilteredByOnlyArchitecture(parsedModel *ParsedModel) []Risk { func FilteredByOnlyDevelopment(parsedModel *ParsedModel) 
[]Risk { filteredRisks := make([]Risk, 0) - for _, risks := range parsedModel.GeneratedRisksByCategory { + for categoryId, risks := range parsedModel.GeneratedRisksByCategory { for _, risk := range risks { - if risk.Category.Function == types.Development { + category := GetRiskCategory(parsedModel, categoryId) + if category.Function == Development { filteredRisks = append(filteredRisks, risk) } } @@ -580,9 +508,10 @@ func FilteredByOnlyDevelopment(parsedModel *ParsedModel) []Risk { func FilteredByOnlyOperation(parsedModel *ParsedModel) []Risk { filteredRisks := make([]Risk, 0) - for _, risks := range parsedModel.GeneratedRisksByCategory { + for categoryId, risks := range parsedModel.GeneratedRisksByCategory { for _, risk := range risks { - if risk.Category.Function == types.Operations { + category := GetRiskCategory(parsedModel, categoryId) + if category.Function == Operations { filteredRisks = append(filteredRisks, risk) } } @@ -594,7 +523,7 @@ func FilteredByOnlyCriticalRisks(parsedModel *ParsedModel) []Risk { filteredRisks := make([]Risk, 0) for _, risks := range parsedModel.GeneratedRisksByCategory { for _, risk := range risks { - if risk.Severity == types.CriticalSeverity { + if risk.Severity == CriticalSeverity { filteredRisks = append(filteredRisks, risk) } } @@ -606,7 +535,7 @@ func FilteredByOnlyHighRisks(parsedModel *ParsedModel) []Risk { filteredRisks := make([]Risk, 0) for _, risks := range parsedModel.GeneratedRisksByCategory { for _, risk := range risks { - if risk.Severity == types.HighSeverity { + if risk.Severity == HighSeverity { filteredRisks = append(filteredRisks, risk) } } @@ -618,7 +547,7 @@ func FilteredByOnlyElevatedRisks(parsedModel *ParsedModel) []Risk { filteredRisks := make([]Risk, 0) for _, risks := range parsedModel.GeneratedRisksByCategory { for _, risk := range risks { - if risk.Severity == types.ElevatedSeverity { + if risk.Severity == ElevatedSeverity { filteredRisks = append(filteredRisks, risk) } } @@ -630,7 +559,7 @@ func 
FilteredByOnlyMediumRisks(parsedModel *ParsedModel) []Risk { filteredRisks := make([]Risk, 0) for _, risks := range parsedModel.GeneratedRisksByCategory { for _, risk := range risks { - if risk.Severity == types.MediumSeverity { + if risk.Severity == MediumSeverity { filteredRisks = append(filteredRisks, risk) } } @@ -642,7 +571,7 @@ func FilteredByOnlyLowRisks(parsedModel *ParsedModel) []Risk { filteredRisks := make([]Risk, 0) for _, risks := range parsedModel.GeneratedRisksByCategory { for _, risk := range risks { - if risk.Severity == types.LowSeverity { + if risk.Severity == LowSeverity { filteredRisks = append(filteredRisks, risk) } } @@ -650,13 +579,12 @@ func FilteredByOnlyLowRisks(parsedModel *ParsedModel) []Risk { return filteredRisks } -func FilterByModelFailures(risksByCat map[string][]Risk) map[string][]Risk { +func FilterByModelFailures(parsedModel *ParsedModel, risksByCat map[string][]Risk) map[string][]Risk { result := make(map[string][]Risk) for categoryId, risks := range risksByCat { - for _, risk := range risks { - if risk.Category.ModelFailurePossibleReason { - result[categoryId] = risks - } + category := GetRiskCategory(parsedModel, categoryId) + if category.ModelFailurePossibleReason { + result[categoryId] = risks } } @@ -683,7 +611,7 @@ func FilteredByRiskTrackingUnchecked(parsedModel *ParsedModel) []Risk { filteredRisks := make([]Risk, 0) for _, risks := range parsedModel.GeneratedRisksByCategory { for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.Unchecked { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == Unchecked { filteredRisks = append(filteredRisks, risk) } } @@ -695,7 +623,7 @@ func FilteredByRiskTrackingInDiscussion(parsedModel *ParsedModel) []Risk { filteredRisks := make([]Risk, 0) for _, risks := range parsedModel.GeneratedRisksByCategory { for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.InDiscussion { + if 
risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == InDiscussion { filteredRisks = append(filteredRisks, risk) } } @@ -707,7 +635,7 @@ func FilteredByRiskTrackingAccepted(parsedModel *ParsedModel) []Risk { filteredRisks := make([]Risk, 0) for _, risks := range parsedModel.GeneratedRisksByCategory { for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.Accepted { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == Accepted { filteredRisks = append(filteredRisks, risk) } } @@ -719,7 +647,7 @@ func FilteredByRiskTrackingInProgress(parsedModel *ParsedModel) []Risk { filteredRisks := make([]Risk, 0) for _, risks := range parsedModel.GeneratedRisksByCategory { for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.InProgress { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == InProgress { filteredRisks = append(filteredRisks, risk) } } @@ -731,7 +659,7 @@ func FilteredByRiskTrackingMitigated(parsedModel *ParsedModel) []Risk { filteredRisks := make([]Risk, 0) for _, risks := range parsedModel.GeneratedRisksByCategory { for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.Mitigated { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == Mitigated { filteredRisks = append(filteredRisks, risk) } } @@ -743,7 +671,7 @@ func FilteredByRiskTrackingFalsePositive(parsedModel *ParsedModel) []Risk { filteredRisks := make([]Risk, 0) for _, risks := range parsedModel.GeneratedRisksByCategory { for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.FalsePositive { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == FalsePositive { filteredRisks = append(filteredRisks, risk) } } @@ -754,7 +682,7 @@ func FilteredByRiskTrackingFalsePositive(parsedModel *ParsedModel) []Risk { func ReduceToOnlyHighRisk(risks []Risk) []Risk { 
filteredRisks := make([]Risk, 0) for _, risk := range risks { - if risk.Severity == types.HighSeverity { + if risk.Severity == HighSeverity { filteredRisks = append(filteredRisks, risk) } } @@ -764,7 +692,7 @@ func ReduceToOnlyHighRisk(risks []Risk) []Risk { func ReduceToOnlyMediumRisk(risks []Risk) []Risk { filteredRisks := make([]Risk, 0) for _, risk := range risks { - if risk.Severity == types.MediumSeverity { + if risk.Severity == MediumSeverity { filteredRisks = append(filteredRisks, risk) } } @@ -774,7 +702,7 @@ func ReduceToOnlyMediumRisk(risks []Risk) []Risk { func ReduceToOnlyLowRisk(risks []Risk) []Risk { filteredRisks := make([]Risk, 0) for _, risk := range risks { - if risk.Severity == types.LowSeverity { + if risk.Severity == LowSeverity { filteredRisks = append(filteredRisks, risk) } } @@ -784,7 +712,7 @@ func ReduceToOnlyLowRisk(risks []Risk) []Risk { func ReduceToOnlyRiskTrackingUnchecked(parsedModel *ParsedModel, risks []Risk) []Risk { filteredRisks := make([]Risk, 0) for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.Unchecked { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == Unchecked { filteredRisks = append(filteredRisks, risk) } } @@ -794,7 +722,7 @@ func ReduceToOnlyRiskTrackingUnchecked(parsedModel *ParsedModel, risks []Risk) [ func ReduceToOnlyRiskTrackingInDiscussion(parsedModel *ParsedModel, risks []Risk) []Risk { filteredRisks := make([]Risk, 0) for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.InDiscussion { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == InDiscussion { filteredRisks = append(filteredRisks, risk) } } @@ -804,7 +732,7 @@ func ReduceToOnlyRiskTrackingInDiscussion(parsedModel *ParsedModel, risks []Risk func ReduceToOnlyRiskTrackingAccepted(parsedModel *ParsedModel, risks []Risk) []Risk { filteredRisks := make([]Risk, 0) for _, risk := range risks { - if 
risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.Accepted { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == Accepted { filteredRisks = append(filteredRisks, risk) } } @@ -814,7 +742,7 @@ func ReduceToOnlyRiskTrackingAccepted(parsedModel *ParsedModel, risks []Risk) [] func ReduceToOnlyRiskTrackingInProgress(parsedModel *ParsedModel, risks []Risk) []Risk { filteredRisks := make([]Risk, 0) for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.InProgress { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == InProgress { filteredRisks = append(filteredRisks, risk) } } @@ -824,7 +752,7 @@ func ReduceToOnlyRiskTrackingInProgress(parsedModel *ParsedModel, risks []Risk) func ReduceToOnlyRiskTrackingMitigated(parsedModel *ParsedModel, risks []Risk) []Risk { filteredRisks := make([]Risk, 0) for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.Mitigated { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == Mitigated { filteredRisks = append(filteredRisks, risk) } } @@ -834,7 +762,7 @@ func ReduceToOnlyRiskTrackingMitigated(parsedModel *ParsedModel, risks []Risk) [ func ReduceToOnlyRiskTrackingFalsePositive(parsedModel *ParsedModel, risks []Risk) []Risk { filteredRisks := make([]Risk, 0) for _, risk := range risks { - if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == types.FalsePositive { + if risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel) == FalsePositive { filteredRisks = append(filteredRisks, risk) } } @@ -856,41 +784,41 @@ func FilteredByStillAtRisk(parsedModel *ParsedModel) []Risk { func OverallRiskStatistics(parsedModel *ParsedModel) RiskStatistics { result := RiskStatistics{} result.Risks = make(map[string]map[string]int) - result.Risks[types.CriticalSeverity.String()] = make(map[string]int) - result.Risks[types.CriticalSeverity.String()][types.Unchecked.String()] = 0 - 
result.Risks[types.CriticalSeverity.String()][types.InDiscussion.String()] = 0 - result.Risks[types.CriticalSeverity.String()][types.Accepted.String()] = 0 - result.Risks[types.CriticalSeverity.String()][types.InProgress.String()] = 0 - result.Risks[types.CriticalSeverity.String()][types.Mitigated.String()] = 0 - result.Risks[types.CriticalSeverity.String()][types.FalsePositive.String()] = 0 - result.Risks[types.HighSeverity.String()] = make(map[string]int) - result.Risks[types.HighSeverity.String()][types.Unchecked.String()] = 0 - result.Risks[types.HighSeverity.String()][types.InDiscussion.String()] = 0 - result.Risks[types.HighSeverity.String()][types.Accepted.String()] = 0 - result.Risks[types.HighSeverity.String()][types.InProgress.String()] = 0 - result.Risks[types.HighSeverity.String()][types.Mitigated.String()] = 0 - result.Risks[types.HighSeverity.String()][types.FalsePositive.String()] = 0 - result.Risks[types.ElevatedSeverity.String()] = make(map[string]int) - result.Risks[types.ElevatedSeverity.String()][types.Unchecked.String()] = 0 - result.Risks[types.ElevatedSeverity.String()][types.InDiscussion.String()] = 0 - result.Risks[types.ElevatedSeverity.String()][types.Accepted.String()] = 0 - result.Risks[types.ElevatedSeverity.String()][types.InProgress.String()] = 0 - result.Risks[types.ElevatedSeverity.String()][types.Mitigated.String()] = 0 - result.Risks[types.ElevatedSeverity.String()][types.FalsePositive.String()] = 0 - result.Risks[types.MediumSeverity.String()] = make(map[string]int) - result.Risks[types.MediumSeverity.String()][types.Unchecked.String()] = 0 - result.Risks[types.MediumSeverity.String()][types.InDiscussion.String()] = 0 - result.Risks[types.MediumSeverity.String()][types.Accepted.String()] = 0 - result.Risks[types.MediumSeverity.String()][types.InProgress.String()] = 0 - result.Risks[types.MediumSeverity.String()][types.Mitigated.String()] = 0 - result.Risks[types.MediumSeverity.String()][types.FalsePositive.String()] = 0 - 
result.Risks[types.LowSeverity.String()] = make(map[string]int) - result.Risks[types.LowSeverity.String()][types.Unchecked.String()] = 0 - result.Risks[types.LowSeverity.String()][types.InDiscussion.String()] = 0 - result.Risks[types.LowSeverity.String()][types.Accepted.String()] = 0 - result.Risks[types.LowSeverity.String()][types.InProgress.String()] = 0 - result.Risks[types.LowSeverity.String()][types.Mitigated.String()] = 0 - result.Risks[types.LowSeverity.String()][types.FalsePositive.String()] = 0 + result.Risks[CriticalSeverity.String()] = make(map[string]int) + result.Risks[CriticalSeverity.String()][Unchecked.String()] = 0 + result.Risks[CriticalSeverity.String()][InDiscussion.String()] = 0 + result.Risks[CriticalSeverity.String()][Accepted.String()] = 0 + result.Risks[CriticalSeverity.String()][InProgress.String()] = 0 + result.Risks[CriticalSeverity.String()][Mitigated.String()] = 0 + result.Risks[CriticalSeverity.String()][FalsePositive.String()] = 0 + result.Risks[HighSeverity.String()] = make(map[string]int) + result.Risks[HighSeverity.String()][Unchecked.String()] = 0 + result.Risks[HighSeverity.String()][InDiscussion.String()] = 0 + result.Risks[HighSeverity.String()][Accepted.String()] = 0 + result.Risks[HighSeverity.String()][InProgress.String()] = 0 + result.Risks[HighSeverity.String()][Mitigated.String()] = 0 + result.Risks[HighSeverity.String()][FalsePositive.String()] = 0 + result.Risks[ElevatedSeverity.String()] = make(map[string]int) + result.Risks[ElevatedSeverity.String()][Unchecked.String()] = 0 + result.Risks[ElevatedSeverity.String()][InDiscussion.String()] = 0 + result.Risks[ElevatedSeverity.String()][Accepted.String()] = 0 + result.Risks[ElevatedSeverity.String()][InProgress.String()] = 0 + result.Risks[ElevatedSeverity.String()][Mitigated.String()] = 0 + result.Risks[ElevatedSeverity.String()][FalsePositive.String()] = 0 + result.Risks[MediumSeverity.String()] = make(map[string]int) + 
result.Risks[MediumSeverity.String()][Unchecked.String()] = 0 + result.Risks[MediumSeverity.String()][InDiscussion.String()] = 0 + result.Risks[MediumSeverity.String()][Accepted.String()] = 0 + result.Risks[MediumSeverity.String()][InProgress.String()] = 0 + result.Risks[MediumSeverity.String()][Mitigated.String()] = 0 + result.Risks[MediumSeverity.String()][FalsePositive.String()] = 0 + result.Risks[LowSeverity.String()] = make(map[string]int) + result.Risks[LowSeverity.String()][Unchecked.String()] = 0 + result.Risks[LowSeverity.String()][InDiscussion.String()] = 0 + result.Risks[LowSeverity.String()][Accepted.String()] = 0 + result.Risks[LowSeverity.String()][InProgress.String()] = 0 + result.Risks[LowSeverity.String()][Mitigated.String()] = 0 + result.Risks[LowSeverity.String()][FalsePositive.String()] = 0 for _, risks := range parsedModel.GeneratedRisksByCategory { for _, risk := range risks { result.Risks[risk.Severity.String()][risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).String()]++ diff --git a/pkg/security/types/rules.go b/pkg/security/types/rules.go new file mode 100644 index 00000000..aeb3608f --- /dev/null +++ b/pkg/security/types/rules.go @@ -0,0 +1,47 @@ +/* +Copyright © 2023 NAME HERE +*/ + +package types + +import ( + "github.com/threagile/threagile/pkg/run" + "strings" +) + +type progressReporter interface { + Println(a ...any) (n int, err error) + Fatalf(format string, v ...any) +} + +func LoadCustomRiskRules(pluginFiles []string, reporter progressReporter) map[string]*CustomRisk { + customRiskRuleList := make([]string, 0) + customRiskRules := make(map[string]*CustomRisk) + if len(pluginFiles) > 0 { + _, _ = reporter.Println("Loading custom risk rules:", strings.Join(pluginFiles, ", ")) + + for _, pluginFile := range pluginFiles { + if len(pluginFile) > 0 { + runner, loadError := new(run.Runner).Load(pluginFile) + if loadError != nil { + reporter.Fatalf("WARNING: Custom risk rule %q not loaded: %v\n", pluginFile, loadError) + } + + 
risk := new(CustomRisk) + runError := runner.Run(nil, &risk, "-get-info") + if runError != nil { + reporter.Fatalf("WARNING: Failed to get ID for custom risk rule %q: %v\n", pluginFile, runError) + } + + risk.Runner = runner + customRiskRules[risk.ID] = risk + customRiskRuleList = append(customRiskRuleList, risk.ID) + _, _ = reporter.Println("Custom risk rule loaded:", risk.ID) + } + } + + _, _ = reporter.Println("Loaded custom risk rules:", strings.Join(customRiskRuleList, ", ")) + } + + return customRiskRules +} diff --git a/pkg/model/shared_runtime.go b/pkg/security/types/shared_runtime.go similarity index 80% rename from pkg/model/shared_runtime.go rename to pkg/security/types/shared_runtime.go index c2231b6d..1d6d0ec6 100644 --- a/pkg/model/shared_runtime.go +++ b/pkg/security/types/shared_runtime.go @@ -1,18 +1,19 @@ /* Copyright © 2023 NAME HERE */ -package model + +package types import ( "sort" - - "github.com/threagile/threagile/pkg/security/types" ) type SharedRuntime struct { - Id, Title, Description string - Tags []string - TechnicalAssetsRunning []string + Id string `json:"id,omitempty"` + Title string `json:"title,omitempty"` + Description string `json:"description,omitempty"` + Tags []string `json:"tags,omitempty"` + TechnicalAssetsRunning []string `json:"technical_assets_running,omitempty"` } func (what SharedRuntime) IsTaggedWithAny(tags ...string) bool { @@ -23,8 +24,8 @@ func (what SharedRuntime) IsTaggedWithBaseTag(baseTag string) bool { return IsTaggedWithBaseTag(what.Tags, baseTag) } -func (what SharedRuntime) HighestConfidentiality(model *ParsedModel) types.Confidentiality { - highest := types.Public +func (what SharedRuntime) HighestConfidentiality(model *ParsedModel) Confidentiality { + highest := Public for _, id := range what.TechnicalAssetsRunning { techAsset := model.TechnicalAssets[id] if techAsset.HighestConfidentiality(model) > highest { @@ -34,8 +35,8 @@ func (what SharedRuntime) HighestConfidentiality(model *ParsedModel) 
types.Confi return highest } -func (what SharedRuntime) HighestIntegrity(model *ParsedModel) types.Criticality { - highest := types.Archive +func (what SharedRuntime) HighestIntegrity(model *ParsedModel) Criticality { + highest := Archive for _, id := range what.TechnicalAssetsRunning { techAsset := model.TechnicalAssets[id] if techAsset.HighestIntegrity(model) > highest { @@ -45,8 +46,8 @@ func (what SharedRuntime) HighestIntegrity(model *ParsedModel) types.Criticality return highest } -func (what SharedRuntime) HighestAvailability(model *ParsedModel) types.Criticality { - highest := types.Archive +func (what SharedRuntime) HighestAvailability(model *ParsedModel) Criticality { + highest := Archive for _, id := range what.TechnicalAssetsRunning { techAsset := model.TechnicalAssets[id] if techAsset.HighestAvailability(model) > highest { diff --git a/pkg/security/types/stride.go b/pkg/security/types/stride.go index a068d00d..f6f09cde 100644 --- a/pkg/security/types/stride.go +++ b/pkg/security/types/stride.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/stride_test.go b/pkg/security/types/stride_test.go index 7bb5ae5d..9f10059e 100644 --- a/pkg/security/types/stride_test.go +++ b/pkg/security/types/stride_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/model/technical_asset.go b/pkg/security/types/technical_asset.go similarity index 84% rename from pkg/model/technical_asset.go rename to pkg/security/types/technical_asset.go index 3dbdf504..19d40bbb 100644 --- a/pkg/model/technical_asset.go +++ b/pkg/security/types/technical_asset.go @@ -1,36 +1,46 @@ /* Copyright © 2023 NAME HERE */ -package model + +package types import ( "fmt" "sort" "github.com/threagile/threagile/pkg/colors" - "github.com/threagile/threagile/pkg/security/types" ) type TechnicalAsset struct { - Id, Title, Description string - Usage types.Usage - Type types.TechnicalAssetType - Size 
types.TechnicalAssetSize - Technology types.TechnicalAssetTechnology - Machine types.TechnicalAssetMachine - Internet, MultiTenant, Redundant, CustomDevelopedParts, OutOfScope, UsedAsClientByHuman bool - Encryption types.EncryptionStyle - JustificationOutOfScope string - Owner string - Confidentiality types.Confidentiality - Integrity, Availability types.Criticality - JustificationCiaRating string - Tags, DataAssetsProcessed, DataAssetsStored []string - DataFormatsAccepted []types.DataFormat - CommunicationLinks []CommunicationLink - DiagramTweakOrder int + Id string `json:"id,omitempty"` + Title string `json:"title,omitempty"` + Description string `json:"description,omitempty"` + Usage Usage `json:"usage,omitempty"` + Type TechnicalAssetType `json:"type,omitempty"` + Size TechnicalAssetSize `json:"size,omitempty"` + Technology TechnicalAssetTechnology `json:"technology,omitempty"` + Machine TechnicalAssetMachine `json:"machine,omitempty"` + Internet bool `json:"internet,omitempty"` + MultiTenant bool `json:"multi_tenant,omitempty"` + Redundant bool `json:"redundant,omitempty"` + CustomDevelopedParts bool `json:"custom_developed_parts,omitempty"` + OutOfScope bool `json:"out_of_scope,omitempty"` + UsedAsClientByHuman bool `json:"used_as_client_by_human,omitempty"` + Encryption EncryptionStyle `json:"encryption,omitempty"` + JustificationOutOfScope string `json:"justification_out_of_scope,omitempty"` + Owner string `json:"owner,omitempty"` + Confidentiality Confidentiality `json:"confidentiality,omitempty"` + Integrity Criticality `json:"integrity,omitempty"` + Availability Criticality `json:"availability,omitempty"` + JustificationCiaRating string `json:"justification_cia_rating,omitempty"` + Tags []string `json:"tags,omitempty"` + DataAssetsProcessed []string `json:"data_assets_processed,omitempty"` + DataAssetsStored []string `json:"data_assets_stored,omitempty"` + DataFormatsAccepted []DataFormat `json:"data_formats_accepted,omitempty"` + CommunicationLinks 
[]CommunicationLink `json:"communication_links,omitempty"` + DiagramTweakOrder int `json:"diagram_tweak_order,omitempty"` // will be set by separate calculation step: - RAA float64 + RAA float64 `json:"raa,omitempty"` } func (what TechnicalAsset) IsTaggedWithAny(tags ...string) bool { @@ -70,7 +80,7 @@ func (what TechnicalAsset) IsSameTrustBoundary(parsedModel *ParsedModel, otherAs func (what TechnicalAsset) IsSameExecutionEnvironment(parsedModel *ParsedModel, otherAssetId string) bool { trustBoundaryOfMyAsset := parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId[what.Id] trustBoundaryOfOtherAsset := parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId[otherAssetId] - if trustBoundaryOfMyAsset.Type == types.ExecutionEnvironment && trustBoundaryOfOtherAsset.Type == types.ExecutionEnvironment { + if trustBoundaryOfMyAsset.Type == ExecutionEnvironment && trustBoundaryOfOtherAsset.Type == ExecutionEnvironment { return trustBoundaryOfMyAsset.Id == trustBoundaryOfOtherAsset.Id } return false @@ -94,7 +104,7 @@ func (what TechnicalAsset) HighestSensitivityScore() float64 { what.Availability.AttackerAttractivenessForAsset() } -func (what TechnicalAsset) HighestConfidentiality(parsedModel *ParsedModel) types.Confidentiality { +func (what TechnicalAsset) HighestConfidentiality(parsedModel *ParsedModel) Confidentiality { highest := what.Confidentiality for _, dataId := range what.DataAssetsProcessed { dataAsset := parsedModel.DataAssets[dataId] @@ -129,12 +139,12 @@ func (what TechnicalAsset) DataAssetsStoredSorted(parsedModel *ParsedModel) []Da return result } -func (what TechnicalAsset) DataFormatsAcceptedSorted() []types.DataFormat { - result := make([]types.DataFormat, 0) +func (what TechnicalAsset) DataFormatsAcceptedSorted() []DataFormat { + result := make([]DataFormat, 0) for _, format := range what.DataFormatsAccepted { result = append(result, format) } - sort.Sort(types.ByDataFormatAcceptedSort(result)) + 
sort.Sort(ByDataFormatAcceptedSort(result)) return result } @@ -147,7 +157,7 @@ func (what TechnicalAsset) CommunicationLinksSorted() []CommunicationLink { return result } -func (what TechnicalAsset) HighestIntegrity(model *ParsedModel) types.Criticality { +func (what TechnicalAsset) HighestIntegrity(model *ParsedModel) Criticality { highest := what.Integrity for _, dataId := range what.DataAssetsProcessed { dataAsset := model.DataAssets[dataId] @@ -164,7 +174,7 @@ func (what TechnicalAsset) HighestIntegrity(model *ParsedModel) types.Criticalit return highest } -func (what TechnicalAsset) HighestAvailability(model *ParsedModel) types.Criticality { +func (what TechnicalAsset) HighestAvailability(model *ParsedModel) Criticality { highest := what.Availability for _, dataId := range what.DataAssetsProcessed { dataAsset := model.DataAssets[dataId] @@ -244,30 +254,30 @@ func (what TechnicalAsset) ProcessesOrStoresDataAsset(dataAssetId string) bool { func (what TechnicalAsset) DetermineLabelColor(model *ParsedModel) string { // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here // Check for red - if what.Integrity == types.MissionCritical { + if what.Integrity == MissionCritical { return colors.Red } for _, storedDataAsset := range what.DataAssetsStored { - if model.DataAssets[storedDataAsset].Integrity == types.MissionCritical { + if model.DataAssets[storedDataAsset].Integrity == MissionCritical { return colors.Red } } for _, processedDataAsset := range what.DataAssetsProcessed { - if model.DataAssets[processedDataAsset].Integrity == types.MissionCritical { + if model.DataAssets[processedDataAsset].Integrity == MissionCritical { return colors.Red } } // Check for amber - if what.Integrity == types.Critical { + if what.Integrity == Critical { return colors.Amber } for _, storedDataAsset := range what.DataAssetsStored { - if model.DataAssets[storedDataAsset].Integrity == types.Critical { + if 
model.DataAssets[storedDataAsset].Integrity == Critical { return colors.Amber } } for _, processedDataAsset := range what.DataAssetsProcessed { - if model.DataAssets[processedDataAsset].Integrity == types.Critical { + if model.DataAssets[processedDataAsset].Integrity == Critical { return colors.Amber } } @@ -304,30 +314,30 @@ func (what TechnicalAsset) DetermineLabelColor(model *ParsedModel) string { func (what TechnicalAsset) DetermineShapeBorderColor(parsedModel *ParsedModel) string { // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here // Check for red - if what.Confidentiality == types.StrictlyConfidential { + if what.Confidentiality == StrictlyConfidential { return colors.Red } for _, storedDataAsset := range what.DataAssetsStored { - if parsedModel.DataAssets[storedDataAsset].Confidentiality == types.StrictlyConfidential { + if parsedModel.DataAssets[storedDataAsset].Confidentiality == StrictlyConfidential { return colors.Red } } for _, processedDataAsset := range what.DataAssetsProcessed { - if parsedModel.DataAssets[processedDataAsset].Confidentiality == types.StrictlyConfidential { + if parsedModel.DataAssets[processedDataAsset].Confidentiality == StrictlyConfidential { return colors.Red } } // Check for amber - if what.Confidentiality == types.Confidential { + if what.Confidentiality == Confidential { return colors.Amber } for _, storedDataAsset := range what.DataAssetsStored { - if parsedModel.DataAssets[storedDataAsset].Confidentiality == types.Confidential { + if parsedModel.DataAssets[storedDataAsset].Confidentiality == Confidential { return colors.Amber } } for _, processedDataAsset := range what.DataAssetsProcessed { - if parsedModel.DataAssets[processedDataAsset].Confidentiality == types.Confidential { + if parsedModel.DataAssets[processedDataAsset].Confidentiality == Confidential { return colors.Amber } } @@ -434,7 +444,7 @@ func (what TechnicalAsset) GetTrustBoundaryId(model *ParsedModel) 
string { func (what TechnicalAsset) DetermineShapeFillColor(parsedModel *ParsedModel) string { fillColor := colors.VeryLightGray if len(what.DataAssetsProcessed) == 0 && len(what.DataAssetsStored) == 0 || - what.Technology == types.UnknownTechnology { + what.Technology == UnknownTechnology { fillColor = colors.LightPink // lightPink, because it's strange when too many technical assets process no data... some ok, but many in a diagram ist a sign of model forgery... } else if len(what.CommunicationLinks) == 0 && len(parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId[what.Id]) == 0 { fillColor = colors.LightPink @@ -446,13 +456,13 @@ func (what TechnicalAsset) DetermineShapeFillColor(parsedModel *ParsedModel) str fillColor = colors.CustomDevelopedParts } switch what.Machine { - case types.Physical: + case Physical: fillColor = colors.DarkenHexColor(fillColor) - case types.Container: + case Container: fillColor = colors.BrightenHexColor(fillColor) - case types.Serverless: + case Serverless: fillColor = colors.BrightenHexColor(colors.BrightenHexColor(fillColor)) - case types.Virtual: + case Virtual: } return fillColor } diff --git a/pkg/security/types/technical_asset_machine.go b/pkg/security/types/technical_asset_machine.go index ccd1ba04..8536104f 100644 --- a/pkg/security/types/technical_asset_machine.go +++ b/pkg/security/types/technical_asset_machine.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/technical_asset_machine_test.go b/pkg/security/types/technical_asset_machine_test.go index b2af5e52..3f53c685 100644 --- a/pkg/security/types/technical_asset_machine_test.go +++ b/pkg/security/types/technical_asset_machine_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/technical_asset_size.go b/pkg/security/types/technical_asset_size.go index 7effc122..178bf5ef 100644 --- a/pkg/security/types/technical_asset_size.go +++ 
b/pkg/security/types/technical_asset_size.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/technical_asset_size_test.go b/pkg/security/types/technical_asset_size_test.go index 0c5d9190..62d34591 100644 --- a/pkg/security/types/technical_asset_size_test.go +++ b/pkg/security/types/technical_asset_size_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/technical_asset_technology.go b/pkg/security/types/technical_asset_technology.go index 8daaba10..2dc272c8 100644 --- a/pkg/security/types/technical_asset_technology.go +++ b/pkg/security/types/technical_asset_technology.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/technical_asset_technology_test.go b/pkg/security/types/technical_asset_technology_test.go index b40dd355..0f1fc086 100644 --- a/pkg/security/types/technical_asset_technology_test.go +++ b/pkg/security/types/technical_asset_technology_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/technical_asset_type.go b/pkg/security/types/technical_asset_type.go index 5a105a42..513a47f4 100644 --- a/pkg/security/types/technical_asset_type.go +++ b/pkg/security/types/technical_asset_type.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/technical_asset_type_test.go b/pkg/security/types/technical_asset_type_test.go index d04ac6be..a61cecb2 100644 --- a/pkg/security/types/technical_asset_type_test.go +++ b/pkg/security/types/technical_asset_type_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( diff --git a/pkg/security/types/trust_boundary.go b/pkg/security/types/trust_boundary.go index d08da6e8..4a90c66f 100644 --- a/pkg/security/types/trust_boundary.go +++ b/pkg/security/types/trust_boundary.go @@ -1,88 +1,128 @@ /* Copyright © 2023 
NAME HERE */ + package types import ( - "encoding/json" - "errors" - "fmt" - "strings" + "sort" ) -type TrustBoundaryType int +type TrustBoundary struct { + Id string `json:"id,omitempty"` + Title string `json:"title,omitempty"` + Description string `json:"description,omitempty"` + Type TrustBoundaryType `json:"type,omitempty"` + Tags []string `json:"tags,omitempty"` + TechnicalAssetsInside []string `json:"technical_assets_inside,omitempty"` + TrustBoundariesNested []string `json:"trust_boundaries_nested,omitempty"` +} -const ( - NetworkOnPrem TrustBoundaryType = iota - NetworkDedicatedHoster - NetworkVirtualLAN - NetworkCloudProvider - NetworkCloudSecurityGroup - NetworkPolicyNamespaceIsolation - ExecutionEnvironment -) +func (what TrustBoundary) RecursivelyAllTechnicalAssetIDsInside(model *ParsedModel) []string { + result := make([]string, 0) + what.addAssetIDsRecursively(model, &result) + return result +} + +func (what TrustBoundary) IsTaggedWithAny(tags ...string) bool { + return containsCaseInsensitiveAny(what.Tags, tags...) +} + +func (what TrustBoundary) IsTaggedWithBaseTag(baseTag string) bool { + return IsTaggedWithBaseTag(what.Tags, baseTag) +} -func TrustBoundaryTypeValues() []TypeEnum { - return []TypeEnum{ - NetworkOnPrem, - NetworkDedicatedHoster, - NetworkVirtualLAN, - NetworkCloudProvider, - NetworkCloudSecurityGroup, - NetworkPolicyNamespaceIsolation, - ExecutionEnvironment, +func (what TrustBoundary) IsTaggedWithAnyTraversingUp(model *ParsedModel, tags ...string) bool { + if what.IsTaggedWithAny(tags...) { + return true } + parentID := what.ParentTrustBoundaryID(model) + if len(parentID) > 0 && model.TrustBoundaries[parentID].IsTaggedWithAnyTraversingUp(model, tags...) 
{ + return true + } + return false } -var TrustBoundaryTypeDescription = [...]TypeDescription{ - {"network-on-prem", "The whole network is on prem"}, - {"network-dedicated-hoster", "The network is at a dedicated hoster"}, - {"network-virtual-lan", "Network is a VLAN"}, - {"network-cloud-provider", "Network is at a cloud provider"}, - {"network-cloud-security-group", "Cloud rules controlling network traffic"}, - {"network-policy-namespace-isolation", "Segregation in a Kubernetes cluster"}, - {"execution-environment", "Logical group of items (not a protective network boundary in that sense). More like a namespace or another logical group of items"}, +func (what TrustBoundary) ParentTrustBoundaryID(model *ParsedModel) string { + var result string + for _, candidate := range model.TrustBoundaries { + if contains(candidate.TrustBoundariesNested, what.Id) { + result = candidate.Id + return result + } + } + return result } -func ParseTrustBoundary(value string) (trustBoundary TrustBoundaryType, err error) { - value = strings.TrimSpace(value) - for _, candidate := range TrustBoundaryTypeValues() { - if candidate.String() == value { - return candidate.(TrustBoundaryType), err +func (what TrustBoundary) HighestConfidentiality(model *ParsedModel) Confidentiality { + highest := Public + for _, id := range what.RecursivelyAllTechnicalAssetIDsInside(model) { + techAsset := model.TechnicalAssets[id] + if techAsset.HighestConfidentiality(model) > highest { + highest = techAsset.HighestConfidentiality(model) } } - return trustBoundary, errors.New("Unable to parse into type: " + value) + return highest } -func (what TrustBoundaryType) String() string { - // NOTE: maintain list also in schema.json for validation in IDEs - return TrustBoundaryTypeDescription[what].Name +func (what TrustBoundary) HighestIntegrity(model *ParsedModel) Criticality { + highest := Archive + for _, id := range what.RecursivelyAllTechnicalAssetIDsInside(model) { + techAsset := model.TechnicalAssets[id] + if 
techAsset.HighestIntegrity(model) > highest { + highest = techAsset.HighestIntegrity(model) + } + } + return highest } -func (what TrustBoundaryType) Explain() string { - return TrustBoundaryTypeDescription[what].Description +func (what TrustBoundary) HighestAvailability(model *ParsedModel) Criticality { + highest := Archive + for _, id := range what.RecursivelyAllTechnicalAssetIDsInside(model) { + techAsset := model.TechnicalAssets[id] + if techAsset.HighestAvailability(model) > highest { + highest = techAsset.HighestAvailability(model) + } + } + return highest } -func (what TrustBoundaryType) IsNetworkBoundary() bool { - return what == NetworkOnPrem || what == NetworkDedicatedHoster || what == NetworkVirtualLAN || - what == NetworkCloudProvider || what == NetworkCloudSecurityGroup || what == NetworkPolicyNamespaceIsolation +func (what TrustBoundary) AllParentTrustBoundaryIDs(model *ParsedModel) []string { + result := make([]string, 0) + what.addTrustBoundaryIDsRecursively(model, &result) + return result } -func (what TrustBoundaryType) IsWithinCloud() bool { - return what == NetworkCloudProvider || what == NetworkCloudSecurityGroup +func (what TrustBoundary) addAssetIDsRecursively(model *ParsedModel, result *[]string) { + *result = append(*result, what.TechnicalAssetsInside...) 
+ for _, nestedBoundaryID := range what.TrustBoundariesNested { + model.TrustBoundaries[nestedBoundaryID].addAssetIDsRecursively(model, result) + } } -func (what TrustBoundaryType) MarshalJSON() ([]byte, error) { - return json.Marshal(what.String()) +// TODO: pass ParsedModelRoot as parameter instead of using global variable +func (what TrustBoundary) addTrustBoundaryIDsRecursively(model *ParsedModel, result *[]string) { + *result = append(*result, what.Id) + parentID := what.ParentTrustBoundaryID(model) + if len(parentID) > 0 { + model.TrustBoundaries[parentID].addTrustBoundaryIDsRecursively(model, result) + } } -func (what *TrustBoundaryType) UnmarshalJSON([]byte) error { - for index, description := range TrustBoundaryTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = TrustBoundaryType(index) - return nil - } +// as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: +func SortedKeysOfTrustBoundaries(model *ParsedModel) []string { + keys := make([]string, 0) + for k := range model.TrustBoundaries { + keys = append(keys, k) } + sort.Strings(keys) + return keys +} + +type ByTrustBoundaryTitleSort []TrustBoundary - return fmt.Errorf("unknown trust boundary type value %q", int(*what)) +func (what ByTrustBoundaryTitleSort) Len() int { return len(what) } +func (what ByTrustBoundaryTitleSort) Swap(i, j int) { what[i], what[j] = what[j], what[i] } +func (what ByTrustBoundaryTitleSort) Less(i, j int) bool { + return what[i].Title < what[j].Title } diff --git a/pkg/security/types/trust_boundary_type.go b/pkg/security/types/trust_boundary_type.go new file mode 100644 index 00000000..d08da6e8 --- /dev/null +++ b/pkg/security/types/trust_boundary_type.go @@ -0,0 +1,88 @@ +/* +Copyright © 2023 NAME HERE +*/ +package types + +import ( + "encoding/json" + "errors" + "fmt" + "strings" +) + +type TrustBoundaryType int + +const ( + NetworkOnPrem TrustBoundaryType = iota + 
NetworkDedicatedHoster + NetworkVirtualLAN + NetworkCloudProvider + NetworkCloudSecurityGroup + NetworkPolicyNamespaceIsolation + ExecutionEnvironment +) + +func TrustBoundaryTypeValues() []TypeEnum { + return []TypeEnum{ + NetworkOnPrem, + NetworkDedicatedHoster, + NetworkVirtualLAN, + NetworkCloudProvider, + NetworkCloudSecurityGroup, + NetworkPolicyNamespaceIsolation, + ExecutionEnvironment, + } +} + +var TrustBoundaryTypeDescription = [...]TypeDescription{ + {"network-on-prem", "The whole network is on prem"}, + {"network-dedicated-hoster", "The network is at a dedicated hoster"}, + {"network-virtual-lan", "Network is a VLAN"}, + {"network-cloud-provider", "Network is at a cloud provider"}, + {"network-cloud-security-group", "Cloud rules controlling network traffic"}, + {"network-policy-namespace-isolation", "Segregation in a Kubernetes cluster"}, + {"execution-environment", "Logical group of items (not a protective network boundary in that sense). More like a namespace or another logical group of items"}, +} + +func ParseTrustBoundary(value string) (trustBoundary TrustBoundaryType, err error) { + value = strings.TrimSpace(value) + for _, candidate := range TrustBoundaryTypeValues() { + if candidate.String() == value { + return candidate.(TrustBoundaryType), err + } + } + return trustBoundary, errors.New("Unable to parse into type: " + value) +} + +func (what TrustBoundaryType) String() string { + // NOTE: maintain list also in schema.json for validation in IDEs + return TrustBoundaryTypeDescription[what].Name +} + +func (what TrustBoundaryType) Explain() string { + return TrustBoundaryTypeDescription[what].Description +} + +func (what TrustBoundaryType) IsNetworkBoundary() bool { + return what == NetworkOnPrem || what == NetworkDedicatedHoster || what == NetworkVirtualLAN || + what == NetworkCloudProvider || what == NetworkCloudSecurityGroup || what == NetworkPolicyNamespaceIsolation +} + +func (what TrustBoundaryType) IsWithinCloud() bool { + return what == 
NetworkCloudProvider || what == NetworkCloudSecurityGroup +} + +func (what TrustBoundaryType) MarshalJSON() ([]byte, error) { + return json.Marshal(what.String()) +} + +func (what *TrustBoundaryType) UnmarshalJSON([]byte) error { + for index, description := range TrustBoundaryTypeDescription { + if strings.ToLower(what.String()) == strings.ToLower(description.Name) { + *what = TrustBoundaryType(index) + return nil + } + } + + return fmt.Errorf("unknown trust boundary type value %q", int(*what)) +} diff --git a/pkg/security/types/trust_boundary_test.go b/pkg/security/types/trust_boundary_type_test.go similarity index 100% rename from pkg/security/types/trust_boundary_test.go rename to pkg/security/types/trust_boundary_type_test.go diff --git a/pkg/security/types/types.go b/pkg/security/types/types.go index 62e76535..78b669a8 100644 --- a/pkg/security/types/types.go +++ b/pkg/security/types/types.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types // TypeDescription contains a name for a type and its description diff --git a/pkg/security/types/usage_test.go b/pkg/security/types/usage_test.go index bad781e3..fafb08f1 100644 --- a/pkg/security/types/usage_test.go +++ b/pkg/security/types/usage_test.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package types import ( From dee54910393c5138a557c636f43f5ae4b256dc1b Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Wed, 27 Dec 2023 13:01:20 -0800 Subject: [PATCH 26/68] completed merge --- cmd/threagile/main.go | 3 +- internal/threagile/context.go | 1727 +++------------------------------ pkg/server/server.go | 6 +- 3 files changed, 143 insertions(+), 1593 deletions(-) diff --git a/cmd/threagile/main.go b/cmd/threagile/main.go index 8c384bbd..ec57ff7e 100644 --- a/cmd/threagile/main.go +++ b/cmd/threagile/main.go @@ -2,6 +2,7 @@ package main import ( "github.com/threagile/threagile/internal/threagile" + "github.com/threagile/threagile/pkg/server" ) const ( @@ -19,7 +20,7 @@ func main() { context := 
new(threagile.Context).Defaults(buildTimestamp) context.ParseCommandlineArgs() if context.ServerMode { - context.StartServer() + server.RunServer(server.Configuration{}) } else { context.DoIt() } diff --git a/internal/threagile/context.go b/internal/threagile/context.go index 3b51f7e9..c785d207 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -1047,1469 +1047,181 @@ func (context *Context) applyRAA() string { return runner.ErrorOutput } -func (context *Context) StartServer() { - router := gin.Default() - router.LoadHTMLGlob("server/static/*.html") // <== - router.GET("/", func(c *gin.Context) { - c.HTML(http.StatusOK, "index.html", gin.H{}) - }) - router.HEAD("/", func(c *gin.Context) { - c.HTML(http.StatusOK, "index.html", gin.H{}) - }) - router.StaticFile("/threagile.png", "server/static/threagile.png") // <== - router.StaticFile("/site.webmanifest", "server/static/site.webmanifest") - router.StaticFile("/favicon.ico", "server/static/favicon.ico") - router.StaticFile("/favicon-32x32.png", "server/static/favicon-32x32.png") - router.StaticFile("/favicon-16x16.png", "server/static/favicon-16x16.png") - router.StaticFile("/apple-touch-icon.png", "server/static/apple-touch-icon.png") - router.StaticFile("/android-chrome-512x512.png", "server/static/android-chrome-512x512.png") - router.StaticFile("/android-chrome-192x192.png", "server/static/android-chrome-192x192.png") - - router.StaticFile("/schema.json", "schema.json") - router.StaticFile("/live-templates.txt", "live-templates.txt") - router.StaticFile("/openapi.yaml", "openapi.yaml") - router.StaticFile("/swagger-ui/", "server/static/swagger-ui/index.html") - router.StaticFile("/swagger-ui/index.html", "server/static/swagger-ui/index.html") - router.StaticFile("/swagger-ui/oauth2-redirect.html", "server/static/swagger-ui/oauth2-redirect.html") - router.StaticFile("/swagger-ui/swagger-ui.css", "server/static/swagger-ui/swagger-ui.css") - 
router.StaticFile("/swagger-ui/swagger-ui.js", "server/static/swagger-ui/swagger-ui.js") - router.StaticFile("/swagger-ui/swagger-ui-bundle.js", "server/static/swagger-ui/swagger-ui-bundle.js") - router.StaticFile("/swagger-ui/swagger-ui-standalone-preset.js", "server/static/swagger-ui/swagger-ui-standalone-preset.js") // <== - - router.GET("/threagile-example-model.yaml", context.exampleFile) - router.GET("/threagile-stub-model.yaml", context.stubFile) - - router.GET("/meta/ping", func(c *gin.Context) { - c.JSON(200, gin.H{ - "message": "pong", - }) - }) - router.GET("/meta/version", func(c *gin.Context) { - c.JSON(200, gin.H{ - "version": docs.ThreagileVersion, - "build_timestamp": context.buildTimestamp, - }) - }) - router.GET("/meta/types", func(c *gin.Context) { - c.JSON(200, gin.H{ - "quantity": context.arrayOfStringValues(types.QuantityValues()), - "confidentiality": context.arrayOfStringValues(types.ConfidentialityValues()), - "criticality": context.arrayOfStringValues(types.CriticalityValues()), - "technical_asset_type": context.arrayOfStringValues(types.TechnicalAssetTypeValues()), - "technical_asset_size": context.arrayOfStringValues(types.TechnicalAssetSizeValues()), - "authorization": context.arrayOfStringValues(types.AuthorizationValues()), - "authentication": context.arrayOfStringValues(types.AuthenticationValues()), - "usage": context.arrayOfStringValues(types.UsageValues()), - "encryption": context.arrayOfStringValues(types.EncryptionStyleValues()), - "data_format": context.arrayOfStringValues(types.DataFormatValues()), - "protocol": context.arrayOfStringValues(types.ProtocolValues()), - "technical_asset_technology": context.arrayOfStringValues(types.TechnicalAssetTechnologyValues()), - "technical_asset_machine": context.arrayOfStringValues(types.TechnicalAssetMachineValues()), - "trust_boundary_type": context.arrayOfStringValues(types.TrustBoundaryTypeValues()), - "data_breach_probability": 
context.arrayOfStringValues(types.DataBreachProbabilityValues()), - "risk_severity": context.arrayOfStringValues(types.RiskSeverityValues()), - "risk_exploitation_likelihood": context.arrayOfStringValues(types.RiskExploitationLikelihoodValues()), - "risk_exploitation_impact": context.arrayOfStringValues(types.RiskExploitationImpactValues()), - "risk_function": context.arrayOfStringValues(types.RiskFunctionValues()), - "risk_status": context.arrayOfStringValues(types.RiskStatusValues()), - "stride": context.arrayOfStringValues(types.STRIDEValues()), - }) - }) - - // TODO router.GET("/meta/risk-rules", listRiskRules) - // TODO router.GET("/meta/model-macros", listModelMacros) - - router.GET("/meta/stats", context.stats) - - router.POST("/direct/analyze", context.analyze) - router.POST("/direct/check", context.check) - router.GET("/direct/stub", context.stubFile) - - router.POST("/auth/keys", context.createKey) - router.DELETE("/auth/keys", context.deleteKey) - router.POST("/auth/tokens", context.createToken) - router.DELETE("/auth/tokens", context.deleteToken) - - router.POST("/models", context.createNewModel) - router.GET("/models", context.listModels) - router.DELETE("/models/:model-id", context.deleteModel) - router.GET("/models/:model-id", context.getModel) - router.PUT("/models/:model-id", context.importModel) - router.GET("/models/:model-id/data-flow-diagram", context.streamDataFlowDiagram) - router.GET("/models/:model-id/data-asset-diagram", context.streamDataAssetDiagram) - router.GET("/models/:model-id/report-pdf", context.streamReportPDF) - router.GET("/models/:model-id/risks-excel", context.streamRisksExcel) - router.GET("/models/:model-id/tags-excel", context.streamTagsExcel) - router.GET("/models/:model-id/risks", context.streamRisksJSON) - router.GET("/models/:model-id/technical-assets", context.streamTechnicalAssetsJSON) - router.GET("/models/:model-id/stats", context.streamStatsJSON) - router.GET("/models/:model-id/analysis", 
context.analyzeModelOnServerDirectly) - - router.GET("/models/:model-id/cover", context.getCover) - router.PUT("/models/:model-id/cover", context.setCover) - router.GET("/models/:model-id/overview", context.getOverview) - router.PUT("/models/:model-id/overview", context.setOverview) - //router.GET("/models/:model-id/questions", getQuestions) - //router.PUT("/models/:model-id/questions", setQuestions) - router.GET("/models/:model-id/abuse-cases", context.getAbuseCases) - router.PUT("/models/:model-id/abuse-cases", context.setAbuseCases) - router.GET("/models/:model-id/security-requirements", context.getSecurityRequirements) - router.PUT("/models/:model-id/security-requirements", context.setSecurityRequirements) - //router.GET("/models/:model-id/tags", getTags) - //router.PUT("/models/:model-id/tags", setTags) - - router.GET("/models/:model-id/data-assets", context.getDataAssets) - router.POST("/models/:model-id/data-assets", context.createNewDataAsset) - router.GET("/models/:model-id/data-assets/:data-asset-id", context.getDataAsset) - router.PUT("/models/:model-id/data-assets/:data-asset-id", context.setDataAsset) - router.DELETE("/models/:model-id/data-assets/:data-asset-id", context.deleteDataAsset) - - router.GET("/models/:model-id/trust-boundaries", context.getTrustBoundaries) - // router.POST("/models/:model-id/trust-boundaries", createNewTrustBoundary) - // router.GET("/models/:model-id/trust-boundaries/:trust-boundary-id", getTrustBoundary) - // router.PUT("/models/:model-id/trust-boundaries/:trust-boundary-id", setTrustBoundary) - // router.DELETE("/models/:model-id/trust-boundaries/:trust-boundary-id", deleteTrustBoundary) - - router.GET("/models/:model-id/shared-runtimes", context.getSharedRuntimes) - router.POST("/models/:model-id/shared-runtimes", context.createNewSharedRuntime) - router.GET("/models/:model-id/shared-runtimes/:shared-runtime-id", context.getSharedRuntime) - router.PUT("/models/:model-id/shared-runtimes/:shared-runtime-id", 
context.setSharedRuntime) - router.DELETE("/models/:model-id/shared-runtimes/:shared-runtime-id", context.deleteSharedRuntime) - - fmt.Println("Threagile server running...") - _ = router.Run(":" + strconv.Itoa(*context.serverPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified -} - func (context *Context) exampleFile(ginContext *gin.Context) { example, err := os.ReadFile(filepath.Join(*context.appFolder, "threagile-example-model.yaml")) checkErr(err) ginContext.Data(http.StatusOK, gin.MIMEYAML, example) } - -func (context *Context) stubFile(ginContext *gin.Context) { - stub, err := os.ReadFile(filepath.Join(*context.appFolder, "threagile-stub-model.yaml")) - checkErr(err) - ginContext.Data(http.StatusOK, gin.MIMEYAML, context.addSupportedTags(stub)) // TODO use also the MIMEYAML way of serving YAML in model export? -} - -func (context *Context) addSupportedTags(input []byte) []byte { - // add distinct tags as "tags_available" - supportedTags := make(map[string]bool) - for _, customRule := range context.customRiskRules { - for _, tag := range customRule.Tags { - supportedTags[strings.ToLower(tag)] = true - } - } - - for _, rule := range context.builtinRiskRules { - for _, tag := range rule.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - } - - tags := make([]string, 0, len(supportedTags)) - for t := range supportedTags { - tags = append(tags, t) - } - if len(tags) == 0 { - return input - } - sort.Strings(tags) - if *context.verbose { - fmt.Print("Supported tags of all risk rules: ") - for i, tag := range tags { - if i > 0 { - fmt.Print(", ") - } - fmt.Print(tag) - } - fmt.Println() - } - replacement := "tags_available:" - for _, tag := range tags { - replacement += "\n - " + tag - } - return []byte(strings.Replace(string(input), "tags_available:", replacement, 1)) -} - -var mapFolderNameToTokenHash = make(map[string]string) - -const keySize = 32 - -func (context *Context) createToken(ginContext *gin.Context) { - folderName, 
key, ok := context.checkKeyToFolderName(ginContext) - if !ok { - return - } - context.globalLock.Lock() - defer context.globalLock.Unlock() - if tokenHash, exists := mapFolderNameToTokenHash[folderName]; exists { - // invalidate previous token - delete(mapTokenHashToTimeoutStruct, tokenHash) - } - // create a strong random 256 bit value (used to xor) - xorBytesArr := make([]byte, keySize) - n, err := rand.Read(xorBytesArr[:]) - if n != keySize || err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to create token", - }) - return - } - now := time.Now().UnixNano() - token := xor(key, xorBytesArr) - tokenHash := hashSHA256(token) - housekeepingTokenMaps() - mapTokenHashToTimeoutStruct[tokenHash] = timeoutStruct{ - xorRand: xorBytesArr, - createdNanoTime: now, - lastAccessedNanoTime: now, - } - mapFolderNameToTokenHash[folderName] = tokenHash - ginContext.JSON(http.StatusCreated, gin.H{ - "token": base64.RawURLEncoding.EncodeToString(token[:]), - }) -} - -type tokenHeader struct { - Token string `header:"token"` -} - -func (context *Context) deleteToken(ginContext *gin.Context) { - header := tokenHeader{} - if err := ginContext.ShouldBindHeader(&header); err != nil { - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", - }) - return - } - token, err := base64.RawURLEncoding.DecodeString(strings.TrimSpace(header.Token)) - if len(token) == 0 || err != nil { - if err != nil { - log.Println(err) - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", - }) - return - } - context.globalLock.Lock() - defer context.globalLock.Unlock() - deleteTokenHashFromMaps(hashSHA256(token)) - ginContext.JSON(http.StatusOK, gin.H{ - "message": "token deleted", - }) -} - -type responseType int - -const ( - dataFlowDiagram responseType = iota - dataAssetDiagram - reportPDF - risksExcel - tagsExcel - risksJSON - technicalAssetsJSON - statsJSON -) - -func (context *Context) 
streamDataFlowDiagram(ginContext *gin.Context) { - context.streamResponse(ginContext, dataFlowDiagram) -} - -func (context *Context) streamDataAssetDiagram(ginContext *gin.Context) { - context.streamResponse(ginContext, dataAssetDiagram) -} - -func (context *Context) streamReportPDF(ginContext *gin.Context) { - context.streamResponse(ginContext, reportPDF) -} - -func (context *Context) streamRisksExcel(ginContext *gin.Context) { - context.streamResponse(ginContext, risksExcel) -} - -func (context *Context) streamTagsExcel(ginContext *gin.Context) { - context.streamResponse(ginContext, tagsExcel) -} - -func (context *Context) streamRisksJSON(ginContext *gin.Context) { - context.streamResponse(ginContext, risksJSON) -} - -func (context *Context) streamTechnicalAssetsJSON(ginContext *gin.Context) { - context.streamResponse(ginContext, technicalAssetsJSON) -} - -func (context *Context) streamStatsJSON(ginContext *gin.Context) { - context.streamResponse(ginContext, statsJSON) -} - -func (context *Context) streamResponse(ginContext *gin.Context, responseType responseType) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer func() { - context.unlockFolder(folderNameOfKey) - var err error - if r := recover(); r != nil { - err = r.(error) - if *context.verbose { - log.Println(err) - } - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": strings.TrimSpace(err.Error()), - }) - ok = false - } - }() - dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(context.DefaultGraphvizDPI))) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - _, yamlText, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if !ok { - return - } - tmpModelFile, err := os.CreateTemp(*context.tempFolder, "threagile-render-*") - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - defer 
func() { _ = os.Remove(tmpModelFile.Name()) }() - tmpOutputDir, err := os.MkdirTemp(*context.tempFolder, "threagile-render-") - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - defer func() { _ = os.RemoveAll(tmpOutputDir) }() - err = os.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400) - if responseType == dataFlowDiagram { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, true, false, false, false, false, false, false, false, dpi) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - ginContext.File(filepath.Join(tmpOutputDir, context.dataFlowDiagramFilenamePNG)) - } else if responseType == dataAssetDiagram { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, true, false, false, false, false, false, false, dpi) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - ginContext.File(filepath.Join(tmpOutputDir, context.dataAssetDiagramFilenamePNG)) - } else if responseType == reportPDF { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, true, false, false, false, false, false, dpi) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - ginContext.FileAttachment(filepath.Join(tmpOutputDir, context.reportFilename), context.reportFilename) - } else if responseType == risksExcel { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, true, false, false, false, false, dpi) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - ginContext.FileAttachment(filepath.Join(tmpOutputDir, context.excelRisksFilename), context.excelRisksFilename) - } else if responseType == tagsExcel { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, true, false, false, false, dpi) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - ginContext.FileAttachment(filepath.Join(tmpOutputDir, context.excelTagsFilename), 
context.excelTagsFilename) - } else if responseType == risksJSON { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, true, false, false, dpi) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, context.jsonRisksFilename)) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download - } else if responseType == technicalAssetsJSON { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, true, true, false, dpi) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, context.jsonTechnicalAssetsFilename)) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download - } else if responseType == statsJSON { - context.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, false, false, true, dpi) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, context.jsonStatsFilename)) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - ginContext.Data(http.StatusOK, "application/json", jsonData) // stream directly with JSON content-type in response instead of file download - } -} - -// fully replaces threagile.yaml in sub-folder given by UUID -func (context *Context) importModel(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - - 
aUuid := ginContext.Param("model-id") // UUID is syntactically validated in readModel+checkModelFolder (next line) via uuid.Parse(modelUUID) - _, _, ok = context.readModel(ginContext, aUuid, key, folderNameOfKey) - if ok { - // first analyze it simply by executing the full risk process (just discard the result) to ensure that everything would work - yamlContent, ok := context.execute(ginContext, true) - if ok { - // if we're here, then no problem was raised, so ok to proceed - ok = context.writeModelYAML(ginContext, string(yamlContent), key, folderNameForModel(folderNameOfKey, aUuid), "Model Import", false) - if ok { - ginContext.JSON(http.StatusCreated, gin.H{ - "message": "model imported", - }) - } - } - } -} - -func (context *Context) stats(ginContext *gin.Context) { - keyCount, modelCount := 0, 0 - keyFolders, err := os.ReadDir(filepath.Join(*context.serverFolder, context.keyDir)) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to collect stats", - }) - return - } - for _, keyFolder := range keyFolders { - if len(keyFolder.Name()) == 128 { // it's a sha512 token hash probably, so count it as token folder for the stats - keyCount++ - if keyFolder.Name() != filepath.Clean(keyFolder.Name()) { - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "weird file path", - }) - return - } - modelFolders, err := os.ReadDir(filepath.Join(*context.serverFolder, context.keyDir, keyFolder.Name())) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to collect stats", - }) - return - } - for _, modelFolder := range modelFolders { - if len(modelFolder.Name()) == 36 { // it's a uuid model folder probably, so count it as model folder for the stats - modelCount++ - } - } - } - } - // TODO collect and deliver more stats (old model count?) 
and health info - ginContext.JSON(http.StatusOK, gin.H{ - "key_count": keyCount, - "model_count": modelCount, - "success_count": context.successCount, - "error_count": context.errorCount, - }) -} - -func (context *Context) getDataAsset(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, dataAsset := range modelInput.DataAssets { - if dataAsset.ID == ginContext.Param("data-asset-id") { - ginContext.JSON(http.StatusOK, gin.H{ - title: dataAsset, - }) - return - } - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "data asset not found", - }) - } -} - -func (context *Context) deleteDataAsset(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - referencesDeleted := false - // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, dataAsset := range modelInput.DataAssets { - if dataAsset.ID == ginContext.Param("data-asset-id") { - // also remove all usages of this data asset !! 
- for _, techAsset := range modelInput.TechnicalAssets { - if techAsset.DataAssetsProcessed != nil { - for i, parsedChangeCandidateAsset := range techAsset.DataAssetsProcessed { - referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) - if referencedAsset == dataAsset.ID { // apply the removal - referencesDeleted = true - // Remove the element at index i - // TODO needs more testing - copy(techAsset.DataAssetsProcessed[i:], techAsset.DataAssetsProcessed[i+1:]) // Shift a[i+1:] left one index. - techAsset.DataAssetsProcessed[len(techAsset.DataAssetsProcessed)-1] = "" // Erase last element (write zero value). - techAsset.DataAssetsProcessed = techAsset.DataAssetsProcessed[:len(techAsset.DataAssetsProcessed)-1] // Truncate slice. - } - } - } - if techAsset.DataAssetsStored != nil { - for i, parsedChangeCandidateAsset := range techAsset.DataAssetsStored { - referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) - if referencedAsset == dataAsset.ID { // apply the removal - referencesDeleted = true - // Remove the element at index i - // TODO needs more testing - copy(techAsset.DataAssetsStored[i:], techAsset.DataAssetsStored[i+1:]) // Shift a[i+1:] left one index. - techAsset.DataAssetsStored[len(techAsset.DataAssetsStored)-1] = "" // Erase last element (write zero value). - techAsset.DataAssetsStored = techAsset.DataAssetsStored[:len(techAsset.DataAssetsStored)-1] // Truncate slice. - } - } - } - if techAsset.CommunicationLinks != nil { - for title, commLink := range techAsset.CommunicationLinks { - for i, dataAssetSent := range commLink.DataAssetsSent { - referencedAsset := fmt.Sprintf("%v", dataAssetSent) - if referencedAsset == dataAsset.ID { // apply the removal - referencesDeleted = true - // Remove the element at index i - // TODO needs more testing - copy(techAsset.CommunicationLinks[title].DataAssetsSent[i:], techAsset.CommunicationLinks[title].DataAssetsSent[i+1:]) // Shift a[i+1:] left one index. 
- techAsset.CommunicationLinks[title].DataAssetsSent[len(techAsset.CommunicationLinks[title].DataAssetsSent)-1] = "" // Erase last element (write zero value). - x := techAsset.CommunicationLinks[title] - x.DataAssetsSent = techAsset.CommunicationLinks[title].DataAssetsSent[:len(techAsset.CommunicationLinks[title].DataAssetsSent)-1] // Truncate slice. - techAsset.CommunicationLinks[title] = x - } - } - for i, dataAssetReceived := range commLink.DataAssetsReceived { - referencedAsset := fmt.Sprintf("%v", dataAssetReceived) - if referencedAsset == dataAsset.ID { // apply the removal - referencesDeleted = true - // Remove the element at index i - // TODO needs more testing - copy(techAsset.CommunicationLinks[title].DataAssetsReceived[i:], techAsset.CommunicationLinks[title].DataAssetsReceived[i+1:]) // Shift a[i+1:] left one index. - techAsset.CommunicationLinks[title].DataAssetsReceived[len(techAsset.CommunicationLinks[title].DataAssetsReceived)-1] = "" // Erase last element (write zero value). - x := techAsset.CommunicationLinks[title] - x.DataAssetsReceived = techAsset.CommunicationLinks[title].DataAssetsReceived[:len(techAsset.CommunicationLinks[title].DataAssetsReceived)-1] // Truncate slice. 
- techAsset.CommunicationLinks[title] = x - } - } - } - } - } - for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { - if individualRiskCat.RisksIdentified != nil { - for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { - if individualRiskInstance.MostRelevantDataAsset == dataAsset.ID { // apply the removal - referencesDeleted = true - x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] - x.MostRelevantDataAsset = "" // TODO needs more testing - modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x - } - } - } - } - // remove it itself - delete(modelInput.DataAssets, title) - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Data Asset Deletion") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "data asset deleted", - "id": dataAsset.ID, - "references_deleted": referencesDeleted, // in order to signal to clients, that other model parts might've been deleted as well - }) - } - return - } - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "data asset not found", - }) - } -} - -type payloadSharedRuntime struct { - Title string `yaml:"title" json:"title"` - Id string `yaml:"id" json:"id"` - Description string `yaml:"description" json:"description"` - Tags []string `yaml:"tags" json:"tags"` - TechnicalAssetsRunning []string `yaml:"technical_assets_running" json:"technical_assets_running"` -} - -func (context *Context) setSharedRuntime(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - // yes, here keyed by title in YAML for better readability in the YAML file 
itself - for title, sharedRuntime := range modelInput.SharedRuntimes { - if sharedRuntime.ID == ginContext.Param("shared-runtime-id") { - payload := payloadSharedRuntime{} - err := ginContext.BindJSON(&payload) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - sharedRuntimeInput, ok := populateSharedRuntime(ginContext, payload) - if !ok { - return - } - // in order to also update the title, remove the shared runtime from the map and re-insert it (with new key) - delete(modelInput.SharedRuntimes, title) - modelInput.SharedRuntimes[payload.Title] = sharedRuntimeInput - idChanged := sharedRuntimeInput.ID != sharedRuntime.ID - if idChanged { // ID-CHANGE-PROPAGATION - for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { - if individualRiskCat.RisksIdentified != nil { - for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { - if individualRiskInstance.MostRelevantSharedRuntime == sharedRuntime.ID { // apply the ID change - x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] - x.MostRelevantSharedRuntime = sharedRuntimeInput.ID // TODO needs more testing - modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x - } - } - } - } - } - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Shared Runtime Update") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "shared runtime updated", - "id": sharedRuntimeInput.ID, - "id_changed": idChanged, // in order to signal to clients, that other model parts might've received updates as well and should be reloaded - }) - } - return - } - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "shared runtime not found", - }) - } -} - -type payloadDataAsset struct { - Title string `yaml:"title" json:"title"` 
- Id string `yaml:"id" json:"id"` - Description string `yaml:"description" json:"description"` - Usage string `yaml:"usage" json:"usage"` - Tags []string `yaml:"tags" json:"tags"` - Origin string `yaml:"origin" json:"origin"` - Owner string `yaml:"owner" json:"owner"` - Quantity string `yaml:"quantity" json:"quantity"` - Confidentiality string `yaml:"confidentiality" json:"confidentiality"` - Integrity string `yaml:"integrity" json:"integrity"` - Availability string `yaml:"availability" json:"availability"` - JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` -} - -func (context *Context) setDataAsset(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, dataAsset := range modelInput.DataAssets { - if dataAsset.ID == ginContext.Param("data-asset-id") { - payload := payloadDataAsset{} - err := ginContext.BindJSON(&payload) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - dataAssetInput, ok := context.populateDataAsset(ginContext, payload) - if !ok { - return - } - // in order to also update the title, remove the asset from the map and re-insert it (with new key) - delete(modelInput.DataAssets, title) - modelInput.DataAssets[payload.Title] = dataAssetInput - idChanged := dataAssetInput.ID != dataAsset.ID - if idChanged { // ID-CHANGE-PROPAGATION - // also update all usages to point to the new (changed) ID !! 
- for techAssetTitle, techAsset := range modelInput.TechnicalAssets { - if techAsset.DataAssetsProcessed != nil { - for i, parsedChangeCandidateAsset := range techAsset.DataAssetsProcessed { - referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) - if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.TechnicalAssets[techAssetTitle].DataAssetsProcessed[i] = dataAssetInput.ID - } - } - } - if techAsset.DataAssetsStored != nil { - for i, parsedChangeCandidateAsset := range techAsset.DataAssetsStored { - referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset) - if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.TechnicalAssets[techAssetTitle].DataAssetsStored[i] = dataAssetInput.ID - } - } - } - if techAsset.CommunicationLinks != nil { - for title, commLink := range techAsset.CommunicationLinks { - for i, dataAssetSent := range commLink.DataAssetsSent { - referencedAsset := fmt.Sprintf("%v", dataAssetSent) - if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.TechnicalAssets[techAssetTitle].CommunicationLinks[title].DataAssetsSent[i] = dataAssetInput.ID - } - } - for i, dataAssetReceived := range commLink.DataAssetsReceived { - referencedAsset := fmt.Sprintf("%v", dataAssetReceived) - if referencedAsset == dataAsset.ID { // apply the ID change - modelInput.TechnicalAssets[techAssetTitle].CommunicationLinks[title].DataAssetsReceived[i] = dataAssetInput.ID - } - } - } - } - } - for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { - if individualRiskCat.RisksIdentified != nil { - for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { - if individualRiskInstance.MostRelevantDataAsset == dataAsset.ID { // apply the ID change - x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] - x.MostRelevantDataAsset = dataAssetInput.ID // TODO needs more testing - 
modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x - } - } - } - } - } - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Data Asset Update") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "data asset updated", - "id": dataAssetInput.ID, - "id_changed": idChanged, // in order to signal to clients, that other model parts might've received updates as well and should be reloaded - }) - } - return - } - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "data asset not found", - }) - } -} - -func (context *Context) getSharedRuntime(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, sharedRuntime := range modelInput.SharedRuntimes { - if sharedRuntime.ID == ginContext.Param("shared-runtime-id") { - ginContext.JSON(http.StatusOK, gin.H{ - title: sharedRuntime, - }) - return - } - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "shared runtime not found", - }) - } -} - -func (context *Context) createNewSharedRuntime(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - payload := payloadSharedRuntime{} - err := ginContext.BindJSON(&payload) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - // yes, here keyed by title in YAML for 
better readability in the YAML file itself - if _, exists := modelInput.SharedRuntimes[payload.Title]; exists { - ginContext.JSON(http.StatusConflict, gin.H{ - "error": "shared runtime with this title already exists", - }) - return - } - // but later it will in memory keyed by its "id", so do this uniqueness check also - for _, sharedRuntime := range modelInput.SharedRuntimes { - if sharedRuntime.ID == payload.Id { - ginContext.JSON(http.StatusConflict, gin.H{ - "error": "shared runtime with this id already exists", - }) - return - } - } - if !context.checkTechnicalAssetsExisting(modelInput, payload.TechnicalAssetsRunning) { - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "referenced technical asset does not exist", - }) - return - } - sharedRuntimeInput, ok := populateSharedRuntime(ginContext, payload) - if !ok { - return - } - if modelInput.SharedRuntimes == nil { - modelInput.SharedRuntimes = make(map[string]input.InputSharedRuntime) - } - modelInput.SharedRuntimes[payload.Title] = sharedRuntimeInput - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Shared Runtime Creation") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "shared runtime created", - "id": sharedRuntimeInput.ID, - }) - } - } -} - -func (context *Context) checkTechnicalAssetsExisting(modelInput input.ModelInput, techAssetIDs []string) (ok bool) { - for _, techAssetID := range techAssetIDs { - exists := false - for _, val := range modelInput.TechnicalAssets { - if val.ID == techAssetID { - exists = true - break - } - } - if !exists { - return false - } - } - return true -} - -func populateSharedRuntime(_ *gin.Context, payload payloadSharedRuntime) (sharedRuntimeInput input.InputSharedRuntime, ok bool) { - sharedRuntimeInput = input.InputSharedRuntime{ - ID: payload.Id, - Description: payload.Description, - Tags: lowerCaseAndTrim(payload.Tags), - TechnicalAssetsRunning: payload.TechnicalAssetsRunning, - } - return sharedRuntimeInput, true -} - -func 
(context *Context) deleteSharedRuntime(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - referencesDeleted := false - // yes, here keyed by title in YAML for better readability in the YAML file itself - for title, sharedRuntime := range modelInput.SharedRuntimes { - if sharedRuntime.ID == ginContext.Param("shared-runtime-id") { - // also remove all usages of this shared runtime !! - for individualRiskCatTitle, individualRiskCat := range modelInput.IndividualRiskCategories { - if individualRiskCat.RisksIdentified != nil { - for individualRiskInstanceTitle, individualRiskInstance := range individualRiskCat.RisksIdentified { - if individualRiskInstance.MostRelevantSharedRuntime == sharedRuntime.ID { // apply the removal - referencesDeleted = true - x := modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] - x.MostRelevantSharedRuntime = "" // TODO needs more testing - modelInput.IndividualRiskCategories[individualRiskCatTitle].RisksIdentified[individualRiskInstanceTitle] = x - } - } - } - } - // remove it itself - delete(modelInput.SharedRuntimes, title) - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Shared Runtime Deletion") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "shared runtime deleted", - "id": sharedRuntime.ID, - "references_deleted": referencesDeleted, // in order to signal to clients, that other model parts might've been deleted as well - }) - } - return - } - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "shared runtime not found", - }) - } -} - -func (context *Context) createNewDataAsset(ginContext *gin.Context) { - folderNameOfKey, key, ok := 
context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - payload := payloadDataAsset{} - err := ginContext.BindJSON(&payload) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - // yes, here keyed by title in YAML for better readability in the YAML file itself - if _, exists := modelInput.DataAssets[payload.Title]; exists { - ginContext.JSON(http.StatusConflict, gin.H{ - "error": "data asset with this title already exists", - }) - return - } - // but later it will in memory keyed by its "id", so do this uniqueness check also - for _, asset := range modelInput.DataAssets { - if asset.ID == payload.Id { - ginContext.JSON(http.StatusConflict, gin.H{ - "error": "data asset with this id already exists", - }) - return - } - } - dataAssetInput, ok := context.populateDataAsset(ginContext, payload) - if !ok { - return - } - if modelInput.DataAssets == nil { - modelInput.DataAssets = make(map[string]input.InputDataAsset) - } - modelInput.DataAssets[payload.Title] = dataAssetInput - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Data Asset Creation") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "data asset created", - "id": dataAssetInput.ID, - }) - } - } -} - -func (context *Context) populateDataAsset(ginContext *gin.Context, payload payloadDataAsset) (dataAssetInput input.InputDataAsset, ok bool) { - usage, err := types.ParseUsage(payload.Usage) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return dataAssetInput, false - } - quantity, err := types.ParseQuantity(payload.Quantity) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return dataAssetInput, false - } - confidentiality, err := 
types.ParseConfidentiality(payload.Confidentiality) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return dataAssetInput, false - } - integrity, err := types.ParseCriticality(payload.Integrity) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return dataAssetInput, false - } - availability, err := types.ParseCriticality(payload.Availability) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return dataAssetInput, false - } - dataAssetInput = input.InputDataAsset{ - ID: payload.Id, - Description: payload.Description, - Usage: usage.String(), - Tags: lowerCaseAndTrim(payload.Tags), - Origin: payload.Origin, - Owner: payload.Owner, - Quantity: quantity.String(), - Confidentiality: confidentiality.String(), - Integrity: integrity.String(), - Availability: availability.String(), - JustificationCiaRating: payload.JustificationCiaRating, - } - return dataAssetInput, true -} - -func (context *Context) getDataAssets(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, aModel.DataAssets) - } -} - -func (context *Context) getTrustBoundaries(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, aModel.TrustBoundaries) - } -} - -func (context *Context) getSharedRuntimes(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer 
context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, aModel.SharedRuntimes) - } -} - -func (context *Context) arrayOfStringValues(values []types.TypeEnum) []string { - result := make([]string, 0) - for _, value := range values { - result = append(result, value.String()) - } - return result -} - -func (context *Context) getModel(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - _, yamlText, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - tmpResultFile, err := os.CreateTemp(*context.tempFolder, "threagile-*.yaml") - checkErr(err) - err = os.WriteFile(tmpResultFile.Name(), []byte(yamlText), 0400) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to stream model file", - }) - return - } - defer func() { _ = os.Remove(tmpResultFile.Name()) }() - ginContext.FileAttachment(tmpResultFile.Name(), context.inputFile) - } -} - -type payloadSecurityRequirements map[string]string - -func (context *Context) setSecurityRequirements(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - payload := payloadSecurityRequirements{} - err := ginContext.BindJSON(&payload) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - modelInput.SecurityRequirements = payload - ok = context.writeModel(ginContext, key, folderNameOfKey, 
&modelInput, "Security Requirements Update") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "model updated", - }) - } - } -} - -func (context *Context) getSecurityRequirements(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, aModel.SecurityRequirements) - } -} - -type payloadAbuseCases map[string]string - -func (context *Context) setAbuseCases(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - payload := payloadAbuseCases{} - err := ginContext.BindJSON(&payload) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - modelInput.AbuseCases = payload - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Abuse Cases Update") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "model updated", - }) - } - } -} - -func (context *Context) getAbuseCases(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, aModel.AbuseCases) - } -} - -type payloadOverview struct { - ManagementSummaryComment string `yaml:"management_summary_comment" json:"management_summary_comment"` - 
BusinessCriticality string `yaml:"business_criticality" json:"business_criticality"` - BusinessOverview input.Overview `yaml:"business_overview" json:"business_overview"` - TechnicalOverview input.Overview `yaml:"technical_overview" json:"technical_overview"` -} - -func (context *Context) setOverview(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - payload := payloadOverview{} - err := ginContext.BindJSON(&payload) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - criticality, err := types.ParseCriticality(payload.BusinessCriticality) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return - } - modelInput.ManagementSummaryComment = payload.ManagementSummaryComment - modelInput.BusinessCriticality = criticality.String() - modelInput.BusinessOverview.Description = payload.BusinessOverview.Description - modelInput.BusinessOverview.Images = payload.BusinessOverview.Images - modelInput.TechnicalOverview.Description = payload.TechnicalOverview.Description - modelInput.TechnicalOverview.Images = payload.TechnicalOverview.Images - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Overview Update") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "model updated", - }) - } - } -} - -func handleErrorInServiceCall(err error, ginContext *gin.Context) { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": strings.TrimSpace(err.Error()), - }) -} - -func (context *Context) getOverview(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - 
context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "management_summary_comment": aModel.ManagementSummaryComment, - "business_criticality": aModel.BusinessCriticality, - "business_overview": aModel.BusinessOverview, - "technical_overview": aModel.TechnicalOverview, - }) - } -} - -type payloadCover struct { - Title string `yaml:"title" json:"title"` - Date time.Time `yaml:"date" json:"date"` - Author input.Author `yaml:"author" json:"author"` -} - -func (context *Context) setCover(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - modelInput, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - payload := payloadCover{} - err := ginContext.BindJSON(&payload) - if err != nil { - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": "unable to parse request payload", - }) - return - } - modelInput.Title = payload.Title - if !payload.Date.IsZero() { - modelInput.Date = payload.Date.Format("2006-01-02") - } - modelInput.Author.Name = payload.Author.Name - modelInput.Author.Homepage = payload.Author.Homepage - ok = context.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Cover Update") - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "message": "model updated", - }) - } - } + +func (context *Context) stubFile(ginContext *gin.Context) { + stub, err := os.ReadFile(filepath.Join(*context.appFolder, "threagile-stub-model.yaml")) + checkErr(err) + ginContext.Data(http.StatusOK, gin.MIMEYAML, context.addSupportedTags(stub)) // TODO use also the MIMEYAML way of serving YAML in model export? 
} -func (context *Context) getCover(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return +func (context *Context) addSupportedTags(input []byte) []byte { + // add distinct tags as "tags_available" + supportedTags := make(map[string]bool) + for _, customRule := range context.customRiskRules { + for _, tag := range customRule.Tags { + supportedTags[strings.ToLower(tag)] = true + } } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - aModel, _, ok := context.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) - if ok { - ginContext.JSON(http.StatusOK, gin.H{ - "title": aModel.Title, - "date": aModel.Date, - "author": aModel.Author, - }) + + for _, rule := range context.builtinRiskRules { + for _, tag := range rule.SupportedTags() { + supportedTags[strings.ToLower(tag)] = true + } } -} -// creates a sub-folder (named by a new UUID) inside the token folder -func (context *Context) createNewModel(ginContext *gin.Context) { - folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return + tags := make([]string, 0, len(supportedTags)) + for t := range supportedTags { + tags = append(tags, t) } - ok = context.checkObjectCreationThrottler(ginContext, "MODEL") - if !ok { - return + if len(tags) == 0 { + return input } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - - aUuid := uuid.New().String() - err := os.Mkdir(folderNameForModel(folderNameOfKey, aUuid), 0700) - if err != nil { - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to create model", - }) - return + sort.Strings(tags) + if *context.verbose { + fmt.Print("Supported tags of all risk rules: ") + for i, tag := range tags { + if i > 0 { + fmt.Print(", ") + } + fmt.Print(tag) + } + fmt.Println() } - - aYaml := `title: New Threat Model -threagile_version: ` + docs.ThreagileVersion + ` -author: - name: "" - 
homepage: "" -date: -business_overview: - description: "" - images: [] -technical_overview: - description: "" - images: [] -business_criticality: "" -management_summary_comment: "" -questions: {} -abuse_cases: {} -security_requirements: {} -tags_available: [] -data_assets: {} -technical_assets: {} -trust_boundaries: {} -shared_runtimes: {} -individual_risk_categories: {} -risk_tracking: {} -diagram_tweak_nodesep: "" -diagram_tweak_ranksep: "" -diagram_tweak_edge_layout: "" -diagram_tweak_suppress_edge_labels: false -diagram_tweak_invisible_connections_between_assets: [] -diagram_tweak_same_rank_assets: []` - - ok = context.writeModelYAML(ginContext, aYaml, key, folderNameForModel(folderNameOfKey, aUuid), "New Model Creation", true) - if ok { - ginContext.JSON(http.StatusCreated, gin.H{ - "message": "model created", - "id": aUuid, - }) + replacement := "tags_available:" + for _, tag := range tags { + replacement += "\n - " + tag } + return []byte(strings.Replace(string(input), "tags_available:", replacement, 1)) } -type payloadModels struct { - ID string `yaml:"id" json:"id"` - Title string `yaml:"title" json:"title"` - TimestampCreated time.Time `yaml:"timestamp_created" json:"timestamp_created"` - TimestampModified time.Time `yaml:"timestamp_modified" json:"timestamp_modified"` -} - -func (context *Context) listModels(ginContext *gin.Context) { // TODO currently returns error when any model is no longer valid in syntax, so eventually have some fallback to not just bark on an invalid model... 
- folderNameOfKey, key, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) +const keySize = 32 - result := make([]payloadModels, 0) - modelFolders, err := os.ReadDir(folderNameOfKey) +func (context *Context) stats(ginContext *gin.Context) { + keyCount, modelCount := 0, 0 + keyFolders, err := os.ReadDir(filepath.Join(*context.serverFolder, context.keyDir)) if err != nil { log.Println(err) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to collect stats", }) return } - for _, dirEntry := range modelFolders { - if dirEntry.IsDir() { - modelStat, err := os.Stat(filepath.Join(folderNameOfKey, dirEntry.Name(), context.inputFile)) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "unable to list model", + for _, keyFolder := range keyFolders { + if len(keyFolder.Name()) == 128 { // it's a sha512 token hash probably, so count it as token folder for the stats + keyCount++ + if keyFolder.Name() != filepath.Clean(keyFolder.Name()) { + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "weird file path", }) return } - aModel, _, ok := context.readModel(ginContext, dirEntry.Name(), key, folderNameOfKey) - if !ok { - return - } - fileInfo, err := dirEntry.Info() + modelFolders, err := os.ReadDir(filepath.Join(*context.serverFolder, context.keyDir, keyFolder.Name())) if err != nil { log.Println(err) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "unable to get file info", + ginContext.JSON(http.StatusInternalServerError, gin.H{ + "error": "unable to collect stats", }) return } - result = append(result, payloadModels{ - ID: dirEntry.Name(), - Title: aModel.Title, - TimestampCreated: fileInfo.ModTime(), - TimestampModified: modelStat.ModTime(), - }) + for _, modelFolder := range modelFolders { + if 
len(modelFolder.Name()) == 36 { // it's a uuid model folder probably, so count it as model folder for the stats + modelCount++ + } + } } } - ginContext.JSON(http.StatusOK, result) + // TODO collect and deliver more stats (old model count?) and health info + ginContext.JSON(http.StatusOK, gin.H{ + "key_count": keyCount, + "model_count": modelCount, + "success_count": context.successCount, + "error_count": context.errorCount, + }) } -func (context *Context) deleteModel(ginContext *gin.Context) { - folderNameOfKey, _, ok := context.checkTokenToFolderName(ginContext) - if !ok { - return - } - context.lockFolder(folderNameOfKey) - defer context.unlockFolder(folderNameOfKey) - folder, ok := context.checkModelFolder(ginContext, ginContext.Param("model-id"), folderNameOfKey) - if ok { - if folder != filepath.Clean(folder) { - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "model-id is weird", - }) - return +type payloadDataAsset struct { + Title string `yaml:"title" json:"title"` + Id string `yaml:"id" json:"id"` + Description string `yaml:"description" json:"description"` + Usage string `yaml:"usage" json:"usage"` + Tags []string `yaml:"tags" json:"tags"` + Origin string `yaml:"origin" json:"origin"` + Owner string `yaml:"owner" json:"owner"` + Quantity string `yaml:"quantity" json:"quantity"` + Confidentiality string `yaml:"confidentiality" json:"confidentiality"` + Integrity string `yaml:"integrity" json:"integrity"` + Availability string `yaml:"availability" json:"availability"` + JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` +} + +func (context *Context) checkTechnicalAssetsExisting(modelInput input.ModelInput, techAssetIDs []string) (ok bool) { + for _, techAssetID := range techAssetIDs { + exists := false + for _, val := range modelInput.TechnicalAssets { + if val.ID == techAssetID { + exists = true + break + } } - err := os.RemoveAll(folder) - if err != nil { - ginContext.JSON(http.StatusNotFound, 
gin.H{ - "error": "model not found", - }) - return + if !exists { + return false } - ginContext.JSON(http.StatusOK, gin.H{ - "message": "model deleted", - }) } + return true +} + +func (context *Context) populateDataAsset(ginContext *gin.Context, payload payloadDataAsset) (dataAssetInput input.InputDataAsset, ok bool) { + usage, err := types.ParseUsage(payload.Usage) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return dataAssetInput, false + } + quantity, err := types.ParseQuantity(payload.Quantity) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return dataAssetInput, false + } + confidentiality, err := types.ParseConfidentiality(payload.Confidentiality) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return dataAssetInput, false + } + integrity, err := types.ParseCriticality(payload.Integrity) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return dataAssetInput, false + } + availability, err := types.ParseCriticality(payload.Availability) + if err != nil { + handleErrorInServiceCall(err, ginContext) + return dataAssetInput, false + } + dataAssetInput = input.InputDataAsset{ + ID: payload.Id, + Description: payload.Description, + Usage: usage.String(), + Tags: lowerCaseAndTrim(payload.Tags), + Origin: payload.Origin, + Owner: payload.Owner, + Quantity: quantity.String(), + Confidentiality: confidentiality.String(), + Integrity: integrity.String(), + Availability: availability.String(), + JustificationCiaRating: payload.JustificationCiaRating, + } + return dataAssetInput, true +} + +func handleErrorInServiceCall(err error, ginContext *gin.Context) { + log.Println(err) + ginContext.JSON(http.StatusBadRequest, gin.H{ + "error": strings.TrimSpace(err.Error()), + }) } func (context *Context) checkModelFolder(ginContext *gin.Context, modelUUID string, folderNameOfKey string) (modelFolder string, ok bool) { @@ -2794,25 +1506,6 @@ func (context *Context) checkObjectCreationThrottler(ginContext 
*gin.Context, ty return false } -var locksByFolderName = make(map[string]*sync.Mutex) - -func (context *Context) lockFolder(folderName string) { - context.globalLock.Lock() - defer context.globalLock.Unlock() - _, exists := locksByFolderName[folderName] - if !exists { - locksByFolderName[folderName] = &sync.Mutex{} - } - locksByFolderName[folderName].Lock() -} - -func (context *Context) unlockFolder(folderName string) { - if _, exists := locksByFolderName[folderName]; exists { - locksByFolderName[folderName].Unlock() - delete(locksByFolderName, folderName) - } -} - func (context *Context) folderNameFromKey(key []byte) string { sha512Hash := hashSHA256(key) return filepath.Join(*context.serverFolder, context.keyDir, sha512Hash) @@ -2824,130 +1517,6 @@ func hashSHA256(key []byte) string { return hex.EncodeToString(hasher.Sum(nil)) } -func (context *Context) createKey(ginContext *gin.Context) { - ok := context.checkObjectCreationThrottler(ginContext, "KEY") - if !ok { - return - } - context.globalLock.Lock() - defer context.globalLock.Unlock() - - keyBytesArr := make([]byte, keySize) - n, err := rand.Read(keyBytesArr[:]) - if n != keySize || err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to create key", - }) - return - } - err = os.MkdirAll(context.folderNameFromKey(keyBytesArr), 0700) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to create key", - }) - return - } - ginContext.JSON(http.StatusCreated, gin.H{ - "key": base64.RawURLEncoding.EncodeToString(keyBytesArr[:]), - }) -} - -func (context *Context) checkTokenToFolderName(ginContext *gin.Context) (folderNameOfKey string, key []byte, ok bool) { - header := tokenHeader{} - if err := ginContext.ShouldBindHeader(&header); err != nil { - log.Println(err) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", - }) - return folderNameOfKey, key, false - } - token, err := 
base64.RawURLEncoding.DecodeString(strings.TrimSpace(header.Token)) - if len(token) == 0 || err != nil { - if err != nil { - log.Println(err) - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", - }) - return folderNameOfKey, key, false - } - context.globalLock.Lock() - defer context.globalLock.Unlock() - housekeepingTokenMaps() // to remove timed-out ones - tokenHash := hashSHA256(token) - if timeoutStruct, exists := mapTokenHashToTimeoutStruct[tokenHash]; exists { - // re-create the key from token - key := xor(token, timeoutStruct.xorRand) - folderNameOfKey := context.folderNameFromKey(key) - if _, err := os.Stat(folderNameOfKey); os.IsNotExist(err) { - log.Println(err) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", - }) - return folderNameOfKey, key, false - } - timeoutStruct.lastAccessedNanoTime = time.Now().UnixNano() - return folderNameOfKey, key, true - } else { - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "token not found", - }) - return folderNameOfKey, key, false - } -} - -func xor(key []byte, xor []byte) []byte { - if len(key) != len(xor) { - panic(errors.New("key length not matching XOR length")) - } - result := make([]byte, len(xor)) - for i, b := range key { - result[i] = b ^ xor[i] - } - return result -} - -type timeoutStruct struct { - xorRand []byte - createdNanoTime, lastAccessedNanoTime int64 -} - -var mapTokenHashToTimeoutStruct = make(map[string]timeoutStruct) - -const extremeShortTimeoutsForTesting = false - -func housekeepingTokenMaps() { - now := time.Now().UnixNano() - for tokenHash, val := range mapTokenHashToTimeoutStruct { - if extremeShortTimeoutsForTesting { - // remove all elements older than 1 minute (= 60000000000 ns) soft - // and all elements older than 3 minutes (= 180000000000 ns) hard - if now-val.lastAccessedNanoTime > 60000000000 || now-val.createdNanoTime > 180000000000 { - fmt.Println("About to remove a token hash from maps") - 
deleteTokenHashFromMaps(tokenHash) - } - } else { - // remove all elements older than 30 minutes (= 1800000000000 ns) soft - // and all elements older than 10 hours (= 36000000000000 ns) hard - if now-val.lastAccessedNanoTime > 1800000000000 || now-val.createdNanoTime > 36000000000000 { - deleteTokenHashFromMaps(tokenHash) - } - } - } -} - -func deleteTokenHashFromMaps(tokenHash string) { - delete(mapTokenHashToTimeoutStruct, tokenHash) - for folderName, check := range mapFolderNameToTokenHash { - if check == tokenHash { - delete(mapFolderNameToTokenHash, folderName) - break - } - } -} - type keyHeader struct { Key string `header:"key"` } @@ -2982,26 +1551,6 @@ func (context *Context) checkKeyToFolderName(ginContext *gin.Context) (folderNam return folderNameOfKey, key, true } -func (context *Context) deleteKey(ginContext *gin.Context) { - folderName, _, ok := context.checkKeyToFolderName(ginContext) - if !ok { - return - } - context.globalLock.Lock() - defer context.globalLock.Unlock() - err := os.RemoveAll(folderName) - if err != nil { - log.Println("error during key delete: " + err.Error()) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "key not found", - }) - return - } - ginContext.JSON(http.StatusOK, gin.H{ - "message": "key deleted", - }) -} - func (context *Context) userHomeDir() string { switch runtime.GOOS { case "windows": diff --git a/pkg/server/server.go b/pkg/server/server.go index 6c29bf46..374b8ab9 100644 --- a/pkg/server/server.go +++ b/pkg/server/server.go @@ -21,7 +21,7 @@ import ( ) type server struct { - configuration ServerConfiguration + configuration Configuration successCount int errorCount int globalLock sync.Mutex @@ -33,7 +33,7 @@ type server struct { locksByFolderName map[string]*sync.Mutex } -type ServerConfiguration struct { +type Configuration struct { ServerFolder string AppDir string BuildTimestamp string @@ -63,7 +63,7 @@ type ServerConfiguration struct { BackupHistoryFilesToKeep int } -func RunServer(serverConfiguration 
ServerConfiguration) { +func RunServer(serverConfiguration Configuration) { server := &server{ configuration: serverConfiguration, createdObjectsThrottler: make(map[string][]int64), From bbe2aea265bc9a1345a209646f94662cbd3933f7 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Wed, 27 Dec 2023 15:23:43 -0800 Subject: [PATCH 27/68] implemented config file loading and handling --- cmd/threagile/main.go | 5 +- internal/threagile/context.go | 385 ++++++++---------- internal/threagile/rules.go | 5 +- pkg/common/config.go | 274 ++++++++++++- pkg/common/consts.go | 23 +- .../common}/progress-reporter.go | 4 +- pkg/server/execute.go | 46 +-- pkg/server/model.go | 58 +-- pkg/server/report.go | 24 +- pkg/server/server.go | 185 ++++----- pkg/server/token.go | 2 +- 11 files changed, 586 insertions(+), 425 deletions(-) rename {internal/threagile => pkg/common}/progress-reporter.go (91%) diff --git a/cmd/threagile/main.go b/cmd/threagile/main.go index ec57ff7e..60a56315 100644 --- a/cmd/threagile/main.go +++ b/cmd/threagile/main.go @@ -17,10 +17,9 @@ func main() { // TODO: remove below as soon as refactoring is finished - everything will go through rootCmd.Execute // for now it's fine to have as frequently uncommented to see the actual behaviour - context := new(threagile.Context).Defaults(buildTimestamp) - context.ParseCommandlineArgs() + context := new(threagile.Context).Defaults(buildTimestamp).ParseCommandlineArgs() if context.ServerMode { - server.RunServer(server.Configuration{}) + server.RunServer(context.Config) } else { context.DoIt() } diff --git a/internal/threagile/context.go b/internal/threagile/context.go index c785d207..6811296c 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -54,10 +54,6 @@ import ( "github.com/threagile/threagile/pkg/security/types" ) -const ( - defaultGraphvizDPI, maxGraphvizDPI = 120, 240 -) - type Context struct { common.Config @@ -66,51 +62,17 @@ type Context struct { successCount int errorCount int 
drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks bool - buildTimestamp string - - modelInput input.ModelInput - // TODO: remove refactoring note below - // moved from types.go + modelInput input.ModelInput parsedModel types.ParsedModel - modelFilename, templateFilename *string - verbose, ignoreOrphanedRiskTracking *bool - generateDataFlowDiagram, generateDataAssetDiagram, generateRisksJSON, generateTechnicalAssetsJSON *bool - generateStatsJSON, generateRisksExcel, generateTagsExcel, generateReportPDF *bool - outputDir, raaPlugin, skipRiskRules, riskRulesPlugins, executeModelMacro *string - customRiskRules map[string]*types.CustomRisk - builtinRiskRules map[string]types.RiskRule - diagramDPI, serverPort *int - addModelTitle bool - keepDiagramSourceFiles bool - appFolder *string - binFolder *string - serverFolder *string - tempFolder *string - - backupHistoryFilesToKeep int - - tempDir string - binDir string - appDir string - dataDir string - keyDir string - reportFilename string - excelRisksFilename string - excelTagsFilename string - jsonRisksFilename string - jsonTechnicalAssetsFilename string - jsonStatsFilename string - dataFlowDiagramFilenameDOT string - dataFlowDiagramFilenamePNG string - dataAssetDiagramFilenameDOT string - dataAssetDiagramFilenamePNG string - graphvizDataFlowDiagramConversionCall string - graphvizDataAssetDiagramConversionCall string - inputFile string - - progressReporter ProgressReporter + generateDataFlowDiagram, generateDataAssetDiagram, generateRisksJSON, generateTechnicalAssetsJSON bool + generateStatsJSON, generateRisksExcel, generateTagsExcel, generateReportPDF bool + + customRiskRules map[string]*types.CustomRisk + builtinRiskRules map[string]types.RiskRule + + progressReporter common.ProgressReporter } func (context *Context) addToListOfSupportedTags(tags []string) { @@ -120,12 +82,12 @@ func (context *Context) addToListOfSupportedTags(tags []string) { } func (context *Context) checkRiskTracking() { - if 
*context.verbose { + if context.Config.Verbose { fmt.Println("Checking risk tracking") } for _, tracking := range context.parsedModel.RiskTracking { if _, ok := context.parsedModel.GeneratedRisksBySyntheticId[tracking.SyntheticRiskId]; !ok { - if *context.ignoreOrphanedRiskTracking { + if context.Config.IgnoreOrphanedRiskTracking { fmt.Println("Risk tracking references unknown risk (risk id not found): " + tracking.SyntheticRiskId) } else { panic(errors.New("Risk tracking references unknown risk (risk id not found) - you might want to use the option -ignore-orphaned-risk-tracking: " + tracking.SyntheticRiskId + @@ -150,12 +112,10 @@ func (context *Context) checkRiskTracking() { } } -func (context *Context) Init(buildTimestamp string) *Context { +func (context *Context) Init() *Context { *context = Context{ - keepDiagramSourceFiles: false, - addModelTitle: false, - buildTimestamp: buildTimestamp, - customRiskRules: make(map[string]*types.CustomRisk), + customRiskRules: make(map[string]*types.CustomRisk), + builtinRiskRules: make(map[string]types.RiskRule), drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks: true, } @@ -163,28 +123,8 @@ func (context *Context) Init(buildTimestamp string) *Context { } func (context *Context) Defaults(buildTimestamp string) *Context { - *context = *new(Context).Init(buildTimestamp) - context.backupHistoryFilesToKeep = 50 - context.tempDir = common.TempDir - context.binDir = common.BinDir - context.appDir = common.AppDir - context.dataDir = common.DataDir - context.keyDir = common.KeyDir - context.reportFilename = common.ReportFilename - context.excelRisksFilename = common.ExcelRisksFilename - context.excelTagsFilename = common.ExcelTagsFilename - context.jsonRisksFilename = common.JsonRisksFilename - context.jsonTechnicalAssetsFilename = common.JsonTechnicalAssetsFilename - context.jsonStatsFilename = common.JsonStatsFilename - context.dataFlowDiagramFilenameDOT = common.DataFlowDiagramFilenameDOT - 
context.dataFlowDiagramFilenamePNG = common.DataFlowDiagramFilenamePNG - context.dataAssetDiagramFilenameDOT = common.DataAssetDiagramFilenameDOT - context.dataAssetDiagramFilenamePNG = common.DataAssetDiagramFilenamePNG - context.graphvizDataFlowDiagramConversionCall = common.GraphvizDataFlowDiagramConversionCall - context.graphvizDataAssetDiagramConversionCall = common.GraphvizDataAssetDiagramConversionCall - context.inputFile = common.InputFile - - context.Config.Defaults() + *context = *new(Context).Init() + context.Config.Defaults(buildTimestamp) return context } @@ -210,13 +150,13 @@ func (context *Context) applyRisk(rule types.RiskRule, skippedRules *map[string] } func (context *Context) applyRiskGeneration() { - if *context.verbose { + if context.Config.Verbose { fmt.Println("Applying risk generation") } skippedRules := make(map[string]bool) - if len(*context.skipRiskRules) > 0 { - for _, id := range strings.Split(*context.skipRiskRules, ",") { + if len(context.Config.SkipRiskRules) > 0 { + for _, id := range strings.Split(context.Config.SkipRiskRules, ",") { skippedRules[id] = true } } @@ -229,12 +169,12 @@ func (context *Context) applyRiskGeneration() { for id, customRule := range context.customRiskRules { _, ok := skippedRules[id] if ok { - if *context.verbose { + if context.Config.Verbose { fmt.Println("Skipping custom risk rule:", id) } delete(skippedRules, id) } else { - if *context.verbose { + if context.Config.Verbose { fmt.Println("Executing custom risk rule:", id) } context.addToListOfSupportedTags(customRule.Tags) @@ -243,7 +183,7 @@ func (context *Context) applyRiskGeneration() { context.parsedModel.GeneratedRisksByCategory[customRule.Category.Id] = customRisks } - if *context.verbose { + if context.Config.Verbose { fmt.Println("Added custom risks:", len(customRisks)) } } @@ -269,7 +209,7 @@ func (context *Context) applyRiskGeneration() { } func (context *Context) writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.File { - 
if *context.verbose { + if context.Config.Verbose { fmt.Println("Writing data flow diagram input") } var dotContent strings.Builder @@ -312,7 +252,7 @@ func (context *Context) writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT strin rankdir = "LR" } modelTitle := "" - if context.addModelTitle { + if context.Config.AddModelTitle { modelTitle = `label="` + context.parsedModel.Title + `"` } dotContent.WriteString(` graph [ ` + modelTitle + ` @@ -555,27 +495,27 @@ func (context *Context) DoIt() { var err error if r := recover(); r != nil { err = r.(error) - if *context.verbose { + if context.Config.Verbose { log.Println(err) } _, _ = os.Stderr.WriteString(err.Error() + "\n") os.Exit(2) } }() - if len(*context.executeModelMacro) > 0 { + if len(context.Config.ExecuteModelMacro) > 0 { fmt.Println(docs.Logo + "\n\n" + docs.VersionText) } else { - if *context.verbose { - fmt.Println("Writing into output directory:", *context.outputDir) + if context.Config.Verbose { + fmt.Println("Writing into output directory:", context.Config.OutputFolder) } } - if *context.verbose { - fmt.Println("Parsing model:", *context.modelFilename) + if context.Config.Verbose { + fmt.Println("Parsing model:", context.Config.InputFile) } context.modelInput = *new(input.ModelInput).Defaults() - loadError := context.modelInput.Load(*context.modelFilename) + loadError := context.modelInput.Load(context.Config.InputFile) if loadError != nil { log.Fatal("Unable to load model yaml: ", loadError) } @@ -584,7 +524,7 @@ func (context *Context) DoIt() { for _, rule := range risks.GetBuiltInRiskRules() { context.builtinRiskRules[rule.Category().Id] = rule } - context.customRiskRules = types.LoadCustomRiskRules(strings.Split(*context.riskRulesPlugins, ","), context.progressReporter) + context.customRiskRules = types.LoadCustomRiskRules(context.Config.RiskRulesPlugins, context.progressReporter) parsedModel, parseError := model.ParseModel(&context.modelInput, context.builtinRiskRules, context.customRiskRules) if 
parseError != nil { @@ -599,9 +539,9 @@ func (context *Context) DoIt() { context.applyWildcardRiskTrackingEvaluation() context.checkRiskTracking() - if len(*context.executeModelMacro) > 0 { + if len(context.Config.ExecuteModelMacro) > 0 { var macroDetails macros.MacroDetails - switch *context.executeModelMacro { + switch context.Config.ExecuteModelMacro { case addbuildpipeline.GetMacroDetails().ID: macroDetails = addbuildpipeline.GetMacroDetails() case addvault.GetMacroDetails().ID: @@ -615,7 +555,7 @@ func (context *Context) DoIt() { case seedtags.GetMacroDetails().ID: macroDetails = seedtags.GetMacroDetails() default: - log.Fatal("Unknown model macro: ", *context.executeModelMacro) + log.Fatal("Unknown model macro: ", context.Config.ExecuteModelMacro) } fmt.Println("Executing model macro:", macroDetails.ID) fmt.Println() @@ -863,9 +803,9 @@ func (context *Context) DoIt() { } fmt.Println(message) fmt.Println() - backupFilename := *context.modelFilename + ".backup" + backupFilename := context.Config.InputFile + ".backup" fmt.Println("Creating backup model file:", backupFilename) // TODO add random files in /dev/shm space? 
- _, err = copyFile(*context.modelFilename, backupFilename) + _, err = copyFile(context.Config.InputFile, backupFilename) checkErr(err) fmt.Println("Updating model") yamlBytes, err := yaml.Marshal(context.modelInput) @@ -873,8 +813,8 @@ func (context *Context) DoIt() { /* yamlBytes = model.ReformatYAML(yamlBytes) */ - fmt.Println("Writing model file:", *context.modelFilename) - err = os.WriteFile(*context.modelFilename, yamlBytes, 0400) + fmt.Println("Writing model file:", context.Config.InputFile) + err = os.WriteFile(context.Config.InputFile, yamlBytes, 0400) checkErr(err) fmt.Println("Model file successfully updated") return @@ -885,86 +825,79 @@ func (context *Context) DoIt() { } } - renderDataFlowDiagram := *context.generateDataFlowDiagram - renderDataAssetDiagram := *context.generateDataAssetDiagram - renderRisksJSON := *context.generateRisksJSON - renderTechnicalAssetsJSON := *context.generateTechnicalAssetsJSON - renderStatsJSON := *context.generateStatsJSON - renderRisksExcel := *context.generateRisksExcel - renderTagsExcel := *context.generateTagsExcel - renderPDF := *context.generateReportPDF + renderPDF := context.generateReportPDF if renderPDF { // as the PDF report includes both diagrams - renderDataFlowDiagram, renderDataAssetDiagram = true, true + context.generateDataFlowDiagram, context.generateDataAssetDiagram = true, true } // Data-flow Diagram rendering - if renderDataFlowDiagram { - gvFile := filepath.Join(*context.outputDir, context.dataFlowDiagramFilenameDOT) - if !context.keepDiagramSourceFiles { - tmpFileGV, err := os.CreateTemp(*context.tempFolder, context.dataFlowDiagramFilenameDOT) + if context.generateDataFlowDiagram { + gvFile := filepath.Join(context.Config.OutputFolder, context.Config.DataFlowDiagramFilenameDOT) + if !context.Config.KeepDiagramSourceFiles { + tmpFileGV, err := os.CreateTemp(context.Config.TempFolder, context.Config.DataFlowDiagramFilenameDOT) checkErr(err) gvFile = tmpFileGV.Name() defer func() { _ = 
os.Remove(gvFile) }() } - dotFile := context.writeDataFlowDiagramGraphvizDOT(gvFile, *context.diagramDPI) - context.renderDataFlowDiagramGraphvizImage(dotFile, *context.outputDir) + dotFile := context.writeDataFlowDiagramGraphvizDOT(gvFile, context.Config.DiagramDPI) + context.generateDataFlowDiagramGraphvizImage(dotFile, context.Config.OutputFolder) } // Data Asset Diagram rendering - if renderDataAssetDiagram { - gvFile := filepath.Join(*context.outputDir, context.dataAssetDiagramFilenameDOT) - if !context.keepDiagramSourceFiles { - tmpFile, err := os.CreateTemp(*context.tempFolder, context.dataAssetDiagramFilenameDOT) + if context.generateDataAssetDiagram { + gvFile := filepath.Join(context.Config.OutputFolder, context.Config.DataAssetDiagramFilenameDOT) + if !context.Config.KeepDiagramSourceFiles { + tmpFile, err := os.CreateTemp(context.Config.TempFolder, context.Config.DataAssetDiagramFilenameDOT) checkErr(err) gvFile = tmpFile.Name() defer func() { _ = os.Remove(gvFile) }() } - dotFile := context.writeDataAssetDiagramGraphvizDOT(gvFile, *context.diagramDPI) - context.renderDataAssetDiagramGraphvizImage(dotFile, *context.outputDir) + dotFile := context.writeDataAssetDiagramGraphvizDOT(gvFile, context.Config.DiagramDPI) + context.generateDataAssetDiagramGraphvizImage(dotFile, context.Config.OutputFolder) } // risks as risks json - if renderRisksJSON { - if *context.verbose { + if context.generateRisksJSON { + if context.Config.Verbose { fmt.Println("Writing risks json") } - report.WriteRisksJSON(&context.parsedModel, filepath.Join(*context.outputDir, context.jsonRisksFilename)) + report.WriteRisksJSON(&context.parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.JsonRisksFilename)) } // technical assets json - if renderTechnicalAssetsJSON { - if *context.verbose { + if context.generateTechnicalAssetsJSON { + if context.Config.Verbose { fmt.Println("Writing technical assets json") } - report.WriteTechnicalAssetsJSON(&context.parsedModel, 
filepath.Join(*context.outputDir, context.jsonTechnicalAssetsFilename)) + report.WriteTechnicalAssetsJSON(&context.parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.JsonTechnicalAssetsFilename)) } // risks as risks json - if renderStatsJSON { - if *context.verbose { + if context.generateStatsJSON { + if context.Config.Verbose { fmt.Println("Writing stats json") } - report.WriteStatsJSON(&context.parsedModel, filepath.Join(*context.outputDir, context.jsonStatsFilename)) + report.WriteStatsJSON(&context.parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.JsonStatsFilename)) } // risks Excel - if renderRisksExcel { - if *context.verbose { + if context.generateRisksExcel { + if context.Config.Verbose { fmt.Println("Writing risks excel") } - report.WriteRisksExcelToFile(&context.parsedModel, filepath.Join(*context.outputDir, context.excelRisksFilename)) + report.WriteRisksExcelToFile(&context.parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.ExcelRisksFilename)) } // tags Excel - if renderTagsExcel { - if *context.verbose { + if context.generateTagsExcel { + if context.Config.Verbose { fmt.Println("Writing tags excel") } - report.WriteTagsExcelToFile(&context.parsedModel, filepath.Join(*context.outputDir, context.excelTagsFilename)) + report.WriteTagsExcelToFile(&context.parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.ExcelTagsFilename)) } if renderPDF { // hash the YAML input file - f, err := os.Open(*context.modelFilename) + f, err := os.Open(context.Config.InputFile) checkErr(err) defer func() { _ = f.Close() }() hasher := sha256.New() @@ -973,20 +906,20 @@ func (context *Context) DoIt() { } modelHash := hex.EncodeToString(hasher.Sum(nil)) // report PDF - if *context.verbose { + if context.Config.Verbose { fmt.Println("Writing report pdf") } - report.WriteReportPDF(filepath.Join(*context.outputDir, context.reportFilename), - filepath.Join(*context.appFolder, 
*context.templateFilename), - filepath.Join(*context.outputDir, context.dataFlowDiagramFilenamePNG), - filepath.Join(*context.outputDir, context.dataAssetDiagramFilenamePNG), - *context.modelFilename, - *context.skipRiskRules, - context.buildTimestamp, + report.WriteReportPDF(filepath.Join(context.Config.OutputFolder, context.Config.ReportFilename), + filepath.Join(context.Config.AppFolder, context.Config.TemplateFilename), + filepath.Join(context.Config.OutputFolder, context.Config.DataFlowDiagramFilenamePNG), + filepath.Join(context.Config.OutputFolder, context.Config.DataAssetDiagramFilenamePNG), + context.Config.InputFile, + context.Config.SkipRiskRules, + context.Config.BuildTimestamp, modelHash, introTextRAA, context.customRiskRules, - *context.tempFolder, + context.Config.TempFolder, &context.parsedModel) } } @@ -1028,19 +961,19 @@ func (context *Context) printBorder(length int, bold bool) { } func (context *Context) applyRAA() string { - if *context.verbose { - fmt.Println("Applying RAA calculation:", *context.raaPlugin) + if context.Config.Verbose { + fmt.Println("Applying RAA calculation:", context.Config.RAAPlugin) } - runner, loadError := new(run.Runner).Load(filepath.Join(*context.binFolder, *context.raaPlugin)) + runner, loadError := new(run.Runner).Load(filepath.Join(context.Config.BinFolder, context.Config.RAAPlugin)) if loadError != nil { - fmt.Printf("WARNING: raa %q not loaded: %v\n", *context.raaPlugin, loadError) + fmt.Printf("WARNING: raa %q not loaded: %v\n", context.Config.RAAPlugin, loadError) return "" } runError := runner.Run(context.parsedModel, &context.parsedModel) if runError != nil { - fmt.Printf("WARNING: raa %q not applied: %v\n", *context.raaPlugin, runError) + fmt.Printf("WARNING: raa %q not applied: %v\n", context.Config.RAAPlugin, runError) return "" } @@ -1048,13 +981,13 @@ func (context *Context) applyRAA() string { } func (context *Context) exampleFile(ginContext *gin.Context) { - example, err := 
os.ReadFile(filepath.Join(*context.appFolder, "threagile-example-model.yaml")) + example, err := os.ReadFile(filepath.Join(context.Config.AppFolder, "threagile-example-model.yaml")) checkErr(err) ginContext.Data(http.StatusOK, gin.MIMEYAML, example) } func (context *Context) stubFile(ginContext *gin.Context) { - stub, err := os.ReadFile(filepath.Join(*context.appFolder, "threagile-stub-model.yaml")) + stub, err := os.ReadFile(filepath.Join(context.Config.AppFolder, "threagile-stub-model.yaml")) checkErr(err) ginContext.Data(http.StatusOK, gin.MIMEYAML, context.addSupportedTags(stub)) // TODO use also the MIMEYAML way of serving YAML in model export? } @@ -1082,7 +1015,7 @@ func (context *Context) addSupportedTags(input []byte) []byte { return input } sort.Strings(tags) - if *context.verbose { + if context.Config.Verbose { fmt.Print("Supported tags of all risk rules: ") for i, tag := range tags { if i > 0 { @@ -1103,7 +1036,7 @@ const keySize = 32 func (context *Context) stats(ginContext *gin.Context) { keyCount, modelCount := 0, 0 - keyFolders, err := os.ReadDir(filepath.Join(*context.serverFolder, context.keyDir)) + keyFolders, err := os.ReadDir(filepath.Join(context.Config.ServerFolder, context.Config.KeyFolder)) if err != nil { log.Println(err) ginContext.JSON(http.StatusInternalServerError, gin.H{ @@ -1120,7 +1053,7 @@ func (context *Context) stats(ginContext *gin.Context) { }) return } - modelFolders, err := os.ReadDir(filepath.Join(*context.serverFolder, context.keyDir, keyFolder.Name())) + modelFolders, err := os.ReadDir(filepath.Join(context.Config.ServerFolder, context.Config.KeyFolder, keyFolder.Name())) if err != nil { log.Println(err) ginContext.JSON(http.StatusInternalServerError, gin.H{ @@ -1265,7 +1198,7 @@ func (context *Context) readModel(ginContext *gin.Context, modelUUID string, key return modelInputResult, yamlText, false } - fileBytes, err := os.ReadFile(filepath.Join(modelFolder, context.inputFile)) + fileBytes, err := 
os.ReadFile(filepath.Join(modelFolder, context.Config.InputFile)) if err != nil { log.Println(err) ginContext.JSON(http.StatusInternalServerError, gin.H{ @@ -1329,7 +1262,7 @@ func (context *Context) writeModel(ginContext *gin.Context, key []byte, folderNa } func (context *Context) writeModelYAML(ginContext *gin.Context, yaml string, key []byte, modelFolder string, changeReasonForHistory string, skipBackup bool) (ok bool) { - if *context.verbose { + if context.Config.Verbose { fmt.Println("about to write " + strconv.Itoa(len(yaml)) + " bytes of yaml into model folder: " + modelFolder) } var b bytes.Buffer @@ -1374,7 +1307,7 @@ func (context *Context) writeModelYAML(ginContext *gin.Context, yaml string, key return false } } - f, err := os.Create(filepath.Join(modelFolder, context.inputFile)) + f, err := os.Create(filepath.Join(modelFolder, context.Config.InputFile)) if err != nil { log.Println(err) ginContext.JSON(http.StatusInternalServerError, gin.H{ @@ -1396,7 +1329,7 @@ func (context *Context) backupModelToHistory(modelFolder string, changeReasonFor return err } } - inputModel, err := os.ReadFile(filepath.Join(modelFolder, context.inputFile)) + inputModel, err := os.ReadFile(filepath.Join(modelFolder, context.Config.InputFile)) if err != nil { return err } @@ -1410,8 +1343,8 @@ func (context *Context) backupModelToHistory(modelFolder string, changeReasonFor if err != nil { return err } - if len(files) > context.backupHistoryFilesToKeep { - requiredToDelete := len(files) - context.backupHistoryFilesToKeep + if len(files) > context.Config.BackupHistoryFilesToKeep { + requiredToDelete := len(files) - context.Config.BackupHistoryFilesToKeep sort.Slice(files, func(i, j int) bool { return files[i].Name() < files[j].Name() }) @@ -1508,7 +1441,7 @@ func (context *Context) checkObjectCreationThrottler(ginContext *gin.Context, ty func (context *Context) folderNameFromKey(key []byte) string { sha512Hash := hashSHA256(key) - return filepath.Join(*context.serverFolder, 
context.keyDir, sha512Hash) + return filepath.Join(context.Config.ServerFolder, context.Config.KeyFolder, sha512Hash) } func hashSHA256(key []byte) string { @@ -1565,7 +1498,7 @@ func (context *Context) userHomeDir() string { } } -func (context *Context) expandPath(path string) *string { +func (context *Context) expandPath(path string) string { home := context.userHomeDir() if strings.HasPrefix(path, "~") { path = strings.Replace(path, "~", home, 1) @@ -1575,39 +1508,47 @@ func (context *Context) expandPath(path string) *string { path = strings.Replace(path, "$HOME", home, -1) } - return &path + return path } -func (context *Context) ParseCommandlineArgs() { // folders - context.appFolder = flag.String("app-dir", common.AppDir, "app folder (default: "+common.AppDir+")") - context.serverFolder = flag.String("server-dir", common.DataDir, "base folder for server mode (default: "+common.DataDir+")") - context.tempFolder = flag.String("temp-dir", common.TempDir, "temporary folder location") - context.binFolder = flag.String("bin-dir", common.BinDir, "binary folder location") - context.outputDir = flag.String("output", ".", "output directory") +func (context *Context) ParseCommandlineArgs() *Context { + configFile := flag.String("config", "", "config file") + configError := context.Config.Load(*configFile) + if configError != nil { + fmt.Printf("WARNING: failed to load config file %q: %v\n", *configFile, configError) + } - // files - context.modelFilename = flag.String("model", common.InputFile, "input model yaml file") - context.raaPlugin = flag.String("raa-run", "raa_calc", "RAA calculation run file name") + // folders + flag.StringVar(&context.Config.AppFolder, "app-dir", common.AppDir, "app folder (default: "+common.AppDir+")") + flag.StringVar(&context.Config.ServerFolder, "server-dir", common.DataDir, "base folder for server mode (default: "+common.DataDir+")") + flag.StringVar(&context.Config.TempFolder, "temp-dir", common.TempDir, "temporary folder location") + 
flag.StringVar(&context.Config.BinFolder, "bin-dir", common.BinDir, "binary folder location") + flag.StringVar(&context.Config.OutputFolder, "output", ".", "output directory") - // flags - context.verbose = flag.Bool("verbose", false, "verbose output") - context.diagramDPI = flag.Int("diagram-dpi", defaultGraphvizDPI, "DPI used to render: maximum is "+strconv.Itoa(maxGraphvizDPI)+"") - context.skipRiskRules = flag.String("skip-risk-rules", "", "comma-separated list of risk rules (by their ID) to skip") - context.riskRulesPlugins = flag.String("custom-risk-rules-plugins", "", "comma-separated list of plugins file names with custom risk rules to load") - context.ignoreOrphanedRiskTracking = flag.Bool("ignore-orphaned-risk-tracking", false, "ignore orphaned risk tracking (just log them) not matching a concrete risk") + // files + flag.StringVar(&context.Config.InputFile, "model", common.InputFile, "input model yaml file") + flag.StringVar(&context.RAAPlugin, "raa-run", "raa_calc", "RAA calculation run file name") + + // flags / parameters + flag.BoolVar(&context.Config.Verbose, "verbose", false, "verbose output") + flag.IntVar(&context.Config.DiagramDPI, "diagram-dpi", context.Config.DiagramDPI, "DPI used to render: maximum is "+strconv.Itoa(context.Config.MaxGraphvizDPI)+"") + flag.StringVar(&context.Config.SkipRiskRules, "skip-risk-rules", "", "comma-separated list of risk rules (by their ID) to skip") + flag.BoolVar(&context.Config.IgnoreOrphanedRiskTracking, "ignore-orphaned-risk-tracking", false, "ignore orphaned risk tracking (just log them) not matching a concrete risk") + flag.IntVar(&context.Config.ServerPort, "server", 0, "start a server (instead of commandline execution) on the given port") + flag.StringVar(&context.Config.ExecuteModelMacro, "execute-model-macro", "", "Execute model macro (by ID)") + flag.StringVar(&context.Config.TemplateFilename, "background", "background.pdf", "background pdf file") + riskRulesPlugins := 
flag.String("custom-risk-rules-plugins", "", "comma-separated list of plugins file names with custom risk rules to load") + context.Config.RiskRulesPlugins = strings.Split(*riskRulesPlugins, ",") // commands - context.serverPort = flag.Int("server", 0, "start a server (instead of commandline execution) on the given port") - context.executeModelMacro = flag.String("execute-model-macro", "", "Execute model macro (by ID)") - context.templateFilename = flag.String("background", "background.pdf", "background pdf file") - context.generateDataFlowDiagram = flag.Bool("generate-data-flow-diagram", true, "generate data-flow diagram") - context.generateDataAssetDiagram = flag.Bool("generate-data-asset-diagram", true, "generate data asset diagram") - context.generateRisksJSON = flag.Bool("generate-risks-json", true, "generate risks json") - context.generateStatsJSON = flag.Bool("generate-stats-json", true, "generate stats json") - context.generateTechnicalAssetsJSON = flag.Bool("generate-technical-assets-json", true, "generate technical assets json") - context.generateRisksExcel = flag.Bool("generate-risks-excel", true, "generate risks excel") - context.generateTagsExcel = flag.Bool("generate-tags-excel", true, "generate tags excel") - context.generateReportPDF = flag.Bool("generate-report-pdf", true, "generate report pdf, including diagrams") + flag.BoolVar(&context.generateDataFlowDiagram, "generate-data-flow-diagram", true, "generate data-flow diagram") + flag.BoolVar(&context.generateDataAssetDiagram, "generate-data-asset-diagram", true, "generate data asset diagram") + flag.BoolVar(&context.generateRisksJSON, "generate-risks-json", true, "generate risks json") + flag.BoolVar(&context.generateStatsJSON, "generate-stats-json", true, "generate stats json") + flag.BoolVar(&context.generateTechnicalAssetsJSON, "generate-technical-assets-json", true, "generate technical assets json") + flag.BoolVar(&context.generateRisksExcel, "generate-risks-excel", true, "generate risks 
excel") + flag.BoolVar(&context.generateTagsExcel, "generate-tags-excel", true, "generate tags excel") + flag.BoolVar(&context.generateReportPDF, "generate-report-pdf", true, "generate report pdf, including diagrams") flag.Usage = func() { fmt.Println(docs.Logo + "\n\n" + docs.VersionText) @@ -1616,33 +1557,35 @@ func (context *Context) ParseCommandlineArgs() { // folders } flag.Parse() - context.modelFilename = context.expandPath(*context.modelFilename) - context.appFolder = context.expandPath(*context.appFolder) - context.serverFolder = context.expandPath(*context.serverFolder) - context.tempFolder = context.expandPath(*context.tempFolder) - context.binFolder = context.expandPath(*context.binFolder) - context.outputDir = context.expandPath(*context.outputDir) + context.Config.InputFile = context.expandPath(context.Config.InputFile) + context.Config.AppFolder = context.expandPath(context.Config.AppFolder) + context.Config.ServerFolder = context.expandPath(context.Config.ServerFolder) + context.Config.TempFolder = context.expandPath(context.Config.TempFolder) + context.Config.BinFolder = context.expandPath(context.Config.BinFolder) + context.Config.OutputFolder = context.expandPath(context.Config.OutputFolder) - if *context.diagramDPI < 20 { - *context.diagramDPI = 20 - } else if *context.diagramDPI > context.MaxGraphvizDPI { - *context.diagramDPI = 300 + if context.Config.DiagramDPI < common.MinGraphvizDPI { + context.Config.DiagramDPI = common.MinGraphvizDPI + } else if context.Config.DiagramDPI > common.MaxGraphvizDPI { + context.Config.DiagramDPI = common.MaxGraphvizDPI } - context.progressReporter = SilentProgressReporter{} - if *context.verbose { - context.progressReporter = CommandLineProgressReporter{} + context.progressReporter = common.SilentProgressReporter{} + if context.Config.Verbose { + context.progressReporter = common.CommandLineProgressReporter{} } - context.ServerMode = *context.serverPort > 0 + context.ServerMode = context.Config.ServerPort > 0 
+ + return context } func (context *Context) applyWildcardRiskTrackingEvaluation() { - if *context.verbose { + if context.Config.Verbose { fmt.Println("Executing risk tracking evaluation") } for syntheticRiskIdPattern, riskTracking := range context.getDeferredRiskTrackingDueToWildcardMatching() { - if *context.verbose { + if context.Config.Verbose { fmt.Println("Applying wildcard risk tracking for risk id: " + syntheticRiskIdPattern) } @@ -1663,7 +1606,7 @@ func (context *Context) applyWildcardRiskTrackingEvaluation() { } if !foundSome { - if *context.ignoreOrphanedRiskTracking { + if context.Config.IgnoreOrphanedRiskTracking { fmt.Println("WARNING: Wildcard risk tracking does not match any risk id: " + syntheticRiskIdPattern) } else { panic(errors.New("wildcard risk tracking does not match any risk id: " + syntheticRiskIdPattern)) @@ -1691,7 +1634,7 @@ func (context *Context) hasNotYetAnyDirectNonWildcardRiskTracking(syntheticRiskI } func (context *Context) writeDataAssetDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.File { - if *context.verbose { + if context.Config.Verbose { fmt.Println("Writing data asset diagram input") } var dotContent strings.Builder @@ -1865,16 +1808,16 @@ func (context *Context) makeDataAssetNode(dataAsset types.DataAsset) string { return " " + hash(dataAsset.Id) + ` [ label=<` + encode(dataAsset.Title) + `> penwidth="3.0" style="filled" fillcolor="` + color + `" color="` + color + "\"\n ]; " } -func (context *Context) renderDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string) { - if *context.verbose { +func (context *Context) generateDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string) { + if context.Config.Verbose { fmt.Println("Rendering data flow diagram input") } // tmp files - tmpFileDOT, err := os.CreateTemp(*context.tempFolder, "diagram-*-.gv") + tmpFileDOT, err := os.CreateTemp(context.Config.TempFolder, "diagram-*-.gv") checkErr(err) defer func() { _ = os.Remove(tmpFileDOT.Name()) }() - 
tmpFilePNG, err := os.CreateTemp(*context.tempFolder, "diagram-*-.png") + tmpFilePNG, err := os.CreateTemp(context.Config.TempFolder, "diagram-*-.png") checkErr(err) defer func() { _ = os.Remove(tmpFilePNG.Name()) }() @@ -1892,7 +1835,7 @@ func (context *Context) renderDataFlowDiagramGraphvizImage(dotFile *os.File, tar } // exec - cmd := exec.Command(filepath.Join(*context.binFolder, context.graphvizDataFlowDiagramConversionCall), tmpFileDOT.Name(), tmpFilePNG.Name()) + cmd := exec.Command(filepath.Join(context.Config.BinFolder, common.GraphvizDataFlowDiagramConversionCall), tmpFileDOT.Name(), tmpFilePNG.Name()) cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr err = cmd.Run() @@ -1905,24 +1848,24 @@ func (context *Context) renderDataFlowDiagramGraphvizImage(dotFile *os.File, tar fmt.Println(err) return } - err = os.WriteFile(filepath.Join(targetDir, context.dataFlowDiagramFilenamePNG), inputPNG, 0644) + err = os.WriteFile(filepath.Join(targetDir, context.Config.DataFlowDiagramFilenamePNG), inputPNG, 0644) if err != nil { - fmt.Println("Error creating", context.dataFlowDiagramFilenamePNG) + fmt.Println("Error creating", context.Config.DataFlowDiagramFilenamePNG) fmt.Println(err) return } } -func (context *Context) renderDataAssetDiagramGraphvizImage(dotFile *os.File, targetDir string) { // TODO dedupe with other render...() method here - if *context.verbose { +func (context *Context) generateDataAssetDiagramGraphvizImage(dotFile *os.File, targetDir string) { // TODO dedupe with other render...() method here + if context.Config.Verbose { fmt.Println("Rendering data asset diagram input") } // tmp files - tmpFileDOT, err := os.CreateTemp(*context.tempFolder, "diagram-*-.gv") + tmpFileDOT, err := os.CreateTemp(context.Config.TempFolder, "diagram-*-.gv") checkErr(err) defer func() { _ = os.Remove(tmpFileDOT.Name()) }() - tmpFilePNG, err := os.CreateTemp(*context.tempFolder, "diagram-*-.png") + tmpFilePNG, err := os.CreateTemp(context.Config.TempFolder, "diagram-*-.png") 
checkErr(err) defer func() { _ = os.Remove(tmpFilePNG.Name()) }() @@ -1940,7 +1883,7 @@ func (context *Context) renderDataAssetDiagramGraphvizImage(dotFile *os.File, ta } // exec - cmd := exec.Command(filepath.Join(*context.binFolder, context.graphvizDataAssetDiagramConversionCall), tmpFileDOT.Name(), tmpFilePNG.Name()) + cmd := exec.Command(filepath.Join(context.Config.BinFolder, common.GraphvizDataAssetDiagramConversionCall), tmpFileDOT.Name(), tmpFilePNG.Name()) cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr err = cmd.Run() @@ -1953,9 +1896,9 @@ func (context *Context) renderDataAssetDiagramGraphvizImage(dotFile *os.File, ta fmt.Println(err) return } - err = os.WriteFile(filepath.Join(targetDir, context.dataAssetDiagramFilenamePNG), inputPNG, 0644) + err = os.WriteFile(filepath.Join(targetDir, context.Config.DataAssetDiagramFilenamePNG), inputPNG, 0644) if err != nil { - fmt.Println("Error creating", context.dataAssetDiagramFilenamePNG) + fmt.Println("Error creating", context.Config.DataAssetDiagramFilenamePNG) fmt.Println(err) return } diff --git a/internal/threagile/rules.go b/internal/threagile/rules.go index 7c9f6cf7..83c39ca4 100644 --- a/internal/threagile/rules.go +++ b/internal/threagile/rules.go @@ -5,6 +5,7 @@ Copyright © 2023 NAME HERE package threagile import ( + "github.com/threagile/threagile/pkg/common" "github.com/threagile/threagile/pkg/security/risks" "github.com/threagile/threagile/pkg/security/types" "strings" @@ -30,7 +31,7 @@ var listRiskRules = &cobra.Command{ cmd.Println("----------------------") cmd.Println("Custom risk rules:") cmd.Println("----------------------") - customRiskRules := types.LoadCustomRiskRules(strings.Split(plugins, ","), getProgressReporter(cmd)) + customRiskRules := types.LoadCustomRiskRules(strings.Split(plugins, ","), common.GetProgressReporter(cmd)) for id, customRule := range customRiskRules { cmd.Println(id, "-->", customRule.Category.Title, "--> with tags:", customRule.Tags) } @@ -63,7 +64,7 @@ var 
explainRiskRules = &cobra.Command{ cmd.Println("----------------------") cmd.Println("Custom risk rules:") cmd.Println("----------------------") - customRiskRules := types.LoadCustomRiskRules(strings.Split(plugins, ","), getProgressReporter(cmd)) + customRiskRules := types.LoadCustomRiskRules(strings.Split(plugins, ","), common.GetProgressReporter(cmd)) for _, customRule := range customRiskRules { cmd.Printf("%v: %v\n", customRule.Category.Id, customRule.Category.Description) } diff --git a/pkg/common/config.go b/pkg/common/config.go index c6c44fae..944f69af 100644 --- a/pkg/common/config.go +++ b/pkg/common/config.go @@ -1,33 +1,265 @@ package common +import ( + "encoding/json" + "fmt" + "os" + "strings" +) + type Config struct { - Verbose bool - IgnoreOrphanedRiskTracking bool - OutputDir string - RAAPlugin string - SkipRiskRules string - RiskRulesPlugins string - ModelFilename string - TemplateFilename string - ExecuteModelMacro string - DiagramDPI int - ServerPort int + BuildTimestamp string + Verbose bool + + AppFolder string + BinFolder string + DataFolder string + OutputFolder string + ServerFolder string + TempFolder string + KeyFolder string + + InputFile string + DataFlowDiagramFilenamePNG string + DataAssetDiagramFilenamePNG string + DataFlowDiagramFilenameDOT string + DataAssetDiagramFilenameDOT string + ReportFilename string + ExcelRisksFilename string + ExcelTagsFilename string + JsonRisksFilename string + JsonTechnicalAssetsFilename string + JsonStatsFilename string + TemplateFilename string + + RAAPlugin string + RiskRulesPlugins []string + SkipRiskRules string + ExecuteModelMacro string + + DiagramDPI int + ServerPort int + GraphvizDPI int + MaxGraphvizDPI int + BackupHistoryFilesToKeep int + AddModelTitle bool KeepDiagramSourceFiles bool - AppFolder string - BinFolder string - ServerFolder string - TempFolder string - DefaultGraphvizDPI int - MaxGraphvizDPI int - Attractiveness Attractiveness + IgnoreOrphanedRiskTracking bool + + Attractiveness 
Attractiveness } -func (c *Config) Defaults() *Config { +func (c *Config) Defaults(buildTimestamp string) *Config { *c = Config{ - DefaultGraphvizDPI: 120, - MaxGraphvizDPI: 240, + BuildTimestamp: buildTimestamp, + Verbose: false, + + AppFolder: AppDir, + BinFolder: BinDir, + DataFolder: DataDir, + OutputFolder: OutputDir, + ServerFolder: ServerDir, + TempFolder: TempDir, + KeyFolder: KeyDir, + + InputFile: InputFile, + DataFlowDiagramFilenamePNG: DataFlowDiagramFilenamePNG, + DataAssetDiagramFilenamePNG: DataAssetDiagramFilenamePNG, + DataFlowDiagramFilenameDOT: DataFlowDiagramFilenameDOT, + DataAssetDiagramFilenameDOT: DataAssetDiagramFilenameDOT, + ReportFilename: ReportFilename, + ExcelRisksFilename: ExcelRisksFilename, + ExcelTagsFilename: ExcelTagsFilename, + JsonRisksFilename: JsonRisksFilename, + JsonTechnicalAssetsFilename: JsonTechnicalAssetsFilename, + JsonStatsFilename: JsonStatsFilename, + TemplateFilename: TemplateFilename, + RAAPlugin: RAAPluginName, + RiskRulesPlugins: make([]string, 0), + SkipRiskRules: "", + ExecuteModelMacro: "", + ServerPort: 0, + + GraphvizDPI: DefaultGraphvizDPI, + BackupHistoryFilesToKeep: DefaultBackupHistoryFilesToKeep, + + AddModelTitle: false, + KeepDiagramSourceFiles: false, + IgnoreOrphanedRiskTracking: false, + + Attractiveness: Attractiveness{ + Quantity: 0, + Confidentiality: AttackerFocus{ + Asset: 0, + ProcessedOrStoredData: 0, + TransferredData: 0, + }, + Integrity: AttackerFocus{ + Asset: 0, + ProcessedOrStoredData: 0, + TransferredData: 0, + }, + Availability: AttackerFocus{ + Asset: 0, + ProcessedOrStoredData: 0, + TransferredData: 0, + }, + }, } return c } + +func (c *Config) Load(configFilename string) error { + if len(configFilename) == 0 { + return nil + } + + data, readError := os.ReadFile(configFilename) + if readError != nil { + return readError + } + + values := make(map[string]any) + parseError := json.Unmarshal(data, &values) + if parseError != nil { + return fmt.Errorf("failed to parse config file 
%q: %v", configFilename, parseError) + } + + var config Config + unmarshalError := json.Unmarshal(data, &config) + if unmarshalError != nil { + return fmt.Errorf("failed to parse config file %q: %v", configFilename, unmarshalError) + } + + c.Merge(config, values) + + return nil +} + +func (c *Config) Merge(config Config, values map[string]any) { + for key := range values { + switch strings.ToLower(key) { + case strings.ToLower("Verbose"): + c.Verbose = config.Verbose + break + + case strings.ToLower("AppFolder"): + c.AppFolder = config.AppFolder + break + + case strings.ToLower("BinFolder"): + c.BinFolder = config.BinFolder + break + + case strings.ToLower("DataFolder"): + c.DataFolder = config.DataFolder + break + + case strings.ToLower("OutputFolder"): + c.OutputFolder = config.OutputFolder + break + + case strings.ToLower("ServerFolder"): + c.ServerFolder = config.ServerFolder + break + + case strings.ToLower("TempFolder"): + c.TempFolder = config.TempFolder + break + + case strings.ToLower("KeyFolder"): + c.KeyFolder = config.KeyFolder + break + + case strings.ToLower("InputFile"): + c.InputFile = config.InputFile + break + + case strings.ToLower("DataFlowDiagramFilenamePNG"): + c.DataFlowDiagramFilenamePNG = config.DataFlowDiagramFilenamePNG + break + + case strings.ToLower("DataAssetDiagramFilenamePNG"): + c.DataAssetDiagramFilenamePNG = config.DataAssetDiagramFilenamePNG + break + + case strings.ToLower("DataFlowDiagramFilenameDOT"): + c.DataFlowDiagramFilenameDOT = config.DataFlowDiagramFilenameDOT + break + + case strings.ToLower("DataAssetDiagramFilenameDOT"): + c.DataAssetDiagramFilenameDOT = config.DataAssetDiagramFilenameDOT + break + + case strings.ToLower("ReportFilename"): + c.ReportFilename = config.ReportFilename + break + + case strings.ToLower("ExcelRisksFilename"): + c.ExcelRisksFilename = config.ExcelRisksFilename + break + + case strings.ToLower("ExcelTagsFilename"): + c.ExcelTagsFilename = config.ExcelTagsFilename + break + + case 
strings.ToLower("JsonRisksFilename"): + c.JsonRisksFilename = config.JsonRisksFilename + break + + case strings.ToLower("JsonTechnicalAssetsFilename"): + c.JsonTechnicalAssetsFilename = config.JsonTechnicalAssetsFilename + break + + case strings.ToLower("JsonStatsFilename"): + c.JsonStatsFilename = config.JsonStatsFilename + break + + case strings.ToLower("TemplateFilename"): + c.TemplateFilename = config.TemplateFilename + break + + case strings.ToLower("RAAPlugin"): + c.RAAPlugin = config.RAAPlugin + break + + case strings.ToLower("RiskRulesPlugins"): + c.RiskRulesPlugins = config.RiskRulesPlugins + break + + case strings.ToLower("SkipRiskRules"): + c.SkipRiskRules = config.SkipRiskRules + break + + case strings.ToLower("ExecuteModelMacro"): + c.ExecuteModelMacro = config.ExecuteModelMacro + break + + case strings.ToLower("ServerPort"): + c.ServerPort = config.ServerPort + break + + case strings.ToLower("GraphvizDPI"): + c.GraphvizDPI = config.GraphvizDPI + break + + case strings.ToLower("BackupHistoryFilesToKeep"): + c.BackupHistoryFilesToKeep = config.BackupHistoryFilesToKeep + break + + case strings.ToLower("AddModelTitle"): + c.AddModelTitle = config.AddModelTitle + break + + case strings.ToLower("KeepDiagramSourceFiles"): + c.KeepDiagramSourceFiles = config.KeepDiagramSourceFiles + break + + case strings.ToLower("IgnoreOrphanedRiskTracking"): + c.IgnoreOrphanedRiskTracking = config.IgnoreOrphanedRiskTracking + break + } + } +} diff --git a/pkg/common/consts.go b/pkg/common/consts.go index 5a45f897..27a08cd5 100644 --- a/pkg/common/consts.go +++ b/pkg/common/consts.go @@ -1,24 +1,35 @@ package common const ( - TempDir = "/dev/shm" // TODO: make configurable via cmdline arg? - BinDir = "/app" - AppDir = "/app" - DataDir = "/data" - KeyDir = "keys" + TempDir = "/dev/shm" // TODO: make configurable via cmdline arg? + AppDir = "/app" + BinDir = "/app" + DataDir = "/data" + OutputDir = "." 
+ ServerDir = "/server" + KeyDir = "keys" + + InputFile = "threagile.yaml" ReportFilename = "report.pdf" ExcelRisksFilename = "risks.xlsx" ExcelTagsFilename = "tags.xlsx" JsonRisksFilename = "risks.json" JsonTechnicalAssetsFilename = "technical-assets.json" JsonStatsFilename = "stats.json" + TemplateFilename = "background.pdf" DataFlowDiagramFilenameDOT = "data-flow-diagram.gv" DataFlowDiagramFilenamePNG = "data-flow-diagram.png" DataAssetDiagramFilenameDOT = "data-asset-diagram.gv" DataAssetDiagramFilenamePNG = "data-asset-diagram.png" GraphvizDataFlowDiagramConversionCall = "render-data-flow-diagram.sh" GraphvizDataAssetDiagramConversionCall = "render-data-asset-diagram.sh" - InputFile = "threagile.yaml" + + RAAPluginName = "raa_calc" + + DefaultGraphvizDPI = 120 + MinGraphvizDPI = 20 + MaxGraphvizDPI = 300 + DefaultBackupHistoryFilesToKeep = 50 ) const ( diff --git a/internal/threagile/progress-reporter.go b/pkg/common/progress-reporter.go similarity index 91% rename from internal/threagile/progress-reporter.go rename to pkg/common/progress-reporter.go index 266bd5fa..839a3d4f 100644 --- a/internal/threagile/progress-reporter.go +++ b/pkg/common/progress-reporter.go @@ -2,7 +2,7 @@ Copyright © 2023 NAME HERE */ -package threagile +package common import ( "fmt" @@ -34,7 +34,7 @@ func (CommandLineProgressReporter) Fatalf(format string, v ...any) { log.Fatalf(format, v...) 
} -func getProgressReporter(cobraCmd *cobra.Command) ProgressReporter { +func GetProgressReporter(cobraCmd *cobra.Command) ProgressReporter { if cobraCmd == nil { return CommandLineProgressReporter{} } diff --git a/pkg/server/execute.go b/pkg/server/execute.go index 08daf765..e0dcbae8 100644 --- a/pkg/server/execute.go +++ b/pkg/server/execute.go @@ -45,7 +45,7 @@ func (s *server) execute(ginContext *gin.Context, dryRun bool) (yamlContent []by } }() - dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(s.configuration.DefaultGraphvizDPI))) + dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(s.config.GraphvizDPI))) if err != nil { handleErrorInServiceCall(err, ginContext) return yamlContent, false @@ -68,7 +68,7 @@ func (s *server) execute(ginContext *gin.Context, dryRun bool) (yamlContent []by filenameUploaded := strings.TrimSpace(header.Filename) - tmpInputDir, err := os.MkdirTemp(s.configuration.TempFolder, "threagile-input-") + tmpInputDir, err := os.MkdirTemp(s.config.TempFolder, "threagile-input-") if err != nil { handleErrorInServiceCall(err, ginContext) return yamlContent, false @@ -91,7 +91,7 @@ func (s *server) execute(ginContext *gin.Context, dryRun bool) (yamlContent []by if strings.ToLower(filepath.Ext(filenameUploaded)) == ".zip" { // unzip first (including the resources like images etc.) 
- if s.configuration.Verbose { + if s.config.Verbose { fmt.Println("Decompressing uploaded archive") } filenamesUnzipped, err := unzip(tmpModelFile.Name(), tmpInputDir) @@ -112,14 +112,14 @@ func (s *server) execute(ginContext *gin.Context, dryRun bool) (yamlContent []by } } - tmpOutputDir, err := os.MkdirTemp(s.configuration.TempFolder, "threagile-output-") + tmpOutputDir, err := os.MkdirTemp(s.config.TempFolder, "threagile-output-") if err != nil { handleErrorInServiceCall(err, ginContext) return yamlContent, false } defer func() { _ = os.RemoveAll(tmpOutputDir) }() - tmpResultFile, err := os.CreateTemp(s.configuration.TempFolder, "threagile-result-*.zip") + tmpResultFile, err := os.CreateTemp(s.config.TempFolder, "threagile-result-*.zip") if err != nil { handleErrorInServiceCall(err, ginContext) return yamlContent, false @@ -137,7 +137,7 @@ func (s *server) execute(ginContext *gin.Context, dryRun bool) (yamlContent []by handleErrorInServiceCall(err, ginContext) return yamlContent, false } - err = os.WriteFile(filepath.Join(tmpOutputDir, s.configuration.InputFile), yamlContent, 0400) + err = os.WriteFile(filepath.Join(tmpOutputDir, s.config.InputFile), yamlContent, 0400) if err != nil { handleErrorInServiceCall(err, ginContext) return yamlContent, false @@ -145,26 +145,26 @@ func (s *server) execute(ginContext *gin.Context, dryRun bool) (yamlContent []by if !dryRun { files := []string{ - filepath.Join(tmpOutputDir, s.configuration.InputFile), - filepath.Join(tmpOutputDir, s.configuration.DataFlowDiagramFilenamePNG), - filepath.Join(tmpOutputDir, s.configuration.DataAssetDiagramFilenamePNG), - filepath.Join(tmpOutputDir, s.configuration.ReportFilename), - filepath.Join(tmpOutputDir, s.configuration.ExcelRisksFilename), - filepath.Join(tmpOutputDir, s.configuration.ExcelTagsFilename), - filepath.Join(tmpOutputDir, s.configuration.JsonRisksFilename), - filepath.Join(tmpOutputDir, s.configuration.JsonTechnicalAssetsFilename), - filepath.Join(tmpOutputDir, 
s.configuration.JsonStatsFilename), + filepath.Join(tmpOutputDir, s.config.InputFile), + filepath.Join(tmpOutputDir, s.config.DataFlowDiagramFilenamePNG), + filepath.Join(tmpOutputDir, s.config.DataAssetDiagramFilenamePNG), + filepath.Join(tmpOutputDir, s.config.ReportFilename), + filepath.Join(tmpOutputDir, s.config.ExcelRisksFilename), + filepath.Join(tmpOutputDir, s.config.ExcelTagsFilename), + filepath.Join(tmpOutputDir, s.config.JsonRisksFilename), + filepath.Join(tmpOutputDir, s.config.JsonTechnicalAssetsFilename), + filepath.Join(tmpOutputDir, s.config.JsonStatsFilename), } - if s.configuration.KeepDiagramSourceFiles { - files = append(files, filepath.Join(tmpOutputDir, s.configuration.DataAssetDiagramFilenamePNG)) - files = append(files, filepath.Join(tmpOutputDir, s.configuration.DataAssetDiagramFilenameDOT)) + if s.config.KeepDiagramSourceFiles { + files = append(files, filepath.Join(tmpOutputDir, s.config.DataAssetDiagramFilenamePNG)) + files = append(files, filepath.Join(tmpOutputDir, s.config.DataAssetDiagramFilenameDOT)) } err = zipFiles(tmpResultFile.Name(), files) if err != nil { handleErrorInServiceCall(err, ginContext) return yamlContent, false } - if s.configuration.Verbose { + if s.config.Verbose { log.Println("Streaming back result file: " + tmpResultFile.Name()) } ginContext.FileAttachment(tmpResultFile.Name(), "threagile-result.zip") @@ -179,11 +179,11 @@ func (s *server) doItViaRuntimeCall(modelFile string, outputDir string, dpi int) { // Remember to also add the same args to the exec based sub-process calls! 
var cmd *exec.Cmd - args := []string{"-model", modelFile, "-output", outputDir, "-execute-model-macro", s.configuration.ExecuteModelMacro, "-raa-run", s.configuration.RaaPlugin, "-custom-risk-rules-plugins", s.configuration.CustomRiskRulesPlugins, "-skip-risk-rules", s.configuration.SkipRiskRules, "-diagram-dpi", strconv.Itoa(dpi)} - if s.configuration.Verbose { + args := []string{"-model", modelFile, "-output", outputDir, "-execute-model-macro", s.config.ExecuteModelMacro, "-raa-run", s.config.RAAPlugin, "-custom-risk-rules-plugins", strings.Join(s.config.RiskRulesPlugins, ","), "-skip-risk-rules", s.config.SkipRiskRules, "-diagram-dpi", strconv.Itoa(dpi)} + if s.config.Verbose { args = append(args, "-verbose") } - if s.configuration.IgnoreOrphanedRiskTracking { // TODO why add all them as arguments, when they are also variables on outer level? + if s.config.IgnoreOrphanedRiskTracking { // TODO why add all them as arguments, when they are also variables on outer level? args = append(args, "-ignore-orphaned-risk-tracking") } if generateDataFlowDiagram { @@ -219,7 +219,7 @@ func (s *server) doItViaRuntimeCall(modelFile string, outputDir string, if err != nil { panic(errors.New(string(out))) } else { - if s.configuration.Verbose && len(out) > 0 { + if s.config.Verbose && len(out) > 0 { fmt.Println("---") fmt.Print(string(out)) fmt.Println("---") diff --git a/pkg/server/model.go b/pkg/server/model.go index 84066a6d..c74ec4b5 100644 --- a/pkg/server/model.go +++ b/pkg/server/model.go @@ -119,7 +119,7 @@ func (s *server) listModels(ginContext *gin.Context) { // TODO currently returns } for _, dirEntry := range modelFolders { if dirEntry.IsDir() { - modelStat, err := os.Stat(filepath.Join(folderNameOfKey, dirEntry.Name(), s.configuration.InputFile)) + modelStat, err := os.Stat(filepath.Join(folderNameOfKey, dirEntry.Name(), s.config.InputFile)) if err != nil { log.Println(err) ginContext.JSON(http.StatusNotFound, gin.H{ @@ -994,7 +994,7 @@ func (s *server) 
readModel(ginContext *gin.Context, modelUUID string, key []byte return modelInputResult, yamlText, false } - fileBytes, err := os.ReadFile(filepath.Join(modelFolder, s.configuration.InputFile)) + fileBytes, err := os.ReadFile(filepath.Join(modelFolder, s.config.InputFile)) if err != nil { log.Println(err) ginContext.JSON(http.StatusInternalServerError, gin.H{ @@ -1084,7 +1084,7 @@ func (s *server) getModel(ginContext *gin.Context) { defer s.unlockFolder(folderNameOfKey) _, yamlText, ok := s.readModel(ginContext, ginContext.Param("model-id"), key, folderNameOfKey) if ok { - tmpResultFile, err := os.CreateTemp(s.configuration.TempFolder, "threagile-*.yaml") + tmpResultFile, err := os.CreateTemp(s.config.TempFolder, "threagile-*.yaml") if err != nil { handleErrorInServiceCall(err, ginContext) return @@ -1098,7 +1098,7 @@ func (s *server) getModel(ginContext *gin.Context) { return } defer func() { _ = os.Remove(tmpResultFile.Name()) }() - ginContext.FileAttachment(tmpResultFile.Name(), s.configuration.InputFile) + ginContext.FileAttachment(tmpResultFile.Name(), s.config.InputFile) } } @@ -1139,7 +1139,7 @@ func (s *server) analyzeModelOnServerDirectly(ginContext *gin.Context) { var err error if r := recover(); r != nil { err = r.(error) - if s.configuration.Verbose { + if s.config.Verbose { log.Println(err) } log.Println(err) @@ -1150,7 +1150,7 @@ func (s *server) analyzeModelOnServerDirectly(ginContext *gin.Context) { } }() - dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(s.configuration.DefaultGraphvizDPI))) + dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(s.config.GraphvizDPI))) if err != nil { handleErrorInServiceCall(err, ginContext) return @@ -1160,19 +1160,19 @@ func (s *server) analyzeModelOnServerDirectly(ginContext *gin.Context) { if !ok { return } - tmpModelFile, err := os.CreateTemp(s.configuration.TempFolder, "threagile-direct-analyze-*") + tmpModelFile, err := os.CreateTemp(s.config.TempFolder, 
"threagile-direct-analyze-*") if err != nil { handleErrorInServiceCall(err, ginContext) return } defer func() { _ = os.Remove(tmpModelFile.Name()) }() - tmpOutputDir, err := os.MkdirTemp(s.configuration.TempFolder, "threagile-direct-analyze-") + tmpOutputDir, err := os.MkdirTemp(s.config.TempFolder, "threagile-direct-analyze-") if err != nil { handleErrorInServiceCall(err, ginContext) return } defer func() { _ = os.RemoveAll(tmpOutputDir) }() - tmpResultFile, err := os.CreateTemp(s.configuration.TempFolder, "threagile-result-*.zip") + tmpResultFile, err := os.CreateTemp(s.config.TempFolder, "threagile-result-*.zip") if err != nil { handleErrorInServiceCall(err, ginContext) return @@ -1186,40 +1186,40 @@ func (s *server) analyzeModelOnServerDirectly(ginContext *gin.Context) { handleErrorInServiceCall(err, ginContext) return } - err = os.WriteFile(filepath.Join(tmpOutputDir, s.configuration.InputFile), []byte(yamlText), 0400) + err = os.WriteFile(filepath.Join(tmpOutputDir, s.config.InputFile), []byte(yamlText), 0400) if err != nil { handleErrorInServiceCall(err, ginContext) return } files := []string{ - filepath.Join(tmpOutputDir, s.configuration.InputFile), - filepath.Join(tmpOutputDir, s.configuration.DataFlowDiagramFilenamePNG), - filepath.Join(tmpOutputDir, s.configuration.DataAssetDiagramFilenamePNG), - filepath.Join(tmpOutputDir, s.configuration.ReportFilename), - filepath.Join(tmpOutputDir, s.configuration.ExcelRisksFilename), - filepath.Join(tmpOutputDir, s.configuration.ExcelTagsFilename), - filepath.Join(tmpOutputDir, s.configuration.JsonRisksFilename), - filepath.Join(tmpOutputDir, s.configuration.JsonTechnicalAssetsFilename), - filepath.Join(tmpOutputDir, s.configuration.JsonStatsFilename), - } - if s.configuration.KeepDiagramSourceFiles { - files = append(files, filepath.Join(tmpOutputDir, s.configuration.DataFlowDiagramFilenameDOT)) - files = append(files, filepath.Join(tmpOutputDir, s.configuration.DataAssetDiagramFilenameDOT)) + 
filepath.Join(tmpOutputDir, s.config.InputFile), + filepath.Join(tmpOutputDir, s.config.DataFlowDiagramFilenamePNG), + filepath.Join(tmpOutputDir, s.config.DataAssetDiagramFilenamePNG), + filepath.Join(tmpOutputDir, s.config.ReportFilename), + filepath.Join(tmpOutputDir, s.config.ExcelRisksFilename), + filepath.Join(tmpOutputDir, s.config.ExcelTagsFilename), + filepath.Join(tmpOutputDir, s.config.JsonRisksFilename), + filepath.Join(tmpOutputDir, s.config.JsonTechnicalAssetsFilename), + filepath.Join(tmpOutputDir, s.config.JsonStatsFilename), + } + if s.config.KeepDiagramSourceFiles { + files = append(files, filepath.Join(tmpOutputDir, s.config.DataFlowDiagramFilenameDOT)) + files = append(files, filepath.Join(tmpOutputDir, s.config.DataAssetDiagramFilenameDOT)) } err = zipFiles(tmpResultFile.Name(), files) if err != nil { handleErrorInServiceCall(err, ginContext) return } - if s.configuration.Verbose { + if s.config.Verbose { fmt.Println("Streaming back result file: " + tmpResultFile.Name()) } ginContext.FileAttachment(tmpResultFile.Name(), "threagile-result.zip") } func (s *server) writeModelYAML(ginContext *gin.Context, yaml string, key []byte, modelFolder string, changeReasonForHistory string, skipBackup bool) (ok bool) { - if s.configuration.Verbose { + if s.config.Verbose { fmt.Println("about to write " + strconv.Itoa(len(yaml)) + " bytes of yaml into model folder: " + modelFolder) } var b bytes.Buffer @@ -1264,7 +1264,7 @@ func (s *server) writeModelYAML(ginContext *gin.Context, yaml string, key []byte return false } } - f, err := os.Create(filepath.Join(modelFolder, s.configuration.InputFile)) + f, err := os.Create(filepath.Join(modelFolder, s.config.InputFile)) if err != nil { log.Println(err) ginContext.JSON(http.StatusInternalServerError, gin.H{ @@ -1303,7 +1303,7 @@ func (s *server) backupModelToHistory(modelFolder string, changeReasonForHistory return err } } - inputModel, err := os.ReadFile(filepath.Join(modelFolder, s.configuration.InputFile)) + 
inputModel, err := os.ReadFile(filepath.Join(modelFolder, s.config.InputFile)) if err != nil { return err } @@ -1317,8 +1317,8 @@ func (s *server) backupModelToHistory(modelFolder string, changeReasonForHistory if err != nil { return err } - if len(files) > s.configuration.BackupHistoryFilesToKeep { - requiredToDelete := len(files) - s.configuration.BackupHistoryFilesToKeep + if len(files) > s.config.BackupHistoryFilesToKeep { + requiredToDelete := len(files) - s.config.BackupHistoryFilesToKeep sort.Slice(files, func(i, j int) bool { return files[i].Name() < files[j].Name() }) diff --git a/pkg/server/report.go b/pkg/server/report.go index b24b2bc0..b4f804bf 100644 --- a/pkg/server/report.go +++ b/pkg/server/report.go @@ -70,7 +70,7 @@ func (s *server) streamResponse(ginContext *gin.Context, responseType responseTy var err error if r := recover(); r != nil { err = r.(error) - if s.configuration.Verbose { + if s.config.Verbose { log.Println(err) } log.Println(err) @@ -80,7 +80,7 @@ func (s *server) streamResponse(ginContext *gin.Context, responseType responseTy ok = false } }() - dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(s.configuration.DefaultGraphvizDPI))) + dpi, err := strconv.Atoi(ginContext.DefaultQuery("dpi", strconv.Itoa(s.config.GraphvizDPI))) if err != nil { handleErrorInServiceCall(err, ginContext) return @@ -89,13 +89,13 @@ func (s *server) streamResponse(ginContext *gin.Context, responseType responseTy if !ok { return } - tmpModelFile, err := os.CreateTemp(s.configuration.TempFolder, "threagile-render-*") + tmpModelFile, err := os.CreateTemp(s.config.TempFolder, "threagile-render-*") if err != nil { handleErrorInServiceCall(err, ginContext) return } defer func() { _ = os.Remove(tmpModelFile.Name()) }() - tmpOutputDir, err := os.MkdirTemp(s.configuration.TempFolder, "threagile-render-") + tmpOutputDir, err := os.MkdirTemp(s.config.TempFolder, "threagile-render-") if err != nil { handleErrorInServiceCall(err, ginContext) return @@ 
-108,42 +108,42 @@ func (s *server) streamResponse(ginContext *gin.Context, responseType responseTy handleErrorInServiceCall(err, ginContext) return } - ginContext.File(filepath.Join(tmpOutputDir, s.configuration.DataFlowDiagramFilenamePNG)) + ginContext.File(filepath.Join(tmpOutputDir, s.config.DataFlowDiagramFilenamePNG)) } else if responseType == dataAssetDiagram { s.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, true, false, false, false, false, false, false, dpi) if err != nil { handleErrorInServiceCall(err, ginContext) return } - ginContext.File(filepath.Join(tmpOutputDir, s.configuration.DataAssetDiagramFilenamePNG)) + ginContext.File(filepath.Join(tmpOutputDir, s.config.DataAssetDiagramFilenamePNG)) } else if responseType == reportPDF { s.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, true, false, false, false, false, false, dpi) if err != nil { handleErrorInServiceCall(err, ginContext) return } - ginContext.FileAttachment(filepath.Join(tmpOutputDir, s.configuration.ReportFilename), s.configuration.ReportFilename) + ginContext.FileAttachment(filepath.Join(tmpOutputDir, s.config.ReportFilename), s.config.ReportFilename) } else if responseType == risksExcel { s.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, true, false, false, false, false, dpi) if err != nil { handleErrorInServiceCall(err, ginContext) return } - ginContext.FileAttachment(filepath.Join(tmpOutputDir, s.configuration.ExcelRisksFilename), s.configuration.ExcelRisksFilename) + ginContext.FileAttachment(filepath.Join(tmpOutputDir, s.config.ExcelRisksFilename), s.config.ExcelRisksFilename) } else if responseType == tagsExcel { s.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, true, false, false, false, dpi) if err != nil { handleErrorInServiceCall(err, ginContext) return } - ginContext.FileAttachment(filepath.Join(tmpOutputDir, s.configuration.ExcelTagsFilename), s.configuration.ExcelTagsFilename) + 
ginContext.FileAttachment(filepath.Join(tmpOutputDir, s.config.ExcelTagsFilename), s.config.ExcelTagsFilename) } else if responseType == risksJSON { s.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, true, false, false, dpi) if err != nil { handleErrorInServiceCall(err, ginContext) return } - jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, s.configuration.JsonRisksFilename)) + jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, s.config.JsonRisksFilename)) if err != nil { handleErrorInServiceCall(err, ginContext) return @@ -155,7 +155,7 @@ func (s *server) streamResponse(ginContext *gin.Context, responseType responseTy handleErrorInServiceCall(err, ginContext) return } - jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, s.configuration.JsonTechnicalAssetsFilename)) + jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, s.config.JsonTechnicalAssetsFilename)) if err != nil { handleErrorInServiceCall(err, ginContext) return @@ -167,7 +167,7 @@ func (s *server) streamResponse(ginContext *gin.Context, responseType responseTy handleErrorInServiceCall(err, ginContext) return } - jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, s.configuration.JsonStatsFilename)) + jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, s.config.JsonStatsFilename)) if err != nil { handleErrorInServiceCall(err, ginContext) return diff --git a/pkg/server/server.go b/pkg/server/server.go index 374b8ab9..59beb2b5 100644 --- a/pkg/server/server.go +++ b/pkg/server/server.go @@ -1,10 +1,12 @@ /* Copyright © 2023 NAME HERE */ + package server import ( "fmt" + "github.com/threagile/threagile/pkg/common" "log" "net/http" "os" @@ -21,7 +23,7 @@ import ( ) type server struct { - configuration Configuration + config common.Config successCount int errorCount int globalLock sync.Mutex @@ -31,41 +33,12 @@ type server struct { mapFolderNameToTokenHash map[string]string extremeShortTimeoutsForTesting bool locksByFolderName 
map[string]*sync.Mutex + customRiskRules map[string]*types.CustomRisk } -type Configuration struct { - ServerFolder string - AppDir string - BuildTimestamp string - KeyDir string - InputFile string - ExecuteModelMacro string - ServerPort int - Verbose bool - IgnoreOrphanedRiskTracking bool - KeepDiagramSourceFiles bool - CustomRiskRules map[string]*types.CustomRisk - DefaultGraphvizDPI int - TempFolder string - DataFlowDiagramFilenamePNG string - DataAssetDiagramFilenamePNG string - DataFlowDiagramFilenameDOT string - DataAssetDiagramFilenameDOT string - ReportFilename string - ExcelRisksFilename string - ExcelTagsFilename string - JsonRisksFilename string - JsonTechnicalAssetsFilename string - JsonStatsFilename string - CustomRiskRulesPlugins string - RaaPlugin string - SkipRiskRules string - BackupHistoryFilesToKeep int -} - -func RunServer(serverConfiguration Configuration) { - server := &server{ - configuration: serverConfiguration, +func RunServer(config common.Config) { + s := &server{ + config: config, createdObjectsThrottler: make(map[string][]int64), mapTokenHashToTimeoutStruct: make(map[string]timeoutStruct), mapFolderNameToTokenHash: make(map[string]string), @@ -73,35 +46,35 @@ func RunServer(serverConfiguration Configuration) { locksByFolderName: make(map[string]*sync.Mutex), } router := gin.Default() - router.LoadHTMLGlob(filepath.Join(server.configuration.ServerFolder, "server/static/*.html")) // <== + router.LoadHTMLGlob(filepath.Join(s.config.ServerFolder, "s", "static", "*.html")) // <== router.GET("/", func(c *gin.Context) { c.HTML(http.StatusOK, "index.html", gin.H{}) }) router.HEAD("/", func(c *gin.Context) { c.HTML(http.StatusOK, "index.html", gin.H{}) }) - router.StaticFile("/threagile.png", filepath.Join(server.configuration.ServerFolder, "server/static/threagile.png")) // <== - router.StaticFile("/site.webmanifest", filepath.Join(server.configuration.ServerFolder, "server/static/site.webmanifest")) - router.StaticFile("/favicon.ico", 
filepath.Join(server.configuration.ServerFolder, "server/static/favicon.ico")) - router.StaticFile("/favicon-32x32.png", filepath.Join(server.configuration.ServerFolder, "server/static/favicon-32x32.png")) - router.StaticFile("/favicon-16x16.png", filepath.Join(server.configuration.ServerFolder, "server/static/favicon-16x16.png")) - router.StaticFile("/apple-touch-icon.png", filepath.Join(server.configuration.ServerFolder, "server/static/apple-touch-icon.png")) - router.StaticFile("/android-chrome-512x512.png", filepath.Join(server.configuration.ServerFolder, "server/static/android-chrome-512x512.png")) - router.StaticFile("/android-chrome-192x192.png", filepath.Join(server.configuration.ServerFolder, "server/static/android-chrome-192x192.png")) + router.StaticFile("/threagile.png", filepath.Join(s.config.ServerFolder, "s", "static", "threagile.png")) // <== + router.StaticFile("/site.webmanifest", filepath.Join(s.config.ServerFolder, "s", "static", "site.webmanifest")) + router.StaticFile("/favicon.ico", filepath.Join(s.config.ServerFolder, "s", "static", "favicon.ico")) + router.StaticFile("/favicon-32x32.png", filepath.Join(s.config.ServerFolder, "s", "static", "favicon-32x32.png")) + router.StaticFile("/favicon-16x16.png", filepath.Join(s.config.ServerFolder, "s", "static", "favicon-16x16.png")) + router.StaticFile("/apple-touch-icon.png", filepath.Join(s.config.ServerFolder, "s", "static", "apple-touch-icon.png")) + router.StaticFile("/android-chrome-512x512.png", filepath.Join(s.config.ServerFolder, "s", "static", "android-chrome-512x512.png")) + router.StaticFile("/android-chrome-192x192.png", filepath.Join(s.config.ServerFolder, "s", "static", "android-chrome-192x192.png")) - router.StaticFile("/schema.json", filepath.Join(server.configuration.AppDir, "schema.json")) - router.StaticFile("/live-templates.txt", filepath.Join(server.configuration.AppDir, "live-templates.txt")) - router.StaticFile("/openapi.yaml", filepath.Join(server.configuration.AppDir, 
"openapi.yaml")) - router.StaticFile("/swagger-ui/", filepath.Join(server.configuration.ServerFolder, "server/static/swagger-ui/index.html")) - router.StaticFile("/swagger-ui/index.html", filepath.Join(server.configuration.ServerFolder, "server/static/swagger-ui/index.html")) - router.StaticFile("/swagger-ui/oauth2-redirect.html", filepath.Join(server.configuration.ServerFolder, "server/static/swagger-ui/oauth2-redirect.html")) - router.StaticFile("/swagger-ui/swagger-ui.css", filepath.Join(server.configuration.ServerFolder, "server/static/swagger-ui/swagger-ui.css")) - router.StaticFile("/swagger-ui/swagger-ui.js", filepath.Join(server.configuration.ServerFolder, "server/static/swagger-ui/swagger-ui.js")) - router.StaticFile("/swagger-ui/swagger-ui-bundle.js", filepath.Join(server.configuration.ServerFolder, "server/static/swagger-ui/swagger-ui-bundle.js")) - router.StaticFile("/swagger-ui/swagger-ui-standalone-preset.js", filepath.Join(server.configuration.ServerFolder, "server/static/swagger-ui/swagger-ui-standalone-preset.js")) // <== + router.StaticFile("/schema.json", filepath.Join(s.config.AppFolder, "schema.json")) + router.StaticFile("/live-templates.txt", filepath.Join(s.config.AppFolder, "live-templates.txt")) + router.StaticFile("/openapi.yaml", filepath.Join(s.config.AppFolder, "openapi.yaml")) + router.StaticFile("/swagger-ui/", filepath.Join(s.config.ServerFolder, "s", "static", "swagger-ui/index.html")) + router.StaticFile("/swagger-ui/index.html", filepath.Join(s.config.ServerFolder, "s", "static", "swagger-ui/index.html")) + router.StaticFile("/swagger-ui/oauth2-redirect.html", filepath.Join(s.config.ServerFolder, "s", "static", "swagger-ui/oauth2-redirect.html")) + router.StaticFile("/swagger-ui/swagger-ui.css", filepath.Join(s.config.ServerFolder, "s", "static", "swagger-ui/swagger-ui.css")) + router.StaticFile("/swagger-ui/swagger-ui.js", filepath.Join(s.config.ServerFolder, "s", "static", "swagger-ui/swagger-ui.js")) + 
router.StaticFile("/swagger-ui/swagger-ui-bundle.js", filepath.Join(s.config.ServerFolder, "s", "static", "swagger-ui/swagger-ui-bundle.js")) + router.StaticFile("/swagger-ui/swagger-ui-standalone-preset.js", filepath.Join(s.config.ServerFolder, "s", "static", "swagger-ui/swagger-ui-standalone-preset.js")) // <== - router.GET("/threagile-example-model.yaml", server.exampleFile) - router.GET("/threagile-stub-model.yaml", server.stubFile) + router.GET("/threagile-example-model.yaml", s.exampleFile) + router.GET("/threagile-stub-model.yaml", s.stubFile) router.GET("/meta/ping", func(c *gin.Context) { c.JSON(200, gin.H{ @@ -111,7 +84,7 @@ func RunServer(serverConfiguration Configuration) { router.GET("/meta/version", func(c *gin.Context) { c.JSON(200, gin.H{ "version": docs.ThreagileVersion, - "build_timestamp": server.configuration.BuildTimestamp, + "build_timestamp": s.config.BuildTimestamp, }) }) router.GET("/meta/types", func(c *gin.Context) { @@ -143,69 +116,71 @@ func RunServer(serverConfiguration Configuration) { // TODO router.GET("/meta/risk-rules", listRiskRules) // TODO router.GET("/meta/model-macros", listModelMacros) - router.GET("/meta/stats", server.stats) + router.GET("/meta/stats", s.stats) - router.POST("/direct/analyze", server.analyze) - router.POST("/direct/check", server.check) - router.GET("/direct/stub", server.stubFile) + router.POST("/direct/analyze", s.analyze) + router.POST("/direct/check", s.check) + router.GET("/direct/stub", s.stubFile) - router.POST("/auth/keys", server.createKey) - router.DELETE("/auth/keys", server.deleteKey) - router.POST("/auth/tokens", server.createToken) - router.DELETE("/auth/tokens", server.deleteToken) + router.POST("/auth/keys", s.createKey) + router.DELETE("/auth/keys", s.deleteKey) + router.POST("/auth/tokens", s.createToken) + router.DELETE("/auth/tokens", s.deleteToken) - router.POST("/models", server.createNewModel) - router.GET("/models", server.listModels) - router.DELETE("/models/:model-id", 
server.deleteModel) - router.GET("/models/:model-id", server.getModel) - router.PUT("/models/:model-id", server.importModel) - router.GET("/models/:model-id/data-flow-diagram", server.streamDataFlowDiagram) - router.GET("/models/:model-id/data-asset-diagram", server.streamDataAssetDiagram) - router.GET("/models/:model-id/report-pdf", server.streamReportPDF) - router.GET("/models/:model-id/risks-excel", server.streamRisksExcel) - router.GET("/models/:model-id/tags-excel", server.streamTagsExcel) - router.GET("/models/:model-id/risks", server.streamRisksJSON) - router.GET("/models/:model-id/technical-assets", server.streamTechnicalAssetsJSON) - router.GET("/models/:model-id/stats", server.streamStatsJSON) - router.GET("/models/:model-id/analysis", server.analyzeModelOnServerDirectly) + router.POST("/models", s.createNewModel) + router.GET("/models", s.listModels) + router.DELETE("/models/:model-id", s.deleteModel) + router.GET("/models/:model-id", s.getModel) + router.PUT("/models/:model-id", s.importModel) + router.GET("/models/:model-id/data-flow-diagram", s.streamDataFlowDiagram) + router.GET("/models/:model-id/data-asset-diagram", s.streamDataAssetDiagram) + router.GET("/models/:model-id/report-pdf", s.streamReportPDF) + router.GET("/models/:model-id/risks-excel", s.streamRisksExcel) + router.GET("/models/:model-id/tags-excel", s.streamTagsExcel) + router.GET("/models/:model-id/risks", s.streamRisksJSON) + router.GET("/models/:model-id/technical-assets", s.streamTechnicalAssetsJSON) + router.GET("/models/:model-id/stats", s.streamStatsJSON) + router.GET("/models/:model-id/analysis", s.analyzeModelOnServerDirectly) - router.GET("/models/:model-id/cover", server.getCover) - router.PUT("/models/:model-id/cover", server.setCover) - router.GET("/models/:model-id/overview", server.getOverview) - router.PUT("/models/:model-id/overview", server.setOverview) + router.GET("/models/:model-id/cover", s.getCover) + router.PUT("/models/:model-id/cover", s.setCover) + 
router.GET("/models/:model-id/overview", s.getOverview) + router.PUT("/models/:model-id/overview", s.setOverview) //router.GET("/models/:model-id/questions", getQuestions) //router.PUT("/models/:model-id/questions", setQuestions) - router.GET("/models/:model-id/abuse-cases", server.getAbuseCases) - router.PUT("/models/:model-id/abuse-cases", server.setAbuseCases) - router.GET("/models/:model-id/security-requirements", server.getSecurityRequirements) - router.PUT("/models/:model-id/security-requirements", server.setSecurityRequirements) + router.GET("/models/:model-id/abuse-cases", s.getAbuseCases) + router.PUT("/models/:model-id/abuse-cases", s.setAbuseCases) + router.GET("/models/:model-id/security-requirements", s.getSecurityRequirements) + router.PUT("/models/:model-id/security-requirements", s.setSecurityRequirements) //router.GET("/models/:model-id/tags", getTags) //router.PUT("/models/:model-id/tags", setTags) - router.GET("/models/:model-id/data-assets", server.getDataAssets) - router.POST("/models/:model-id/data-assets", server.createNewDataAsset) - router.GET("/models/:model-id/data-assets/:data-asset-id", server.getDataAsset) - router.PUT("/models/:model-id/data-assets/:data-asset-id", server.setDataAsset) - router.DELETE("/models/:model-id/data-assets/:data-asset-id", server.deleteDataAsset) + router.GET("/models/:model-id/data-assets", s.getDataAssets) + router.POST("/models/:model-id/data-assets", s.createNewDataAsset) + router.GET("/models/:model-id/data-assets/:data-asset-id", s.getDataAsset) + router.PUT("/models/:model-id/data-assets/:data-asset-id", s.setDataAsset) + router.DELETE("/models/:model-id/data-assets/:data-asset-id", s.deleteDataAsset) - router.GET("/models/:model-id/trust-boundaries", server.getTrustBoundaries) + router.GET("/models/:model-id/trust-boundaries", s.getTrustBoundaries) // router.POST("/models/:model-id/trust-boundaries", createNewTrustBoundary) // router.GET("/models/:model-id/trust-boundaries/:trust-boundary-id", 
getTrustBoundary) // router.PUT("/models/:model-id/trust-boundaries/:trust-boundary-id", setTrustBoundary) // router.DELETE("/models/:model-id/trust-boundaries/:trust-boundary-id", deleteTrustBoundary) - router.GET("/models/:model-id/shared-runtimes", server.getSharedRuntimes) - router.POST("/models/:model-id/shared-runtimes", server.createNewSharedRuntime) - router.GET("/models/:model-id/shared-runtimes/:shared-runtime-id", server.getSharedRuntime) - router.PUT("/models/:model-id/shared-runtimes/:shared-runtime-id", server.setSharedRuntime) - router.DELETE("/models/:model-id/shared-runtimes/:shared-runtime-id", server.deleteSharedRuntime) + router.GET("/models/:model-id/shared-runtimes", s.getSharedRuntimes) + router.POST("/models/:model-id/shared-runtimes", s.createNewSharedRuntime) + router.GET("/models/:model-id/shared-runtimes/:shared-runtime-id", s.getSharedRuntime) + router.PUT("/models/:model-id/shared-runtimes/:shared-runtime-id", s.setSharedRuntime) + router.DELETE("/models/:model-id/shared-runtimes/:shared-runtime-id", s.deleteSharedRuntime) + + s.customRiskRules = types.LoadCustomRiskRules(s.config.RiskRulesPlugins, common.CommandLineProgressReporter{}) - fmt.Println("Threagile server running...") - _ = router.Run(":" + strconv.Itoa(server.configuration.ServerPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified + fmt.Println("Threagile server running...") + _ = router.Run(":" + strconv.Itoa(s.config.ServerPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified } func (s *server) exampleFile(ginContext *gin.Context) { - example, err := os.ReadFile(filepath.Join(s.configuration.AppDir, "threagile-example-model.yaml")) + example, err := os.ReadFile(filepath.Join(s.config.AppFolder, "threagile-example-model.yaml")) if err != nil { handleErrorInServiceCall(err, ginContext) return @@ -214,7 +189,7 @@ func (s *server) exampleFile(ginContext *gin.Context) { } func (s *server) stubFile(ginContext *gin.Context) { - stub, err := 
os.ReadFile(filepath.Join(s.configuration.AppDir, "threagile-stub-model.yaml")) + stub, err := os.ReadFile(filepath.Join(s.config.AppFolder, "threagile-stub-model.yaml")) if err != nil { handleErrorInServiceCall(err, ginContext) return @@ -225,7 +200,7 @@ func (s *server) stubFile(ginContext *gin.Context) { func (s *server) addSupportedTags(input []byte) []byte { // add distinct tags as "tags_available" supportedTags := make(map[string]bool) - for _, customRule := range s.configuration.CustomRiskRules { + for _, customRule := range s.customRiskRules { for _, tag := range customRule.Tags { supportedTags[strings.ToLower(tag)] = true } @@ -245,7 +220,7 @@ func (s *server) addSupportedTags(input []byte) []byte { return input } sort.Strings(tags) - if s.configuration.Verbose { + if s.config.Verbose { fmt.Print("Supported tags of all risk rules: ") for i, tag := range tags { if i > 0 { @@ -272,7 +247,7 @@ func arrayOfStringValues(values []types.TypeEnum) []string { func (s *server) stats(ginContext *gin.Context) { keyCount, modelCount := 0, 0 - keyFolders, err := os.ReadDir(filepath.Join(s.configuration.ServerFolder, s.configuration.KeyDir)) + keyFolders, err := os.ReadDir(filepath.Join(s.config.ServerFolder, s.config.KeyFolder)) if err != nil { log.Println(err) ginContext.JSON(http.StatusInternalServerError, gin.H{ @@ -289,7 +264,7 @@ func (s *server) stats(ginContext *gin.Context) { }) return } - modelFolders, err := os.ReadDir(filepath.Join(s.configuration.ServerFolder, s.configuration.KeyDir, keyFolder.Name())) + modelFolders, err := os.ReadDir(filepath.Join(s.config.ServerFolder, s.config.KeyFolder, keyFolder.Name())) if err != nil { log.Println(err) ginContext.JSON(http.StatusInternalServerError, gin.H{ diff --git a/pkg/server/token.go b/pkg/server/token.go index d77fed28..d9acb2b6 100644 --- a/pkg/server/token.go +++ b/pkg/server/token.go @@ -263,7 +263,7 @@ func (s *server) checkTokenToFolderName(ginContext *gin.Context) (folderNameOfKe func (s *server) 
folderNameFromKey(key []byte) string { sha512Hash := hashSHA256(key) - return filepath.Join(s.configuration.ServerFolder, s.configuration.KeyDir, sha512Hash) + return filepath.Join(s.config.ServerFolder, s.config.KeyFolder, sha512Hash) } func (s *server) housekeepingTokenMaps() { From d953aa944db770eb19113715317c114d687d0fed Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Thu, 4 Jan 2024 13:38:44 +0000 Subject: [PATCH 28/68] Remove server related code from context.go --- internal/threagile/context.go | 506 +--------------------------------- 1 file changed, 3 insertions(+), 503 deletions(-) diff --git a/internal/threagile/context.go b/internal/threagile/context.go index 6811296c..c090bcb6 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -2,27 +2,14 @@ package threagile import ( "bufio" - "bytes" - "compress/gzip" - "crypto/aes" - "crypto/cipher" - "crypto/rand" "crypto/sha256" - "crypto/sha512" - "encoding/base64" "encoding/hex" "errors" "flag" "fmt" // TODO: no fmt.Println here - "github.com/gin-gonic/gin" - "github.com/google/uuid" - "github.com/threagile/threagile/pkg/model" - "github.com/threagile/threagile/pkg/security/risks" - "golang.org/x/crypto/argon2" "hash/fnv" "io" "log" - "net/http" "os" "os/exec" "path/filepath" @@ -31,8 +18,9 @@ import ( "sort" "strconv" "strings" - "sync" - "time" + + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/security/risks" "github.com/threagile/threagile/pkg/common" @@ -980,118 +968,6 @@ func (context *Context) applyRAA() string { return runner.ErrorOutput } -func (context *Context) exampleFile(ginContext *gin.Context) { - example, err := os.ReadFile(filepath.Join(context.Config.AppFolder, "threagile-example-model.yaml")) - checkErr(err) - ginContext.Data(http.StatusOK, gin.MIMEYAML, example) -} - -func (context *Context) stubFile(ginContext *gin.Context) { - stub, err := os.ReadFile(filepath.Join(context.Config.AppFolder, "threagile-stub-model.yaml")) 
- checkErr(err) - ginContext.Data(http.StatusOK, gin.MIMEYAML, context.addSupportedTags(stub)) // TODO use also the MIMEYAML way of serving YAML in model export? -} - -func (context *Context) addSupportedTags(input []byte) []byte { - // add distinct tags as "tags_available" - supportedTags := make(map[string]bool) - for _, customRule := range context.customRiskRules { - for _, tag := range customRule.Tags { - supportedTags[strings.ToLower(tag)] = true - } - } - - for _, rule := range context.builtinRiskRules { - for _, tag := range rule.SupportedTags() { - supportedTags[strings.ToLower(tag)] = true - } - } - - tags := make([]string, 0, len(supportedTags)) - for t := range supportedTags { - tags = append(tags, t) - } - if len(tags) == 0 { - return input - } - sort.Strings(tags) - if context.Config.Verbose { - fmt.Print("Supported tags of all risk rules: ") - for i, tag := range tags { - if i > 0 { - fmt.Print(", ") - } - fmt.Print(tag) - } - fmt.Println() - } - replacement := "tags_available:" - for _, tag := range tags { - replacement += "\n - " + tag - } - return []byte(strings.Replace(string(input), "tags_available:", replacement, 1)) -} - -const keySize = 32 - -func (context *Context) stats(ginContext *gin.Context) { - keyCount, modelCount := 0, 0 - keyFolders, err := os.ReadDir(filepath.Join(context.Config.ServerFolder, context.Config.KeyFolder)) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to collect stats", - }) - return - } - for _, keyFolder := range keyFolders { - if len(keyFolder.Name()) == 128 { // it's a sha512 token hash probably, so count it as token folder for the stats - keyCount++ - if keyFolder.Name() != filepath.Clean(keyFolder.Name()) { - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "weird file path", - }) - return - } - modelFolders, err := os.ReadDir(filepath.Join(context.Config.ServerFolder, context.Config.KeyFolder, keyFolder.Name())) - if err != nil { - 
log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to collect stats", - }) - return - } - for _, modelFolder := range modelFolders { - if len(modelFolder.Name()) == 36 { // it's a uuid model folder probably, so count it as model folder for the stats - modelCount++ - } - } - } - } - // TODO collect and deliver more stats (old model count?) and health info - ginContext.JSON(http.StatusOK, gin.H{ - "key_count": keyCount, - "model_count": modelCount, - "success_count": context.successCount, - "error_count": context.errorCount, - }) -} - -type payloadDataAsset struct { - Title string `yaml:"title" json:"title"` - Id string `yaml:"id" json:"id"` - Description string `yaml:"description" json:"description"` - Usage string `yaml:"usage" json:"usage"` - Tags []string `yaml:"tags" json:"tags"` - Origin string `yaml:"origin" json:"origin"` - Owner string `yaml:"owner" json:"owner"` - Quantity string `yaml:"quantity" json:"quantity"` - Confidentiality string `yaml:"confidentiality" json:"confidentiality"` - Integrity string `yaml:"integrity" json:"integrity"` - Availability string `yaml:"availability" json:"availability"` - JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` -} - func (context *Context) checkTechnicalAssetsExisting(modelInput input.ModelInput, techAssetIDs []string) (ok bool) { for _, techAssetID := range techAssetIDs { exists := false @@ -1108,382 +984,6 @@ func (context *Context) checkTechnicalAssetsExisting(modelInput input.ModelInput return true } -func (context *Context) populateDataAsset(ginContext *gin.Context, payload payloadDataAsset) (dataAssetInput input.InputDataAsset, ok bool) { - usage, err := types.ParseUsage(payload.Usage) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return dataAssetInput, false - } - quantity, err := types.ParseQuantity(payload.Quantity) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return dataAssetInput, 
false - } - confidentiality, err := types.ParseConfidentiality(payload.Confidentiality) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return dataAssetInput, false - } - integrity, err := types.ParseCriticality(payload.Integrity) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return dataAssetInput, false - } - availability, err := types.ParseCriticality(payload.Availability) - if err != nil { - handleErrorInServiceCall(err, ginContext) - return dataAssetInput, false - } - dataAssetInput = input.InputDataAsset{ - ID: payload.Id, - Description: payload.Description, - Usage: usage.String(), - Tags: lowerCaseAndTrim(payload.Tags), - Origin: payload.Origin, - Owner: payload.Owner, - Quantity: quantity.String(), - Confidentiality: confidentiality.String(), - Integrity: integrity.String(), - Availability: availability.String(), - JustificationCiaRating: payload.JustificationCiaRating, - } - return dataAssetInput, true -} - -func handleErrorInServiceCall(err error, ginContext *gin.Context) { - log.Println(err) - ginContext.JSON(http.StatusBadRequest, gin.H{ - "error": strings.TrimSpace(err.Error()), - }) -} - -func (context *Context) checkModelFolder(ginContext *gin.Context, modelUUID string, folderNameOfKey string) (modelFolder string, ok bool) { - uuidParsed, err := uuid.Parse(modelUUID) - if err != nil { - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "model not found", - }) - return modelFolder, false - } - modelFolder = folderNameForModel(folderNameOfKey, uuidParsed.String()) - if _, err := os.Stat(modelFolder); os.IsNotExist(err) { - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "model not found", - }) - return modelFolder, false - } - return modelFolder, true -} - -func (context *Context) readModel(ginContext *gin.Context, modelUUID string, key []byte, folderNameOfKey string) (modelInputResult input.ModelInput, yamlText string, ok bool) { - modelFolder, ok := context.checkModelFolder(ginContext, modelUUID, 
folderNameOfKey) - if !ok { - return modelInputResult, yamlText, false - } - cryptoKey := context.generateKeyFromAlreadyStrongRandomInput(key) - block, err := aes.NewCipher(cryptoKey) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to open model", - }) - return modelInputResult, yamlText, false - } - aesGcm, err := cipher.NewGCM(block) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to open model", - }) - return modelInputResult, yamlText, false - } - - fileBytes, err := os.ReadFile(filepath.Join(modelFolder, context.Config.InputFile)) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to open model", - }) - return modelInputResult, yamlText, false - } - - nonce := fileBytes[0:12] - ciphertext := fileBytes[12:] - plaintext, err := aesGcm.Open(nil, nonce, ciphertext, nil) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to open model", - }) - return modelInputResult, yamlText, false - } - - r, err := gzip.NewReader(bytes.NewReader(plaintext)) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to open model", - }) - return modelInputResult, yamlText, false - } - buf := new(bytes.Buffer) - _, _ = buf.ReadFrom(r) - modelInput := new(input.ModelInput).Defaults() - yamlBytes := buf.Bytes() - err = yaml.Unmarshal(yamlBytes, &modelInput) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to open model", - }) - return modelInputResult, yamlText, false - } - return *modelInput, string(yamlBytes), true -} - -func (context *Context) writeModel(ginContext *gin.Context, key []byte, folderNameOfKey string, modelInput *input.ModelInput, changeReasonForHistory string) (ok bool) { - modelFolder, ok := 
context.checkModelFolder(ginContext, ginContext.Param("model-id"), folderNameOfKey) - if ok { - modelInput.ThreagileVersion = docs.ThreagileVersion - yamlBytes, err := yaml.Marshal(modelInput) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to write model", - }) - return false - } - /* - yamlBytes = model.ReformatYAML(yamlBytes) - */ - return context.writeModelYAML(ginContext, string(yamlBytes), key, modelFolder, changeReasonForHistory, false) - } - return false -} - -func (context *Context) writeModelYAML(ginContext *gin.Context, yaml string, key []byte, modelFolder string, changeReasonForHistory string, skipBackup bool) (ok bool) { - if context.Config.Verbose { - fmt.Println("about to write " + strconv.Itoa(len(yaml)) + " bytes of yaml into model folder: " + modelFolder) - } - var b bytes.Buffer - w := gzip.NewWriter(&b) - _, _ = w.Write([]byte(yaml)) - _ = w.Close() - plaintext := b.Bytes() - cryptoKey := context.generateKeyFromAlreadyStrongRandomInput(key) - block, err := aes.NewCipher(cryptoKey) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to write model", - }) - return false - } - // Never use more than 2^32 random nonces with a given key because of the risk of a repeat. 
- nonce := make([]byte, 12) - if _, err := io.ReadFull(rand.Reader, nonce); err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to write model", - }) - return false - } - aesGcm, err := cipher.NewGCM(block) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to write model", - }) - return false - } - ciphertext := aesGcm.Seal(nil, nonce, plaintext, nil) - if !skipBackup { - err = context.backupModelToHistory(modelFolder, changeReasonForHistory) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to write model", - }) - return false - } - } - f, err := os.Create(filepath.Join(modelFolder, context.Config.InputFile)) - if err != nil { - log.Println(err) - ginContext.JSON(http.StatusInternalServerError, gin.H{ - "error": "unable to write model", - }) - return false - } - _, _ = f.Write(nonce) - _, _ = f.Write(ciphertext) - _ = f.Close() - return true -} - -func (context *Context) backupModelToHistory(modelFolder string, changeReasonForHistory string) (err error) { - historyFolder := filepath.Join(modelFolder, "history") - if _, err := os.Stat(historyFolder); os.IsNotExist(err) { - err = os.Mkdir(historyFolder, 0700) - if err != nil { - return err - } - } - inputModel, err := os.ReadFile(filepath.Join(modelFolder, context.Config.InputFile)) - if err != nil { - return err - } - historyFile := filepath.Join(historyFolder, time.Now().Format("2006-01-02 15:04:05")+" "+changeReasonForHistory+".backup") - err = os.WriteFile(historyFile, inputModel, 0400) - if err != nil { - return err - } - // now delete any old files if over limit to keep - files, err := os.ReadDir(historyFolder) - if err != nil { - return err - } - if len(files) > context.Config.BackupHistoryFilesToKeep { - requiredToDelete := len(files) - context.Config.BackupHistoryFilesToKeep - sort.Slice(files, func(i, j int) bool { - return 
files[i].Name() < files[j].Name() - }) - for _, file := range files { - requiredToDelete-- - if file.Name() != filepath.Clean(file.Name()) { - return fmt.Errorf("weird file name %v", file.Name()) - } - err = os.Remove(filepath.Join(historyFolder, file.Name())) - if err != nil { - return err - } - if requiredToDelete <= 0 { - break - } - } - } - return -} - -func (context *Context) generateKeyFromAlreadyStrongRandomInput(alreadyRandomInput []byte) []byte { - // Establish the parameters to use for Argon2. - p := &argon2Params{ - memory: 64 * 1024, - iterations: 3, - parallelism: 2, - saltLength: 16, - keyLength: keySize, - } - // As the input is already cryptographically secure random, the salt is simply the first n bytes - salt := alreadyRandomInput[0:p.saltLength] - hash := argon2.IDKey(alreadyRandomInput[p.saltLength:], salt, p.iterations, p.memory, p.parallelism, p.keyLength) - return hash -} - -func folderNameForModel(folderNameOfKey string, uuid string) string { - return filepath.Join(folderNameOfKey, uuid) -} - -type argon2Params struct { - memory uint32 - iterations uint32 - parallelism uint8 - saltLength uint32 - keyLength uint32 -} - -var throttlerLock sync.Mutex - -var createdObjectsThrottler = make(map[string][]int64) - -func (context *Context) checkObjectCreationThrottler(ginContext *gin.Context, typeName string) bool { - throttlerLock.Lock() - defer throttlerLock.Unlock() - - // remove all elements older than 3 minutes (= 180000000000 ns) - now := time.Now().UnixNano() - cutoff := now - 180000000000 - for keyCheck := range createdObjectsThrottler { - for i := 0; i < len(createdObjectsThrottler[keyCheck]); i++ { - if createdObjectsThrottler[keyCheck][i] < cutoff { - // Remove the element at index i from slice (safe while looping using i as iterator) - createdObjectsThrottler[keyCheck] = append(createdObjectsThrottler[keyCheck][:i], createdObjectsThrottler[keyCheck][i+1:]...) 
- i-- // Since we just deleted a[i], we must redo that index - } - } - length := len(createdObjectsThrottler[keyCheck]) - if length == 0 { - delete(createdObjectsThrottler, keyCheck) - } - /* - if *verbose { - log.Println("Throttling count: "+strconv.Itoa(length)) - } - */ - } - - // check current request - keyHash := hash(typeName) // getting the real client ip is not easy inside fully encapsulated containerized runtime - if _, ok := createdObjectsThrottler[keyHash]; !ok { - createdObjectsThrottler[keyHash] = make([]int64, 0) - } - // check the limit of 20 creations for this type per 3 minutes - withinLimit := len(createdObjectsThrottler[keyHash]) < 20 - if withinLimit { - createdObjectsThrottler[keyHash] = append(createdObjectsThrottler[keyHash], now) - return true - } - ginContext.JSON(http.StatusTooManyRequests, gin.H{ - "error": "object creation throttling exceeded (denial-of-service protection): please wait some time and try again", - }) - return false -} - -func (context *Context) folderNameFromKey(key []byte) string { - sha512Hash := hashSHA256(key) - return filepath.Join(context.Config.ServerFolder, context.Config.KeyFolder, sha512Hash) -} - -func hashSHA256(key []byte) string { - hasher := sha512.New() - hasher.Write(key) - return hex.EncodeToString(hasher.Sum(nil)) -} - -type keyHeader struct { - Key string `header:"key"` -} - -func (context *Context) checkKeyToFolderName(ginContext *gin.Context) (folderNameOfKey string, key []byte, ok bool) { - header := keyHeader{} - if err := ginContext.ShouldBindHeader(&header); err != nil { - log.Println(err) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "key not found", - }) - return folderNameOfKey, key, false - } - key, err := base64.RawURLEncoding.DecodeString(strings.TrimSpace(header.Key)) - if len(key) == 0 || err != nil { - if err != nil { - log.Println(err) - } - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "key not found", - }) - return folderNameOfKey, key, false - } - folderNameOfKey 
= context.folderNameFromKey(key) - if _, err := os.Stat(folderNameOfKey); os.IsNotExist(err) { - log.Println(err) - ginContext.JSON(http.StatusNotFound, gin.H{ - "error": "key not found", - }) - return folderNameOfKey, key, false - } - return folderNameOfKey, key, true -} - func (context *Context) userHomeDir() string { switch runtime.GOOS { case "windows": From 25268ff75f9102bb0f17930cd875233b2fc0c067 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Thu, 4 Jan 2024 13:42:31 +0000 Subject: [PATCH 29/68] Remove server related variables from context.go --- internal/threagile/context.go | 2 -- 1 file changed, 2 deletions(-) diff --git a/internal/threagile/context.go b/internal/threagile/context.go index c090bcb6..4d83097c 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -47,8 +47,6 @@ type Context struct { ServerMode bool - successCount int - errorCount int drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks bool modelInput input.ModelInput From aa558e0c50bd174f000d6d2b1a6fa52cb0e26266 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Thu, 4 Jan 2024 13:46:29 +0000 Subject: [PATCH 30/68] Remove modelInput from context object because it's just a variable during parsing model --- internal/threagile/context.go | 33 ++++++++++++++++----------------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/internal/threagile/context.go b/internal/threagile/context.go index 4d83097c..2398ceef 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -49,7 +49,6 @@ type Context struct { drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks bool - modelInput input.ModelInput parsedModel types.ParsedModel generateDataFlowDiagram, generateDataAssetDiagram, generateRisksJSON, generateTechnicalAssetsJSON bool @@ -500,8 +499,8 @@ func (context *Context) DoIt() { fmt.Println("Parsing model:", context.Config.InputFile) } - context.modelInput = *new(input.ModelInput).Defaults() - loadError := 
context.modelInput.Load(context.Config.InputFile) + modelInput := *new(input.ModelInput).Defaults() + loadError := modelInput.Load(context.Config.InputFile) if loadError != nil { log.Fatal("Unable to load model yaml: ", loadError) } @@ -512,7 +511,7 @@ func (context *Context) DoIt() { } context.customRiskRules = types.LoadCustomRiskRules(context.Config.RiskRulesPlugins, context.progressReporter) - parsedModel, parseError := model.ParseModel(&context.modelInput, context.builtinRiskRules, context.customRiskRules) + parsedModel, parseError := model.ParseModel(&modelInput, context.builtinRiskRules, context.customRiskRules) if parseError != nil { log.Fatal("Unable to parse model yaml: ", parseError) } @@ -734,17 +733,17 @@ func (context *Context) DoIt() { var err error switch macroDetails.ID { case addbuildpipeline.GetMacroDetails().ID: - changes, message, validResult, err = addbuildpipeline.GetFinalChangeImpact(&context.modelInput, &context.parsedModel) + changes, message, validResult, err = addbuildpipeline.GetFinalChangeImpact(&modelInput, &context.parsedModel) case addvault.GetMacroDetails().ID: - changes, message, validResult, err = addvault.GetFinalChangeImpact(&context.modelInput, &context.parsedModel) + changes, message, validResult, err = addvault.GetFinalChangeImpact(&modelInput, &context.parsedModel) case prettyprint.GetMacroDetails().ID: - changes, message, validResult, err = prettyprint.GetFinalChangeImpact(&context.modelInput) + changes, message, validResult, err = prettyprint.GetFinalChangeImpact(&modelInput) case removeunusedtags.GetMacroDetails().ID: - changes, message, validResult, err = removeunusedtags.GetFinalChangeImpact(&context.modelInput) + changes, message, validResult, err = removeunusedtags.GetFinalChangeImpact(&modelInput) case seedrisktracking.GetMacroDetails().ID: - changes, message, validResult, err = seedrisktracking.GetFinalChangeImpact(&context.modelInput) + changes, message, validResult, err = 
seedrisktracking.GetFinalChangeImpact(&modelInput) case seedtags.GetMacroDetails().ID: - changes, message, validResult, err = seedtags.GetFinalChangeImpact(&context.modelInput) + changes, message, validResult, err = seedtags.GetFinalChangeImpact(&modelInput) } checkErr(err) for _, change := range changes { @@ -770,17 +769,17 @@ func (context *Context) DoIt() { var err error switch macroDetails.ID { case addbuildpipeline.GetMacroDetails().ID: - message, validResult, err = addbuildpipeline.Execute(&context.modelInput, &context.parsedModel) + message, validResult, err = addbuildpipeline.Execute(&modelInput, &context.parsedModel) case addvault.GetMacroDetails().ID: - message, validResult, err = addvault.Execute(&context.modelInput, &context.parsedModel) + message, validResult, err = addvault.Execute(&modelInput, &context.parsedModel) case prettyprint.GetMacroDetails().ID: - message, validResult, err = prettyprint.Execute(&context.modelInput) + message, validResult, err = prettyprint.Execute(&modelInput) case removeunusedtags.GetMacroDetails().ID: - message, validResult, err = removeunusedtags.Execute(&context.modelInput, &context.parsedModel) + message, validResult, err = removeunusedtags.Execute(&modelInput, &context.parsedModel) case seedrisktracking.GetMacroDetails().ID: - message, validResult, err = seedrisktracking.Execute(&context.parsedModel, &context.modelInput) + message, validResult, err = seedrisktracking.Execute(&context.parsedModel, &modelInput) case seedtags.GetMacroDetails().ID: - message, validResult, err = seedtags.Execute(&context.modelInput, &context.parsedModel) + message, validResult, err = seedtags.Execute(&modelInput, &context.parsedModel) } checkErr(err) if !validResult { @@ -794,7 +793,7 @@ func (context *Context) DoIt() { _, err = copyFile(context.Config.InputFile, backupFilename) checkErr(err) fmt.Println("Updating model") - yamlBytes, err := yaml.Marshal(context.modelInput) + yamlBytes, err := yaml.Marshal(modelInput) checkErr(err) /* 
yamlBytes = model.ReformatYAML(yamlBytes) From 7b81de0fbd026ea02b82a0ee3410c43ffb90cfaa Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Thu, 4 Jan 2024 13:58:17 +0000 Subject: [PATCH 31/68] Make drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks as variable instead of part of context --- internal/threagile/context.go | 63 +++++++++++------------------------ 1 file changed, 19 insertions(+), 44 deletions(-) diff --git a/internal/threagile/context.go b/internal/threagile/context.go index 2398ceef..c108d630 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -47,8 +47,6 @@ type Context struct { ServerMode bool - drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks bool - parsedModel types.ParsedModel generateDataFlowDiagram, generateDataAssetDiagram, generateRisksJSON, generateTechnicalAssetsJSON bool @@ -101,7 +99,6 @@ func (context *Context) Init() *Context { *context = Context{ customRiskRules: make(map[string]*types.CustomRisk), builtinRiskRules: make(map[string]types.RiskRule), - drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks: true, } return context @@ -209,24 +206,25 @@ func (context *Context) writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT strin tweaks += "\n ranksep=\"" + strconv.Itoa(context.parsedModel.DiagramTweakRanksep) + "\"" } suppressBidirectionalArrows := true + drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks := true splines := "ortho" if len(context.parsedModel.DiagramTweakEdgeLayout) > 0 { switch context.parsedModel.DiagramTweakEdgeLayout { case "spline": splines = "spline" - context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false + drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false case "polyline": splines = "polyline" - context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false + drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false case "ortho": splines = "ortho" suppressBidirectionalArrows 
= true case "curved": splines = "curved" - context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false + drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false case "false": splines = "false" - context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false + drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false default: panic(errors.New("unknown value for diagram_tweak_suppress_edge_labels (spline, polyline, ortho, curved, false): " + context.parsedModel.DiagramTweakEdgeLayout)) @@ -274,7 +272,7 @@ func (context *Context) writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT strin trustBoundary := context.parsedModel.TrustBoundaries[key] var snippet strings.Builder if len(trustBoundary.TechnicalAssetsInside) > 0 || len(trustBoundary.TrustBoundariesNested) > 0 { - if context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks { + if drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks { // see https://stackoverflow.com/questions/17247455/how-do-i-add-extra-space-between-clusters?noredirect=1&lq=1 snippet.WriteString("\n subgraph cluster_space_boundary_for_layout_only_1" + hash(trustBoundary.Id) + " {\n") snippet.WriteString(` graph [ @@ -337,7 +335,7 @@ func (context *Context) writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT strin snippet.WriteString(";\n") } snippet.WriteString(" }\n\n") - if context.drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks { + if drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks { snippet.WriteString(" }\n\n") } } @@ -659,17 +657,17 @@ func (context *Context) DoIt() { } else if strings.ToLower(answer) == "back" { switch macroDetails.ID { case addbuildpipeline.GetMacroDetails().ID: - message, validResult, err = addbuildpipeline.GoBack() + message, validResult, _ = addbuildpipeline.GoBack() case addvault.GetMacroDetails().ID: - message, validResult, err = addvault.GoBack() + message, validResult, _ = addvault.GoBack() case 
prettyprint.GetMacroDetails().ID: - message, validResult, err = prettyprint.GoBack() + message, validResult, _ = prettyprint.GoBack() case removeunusedtags.GetMacroDetails().ID: - message, validResult, err = removeunusedtags.GoBack() + message, validResult, _ = removeunusedtags.GoBack() case seedrisktracking.GetMacroDetails().ID: - message, validResult, err = seedrisktracking.GoBack() + message, validResult, _ = seedrisktracking.GoBack() case seedtags.GetMacroDetails().ID: - message, validResult, err = seedtags.GoBack() + message, validResult, _ = seedtags.GoBack() } } else if len(answer) > 0 { // individual answer if nextQuestion.IsValueConstrained() { @@ -682,17 +680,17 @@ func (context *Context) DoIt() { } switch macroDetails.ID { case addbuildpipeline.GetMacroDetails().ID: - message, validResult, err = addbuildpipeline.ApplyAnswer(nextQuestion.ID, answer) + message, validResult, _ = addbuildpipeline.ApplyAnswer(nextQuestion.ID, answer) case addvault.GetMacroDetails().ID: - message, validResult, err = addvault.ApplyAnswer(nextQuestion.ID, answer) + message, validResult, _ = addvault.ApplyAnswer(nextQuestion.ID, answer) case prettyprint.GetMacroDetails().ID: - message, validResult, err = prettyprint.ApplyAnswer(nextQuestion.ID, answer) + message, validResult, _ = prettyprint.ApplyAnswer(nextQuestion.ID, answer) case removeunusedtags.GetMacroDetails().ID: - message, validResult, err = removeunusedtags.ApplyAnswer(nextQuestion.ID, answer) + message, validResult, _ = removeunusedtags.ApplyAnswer(nextQuestion.ID, answer) case seedrisktracking.GetMacroDetails().ID: - message, validResult, err = seedrisktracking.ApplyAnswer(nextQuestion.ID, answer) + message, validResult, _ = seedrisktracking.ApplyAnswer(nextQuestion.ID, answer) case seedtags.GetMacroDetails().ID: - message, validResult, err = seedtags.ApplyAnswer(nextQuestion.ID, answer) + message, validResult, _ = seedtags.ApplyAnswer(nextQuestion.ID, answer) } } } else { @@ -965,22 +963,6 @@ func (context *Context) 
applyRAA() string { return runner.ErrorOutput } -func (context *Context) checkTechnicalAssetsExisting(modelInput input.ModelInput, techAssetIDs []string) (ok bool) { - for _, techAssetID := range techAssetIDs { - exists := false - for _, val := range modelInput.TechnicalAssets { - if val.ID == techAssetID { - exists = true - break - } - } - if !exists { - return false - } - } - return true -} - func (context *Context) userHomeDir() string { switch runtime.GOOS { case "windows": @@ -1407,13 +1389,6 @@ func checkErr(err error) { } } -func lowerCaseAndTrim(tags []string) []string { - for i := range tags { - tags[i] = strings.ToLower(strings.TrimSpace(tags[i])) - } - return tags -} - func contains(a []string, x string) bool { for _, n := range a { if x == n { From 68e1e923cdd0ed560c9226b19b56e494baefbe44 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Thu, 4 Jan 2024 19:31:03 +0000 Subject: [PATCH 32/68] Allow to run application with cobra cli --- internal/threagile/context.go | 257 +++++++++++++++++++--------------- internal/threagile/flags.go | 36 +++++ internal/threagile/root.go | 93 +++++++++++- internal/threagile/rules.go | 22 +-- pkg/common/consts.go | 2 + 5 files changed, 269 insertions(+), 141 deletions(-) diff --git a/internal/threagile/context.go b/internal/threagile/context.go index c108d630..a8c0123f 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -42,20 +42,41 @@ import ( "github.com/threagile/threagile/pkg/security/types" ) +type GenerateCommands struct { + DataFlowDiagram bool + DataAssetDiagram bool + RisksJSON bool + TechnicalAssetsJSON bool + StatsJSON bool + RisksExcel bool + TagsExcel bool + ReportPDF bool +} + +func (c *GenerateCommands) Defaults() *GenerateCommands { + *c = GenerateCommands{ + DataFlowDiagram: true, + DataAssetDiagram: true, + RisksJSON: true, + TechnicalAssetsJSON: true, + StatsJSON: true, + RisksExcel: true, + TagsExcel: true, + ReportPDF: true, + } + return c +} + type Context struct { 
common.Config + *GenerateCommands ServerMode bool parsedModel types.ParsedModel - generateDataFlowDiagram, generateDataAssetDiagram, generateRisksJSON, generateTechnicalAssetsJSON bool - generateStatsJSON, generateRisksExcel, generateTagsExcel, generateReportPDF bool - customRiskRules map[string]*types.CustomRisk builtinRiskRules map[string]types.RiskRule - - progressReporter common.ProgressReporter } func (context *Context) addToListOfSupportedTags(tags []string) { @@ -99,6 +120,7 @@ func (context *Context) Init() *Context { *context = Context{ customRiskRules: make(map[string]*types.CustomRisk), builtinRiskRules: make(map[string]types.RiskRule), + GenerateCommands: &GenerateCommands{}, } return context @@ -107,6 +129,7 @@ func (context *Context) Init() *Context { func (context *Context) Defaults(buildTimestamp string) *Context { *context = *new(Context).Init() context.Config.Defaults(buildTimestamp) + context.GenerateCommands.Defaults() return context } @@ -473,7 +496,6 @@ func (context *Context) makeDiagramInvisibleConnectionsTweaks() string { } func (context *Context) DoIt() { - defer func() { var err error if r := recover(); r != nil { @@ -485,6 +507,12 @@ func (context *Context) DoIt() { os.Exit(2) } }() + + var progressReporter common.ProgressReporter = common.SilentProgressReporter{} + if context.Config.Verbose { + progressReporter = common.CommandLineProgressReporter{} + } + if len(context.Config.ExecuteModelMacro) > 0 { fmt.Println(docs.Logo + "\n\n" + docs.VersionText) } else { @@ -507,7 +535,7 @@ func (context *Context) DoIt() { for _, rule := range risks.GetBuiltInRiskRules() { context.builtinRiskRules[rule.Category().Id] = rule } - context.customRiskRules = types.LoadCustomRiskRules(context.Config.RiskRulesPlugins, context.progressReporter) + context.customRiskRules = types.LoadCustomRiskRules(context.Config.RiskRulesPlugins, progressReporter) parsedModel, parseError := model.ParseModel(&modelInput, context.builtinRiskRules, context.customRiskRules) 
if parseError != nil { @@ -808,13 +836,19 @@ func (context *Context) DoIt() { } } - renderPDF := context.generateReportPDF - if renderPDF { // as the PDF report includes both diagrams - context.generateDataFlowDiagram, context.generateDataAssetDiagram = true, true + if context.GenerateCommands.ReportPDF { // as the PDF report includes both diagrams + context.GenerateCommands.DataFlowDiagram = true + context.GenerateCommands.DataAssetDiagram = true } + diagramDPI := context.Config.DiagramDPI + if diagramDPI < common.MinGraphvizDPI { + diagramDPI = common.MinGraphvizDPI + } else if diagramDPI > common.MaxGraphvizDPI { + diagramDPI = common.MaxGraphvizDPI + } // Data-flow Diagram rendering - if context.generateDataFlowDiagram { + if context.GenerateCommands.DataFlowDiagram { gvFile := filepath.Join(context.Config.OutputFolder, context.Config.DataFlowDiagramFilenameDOT) if !context.Config.KeepDiagramSourceFiles { tmpFileGV, err := os.CreateTemp(context.Config.TempFolder, context.Config.DataFlowDiagramFilenameDOT) @@ -822,11 +856,11 @@ func (context *Context) DoIt() { gvFile = tmpFileGV.Name() defer func() { _ = os.Remove(gvFile) }() } - dotFile := context.writeDataFlowDiagramGraphvizDOT(gvFile, context.Config.DiagramDPI) + dotFile := context.writeDataFlowDiagramGraphvizDOT(gvFile, diagramDPI) context.generateDataFlowDiagramGraphvizImage(dotFile, context.Config.OutputFolder) } // Data Asset Diagram rendering - if context.generateDataAssetDiagram { + if context.GenerateCommands.DataAssetDiagram { gvFile := filepath.Join(context.Config.OutputFolder, context.Config.DataAssetDiagramFilenameDOT) if !context.Config.KeepDiagramSourceFiles { tmpFile, err := os.CreateTemp(context.Config.TempFolder, context.Config.DataAssetDiagramFilenameDOT) @@ -834,12 +868,12 @@ func (context *Context) DoIt() { gvFile = tmpFile.Name() defer func() { _ = os.Remove(gvFile) }() } - dotFile := context.writeDataAssetDiagramGraphvizDOT(gvFile, context.Config.DiagramDPI) + dotFile := 
context.writeDataAssetDiagramGraphvizDOT(gvFile, diagramDPI) context.generateDataAssetDiagramGraphvizImage(dotFile, context.Config.OutputFolder) } // risks as risks json - if context.generateRisksJSON { + if context.GenerateCommands.RisksJSON { if context.Config.Verbose { fmt.Println("Writing risks json") } @@ -847,7 +881,7 @@ func (context *Context) DoIt() { } // technical assets json - if context.generateTechnicalAssetsJSON { + if context.GenerateCommands.TechnicalAssetsJSON { if context.Config.Verbose { fmt.Println("Writing technical assets json") } @@ -855,7 +889,7 @@ func (context *Context) DoIt() { } // risks as risks json - if context.generateStatsJSON { + if context.GenerateCommands.StatsJSON { if context.Config.Verbose { fmt.Println("Writing stats json") } @@ -863,7 +897,7 @@ func (context *Context) DoIt() { } // risks Excel - if context.generateRisksExcel { + if context.GenerateCommands.RisksExcel { if context.Config.Verbose { fmt.Println("Writing risks excel") } @@ -871,14 +905,14 @@ func (context *Context) DoIt() { } // tags Excel - if context.generateTagsExcel { + if context.GenerateCommands.TagsExcel { if context.Config.Verbose { fmt.Println("Writing tags excel") } report.WriteTagsExcelToFile(&context.parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.ExcelTagsFilename)) } - if renderPDF { + if context.GenerateCommands.ReportPDF { // hash the YAML input file f, err := os.Open(context.Config.InputFile) checkErr(err) @@ -963,102 +997,6 @@ func (context *Context) applyRAA() string { return runner.ErrorOutput } -func (context *Context) userHomeDir() string { - switch runtime.GOOS { - case "windows": - home := os.Getenv("HOMEDRIVE") + os.Getenv("HOMEPATH") - if home == "" { - home = os.Getenv("USERPROFILE") - } - return home - - default: - return os.Getenv("HOME") - } -} - -func (context *Context) expandPath(path string) string { - home := context.userHomeDir() - if strings.HasPrefix(path, "~") { - path = strings.Replace(path, "~", 
home, 1) - } - - if strings.HasPrefix(path, "$HOME") { - path = strings.Replace(path, "$HOME", home, -1) - } - - return path -} - -func (context *Context) ParseCommandlineArgs() *Context { - configFile := flag.String("config", "", "config file") - configError := context.Config.Load(*configFile) - if configError != nil { - fmt.Printf("WARNING: failed to load config file %q: %v\n", *configFile, configError) - } - - // folders - flag.StringVar(&context.Config.AppFolder, "app-dir", common.AppDir, "app folder (default: "+common.AppDir+")") - flag.StringVar(&context.Config.ServerFolder, "server-dir", common.DataDir, "base folder for server mode (default: "+common.DataDir+")") - flag.StringVar(&context.Config.TempFolder, "temp-dir", common.TempDir, "temporary folder location") - flag.StringVar(&context.Config.BinFolder, "bin-dir", common.BinDir, "binary folder location") - flag.StringVar(&context.Config.OutputFolder, "output", ".", "output directory") - - // files - flag.StringVar(&context.Config.InputFile, "model", common.InputFile, "input model yaml file") - flag.StringVar(&context.RAAPlugin, "raa-run", "raa_calc", "RAA calculation run file name") - - // flags / parameters - flag.BoolVar(&context.Config.Verbose, "verbose", false, "verbose output") - flag.IntVar(&context.Config.DiagramDPI, "diagram-dpi", context.Config.DiagramDPI, "DPI used to render: maximum is "+strconv.Itoa(context.Config.MaxGraphvizDPI)+"") - flag.StringVar(&context.Config.SkipRiskRules, "skip-risk-rules", "", "comma-separated list of risk rules (by their ID) to skip") - flag.BoolVar(&context.Config.IgnoreOrphanedRiskTracking, "ignore-orphaned-risk-tracking", false, "ignore orphaned risk tracking (just log them) not matching a concrete risk") - flag.IntVar(&context.Config.ServerPort, "server", 0, "start a server (instead of commandline execution) on the given port") - flag.StringVar(&context.Config.ExecuteModelMacro, "execute-model-macro", "", "Execute model macro (by ID)") - 
flag.StringVar(&context.Config.TemplateFilename, "background", "background.pdf", "background pdf file") - riskRulesPlugins := flag.String("custom-risk-rules-plugins", "", "comma-separated list of plugins file names with custom risk rules to load") - context.Config.RiskRulesPlugins = strings.Split(*riskRulesPlugins, ",") - - // commands - flag.BoolVar(&context.generateDataFlowDiagram, "generate-data-flow-diagram", true, "generate data-flow diagram") - flag.BoolVar(&context.generateDataAssetDiagram, "generate-data-asset-diagram", true, "generate data asset diagram") - flag.BoolVar(&context.generateRisksJSON, "generate-risks-json", true, "generate risks json") - flag.BoolVar(&context.generateStatsJSON, "generate-stats-json", true, "generate stats json") - flag.BoolVar(&context.generateTechnicalAssetsJSON, "generate-technical-assets-json", true, "generate technical assets json") - flag.BoolVar(&context.generateRisksExcel, "generate-risks-excel", true, "generate risks excel") - flag.BoolVar(&context.generateTagsExcel, "generate-tags-excel", true, "generate tags excel") - flag.BoolVar(&context.generateReportPDF, "generate-report-pdf", true, "generate report pdf, including diagrams") - - flag.Usage = func() { - fmt.Println(docs.Logo + "\n\n" + docs.VersionText) - _, _ = fmt.Fprintf(os.Stderr, "Usage: threagile [options]") - fmt.Println() - } - flag.Parse() - - context.Config.InputFile = context.expandPath(context.Config.InputFile) - context.Config.AppFolder = context.expandPath(context.Config.AppFolder) - context.Config.ServerFolder = context.expandPath(context.Config.ServerFolder) - context.Config.TempFolder = context.expandPath(context.Config.TempFolder) - context.Config.BinFolder = context.expandPath(context.Config.BinFolder) - context.Config.OutputFolder = context.expandPath(context.Config.OutputFolder) - - if context.Config.DiagramDPI < common.MinGraphvizDPI { - context.Config.DiagramDPI = common.MinGraphvizDPI - } else if context.Config.DiagramDPI > 
common.MaxGraphvizDPI { - context.Config.DiagramDPI = common.MaxGraphvizDPI - } - - context.progressReporter = common.SilentProgressReporter{} - if context.Config.Verbose { - context.progressReporter = common.CommandLineProgressReporter{} - } - - context.ServerMode = context.Config.ServerPort > 0 - - return context -} - func (context *Context) applyWildcardRiskTrackingEvaluation() { if context.Config.Verbose { fmt.Println("Executing risk tracking evaluation") @@ -1407,3 +1345,90 @@ func hash(s string) string { func encode(value string) string { return strings.ReplaceAll(value, "&", "&") } + +// TODO: remove from here as soon as moved to cobra, here is only for a backward compatibility +// this file supposed to be only about the logic +func userHomeDir() string { + switch runtime.GOOS { + case "windows": + home := os.Getenv("HOMEDRIVE") + os.Getenv("HOMEPATH") + if home == "" { + home = os.Getenv("USERPROFILE") + } + return home + + default: + return os.Getenv("HOME") + } +} + +func expandPath(path string) string { + home := userHomeDir() + if strings.HasPrefix(path, "~") { + path = strings.Replace(path, "~", home, 1) + } + + if strings.HasPrefix(path, "$HOME") { + path = strings.Replace(path, "$HOME", home, -1) + } + + return path +} + +func (context *Context) ParseCommandlineArgs() *Context { + configFile := flag.String("config", "", "config file") + configError := context.Config.Load(*configFile) + if configError != nil { + fmt.Printf("WARNING: failed to load config file %q: %v\n", *configFile, configError) + } + + // folders + flag.StringVar(&context.Config.AppFolder, "app-dir", common.AppDir, "app folder (default: "+common.AppDir+")") + flag.StringVar(&context.Config.ServerFolder, "server-dir", common.DataDir, "base folder for server mode (default: "+common.DataDir+")") + flag.StringVar(&context.Config.TempFolder, "temp-dir", common.TempDir, "temporary folder location") + flag.StringVar(&context.Config.BinFolder, "bin-dir", common.BinDir, "binary folder 
location") + flag.StringVar(&context.Config.OutputFolder, "output", ".", "output directory") + + // files + flag.StringVar(&context.Config.InputFile, "model", common.InputFile, "input model yaml file") + flag.StringVar(&context.RAAPlugin, "raa-run", "raa_calc", "RAA calculation run file name") + + // flags / parameters + flag.BoolVar(&context.Config.Verbose, "verbose", false, "verbose output") + flag.IntVar(&context.Config.DiagramDPI, "diagram-dpi", context.Config.DiagramDPI, "DPI used to render: maximum is "+strconv.Itoa(context.Config.MaxGraphvizDPI)+"") + flag.StringVar(&context.Config.SkipRiskRules, "skip-risk-rules", "", "comma-separated list of risk rules (by their ID) to skip") + flag.BoolVar(&context.Config.IgnoreOrphanedRiskTracking, "ignore-orphaned-risk-tracking", false, "ignore orphaned risk tracking (just log them) not matching a concrete risk") + flag.IntVar(&context.Config.ServerPort, "server", 0, "start a server (instead of commandline execution) on the given port") + flag.StringVar(&context.Config.ExecuteModelMacro, "execute-model-macro", "", "Execute model macro (by ID)") + flag.StringVar(&context.Config.TemplateFilename, "background", "background.pdf", "background pdf file") + riskRulesPlugins := flag.String("custom-risk-rules-plugins", "", "comma-separated list of plugins file names with custom risk rules to load") + context.Config.RiskRulesPlugins = strings.Split(*riskRulesPlugins, ",") + + // commands + flag.BoolVar(&context.GenerateCommands.DataFlowDiagram, "generate-data-flow-diagram", true, "generate data-flow diagram") + flag.BoolVar(&context.GenerateCommands.DataAssetDiagram, "generate-data-asset-diagram", true, "generate data asset diagram") + flag.BoolVar(&context.GenerateCommands.RisksJSON, "generate-risks-json", true, "generate risks json") + flag.BoolVar(&context.GenerateCommands.StatsJSON, "generate-stats-json", true, "generate stats json") + flag.BoolVar(&context.GenerateCommands.TechnicalAssetsJSON, 
"generate-technical-assets-json", true, "generate technical assets json") + flag.BoolVar(&context.GenerateCommands.RisksExcel, "generate-risks-excel", true, "generate risks excel") + flag.BoolVar(&context.GenerateCommands.TagsExcel, "generate-tags-excel", true, "generate tags excel") + flag.BoolVar(&context.GenerateCommands.ReportPDF, "generate-report-pdf", true, "generate report pdf, including diagrams") + + flag.Usage = func() { + fmt.Println(docs.Logo + "\n\n" + docs.VersionText) + _, _ = fmt.Fprintf(os.Stderr, "Usage: threagile [options]") + fmt.Println() + } + flag.Parse() + + context.Config.InputFile = expandPath(context.Config.InputFile) + context.Config.AppFolder = expandPath(context.Config.AppFolder) + context.Config.ServerFolder = expandPath(context.Config.ServerFolder) + context.Config.TempFolder = expandPath(context.Config.TempFolder) + context.Config.BinFolder = expandPath(context.Config.BinFolder) + context.Config.OutputFolder = expandPath(context.Config.OutputFolder) + + context.ServerMode = context.Config.ServerPort > 0 + + return context +} diff --git a/internal/threagile/flags.go b/internal/threagile/flags.go index d04c4cde..ac51c9f3 100644 --- a/internal/threagile/flags.go +++ b/internal/threagile/flags.go @@ -3,10 +3,46 @@ Copyright © 2023 NAME HERE */ package threagile +var verboseFlag *bool +var appDirFlag, binDirFlag, outputDirFlag, tempDirFlag *string +var inputFileFlag, raaPluginFlag *string +var serverPortFlag *int +var serverDirFlag *string + +var skipRiskRulesFlag, customRiskRulesPluginFlag *string +var ignoreOrphandedRiskTrackingFlag *bool +var templateFileNameFlag *string +var diagramDpiFlag *int + +var generateDataFlowDiagramFlag, generateDataAssetDiagramFlag, generateRisksJSONFlag, + generateTechnicalAssetsJSONFlag, generateStatsJSONFlag, generateRisksExcelFlag, + generateTagsExcelFlag, generateReportPDFFlag *bool + const verboseFlagName = "verbose" const verboseFlagShorthand = "v" + const appDirFlagName = "app-dir" const 
binDirFlagName = "bin-dir" const outputFlagName = "output" const tempDirFlagName = "temp-dir" + +const serverDirFlagName = "server-dir" +const serverPortFlagName = "server-port" + +const inputFileFlagName = "model" +const raaPluginFlagName = "raa-run" + const customRiskRulesPluginFlagName = "custom-risk-rules-plugin" +const diagramDpiFlagName = "diagram-dpi" +const skipRiskRulesFlagName = "skip-risk-rules" +const ignoreOrphandedRiskTrackingFlagName = "ignore-orphaned-risk-tracking" +const templateFileNameFlagName = "background" + +const generateDataFlowDiagramFlagName = "generate-data-flow-diagram" +const generateDataAssetDiagramFlagName = "generate-data-asset-diagram" +const generateRisksJSONFlagName = "generate-risks-json" +const generateTechnicalAssetsJSONFlagName = "generate-technical-assets-json" +const generateStatsJSONFlagName = "generate-stats-json" +const generateRisksExcelFlagName = "generate-risks-excel" +const generateTagsExcelFlagName = "generate-tags-excel" +const generateReportPDFFlagName = "generate-report-pdf" diff --git a/internal/threagile/root.go b/internal/threagile/root.go index f11716dc..6155f9d5 100644 --- a/internal/threagile/root.go +++ b/internal/threagile/root.go @@ -4,21 +4,40 @@ Copyright © 2023 NAME HERE package threagile import ( + "fmt" "os" + "strings" "github.com/spf13/cobra" + "github.com/threagile/threagile/pkg/common" "github.com/threagile/threagile/pkg/docs" + "github.com/threagile/threagile/pkg/server" ) var rootCmd = &cobra.Command{ Use: "threagile", Short: "\n" + docs.Logo, Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\n" + docs.Examples, + RunE: func(cmd *cobra.Command, args []string) error { + ctx := new(Context).Defaults("buildTimestamp") + ctx.Config = *readConfig("buildTimestamp") + ctx.GenerateCommands = readCommands() + ctx.DoIt() + return nil + }, +} + +var serverCmd = &cobra.Command{ + Use: "server", + Short: "Run server", + RunE: func(cmd *cobra.Command, args []string) error { + cfg := 
readConfig("buildTimestamp") + server.RunServer(*cfg) + return nil + }, } -// Execute adds all child commands to the root command and sets flags appropriately. -// This is called by main.main(). It only needs to happen once to the rootCmd. func Execute() { err := rootCmd.Execute() if err != nil { @@ -27,9 +46,69 @@ func Execute() { } func init() { - rootCmd.PersistentFlags().BoolP(verboseFlagName, verboseFlagShorthand, false, "verbose output") - rootCmd.PersistentFlags().String(appDirFlagName, "/app", "app folder (default: /app)") - rootCmd.PersistentFlags().String(binDirFlagName, "/app", "binary folder location") - rootCmd.PersistentFlags().String(outputFlagName, ".", "output directory") - rootCmd.PersistentFlags().String(tempDirFlagName, "/tmp", "output directory") + appDirFlag = rootCmd.PersistentFlags().String(appDirFlagName, common.AppDir, "app folder") + binDirFlag = rootCmd.PersistentFlags().String(binDirFlagName, common.BinDir, "binary folder location") + outputDirFlag = rootCmd.PersistentFlags().String(outputFlagName, common.OutputDir, "output directory") + tempDirFlag = rootCmd.PersistentFlags().String(tempDirFlagName, common.TempDir, "temporary folder location") + + inputFileFlag = rootCmd.PersistentFlags().String(inputFileFlagName, common.InputFile, "input model yaml file") + raaPluginFlag = rootCmd.PersistentFlags().String(raaPluginFlagName, "raa_calc", "RAA calculation run file name") + + serverPortFlag = serverCmd.PersistentFlags().Int(serverPortFlagName, common.DefaultServerPort, "the server port") + serverDirFlag = serverCmd.PersistentFlags().String(serverDirFlagName, common.DataDir, "base folder for server mode (default: "+common.DataDir+")") + + verboseFlag = rootCmd.PersistentFlags().BoolP(verboseFlagName, verboseFlagShorthand, false, "verbose output") + + customRiskRulesPluginFlag = rootCmd.PersistentFlags().String(customRiskRulesPluginFlagName, "", "comma-separated list of plugins file names with custom risk rules to load") + diagramDpiFlag = 
rootCmd.PersistentFlags().Int(diagramDpiFlagName, 0, "DPI used to render: maximum is "+fmt.Sprintf("%d", common.MaxGraphvizDPI)+"") + skipRiskRulesFlag = rootCmd.PersistentFlags().String(skipRiskRulesFlagName, "", "comma-separated list of risk rules (by their ID) to skip") + ignoreOrphandedRiskTrackingFlag = rootCmd.PersistentFlags().Bool(ignoreOrphandedRiskTrackingFlagName, false, "ignore orphaned risk tracking (just log them) not matching a concrete risk") + templateFileNameFlag = rootCmd.PersistentFlags().String(templateFileNameFlagName, common.TemplateFilename, "background pdf file") + + generateDataFlowDiagramFlag = rootCmd.PersistentFlags().Bool(generateDataFlowDiagramFlagName, true, "generate data flow diagram") + generateDataAssetDiagramFlag = rootCmd.PersistentFlags().Bool(generateDataAssetDiagramFlagName, true, "generate data asset diagram") + generateRisksJSONFlag = rootCmd.PersistentFlags().Bool(generateRisksJSONFlagName, true, "generate risks json") + generateTechnicalAssetsJSONFlag = rootCmd.PersistentFlags().Bool(generateTechnicalAssetsJSONFlagName, true, "generate technical assets json") + generateStatsJSONFlag = rootCmd.PersistentFlags().Bool(generateStatsJSONFlagName, true, "generate stats json") + generateRisksExcelFlag = rootCmd.PersistentFlags().Bool(generateRisksExcelFlagName, true, "generate risks excel") + generateTagsExcelFlag = rootCmd.PersistentFlags().Bool(generateTagsExcelFlagName, true, "generate tags excel") + generateReportPDFFlag = rootCmd.PersistentFlags().Bool(generateReportPDFFlagName, true, "generate report pdf, including diagrams") + + rootCmd.AddCommand(serverCmd) +} + +func readConfig(buildTimestamp string) *common.Config { + cfg := new(common.Config).Defaults(buildTimestamp) + cfg.ServerPort = *serverPortFlag + cfg.ServerFolder = expandPath(*serverDirFlag) + + cfg.AppFolder = expandPath(*appDirFlag) + cfg.BinFolder = expandPath(*binDirFlag) + cfg.OutputFolder = expandPath(*outputDirFlag) + cfg.TempFolder = 
expandPath(*tempDirFlag) + + cfg.Verbose = *verboseFlag + + cfg.InputFile = expandPath(*inputFileFlag) + cfg.RAAPlugin = *raaPluginFlag + + cfg.RiskRulesPlugins = strings.Split(*customRiskRulesPluginFlag, ",") + cfg.SkipRiskRules = *skipRiskRulesFlag + cfg.IgnoreOrphanedRiskTracking = *ignoreOrphandedRiskTrackingFlag + cfg.DiagramDPI = *diagramDpiFlag + cfg.TemplateFilename = *templateFileNameFlag + return cfg +} + +func readCommands() *GenerateCommands { + commands := new(GenerateCommands).Defaults() + commands.DataFlowDiagram = *generateDataFlowDiagramFlag + commands.DataAssetDiagram = *generateDataAssetDiagramFlag + commands.RisksJSON = *generateRisksJSONFlag + commands.StatsJSON = *generateStatsJSONFlag + commands.TechnicalAssetsJSON = *generateTechnicalAssetsJSONFlag + commands.RisksExcel = *generateRisksExcelFlag + commands.TagsExcel = *generateTagsExcelFlag + commands.ReportPDF = *generateReportPDFFlag + return commands } diff --git a/internal/threagile/rules.go b/internal/threagile/rules.go index 83c39ca4..4616f216 100644 --- a/internal/threagile/rules.go +++ b/internal/threagile/rules.go @@ -5,10 +5,11 @@ Copyright © 2023 NAME HERE package threagile import ( + "strings" + "github.com/threagile/threagile/pkg/common" "github.com/threagile/threagile/pkg/security/risks" "github.com/threagile/threagile/pkg/security/types" - "strings" "github.com/spf13/cobra" @@ -19,19 +20,13 @@ var listRiskRules = &cobra.Command{ Use: "list-risk-rules", Short: "Print available risk rules", RunE: func(cmd *cobra.Command, args []string) error { - plugins, err := cmd.Flags().GetString("custom-risk-rules-plugin") - if err != nil { - cmd.Printf("Unable to read custom-risk-rules-plugin flag: %v", err) - return err - } - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) cmd.Println("The following risk rules are available (can be extended via custom risk rules):") cmd.Println() cmd.Println("----------------------") cmd.Println("Custom risk rules:") 
cmd.Println("----------------------") - customRiskRules := types.LoadCustomRiskRules(strings.Split(plugins, ","), common.GetProgressReporter(cmd)) + customRiskRules := types.LoadCustomRiskRules(strings.Split(*customRiskRulesPluginFlag, ","), common.GetProgressReporter(cmd)) for id, customRule := range customRiskRules { cmd.Println(id, "-->", customRule.Category.Title, "--> with tags:", customRule.Tags) } @@ -52,19 +47,13 @@ var explainRiskRules = &cobra.Command{ Use: "explain-risk-rules", Short: "Detailed explanation of all the risk rules", RunE: func(cmd *cobra.Command, args []string) error { - plugins, err := cmd.Flags().GetString("custom-risk-rules-plugin") - if err != nil { - cmd.Printf("Unable to read custom-risk-rules-plugin flag: %v", err) - return err - } - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) cmd.Println("Explanation for risk rules:") cmd.Println() cmd.Println("----------------------") cmd.Println("Custom risk rules:") cmd.Println("----------------------") - customRiskRules := types.LoadCustomRiskRules(strings.Split(plugins, ","), common.GetProgressReporter(cmd)) + customRiskRules := types.LoadCustomRiskRules(strings.Split(*customRiskRulesPluginFlag, ","), common.GetProgressReporter(cmd)) for _, customRule := range customRiskRules { cmd.Printf("%v: %v\n", customRule.Category.Id, customRule.Category.Description) } @@ -83,9 +72,6 @@ var explainRiskRules = &cobra.Command{ } func init() { - listRiskRules.PersistentFlags().String(customRiskRulesPluginFlagName, "", "custom risk rules plugin (default: none)") rootCmd.AddCommand(listRiskRules) - - explainRiskRules.PersistentFlags().String(customRiskRulesPluginFlagName, "", "custom risk rules plugin (default: none)") rootCmd.AddCommand(explainRiskRules) } diff --git a/pkg/common/consts.go b/pkg/common/consts.go index 27a08cd5..bb452330 100644 --- a/pkg/common/consts.go +++ b/pkg/common/consts.go @@ -9,6 +9,8 @@ const ( ServerDir = "/server" KeyDir = "keys" + DefaultServerPort = 8080 + InputFile = 
"threagile.yaml" ReportFilename = "report.pdf" ExcelRisksFilename = "risks.xlsx" From 2e916613bc310fa40626d665acee5ec0bcf06e35 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Thu, 4 Jan 2024 19:37:18 +0000 Subject: [PATCH 33/68] Move one more function out of context --- internal/threagile/context.go | 33 +++++++++++---------------------- pkg/security/types/model.go | 6 ++++++ 2 files changed, 17 insertions(+), 22 deletions(-) diff --git a/internal/threagile/context.go b/internal/threagile/context.go index a8c0123f..35bb2a8b 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -74,15 +74,6 @@ type Context struct { ServerMode bool parsedModel types.ParsedModel - - customRiskRules map[string]*types.CustomRisk - builtinRiskRules map[string]types.RiskRule -} - -func (context *Context) addToListOfSupportedTags(tags []string) { - for _, tag := range tags { - context.parsedModel.AllSupportedTags[tag] = true - } } func (context *Context) checkRiskTracking() { @@ -118,8 +109,6 @@ func (context *Context) checkRiskTracking() { func (context *Context) Init() *Context { *context = Context{ - customRiskRules: make(map[string]*types.CustomRisk), - builtinRiskRules: make(map[string]types.RiskRule), GenerateCommands: &GenerateCommands{}, } @@ -142,7 +131,7 @@ func (context *Context) applyRisk(rule types.RiskRule, skippedRules *map[string] fmt.Printf("Skipping risk rule %q\n", rule.Category().Id) delete(*skippedRules, rule.Category().Id) } else { - context.addToListOfSupportedTags(rule.SupportedTags()) + context.parsedModel.AddToListOfSupportedTags(rule.SupportedTags()) generatedRisks := rule.GenerateRisks(&context.parsedModel) if generatedRisks != nil { if len(generatedRisks) > 0 { @@ -154,7 +143,7 @@ func (context *Context) applyRisk(rule types.RiskRule, skippedRules *map[string] } } -func (context *Context) applyRiskGeneration() { +func (context *Context) applyRiskGeneration(customRiskRules map[string]*types.CustomRisk, builtinRiskRules 
map[string]types.RiskRule) { if context.Config.Verbose { fmt.Println("Applying risk generation") } @@ -166,12 +155,12 @@ func (context *Context) applyRiskGeneration() { } } - for _, rule := range context.builtinRiskRules { + for _, rule := range builtinRiskRules { context.applyRisk(rule, &skippedRules) } // NOW THE CUSTOM RISK RULES (if any) - for id, customRule := range context.customRiskRules { + for id, customRule := range customRiskRules { _, ok := skippedRules[id] if ok { if context.Config.Verbose { @@ -182,7 +171,7 @@ func (context *Context) applyRiskGeneration() { if context.Config.Verbose { fmt.Println("Executing custom risk rule:", id) } - context.addToListOfSupportedTags(customRule.Tags) + context.parsedModel.AddToListOfSupportedTags(customRule.Tags) customRisks := customRule.GenerateRisks(&context.parsedModel) if len(customRisks) > 0 { context.parsedModel.GeneratedRisksByCategory[customRule.Category.Id] = customRisks @@ -531,13 +520,13 @@ func (context *Context) DoIt() { log.Fatal("Unable to load model yaml: ", loadError) } - context.builtinRiskRules = make(map[string]types.RiskRule) + builtinRiskRules := make(map[string]types.RiskRule) for _, rule := range risks.GetBuiltInRiskRules() { - context.builtinRiskRules[rule.Category().Id] = rule + builtinRiskRules[rule.Category().Id] = rule } - context.customRiskRules = types.LoadCustomRiskRules(context.Config.RiskRulesPlugins, progressReporter) + customRiskRules := types.LoadCustomRiskRules(context.Config.RiskRulesPlugins, progressReporter) - parsedModel, parseError := model.ParseModel(&modelInput, context.builtinRiskRules, context.customRiskRules) + parsedModel, parseError := model.ParseModel(&modelInput, builtinRiskRules, customRiskRules) if parseError != nil { log.Fatal("Unable to parse model yaml: ", parseError) } @@ -546,7 +535,7 @@ func (context *Context) DoIt() { introTextRAA := context.applyRAA() - context.applyRiskGeneration() + context.applyRiskGeneration(customRiskRules, builtinRiskRules) 
context.applyWildcardRiskTrackingEvaluation() context.checkRiskTracking() @@ -935,7 +924,7 @@ func (context *Context) DoIt() { context.Config.BuildTimestamp, modelHash, introTextRAA, - context.customRiskRules, + customRiskRules, context.Config.TempFolder, &context.parsedModel) } diff --git a/pkg/security/types/model.go b/pkg/security/types/model.go index 48e983bd..30ca5c89 100644 --- a/pkg/security/types/model.go +++ b/pkg/security/types/model.go @@ -49,6 +49,12 @@ type ParsedModel struct { GeneratedRisksBySyntheticId map[string]Risk `json:"generated_risks_by_synthetic_id,omitempty" yaml:"generated_risks_by_synthetic_id"` } +func (parsedModel *ParsedModel) AddToListOfSupportedTags(tags []string) { + for _, tag := range tags { + parsedModel.AllSupportedTags[tag] = true + } +} + func (parsedModel *ParsedModel) CheckTags(tags []string, where string) ([]string, error) { var tagsUsed = make([]string, 0) if tags != nil { From f391430d62ea3ede94674511bff46f601e3da05a Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Thu, 4 Jan 2024 22:13:31 +0000 Subject: [PATCH 34/68] Move out graphviz code into report package --- internal/threagile/context.go | 779 ++------------------------------ internal/threagile/rules.go | 4 +- pkg/common/progress-reporter.go | 40 +- pkg/security/types/model.go | 166 +++++++ pkg/security/types/rules.go | 19 +- pkg/server/server.go | 6 +- 6 files changed, 221 insertions(+), 793 deletions(-) diff --git a/internal/threagile/context.go b/internal/threagile/context.go index 35bb2a8b..eb8194d0 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -4,18 +4,13 @@ import ( "bufio" "crypto/sha256" "encoding/hex" - "errors" "flag" "fmt" // TODO: no fmt.Println here - "hash/fnv" "io" "log" "os" - "os/exec" "path/filepath" - "regexp" "runtime" - "sort" "strconv" "strings" @@ -33,7 +28,6 @@ import ( "gopkg.in/yaml.v3" - "github.com/threagile/threagile/pkg/colors" "github.com/threagile/threagile/pkg/docs" 
"github.com/threagile/threagile/pkg/input" "github.com/threagile/threagile/pkg/macros" @@ -76,37 +70,6 @@ type Context struct { parsedModel types.ParsedModel } -func (context *Context) checkRiskTracking() { - if context.Config.Verbose { - fmt.Println("Checking risk tracking") - } - for _, tracking := range context.parsedModel.RiskTracking { - if _, ok := context.parsedModel.GeneratedRisksBySyntheticId[tracking.SyntheticRiskId]; !ok { - if context.Config.IgnoreOrphanedRiskTracking { - fmt.Println("Risk tracking references unknown risk (risk id not found): " + tracking.SyntheticRiskId) - } else { - panic(errors.New("Risk tracking references unknown risk (risk id not found) - you might want to use the option -ignore-orphaned-risk-tracking: " + tracking.SyntheticRiskId + - "\n\nNOTE: For risk tracking each risk-id needs to be defined (the string with the @ sign in it). " + - "These unique risk IDs are visible in the PDF report (the small grey string under each risk), " + - "the Excel (column \"ID\"), as well as the JSON responses. Some risk IDs have only one @ sign in them, " + - "while others multiple. The idea is to allow for unique but still speaking IDs. Therefore each risk instance " + - "creates its individual ID by taking all affected elements causing the risk to be within an @-delimited part. " + - "Using wildcards (the * sign) for parts delimited by @ signs allows to handle groups of certain risks at once. " + - "Best is to lookup the IDs to use in the created Excel file. 
Alternatively a model macro \"seed-risk-tracking\" " + - "is available that helps in initially seeding the risk tracking part here based on already identified and not yet handled risks.")) - } - } - } - - // save also the risk-category-id and risk-status directly in the risk for better JSON marshalling - for category := range context.parsedModel.GeneratedRisksByCategory { - for i := range context.parsedModel.GeneratedRisksByCategory[category] { - // context.parsedModel.GeneratedRisksByCategory[category][i].CategoryId = category - context.parsedModel.GeneratedRisksByCategory[category][i].RiskStatus = context.parsedModel.GeneratedRisksByCategory[category][i].GetRiskTrackingStatusDefaultingUnchecked(&context.parsedModel) - } - } -} - func (context *Context) Init() *Context { *context = Context{ GenerateCommands: &GenerateCommands{}, @@ -123,367 +86,6 @@ func (context *Context) Defaults(buildTimestamp string) *Context { return context } -func (context *Context) applyRisk(rule types.RiskRule, skippedRules *map[string]bool) { - id := rule.Category().Id - _, ok := (*skippedRules)[id] - - if ok { - fmt.Printf("Skipping risk rule %q\n", rule.Category().Id) - delete(*skippedRules, rule.Category().Id) - } else { - context.parsedModel.AddToListOfSupportedTags(rule.SupportedTags()) - generatedRisks := rule.GenerateRisks(&context.parsedModel) - if generatedRisks != nil { - if len(generatedRisks) > 0 { - context.parsedModel.GeneratedRisksByCategory[rule.Category().Id] = generatedRisks - } - } else { - fmt.Printf("Failed to generate risks for %q\n", id) - } - } -} - -func (context *Context) applyRiskGeneration(customRiskRules map[string]*types.CustomRisk, builtinRiskRules map[string]types.RiskRule) { - if context.Config.Verbose { - fmt.Println("Applying risk generation") - } - - skippedRules := make(map[string]bool) - if len(context.Config.SkipRiskRules) > 0 { - for _, id := range strings.Split(context.Config.SkipRiskRules, ",") { - skippedRules[id] = true - } - } - - for _, rule 
:= range builtinRiskRules { - context.applyRisk(rule, &skippedRules) - } - - // NOW THE CUSTOM RISK RULES (if any) - for id, customRule := range customRiskRules { - _, ok := skippedRules[id] - if ok { - if context.Config.Verbose { - fmt.Println("Skipping custom risk rule:", id) - } - delete(skippedRules, id) - } else { - if context.Config.Verbose { - fmt.Println("Executing custom risk rule:", id) - } - context.parsedModel.AddToListOfSupportedTags(customRule.Tags) - customRisks := customRule.GenerateRisks(&context.parsedModel) - if len(customRisks) > 0 { - context.parsedModel.GeneratedRisksByCategory[customRule.Category.Id] = customRisks - } - - if context.Config.Verbose { - fmt.Println("Added custom risks:", len(customRisks)) - } - } - } - - if len(skippedRules) > 0 { - keys := make([]string, 0) - for k := range skippedRules { - keys = append(keys, k) - } - if len(keys) > 0 { - log.Println("Unknown risk rules to skip:", keys) - } - } - - // save also in map keyed by synthetic risk-id - for _, category := range types.SortedRiskCategories(&context.parsedModel) { - someRisks := types.SortedRisksOfCategory(&context.parsedModel, category) - for _, risk := range someRisks { - context.parsedModel.GeneratedRisksBySyntheticId[strings.ToLower(risk.SyntheticId)] = risk - } - } -} - -func (context *Context) writeDataFlowDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.File { - if context.Config.Verbose { - fmt.Println("Writing data flow diagram input") - } - var dotContent strings.Builder - dotContent.WriteString("digraph generatedModel { concentrate=false \n") - - // Metadata init =============================================================================== - tweaks := "" - if context.parsedModel.DiagramTweakNodesep > 0 { - tweaks += "\n nodesep=\"" + strconv.Itoa(context.parsedModel.DiagramTweakNodesep) + "\"" - } - if context.parsedModel.DiagramTweakRanksep > 0 { - tweaks += "\n ranksep=\"" + strconv.Itoa(context.parsedModel.DiagramTweakRanksep) + "\"" - } - 
suppressBidirectionalArrows := true - drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks := true - splines := "ortho" - if len(context.parsedModel.DiagramTweakEdgeLayout) > 0 { - switch context.parsedModel.DiagramTweakEdgeLayout { - case "spline": - splines = "spline" - drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false - case "polyline": - splines = "polyline" - drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false - case "ortho": - splines = "ortho" - suppressBidirectionalArrows = true - case "curved": - splines = "curved" - drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false - case "false": - splines = "false" - drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false - default: - panic(errors.New("unknown value for diagram_tweak_suppress_edge_labels (spline, polyline, ortho, curved, false): " + - context.parsedModel.DiagramTweakEdgeLayout)) - } - } - rankdir := "TB" - if context.parsedModel.DiagramTweakLayoutLeftToRight { - rankdir = "LR" - } - modelTitle := "" - if context.Config.AddModelTitle { - modelTitle = `label="` + context.parsedModel.Title + `"` - } - dotContent.WriteString(` graph [ ` + modelTitle + ` - labelloc=t - fontname="Verdana" - fontsize=40 - outputorder="nodesfirst" - dpi=` + strconv.Itoa(dpi) + ` - splines=` + splines + ` - rankdir="` + rankdir + `" -` + tweaks + ` - ]; - node [ - fontname="Verdana" - fontsize="20" - ]; - edge [ - shape="none" - fontname="Verdana" - fontsize="18" - ]; -`) - - // Trust Boundaries =============================================================================== - var subgraphSnippetsById = make(map[string]string) - // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order - // range over them in sorted (hence re-producible) way: - keys := make([]string, 0) - for k := range context.parsedModel.TrustBoundaries { - keys = append(keys, k) - } - 
sort.Strings(keys) - for _, key := range keys { - trustBoundary := context.parsedModel.TrustBoundaries[key] - var snippet strings.Builder - if len(trustBoundary.TechnicalAssetsInside) > 0 || len(trustBoundary.TrustBoundariesNested) > 0 { - if drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks { - // see https://stackoverflow.com/questions/17247455/how-do-i-add-extra-space-between-clusters?noredirect=1&lq=1 - snippet.WriteString("\n subgraph cluster_space_boundary_for_layout_only_1" + hash(trustBoundary.Id) + " {\n") - snippet.WriteString(` graph [ - dpi=` + strconv.Itoa(dpi) + ` - label=<
> - fontsize="21" - style="invis" - color="green" - fontcolor="green" - margin="50.0" - penwidth="6.5" - outputorder="nodesfirst" - ];`) - } - snippet.WriteString("\n subgraph cluster_" + hash(trustBoundary.Id) + " {\n") - color, fontColor, bgColor, style, fontname := colors.RgbHexColorTwilight(), colors.RgbHexColorTwilight() /*"#550E0C"*/, "#FAFAFA", "dashed", "Verdana" - penWidth := 4.5 - if len(trustBoundary.TrustBoundariesNested) > 0 { - //color, fontColor, style, fontname = colors.Blue, colors.Blue, "dashed", "Verdana" - penWidth = 5.5 - } - if len(trustBoundary.ParentTrustBoundaryID(&context.parsedModel)) > 0 { - bgColor = "#F1F1F1" - } - if trustBoundary.Type == types.NetworkPolicyNamespaceIsolation { - fontColor, bgColor = "#222222", "#DFF4FF" - } - if trustBoundary.Type == types.ExecutionEnvironment { - fontColor, bgColor, style = "#555555", "#FFFFF0", "dotted" - } - snippet.WriteString(` graph [ - dpi=` + strconv.Itoa(dpi) + ` - label=<
` + trustBoundary.Title + ` (` + trustBoundary.Type.String() + `)
> - fontsize="21" - style="` + style + `" - color="` + color + `" - bgcolor="` + bgColor + `" - fontcolor="` + fontColor + `" - fontname="` + fontname + `" - penwidth="` + fmt.Sprintf("%f", penWidth) + `" - forcelabels=true - outputorder="nodesfirst" - margin="50.0" - ];`) - snippet.WriteString("\n") - keys := trustBoundary.TechnicalAssetsInside - sort.Strings(keys) - for _, technicalAssetInside := range keys { - //log.Println("About to add technical asset link to trust boundary: ", technicalAssetInside) - technicalAsset := context.parsedModel.TechnicalAssets[technicalAssetInside] - snippet.WriteString(hash(technicalAsset.Id)) - snippet.WriteString(";\n") - } - keys = trustBoundary.TrustBoundariesNested - sort.Strings(keys) - for _, trustBoundaryNested := range keys { - //log.Println("About to add nested trust boundary to trust boundary: ", trustBoundaryNested) - trustBoundaryNested := context.parsedModel.TrustBoundaries[trustBoundaryNested] - snippet.WriteString("LINK-NEEDS-REPLACED-BY-cluster_" + hash(trustBoundaryNested.Id)) - snippet.WriteString(";\n") - } - snippet.WriteString(" }\n\n") - if drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks { - snippet.WriteString(" }\n\n") - } - } - subgraphSnippetsById[hash(trustBoundary.Id)] = snippet.String() - } - // here replace links and remove from map after replacement (i.e. 
move snippet into nested) - for i := range subgraphSnippetsById { - re := regexp.MustCompile(`LINK-NEEDS-REPLACED-BY-cluster_([0-9]*);`) - for { - matches := re.FindStringSubmatch(subgraphSnippetsById[i]) - if len(matches) > 0 { - embeddedSnippet := " //nested:" + subgraphSnippetsById[matches[1]] - subgraphSnippetsById[i] = strings.ReplaceAll(subgraphSnippetsById[i], matches[0], embeddedSnippet) - subgraphSnippetsById[matches[1]] = "" // to something like remove it - } else { - break - } - } - } - // now write them all - keys = make([]string, 0) - for k := range subgraphSnippetsById { - keys = append(keys, k) - } - sort.Strings(keys) - for _, key := range keys { - snippet := subgraphSnippetsById[key] - dotContent.WriteString(snippet) - } - - // Technical Assets =============================================================================== - // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order - // range over them in sorted (hence re-producible) way: - // Convert map to slice of values: - var techAssets []types.TechnicalAsset - for _, techAsset := range context.parsedModel.TechnicalAssets { - techAssets = append(techAssets, techAsset) - } - sort.Sort(types.ByOrderAndIdSort(techAssets)) - for _, technicalAsset := range techAssets { - dotContent.WriteString(context.makeTechAssetNode(technicalAsset, false)) - dotContent.WriteString("\n") - } - - // Data Flows (Technical Communication Links) =============================================================================== - for _, technicalAsset := range techAssets { - for _, dataFlow := range technicalAsset.CommunicationLinks { - sourceId := technicalAsset.Id - targetId := dataFlow.TargetId - //log.Println("About to add link from", sourceId, "to", targetId, "with id", dataFlow.Id) - var arrowStyle, arrowColor, readOrWriteHead, readOrWriteTail string - if dataFlow.Readonly { - readOrWriteHead = "empty" - readOrWriteTail = "odot" - } 
else { - readOrWriteHead = "normal" - readOrWriteTail = "dot" - } - dir := "forward" - if dataFlow.IsBidirectional() { - if !suppressBidirectionalArrows { // as it does not work as bug in graphviz with ortho: https://gitlab.com/graphviz/graphviz/issues/144 - dir = "both" - } - } - arrowStyle = ` style="` + dataFlow.DetermineArrowLineStyle() + `" penwidth="` + dataFlow.DetermineArrowPenWidth(&context.parsedModel) + `" arrowtail="` + readOrWriteTail + `" arrowhead="` + readOrWriteHead + `" dir="` + dir + `" arrowsize="2.0" ` - arrowColor = ` color="` + dataFlow.DetermineArrowColor(&context.parsedModel) + `"` - tweaks := "" - if dataFlow.DiagramTweakWeight > 0 { - tweaks += " weight=\"" + strconv.Itoa(dataFlow.DiagramTweakWeight) + "\" " - } - - dotContent.WriteString("\n") - dotContent.WriteString(" " + hash(sourceId) + " -> " + hash(targetId) + - ` [` + arrowColor + ` ` + arrowStyle + tweaks + ` constraint=` + strconv.FormatBool(dataFlow.DiagramTweakConstraint) + ` `) - if !context.parsedModel.DiagramTweakSuppressEdgeLabels { - dotContent.WriteString(` xlabel="` + encode(dataFlow.Protocol.String()) + `" fontcolor="` + dataFlow.DetermineLabelColor(&context.parsedModel) + `" `) - } - dotContent.WriteString(" ];\n") - } - } - - dotContent.WriteString(context.makeDiagramInvisibleConnectionsTweaks()) - dotContent.WriteString(context.makeDiagramSameRankNodeTweaks()) - - dotContent.WriteString("}") - - //fmt.Println(dotContent.String()) - - // Write the DOT file - file, err := os.Create(diagramFilenameDOT) - checkErr(err) - defer func() { _ = file.Close() }() - _, err = fmt.Fprintln(file, dotContent.String()) - checkErr(err) - return file -} - -func (context *Context) makeDiagramSameRankNodeTweaks() string { - // see https://stackoverflow.com/questions/25734244/how-do-i-place-nodes-on-the-same-level-in-dot - tweak := "" - if len(context.parsedModel.DiagramTweakSameRankAssets) > 0 { - for _, sameRank := range context.parsedModel.DiagramTweakSameRankAssets { - assetIDs := 
strings.Split(sameRank, ":") - if len(assetIDs) > 0 { - tweak += "{ rank=same; " - for _, id := range assetIDs { - checkErr(context.parsedModel.CheckTechnicalAssetExists(id, "diagram tweak same-rank", true)) - if len(context.parsedModel.TechnicalAssets[id].GetTrustBoundaryId(&context.parsedModel)) > 0 { - panic(errors.New("technical assets (referenced in same rank diagram tweak) are inside trust boundaries: " + - fmt.Sprintf("%v", context.parsedModel.DiagramTweakSameRankAssets))) - } - tweak += " " + hash(id) + "; " - } - tweak += " }" - } - } - } - return tweak -} - -func (context *Context) makeDiagramInvisibleConnectionsTweaks() string { - // see https://stackoverflow.com/questions/2476575/how-to-control-node-placement-in-graphviz-i-e-avoid-edge-crossings - tweak := "" - if len(context.parsedModel.DiagramTweakInvisibleConnectionsBetweenAssets) > 0 { - for _, invisibleConnections := range context.parsedModel.DiagramTweakInvisibleConnectionsBetweenAssets { - assetIDs := strings.Split(invisibleConnections, ":") - if len(assetIDs) == 2 { - checkErr(context.parsedModel.CheckTechnicalAssetExists(assetIDs[0], "diagram tweak connections", true)) - checkErr(context.parsedModel.CheckTechnicalAssetExists(assetIDs[1], "diagram tweak connections", true)) - tweak += "\n" + hash(assetIDs[0]) + " -> " + hash(assetIDs[1]) + " [style=invis]; \n" - } - } - } - return tweak -} - func (context *Context) DoIt() { defer func() { var err error @@ -497,10 +99,7 @@ func (context *Context) DoIt() { } }() - var progressReporter common.ProgressReporter = common.SilentProgressReporter{} - if context.Config.Verbose { - progressReporter = common.CommandLineProgressReporter{} - } + progressReporter := common.DefaultProgressReporter{Verbose: context.Config.Verbose} if len(context.Config.ExecuteModelMacro) > 0 { fmt.Println(docs.Logo + "\n\n" + docs.VersionText) @@ -535,9 +134,19 @@ func (context *Context) DoIt() { introTextRAA := context.applyRAA() - context.applyRiskGeneration(customRiskRules, 
builtinRiskRules) - context.applyWildcardRiskTrackingEvaluation() - context.checkRiskTracking() + context.parsedModel.ApplyRiskGeneration(customRiskRules, builtinRiskRules, + context.Config.SkipRiskRules, progressReporter) + err := context.parsedModel.ApplyWildcardRiskTrackingEvaluation(context.Config.IgnoreOrphanedRiskTracking, progressReporter) + if err != nil { + // TODO: do not panic and gracefully handle the error + panic(err) + } + + err = context.parsedModel.CheckRiskTracking(context.Config.IgnoreOrphanedRiskTracking, progressReporter) + if err != nil { + // TODO: do not panic and gracefully handle the error + panic(err) + } if len(context.Config.ExecuteModelMacro) > 0 { var macroDetails macros.MacroDetails @@ -845,8 +454,13 @@ func (context *Context) DoIt() { gvFile = tmpFileGV.Name() defer func() { _ = os.Remove(gvFile) }() } - dotFile := context.writeDataFlowDiagramGraphvizDOT(gvFile, diagramDPI) - context.generateDataFlowDiagramGraphvizImage(dotFile, context.Config.OutputFolder) + dotFile := report.WriteDataFlowDiagramGraphvizDOT(&context.parsedModel, gvFile, diagramDPI, context.Config.AddModelTitle, progressReporter) + + err := report.GenerateDataFlowDiagramGraphvizImage(dotFile, context.Config.OutputFolder, + context.Config.TempFolder, context.Config.BinFolder, context.Config.DataFlowDiagramFilenamePNG, progressReporter) + if err != nil { + fmt.Println(err) + } } // Data Asset Diagram rendering if context.GenerateCommands.DataAssetDiagram { @@ -857,8 +471,12 @@ func (context *Context) DoIt() { gvFile = tmpFile.Name() defer func() { _ = os.Remove(gvFile) }() } - dotFile := context.writeDataAssetDiagramGraphvizDOT(gvFile, diagramDPI) - context.generateDataAssetDiagramGraphvizImage(dotFile, context.Config.OutputFolder) + dotFile := report.WriteDataAssetDiagramGraphvizDOT(&context.parsedModel, gvFile, diagramDPI, progressReporter) + err := report.GenerateDataAssetDiagramGraphvizImage(dotFile, context.Config.OutputFolder, + context.Config.TempFolder, 
context.Config.BinFolder, context.Config.DataAssetDiagramFilenamePNG, progressReporter) + if err != nil { + fmt.Println(err) + } } // risks as risks json @@ -986,355 +604,12 @@ func (context *Context) applyRAA() string { return runner.ErrorOutput } -func (context *Context) applyWildcardRiskTrackingEvaluation() { - if context.Config.Verbose { - fmt.Println("Executing risk tracking evaluation") - } - for syntheticRiskIdPattern, riskTracking := range context.getDeferredRiskTrackingDueToWildcardMatching() { - if context.Config.Verbose { - fmt.Println("Applying wildcard risk tracking for risk id: " + syntheticRiskIdPattern) - } - - foundSome := false - var matchingRiskIdExpression = regexp.MustCompile(strings.ReplaceAll(regexp.QuoteMeta(syntheticRiskIdPattern), `\*`, `[^@]+`)) - for syntheticRiskId := range context.parsedModel.GeneratedRisksBySyntheticId { - if matchingRiskIdExpression.Match([]byte(syntheticRiskId)) && context.hasNotYetAnyDirectNonWildcardRiskTracking(syntheticRiskId) { - foundSome = true - context.parsedModel.RiskTracking[syntheticRiskId] = types.RiskTracking{ - SyntheticRiskId: strings.TrimSpace(syntheticRiskId), - Justification: riskTracking.Justification, - CheckedBy: riskTracking.CheckedBy, - Ticket: riskTracking.Ticket, - Status: riskTracking.Status, - Date: riskTracking.Date, - } - } - } - - if !foundSome { - if context.Config.IgnoreOrphanedRiskTracking { - fmt.Println("WARNING: Wildcard risk tracking does not match any risk id: " + syntheticRiskIdPattern) - } else { - panic(errors.New("wildcard risk tracking does not match any risk id: " + syntheticRiskIdPattern)) - } - } - } -} - -func (context *Context) getDeferredRiskTrackingDueToWildcardMatching() map[string]types.RiskTracking { - deferredRiskTrackingDueToWildcardMatching := make(map[string]types.RiskTracking) - for syntheticRiskId, riskTracking := range context.parsedModel.RiskTracking { - if strings.Contains(syntheticRiskId, "*") { // contains a wildcard char - 
deferredRiskTrackingDueToWildcardMatching[syntheticRiskId] = riskTracking - } - } - - return deferredRiskTrackingDueToWildcardMatching -} - -func (context *Context) hasNotYetAnyDirectNonWildcardRiskTracking(syntheticRiskId string) bool { - if _, ok := context.parsedModel.RiskTracking[syntheticRiskId]; ok { - return false - } - return true -} - -func (context *Context) writeDataAssetDiagramGraphvizDOT(diagramFilenameDOT string, dpi int) *os.File { - if context.Config.Verbose { - fmt.Println("Writing data asset diagram input") - } - var dotContent strings.Builder - dotContent.WriteString("digraph generatedModel { concentrate=true \n") - - // Metadata init =============================================================================== - dotContent.WriteString(` graph [ - dpi=` + strconv.Itoa(dpi) + ` - fontname="Verdana" - labelloc="c" - fontsize="20" - splines=false - rankdir="LR" - nodesep=1.0 - ranksep=3.0 - outputorder="nodesfirst" - ]; - node [ - fontcolor="white" - fontname="Verdana" - fontsize="20" - ]; - edge [ - shape="none" - fontname="Verdana" - fontsize="18" - ]; -`) - - // Technical Assets =============================================================================== - techAssets := make([]types.TechnicalAsset, 0) - for _, techAsset := range context.parsedModel.TechnicalAssets { - techAssets = append(techAssets, techAsset) - } - sort.Sort(types.ByOrderAndIdSort(techAssets)) - for _, technicalAsset := range techAssets { - if len(technicalAsset.DataAssetsStored) > 0 || len(technicalAsset.DataAssetsProcessed) > 0 { - dotContent.WriteString(context.makeTechAssetNode(technicalAsset, true)) - dotContent.WriteString("\n") - } - } - - // Data Assets =============================================================================== - dataAssets := make([]types.DataAsset, 0) - for _, dataAsset := range context.parsedModel.DataAssets { - dataAssets = append(dataAssets, dataAsset) - } - - types.SortByDataAssetDataBreachProbabilityAndTitle(&context.parsedModel, 
dataAssets) - for _, dataAsset := range dataAssets { - dotContent.WriteString(context.makeDataAssetNode(dataAsset)) - dotContent.WriteString("\n") - } - - // Data Asset to Tech Asset links =============================================================================== - for _, technicalAsset := range techAssets { - for _, sourceId := range technicalAsset.DataAssetsStored { - targetId := technicalAsset.Id - dotContent.WriteString("\n") - dotContent.WriteString(hash(sourceId) + " -> " + hash(targetId) + - ` [ color="blue" style="solid" ];`) - dotContent.WriteString("\n") - } - for _, sourceId := range technicalAsset.DataAssetsProcessed { - if !contains(technicalAsset.DataAssetsStored, sourceId) { // here only if not already drawn above - targetId := technicalAsset.Id - dotContent.WriteString("\n") - dotContent.WriteString(hash(sourceId) + " -> " + hash(targetId) + - ` [ color="#666666" style="dashed" ];`) - dotContent.WriteString("\n") - } - } - } - - dotContent.WriteString("}") - - // Write the DOT file - file, err := os.Create(diagramFilenameDOT) - checkErr(err) - defer func() { _ = file.Close() }() - _, err = fmt.Fprintln(file, dotContent.String()) - checkErr(err) - return file -} - -func (context *Context) makeTechAssetNode(technicalAsset types.TechnicalAsset, simplified bool) string { - if simplified { - color := colors.RgbHexColorOutOfScope() - if !technicalAsset.OutOfScope { - generatedRisks := technicalAsset.GeneratedRisks(&context.parsedModel) - switch types.HighestSeverityStillAtRisk(&context.parsedModel, generatedRisks) { - case types.CriticalSeverity: - color = colors.RgbHexColorCriticalRisk() - case types.HighSeverity: - color = colors.RgbHexColorHighRisk() - case types.ElevatedSeverity: - color = colors.RgbHexColorElevatedRisk() - case types.MediumSeverity: - color = colors.RgbHexColorMediumRisk() - case types.LowSeverity: - color = colors.RgbHexColorLowRisk() - default: - color = "#444444" // since black is too dark here as fill color - } - if 
len(types.ReduceToOnlyStillAtRisk(&context.parsedModel, generatedRisks)) == 0 { - color = "#444444" // since black is too dark here as fill color - } - } - return " " + hash(technicalAsset.Id) + ` [ shape="box" style="filled" fillcolor="` + color + `" - label=<` + encode(technicalAsset.Title) + `> penwidth="3.0" color="` + color + `" ]; - ` - } else { - var shape, title string - var lineBreak = "" - switch technicalAsset.Type { - case types.ExternalEntity: - shape = "box" - title = technicalAsset.Title - case types.Process: - shape = "ellipse" - title = technicalAsset.Title - case types.Datastore: - shape = "cylinder" - title = technicalAsset.Title - if technicalAsset.Redundant { - lineBreak = "
" - } - } - - if technicalAsset.UsedAsClientByHuman { - shape = "octagon" - } - - // RAA = Relative Attacker Attractiveness - raa := technicalAsset.RAA - var attackerAttractivenessLabel string - if technicalAsset.OutOfScope { - attackerAttractivenessLabel = "RAA: out of scope" - } else { - attackerAttractivenessLabel = "RAA: " + fmt.Sprintf("%.0f", raa) + " %" - } - - compartmentBorder := "0" - if technicalAsset.MultiTenant { - compartmentBorder = "1" - } - - return " " + hash(technicalAsset.Id) + ` [ - label=<
` + lineBreak + technicalAsset.Technology.String() + `
` + technicalAsset.Size.String() + `
` + encode(title) + `
` + attackerAttractivenessLabel + `
> - shape=` + shape + ` style="` + technicalAsset.DetermineShapeBorderLineStyle() + `,` + technicalAsset.DetermineShapeStyle() + `" penwidth="` + technicalAsset.DetermineShapeBorderPenWidth(&context.parsedModel) + `" fillcolor="` + technicalAsset.DetermineShapeFillColor(&context.parsedModel) + `" - peripheries=` + strconv.Itoa(technicalAsset.DetermineShapePeripheries()) + ` - color="` + technicalAsset.DetermineShapeBorderColor(&context.parsedModel) + "\"\n ]; " - } -} - -func (context *Context) makeDataAssetNode(dataAsset types.DataAsset) string { - var color string - switch dataAsset.IdentifiedDataBreachProbabilityStillAtRisk(&context.parsedModel) { - case types.Probable: - color = colors.RgbHexColorHighRisk() - case types.Possible: - color = colors.RgbHexColorMediumRisk() - case types.Improbable: - color = colors.RgbHexColorLowRisk() - default: - color = "#444444" // since black is too dark here as fill color - } - if !dataAsset.IsDataBreachPotentialStillAtRisk(&context.parsedModel) { - color = "#444444" // since black is too dark here as fill color - } - return " " + hash(dataAsset.Id) + ` [ label=<` + encode(dataAsset.Title) + `> penwidth="3.0" style="filled" fillcolor="` + color + `" color="` + color + "\"\n ]; " -} - -func (context *Context) generateDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string) { - if context.Config.Verbose { - fmt.Println("Rendering data flow diagram input") - } - // tmp files - tmpFileDOT, err := os.CreateTemp(context.Config.TempFolder, "diagram-*-.gv") - checkErr(err) - defer func() { _ = os.Remove(tmpFileDOT.Name()) }() - - tmpFilePNG, err := os.CreateTemp(context.Config.TempFolder, "diagram-*-.png") - checkErr(err) - defer func() { _ = os.Remove(tmpFilePNG.Name()) }() - - // copy into tmp file as input - inputDOT, err := os.ReadFile(dotFile.Name()) - if err != nil { - fmt.Println(err) - return - } - err = os.WriteFile(tmpFileDOT.Name(), inputDOT, 0644) - if err != nil { - fmt.Println("Error creating", 
tmpFileDOT.Name()) - fmt.Println(err) - return - } - - // exec - cmd := exec.Command(filepath.Join(context.Config.BinFolder, common.GraphvizDataFlowDiagramConversionCall), tmpFileDOT.Name(), tmpFilePNG.Name()) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - err = cmd.Run() - if err != nil { - panic(errors.New("graph rendering call failed with error:" + err.Error())) - } - // copy into resulting file - inputPNG, err := os.ReadFile(tmpFilePNG.Name()) - if err != nil { - fmt.Println(err) - return - } - err = os.WriteFile(filepath.Join(targetDir, context.Config.DataFlowDiagramFilenamePNG), inputPNG, 0644) - if err != nil { - fmt.Println("Error creating", context.Config.DataFlowDiagramFilenamePNG) - fmt.Println(err) - return - } -} - -func (context *Context) generateDataAssetDiagramGraphvizImage(dotFile *os.File, targetDir string) { // TODO dedupe with other render...() method here - if context.Config.Verbose { - fmt.Println("Rendering data asset diagram input") - } - // tmp files - tmpFileDOT, err := os.CreateTemp(context.Config.TempFolder, "diagram-*-.gv") - checkErr(err) - defer func() { _ = os.Remove(tmpFileDOT.Name()) }() - - tmpFilePNG, err := os.CreateTemp(context.Config.TempFolder, "diagram-*-.png") - checkErr(err) - defer func() { _ = os.Remove(tmpFilePNG.Name()) }() - - // copy into tmp file as input - inputDOT, err := os.ReadFile(dotFile.Name()) - if err != nil { - fmt.Println(err) - return - } - err = os.WriteFile(tmpFileDOT.Name(), inputDOT, 0644) - if err != nil { - fmt.Println("Error creating", tmpFileDOT.Name()) - fmt.Println(err) - return - } - - // exec - cmd := exec.Command(filepath.Join(context.Config.BinFolder, common.GraphvizDataAssetDiagramConversionCall), tmpFileDOT.Name(), tmpFilePNG.Name()) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - err = cmd.Run() - if err != nil { - panic(errors.New("graph rendering call failed with error: " + err.Error())) - } - // copy into resulting file - inputPNG, err := os.ReadFile(tmpFilePNG.Name()) - if 
err != nil { - fmt.Println(err) - return - } - err = os.WriteFile(filepath.Join(targetDir, context.Config.DataAssetDiagramFilenamePNG), inputPNG, 0644) - if err != nil { - fmt.Println("Error creating", context.Config.DataAssetDiagramFilenamePNG) - fmt.Println(err) - return - } -} - func checkErr(err error) { if err != nil { panic(err) } } -func contains(a []string, x string) bool { - for _, n := range a { - if x == n { - return true - } - } - return false -} - -func hash(s string) string { - h := fnv.New32a() - _, _ = h.Write([]byte(s)) - return fmt.Sprintf("%v", h.Sum32()) -} - -func encode(value string) string { - return strings.ReplaceAll(value, "&", "&") -} - // TODO: remove from here as soon as moved to cobra, here is only for a backward compatibility // this file supposed to be only about the logic func userHomeDir() string { diff --git a/internal/threagile/rules.go b/internal/threagile/rules.go index 4616f216..136beb3e 100644 --- a/internal/threagile/rules.go +++ b/internal/threagile/rules.go @@ -26,7 +26,7 @@ var listRiskRules = &cobra.Command{ cmd.Println("----------------------") cmd.Println("Custom risk rules:") cmd.Println("----------------------") - customRiskRules := types.LoadCustomRiskRules(strings.Split(*customRiskRulesPluginFlag, ","), common.GetProgressReporter(cmd)) + customRiskRules := types.LoadCustomRiskRules(strings.Split(*customRiskRulesPluginFlag, ","), common.DefaultProgressReporter{Verbose: *verboseFlag}) for id, customRule := range customRiskRules { cmd.Println(id, "-->", customRule.Category.Title, "--> with tags:", customRule.Tags) } @@ -53,7 +53,7 @@ var explainRiskRules = &cobra.Command{ cmd.Println("----------------------") cmd.Println("Custom risk rules:") cmd.Println("----------------------") - customRiskRules := types.LoadCustomRiskRules(strings.Split(*customRiskRulesPluginFlag, ","), common.GetProgressReporter(cmd)) + customRiskRules := types.LoadCustomRiskRules(strings.Split(*customRiskRulesPluginFlag, ","), 
common.DefaultProgressReporter{Verbose: *verboseFlag}) for _, customRule := range customRiskRules { cmd.Printf("%v: %v\n", customRule.Category.Id, customRule.Category.Description) } diff --git a/pkg/common/progress-reporter.go b/pkg/common/progress-reporter.go index 839a3d4f..5e61ad94 100644 --- a/pkg/common/progress-reporter.go +++ b/pkg/common/progress-reporter.go @@ -7,39 +7,27 @@ package common import ( "fmt" "log" - - "github.com/spf13/cobra" ) -type ProgressReporter interface { - Println(a ...any) (n int, err error) - Fatalf(format string, v ...any) -} - -type SilentProgressReporter struct{} - -func (SilentProgressReporter) Println(a ...any) (n int, err error) { - return 0, nil +type DefaultProgressReporter struct { + Verbose bool + SuppressError bool } -func (SilentProgressReporter) Fatalf(format string, v ...any) { +func (r DefaultProgressReporter) Info(a ...any) { + if r.Verbose { + fmt.Println(a...) + } } -type CommandLineProgressReporter struct{} - -func (CommandLineProgressReporter) Println(a ...any) (n int, err error) { - return fmt.Println(a...) -} -func (CommandLineProgressReporter) Fatalf(format string, v ...any) { - log.Fatalf(format, v...) +func (DefaultProgressReporter) Warn(a ...any) { + fmt.Println(a...) } -func GetProgressReporter(cobraCmd *cobra.Command) ProgressReporter { - if cobraCmd == nil { - return CommandLineProgressReporter{} - } - if cobraCmd.Flags().Lookup("verbose") != nil && cobraCmd.Flags().Lookup("verbose").Changed { - return SilentProgressReporter{} +func (r DefaultProgressReporter) Error(v ...any) { + if r.SuppressError { + r.Warn(v...) + return } - return CommandLineProgressReporter{} + log.Fatal(v...) 
} diff --git a/pkg/security/types/model.go b/pkg/security/types/model.go index 30ca5c89..6aac1bb6 100644 --- a/pkg/security/types/model.go +++ b/pkg/security/types/model.go @@ -7,12 +7,17 @@ package types import ( "errors" "fmt" + "regexp" "sort" + "strings" "time" "github.com/threagile/threagile/pkg/input" ) +// TODO: move model out of types package and +// rename parsedModel to model or something like this to emphasize that it's just a model +// maybe type ParsedModel struct { Author input.Author `json:"author" yaml:"author"` Title string `json:"title,omitempty" yaml:"title"` @@ -55,6 +60,44 @@ func (parsedModel *ParsedModel) AddToListOfSupportedTags(tags []string) { } } +func (parsedModel *ParsedModel) GetDeferredRiskTrackingDueToWildcardMatching() map[string]RiskTracking { + deferredRiskTrackingDueToWildcardMatching := make(map[string]RiskTracking) + for syntheticRiskId, riskTracking := range parsedModel.RiskTracking { + if strings.Contains(syntheticRiskId, "*") { // contains a wildcard char + deferredRiskTrackingDueToWildcardMatching[syntheticRiskId] = riskTracking + } + } + + return deferredRiskTrackingDueToWildcardMatching +} + +func (parsedModel *ParsedModel) HasNotYetAnyDirectNonWildcardRiskTracking(syntheticRiskId string) bool { + if _, ok := parsedModel.RiskTracking[syntheticRiskId]; ok { + return false + } + return true +} + +func (parsedModel *ParsedModel) ApplyRisk(rule RiskRule, skippedRules *map[string]bool) { + id := rule.Category().Id + _, ok := (*skippedRules)[id] + + if ok { + fmt.Printf("Skipping risk rule %q\n", rule.Category().Id) + delete(*skippedRules, rule.Category().Id) + } else { + parsedModel.AddToListOfSupportedTags(rule.SupportedTags()) + generatedRisks := rule.GenerateRisks(parsedModel) + if generatedRisks != nil { + if len(generatedRisks) > 0 { + parsedModel.GeneratedRisksByCategory[rule.Category().Id] = generatedRisks + } + } else { + fmt.Printf("Failed to generate risks for %q\n", id) + } + } +} + func (parsedModel *ParsedModel) 
CheckTags(tags []string, where string) ([]string, error) { var tagsUsed = make([]string, 0) if tags != nil { @@ -71,6 +114,123 @@ func (parsedModel *ParsedModel) CheckTags(tags []string, where string) ([]string return tagsUsed, nil } +// TODO: refactor skipRiskRules to be a string array instead of a comma-separated string +func (parsedModel *ParsedModel) ApplyRiskGeneration(customRiskRules map[string]*CustomRisk, + builtinRiskRules map[string]RiskRule, + skipRiskRules string, + progressReporter progressReporter) { + progressReporter.Info("Applying risk generation") + + skippedRules := make(map[string]bool) + if len(skipRiskRules) > 0 { + for _, id := range strings.Split(skipRiskRules, ",") { + skippedRules[id] = true + } + } + + for _, rule := range builtinRiskRules { + parsedModel.ApplyRisk(rule, &skippedRules) + } + + // NOW THE CUSTOM RISK RULES (if any) + for id, customRule := range customRiskRules { + _, ok := skippedRules[id] + if ok { + progressReporter.Info("Skipping custom risk rule:", id) + delete(skippedRules, id) + } else { + progressReporter.Info("Executing custom risk rule:", id) + parsedModel.AddToListOfSupportedTags(customRule.Tags) + customRisks := customRule.GenerateRisks(parsedModel) + if len(customRisks) > 0 { + parsedModel.GeneratedRisksByCategory[customRule.Category.Id] = customRisks + } + + progressReporter.Info("Added custom risks:", len(customRisks)) + } + } + + if len(skippedRules) > 0 { + keys := make([]string, 0) + for k := range skippedRules { + keys = append(keys, k) + } + if len(keys) > 0 { + progressReporter.Info("Unknown risk rules to skip:", keys) + } + } + + // save also in map keyed by synthetic risk-id + for _, category := range SortedRiskCategories(parsedModel) { + someRisks := SortedRisksOfCategory(parsedModel, category) + for _, risk := range someRisks { + parsedModel.GeneratedRisksBySyntheticId[strings.ToLower(risk.SyntheticId)] = risk + } + } +} + +func (parsedModel *ParsedModel) 
ApplyWildcardRiskTrackingEvaluation(ignoreOrphanedRiskTracking bool, progressReporter progressReporter) error { + progressReporter.Info("Executing risk tracking evaluation") + for syntheticRiskIdPattern, riskTracking := range parsedModel.GetDeferredRiskTrackingDueToWildcardMatching() { + progressReporter.Info("Applying wildcard risk tracking for risk id: " + syntheticRiskIdPattern) + + foundSome := false + var matchingRiskIdExpression = regexp.MustCompile(strings.ReplaceAll(regexp.QuoteMeta(syntheticRiskIdPattern), `\*`, `[^@]+`)) + for syntheticRiskId := range parsedModel.GeneratedRisksBySyntheticId { + if matchingRiskIdExpression.Match([]byte(syntheticRiskId)) && parsedModel.HasNotYetAnyDirectNonWildcardRiskTracking(syntheticRiskId) { + foundSome = true + parsedModel.RiskTracking[syntheticRiskId] = RiskTracking{ + SyntheticRiskId: strings.TrimSpace(syntheticRiskId), + Justification: riskTracking.Justification, + CheckedBy: riskTracking.CheckedBy, + Ticket: riskTracking.Ticket, + Status: riskTracking.Status, + Date: riskTracking.Date, + } + } + } + + if !foundSome { + if ignoreOrphanedRiskTracking { + progressReporter.Warn("WARNING: Wildcard risk tracking does not match any risk id: " + syntheticRiskIdPattern) + } else { + return errors.New("wildcard risk tracking does not match any risk id: " + syntheticRiskIdPattern) + } + } + } + return nil +} + +func (parsedModel *ParsedModel) CheckRiskTracking(ignoreOrphanedRiskTracking bool, progressReporter progressReporter) error { + progressReporter.Info("Checking risk tracking") + for _, tracking := range parsedModel.RiskTracking { + if _, ok := parsedModel.GeneratedRisksBySyntheticId[tracking.SyntheticRiskId]; !ok { + if ignoreOrphanedRiskTracking { + progressReporter.Info("Risk tracking references unknown risk (risk id not found): " + tracking.SyntheticRiskId) + } else { + return errors.New("Risk tracking references unknown risk (risk id not found) - you might want to use the option -ignore-orphaned-risk-tracking: " + 
tracking.SyntheticRiskId + + "\n\nNOTE: For risk tracking each risk-id needs to be defined (the string with the @ sign in it). " + + "These unique risk IDs are visible in the PDF report (the small grey string under each risk), " + + "the Excel (column \"ID\"), as well as the JSON responses. Some risk IDs have only one @ sign in them, " + + "while others multiple. The idea is to allow for unique but still speaking IDs. Therefore each risk instance " + + "creates its individual ID by taking all affected elements causing the risk to be within an @-delimited part. " + + "Using wildcards (the * sign) for parts delimited by @ signs allows to handle groups of certain risks at once. " + + "Best is to lookup the IDs to use in the created Excel file. Alternatively a model macro \"seed-risk-tracking\" " + + "is available that helps in initially seeding the risk tracking part here based on already identified and not yet handled risks.") + } + } + } + + // save also the risk-category-id and risk-status directly in the risk for better JSON marshalling + for category := range parsedModel.GeneratedRisksByCategory { + for i := range parsedModel.GeneratedRisksByCategory[category] { + // context.parsedModel.GeneratedRisksByCategory[category][i].CategoryId = category + parsedModel.GeneratedRisksByCategory[category][i].RiskStatus = parsedModel.GeneratedRisksByCategory[category][i].GetRiskTrackingStatusDefaultingUnchecked(parsedModel) + } + } + return nil +} + func (parsedModel *ParsedModel) CheckTagExists(referencedTag, where string) error { if !contains(parsedModel.TagsAvailable, referencedTag) { return errors.New("missing referenced tag in overall tag list at " + where + ": " + referencedTag) @@ -331,3 +491,9 @@ func (parsedModel *ParsedModel) RisksOfOnlyOperation(risksByCategory map[string] } return result } + +type progressReporter interface { + Info(a ...any) + Warn(a ...any) + Error(a ...any) +} diff --git a/pkg/security/types/rules.go b/pkg/security/types/rules.go index 
aeb3608f..91048d39 100644 --- a/pkg/security/types/rules.go +++ b/pkg/security/types/rules.go @@ -5,42 +5,39 @@ Copyright © 2023 NAME HERE package types import ( - "github.com/threagile/threagile/pkg/run" + "fmt" "strings" -) -type progressReporter interface { - Println(a ...any) (n int, err error) - Fatalf(format string, v ...any) -} + "github.com/threagile/threagile/pkg/run" +) func LoadCustomRiskRules(pluginFiles []string, reporter progressReporter) map[string]*CustomRisk { customRiskRuleList := make([]string, 0) customRiskRules := make(map[string]*CustomRisk) if len(pluginFiles) > 0 { - _, _ = reporter.Println("Loading custom risk rules:", strings.Join(pluginFiles, ", ")) + reporter.Info("Loading custom risk rules:", strings.Join(pluginFiles, ", ")) for _, pluginFile := range pluginFiles { if len(pluginFile) > 0 { runner, loadError := new(run.Runner).Load(pluginFile) if loadError != nil { - reporter.Fatalf("WARNING: Custom risk rule %q not loaded: %v\n", pluginFile, loadError) + reporter.Error(fmt.Sprintf("WARNING: Custom risk rule %q not loaded: %v\n", pluginFile, loadError)) } risk := new(CustomRisk) runError := runner.Run(nil, &risk, "-get-info") if runError != nil { - reporter.Fatalf("WARNING: Failed to get ID for custom risk rule %q: %v\n", pluginFile, runError) + reporter.Error(fmt.Sprintf("WARNING: Failed to get info for custom risk rule %q: %v\n", pluginFile, runError)) } risk.Runner = runner customRiskRules[risk.ID] = risk customRiskRuleList = append(customRiskRuleList, risk.ID) - _, _ = reporter.Println("Custom risk rule loaded:", risk.ID) + reporter.Info("Custom risk rule loaded:", risk.ID) } } - _, _ = reporter.Println("Loaded custom risk rules:", strings.Join(customRiskRuleList, ", ")) + reporter.Info("Loaded custom risk rules:", strings.Join(customRiskRuleList, ", ")) } return customRiskRules diff --git a/pkg/server/server.go b/pkg/server/server.go index 59beb2b5..a4c8909d 100644 --- a/pkg/server/server.go +++ b/pkg/server/server.go @@ -6,7 +6,6 
@@ package server import ( "fmt" - "github.com/threagile/threagile/pkg/common" "log" "net/http" "os" @@ -16,6 +15,8 @@ import ( "strings" "sync" + "github.com/threagile/threagile/pkg/common" + "github.com/gin-gonic/gin" "github.com/threagile/threagile/pkg/docs" "github.com/threagile/threagile/pkg/security/risks" @@ -173,7 +174,8 @@ func RunServer(config common.Config) { router.PUT("/models/:model-id/shared-runtimes/:shared-runtime-id", s.setSharedRuntime) router.DELETE("/models/:model-id/shared-runtimes/:shared-runtime-id", s.deleteSharedRuntime) - s.customRiskRules = types.LoadCustomRiskRules(s.config.RiskRulesPlugins, common.CommandLineProgressReporter{}) + reporter := common.DefaultProgressReporter{Verbose: s.config.Verbose} + s.customRiskRules = types.LoadCustomRiskRules(s.config.RiskRulesPlugins, reporter) fmt.Println("Threagile s running...") _ = router.Run(":" + strconv.Itoa(s.config.ServerPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified From 5eb5b5be4888830247ea3bf9296999baef55ab8c Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Fri, 5 Jan 2024 11:27:35 +0000 Subject: [PATCH 35/68] Add forgotten file --- pkg/report/graphviz.go | 578 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 578 insertions(+) create mode 100644 pkg/report/graphviz.go diff --git a/pkg/report/graphviz.go b/pkg/report/graphviz.go new file mode 100644 index 00000000..db65d791 --- /dev/null +++ b/pkg/report/graphviz.go @@ -0,0 +1,578 @@ +package report + +import ( + "errors" + "fmt" + "hash/fnv" + "os" + "os/exec" + "path/filepath" + "regexp" + "sort" + "strconv" + "strings" + + "github.com/threagile/threagile/pkg/colors" + "github.com/threagile/threagile/pkg/common" + "github.com/threagile/threagile/pkg/security/types" +) + +func WriteDataFlowDiagramGraphvizDOT(parsedModel *types.ParsedModel, + diagramFilenameDOT string, dpi int, addModelTitle bool, + progressReporter progressReporter) *os.File { + progressReporter.Info("Writing data flow diagram 
input") + + var dotContent strings.Builder + dotContent.WriteString("digraph generatedModel { concentrate=false \n") + + // Metadata init =============================================================================== + tweaks := "" + if parsedModel.DiagramTweakNodesep > 0 { + tweaks += "\n nodesep=\"" + strconv.Itoa(parsedModel.DiagramTweakNodesep) + "\"" + } + if parsedModel.DiagramTweakRanksep > 0 { + tweaks += "\n ranksep=\"" + strconv.Itoa(parsedModel.DiagramTweakRanksep) + "\"" + } + suppressBidirectionalArrows := true + drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks := true + splines := "ortho" + if len(parsedModel.DiagramTweakEdgeLayout) > 0 { + switch parsedModel.DiagramTweakEdgeLayout { + case "spline": + splines = "spline" + drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false + case "polyline": + splines = "polyline" + drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false + case "ortho": + splines = "ortho" + suppressBidirectionalArrows = true + case "curved": + splines = "curved" + drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false + case "false": + splines = "false" + drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false + default: + panic(errors.New("unknown value for diagram_tweak_suppress_edge_labels (spline, polyline, ortho, curved, false): " + + parsedModel.DiagramTweakEdgeLayout)) + } + } + rankdir := "TB" + if parsedModel.DiagramTweakLayoutLeftToRight { + rankdir = "LR" + } + modelTitle := "" + if addModelTitle { + modelTitle = `label="` + parsedModel.Title + `"` + } + dotContent.WriteString(` graph [ ` + modelTitle + ` + labelloc=t + fontname="Verdana" + fontsize=40 + outputorder="nodesfirst" + dpi=` + strconv.Itoa(dpi) + ` + splines=` + splines + ` + rankdir="` + rankdir + `" +` + tweaks + ` + ]; + node [ + fontname="Verdana" + fontsize="20" + ]; + edge [ + shape="none" + fontname="Verdana" + fontsize="18" + ]; +`) + + // Trust Boundaries 
=============================================================================== + var subgraphSnippetsById = make(map[string]string) + // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order + // range over them in sorted (hence re-producible) way: + keys := make([]string, 0) + for k := range parsedModel.TrustBoundaries { + keys = append(keys, k) + } + sort.Strings(keys) + for _, key := range keys { + trustBoundary := parsedModel.TrustBoundaries[key] + var snippet strings.Builder + if len(trustBoundary.TechnicalAssetsInside) > 0 || len(trustBoundary.TrustBoundariesNested) > 0 { + if drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks { + // see https://stackoverflow.com/questions/17247455/how-do-i-add-extra-space-between-clusters?noredirect=1&lq=1 + snippet.WriteString("\n subgraph cluster_space_boundary_for_layout_only_1" + hash(trustBoundary.Id) + " {\n") + snippet.WriteString(` graph [ + dpi=` + strconv.Itoa(dpi) + ` + label=<
> + fontsize="21" + style="invis" + color="green" + fontcolor="green" + margin="50.0" + penwidth="6.5" + outputorder="nodesfirst" + ];`) + } + snippet.WriteString("\n subgraph cluster_" + hash(trustBoundary.Id) + " {\n") + color, fontColor, bgColor, style, fontname := colors.RgbHexColorTwilight(), colors.RgbHexColorTwilight() /*"#550E0C"*/, "#FAFAFA", "dashed", "Verdana" + penWidth := 4.5 + if len(trustBoundary.TrustBoundariesNested) > 0 { + //color, fontColor, style, fontname = colors.Blue, colors.Blue, "dashed", "Verdana" + penWidth = 5.5 + } + if len(trustBoundary.ParentTrustBoundaryID(parsedModel)) > 0 { + bgColor = "#F1F1F1" + } + if trustBoundary.Type == types.NetworkPolicyNamespaceIsolation { + fontColor, bgColor = "#222222", "#DFF4FF" + } + if trustBoundary.Type == types.ExecutionEnvironment { + fontColor, bgColor, style = "#555555", "#FFFFF0", "dotted" + } + snippet.WriteString(` graph [ + dpi=` + strconv.Itoa(dpi) + ` + label=<
` + trustBoundary.Title + ` (` + trustBoundary.Type.String() + `)
> + fontsize="21" + style="` + style + `" + color="` + color + `" + bgcolor="` + bgColor + `" + fontcolor="` + fontColor + `" + fontname="` + fontname + `" + penwidth="` + fmt.Sprintf("%f", penWidth) + `" + forcelabels=true + outputorder="nodesfirst" + margin="50.0" + ];`) + snippet.WriteString("\n") + keys := trustBoundary.TechnicalAssetsInside + sort.Strings(keys) + for _, technicalAssetInside := range keys { + //log.Println("About to add technical asset link to trust boundary: ", technicalAssetInside) + technicalAsset := parsedModel.TechnicalAssets[technicalAssetInside] + snippet.WriteString(hash(technicalAsset.Id)) + snippet.WriteString(";\n") + } + keys = trustBoundary.TrustBoundariesNested + sort.Strings(keys) + for _, trustBoundaryNested := range keys { + //log.Println("About to add nested trust boundary to trust boundary: ", trustBoundaryNested) + trustBoundaryNested := parsedModel.TrustBoundaries[trustBoundaryNested] + snippet.WriteString("LINK-NEEDS-REPLACED-BY-cluster_" + hash(trustBoundaryNested.Id)) + snippet.WriteString(";\n") + } + snippet.WriteString(" }\n\n") + if drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks { + snippet.WriteString(" }\n\n") + } + } + subgraphSnippetsById[hash(trustBoundary.Id)] = snippet.String() + } + // here replace links and remove from map after replacement (i.e. 
move snippet into nested) + for i := range subgraphSnippetsById { + re := regexp.MustCompile(`LINK-NEEDS-REPLACED-BY-cluster_([0-9]*);`) + for { + matches := re.FindStringSubmatch(subgraphSnippetsById[i]) + if len(matches) > 0 { + embeddedSnippet := " //nested:" + subgraphSnippetsById[matches[1]] + subgraphSnippetsById[i] = strings.ReplaceAll(subgraphSnippetsById[i], matches[0], embeddedSnippet) + subgraphSnippetsById[matches[1]] = "" // to something like remove it + } else { + break + } + } + } + // now write them all + keys = make([]string, 0) + for k := range subgraphSnippetsById { + keys = append(keys, k) + } + sort.Strings(keys) + for _, key := range keys { + snippet := subgraphSnippetsById[key] + dotContent.WriteString(snippet) + } + + // Technical Assets =============================================================================== + // first create them in memory (see the link replacement below for nested trust boundaries) - otherwise in Go ranging over map is random order + // range over them in sorted (hence re-producible) way: + // Convert map to slice of values: + var techAssets []types.TechnicalAsset + for _, techAsset := range parsedModel.TechnicalAssets { + techAssets = append(techAssets, techAsset) + } + sort.Sort(types.ByOrderAndIdSort(techAssets)) + for _, technicalAsset := range techAssets { + dotContent.WriteString(makeTechAssetNode(parsedModel, technicalAsset, false)) + dotContent.WriteString("\n") + } + + // Data Flows (Technical Communication Links) =============================================================================== + for _, technicalAsset := range techAssets { + for _, dataFlow := range technicalAsset.CommunicationLinks { + sourceId := technicalAsset.Id + targetId := dataFlow.TargetId + //log.Println("About to add link from", sourceId, "to", targetId, "with id", dataFlow.Id) + var arrowStyle, arrowColor, readOrWriteHead, readOrWriteTail string + if dataFlow.Readonly { + readOrWriteHead = "empty" + readOrWriteTail = "odot" + } 
else { + readOrWriteHead = "normal" + readOrWriteTail = "dot" + } + dir := "forward" + if dataFlow.IsBidirectional() { + if !suppressBidirectionalArrows { // as it does not work as bug in graphviz with ortho: https://gitlab.com/graphviz/graphviz/issues/144 + dir = "both" + } + } + arrowStyle = ` style="` + dataFlow.DetermineArrowLineStyle() + `" penwidth="` + dataFlow.DetermineArrowPenWidth(parsedModel) + `" arrowtail="` + readOrWriteTail + `" arrowhead="` + readOrWriteHead + `" dir="` + dir + `" arrowsize="2.0" ` + arrowColor = ` color="` + dataFlow.DetermineArrowColor(parsedModel) + `"` + tweaks := "" + if dataFlow.DiagramTweakWeight > 0 { + tweaks += " weight=\"" + strconv.Itoa(dataFlow.DiagramTweakWeight) + "\" " + } + + dotContent.WriteString("\n") + dotContent.WriteString(" " + hash(sourceId) + " -> " + hash(targetId) + + ` [` + arrowColor + ` ` + arrowStyle + tweaks + ` constraint=` + strconv.FormatBool(dataFlow.DiagramTweakConstraint) + ` `) + if !parsedModel.DiagramTweakSuppressEdgeLabels { + dotContent.WriteString(` xlabel="` + encode(dataFlow.Protocol.String()) + `" fontcolor="` + dataFlow.DetermineLabelColor(parsedModel) + `" `) + } + dotContent.WriteString(" ];\n") + } + } + + dotContent.WriteString(makeDiagramInvisibleConnectionsTweaks(parsedModel)) + dotContent.WriteString(makeDiagramSameRankNodeTweaks(parsedModel)) + + dotContent.WriteString("}") + + //fmt.Println(dotContent.String()) + + // Write the DOT file + file, err := os.Create(diagramFilenameDOT) + checkErr(err) + defer func() { _ = file.Close() }() + _, err = fmt.Fprintln(file, dotContent.String()) + checkErr(err) + return file +} + +func GenerateDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string, + tempFolder, binFolder, dataFlowDiagramFilenamePNG string, progressReporter progressReporter) error { + progressReporter.Info("Rendering data flow diagram input") + // tmp files + tmpFileDOT, err := os.CreateTemp(tempFolder, "diagram-*-.gv") + checkErr(err) + defer func() { _ = 
os.Remove(tmpFileDOT.Name()) }() + + tmpFilePNG, err := os.CreateTemp(tempFolder, "diagram-*-.png") + checkErr(err) + defer func() { _ = os.Remove(tmpFilePNG.Name()) }() + + // copy into tmp file as input + inputDOT, err := os.ReadFile(dotFile.Name()) + if err != nil { + return fmt.Errorf("Error reading %s: %v", dotFile.Name(), err) + } + err = os.WriteFile(tmpFileDOT.Name(), inputDOT, 0644) + if err != nil { + return fmt.Errorf("Error creating %s: %v", tmpFileDOT.Name(), err) + } + + // exec + cmd := exec.Command(filepath.Join(binFolder, common.GraphvizDataFlowDiagramConversionCall), tmpFileDOT.Name(), tmpFilePNG.Name()) + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + err = cmd.Run() + if err != nil { + panic(errors.New("graph rendering call failed with error:" + err.Error())) + } + // copy into resulting file + inputPNG, err := os.ReadFile(tmpFilePNG.Name()) + if err != nil { + return fmt.Errorf("Error copying into resulting file %s: %v", tmpFilePNG.Name(), err) + } + err = os.WriteFile(filepath.Join(targetDir, dataFlowDiagramFilenamePNG), inputPNG, 0644) + if err != nil { + return fmt.Errorf("Error creating %s: %v", filepath.Join(targetDir, dataFlowDiagramFilenamePNG), err) + } + return nil +} + +func makeDiagramSameRankNodeTweaks(parsedModel *types.ParsedModel) string { + // see https://stackoverflow.com/questions/25734244/how-do-i-place-nodes-on-the-same-level-in-dot + tweak := "" + if len(parsedModel.DiagramTweakSameRankAssets) > 0 { + for _, sameRank := range parsedModel.DiagramTweakSameRankAssets { + assetIDs := strings.Split(sameRank, ":") + if len(assetIDs) > 0 { + tweak += "{ rank=same; " + for _, id := range assetIDs { + checkErr(parsedModel.CheckTechnicalAssetExists(id, "diagram tweak same-rank", true)) + if len(parsedModel.TechnicalAssets[id].GetTrustBoundaryId(parsedModel)) > 0 { + panic(errors.New("technical assets (referenced in same rank diagram tweak) are inside trust boundaries: " + + fmt.Sprintf("%v", 
parsedModel.DiagramTweakSameRankAssets))) + } + tweak += " " + hash(id) + "; " + } + tweak += " }" + } + } + } + return tweak +} + +func makeDiagramInvisibleConnectionsTweaks(parsedModel *types.ParsedModel) string { + // see https://stackoverflow.com/questions/2476575/how-to-control-node-placement-in-graphviz-i-e-avoid-edge-crossings + tweak := "" + if len(parsedModel.DiagramTweakInvisibleConnectionsBetweenAssets) > 0 { + for _, invisibleConnections := range parsedModel.DiagramTweakInvisibleConnectionsBetweenAssets { + assetIDs := strings.Split(invisibleConnections, ":") + if len(assetIDs) == 2 { + checkErr(parsedModel.CheckTechnicalAssetExists(assetIDs[0], "diagram tweak connections", true)) + checkErr(parsedModel.CheckTechnicalAssetExists(assetIDs[1], "diagram tweak connections", true)) + tweak += "\n" + hash(assetIDs[0]) + " -> " + hash(assetIDs[1]) + " [style=invis]; \n" + } + } + } + return tweak +} + +func WriteDataAssetDiagramGraphvizDOT(parsedModel *types.ParsedModel, diagramFilenameDOT string, dpi int, + progressReporter progressReporter) *os.File { + progressReporter.Info("Writing data asset diagram input") + + var dotContent strings.Builder + dotContent.WriteString("digraph generatedModel { concentrate=true \n") + + // Metadata init =============================================================================== + dotContent.WriteString(` graph [ + dpi=` + strconv.Itoa(dpi) + ` + fontname="Verdana" + labelloc="c" + fontsize="20" + splines=false + rankdir="LR" + nodesep=1.0 + ranksep=3.0 + outputorder="nodesfirst" + ]; + node [ + fontcolor="white" + fontname="Verdana" + fontsize="20" + ]; + edge [ + shape="none" + fontname="Verdana" + fontsize="18" + ]; +`) + + // Technical Assets =============================================================================== + techAssets := make([]types.TechnicalAsset, 0) + for _, techAsset := range parsedModel.TechnicalAssets { + techAssets = append(techAssets, techAsset) + } + 
sort.Sort(types.ByOrderAndIdSort(techAssets)) + for _, technicalAsset := range techAssets { + if len(technicalAsset.DataAssetsStored) > 0 || len(technicalAsset.DataAssetsProcessed) > 0 { + dotContent.WriteString(makeTechAssetNode(parsedModel, technicalAsset, true)) + dotContent.WriteString("\n") + } + } + + // Data Assets =============================================================================== + dataAssets := make([]types.DataAsset, 0) + for _, dataAsset := range parsedModel.DataAssets { + dataAssets = append(dataAssets, dataAsset) + } + + types.SortByDataAssetDataBreachProbabilityAndTitle(parsedModel, dataAssets) + for _, dataAsset := range dataAssets { + dotContent.WriteString(makeDataAssetNode(parsedModel, dataAsset)) + dotContent.WriteString("\n") + } + + // Data Asset to Tech Asset links =============================================================================== + for _, technicalAsset := range techAssets { + for _, sourceId := range technicalAsset.DataAssetsStored { + targetId := technicalAsset.Id + dotContent.WriteString("\n") + dotContent.WriteString(hash(sourceId) + " -> " + hash(targetId) + + ` [ color="blue" style="solid" ];`) + dotContent.WriteString("\n") + } + for _, sourceId := range technicalAsset.DataAssetsProcessed { + if !contains(technicalAsset.DataAssetsStored, sourceId) { // here only if not already drawn above + targetId := technicalAsset.Id + dotContent.WriteString("\n") + dotContent.WriteString(hash(sourceId) + " -> " + hash(targetId) + + ` [ color="#666666" style="dashed" ];`) + dotContent.WriteString("\n") + } + } + } + + dotContent.WriteString("}") + + // Write the DOT file + file, err := os.Create(diagramFilenameDOT) + checkErr(err) + defer func() { _ = file.Close() }() + _, err = fmt.Fprintln(file, dotContent.String()) + checkErr(err) + return file +} + +func makeDataAssetNode(parsedModel *types.ParsedModel, dataAsset types.DataAsset) string { + var color string + switch 
dataAsset.IdentifiedDataBreachProbabilityStillAtRisk(parsedModel) { + case types.Probable: + color = colors.RgbHexColorHighRisk() + case types.Possible: + color = colors.RgbHexColorMediumRisk() + case types.Improbable: + color = colors.RgbHexColorLowRisk() + default: + color = "#444444" // since black is too dark here as fill color + } + if !dataAsset.IsDataBreachPotentialStillAtRisk(parsedModel) { + color = "#444444" // since black is too dark here as fill color + } + return " " + hash(dataAsset.Id) + ` [ label=<` + encode(dataAsset.Title) + `> penwidth="3.0" style="filled" fillcolor="` + color + `" color="` + color + "\"\n ]; " +} + +func makeTechAssetNode(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset, simplified bool) string { + if simplified { + color := colors.RgbHexColorOutOfScope() + if !technicalAsset.OutOfScope { + generatedRisks := technicalAsset.GeneratedRisks(parsedModel) + switch types.HighestSeverityStillAtRisk(parsedModel, generatedRisks) { + case types.CriticalSeverity: + color = colors.RgbHexColorCriticalRisk() + case types.HighSeverity: + color = colors.RgbHexColorHighRisk() + case types.ElevatedSeverity: + color = colors.RgbHexColorElevatedRisk() + case types.MediumSeverity: + color = colors.RgbHexColorMediumRisk() + case types.LowSeverity: + color = colors.RgbHexColorLowRisk() + default: + color = "#444444" // since black is too dark here as fill color + } + if len(types.ReduceToOnlyStillAtRisk(parsedModel, generatedRisks)) == 0 { + color = "#444444" // since black is too dark here as fill color + } + } + return " " + hash(technicalAsset.Id) + ` [ shape="box" style="filled" fillcolor="` + color + `" + label=<` + encode(technicalAsset.Title) + `> penwidth="3.0" color="` + color + `" ]; + ` + } else { + var shape, title string + var lineBreak = "" + switch technicalAsset.Type { + case types.ExternalEntity: + shape = "box" + title = technicalAsset.Title + case types.Process: + shape = "ellipse" + title = technicalAsset.Title + 
case types.Datastore: + shape = "cylinder" + title = technicalAsset.Title + if technicalAsset.Redundant { + lineBreak = "
" + } + } + + if technicalAsset.UsedAsClientByHuman { + shape = "octagon" + } + + // RAA = Relative Attacker Attractiveness + raa := technicalAsset.RAA + var attackerAttractivenessLabel string + if technicalAsset.OutOfScope { + attackerAttractivenessLabel = "RAA: out of scope" + } else { + attackerAttractivenessLabel = "RAA: " + fmt.Sprintf("%.0f", raa) + " %" + } + + compartmentBorder := "0" + if technicalAsset.MultiTenant { + compartmentBorder = "1" + } + + return " " + hash(technicalAsset.Id) + ` [ + label=<
` + lineBreak + technicalAsset.Technology.String() + `
` + technicalAsset.Size.String() + `
` + encode(title) + `
` + attackerAttractivenessLabel + `
> + shape=` + shape + ` style="` + technicalAsset.DetermineShapeBorderLineStyle() + `,` + technicalAsset.DetermineShapeStyle() + `" penwidth="` + technicalAsset.DetermineShapeBorderPenWidth(parsedModel) + `" fillcolor="` + technicalAsset.DetermineShapeFillColor(parsedModel) + `" + peripheries=` + strconv.Itoa(technicalAsset.DetermineShapePeripheries()) + ` + color="` + technicalAsset.DetermineShapeBorderColor(parsedModel) + "\"\n ]; " + } +} + +func GenerateDataAssetDiagramGraphvizImage(dotFile *os.File, targetDir string, + tempFolder, binFolder, dataAssetDiagramFilenamePNG string, progressReporter progressReporter) error { // TODO dedupe with other render...() method here + progressReporter.Info("Rendering data asset diagram input") + // tmp files + tmpFileDOT, err := os.CreateTemp(tempFolder, "diagram-*-.gv") + if err != nil { + return fmt.Errorf("Error creating temp file: %v", err) + } + defer func() { _ = os.Remove(tmpFileDOT.Name()) }() + + tmpFilePNG, err := os.CreateTemp(tempFolder, "diagram-*-.png") + if err != nil { + return fmt.Errorf("Error creating temp file: %v", err) + } + defer func() { _ = os.Remove(tmpFilePNG.Name()) }() + + // copy into tmp file as input + inputDOT, err := os.ReadFile(dotFile.Name()) + if err != nil { + return fmt.Errorf("Error reading %s: %v", dotFile.Name(), err) + } + err = os.WriteFile(tmpFileDOT.Name(), inputDOT, 0644) + if err != nil { + return fmt.Errorf("Error creating %s: %v", tmpFileDOT.Name(), err) + } + + // exec + cmd := exec.Command(filepath.Join(binFolder, common.GraphvizDataAssetDiagramConversionCall), tmpFileDOT.Name(), tmpFilePNG.Name()) + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + err = cmd.Run() + if err != nil { + return errors.New("graph rendering call failed with error: " + err.Error()) + } + // copy into resulting file + inputPNG, err := os.ReadFile(tmpFilePNG.Name()) + if err != nil { + return fmt.Errorf("Error copying into resulting file %s: %v", tmpFilePNG.Name(), err) + } + err = 
os.WriteFile(filepath.Join(targetDir, dataAssetDiagramFilenamePNG), inputPNG, 0644) + if err != nil { + return fmt.Errorf("Error creating %s: %v", filepath.Join(targetDir, dataAssetDiagramFilenamePNG), err) + } + return nil +} + +func hash(s string) string { + h := fnv.New32a() + _, _ = h.Write([]byte(s)) + return fmt.Sprintf("%v", h.Sum32()) +} + +func encode(value string) string { + return strings.ReplaceAll(value, "&", "&") +} + +type progressReporter interface { + Info(a ...any) +} From 027df78a1f3cac38392d2c5a99f7bea281456b9f Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Fri, 5 Jan 2024 11:40:21 +0000 Subject: [PATCH 36/68] Move parsed model out of context --- internal/threagile/context.go | 78 ++++++++++++++--------------------- 1 file changed, 31 insertions(+), 47 deletions(-) diff --git a/internal/threagile/context.go b/internal/threagile/context.go index eb8194d0..e3152042 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -66,8 +66,6 @@ type Context struct { *GenerateCommands ServerMode bool - - parsedModel types.ParsedModel } func (context *Context) Init() *Context { @@ -87,31 +85,22 @@ func (context *Context) Defaults(buildTimestamp string) *Context { } func (context *Context) DoIt() { + progressReporter := common.DefaultProgressReporter{Verbose: context.Config.Verbose} defer func() { var err error if r := recover(); r != nil { err = r.(error) - if context.Config.Verbose { - log.Println(err) - } + progressReporter.Info("ERROR: " + err.Error()) _, _ = os.Stderr.WriteString(err.Error() + "\n") os.Exit(2) } }() - progressReporter := common.DefaultProgressReporter{Verbose: context.Config.Verbose} - if len(context.Config.ExecuteModelMacro) > 0 { fmt.Println(docs.Logo + "\n\n" + docs.VersionText) - } else { - if context.Config.Verbose { - fmt.Println("Writing into output directory:", context.Config.OutputFolder) - } - } - - if context.Config.Verbose { - fmt.Println("Parsing model:", context.Config.InputFile) } + 
progressReporter.Info("Writing into output directory:", context.Config.OutputFolder) + progressReporter.Info("Parsing model:", context.Config.InputFile) modelInput := *new(input.ModelInput).Defaults() loadError := modelInput.Load(context.Config.InputFile) @@ -129,20 +118,17 @@ func (context *Context) DoIt() { if parseError != nil { log.Fatal("Unable to parse model yaml: ", parseError) } + introTextRAA := applyRAA(parsedModel, context.Config.BinFolder, context.RAAPlugin, progressReporter) - context.parsedModel = *parsedModel - - introTextRAA := context.applyRAA() - - context.parsedModel.ApplyRiskGeneration(customRiskRules, builtinRiskRules, + parsedModel.ApplyRiskGeneration(customRiskRules, builtinRiskRules, context.Config.SkipRiskRules, progressReporter) - err := context.parsedModel.ApplyWildcardRiskTrackingEvaluation(context.Config.IgnoreOrphanedRiskTracking, progressReporter) + err := parsedModel.ApplyWildcardRiskTrackingEvaluation(context.Config.IgnoreOrphanedRiskTracking, progressReporter) if err != nil { // TODO: do not panic and gracefully handle the error panic(err) } - err = context.parsedModel.CheckRiskTracking(context.Config.IgnoreOrphanedRiskTracking, progressReporter) + err = parsedModel.CheckRiskTracking(context.Config.IgnoreOrphanedRiskTracking, progressReporter) if err != nil { // TODO: do not panic and gracefully handle the error panic(err) @@ -182,9 +168,9 @@ func (context *Context) DoIt() { for { switch macroDetails.ID { case addbuildpipeline.GetMacroDetails().ID: - nextQuestion, err = addbuildpipeline.GetNextQuestion(&context.parsedModel) + nextQuestion, err = addbuildpipeline.GetNextQuestion(parsedModel) case addvault.GetMacroDetails().ID: - nextQuestion, err = addvault.GetNextQuestion(&context.parsedModel) + nextQuestion, err = addvault.GetNextQuestion(parsedModel) case prettyprint.GetMacroDetails().ID: nextQuestion, err = prettyprint.GetNextQuestion() case removeunusedtags.GetMacroDetails().ID: @@ -357,9 +343,9 @@ func (context *Context) 
DoIt() { var err error switch macroDetails.ID { case addbuildpipeline.GetMacroDetails().ID: - changes, message, validResult, err = addbuildpipeline.GetFinalChangeImpact(&modelInput, &context.parsedModel) + changes, message, validResult, err = addbuildpipeline.GetFinalChangeImpact(&modelInput, parsedModel) case addvault.GetMacroDetails().ID: - changes, message, validResult, err = addvault.GetFinalChangeImpact(&modelInput, &context.parsedModel) + changes, message, validResult, err = addvault.GetFinalChangeImpact(&modelInput, parsedModel) case prettyprint.GetMacroDetails().ID: changes, message, validResult, err = prettyprint.GetFinalChangeImpact(&modelInput) case removeunusedtags.GetMacroDetails().ID: @@ -393,17 +379,17 @@ func (context *Context) DoIt() { var err error switch macroDetails.ID { case addbuildpipeline.GetMacroDetails().ID: - message, validResult, err = addbuildpipeline.Execute(&modelInput, &context.parsedModel) + message, validResult, err = addbuildpipeline.Execute(&modelInput, parsedModel) case addvault.GetMacroDetails().ID: - message, validResult, err = addvault.Execute(&modelInput, &context.parsedModel) + message, validResult, err = addvault.Execute(&modelInput, parsedModel) case prettyprint.GetMacroDetails().ID: message, validResult, err = prettyprint.Execute(&modelInput) case removeunusedtags.GetMacroDetails().ID: - message, validResult, err = removeunusedtags.Execute(&modelInput, &context.parsedModel) + message, validResult, err = removeunusedtags.Execute(&modelInput, parsedModel) case seedrisktracking.GetMacroDetails().ID: - message, validResult, err = seedrisktracking.Execute(&context.parsedModel, &modelInput) + message, validResult, err = seedrisktracking.Execute(parsedModel, &modelInput) case seedtags.GetMacroDetails().ID: - message, validResult, err = seedtags.Execute(&modelInput, &context.parsedModel) + message, validResult, err = seedtags.Execute(&modelInput, parsedModel) } checkErr(err) if !validResult { @@ -454,7 +440,7 @@ func (context 
*Context) DoIt() { gvFile = tmpFileGV.Name() defer func() { _ = os.Remove(gvFile) }() } - dotFile := report.WriteDataFlowDiagramGraphvizDOT(&context.parsedModel, gvFile, diagramDPI, context.Config.AddModelTitle, progressReporter) + dotFile := report.WriteDataFlowDiagramGraphvizDOT(parsedModel, gvFile, diagramDPI, context.Config.AddModelTitle, progressReporter) err := report.GenerateDataFlowDiagramGraphvizImage(dotFile, context.Config.OutputFolder, context.Config.TempFolder, context.Config.BinFolder, context.Config.DataFlowDiagramFilenamePNG, progressReporter) @@ -471,7 +457,7 @@ func (context *Context) DoIt() { gvFile = tmpFile.Name() defer func() { _ = os.Remove(gvFile) }() } - dotFile := report.WriteDataAssetDiagramGraphvizDOT(&context.parsedModel, gvFile, diagramDPI, progressReporter) + dotFile := report.WriteDataAssetDiagramGraphvizDOT(parsedModel, gvFile, diagramDPI, progressReporter) err := report.GenerateDataAssetDiagramGraphvizImage(dotFile, context.Config.OutputFolder, context.Config.TempFolder, context.Config.BinFolder, context.Config.DataAssetDiagramFilenamePNG, progressReporter) if err != nil { @@ -484,7 +470,7 @@ func (context *Context) DoIt() { if context.Config.Verbose { fmt.Println("Writing risks json") } - report.WriteRisksJSON(&context.parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.JsonRisksFilename)) + report.WriteRisksJSON(parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.JsonRisksFilename)) } // technical assets json @@ -492,7 +478,7 @@ func (context *Context) DoIt() { if context.Config.Verbose { fmt.Println("Writing technical assets json") } - report.WriteTechnicalAssetsJSON(&context.parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.JsonTechnicalAssetsFilename)) + report.WriteTechnicalAssetsJSON(parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.JsonTechnicalAssetsFilename)) } // risks as risks json @@ -500,7 +486,7 @@ func (context *Context) DoIt() { if 
context.Config.Verbose { fmt.Println("Writing stats json") } - report.WriteStatsJSON(&context.parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.JsonStatsFilename)) + report.WriteStatsJSON(parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.JsonStatsFilename)) } // risks Excel @@ -508,7 +494,7 @@ func (context *Context) DoIt() { if context.Config.Verbose { fmt.Println("Writing risks excel") } - report.WriteRisksExcelToFile(&context.parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.ExcelRisksFilename)) + report.WriteRisksExcelToFile(parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.ExcelRisksFilename)) } // tags Excel @@ -516,7 +502,7 @@ func (context *Context) DoIt() { if context.Config.Verbose { fmt.Println("Writing tags excel") } - report.WriteTagsExcelToFile(&context.parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.ExcelTagsFilename)) + report.WriteTagsExcelToFile(parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.ExcelTagsFilename)) } if context.GenerateCommands.ReportPDF { @@ -544,7 +530,7 @@ func (context *Context) DoIt() { introTextRAA, customRiskRules, context.Config.TempFolder, - &context.parsedModel) + parsedModel) } } @@ -584,20 +570,18 @@ func (context *Context) printBorder(length int, bold bool) { fmt.Println() } -func (context *Context) applyRAA() string { - if context.Config.Verbose { - fmt.Println("Applying RAA calculation:", context.Config.RAAPlugin) - } +func applyRAA(parsedModel *types.ParsedModel, binFolder, raaPlugin string, progressReporter common.DefaultProgressReporter) string { + progressReporter.Info("Applying RAA calculation:", raaPlugin) - runner, loadError := new(run.Runner).Load(filepath.Join(context.Config.BinFolder, context.Config.RAAPlugin)) + runner, loadError := new(run.Runner).Load(filepath.Join(binFolder, raaPlugin)) if loadError != nil { - fmt.Printf("WARNING: raa %q not loaded: %v\n", 
context.Config.RAAPlugin, loadError) + progressReporter.Warn(fmt.Sprintf("WARNING: raa %q not loaded: %v\n", raaPlugin, loadError)) return "" } - runError := runner.Run(context.parsedModel, &context.parsedModel) + runError := runner.Run(parsedModel, parsedModel) if runError != nil { - fmt.Printf("WARNING: raa %q not applied: %v\n", context.Config.RAAPlugin, runError) + progressReporter.Warn(fmt.Sprintf("WARNING: raa %q not applied: %v\n", raaPlugin, runError)) return "" } From 6f5de2bc9a18de54b8f0d36af3df90016b8491cd Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Fri, 5 Jan 2024 13:09:36 +0000 Subject: [PATCH 37/68] Refactor macros --- internal/threagile/context.go | 344 +----------------- internal/threagile/macros.go | 22 +- .../add-build-pipeline-macro.go | 273 +++++++------- .../add-vault => }/add-vault-macro.go | 128 ++++--- pkg/macros/built-in/built-in.go | 22 -- .../pretty-print/pretty-print-macro.go | 34 -- pkg/macros/macros.go | 295 ++++++++++++++- pkg/macros/pretty-print-macro.go | 41 +++ .../remove-unused-tags-macro.go | 28 +- .../seed-risk-tracking-macro.go | 26 +- .../seed-tags => }/seed-tags-macro.go | 28 +- 11 files changed, 616 insertions(+), 625 deletions(-) rename pkg/macros/{built-in/add-build-pipeline => }/add-build-pipeline-macro.go (80%) rename pkg/macros/{built-in/add-vault => }/add-vault-macro.go (80%) delete mode 100644 pkg/macros/built-in/built-in.go delete mode 100644 pkg/macros/built-in/pretty-print/pretty-print-macro.go create mode 100644 pkg/macros/pretty-print-macro.go rename pkg/macros/{built-in/remove-unused-tags => }/remove-unused-tags-macro.go (65%) rename pkg/macros/{built-in/seed-risk-tracking => }/seed-risk-tracking-macro.go (61%) rename pkg/macros/{built-in/seed-tags => }/seed-tags-macro.go (56%) diff --git a/internal/threagile/context.go b/internal/threagile/context.go index e3152042..7b0ab7b9 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -1,7 +1,6 @@ package threagile import ( - 
"bufio" "crypto/sha256" "encoding/hex" "flag" @@ -19,15 +18,6 @@ import ( "github.com/threagile/threagile/pkg/common" - addbuildpipeline "github.com/threagile/threagile/pkg/macros/built-in/add-build-pipeline" - addvault "github.com/threagile/threagile/pkg/macros/built-in/add-vault" - prettyprint "github.com/threagile/threagile/pkg/macros/built-in/pretty-print" - removeunusedtags "github.com/threagile/threagile/pkg/macros/built-in/remove-unused-tags" - seedrisktracking "github.com/threagile/threagile/pkg/macros/built-in/seed-risk-tracking" - seedtags "github.com/threagile/threagile/pkg/macros/built-in/seed-tags" - - "gopkg.in/yaml.v3" - "github.com/threagile/threagile/pkg/docs" "github.com/threagile/threagile/pkg/input" "github.com/threagile/threagile/pkg/macros" @@ -102,18 +92,18 @@ func (context *Context) DoIt() { progressReporter.Info("Writing into output directory:", context.Config.OutputFolder) progressReporter.Info("Parsing model:", context.Config.InputFile) - modelInput := *new(input.ModelInput).Defaults() - loadError := modelInput.Load(context.Config.InputFile) - if loadError != nil { - log.Fatal("Unable to load model yaml: ", loadError) - } - builtinRiskRules := make(map[string]types.RiskRule) for _, rule := range risks.GetBuiltInRiskRules() { builtinRiskRules[rule.Category().Id] = rule } customRiskRules := types.LoadCustomRiskRules(context.Config.RiskRulesPlugins, progressReporter) + modelInput := *new(input.ModelInput).Defaults() + loadError := modelInput.Load(context.Config.InputFile) + if loadError != nil { + log.Fatal("Unable to load model yaml: ", loadError) + } + parsedModel, parseError := model.ParseModel(&modelInput, builtinRiskRules, customRiskRules) if parseError != nil { log.Fatal("Unable to parse model yaml: ", parseError) @@ -135,289 +125,11 @@ func (context *Context) DoIt() { } if len(context.Config.ExecuteModelMacro) > 0 { - var macroDetails macros.MacroDetails - switch context.Config.ExecuteModelMacro { - case 
addbuildpipeline.GetMacroDetails().ID: - macroDetails = addbuildpipeline.GetMacroDetails() - case addvault.GetMacroDetails().ID: - macroDetails = addvault.GetMacroDetails() - case prettyprint.GetMacroDetails().ID: - macroDetails = prettyprint.GetMacroDetails() - case removeunusedtags.GetMacroDetails().ID: - macroDetails = removeunusedtags.GetMacroDetails() - case seedrisktracking.GetMacroDetails().ID: - macroDetails = seedrisktracking.GetMacroDetails() - case seedtags.GetMacroDetails().ID: - macroDetails = seedtags.GetMacroDetails() - default: - log.Fatal("Unknown model macro: ", context.Config.ExecuteModelMacro) - } - fmt.Println("Executing model macro:", macroDetails.ID) - fmt.Println() - fmt.Println() - context.printBorder(len(macroDetails.Title), true) - fmt.Println(macroDetails.Title) - context.printBorder(len(macroDetails.Title), true) - if len(macroDetails.Description) > 0 { - fmt.Println(macroDetails.Description) - } - fmt.Println() - reader := bufio.NewReader(os.Stdin) - var err error - var nextQuestion macros.MacroQuestion - for { - switch macroDetails.ID { - case addbuildpipeline.GetMacroDetails().ID: - nextQuestion, err = addbuildpipeline.GetNextQuestion(parsedModel) - case addvault.GetMacroDetails().ID: - nextQuestion, err = addvault.GetNextQuestion(parsedModel) - case prettyprint.GetMacroDetails().ID: - nextQuestion, err = prettyprint.GetNextQuestion() - case removeunusedtags.GetMacroDetails().ID: - nextQuestion, err = removeunusedtags.GetNextQuestion() - case seedrisktracking.GetMacroDetails().ID: - nextQuestion, err = seedrisktracking.GetNextQuestion() - case seedtags.GetMacroDetails().ID: - nextQuestion, err = seedtags.GetNextQuestion() - } - checkErr(err) - if nextQuestion.NoMoreQuestions() { - break - } - fmt.Println() - context.printBorder(len(nextQuestion.Title), false) - fmt.Println(nextQuestion.Title) - context.printBorder(len(nextQuestion.Title), false) - if len(nextQuestion.Description) > 0 { - fmt.Println(nextQuestion.Description) - } - 
resultingMultiValueSelection := make([]string, 0) - if nextQuestion.IsValueConstrained() { - if nextQuestion.MultiSelect { - selectedValues := make(map[string]bool) - for { - fmt.Println("Please select (multiple executions possible) from the following values (use number to select/deselect):") - fmt.Println(" 0:", "SELECTION PROCESS FINISHED: CONTINUE TO NEXT QUESTION") - for i, val := range nextQuestion.PossibleAnswers { - number := i + 1 - padding, selected := "", " " - if number < 10 { - padding = " " - } - if val, exists := selectedValues[val]; exists && val { - selected = "*" - } - fmt.Println(" "+selected+" "+padding+strconv.Itoa(number)+":", val) - } - fmt.Println() - fmt.Print("Enter number to select/deselect (or 0 when finished): ") - answer, err := reader.ReadString('\n') - // convert CRLF to LF - answer = strings.TrimSpace(strings.Replace(answer, "\n", "", -1)) - checkErr(err) - if val, err := strconv.Atoi(answer); err == nil { // flip selection - if val == 0 { - for key, selected := range selectedValues { - if selected { - resultingMultiValueSelection = append(resultingMultiValueSelection, key) - } - } - break - } else if val > 0 && val <= len(nextQuestion.PossibleAnswers) { - selectedValues[nextQuestion.PossibleAnswers[val-1]] = !selectedValues[nextQuestion.PossibleAnswers[val-1]] - } - } - } - } else { - fmt.Println("Please choose from the following values (enter value directly or use number):") - for i, val := range nextQuestion.PossibleAnswers { - number := i + 1 - padding := "" - if number < 10 { - padding = " " - } - fmt.Println(" "+padding+strconv.Itoa(number)+":", val) - } - } - } - message := "" - validResult := true - if !nextQuestion.IsValueConstrained() || !nextQuestion.MultiSelect { - fmt.Println() - fmt.Println("Enter your answer (use 'BACK' to go one step back or 'QUIT' to quit without executing the model macro)") - fmt.Print("Answer") - if len(nextQuestion.DefaultAnswer) > 0 { - fmt.Print(" (default '" + nextQuestion.DefaultAnswer + "')") 
- } - fmt.Print(": ") - answer, err := reader.ReadString('\n') - // convert CRLF to LF - answer = strings.TrimSpace(strings.Replace(answer, "\n", "", -1)) - checkErr(err) - if len(answer) == 0 && len(nextQuestion.DefaultAnswer) > 0 { // accepting the default - answer = nextQuestion.DefaultAnswer - } else if nextQuestion.IsValueConstrained() { // convert number to value - if val, err := strconv.Atoi(answer); err == nil { - if val > 0 && val <= len(nextQuestion.PossibleAnswers) { - answer = nextQuestion.PossibleAnswers[val-1] - } - } - } - if strings.ToLower(answer) == "quit" { - fmt.Println("Quitting without executing the model macro") - return - } else if strings.ToLower(answer) == "back" { - switch macroDetails.ID { - case addbuildpipeline.GetMacroDetails().ID: - message, validResult, _ = addbuildpipeline.GoBack() - case addvault.GetMacroDetails().ID: - message, validResult, _ = addvault.GoBack() - case prettyprint.GetMacroDetails().ID: - message, validResult, _ = prettyprint.GoBack() - case removeunusedtags.GetMacroDetails().ID: - message, validResult, _ = removeunusedtags.GoBack() - case seedrisktracking.GetMacroDetails().ID: - message, validResult, _ = seedrisktracking.GoBack() - case seedtags.GetMacroDetails().ID: - message, validResult, _ = seedtags.GoBack() - } - } else if len(answer) > 0 { // individual answer - if nextQuestion.IsValueConstrained() { - if !nextQuestion.IsMatchingValueConstraint(answer) { - fmt.Println() - fmt.Println(">>> INVALID <<<") - fmt.Println("Answer does not match any allowed value. 
Please try again:") - continue - } - } - switch macroDetails.ID { - case addbuildpipeline.GetMacroDetails().ID: - message, validResult, _ = addbuildpipeline.ApplyAnswer(nextQuestion.ID, answer) - case addvault.GetMacroDetails().ID: - message, validResult, _ = addvault.ApplyAnswer(nextQuestion.ID, answer) - case prettyprint.GetMacroDetails().ID: - message, validResult, _ = prettyprint.ApplyAnswer(nextQuestion.ID, answer) - case removeunusedtags.GetMacroDetails().ID: - message, validResult, _ = removeunusedtags.ApplyAnswer(nextQuestion.ID, answer) - case seedrisktracking.GetMacroDetails().ID: - message, validResult, _ = seedrisktracking.ApplyAnswer(nextQuestion.ID, answer) - case seedtags.GetMacroDetails().ID: - message, validResult, _ = seedtags.ApplyAnswer(nextQuestion.ID, answer) - } - } - } else { - switch macroDetails.ID { - case addbuildpipeline.GetMacroDetails().ID: - message, validResult, err = addbuildpipeline.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case addvault.GetMacroDetails().ID: - message, validResult, err = addvault.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case prettyprint.GetMacroDetails().ID: - message, validResult, err = prettyprint.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case removeunusedtags.GetMacroDetails().ID: - message, validResult, err = removeunusedtags.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case seedrisktracking.GetMacroDetails().ID: - message, validResult, err = seedrisktracking.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) - case seedtags.GetMacroDetails().ID: - message, validResult, err = seedtags.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) 
- } - } - checkErr(err) - if !validResult { - fmt.Println() - fmt.Println(">>> INVALID <<<") - } - fmt.Println(message) - fmt.Println() - } - for { - fmt.Println() - fmt.Println() - fmt.Println("#################################################################") - fmt.Println("Do you want to execute the model macro (updating the model file)?") - fmt.Println("#################################################################") - fmt.Println() - fmt.Println("The following changes will be applied:") - var changes []string - message := "" - validResult := true - var err error - switch macroDetails.ID { - case addbuildpipeline.GetMacroDetails().ID: - changes, message, validResult, err = addbuildpipeline.GetFinalChangeImpact(&modelInput, parsedModel) - case addvault.GetMacroDetails().ID: - changes, message, validResult, err = addvault.GetFinalChangeImpact(&modelInput, parsedModel) - case prettyprint.GetMacroDetails().ID: - changes, message, validResult, err = prettyprint.GetFinalChangeImpact(&modelInput) - case removeunusedtags.GetMacroDetails().ID: - changes, message, validResult, err = removeunusedtags.GetFinalChangeImpact(&modelInput) - case seedrisktracking.GetMacroDetails().ID: - changes, message, validResult, err = seedrisktracking.GetFinalChangeImpact(&modelInput) - case seedtags.GetMacroDetails().ID: - changes, message, validResult, err = seedtags.GetFinalChangeImpact(&modelInput) - } - checkErr(err) - for _, change := range changes { - fmt.Println(" -", change) - } - if !validResult { - fmt.Println() - fmt.Println(">>> INVALID <<<") - } - fmt.Println() - fmt.Println(message) - fmt.Println() - fmt.Print("Apply these changes to the model file?\nType Yes or No: ") - answer, err := reader.ReadString('\n') - // convert CRLF to LF - answer = strings.TrimSpace(strings.Replace(answer, "\n", "", -1)) - checkErr(err) - answer = strings.ToLower(answer) - fmt.Println() - if answer == "yes" || answer == "y" { - message := "" - validResult := true - var err error - switch 
macroDetails.ID { - case addbuildpipeline.GetMacroDetails().ID: - message, validResult, err = addbuildpipeline.Execute(&modelInput, parsedModel) - case addvault.GetMacroDetails().ID: - message, validResult, err = addvault.Execute(&modelInput, parsedModel) - case prettyprint.GetMacroDetails().ID: - message, validResult, err = prettyprint.Execute(&modelInput) - case removeunusedtags.GetMacroDetails().ID: - message, validResult, err = removeunusedtags.Execute(&modelInput, parsedModel) - case seedrisktracking.GetMacroDetails().ID: - message, validResult, err = seedrisktracking.Execute(parsedModel, &modelInput) - case seedtags.GetMacroDetails().ID: - message, validResult, err = seedtags.Execute(&modelInput, parsedModel) - } - checkErr(err) - if !validResult { - fmt.Println() - fmt.Println(">>> INVALID <<<") - } - fmt.Println(message) - fmt.Println() - backupFilename := context.Config.InputFile + ".backup" - fmt.Println("Creating backup model file:", backupFilename) // TODO add random files in /dev/shm space? 
- _, err = copyFile(context.Config.InputFile, backupFilename) - checkErr(err) - fmt.Println("Updating model") - yamlBytes, err := yaml.Marshal(modelInput) - checkErr(err) - /* - yamlBytes = model.ReformatYAML(yamlBytes) - */ - fmt.Println("Writing model file:", context.Config.InputFile) - err = os.WriteFile(context.Config.InputFile, yamlBytes, 0400) - checkErr(err) - fmt.Println("Model file successfully updated") - return - } else if answer == "no" || answer == "n" { - fmt.Println("Quitting without executing the model macro") - return - } + err := macros.ExecuteModelMacro(&modelInput, context.Config.InputFile, parsedModel, context.Config.ExecuteModelMacro) + if err != nil { + log.Fatal("Unable to execute model macro: ", err) } + return } if context.GenerateCommands.ReportPDF { // as the PDF report includes both diagrams @@ -534,42 +246,6 @@ func (context *Context) DoIt() { } } -func copyFile(src, dst string) (int64, error) { - sourceFileStat, err := os.Stat(src) - if err != nil { - return 0, err - } - - if !sourceFileStat.Mode().IsRegular() { - return 0, fmt.Errorf("%s is not a regular file", src) - } - - source, err := os.Open(src) - if err != nil { - return 0, err - } - defer func() { _ = source.Close() }() - - destination, err := os.Create(dst) - if err != nil { - return 0, err - } - defer func() { _ = destination.Close() }() - nBytes, err := io.Copy(destination, source) - return nBytes, err -} - -func (context *Context) printBorder(length int, bold bool) { - char := "-" - if bold { - char = "=" - } - for i := 1; i <= length; i++ { - fmt.Print(char) - } - fmt.Println() -} - func applyRAA(parsedModel *types.ParsedModel, binFolder, raaPlugin string, progressReporter common.DefaultProgressReporter) string { progressReporter.Info("Applying RAA calculation:", raaPlugin) diff --git a/internal/threagile/macros.go b/internal/threagile/macros.go index c4e6494f..63b8f930 100644 --- a/internal/threagile/macros.go +++ b/internal/threagile/macros.go @@ -7,7 +7,7 @@ import ( 
"github.com/spf13/cobra" "github.com/threagile/threagile/pkg/docs" - builinmacros "github.com/threagile/threagile/pkg/macros/built-in" + "github.com/threagile/threagile/pkg/macros" ) var listMacrosCmd = &cobra.Command{ @@ -19,16 +19,18 @@ var listMacrosCmd = &cobra.Command{ cmd.Println() /* TODO finish plugin stuff cmd.Println("Custom model macros:") - for id, customModelMacro := range macros.ListCustomMacros() { - cmd.Println(id, "-->", customModelMacro.GetMacroDetails().Title) + for _, macros := range macros.ListCustomMacros() { + details := macros.GetMacroDetails() + cmd.Println(details.ID, "-->", details.Title) } cmd.Println() */ cmd.Println("----------------------") cmd.Println("Built-in model macros:") cmd.Println("----------------------") - for _, macros := range builinmacros.ListBuiltInMacros() { - cmd.Println(macros.ID, "-->", macros.Title) + for _, macros := range macros.ListBuiltInMacros() { + details := macros.GetMacroDetails() + cmd.Println(details.ID, "-->", details.Title) } cmd.Println() }, @@ -43,16 +45,18 @@ var explainMacrosCmd = &cobra.Command{ cmd.Println() /* TODO finish plugin stuff cmd.Println("Custom model macros:") - for id, customModelMacro := range macros.ListCustomMacros() { - cmd.Printf("%v: %v\n", macros.ID, macros.Title) + for _, macros := range macros.ListCustomMacros() { + details := macros.GetMacroDetails() + cmd.Println(details.ID, "-->", details.Title) } cmd.Println() */ cmd.Println("----------------------") cmd.Println("Built-in model macros:") cmd.Println("----------------------") - for _, macros := range builinmacros.ListBuiltInMacros() { - cmd.Printf("%v: %v\n", macros.ID, macros.Title) + for _, macros := range macros.ListBuiltInMacros() { + details := macros.GetMacroDetails() + cmd.Printf("%v: %v\n", details.ID, details.Title) } cmd.Println() diff --git a/pkg/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go b/pkg/macros/add-build-pipeline-macro.go similarity index 80% rename from 
pkg/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go rename to pkg/macros/add-build-pipeline-macro.go index 41fbb0fa..e6500be1 100644 --- a/pkg/macros/built-in/add-build-pipeline/add-build-pipeline-macro.go +++ b/pkg/macros/add-build-pipeline-macro.go @@ -1,4 +1,4 @@ -package add_build_pipeline +package macros import ( "fmt" @@ -6,49 +6,58 @@ import ( "strings" "github.com/threagile/threagile/pkg/input" - "github.com/threagile/threagile/pkg/macros" "github.com/threagile/threagile/pkg/security/types" ) -func GetMacroDetails() macros.MacroDetails { - return macros.MacroDetails{ - ID: "add-build-pipeline", - Title: "Add Build Pipeline", - Description: "This model macro adds a build pipeline (development client, build pipeline, artifact registry, container image registry, " + - "source code repository, etc.) to the model.", - } +type addBuildPipeline struct { + macroState map[string][]string + questionsAnswered []string + codeInspectionUsed bool + containerTechUsed bool + withinTrustBoundary bool + createNewTrustBoundary bool } -var macroState = make(map[string][]string) -var questionsAnswered = make([]string, 0) -var codeInspectionUsed, containerTechUsed, withinTrustBoundary, createNewTrustBoundary bool - -const createNewTrustBoundaryLabel = "CREATE NEW TRUST BOUNDARY" +func NewBuildPipeline() Macros { + return &addBuildPipeline{ + macroState: make(map[string][]string), + questionsAnswered: make([]string, 0), + } +} var pushOrPull = []string{ "Push-based Deployment (build pipeline deploys towards target asset)", "Pull-based Deployment (deployment target asset fetches deployment from registry)", } +func (m *addBuildPipeline) GetMacroDetails() MacroDetails { + return MacroDetails{ + ID: "add-build-pipeline", + Title: "Add Build Pipeline", + Description: "This model macro adds a build pipeline (development client, build pipeline, artifact registry, container image registry, " + + "source code repository, etc.) 
to the model.", + } +} + // TODO add question for type of machine (either physical, virtual, container, etc.) -func GetNextQuestion(model *types.ParsedModel) (nextQuestion macros.MacroQuestion, err error) { - counter := len(questionsAnswered) - if counter > 3 && !codeInspectionUsed { +func (m *addBuildPipeline) GetNextQuestion(model *types.ParsedModel) (nextQuestion MacroQuestion, err error) { + counter := len(m.questionsAnswered) + if counter > 3 && !m.codeInspectionUsed { counter++ } - if counter > 5 && !containerTechUsed { + if counter > 5 && !m.containerTechUsed { counter += 2 } - if counter > 12 && !withinTrustBoundary { + if counter > 12 && !m.withinTrustBoundary { counter++ } - if counter > 13 && !createNewTrustBoundary { + if counter > 13 && !m.createNewTrustBoundary { counter++ } switch counter { case 0: - return macros.MacroQuestion{ + return MacroQuestion{ ID: "source-repository", Title: "What product is used as the sourcecode repository?", Description: "This name affects the technical asset's title and ID plus also the tags used.", @@ -57,7 +66,7 @@ func GetNextQuestion(model *types.ParsedModel) (nextQuestion macros.MacroQuestio DefaultAnswer: "Git", }, nil case 1: - return macros.MacroQuestion{ + return MacroQuestion{ ID: "build-pipeline", Title: "What product is used as the build pipeline?", Description: "This name affects the technical asset's title and ID plus also the tags used.", @@ -66,7 +75,7 @@ func GetNextQuestion(model *types.ParsedModel) (nextQuestion macros.MacroQuestio DefaultAnswer: "Jenkins", }, nil case 2: - return macros.MacroQuestion{ + return MacroQuestion{ ID: "artifact-registry", Title: "What product is used as the artifact registry?", Description: "This name affects the technical asset's title and ID plus also the tags used.", @@ -75,7 +84,7 @@ func GetNextQuestion(model *types.ParsedModel) (nextQuestion macros.MacroQuestio DefaultAnswer: "Nexus", }, nil case 3: - return macros.MacroQuestion{ + return MacroQuestion{ ID: 
"code-inspection-used", Title: "Are code inspection platforms (like SonarQube) used?", Description: "This affects whether code inspection platform are added.", @@ -84,7 +93,7 @@ func GetNextQuestion(model *types.ParsedModel) (nextQuestion macros.MacroQuestio DefaultAnswer: "Yes", }, nil case 4: - return macros.MacroQuestion{ + return MacroQuestion{ ID: "code-inspection-platform", Title: "What product is used as the code inspection platform?", Description: "This name affects the technical asset's title and ID plus also the tags used.", @@ -93,7 +102,7 @@ func GetNextQuestion(model *types.ParsedModel) (nextQuestion macros.MacroQuestio DefaultAnswer: "SonarQube", }, nil case 5: - return macros.MacroQuestion{ + return MacroQuestion{ ID: "container-technology-used", Title: "Is container technology (like Docker) used?", Description: "This affects whether container registries are added.", @@ -102,7 +111,7 @@ func GetNextQuestion(model *types.ParsedModel) (nextQuestion macros.MacroQuestio DefaultAnswer: "Yes", }, nil case 6: - return macros.MacroQuestion{ + return MacroQuestion{ ID: "container-registry", Title: "What product is used as the container registry?", Description: "This name affects the technical asset's title and ID plus also the tags used.", @@ -111,7 +120,7 @@ func GetNextQuestion(model *types.ParsedModel) (nextQuestion macros.MacroQuestio DefaultAnswer: "Docker", }, nil case 7: - return macros.MacroQuestion{ + return MacroQuestion{ ID: "container-platform", Title: "What product is used as the container platform (for orchestration and runtime)?", Description: "This name affects the technical asset's title and ID plus also the tags used.", @@ -120,7 +129,7 @@ func GetNextQuestion(model *types.ParsedModel) (nextQuestion macros.MacroQuestio DefaultAnswer: "Kubernetes", }, nil case 8: - return macros.MacroQuestion{ + return MacroQuestion{ ID: "internet", Title: "Are build pipeline components exposed on the internet?", Description: "", @@ -129,7 +138,7 @@ func 
GetNextQuestion(model *types.ParsedModel) (nextQuestion macros.MacroQuestio DefaultAnswer: "No", }, nil case 9: - return macros.MacroQuestion{ + return MacroQuestion{ ID: "multi-tenant", Title: "Are build pipeline components used by multiple tenants?", Description: "", @@ -138,7 +147,7 @@ func GetNextQuestion(model *types.ParsedModel) (nextQuestion macros.MacroQuestio DefaultAnswer: "No", }, nil case 10: - return macros.MacroQuestion{ + return MacroQuestion{ ID: "encryption", Title: "Are build pipeline components encrypted?", Description: "", @@ -153,7 +162,7 @@ func GetNextQuestion(model *types.ParsedModel) (nextQuestion macros.MacroQuestio } sort.Strings(possibleAnswers) if len(possibleAnswers) > 0 { - return macros.MacroQuestion{ + return MacroQuestion{ ID: "deploy-targets", Title: "Select all technical assets where the build pipeline deploys to:", Description: "This affects the communication links being generated.", @@ -163,7 +172,7 @@ func GetNextQuestion(model *types.ParsedModel) (nextQuestion macros.MacroQuestio }, nil } case 12: - return macros.MacroQuestion{ + return MacroQuestion{ ID: "within-trust-boundary", Title: "Are the server-side components of the build pipeline components within a network trust boundary?", Description: "", @@ -179,7 +188,7 @@ func GetNextQuestion(model *types.ParsedModel) (nextQuestion macros.MacroQuestio } } sort.Strings(possibleAnswers) - return macros.MacroQuestion{ + return MacroQuestion{ ID: "selected-trust-boundary", Title: "Choose from the list of existing network trust boundaries or create a new one?", Description: "", @@ -188,7 +197,7 @@ func GetNextQuestion(model *types.ParsedModel) (nextQuestion macros.MacroQuestio DefaultAnswer: "", }, nil case 14: - return macros.MacroQuestion{ + return MacroQuestion{ ID: "new-trust-boundary-type", Title: "Of which type shall the new trust boundary be?", Description: "", @@ -202,7 +211,7 @@ func GetNextQuestion(model *types.ParsedModel) (nextQuestion macros.MacroQuestio DefaultAnswer: 
types.NetworkOnPrem.String(), }, nil case 15: - return macros.MacroQuestion{ + return MacroQuestion{ ID: "push-or-pull", Title: "What type of deployment strategy is used?", Description: "Push-based deployments are more classic ones and pull-based are more GitOps-like ones.", @@ -211,7 +220,7 @@ func GetNextQuestion(model *types.ParsedModel) (nextQuestion macros.MacroQuestio DefaultAnswer: "", }, nil case 16: - return macros.MacroQuestion{ + return MacroQuestion{ ID: "owner", Title: "Who is the owner of the build pipeline and runtime assets?", Description: "This name affects the technical asset's and data asset's owner.", @@ -220,74 +229,74 @@ func GetNextQuestion(model *types.ParsedModel) (nextQuestion macros.MacroQuestio DefaultAnswer: "", }, nil } - return macros.NoMoreQuestions(), nil + return NoMoreQuestions(), nil } -func ApplyAnswer(questionID string, answer ...string) (message string, validResult bool, err error) { - macroState[questionID] = answer - questionsAnswered = append(questionsAnswered, questionID) +func (m *addBuildPipeline) ApplyAnswer(questionID string, answer ...string) (message string, validResult bool, err error) { + m.macroState[questionID] = answer + m.questionsAnswered = append(m.questionsAnswered, questionID) if questionID == "code-inspection-used" { - codeInspectionUsed = strings.ToLower(macroState["code-inspection-used"][0]) == "yes" + m.codeInspectionUsed = strings.ToLower(m.macroState["code-inspection-used"][0]) == "yes" } else if questionID == "container-technology-used" { - containerTechUsed = strings.ToLower(macroState["container-technology-used"][0]) == "yes" + m.containerTechUsed = strings.ToLower(m.macroState["container-technology-used"][0]) == "yes" } else if questionID == "within-trust-boundary" { - withinTrustBoundary = strings.ToLower(macroState["within-trust-boundary"][0]) == "yes" + m.withinTrustBoundary = strings.ToLower(m.macroState["within-trust-boundary"][0]) == "yes" } else if questionID == "selected-trust-boundary" { 
- createNewTrustBoundary = strings.ToLower(macroState["selected-trust-boundary"][0]) == strings.ToLower(createNewTrustBoundaryLabel) + m.createNewTrustBoundary = strings.ToLower(m.macroState["selected-trust-boundary"][0]) == strings.ToLower(createNewTrustBoundaryLabel) } return "Answer processed", true, nil } -func GoBack() (message string, validResult bool, err error) { - if len(questionsAnswered) == 0 { +func (m *addBuildPipeline) GoBack() (message string, validResult bool, err error) { + if len(m.questionsAnswered) == 0 { return "Cannot go back further", false, nil } - lastQuestionID := questionsAnswered[len(questionsAnswered)-1] - questionsAnswered = questionsAnswered[:len(questionsAnswered)-1] - delete(macroState, lastQuestionID) + lastQuestionID := m.questionsAnswered[len(m.questionsAnswered)-1] + m.questionsAnswered = m.questionsAnswered[:len(m.questionsAnswered)-1] + delete(m.macroState, lastQuestionID) return "Undo successful", true, nil } -func GetFinalChangeImpact(modelInput *input.ModelInput, model *types.ParsedModel) (changes []string, message string, validResult bool, err error) { +func (m *addBuildPipeline) GetFinalChangeImpact(modelInput *input.ModelInput, model *types.ParsedModel) (changes []string, message string, validResult bool, err error) { changeLogCollector := make([]string, 0) - message, validResult, err = applyChange(modelInput, model, &changeLogCollector, true) + message, validResult, err = m.applyChange(modelInput, model, &changeLogCollector, true) return changeLogCollector, message, validResult, err } -func Execute(modelInput *input.ModelInput, model *types.ParsedModel) (message string, validResult bool, err error) { +func (m *addBuildPipeline) Execute(modelInput *input.ModelInput, model *types.ParsedModel) (message string, validResult bool, err error) { changeLogCollector := make([]string, 0) - message, validResult, err = applyChange(modelInput, model, &changeLogCollector, false) + message, validResult, err = m.applyChange(modelInput, 
model, &changeLogCollector, false) return message, validResult, err } -func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, changeLogCollector *[]string, dryRun bool) (message string, validResult bool, err error) { +func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, changeLogCollector *[]string, dryRun bool) (message string, validResult bool, err error) { var serverSideTechAssets = make([]string, 0) // ################################################ - input.AddTagToModelInput(modelInput, macroState["source-repository"][0], dryRun, changeLogCollector) - input.AddTagToModelInput(modelInput, macroState["build-pipeline"][0], dryRun, changeLogCollector) - input.AddTagToModelInput(modelInput, macroState["artifact-registry"][0], dryRun, changeLogCollector) - if containerTechUsed { - input.AddTagToModelInput(modelInput, macroState["container-registry"][0], dryRun, changeLogCollector) - input.AddTagToModelInput(modelInput, macroState["container-platform"][0], dryRun, changeLogCollector) + input.AddTagToModelInput(modelInput, m.macroState["source-repository"][0], dryRun, changeLogCollector) + input.AddTagToModelInput(modelInput, m.macroState["build-pipeline"][0], dryRun, changeLogCollector) + input.AddTagToModelInput(modelInput, m.macroState["artifact-registry"][0], dryRun, changeLogCollector) + if m.containerTechUsed { + input.AddTagToModelInput(modelInput, m.macroState["container-registry"][0], dryRun, changeLogCollector) + input.AddTagToModelInput(modelInput, m.macroState["container-platform"][0], dryRun, changeLogCollector) } - if codeInspectionUsed { - input.AddTagToModelInput(modelInput, macroState["code-inspection-platform"][0], dryRun, changeLogCollector) + if m.codeInspectionUsed { + input.AddTagToModelInput(modelInput, m.macroState["code-inspection-platform"][0], dryRun, changeLogCollector) } - sourceRepoID := types.MakeID(macroState["source-repository"][0]) + "-sourcecode-repository" - 
buildPipelineID := types.MakeID(macroState["build-pipeline"][0]) + "-build-pipeline" - artifactRegistryID := types.MakeID(macroState["artifact-registry"][0]) + "-artifact-registry" + sourceRepoID := types.MakeID(m.macroState["source-repository"][0]) + "-sourcecode-repository" + buildPipelineID := types.MakeID(m.macroState["build-pipeline"][0]) + "-build-pipeline" + artifactRegistryID := types.MakeID(m.macroState["artifact-registry"][0]) + "-artifact-registry" containerRepoID, containerPlatformID, containerSharedRuntimeID := "", "", "" - if containerTechUsed { - containerRepoID = types.MakeID(macroState["container-registry"][0]) + "-container-registry" - containerPlatformID = types.MakeID(macroState["container-platform"][0]) + "-container-platform" - containerSharedRuntimeID = types.MakeID(macroState["container-platform"][0]) + "-container-runtime" + if m.containerTechUsed { + containerRepoID = types.MakeID(m.macroState["container-registry"][0]) + "-container-registry" + containerPlatformID = types.MakeID(m.macroState["container-platform"][0]) + "-container-platform" + containerSharedRuntimeID = types.MakeID(m.macroState["container-platform"][0]) + "-container-runtime" } codeInspectionPlatformID := "" - if codeInspectionUsed { - codeInspectionPlatformID = types.MakeID(macroState["code-inspection-platform"][0]) + "-code-inspection-platform" + if m.codeInspectionUsed { + codeInspectionPlatformID = types.MakeID(m.macroState["code-inspection-platform"][0]) + "-code-inspection-platform" } - owner := macroState["owner"][0] + owner := m.macroState["owner"][0] if _, exists := parsedModel.DataAssets["Sourcecode"]; !exists { //fmt.Println("Adding data asset:", "sourcecode") // ################################################ @@ -337,7 +346,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c if _, exists := parsedModel.TechnicalAssets[id]; !exists { //fmt.Println("Adding technical asset:", id) // 
################################################ encryption := types.NoneEncryption.String() - if strings.ToLower(macroState["encryption"][0]) == "yes" { + if strings.ToLower(m.macroState["encryption"][0]) == "yes" { encryption = types.Transparent.String() } @@ -390,7 +399,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c DiagramTweakWeight: 0, DiagramTweakConstraint: false, } - if containerTechUsed { + if m.containerTechUsed { commLinks["Container Registry Traffic"] = input.InputCommunicationLink{ Target: containerRepoID, Description: "Container Registry Traffic", @@ -424,7 +433,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c DiagramTweakConstraint: false, } } - if codeInspectionUsed { + if m.codeInspectionUsed { commLinks["Code Inspection Platform Traffic"] = input.InputCommunicationLink{ Target: codeInspectionPlatformID, Description: "Code Inspection Platform Traffic", @@ -454,7 +463,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c Size: types.System.String(), Technology: types.DevOpsClient.String(), Tags: []string{}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Internet: strings.ToLower(m.macroState["internet"][0]) == "yes", Machine: types.Physical.String(), Encryption: encryption, Owner: owner, @@ -482,12 +491,12 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c //fmt.Println("Adding technical asset:", id) // ################################################ serverSideTechAssets = append(serverSideTechAssets, id) encryption := types.NoneEncryption.String() - if strings.ToLower(macroState["encryption"][0]) == "yes" { + if strings.ToLower(m.macroState["encryption"][0]) == "yes" { encryption = types.Transparent.String() } techAsset := input.InputTechnicalAsset{ ID: id, - Description: macroState["source-repository"][0] + " Sourcecode Repository", + Description: m.macroState["source-repository"][0] + " 
Sourcecode Repository", Type: types.Process.String(), Usage: types.DevOps.String(), UsedAsClientByHuman: false, @@ -495,8 +504,8 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c JustificationOutOfScope: "", Size: types.Service.String(), Technology: types.SourcecodeRepository.String(), - Tags: []string{input.NormalizeTag(macroState["source-repository"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Tags: []string{input.NormalizeTag(m.macroState["source-repository"][0])}, + Internet: strings.ToLower(m.macroState["internet"][0]) == "yes", Machine: types.Virtual.String(), Encryption: encryption, Owner: owner, @@ -505,7 +514,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c Availability: types.Important.String(), JustificationCiaRating: "Sourcecode processing components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", - MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + MultiTenant: strings.ToLower(m.macroState["multi-tenant"][0]) == "yes", Redundant: false, CustomDevelopedParts: false, DataAssetsProcessed: []string{"sourcecode"}, @@ -515,22 +524,22 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.TechnicalAssets[macroState["source-repository"][0]+" Sourcecode Repository"] = techAsset + modelInput.TechnicalAssets[m.macroState["source-repository"][0]+" Sourcecode Repository"] = techAsset } } - if containerTechUsed { + if m.containerTechUsed { id = containerRepoID if _, exists := parsedModel.TechnicalAssets[id]; !exists { //fmt.Println("Adding technical asset:", id) // ################################################ serverSideTechAssets = append(serverSideTechAssets, id) encryption 
:= types.NoneEncryption.String() - if strings.ToLower(macroState["encryption"][0]) == "yes" { + if strings.ToLower(m.macroState["encryption"][0]) == "yes" { encryption = types.Transparent.String() } techAsset := input.InputTechnicalAsset{ ID: id, - Description: macroState["container-registry"][0] + " Container Registry", + Description: m.macroState["container-registry"][0] + " Container Registry", Type: types.Process.String(), Usage: types.DevOps.String(), UsedAsClientByHuman: false, @@ -538,8 +547,8 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c JustificationOutOfScope: "", Size: types.Service.String(), Technology: types.ArtifactRegistry.String(), - Tags: []string{input.NormalizeTag(macroState["container-registry"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Tags: []string{input.NormalizeTag(m.macroState["container-registry"][0])}, + Internet: strings.ToLower(m.macroState["internet"][0]) == "yes", Machine: types.Virtual.String(), Encryption: encryption, Owner: owner, @@ -548,7 +557,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c Availability: types.Important.String(), JustificationCiaRating: "Container registry components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", - MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + MultiTenant: strings.ToLower(m.macroState["multi-tenant"][0]) == "yes", Redundant: false, CustomDevelopedParts: false, DataAssetsProcessed: []string{"deployment"}, @@ -558,7 +567,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.TechnicalAssets[macroState["container-registry"][0]+" Container Registry"] = techAsset + 
modelInput.TechnicalAssets[m.macroState["container-registry"][0]+" Container Registry"] = techAsset } } @@ -567,12 +576,12 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c //fmt.Println("Adding technical asset:", id) // ################################################ serverSideTechAssets = append(serverSideTechAssets, id) encryption := types.NoneEncryption.String() - if strings.ToLower(macroState["encryption"][0]) == "yes" { + if strings.ToLower(m.macroState["encryption"][0]) == "yes" { encryption = types.Transparent.String() } techAsset := input.InputTechnicalAsset{ ID: id, - Description: macroState["container-platform"][0] + " Container Platform", + Description: m.macroState["container-platform"][0] + " Container Platform", Type: types.Process.String(), Usage: types.DevOps.String(), UsedAsClientByHuman: false, @@ -580,8 +589,8 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c JustificationOutOfScope: "", Size: types.System.String(), Technology: types.ContainerPlatform.String(), - Tags: []string{input.NormalizeTag(macroState["container-platform"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Tags: []string{input.NormalizeTag(m.macroState["container-platform"][0])}, + Internet: strings.ToLower(m.macroState["internet"][0]) == "yes", Machine: types.Virtual.String(), Encryption: encryption, Owner: owner, @@ -590,7 +599,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c Availability: types.MissionCritical.String(), JustificationCiaRating: "Container platform components are rated as 'mission-critical' in terms of integrity and availability, because any " + "malicious modification of it might lead to a backdoored production system.", - MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + MultiTenant: strings.ToLower(m.macroState["multi-tenant"][0]) == "yes", Redundant: false, CustomDevelopedParts: false, 
DataAssetsProcessed: []string{"deployment"}, @@ -600,7 +609,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.TechnicalAssets[macroState["container-platform"][0]+" Container Platform"] = techAsset + modelInput.TechnicalAssets[m.macroState["container-platform"][0]+" Container Platform"] = techAsset } } } @@ -610,7 +619,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c //fmt.Println("Adding technical asset:", id) // ################################################ serverSideTechAssets = append(serverSideTechAssets, id) encryption := types.NoneEncryption.String() - if strings.ToLower(macroState["encryption"][0]) == "yes" { + if strings.ToLower(m.macroState["encryption"][0]) == "yes" { encryption = types.Transparent.String() } @@ -647,7 +656,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c DiagramTweakWeight: 0, DiagramTweakConstraint: false, } - if containerTechUsed { + if m.containerTechUsed { commLinks["Container Registry Traffic"] = input.InputCommunicationLink{ Target: containerRepoID, Description: "Container Registry Traffic", @@ -664,7 +673,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c DiagramTweakWeight: 0, DiagramTweakConstraint: false, } - if macroState["push-or-pull"][0] == pushOrPull[0] { // Push + if m.macroState["push-or-pull"][0] == pushOrPull[0] { // Push commLinks["Container Platform Push"] = input.InputCommunicationLink{ Target: containerPlatformID, Description: "Container Platform Push", @@ -699,7 +708,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c DiagramTweakConstraint: false, } if !dryRun { - titleOfTargetAsset := macroState["container-platform"][0] + " Container Platform" + titleOfTargetAsset := 
m.macroState["container-platform"][0] + " Container Platform" containerPlatform := modelInput.TechnicalAssets[titleOfTargetAsset] if containerPlatform.CommunicationLinks == nil { containerPlatform.CommunicationLinks = make(map[string]input.InputCommunicationLink) @@ -709,7 +718,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c } } } - if codeInspectionUsed { + if m.codeInspectionUsed { commLinks["Code Inspection Platform Traffic"] = input.InputCommunicationLink{ Target: codeInspectionPlatformID, Description: "Code Inspection Platform Traffic", @@ -728,11 +737,11 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c } } // The individual deployments - for _, deployTargetID := range macroState["deploy-targets"] { // add a connection to each deployment target + for _, deployTargetID := range m.macroState["deploy-targets"] { // add a connection to each deployment target //fmt.Println("Adding deployment flow to:", deployTargetID) - if containerTechUsed { + if m.containerTechUsed { if !dryRun { - containerPlatform := modelInput.TechnicalAssets[macroState["container-platform"][0]+" Container Platform"] + containerPlatform := modelInput.TechnicalAssets[m.macroState["container-platform"][0]+" Container Platform"] if containerPlatform.CommunicationLinks == nil { containerPlatform.CommunicationLinks = make(map[string]input.InputCommunicationLink) } @@ -752,10 +761,10 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c DiagramTweakWeight: 0, DiagramTweakConstraint: false, } - modelInput.TechnicalAssets[macroState["container-platform"][0]+" Container Platform"] = containerPlatform + modelInput.TechnicalAssets[m.macroState["container-platform"][0]+" Container Platform"] = containerPlatform } } else { // No Containers used - if macroState["push-or-pull"][0] == pushOrPull[0] { // Push + if m.macroState["push-or-pull"][0] == pushOrPull[0] { // Push commLinks["Deployment Push 
("+deployTargetID+")"] = input.InputCommunicationLink{ Target: deployTargetID, Description: "Deployment Push to " + deployTargetID, @@ -825,7 +834,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c techAsset := input.InputTechnicalAsset{ ID: id, - Description: macroState["build-pipeline"][0] + " Build Pipeline", + Description: m.macroState["build-pipeline"][0] + " Build Pipeline", Type: types.Process.String(), Usage: types.DevOps.String(), UsedAsClientByHuman: false, @@ -833,8 +842,8 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c JustificationOutOfScope: "", Size: types.Service.String(), Technology: types.BuildPipeline.String(), - Tags: []string{input.NormalizeTag(macroState["build-pipeline"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Tags: []string{input.NormalizeTag(m.macroState["build-pipeline"][0])}, + Internet: strings.ToLower(m.macroState["internet"][0]) == "yes", Machine: types.Virtual.String(), Encryption: encryption, Owner: owner, @@ -843,7 +852,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c Availability: types.Important.String(), JustificationCiaRating: "Build pipeline components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", - MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + MultiTenant: strings.ToLower(m.macroState["multi-tenant"][0]) == "yes", Redundant: false, CustomDevelopedParts: false, DataAssetsProcessed: []string{"sourcecode", "deployment"}, @@ -853,7 +862,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.TechnicalAssets[macroState["build-pipeline"][0]+" Build Pipeline"] = techAsset + 
modelInput.TechnicalAssets[m.macroState["build-pipeline"][0]+" Build Pipeline"] = techAsset } } @@ -862,12 +871,12 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c //fmt.Println("Adding technical asset:", id) // ################################################ serverSideTechAssets = append(serverSideTechAssets, id) encryption := types.NoneEncryption.String() - if strings.ToLower(macroState["encryption"][0]) == "yes" { + if strings.ToLower(m.macroState["encryption"][0]) == "yes" { encryption = types.Transparent.String() } techAsset := input.InputTechnicalAsset{ ID: id, - Description: macroState["artifact-registry"][0] + " Artifact Registry", + Description: m.macroState["artifact-registry"][0] + " Artifact Registry", Type: types.Process.String(), Usage: types.DevOps.String(), UsedAsClientByHuman: false, @@ -875,8 +884,8 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c JustificationOutOfScope: "", Size: types.Service.String(), Technology: types.ArtifactRegistry.String(), - Tags: []string{input.NormalizeTag(macroState["artifact-registry"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Tags: []string{input.NormalizeTag(m.macroState["artifact-registry"][0])}, + Internet: strings.ToLower(m.macroState["internet"][0]) == "yes", Machine: types.Virtual.String(), Encryption: encryption, Owner: owner, @@ -885,7 +894,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c Availability: types.Important.String(), JustificationCiaRating: "Artifact registry components are at least rated as 'critical' in terms of integrity, because any " + "malicious modification of it might lead to a backdoored production system.", - MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + MultiTenant: strings.ToLower(m.macroState["multi-tenant"][0]) == "yes", Redundant: false, CustomDevelopedParts: false, DataAssetsProcessed: []string{"sourcecode", 
"deployment"}, @@ -895,22 +904,22 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.TechnicalAssets[macroState["artifact-registry"][0]+" Artifact Registry"] = techAsset + modelInput.TechnicalAssets[m.macroState["artifact-registry"][0]+" Artifact Registry"] = techAsset } } - if codeInspectionUsed { + if m.codeInspectionUsed { id = codeInspectionPlatformID if _, exists := parsedModel.TechnicalAssets[id]; !exists { //fmt.Println("Adding technical asset:", id) // ################################################ serverSideTechAssets = append(serverSideTechAssets, id) encryption := types.NoneEncryption.String() - if strings.ToLower(macroState["encryption"][0]) == "yes" { + if strings.ToLower(m.macroState["encryption"][0]) == "yes" { encryption = types.Transparent.String() } techAsset := input.InputTechnicalAsset{ ID: id, - Description: macroState["code-inspection-platform"][0] + " Code Inspection Platform", + Description: m.macroState["code-inspection-platform"][0] + " Code Inspection Platform", Type: types.Process.String(), Usage: types.DevOps.String(), UsedAsClientByHuman: false, @@ -918,8 +927,8 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c JustificationOutOfScope: "", Size: types.Service.String(), Technology: types.CodeInspectionPlatform.String(), - Tags: []string{input.NormalizeTag(macroState["code-inspection-platform"][0])}, - Internet: strings.ToLower(macroState["internet"][0]) == "yes", + Tags: []string{input.NormalizeTag(m.macroState["code-inspection-platform"][0])}, + Internet: strings.ToLower(m.macroState["internet"][0]) == "yes", Machine: types.Virtual.String(), Encryption: encryption, Owner: owner, @@ -928,7 +937,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c Availability: types.Operational.String(), 
JustificationCiaRating: "Sourcecode inspection platforms are rated at least 'important' in terms of integrity, because any " + "malicious modification of it might lead to vulnerabilities found by the scanner engine not being shown.", - MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + MultiTenant: strings.ToLower(m.macroState["multi-tenant"][0]) == "yes", Redundant: false, CustomDevelopedParts: false, DataAssetsProcessed: []string{"sourcecode"}, @@ -938,14 +947,14 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+id) if !dryRun { - modelInput.TechnicalAssets[macroState["code-inspection-platform"][0]+" Code Inspection Platform"] = techAsset + modelInput.TechnicalAssets[m.macroState["code-inspection-platform"][0]+" Code Inspection Platform"] = techAsset } } } - if withinTrustBoundary { - if createNewTrustBoundary { - trustBoundaryType := macroState["new-trust-boundary-type"][0] + if m.withinTrustBoundary { + if m.createNewTrustBoundary { + trustBoundaryType := m.macroState["new-trust-boundary-type"][0] //fmt.Println("Adding new trust boundary of type:", trustBoundaryType) title := "DevOps Network" trustBoundary := input.InputTrustBoundary{ @@ -961,7 +970,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c modelInput.TrustBoundaries[title] = trustBoundary } } else { - existingTrustBoundaryToAddTo := macroState["selected-trust-boundary"][0] + existingTrustBoundaryToAddTo := m.macroState["selected-trust-boundary"][0] //fmt.Println("Adding to existing trust boundary:", existingTrustBoundaryToAddTo) title := parsedModel.TrustBoundaries[existingTrustBoundaryToAddTo].Title assetsInside := make([]string, 0) @@ -988,17 +997,17 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c } } - if containerTechUsed { + if m.containerTechUsed { // create 
shared runtime assetsRunning := make([]string, 0) - for _, deployTargetID := range macroState["deploy-targets"] { + for _, deployTargetID := range m.macroState["deploy-targets"] { assetsRunning = append(assetsRunning, deployTargetID) } - title := macroState["container-platform"][0] + " Runtime" + title := m.macroState["container-platform"][0] + " Runtime" sharedRuntime := input.InputSharedRuntime{ ID: containerSharedRuntimeID, Description: title, - Tags: []string{input.NormalizeTag(macroState["container-platform"][0])}, + Tags: []string{input.NormalizeTag(m.macroState["container-platform"][0])}, TechnicalAssetsRunning: assetsRunning, } *changeLogCollector = append(*changeLogCollector, "adding shared runtime: "+containerSharedRuntimeID) diff --git a/pkg/macros/built-in/add-vault/add-vault-macro.go b/pkg/macros/add-vault-macro.go similarity index 80% rename from pkg/macros/built-in/add-vault/add-vault-macro.go rename to pkg/macros/add-vault-macro.go index 70fc04a0..4fe72854 100644 --- a/pkg/macros/built-in/add-vault/add-vault-macro.go +++ b/pkg/macros/add-vault-macro.go @@ -1,4 +1,4 @@ -package add_vault +package macros import ( "fmt" @@ -6,22 +6,16 @@ import ( "strings" "github.com/threagile/threagile/pkg/input" - "github.com/threagile/threagile/pkg/macros" "github.com/threagile/threagile/pkg/security/types" ) -func GetMacroDetails() macros.MacroDetails { - return macros.MacroDetails{ - ID: "add-vault", - Title: "Add Vault", - Description: "This model macro adds a vault (secret storage) to the model.", - } +type addVaultMacro struct { + macroState map[string][]string + questionsAnswered []string + withinTrustBoundary bool + createNewTrustBoundary bool } -var macroState = make(map[string][]string) -var questionsAnswered = make([]string, 0) -var withinTrustBoundary, createNewTrustBoundary bool - const createNewTrustBoundaryLabel = "CREATE NEW TRUST BOUNDARY" var storageTypes = []string{ @@ -32,7 +26,6 @@ var storageTypes = []string{ "In-Memory (no persistent storage 
of secrets)", "Service Registry", // TODO let user choose which technical asset the registry is (for comm link) } - var authenticationTypes = []string{ "Certificate", "Cloud Provider (relying on cloud provider instance authentication)", @@ -40,17 +33,32 @@ var authenticationTypes = []string{ "Credentials (username/password, API-key, secret token, etc.)", } -func GetNextQuestion(parsedModel *types.ParsedModel) (nextQuestion macros.MacroQuestion, err error) { - counter := len(questionsAnswered) - if counter > 5 && !withinTrustBoundary { +func NewAddVault() Macros { + return &addVaultMacro{ + macroState: make(map[string][]string), + questionsAnswered: make([]string, 0), + } +} + +func (m *addVaultMacro) GetMacroDetails() MacroDetails { + return MacroDetails{ + ID: "add-vault", + Title: "Add Vault", + Description: "This model macro adds a vault (secret storage) to the model.", + } +} + +func (m *addVaultMacro) GetNextQuestion(parsedModel *types.ParsedModel) (nextQuestion MacroQuestion, err error) { + counter := len(m.questionsAnswered) + if counter > 5 && !m.withinTrustBoundary { counter++ } - if counter > 6 && !createNewTrustBoundary { + if counter > 6 && !m.createNewTrustBoundary { counter++ } switch counter { case 0: - return macros.MacroQuestion{ + return MacroQuestion{ ID: "vault-name", Title: "What product is used as the vault?", Description: "This name affects the technical asset's title and ID plus also the tags used.", @@ -59,7 +67,7 @@ func GetNextQuestion(parsedModel *types.ParsedModel) (nextQuestion macros.MacroQ DefaultAnswer: "", }, nil case 1: - return macros.MacroQuestion{ + return MacroQuestion{ ID: "storage-type", Title: "What type of storage is used for the vault?", Description: "This selection affects the type of technical asset for the persistence.", @@ -68,7 +76,7 @@ func GetNextQuestion(parsedModel *types.ParsedModel) (nextQuestion macros.MacroQ DefaultAnswer: "", }, nil case 2: - return macros.MacroQuestion{ + return MacroQuestion{ ID: 
"authentication-type", Title: "What type of authentication is used for accessing the vault?", Description: "This selection affects the type of communication links.", @@ -77,7 +85,7 @@ func GetNextQuestion(parsedModel *types.ParsedModel) (nextQuestion macros.MacroQ DefaultAnswer: "", }, nil case 3: - return macros.MacroQuestion{ + return MacroQuestion{ ID: "multi-tenant", Title: "Is the vault used by multiple tenants?", Description: "", @@ -92,7 +100,7 @@ func GetNextQuestion(parsedModel *types.ParsedModel) (nextQuestion macros.MacroQ } sort.Strings(possibleAnswers) if len(possibleAnswers) > 0 { - return macros.MacroQuestion{ + return MacroQuestion{ ID: "clients", Title: "Select all technical assets that make use of the vault and access it:", Description: "This affects the communication links being generated.", @@ -102,7 +110,7 @@ func GetNextQuestion(parsedModel *types.ParsedModel) (nextQuestion macros.MacroQ }, nil } case 5: - return macros.MacroQuestion{ + return MacroQuestion{ ID: "within-trust-boundary", Title: "Is the vault placed within a network trust boundary?", Description: "", @@ -118,7 +126,7 @@ func GetNextQuestion(parsedModel *types.ParsedModel) (nextQuestion macros.MacroQ } } sort.Strings(possibleAnswers) - return macros.MacroQuestion{ + return MacroQuestion{ ID: "selected-trust-boundary", Title: "Choose from the list of existing network trust boundaries or create a new one?", Description: "", @@ -127,7 +135,7 @@ func GetNextQuestion(parsedModel *types.ParsedModel) (nextQuestion macros.MacroQ DefaultAnswer: "", }, nil case 7: - return macros.MacroQuestion{ + return MacroQuestion{ ID: "new-trust-boundary-type", Title: "Of which type shall the new trust boundary be?", Description: "", @@ -141,44 +149,44 @@ func GetNextQuestion(parsedModel *types.ParsedModel) (nextQuestion macros.MacroQ DefaultAnswer: types.NetworkOnPrem.String(), }, nil } - return macros.NoMoreQuestions(), nil + return NoMoreQuestions(), nil } -func ApplyAnswer(questionID string, answer 
...string) (message string, validResult bool, err error) { - macroState[questionID] = answer - questionsAnswered = append(questionsAnswered, questionID) +func (m *addVaultMacro) ApplyAnswer(questionID string, answer ...string) (message string, validResult bool, err error) { + m.macroState[questionID] = answer + m.questionsAnswered = append(m.questionsAnswered, questionID) if questionID == "within-trust-boundary" { - withinTrustBoundary = strings.ToLower(macroState["within-trust-boundary"][0]) == "yes" + m.withinTrustBoundary = strings.ToLower(m.macroState["within-trust-boundary"][0]) == "yes" } else if questionID == "selected-trust-boundary" { - createNewTrustBoundary = strings.ToLower(macroState["selected-trust-boundary"][0]) == strings.ToLower(createNewTrustBoundaryLabel) + m.createNewTrustBoundary = strings.ToLower(m.macroState["selected-trust-boundary"][0]) == strings.ToLower(createNewTrustBoundaryLabel) } return "Answer processed", true, nil } -func GoBack() (message string, validResult bool, err error) { - if len(questionsAnswered) == 0 { +func (m *addVaultMacro) GoBack() (message string, validResult bool, err error) { + if len(m.questionsAnswered) == 0 { return "Cannot go back further", false, nil } - lastQuestionID := questionsAnswered[len(questionsAnswered)-1] - questionsAnswered = questionsAnswered[:len(questionsAnswered)-1] - delete(macroState, lastQuestionID) + lastQuestionID := m.questionsAnswered[len(m.questionsAnswered)-1] + m.questionsAnswered = m.questionsAnswered[:len(m.questionsAnswered)-1] + delete(m.macroState, lastQuestionID) return "Undo successful", true, nil } -func GetFinalChangeImpact(modelInput *input.ModelInput, parsedModel *types.ParsedModel) (changes []string, message string, validResult bool, err error) { +func (m *addVaultMacro) GetFinalChangeImpact(modelInput *input.ModelInput, parsedModel *types.ParsedModel) (changes []string, message string, validResult bool, err error) { changeLogCollector := make([]string, 0) - message, 
validResult, err = applyChange(modelInput, parsedModel, &changeLogCollector, true) + message, validResult, err = m.applyChange(modelInput, parsedModel, &changeLogCollector, true) return changeLogCollector, message, validResult, err } -func Execute(modelInput *input.ModelInput, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { +func (m *addVaultMacro) Execute(modelInput *input.ModelInput, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { changeLogCollector := make([]string, 0) - message, validResult, err = applyChange(modelInput, parsedModel, &changeLogCollector, false) + message, validResult, err = m.applyChange(modelInput, parsedModel, &changeLogCollector, false) return message, validResult, err } -func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, changeLogCollector *[]string, dryRun bool) (message string, validResult bool, err error) { - input.AddTagToModelInput(modelInput, macroState["vault-name"][0], dryRun, changeLogCollector) +func (m *addVaultMacro) applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, changeLogCollector *[]string, dryRun bool) (message string, validResult bool, err error) { + input.AddTagToModelInput(modelInput, m.macroState["vault-name"][0], dryRun, changeLogCollector) var serverSideTechAssets = make([]string, 0) @@ -202,9 +210,9 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c } } - databaseUsed := macroState["storage-type"][0] == storageTypes[2] - filesystemUsed := macroState["storage-type"][0] == storageTypes[3] - inMemoryUsed := macroState["storage-type"][0] == storageTypes[4] + databaseUsed := m.macroState["storage-type"][0] == storageTypes[2] + filesystemUsed := m.macroState["storage-type"][0] == storageTypes[3] + inMemoryUsed := m.macroState["storage-type"][0] == storageTypes[4] storageID := "vault-storage" @@ -234,7 +242,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel 
*types.ParsedModel, c Integrity: types.Critical.String(), Availability: types.Critical.String(), JustificationCiaRating: "Vault components are only rated as 'confidential' as vaults usually apply a trust barrier to encrypt all data-at-rest with a vault key.", - MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + MultiTenant: strings.ToLower(m.macroState["multi-tenant"][0]) == "yes", Redundant: false, CustomDevelopedParts: false, DataAssetsProcessed: nil, @@ -249,7 +257,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c } } - vaultID := types.MakeID(macroState["vault-name"][0]) + "-vault" + vaultID := types.MakeID(m.macroState["vault-name"][0]) + "-vault" if _, exists := parsedModel.TechnicalAssets[vaultID]; !exists { serverSideTechAssets = append(serverSideTechAssets, vaultID) @@ -279,16 +287,16 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c } authentication := types.NoneAuthentication.String() - if macroState["authentication-type"][0] == authenticationTypes[0] { + if m.macroState["authentication-type"][0] == authenticationTypes[0] { authentication = types.ClientCertificate.String() - } else if macroState["authentication-type"][0] == authenticationTypes[1] { + } else if m.macroState["authentication-type"][0] == authenticationTypes[1] { authentication = types.Externalized.String() - } else if macroState["authentication-type"][0] == authenticationTypes[2] { + } else if m.macroState["authentication-type"][0] == authenticationTypes[2] { authentication = types.Externalized.String() - } else if macroState["authentication-type"][0] == authenticationTypes[3] { + } else if m.macroState["authentication-type"][0] == authenticationTypes[3] { authentication = types.Credentials.String() } - for _, clientID := range macroState["clients"] { // add a connection from each client + for _, clientID := range m.macroState["clients"] { // add a connection from each client clientAccessCommLink := 
input.InputCommunicationLink{ Target: vaultID, Description: "Vault Access Traffic (by " + clientID + ")", @@ -332,7 +340,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c techAsset := input.InputTechnicalAsset{ ID: vaultID, - Description: macroState["vault-name"][0] + " Vault", + Description: m.macroState["vault-name"][0] + " Vault", Type: types.Process.String(), Usage: types.DevOps.String(), UsedAsClientByHuman: false, @@ -340,7 +348,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c JustificationOutOfScope: "", Size: types.Service.String(), Technology: types.Vault.String(), - Tags: []string{input.NormalizeTag(macroState["vault-name"][0])}, + Tags: []string{input.NormalizeTag(m.macroState["vault-name"][0])}, Internet: false, Machine: types.Virtual.String(), Encryption: types.Transparent.String(), @@ -349,7 +357,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c Integrity: types.Critical.String(), Availability: types.Critical.String(), JustificationCiaRating: "Vault components are rated as 'strictly-confidential'.", - MultiTenant: strings.ToLower(macroState["multi-tenant"][0]) == "yes", + MultiTenant: strings.ToLower(m.macroState["multi-tenant"][0]) == "yes", Redundant: false, CustomDevelopedParts: false, DataAssetsProcessed: []string{"configuration-secrets"}, @@ -362,7 +370,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c } *changeLogCollector = append(*changeLogCollector, "adding technical asset (including communication links): "+vaultID) if !dryRun { - modelInput.TechnicalAssets[macroState["vault-name"][0]+" Vault"] = techAsset + modelInput.TechnicalAssets[m.macroState["vault-name"][0]+" Vault"] = techAsset } } @@ -383,9 +391,9 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c } } - if withinTrustBoundary { - if createNewTrustBoundary { - trustBoundaryType := 
macroState["new-trust-boundary-type"][0] + if m.withinTrustBoundary { + if m.createNewTrustBoundary { + trustBoundaryType := m.macroState["new-trust-boundary-type"][0] title := "Vault Network" trustBoundary := input.InputTrustBoundary{ ID: "vault-network", @@ -403,7 +411,7 @@ func applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, c modelInput.TrustBoundaries[title] = trustBoundary } } else { // adding to existing trust boundary - existingTrustBoundaryToAddTo := macroState["selected-trust-boundary"][0] + existingTrustBoundaryToAddTo := m.macroState["selected-trust-boundary"][0] title := parsedModel.TrustBoundaries[existingTrustBoundaryToAddTo].Title if filesystemUsed { // ---------------------- nest as execution-environment trust boundary ---------------------- diff --git a/pkg/macros/built-in/built-in.go b/pkg/macros/built-in/built-in.go deleted file mode 100644 index bbf0b632..00000000 --- a/pkg/macros/built-in/built-in.go +++ /dev/null @@ -1,22 +0,0 @@ -package builtin - -import ( - "github.com/threagile/threagile/pkg/macros" - addbuildpipeline "github.com/threagile/threagile/pkg/macros/built-in/add-build-pipeline" - addvault "github.com/threagile/threagile/pkg/macros/built-in/add-vault" - prettyprint "github.com/threagile/threagile/pkg/macros/built-in/pretty-print" - removeunusedtags "github.com/threagile/threagile/pkg/macros/built-in/remove-unused-tags" - seedrisktracking "github.com/threagile/threagile/pkg/macros/built-in/seed-risk-tracking" - seedtags "github.com/threagile/threagile/pkg/macros/built-in/seed-tags" -) - -func ListBuiltInMacros() []macros.MacroDetails { - return []macros.MacroDetails{ - addbuildpipeline.GetMacroDetails(), - addvault.GetMacroDetails(), - prettyprint.GetMacroDetails(), - removeunusedtags.GetMacroDetails(), - seedrisktracking.GetMacroDetails(), - seedtags.GetMacroDetails(), - } -} diff --git a/pkg/macros/built-in/pretty-print/pretty-print-macro.go b/pkg/macros/built-in/pretty-print/pretty-print-macro.go 
deleted file mode 100644 index 93772caf..00000000 --- a/pkg/macros/built-in/pretty-print/pretty-print-macro.go +++ /dev/null @@ -1,34 +0,0 @@ -package pretty_print - -import ( - "github.com/threagile/threagile/pkg/input" - "github.com/threagile/threagile/pkg/macros" -) - -func GetMacroDetails() macros.MacroDetails { - return macros.MacroDetails{ - ID: "pretty-print", - Title: "Pretty Print", - Description: "This model macro simply reformats the model file in a pretty-print style.", - } -} - -func GetNextQuestion() (nextQuestion macros.MacroQuestion, err error) { - return macros.NoMoreQuestions(), nil -} - -func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { - return "Answer processed", true, nil -} - -func GoBack() (message string, validResult bool, err error) { - return "Cannot go back further", false, nil -} - -func GetFinalChangeImpact(_ *input.ModelInput) (changes []string, message string, validResult bool, err error) { - return []string{"pretty-printing the model file"}, "Changeset valid", true, err -} - -func Execute(_ *input.ModelInput) (message string, validResult bool, err error) { - return "Model pretty printing successful", true, nil -} diff --git a/pkg/macros/macros.go b/pkg/macros/macros.go index 64720344..68ff2272 100644 --- a/pkg/macros/macros.go +++ b/pkg/macros/macros.go @@ -4,12 +4,303 @@ Copyright © 2023 NAME HERE package macros import ( + "bufio" + "errors" + "fmt" + "io" + "os" + "strconv" "strings" + + "github.com/threagile/threagile/pkg/input" + "github.com/threagile/threagile/pkg/security/types" + "gopkg.in/yaml.v3" ) -func ListCustomMacros() []MacroDetails { +type Macros interface { + GetMacroDetails() MacroDetails + GetNextQuestion(model *types.ParsedModel) (nextQuestion MacroQuestion, err error) + ApplyAnswer(questionID string, answer ...string) (message string, validResult bool, err error) + GoBack() (message string, validResult bool, err error) + GetFinalChangeImpact(modelInput *input.ModelInput, 
model *types.ParsedModel) (changes []string, message string, validResult bool, err error) + Execute(modelInput *input.ModelInput, model *types.ParsedModel) (message string, validResult bool, err error) +} + +func ListBuiltInMacros() []Macros { + return []Macros{ + NewBuildPipeline(), + NewAddVault(), + NewPrettyPrint(), + NewRemoveUnusedTags(), + NewSeedRiskTracking(), + NewSeedTags(), + } +} + +func ListCustomMacros() []Macros { // TODO: implement - return []MacroDetails{} + return []Macros{} +} + +func GetMacroByID(id string) (Macros, error) { + builtinMacros := ListBuiltInMacros() + customMacros := ListCustomMacros() + allMacros := append(builtinMacros, customMacros...) + for _, macro := range allMacros { + if macro.GetMacroDetails().ID == id { + return macro, nil + } + } + return nil, errors.New("unknown macro id: " + id) +} + +func ExecuteModelMacro(modelInput *input.ModelInput, inputFile string, parsedModel *types.ParsedModel, macroID string) error { + macros, err := GetMacroByID(macroID) + if err != nil { + return err + } + + macroDetails := macros.GetMacroDetails() + + fmt.Println("Executing model macro:", macroDetails.ID) + fmt.Println() + fmt.Println() + printBorder(len(macroDetails.Title), true) + fmt.Println(macroDetails.Title) + printBorder(len(macroDetails.Title), true) + if len(macroDetails.Description) > 0 { + fmt.Println(macroDetails.Description) + } + fmt.Println() + reader := bufio.NewReader(os.Stdin) + for { + nextQuestion, err := macros.GetNextQuestion(parsedModel) + if err != nil { + return err + } + if nextQuestion.NoMoreQuestions() { + break + } + fmt.Println() + printBorder(len(nextQuestion.Title), false) + fmt.Println(nextQuestion.Title) + printBorder(len(nextQuestion.Title), false) + if len(nextQuestion.Description) > 0 { + fmt.Println(nextQuestion.Description) + } + resultingMultiValueSelection := make([]string, 0) + if nextQuestion.IsValueConstrained() { + if nextQuestion.MultiSelect { + selectedValues := make(map[string]bool) + for { + 
fmt.Println("Please select (multiple executions possible) from the following values (use number to select/deselect):") + fmt.Println(" 0:", "SELECTION PROCESS FINISHED: CONTINUE TO NEXT QUESTION") + for i, val := range nextQuestion.PossibleAnswers { + number := i + 1 + padding, selected := "", " " + if number < 10 { + padding = " " + } + if val, exists := selectedValues[val]; exists && val { + selected = "*" + } + fmt.Println(" "+selected+" "+padding+strconv.Itoa(number)+":", val) + } + fmt.Println() + fmt.Print("Enter number to select/deselect (or 0 when finished): ") + answer, err := reader.ReadString('\n') + // convert CRLF to LF + answer = strings.TrimSpace(strings.Replace(answer, "\n", "", -1)) + if err != nil { + return err + } + if val, err := strconv.Atoi(answer); err == nil { // flip selection + if val == 0 { + for key, selected := range selectedValues { + if selected { + resultingMultiValueSelection = append(resultingMultiValueSelection, key) + } + } + break + } else if val > 0 && val <= len(nextQuestion.PossibleAnswers) { + selectedValues[nextQuestion.PossibleAnswers[val-1]] = !selectedValues[nextQuestion.PossibleAnswers[val-1]] + } + } + } + } else { + fmt.Println("Please choose from the following values (enter value directly or use number):") + for i, val := range nextQuestion.PossibleAnswers { + number := i + 1 + padding := "" + if number < 10 { + padding = " " + } + fmt.Println(" "+padding+strconv.Itoa(number)+":", val) + } + } + } + message := "" + validResult := true + if !nextQuestion.IsValueConstrained() || !nextQuestion.MultiSelect { + fmt.Println() + fmt.Println("Enter your answer (use 'BACK' to go one step back or 'QUIT' to quit without executing the model macro)") + fmt.Print("Answer") + if len(nextQuestion.DefaultAnswer) > 0 { + fmt.Print(" (default '" + nextQuestion.DefaultAnswer + "')") + } + fmt.Print(": ") + answer, err := reader.ReadString('\n') + // convert CRLF to LF + answer = strings.TrimSpace(strings.Replace(answer, "\n", "", -1)) 
+ if err != nil { + return err + } + if len(answer) == 0 && len(nextQuestion.DefaultAnswer) > 0 { // accepting the default + answer = nextQuestion.DefaultAnswer + } else if nextQuestion.IsValueConstrained() { // convert number to value + if val, err := strconv.Atoi(answer); err == nil { + if val > 0 && val <= len(nextQuestion.PossibleAnswers) { + answer = nextQuestion.PossibleAnswers[val-1] + } + } + } + if strings.ToLower(answer) == "quit" { + fmt.Println("Quitting without executing the model macro") + return nil + } else if strings.ToLower(answer) == "back" { + message, validResult, _ = macros.GoBack() + } else if len(answer) > 0 { // individual answer + if nextQuestion.IsValueConstrained() { + if !nextQuestion.IsMatchingValueConstraint(answer) { + fmt.Println() + fmt.Println(">>> INVALID <<<") + fmt.Println("Answer does not match any allowed value. Please try again:") + continue + } + } + message, validResult, _ = macros.ApplyAnswer(nextQuestion.ID, answer) + } + } else { + message, validResult, _ = macros.ApplyAnswer(nextQuestion.ID, resultingMultiValueSelection...) 
+ } + if err != nil { + return err + } + if !validResult { + fmt.Println() + fmt.Println(">>> INVALID <<<") + } + fmt.Println(message) + fmt.Println() + } + for { + fmt.Println() + fmt.Println() + fmt.Println("#################################################################") + fmt.Println("Do you want to execute the model macro (updating the model file)?") + fmt.Println("#################################################################") + fmt.Println() + fmt.Println("The following changes will be applied:") + var changes []string + message := "" + validResult := true + + changes, message, validResult, err = macros.GetFinalChangeImpact(modelInput, parsedModel) + if err != nil { + return err + } + for _, change := range changes { + fmt.Println(" -", change) + } + if !validResult { + fmt.Println() + fmt.Println(">>> INVALID <<<") + } + fmt.Println() + fmt.Println(message) + fmt.Println() + fmt.Print("Apply these changes to the model file?\nType Yes or No: ") + answer, err := reader.ReadString('\n') + // convert CRLF to LF + answer = strings.TrimSpace(strings.Replace(answer, "\n", "", -1)) + if err != nil { + return err + } + answer = strings.ToLower(answer) + fmt.Println() + if answer == "yes" || answer == "y" { + message, validResult, err = macros.Execute(modelInput, parsedModel) + if err != nil { + return err + } + if !validResult { + fmt.Println() + fmt.Println(">>> INVALID <<<") + } + fmt.Println(message) + fmt.Println() + backupFilename := inputFile + ".backup" + fmt.Println("Creating backup model file:", backupFilename) // TODO add random files in /dev/shm space? 
+ _, err = copyFile(inputFile, backupFilename) + if err != nil { + return err + } + fmt.Println("Updating model") + yamlBytes, err := yaml.Marshal(modelInput) + if err != nil { + return err + } + /* + yamlBytes = model.ReformatYAML(yamlBytes) + */ + fmt.Println("Writing model file:", inputFile) + err = os.WriteFile(inputFile, yamlBytes, 0400) + if err != nil { + return err + } + fmt.Println("Model file successfully updated") + return nil + } else if answer == "no" || answer == "n" { + fmt.Println("Quitting without executing the model macro") + return nil + } + } +} + +func printBorder(length int, bold bool) { + char := "-" + if bold { + char = "=" + } + for i := 1; i <= length; i++ { + fmt.Print(char) + } + fmt.Println() +} + +func copyFile(src, dst string) (int64, error) { + sourceFileStat, err := os.Stat(src) + if err != nil { + return 0, err + } + + if !sourceFileStat.Mode().IsRegular() { + return 0, fmt.Errorf("%s is not a regular file", src) + } + + source, err := os.Open(src) + if err != nil { + return 0, err + } + defer func() { _ = source.Close() }() + + destination, err := os.Create(dst) + if err != nil { + return 0, err + } + defer func() { _ = destination.Close() }() + nBytes, err := io.Copy(destination, source) + return nBytes, err } type MacroDetails struct { diff --git a/pkg/macros/pretty-print-macro.go b/pkg/macros/pretty-print-macro.go new file mode 100644 index 00000000..f07a8b58 --- /dev/null +++ b/pkg/macros/pretty-print-macro.go @@ -0,0 +1,41 @@ +package macros + +import ( + "github.com/threagile/threagile/pkg/input" + "github.com/threagile/threagile/pkg/security/types" +) + +type prettyPrintMacro struct { +} + +func NewPrettyPrint() Macros { + return &prettyPrintMacro{} +} + +func (*prettyPrintMacro) GetMacroDetails() MacroDetails { + return MacroDetails{ + ID: "pretty-print", + Title: "Pretty Print", + Description: "This model macro simply reformats the model file in a pretty-print style.", + } +} + +func (*prettyPrintMacro) GetNextQuestion(_ 
*types.ParsedModel) (nextQuestion MacroQuestion, err error) { + return NoMoreQuestions(), nil +} + +func (*prettyPrintMacro) ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { + return "Answer processed", true, nil +} + +func (*prettyPrintMacro) GoBack() (message string, validResult bool, err error) { + return "Cannot go back further", false, nil +} + +func (*prettyPrintMacro) GetFinalChangeImpact(_ *input.ModelInput, _ *types.ParsedModel) (changes []string, message string, validResult bool, err error) { + return []string{"pretty-printing the model file"}, "Changeset valid", true, err +} + +func (*prettyPrintMacro) Execute(_ *input.ModelInput, _ *types.ParsedModel) (message string, validResult bool, err error) { + return "Model pretty printing successful", true, nil +} diff --git a/pkg/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go b/pkg/macros/remove-unused-tags-macro.go similarity index 65% rename from pkg/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go rename to pkg/macros/remove-unused-tags-macro.go index 2c06a83d..9de7f4bf 100644 --- a/pkg/macros/built-in/remove-unused-tags/remove-unused-tags-macro.go +++ b/pkg/macros/remove-unused-tags-macro.go @@ -1,39 +1,45 @@ -package remove_unused_tags +package macros import ( - "github.com/threagile/threagile/pkg/security/types" "sort" "strconv" "github.com/threagile/threagile/pkg/input" - "github.com/threagile/threagile/pkg/macros" + "github.com/threagile/threagile/pkg/security/types" ) -func GetMacroDetails() macros.MacroDetails { - return macros.MacroDetails{ +type removeUnusedTagsMacro struct { +} + +func NewRemoveUnusedTags() Macros { + return &removeUnusedTagsMacro{} +} + +func (*removeUnusedTagsMacro) GetMacroDetails() MacroDetails { + return MacroDetails{ ID: "remove-unused-tags", Title: "Remove Unused Tags", Description: "This model macro simply removes all unused tags from the model file.", } } -func GetNextQuestion() (nextQuestion 
macros.MacroQuestion, err error) { - return macros.NoMoreQuestions(), nil +func (*removeUnusedTagsMacro) GetNextQuestion(*types.ParsedModel) (nextQuestion MacroQuestion, err error) { + return NoMoreQuestions(), nil } -func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { +func (*removeUnusedTagsMacro) ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { return "Answer processed", true, nil } -func GoBack() (message string, validResult bool, err error) { +func (*removeUnusedTagsMacro) GoBack() (message string, validResult bool, err error) { return "Cannot go back further", false, nil } -func GetFinalChangeImpact(_ *input.ModelInput) (changes []string, message string, validResult bool, err error) { +func (*removeUnusedTagsMacro) GetFinalChangeImpact(_ *input.ModelInput, _ *types.ParsedModel) (changes []string, message string, validResult bool, err error) { return []string{"remove unused tags from the model file"}, "Changeset valid", true, err } -func Execute(modelInput *input.ModelInput, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { +func (*removeUnusedTagsMacro) Execute(modelInput *input.ModelInput, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { tagUsageMap := make(map[string]bool) for _, tag := range parsedModel.TagsAvailable { tagUsageMap[tag] = false // false = tag is not used diff --git a/pkg/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go b/pkg/macros/seed-risk-tracking-macro.go similarity index 61% rename from pkg/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go rename to pkg/macros/seed-risk-tracking-macro.go index 4210119e..91a6df9c 100644 --- a/pkg/macros/built-in/seed-risk-tracking/seed-risk-tracking-macro.go +++ b/pkg/macros/seed-risk-tracking-macro.go @@ -1,39 +1,45 @@ -package seed_risk_tracking +package macros import ( "sort" "strconv" "github.com/threagile/threagile/pkg/input" - 
"github.com/threagile/threagile/pkg/macros" "github.com/threagile/threagile/pkg/security/types" ) -func GetMacroDetails() macros.MacroDetails { - return macros.MacroDetails{ +type seedRiskTrackingMacro struct { +} + +func NewSeedRiskTracking() Macros { + return &seedRiskTrackingMacro{} +} + +func (*seedRiskTrackingMacro) GetMacroDetails() MacroDetails { + return MacroDetails{ ID: "seed-risk-tracking", Title: "Seed Risk Tracking", Description: "This model macro simply seeds the model file with initial risk tracking entries for all untracked risks.", } } -func GetNextQuestion() (nextQuestion macros.MacroQuestion, err error) { - return macros.NoMoreQuestions(), nil +func (*seedRiskTrackingMacro) GetNextQuestion(*types.ParsedModel) (nextQuestion MacroQuestion, err error) { + return NoMoreQuestions(), nil } -func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { +func (*seedRiskTrackingMacro) ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { return "Answer processed", true, nil } -func GoBack() (message string, validResult bool, err error) { +func (*seedRiskTrackingMacro) GoBack() (message string, validResult bool, err error) { return "Cannot go back further", false, nil } -func GetFinalChangeImpact(_ *input.ModelInput) (changes []string, message string, validResult bool, err error) { +func (*seedRiskTrackingMacro) GetFinalChangeImpact(_ *input.ModelInput, _ *types.ParsedModel) (changes []string, message string, validResult bool, err error) { return []string{"seed the model file with with initial risk tracking entries for all untracked risks"}, "Changeset valid", true, err } -func Execute(parsedModel *types.ParsedModel, modelInput *input.ModelInput) (message string, validResult bool, err error) { +func (*seedRiskTrackingMacro) Execute(modelInput *input.ModelInput, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { syntheticRiskIDsToCreateTrackingFor := make([]string, 0) for 
id, risk := range parsedModel.GeneratedRisksBySyntheticId { if !risk.IsRiskTracked(parsedModel) { diff --git a/pkg/macros/built-in/seed-tags/seed-tags-macro.go b/pkg/macros/seed-tags-macro.go similarity index 56% rename from pkg/macros/built-in/seed-tags/seed-tags-macro.go rename to pkg/macros/seed-tags-macro.go index 0aac3b97..e9c2d29c 100644 --- a/pkg/macros/built-in/seed-tags/seed-tags-macro.go +++ b/pkg/macros/seed-tags-macro.go @@ -1,39 +1,45 @@ -package seed_tags +package macros import ( - "github.com/threagile/threagile/pkg/security/types" "sort" "strconv" "github.com/threagile/threagile/pkg/input" - "github.com/threagile/threagile/pkg/macros" + "github.com/threagile/threagile/pkg/security/types" ) -func GetMacroDetails() macros.MacroDetails { - return macros.MacroDetails{ +type seedTagsMacro struct { +} + +func NewSeedTags() Macros { + return &seedTagsMacro{} +} + +func (*seedTagsMacro) GetMacroDetails() MacroDetails { + return MacroDetails{ ID: "seed-tags", Title: "Seed Tags", Description: "This model macro simply seeds the model file with supported tags from all risk rules.", } } -func GetNextQuestion() (nextQuestion macros.MacroQuestion, err error) { - return macros.NoMoreQuestions(), nil +func (*seedTagsMacro) GetNextQuestion(parsedModel *types.ParsedModel) (nextQuestion MacroQuestion, err error) { + return NoMoreQuestions(), nil } -func ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { +func (*seedTagsMacro) ApplyAnswer(_ string, _ ...string) (message string, validResult bool, err error) { return "Answer processed", true, nil } -func GoBack() (message string, validResult bool, err error) { +func (*seedTagsMacro) GoBack() (message string, validResult bool, err error) { return "Cannot go back further", false, nil } -func GetFinalChangeImpact(_ *input.ModelInput) (changes []string, message string, validResult bool, err error) { +func (*seedTagsMacro) GetFinalChangeImpact(_ *input.ModelInput, _ *types.ParsedModel) (changes 
[]string, message string, validResult bool, err error) { return []string{"seed the model file with supported tags from all risk rules"}, "Changeset valid", true, err } -func Execute(modelInput *input.ModelInput, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { +func (*seedTagsMacro) Execute(modelInput *input.ModelInput, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { tagMap := make(map[string]bool) for k, v := range parsedModel.AllSupportedTags { tagMap[k] = v From c490c6b95a7f61aa6b7f82c64fcc103bc4635e4b Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Fri, 5 Jan 2024 14:15:52 +0000 Subject: [PATCH 38/68] Use parameters instead of context --- cmd/threagile/main.go | 10 +- internal/threagile/context.go | 223 ++++++++++++++-------------------- internal/threagile/macros.go | 12 ++ internal/threagile/root.go | 7 +- pkg/server/server.go | 4 +- 5 files changed, 115 insertions(+), 141 deletions(-) diff --git a/cmd/threagile/main.go b/cmd/threagile/main.go index 60a56315..fd0a5215 100644 --- a/cmd/threagile/main.go +++ b/cmd/threagile/main.go @@ -1,7 +1,7 @@ package main import ( - "github.com/threagile/threagile/internal/threagile" + threagile "github.com/threagile/threagile/internal/threagile" "github.com/threagile/threagile/pkg/server" ) @@ -17,10 +17,10 @@ func main() { // TODO: remove below as soon as refactoring is finished - everything will go through rootCmd.Execute // for now it's fine to have as frequently uncommented to see the actual behaviour - context := new(threagile.Context).Defaults(buildTimestamp).ParseCommandlineArgs() - if context.ServerMode { - server.RunServer(context.Config) + config, commands := threagile.ParseCommandlineArgs(buildTimestamp) + if config.ServerPort > 0 { + server.RunServer(&config) } else { - context.DoIt() + threagile.DoIt(&config, &commands) } } diff --git a/internal/threagile/context.go b/internal/threagile/context.go index 7b0ab7b9..f1e9a997 100644 --- 
a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -13,16 +13,14 @@ import ( "strconv" "strings" - "github.com/threagile/threagile/pkg/model" - "github.com/threagile/threagile/pkg/security/risks" - "github.com/threagile/threagile/pkg/common" - "github.com/threagile/threagile/pkg/docs" "github.com/threagile/threagile/pkg/input" "github.com/threagile/threagile/pkg/macros" + "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/report" "github.com/threagile/threagile/pkg/run" + "github.com/threagile/threagile/pkg/security/risks" "github.com/threagile/threagile/pkg/security/types" ) @@ -51,31 +49,8 @@ func (c *GenerateCommands) Defaults() *GenerateCommands { return c } -type Context struct { - common.Config - *GenerateCommands - - ServerMode bool -} - -func (context *Context) Init() *Context { - *context = Context{ - GenerateCommands: &GenerateCommands{}, - } - - return context -} - -func (context *Context) Defaults(buildTimestamp string) *Context { - *context = *new(Context).Init() - context.Config.Defaults(buildTimestamp) - context.GenerateCommands.Defaults() - - return context -} - -func (context *Context) DoIt() { - progressReporter := common.DefaultProgressReporter{Verbose: context.Config.Verbose} +func DoIt(config *common.Config, commands *GenerateCommands) { + progressReporter := common.DefaultProgressReporter{Verbose: config.Verbose} defer func() { var err error if r := recover(); r != nil { @@ -86,20 +61,20 @@ func (context *Context) DoIt() { } }() - if len(context.Config.ExecuteModelMacro) > 0 { + if len(config.ExecuteModelMacro) > 0 { fmt.Println(docs.Logo + "\n\n" + docs.VersionText) } - progressReporter.Info("Writing into output directory:", context.Config.OutputFolder) - progressReporter.Info("Parsing model:", context.Config.InputFile) + progressReporter.Info("Writing into output directory:", config.OutputFolder) + progressReporter.Info("Parsing model:", config.InputFile) builtinRiskRules := 
make(map[string]types.RiskRule) for _, rule := range risks.GetBuiltInRiskRules() { builtinRiskRules[rule.Category().Id] = rule } - customRiskRules := types.LoadCustomRiskRules(context.Config.RiskRulesPlugins, progressReporter) + customRiskRules := types.LoadCustomRiskRules(config.RiskRulesPlugins, progressReporter) modelInput := *new(input.ModelInput).Defaults() - loadError := modelInput.Load(context.Config.InputFile) + loadError := modelInput.Load(config.InputFile) if loadError != nil { log.Fatal("Unable to load model yaml: ", loadError) } @@ -108,118 +83,110 @@ func (context *Context) DoIt() { if parseError != nil { log.Fatal("Unable to parse model yaml: ", parseError) } - introTextRAA := applyRAA(parsedModel, context.Config.BinFolder, context.RAAPlugin, progressReporter) + introTextRAA := applyRAA(parsedModel, config.BinFolder, config.RAAPlugin, progressReporter) parsedModel.ApplyRiskGeneration(customRiskRules, builtinRiskRules, - context.Config.SkipRiskRules, progressReporter) - err := parsedModel.ApplyWildcardRiskTrackingEvaluation(context.Config.IgnoreOrphanedRiskTracking, progressReporter) + config.SkipRiskRules, progressReporter) + err := parsedModel.ApplyWildcardRiskTrackingEvaluation(config.IgnoreOrphanedRiskTracking, progressReporter) if err != nil { // TODO: do not panic and gracefully handle the error panic(err) } - err = parsedModel.CheckRiskTracking(context.Config.IgnoreOrphanedRiskTracking, progressReporter) + err = parsedModel.CheckRiskTracking(config.IgnoreOrphanedRiskTracking, progressReporter) if err != nil { // TODO: do not panic and gracefully handle the error panic(err) } - if len(context.Config.ExecuteModelMacro) > 0 { - err := macros.ExecuteModelMacro(&modelInput, context.Config.InputFile, parsedModel, context.Config.ExecuteModelMacro) + if len(config.ExecuteModelMacro) > 0 { + err := macros.ExecuteModelMacro(&modelInput, config.InputFile, parsedModel, config.ExecuteModelMacro) if err != nil { log.Fatal("Unable to execute model macro: ", 
err) } return } - if context.GenerateCommands.ReportPDF { // as the PDF report includes both diagrams - context.GenerateCommands.DataFlowDiagram = true - context.GenerateCommands.DataAssetDiagram = true + generateDataFlowDiagram := commands.DataFlowDiagram + generateDataAssetsDiagram := commands.DataAssetDiagram + if commands.ReportPDF { // as the PDF report includes both diagrams + generateDataFlowDiagram = true + generateDataAssetsDiagram = true } - diagramDPI := context.Config.DiagramDPI + diagramDPI := config.DiagramDPI if diagramDPI < common.MinGraphvizDPI { diagramDPI = common.MinGraphvizDPI } else if diagramDPI > common.MaxGraphvizDPI { diagramDPI = common.MaxGraphvizDPI } // Data-flow Diagram rendering - if context.GenerateCommands.DataFlowDiagram { - gvFile := filepath.Join(context.Config.OutputFolder, context.Config.DataFlowDiagramFilenameDOT) - if !context.Config.KeepDiagramSourceFiles { - tmpFileGV, err := os.CreateTemp(context.Config.TempFolder, context.Config.DataFlowDiagramFilenameDOT) + if generateDataFlowDiagram { + gvFile := filepath.Join(config.OutputFolder, config.DataFlowDiagramFilenameDOT) + if !config.KeepDiagramSourceFiles { + tmpFileGV, err := os.CreateTemp(config.TempFolder, config.DataFlowDiagramFilenameDOT) checkErr(err) gvFile = tmpFileGV.Name() defer func() { _ = os.Remove(gvFile) }() } - dotFile := report.WriteDataFlowDiagramGraphvizDOT(parsedModel, gvFile, diagramDPI, context.Config.AddModelTitle, progressReporter) + dotFile := report.WriteDataFlowDiagramGraphvizDOT(parsedModel, gvFile, diagramDPI, config.AddModelTitle, progressReporter) - err := report.GenerateDataFlowDiagramGraphvizImage(dotFile, context.Config.OutputFolder, - context.Config.TempFolder, context.Config.BinFolder, context.Config.DataFlowDiagramFilenamePNG, progressReporter) + err := report.GenerateDataFlowDiagramGraphvizImage(dotFile, config.OutputFolder, + config.TempFolder, config.BinFolder, config.DataFlowDiagramFilenamePNG, progressReporter) if err != nil { 
fmt.Println(err) } } // Data Asset Diagram rendering - if context.GenerateCommands.DataAssetDiagram { - gvFile := filepath.Join(context.Config.OutputFolder, context.Config.DataAssetDiagramFilenameDOT) - if !context.Config.KeepDiagramSourceFiles { - tmpFile, err := os.CreateTemp(context.Config.TempFolder, context.Config.DataAssetDiagramFilenameDOT) + if generateDataAssetsDiagram { + gvFile := filepath.Join(config.OutputFolder, config.DataAssetDiagramFilenameDOT) + if !config.KeepDiagramSourceFiles { + tmpFile, err := os.CreateTemp(config.TempFolder, config.DataAssetDiagramFilenameDOT) checkErr(err) gvFile = tmpFile.Name() defer func() { _ = os.Remove(gvFile) }() } dotFile := report.WriteDataAssetDiagramGraphvizDOT(parsedModel, gvFile, diagramDPI, progressReporter) - err := report.GenerateDataAssetDiagramGraphvizImage(dotFile, context.Config.OutputFolder, - context.Config.TempFolder, context.Config.BinFolder, context.Config.DataAssetDiagramFilenamePNG, progressReporter) + err := report.GenerateDataAssetDiagramGraphvizImage(dotFile, config.OutputFolder, + config.TempFolder, config.BinFolder, config.DataAssetDiagramFilenamePNG, progressReporter) if err != nil { fmt.Println(err) } } // risks as risks json - if context.GenerateCommands.RisksJSON { - if context.Config.Verbose { - fmt.Println("Writing risks json") - } - report.WriteRisksJSON(parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.JsonRisksFilename)) + if commands.RisksJSON { + progressReporter.Info("Writing risks json") + report.WriteRisksJSON(parsedModel, filepath.Join(config.OutputFolder, config.JsonRisksFilename)) } // technical assets json - if context.GenerateCommands.TechnicalAssetsJSON { - if context.Config.Verbose { - fmt.Println("Writing technical assets json") - } - report.WriteTechnicalAssetsJSON(parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.JsonTechnicalAssetsFilename)) + if commands.TechnicalAssetsJSON { + progressReporter.Info("Writing technical assets 
json") + report.WriteTechnicalAssetsJSON(parsedModel, filepath.Join(config.OutputFolder, config.JsonTechnicalAssetsFilename)) } // risks as risks json - if context.GenerateCommands.StatsJSON { - if context.Config.Verbose { - fmt.Println("Writing stats json") - } - report.WriteStatsJSON(parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.JsonStatsFilename)) + if commands.StatsJSON { + progressReporter.Info("Writing stats json") + report.WriteStatsJSON(parsedModel, filepath.Join(config.OutputFolder, config.JsonStatsFilename)) } // risks Excel - if context.GenerateCommands.RisksExcel { - if context.Config.Verbose { - fmt.Println("Writing risks excel") - } - report.WriteRisksExcelToFile(parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.ExcelRisksFilename)) + if commands.RisksExcel { + progressReporter.Info("Writing risks excel") + report.WriteRisksExcelToFile(parsedModel, filepath.Join(config.OutputFolder, config.ExcelRisksFilename)) } // tags Excel - if context.GenerateCommands.TagsExcel { - if context.Config.Verbose { - fmt.Println("Writing tags excel") - } - report.WriteTagsExcelToFile(parsedModel, filepath.Join(context.Config.OutputFolder, context.Config.ExcelTagsFilename)) + if commands.TagsExcel { + progressReporter.Info("Writing tags excel") + report.WriteTagsExcelToFile(parsedModel, filepath.Join(config.OutputFolder, config.ExcelTagsFilename)) } - if context.GenerateCommands.ReportPDF { + if commands.ReportPDF { // hash the YAML input file - f, err := os.Open(context.Config.InputFile) + f, err := os.Open(config.InputFile) checkErr(err) defer func() { _ = f.Close() }() hasher := sha256.New() @@ -228,20 +195,18 @@ func (context *Context) DoIt() { } modelHash := hex.EncodeToString(hasher.Sum(nil)) // report PDF - if context.Config.Verbose { - fmt.Println("Writing report pdf") - } - report.WriteReportPDF(filepath.Join(context.Config.OutputFolder, context.Config.ReportFilename), - filepath.Join(context.Config.AppFolder, 
context.Config.TemplateFilename), - filepath.Join(context.Config.OutputFolder, context.Config.DataFlowDiagramFilenamePNG), - filepath.Join(context.Config.OutputFolder, context.Config.DataAssetDiagramFilenamePNG), - context.Config.InputFile, - context.Config.SkipRiskRules, - context.Config.BuildTimestamp, + progressReporter.Info("Writing report pdf") + report.WriteReportPDF(filepath.Join(config.OutputFolder, config.ReportFilename), + filepath.Join(config.AppFolder, config.TemplateFilename), + filepath.Join(config.OutputFolder, config.DataFlowDiagramFilenamePNG), + filepath.Join(config.OutputFolder, config.DataAssetDiagramFilenamePNG), + config.InputFile, + config.SkipRiskRules, + config.BuildTimestamp, modelHash, introTextRAA, customRiskRules, - context.Config.TempFolder, + config.TempFolder, parsedModel) } } @@ -299,44 +264,46 @@ func expandPath(path string) string { return path } -func (context *Context) ParseCommandlineArgs() *Context { +func ParseCommandlineArgs(buildTimestamp string) (common.Config, GenerateCommands) { configFile := flag.String("config", "", "config file") - configError := context.Config.Load(*configFile) + config := new(common.Config).Defaults(buildTimestamp) + configError := config.Load(*configFile) if configError != nil { fmt.Printf("WARNING: failed to load config file %q: %v\n", *configFile, configError) } // folders - flag.StringVar(&context.Config.AppFolder, "app-dir", common.AppDir, "app folder (default: "+common.AppDir+")") - flag.StringVar(&context.Config.ServerFolder, "server-dir", common.DataDir, "base folder for server mode (default: "+common.DataDir+")") - flag.StringVar(&context.Config.TempFolder, "temp-dir", common.TempDir, "temporary folder location") - flag.StringVar(&context.Config.BinFolder, "bin-dir", common.BinDir, "binary folder location") - flag.StringVar(&context.Config.OutputFolder, "output", ".", "output directory") + flag.StringVar(&config.AppFolder, "app-dir", common.AppDir, "app folder (default: "+common.AppDir+")") 
+ flag.StringVar(&config.ServerFolder, "server-dir", common.DataDir, "base folder for server mode (default: "+common.DataDir+")") + flag.StringVar(&config.TempFolder, "temp-dir", common.TempDir, "temporary folder location") + flag.StringVar(&config.BinFolder, "bin-dir", common.BinDir, "binary folder location") + flag.StringVar(&config.OutputFolder, "output", ".", "output directory") // files - flag.StringVar(&context.Config.InputFile, "model", common.InputFile, "input model yaml file") - flag.StringVar(&context.RAAPlugin, "raa-run", "raa_calc", "RAA calculation run file name") + flag.StringVar(&config.InputFile, "model", common.InputFile, "input model yaml file") + flag.StringVar(&config.RAAPlugin, "raa-run", "raa_calc", "RAA calculation run file name") // flags / parameters - flag.BoolVar(&context.Config.Verbose, "verbose", false, "verbose output") - flag.IntVar(&context.Config.DiagramDPI, "diagram-dpi", context.Config.DiagramDPI, "DPI used to render: maximum is "+strconv.Itoa(context.Config.MaxGraphvizDPI)+"") - flag.StringVar(&context.Config.SkipRiskRules, "skip-risk-rules", "", "comma-separated list of risk rules (by their ID) to skip") - flag.BoolVar(&context.Config.IgnoreOrphanedRiskTracking, "ignore-orphaned-risk-tracking", false, "ignore orphaned risk tracking (just log them) not matching a concrete risk") - flag.IntVar(&context.Config.ServerPort, "server", 0, "start a server (instead of commandline execution) on the given port") - flag.StringVar(&context.Config.ExecuteModelMacro, "execute-model-macro", "", "Execute model macro (by ID)") - flag.StringVar(&context.Config.TemplateFilename, "background", "background.pdf", "background pdf file") + flag.BoolVar(&config.Verbose, "verbose", false, "verbose output") + flag.IntVar(&config.DiagramDPI, "diagram-dpi", config.DiagramDPI, "DPI used to render: maximum is "+strconv.Itoa(config.MaxGraphvizDPI)+"") + flag.StringVar(&config.SkipRiskRules, "skip-risk-rules", "", "comma-separated list of risk rules (by their 
ID) to skip") + flag.BoolVar(&config.IgnoreOrphanedRiskTracking, "ignore-orphaned-risk-tracking", false, "ignore orphaned risk tracking (just log them) not matching a concrete risk") + flag.IntVar(&config.ServerPort, "server", 0, "start a server (instead of commandline execution) on the given port") + flag.StringVar(&config.ExecuteModelMacro, "execute-model-macro", "", "Execute model macro (by ID)") + flag.StringVar(&config.TemplateFilename, "background", "background.pdf", "background pdf file") riskRulesPlugins := flag.String("custom-risk-rules-plugins", "", "comma-separated list of plugins file names with custom risk rules to load") - context.Config.RiskRulesPlugins = strings.Split(*riskRulesPlugins, ",") + config.RiskRulesPlugins = strings.Split(*riskRulesPlugins, ",") // commands - flag.BoolVar(&context.GenerateCommands.DataFlowDiagram, "generate-data-flow-diagram", true, "generate data-flow diagram") - flag.BoolVar(&context.GenerateCommands.DataAssetDiagram, "generate-data-asset-diagram", true, "generate data asset diagram") - flag.BoolVar(&context.GenerateCommands.RisksJSON, "generate-risks-json", true, "generate risks json") - flag.BoolVar(&context.GenerateCommands.StatsJSON, "generate-stats-json", true, "generate stats json") - flag.BoolVar(&context.GenerateCommands.TechnicalAssetsJSON, "generate-technical-assets-json", true, "generate technical assets json") - flag.BoolVar(&context.GenerateCommands.RisksExcel, "generate-risks-excel", true, "generate risks excel") - flag.BoolVar(&context.GenerateCommands.TagsExcel, "generate-tags-excel", true, "generate tags excel") - flag.BoolVar(&context.GenerateCommands.ReportPDF, "generate-report-pdf", true, "generate report pdf, including diagrams") + commands := new(GenerateCommands).Defaults() + flag.BoolVar(&commands.DataFlowDiagram, "generate-data-flow-diagram", true, "generate data-flow diagram") + flag.BoolVar(&commands.DataAssetDiagram, "generate-data-asset-diagram", true, "generate data asset diagram") + 
flag.BoolVar(&commands.RisksJSON, "generate-risks-json", true, "generate risks json") + flag.BoolVar(&commands.StatsJSON, "generate-stats-json", true, "generate stats json") + flag.BoolVar(&commands.TechnicalAssetsJSON, "generate-technical-assets-json", true, "generate technical assets json") + flag.BoolVar(&commands.RisksExcel, "generate-risks-excel", true, "generate risks excel") + flag.BoolVar(&commands.TagsExcel, "generate-tags-excel", true, "generate tags excel") + flag.BoolVar(&commands.ReportPDF, "generate-report-pdf", true, "generate report pdf, including diagrams") flag.Usage = func() { fmt.Println(docs.Logo + "\n\n" + docs.VersionText) @@ -345,14 +312,12 @@ func (context *Context) ParseCommandlineArgs() *Context { } flag.Parse() - context.Config.InputFile = expandPath(context.Config.InputFile) - context.Config.AppFolder = expandPath(context.Config.AppFolder) - context.Config.ServerFolder = expandPath(context.Config.ServerFolder) - context.Config.TempFolder = expandPath(context.Config.TempFolder) - context.Config.BinFolder = expandPath(context.Config.BinFolder) - context.Config.OutputFolder = expandPath(context.Config.OutputFolder) - - context.ServerMode = context.Config.ServerPort > 0 + config.InputFile = expandPath(config.InputFile) + config.AppFolder = expandPath(config.AppFolder) + config.ServerFolder = expandPath(config.ServerFolder) + config.TempFolder = expandPath(config.TempFolder) + config.BinFolder = expandPath(config.BinFolder) + config.OutputFolder = expandPath(config.OutputFolder) - return context + return *config, *commands } diff --git a/internal/threagile/macros.go b/internal/threagile/macros.go index 63b8f930..533f4430 100644 --- a/internal/threagile/macros.go +++ b/internal/threagile/macros.go @@ -4,6 +4,8 @@ Copyright © 2023 NAME HERE package threagile import ( + "fmt" + "github.com/spf13/cobra" "github.com/threagile/threagile/pkg/docs" @@ -63,7 +65,17 @@ var explainMacrosCmd = &cobra.Command{ }, } +var executeModelMacrosCmd = 
&cobra.Command{ + Use: "execute-model-macro", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + fmt.Println("execute-model-macro called with args:", args) + return nil + }, +} + func init() { rootCmd.AddCommand(listMacrosCmd) rootCmd.AddCommand(explainMacrosCmd) + rootCmd.AddCommand(executeModelMacrosCmd) } diff --git a/internal/threagile/root.go b/internal/threagile/root.go index 6155f9d5..86f832d6 100644 --- a/internal/threagile/root.go +++ b/internal/threagile/root.go @@ -20,10 +20,7 @@ var rootCmd = &cobra.Command{ Short: "\n" + docs.Logo, Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\n" + docs.Examples, RunE: func(cmd *cobra.Command, args []string) error { - ctx := new(Context).Defaults("buildTimestamp") - ctx.Config = *readConfig("buildTimestamp") - ctx.GenerateCommands = readCommands() - ctx.DoIt() + DoIt(readConfig("buildTimestamp"), readCommands()) return nil }, } @@ -33,7 +30,7 @@ var serverCmd = &cobra.Command{ Short: "Run server", RunE: func(cmd *cobra.Command, args []string) error { cfg := readConfig("buildTimestamp") - server.RunServer(*cfg) + server.RunServer(cfg) return nil }, } diff --git a/pkg/server/server.go b/pkg/server/server.go index a4c8909d..cc551236 100644 --- a/pkg/server/server.go +++ b/pkg/server/server.go @@ -24,7 +24,7 @@ import ( ) type server struct { - config common.Config + config *common.Config successCount int errorCount int globalLock sync.Mutex @@ -37,7 +37,7 @@ type server struct { customRiskRules map[string]*types.CustomRisk } -func RunServer(config common.Config) { +func RunServer(config *common.Config) { s := &server{ config: config, createdObjectsThrottler: make(map[string][]int64), From e4857c2bcd87962948bed929312d2061e5abb832 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Fri, 5 Jan 2024 15:18:12 +0000 Subject: [PATCH 39/68] Move logic to packages --- cmd/threagile/main.go | 17 +- cmd/threagile/threagile.yaml | 1354 +++++++++++++++++++++++++++++++++ 
internal/threagile/context.go | 208 +---- internal/threagile/root.go | 5 +- pkg/model/read.go | 89 +++ pkg/report/generate.go | 156 ++++ pkg/report/graphviz.go | 4 - 7 files changed, 1620 insertions(+), 213 deletions(-) create mode 100644 cmd/threagile/threagile.yaml create mode 100644 pkg/model/read.go create mode 100644 pkg/report/generate.go diff --git a/cmd/threagile/main.go b/cmd/threagile/main.go index fd0a5215..40eea37a 100644 --- a/cmd/threagile/main.go +++ b/cmd/threagile/main.go @@ -2,25 +2,22 @@ package main import ( threagile "github.com/threagile/threagile/internal/threagile" - "github.com/threagile/threagile/pkg/server" ) const ( buildTimestamp = "" ) -// === Error handling stuff ======================================== - func main() { // TODO: uncomment below as soon as refactoring is finished - everything will go through rootCmd.Execute - // threagile.Execute() + threagile.Execute() // TODO: remove below as soon as refactoring is finished - everything will go through rootCmd.Execute // for now it's fine to have as frequently uncommented to see the actual behaviour - config, commands := threagile.ParseCommandlineArgs(buildTimestamp) - if config.ServerPort > 0 { - server.RunServer(&config) - } else { - threagile.DoIt(&config, &commands) - } + // config, commands := threagile.ParseCommandlineArgs(buildTimestamp) + // if config.ServerPort > 0 { + // server.RunServer(&config) + // } else { + // threagile.DoIt(&config, &commands) + // } } diff --git a/cmd/threagile/threagile.yaml b/cmd/threagile/threagile.yaml new file mode 100644 index 00000000..77815f77 --- /dev/null +++ b/cmd/threagile/threagile.yaml @@ -0,0 +1,1354 @@ +threagile_version: 1.0.0 + +# NOTE: +# +# For a perfect editing experience within your IDE of choice you can easily +# get model syntax validation and autocompletion (very handy for enum values) +# as well as live templates: Just import the schema.json into your IDE and assign +# it as "schema" to each Threagile YAML file. 
Also try to import individual parts +# from the live-templates.txt file into your IDE as live editing templates. +# +# You might also want to try the REST API when running in server mode... + + + +title: Some Example Application + +date: 2020-07-01 + +author: + name: John Doe + homepage: www.example.com + + + + +management_summary_comment: > + Just some more custom summary possible here... + +business_criticality: important # values: archive, operational, important, critical, mission-critical + + + + +business_overview: + description: Some more demo text here and even images... + images: +# - custom-image-1.png: Some dummy image 1 +# - custom-image-2.png: Some dummy image 2 + + +technical_overview: + description: Some more demo text here and even images... + images: +# - custom-image-1.png: Some dummy image 1 +# - custom-image-2.png: Some dummy image 2 + + + +questions: # simply use "" as answer to signal "unanswered" + How are the admin clients managed/protected against compromise?: "" + How are the development clients managed/protected against compromise?: > + Managed by XYZ + How are the build pipeline components managed/protected against compromise?: > + Managed by XYZ + + + +abuse_cases: + Denial-of-Service: > + As a hacker I want to disturb the functionality of the backend system in order to cause indirect + financial damage via unusable features. + CPU-Cycle Theft: > + As a hacker I want to steal CPU cycles in order to transform them into money via installed crypto currency miners. + Ransomware: > + As a hacker I want to encrypt the storage and file systems in order to demand ransom. + Identity Theft: > + As a hacker I want to steal identity data in order to reuse credentials and/or keys on other targets of the same company or outside. + PII Theft: > + As a hacker I want to steal PII (Personally Identifiable Information) data in order to blackmail the company and/or damage + their reputation by publishing them. 
+ + ERP-System Compromise: > + As a hacker I want to access the ERP-System in order to steal/modify sensitive business data. + Database Compromise: > + As a hacker I want to access the database backend of the ERP-System in order to steal/modify sensitive + business data. + Contract Filesystem Compromise: > + As a hacker I want to access the filesystem storing the contract PDFs in order to steal/modify contract data. + Cross-Site Scripting Attacks: > + As a hacker I want to execute Cross-Site Scripting (XSS) and similar attacks in order to take over victim sessions and + cause reputational damage. + Denial-of-Service of Enduser Functionality: > + As a hacker I want to disturb the functionality of the enduser parts of the application in order to cause direct financial + damage (lower sales). + Denial-of-Service of ERP/DB Functionality: > + As a hacker I want to disturb the functionality of the ERP system and/or its database in order to cause indirect + financial damage via unusable internal ERP features (not related to customer portal). + + +security_requirements: + Input Validation: Strict input validation is required to reduce the overall attack surface. + Securing Administrative Access: Administrative access must be secured with strong encryption and multi-factor authentication. + EU-DSGVO: Mandatory EU-Datenschutzgrundverordnung + + +# Tags can be used for anything, it's just a tag. Also risk rules can act based on tags if you like. 
+# Tags can be used for example to name the products used (which is more concrete than the technology types that only specify the type) +tags_available: + - linux + - apache + - mysql + - jboss + - keycloak + - jenkins + - git + - oracle + - some-erp + - vmware + - aws + - aws:ec2 + - aws:s3 + + + + +data_assets: + + + Customer Contracts: &customer-contracts # this example shows the inheritance-like features of YAML + id: customer-contracts + description: Customer Contracts (PDF) + usage: business # values: business, devops + tags: + origin: Customer + owner: Company XYZ + quantity: many # values: very-few, few, many, very-many + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Contract data might contain financial data as well as personally identifiable information (PII). The integrity and + availability of contract data is required for clearing payment disputes. + + + Customer Contract Summaries: + <<: *customer-contracts # here we're referencing the above created asset as base and just overwrite few values + id: contract-summaries + description: Customer Contract Summaries + quantity: very-few # values: very-few, few, many, very-many + confidentiality: restricted # values: public, internal, restricted, confidential, strictly-confidential + integrity: operational # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Just some summaries. 
+ + + Customer Operational Data: + <<: *customer-contracts # here we're referencing the above created asset as base and just overwrite few values + id: customer-operational-data + description: Customer Operational Data + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Customer operational data for using the portal are required to be available to offer the portal functionality + and are used in the backend transactions. + + + Customer Accounts: + <<: *customer-contracts # here we're referencing the above created asset as base and just overwrite few values + id: customer-accounts + description: Customer Accounts (including transient credentials when entered for checking them) + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Customer account data for using the portal are required to be available to offer the portal functionality. + + + Some Internal Business Data: + id: internal-business-data + description: Internal business data of the ERP system used unrelated to the customer-facing processes. + usage: business # values: business, devops + tags: + origin: Company XYZ + owner: Company XYZ + quantity: few # values: very-few, few, many, very-many + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Data used and/or generated during unrelated other usecases of the ERP-system (when used also by Company XYZ for + internal non-customer-portal-related stuff). 
+ + + Client Application Code: &client-application-code # this example shows the inheritance-like features of YAML + id: client-application-code + description: Angular and other client-side code delivered by the application. + usage: devops # values: business, devops + tags: + origin: Company ABC + owner: Company ABC + quantity: very-few # values: very-few, few, many, very-many + confidentiality: public # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The integrity of the public data is critical to avoid reputational damage and the availability is important on the + long-term scale (but not critical) to keep the growth rate of the customer base steady. + + + Server Application Code: + <<: *client-application-code # here we're referencing the above created asset as base and just overwrite few values + id: server-application-code + description: API and other server-side code of the application. + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: mission-critical # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The integrity of the API code is critical to avoid reputational damage and the availability is important on the + long-term scale (but not critical) to keep the growth rate of the customer base steady. + + + Build Job Config: + id: build-job-config + description: Data for customizing of the build job system. 
+ usage: devops # values: business, devops + tags: + origin: Company XYZ + owner: Company XYZ + quantity: very-few # values: very-few, few, many, very-many + confidentiality: restricted # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Data for customizing of the build job system. + + + Marketing Material: + <<: *client-application-code # here we're referencing the above created asset as base and just overwrite few values + id: marketing-material + description: Website and marketing data to inform potential customers and generate new leads. + integrity: important # values: archive, operational, important, critical, mission-critical + + + ERP Logs: + id: erp-logs + description: Logs generated by the ERP system. + usage: devops # values: business, devops + tags: + origin: Company XYZ + owner: Company XYZ + quantity: many # values: very-few, few, many, very-many + confidentiality: restricted # values: public, internal, restricted, confidential, strictly-confidential + integrity: archive # values: archive, operational, important, critical, mission-critical + availability: archive # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Logs should not contain PII data and are only required for failure analysis, i.e. they are not considered as hard + transactional logs. + + + ERP Customizing Data: + id: erp-customizing + description: Data for customizing of the ERP system. 
+ usage: devops # values: business, devops + tags: + origin: Company XYZ + owner: Company XYZ + quantity: very-few # values: very-few, few, many, very-many + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Data for customizing of the ERP system. + + + Database Customizing and Dumps: + id: db-dumps + description: Data for customizing of the DB system, which might include full database dumps. + usage: devops # values: business, devops + tags: + - oracle + origin: Company XYZ + owner: Company XYZ + quantity: very-few # values: very-few, few, many, very-many + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Data for customizing of the DB system, which might include full database dumps. 
+
+
+
+
+
+
+technical_assets:
+
+
+  Customer Web Client:
+    id: customer-client
+    description: Customer Web Client
+    type: external-entity # values: external-entity, process, datastore
+    usage: business # values: business, devops
+    used_as_client_by_human: true
+    out_of_scope: true
+    justification_out_of_scope: Owned and managed by end-user customer
+    size: component # values: system, service, application, component
+    technology: browser # values: see help
+    tags:
+    internet: true
+    machine: physical # values: physical, virtual, container, serverless
+    encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key
+    owner: Customer
+    confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential
+    integrity: operational # values: archive, operational, important, critical, mission-critical
+    availability: operational # values: archive, operational, important, critical, mission-critical
+    justification_cia_rating: >
+      The client used by the customer to access the system.
+ multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - client-application-code + - marketing-material + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + Customer Traffic: + target: load-balancer + description: Link to the load balancer + protocol: https # values: see help + authentication: session-id # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + data_assets_received: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - client-application-code + - marketing-material + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + + + Backoffice Client: + id: backoffice-client + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: Backoffice client + type: external-entity # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: true + out_of_scope: true + justification_out_of_scope: Owned and managed by Company XYZ company + size: component # values: system, service, application, component + technology: desktop # values: see help + tags: + internet: false + machine: physical # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company XYZ + 
confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: important # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The client used by Company XYZ to administer and use the system. + multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-contracts + - internal-business-data + - erp-logs + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + ERP Internal Access: + target: erp-system + description: Link to the ERP system + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation + tags: + - some-erp + vpn: true + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - internal-business-data + data_assets_received: # sequence of IDs to reference + - customer-contracts + - internal-business-data + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + Marketing CMS Editing: + target: marketing-cms + description: Link to the CMS for editing content + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation + tags: + vpn: true + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - marketing-material + data_assets_received: # sequence 
of IDs to reference + - marketing-material + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + + + Backend Admin Client: + id: backend-admin-client + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: Backend admin client + type: external-entity # values: external-entity, process, datastore + usage: devops # values: business, devops + used_as_client_by_human: true + out_of_scope: true + justification_out_of_scope: Owned and managed by ops provider + size: component # values: system, service, application, component + technology: browser # values: see help + tags: + internet: false + machine: physical # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company XYZ + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: operational # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The client used by Company XYZ to administer the system. 
+ multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - erp-logs + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + ERP Web Access: + target: erp-system + description: Link to the ERP system (Web) + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - erp-customizing + data_assets_received: # sequence of IDs to reference + - erp-logs + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + DB Update Access: + target: sql-database + description: Link to the database (JDBC tunneled via SSH) + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - db-dumps + data_assets_received: # sequence of IDs to reference + - db-dumps + - erp-logs + - customer-accounts + - customer-operational-data + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + User Management Access: + target: ldap-auth-server + description: Link to the LDAP auth server for managing users + protocol: ldaps # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: 
+ vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + data_assets_received: # sequence of IDs to reference + - customer-accounts + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + + + Load Balancer: + id: load-balancer + #diagram_tweak_order: 50 # affects left to right positioning (only within a trust boundary) + description: Load Balancer (HA-Proxy) + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: load-balancer # values: see help + tags: + internet: false + machine: physical # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: mission-critical # values: archive, operational, important, critical, mission-critical + availability: mission-critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The correct configuration and reachability of the load balancer is mandatory for all customer and Company XYZ + usages of the portal and ERP system. 
+ multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - internal-business-data + - client-application-code + - marketing-material + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + Web Application Traffic: + target: apache-webserver + description: Link to the web server + protocol: http # values: see help + authentication: session-id # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + data_assets_received: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - client-application-code + #diagram_tweak_weight: 5 + #diagram_tweak_constraint: false + CMS Content Traffic: + target: marketing-cms + description: Link to the CMS server + protocol: http # values: see help + authentication: none # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: none # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: true + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + data_assets_received: # sequence of IDs to reference + - marketing-material + #diagram_tweak_weight: 5 + #diagram_tweak_constraint: false + + + Apache Webserver: + id: apache-webserver + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: Apache Webserver hosting 
the API code and client-side code + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: application # values: system, service, application, component + technology: web-server # values: see help + tags: + - linux + - apache + - aws:ec2 + internet: false + machine: container # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The correct configuration and reachability of the web server is mandatory for all customer usages of the portal. 
+ multi_tenant: false + redundant: false + custom_developed_parts: true + data_assets_processed: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - internal-business-data + - client-application-code + - server-application-code + data_assets_stored: # sequence of IDs to reference + - client-application-code + - server-application-code + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - json + - file + communication_links: + ERP System Traffic: + target: erp-system + description: Link to the ERP system + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - internal-business-data + data_assets_received: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - internal-business-data + #diagram_tweak_weight: 5 + #diagram_tweak_constraint: false + Auth Credential Check Traffic: + target: identity-provider + description: Link to the identity provider server + protocol: https # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + data_assets_received: # sequence of IDs to reference + + + Identity Provider: + id: identity-provider + #diagram_tweak_order: 0 # affects left to right positioning (only 
within a trust boundary) + description: Identity provider server + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: identity-provider # values: see help + tags: + - linux + - jboss + - keycloak + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The auth data of the application + multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + LDAP Credential Check Traffic: + target: ldap-auth-server + description: Link to the LDAP server + protocol: ldaps # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + data_assets_received: # sequence of IDs to reference + + + LDAP Auth Server: + id: ldap-auth-server + #diagram_tweak_order: 0 # 
affects left to right positioning (only within a trust boundary) + description: LDAP authentication server + type: datastore # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: identity-store-ldap # values: see help + tags: + - linux + internet: false + machine: physical # values: physical, virtual, container, serverless + encryption: transparent # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The auth data of the application + multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + data_assets_stored: # sequence of IDs to reference + - customer-accounts + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + + + Marketing CMS: + id: marketing-cms + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: CMS for the marketing content + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: application # values: system, service, application, component + technology: cms # values: see help + tags: + - linux + internet: false + machine: container # values: physical, virtual, container, serverless + 
encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: important # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The correct configuration and reachability of the web server is mandatory for all customer usages of the portal. + multi_tenant: false + redundant: false + custom_developed_parts: true + data_assets_processed: # sequence of IDs to reference + - marketing-material + - customer-accounts + data_assets_stored: # sequence of IDs to reference + - marketing-material + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + Auth Traffic: + target: ldap-auth-server + description: Link to the LDAP auth server + protocol: ldap # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: true + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + data_assets_received: # sequence of IDs to reference + - customer-accounts + #diagram_tweak_weight: 5 + #diagram_tweak_constraint: false + + + Backoffice ERP System: + id: erp-system + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: ERP system + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: system # values: system, 
service, application, component + technology: erp # values: see help + tags: + - linux + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: mission-critical # values: archive, operational, important, critical, mission-critical + availability: mission-critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The ERP system contains business-relevant sensitive data for the leasing processes and eventually also for other + Company XYZ internal processes. + multi_tenant: false + redundant: true + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - internal-business-data + - erp-customizing + data_assets_stored: # sequence of IDs to reference + - erp-logs + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - xml + - file + - serialization + communication_links: + Database Traffic: + target: sql-database + description: Link to the DB system + protocol: jdbc # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - internal-business-data + data_assets_received: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - internal-business-data + 
#diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + NFS Filesystem Access: + target: contract-fileserver + description: Link to the file system + protocol: nfs # values: see help + authentication: none # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: none # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-contracts + data_assets_received: # sequence of IDs to reference + - customer-contracts + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + + + Contract Fileserver: + id: contract-fileserver + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: NFS Filesystem for storing the contract PDFs + type: datastore # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: file-server # values: see help + tags: + - linux + - aws:s3 + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Contract data might contain financial data as well as personally identifiable information (PII). 
The integrity and + availability of contract data is required for clearing payment disputes. The filesystem is also required to be available + for storing new contracts of freshly generated customers. + multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + data_assets_stored: # sequence of IDs to reference + - customer-contracts + - contract-summaries + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - file + communication_links: + + + Customer Contract Database: + id: sql-database + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: The database behind the ERP system + type: datastore # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: database # values: see help + tags: + - linux + - mysql + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: data-with-symmetric-shared-key # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: mission-critical # values: archive, operational, important, critical, mission-critical + availability: mission-critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The ERP system's database contains business-relevant sensitive data for the leasing processes and eventually also + for other Company XYZ internal processes. 
+ multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - db-dumps + data_assets_stored: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - internal-business-data + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + + + External Development Client: + id: external-dev-client + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: External developer client + type: external-entity # values: external-entity, process, datastore + usage: devops # values: business, devops + used_as_client_by_human: true + out_of_scope: true + justification_out_of_scope: Owned and managed by external developers + size: system # values: system, service, application, component + technology: devops-client # values: see help + tags: + - linux + internet: true + machine: physical # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: External Developers + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The clients used by external developers to create parts of the application code. 
+ multi_tenant: true + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_stored: # sequence of IDs to reference + - client-application-code + - server-application-code + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - file + communication_links: + Git-Repo Code Write Access: + target: git-repo + description: Link to the Git repo + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_received: # sequence of IDs to reference + - client-application-code + - server-application-code + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + Git-Repo Web-UI Access: + target: git-repo + description: Link to the Git repo + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_received: # sequence of IDs to reference + - client-application-code + - server-application-code + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + Jenkins Web-UI Access: + target: jenkins-buildserver + description: Link to the Jenkins build server + protocol: https # values: see help + authentication: credentials # 
values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - build-job-config + data_assets_received: # sequence of IDs to reference + - build-job-config + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + + + Git Repository: + id: git-repo + #diagram_tweak_order: 99 # affects left to right positioning (only within a trust boundary) + description: Git repository server + type: process # values: external-entity, process, datastore + usage: devops # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: system # values: system, service, application, component + technology: sourcecode-repository # values: see help + tags: + - linux + - git + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: important # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The code repo pipeline might contain sensitive configuration values like backend credentials, certificates etc. and is + therefore rated as confidential. 
+ multi_tenant: true + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_stored: # sequence of IDs to reference + - client-application-code + - server-application-code + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - file + communication_links: + + + Jenkins Buildserver: + id: jenkins-buildserver + #diagram_tweak_order: 99 # affects left to right positioning (only within a trust boundary) + description: Jenkins buildserver + type: process # values: external-entity, process, datastore + usage: devops # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: system # values: system, service, application, component + technology: build-pipeline # values: see help + tags: + - linux + - jenkins + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The build pipeline might contain sensitive configuration values like backend credentials, certificates etc. and is + therefore rated as confidential. The integrity and availability is rated as critical and important due to the risk + of reputation damage and application update unavailability when the build pipeline is compromised. 
+ multi_tenant: true + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - build-job-config + - client-application-code + - server-application-code + - marketing-material + data_assets_stored: # sequence of IDs to reference + - build-job-config + - client-application-code + - server-application-code + - marketing-material + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - file + - serialization + communication_links: + Git Repo Code Read Access: + target: git-repo + description: Link to the Git repository server + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: true + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + data_assets_received: # sequence of IDs to reference + - client-application-code + - server-application-code + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + Application Deployment: + target: apache-webserver + description: Link to the Apache webserver + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_received: # sequence of IDs to reference + CMS Updates: + target: marketing-cms + description: Link to the CMS + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, 
client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - marketing-material + data_assets_received: # sequence of IDs to reference + + + + + +trust_boundaries: + + + Web DMZ: + id: web-dmz + description: Web DMZ + type: network-cloud-security-group # values: see help + tags: + technical_assets_inside: # sequence of IDs to reference + - apache-webserver + - marketing-cms + trust_boundaries_nested: # sequence of IDs to reference + + + ERP DMZ: + id: erp-dmz + description: ERP DMZ + type: network-cloud-security-group # values: see help + tags: + - some-erp + technical_assets_inside: # sequence of IDs to reference + - erp-system + - contract-fileserver + - sql-database + trust_boundaries_nested: # sequence of IDs to reference + + + Application Network: + id: application-network + description: Application Network + type: network-cloud-provider # values: see help + tags: + - aws + technical_assets_inside: # sequence of IDs to reference + - load-balancer + trust_boundaries_nested: # sequence of IDs to reference + - web-dmz + - erp-dmz + - auth-env + + + Auth Handling Environment: + id: auth-env + description: Auth Handling Environment + type: execution-environment # values: see help + tags: + technical_assets_inside: # sequence of IDs to reference + - identity-provider + - ldap-auth-server + trust_boundaries_nested: # sequence of IDs to reference + + + Dev Network: + id: dev-network + description: Development Network + type: network-on-prem # values: see help + tags: + technical_assets_inside: # sequence of IDs to reference + - jenkins-buildserver + - git-repo + - backend-admin-client + - backoffice-client + trust_boundaries_nested: # sequence of IDs to reference + + + + + +shared_runtimes: + + + WebApp and Backoffice Virtualization: + id: 
webapp-virtualization + description: WebApp Virtualization + tags: + - vmware + technical_assets_running: # sequence of IDs to reference + - apache-webserver + - marketing-cms + - erp-system + - contract-fileserver + - sql-database + + + + +individual_risk_categories: # used for adding custom manually identified risks + + + Some Individual Risk Example: + id: something-strange + description: Some text describing the risk category... + impact: Some text describing the impact... + asvs: V0 - Something Strange + cheat_sheet: https://example.com + action: Some text describing the action... + mitigation: Some text describing the mitigation... + check: Check if XYZ... + function: business-side # values: business-side, architecture, development, operations + stride: repudiation # values: spoofing, tampering, repudiation, information-disclosure, denial-of-service, elevation-of-privilege + detection_logic: Some text describing the detection logic... + risk_assessment: Some text describing the risk assessment... + false_positives: Some text describing the most common types of false positives... 
+ model_failure_possible_reason: false + cwe: 693 + risks_identified: + Example Individual Risk at Database: + severity: critical # values: low, medium, elevated, high, critical + exploitation_likelihood: likely # values: unlikely, likely, very-likely, frequent + exploitation_impact: medium # values: low, medium, high, very-high + data_breach_probability: probable # values: improbable, possible, probable + data_breach_technical_assets: # list of technical asset IDs which might have data breach + - sql-database + most_relevant_data_asset: + most_relevant_technical_asset: sql-database + most_relevant_communication_link: + most_relevant_trust_boundary: + most_relevant_shared_runtime: + Example Individual Risk at Contract Filesystem: + severity: medium # values: low, medium, elevated, high, critical + exploitation_likelihood: frequent # values: unlikely, likely, very-likely, frequent + exploitation_impact: very-high # values: low, medium, high, very-high + data_breach_probability: improbable # values: improbable, possible, probable + data_breach_technical_assets: # list of technical asset IDs which might have data breach + most_relevant_data_asset: + most_relevant_technical_asset: contract-fileserver + most_relevant_communication_link: + most_relevant_trust_boundary: + most_relevant_shared_runtime: + + + +# NOTE: +# For risk tracking each risk-id needs to be defined (the string with the @ sign in it). These unique risk IDs +# are visible in the PDF report (the small grey string under each risk), the Excel (column "ID"), as well as the JSON responses. +# Some risk IDs have only one @ sign in them, while others multiple. The idea is to allow for unique but still speaking IDs. +# Therefore each risk instance creates its individual ID by taking all affected elements causing the risk to be within an @-delimited part. +# Using wildcards (the * sign) for parts delimited by @ signs allows to handle groups of certain risks at once. 
Best is to lookup the IDs +# to use in the created Excel file. Alternatively a model macro "seed-risk-tracking" is available that helps in initially +# seeding the risk tracking part here based on already identified and not yet handled risks. +risk_tracking: + + untrusted-deserialization@erp-system: # wildcards "*" between the @ characters are possible + status: accepted # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: Risk accepted as tolerable + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe + + ldap-injection@*@ldap-auth-server@*: # wildcards "*" between the @ characters are possible + status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: The hardening measures were implemented and checked + ticket: XYZ-5678 + date: 2020-01-05 + checked_by: John Doe + + unencrypted-asset@*: # wildcards "*" between the @ characters are possible + status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: The hardening measures were implemented and checked + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe + + missing-authentication-second-factor@*@*@*: # wildcards "*" between the @ characters are possible + status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: The hardening measures were implemented and checked + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe + + missing-hardening@*: # wildcards "*" between the @ characters are possible + status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: The hardening measures were implemented and checked + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe + + dos-risky-access-across-trust-boundary@*@*@*: # wildcards "*" between the @ characters are possible + status: in-progress # values: 
unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: The hardening measures are being implemented and checked + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe + + + +#diagram_tweak_edge_layout: spline # values: spline, polyline, false, ortho (this suppresses edge labels), curved (this suppresses edge labels and can cause problems with edges) + +#diagram_tweak_suppress_edge_labels: true +#diagram_tweak_layout_left_to_right: true +#diagram_tweak_nodesep: 2 +#diagram_tweak_ranksep: 2 +#diagram_tweak_invisible_connections_between_assets: +# - tech-asset-source-id-A:tech-asset-target-id-B +# - tech-asset-source-id-C:tech-asset-target-id-D +#diagram_tweak_same_rank_assets: +# - tech-asset-source-id-E:tech-asset-target-id-F:tech-asset-source-id-G:tech-asset-target-id-H +# - tech-asset-source-id-M:tech-asset-target-id-N:tech-asset-source-id-O diff --git a/internal/threagile/context.go b/internal/threagile/context.go index f1e9a997..50569f14 100644 --- a/internal/threagile/context.go +++ b/internal/threagile/context.go @@ -1,55 +1,22 @@ package threagile import ( - "crypto/sha256" - "encoding/hex" "flag" - "fmt" // TODO: no fmt.Println here - "io" + "fmt" "log" "os" - "path/filepath" "runtime" "strconv" "strings" "github.com/threagile/threagile/pkg/common" "github.com/threagile/threagile/pkg/docs" - "github.com/threagile/threagile/pkg/input" "github.com/threagile/threagile/pkg/macros" "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/report" - "github.com/threagile/threagile/pkg/run" - "github.com/threagile/threagile/pkg/security/risks" - "github.com/threagile/threagile/pkg/security/types" ) -type GenerateCommands struct { - DataFlowDiagram bool - DataAssetDiagram bool - RisksJSON bool - TechnicalAssetsJSON bool - StatsJSON bool - RisksExcel bool - TagsExcel bool - ReportPDF bool -} - -func (c *GenerateCommands) Defaults() *GenerateCommands { - *c = GenerateCommands{ - DataFlowDiagram: true, - 
DataAssetDiagram: true, - RisksJSON: true, - TechnicalAssetsJSON: true, - StatsJSON: true, - RisksExcel: true, - TagsExcel: true, - ReportPDF: true, - } - return c -} - -func DoIt(config *common.Config, commands *GenerateCommands) { +func DoIt(config *common.Config, commands *report.GenerateCommands) { progressReporter := common.DefaultProgressReporter{Verbose: config.Verbose} defer func() { var err error @@ -61,177 +28,24 @@ func DoIt(config *common.Config, commands *GenerateCommands) { } }() - if len(config.ExecuteModelMacro) > 0 { - fmt.Println(docs.Logo + "\n\n" + docs.VersionText) - } - progressReporter.Info("Writing into output directory:", config.OutputFolder) - progressReporter.Info("Parsing model:", config.InputFile) - - builtinRiskRules := make(map[string]types.RiskRule) - for _, rule := range risks.GetBuiltInRiskRules() { - builtinRiskRules[rule.Category().Id] = rule - } - customRiskRules := types.LoadCustomRiskRules(config.RiskRulesPlugins, progressReporter) - - modelInput := *new(input.ModelInput).Defaults() - loadError := modelInput.Load(config.InputFile) - if loadError != nil { - log.Fatal("Unable to load model yaml: ", loadError) - } - - parsedModel, parseError := model.ParseModel(&modelInput, builtinRiskRules, customRiskRules) - if parseError != nil { - log.Fatal("Unable to parse model yaml: ", parseError) - } - introTextRAA := applyRAA(parsedModel, config.BinFolder, config.RAAPlugin, progressReporter) - - parsedModel.ApplyRiskGeneration(customRiskRules, builtinRiskRules, - config.SkipRiskRules, progressReporter) - err := parsedModel.ApplyWildcardRiskTrackingEvaluation(config.IgnoreOrphanedRiskTracking, progressReporter) - if err != nil { - // TODO: do not panic and gracefully handle the error - panic(err) - } - - err = parsedModel.CheckRiskTracking(config.IgnoreOrphanedRiskTracking, progressReporter) + r, err := model.ReadAndAnalyzeModel(*config, progressReporter) if err != nil { - // TODO: do not panic and gracefully handle the error - panic(err) 
+ log.Fatal(err) + return } if len(config.ExecuteModelMacro) > 0 { - err := macros.ExecuteModelMacro(&modelInput, config.InputFile, parsedModel, config.ExecuteModelMacro) + err := macros.ExecuteModelMacro(r.ModelInput, config.InputFile, r.ParsedModel, config.ExecuteModelMacro) if err != nil { log.Fatal("Unable to execute model macro: ", err) } return } - generateDataFlowDiagram := commands.DataFlowDiagram - generateDataAssetsDiagram := commands.DataAssetDiagram - if commands.ReportPDF { // as the PDF report includes both diagrams - generateDataFlowDiagram = true - generateDataAssetsDiagram = true - } - - diagramDPI := config.DiagramDPI - if diagramDPI < common.MinGraphvizDPI { - diagramDPI = common.MinGraphvizDPI - } else if diagramDPI > common.MaxGraphvizDPI { - diagramDPI = common.MaxGraphvizDPI - } - // Data-flow Diagram rendering - if generateDataFlowDiagram { - gvFile := filepath.Join(config.OutputFolder, config.DataFlowDiagramFilenameDOT) - if !config.KeepDiagramSourceFiles { - tmpFileGV, err := os.CreateTemp(config.TempFolder, config.DataFlowDiagramFilenameDOT) - checkErr(err) - gvFile = tmpFileGV.Name() - defer func() { _ = os.Remove(gvFile) }() - } - dotFile := report.WriteDataFlowDiagramGraphvizDOT(parsedModel, gvFile, diagramDPI, config.AddModelTitle, progressReporter) - - err := report.GenerateDataFlowDiagramGraphvizImage(dotFile, config.OutputFolder, - config.TempFolder, config.BinFolder, config.DataFlowDiagramFilenamePNG, progressReporter) - if err != nil { - fmt.Println(err) - } - } - // Data Asset Diagram rendering - if generateDataAssetsDiagram { - gvFile := filepath.Join(config.OutputFolder, config.DataAssetDiagramFilenameDOT) - if !config.KeepDiagramSourceFiles { - tmpFile, err := os.CreateTemp(config.TempFolder, config.DataAssetDiagramFilenameDOT) - checkErr(err) - gvFile = tmpFile.Name() - defer func() { _ = os.Remove(gvFile) }() - } - dotFile := report.WriteDataAssetDiagramGraphvizDOT(parsedModel, gvFile, diagramDPI, progressReporter) - err := 
report.GenerateDataAssetDiagramGraphvizImage(dotFile, config.OutputFolder, - config.TempFolder, config.BinFolder, config.DataAssetDiagramFilenamePNG, progressReporter) - if err != nil { - fmt.Println(err) - } - } - - // risks as risks json - if commands.RisksJSON { - progressReporter.Info("Writing risks json") - report.WriteRisksJSON(parsedModel, filepath.Join(config.OutputFolder, config.JsonRisksFilename)) - } - - // technical assets json - if commands.TechnicalAssetsJSON { - progressReporter.Info("Writing technical assets json") - report.WriteTechnicalAssetsJSON(parsedModel, filepath.Join(config.OutputFolder, config.JsonTechnicalAssetsFilename)) - } - - // risks as risks json - if commands.StatsJSON { - progressReporter.Info("Writing stats json") - report.WriteStatsJSON(parsedModel, filepath.Join(config.OutputFolder, config.JsonStatsFilename)) - } - - // risks Excel - if commands.RisksExcel { - progressReporter.Info("Writing risks excel") - report.WriteRisksExcelToFile(parsedModel, filepath.Join(config.OutputFolder, config.ExcelRisksFilename)) - } - - // tags Excel - if commands.TagsExcel { - progressReporter.Info("Writing tags excel") - report.WriteTagsExcelToFile(parsedModel, filepath.Join(config.OutputFolder, config.ExcelTagsFilename)) - } - - if commands.ReportPDF { - // hash the YAML input file - f, err := os.Open(config.InputFile) - checkErr(err) - defer func() { _ = f.Close() }() - hasher := sha256.New() - if _, err := io.Copy(hasher, f); err != nil { - panic(err) - } - modelHash := hex.EncodeToString(hasher.Sum(nil)) - // report PDF - progressReporter.Info("Writing report pdf") - report.WriteReportPDF(filepath.Join(config.OutputFolder, config.ReportFilename), - filepath.Join(config.AppFolder, config.TemplateFilename), - filepath.Join(config.OutputFolder, config.DataFlowDiagramFilenamePNG), - filepath.Join(config.OutputFolder, config.DataAssetDiagramFilenamePNG), - config.InputFile, - config.SkipRiskRules, - config.BuildTimestamp, - modelHash, - 
introTextRAA, - customRiskRules, - config.TempFolder, - parsedModel) - } -} - -func applyRAA(parsedModel *types.ParsedModel, binFolder, raaPlugin string, progressReporter common.DefaultProgressReporter) string { - progressReporter.Info("Applying RAA calculation:", raaPlugin) - - runner, loadError := new(run.Runner).Load(filepath.Join(binFolder, raaPlugin)) - if loadError != nil { - progressReporter.Warn(fmt.Sprintf("WARNING: raa %q not loaded: %v\n", raaPlugin, loadError)) - return "" - } - - runError := runner.Run(parsedModel, parsedModel) - if runError != nil { - progressReporter.Warn(fmt.Sprintf("WARNING: raa %q not applied: %v\n", raaPlugin, runError)) - return "" - } - - return runner.ErrorOutput -} - -func checkErr(err error) { + err = report.Generate(config, r, commands, progressReporter) if err != nil { - panic(err) + log.Fatal(err) + return } } @@ -264,7 +78,7 @@ func expandPath(path string) string { return path } -func ParseCommandlineArgs(buildTimestamp string) (common.Config, GenerateCommands) { +func ParseCommandlineArgs(buildTimestamp string) (common.Config, report.GenerateCommands) { configFile := flag.String("config", "", "config file") config := new(common.Config).Defaults(buildTimestamp) configError := config.Load(*configFile) @@ -295,7 +109,7 @@ func ParseCommandlineArgs(buildTimestamp string) (common.Config, GenerateCommand config.RiskRulesPlugins = strings.Split(*riskRulesPlugins, ",") // commands - commands := new(GenerateCommands).Defaults() + commands := new(report.GenerateCommands).Defaults() flag.BoolVar(&commands.DataFlowDiagram, "generate-data-flow-diagram", true, "generate data-flow diagram") flag.BoolVar(&commands.DataAssetDiagram, "generate-data-asset-diagram", true, "generate data asset diagram") flag.BoolVar(&commands.RisksJSON, "generate-risks-json", true, "generate risks json") diff --git a/internal/threagile/root.go b/internal/threagile/root.go index 86f832d6..2c4e788a 100644 --- a/internal/threagile/root.go +++ 
b/internal/threagile/root.go @@ -12,6 +12,7 @@ import ( "github.com/threagile/threagile/pkg/common" "github.com/threagile/threagile/pkg/docs" + "github.com/threagile/threagile/pkg/report" "github.com/threagile/threagile/pkg/server" ) @@ -97,8 +98,8 @@ func readConfig(buildTimestamp string) *common.Config { return cfg } -func readCommands() *GenerateCommands { - commands := new(GenerateCommands).Defaults() +func readCommands() *report.GenerateCommands { + commands := new(report.GenerateCommands).Defaults() commands.DataFlowDiagram = *generateDataFlowDiagramFlag commands.DataAssetDiagram = *generateDataAssetDiagramFlag commands.RisksJSON = *generateRisksJSONFlag diff --git a/pkg/model/read.go b/pkg/model/read.go new file mode 100644 index 00000000..aa16d5de --- /dev/null +++ b/pkg/model/read.go @@ -0,0 +1,89 @@ +package model + +import ( + "fmt" + "path/filepath" + + "github.com/threagile/threagile/pkg/common" + "github.com/threagile/threagile/pkg/input" + "github.com/threagile/threagile/pkg/run" + "github.com/threagile/threagile/pkg/security/risks" + "github.com/threagile/threagile/pkg/security/types" +) + +type progressReporter interface { + Info(a ...any) + Warn(a ...any) + Error(a ...any) +} + +type ReadResult struct { + ModelInput *input.ModelInput + ParsedModel *types.ParsedModel + IntroTextRAA string + BuiltinRiskRules map[string]types.RiskRule + CustomRiskRules map[string]*types.CustomRisk +} + +// TODO: consider about splitting this function into smaller ones for better reusability +func ReadAndAnalyzeModel(config common.Config, progressReporter progressReporter) (*ReadResult, error) { + progressReporter.Info("Writing into output directory:", config.OutputFolder) + progressReporter.Info("Parsing model:", config.InputFile) + + builtinRiskRules := make(map[string]types.RiskRule) + for _, rule := range risks.GetBuiltInRiskRules() { + builtinRiskRules[rule.Category().Id] = rule + } + customRiskRules := types.LoadCustomRiskRules(config.RiskRulesPlugins, 
progressReporter) + + modelInput := new(input.ModelInput).Defaults() + loadError := modelInput.Load(config.InputFile) + if loadError != nil { + return nil, fmt.Errorf("unable to load model yaml: %v", loadError) + } + + parsedModel, parseError := ParseModel(modelInput, builtinRiskRules, customRiskRules) + if parseError != nil { + return nil, fmt.Errorf("unable to parse model yaml: %v", parseError) + } + + introTextRAA := applyRAA(parsedModel, config.BinFolder, config.RAAPlugin, progressReporter) + + parsedModel.ApplyRiskGeneration(customRiskRules, builtinRiskRules, + config.SkipRiskRules, progressReporter) + err := parsedModel.ApplyWildcardRiskTrackingEvaluation(config.IgnoreOrphanedRiskTracking, progressReporter) + if err != nil { + return nil, fmt.Errorf("unable to apply wildcard risk tracking evaluation: %v", err) + } + + err = parsedModel.CheckRiskTracking(config.IgnoreOrphanedRiskTracking, progressReporter) + if err != nil { + return nil, fmt.Errorf("unable to check risk tracking: %v", err) + } + + return &ReadResult{ + ModelInput: modelInput, + ParsedModel: parsedModel, + IntroTextRAA: introTextRAA, + BuiltinRiskRules: builtinRiskRules, + CustomRiskRules: customRiskRules, + }, nil +} + +func applyRAA(parsedModel *types.ParsedModel, binFolder, raaPlugin string, progressReporter progressReporter) string { + progressReporter.Info("Applying RAA calculation:", raaPlugin) + + runner, loadError := new(run.Runner).Load(filepath.Join(binFolder, raaPlugin)) + if loadError != nil { + progressReporter.Warn(fmt.Sprintf("WARNING: raa %q not loaded: %v\n", raaPlugin, loadError)) + return "" + } + + runError := runner.Run(parsedModel, parsedModel) + if runError != nil { + progressReporter.Warn(fmt.Sprintf("WARNING: raa %q not applied: %v\n", raaPlugin, runError)) + return "" + } + + return runner.ErrorOutput +} diff --git a/pkg/report/generate.go b/pkg/report/generate.go new file mode 100644 index 00000000..7b5c176e --- /dev/null +++ b/pkg/report/generate.go @@ -0,0 +1,156 @@ 
+package report + +import ( + "crypto/sha256" + "encoding/hex" + "io" + "os" + "path/filepath" + + "github.com/threagile/threagile/pkg/common" + "github.com/threagile/threagile/pkg/model" +) + +type GenerateCommands struct { + DataFlowDiagram bool + DataAssetDiagram bool + RisksJSON bool + TechnicalAssetsJSON bool + StatsJSON bool + RisksExcel bool + TagsExcel bool + ReportPDF bool +} + +func (c *GenerateCommands) Defaults() *GenerateCommands { + *c = GenerateCommands{ + DataFlowDiagram: true, + DataAssetDiagram: true, + RisksJSON: true, + TechnicalAssetsJSON: true, + StatsJSON: true, + RisksExcel: true, + TagsExcel: true, + ReportPDF: true, + } + return c +} + +func Generate(config *common.Config, readResult *model.ReadResult, commands *GenerateCommands, progressReporter progressReporter) error { + generateDataFlowDiagram := commands.DataFlowDiagram + generateDataAssetsDiagram := commands.DataAssetDiagram + if commands.ReportPDF { // as the PDF report includes both diagrams + generateDataFlowDiagram = true + generateDataAssetsDiagram = true + } + + diagramDPI := config.DiagramDPI + if diagramDPI < common.MinGraphvizDPI { + diagramDPI = common.MinGraphvizDPI + } else if diagramDPI > common.MaxGraphvizDPI { + diagramDPI = common.MaxGraphvizDPI + } + // Data-flow Diagram rendering + if generateDataFlowDiagram { + gvFile := filepath.Join(config.OutputFolder, config.DataFlowDiagramFilenameDOT) + if !config.KeepDiagramSourceFiles { + tmpFileGV, err := os.CreateTemp(config.TempFolder, config.DataFlowDiagramFilenameDOT) + if err != nil { + return err + } + gvFile = tmpFileGV.Name() + defer func() { _ = os.Remove(gvFile) }() + } + dotFile := WriteDataFlowDiagramGraphvizDOT(readResult.ParsedModel, gvFile, diagramDPI, config.AddModelTitle, progressReporter) + + err := GenerateDataFlowDiagramGraphvizImage(dotFile, config.OutputFolder, + config.TempFolder, config.BinFolder, config.DataFlowDiagramFilenamePNG, progressReporter) + if err != nil { + progressReporter.Warn(err) + } 
+ } + // Data Asset Diagram rendering + if generateDataAssetsDiagram { + gvFile := filepath.Join(config.OutputFolder, config.DataAssetDiagramFilenameDOT) + if !config.KeepDiagramSourceFiles { + tmpFile, err := os.CreateTemp(config.TempFolder, config.DataAssetDiagramFilenameDOT) + if err != nil { + return err + } + gvFile = tmpFile.Name() + defer func() { _ = os.Remove(gvFile) }() + } + dotFile := WriteDataAssetDiagramGraphvizDOT(readResult.ParsedModel, gvFile, diagramDPI, progressReporter) + err := GenerateDataAssetDiagramGraphvizImage(dotFile, config.OutputFolder, + config.TempFolder, config.BinFolder, config.DataAssetDiagramFilenamePNG, progressReporter) + if err != nil { + progressReporter.Warn(err) + } + } + + // risks as risks json + if commands.RisksJSON { + progressReporter.Info("Writing risks json") + WriteRisksJSON(readResult.ParsedModel, filepath.Join(config.OutputFolder, config.JsonRisksFilename)) + } + + // technical assets json + if commands.TechnicalAssetsJSON { + progressReporter.Info("Writing technical assets json") + WriteTechnicalAssetsJSON(readResult.ParsedModel, filepath.Join(config.OutputFolder, config.JsonTechnicalAssetsFilename)) + } + + // risks as risks json + if commands.StatsJSON { + progressReporter.Info("Writing stats json") + WriteStatsJSON(readResult.ParsedModel, filepath.Join(config.OutputFolder, config.JsonStatsFilename)) + } + + // risks Excel + if commands.RisksExcel { + progressReporter.Info("Writing risks excel") + WriteRisksExcelToFile(readResult.ParsedModel, filepath.Join(config.OutputFolder, config.ExcelRisksFilename)) + } + + // tags Excel + if commands.TagsExcel { + progressReporter.Info("Writing tags excel") + WriteTagsExcelToFile(readResult.ParsedModel, filepath.Join(config.OutputFolder, config.ExcelTagsFilename)) + } + + if commands.ReportPDF { + // hash the YAML input file + f, err := os.Open(config.InputFile) + if err != nil { + return err + } + defer func() { _ = f.Close() }() + hasher := sha256.New() + if _, err := 
io.Copy(hasher, f); err != nil { + return err + } + modelHash := hex.EncodeToString(hasher.Sum(nil)) + // report PDF + progressReporter.Info("Writing report pdf") + WriteReportPDF(filepath.Join(config.OutputFolder, config.ReportFilename), + filepath.Join(config.AppFolder, config.TemplateFilename), + filepath.Join(config.OutputFolder, config.DataFlowDiagramFilenamePNG), + filepath.Join(config.OutputFolder, config.DataAssetDiagramFilenamePNG), + config.InputFile, + config.SkipRiskRules, + config.BuildTimestamp, + modelHash, + readResult.IntroTextRAA, + readResult.CustomRiskRules, + config.TempFolder, + readResult.ParsedModel) + } + + return nil +} + +type progressReporter interface { + Info(a ...any) + Warn(a ...any) + Error(a ...any) +} diff --git a/pkg/report/graphviz.go b/pkg/report/graphviz.go index db65d791..d40e0ca5 100644 --- a/pkg/report/graphviz.go +++ b/pkg/report/graphviz.go @@ -572,7 +572,3 @@ func hash(s string) string { func encode(value string) string { return strings.ReplaceAll(value, "&", "&") } - -type progressReporter interface { - Info(a ...any) -} From a45ab19ebd86f6ba67ec0311fce5e31129216712 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Fri, 5 Jan 2024 17:35:45 +0000 Subject: [PATCH 40/68] Remove panic and global variables --- cmd/threagile/main.go | 10 - internal/threagile/context.go | 137 - internal/threagile/macros.go | 16 +- internal/threagile/root.go | 68 +- pkg/report/excel.go | 122 +- pkg/report/generate.go | 56 +- pkg/report/graphviz.go | 78 +- pkg/report/json.go | 25 +- pkg/report/report.go | 6523 +++++++++++++++++---------------- 9 files changed, 3558 insertions(+), 3477 deletions(-) delete mode 100644 internal/threagile/context.go diff --git a/cmd/threagile/main.go b/cmd/threagile/main.go index 40eea37a..da2d2e53 100644 --- a/cmd/threagile/main.go +++ b/cmd/threagile/main.go @@ -9,15 +9,5 @@ const ( ) func main() { - // TODO: uncomment below as soon as refactoring is finished - everything will go through rootCmd.Execute 
threagile.Execute() - - // TODO: remove below as soon as refactoring is finished - everything will go through rootCmd.Execute - // for now it's fine to have as frequently uncommented to see the actual behaviour - // config, commands := threagile.ParseCommandlineArgs(buildTimestamp) - // if config.ServerPort > 0 { - // server.RunServer(&config) - // } else { - // threagile.DoIt(&config, &commands) - // } } diff --git a/internal/threagile/context.go b/internal/threagile/context.go deleted file mode 100644 index 50569f14..00000000 --- a/internal/threagile/context.go +++ /dev/null @@ -1,137 +0,0 @@ -package threagile - -import ( - "flag" - "fmt" - "log" - "os" - "runtime" - "strconv" - "strings" - - "github.com/threagile/threagile/pkg/common" - "github.com/threagile/threagile/pkg/docs" - "github.com/threagile/threagile/pkg/macros" - "github.com/threagile/threagile/pkg/model" - "github.com/threagile/threagile/pkg/report" -) - -func DoIt(config *common.Config, commands *report.GenerateCommands) { - progressReporter := common.DefaultProgressReporter{Verbose: config.Verbose} - defer func() { - var err error - if r := recover(); r != nil { - err = r.(error) - progressReporter.Info("ERROR: " + err.Error()) - _, _ = os.Stderr.WriteString(err.Error() + "\n") - os.Exit(2) - } - }() - - r, err := model.ReadAndAnalyzeModel(*config, progressReporter) - if err != nil { - log.Fatal(err) - return - } - - if len(config.ExecuteModelMacro) > 0 { - err := macros.ExecuteModelMacro(r.ModelInput, config.InputFile, r.ParsedModel, config.ExecuteModelMacro) - if err != nil { - log.Fatal("Unable to execute model macro: ", err) - } - return - } - - err = report.Generate(config, r, commands, progressReporter) - if err != nil { - log.Fatal(err) - return - } -} - -// TODO: remove from here as soon as moved to cobra, here is only for a backward compatibility -// this file supposed to be only about the logic -func userHomeDir() string { - switch runtime.GOOS { - case "windows": - home := 
os.Getenv("HOMEDRIVE") + os.Getenv("HOMEPATH") - if home == "" { - home = os.Getenv("USERPROFILE") - } - return home - - default: - return os.Getenv("HOME") - } -} - -func expandPath(path string) string { - home := userHomeDir() - if strings.HasPrefix(path, "~") { - path = strings.Replace(path, "~", home, 1) - } - - if strings.HasPrefix(path, "$HOME") { - path = strings.Replace(path, "$HOME", home, -1) - } - - return path -} - -func ParseCommandlineArgs(buildTimestamp string) (common.Config, report.GenerateCommands) { - configFile := flag.String("config", "", "config file") - config := new(common.Config).Defaults(buildTimestamp) - configError := config.Load(*configFile) - if configError != nil { - fmt.Printf("WARNING: failed to load config file %q: %v\n", *configFile, configError) - } - - // folders - flag.StringVar(&config.AppFolder, "app-dir", common.AppDir, "app folder (default: "+common.AppDir+")") - flag.StringVar(&config.ServerFolder, "server-dir", common.DataDir, "base folder for server mode (default: "+common.DataDir+")") - flag.StringVar(&config.TempFolder, "temp-dir", common.TempDir, "temporary folder location") - flag.StringVar(&config.BinFolder, "bin-dir", common.BinDir, "binary folder location") - flag.StringVar(&config.OutputFolder, "output", ".", "output directory") - - // files - flag.StringVar(&config.InputFile, "model", common.InputFile, "input model yaml file") - flag.StringVar(&config.RAAPlugin, "raa-run", "raa_calc", "RAA calculation run file name") - - // flags / parameters - flag.BoolVar(&config.Verbose, "verbose", false, "verbose output") - flag.IntVar(&config.DiagramDPI, "diagram-dpi", config.DiagramDPI, "DPI used to render: maximum is "+strconv.Itoa(config.MaxGraphvizDPI)+"") - flag.StringVar(&config.SkipRiskRules, "skip-risk-rules", "", "comma-separated list of risk rules (by their ID) to skip") - flag.BoolVar(&config.IgnoreOrphanedRiskTracking, "ignore-orphaned-risk-tracking", false, "ignore orphaned risk tracking (just log them) not 
matching a concrete risk") - flag.IntVar(&config.ServerPort, "server", 0, "start a server (instead of commandline execution) on the given port") - flag.StringVar(&config.ExecuteModelMacro, "execute-model-macro", "", "Execute model macro (by ID)") - flag.StringVar(&config.TemplateFilename, "background", "background.pdf", "background pdf file") - riskRulesPlugins := flag.String("custom-risk-rules-plugins", "", "comma-separated list of plugins file names with custom risk rules to load") - config.RiskRulesPlugins = strings.Split(*riskRulesPlugins, ",") - - // commands - commands := new(report.GenerateCommands).Defaults() - flag.BoolVar(&commands.DataFlowDiagram, "generate-data-flow-diagram", true, "generate data-flow diagram") - flag.BoolVar(&commands.DataAssetDiagram, "generate-data-asset-diagram", true, "generate data asset diagram") - flag.BoolVar(&commands.RisksJSON, "generate-risks-json", true, "generate risks json") - flag.BoolVar(&commands.StatsJSON, "generate-stats-json", true, "generate stats json") - flag.BoolVar(&commands.TechnicalAssetsJSON, "generate-technical-assets-json", true, "generate technical assets json") - flag.BoolVar(&commands.RisksExcel, "generate-risks-excel", true, "generate risks excel") - flag.BoolVar(&commands.TagsExcel, "generate-tags-excel", true, "generate tags excel") - flag.BoolVar(&commands.ReportPDF, "generate-report-pdf", true, "generate report pdf, including diagrams") - - flag.Usage = func() { - fmt.Println(docs.Logo + "\n\n" + docs.VersionText) - _, _ = fmt.Fprintf(os.Stderr, "Usage: threagile [options]") - fmt.Println() - } - flag.Parse() - - config.InputFile = expandPath(config.InputFile) - config.AppFolder = expandPath(config.AppFolder) - config.ServerFolder = expandPath(config.ServerFolder) - config.TempFolder = expandPath(config.TempFolder) - config.BinFolder = expandPath(config.BinFolder) - config.OutputFolder = expandPath(config.OutputFolder) - - return *config, *commands -} diff --git a/internal/threagile/macros.go 
b/internal/threagile/macros.go index 533f4430..6bf591d1 100644 --- a/internal/threagile/macros.go +++ b/internal/threagile/macros.go @@ -8,8 +8,10 @@ import ( "github.com/spf13/cobra" + "github.com/threagile/threagile/pkg/common" "github.com/threagile/threagile/pkg/docs" "github.com/threagile/threagile/pkg/macros" + "github.com/threagile/threagile/pkg/model" ) var listMacrosCmd = &cobra.Command{ @@ -69,7 +71,19 @@ var executeModelMacrosCmd = &cobra.Command{ Use: "execute-model-macro", Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - fmt.Println("execute-model-macro called with args:", args) + cfg := readConfig("buildTimestamp") + progressReporter := common.DefaultProgressReporter{Verbose: cfg.Verbose} + + r, err := model.ReadAndAnalyzeModel(*cfg, progressReporter) + if err != nil { + return fmt.Errorf("unable to read and analyze model: %v", err) + } + + macrosId := args[0] + err = macros.ExecuteModelMacro(r.ModelInput, cfg.InputFile, r.ParsedModel, macrosId) + if err != nil { + return fmt.Errorf("unable to execute model macro: %v", err) + } return nil }, } diff --git a/internal/threagile/root.go b/internal/threagile/root.go index 2c4e788a..98b3ead1 100644 --- a/internal/threagile/root.go +++ b/internal/threagile/root.go @@ -6,12 +6,14 @@ package threagile import ( "fmt" "os" + "runtime" "strings" "github.com/spf13/cobra" "github.com/threagile/threagile/pkg/common" "github.com/threagile/threagile/pkg/docs" + "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/report" "github.com/threagile/threagile/pkg/server" ) @@ -21,7 +23,22 @@ var rootCmd = &cobra.Command{ Short: "\n" + docs.Logo, Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\n" + docs.Examples, RunE: func(cmd *cobra.Command, args []string) error { - DoIt(readConfig("buildTimestamp"), readCommands()) + cfg := readConfig("buildTimestamp") + commands := readCommands() + progressReporter := common.DefaultProgressReporter{Verbose: cfg.Verbose} + 
+ r, err := model.ReadAndAnalyzeModel(*cfg, progressReporter) + if err != nil { + cmd.Println("Failed to read and analyze model") + return err + } + + err = report.Generate(cfg, r, commands, progressReporter) + if err != nil { + cmd.Println("Failed to generate reports") + cmd.PrintErr(err) + return err + } return nil }, } @@ -75,6 +92,19 @@ func init() { rootCmd.AddCommand(serverCmd) } +func readCommands() *report.GenerateCommands { + commands := new(report.GenerateCommands).Defaults() + commands.DataFlowDiagram = *generateDataFlowDiagramFlag + commands.DataAssetDiagram = *generateDataAssetDiagramFlag + commands.RisksJSON = *generateRisksJSONFlag + commands.StatsJSON = *generateStatsJSONFlag + commands.TechnicalAssetsJSON = *generateTechnicalAssetsJSONFlag + commands.RisksExcel = *generateRisksExcelFlag + commands.TagsExcel = *generateTagsExcelFlag + commands.ReportPDF = *generateReportPDFFlag + return commands +} + func readConfig(buildTimestamp string) *common.Config { cfg := new(common.Config).Defaults(buildTimestamp) cfg.ServerPort = *serverPortFlag @@ -98,15 +128,29 @@ func readConfig(buildTimestamp string) *common.Config { return cfg } -func readCommands() *report.GenerateCommands { - commands := new(report.GenerateCommands).Defaults() - commands.DataFlowDiagram = *generateDataFlowDiagramFlag - commands.DataAssetDiagram = *generateDataAssetDiagramFlag - commands.RisksJSON = *generateRisksJSONFlag - commands.StatsJSON = *generateStatsJSONFlag - commands.TechnicalAssetsJSON = *generateTechnicalAssetsJSONFlag - commands.RisksExcel = *generateRisksExcelFlag - commands.TagsExcel = *generateTagsExcelFlag - commands.ReportPDF = *generateReportPDFFlag - return commands +func expandPath(path string) string { + home := userHomeDir() + if strings.HasPrefix(path, "~") { + path = strings.Replace(path, "~", home, 1) + } + + if strings.HasPrefix(path, "$HOME") { + path = strings.Replace(path, "$HOME", home, -1) + } + + return path +} + +func userHomeDir() string { + switch 
runtime.GOOS { + case "windows": + home := os.Getenv("HOMEDRIVE") + os.Getenv("HOMEPATH") + if home == "" { + home = os.Getenv("USERPROFILE") + } + return home + + default: + return os.Getenv("HOME") + } } diff --git a/pkg/report/excel.go b/pkg/report/excel.go index 9470db8d..315de9fc 100644 --- a/pkg/report/excel.go +++ b/pkg/report/excel.go @@ -1,6 +1,7 @@ package report import ( + "fmt" "sort" "strconv" "strings" @@ -10,10 +11,8 @@ import ( "github.com/xuri/excelize/v2" ) -var excelRow int - -func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) { - excelRow = 0 +func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) error { + excelRow := 0 excel := excelize.NewFile() sheetName := parsedModel.Title err := excel.SetDocProps(&excelize.DocProperties{ @@ -30,14 +29,18 @@ func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) { Language: "en-US", Version: "1.0.0", }) - checkErr(err) + if err != nil { + return fmt.Errorf("unable to set doc properties: %w", err) + } sheetIndex, _ := excel.NewSheet(sheetName) _ = excel.DeleteSheet("Sheet1") orientation := "landscape" size := 9 err = excel.SetPageLayout(sheetName, &excelize.PageLayoutOptions{Orientation: &orientation, Size: &size}) // A4 - checkErr(err) + if err != nil { + return fmt.Errorf("unable to set page layout: %w", err) + } err = excel.SetHeaderFooter(sheetName, &excelize.HeaderFooterOptions{ DifferentFirst: false, @@ -48,7 +51,9 @@ func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) { EvenFooter: "&L&D&R&T", FirstHeader: `&Threat Model &"-,` + parsedModel.Title + `"Bold&"-,Regular"Risks Summary+000A&D`, }) - checkErr(err) + if err != nil { + return fmt.Errorf("unable to set header/footer: %w", err) + } err = excel.SetCellValue(sheetName, "A1", "Severity") err = excel.SetCellValue(sheetName, "B1", "Likelihood") @@ -91,7 +96,9 @@ func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) { err = 
excel.SetColWidth(sheetName, "R", "R", 18) err = excel.SetColWidth(sheetName, "S", "S", 20) err = excel.SetColWidth(sheetName, "T", "T", 20) - checkErr(err) + if err != nil { + return fmt.Errorf("unable to set column width: %w", err) + } // styleSeverityCriticalBold, err := excel.NewStyle(`{"font":{"color":"` + colors.RgbHexColorCriticalRisk() + `","size":12,"bold":true}}`) styleSeverityCriticalBold, err := excel.NewStyle(&excelize.Style{ @@ -422,7 +429,9 @@ func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) { err = excel.SetCellStyle(sheetName, "R"+strconv.Itoa(excelRow), "R"+strconv.Itoa(excelRow), styleBlackCenter) err = excel.SetCellStyle(sheetName, "S"+strconv.Itoa(excelRow), "S"+strconv.Itoa(excelRow), styleBlackCenter) err = excel.SetCellStyle(sheetName, "T"+strconv.Itoa(excelRow), "T"+strconv.Itoa(excelRow), styleBlackLeft) - checkErr(err) + if err != nil { + return fmt.Errorf("unable to set cell style: %w", err) + } } } @@ -448,15 +457,20 @@ func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) { }) err = excel.SetCellStyle(sheetName, "A1", "T1", styleHeadCenter) - checkErr(err) + if err != nil { + return fmt.Errorf("unable to set cell style: %w", err) + } excel.SetActiveSheet(sheetIndex) err = excel.SaveAs(filename) - checkErr(err) + if err != nil { + return fmt.Errorf("unable to save excel file: %w", err) + } + return nil } -func WriteTagsExcelToFile(parsedModel *types.ParsedModel, filename string) { // TODO: eventually when len(sortedTagsAvailable) == 0 is: write a hint in the Excel that no tags are used - excelRow = 0 +func WriteTagsExcelToFile(parsedModel *types.ParsedModel, filename string) error { // TODO: eventually when len(sortedTagsAvailable) == 0 is: write a hint in the Excel that no tags are used + excelRow := 0 excel := excelize.NewFile() sheetName := parsedModel.Title err := excel.SetDocProps(&excelize.DocProperties{ @@ -473,14 +487,18 @@ func WriteTagsExcelToFile(parsedModel *types.ParsedModel, 
filename string) { // Language: "en-US", Version: "1.0.0", }) - checkErr(err) + if err != nil { + return err + } sheetIndex, _ := excel.NewSheet(sheetName) _ = excel.DeleteSheet("Sheet1") orientation := "landscape" size := 9 err = excel.SetPageLayout(sheetName, &excelize.PageLayoutOptions{Orientation: &orientation, Size: &size}) // A4 - checkErr(err) + if err != nil { + return err + } err = excel.SetHeaderFooter(sheetName, &excelize.HeaderFooterOptions{ DifferentFirst: false, @@ -491,7 +509,9 @@ func WriteTagsExcelToFile(parsedModel *types.ParsedModel, filename string) { // EvenFooter: "&L&D&R&T", FirstHeader: `&Tag Matrix &"-,` + parsedModel.Title + `"Bold&"-,Regular"Summary+000A&D`, }) - checkErr(err) + if err != nil { + return err + } err = excel.SetCellValue(sheetName, "A1", "Element") // TODO is "Element" the correct generic name when referencing assets, links, trust boundaries etc.? Eventually add separate column "type of element" like "technical asset" or "data asset"? sortedTagsAvailable := parsedModel.TagsActuallyUsed() @@ -503,10 +523,16 @@ func WriteTagsExcelToFile(parsedModel *types.ParsedModel, filename string) { // } err = excel.SetColWidth(sheetName, "A", "A", 60) + if err != nil { + return err + } + if len(sortedTagsAvailable) > 0 { err = excel.SetColWidth(sheetName, "B", axis, 35) } - checkErr(err) + if err != nil { + return err + } // styleBlackCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"#000000","size":12}}`) styleBlackCenter, err := excel.NewStyle(&excelize.Style{ @@ -537,19 +563,34 @@ func WriteTagsExcelToFile(parsedModel *types.ParsedModel, filename string) { // excelRow++ // as we have a header line if len(sortedTagsAvailable) > 0 { for _, techAsset := range sortedTechnicalAssetsByTitle(parsedModel) { - writeRow(excel, sheetName, axis, styleBlackLeftBold, styleBlackCenter, sortedTagsAvailable, techAsset.Title, techAsset.Tags) + err := writeRow(excel, &excelRow, 
sheetName, axis, styleBlackLeftBold, styleBlackCenter, sortedTagsAvailable, techAsset.Title, techAsset.Tags) + if err != nil { + return fmt.Errorf("unable to write row: %w", err) + } for _, commLink := range techAsset.CommunicationLinksSorted() { - writeRow(excel, sheetName, axis, styleBlackLeftBold, styleBlackCenter, sortedTagsAvailable, commLink.Title, commLink.Tags) + err := writeRow(excel, &excelRow, sheetName, axis, styleBlackLeftBold, styleBlackCenter, sortedTagsAvailable, commLink.Title, commLink.Tags) + if err != nil { + return fmt.Errorf("unable to write row: %w", err) + } } } for _, dataAsset := range sortedDataAssetsByTitle(parsedModel) { - writeRow(excel, sheetName, axis, styleBlackLeftBold, styleBlackCenter, sortedTagsAvailable, dataAsset.Title, dataAsset.Tags) + err := writeRow(excel, &excelRow, sheetName, axis, styleBlackLeftBold, styleBlackCenter, sortedTagsAvailable, dataAsset.Title, dataAsset.Tags) + if err != nil { + return fmt.Errorf("unable to write row: %w", err) + } } for _, trustBoundary := range sortedTrustBoundariesByTitle(parsedModel) { - writeRow(excel, sheetName, axis, styleBlackLeftBold, styleBlackCenter, sortedTagsAvailable, trustBoundary.Title, trustBoundary.Tags) + err := writeRow(excel, &excelRow, sheetName, axis, styleBlackLeftBold, styleBlackCenter, sortedTagsAvailable, trustBoundary.Title, trustBoundary.Tags) + if err != nil { + return fmt.Errorf("unable to write row: %w", err) + } } for _, sharedRuntime := range sortedSharedRuntimesByTitle(parsedModel) { - writeRow(excel, sheetName, axis, styleBlackLeftBold, styleBlackCenter, sortedTagsAvailable, sharedRuntime.Title, sharedRuntime.Tags) + err := writeRow(excel, &excelRow, sheetName, axis, styleBlackLeftBold, styleBlackCenter, sortedTagsAvailable, sharedRuntime.Title, sharedRuntime.Tags) + if err != nil { + return fmt.Errorf("unable to write row: %w", err) + } } } @@ -570,6 +611,9 @@ func WriteTagsExcelToFile(parsedModel *types.ParsedModel, filename string) { // Pattern: 1, }, 
}) + if err != nil { + return fmt.Errorf("unable to set cell style: %w", err) + } // styleHeadCenterBold, err := excel.NewStyle(`{"font":{"bold":true,"italic":false,"size":14,"color":"#000000"},"fill":{"type":"pattern","color":["#eeeeee"],"pattern":1},"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false}}`) styleHeadCenterBold, err := excel.NewStyle(&excelize.Style{ Alignment: &excelize.Alignment{ @@ -592,11 +636,16 @@ func WriteTagsExcelToFile(parsedModel *types.ParsedModel, filename string) { // if len(sortedTagsAvailable) > 0 { err = excel.SetCellStyle(sheetName, "B1", axis+"1", styleHeadCenter) } - checkErr(err) + if err != nil { + return fmt.Errorf("unable to set cell style: %w", err) + } excel.SetActiveSheet(sheetIndex) err = excel.SaveAs(filename) - checkErr(err) + if err != nil { + return fmt.Errorf("unable to save excel file: %w", err) + } + return nil } func sortedTrustBoundariesByTitle(parsedModel *types.ParsedModel) []types.TrustBoundary { @@ -617,18 +666,27 @@ func sortedDataAssetsByTitle(parsedModel *types.ParsedModel) []types.DataAsset { return assets } -func writeRow(excel *excelize.File, sheetName string, axis string, styleBlackLeftBold int, styleBlackCenter int, - sortedTags []string, assetTitle string, tagsUsed []string) { - excelRow++ - err := excel.SetCellValue(sheetName, "A"+strconv.Itoa(excelRow), assetTitle) +func writeRow(excel *excelize.File, excelRow *int, sheetName string, axis string, styleBlackLeftBold int, styleBlackCenter int, + sortedTags []string, assetTitle string, tagsUsed []string) error { + *excelRow++ + err := excel.SetCellValue(sheetName, "A"+strconv.Itoa(*excelRow), assetTitle) + if err != nil { + return fmt.Errorf("unable to write row: %w", err) + } for i, tag := range sortedTags { if contains(tagsUsed, tag) { - err = excel.SetCellValue(sheetName, determineColumnLetter(i)+strconv.Itoa(excelRow), "X") + err = excel.SetCellValue(sheetName, determineColumnLetter(i)+strconv.Itoa(*excelRow), "X") + if err != 
nil { + return fmt.Errorf("unable to write row: %w", err) + } } } - err = excel.SetCellStyle(sheetName, "A"+strconv.Itoa(excelRow), "A"+strconv.Itoa(excelRow), styleBlackLeftBold) - err = excel.SetCellStyle(sheetName, "B"+strconv.Itoa(excelRow), axis+strconv.Itoa(excelRow), styleBlackCenter) - checkErr(err) + err = excel.SetCellStyle(sheetName, "A"+strconv.Itoa(*excelRow), "A"+strconv.Itoa(*excelRow), styleBlackLeftBold) + err = excel.SetCellStyle(sheetName, "B"+strconv.Itoa(*excelRow), axis+strconv.Itoa(*excelRow), styleBlackCenter) + if err != nil { + return fmt.Errorf("unable to write row: %w", err) + } + return nil } var alphabet = []string{"A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"} diff --git a/pkg/report/generate.go b/pkg/report/generate.go index 7b5c176e..d1c9ef44 100644 --- a/pkg/report/generate.go +++ b/pkg/report/generate.go @@ -3,6 +3,7 @@ package report import ( "crypto/sha256" "encoding/hex" + "fmt" "io" "os" "path/filepath" @@ -61,9 +62,12 @@ func Generate(config *common.Config, readResult *model.ReadResult, commands *Gen gvFile = tmpFileGV.Name() defer func() { _ = os.Remove(gvFile) }() } - dotFile := WriteDataFlowDiagramGraphvizDOT(readResult.ParsedModel, gvFile, diagramDPI, config.AddModelTitle, progressReporter) + dotFile, err := WriteDataFlowDiagramGraphvizDOT(readResult.ParsedModel, gvFile, diagramDPI, config.AddModelTitle, progressReporter) + if err != nil { + return fmt.Errorf("error while generating data flow diagram: %s", err) + } - err := GenerateDataFlowDiagramGraphvizImage(dotFile, config.OutputFolder, + err = GenerateDataFlowDiagramGraphvizImage(dotFile, config.OutputFolder, config.TempFolder, config.BinFolder, config.DataFlowDiagramFilenamePNG, progressReporter) if err != nil { progressReporter.Warn(err) @@ -80,8 +84,11 @@ func Generate(config *common.Config, readResult *model.ReadResult, commands *Gen gvFile = tmpFile.Name() defer func() { _ = 
os.Remove(gvFile) }() } - dotFile := WriteDataAssetDiagramGraphvizDOT(readResult.ParsedModel, gvFile, diagramDPI, progressReporter) - err := GenerateDataAssetDiagramGraphvizImage(dotFile, config.OutputFolder, + dotFile, err := WriteDataAssetDiagramGraphvizDOT(readResult.ParsedModel, gvFile, diagramDPI, progressReporter) + if err != nil { + return fmt.Errorf("error while generating data asset diagram: %s", err) + } + err = GenerateDataAssetDiagramGraphvizImage(dotFile, config.OutputFolder, config.TempFolder, config.BinFolder, config.DataAssetDiagramFilenamePNG, progressReporter) if err != nil { progressReporter.Warn(err) @@ -91,31 +98,46 @@ func Generate(config *common.Config, readResult *model.ReadResult, commands *Gen // risks as risks json if commands.RisksJSON { progressReporter.Info("Writing risks json") - WriteRisksJSON(readResult.ParsedModel, filepath.Join(config.OutputFolder, config.JsonRisksFilename)) + err := WriteRisksJSON(readResult.ParsedModel, filepath.Join(config.OutputFolder, config.JsonRisksFilename)) + if err != nil { + return fmt.Errorf("error while writing risks json: %s", err) + } } // technical assets json if commands.TechnicalAssetsJSON { progressReporter.Info("Writing technical assets json") - WriteTechnicalAssetsJSON(readResult.ParsedModel, filepath.Join(config.OutputFolder, config.JsonTechnicalAssetsFilename)) + err := WriteTechnicalAssetsJSON(readResult.ParsedModel, filepath.Join(config.OutputFolder, config.JsonTechnicalAssetsFilename)) + if err != nil { + return fmt.Errorf("error while writing technical assets json: %s", err) + } } // risks as risks json if commands.StatsJSON { progressReporter.Info("Writing stats json") - WriteStatsJSON(readResult.ParsedModel, filepath.Join(config.OutputFolder, config.JsonStatsFilename)) + err := WriteStatsJSON(readResult.ParsedModel, filepath.Join(config.OutputFolder, config.JsonStatsFilename)) + if err != nil { + return fmt.Errorf("error while writing stats json: %s", err) + } } // risks Excel if 
commands.RisksExcel { progressReporter.Info("Writing risks excel") - WriteRisksExcelToFile(readResult.ParsedModel, filepath.Join(config.OutputFolder, config.ExcelRisksFilename)) + err := WriteRisksExcelToFile(readResult.ParsedModel, filepath.Join(config.OutputFolder, config.ExcelRisksFilename)) + if err != nil { + return err + } } // tags Excel if commands.TagsExcel { progressReporter.Info("Writing tags excel") - WriteTagsExcelToFile(readResult.ParsedModel, filepath.Join(config.OutputFolder, config.ExcelTagsFilename)) + err := WriteTagsExcelToFile(readResult.ParsedModel, filepath.Join(config.OutputFolder, config.ExcelTagsFilename)) + if err != nil { + return err + } } if commands.ReportPDF { @@ -132,7 +154,9 @@ func Generate(config *common.Config, readResult *model.ReadResult, commands *Gen modelHash := hex.EncodeToString(hasher.Sum(nil)) // report PDF progressReporter.Info("Writing report pdf") - WriteReportPDF(filepath.Join(config.OutputFolder, config.ReportFilename), + + pdfReporter := pdfReporter{} + err = pdfReporter.WriteReportPDF(filepath.Join(config.OutputFolder, config.ReportFilename), filepath.Join(config.AppFolder, config.TemplateFilename), filepath.Join(config.OutputFolder, config.DataFlowDiagramFilenamePNG), filepath.Join(config.OutputFolder, config.DataAssetDiagramFilenamePNG), @@ -144,6 +168,9 @@ func Generate(config *common.Config, readResult *model.ReadResult, commands *Gen readResult.CustomRiskRules, config.TempFolder, readResult.ParsedModel) + if err != nil { + return err + } } return nil @@ -154,3 +181,12 @@ type progressReporter interface { Warn(a ...any) Error(a ...any) } + +func contains(a []string, x string) bool { + for _, n := range a { + if x == n { + return true + } + } + return false +} diff --git a/pkg/report/graphviz.go b/pkg/report/graphviz.go index d40e0ca5..aaaea003 100644 --- a/pkg/report/graphviz.go +++ b/pkg/report/graphviz.go @@ -19,7 +19,7 @@ import ( func WriteDataFlowDiagramGraphvizDOT(parsedModel *types.ParsedModel, 
diagramFilenameDOT string, dpi int, addModelTitle bool, - progressReporter progressReporter) *os.File { + progressReporter progressReporter) (*os.File, error) { progressReporter.Info("Writing data flow diagram input") var dotContent strings.Builder @@ -54,8 +54,7 @@ func WriteDataFlowDiagramGraphvizDOT(parsedModel *types.ParsedModel, splines = "false" drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = false default: - panic(errors.New("unknown value for diagram_tweak_suppress_edge_labels (spline, polyline, ortho, curved, false): " + - parsedModel.DiagramTweakEdgeLayout)) + return nil, fmt.Errorf("unknown value for diagram_tweak_suppress_edge_labels (spline, polyline, ortho, curved, false): %s", parsedModel.DiagramTweakEdgeLayout) } } rankdir := "TB" @@ -245,8 +244,17 @@ func WriteDataFlowDiagramGraphvizDOT(parsedModel *types.ParsedModel, } } - dotContent.WriteString(makeDiagramInvisibleConnectionsTweaks(parsedModel)) - dotContent.WriteString(makeDiagramSameRankNodeTweaks(parsedModel)) + diagramInvisibleConnectionsTweaks, err := makeDiagramInvisibleConnectionsTweaks(parsedModel) + if err != nil { + return nil, fmt.Errorf("error while making diagram invisible connections tweaks: %s", err) + } + dotContent.WriteString(diagramInvisibleConnectionsTweaks) + + diagramSameRankNodeTweaks, err := makeDiagramSameRankNodeTweaks(parsedModel) + if err != nil { + return nil, fmt.Errorf("error while making diagram same-rank node tweaks: %s", err) + } + dotContent.WriteString(diagramSameRankNodeTweaks) dotContent.WriteString("}") @@ -254,11 +262,15 @@ func WriteDataFlowDiagramGraphvizDOT(parsedModel *types.ParsedModel, // Write the DOT file file, err := os.Create(diagramFilenameDOT) - checkErr(err) + if err != nil { + return nil, fmt.Errorf("Error creating %s: %v", diagramFilenameDOT, err) + } defer func() { _ = file.Close() }() _, err = fmt.Fprintln(file, dotContent.String()) - checkErr(err) - return file + if err != nil { + return nil, fmt.Errorf("Error writing %s: 
%v", diagramFilenameDOT, err) + } + return file, nil } func GenerateDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string, @@ -266,11 +278,15 @@ func GenerateDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string, progressReporter.Info("Rendering data flow diagram input") // tmp files tmpFileDOT, err := os.CreateTemp(tempFolder, "diagram-*-.gv") - checkErr(err) + if err != nil { + return fmt.Errorf("Error creating temp file: %v", err) + } defer func() { _ = os.Remove(tmpFileDOT.Name()) }() tmpFilePNG, err := os.CreateTemp(tempFolder, "diagram-*-.png") - checkErr(err) + if err != nil { + return fmt.Errorf("Error creating temp file: %v", err) + } defer func() { _ = os.Remove(tmpFilePNG.Name()) }() // copy into tmp file as input @@ -289,7 +305,7 @@ func GenerateDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string, cmd.Stderr = os.Stderr err = cmd.Run() if err != nil { - panic(errors.New("graph rendering call failed with error:" + err.Error())) + return errors.New("graph rendering call failed with error: " + err.Error()) } // copy into resulting file inputPNG, err := os.ReadFile(tmpFilePNG.Name()) @@ -303,7 +319,7 @@ func GenerateDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string, return nil } -func makeDiagramSameRankNodeTweaks(parsedModel *types.ParsedModel) string { +func makeDiagramSameRankNodeTweaks(parsedModel *types.ParsedModel) (string, error) { // see https://stackoverflow.com/questions/25734244/how-do-i-place-nodes-on-the-same-level-in-dot tweak := "" if len(parsedModel.DiagramTweakSameRankAssets) > 0 { @@ -312,10 +328,13 @@ func makeDiagramSameRankNodeTweaks(parsedModel *types.ParsedModel) string { if len(assetIDs) > 0 { tweak += "{ rank=same; " for _, id := range assetIDs { - checkErr(parsedModel.CheckTechnicalAssetExists(id, "diagram tweak same-rank", true)) + err := parsedModel.CheckTechnicalAssetExists(id, "diagram tweak same-rank", true) + if err != nil { + return "", fmt.Errorf("error while checking technical 
asset existence: %s", err) + } if len(parsedModel.TechnicalAssets[id].GetTrustBoundaryId(parsedModel)) > 0 { - panic(errors.New("technical assets (referenced in same rank diagram tweak) are inside trust boundaries: " + - fmt.Sprintf("%v", parsedModel.DiagramTweakSameRankAssets))) + return "", fmt.Errorf("technical assets (referenced in same rank diagram tweak) are inside trust boundaries: " + + fmt.Sprintf("%v", parsedModel.DiagramTweakSameRankAssets)) } tweak += " " + hash(id) + "; " } @@ -323,27 +342,34 @@ func makeDiagramSameRankNodeTweaks(parsedModel *types.ParsedModel) string { } } } - return tweak + return tweak, nil } -func makeDiagramInvisibleConnectionsTweaks(parsedModel *types.ParsedModel) string { +func makeDiagramInvisibleConnectionsTweaks(parsedModel *types.ParsedModel) (string, error) { // see https://stackoverflow.com/questions/2476575/how-to-control-node-placement-in-graphviz-i-e-avoid-edge-crossings tweak := "" if len(parsedModel.DiagramTweakInvisibleConnectionsBetweenAssets) > 0 { for _, invisibleConnections := range parsedModel.DiagramTweakInvisibleConnectionsBetweenAssets { assetIDs := strings.Split(invisibleConnections, ":") if len(assetIDs) == 2 { - checkErr(parsedModel.CheckTechnicalAssetExists(assetIDs[0], "diagram tweak connections", true)) - checkErr(parsedModel.CheckTechnicalAssetExists(assetIDs[1], "diagram tweak connections", true)) + err := parsedModel.CheckTechnicalAssetExists(assetIDs[0], "diagram tweak connections", true) + if err != nil { + return "", fmt.Errorf("error while checking technical asset existence: %s", err) + } + err = parsedModel.CheckTechnicalAssetExists(assetIDs[1], "diagram tweak connections", true) + if err != nil { + return "", fmt.Errorf("error while checking technical asset existence: %s", err) + } + tweak += "\n" + hash(assetIDs[0]) + " -> " + hash(assetIDs[1]) + " [style=invis]; \n" } } } - return tweak + return tweak, nil } func WriteDataAssetDiagramGraphvizDOT(parsedModel *types.ParsedModel, 
diagramFilenameDOT string, dpi int, - progressReporter progressReporter) *os.File { + progressReporter progressReporter) (*os.File, error) { progressReporter.Info("Writing data asset diagram input") var dotContent strings.Builder @@ -422,11 +448,15 @@ func WriteDataAssetDiagramGraphvizDOT(parsedModel *types.ParsedModel, diagramFil // Write the DOT file file, err := os.Create(diagramFilenameDOT) - checkErr(err) + if err != nil { + return nil, fmt.Errorf("Error creating %s: %v", diagramFilenameDOT, err) + } defer func() { _ = file.Close() }() _, err = fmt.Fprintln(file, dotContent.String()) - checkErr(err) - return file + if err != nil { + return nil, fmt.Errorf("Error writing %s: %v", diagramFilenameDOT, err) + } + return file, nil } func makeDataAssetNode(parsedModel *types.ParsedModel, dataAsset types.DataAsset) string { diff --git a/pkg/report/json.go b/pkg/report/json.go index 9b104633..290d1333 100644 --- a/pkg/report/json.go +++ b/pkg/report/json.go @@ -2,11 +2,13 @@ package report import ( "encoding/json" - "github.com/threagile/threagile/pkg/security/types" + "fmt" "os" + + "github.com/threagile/threagile/pkg/security/types" ) -func WriteRisksJSON(parsedModel *types.ParsedModel, filename string) { +func WriteRisksJSON(parsedModel *types.ParsedModel, filename string) error { /* remainingRisks := make([]model.Risk, 0) for _, category := range model.SortedRiskCategories() { @@ -18,34 +20,37 @@ func WriteRisksJSON(parsedModel *types.ParsedModel, filename string) { */ jsonBytes, err := json.Marshal(types.AllRisks(parsedModel)) if err != nil { - panic(err) + return fmt.Errorf("failed to marshal risks to JSON: %w", err) } err = os.WriteFile(filename, jsonBytes, 0644) if err != nil { - panic(err) + return fmt.Errorf("failed to write risks to JSON file: %w", err) } + return nil } // TODO: also a "data assets" json? 
-func WriteTechnicalAssetsJSON(parsedModel *types.ParsedModel, filename string) { +func WriteTechnicalAssetsJSON(parsedModel *types.ParsedModel, filename string) error { jsonBytes, err := json.Marshal(parsedModel.TechnicalAssets) if err != nil { - panic(err) + return fmt.Errorf("failed to marshal technical assets to JSON: %w", err) } err = os.WriteFile(filename, jsonBytes, 0644) if err != nil { - panic(err) + return fmt.Errorf("failed to write technical assets to JSON file: %w", err) } + return nil } -func WriteStatsJSON(parsedModel *types.ParsedModel, filename string) { +func WriteStatsJSON(parsedModel *types.ParsedModel, filename string) error { jsonBytes, err := json.Marshal(types.OverallRiskStatistics(parsedModel)) if err != nil { - panic(err) + return fmt.Errorf("failed to marshal stats to JSON: %w", err) } err = os.WriteFile(filename, jsonBytes, 0644) if err != nil { - panic(err) + return fmt.Errorf("failed to write stats to JSON file: %w", err) } + return nil } diff --git a/pkg/report/report.go b/pkg/report/report.go index 4b3ed80f..14808204 100644 --- a/pkg/report/report.go +++ b/pkg/report/report.go @@ -1,7 +1,6 @@ package report import ( - "errors" "fmt" "image" "log" @@ -68,31 +67,30 @@ import ( const fontSizeHeadline, fontSizeHeadlineSmall, fontSizeBody, fontSizeSmall, fontSizeVerySmall = 20, 16, 12, 9, 7 const /*dataFlowDiagramFullscreen,*/ allowedPdfLandscapePages, embedDiagramLegendPage = /*false,*/ true, false -var isLandscapePage bool - -var pdf *gofpdf.Fpdf - -// var alreadyTemplateImported = false -var coverTemplateId, contentTemplateId, diagramLegendTemplateId int -var pageNo int -var linkCounter int -var tocLinkIdByAssetId map[string]int -var homeLink int -var currentChapterTitleBreadcrumb string - -var firstParagraphRegEx = regexp.MustCompile(`(.*?)((
)|(

))`) +type pdfReporter struct { + isLandscapePage bool + pdf *gofpdf.Fpdf + coverTemplateId int + contentTemplateId int + diagramLegendTemplateId int + pageNo int + linkCounter int + tocLinkIdByAssetId map[string]int + homeLink int + currentChapterTitleBreadcrumb string +} -func initReport() { - pdf = nil - isLandscapePage = false - pageNo = 0 - linkCounter = 0 - homeLink = 0 - currentChapterTitleBreadcrumb = "" - tocLinkIdByAssetId = make(map[string]int) +func (r *pdfReporter) initReport() { + r.pdf = nil + r.isLandscapePage = false + r.pageNo = 0 + r.linkCounter = 0 + r.homeLink = 0 + r.currentChapterTitleBreadcrumb = "" + r.tocLinkIdByAssetId = make(map[string]int) } -func WriteReportPDF(reportFilename string, +func (r *pdfReporter) WriteReportPDF(reportFilename string, templateFilename string, dataFlowDiagramFilenamePNG string, dataAssetDiagramFilenamePNG string, @@ -103,155 +101,162 @@ func WriteReportPDF(reportFilename string, introTextRAA string, customRiskRules map[string]*types.CustomRisk, tempFolder string, - model *types.ParsedModel) { - initReport() - createPdfAndInitMetadata(model) - parseBackgroundTemplate(templateFilename) - createCover(model) - createTableOfContents(model) - createManagementSummary(model, tempFolder) - createImpactInitialRisks(model) - createRiskMitigationStatus(model, tempFolder) - createImpactRemainingRisks(model) - createTargetDescription(model, filepath.Dir(modelFilename)) - embedDataFlowDiagram(dataFlowDiagramFilenamePNG, tempFolder) - createSecurityRequirements(model) - createAbuseCases(model) - createTagListing(model) - createSTRIDE(model) - createAssignmentByFunction(model) - createRAA(model, introTextRAA) - embedDataRiskMapping(dataAssetDiagramFilenamePNG, tempFolder) + model *types.ParsedModel) error { + r.initReport() + r.createPdfAndInitMetadata(model) + r.parseBackgroundTemplate(templateFilename) + r.createCover(model) + r.createTableOfContents(model) + err := r.createManagementSummary(model, tempFolder) + if err != nil 
{ + return fmt.Errorf("error creating management summary: %w", err) + } + r.createImpactInitialRisks(model) + err = r.createRiskMitigationStatus(model, tempFolder) + if err != nil { + return fmt.Errorf("error creating risk mitigation status: %w", err) + } + r.createImpactRemainingRisks(model) + err = r.createTargetDescription(model, filepath.Dir(modelFilename)) + if err != nil { + return fmt.Errorf("error creating target description: %w", err) + } + r.embedDataFlowDiagram(dataFlowDiagramFilenamePNG, tempFolder) + r.createSecurityRequirements(model) + r.createAbuseCases(model) + r.createTagListing(model) + r.createSTRIDE(model) + r.createAssignmentByFunction(model) + r.createRAA(model, introTextRAA) + r.embedDataRiskMapping(dataAssetDiagramFilenamePNG, tempFolder) //createDataRiskQuickWins() - createOutOfScopeAssets(model) - createModelFailures(model) - createQuestions(model) - createRiskCategories(model) - createTechnicalAssets(model) - createDataAssets(model) - createTrustBoundaries(model) - createSharedRuntimes(model) - createRiskRulesChecked(model, modelFilename, skipRiskRules, buildTimestamp, modelHash, customRiskRules) - createDisclaimer(model) - writeReportToFile(reportFilename) -} - -func checkErr(err error) { + r.createOutOfScopeAssets(model) + r.createModelFailures(model) + r.createQuestions(model) + r.createRiskCategories(model) + r.createTechnicalAssets(model) + r.createDataAssets(model) + r.createTrustBoundaries(model) + r.createSharedRuntimes(model) + r.createRiskRulesChecked(model, modelFilename, skipRiskRules, buildTimestamp, modelHash, customRiskRules) + r.createDisclaimer(model) + err = r.writeReportToFile(reportFilename) if err != nil { - panic(err) + return fmt.Errorf("error writing report to file: %w", err) } + return nil } -func createPdfAndInitMetadata(model *types.ParsedModel) { - pdf = gofpdf.New("P", "mm", "A4", "") - pdf.SetCreator(model.Author.Homepage, true) - pdf.SetAuthor(model.Author.Name, true) - pdf.SetTitle("Threat Model Report: 
"+model.Title, true) - pdf.SetSubject("Threat Model Report: "+model.Title, true) - // pdf.SetPageBox("crop", 0, 0, 100, 010) - pdf.SetHeaderFunc(headerFunc) - pdf.SetFooterFunc(func() { - addBreadcrumb(model) - pdf.SetFont("Helvetica", "", 10) - pdf.SetTextColor(127, 127, 127) - pdf.Text(8.6, 284, "Threat Model Report via Threagile") //: "+parsedModel.Title) - pdf.Link(8.4, 281, 54.6, 4, homeLink) - pageNo++ - text := "Page " + strconv.Itoa(pageNo) - if pageNo < 10 { +func (r *pdfReporter) createPdfAndInitMetadata(model *types.ParsedModel) { + r.pdf = gofpdf.New("P", "mm", "A4", "") + r.pdf.SetCreator(model.Author.Homepage, true) + r.pdf.SetAuthor(model.Author.Name, true) + r.pdf.SetTitle("Threat Model Report: "+model.Title, true) + r.pdf.SetSubject("Threat Model Report: "+model.Title, true) + // r.pdf.SetPageBox("crop", 0, 0, 100, 010) + r.pdf.SetHeaderFunc(func() { + if r.isLandscapePage { + return + } + + gofpdi.UseImportedTemplate(r.pdf, r.contentTemplateId, 0, 0, 0, 300) + r.pdf.SetTopMargin(35) + }) + r.pdf.SetFooterFunc(func() { + r.addBreadcrumb(model) + r.pdf.SetFont("Helvetica", "", 10) + r.pdf.SetTextColor(127, 127, 127) + r.pdf.Text(8.6, 284, "Threat Model Report via Threagile") //: "+parsedModel.Title) + r.pdf.Link(8.4, 281, 54.6, 4, r.homeLink) + r.pageNo++ + text := "Page " + strconv.Itoa(r.pageNo) + if r.pageNo < 10 { text = " " + text - } else if pageNo < 100 { + } else if r.pageNo < 100 { text = " " + text } - if pageNo > 1 { - pdf.Text(186, 284, text) + if r.pageNo > 1 { + r.pdf.Text(186, 284, text) } }) - linkCounter = 1 // link counting starts at 1 via pdf.AddLink + r.linkCounter = 1 // link counting starts at 1 via r.pdf.AddLink } -func headerFunc() { - if !isLandscapePage { - gofpdi.UseImportedTemplate(pdf, contentTemplateId, 0, 0, 0, 300) - pdf.SetTopMargin(35) +func (r *pdfReporter) addBreadcrumb(parsedModel *types.ParsedModel) { + if len(r.currentChapterTitleBreadcrumb) > 0 { + uni := r.pdf.UnicodeTranslatorFromDescriptor("") + 
r.pdf.SetFont("Helvetica", "", 10) + r.pdf.SetTextColor(127, 127, 127) + r.pdf.Text(46.7, 24.5, uni(r.currentChapterTitleBreadcrumb+" - "+parsedModel.Title)) } } -func addBreadcrumb(parsedModel *types.ParsedModel) { - if len(currentChapterTitleBreadcrumb) > 0 { - uni := pdf.UnicodeTranslatorFromDescriptor("") - pdf.SetFont("Helvetica", "", 10) - pdf.SetTextColor(127, 127, 127) - pdf.Text(46.7, 24.5, uni(currentChapterTitleBreadcrumb+" - "+parsedModel.Title)) - } -} - -func parseBackgroundTemplate(templateFilename string) { +func (r *pdfReporter) parseBackgroundTemplate(templateFilename string) { /* imageBox, err := rice.FindBox("template") checkErr(err) - file, err := os.CreateTemp("", "background-*-.pdf") + file, err := os.CreateTemp("", "background-*-.r.pdf") checkErr(err) defer os.Remove(file.Name()) - backgroundBytes := imageBox.MustBytes("background.pdf") + backgroundBytes := imageBox.MustBytes("background.r.pdf") err = os.WriteFile(file.Name(), backgroundBytes, 0644) checkErr(err) */ - coverTemplateId = gofpdi.ImportPage(pdf, templateFilename, 1, "/MediaBox") - contentTemplateId = gofpdi.ImportPage(pdf, templateFilename, 2, "/MediaBox") - diagramLegendTemplateId = gofpdi.ImportPage(pdf, templateFilename, 3, "/MediaBox") + r.coverTemplateId = gofpdi.ImportPage(r.pdf, templateFilename, 1, "/MediaBox") + r.contentTemplateId = gofpdi.ImportPage(r.pdf, templateFilename, 2, "/MediaBox") + r.diagramLegendTemplateId = gofpdi.ImportPage(r.pdf, templateFilename, 3, "/MediaBox") } -func createCover(parsedModel *types.ParsedModel) { - uni := pdf.UnicodeTranslatorFromDescriptor("") - pdf.AddPage() - gofpdi.UseImportedTemplate(pdf, coverTemplateId, 0, 0, 0, 300) - pdf.SetFont("Helvetica", "B", 28) - pdf.SetTextColor(0, 0, 0) - pdf.Text(40, 110, "Threat Model Report") - pdf.Text(40, 125, uni(parsedModel.Title)) - pdf.SetFont("Helvetica", "", 12) +func (r *pdfReporter) createCover(parsedModel *types.ParsedModel) { + uni := r.pdf.UnicodeTranslatorFromDescriptor("") + 
r.pdf.AddPage() + gofpdi.UseImportedTemplate(r.pdf, r.coverTemplateId, 0, 0, 0, 300) + r.pdf.SetFont("Helvetica", "B", 28) + r.pdf.SetTextColor(0, 0, 0) + r.pdf.Text(40, 110, "Threat Model Report") + r.pdf.Text(40, 125, uni(parsedModel.Title)) + r.pdf.SetFont("Helvetica", "", 12) reportDate := parsedModel.Date if reportDate.IsZero() { reportDate = time.Now() } - pdf.Text(40.7, 145, reportDate.Format("2 January 2006")) - pdf.Text(40.7, 153, uni(parsedModel.Author.Name)) - pdf.SetFont("Helvetica", "", 10) - pdf.SetTextColor(80, 80, 80) - pdf.Text(8.6, 275, parsedModel.Author.Homepage) - pdf.SetFont("Helvetica", "", 12) - pdf.SetTextColor(0, 0, 0) + r.pdf.Text(40.7, 145, reportDate.Format("2 January 2006")) + r.pdf.Text(40.7, 153, uni(parsedModel.Author.Name)) + r.pdf.SetFont("Helvetica", "", 10) + r.pdf.SetTextColor(80, 80, 80) + r.pdf.Text(8.6, 275, parsedModel.Author.Homepage) + r.pdf.SetFont("Helvetica", "", 12) + r.pdf.SetTextColor(0, 0, 0) } -func createTableOfContents(parsedModel *types.ParsedModel) { - uni := pdf.UnicodeTranslatorFromDescriptor("") - pdf.AddPage() - currentChapterTitleBreadcrumb = "Table of Contents" - homeLink = pdf.AddLink() - defineLinkTarget("{home}") - gofpdi.UseImportedTemplate(pdf, contentTemplateId, 0, 0, 0, 300) - pdf.SetFont("Helvetica", "B", fontSizeHeadline) - pdf.Text(11, 40, "Table of Contents") - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetY(46) - - pdf.SetLineWidth(0.25) - pdf.SetDrawColor(160, 160, 160) - pdf.SetDashPattern([]float64{0.5, 0.5}, 0) +func (r *pdfReporter) createTableOfContents(parsedModel *types.ParsedModel) { + uni := r.pdf.UnicodeTranslatorFromDescriptor("") + r.pdf.AddPage() + r.currentChapterTitleBreadcrumb = "Table of Contents" + r.homeLink = r.pdf.AddLink() + r.defineLinkTarget("{home}") + gofpdi.UseImportedTemplate(r.pdf, r.contentTemplateId, 0, 0, 0, 300) + r.pdf.SetFont("Helvetica", "B", fontSizeHeadline) + r.pdf.Text(11, 40, "Table of Contents") + r.pdf.SetFont("Helvetica", "", fontSizeBody) + 
r.pdf.SetY(46) + + r.pdf.SetLineWidth(0.25) + r.pdf.SetDrawColor(160, 160, 160) + r.pdf.SetDashPattern([]float64{0.5, 0.5}, 0) // =============== var y float64 = 50 - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdf.Text(11, y, "Results Overview") - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.Text(11, y, "Results Overview") + r.pdf.SetFont("Helvetica", "", fontSizeBody) y += 6 - pdf.Text(11, y, " "+"Management Summary") - pdf.Text(175, y, "{management-summary}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"Management Summary") + r.pdf.Text(175, y, "{management-summary}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) risksStr := "Risks" catStr := "Categories" @@ -263,16 +268,16 @@ func createTableOfContents(parsedModel *types.ParsedModel) { catStr = "Category" } y += 6 - pdf.Text(11, y, " "+"Impact Analysis of "+strconv.Itoa(count)+" Initial "+risksStr+" in "+strconv.Itoa(catCount)+" "+catStr) - pdf.Text(175, y, "{impact-analysis-initial-risks}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"Impact Analysis of "+strconv.Itoa(count)+" Initial "+risksStr+" in "+strconv.Itoa(catCount)+" "+catStr) + r.pdf.Text(175, y, "{impact-analysis-initial-risks}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) y += 6 - pdf.Text(11, y, " "+"Risk Mitigation") - pdf.Text(175, y, "{risk-mitigation-status}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"Risk Mitigation") + r.pdf.Text(175, y, "{risk-mitigation-status}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) y += 6 risksStr = "Risks" @@ -284,64 +289,64 @@ func createTableOfContents(parsedModel *types.ParsedModel) { if catCount == 1 { 
catStr = "Category" } - pdf.Text(11, y, " "+"Impact Analysis of "+strconv.Itoa(count)+" Remaining "+risksStr+" in "+strconv.Itoa(catCount)+" "+catStr) - pdf.Text(175, y, "{impact-analysis-remaining-risks}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"Impact Analysis of "+strconv.Itoa(count)+" Remaining "+risksStr+" in "+strconv.Itoa(catCount)+" "+catStr) + r.pdf.Text(175, y, "{impact-analysis-remaining-risks}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) y += 6 - pdf.Text(11, y, " "+"Application Overview") - pdf.Text(175, y, "{target-overview}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"Application Overview") + r.pdf.Text(175, y, "{target-overview}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) y += 6 - pdf.Text(11, y, " "+"Data-Flow Diagram") - pdf.Text(175, y, "{data-flow-diagram}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"Data-Flow Diagram") + r.pdf.Text(175, y, "{data-flow-diagram}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) y += 6 - pdf.Text(11, y, " "+"Security Requirements") - pdf.Text(175, y, "{security-requirements}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"Security Requirements") + r.pdf.Text(175, y, "{security-requirements}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) y += 6 - pdf.Text(11, y, " "+"Abuse Cases") - pdf.Text(175, y, "{abuse-cases}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"Abuse Cases") + r.pdf.Text(175, y, "{abuse-cases}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 
172.5, 6.5, r.pdf.AddLink()) y += 6 - pdf.Text(11, y, " "+"Tag Listing") - pdf.Text(175, y, "{tag-listing}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"Tag Listing") + r.pdf.Text(175, y, "{tag-listing}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) y += 6 - pdf.Text(11, y, " "+"STRIDE Classification of Identified Risks") - pdf.Text(175, y, "{stride}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"STRIDE Classification of Identified Risks") + r.pdf.Text(175, y, "{stride}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) y += 6 - pdf.Text(11, y, " "+"Assignment by Function") - pdf.Text(175, y, "{function-assignment}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"Assignment by Function") + r.pdf.Text(175, y, "{function-assignment}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) y += 6 - pdf.Text(11, y, " "+"RAA Analysis") - pdf.Text(175, y, "{raa-analysis}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"RAA Analysis") + r.pdf.Text(175, y, "{raa-analysis}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) y += 6 - pdf.Text(11, y, " "+"Data Mapping") - pdf.Text(175, y, "{data-risk-mapping}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"Data Mapping") + r.pdf.Text(175, y, "{data-risk-mapping}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) /* y += 6 @@ -350,10 +355,10 @@ func createTableOfContents(parsedModel *types.ParsedModel) { if count == 1 { assets = "asset" } - pdf.Text(11, y, " "+"Data Risk Quick 
Wins: "+strconv.Itoa(count)+" "+assets) - pdf.Text(175, y, "{data-risk-quick-wins}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"Data Risk Quick Wins: "+strconv.Itoa(count)+" "+assets) + r.pdf.Text(175, y, "{data-risk-quick-wins}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) */ y += 6 @@ -362,10 +367,10 @@ func createTableOfContents(parsedModel *types.ParsedModel) { if count == 1 { assets = "Asset" } - pdf.Text(11, y, " "+"Out-of-Scope Assets: "+strconv.Itoa(count)+" "+assets) - pdf.Text(175, y, "{out-of-scope-assets}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"Out-of-Scope Assets: "+strconv.Itoa(count)+" "+assets) + r.pdf.Text(175, y, "{out-of-scope-assets}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) y += 6 modelFailures := types.FlattenRiskSlice(types.FilterByModelFailures(parsedModel, parsedModel.GeneratedRisksByCategory)) @@ -376,13 +381,13 @@ func createTableOfContents(parsedModel *types.ParsedModel) { } countStillAtRisk := len(types.ReduceToOnlyStillAtRisk(parsedModel, modelFailures)) if countStillAtRisk > 0 { - colors.ColorModelFailure(pdf) + colors.ColorModelFailure(r.pdf) } - pdf.Text(11, y, " "+"Potential Model Failures: "+strconv.Itoa(countStillAtRisk)+" / "+strconv.Itoa(count)+" "+risksStr) - pdf.Text(175, y, "{model-failures}") - pdfColorBlack() - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"Potential Model Failures: "+strconv.Itoa(countStillAtRisk)+" / "+strconv.Itoa(count)+" "+risksStr) + r.pdf.Text(175, y, "{model-failures}") + r.pdfColorBlack() + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) y += 6 questions := "Questions" @@ -391,13 +396,13 @@ func createTableOfContents(parsedModel 
*types.ParsedModel) { questions = "Question" } if questionsUnanswered(parsedModel) > 0 { - colors.ColorModelFailure(pdf) + colors.ColorModelFailure(r.pdf) } - pdf.Text(11, y, " "+"Questions: "+strconv.Itoa(questionsUnanswered(parsedModel))+" / "+strconv.Itoa(count)+" "+questions) - pdf.Text(175, y, "{questions}") - pdfColorBlack() - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"Questions: "+strconv.Itoa(questionsUnanswered(parsedModel))+" / "+strconv.Itoa(count)+" "+questions) + r.pdf.Text(175, y, "{questions}") + r.pdfColorBlack() + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) // =============== @@ -405,40 +410,40 @@ func createTableOfContents(parsedModel *types.ParsedModel) { y += 6 y += 6 if y > 260 { // 260 instead of 275 for major group headlines to avoid "Schusterjungen" - pageBreakInLists() + r.pageBreakInLists() y = 40 } - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdf.SetTextColor(0, 0, 0) - pdf.Text(11, y, "Risks by Vulnerability Category") - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.SetTextColor(0, 0, 0) + r.pdf.Text(11, y, "Risks by Vulnerability Category") + r.pdf.SetFont("Helvetica", "", fontSizeBody) y += 6 - pdf.Text(11, y, " "+"Identified Risks by Vulnerability Category") - pdf.Text(175, y, "{intro-risks-by-vulnerability-category}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"Identified Risks by Vulnerability Category") + r.pdf.Text(175, y, "{intro-risks-by-vulnerability-category}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) for _, category := range types.SortedRiskCategories(parsedModel) { newRisksStr := types.SortedRisksOfCategory(parsedModel, category) switch types.HighestSeverityStillAtRisk(parsedModel, newRisksStr) { case types.CriticalSeverity: - 
colors.ColorCriticalRisk(pdf) + colors.ColorCriticalRisk(r.pdf) case types.HighSeverity: - colors.ColorHighRisk(pdf) + colors.ColorHighRisk(r.pdf) case types.ElevatedSeverity: - colors.ColorElevatedRisk(pdf) + colors.ColorElevatedRisk(r.pdf) case types.MediumSeverity: - colors.ColorMediumRisk(pdf) + colors.ColorMediumRisk(r.pdf) case types.LowSeverity: - colors.ColorLowRisk(pdf) + colors.ColorLowRisk(r.pdf) default: - pdfColorBlack() + r.pdfColorBlack() } if len(types.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) == 0 { - pdfColorBlack() + r.pdfColorBlack() } y += 6 if y > 275 { - pageBreakInLists() + r.pageBreakInLists() y = 40 } countStillAtRisk := len(types.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) @@ -446,11 +451,11 @@ func createTableOfContents(parsedModel *types.ParsedModel) { if len(newRisksStr) != 1 { suffix += "s" } - pdf.Text(11, y, " "+uni(category.Title)+": "+suffix) - pdf.Text(175, y, "{"+category.Id+"}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - tocLinkIdByAssetId[category.Id] = pdf.AddLink() - pdf.Link(10, y-5, 172.5, 6.5, tocLinkIdByAssetId[category.Id]) + r.pdf.Text(11, y, " "+uni(category.Title)+": "+suffix) + r.pdf.Text(175, y, "{"+category.Id+"}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.tocLinkIdByAssetId[category.Id] = r.pdf.AddLink() + r.pdf.Link(10, y-5, 172.5, 6.5, r.tocLinkIdByAssetId[category.Id]) } } @@ -460,23 +465,23 @@ func createTableOfContents(parsedModel *types.ParsedModel) { y += 6 y += 6 if y > 260 { // 260 instead of 275 for major group headlines to avoid "Schusterjungen" - pageBreakInLists() + r.pageBreakInLists() y = 40 } - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdf.SetTextColor(0, 0, 0) - pdf.Text(11, y, "Risks by Technical Asset") - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.SetTextColor(0, 0, 0) + r.pdf.Text(11, y, "Risks by Technical Asset") + r.pdf.SetFont("Helvetica", "", fontSizeBody) y += 6 - pdf.Text(11, y, " "+"Identified Risks by 
Technical Asset") - pdf.Text(175, y, "{intro-risks-by-technical-asset}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"Identified Risks by Technical Asset") + r.pdf.Text(175, y, "{intro-risks-by-technical-asset}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) for _, technicalAsset := range sortedTechnicalAssetsByRiskSeverityAndTitle(parsedModel) { newRisksStr := technicalAsset.GeneratedRisks(parsedModel) y += 6 if y > 275 { - pageBreakInLists() + r.pageBreakInLists() y = 40 } countStillAtRisk := len(types.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) @@ -485,32 +490,32 @@ func createTableOfContents(parsedModel *types.ParsedModel) { suffix += "s" } if technicalAsset.OutOfScope { - pdfColorOutOfScope() + r.pdfColorOutOfScope() suffix = "out-of-scope" } else { switch types.HighestSeverityStillAtRisk(parsedModel, newRisksStr) { case types.CriticalSeverity: - colors.ColorCriticalRisk(pdf) + colors.ColorCriticalRisk(r.pdf) case types.HighSeverity: - colors.ColorHighRisk(pdf) + colors.ColorHighRisk(r.pdf) case types.ElevatedSeverity: - colors.ColorElevatedRisk(pdf) + colors.ColorElevatedRisk(r.pdf) case types.MediumSeverity: - colors.ColorMediumRisk(pdf) + colors.ColorMediumRisk(r.pdf) case types.LowSeverity: - colors.ColorLowRisk(pdf) + colors.ColorLowRisk(r.pdf) default: - pdfColorBlack() + r.pdfColorBlack() } if len(types.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) == 0 { - pdfColorBlack() + r.pdfColorBlack() } } - pdf.Text(11, y, " "+uni(technicalAsset.Title)+": "+suffix) - pdf.Text(175, y, "{"+technicalAsset.Id+"}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - tocLinkIdByAssetId[technicalAsset.Id] = pdf.AddLink() - pdf.Link(10, y-5, 172.5, 6.5, tocLinkIdByAssetId[technicalAsset.Id]) + r.pdf.Text(11, y, " "+uni(technicalAsset.Title)+": "+suffix) + r.pdf.Text(175, y, "{"+technicalAsset.Id+"}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + 
r.tocLinkIdByAssetId[technicalAsset.Id] = r.pdf.AddLink() + r.pdf.Link(10, y-5, 172.5, 6.5, r.tocLinkIdByAssetId[technicalAsset.Id]) } } @@ -520,22 +525,22 @@ func createTableOfContents(parsedModel *types.ParsedModel) { y += 6 y += 6 if y > 260 { // 260 instead of 275 for major group headlines to avoid "Schusterjungen" - pageBreakInLists() + r.pageBreakInLists() y = 40 } - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdfColorBlack() - pdf.Text(11, y, "Data Breach Probabilities by Data Asset") - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdfColorBlack() + r.pdf.Text(11, y, "Data Breach Probabilities by Data Asset") + r.pdf.SetFont("Helvetica", "", fontSizeBody) y += 6 - pdf.Text(11, y, " "+"Identified Data Breach Probabilities by Data Asset") - pdf.Text(175, y, "{intro-risks-by-data-asset}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"Identified Data Breach Probabilities by Data Asset") + r.pdf.Text(175, y, "{intro-risks-by-data-asset}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) for _, dataAsset := range sortedDataAssetsByDataBreachProbabilityAndTitle(parsedModel) { y += 6 if y > 275 { - pageBreakInLists() + r.pageBreakInLists() y = 40 } newRisksStr := dataAsset.IdentifiedDataBreachProbabilityRisks(parsedModel) @@ -546,22 +551,22 @@ func createTableOfContents(parsedModel *types.ParsedModel) { } switch dataAsset.IdentifiedDataBreachProbabilityStillAtRisk(parsedModel) { case types.Probable: - colors.ColorHighRisk(pdf) + colors.ColorHighRisk(r.pdf) case types.Possible: - colors.ColorMediumRisk(pdf) + colors.ColorMediumRisk(r.pdf) case types.Improbable: - colors.ColorLowRisk(pdf) + colors.ColorLowRisk(r.pdf) default: - pdfColorBlack() + r.pdfColorBlack() } if !dataAsset.IsDataBreachPotentialStillAtRisk(parsedModel) { - pdfColorBlack() + r.pdfColorBlack() } - pdf.Text(11, y, " 
"+uni(dataAsset.Title)+": "+suffix) - pdf.Text(175, y, "{data:"+dataAsset.Id+"}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - tocLinkIdByAssetId[dataAsset.Id] = pdf.AddLink() - pdf.Link(10, y-5, 172.5, 6.5, tocLinkIdByAssetId[dataAsset.Id]) + r.pdf.Text(11, y, " "+uni(dataAsset.Title)+": "+suffix) + r.pdf.Text(175, y, "{data:"+dataAsset.Id+"}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.tocLinkIdByAssetId[dataAsset.Id] = r.pdf.AddLink() + r.pdf.Link(10, y-5, 172.5, 6.5, r.tocLinkIdByAssetId[dataAsset.Id]) } } @@ -571,31 +576,31 @@ func createTableOfContents(parsedModel *types.ParsedModel) { y += 6 y += 6 if y > 260 { // 260 instead of 275 for major group headlines to avoid "Schusterjungen" - pageBreakInLists() + r.pageBreakInLists() y = 40 } - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdfColorBlack() - pdf.Text(11, y, "Trust Boundaries") - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdfColorBlack() + r.pdf.Text(11, y, "Trust Boundaries") + r.pdf.SetFont("Helvetica", "", fontSizeBody) for _, key := range types.SortedKeysOfTrustBoundaries(parsedModel) { trustBoundary := parsedModel.TrustBoundaries[key] y += 6 if y > 275 { - pageBreakInLists() + r.pageBreakInLists() y = 40 } - colors.ColorTwilight(pdf) + colors.ColorTwilight(r.pdf) if !trustBoundary.Type.IsNetworkBoundary() { - pdfColorLightGray() + r.pdfColorLightGray() } - pdf.Text(11, y, " "+uni(trustBoundary.Title)) - pdf.Text(175, y, "{boundary:"+trustBoundary.Id+"}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - tocLinkIdByAssetId[trustBoundary.Id] = pdf.AddLink() - pdf.Link(10, y-5, 172.5, 6.5, tocLinkIdByAssetId[trustBoundary.Id]) + r.pdf.Text(11, y, " "+uni(trustBoundary.Title)) + r.pdf.Text(175, y, "{boundary:"+trustBoundary.Id+"}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.tocLinkIdByAssetId[trustBoundary.Id] = r.pdf.AddLink() + r.pdf.Link(10, y-5, 172.5, 6.5, r.tocLinkIdByAssetId[trustBoundary.Id]) } - pdfColorBlack() + r.pdfColorBlack() } // 
=============== @@ -604,25 +609,25 @@ func createTableOfContents(parsedModel *types.ParsedModel) { y += 6 y += 6 if y > 260 { // 260 instead of 275 for major group headlines to avoid "Schusterjungen" - pageBreakInLists() + r.pageBreakInLists() y = 40 } - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdfColorBlack() - pdf.Text(11, y, "Shared Runtime") - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdfColorBlack() + r.pdf.Text(11, y, "Shared Runtime") + r.pdf.SetFont("Helvetica", "", fontSizeBody) for _, key := range types.SortedKeysOfSharedRuntime(parsedModel) { sharedRuntime := parsedModel.SharedRuntimes[key] y += 6 if y > 275 { - pageBreakInLists() + r.pageBreakInLists() y = 40 } - pdf.Text(11, y, " "+uni(sharedRuntime.Title)) - pdf.Text(175, y, "{runtime:"+sharedRuntime.Id+"}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - tocLinkIdByAssetId[sharedRuntime.Id] = pdf.AddLink() - pdf.Link(10, y-5, 172.5, 6.5, tocLinkIdByAssetId[sharedRuntime.Id]) + r.pdf.Text(11, y, " "+uni(sharedRuntime.Title)) + r.pdf.Text(175, y, "{runtime:"+sharedRuntime.Id+"}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.tocLinkIdByAssetId[sharedRuntime.Id] = r.pdf.AddLink() + r.pdf.Link(10, y-5, 172.5, 6.5, r.tocLinkIdByAssetId[sharedRuntime.Id]) } } @@ -631,40 +636,40 @@ func createTableOfContents(parsedModel *types.ParsedModel) { y += 6 y += 6 if y > 260 { // 260 instead of 275 for major group headlines to avoid "Schusterjungen" - pageBreakInLists() + r.pageBreakInLists() y = 40 } - pdfColorBlack() - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdf.Text(11, y, "About Threagile") - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorBlack() + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.Text(11, y, "About Threagile") + r.pdf.SetFont("Helvetica", "", fontSizeBody) y += 6 if y > 275 { - pageBreakInLists() + r.pageBreakInLists() y = 40 } - pdf.Text(11, y, " "+"Risk Rules Checked by Threagile") - pdf.Text(175, y, 
"{risk-rules-checked}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) + r.pdf.Text(11, y, " "+"Risk Rules Checked by Threagile") + r.pdf.Text(175, y, "{risk-rules-checked}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) y += 6 if y > 275 { - pageBreakInLists() + r.pageBreakInLists() y = 40 } - pdfColorDisclaimer() - pdf.Text(11, y, " "+"Disclaimer") - pdf.Text(175, y, "{disclaimer}") - pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) - pdf.Link(10, y-5, 172.5, 6.5, pdf.AddLink()) - pdfColorBlack() + r.pdfColorDisclaimer() + r.pdf.Text(11, y, " "+"Disclaimer") + r.pdf.Text(175, y, "{disclaimer}") + r.pdf.Line(15.6, y+1.3, 11+171.5, y+1.3) + r.pdf.Link(10, y-5, 172.5, 6.5, r.pdf.AddLink()) + r.pdfColorBlack() - pdf.SetDrawColor(0, 0, 0) - pdf.SetDashPattern([]float64{}, 0) + r.pdf.SetDrawColor(0, 0, 0) + r.pdf.SetDashPattern([]float64{}, 0) // Now write all the sections/pages. Before we start writing, we use `RegisterAlias` to // ensure that the alias written in the table of contents will be replaced - // by the current page number. --> See the "pdf.RegisterAlias()" calls during the PDF creation in this file + // by the current page number. 
--> See the "r.pdf.RegisterAlias()" calls during the PDF creation in this file } func sortedTechnicalAssetsByRiskSeverityAndTitle(parsedModel *types.ParsedModel) []types.TechnicalAsset { @@ -686,28 +691,28 @@ func sortedDataAssetsByDataBreachProbabilityAndTitle(parsedModel *types.ParsedMo return assets } -func defineLinkTarget(alias string) { - pageNumbStr := strconv.Itoa(pdf.PageNo()) +func (r *pdfReporter) defineLinkTarget(alias string) { + pageNumbStr := strconv.Itoa(r.pdf.PageNo()) if len(pageNumbStr) == 1 { pageNumbStr = " " + pageNumbStr } else if len(pageNumbStr) == 2 { pageNumbStr = " " + pageNumbStr } - pdf.RegisterAlias(alias, pageNumbStr) - pdf.SetLink(linkCounter, 0, -1) - linkCounter++ + r.pdf.RegisterAlias(alias, pageNumbStr) + r.pdf.SetLink(r.linkCounter, 0, -1) + r.linkCounter++ } -func createDisclaimer(parsedModel *types.ParsedModel) { - pdf.AddPage() - currentChapterTitleBreadcrumb = "Disclaimer" - defineLinkTarget("{disclaimer}") - gofpdi.UseImportedTemplate(pdf, contentTemplateId, 0, 0, 0, 300) - pdfColorDisclaimer() - pdf.SetFont("Helvetica", "B", fontSizeHeadline) - pdf.Text(11, 40, "Disclaimer") - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetY(46) +func (r *pdfReporter) createDisclaimer(parsedModel *types.ParsedModel) { + r.pdf.AddPage() + r.currentChapterTitleBreadcrumb = "Disclaimer" + r.defineLinkTarget("{disclaimer}") + gofpdi.UseImportedTemplate(r.pdf, r.contentTemplateId, 0, 0, 0, 300) + r.pdfColorDisclaimer() + r.pdf.SetFont("Helvetica", "B", fontSizeHeadline) + r.pdf.Text(11, 40, "Disclaimer") + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetY(46) var disclaimer strings.Builder disclaimer.WriteString(parsedModel.Author.Name + " conducted this threat analysis using the open-source Threagile toolkit " + @@ -746,18 +751,18 @@ func createDisclaimer(parsedModel *types.ParsedModel) { "Distribution of this report (in full or in part like diagrams or risk findings) requires that this disclaimer " + "as well as the chapter 
about the Threagile toolkit and method used is kept intact as part of the " + "distributed report or referenced from the distributed parts.") - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() html.Write(5, disclaimer.String()) - pdfColorBlack() + r.pdfColorBlack() } -func createManagementSummary(parsedModel *types.ParsedModel, tempFolder string) { - uni := pdf.UnicodeTranslatorFromDescriptor("") - pdf.SetTextColor(0, 0, 0) +func (r *pdfReporter) createManagementSummary(parsedModel *types.ParsedModel, tempFolder string) error { + uni := r.pdf.UnicodeTranslatorFromDescriptor("") + r.pdf.SetTextColor(0, 0, 0) title := "Management Summary" - addHeadline(title, false) - defineLinkTarget("{management-summary}") - currentChapterTitleBreadcrumb = title + r.addHeadline(title, false) + r.defineLinkTarget("{management-summary}") + r.currentChapterTitleBreadcrumb = title countCritical := len(types.FilteredByOnlyCriticalRisks(parsedModel)) countHigh := len(types.FilteredByOnlyHighRisks(parsedModel)) countElevated := len(types.FilteredByOnlyElevatedRisks(parsedModel)) @@ -771,7 +776,7 @@ func createManagementSummary(parsedModel *types.ParsedModel, tempFolder string) countStatusMitigated := len(types.FilteredByRiskTrackingMitigated(parsedModel)) countStatusFalsePositive := len(types.FilteredByRiskTrackingFalsePositive(parsedModel)) - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() html.Write(5, "Threagile toolkit was used to model the architecture of \""+uni(parsedModel.Title)+"\" "+ "and derive risks by analyzing the components and data flows. The risks identified during this analysis are shown "+ "in the following chapters. 
Identified risks during threat modeling do not necessarily mean that the "+ @@ -788,72 +793,72 @@ func createManagementSummary(parsedModel *types.ParsedModel, tempFolder string) "In total "+strconv.Itoa(types.TotalRiskCount(parsedModel))+" initial risks in "+strconv.Itoa(len(parsedModel.GeneratedRisksByCategory))+" categories have "+ "been identified during the threat modeling process:

") // TODO plural singular stuff risk/s category/ies has/have - pdf.SetFont("Helvetica", "B", fontSizeBody) - - pdf.CellFormat(17, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(60, 6, "", "0", 0, "", false, 0, "") - colors.ColorRiskStatusUnchecked(pdf) - pdf.CellFormat(23, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countStatusUnchecked), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "unchecked", "0", 0, "", false, 0, "") - pdf.Ln(-1) - - colors.ColorCriticalRisk(pdf) - pdf.CellFormat(17, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countCritical), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "critical risk", "0", 0, "", false, 0, "") - colors.ColorRiskStatusInDiscussion(pdf) - pdf.CellFormat(23, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countStatusInDiscussion), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "in discussion", "0", 0, "", false, 0, "") - pdf.Ln(-1) - - colors.ColorHighRisk(pdf) - pdf.CellFormat(17, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countHigh), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "high risk", "0", 0, "", false, 0, "") - colors.ColorRiskStatusAccepted(pdf) - pdf.CellFormat(23, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countStatusAccepted), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "accepted", "0", 0, "", false, 0, "") - pdf.Ln(-1) - - colors.ColorElevatedRisk(pdf) - pdf.CellFormat(17, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countElevated), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "elevated risk", "0", 0, "", false, 0, "") - colors.ColorRiskStatusInProgress(pdf) - pdf.CellFormat(23, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countStatusInProgress), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "in progress", "0", 0, "", false, 0, "") - pdf.Ln(-1) 
- - colors.ColorMediumRisk(pdf) - pdf.CellFormat(17, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countMedium), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "medium risk", "0", 0, "", false, 0, "") - colors.ColorRiskStatusMitigated(pdf) - pdf.CellFormat(23, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countStatusMitigated), "0", 0, "R", false, 0, "") - pdf.SetFont("Helvetica", "BI", fontSizeBody) - pdf.CellFormat(60, 6, "mitigated", "0", 0, "", false, 0, "") - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdf.Ln(-1) - - colors.ColorLowRisk(pdf) - pdf.CellFormat(17, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countLow), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "low risk", "0", 0, "", false, 0, "") - colors.ColorRiskStatusFalsePositive(pdf) - pdf.CellFormat(23, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countStatusFalsePositive), "0", 0, "R", false, 0, "") - pdf.SetFont("Helvetica", "BI", fontSizeBody) - pdf.CellFormat(60, 6, "false positive", "0", 0, "", false, 0, "") - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdf.Ln(-1) - - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + + r.pdf.CellFormat(17, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(60, 6, "", "0", 0, "", false, 0, "") + colors.ColorRiskStatusUnchecked(r.pdf) + r.pdf.CellFormat(23, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusUnchecked), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "unchecked", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + + colors.ColorCriticalRisk(r.pdf) + r.pdf.CellFormat(17, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countCritical), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "critical risk", "0", 0, "", false, 0, "") + colors.ColorRiskStatusInDiscussion(r.pdf) + 
r.pdf.CellFormat(23, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusInDiscussion), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "in discussion", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + + colors.ColorHighRisk(r.pdf) + r.pdf.CellFormat(17, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countHigh), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "high risk", "0", 0, "", false, 0, "") + colors.ColorRiskStatusAccepted(r.pdf) + r.pdf.CellFormat(23, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusAccepted), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "accepted", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + + colors.ColorElevatedRisk(r.pdf) + r.pdf.CellFormat(17, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countElevated), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "elevated risk", "0", 0, "", false, 0, "") + colors.ColorRiskStatusInProgress(r.pdf) + r.pdf.CellFormat(23, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusInProgress), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "in progress", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + + colors.ColorMediumRisk(r.pdf) + r.pdf.CellFormat(17, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countMedium), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "medium risk", "0", 0, "", false, 0, "") + colors.ColorRiskStatusMitigated(r.pdf) + r.pdf.CellFormat(23, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusMitigated), "0", 0, "R", false, 0, "") + r.pdf.SetFont("Helvetica", "BI", fontSizeBody) + r.pdf.CellFormat(60, 6, "mitigated", "0", 0, "", false, 0, "") + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.Ln(-1) + + colors.ColorLowRisk(r.pdf) + r.pdf.CellFormat(17, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countLow), "0", 0, "R", false, 0, 
"") + r.pdf.CellFormat(60, 6, "low risk", "0", 0, "", false, 0, "") + colors.ColorRiskStatusFalsePositive(r.pdf) + r.pdf.CellFormat(23, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusFalsePositive), "0", 0, "R", false, 0, "") + r.pdf.SetFont("Helvetica", "BI", fontSizeBody) + r.pdf.CellFormat(60, 6, "false positive", "0", 0, "", false, 0, "") + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.Ln(-1) + + r.pdf.SetFont("Helvetica", "", fontSizeBody) // pie chart: risk severity pieChartRiskSeverity := chart.PieChart{ @@ -926,28 +931,36 @@ func createManagementSummary(parsedModel *types.ParsedModel, tempFolder string) }, } - y := pdf.GetY() + 5 - embedPieChart(pieChartRiskSeverity, 15.0, y, tempFolder) - embedPieChart(pieChartRiskStatus, 110.0, y, tempFolder) + y := r.pdf.GetY() + 5 + err := r.embedPieChart(pieChartRiskSeverity, 15.0, y, tempFolder) + if err != nil { + return fmt.Errorf("unable to embed pie chart: %w", err) + } + + err = r.embedPieChart(pieChartRiskStatus, 110.0, y, tempFolder) + if err != nil { + return fmt.Errorf("unable to embed pie chart: %w", err) + } // individual management summary comment - pdfColorBlack() + r.pdfColorBlack() if len(parsedModel.ManagementSummaryComment) > 0 { html.Write(5, "















"+ parsedModel.ManagementSummaryComment) } + return nil } -func createRiskMitigationStatus(parsedModel *types.ParsedModel, tempFolder string) { - pdf.SetTextColor(0, 0, 0) +func (r *pdfReporter) createRiskMitigationStatus(parsedModel *types.ParsedModel, tempFolder string) error { + r.pdf.SetTextColor(0, 0, 0) stillAtRisk := types.FilteredByStillAtRisk(parsedModel) count := len(stillAtRisk) title := "Risk Mitigation" - addHeadline(title, false) - defineLinkTarget("{risk-mitigation-status}") - currentChapterTitleBreadcrumb = title + r.addHeadline(title, false) + r.defineLinkTarget("{risk-mitigation-status}") + r.currentChapterTitleBreadcrumb = title - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() html.Write(5, "The following chart gives a high-level overview of the risk tracking status (including mitigated risks):") risksCritical := types.FilteredByOnlyCriticalRisks(parsedModel) @@ -1062,59 +1075,62 @@ func createRiskMitigationStatus(parsedModel *types.ParsedModel, tempFolder strin }, } - y := pdf.GetY() + 12 - embedStackedBarChart(stackedBarChartRiskTracking, 15.0, y, tempFolder) + y := r.pdf.GetY() + 12 + err := r.embedStackedBarChart(stackedBarChartRiskTracking, 15.0, y, tempFolder) + if err != nil { + return err + } // draw the X-Axis legend on my own - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdfColorBlack() - pdf.Text(24.02, 169, "Low ("+strconv.Itoa(len(risksLow))+")") - pdf.Text(46.10, 169, "Medium ("+strconv.Itoa(len(risksMedium))+")") - pdf.Text(69.74, 169, "Elevated ("+strconv.Itoa(len(risksElevated))+")") - pdf.Text(97.95, 169, "High ("+strconv.Itoa(len(risksHigh))+")") - pdf.Text(121.65, 169, "Critical ("+strconv.Itoa(len(risksCritical))+")") - - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdf.Ln(20) - - colors.ColorRiskStatusUnchecked(pdf) - pdf.CellFormat(150, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countStatusUnchecked), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "unchecked", "0", 0, "", false, 
0, "") - pdf.Ln(-1) - colors.ColorRiskStatusInDiscussion(pdf) - pdf.CellFormat(150, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countStatusInDiscussion), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "in discussion", "0", 0, "", false, 0, "") - pdf.Ln(-1) - colors.ColorRiskStatusAccepted(pdf) - pdf.CellFormat(150, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countStatusAccepted), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "accepted", "0", 0, "", false, 0, "") - pdf.Ln(-1) - colors.ColorRiskStatusInProgress(pdf) - pdf.CellFormat(150, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countStatusInProgress), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "in progress", "0", 0, "", false, 0, "") - pdf.Ln(-1) - colors.ColorRiskStatusMitigated(pdf) - pdf.CellFormat(150, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countStatusMitigated), "0", 0, "R", false, 0, "") - pdf.SetFont("Helvetica", "BI", fontSizeBody) - pdf.CellFormat(60, 6, "mitigated", "0", 0, "", false, 0, "") - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdf.Ln(-1) - colors.ColorRiskStatusFalsePositive(pdf) - pdf.CellFormat(150, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countStatusFalsePositive), "0", 0, "R", false, 0, "") - pdf.SetFont("Helvetica", "BI", fontSizeBody) - pdf.CellFormat(60, 6, "false positive", "0", 0, "", false, 0, "") - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdf.Ln(-1) - - pdf.SetFont("Helvetica", "", fontSizeBody) - - pdfColorBlack() + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdfColorBlack() + r.pdf.Text(24.02, 169, "Low ("+strconv.Itoa(len(risksLow))+")") + r.pdf.Text(46.10, 169, "Medium ("+strconv.Itoa(len(risksMedium))+")") + r.pdf.Text(69.74, 169, "Elevated ("+strconv.Itoa(len(risksElevated))+")") + r.pdf.Text(97.95, 169, "High ("+strconv.Itoa(len(risksHigh))+")") + r.pdf.Text(121.65, 169, "Critical 
("+strconv.Itoa(len(risksCritical))+")") + + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.Ln(20) + + colors.ColorRiskStatusUnchecked(r.pdf) + r.pdf.CellFormat(150, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusUnchecked), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "unchecked", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + colors.ColorRiskStatusInDiscussion(r.pdf) + r.pdf.CellFormat(150, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusInDiscussion), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "in discussion", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + colors.ColorRiskStatusAccepted(r.pdf) + r.pdf.CellFormat(150, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusAccepted), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "accepted", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + colors.ColorRiskStatusInProgress(r.pdf) + r.pdf.CellFormat(150, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusInProgress), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "in progress", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + colors.ColorRiskStatusMitigated(r.pdf) + r.pdf.CellFormat(150, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusMitigated), "0", 0, "R", false, 0, "") + r.pdf.SetFont("Helvetica", "BI", fontSizeBody) + r.pdf.CellFormat(60, 6, "mitigated", "0", 0, "", false, 0, "") + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.Ln(-1) + colors.ColorRiskStatusFalsePositive(r.pdf) + r.pdf.CellFormat(150, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusFalsePositive), "0", 0, "R", false, 0, "") + r.pdf.SetFont("Helvetica", "BI", fontSizeBody) + r.pdf.CellFormat(60, 6, "false positive", "0", 0, "", false, 0, "") + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.Ln(-1) + + r.pdf.SetFont("Helvetica", "", fontSizeBody) + + r.pdfColorBlack() 
if count == 0 { html.Write(5, "














"+ "After removal of risks with status mitigated and false positive "+ @@ -1190,88 +1206,101 @@ func createRiskMitigationStatus(parsedModel *types.ParsedModel, tempFolder strin }, } - embedPieChart(pieChartRemainingRiskSeverity, 15.0, 216, tempFolder) - embedPieChart(pieChartRemainingRisksByFunction, 110.0, 216, tempFolder) - - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdf.Ln(8) - - colors.ColorCriticalRisk(pdf) - pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countCritical), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "unmitigated critical risk", "0", 0, "", false, 0, "") - pdf.CellFormat(22, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, "", "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "", "0", 0, "", false, 0, "") - pdf.Ln(-1) - colors.ColorHighRisk(pdf) - pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countHigh), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "unmitigated high risk", "0", 0, "", false, 0, "") - colors.ColorBusiness(pdf) - pdf.CellFormat(22, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countBusinessSide), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "business side related", "0", 0, "", false, 0, "") - pdf.Ln(-1) - colors.ColorElevatedRisk(pdf) - pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countElevated), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "unmitigated elevated risk", "0", 0, "", false, 0, "") - colors.ColorArchitecture(pdf) - pdf.CellFormat(22, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countArchitecture), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "architecture related", "0", 0, "", false, 0, "") - pdf.Ln(-1) - colors.ColorMediumRisk(pdf) - pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countMedium), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "unmitigated 
medium risk", "0", 0, "", false, 0, "") - colors.ColorDevelopment(pdf) - pdf.CellFormat(22, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countDevelopment), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "development related", "0", 0, "", false, 0, "") - pdf.Ln(-1) - colors.ColorLowRisk(pdf) - pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countLow), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "unmitigated low risk", "0", 0, "", false, 0, "") - colors.ColorOperation(pdf) - pdf.CellFormat(22, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(10, 6, strconv.Itoa(countOperation), "0", 0, "R", false, 0, "") - pdf.CellFormat(60, 6, "operations related", "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - } + r.embedPieChart(pieChartRemainingRiskSeverity, 15.0, 216, tempFolder) + r.embedPieChart(pieChartRemainingRisksByFunction, 110.0, 216, tempFolder) + + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.Ln(8) + + colors.ColorCriticalRisk(r.pdf) + r.pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countCritical), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "unmitigated critical risk", "0", 0, "", false, 0, "") + r.pdf.CellFormat(22, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, "", "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + colors.ColorHighRisk(r.pdf) + r.pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countHigh), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "unmitigated high risk", "0", 0, "", false, 0, "") + colors.ColorBusiness(r.pdf) + r.pdf.CellFormat(22, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countBusinessSide), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "business side related", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + 
colors.ColorElevatedRisk(r.pdf) + r.pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countElevated), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "unmitigated elevated risk", "0", 0, "", false, 0, "") + colors.ColorArchitecture(r.pdf) + r.pdf.CellFormat(22, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countArchitecture), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "architecture related", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + colors.ColorMediumRisk(r.pdf) + r.pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countMedium), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "unmitigated medium risk", "0", 0, "", false, 0, "") + colors.ColorDevelopment(r.pdf) + r.pdf.CellFormat(22, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countDevelopment), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "development related", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + colors.ColorLowRisk(r.pdf) + r.pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countLow), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "unmitigated low risk", "0", 0, "", false, 0, "") + colors.ColorOperation(r.pdf) + r.pdf.CellFormat(22, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(10, 6, strconv.Itoa(countOperation), "0", 0, "R", false, 0, "") + r.pdf.CellFormat(60, 6, "operations related", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + } + return nil } // CAUTION: Long labels might cause endless loop, then remove labels and render them manually later inside the PDF -func embedStackedBarChart(sbcChart chart.StackedBarChart, x float64, y float64, tempFolder string) { +func (r *pdfReporter) embedStackedBarChart(sbcChart chart.StackedBarChart, x float64, y float64, tempFolder string) error { tmpFilePNG, err := os.CreateTemp(tempFolder, "chart-*-.png") - 
checkErr(err) + if err != nil { + return fmt.Errorf("error creating temporary file for chart: %w", err) + } defer func() { _ = os.Remove(tmpFilePNG.Name()) }() file, _ := os.Create(tmpFilePNG.Name()) defer func() { _ = file.Close() }() err = sbcChart.Render(chart.PNG, file) - checkErr(err) + if err != nil { + return fmt.Errorf("error rendering chart: %w", err) + } var options gofpdf.ImageOptions options.ImageType = "" - pdf.RegisterImage(tmpFilePNG.Name(), "") - pdf.ImageOptions(tmpFilePNG.Name(), x, y, 0, 110, false, options, 0, "") + r.pdf.RegisterImage(tmpFilePNG.Name(), "") + r.pdf.ImageOptions(tmpFilePNG.Name(), x, y, 0, 110, false, options, 0, "") + return nil } -func embedPieChart(pieChart chart.PieChart, x float64, y float64, tempFolder string) { +func (r *pdfReporter) embedPieChart(pieChart chart.PieChart, x float64, y float64, tempFolder string) error { tmpFilePNG, err := os.CreateTemp(tempFolder, "chart-*-.png") - checkErr(err) + if err != nil { + return fmt.Errorf("error creating temporary file for chart: %w", err) + } defer func() { _ = os.Remove(tmpFilePNG.Name()) }() file, err := os.Create(tmpFilePNG.Name()) - checkErr(err) + if err != nil { + return fmt.Errorf("error creating temporary file for chart: %w", err) + } defer func() { _ = file.Close() }() err = pieChart.Render(chart.PNG, file) - checkErr(err) + if err != nil { + return fmt.Errorf("error rendering chart: %w", err) + } var options gofpdf.ImageOptions options.ImageType = "" - pdf.RegisterImage(tmpFilePNG.Name(), "") - pdf.ImageOptions(tmpFilePNG.Name(), x, y, 60, 0, false, options, 0, "") + r.pdf.RegisterImage(tmpFilePNG.Name(), "") + r.pdf.ImageOptions(tmpFilePNG.Name(), x, y, 60, 0, false, options, 0, "") + return nil } func makeColor(hexColor string) drawing.Color { @@ -1279,16 +1308,16 @@ func makeColor(hexColor string) drawing.Color { return drawing.ColorFromHex(hexColor[i:]) // = remove first char, which is # in rgb hex here } -func createImpactInitialRisks(parsedModel 
*types.ParsedModel) { - renderImpactAnalysis(parsedModel, true) +func (r *pdfReporter) createImpactInitialRisks(parsedModel *types.ParsedModel) { + r.renderImpactAnalysis(parsedModel, true) } -func createImpactRemainingRisks(parsedModel *types.ParsedModel) { - renderImpactAnalysis(parsedModel, false) +func (r *pdfReporter) createImpactRemainingRisks(parsedModel *types.ParsedModel) { + r.renderImpactAnalysis(parsedModel, false) } -func renderImpactAnalysis(parsedModel *types.ParsedModel, initialRisks bool) { - pdf.SetTextColor(0, 0, 0) +func (r *pdfReporter) renderImpactAnalysis(parsedModel *types.ParsedModel, initialRisks bool) { + r.pdf.SetTextColor(0, 0, 0) count, catCount := types.TotalRiskCount(parsedModel), len(parsedModel.GeneratedRisksByCategory) if !initialRisks { count, catCount = len(types.FilteredByStillAtRisk(parsedModel)), len(types.CategoriesOfOnlyRisksStillAtRisk(parsedModel, parsedModel.GeneratedRisksByCategory)) @@ -1302,17 +1331,17 @@ func renderImpactAnalysis(parsedModel *types.ParsedModel, initialRisks bool) { } if initialRisks { chapTitle := "Impact Analysis of " + strconv.Itoa(count) + " Initial " + riskStr + " in " + strconv.Itoa(catCount) + " " + catStr - addHeadline(chapTitle, false) - defineLinkTarget("{impact-analysis-initial-risks}") - currentChapterTitleBreadcrumb = chapTitle + r.addHeadline(chapTitle, false) + r.defineLinkTarget("{impact-analysis-initial-risks}") + r.currentChapterTitleBreadcrumb = chapTitle } else { chapTitle := "Impact Analysis of " + strconv.Itoa(count) + " Remaining " + riskStr + " in " + strconv.Itoa(catCount) + " " + catStr - addHeadline(chapTitle, false) - defineLinkTarget("{impact-analysis-remaining-risks}") - currentChapterTitleBreadcrumb = chapTitle + r.addHeadline(chapTitle, false) + r.defineLinkTarget("{impact-analysis-remaining-risks}") + r.currentChapterTitleBreadcrumb = chapTitle } - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() var strBuilder strings.Builder riskStr = "risks" if count == 1 { 
@@ -1327,65 +1356,65 @@ func renderImpactAnalysis(parsedModel *types.ParsedModel, initialRisks bool) { "(taking the severity ratings into account and using the highest for each category):
") html.Write(5, strBuilder.String()) strBuilder.Reset() - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdfColorGray() + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdfColorGray() html.Write(5, "Risk finding paragraphs are clickable and link to the corresponding chapter.") - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetFont("Helvetica", "", fontSizeBody) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), types.CriticalSeverity, false, initialRisks, true, false) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), types.HighSeverity, false, initialRisks, true, false) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), types.ElevatedSeverity, false, initialRisks, true, false) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), types.MediumSeverity, false, initialRisks, true, false) - addCategories(parsedModel, 
types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, parsedModel.GeneratedRisksByCategory, initialRisks)), types.LowSeverity, false, initialRisks, true, false) - pdf.SetDrawColor(0, 0, 0) - pdf.SetDashPattern([]float64{}, 0) + r.pdf.SetDrawColor(0, 0, 0) + r.pdf.SetDashPattern([]float64{}, 0) } -func createOutOfScopeAssets(parsedModel *types.ParsedModel) { - uni := pdf.UnicodeTranslatorFromDescriptor("") - pdf.SetTextColor(0, 0, 0) +func (r *pdfReporter) createOutOfScopeAssets(parsedModel *types.ParsedModel) { + uni := r.pdf.UnicodeTranslatorFromDescriptor("") + r.pdf.SetTextColor(0, 0, 0) assets := "Assets" count := len(parsedModel.OutOfScopeTechnicalAssets()) if count == 1 { assets = "Asset" } chapTitle := "Out-of-Scope Assets: " + strconv.Itoa(count) + " " + assets - addHeadline(chapTitle, false) - defineLinkTarget("{out-of-scope-assets}") - currentChapterTitleBreadcrumb = chapTitle + r.addHeadline(chapTitle, false) + r.defineLinkTarget("{out-of-scope-assets}") + r.currentChapterTitleBreadcrumb = chapTitle - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() var strBuilder strings.Builder strBuilder.WriteString("This chapter lists all technical assets that have been defined as out-of-scope. " + "Each one should be checked in the model whether it should better be included in the " + "overall risk analysis:
") html.Write(5, strBuilder.String()) strBuilder.Reset() - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdfColorGray() + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdfColorGray() html.Write(5, "Technical asset paragraphs are clickable and link to the corresponding chapter.") - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetFont("Helvetica", "", fontSizeBody) outOfScopeAssetCount := 0 for _, technicalAsset := range sortedTechnicalAssetsByRAAAndTitle(parsedModel) { if technicalAsset.OutOfScope { outOfScopeAssetCount++ - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { strBuilder.WriteString("

") } html.Write(5, strBuilder.String()) strBuilder.Reset() - posY := pdf.GetY() - pdfColorOutOfScope() + posY := r.pdf.GetY() + r.pdfColorOutOfScope() strBuilder.WriteString("") strBuilder.WriteString(uni(technicalAsset.Title)) strBuilder.WriteString("") @@ -1393,21 +1422,21 @@ func createOutOfScopeAssets(parsedModel *types.ParsedModel) { strBuilder.WriteString("
") html.Write(5, strBuilder.String()) strBuilder.Reset() - pdf.SetTextColor(0, 0, 0) + r.pdf.SetTextColor(0, 0, 0) strBuilder.WriteString(uni(technicalAsset.JustificationOutOfScope)) html.Write(5, strBuilder.String()) strBuilder.Reset() - pdf.Link(9, posY, 190, pdf.GetY()-posY+4, tocLinkIdByAssetId[technicalAsset.Id]) + r.pdf.Link(9, posY, 190, r.pdf.GetY()-posY+4, r.tocLinkIdByAssetId[technicalAsset.Id]) } } if outOfScopeAssetCount == 0 { - pdfColorGray() + r.pdfColorGray() html.Write(5, "

No technical assets have been defined as out-of-scope.") } - pdf.SetDrawColor(0, 0, 0) - pdf.SetDashPattern([]float64{}, 0) + r.pdf.SetDrawColor(0, 0, 0) + r.pdf.SetDashPattern([]float64{}, 0) } func sortedTechnicalAssetsByRAAAndTitle(parsedModel *types.ParsedModel) []types.TechnicalAsset { @@ -1419,8 +1448,8 @@ func sortedTechnicalAssetsByRAAAndTitle(parsedModel *types.ParsedModel) []types. return assets } -func createModelFailures(parsedModel *types.ParsedModel) { - pdf.SetTextColor(0, 0, 0) +func (r *pdfReporter) createModelFailures(parsedModel *types.ParsedModel) { + r.pdf.SetTextColor(0, 0, 0) modelFailures := types.FlattenRiskSlice(types.FilterByModelFailures(parsedModel, parsedModel.GeneratedRisksByCategory)) risksStr := "Risks" count := len(modelFailures) @@ -1429,94 +1458,94 @@ func createModelFailures(parsedModel *types.ParsedModel) { } countStillAtRisk := len(types.ReduceToOnlyStillAtRisk(parsedModel, modelFailures)) if countStillAtRisk > 0 { - colors.ColorModelFailure(pdf) + colors.ColorModelFailure(r.pdf) } chapTitle := "Potential Model Failures: " + strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(count) + " " + risksStr - addHeadline(chapTitle, false) - defineLinkTarget("{model-failures}") - currentChapterTitleBreadcrumb = chapTitle - pdfColorBlack() + r.addHeadline(chapTitle, false) + r.defineLinkTarget("{model-failures}") + r.currentChapterTitleBreadcrumb = chapTitle + r.pdfColorBlack() - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() var strBuilder strings.Builder strBuilder.WriteString("This chapter lists potential model failures where not all relevant assets have been " + "modeled or the model might itself contain inconsistencies. Each potential model failure should be checked " + "in the model against the architecture design:
") html.Write(5, strBuilder.String()) strBuilder.Reset() - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdfColorGray() + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdfColorGray() html.Write(5, "Risk finding paragraphs are clickable and link to the corresponding chapter.") - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetFont("Helvetica", "", fontSizeBody) modelFailuresByCategory := types.FilterByModelFailures(parsedModel, parsedModel.GeneratedRisksByCategory) if len(modelFailuresByCategory) == 0 { - pdfColorGray() + r.pdfColorGray() html.Write(5, "

No potential model failures have been identified.") } else { - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, modelFailuresByCategory, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, modelFailuresByCategory, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, modelFailuresByCategory, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, modelFailuresByCategory, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, modelFailuresByCategory, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, modelFailuresByCategory, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, modelFailuresByCategory, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, modelFailuresByCategory, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, modelFailuresByCategory, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, modelFailuresByCategory, true)), types.LowSeverity, true, true, false, true) } - pdf.SetDrawColor(0, 0, 0) - pdf.SetDashPattern([]float64{}, 0) + r.pdf.SetDrawColor(0, 0, 0) + r.pdf.SetDashPattern([]float64{}, 0) } -func createRAA(parsedModel *types.ParsedModel, introTextRAA string) { - uni := 
pdf.UnicodeTranslatorFromDescriptor("") - pdf.SetTextColor(0, 0, 0) +func (r *pdfReporter) createRAA(parsedModel *types.ParsedModel, introTextRAA string) { + uni := r.pdf.UnicodeTranslatorFromDescriptor("") + r.pdf.SetTextColor(0, 0, 0) chapTitle := "RAA Analysis" - addHeadline(chapTitle, false) - defineLinkTarget("{raa-analysis}") - currentChapterTitleBreadcrumb = chapTitle + r.addHeadline(chapTitle, false) + r.defineLinkTarget("{raa-analysis}") + r.currentChapterTitleBreadcrumb = chapTitle - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() var strBuilder strings.Builder strBuilder.WriteString(introTextRAA) strBuilder.WriteString("
") html.Write(5, strBuilder.String()) strBuilder.Reset() - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdfColorGray() + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdfColorGray() html.Write(5, "Technical asset paragraphs are clickable and link to the corresponding chapter.") - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetFont("Helvetica", "", fontSizeBody) for _, technicalAsset := range sortedTechnicalAssetsByRAAAndTitle(parsedModel) { if technicalAsset.OutOfScope { continue } - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { strBuilder.WriteString("

") } newRisksStr := technicalAsset.GeneratedRisks(parsedModel) switch types.HighestSeverityStillAtRisk(parsedModel, newRisksStr) { case types.HighSeverity: - colors.ColorHighRisk(pdf) + colors.ColorHighRisk(r.pdf) case types.MediumSeverity: - colors.ColorMediumRisk(pdf) + colors.ColorMediumRisk(r.pdf) case types.LowSeverity: - colors.ColorLowRisk(pdf) + colors.ColorLowRisk(r.pdf) default: - pdfColorBlack() + r.pdfColorBlack() } if len(types.ReduceToOnlyStillAtRisk(parsedModel, newRisksStr)) == 0 { - pdfColorBlack() + r.pdfColorBlack() } html.Write(5, strBuilder.String()) strBuilder.Reset() - posY := pdf.GetY() + posY := r.pdf.GetY() strBuilder.WriteString("") strBuilder.WriteString(uni(technicalAsset.Title)) strBuilder.WriteString("") @@ -1530,32 +1559,32 @@ func createRAA(parsedModel *types.ParsedModel, introTextRAA string) { strBuilder.WriteString("
") html.Write(5, strBuilder.String()) strBuilder.Reset() - pdf.SetTextColor(0, 0, 0) + r.pdf.SetTextColor(0, 0, 0) strBuilder.WriteString(uni(technicalAsset.Description)) html.Write(5, strBuilder.String()) strBuilder.Reset() - pdf.Link(9, posY, 190, pdf.GetY()-posY+4, tocLinkIdByAssetId[technicalAsset.Id]) + r.pdf.Link(9, posY, 190, r.pdf.GetY()-posY+4, r.tocLinkIdByAssetId[technicalAsset.Id]) } - pdf.SetDrawColor(0, 0, 0) - pdf.SetDashPattern([]float64{}, 0) + r.pdf.SetDrawColor(0, 0, 0) + r.pdf.SetDashPattern([]float64{}, 0) } /* func createDataRiskQuickWins() { - uni := pdf.UnicodeTranslatorFromDescriptor("") - pdf.SetTextColor(0, 0, 0) + uni := r.pdf.UnicodeTranslatorFromDescriptor("") + r.pdf.SetTextColor(0, 0, 0) assets := "assets" count := len(model.SortedTechnicalAssetsByQuickWinsAndTitle()) if count == 1 { assets = "asset" } chapTitle := "Data Risk Quick Wins: " + strconv.Itoa(count) + " " + assets - addHeadline(chapTitle, false) + r.addHeadline(chapTitle, false) defineLinkTarget("{data-risk-quick-wins}") currentChapterTitleBreadcrumb = chapTitle - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() var strBuilder strings.Builder strBuilder.WriteString("For each technical asset it was checked how many data assets at risk might " + "get their risk-rating reduced (partly or fully) when the risks of the technical asset are mitigated. " + @@ -1564,37 +1593,37 @@ func createDataRiskQuickWins() { "This list can be used to prioritize on efforts with the greatest effects of reducing data asset risks:
") html.Write(5, strBuilder.String()) strBuilder.Reset() - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdfColorGray() + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdfColorGray() html.Write(5, "Technical asset paragraphs are clickable and link to the corresponding chapter.") - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetFont("Helvetica", "", fontSizeBody) for _, technicalAsset := range model.SortedTechnicalAssetsByQuickWinsAndTitle() { quickWins := technicalAsset.QuickWins() - if pdf.GetY() > 260 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 260 { + r.pageBreak() + r.pdf.SetY(36) } else { strBuilder.WriteString("

") } risks := technicalAsset.GeneratedRisks() switch model.HighestSeverityStillAtRisk(risks) { case model.High: - colors.ColorHighRisk(pdf) + colors.ColorHighRisk(r.pdf) case model.Medium: - colors.ColorMediumRisk(pdf) + colors.ColorMediumRisk(r.pdf) case model.Low: - colors.ColorLowRisk(pdf) + colors.ColorLowRisk(r.pdf) default: - pdfColorBlack() + r.pdfColorBlack() } if len(model.ReduceToOnlyStillAtRisk(risks)) == 0 { - pdfColorBlack() + r.pdfColorBlack() } html.Write(5, strBuilder.String()) strBuilder.Reset() - posY := pdf.GetY() + posY := r.pdf.GetY() strBuilder.WriteString("") strBuilder.WriteString(uni(technicalAsset.Title)) strBuilder.WriteString("") @@ -1604,20 +1633,20 @@ func createDataRiskQuickWins() { strBuilder.WriteString("
") html.Write(5, strBuilder.String()) strBuilder.Reset() - pdf.SetTextColor(0, 0, 0) + r.pdf.SetTextColor(0, 0, 0) strBuilder.WriteString(uni(technicalAsset.Description)) html.Write(5, strBuilder.String()) strBuilder.Reset() - pdf.Link(9, posY, 190, pdf.GetY()-posY+4, tocLinkIdByAssetId[technicalAsset.Id]) + r.pdf.Link(9, posY, 190, r.pdf.GetY()-posY+4, tocLinkIdByAssetId[technicalAsset.Id]) } - pdf.SetDrawColor(0, 0, 0) - pdf.SetDashPattern([]float64{}, 0) + r.pdf.SetDrawColor(0, 0, 0) + r.pdf.SetDashPattern([]float64{}, 0) } */ -func addCategories(parsedModel *types.ParsedModel, riskCategories []types.RiskCategory, severity types.RiskSeverity, bothInitialAndRemainingRisks bool, initialRisks bool, describeImpact bool, describeDescription bool) { - html := pdf.HTMLBasicNew() +func (r *pdfReporter) addCategories(parsedModel *types.ParsedModel, riskCategories []types.RiskCategory, severity types.RiskSeverity, bothInitialAndRemainingRisks bool, initialRisks bool, describeImpact bool, describeDescription bool) { + html := r.pdf.HTMLBasicNew() var strBuilder strings.Builder sort.Sort(types.ByRiskCategoryTitleSort(riskCategories)) for _, riskCategory := range riskCategories { @@ -1628,51 +1657,51 @@ func addCategories(parsedModel *types.ParsedModel, riskCategories []types.RiskCa if len(risksStr) == 0 { continue } - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { strBuilder.WriteString("

") } var prefix string switch severity { case types.CriticalSeverity: - colors.ColorCriticalRisk(pdf) + colors.ColorCriticalRisk(r.pdf) prefix = "Critical: " case types.HighSeverity: - colors.ColorHighRisk(pdf) + colors.ColorHighRisk(r.pdf) prefix = "High: " case types.ElevatedSeverity: - colors.ColorElevatedRisk(pdf) + colors.ColorElevatedRisk(r.pdf) prefix = "Elevated: " case types.MediumSeverity: - colors.ColorMediumRisk(pdf) + colors.ColorMediumRisk(r.pdf) prefix = "Medium: " case types.LowSeverity: - colors.ColorLowRisk(pdf) + colors.ColorLowRisk(r.pdf) prefix = "Low: " default: - pdfColorBlack() + r.pdfColorBlack() prefix = "" } switch types.HighestSeverityStillAtRisk(parsedModel, risksStr) { case types.CriticalSeverity: - colors.ColorCriticalRisk(pdf) + colors.ColorCriticalRisk(r.pdf) case types.HighSeverity: - colors.ColorHighRisk(pdf) + colors.ColorHighRisk(r.pdf) case types.ElevatedSeverity: - colors.ColorElevatedRisk(pdf) + colors.ColorElevatedRisk(r.pdf) case types.MediumSeverity: - colors.ColorMediumRisk(pdf) + colors.ColorMediumRisk(r.pdf) case types.LowSeverity: - colors.ColorLowRisk(pdf) + colors.ColorLowRisk(r.pdf) } if len(types.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) == 0 { - pdfColorBlack() + r.pdfColorBlack() } html.Write(5, strBuilder.String()) strBuilder.Reset() - posY := pdf.GetY() + posY := r.pdf.GetY() strBuilder.WriteString(prefix) strBuilder.WriteString("") strBuilder.WriteString(riskCategory.Title) @@ -1699,7 +1728,7 @@ func addCategories(parsedModel *types.ParsedModel, riskCategories []types.RiskCa strBuilder.WriteString(suffix + "
") html.Write(5, strBuilder.String()) strBuilder.Reset() - pdf.SetTextColor(0, 0, 0) + r.pdf.SetTextColor(0, 0, 0) if describeImpact { strBuilder.WriteString(firstParagraph(riskCategory.Impact)) } else if describeDescription { @@ -1709,11 +1738,12 @@ func addCategories(parsedModel *types.ParsedModel, riskCategories []types.RiskCa } html.Write(5, strBuilder.String()) strBuilder.Reset() - pdf.Link(9, posY, 190, pdf.GetY()-posY+4, tocLinkIdByAssetId[riskCategory.Id]) + r.pdf.Link(9, posY, 190, r.pdf.GetY()-posY+4, r.tocLinkIdByAssetId[riskCategory.Id]) } } func firstParagraph(text string) string { + firstParagraphRegEx := regexp.MustCompile(`(.*?)((
)|(

))`) match := firstParagraphRegEx.FindStringSubmatch(text) if len(match) == 0 { return text @@ -1721,12 +1751,12 @@ func firstParagraph(text string) string { return match[1] } -func createAssignmentByFunction(parsedModel *types.ParsedModel) { - pdf.SetTextColor(0, 0, 0) +func (r *pdfReporter) createAssignmentByFunction(parsedModel *types.ParsedModel) { + r.pdf.SetTextColor(0, 0, 0) title := "Assignment by Function" - addHeadline(title, false) - defineLinkTarget("{function-assignment}") - currentChapterTitleBreadcrumb = title + r.addHeadline(title, false) + r.defineLinkTarget("{function-assignment}") + r.currentChapterTitleBreadcrumb = title risksBusinessSideFunction := types.RisksOfOnlyBusinessSide(parsedModel, parsedModel.GeneratedRisksByCategory) risksArchitectureFunction := types.RisksOfOnlyArchitecture(parsedModel, parsedModel.GeneratedRisksByCategory) @@ -1745,134 +1775,134 @@ func createAssignmentByFunction(parsedModel *types.ParsedModel) { "" + strconv.Itoa(countArchitectureFunction) + " should be checked by " + types.Architecture.Title() + ", " + "" + strconv.Itoa(countDevelopmentFunction) + " should be checked by " + types.Development.Title() + ", " + "and " + strconv.Itoa(countOperationFunction) + " should be checked by " + types.Operations.Title() + ".
") - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() html.Write(5, intro.String()) intro.Reset() - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdfColorGray() + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdfColorGray() html.Write(5, "Risk finding paragraphs are clickable and link to the corresponding chapter.") - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetFont("Helvetica", "", fontSizeBody) - oldLeft, _, _, _ := pdf.GetMargins() + oldLeft, _, _, _ := r.pdf.GetMargins() - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { html.Write(5, "


") } - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetTextColor(0, 0, 0) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetTextColor(0, 0, 0) html.Write(5, ""+types.BusinessSide.Title()+"") - pdf.SetLeftMargin(15) + r.pdf.SetLeftMargin(15) if len(risksBusinessSideFunction) == 0 { - pdf.SetTextColor(150, 150, 150) + r.pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksBusinessSideFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksBusinessSideFunction, true)), types.CriticalSeverity, true, true, false, false) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksBusinessSideFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksBusinessSideFunction, true)), types.HighSeverity, true, true, false, false) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksBusinessSideFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksBusinessSideFunction, true)), types.ElevatedSeverity, true, true, false, false) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksBusinessSideFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksBusinessSideFunction, true)), types.MediumSeverity, true, true, false, false) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksBusinessSideFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksBusinessSideFunction, true)), types.LowSeverity, true, true, false, false) } - pdf.SetLeftMargin(oldLeft) + r.pdf.SetLeftMargin(oldLeft) - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { html.Write(5, "


") } - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetTextColor(0, 0, 0) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetTextColor(0, 0, 0) html.Write(5, ""+types.Architecture.Title()+"") - pdf.SetLeftMargin(15) + r.pdf.SetLeftMargin(15) if len(risksArchitectureFunction) == 0 { - pdf.SetTextColor(150, 150, 150) + r.pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksArchitectureFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksArchitectureFunction, true)), types.CriticalSeverity, true, true, false, false) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksArchitectureFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksArchitectureFunction, true)), types.HighSeverity, true, true, false, false) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksArchitectureFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksArchitectureFunction, true)), types.ElevatedSeverity, true, true, false, false) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksArchitectureFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksArchitectureFunction, true)), types.MediumSeverity, true, true, false, false) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksArchitectureFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksArchitectureFunction, true)), types.LowSeverity, true, true, false, false) } - pdf.SetLeftMargin(oldLeft) + r.pdf.SetLeftMargin(oldLeft) - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { html.Write(5, "


") } - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetTextColor(0, 0, 0) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetTextColor(0, 0, 0) html.Write(5, ""+types.Development.Title()+"") - pdf.SetLeftMargin(15) + r.pdf.SetLeftMargin(15) if len(risksDevelopmentFunction) == 0 { - pdf.SetTextColor(150, 150, 150) + r.pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksDevelopmentFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksDevelopmentFunction, true)), types.CriticalSeverity, true, true, false, false) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksDevelopmentFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksDevelopmentFunction, true)), types.HighSeverity, true, true, false, false) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksDevelopmentFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksDevelopmentFunction, true)), types.ElevatedSeverity, true, true, false, false) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksDevelopmentFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksDevelopmentFunction, true)), types.MediumSeverity, true, true, false, false) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksDevelopmentFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksDevelopmentFunction, true)), types.LowSeverity, true, true, false, false) } - pdf.SetLeftMargin(oldLeft) + r.pdf.SetLeftMargin(oldLeft) - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { html.Write(5, "


") } - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetTextColor(0, 0, 0) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetTextColor(0, 0, 0) html.Write(5, ""+types.Operations.Title()+"") - pdf.SetLeftMargin(15) + r.pdf.SetLeftMargin(15) if len(risksOperationFunction) == 0 { - pdf.SetTextColor(150, 150, 150) + r.pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksOperationFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksOperationFunction, true)), types.CriticalSeverity, true, true, false, false) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksOperationFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksOperationFunction, true)), types.HighSeverity, true, true, false, false) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksOperationFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksOperationFunction, true)), types.ElevatedSeverity, true, true, false, false) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksOperationFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksOperationFunction, true)), types.MediumSeverity, true, true, false, false) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksOperationFunction, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksOperationFunction, true)), types.LowSeverity, true, true, false, false) } - pdf.SetLeftMargin(oldLeft) + r.pdf.SetLeftMargin(oldLeft) - pdf.SetDrawColor(0, 0, 0) - pdf.SetDashPattern([]float64{}, 0) + r.pdf.SetDrawColor(0, 0, 0) + r.pdf.SetDashPattern([]float64{}, 0) } -func createSTRIDE(parsedModel *types.ParsedModel) { - pdf.SetTextColor(0, 0, 0) +func (r 
*pdfReporter) createSTRIDE(parsedModel *types.ParsedModel) { + r.pdf.SetTextColor(0, 0, 0) title := "STRIDE Classification of Identified Risks" - addHeadline(title, false) - defineLinkTarget("{stride}") - currentChapterTitleBreadcrumb = title + r.addHeadline(title, false) + r.defineLinkTarget("{stride}") + r.currentChapterTitleBreadcrumb = title risksSTRIDESpoofing := types.RisksOfOnlySTRIDESpoofing(parsedModel, parsedModel.GeneratedRisksByCategory) risksSTRIDETampering := types.RisksOfOnlySTRIDETampering(parsedModel, parsedModel.GeneratedRisksByCategory) @@ -1896,207 +1926,207 @@ func createSTRIDE(parsedModel *types.ParsedModel) { "" + strconv.Itoa(countSTRIDEInformationDisclosure) + " in the " + types.InformationDisclosure.Title() + " category, " + "" + strconv.Itoa(countSTRIDEDenialOfService) + " in the " + types.DenialOfService.Title() + " category, " + "and " + strconv.Itoa(countSTRIDEElevationOfPrivilege) + " in the " + types.ElevationOfPrivilege.Title() + " category.
") - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() html.Write(5, intro.String()) intro.Reset() - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdfColorGray() + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdfColorGray() html.Write(5, "Risk finding paragraphs are clickable and link to the corresponding chapter.") - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetFont("Helvetica", "", fontSizeBody) - oldLeft, _, _, _ := pdf.GetMargins() + oldLeft, _, _, _ := r.pdf.GetMargins() - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { html.Write(5, "


") } - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetTextColor(0, 0, 0) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetTextColor(0, 0, 0) html.Write(5, ""+types.Spoofing.Title()+"") - pdf.SetLeftMargin(15) + r.pdf.SetLeftMargin(15) if len(risksSTRIDESpoofing) == 0 { - pdf.SetTextColor(150, 150, 150) + r.pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDESpoofing, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDESpoofing, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDESpoofing, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDESpoofing, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDESpoofing, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDESpoofing, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDESpoofing, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDESpoofing, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDESpoofing, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDESpoofing, true)), types.LowSeverity, true, true, false, true) } - pdf.SetLeftMargin(oldLeft) + r.pdf.SetLeftMargin(oldLeft) - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { html.Write(5, "


") } - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetTextColor(0, 0, 0) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetTextColor(0, 0, 0) html.Write(5, ""+types.Tampering.Title()+"") - pdf.SetLeftMargin(15) + r.pdf.SetLeftMargin(15) if len(risksSTRIDETampering) == 0 { - pdf.SetTextColor(150, 150, 150) + r.pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDETampering, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDETampering, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDETampering, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDETampering, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDETampering, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDETampering, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDETampering, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDETampering, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDETampering, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDETampering, true)), types.LowSeverity, true, true, false, true) } - pdf.SetLeftMargin(oldLeft) + r.pdf.SetLeftMargin(oldLeft) - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { html.Write(5, "


") } - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetTextColor(0, 0, 0) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetTextColor(0, 0, 0) html.Write(5, ""+types.Repudiation.Title()+"") - pdf.SetLeftMargin(15) + r.pdf.SetLeftMargin(15) if len(risksSTRIDERepudiation) == 0 { - pdf.SetTextColor(150, 150, 150) + r.pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDERepudiation, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDERepudiation, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDERepudiation, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDERepudiation, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDERepudiation, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDERepudiation, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDERepudiation, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDERepudiation, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDERepudiation, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDERepudiation, true)), types.LowSeverity, true, true, false, true) } - pdf.SetLeftMargin(oldLeft) + r.pdf.SetLeftMargin(oldLeft) - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { html.Write(5, "


") } - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetTextColor(0, 0, 0) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetTextColor(0, 0, 0) html.Write(5, ""+types.InformationDisclosure.Title()+"") - pdf.SetLeftMargin(15) + r.pdf.SetLeftMargin(15) if len(risksSTRIDEInformationDisclosure) == 0 { - pdf.SetTextColor(150, 150, 150) + r.pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEInformationDisclosure, true)), types.LowSeverity, true, true, false, true) } - pdf.SetLeftMargin(oldLeft) + r.pdf.SetLeftMargin(oldLeft) - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { 
html.Write(5, "


") } - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetTextColor(0, 0, 0) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetTextColor(0, 0, 0) html.Write(5, ""+types.DenialOfService.Title()+"") - pdf.SetLeftMargin(15) + r.pdf.SetLeftMargin(15) if len(risksSTRIDEDenialOfService) == 0 { - pdf.SetTextColor(150, 150, 150) + r.pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEDenialOfService, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEDenialOfService, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEDenialOfService, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEDenialOfService, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEDenialOfService, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEDenialOfService, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEDenialOfService, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEDenialOfService, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEDenialOfService, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEDenialOfService, true)), types.LowSeverity, true, true, false, true) } - pdf.SetLeftMargin(oldLeft) + r.pdf.SetLeftMargin(oldLeft) - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { html.Write(5, "


") } - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetTextColor(0, 0, 0) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetTextColor(0, 0, 0) html.Write(5, ""+types.ElevationOfPrivilege.Title()+"") - pdf.SetLeftMargin(15) + r.pdf.SetLeftMargin(15) if len(risksSTRIDEElevationOfPrivilege) == 0 { - pdf.SetTextColor(150, 150, 150) + r.pdf.SetTextColor(150, 150, 150) html.Write(5, "

n/a") } else { - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyCriticalRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), types.CriticalSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyHighRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), types.HighSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyElevatedRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), types.ElevatedSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyMediumRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), types.MediumSeverity, true, true, false, true) - addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), + r.addCategories(parsedModel, types.GetRiskCategories(parsedModel, types.CategoriesOfOnlyLowRisks(parsedModel, risksSTRIDEElevationOfPrivilege, true)), types.LowSeverity, true, true, false, true) } - pdf.SetLeftMargin(oldLeft) + r.pdf.SetLeftMargin(oldLeft) - pdf.SetDrawColor(0, 0, 0) - pdf.SetDashPattern([]float64{}, 0) + r.pdf.SetDrawColor(0, 0, 0) + r.pdf.SetDashPattern([]float64{}, 0) } 
-func createSecurityRequirements(parsedModel *types.ParsedModel) { - uni := pdf.UnicodeTranslatorFromDescriptor("") - pdf.SetTextColor(0, 0, 0) +func (r *pdfReporter) createSecurityRequirements(parsedModel *types.ParsedModel) { + uni := r.pdf.UnicodeTranslatorFromDescriptor("") + r.pdf.SetTextColor(0, 0, 0) chapTitle := "Security Requirements" - addHeadline(chapTitle, false) - defineLinkTarget("{security-requirements}") - currentChapterTitleBreadcrumb = chapTitle + r.addHeadline(chapTitle, false) + r.defineLinkTarget("{security-requirements}") + r.currentChapterTitleBreadcrumb = chapTitle - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() html.Write(5, "This chapter lists the custom security requirements which have been defined for the modeled target.") - pdfColorBlack() + r.pdfColorBlack() for _, title := range sortedKeysOfSecurityRequirements(parsedModel) { description := parsedModel.SecurityRequirements[title] - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { html.Write(5, "


") } html.Write(5, ""+uni(title)+"
") html.Write(5, uni(description)) } - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { html.Write(5, "


") } @@ -2113,30 +2143,30 @@ func sortedKeysOfSecurityRequirements(parsedModel *types.ParsedModel) []string { return keys } -func createAbuseCases(parsedModel *types.ParsedModel) { - pdf.SetTextColor(0, 0, 0) +func (r *pdfReporter) createAbuseCases(parsedModel *types.ParsedModel) { + r.pdf.SetTextColor(0, 0, 0) chapTitle := "Abuse Cases" - addHeadline(chapTitle, false) - defineLinkTarget("{abuse-cases}") - currentChapterTitleBreadcrumb = chapTitle + r.addHeadline(chapTitle, false) + r.defineLinkTarget("{abuse-cases}") + r.currentChapterTitleBreadcrumb = chapTitle - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() html.Write(5, "This chapter lists the custom abuse cases which have been defined for the modeled target.") - pdfColorBlack() + r.pdfColorBlack() for _, title := range sortedKeysOfAbuseCases(parsedModel) { description := parsedModel.AbuseCases[title] - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { html.Write(5, "


") } html.Write(5, ""+title+"
") html.Write(5, description) } - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { html.Write(5, "


") } @@ -2153,50 +2183,50 @@ func sortedKeysOfAbuseCases(parsedModel *types.ParsedModel) []string { return keys } -func createQuestions(parsedModel *types.ParsedModel) { - uni := pdf.UnicodeTranslatorFromDescriptor("") - pdf.SetTextColor(0, 0, 0) +func (r *pdfReporter) createQuestions(parsedModel *types.ParsedModel) { + uni := r.pdf.UnicodeTranslatorFromDescriptor("") + r.pdf.SetTextColor(0, 0, 0) questions := "Questions" count := len(parsedModel.Questions) if count == 1 { questions = "Question" } if questionsUnanswered(parsedModel) > 0 { - colors.ColorModelFailure(pdf) + colors.ColorModelFailure(r.pdf) } chapTitle := "Questions: " + strconv.Itoa(questionsUnanswered(parsedModel)) + " / " + strconv.Itoa(count) + " " + questions - addHeadline(chapTitle, false) - defineLinkTarget("{questions}") - currentChapterTitleBreadcrumb = chapTitle - pdfColorBlack() + r.addHeadline(chapTitle, false) + r.defineLinkTarget("{questions}") + r.currentChapterTitleBreadcrumb = chapTitle + r.pdfColorBlack() - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() html.Write(5, "This chapter lists custom questions that arose during the threat modeling process.") if len(parsedModel.Questions) == 0 { - pdfColorLightGray() + r.pdfColorLightGray() html.Write(5, "


") html.Write(5, "No custom questions arose during the threat modeling process.") } - pdfColorBlack() + r.pdfColorBlack() for _, question := range sortedKeysOfQuestions(parsedModel) { answer := parsedModel.Questions[question] - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { html.Write(5, "


") } - pdfColorBlack() + r.pdfColorBlack() if len(strings.TrimSpace(answer)) > 0 { html.Write(5, ""+uni(question)+"
") html.Write(5, ""+uni(strings.TrimSpace(answer))+"") } else { - colors.ColorModelFailure(pdf) + colors.ColorModelFailure(r.pdf) html.Write(5, ""+uni(question)+"
") - pdfColorLightGray() + r.pdfColorLightGray() html.Write(5, "- answer pending -") - pdfColorBlack() + r.pdfColorBlack() } } } @@ -2210,16 +2240,16 @@ func sortedKeysOfQuestions(parsedModel *types.ParsedModel) []string { return keys } -func createTagListing(parsedModel *types.ParsedModel) { - pdf.SetTextColor(0, 0, 0) +func (r *pdfReporter) createTagListing(parsedModel *types.ParsedModel) { + r.pdf.SetTextColor(0, 0, 0) chapTitle := "Tag Listing" - addHeadline(chapTitle, false) - defineLinkTarget("{tag-listing}") - currentChapterTitleBreadcrumb = chapTitle + r.addHeadline(chapTitle, false) + r.defineLinkTarget("{tag-listing}") + r.currentChapterTitleBreadcrumb = chapTitle - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() html.Write(5, "This chapter lists what tags are used by which elements.") - pdfColorBlack() + r.pdfColorBlack() sorted := parsedModel.TagsAvailable sort.Strings(sorted) for _, tag := range sorted { @@ -2265,13 +2295,13 @@ func createTagListing(parsedModel *types.ParsedModel) { } } if len(description) > 0 { - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { html.Write(5, "


") } - pdfColorBlack() + r.pdfColorBlack() html.Write(5, ""+tag+"
") html.Write(5, description) } @@ -2296,14 +2326,14 @@ func sortedTechnicalAssetsByTitle(parsedModel *types.ParsedModel) []types.Techni return assets } -func createRiskCategories(parsedModel *types.ParsedModel) { - uni := pdf.UnicodeTranslatorFromDescriptor("") +func (r *pdfReporter) createRiskCategories(parsedModel *types.ParsedModel) { + uni := r.pdf.UnicodeTranslatorFromDescriptor("") // category title title := "Identified Risks by Vulnerability Category" - pdfColorBlack() - addHeadline(title, false) - defineLinkTarget("{intro-risks-by-vulnerability-category}") - html := pdf.HTMLBasicNew() + r.pdfColorBlack() + r.addHeadline(title, false) + r.defineLinkTarget("{intro-risks-by-vulnerability-category}") + html := r.pdf.HTMLBasicNew() var text strings.Builder text.WriteString("In total " + strconv.Itoa(types.TotalRiskCount(parsedModel)) + " potential risks have been identified during the threat modeling process " + "of which " + @@ -2316,27 +2346,27 @@ func createRiskCategories(parsedModel *types.ParsedModel) { text.WriteString("The following sub-chapters of this section describe each identified risk category.") // TODO more explanation text html.Write(5, text.String()) text.Reset() - currentChapterTitleBreadcrumb = title + r.currentChapterTitleBreadcrumb = title for _, category := range types.SortedRiskCategories(parsedModel) { risksStr := types.SortedRisksOfCategory(parsedModel, category) // category color switch types.HighestSeverityStillAtRisk(parsedModel, risksStr) { case types.CriticalSeverity: - colors.ColorCriticalRisk(pdf) + colors.ColorCriticalRisk(r.pdf) case types.HighSeverity: - colors.ColorHighRisk(pdf) + colors.ColorHighRisk(r.pdf) case types.ElevatedSeverity: - colors.ColorElevatedRisk(pdf) + colors.ColorElevatedRisk(r.pdf) case types.MediumSeverity: - colors.ColorMediumRisk(pdf) + colors.ColorMediumRisk(r.pdf) case types.LowSeverity: - colors.ColorLowRisk(pdf) + colors.ColorLowRisk(r.pdf) default: - pdfColorBlack() + r.pdfColorBlack() } if 
len(types.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) == 0 { - pdfColorBlack() + r.pdfColorBlack() } // category title @@ -2346,10 +2376,10 @@ func createRiskCategories(parsedModel *types.ParsedModel) { suffix += "s" } title := category.Title + ": " + suffix - addHeadline(uni(title), true) - pdfColorBlack() - defineLinkTarget("{" + category.Id + "}") - currentChapterTitleBreadcrumb = title + r.addHeadline(uni(title), true) + r.pdfColorBlack() + r.defineLinkTarget("{" + category.Id + "}") + r.currentChapterTitleBreadcrumb = title // category details var text strings.Builder @@ -2368,12 +2398,12 @@ func createRiskCategories(parsedModel *types.ParsedModel) { text.WriteString(category.RiskAssessment) html.Write(5, text.String()) text.Reset() - colors.ColorRiskStatusFalsePositive(pdf) + colors.ColorRiskStatusFalsePositive(r.pdf) text.WriteString("


False Positives

") text.WriteString(category.FalsePositives) html.Write(5, text.String()) text.Reset() - colors.ColorRiskStatusMitigated(pdf) + colors.ColorRiskStatusMitigated(r.pdf) text.WriteString("


Mitigation (" + category.Function.Title() + "): " + category.Action + "

") text.WriteString(category.Mitigation) @@ -2403,11 +2433,11 @@ func createRiskCategories(parsedModel *types.ParsedModel) { html.Write(5, text.String()) text.Reset() - pdf.SetTextColor(0, 0, 0) + r.pdf.SetTextColor(0, 0, 0) // risk details - pageBreak() - pdf.SetY(36) + r.pageBreak() + r.pdf.SetY(36) text.WriteString("Risk Findings

") times := strconv.Itoa(len(risksStr)) + " time" if len(risksStr) > 1 { @@ -2418,159 +2448,159 @@ func createRiskCategories(parsedModel *types.ParsedModel) { "controls have been applied properly in order to mitigate each risk.
") html.Write(5, text.String()) text.Reset() - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdfColorGray() + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdfColorGray() html.Write(5, "Risk finding paragraphs are clickable and link to the corresponding chapter.
") - pdf.SetFont("Helvetica", "", fontSizeBody) - oldLeft, _, _, _ := pdf.GetMargins() + r.pdf.SetFont("Helvetica", "", fontSizeBody) + oldLeft, _, _, _ := r.pdf.GetMargins() headlineCriticalWritten, headlineHighWritten, headlineElevatedWritten, headlineMediumWritten, headlineLowWritten := false, false, false, false, false for _, risk := range risksStr { text.WriteString("
") html.Write(5, text.String()) text.Reset() - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } switch risk.Severity { case types.CriticalSeverity: - colors.ColorCriticalRisk(pdf) + colors.ColorCriticalRisk(r.pdf) if !headlineCriticalWritten { - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetLeftMargin(oldLeft) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetLeftMargin(oldLeft) text.WriteString("
Critical Risk Severity

") html.Write(5, text.String()) text.Reset() headlineCriticalWritten = true } case types.HighSeverity: - colors.ColorHighRisk(pdf) + colors.ColorHighRisk(r.pdf) if !headlineHighWritten { - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetLeftMargin(oldLeft) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetLeftMargin(oldLeft) text.WriteString("
High Risk Severity

") html.Write(5, text.String()) text.Reset() headlineHighWritten = true } case types.ElevatedSeverity: - colors.ColorElevatedRisk(pdf) + colors.ColorElevatedRisk(r.pdf) if !headlineElevatedWritten { - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetLeftMargin(oldLeft) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetLeftMargin(oldLeft) text.WriteString("
Elevated Risk Severity

") html.Write(5, text.String()) text.Reset() headlineElevatedWritten = true } case types.MediumSeverity: - colors.ColorMediumRisk(pdf) + colors.ColorMediumRisk(r.pdf) if !headlineMediumWritten { - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetLeftMargin(oldLeft) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetLeftMargin(oldLeft) text.WriteString("
Medium Risk Severity

") html.Write(5, text.String()) text.Reset() headlineMediumWritten = true } case types.LowSeverity: - colors.ColorLowRisk(pdf) + colors.ColorLowRisk(r.pdf) if !headlineLowWritten { - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetLeftMargin(oldLeft) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetLeftMargin(oldLeft) text.WriteString("
Low Risk Severity

") html.Write(5, text.String()) text.Reset() headlineLowWritten = true } default: - pdfColorBlack() + r.pdfColorBlack() } if !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { - pdfColorBlack() + r.pdfColorBlack() } - posY := pdf.GetY() - pdf.SetLeftMargin(oldLeft + 10) - pdf.SetFont("Helvetica", "", fontSizeBody) + posY := r.pdf.GetY() + r.pdf.SetLeftMargin(oldLeft + 10) + r.pdf.SetFont("Helvetica", "", fontSizeBody) text.WriteString(uni(risk.Title) + ": Exploitation likelihood is " + risk.ExploitationLikelihood.Title() + " with " + risk.ExploitationImpact.Title() + " impact.") text.WriteString("
") html.Write(5, text.String()) text.Reset() - pdfColorGray() - pdf.SetFont("Helvetica", "", fontSizeVerySmall) - pdf.MultiCell(215, 5, uni(risk.SyntheticId), "0", "0", false) - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.SetFont("Helvetica", "", fontSizeVerySmall) + r.pdf.MultiCell(215, 5, uni(risk.SyntheticId), "0", "0", false) + r.pdf.SetFont("Helvetica", "", fontSizeBody) if len(risk.MostRelevantSharedRuntimeId) > 0 { - pdf.Link(20, posY, 180, pdf.GetY()-posY, tocLinkIdByAssetId[risk.MostRelevantSharedRuntimeId]) + r.pdf.Link(20, posY, 180, r.pdf.GetY()-posY, r.tocLinkIdByAssetId[risk.MostRelevantSharedRuntimeId]) } else if len(risk.MostRelevantTrustBoundaryId) > 0 { - pdf.Link(20, posY, 180, pdf.GetY()-posY, tocLinkIdByAssetId[risk.MostRelevantTrustBoundaryId]) + r.pdf.Link(20, posY, 180, r.pdf.GetY()-posY, r.tocLinkIdByAssetId[risk.MostRelevantTrustBoundaryId]) } else if len(risk.MostRelevantTechnicalAssetId) > 0 { - pdf.Link(20, posY, 180, pdf.GetY()-posY, tocLinkIdByAssetId[risk.MostRelevantTechnicalAssetId]) + r.pdf.Link(20, posY, 180, r.pdf.GetY()-posY, r.tocLinkIdByAssetId[risk.MostRelevantTechnicalAssetId]) } - writeRiskTrackingStatus(parsedModel, risk) - pdf.SetLeftMargin(oldLeft) + r.writeRiskTrackingStatus(parsedModel, risk) + r.pdf.SetLeftMargin(oldLeft) html.Write(5, text.String()) text.Reset() } - pdf.SetLeftMargin(oldLeft) + r.pdf.SetLeftMargin(oldLeft) } } -func writeRiskTrackingStatus(parsedModel *types.ParsedModel, risk types.Risk) { - uni := pdf.UnicodeTranslatorFromDescriptor("") +func (r *pdfReporter) writeRiskTrackingStatus(parsedModel *types.ParsedModel, risk types.Risk) { + uni := r.pdf.UnicodeTranslatorFromDescriptor("") tracking := risk.GetRiskTracking(parsedModel) - pdfColorBlack() - pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") switch tracking.Status { case types.Unchecked: - colors.ColorRiskStatusUnchecked(pdf) + 
colors.ColorRiskStatusUnchecked(r.pdf) case types.InDiscussion: - colors.ColorRiskStatusInDiscussion(pdf) + colors.ColorRiskStatusInDiscussion(r.pdf) case types.Accepted: - colors.ColorRiskStatusAccepted(pdf) + colors.ColorRiskStatusAccepted(r.pdf) case types.InProgress: - colors.ColorRiskStatusInProgress(pdf) + colors.ColorRiskStatusInProgress(r.pdf) case types.Mitigated: - colors.ColorRiskStatusMitigated(pdf) + colors.ColorRiskStatusMitigated(r.pdf) case types.FalsePositive: - colors.ColorRiskStatusFalsePositive(pdf) + colors.ColorRiskStatusFalsePositive(r.pdf) default: - pdfColorBlack() + r.pdfColorBlack() } - pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) if tracking.Status == types.Unchecked { - pdf.SetFont("Helvetica", "B", fontSizeSmall) + r.pdf.SetFont("Helvetica", "B", fontSizeSmall) } - pdf.CellFormat(25, 4, tracking.Status.Title(), "0", 0, "B", false, 0, "") + r.pdf.CellFormat(25, 4, tracking.Status.Title(), "0", 0, "B", false, 0, "") if tracking.Status != types.Unchecked { dateStr := tracking.Date.Format("2006-01-02") if dateStr == "0001-01-01" { dateStr = "" } justificationStr := tracking.Justification - pdfColorGray() - pdf.CellFormat(20, 4, dateStr, "0", 0, "B", false, 0, "") - pdf.CellFormat(35, 4, uni(tracking.CheckedBy), "0", 0, "B", false, 0, "") - pdf.CellFormat(35, 4, uni(tracking.Ticket), "0", 0, "B", false, 0, "") - pdf.Ln(-1) - pdfColorBlack() - pdf.CellFormat(10, 4, "", "0", 0, "", false, 0, "") - pdf.MultiCell(170, 4, uni(justificationStr), "0", "0", false) - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(20, 4, dateStr, "0", 0, "B", false, 0, "") + r.pdf.CellFormat(35, 4, uni(tracking.CheckedBy), "0", 0, "B", false, 0, "") + r.pdf.CellFormat(35, 4, uni(tracking.Ticket), "0", 0, "B", false, 0, "") + r.pdf.Ln(-1) + r.pdfColorBlack() + r.pdf.CellFormat(10, 4, "", "0", 0, "", false, 0, "") + r.pdf.MultiCell(170, 4, uni(justificationStr), "0", "0", false) + 
r.pdf.SetFont("Helvetica", "", fontSizeBody) } else { - pdf.Ln(-1) + r.pdf.Ln(-1) } - pdfColorBlack() + r.pdfColorBlack() } -func createTechnicalAssets(parsedModel *types.ParsedModel) { - uni := pdf.UnicodeTranslatorFromDescriptor("") +func (r *pdfReporter) createTechnicalAssets(parsedModel *types.ParsedModel) { + uni := r.pdf.UnicodeTranslatorFromDescriptor("") // category title title := "Identified Risks by Technical Asset" - pdfColorBlack() - addHeadline(title, false) - defineLinkTarget("{intro-risks-by-technical-asset}") - html := pdf.HTMLBasicNew() + r.pdfColorBlack() + r.addHeadline(title, false) + r.defineLinkTarget("{intro-risks-by-technical-asset}") + html := r.pdf.HTMLBasicNew() var text strings.Builder text.WriteString("In total " + strconv.Itoa(types.TotalRiskCount(parsedModel)) + " potential risks have been identified during the threat modeling process " + "of which " + @@ -2584,7 +2614,7 @@ func createTechnicalAssets(parsedModel *types.ParsedModel) { text.WriteString("The RAA value of a technical asset is the calculated \"Relative Attacker Attractiveness\" value in percent.") html.Write(5, text.String()) text.Reset() - currentChapterTitleBreadcrumb = title + r.currentChapterTitleBreadcrumb = title for _, technicalAsset := range sortedTechnicalAssetsByRiskSeverityAndTitle(parsedModel) { risksStr := technicalAsset.GeneratedRisks(parsedModel) countStillAtRisk := len(types.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) @@ -2593,233 +2623,233 @@ func createTechnicalAssets(parsedModel *types.ParsedModel) { suffix += "s" } if technicalAsset.OutOfScope { - pdfColorOutOfScope() + r.pdfColorOutOfScope() suffix = "out-of-scope" } else { switch types.HighestSeverityStillAtRisk(parsedModel, risksStr) { case types.CriticalSeverity: - colors.ColorCriticalRisk(pdf) + colors.ColorCriticalRisk(r.pdf) case types.HighSeverity: - colors.ColorHighRisk(pdf) + colors.ColorHighRisk(r.pdf) case types.ElevatedSeverity: - colors.ColorElevatedRisk(pdf) + 
colors.ColorElevatedRisk(r.pdf) case types.MediumSeverity: - colors.ColorMediumRisk(pdf) + colors.ColorMediumRisk(r.pdf) case types.LowSeverity: - colors.ColorLowRisk(pdf) + colors.ColorLowRisk(r.pdf) default: - pdfColorBlack() + r.pdfColorBlack() } if len(types.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) == 0 { - pdfColorBlack() + r.pdfColorBlack() } } // asset title title := technicalAsset.Title + ": " + suffix - addHeadline(uni(title), true) - pdfColorBlack() - defineLinkTarget("{" + technicalAsset.Id + "}") - currentChapterTitleBreadcrumb = title + r.addHeadline(uni(title), true) + r.pdfColorBlack() + r.defineLinkTarget("{" + technicalAsset.Id + "}") + r.currentChapterTitleBreadcrumb = title // asset description - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() var text strings.Builder text.WriteString("Description

") text.WriteString(uni(technicalAsset.Description)) html.Write(5, text.String()) text.Reset() - pdf.SetTextColor(0, 0, 0) + r.pdf.SetTextColor(0, 0, 0) // and more metadata of asset in tabular view - pdf.Ln(-1) - pdf.Ln(-1) - pdf.Ln(-1) - if pdf.GetY() > 260 { // 260 only for major titles (to avoid "Schusterjungen"), for the rest attributes 270 - pageBreak() - pdf.SetY(36) - } - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdfColorBlack() - pdf.CellFormat(190, 6, "Identified Risks of Asset", "0", 0, "", false, 0, "") - pdfColorGray() - oldLeft, _, _, _ := pdf.GetMargins() + r.pdf.Ln(-1) + r.pdf.Ln(-1) + r.pdf.Ln(-1) + if r.pdf.GetY() > 260 { // 260 only for major titles (to avoid "Schusterjungen"), for the rest attributes 270 + r.pageBreak() + r.pdf.SetY(36) + } + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdfColorBlack() + r.pdf.CellFormat(190, 6, "Identified Risks of Asset", "0", 0, "", false, 0, "") + r.pdfColorGray() + oldLeft, _, _, _ := r.pdf.GetMargins() if len(risksStr) > 0 { - pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) html.Write(5, "Risk finding paragraphs are clickable and link to the corresponding chapter.") - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetLeftMargin(15) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetLeftMargin(15) /* - pdf.Ln(-1) - pdf.Ln(-1) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(185, 6, strconv.Itoa(len(risksStr))+" risksStr in total were identified", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.Ln(-1) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(185, 6, strconv.Itoa(len(risksStr))+" risksStr in total were identified", "0", 0, "", false, 0, "") */ headlineCriticalWritten, headlineHighWritten, headlineElevatedWritten, headlineMediumWritten, headlineLowWritten := false, false, false, false, false - pdf.Ln(-1) + r.pdf.Ln(-1) for _, risk := range risksStr { 
text.WriteString("
") html.Write(5, text.String()) text.Reset() - if pdf.GetY() > 250 { // 250 only for major titles (to avoid "Schusterjungen"), for the rest attributes 270 - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 250 { // 250 only for major titles (to avoid "Schusterjungen"), for the rest attributes 270 + r.pageBreak() + r.pdf.SetY(36) } switch risk.Severity { case types.CriticalSeverity: - colors.ColorCriticalRisk(pdf) + colors.ColorCriticalRisk(r.pdf) if !headlineCriticalWritten { - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetLeftMargin(oldLeft + 3) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetLeftMargin(oldLeft + 3) html.Write(5, "
Critical Risk Severity

") headlineCriticalWritten = true } case types.HighSeverity: - colors.ColorHighRisk(pdf) + colors.ColorHighRisk(r.pdf) if !headlineHighWritten { - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetLeftMargin(oldLeft + 3) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetLeftMargin(oldLeft + 3) html.Write(5, "
High Risk Severity

") headlineHighWritten = true } case types.ElevatedSeverity: - colors.ColorElevatedRisk(pdf) + colors.ColorElevatedRisk(r.pdf) if !headlineElevatedWritten { - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetLeftMargin(oldLeft + 3) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetLeftMargin(oldLeft + 3) html.Write(5, "
Elevated Risk Severity

") headlineElevatedWritten = true } case types.MediumSeverity: - colors.ColorMediumRisk(pdf) + colors.ColorMediumRisk(r.pdf) if !headlineMediumWritten { - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetLeftMargin(oldLeft + 3) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetLeftMargin(oldLeft + 3) html.Write(5, "
Medium Risk Severity

") headlineMediumWritten = true } case types.LowSeverity: - colors.ColorLowRisk(pdf) + colors.ColorLowRisk(r.pdf) if !headlineLowWritten { - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetLeftMargin(oldLeft + 3) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetLeftMargin(oldLeft + 3) html.Write(5, "
Low Risk Severity

") headlineLowWritten = true } default: - pdfColorBlack() + r.pdfColorBlack() } if !risk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { - pdfColorBlack() + r.pdfColorBlack() } - posY := pdf.GetY() - pdf.SetLeftMargin(oldLeft + 10) - pdf.SetFont("Helvetica", "", fontSizeBody) + posY := r.pdf.GetY() + r.pdf.SetLeftMargin(oldLeft + 10) + r.pdf.SetFont("Helvetica", "", fontSizeBody) text.WriteString(uni(risk.Title) + ": Exploitation likelihood is " + risk.ExploitationLikelihood.Title() + " with " + risk.ExploitationImpact.Title() + " impact.") text.WriteString("
") html.Write(5, text.String()) text.Reset() - pdf.SetFont("Helvetica", "", fontSizeVerySmall) - pdfColorGray() - pdf.MultiCell(215, 5, uni(risk.SyntheticId), "0", "0", false) - pdf.Link(20, posY, 180, pdf.GetY()-posY, tocLinkIdByAssetId[risk.CategoryId]) - pdf.SetFont("Helvetica", "", fontSizeBody) - writeRiskTrackingStatus(parsedModel, risk) - pdf.SetLeftMargin(oldLeft) + r.pdf.SetFont("Helvetica", "", fontSizeVerySmall) + r.pdfColorGray() + r.pdf.MultiCell(215, 5, uni(risk.SyntheticId), "0", "0", false) + r.pdf.Link(20, posY, 180, r.pdf.GetY()-posY, r.tocLinkIdByAssetId[risk.CategoryId]) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.writeRiskTrackingStatus(parsedModel, risk) + r.pdf.SetLeftMargin(oldLeft) } } else { - pdf.Ln(-1) - pdf.Ln(-1) - pdfColorGray() - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetLeftMargin(15) + r.pdf.Ln(-1) + r.pdf.Ln(-1) + r.pdfColorGray() + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetLeftMargin(15) text := "No risksStr were identified." if technicalAsset.OutOfScope { text = "Asset was defined as out-of-scope." 
} html.Write(5, text) - pdf.Ln(-1) - } - pdf.SetLeftMargin(oldLeft) - - pdf.Ln(-1) - pdf.Ln(4) - if pdf.GetY() > 260 { // 260 only for major titles (to avoid "Schusterjungen"), for the rest attributes 270 - pageBreak() - pdf.SetY(36) - } - pdfColorBlack() - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdf.CellFormat(190, 6, "Asset Information", "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "ID:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, technicalAsset.Id, "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Type:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, technicalAsset.Type.String(), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Usage:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, technicalAsset.Usage.String(), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "RAA:", "0", 0, "", false, 0, "") - pdfColorBlack() + r.pdf.Ln(-1) + } + r.pdf.SetLeftMargin(oldLeft) + + r.pdf.Ln(-1) + r.pdf.Ln(4) + if r.pdf.GetY() > 260 { // 260 only for major titles (to avoid "Schusterjungen"), for the rest attributes 270 + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorBlack() + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 6, "Asset Information", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "ID:", "0", 0, "", false, 0, "") + 
r.pdfColorBlack() + r.pdf.MultiCell(145, 6, technicalAsset.Id, "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Type:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, technicalAsset.Type.String(), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Usage:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, technicalAsset.Usage.String(), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "RAA:", "0", 0, "", false, 0, "") + r.pdfColorBlack() textRAA := fmt.Sprintf("%.0f", technicalAsset.RAA) + " %" if technicalAsset.OutOfScope { - pdfColorGray() + r.pdfColorGray() textRAA = "out-of-scope" } - pdf.MultiCell(145, 6, textRAA, "0", "0", false) - pdfColorBlack() - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Size:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, technicalAsset.Size.String(), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Technology:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, technicalAsset.Technology.String(), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Tags:", "0", 0, "", false, 0, "") - pdfColorBlack() + r.pdf.MultiCell(145, 6, textRAA, "0", "0", false) + r.pdfColorBlack() + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) + 
} + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Size:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, technicalAsset.Size.String(), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Technology:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, technicalAsset.Technology.String(), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Tags:", "0", 0, "", false, 0, "") + r.pdfColorBlack() tagsUsedText := "" sorted := technicalAsset.Tags sort.Strings(sorted) @@ -2830,77 +2860,77 @@ func createTechnicalAssets(parsedModel *types.ParsedModel) { tagsUsedText += tag } if len(tagsUsedText) == 0 { - pdfColorGray() + r.pdfColorGray() tagsUsedText = "none" } - pdf.MultiCell(145, 6, uni(tagsUsedText), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Internet:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, strconv.FormatBool(technicalAsset.Internet), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Machine:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, technicalAsset.Machine.String(), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Encryption:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, technicalAsset.Encryption.String(), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) - } - 
pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Multi-Tenant:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, strconv.FormatBool(technicalAsset.MultiTenant), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Redundant:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, strconv.FormatBool(technicalAsset.Redundant), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Custom-Developed:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, strconv.FormatBool(technicalAsset.CustomDevelopedParts), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Client by Human:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, strconv.FormatBool(technicalAsset.UsedAsClientByHuman), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Data Processed:", "0", 0, "", false, 0, "") - pdfColorBlack() + r.pdf.MultiCell(145, 6, uni(tagsUsedText), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Internet:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, strconv.FormatBool(technicalAsset.Internet), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Machine:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, technicalAsset.Machine.String(), "0", "0", false) + if 
r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Encryption:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, technicalAsset.Encryption.String(), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Multi-Tenant:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, strconv.FormatBool(technicalAsset.MultiTenant), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Redundant:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, strconv.FormatBool(technicalAsset.Redundant), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Custom-Developed:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, strconv.FormatBool(technicalAsset.CustomDevelopedParts), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Client by Human:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, strconv.FormatBool(technicalAsset.UsedAsClientByHuman), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Data Processed:", "0", 0, "", false, 0, "") + r.pdfColorBlack() dataAssetsProcessedText := "" for _, dataAsset := range technicalAsset.DataAssetsProcessedSorted(parsedModel) { if len(dataAssetsProcessedText) > 0 { @@ -2909,15 +2939,15 @@ func createTechnicalAssets(parsedModel *types.ParsedModel) { 
dataAssetsProcessedText += dataAsset.Title } if len(dataAssetsProcessedText) == 0 { - pdfColorGray() + r.pdfColorGray() dataAssetsProcessedText = "none" } - pdf.MultiCell(145, 6, uni(dataAssetsProcessedText), "0", "0", false) + r.pdf.MultiCell(145, 6, uni(dataAssetsProcessedText), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Data Stored:", "0", 0, "", false, 0, "") - pdfColorBlack() + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Data Stored:", "0", 0, "", false, 0, "") + r.pdfColorBlack() dataAssetsStoredText := "" for _, dataAsset := range technicalAsset.DataAssetsStoredSorted(parsedModel) { if len(dataAssetsStoredText) > 0 { @@ -2926,15 +2956,15 @@ func createTechnicalAssets(parsedModel *types.ParsedModel) { dataAssetsStoredText += dataAsset.Title } if len(dataAssetsStoredText) == 0 { - pdfColorGray() + r.pdfColorGray() dataAssetsStoredText = "none" } - pdf.MultiCell(145, 6, uni(dataAssetsStoredText), "0", "0", false) + r.pdf.MultiCell(145, 6, uni(dataAssetsStoredText), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Formats Accepted:", "0", 0, "", false, 0, "") - pdfColorBlack() + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Formats Accepted:", "0", 0, "", false, 0, "") + r.pdfColorBlack() formatsAcceptedText := "" for _, formatAccepted := range technicalAsset.DataFormatsAcceptedSorted() { if len(formatsAcceptedText) > 0 { @@ -2943,196 +2973,196 @@ func createTechnicalAssets(parsedModel *types.ParsedModel) { formatsAcceptedText += formatAccepted.Title() } if len(formatsAcceptedText) == 0 { - pdfColorGray() + r.pdfColorGray() formatsAcceptedText = "none of the special data formats accepted" } - pdf.MultiCell(145, 6, formatsAcceptedText, "0", "0", false) - - pdf.Ln(-1) - pdf.Ln(4) - if pdf.GetY() > 260 { // 260 only for major 
titles (to avoid "Schusterjungen"), for the rest attributes 270 - pageBreak() - pdf.SetY(36) - } - pdfColorBlack() - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdf.CellFormat(190, 6, "Asset Rating", "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Owner:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, uni(technicalAsset.Owner), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Confidentiality:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.CellFormat(40, 6, technicalAsset.Confidentiality.String(), "0", 0, "", false, 0, "") - pdfColorGray() - pdf.CellFormat(115, 6, technicalAsset.Confidentiality.RatingStringInScale(), "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.Ln(-1) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Integrity:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.CellFormat(40, 6, technicalAsset.Integrity.String(), "0", 0, "", false, 0, "") - pdfColorGray() - pdf.CellFormat(115, 6, technicalAsset.Integrity.RatingStringInScale(), "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.Ln(-1) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Availability:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.CellFormat(40, 6, technicalAsset.Availability.String(), "0", 0, "", false, 0, "") - pdfColorGray() - pdf.CellFormat(115, 6, technicalAsset.Availability.RatingStringInScale(), "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.Ln(-1) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - 
pdf.CellFormat(40, 6, "CIA-Justification:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, uni(technicalAsset.JustificationCiaRating), "0", "0", false) + r.pdf.MultiCell(145, 6, formatsAcceptedText, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.Ln(4) + if r.pdf.GetY() > 260 { // 260 only for major titles (to avoid "Schusterjungen"), for the rest attributes 270 + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorBlack() + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 6, "Asset Rating", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Owner:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, uni(technicalAsset.Owner), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Confidentiality:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.CellFormat(40, 6, technicalAsset.Confidentiality.String(), "0", 0, "", false, 0, "") + r.pdfColorGray() + r.pdf.CellFormat(115, 6, technicalAsset.Confidentiality.RatingStringInScale(), "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.Ln(-1) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Integrity:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.CellFormat(40, 6, technicalAsset.Integrity.String(), "0", 0, "", false, 0, "") + r.pdfColorGray() + r.pdf.CellFormat(115, 6, technicalAsset.Integrity.RatingStringInScale(), "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.Ln(-1) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Availability:", "0", 0, "", 
false, 0, "") + r.pdfColorBlack() + r.pdf.CellFormat(40, 6, technicalAsset.Availability.String(), "0", 0, "", false, 0, "") + r.pdfColorGray() + r.pdf.CellFormat(115, 6, technicalAsset.Availability.RatingStringInScale(), "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.Ln(-1) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "CIA-Justification:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, uni(technicalAsset.JustificationCiaRating), "0", "0", false) if technicalAsset.OutOfScope { - pdf.Ln(-1) - pdf.Ln(4) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdf.Ln(-1) + r.pdf.Ln(4) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorBlack() - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdf.CellFormat(190, 6, "Asset Out-of-Scope Justification", "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.MultiCell(190, 6, uni(technicalAsset.JustificationOutOfScope), "0", "0", false) - pdf.Ln(-1) + r.pdfColorBlack() + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 6, "Asset Out-of-Scope Justification", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.MultiCell(190, 6, uni(technicalAsset.JustificationOutOfScope), "0", "0", false) + r.pdf.Ln(-1) } - pdf.Ln(-1) + r.pdf.Ln(-1) if len(technicalAsset.CommunicationLinks) > 0 { - pdf.Ln(-1) - if pdf.GetY() > 260 { // 260 only for major titles (to avoid "Schusterjungen"), for the rest attributes 270 - pageBreak() - pdf.SetY(36) + r.pdf.Ln(-1) + if r.pdf.GetY() > 260 { // 260 only for major titles (to avoid "Schusterjungen"), for the rest attributes 270 + r.pageBreak() + r.pdf.SetY(36) } - pdfColorBlack() - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdf.CellFormat(190, 6, "Outgoing Communication Links: 
"+strconv.Itoa(len(technicalAsset.CommunicationLinks)), "0", 0, "", false, 0, "") - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdfColorGray() + r.pdfColorBlack() + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 6, "Outgoing Communication Links: "+strconv.Itoa(len(technicalAsset.CommunicationLinks)), "0", 0, "", false, 0, "") + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdfColorGray() html.Write(5, "Target technical asset names are clickable and link to the corresponding chapter.") - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.Ln(-1) - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.Ln(-1) + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) for _, outgoingCommLink := range technicalAsset.CommunicationLinksSorted() { - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorBlack() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(185, 6, uni(outgoingCommLink.Title)+" (outgoing)", "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.MultiCell(185, 6, uni(outgoingCommLink.Description), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdfColorBlack() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(185, 6, uni(outgoingCommLink.Title)+" (outgoing)", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.MultiCell(185, 6, uni(outgoingCommLink.Description), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdf.Ln(-1) - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Target:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(125, 6, uni(parsedModel.TechnicalAssets[outgoingCommLink.TargetId].Title), "0", "0", false) - 
pdf.Link(60, pdf.GetY()-5, 70, 5, tocLinkIdByAssetId[outgoingCommLink.TargetId]) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdf.Ln(-1) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "Target:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(125, 6, uni(parsedModel.TechnicalAssets[outgoingCommLink.TargetId].Title), "0", "0", false) + r.pdf.Link(60, r.pdf.GetY()-5, 70, 5, r.tocLinkIdByAssetId[outgoingCommLink.TargetId]) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Protocol:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(140, 6, outgoingCommLink.Protocol.String(), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "Protocol:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(140, 6, outgoingCommLink.Protocol.String(), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Encrypted:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(140, 6, strconv.FormatBool(outgoingCommLink.Protocol.IsEncrypted()), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "Encrypted:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(140, 6, strconv.FormatBool(outgoingCommLink.Protocol.IsEncrypted()), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Authentication:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(140, 6, 
outgoingCommLink.Authentication.String(), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "Authentication:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(140, 6, outgoingCommLink.Authentication.String(), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Authorization:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(140, 6, outgoingCommLink.Authorization.String(), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "Authorization:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(140, 6, outgoingCommLink.Authorization.String(), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Read-Only:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(140, 6, strconv.FormatBool(outgoingCommLink.Readonly), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "Read-Only:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(140, 6, strconv.FormatBool(outgoingCommLink.Readonly), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Usage:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(140, 6, outgoingCommLink.Usage.String(), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + 
r.pdf.CellFormat(35, 6, "Usage:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(140, 6, outgoingCommLink.Usage.String(), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Tags:", "0", 0, "", false, 0, "") - pdfColorBlack() + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "Tags:", "0", 0, "", false, 0, "") + r.pdfColorBlack() tagsUsedText := "" sorted := outgoingCommLink.Tags sort.Strings(sorted) @@ -3143,32 +3173,32 @@ func createTechnicalAssets(parsedModel *types.ParsedModel) { tagsUsedText += tag } if len(tagsUsedText) == 0 { - pdfColorGray() + r.pdfColorGray() tagsUsedText = "none" } - pdf.MultiCell(140, 6, uni(tagsUsedText), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdf.MultiCell(140, 6, uni(tagsUsedText), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "VPN:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(140, 6, strconv.FormatBool(outgoingCommLink.VPN), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "VPN:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(140, 6, strconv.FormatBool(outgoingCommLink.VPN), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "IP-Filtered:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(140, 6, strconv.FormatBool(outgoingCommLink.IpFiltered), "0", "0", false) - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Data Sent:", "0", 0, "", false, 0, "") - 
pdfColorBlack() + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "IP-Filtered:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(140, 6, strconv.FormatBool(outgoingCommLink.IpFiltered), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "Data Sent:", "0", 0, "", false, 0, "") + r.pdfColorBlack() dataAssetsSentText := "" for _, dataAsset := range outgoingCommLink.DataAssetsSentSorted(parsedModel) { if len(dataAssetsSentText) > 0 { @@ -3177,14 +3207,14 @@ func createTechnicalAssets(parsedModel *types.ParsedModel) { dataAssetsSentText += dataAsset.Title } if len(dataAssetsSentText) == 0 { - pdfColorGray() + r.pdfColorGray() dataAssetsSentText = "none" } - pdf.MultiCell(140, 6, uni(dataAssetsSentText), "0", "0", false) - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Data Received:", "0", 0, "", false, 0, "") - pdfColorBlack() + r.pdf.MultiCell(140, 6, uni(dataAssetsSentText), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "Data Received:", "0", 0, "", false, 0, "") + r.pdfColorBlack() dataAssetsReceivedText := "" for _, dataAsset := range outgoingCommLink.DataAssetsReceivedSorted(parsedModel) { if len(dataAssetsReceivedText) > 0 { @@ -3193,116 +3223,116 @@ func createTechnicalAssets(parsedModel *types.ParsedModel) { dataAssetsReceivedText += dataAsset.Title } if len(dataAssetsReceivedText) == 0 { - pdfColorGray() + r.pdfColorGray() dataAssetsReceivedText = "none" } - pdf.MultiCell(140, 6, uni(dataAssetsReceivedText), "0", "0", false) - pdf.Ln(-1) + r.pdf.MultiCell(140, 6, uni(dataAssetsReceivedText), "0", "0", false) + r.pdf.Ln(-1) } } incomingCommLinks := parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] if len(incomingCommLinks) > 0 { - pdf.Ln(-1) - if pdf.GetY() > 260 { // 260 only 
for major titles (to avoid "Schusterjungen"), for the rest attributes 270 - pageBreak() - pdf.SetY(36) + r.pdf.Ln(-1) + if r.pdf.GetY() > 260 { // 260 only for major titles (to avoid "Schusterjungen"), for the rest attributes 270 + r.pageBreak() + r.pdf.SetY(36) } - pdfColorBlack() - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdf.CellFormat(190, 6, "Incoming Communication Links: "+strconv.Itoa(len(incomingCommLinks)), "0", 0, "", false, 0, "") - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdfColorGray() + r.pdfColorBlack() + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 6, "Incoming Communication Links: "+strconv.Itoa(len(incomingCommLinks)), "0", 0, "", false, 0, "") + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdfColorGray() html.Write(5, "Source technical asset names are clickable and link to the corresponding chapter.") - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.Ln(-1) - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.Ln(-1) + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) for _, incomingCommLink := range incomingCommLinks { - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorBlack() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(185, 6, uni(incomingCommLink.Title)+" (incoming)", "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.MultiCell(185, 6, uni(incomingCommLink.Description), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdfColorBlack() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(185, 6, uni(incomingCommLink.Title)+" (incoming)", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.MultiCell(185, 6, uni(incomingCommLink.Description), "0", "0", false) + if 
r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdf.Ln(-1) - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Source:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(140, 6, uni(parsedModel.TechnicalAssets[incomingCommLink.SourceId].Title), "0", "0", false) - pdf.Link(60, pdf.GetY()-5, 70, 5, tocLinkIdByAssetId[incomingCommLink.SourceId]) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdf.Ln(-1) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "Source:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(140, 6, uni(parsedModel.TechnicalAssets[incomingCommLink.SourceId].Title), "0", "0", false) + r.pdf.Link(60, r.pdf.GetY()-5, 70, 5, r.tocLinkIdByAssetId[incomingCommLink.SourceId]) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Protocol:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(140, 6, incomingCommLink.Protocol.String(), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "Protocol:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(140, 6, incomingCommLink.Protocol.String(), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Encrypted:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(140, 6, strconv.FormatBool(incomingCommLink.Protocol.IsEncrypted()), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "Encrypted:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(140, 6, 
strconv.FormatBool(incomingCommLink.Protocol.IsEncrypted()), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Authentication:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(140, 6, incomingCommLink.Authentication.String(), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "Authentication:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(140, 6, incomingCommLink.Authentication.String(), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Authorization:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(140, 6, incomingCommLink.Authorization.String(), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "Authorization:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(140, 6, incomingCommLink.Authorization.String(), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Read-Only:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(140, 6, strconv.FormatBool(incomingCommLink.Readonly), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "Read-Only:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(140, 6, strconv.FormatBool(incomingCommLink.Readonly), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(15, 6, "", 
"0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Usage:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(140, 6, incomingCommLink.Usage.String(), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "Usage:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(140, 6, incomingCommLink.Usage.String(), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Tags:", "0", 0, "", false, 0, "") - pdfColorBlack() + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "Tags:", "0", 0, "", false, 0, "") + r.pdfColorBlack() tagsUsedText := "" sorted := incomingCommLink.Tags sort.Strings(sorted) @@ -3313,32 +3343,32 @@ func createTechnicalAssets(parsedModel *types.ParsedModel) { tagsUsedText += tag } if len(tagsUsedText) == 0 { - pdfColorGray() + r.pdfColorGray() tagsUsedText = "none" } - pdf.MultiCell(140, 6, uni(tagsUsedText), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdf.MultiCell(140, 6, uni(tagsUsedText), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "VPN:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(140, 6, strconv.FormatBool(incomingCommLink.VPN), "0", "0", false) - if pdf.GetY() > 270 { - pageBreak() - pdf.SetY(36) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "VPN:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(140, 6, strconv.FormatBool(incomingCommLink.VPN), "0", "0", false) + if r.pdf.GetY() > 270 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - 
pdf.CellFormat(35, 6, "IP-Filtered:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(140, 6, strconv.FormatBool(incomingCommLink.IpFiltered), "0", "0", false) - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Data Received:", "0", 0, "", false, 0, "") - pdfColorBlack() + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "IP-Filtered:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(140, 6, strconv.FormatBool(incomingCommLink.IpFiltered), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "Data Received:", "0", 0, "", false, 0, "") + r.pdfColorBlack() dataAssetsSentText := "" // yep, here we reverse the sent/received direction, as it's the incoming stuff for _, dataAsset := range incomingCommLink.DataAssetsSentSorted(parsedModel) { @@ -3348,14 +3378,14 @@ func createTechnicalAssets(parsedModel *types.ParsedModel) { dataAssetsSentText += dataAsset.Title } if len(dataAssetsSentText) == 0 { - pdfColorGray() + r.pdfColorGray() dataAssetsSentText = "none" } - pdf.MultiCell(140, 6, uni(dataAssetsSentText), "0", "0", false) - pdfColorGray() - pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(35, 6, "Data Sent:", "0", 0, "", false, 0, "") - pdfColorBlack() + r.pdf.MultiCell(140, 6, uni(dataAssetsSentText), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(15, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(35, 6, "Data Sent:", "0", 0, "", false, 0, "") + r.pdfColorBlack() dataAssetsReceivedText := "" // yep, here we reverse the sent/received direction, as it's the incoming stuff for _, dataAsset := range incomingCommLink.DataAssetsReceivedSorted(parsedModel) { @@ -3365,23 +3395,23 @@ func createTechnicalAssets(parsedModel *types.ParsedModel) { dataAssetsReceivedText += dataAsset.Title } if len(dataAssetsReceivedText) == 0 { - pdfColorGray() + 
r.pdfColorGray() dataAssetsReceivedText = "none" } - pdf.MultiCell(140, 6, uni(dataAssetsReceivedText), "0", "0", false) - pdf.Ln(-1) + r.pdf.MultiCell(140, 6, uni(dataAssetsReceivedText), "0", "0", false) + r.pdf.Ln(-1) } } } } -func createDataAssets(parsedModel *types.ParsedModel) { - uni := pdf.UnicodeTranslatorFromDescriptor("") +func (r *pdfReporter) createDataAssets(parsedModel *types.ParsedModel) { + uni := r.pdf.UnicodeTranslatorFromDescriptor("") title := "Identified Data Breach Probabilities by Data Asset" - pdfColorBlack() - addHeadline(title, false) - defineLinkTarget("{intro-risks-by-data-asset}") - html := pdf.HTMLBasicNew() + r.pdfColorBlack() + r.addHeadline(title, false) + r.defineLinkTarget("{intro-risks-by-data-asset}") + html := r.pdf.HTMLBasicNew() html.Write(5, "In total "+strconv.Itoa(types.TotalRiskCount(parsedModel))+" potential risks have been identified during the threat modeling process "+ "of which "+ ""+strconv.Itoa(len(types.FilteredByOnlyCriticalRisks(parsedModel)))+" are rated as critical, "+ @@ -3391,31 +3421,31 @@ func createDataAssets(parsedModel *types.ParsedModel) { "and "+strconv.Itoa(len(types.FilteredByOnlyLowRisks(parsedModel)))+" as low. "+ "

These risks are distributed across "+strconv.Itoa(len(parsedModel.DataAssets))+" data assets. ") html.Write(5, "The following sub-chapters of this section describe the derived data breach probabilities grouped by data asset.
") // TODO more explanation text - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdfColorGray() + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdfColorGray() html.Write(5, "Technical asset names and risk IDs are clickable and link to the corresponding chapter.") - pdf.SetFont("Helvetica", "", fontSizeBody) - currentChapterTitleBreadcrumb = title + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.currentChapterTitleBreadcrumb = title for _, dataAsset := range sortedDataAssetsByDataBreachProbabilityAndTitle(parsedModel) { - if pdf.GetY() > 280 { // 280 as only small font previously (not 250) - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 280 { // 280 as only small font previously (not 250) + r.pageBreak() + r.pdf.SetY(36) } else { html.Write(5, "


") } - pdfColorBlack() + r.pdfColorBlack() switch dataAsset.IdentifiedDataBreachProbabilityStillAtRisk(parsedModel) { case types.Probable: - colors.ColorHighRisk(pdf) + colors.ColorHighRisk(r.pdf) case types.Possible: - colors.ColorMediumRisk(pdf) + colors.ColorMediumRisk(r.pdf) case types.Improbable: - colors.ColorLowRisk(pdf) + colors.ColorLowRisk(r.pdf) default: - pdfColorBlack() + r.pdfColorBlack() } if !dataAsset.IsDataBreachPotentialStillAtRisk(parsedModel) { - pdfColorBlack() + r.pdfColorBlack() } risksStr := dataAsset.IdentifiedDataBreachProbabilityRisks(parsedModel) countStillAtRisk := len(types.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) @@ -3424,73 +3454,73 @@ func createDataAssets(parsedModel *types.ParsedModel) { suffix += "s" } title := uni(dataAsset.Title) + ": " + suffix - addHeadline(title, true) - defineLinkTarget("{data:" + dataAsset.Id + "}") - pdfColorBlack() + r.addHeadline(title, true) + r.defineLinkTarget("{data:" + dataAsset.Id + "}") + r.pdfColorBlack() html.Write(5, uni(dataAsset.Description)) html.Write(5, "

") - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetFont("Helvetica", "", fontSizeBody) /* - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Indirect Breach:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Indirect Breach:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.SetFont("Helvetica", "B", fontSizeBody) probability := dataAsset.IdentifiedDataBreachProbability() dataBreachText := probability.String() switch probability { case model.Probable: - colors.ColorHighRisk(pdf) + colors.ColorHighRisk(r.pdf) case model.Possible: - colors.ColorMediumRisk(pdf) + colors.ColorMediumRisk(r.pdf) case model.Improbable: - colors.ColorLowRisk(pdf) + colors.ColorLowRisk(r.pdf) default: - pdfColorBlack() + r.pdfColorBlack() } if !dataAsset.IsDataBreachPotentialStillAtRisk() { - pdfColorBlack() + r.pdfColorBlack() dataBreachText = "none" } - pdf.MultiCell(145, 6, dataBreachText, "0", "0", false) - pdf.SetFont("Helvetica", "", fontSizeBody) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) + r.pdf.MultiCell(145, 6, dataBreachText, "0", "0", false) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) } */ - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "ID:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, dataAsset.Id, "0", "0", false) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Usage:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, dataAsset.Usage.String(), "0", "0", false) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, 
"Quantity:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, dataAsset.Quantity.String(), "0", "0", false) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Tags:", "0", 0, "", false, 0, "") - pdfColorBlack() + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "ID:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, dataAsset.Id, "0", "0", false) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Usage:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, dataAsset.Usage.String(), "0", "0", false) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Quantity:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, dataAsset.Quantity.String(), "0", "0", false) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Tags:", "0", 0, "", false, 0, "") + r.pdfColorBlack() tagsUsedText := "" sorted := dataAsset.Tags sort.Strings(sorted) @@ -3501,85 +3531,85 @@ func createDataAssets(parsedModel *types.ParsedModel) { tagsUsedText += tag } if len(tagsUsedText) == 0 { - pdfColorGray() + r.pdfColorGray() tagsUsedText = "none" } - pdf.MultiCell(145, 6, uni(tagsUsedText), "0", "0", false) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Origin:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, uni(dataAsset.Origin), "0", "0", false) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() 
- pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Owner:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, uni(dataAsset.Owner), "0", "0", false) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Confidentiality:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.CellFormat(40, 6, dataAsset.Confidentiality.String(), "0", 0, "", false, 0, "") - pdfColorGray() - pdf.CellFormat(115, 6, dataAsset.Confidentiality.RatingStringInScale(), "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.Ln(-1) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Integrity:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.CellFormat(40, 6, dataAsset.Integrity.String(), "0", 0, "", false, 0, "") - pdfColorGray() - pdf.CellFormat(115, 6, dataAsset.Integrity.RatingStringInScale(), "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.Ln(-1) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Availability:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.CellFormat(40, 6, dataAsset.Availability.String(), "0", 0, "", false, 0, "") - pdfColorGray() - pdf.CellFormat(115, 6, dataAsset.Availability.RatingStringInScale(), "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.Ln(-1) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "CIA-Justification:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, uni(dataAsset.JustificationCiaRating), "0", "0", false) - - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) - } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Processed by:", "0", 0, "", false, 
0, "") - pdfColorBlack() + r.pdf.MultiCell(145, 6, uni(tagsUsedText), "0", "0", false) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Origin:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, uni(dataAsset.Origin), "0", "0", false) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Owner:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, uni(dataAsset.Owner), "0", "0", false) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Confidentiality:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.CellFormat(40, 6, dataAsset.Confidentiality.String(), "0", 0, "", false, 0, "") + r.pdfColorGray() + r.pdf.CellFormat(115, 6, dataAsset.Confidentiality.RatingStringInScale(), "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.Ln(-1) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Integrity:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.CellFormat(40, 6, dataAsset.Integrity.String(), "0", 0, "", false, 0, "") + r.pdfColorGray() + r.pdf.CellFormat(115, 6, dataAsset.Integrity.RatingStringInScale(), "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.Ln(-1) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Availability:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.CellFormat(40, 6, dataAsset.Availability.String(), "0", 0, "", false, 0, "") + r.pdfColorGray() + r.pdf.CellFormat(115, 6, dataAsset.Availability.RatingStringInScale(), "0", 0, "", 
false, 0, "") + r.pdfColorBlack() + r.pdf.Ln(-1) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "CIA-Justification:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, uni(dataAsset.JustificationCiaRating), "0", "0", false) + + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) + } + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Processed by:", "0", 0, "", false, 0, "") + r.pdfColorBlack() processedByText := "" for _, dataAsset := range dataAsset.ProcessedByTechnicalAssetsSorted(parsedModel) { if len(processedByText) > 0 { @@ -3588,19 +3618,19 @@ func createDataAssets(parsedModel *types.ParsedModel) { processedByText += dataAsset.Title // TODO add link to technical asset detail chapter and back } if len(processedByText) == 0 { - pdfColorGray() + r.pdfColorGray() processedByText = "none" } - pdf.MultiCell(145, 6, uni(processedByText), "0", "0", false) + r.pdf.MultiCell(145, 6, uni(processedByText), "0", "0", false) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Stored by:", "0", 0, "", false, 0, "") - pdfColorBlack() + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Stored by:", "0", 0, "", false, 0, "") + r.pdfColorBlack() storedByText := "" for _, dataAsset := range dataAsset.StoredByTechnicalAssetsSorted(parsedModel) { if len(storedByText) > 0 { @@ -3609,19 +3639,19 @@ func createDataAssets(parsedModel *types.ParsedModel) { storedByText += dataAsset.Title // TODO add link to technical asset detail chapter and back } if len(storedByText) == 0 { - pdfColorGray() + r.pdfColorGray() storedByText = "none" } - pdf.MultiCell(145, 6, uni(storedByText), "0", "0", false) 
+ r.pdf.MultiCell(145, 6, uni(storedByText), "0", "0", false) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Sent via:", "0", 0, "", false, 0, "") - pdfColorBlack() + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Sent via:", "0", 0, "", false, 0, "") + r.pdfColorBlack() sentViaText := "" for _, commLink := range dataAsset.SentViaCommLinksSorted(parsedModel) { if len(sentViaText) > 0 { @@ -3630,19 +3660,19 @@ func createDataAssets(parsedModel *types.ParsedModel) { sentViaText += commLink.Title // TODO add link to technical asset detail chapter and back } if len(sentViaText) == 0 { - pdfColorGray() + r.pdfColorGray() sentViaText = "none" } - pdf.MultiCell(145, 6, uni(sentViaText), "0", "0", false) + r.pdf.MultiCell(145, 6, uni(sentViaText), "0", "0", false) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Received via:", "0", 0, "", false, 0, "") - pdfColorBlack() + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Received via:", "0", 0, "", false, 0, "") + r.pdfColorBlack() receivedViaText := "" for _, commLink := range dataAsset.ReceivedViaCommLinksSorted(parsedModel) { if len(receivedViaText) > 0 { @@ -3651,10 +3681,10 @@ func createDataAssets(parsedModel *types.ParsedModel) { receivedViaText += commLink.Title // TODO add link to technical asset detail chapter and back } if len(receivedViaText) == 0 { - pdfColorGray() + r.pdfColorGray() receivedViaText = "none" } - pdf.MultiCell(145, 6, uni(receivedViaText), "0", "0", false) + r.pdf.MultiCell(145, 6, uni(receivedViaText), "0", "0", false) /* // where is this data asset at risk (i.e. 
why) @@ -3668,194 +3698,194 @@ func createDataAssets(parsedModel *types.ParsedModel) { if len(techAssetsResponsible) == 1 { assetStr = "asset" } - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Risk via:", "0", 0, "", false, 0, "") + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Risk via:", "0", 0, "", false, 0, "") if len(techAssetsResponsible) == 0 { - pdfColorGray() - pdf.MultiCell(145, 6, "This data asset is not directly at risk via any technical asset.", "0", "0", false) + r.pdfColorGray() + r.pdf.MultiCell(145, 6, "This data asset is not directly at risk via any technical asset.", "0", "0", false) } else { - pdfColorBlack() - pdf.MultiCell(145, 6, "This data asset is at direct risk via "+strconv.Itoa(len(techAssetsResponsible))+" technical "+assetStr+":", "0", "0", false) + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, "This data asset is at direct risk via "+strconv.Itoa(len(techAssetsResponsible))+" technical "+assetStr+":", "0", "0", false) for _, techAssetResponsible := range techAssetsResponsible { - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) } switch model.HighestSeverityStillAtRisk(techAssetResponsible.GeneratedRisks()) { case model.High: - colors.ColorHighRisk(pdf) + colors.ColorHighRisk(r.pdf) case model.Medium: - colors.ColorMediumRisk(pdf) + colors.ColorMediumRisk(r.pdf) case model.Low: - colors.ColorLowRisk(pdf) + colors.ColorLowRisk(r.pdf) default: - pdfColorBlack() + r.pdfColorBlack() } risksStr := techAssetResponsible.GeneratedRisks() if len(model.ReduceToOnlyStillAtRisk(risksStr)) == 0 { - pdfColorBlack() + r.pdfColorBlack() } riskStr := "risksStr" if len(risksStr) == 1 { riskStr = "risk" } - pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") - posY := pdf.GetY() + 
r.pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") + posY := r.pdf.GetY() risksResponsible := techAssetResponsible.GeneratedRisks() risksResponsibleStillAtRisk := model.ReduceToOnlyStillAtRisk(risksResponsible) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.MultiCell(185, 6, uni(techAssetResponsible.Title)+": "+strconv.Itoa(len(risksResponsibleStillAtRisk))+" / "+strconv.Itoa(len(risksResponsible))+" "+riskStr, "0", "0", false) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.Link(20, posY, 180, pdf.GetY()-posY, tocLinkIdByAssetId[techAssetResponsible.Id]) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.MultiCell(185, 6, uni(techAssetResponsible.Title)+": "+strconv.Itoa(len(risksResponsibleStillAtRisk))+" / "+strconv.Itoa(len(risksResponsible))+" "+riskStr, "0", "0", false) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.Link(20, posY, 180, r.pdf.GetY()-posY, tocLinkIdByAssetId[techAssetResponsible.Id]) } - pdfColorBlack() + r.pdfColorBlack() } */ - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Data Breach:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Data Breach:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.SetFont("Helvetica", "B", fontSizeBody) dataBreachProbability := dataAsset.IdentifiedDataBreachProbabilityStillAtRisk(parsedModel) riskText := dataBreachProbability.String() switch dataBreachProbability { case types.Probable: - colors.ColorHighRisk(pdf) + colors.ColorHighRisk(r.pdf) case types.Possible: - colors.ColorMediumRisk(pdf) + colors.ColorMediumRisk(r.pdf) case types.Improbable: - colors.ColorLowRisk(pdf) + colors.ColorLowRisk(r.pdf) default: - pdfColorBlack() + r.pdfColorBlack() } if !dataAsset.IsDataBreachPotentialStillAtRisk(parsedModel) { - pdfColorBlack() + r.pdfColorBlack() riskText = "none" } - pdf.MultiCell(145, 6, 
riskText, "0", "0", false) - pdf.SetFont("Helvetica", "", fontSizeBody) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) + r.pdf.MultiCell(145, 6, riskText, "0", "0", false) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) } // how can is this data asset be indirectly lost (i.e. why) dataBreachRisksStillAtRisk := dataAsset.IdentifiedDataBreachProbabilityRisksStillAtRisk(parsedModel) types.SortByDataBreachProbability(dataBreachRisksStillAtRisk, parsedModel) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Data Breach Risks:", "0", 0, "", false, 0, "") + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Data Breach Risks:", "0", 0, "", false, 0, "") if len(dataBreachRisksStillAtRisk) == 0 { - pdfColorGray() - pdf.MultiCell(145, 6, "This data asset has no data breach potential.", "0", "0", false) + r.pdfColorGray() + r.pdf.MultiCell(145, 6, "This data asset has no data breach potential.", "0", "0", false) } else { - pdfColorBlack() + r.pdfColorBlack() riskRemainingStr := "risksStr" if countStillAtRisk == 1 { riskRemainingStr = "risk" } - pdf.MultiCell(145, 6, "This data asset has data breach potential because of "+ + r.pdf.MultiCell(145, 6, "This data asset has data breach potential because of "+ ""+strconv.Itoa(countStillAtRisk)+" remaining "+riskRemainingStr+":", "0", "0", false) for _, dataBreachRisk := range dataBreachRisksStillAtRisk { - if pdf.GetY() > 280 { // 280 as only small font here - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 280 { // 280 as only small font here + r.pageBreak() + r.pdf.SetY(36) } switch dataBreachRisk.DataBreachProbability { case types.Probable: - colors.ColorHighRisk(pdf) + colors.ColorHighRisk(r.pdf) case types.Possible: - colors.ColorMediumRisk(pdf) + 
colors.ColorMediumRisk(r.pdf) case types.Improbable: - colors.ColorLowRisk(pdf) + colors.ColorLowRisk(r.pdf) default: - pdfColorBlack() + r.pdfColorBlack() } if !dataBreachRisk.GetRiskTrackingStatusDefaultingUnchecked(parsedModel).IsStillAtRisk() { - pdfColorBlack() + r.pdfColorBlack() } - pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") - posY := pdf.GetY() - pdf.SetFont("Helvetica", "", fontSizeVerySmall) - pdf.MultiCell(185, 5, dataBreachRisk.DataBreachProbability.Title()+": "+uni(dataBreachRisk.SyntheticId), "0", "0", false) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.Link(20, posY, 180, pdf.GetY()-posY, tocLinkIdByAssetId[dataBreachRisk.CategoryId]) + r.pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") + posY := r.pdf.GetY() + r.pdf.SetFont("Helvetica", "", fontSizeVerySmall) + r.pdf.MultiCell(185, 5, dataBreachRisk.DataBreachProbability.Title()+": "+uni(dataBreachRisk.SyntheticId), "0", "0", false) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.Link(20, posY, 180, r.pdf.GetY()-posY, r.tocLinkIdByAssetId[dataBreachRisk.CategoryId]) } - pdfColorBlack() + r.pdfColorBlack() } } } -func createTrustBoundaries(parsedModel *types.ParsedModel) { - uni := pdf.UnicodeTranslatorFromDescriptor("") +func (r *pdfReporter) createTrustBoundaries(parsedModel *types.ParsedModel) { + uni := r.pdf.UnicodeTranslatorFromDescriptor("") title := "Trust Boundaries" - pdfColorBlack() - addHeadline(title, false) + r.pdfColorBlack() + r.addHeadline(title, false) - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() word := "has" if len(parsedModel.TrustBoundaries) > 1 { word = "have" } html.Write(5, "In total "+strconv.Itoa(len(parsedModel.TrustBoundaries))+" trust boundaries "+word+" been "+ "modeled during the threat modeling process.") - currentChapterTitleBreadcrumb = title + r.currentChapterTitleBreadcrumb = title for _, trustBoundary := range sortedTrustBoundariesByTitle(parsedModel) { - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() 
> 250 { + r.pageBreak() + r.pdf.SetY(36) } else { html.Write(5, "


") } - colors.ColorTwilight(pdf) + colors.ColorTwilight(r.pdf) if !trustBoundary.Type.IsNetworkBoundary() { - pdfColorLightGray() + r.pdfColorLightGray() } html.Write(5, ""+uni(trustBoundary.Title)+"
") - defineLinkTarget("{boundary:" + trustBoundary.Id + "}") + r.defineLinkTarget("{boundary:" + trustBoundary.Id + "}") html.Write(5, uni(trustBoundary.Description)) html.Write(5, "

") - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "ID:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, trustBoundary.Id, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "ID:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, trustBoundary.Id, "0", "0", false) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Type:", "0", 0, "", false, 0, "") - colors.ColorTwilight(pdf) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Type:", "0", 0, "", false, 0, "") + colors.ColorTwilight(r.pdf) if !trustBoundary.Type.IsNetworkBoundary() { - pdfColorLightGray() + r.pdfColorLightGray() } - pdf.MultiCell(145, 6, trustBoundary.Type.String(), "0", "0", false) - pdfColorBlack() + r.pdf.MultiCell(145, 6, trustBoundary.Type.String(), "0", "0", false) + r.pdfColorBlack() - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Tags:", "0", 0, "", false, 0, "") - pdfColorBlack() + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Tags:", "0", 0, "", false, 0, "") + r.pdfColorBlack() tagsUsedText := "" sorted := trustBoundary.Tags sort.Strings(sorted) @@ -3866,19 +3896,19 @@ func createTrustBoundaries(parsedModel *types.ParsedModel) { tagsUsedText += tag } if len(tagsUsedText) == 0 { - pdfColorGray() + r.pdfColorGray() tagsUsedText = "none" } - pdf.MultiCell(145, 6, uni(tagsUsedText), "0", "0", false) + r.pdf.MultiCell(145, 6, 
uni(tagsUsedText), "0", "0", false) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Assets inside:", "0", 0, "", false, 0, "") - pdfColorBlack() + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Assets inside:", "0", 0, "", false, 0, "") + r.pdfColorBlack() assetsInsideText := "" for _, assetKey := range trustBoundary.TechnicalAssetsInside { if len(assetsInsideText) > 0 { @@ -3887,19 +3917,19 @@ func createTrustBoundaries(parsedModel *types.ParsedModel) { assetsInsideText += parsedModel.TechnicalAssets[assetKey].Title // TODO add link to technical asset detail chapter and back } if len(assetsInsideText) == 0 { - pdfColorGray() + r.pdfColorGray() assetsInsideText = "none" } - pdf.MultiCell(145, 6, uni(assetsInsideText), "0", "0", false) + r.pdf.MultiCell(145, 6, uni(assetsInsideText), "0", "0", false) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Boundaries nested:", "0", 0, "", false, 0, "") - pdfColorBlack() + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Boundaries nested:", "0", 0, "", false, 0, "") + r.pdfColorBlack() boundariesNestedText := "" for _, assetKey := range trustBoundary.TrustBoundariesNested { if len(boundariesNestedText) > 0 { @@ -3908,10 +3938,10 @@ func createTrustBoundaries(parsedModel *types.ParsedModel) { boundariesNestedText += parsedModel.TrustBoundaries[assetKey].Title } if len(boundariesNestedText) == 0 { - pdfColorGray() + r.pdfColorGray() boundariesNestedText = "none" } - pdf.MultiCell(145, 6, uni(boundariesNestedText), "0", "0", false) + r.pdf.MultiCell(145, 6, uni(boundariesNestedText), "0", "0", false) } } @@ 
-3925,49 +3955,49 @@ func questionsUnanswered(parsedModel *types.ParsedModel) int { return result } -func createSharedRuntimes(parsedModel *types.ParsedModel) { - uni := pdf.UnicodeTranslatorFromDescriptor("") +func (r *pdfReporter) createSharedRuntimes(parsedModel *types.ParsedModel) { + uni := r.pdf.UnicodeTranslatorFromDescriptor("") title := "Shared Runtimes" - pdfColorBlack() - addHeadline(title, false) + r.pdfColorBlack() + r.addHeadline(title, false) - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() word, runtime := "has", "runtime" if len(parsedModel.SharedRuntimes) > 1 { word, runtime = "have", "runtimes" } html.Write(5, "In total "+strconv.Itoa(len(parsedModel.SharedRuntimes))+" shared "+runtime+" "+word+" been "+ "modeled during the threat modeling process.") - currentChapterTitleBreadcrumb = title + r.currentChapterTitleBreadcrumb = title for _, sharedRuntime := range sortedSharedRuntimesByTitle(parsedModel) { - pdfColorBlack() - if pdf.GetY() > 250 { - pageBreak() - pdf.SetY(36) + r.pdfColorBlack() + if r.pdf.GetY() > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { html.Write(5, "


") } html.Write(5, ""+uni(sharedRuntime.Title)+"
") - defineLinkTarget("{runtime:" + sharedRuntime.Id + "}") + r.defineLinkTarget("{runtime:" + sharedRuntime.Id + "}") html.Write(5, uni(sharedRuntime.Description)) html.Write(5, "

") - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "ID:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(145, 6, sharedRuntime.Id, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "ID:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(145, 6, sharedRuntime.Id, "0", "0", false) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Tags:", "0", 0, "", false, 0, "") - pdfColorBlack() + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Tags:", "0", 0, "", false, 0, "") + r.pdfColorBlack() tagsUsedText := "" sorted := sharedRuntime.Tags sort.Strings(sorted) @@ -3978,19 +4008,19 @@ func createSharedRuntimes(parsedModel *types.ParsedModel) { tagsUsedText += tag } if len(tagsUsedText) == 0 { - pdfColorGray() + r.pdfColorGray() tagsUsedText = "none" } - pdf.MultiCell(145, 6, uni(tagsUsedText), "0", "0", false) + r.pdf.MultiCell(145, 6, uni(tagsUsedText), "0", "0", false) - if pdf.GetY() > 265 { - pageBreak() - pdf.SetY(36) + if r.pdf.GetY() > 265 { + r.pageBreak() + r.pdf.SetY(36) } - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(40, 6, "Assets running:", "0", 0, "", false, 0, "") - pdfColorBlack() + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(40, 6, "Assets running:", "0", 0, "", false, 0, "") + r.pdfColorBlack() assetsInsideText := "" for _, assetKey := range sharedRuntime.TechnicalAssetsRunning { if len(assetsInsideText) > 0 { @@ -3999,24 +4029,24 @@ func createSharedRuntimes(parsedModel *types.ParsedModel) { assetsInsideText += 
parsedModel.TechnicalAssets[assetKey].Title // TODO add link to technical asset detail chapter and back } if len(assetsInsideText) == 0 { - pdfColorGray() + r.pdfColorGray() assetsInsideText = "none" } - pdf.MultiCell(145, 6, uni(assetsInsideText), "0", "0", false) + r.pdf.MultiCell(145, 6, uni(assetsInsideText), "0", "0", false) } } -func createRiskRulesChecked(parsedModel *types.ParsedModel, modelFilename string, skipRiskRules string, buildTimestamp string, modelHash string, customRiskRules map[string]*types.CustomRisk) { - pdf.SetTextColor(0, 0, 0) +func (r *pdfReporter) createRiskRulesChecked(parsedModel *types.ParsedModel, modelFilename string, skipRiskRules string, buildTimestamp string, modelHash string, customRiskRules map[string]*types.CustomRisk) { + r.pdf.SetTextColor(0, 0, 0) title := "Risk Rules Checked by Threagile" - addHeadline(title, false) - defineLinkTarget("{risk-rules-checked}") - currentChapterTitleBreadcrumb = title + r.addHeadline(title, false) + r.defineLinkTarget("{risk-rules-checked}") + r.currentChapterTitleBreadcrumb = title - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() var strBuilder strings.Builder - pdfColorGray() - pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdfColorGray() + r.pdf.SetFont("Helvetica", "", fontSizeSmall) timestamp := time.Now() strBuilder.WriteString("Threagile Version: " + docs.ThreagileVersion) strBuilder.WriteString("
Threagile Build Timestamp: " + buildTimestamp) @@ -4025,8 +4055,8 @@ func createRiskRulesChecked(parsedModel *types.ParsedModel, modelFilename string strBuilder.WriteString("
Model Hash (SHA256): " + modelHash) html.Write(5, strBuilder.String()) strBuilder.Reset() - pdfColorBlack() - pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorBlack() + r.pdf.SetFont("Helvetica", "", fontSizeBody) strBuilder.WriteString("

Threagile (see https://threagile.io for more details) is an open-source toolkit for agile threat modeling, created by Christian Schneider (https://christian-schneider.net): It allows to model an architecture with its assets in an agile fashion as a YAML file " + "directly inside the IDE. Upon execution of the Threagile toolkit all standard risk rules (as well as individual custom rules if present) " + "are checked against the architecture model. At the time the Threagile toolkit was executed on the model input file " + @@ -4037,1537 +4067,1537 @@ func createRiskRulesChecked(parsedModel *types.ParsedModel, modelFilename string // TODO use the new run system to discover risk rules instead of hard-coding them here: skippedRules := strings.Split(skipRiskRules, ",") skipped := "" - pdf.Ln(-1) + r.pdf.Ln(-1) for id, customRule := range customRiskRules { - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+customRule.Category.Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "I", fontSizeBody) - pdf.CellFormat(190, 6, "Custom Risk Rule", "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, customRule.Category.STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(customRule.Category.Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - 
pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, customRule.Category.DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, customRule.Category.RiskAssessment, "0", "0", false) + r.pdf.CellFormat(190, 3, skipped+customRule.Category.Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "I", fontSizeBody) + r.pdf.CellFormat(190, 6, "Custom Risk Rule", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, customRule.Category.STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(customRule.Category.Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, customRule.Category.DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, customRule.Category.RiskAssessment, "0", "0", false) } for _, key := range sortedKeysOfIndividualRiskCategories(parsedModel) { individualRiskCategory := parsedModel.IndividualRiskCategories[key] - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) - pdf.CellFormat(190, 3, 
individualRiskCategory.Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, individualRiskCategory.Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "I", fontSizeBody) - pdf.CellFormat(190, 6, "Individual Risk Category", "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, individualRiskCategory.STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(individualRiskCategory.Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, individualRiskCategory.DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, individualRiskCategory.RiskAssessment, "0", "0", false) - } - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, individualRiskCategory.Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, individualRiskCategory.Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "I", fontSizeBody) + r.pdf.CellFormat(190, 6, "Individual Risk Category", "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", 
"0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, individualRiskCategory.STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(individualRiskCategory.Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, individualRiskCategory.DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, individualRiskCategory.RiskAssessment, "0", "0", false) + } + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, accidental_secret_leak.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+accidental_secret_leak.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, accidental_secret_leak.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, accidental_secret_leak.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(accidental_secret_leak.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - 
pdfColorBlack() - pdf.MultiCell(160, 6, accidental_secret_leak.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, accidental_secret_leak.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+accidental_secret_leak.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, accidental_secret_leak.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, accidental_secret_leak.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(accidental_secret_leak.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, accidental_secret_leak.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, accidental_secret_leak.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, code_backdooring.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+code_backdooring.Category().Title, "0", 0, "", 
false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, code_backdooring.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, code_backdooring.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(code_backdooring.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, code_backdooring.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, code_backdooring.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+code_backdooring.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, code_backdooring.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, code_backdooring.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, 
firstParagraph(code_backdooring.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, code_backdooring.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, code_backdooring.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, container_baseimage_backdooring.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+container_baseimage_backdooring.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, container_baseimage_backdooring.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, container_baseimage_backdooring.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(container_baseimage_backdooring.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, container_baseimage_backdooring.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - 
pdf.MultiCell(160, 6, container_baseimage_backdooring.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+container_baseimage_backdooring.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, container_baseimage_backdooring.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, container_baseimage_backdooring.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(container_baseimage_backdooring.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, container_baseimage_backdooring.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, container_baseimage_backdooring.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, container_platform_escape.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+container_platform_escape.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, container_platform_escape.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) 
- pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, container_platform_escape.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(container_platform_escape.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, container_platform_escape.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, container_platform_escape.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+container_platform_escape.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, container_platform_escape.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, container_platform_escape.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(container_platform_escape.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 
0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, container_platform_escape.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, container_platform_escape.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, cross_site_request_forgery.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+cross_site_request_forgery.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, cross_site_request_forgery.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, cross_site_request_forgery.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(cross_site_request_forgery.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, cross_site_request_forgery.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, cross_site_request_forgery.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", 
fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+cross_site_request_forgery.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, cross_site_request_forgery.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, cross_site_request_forgery.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(cross_site_request_forgery.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, cross_site_request_forgery.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, cross_site_request_forgery.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, cross_site_scripting.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+cross_site_scripting.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, cross_site_scripting.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - 
pdfColorBlack() - pdf.MultiCell(160, 6, cross_site_scripting.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(cross_site_scripting.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, cross_site_scripting.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, cross_site_scripting.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+cross_site_scripting.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, cross_site_scripting.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, cross_site_scripting.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(cross_site_scripting.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, cross_site_scripting.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() 
+ r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, cross_site_scripting.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, dos_risky_access_across_trust_boundary.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+dos_risky_access_across_trust_boundary.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, dos_risky_access_across_trust_boundary.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, dos_risky_access_across_trust_boundary.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(dos_risky_access_across_trust_boundary.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, dos_risky_access_across_trust_boundary.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, dos_risky_access_across_trust_boundary.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+dos_risky_access_across_trust_boundary.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) 
+ r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, dos_risky_access_across_trust_boundary.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, dos_risky_access_across_trust_boundary.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(dos_risky_access_across_trust_boundary.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, dos_risky_access_across_trust_boundary.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, dos_risky_access_across_trust_boundary.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, incomplete_model.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+incomplete_model.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, incomplete_model.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, incomplete_model.Category().STRIDE.Title(), "0", "0", 
false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(incomplete_model.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, incomplete_model.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, incomplete_model.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+incomplete_model.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, incomplete_model.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, incomplete_model.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(incomplete_model.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, incomplete_model.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + 
r.pdfColorBlack() + r.pdf.MultiCell(160, 6, incomplete_model.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, ldap_injection.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+ldap_injection.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, ldap_injection.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, ldap_injection.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(ldap_injection.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, ldap_injection.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, ldap_injection.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+ldap_injection.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, ldap_injection.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", 
false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, ldap_injection.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(ldap_injection.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, ldap_injection.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, ldap_injection.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, missing_authentication.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+missing_authentication.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, missing_authentication.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_authentication.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(missing_authentication.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() 
- pdf.MultiCell(160, 6, missing_authentication.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_authentication.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+missing_authentication.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, missing_authentication.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_authentication.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(missing_authentication.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_authentication.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_authentication.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, missing_authentication_second_factor.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, 
skipped+missing_authentication_second_factor.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, missing_authentication_second_factor.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_authentication_second_factor.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(missing_authentication_second_factor.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_authentication_second_factor.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_authentication_second_factor.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+missing_authentication_second_factor.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, missing_authentication_second_factor.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_authentication_second_factor.Category().STRIDE.Title(), 
"0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(missing_authentication_second_factor.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_authentication_second_factor.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_authentication_second_factor.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, missing_build_infrastructure.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+missing_build_infrastructure.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, missing_build_infrastructure.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_build_infrastructure.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(missing_build_infrastructure.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - 
pdf.MultiCell(160, 6, missing_build_infrastructure.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_build_infrastructure.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+missing_build_infrastructure.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, missing_build_infrastructure.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_build_infrastructure.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(missing_build_infrastructure.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_build_infrastructure.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_build_infrastructure.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, missing_cloud_hardening.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, 
skipped+missing_cloud_hardening.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, missing_cloud_hardening.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_cloud_hardening.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(missing_cloud_hardening.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_cloud_hardening.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_cloud_hardening.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+missing_cloud_hardening.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, missing_cloud_hardening.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_cloud_hardening.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, 
"Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(missing_cloud_hardening.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_cloud_hardening.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_cloud_hardening.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, missing_file_validation.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+missing_file_validation.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, missing_file_validation.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_file_validation.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(missing_file_validation.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_file_validation.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, 
"Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_file_validation.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+missing_file_validation.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, missing_file_validation.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_file_validation.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(missing_file_validation.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_file_validation.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_file_validation.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, missing_hardening.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+missing_hardening.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, missing_hardening.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - 
pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_hardening.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(missing_hardening.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_hardening.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_hardening.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+missing_hardening.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, missing_hardening.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_hardening.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(missing_hardening.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 
0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_hardening.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_hardening.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, missing_identity_propagation.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+missing_identity_propagation.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, missing_identity_propagation.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_identity_propagation.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(missing_identity_propagation.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_identity_propagation.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_identity_propagation.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, 
skipped+missing_identity_propagation.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, missing_identity_propagation.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_identity_propagation.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(missing_identity_propagation.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_identity_propagation.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_identity_propagation.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, missing_identity_provider_isolation.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+missing_identity_provider_isolation.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, missing_identity_provider_isolation.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, 
"") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_identity_provider_isolation.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(missing_identity_provider_isolation.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_identity_provider_isolation.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_identity_provider_isolation.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+missing_identity_provider_isolation.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, missing_identity_provider_isolation.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_identity_provider_isolation.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(missing_identity_provider_isolation.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + 
r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_identity_provider_isolation.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_identity_provider_isolation.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, missing_identity_store.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+missing_identity_store.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, missing_identity_store.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_identity_store.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(missing_identity_store.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_identity_store.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_identity_store.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+missing_identity_store.Category().Title, "0", 0, 
"", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, missing_identity_store.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_identity_store.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(missing_identity_store.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_identity_store.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_identity_store.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, missing_network_segmentation.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+missing_network_segmentation.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, missing_network_segmentation.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_network_segmentation.Category().STRIDE.Title(), "0", "0", 
false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(missing_network_segmentation.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_network_segmentation.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_network_segmentation.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+missing_network_segmentation.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, missing_network_segmentation.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_network_segmentation.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(missing_network_segmentation.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_network_segmentation.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 
0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_network_segmentation.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, missing_vault.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+missing_vault.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, missing_vault.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_vault.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(missing_vault.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_vault.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_vault.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+missing_vault.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, missing_vault.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + 
r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_vault.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(missing_vault.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_vault.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_vault.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, missing_vault_isolation.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+missing_vault_isolation.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, missing_vault_isolation.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_vault_isolation.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(missing_vault_isolation.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 
0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_vault_isolation.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_vault_isolation.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+missing_vault_isolation.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, missing_vault_isolation.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_vault_isolation.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(missing_vault_isolation.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_vault_isolation.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_vault_isolation.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, missing_waf.Category().Id) { skipped = "SKIPPED - " } else { 
skipped = "" } - pdf.CellFormat(190, 3, skipped+missing_waf.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, missing_waf.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_waf.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(missing_waf.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_waf.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, missing_waf.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+missing_waf.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, missing_waf.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_waf.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + 
r.pdf.MultiCell(160, 6, firstParagraph(missing_waf.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_waf.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, missing_waf.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, mixed_targets_on_shared_runtime.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+mixed_targets_on_shared_runtime.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, mixed_targets_on_shared_runtime.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, mixed_targets_on_shared_runtime.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(mixed_targets_on_shared_runtime.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, mixed_targets_on_shared_runtime.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - 
pdfColorBlack() - pdf.MultiCell(160, 6, mixed_targets_on_shared_runtime.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+mixed_targets_on_shared_runtime.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, mixed_targets_on_shared_runtime.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, mixed_targets_on_shared_runtime.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(mixed_targets_on_shared_runtime.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, mixed_targets_on_shared_runtime.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, mixed_targets_on_shared_runtime.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, path_traversal.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+path_traversal.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, path_traversal.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - 
pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, path_traversal.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(path_traversal.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, path_traversal.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, path_traversal.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+path_traversal.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, path_traversal.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, path_traversal.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(path_traversal.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + 
r.pdfColorBlack() + r.pdf.MultiCell(160, 6, path_traversal.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, path_traversal.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, push_instead_of_pull_deployment.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+push_instead_of_pull_deployment.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, push_instead_of_pull_deployment.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, push_instead_of_pull_deployment.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(push_instead_of_pull_deployment.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, push_instead_of_pull_deployment.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, push_instead_of_pull_deployment.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, 
skipped+push_instead_of_pull_deployment.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, push_instead_of_pull_deployment.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, push_instead_of_pull_deployment.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(push_instead_of_pull_deployment.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, push_instead_of_pull_deployment.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, push_instead_of_pull_deployment.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, search_query_injection.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+search_query_injection.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, search_query_injection.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() 
- pdf.MultiCell(160, 6, search_query_injection.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(search_query_injection.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, search_query_injection.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, search_query_injection.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+search_query_injection.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, search_query_injection.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, search_query_injection.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(search_query_injection.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, search_query_injection.Category().DetectionLogic, "0", "0", false) + 
r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, search_query_injection.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, server_side_request_forgery.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+server_side_request_forgery.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, server_side_request_forgery.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, server_side_request_forgery.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(server_side_request_forgery.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, server_side_request_forgery.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, server_side_request_forgery.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+server_side_request_forgery.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + 
r.pdf.CellFormat(190, 6, server_side_request_forgery.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, server_side_request_forgery.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(server_side_request_forgery.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, server_side_request_forgery.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, server_side_request_forgery.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, service_registry_poisoning.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+service_registry_poisoning.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, service_registry_poisoning.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, service_registry_poisoning.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 
0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(service_registry_poisoning.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, service_registry_poisoning.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, service_registry_poisoning.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+service_registry_poisoning.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, service_registry_poisoning.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, service_registry_poisoning.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(service_registry_poisoning.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, service_registry_poisoning.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + 
r.pdfColorBlack() + r.pdf.MultiCell(160, 6, service_registry_poisoning.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, sql_nosql_injection.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+sql_nosql_injection.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, sql_nosql_injection.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, sql_nosql_injection.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(sql_nosql_injection.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, sql_nosql_injection.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, sql_nosql_injection.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+sql_nosql_injection.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, sql_nosql_injection.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 
0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, sql_nosql_injection.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(sql_nosql_injection.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, sql_nosql_injection.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, sql_nosql_injection.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, unchecked_deployment.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+unchecked_deployment.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, unchecked_deployment.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unchecked_deployment.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(unchecked_deployment.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - 
pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unchecked_deployment.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unchecked_deployment.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+unchecked_deployment.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, unchecked_deployment.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unchecked_deployment.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(unchecked_deployment.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unchecked_deployment.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unchecked_deployment.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, unencrypted_asset.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, 
skipped+unencrypted_asset.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, unencrypted_asset.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unencrypted_asset.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(unencrypted_asset.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unencrypted_asset.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unencrypted_asset.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+unencrypted_asset.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, unencrypted_asset.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unencrypted_asset.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + 
r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(unencrypted_asset.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unencrypted_asset.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unencrypted_asset.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, unencrypted_communication.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+unencrypted_communication.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, unencrypted_communication.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unencrypted_communication.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(unencrypted_communication.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unencrypted_communication.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - 
pdfColorBlack() - pdf.MultiCell(160, 6, unencrypted_communication.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+unencrypted_communication.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, unencrypted_communication.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unencrypted_communication.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(unencrypted_communication.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unencrypted_communication.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unencrypted_communication.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, unguarded_access_from_internet.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+unguarded_access_from_internet.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, unguarded_access_from_internet.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - 
pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unguarded_access_from_internet.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(unguarded_access_from_internet.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unguarded_access_from_internet.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unguarded_access_from_internet.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+unguarded_access_from_internet.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, unguarded_access_from_internet.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unguarded_access_from_internet.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(unguarded_access_from_internet.Category().Description), "0", "0", false) + 
r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unguarded_access_from_internet.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unguarded_access_from_internet.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, unguarded_direct_datastore_access.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+unguarded_direct_datastore_access.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, unguarded_direct_datastore_access.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unguarded_direct_datastore_access.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(unguarded_direct_datastore_access.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unguarded_direct_datastore_access.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, 
unguarded_direct_datastore_access.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+unguarded_direct_datastore_access.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, unguarded_direct_datastore_access.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unguarded_direct_datastore_access.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(unguarded_direct_datastore_access.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unguarded_direct_datastore_access.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unguarded_direct_datastore_access.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, unnecessary_communication_link.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+unnecessary_communication_link.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, unnecessary_communication_link.Category().Id, "0", 0, "", false, 0, "") - 
pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unnecessary_communication_link.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(unnecessary_communication_link.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unnecessary_communication_link.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unnecessary_communication_link.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+unnecessary_communication_link.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, unnecessary_communication_link.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unnecessary_communication_link.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(unnecessary_communication_link.Category().Description), "0", "0", false) + 
r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unnecessary_communication_link.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unnecessary_communication_link.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, unnecessary_data_asset.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+unnecessary_data_asset.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, unnecessary_data_asset.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unnecessary_data_asset.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(unnecessary_data_asset.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unnecessary_data_asset.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unnecessary_data_asset.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - 
pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+unnecessary_data_asset.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, unnecessary_data_asset.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unnecessary_data_asset.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(unnecessary_data_asset.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unnecessary_data_asset.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unnecessary_data_asset.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, unnecessary_data_transfer.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+unnecessary_data_transfer.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, unnecessary_data_transfer.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 
0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unnecessary_data_transfer.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(unnecessary_data_transfer.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unnecessary_data_transfer.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unnecessary_data_transfer.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+unnecessary_data_transfer.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, unnecessary_data_transfer.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unnecessary_data_transfer.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(unnecessary_data_transfer.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, 
unnecessary_data_transfer.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unnecessary_data_transfer.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, unnecessary_technical_asset.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+unnecessary_technical_asset.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, unnecessary_technical_asset.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unnecessary_technical_asset.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(unnecessary_technical_asset.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unnecessary_technical_asset.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, unnecessary_technical_asset.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+unnecessary_technical_asset.Category().Title, "0", 0, "", false, 0, "") + 
r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, unnecessary_technical_asset.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unnecessary_technical_asset.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(unnecessary_technical_asset.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unnecessary_technical_asset.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, unnecessary_technical_asset.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, untrusted_deserialization.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+untrusted_deserialization.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, untrusted_deserialization.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, untrusted_deserialization.Category().STRIDE.Title(), "0", "0", false) - 
pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(untrusted_deserialization.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, untrusted_deserialization.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, untrusted_deserialization.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+untrusted_deserialization.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, untrusted_deserialization.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, untrusted_deserialization.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(untrusted_deserialization.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, untrusted_deserialization.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + 
r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, untrusted_deserialization.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, wrong_communication_link_content.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+wrong_communication_link_content.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, wrong_communication_link_content.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, wrong_communication_link_content.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(wrong_communication_link_content.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, wrong_communication_link_content.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, wrong_communication_link_content.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+wrong_communication_link_content.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, 
wrong_communication_link_content.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, wrong_communication_link_content.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(wrong_communication_link_content.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, wrong_communication_link_content.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, wrong_communication_link_content.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, wrong_trust_boundary_content.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+wrong_trust_boundary_content.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, wrong_trust_boundary_content.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, wrong_trust_boundary_content.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, 
"", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(wrong_trust_boundary_content.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, wrong_trust_boundary_content.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, wrong_trust_boundary_content.Category().RiskAssessment, "0", "0", false) - - pdf.Ln(-1) - pdf.SetFont("Helvetica", "B", fontSizeBody) + r.pdf.CellFormat(190, 3, skipped+wrong_trust_boundary_content.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, wrong_trust_boundary_content.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, wrong_trust_boundary_content.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(wrong_trust_boundary_content.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, wrong_trust_boundary_content.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", 
"0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, wrong_trust_boundary_content.Category().RiskAssessment, "0", "0", false) + + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) if contains(skippedRules, xml_external_entity.Category().Id) { skipped = "SKIPPED - " } else { skipped = "" } - pdf.CellFormat(190, 3, skipped+xml_external_entity.Category().Title, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeSmall) - pdf.CellFormat(190, 6, xml_external_entity.Category().Id, "0", 0, "", false, 0, "") - pdf.Ln(-1) - pdf.SetFont("Helvetica", "", fontSizeBody) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, xml_external_entity.Category().STRIDE.Title(), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, firstParagraph(xml_external_entity.Category().Description), "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, xml_external_entity.Category().DetectionLogic, "0", "0", false) - pdfColorGray() - pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - pdfColorBlack() - pdf.MultiCell(160, 6, xml_external_entity.Category().RiskAssessment, "0", "0", false) + r.pdf.CellFormat(190, 3, skipped+xml_external_entity.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, xml_external_entity.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 
6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, xml_external_entity.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(xml_external_entity.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, xml_external_entity.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, xml_external_entity.Category().RiskAssessment, "0", "0", false) } -func createTargetDescription(parsedModel *types.ParsedModel, baseFolder string) { - uni := pdf.UnicodeTranslatorFromDescriptor("") - pdf.SetTextColor(0, 0, 0) +func (r *pdfReporter) createTargetDescription(parsedModel *types.ParsedModel, baseFolder string) error { + uni := r.pdf.UnicodeTranslatorFromDescriptor("") + r.pdf.SetTextColor(0, 0, 0) title := "Application Overview" - addHeadline(title, false) - defineLinkTarget("{target-overview}") - currentChapterTitleBreadcrumb = title + r.addHeadline(title, false) + r.defineLinkTarget("{target-overview}") + r.currentChapterTitleBreadcrumb = title var intro strings.Builder - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() intro.WriteString("Business Criticality

") intro.WriteString("The overall business criticality of \"" + uni(parsedModel.Title) + "\" was rated as:

") html.Write(5, intro.String()) criticality := parsedModel.BusinessCriticality intro.Reset() - pdfColorGray() + r.pdfColorGray() intro.WriteString("( ") if criticality == types.Archive { html.Write(5, intro.String()) intro.Reset() - pdfColorBlack() + r.pdfColorBlack() intro.WriteString("" + strings.ToUpper(types.Archive.String()) + "") html.Write(5, intro.String()) intro.Reset() - pdfColorGray() + r.pdfColorGray() } else { intro.WriteString(types.Archive.String()) } @@ -5575,11 +5605,11 @@ func createTargetDescription(parsedModel *types.ParsedModel, baseFolder string) if criticality == types.Operational { html.Write(5, intro.String()) intro.Reset() - pdfColorBlack() + r.pdfColorBlack() intro.WriteString("" + strings.ToUpper(types.Operational.String()) + "") html.Write(5, intro.String()) intro.Reset() - pdfColorGray() + r.pdfColorGray() } else { intro.WriteString(types.Operational.String()) } @@ -5587,11 +5617,11 @@ func createTargetDescription(parsedModel *types.ParsedModel, baseFolder string) if criticality == types.Important { html.Write(5, intro.String()) intro.Reset() - pdfColorBlack() + r.pdfColorBlack() intro.WriteString("" + strings.ToUpper(types.Important.String()) + "") html.Write(5, intro.String()) intro.Reset() - pdfColorGray() + r.pdfColorGray() } else { intro.WriteString(types.Important.String()) } @@ -5599,11 +5629,11 @@ func createTargetDescription(parsedModel *types.ParsedModel, baseFolder string) if criticality == types.Critical { html.Write(5, intro.String()) intro.Reset() - pdfColorBlack() + r.pdfColorBlack() intro.WriteString("" + strings.ToUpper(types.Critical.String()) + "") html.Write(5, intro.String()) intro.Reset() - pdfColorGray() + r.pdfColorGray() } else { intro.WriteString(types.Critical.String()) } @@ -5611,33 +5641,40 @@ func createTargetDescription(parsedModel *types.ParsedModel, baseFolder string) if criticality == types.MissionCritical { html.Write(5, intro.String()) intro.Reset() - pdfColorBlack() + r.pdfColorBlack() 
intro.WriteString("" + strings.ToUpper(types.MissionCritical.String()) + "") html.Write(5, intro.String()) intro.Reset() - pdfColorGray() + r.pdfColorGray() } else { intro.WriteString(types.MissionCritical.String()) } intro.WriteString(" )") html.Write(5, intro.String()) intro.Reset() - pdfColorBlack() + r.pdfColorBlack() intro.WriteString("


Business Overview

") intro.WriteString(uni(parsedModel.BusinessOverview.Description)) html.Write(5, intro.String()) intro.Reset() - addCustomImages(parsedModel.BusinessOverview.Images, baseFolder, html) + err := r.addCustomImages(parsedModel.BusinessOverview.Images, baseFolder, html) + if err != nil { + return fmt.Errorf("error adding custom images: %w", err) + } intro.WriteString("


Technical Overview

") intro.WriteString(uni(parsedModel.TechnicalOverview.Description)) html.Write(5, intro.String()) intro.Reset() - addCustomImages(parsedModel.TechnicalOverview.Images, baseFolder, html) + err = r.addCustomImages(parsedModel.TechnicalOverview.Images, baseFolder, html) + if err != nil { + return fmt.Errorf("error adding custom images: %w", err) + } + return nil } -func addCustomImages(customImages []map[string]string, baseFolder string, html gofpdf.HTMLBasicType) { +func (r *pdfReporter) addCustomImages(customImages []map[string]string, baseFolder string, html gofpdf.HTMLBasicType) error { var text strings.Builder for _, customImage := range customImages { for imageFilename := range customImage { @@ -5646,9 +5683,13 @@ func addCustomImages(customImages []map[string]string, baseFolder string, html g extension := strings.ToLower(filepath.Ext(imageFilenameWithoutPath)) if extension == ".jpeg" || extension == ".jpg" || extension == ".png" || extension == ".gif" { imageFullFilename := filepath.Join(baseFolder, imageFilenameWithoutPath) - if pdf.GetY()+getHeightWhenWidthIsFix(imageFullFilename, 180) > 250 { - pageBreak() - pdf.SetY(36) + heightWhenWidthIsFix, err := getHeightWhenWidthIsFix(imageFullFilename, 180) + if err != nil { + return fmt.Errorf("error getting height of image file: %w", err) + } + if r.pdf.GetY()+heightWhenWidthIsFix > 250 { + r.pageBreak() + r.pdf.SetY(36) } else { text.WriteString("

") } @@ -5658,13 +5699,14 @@ func addCustomImages(customImages []map[string]string, baseFolder string, html g var options gofpdf.ImageOptions options.ImageType = "" - pdf.RegisterImage(imageFullFilename, "") - pdf.ImageOptions(imageFullFilename, 15, pdf.GetY()+50, 170, 0, true, options, 0, "") + r.pdf.RegisterImage(imageFullFilename, "") + r.pdf.ImageOptions(imageFullFilename, 15, r.pdf.GetY()+50, 170, 0, true, options, 0, "") } else { log.Print("Ignoring custom image file: ", imageFilenameWithoutPath) } } } + return nil } // fileExists checks if a file exists and is not a directory before we @@ -5677,25 +5719,29 @@ func fileExists(filename string) bool { return !info.IsDir() } -func getHeightWhenWidthIsFix(imageFullFilename string, width float64) float64 { +func getHeightWhenWidthIsFix(imageFullFilename string, width float64) (float64, error) { if !fileExists(imageFullFilename) { - panic(errors.New("Image file does not exist (or is not readable as file): " + filepath.Base(imageFullFilename))) + return 0, fmt.Errorf("image file does not exist (or is not readable as file): %s", filepath.Base(imageFullFilename)) } /* #nosec imageFullFilename is not tainted (see caller restricting it to image files of model folder only) */ file, err := os.Open(imageFullFilename) defer func() { _ = file.Close() }() - checkErr(err) + if err != nil { + return 0, fmt.Errorf("error opening image file: %w", err) + } img, _, err := image.DecodeConfig(file) - checkErr(err) - return float64(img.Height) / (float64(img.Width) / width) + if err != nil { + return 0, fmt.Errorf("error decoding image file: %w", err) + } + return float64(img.Height) / (float64(img.Width) / width), nil } -func embedDataFlowDiagram(diagramFilenamePNG string, tempFolder string) { - pdf.SetTextColor(0, 0, 0) +func (r *pdfReporter) embedDataFlowDiagram(diagramFilenamePNG string, tempFolder string) { + r.pdf.SetTextColor(0, 0, 0) title := "Data-Flow Diagram" - addHeadline(title, false) - 
defineLinkTarget("{data-flow-diagram}") - currentChapterTitleBreadcrumb = title + r.addHeadline(title, false) + r.defineLinkTarget("{data-flow-diagram}") + r.currentChapterTitleBreadcrumb = title var intro strings.Builder intro.WriteString("The following diagram was generated by Threagile based on the model input and gives a high-level " + @@ -5703,7 +5749,7 @@ func embedDataFlowDiagram(diagramFilenamePNG string, tempFolder string) { "The RAA value is the calculated Relative Attacker Attractiveness in percent. " + "For a full high-resolution version of this diagram please refer to the PNG image file alongside this report.") - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() html.Write(5, intro.String()) // check to rotate the image if it is wider than high @@ -5715,7 +5761,7 @@ func embedDataFlowDiagram(diagramFilenamePNG string, tempFolder string) { // wider than high? muchWiderThanHigh := srcDimensions.Dx() > int(float64(srcDimensions.Dy())*1.25) // fresh page (eventually landscape)? 
- isLandscapePage = false + r.isLandscapePage = false _ = tempFolder /* pinnedWidth, pinnedHeight := 190.0, 210.0 @@ -5725,7 +5771,7 @@ func embedDataFlowDiagram(diagramFilenamePNG string, tempFolder string) { if allowedPdfLandscapePages { pinnedWidth = 275.0 isLandscapePage = true - pdf.AddPageFormat("L", pdf.GetPageSizeStr("A4")) + r.pdf.AddPageFormat("L", r.pdf.GetPageSizeStr("A4")) } else { // so rotate the image left by 90 degrees // ok, use temp PNG then @@ -5743,23 +5789,23 @@ func embedDataFlowDiagram(diagramFilenamePNG string, tempFolder string) { diagramFilenamePNG = rotatedFile.Name() } } else { - pdf.AddPage() + r.pdf.AddPage() } } else { - pdf.Ln(10) + r.pdf.Ln(10) }*/ // embed in PDF var options gofpdf.ImageOptions options.ImageType = "" - pdf.RegisterImage(diagramFilenamePNG, "") + r.pdf.RegisterImage(diagramFilenamePNG, "") var maxWidth, maxHeight, newWidth int var embedWidth, embedHeight float64 if allowedPdfLandscapePages && muchWiderThanHigh { maxWidth, maxHeight = 275, 150 - isLandscapePage = true - pdf.AddPageFormat("L", pdf.GetPageSizeStr("A4")) + r.isLandscapePage = true + r.pdf.AddPageFormat("L", r.pdf.GetPageSizeStr("A4")) } else { - pdf.Ln(10) + r.pdf.Ln(10) maxWidth, maxHeight = 190, 200 // reduced height as a text paragraph is above } newWidth = srcDimensions.Dx() / (srcDimensions.Dy() / maxHeight) @@ -5768,13 +5814,13 @@ func embedDataFlowDiagram(diagramFilenamePNG string, tempFolder string) { } else { embedWidth, embedHeight = float64(maxWidth), 0 } - pdf.ImageOptions(diagramFilenamePNG, 10, pdf.GetY(), embedWidth, embedHeight, true, options, 0, "") - isLandscapePage = false + r.pdf.ImageOptions(diagramFilenamePNG, 10, r.pdf.GetY(), embedWidth, embedHeight, true, options, 0, "") + r.isLandscapePage = false // add diagram legend page if embedDiagramLegendPage { - pdf.AddPage() - gofpdi.UseImportedTemplate(pdf, diagramLegendTemplateId, 0, 0, 0, 300) + r.pdf.AddPage() + gofpdi.UseImportedTemplate(r.pdf, r.diagramLegendTemplateId, 0, 0, 0, 
300) } } @@ -5787,12 +5833,12 @@ func sortedKeysOfIndividualRiskCategories(parsedModel *types.ParsedModel) []stri return keys } -func embedDataRiskMapping(diagramFilenamePNG string, tempFolder string) { - pdf.SetTextColor(0, 0, 0) +func (r *pdfReporter) embedDataRiskMapping(diagramFilenamePNG string, tempFolder string) { + r.pdf.SetTextColor(0, 0, 0) title := "Data Mapping" - addHeadline(title, false) - defineLinkTarget("{data-risk-mapping}") - currentChapterTitleBreadcrumb = title + r.addHeadline(title, false) + r.defineLinkTarget("{data-risk-mapping}") + r.currentChapterTitleBreadcrumb = title var intro strings.Builder intro.WriteString("The following diagram was generated by Threagile based on the model input and gives a high-level " + @@ -5802,7 +5848,7 @@ func embedDataRiskMapping(diagramFilenamePNG string, tempFolder string) { "data is processed by the asset. For a full high-resolution version of this diagram please refer to the PNG image " + "file alongside this report.") - html := pdf.HTMLBasicNew() + html := r.pdf.HTMLBasicNew() html.Write(5, intro.String()) // TODO dedupe with code from other diagram embedding (almost same code) @@ -5816,7 +5862,7 @@ func embedDataRiskMapping(diagramFilenamePNG string, tempFolder string) { widerThanHigh := srcDimensions.Dx() > srcDimensions.Dy() pinnedWidth, pinnedHeight := 190.0, 195.0 // fresh page (eventually landscape)? 
- isLandscapePage = false + r.isLandscapePage = false _ = tempFolder /* if dataFlowDiagramFullscreen { @@ -5825,7 +5871,7 @@ func embedDataRiskMapping(diagramFilenamePNG string, tempFolder string) { if allowedPdfLandscapePages { pinnedWidth = 275.0 isLandscapePage = true - pdf.AddPageFormat("L", pdf.GetPageSizeStr("A4")) + r.pdf.AddPageFormat("L", r.pdf.GetPageSizeStr("A4")) } else { // so rotate the image left by 90 degrees // ok, use temp PNG then @@ -5843,144 +5889,139 @@ func embedDataRiskMapping(diagramFilenamePNG string, tempFolder string) { diagramFilenamePNG = rotatedFile.Name() } } else { - pdf.AddPage() + r.pdf.AddPage() } } else { - pdf.Ln(10) + r.pdf.Ln(10) } */ // embed in PDF - pdf.Ln(10) + r.pdf.Ln(10) var options gofpdf.ImageOptions options.ImageType = "" - pdf.RegisterImage(diagramFilenamePNG, "") + r.pdf.RegisterImage(diagramFilenamePNG, "") if widerThanHigh { pinnedHeight = 0 } else { pinnedWidth = 0 } - pdf.ImageOptions(diagramFilenamePNG, 10, pdf.GetY(), pinnedWidth, pinnedHeight, true, options, 0, "") - isLandscapePage = false + r.pdf.ImageOptions(diagramFilenamePNG, 10, r.pdf.GetY(), pinnedWidth, pinnedHeight, true, options, 0, "") + r.isLandscapePage = false } -func writeReportToFile(reportFilename string) { - err := pdf.OutputFileAndClose(reportFilename) - checkErr(err) +func (r *pdfReporter) writeReportToFile(reportFilename string) error { + err := r.pdf.OutputFileAndClose(reportFilename) + if err != nil { + return fmt.Errorf("error writing PDF report file: %w", err) + } + return nil } -func addHeadline(headline string, small bool) { - pdf.AddPage() - gofpdi.UseImportedTemplate(pdf, contentTemplateId, 0, 0, 0, 300) +func (r *pdfReporter) addHeadline(headline string, small bool) { + r.pdf.AddPage() + gofpdi.UseImportedTemplate(r.pdf, r.contentTemplateId, 0, 0, 0, 300) fontSize := fontSizeHeadline if small { fontSize = fontSizeHeadlineSmall } - pdf.SetFont("Helvetica", "B", float64(fontSize)) - pdf.Text(11, 40, headline) - 
pdf.SetFont("Helvetica", "", fontSizeBody) - pdf.SetX(17) - pdf.SetY(46) + r.pdf.SetFont("Helvetica", "B", float64(fontSize)) + r.pdf.Text(11, 40, headline) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdf.SetX(17) + r.pdf.SetY(46) } -func pageBreak() { - pdf.SetDrawColor(0, 0, 0) - pdf.SetDashPattern([]float64{}, 0) - pdf.AddPage() - gofpdi.UseImportedTemplate(pdf, contentTemplateId, 0, 0, 0, 300) - pdf.SetX(17) - pdf.SetY(20) +func (r *pdfReporter) pageBreak() { + r.pdf.SetDrawColor(0, 0, 0) + r.pdf.SetDashPattern([]float64{}, 0) + r.pdf.AddPage() + gofpdi.UseImportedTemplate(r.pdf, r.contentTemplateId, 0, 0, 0, 300) + r.pdf.SetX(17) + r.pdf.SetY(20) } -func pageBreakInLists() { - pageBreak() - pdf.SetLineWidth(0.25) - pdf.SetDrawColor(160, 160, 160) - pdf.SetDashPattern([]float64{0.5, 0.5}, 0) +func (r *pdfReporter) pageBreakInLists() { + r.pageBreak() + r.pdf.SetLineWidth(0.25) + r.pdf.SetDrawColor(160, 160, 160) + r.pdf.SetDashPattern([]float64{0.5, 0.5}, 0) } -func pdfColorDataAssets() { - pdf.SetTextColor(18, 36, 111) +func (r *pdfReporter) pdfColorDataAssets() { + r.pdf.SetTextColor(18, 36, 111) } func rgbHexColorDataAssets() string { return "#12246F" } -func pdfColorTechnicalAssets() { - pdf.SetTextColor(18, 36, 111) +func (r *pdfReporter) pdfColorTechnicalAssets() { + r.pdf.SetTextColor(18, 36, 111) } func rgbHexColorTechnicalAssets() string { return "#12246F" } -func pdfColorTrustBoundaries() { - pdf.SetTextColor(18, 36, 111) +func (r *pdfReporter) pdfColorTrustBoundaries() { + r.pdf.SetTextColor(18, 36, 111) } func rgbHexColorTrustBoundaries() string { return "#12246F" } -func pdfColorSharedRuntime() { - pdf.SetTextColor(18, 36, 111) +func (r *pdfReporter) pdfColorSharedRuntime() { + r.pdf.SetTextColor(18, 36, 111) } func rgbHexColorSharedRuntime() string { return "#12246F" } -func pdfColorRiskFindings() { - pdf.SetTextColor(160, 40, 30) +func (r *pdfReporter) pdfColorRiskFindings() { + r.pdf.SetTextColor(160, 40, 30) } func 
rgbHexColorRiskFindings() string { return "#A0281E" } -func pdfColorDisclaimer() { - pdf.SetTextColor(140, 140, 140) +func (r *pdfReporter) pdfColorDisclaimer() { + r.pdf.SetTextColor(140, 140, 140) } func rgbHexColorDisclaimer() string { return "#8C8C8C" } -func pdfColorOutOfScope() { - pdf.SetTextColor(127, 127, 127) +func (r *pdfReporter) pdfColorOutOfScope() { + r.pdf.SetTextColor(127, 127, 127) } + func rgbHexColorOutOfScope() string { return "#7F7F7F" } -func pdfColorGray() { - pdf.SetTextColor(80, 80, 80) +func (r *pdfReporter) pdfColorGray() { + r.pdf.SetTextColor(80, 80, 80) } func rgbHexColorGray() string { return "#505050" } -func pdfColorLightGray() { - pdf.SetTextColor(100, 100, 100) +func (r *pdfReporter) pdfColorLightGray() { + r.pdf.SetTextColor(100, 100, 100) } func rgbHexColorLightGray() string { return "#646464" } -func pdfColorBlack() { - pdf.SetTextColor(0, 0, 0) +func (r *pdfReporter) pdfColorBlack() { + r.pdf.SetTextColor(0, 0, 0) } func rgbHexColorBlack() string { return "#000000" } -func pdfColorRed() { - pdf.SetTextColor(255, 0, 0) +func (r *pdfReporter) pdfColorRed() { + r.pdf.SetTextColor(255, 0, 0) } func rgbHexColorRed() string { return "#FF0000" } - -func contains(a []string, x string) bool { - for _, n := range a { - if x == n { - return true - } - } - return false -} From 6cdea6dc419499e569797bf8f4f769cf7e913a34 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Fri, 5 Jan 2024 17:37:03 +0000 Subject: [PATCH 41/68] Remove accidentally uploaded file --- cmd/threagile/threagile.yaml | 1354 ---------------------------------- 1 file changed, 1354 deletions(-) delete mode 100644 cmd/threagile/threagile.yaml diff --git a/cmd/threagile/threagile.yaml b/cmd/threagile/threagile.yaml deleted file mode 100644 index 77815f77..00000000 --- a/cmd/threagile/threagile.yaml +++ /dev/null @@ -1,1354 +0,0 @@ -threagile_version: 1.0.0 - -# NOTE: -# -# For a perfect editing experience within your IDE of choice you can easily -# get model syntax 
validation and autocompletion (very handy for enum values) -# as well as live templates: Just import the schema.json into your IDE and assign -# it as "schema" to each Threagile YAML file. Also try to import individual parts -# from the live-templates.txt file into your IDE as live editing templates. -# -# You might also want to try the REST API when running in server mode... - - - -title: Some Example Application - -date: 2020-07-01 - -author: - name: John Doe - homepage: www.example.com - - - - -management_summary_comment: > - Just some more custom summary possible here... - -business_criticality: important # values: archive, operational, important, critical, mission-critical - - - - -business_overview: - description: Some more demo text here and even images... - images: -# - custom-image-1.png: Some dummy image 1 -# - custom-image-2.png: Some dummy image 2 - - -technical_overview: - description: Some more demo text here and even images... - images: -# - custom-image-1.png: Some dummy image 1 -# - custom-image-2.png: Some dummy image 2 - - - -questions: # simply use "" as answer to signal "unanswered" - How are the admin clients managed/protected against compromise?: "" - How are the development clients managed/protected against compromise?: > - Managed by XYZ - How are the build pipeline components managed/protected against compromise?: > - Managed by XYZ - - - -abuse_cases: - Denial-of-Service: > - As a hacker I want to disturb the functionality of the backend system in order to cause indirect - financial damage via unusable features. - CPU-Cycle Theft: > - As a hacker I want to steal CPU cycles in order to transform them into money via installed crypto currency miners. - Ransomware: > - As a hacker I want to encrypt the storage and file systems in order to demand ransom. - Identity Theft: > - As a hacker I want to steal identity data in order to reuse credentials and/or keys on other targets of the same company or outside. 
- PII Theft: > - As a hacker I want to steal PII (Personally Identifiable Information) data in order to blackmail the company and/or damage - their repudiation by publishing them. - - ERP-System Compromise: > - As a hacker I want to access the ERP-System in order to steal/modify sensitive business data. - Database Compromise: > - As a hacker I want to access the database backend of the ERP-System in order to steal/modify sensitive - business data. - Contract Filesystem Compromise: > - As a hacker I want to access the filesystem storing the contract PDFs in order to steal/modify contract data. - Cross-Site Scripting Attacks: > - As a hacker I want to execute Cross-Site Scripting (XSS) and similar attacks in order to takeover victim sessions and - cause reputational damage. - Denial-of-Service of Enduser Functionality: > - As a hacker I want to disturb the functionality of the enduser parts of the application in order to cause direct financial - damage (lower sales). - Denial-of-Service of ERP/DB Functionality: > - As a hacker I want to disturb the functionality of the ERP system and/or it's database in order to cause indirect - financial damage via unusable internal ERP features (not related to customer portal). - - -security_requirements: - Input Validation: Strict input validation is required to reduce the overall attack surface. - Securing Administrative Access: Administrative access must be secured with strong encryption and multi-factor authentication. - EU-DSGVO: Mandatory EU-Datenschutzgrundverordnung - - -# Tags can be used for anything, it's just a tag. Also risk rules can act based on tags if you like. 
-# Tags can be used for example to name the products used (which is more concrete than the technology types that only specify the type) -tags_available: - - linux - - apache - - mysql - - jboss - - keycloak - - jenkins - - git - - oracle - - some-erp - - vmware - - aws - - aws:ec2 - - aws:s3 - - - - -data_assets: - - - Customer Contracts: &customer-contracts # this example shows the inheritance-like features of YAML - id: customer-contracts - description: Customer Contracts (PDF) - usage: business # values: business, devops - tags: - origin: Customer - owner: Company XYZ - quantity: many # values: very-few, few, many, very-many - confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential - integrity: critical # values: archive, operational, important, critical, mission-critical - availability: operational # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - Contract data might contain financial data as well as personally identifiable information (PII). The integrity and - availability of contract data is required for clearing payment disputes. - - - Customer Contract Summaries: - <<: *customer-contracts # here we're referencing the above created asset as base and just overwrite few values - id: contract-summaries - description: Customer Contract Summaries - quantity: very-few # values: very-few, few, many, very-many - confidentiality: restricted # values: public, internal, restricted, confidential, strictly-confidential - integrity: operational # values: archive, operational, important, critical, mission-critical - availability: operational # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - Just some summaries. 
- - - Customer Operational Data: - <<: *customer-contracts # here we're referencing the above created asset as base and just overwrite few values - id: customer-operational-data - description: Customer Operational Data - availability: critical # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - Customer operational data for using the portal are required to be available to offer the portal functionality - and are used in the backend transactions. - - - Customer Accounts: - <<: *customer-contracts # here we're referencing the above created asset as base and just overwrite few values - id: customer-accounts - description: Customer Accounts (including transient credentials when entered for checking them) - confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential - availability: critical # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - Customer account data for using the portal are required to be available to offer the portal functionality. - - - Some Internal Business Data: - id: internal-business-data - description: Internal business data of the ERP system used unrelated to the customer-facing processes. - usage: business # values: business, devops - tags: - origin: Company XYZ - owner: Company XYZ - quantity: few # values: very-few, few, many, very-many - confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential - integrity: critical # values: archive, operational, important, critical, mission-critical - availability: critical # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - Data used and/or generated during unrelated other usecases of the ERP-system (when used also by Company XYZ for - internal non-customer-portal-related stuff). 
- - - Client Application Code: &client-application-code # this example shows the inheritance-like features of YAML - id: client-application-code - description: Angular and other client-side code delivered by the application. - usage: devops # values: business, devops - tags: - origin: Company ABC - owner: Company ABC - quantity: very-few # values: very-few, few, many, very-many - confidentiality: public # values: public, internal, restricted, confidential, strictly-confidential - integrity: critical # values: archive, operational, important, critical, mission-critical - availability: important # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - The integrity of the public data is critical to avoid reputational damage and the availability is important on the - long-term scale (but not critical) to keep the growth rate of the customer base steady. - - - Server Application Code: - <<: *client-application-code # here we're referencing the above created asset as base and just overwrite few values - id: server-application-code - description: API and other server-side code of the application. - confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential - integrity: mission-critical # values: archive, operational, important, critical, mission-critical - availability: important # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - The integrity of the API code is critical to avoid reputational damage and the availability is important on the - long-term scale (but not critical) to keep the growth rate of the customer base steady. - - - Build Job Config: - id: build-job-config - description: Data for customizing of the build job system. 
- usage: devops # values: business, devops - tags: - origin: Company XYZ - owner: Company XYZ - quantity: very-few # values: very-few, few, many, very-many - confidentiality: restricted # values: public, internal, restricted, confidential, strictly-confidential - integrity: critical # values: archive, operational, important, critical, mission-critical - availability: operational # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - Data for customizing of the build job system. - - - Marketing Material: - <<: *client-application-code # here we're referencing the above created asset as base and just overwrite few values - id: marketing-material - description: Website and marketing data to inform potential customers and generate new leads. - integrity: important # values: archive, operational, important, critical, mission-critical - - - ERP Logs: - id: erp-logs - description: Logs generated by the ERP system. - usage: devops # values: business, devops - tags: - origin: Company XYZ - owner: Company XYZ - quantity: many # values: very-few, few, many, very-many - confidentiality: restricted # values: public, internal, restricted, confidential, strictly-confidential - integrity: archive # values: archive, operational, important, critical, mission-critical - availability: archive # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - Logs should not contain PII data and are only required for failure analysis, i.e. they are not considered as hard - transactional logs. - - - ERP Customizing Data: - id: erp-customizing - description: Data for customizing of the ERP system. 
- usage: devops # values: business, devops - tags: - origin: Company XYZ - owner: Company XYZ - quantity: very-few # values: very-few, few, many, very-many - confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential - integrity: critical # values: archive, operational, important, critical, mission-critical - availability: critical # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - Data for customizing of the ERP system. - - - Database Customizing and Dumps: - id: db-dumps - description: Data for customizing of the DB system, which might include full database dumps. - usage: devops # values: business, devops - tags: - - oracle - origin: Company XYZ - owner: Company XYZ - quantity: very-few # values: very-few, few, many, very-many - confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential - integrity: critical # values: archive, operational, important, critical, mission-critical - availability: critical # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - Data for customizing of the DB system, which might include full database dumps. 
- - - - - - -technical_assets: - - - Customer Web Client: - id: customer-client - description: Customer Web Client - type: external-entity # values: external-entity, process, datastore - usage: business # values: business, devops - used_as_client_by_human: true - out_of_scope: true - justification_out_of_scope: Owned and managed by enduser customer - size: component # values: system, service, application, component - technology: browser # values: see help - tags: - internet: true - machine: physical # values: physical, virtual, container, serverless - encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key - owner: Customer - confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential - integrity: operational # values: archive, operational, important, critical, mission-critical - availability: operational # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - The client used by the customer to access the system. 
- multi_tenant: false - redundant: false - custom_developed_parts: false - data_assets_processed: # sequence of IDs to reference - - customer-accounts - - customer-operational-data - - customer-contracts - - client-application-code - - marketing-material - data_assets_stored: # sequence of IDs to reference - data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv - communication_links: - Customer Traffic: - target: load-balancer - description: Link to the load balancer - protocol: https # values: see help - authentication: session-id # values: none, credentials, session-id, token, client-certificate, two-factor - authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation - tags: - vpn: false - ip_filtered: false - readonly: false - usage: business # values: business, devops - data_assets_sent: # sequence of IDs to reference - - customer-accounts - - customer-operational-data - data_assets_received: # sequence of IDs to reference - - customer-accounts - - customer-operational-data - - customer-contracts - - client-application-code - - marketing-material - #diagram_tweak_weight: 1 - #diagram_tweak_constraint: false - - - Backoffice Client: - id: backoffice-client - #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) - description: Backoffice client - type: external-entity # values: external-entity, process, datastore - usage: business # values: business, devops - used_as_client_by_human: true - out_of_scope: true - justification_out_of_scope: Owned and managed by Company XYZ company - size: component # values: system, service, application, component - technology: desktop # values: see help - tags: - internet: false - machine: physical # values: physical, virtual, container, serverless - encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key - owner: Company XYZ - 
confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential - integrity: important # values: archive, operational, important, critical, mission-critical - availability: important # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - The client used by Company XYZ to administer and use the system. - multi_tenant: false - redundant: false - custom_developed_parts: false - data_assets_processed: # sequence of IDs to reference - - customer-contracts - - internal-business-data - - erp-logs - data_assets_stored: # sequence of IDs to reference - data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv - communication_links: - ERP Internal Access: - target: erp-system - description: Link to the ERP system - protocol: https # values: see help - authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor - authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation - tags: - - some-erp - vpn: true - ip_filtered: false - readonly: false - usage: business # values: business, devops - data_assets_sent: # sequence of IDs to reference - - internal-business-data - data_assets_received: # sequence of IDs to reference - - customer-contracts - - internal-business-data - #diagram_tweak_weight: 1 - #diagram_tweak_constraint: false - Marketing CMS Editing: - target: marketing-cms - description: Link to the CMS for editing content - protocol: https # values: see help - authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor - authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation - tags: - vpn: true - ip_filtered: false - readonly: false - usage: business # values: business, devops - data_assets_sent: # sequence of IDs to reference - - marketing-material - data_assets_received: # sequence 
of IDs to reference - - marketing-material - #diagram_tweak_weight: 1 - #diagram_tweak_constraint: false - - - Backend Admin Client: - id: backend-admin-client - #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) - description: Backend admin client - type: external-entity # values: external-entity, process, datastore - usage: devops # values: business, devops - used_as_client_by_human: true - out_of_scope: true - justification_out_of_scope: Owned and managed by ops provider - size: component # values: system, service, application, component - technology: browser # values: see help - tags: - internet: false - machine: physical # values: physical, virtual, container, serverless - encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key - owner: Company XYZ - confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential - integrity: operational # values: archive, operational, important, critical, mission-critical - availability: operational # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - The client used by Company XYZ to administer the system. 
- multi_tenant: false - redundant: false - custom_developed_parts: false - data_assets_processed: # sequence of IDs to reference - - erp-logs - data_assets_stored: # sequence of IDs to reference - data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv - communication_links: - ERP Web Access: - target: erp-system - description: Link to the ERP system (Web) - protocol: https # values: see help - authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor - authorization: technical-user # values: none, technical-user, enduser-identity-propagation - tags: - vpn: false - ip_filtered: false - readonly: false - usage: devops # values: business, devops - data_assets_sent: # sequence of IDs to reference - - erp-customizing - data_assets_received: # sequence of IDs to reference - - erp-logs - #diagram_tweak_weight: 1 - #diagram_tweak_constraint: false - DB Update Access: - target: sql-database - description: Link to the database (JDBC tunneled via SSH) - protocol: ssh # values: see help - authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor - authorization: technical-user # values: none, technical-user, enduser-identity-propagation - tags: - vpn: false - ip_filtered: false - readonly: false - usage: devops # values: business, devops - data_assets_sent: # sequence of IDs to reference - - db-dumps - data_assets_received: # sequence of IDs to reference - - db-dumps - - erp-logs - - customer-accounts - - customer-operational-data - #diagram_tweak_weight: 1 - #diagram_tweak_constraint: false - User Management Access: - target: ldap-auth-server - description: Link to the LDAP auth server for managing users - protocol: ldaps # values: see help - authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor - authorization: technical-user # values: none, technical-user, enduser-identity-propagation - tags: 
- vpn: false - ip_filtered: false - readonly: false - usage: devops # values: business, devops - data_assets_sent: # sequence of IDs to reference - - customer-accounts - data_assets_received: # sequence of IDs to reference - - customer-accounts - #diagram_tweak_weight: 1 - #diagram_tweak_constraint: false - - - Load Balancer: - id: load-balancer - #diagram_tweak_order: 50 # affects left to right positioning (only within a trust boundary) - description: Load Balancer (HA-Proxy) - type: process # values: external-entity, process, datastore - usage: business # values: business, devops - used_as_client_by_human: false - out_of_scope: false - justification_out_of_scope: - size: component # values: system, service, application, component - technology: load-balancer # values: see help - tags: - internet: false - machine: physical # values: physical, virtual, container, serverless - encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key - owner: Company ABC - confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential - integrity: mission-critical # values: archive, operational, important, critical, mission-critical - availability: mission-critical # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - The correct configuration and reachability of the load balancer is mandatory for all customer and Company XYZ - usages of the portal and ERP system. 
- multi_tenant: false - redundant: false - custom_developed_parts: false - data_assets_processed: # sequence of IDs to reference - - customer-accounts - - customer-operational-data - - customer-contracts - - internal-business-data - - client-application-code - - marketing-material - data_assets_stored: # sequence of IDs to reference - data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv - communication_links: - Web Application Traffic: - target: apache-webserver - description: Link to the web server - protocol: http # values: see help - authentication: session-id # values: none, credentials, session-id, token, client-certificate, two-factor - authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation - tags: - vpn: false - ip_filtered: false - readonly: false - usage: business # values: business, devops - data_assets_sent: # sequence of IDs to reference - - customer-accounts - - customer-operational-data - data_assets_received: # sequence of IDs to reference - - customer-accounts - - customer-operational-data - - customer-contracts - - client-application-code - #diagram_tweak_weight: 5 - #diagram_tweak_constraint: false - CMS Content Traffic: - target: marketing-cms - description: Link to the CMS server - protocol: http # values: see help - authentication: none # values: none, credentials, session-id, token, client-certificate, two-factor - authorization: none # values: none, technical-user, enduser-identity-propagation - tags: - vpn: false - ip_filtered: false - readonly: true - usage: business # values: business, devops - data_assets_sent: # sequence of IDs to reference - data_assets_received: # sequence of IDs to reference - - marketing-material - #diagram_tweak_weight: 5 - #diagram_tweak_constraint: false - - - Apache Webserver: - id: apache-webserver - #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) - description: Apache Webserver hosting 
the API code and client-side code - type: process # values: external-entity, process, datastore - usage: business # values: business, devops - used_as_client_by_human: false - out_of_scope: false - justification_out_of_scope: - size: application # values: system, service, application, component - technology: web-server # values: see help - tags: - - linux - - apache - - aws:ec2 - internet: false - machine: container # values: physical, virtual, container, serverless - encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key - owner: Company ABC - confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential - integrity: critical # values: archive, operational, important, critical, mission-critical - availability: critical # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - The correct configuration and reachability of the web server is mandatory for all customer usages of the portal. 
- multi_tenant: false - redundant: false - custom_developed_parts: true - data_assets_processed: # sequence of IDs to reference - - customer-accounts - - customer-operational-data - - customer-contracts - - internal-business-data - - client-application-code - - server-application-code - data_assets_stored: # sequence of IDs to reference - - client-application-code - - server-application-code - data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv - - json - - file - communication_links: - ERP System Traffic: - target: erp-system - description: Link to the ERP system - protocol: https # values: see help - authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor - authorization: technical-user # values: none, technical-user, enduser-identity-propagation - tags: - vpn: false - ip_filtered: false - readonly: false - usage: business # values: business, devops - data_assets_sent: # sequence of IDs to reference - - customer-accounts - - customer-operational-data - - internal-business-data - data_assets_received: # sequence of IDs to reference - - customer-accounts - - customer-operational-data - - customer-contracts - - internal-business-data - #diagram_tweak_weight: 5 - #diagram_tweak_constraint: false - Auth Credential Check Traffic: - target: identity-provider - description: Link to the identity provider server - protocol: https # values: see help - authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor - authorization: technical-user # values: none, technical-user, enduser-identity-propagation - tags: - vpn: false - ip_filtered: false - readonly: false - usage: business # values: business, devops - data_assets_sent: # sequence of IDs to reference - - customer-accounts - data_assets_received: # sequence of IDs to reference - - - Identity Provider: - id: identity-provider - #diagram_tweak_order: 0 # affects left to right positioning (only 
within a trust boundary) - description: Identity provider server - type: process # values: external-entity, process, datastore - usage: business # values: business, devops - used_as_client_by_human: false - out_of_scope: false - justification_out_of_scope: - size: component # values: system, service, application, component - technology: identity-provider # values: see help - tags: - - linux - - jboss - - keycloak - internet: false - machine: virtual # values: physical, virtual, container, serverless - encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key - owner: Company ABC - confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential - integrity: critical # values: archive, operational, important, critical, mission-critical - availability: critical # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - The auth data of the application - multi_tenant: false - redundant: false - custom_developed_parts: false - data_assets_processed: # sequence of IDs to reference - - customer-accounts - data_assets_stored: # sequence of IDs to reference - data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv - communication_links: - LDAP Credential Check Traffic: - target: ldap-auth-server - description: Link to the LDAP server - protocol: ldaps # values: see help - authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor - authorization: technical-user # values: none, technical-user, enduser-identity-propagation - tags: - vpn: false - ip_filtered: false - readonly: false - usage: business # values: business, devops - data_assets_sent: # sequence of IDs to reference - - customer-accounts - data_assets_received: # sequence of IDs to reference - - - LDAP Auth Server: - id: ldap-auth-server - #diagram_tweak_order: 0 # 
affects left to right positioning (only within a trust boundary) - description: LDAP authentication server - type: datastore # values: external-entity, process, datastore - usage: business # values: business, devops - used_as_client_by_human: false - out_of_scope: false - justification_out_of_scope: - size: component # values: system, service, application, component - technology: identity-store-ldap # values: see help - tags: - - linux - internet: false - machine: physical # values: physical, virtual, container, serverless - encryption: transparent # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key - owner: Company ABC - confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential - integrity: critical # values: archive, operational, important, critical, mission-critical - availability: critical # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - The auth data of the application - multi_tenant: false - redundant: false - custom_developed_parts: false - data_assets_processed: # sequence of IDs to reference - - customer-accounts - data_assets_stored: # sequence of IDs to reference - - customer-accounts - data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv - communication_links: - - - Marketing CMS: - id: marketing-cms - #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) - description: CMS for the marketing content - type: process # values: external-entity, process, datastore - usage: business # values: business, devops - used_as_client_by_human: false - out_of_scope: false - justification_out_of_scope: - size: application # values: system, service, application, component - technology: cms # values: see help - tags: - - linux - internet: false - machine: container # values: physical, virtual, container, serverless - 
encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key - owner: Company ABC - confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential - integrity: important # values: archive, operational, important, critical, mission-critical - availability: important # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - The correct configuration and reachability of the web server is mandatory for all customer usages of the portal. - multi_tenant: false - redundant: false - custom_developed_parts: true - data_assets_processed: # sequence of IDs to reference - - marketing-material - - customer-accounts - data_assets_stored: # sequence of IDs to reference - - marketing-material - data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv - communication_links: - Auth Traffic: - target: ldap-auth-server - description: Link to the LDAP auth server - protocol: ldap # values: see help - authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor - authorization: technical-user # values: none, technical-user, enduser-identity-propagation - tags: - vpn: false - ip_filtered: false - readonly: true - usage: business # values: business, devops - data_assets_sent: # sequence of IDs to reference - - customer-accounts - data_assets_received: # sequence of IDs to reference - - customer-accounts - #diagram_tweak_weight: 5 - #diagram_tweak_constraint: false - - - Backoffice ERP System: - id: erp-system - #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) - description: ERP system - type: process # values: external-entity, process, datastore - usage: business # values: business, devops - used_as_client_by_human: false - out_of_scope: false - justification_out_of_scope: - size: system # values: system, 
service, application, component - technology: erp # values: see help - tags: - - linux - internet: false - machine: virtual # values: physical, virtual, container, serverless - encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key - owner: Company ABC - confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential - integrity: mission-critical # values: archive, operational, important, critical, mission-critical - availability: mission-critical # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - The ERP system contains business-relevant sensitive data for the leasing processes and eventually also for other - Company XYZ internal processes. - multi_tenant: false - redundant: true - custom_developed_parts: false - data_assets_processed: # sequence of IDs to reference - - customer-accounts - - customer-operational-data - - customer-contracts - - internal-business-data - - erp-customizing - data_assets_stored: # sequence of IDs to reference - - erp-logs - data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv - - xml - - file - - serialization - communication_links: - Database Traffic: - target: sql-database - description: Link to the DB system - protocol: jdbc # values: see help - authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor - authorization: technical-user # values: none, technical-user, enduser-identity-propagation - tags: - vpn: false - ip_filtered: false - readonly: false - usage: business # values: business, devops - data_assets_sent: # sequence of IDs to reference - - customer-accounts - - customer-operational-data - - internal-business-data - data_assets_received: # sequence of IDs to reference - - customer-accounts - - customer-operational-data - - internal-business-data - 
#diagram_tweak_weight: 1 - #diagram_tweak_constraint: false - NFS Filesystem Access: - target: contract-fileserver - description: Link to the file system - protocol: nfs # values: see help - authentication: none # values: none, credentials, session-id, token, client-certificate, two-factor - authorization: none # values: none, technical-user, enduser-identity-propagation - tags: - vpn: false - ip_filtered: false - readonly: false - usage: business # values: business, devops - data_assets_sent: # sequence of IDs to reference - - customer-contracts - data_assets_received: # sequence of IDs to reference - - customer-contracts - #diagram_tweak_weight: 1 - #diagram_tweak_constraint: false - - - Contract Fileserver: - id: contract-fileserver - #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) - description: NFS Filesystem for storing the contract PDFs - type: datastore # values: external-entity, process, datastore - usage: business # values: business, devops - used_as_client_by_human: false - out_of_scope: false - justification_out_of_scope: - size: component # values: system, service, application, component - technology: file-server # values: see help - tags: - - linux - - aws:s3 - internet: false - machine: virtual # values: physical, virtual, container, serverless - encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key - owner: Company ABC - confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential - integrity: critical # values: archive, operational, important, critical, mission-critical - availability: important # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - Contract data might contain financial data as well as personally identifiable information (PII). 
The integrity and - availability of contract data is required for clearing payment disputes. The filesystem is also required to be available - for storing new contracts of freshly generated customers. - multi_tenant: false - redundant: false - custom_developed_parts: false - data_assets_processed: # sequence of IDs to reference - data_assets_stored: # sequence of IDs to reference - - customer-contracts - - contract-summaries - data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv - - file - communication_links: - - - Customer Contract Database: - id: sql-database - #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) - description: The database behind the ERP system - type: datastore # values: external-entity, process, datastore - usage: business # values: business, devops - used_as_client_by_human: false - out_of_scope: false - justification_out_of_scope: - size: component # values: system, service, application, component - technology: database # values: see help - tags: - - linux - - mysql - internet: false - machine: virtual # values: physical, virtual, container, serverless - encryption: data-with-symmetric-shared-key # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key - owner: Company ABC - confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential - integrity: mission-critical # values: archive, operational, important, critical, mission-critical - availability: mission-critical # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - The ERP system's database contains business-relevant sensitive data for the leasing processes and eventually also - for other Company XYZ internal processes. 
- multi_tenant: false - redundant: false - custom_developed_parts: false - data_assets_processed: # sequence of IDs to reference - - db-dumps - data_assets_stored: # sequence of IDs to reference - - customer-accounts - - customer-operational-data - - internal-business-data - data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv - communication_links: - - - External Development Client: - id: external-dev-client - #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) - description: External developer client - type: external-entity # values: external-entity, process, datastore - usage: devops # values: business, devops - used_as_client_by_human: true - out_of_scope: true - justification_out_of_scope: Owned and managed by external developers - size: system # values: system, service, application, component - technology: devops-client # values: see help - tags: - - linux - internet: true - machine: physical # values: physical, virtual, container, serverless - encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key - owner: External Developers - confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential - integrity: critical # values: archive, operational, important, critical, mission-critical - availability: operational # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - The clients used by external developers to create parts of the application code. 
- multi_tenant: true - redundant: false - custom_developed_parts: false - data_assets_processed: # sequence of IDs to reference - - client-application-code - - server-application-code - data_assets_stored: # sequence of IDs to reference - - client-application-code - - server-application-code - data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv - - file - communication_links: - Git-Repo Code Write Access: - target: git-repo - description: Link to the Git repo - protocol: ssh # values: see help - authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor - authorization: technical-user # values: none, technical-user, enduser-identity-propagation - tags: - vpn: false - ip_filtered: false - readonly: false - usage: devops # values: business, devops - data_assets_sent: # sequence of IDs to reference - - client-application-code - - server-application-code - data_assets_received: # sequence of IDs to reference - - client-application-code - - server-application-code - #diagram_tweak_weight: 1 - #diagram_tweak_constraint: false - Git-Repo Web-UI Access: - target: git-repo - description: Link to the Git repo - protocol: https # values: see help - authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor - authorization: technical-user # values: none, technical-user, enduser-identity-propagation - tags: - vpn: false - ip_filtered: false - readonly: false - usage: devops # values: business, devops - data_assets_sent: # sequence of IDs to reference - - client-application-code - - server-application-code - data_assets_received: # sequence of IDs to reference - - client-application-code - - server-application-code - #diagram_tweak_weight: 1 - #diagram_tweak_constraint: false - Jenkins Web-UI Access: - target: jenkins-buildserver - description: Link to the Jenkins build server - protocol: https # values: see help - authentication: credentials # 
values: none, credentials, session-id, token, client-certificate, two-factor - authorization: technical-user # values: none, technical-user, enduser-identity-propagation - tags: - vpn: false - ip_filtered: false - readonly: false - usage: devops # values: business, devops - data_assets_sent: # sequence of IDs to reference - - build-job-config - data_assets_received: # sequence of IDs to reference - - build-job-config - #diagram_tweak_weight: 1 - #diagram_tweak_constraint: false - - - Git Repository: - id: git-repo - #diagram_tweak_order: 99 # affects left to right positioning (only within a trust boundary) - description: Git repository server - type: process # values: external-entity, process, datastore - usage: devops # values: business, devops - used_as_client_by_human: false - out_of_scope: false - justification_out_of_scope: - size: system # values: system, service, application, component - technology: sourcecode-repository # values: see help - tags: - - linux - - git - internet: false - machine: virtual # values: physical, virtual, container, serverless - encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key - owner: Company ABC - confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential - integrity: important # values: archive, operational, important, critical, mission-critical - availability: important # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - The code repo pipeline might contain sensitive configuration values like backend credentials, certificates etc. and is - therefore rated as confidential. 
- multi_tenant: true - redundant: false - custom_developed_parts: false - data_assets_processed: # sequence of IDs to reference - - client-application-code - - server-application-code - data_assets_stored: # sequence of IDs to reference - - client-application-code - - server-application-code - data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv - - file - communication_links: - - - Jenkins Buildserver: - id: jenkins-buildserver - #diagram_tweak_order: 99 # affects left to right positioning (only within a trust boundary) - description: Jenkins buildserver - type: process # values: external-entity, process, datastore - usage: devops # values: business, devops - used_as_client_by_human: false - out_of_scope: false - justification_out_of_scope: - size: system # values: system, service, application, component - technology: build-pipeline # values: see help - tags: - - linux - - jenkins - internet: false - machine: virtual # values: physical, virtual, container, serverless - encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key - owner: Company ABC - confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential - integrity: critical # values: archive, operational, important, critical, mission-critical - availability: important # values: archive, operational, important, critical, mission-critical - justification_cia_rating: > - The build pipeline might contain sensitive configuration values like backend credentials, certificates etc. and is - therefore rated as confidential. The integrity and availability is rated as critical and important due to the risk - of reputation damage and application update unavailability when the build pipeline is compromised. 
- multi_tenant: true - redundant: false - custom_developed_parts: false - data_assets_processed: # sequence of IDs to reference - - build-job-config - - client-application-code - - server-application-code - - marketing-material - data_assets_stored: # sequence of IDs to reference - - build-job-config - - client-application-code - - server-application-code - - marketing-material - data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv - - file - - serialization - communication_links: - Git Repo Code Read Access: - target: git-repo - description: Link to the Git repository server - protocol: ssh # values: see help - authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor - authorization: technical-user # values: none, technical-user, enduser-identity-propagation - tags: - vpn: false - ip_filtered: false - readonly: true - usage: devops # values: business, devops - data_assets_sent: # sequence of IDs to reference - data_assets_received: # sequence of IDs to reference - - client-application-code - - server-application-code - #diagram_tweak_weight: 1 - #diagram_tweak_constraint: false - Application Deployment: - target: apache-webserver - description: Link to the Apache webserver - protocol: ssh # values: see help - authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor - authorization: technical-user # values: none, technical-user, enduser-identity-propagation - tags: - vpn: false - ip_filtered: false - readonly: false - usage: devops # values: business, devops - data_assets_sent: # sequence of IDs to reference - - client-application-code - - server-application-code - data_assets_received: # sequence of IDs to reference - CMS Updates: - target: marketing-cms - description: Link to the CMS - protocol: ssh # values: see help - authentication: client-certificate # values: none, credentials, session-id, token, 
client-certificate, two-factor - authorization: technical-user # values: none, technical-user, enduser-identity-propagation - tags: - vpn: false - ip_filtered: false - readonly: false - usage: devops # values: business, devops - data_assets_sent: # sequence of IDs to reference - - marketing-material - data_assets_received: # sequence of IDs to reference - - - - - -trust_boundaries: - - - Web DMZ: - id: web-dmz - description: Web DMZ - type: network-cloud-security-group # values: see help - tags: - technical_assets_inside: # sequence of IDs to reference - - apache-webserver - - marketing-cms - trust_boundaries_nested: # sequence of IDs to reference - - - ERP DMZ: - id: erp-dmz - description: ERP DMZ - type: network-cloud-security-group # values: see help - tags: - - some-erp - technical_assets_inside: # sequence of IDs to reference - - erp-system - - contract-fileserver - - sql-database - trust_boundaries_nested: # sequence of IDs to reference - - - Application Network: - id: application-network - description: Application Network - type: network-cloud-provider # values: see help - tags: - - aws - technical_assets_inside: # sequence of IDs to reference - - load-balancer - trust_boundaries_nested: # sequence of IDs to reference - - web-dmz - - erp-dmz - - auth-env - - - Auth Handling Environment: - id: auth-env - description: Auth Handling Environment - type: execution-environment # values: see help - tags: - technical_assets_inside: # sequence of IDs to reference - - identity-provider - - ldap-auth-server - trust_boundaries_nested: # sequence of IDs to reference - - - Dev Network: - id: dev-network - description: Development Network - type: network-on-prem # values: see help - tags: - technical_assets_inside: # sequence of IDs to reference - - jenkins-buildserver - - git-repo - - backend-admin-client - - backoffice-client - trust_boundaries_nested: # sequence of IDs to reference - - - - - -shared_runtimes: - - - WebApp and Backoffice Virtualization: - id: 
webapp-virtualization - description: WebApp Virtualization - tags: - - vmware - technical_assets_running: # sequence of IDs to reference - - apache-webserver - - marketing-cms - - erp-system - - contract-fileserver - - sql-database - - - - -individual_risk_categories: # used for adding custom manually identified risks - - - Some Individual Risk Example: - id: something-strange - description: Some text describing the risk category... - impact: Some text describing the impact... - asvs: V0 - Something Strange - cheat_sheet: https://example.com - action: Some text describing the action... - mitigation: Some text describing the mitigation... - check: Check if XYZ... - function: business-side # values: business-side, architecture, development, operations - stride: repudiation # values: spoofing, tampering, repudiation, information-disclosure, denial-of-service, elevation-of-privilege - detection_logic: Some text describing the detection logic... - risk_assessment: Some text describing the risk assessment... - false_positives: Some text describing the most common types of false positives... 
- model_failure_possible_reason: false - cwe: 693 - risks_identified: - Example Individual Risk at Database: - severity: critical # values: low, medium, elevated, high, critical - exploitation_likelihood: likely # values: unlikely, likely, very-likely, frequent - exploitation_impact: medium # values: low, medium, high, very-high - data_breach_probability: probable # values: improbable, possible, probable - data_breach_technical_assets: # list of technical asset IDs which might have data breach - - sql-database - most_relevant_data_asset: - most_relevant_technical_asset: sql-database - most_relevant_communication_link: - most_relevant_trust_boundary: - most_relevant_shared_runtime: - Example Individual Risk at Contract Filesystem: - severity: medium # values: low, medium, elevated, high, critical - exploitation_likelihood: frequent # values: unlikely, likely, very-likely, frequent - exploitation_impact: very-high # values: low, medium, high, very-high - data_breach_probability: improbable # values: improbable, possible, probable - data_breach_technical_assets: # list of technical asset IDs which might have data breach - most_relevant_data_asset: - most_relevant_technical_asset: contract-fileserver - most_relevant_communication_link: - most_relevant_trust_boundary: - most_relevant_shared_runtime: - - - -# NOTE: -# For risk tracking each risk-id needs to be defined (the string with the @ sign in it). These unique risk IDs -# are visible in the PDF report (the small grey string under each risk), the Excel (column "ID"), as well as the JSON responses. -# Some risk IDs have only one @ sign in them, while others multiple. The idea is to allow for unique but still speaking IDs. -# Therefore each risk instance creates its individual ID by taking all affected elements causing the risk to be within an @-delimited part. -# Using wildcards (the * sign) for parts delimited by @ signs allows to handle groups of certain risks at once. 
Best is to lookup the IDs -# to use in the created Excel file. Alternatively a model macro "seed-risk-tracking" is available that helps in initially -# seeding the risk tracking part here based on already identified and not yet handled risks. -risk_tracking: - - untrusted-deserialization@erp-system: # wildcards "*" between the @ characters are possible - status: accepted # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive - justification: Risk accepted as tolerable - ticket: XYZ-1234 - date: 2020-01-04 - checked_by: John Doe - - ldap-injection@*@ldap-auth-server@*: # wildcards "*" between the @ characters are possible - status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive - justification: The hardening measures were implemented and checked - ticket: XYZ-5678 - date: 2020-01-05 - checked_by: John Doe - - unencrypted-asset@*: # wildcards "*" between the @ characters are possible - status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive - justification: The hardening measures were implemented and checked - ticket: XYZ-1234 - date: 2020-01-04 - checked_by: John Doe - - missing-authentication-second-factor@*@*@*: # wildcards "*" between the @ characters are possible - status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive - justification: The hardening measures were implemented and checked - ticket: XYZ-1234 - date: 2020-01-04 - checked_by: John Doe - - missing-hardening@*: # wildcards "*" between the @ characters are possible - status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive - justification: The hardening measures were implemented and checked - ticket: XYZ-1234 - date: 2020-01-04 - checked_by: John Doe - - dos-risky-access-across-trust-boundary@*@*@*: # wildcards "*" between the @ characters are possible - status: in-progress # values: 
unchecked, in-discussion, accepted, in-progress, mitigated, false-positive - justification: The hardening measures are being implemented and checked - ticket: XYZ-1234 - date: 2020-01-04 - checked_by: John Doe - - - -#diagram_tweak_edge_layout: spline # values: spline, polyline, false, ortho (this suppresses edge labels), curved (this suppresses edge labels and can cause problems with edges) - -#diagram_tweak_suppress_edge_labels: true -#diagram_tweak_layout_left_to_right: true -#diagram_tweak_nodesep: 2 -#diagram_tweak_ranksep: 2 -#diagram_tweak_invisible_connections_between_assets: -# - tech-asset-source-id-A:tech-asset-target-id-B -# - tech-asset-source-id-C:tech-asset-target-id-D -#diagram_tweak_same_rank_assets: -# - tech-asset-source-id-E:tech-asset-target-id-F:tech-asset-source-id-G:tech-asset-target-id-H -# - tech-asset-source-id-M:tech-asset-target-id-N:tech-asset-source-id-O From 87d80332ae84305f7b918a857a43017400c0ee70 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Fri, 5 Jan 2024 19:49:01 +0000 Subject: [PATCH 42/68] Adjust documentation according to changes --- README.md | 116 +++++++++++++++-------------------- demo/stub/threagile.yaml | 2 +- internal/threagile/macros.go | 5 +- internal/threagile/root.go | 3 + pkg/docs/constants.go | 14 ++--- 5 files changed, 62 insertions(+), 78 deletions(-) diff --git a/README.md b/README.md index d55bf470..f8683402 100644 --- a/README.md +++ b/README.md @@ -31,91 +31,71 @@ The easiest way to execute Threagile on the commandline is via its Docker contai Sourcecode: https://github.com/threagile License: Open-Source (MIT License) - Usage: threagile [options] - - - Options: - - -background string - background pdf file (default "background.pdf") - -create-editing-support - just create some editing support stuff in the output directory - -create-example-model - just create an example model named threagile-example-model.yaml in the output directory - -create-stub-model - just create a minimal stub model named 
threagile-stub-model.yaml in the output directory - -custom-risk-rules-plugins string - comma-separated list of plugins (.so shared object) file names with custom risk rules to load - -diagram-dpi int - DPI used to render: maximum is 240 (default 120) - -execute-model-macro string - Execute model macro (by ID) - -generate-data-asset-diagram - generate data asset diagram (default true) - -generate-data-flow-diagram - generate data-flow diagram (default true) - -generate-report-pdf - generate report pdf, including diagrams (default true) - -generate-risks-excel - generate risks excel (default true) - -generate-risks-json - generate risks json (default true) - -generate-stats-json - generate stats json (default true) - -generate-tags-excel - generate tags excel (default true) - -generate-technical-assets-json - generate technical assets json (default true) - -ignore-orphaned-risk-tracking - ignore orphaned risk tracking (just log them) not matching a concrete risk - -list-model-macros - print model macros - -list-risk-rules - print risk rules - -list-types - print type information (enum values to be used in models) - -model string - input model yaml file (default "threagile.yaml") - -output string - output directory (default ".") - -print-3rd-party-licenses - print 3rd-party license information - -print-license - print license information - -raa-plugin string - RAA calculation plugin (.so shared object) file name (default "raa.so") - -server int - start a server (instead of commandline execution) on the given port - -skip-risk-rules string - comma-separated list of risk rules (by their ID) to skip - -verbose - verbose output - -version - print version + Usage: + threagile [flags] + threagile [command] + + Available Commands: + create-editing-support Create editing support + create-example-model Create example threagile model + create-stub-model Create stub threagile model + execute-model-macro Execute model macro + explain-model-macros Explain model macros + 
explain-risk-rules Detailed explanation of all the risk rules + explain-types Print type information (enum values to be used in models) + help Help about any command + list-model-macros Print model macros + list-risk-rules Print available risk rules + list-types Print type information (enum values to be used in models) + print-license Print license information + server Run server + + Flags: + --app-dir string app folder (default "/app") + --background string background pdf file (default "background.pdf") + --bin-dir string binary folder location (default "/app") + --custom-risk-rules-plugin string comma-separated list of plugins file names with custom risk rules to load + --diagram-dpi int DPI used to render: maximum is 300 + --generate-data-asset-diagram generate data asset diagram (default true) + --generate-data-flow-diagram generate data flow diagram (default true) + --generate-report-pdf generate report pdf, including diagrams (default true) + --generate-risks-excel generate risks excel (default true) + --generate-risks-json generate risks json (default true) + --generate-stats-json generate stats json (default true) + --generate-tags-excel generate tags excel (default true) + --generate-technical-assets-json generate technical assets json (default true) + -h, --help help for threagile + --ignore-orphaned-risk-tracking ignore orphaned risk tracking (just log them) not matching a concrete risk + --model string input model yaml file (default "threagile.yaml") + --output string output directory (default ".") + --raa-run string RAA calculation run file name (default "raa_calc") + --skip-risk-rules string comma-separated list of risk rules (by their ID) to skip + --temp-dir string temporary folder location (default "/dev/shm") + -v, --verbose verbose output Examples: If you want to create an example model (via docker) as a starting point to learn about Threagile just run: - docker run --rm -it -v "$(pwd)":/app/work threagile/threagile -create-example-model -output 
/app/work + docker run --rm -it -v "$(pwd)":/app/work threagile/threagile create-example-model -output /app/work If you want to create a minimal stub model (via docker) as a starting point for your own model just run: - docker run --rm -it -v "$(pwd)":/app/work threagile/threagile -create-stub-model -output /app/work + docker run --rm -it -v "$(pwd)":/app/work threagile/threagile create-stub-model -output /app/work If you want to execute Threagile on a model yaml file (via docker): docker run --rm -it -v "$(pwd)":/app/work threagile/threagile -verbose -model /app/work/threagile.yaml -output /app/work If you want to run Threagile as a server (REST API) on some port (here 8080): - docker run --rm -it --shm-size=256m -p 8080:8080 --name threagile-server --mount 'type=volume,src=threagile-storage,dst=/data,readonly=false' threagile/threagile -server 8080 + docker run --rm -it --shm-size=256m -p 8080:8080 --name threagile-server --mount 'type=volume,src=threagile-storage,dst=/data,readonly=false' threagile/threagile server --server-port 8080 If you want to find out about the different enum values usable in the model yaml file: - docker run --rm -it threagile/threagile -list-types + docker run --rm -it threagile/threagile list-types If you want to use some nice editing help (syntax validation, autocompletion, and live templates) in your favourite IDE: - docker run --rm -it -v "$(pwd)":/app/work threagile/threagile -create-editing-support -output /app/work + docker run --rm -it -v "$(pwd)":/app/work threagile/threagile create-editing-support -output /app/work If you want to list all available model macros (which are macros capable of reading a model yaml file, asking you questions in a wizard-style and then update the model yaml file accordingly): - docker run --rm -it threagile/threagile -list-model-macros + docker run --rm -it threagile/threagile list-model-macros If you want to execute a certain model macro on the model yaml file (here the macro add-build-pipeline): - 
docker run --rm -it -v "$(pwd)":/app/work threagile/threagile -model /app/work/threagile.yaml -output /app/work -execute-model-macro add-build-pipeline + docker run --rm -it -v "$(pwd)":/app/work threagile/threagile -model /app/work/threagile.yaml -output /app/work execute-model-macro add-build-pipeline diff --git a/demo/stub/threagile.yaml b/demo/stub/threagile.yaml index 1e3539e5..287eacd8 100644 --- a/demo/stub/threagile.yaml +++ b/demo/stub/threagile.yaml @@ -12,7 +12,7 @@ threagile_version: 1.0.0 # This is only a stub for simple quick editing and is not complete. -# For a complete usable example model see the "-create-example-model" option. +# For a complete usable example model see the "create-example-model" option. title: Model Stub diff --git a/internal/threagile/macros.go b/internal/threagile/macros.go index 6bf591d1..01615d72 100644 --- a/internal/threagile/macros.go +++ b/internal/threagile/macros.go @@ -68,8 +68,9 @@ var explainMacrosCmd = &cobra.Command{ } var executeModelMacrosCmd = &cobra.Command{ - Use: "execute-model-macro", - Args: cobra.ExactArgs(1), + Use: "execute-model-macro", + Short: "Execute model macro", + Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { cfg := readConfig("buildTimestamp") progressReporter := common.DefaultProgressReporter{Verbose: cfg.Verbose} diff --git a/internal/threagile/root.go b/internal/threagile/root.go index 98b3ead1..2e38c944 100644 --- a/internal/threagile/root.go +++ b/internal/threagile/root.go @@ -41,6 +41,9 @@ var rootCmd = &cobra.Command{ } return nil }, + CompletionOptions: cobra.CompletionOptions{ + DisableDefaultCmd: true, + }, } var serverCmd = &cobra.Command{ diff --git a/pkg/docs/constants.go b/pkg/docs/constants.go index 28e1af39..98c6cd70 100644 --- a/pkg/docs/constants.go +++ b/pkg/docs/constants.go @@ -14,21 +14,21 @@ const ( "Version: " + ThreagileVersion // TODO: add buildTimestamp + " (" + buildTimestamp + ")" Examples = "Examples:\n\n" + "If you want to create 
an example model (via docker) as a starting point to learn about Threagile just run: \n" + - " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile -create-example-model -output app/work \n\n" + + " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile create-example-model -output app/work \n\n" + "If you want to create a minimal stub model (via docker) as a starting point for your own model just run: \n" + - " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile -create-stub-model -output app/work \n\n" + + " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile create-stub-model -output app/work \n\n" + "If you want to execute Threagile on a model yaml file (via docker): \n" + " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile -verbose -model -output app/work \n\n" + "If you want to run Threagile as a server (REST API) on some port (here 8080): \n" + - " docker run --rm -it --shm-size=256m -p 8080:8080 --name --mount 'type=volume,src=threagile-storage,dst=/data,readonly=false' threagile/threagile -server 8080 \n\n" + + " docker run --rm -it --shm-size=256m -p 8080:8080 --name --mount 'type=volume,src=threagile-storage,dst=/data,readonly=false' threagile/threagile server --server-port 8080 \n\n" + "If you want to find out about the different enum values usable in the model yaml file: \n" + - " docker run --rm -it threagile/threagile -list-types\n\n" + + " docker run --rm -it threagile/threagile list-types\n\n" + "If you want to use some nice editing help (syntax validation, autocompletion, and live templates) in your favourite IDE: " + - " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile -create-editing-support -output app/work\n\n" + + " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile create-editing-support -output app/work\n\n" + "If you want to list all available model macros (which are macros capable of reading a model yaml file, asking you questions in a wizard-style and then 
update the model yaml file accordingly): \n" + - " docker run --rm -it threagile/threagile -list-model-macros \n\n" + + " docker run --rm -it threagile/threagile list-model-macros \n\n" + "If you want to execute a certain model macro on the model yaml file (here the macro add-build-pipeline): \n" + - " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile -model app/work/threagile.yaml -output app/work -execute-model-macro add-build-pipeline" + " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile -model app/work/threagile.yaml -output app/work execute-model-macro add-build-pipeline" ThirdPartyLicenses = " - golang (Google Go License): https://golang.org/LICENSE\n" + " - go-yaml (MIT License): https://github.com/go-yaml/yaml/blob/v3/LICENSE\n" + " - graphviz (CPL License): https://graphviz.gitlab.io/license/\n" + From 3713215167f907d02aa431c76f6ea237a2ae1870 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Fri, 5 Jan 2024 19:59:01 +0000 Subject: [PATCH 43/68] Inline calls to dot --- Dockerfile | 2 -- Dockerfile.local | 2 -- Makefile | 5 ----- pkg/common/consts.go | 26 ++++++++++++-------------- pkg/report/graphviz.go | 6 +++--- support/render-data-asset-diagram.sh | 2 -- support/render-data-flow-diagram.sh | 2 -- 7 files changed, 15 insertions(+), 30 deletions(-) delete mode 100755 support/render-data-asset-diagram.sh delete mode 100755 support/render-data-flow-diagram.sh diff --git a/Dockerfile b/Dockerfile index b071d6d7..e039dee4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -69,8 +69,6 @@ COPY --from=build --chown=1000:1000 /app/report/template/background.pdf /app/ COPY --from=build --chown=1000:1000 /app/support/openapi.yaml /app/ COPY --from=build --chown=1000:1000 /app/support/schema.json /app/ COPY --from=build --chown=1000:1000 /app/support/live-templates.txt /app/ -COPY --from=build --chown=1000:1000 /app/support/render-data-asset-diagram.sh /app/ -COPY --from=build --chown=1000:1000 /app/support/render-data-flow-diagram.sh /app/ 
COPY --from=build --chown=1000:1000 /app/demo/example/threagile-example-model.yaml /app/ COPY --from=build --chown=1000:1000 /app/demo/stub/threagile-stub-model.yaml /app/ COPY --from=build --chown=1000:1000 /app/server /app/server diff --git a/Dockerfile.local b/Dockerfile.local index b4dcee26..4c5b4cc0 100644 --- a/Dockerfile.local +++ b/Dockerfile.local @@ -69,8 +69,6 @@ COPY --from=build --chown=1000:1000 /app/report/template/background.pdf /app/ COPY --from=build --chown=1000:1000 /app/support/openapi.yaml /app/ COPY --from=build --chown=1000:1000 /app/support/schema.json /app/ COPY --from=build --chown=1000:1000 /app/support/live-templates.txt /app/ -COPY --from=build --chown=1000:1000 /app/support/render-data-asset-diagram.sh /app/ -COPY --from=build --chown=1000:1000 /app/support/render-data-flow-diagram.sh /app/ COPY --from=build --chown=1000:1000 /app/demo/example/threagile-example-model.yaml /app/ COPY --from=build --chown=1000:1000 /app/demo/stub/threagile-stub-model.yaml /app/ COPY --from=build --chown=1000:1000 /app/server /app/server diff --git a/Makefile b/Makefile index 796b695a..5b55bbd9 100644 --- a/Makefile +++ b/Makefile @@ -13,9 +13,6 @@ BIN = \ raa_dummy \ risk_demo_rule \ threagile -SCRIPTS = \ - support/render-data-asset-diagram.sh \ - support/render-data-flow-diagram.sh # Commands and Flags GOFLAGS = -a -ldflags="-s -w -X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" @@ -51,14 +48,12 @@ tidy: clean install: all mkdir -p $(BIN_DIR) $(ASSET_DIR) $(CP) $(addprefix bin/,$(BIN)) $(BIN_DIR) - $(CP) $(SCRIPTS) $(BIN_DIR) $(CP) $(ASSETS) $(ASSET_DIR) $(CP) demo/example/threagile.yaml $(ASSET_DIR)/threagile-example-model.yaml $(CP) demo/stub/threagile.yaml $(ASSET_DIR)/threagile-stub-model.yaml uninstall: $(RM) $(addprefix $(BIN_DIR)/,$(BIN)) - $(RM) $(addprefix $(BIN_DIR)/,$(notdir $(SCRIPTS))) $(RM) $(ASSET_DIR) bin/raa_calc: cmd/raa/main.go diff --git a/pkg/common/consts.go b/pkg/common/consts.go index bb452330..c57ee68e 100644 --- 
a/pkg/common/consts.go +++ b/pkg/common/consts.go @@ -11,20 +11,18 @@ const ( DefaultServerPort = 8080 - InputFile = "threagile.yaml" - ReportFilename = "report.pdf" - ExcelRisksFilename = "risks.xlsx" - ExcelTagsFilename = "tags.xlsx" - JsonRisksFilename = "risks.json" - JsonTechnicalAssetsFilename = "technical-assets.json" - JsonStatsFilename = "stats.json" - TemplateFilename = "background.pdf" - DataFlowDiagramFilenameDOT = "data-flow-diagram.gv" - DataFlowDiagramFilenamePNG = "data-flow-diagram.png" - DataAssetDiagramFilenameDOT = "data-asset-diagram.gv" - DataAssetDiagramFilenamePNG = "data-asset-diagram.png" - GraphvizDataFlowDiagramConversionCall = "render-data-flow-diagram.sh" - GraphvizDataAssetDiagramConversionCall = "render-data-asset-diagram.sh" + InputFile = "threagile.yaml" + ReportFilename = "report.pdf" + ExcelRisksFilename = "risks.xlsx" + ExcelTagsFilename = "tags.xlsx" + JsonRisksFilename = "risks.json" + JsonTechnicalAssetsFilename = "technical-assets.json" + JsonStatsFilename = "stats.json" + TemplateFilename = "background.pdf" + DataFlowDiagramFilenameDOT = "data-flow-diagram.gv" + DataFlowDiagramFilenamePNG = "data-flow-diagram.png" + DataAssetDiagramFilenameDOT = "data-asset-diagram.gv" + DataAssetDiagramFilenamePNG = "data-asset-diagram.png" RAAPluginName = "raa_calc" diff --git a/pkg/report/graphviz.go b/pkg/report/graphviz.go index aaaea003..4ca0aee9 100644 --- a/pkg/report/graphviz.go +++ b/pkg/report/graphviz.go @@ -13,7 +13,6 @@ import ( "strings" "github.com/threagile/threagile/pkg/colors" - "github.com/threagile/threagile/pkg/common" "github.com/threagile/threagile/pkg/security/types" ) @@ -300,7 +299,8 @@ func GenerateDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string, } // exec - cmd := exec.Command(filepath.Join(binFolder, common.GraphvizDataFlowDiagramConversionCall), tmpFileDOT.Name(), tmpFilePNG.Name()) + + cmd := exec.Command("dot", "-Tpng", tmpFileDOT.Name(), "-o", tmpFilePNG.Name()) cmd.Stdout = os.Stdout 
cmd.Stderr = os.Stderr err = cmd.Run() @@ -574,7 +574,7 @@ func GenerateDataAssetDiagramGraphvizImage(dotFile *os.File, targetDir string, } // exec - cmd := exec.Command(filepath.Join(binFolder, common.GraphvizDataAssetDiagramConversionCall), tmpFileDOT.Name(), tmpFilePNG.Name()) + cmd := exec.Command("dot", "-Tpng", tmpFileDOT.Name(), "-o", tmpFilePNG.Name()) cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr err = cmd.Run() diff --git a/support/render-data-asset-diagram.sh b/support/render-data-asset-diagram.sh deleted file mode 100755 index 0cd54739..00000000 --- a/support/render-data-asset-diagram.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -dot -Tpng $1 -o $2 \ No newline at end of file diff --git a/support/render-data-flow-diagram.sh b/support/render-data-flow-diagram.sh deleted file mode 100755 index 0cd54739..00000000 --- a/support/render-data-flow-diagram.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -dot -Tpng $1 -o $2 \ No newline at end of file From 87ab8e64a306c3504bc77297fb29bc3216c79a51 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Mon, 8 Jan 2024 16:59:29 +0000 Subject: [PATCH 44/68] Restore reading config file --- internal/threagile/flags.go | 4 ++ internal/threagile/macros.go | 2 +- internal/threagile/root.go | 108 +++++++++++++++++++++++++---------- pkg/common/config.go | 2 +- 4 files changed, 83 insertions(+), 33 deletions(-) diff --git a/internal/threagile/flags.go b/internal/threagile/flags.go index ac51c9f3..0beb318b 100644 --- a/internal/threagile/flags.go +++ b/internal/threagile/flags.go @@ -3,6 +3,8 @@ Copyright © 2023 NAME HERE */ package threagile +var configFlag *string + var verboseFlag *bool var appDirFlag, binDirFlag, outputDirFlag, tempDirFlag *string var inputFileFlag, raaPluginFlag *string @@ -18,6 +20,8 @@ var generateDataFlowDiagramFlag, generateDataAssetDiagramFlag, generateRisksJSON generateTechnicalAssetsJSONFlag, generateStatsJSONFlag, generateRisksExcelFlag, generateTagsExcelFlag, generateReportPDFFlag *bool +const 
configFlagName = "config" + const verboseFlagName = "verbose" const verboseFlagShorthand = "v" diff --git a/internal/threagile/macros.go b/internal/threagile/macros.go index 01615d72..fe157103 100644 --- a/internal/threagile/macros.go +++ b/internal/threagile/macros.go @@ -72,7 +72,7 @@ var executeModelMacrosCmd = &cobra.Command{ Short: "Execute model macro", Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - cfg := readConfig("buildTimestamp") + cfg := readConfig(cmd, "buildTimestamp") progressReporter := common.DefaultProgressReporter{Verbose: cfg.Verbose} r, err := model.ReadAndAnalyzeModel(*cfg, progressReporter) diff --git a/internal/threagile/root.go b/internal/threagile/root.go index 2e38c944..ebc2c88a 100644 --- a/internal/threagile/root.go +++ b/internal/threagile/root.go @@ -10,6 +10,7 @@ import ( "strings" "github.com/spf13/cobra" + "github.com/spf13/pflag" "github.com/threagile/threagile/pkg/common" "github.com/threagile/threagile/pkg/docs" @@ -23,7 +24,7 @@ var rootCmd = &cobra.Command{ Short: "\n" + docs.Logo, Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\n" + docs.Examples, RunE: func(cmd *cobra.Command, args []string) error { - cfg := readConfig("buildTimestamp") + cfg := readConfig(cmd, "buildTimestamp") commands := readCommands() progressReporter := common.DefaultProgressReporter{Verbose: cfg.Verbose} @@ -50,7 +51,7 @@ var serverCmd = &cobra.Command{ Use: "server", Short: "Run server", RunE: func(cmd *cobra.Command, args []string) error { - cfg := readConfig("buildTimestamp") + cfg := readConfig(cmd, "buildTimestamp") server.RunServer(cfg) return nil }, @@ -64,24 +65,27 @@ func Execute() { } func init() { - appDirFlag = rootCmd.PersistentFlags().String(appDirFlagName, common.AppDir, "app folder") - binDirFlag = rootCmd.PersistentFlags().String(binDirFlagName, common.BinDir, "binary folder location") - outputDirFlag = rootCmd.PersistentFlags().String(outputFlagName, common.OutputDir, "output directory") - 
tempDirFlag = rootCmd.PersistentFlags().String(tempDirFlagName, common.TempDir, "temporary folder location") + cfg := new(common.Config).Defaults("") + appDirFlag = rootCmd.PersistentFlags().String(appDirFlagName, cfg.AppFolder, "app folder") + binDirFlag = rootCmd.PersistentFlags().String(binDirFlagName, cfg.BinFolder, "binary folder location") + outputDirFlag = rootCmd.PersistentFlags().String(outputFlagName, cfg.OutputFolder, "output directory") + tempDirFlag = rootCmd.PersistentFlags().String(tempDirFlagName, cfg.TempFolder, "temporary folder location") - inputFileFlag = rootCmd.PersistentFlags().String(inputFileFlagName, common.InputFile, "input model yaml file") - raaPluginFlag = rootCmd.PersistentFlags().String(raaPluginFlagName, "raa_calc", "RAA calculation run file name") + inputFileFlag = rootCmd.PersistentFlags().String(inputFileFlagName, cfg.InputFile, "input model yaml file") + raaPluginFlag = rootCmd.PersistentFlags().String(raaPluginFlagName, cfg.RAAPlugin, "RAA calculation run file name") - serverPortFlag = serverCmd.PersistentFlags().Int(serverPortFlagName, common.DefaultServerPort, "the server port") - serverDirFlag = serverCmd.PersistentFlags().String(serverDirFlagName, common.DataDir, "base folder for server mode (default: "+common.DataDir+")") + serverPortFlag = serverCmd.PersistentFlags().Int(serverPortFlagName, cfg.ServerPort, "the server port") + serverDirFlag = serverCmd.PersistentFlags().String(serverDirFlagName, cfg.DataFolder, "base folder for server mode (default: "+common.DataDir+")") - verboseFlag = rootCmd.PersistentFlags().BoolP(verboseFlagName, verboseFlagShorthand, false, "verbose output") + verboseFlag = rootCmd.PersistentFlags().BoolP(verboseFlagName, verboseFlagShorthand, cfg.Verbose, "verbose output") - customRiskRulesPluginFlag = rootCmd.PersistentFlags().String(customRiskRulesPluginFlagName, "", "comma-separated list of plugins file names with custom risk rules to load") - diagramDpiFlag = 
rootCmd.PersistentFlags().Int(diagramDpiFlagName, 0, "DPI used to render: maximum is "+fmt.Sprintf("%d", common.MaxGraphvizDPI)+"") - skipRiskRulesFlag = rootCmd.PersistentFlags().String(skipRiskRulesFlagName, "", "comma-separated list of risk rules (by their ID) to skip") - ignoreOrphandedRiskTrackingFlag = rootCmd.PersistentFlags().Bool(ignoreOrphandedRiskTrackingFlagName, false, "ignore orphaned risk tracking (just log them) not matching a concrete risk") - templateFileNameFlag = rootCmd.PersistentFlags().String(templateFileNameFlagName, common.TemplateFilename, "background pdf file") + configFlag = rootCmd.PersistentFlags().String(configFlagName, "", "config file") + + customRiskRulesPluginFlag = rootCmd.PersistentFlags().String(customRiskRulesPluginFlagName, strings.Join(cfg.RiskRulesPlugins, ","), "comma-separated list of plugins file names with custom risk rules to load") + diagramDpiFlag = rootCmd.PersistentFlags().Int(diagramDpiFlagName, cfg.DiagramDPI, "DPI used to render: maximum is "+fmt.Sprintf("%d", common.MaxGraphvizDPI)+"") + skipRiskRulesFlag = rootCmd.PersistentFlags().String(skipRiskRulesFlagName, cfg.SkipRiskRules, "comma-separated list of risk rules (by their ID) to skip") + ignoreOrphandedRiskTrackingFlag = rootCmd.PersistentFlags().Bool(ignoreOrphandedRiskTrackingFlagName, cfg.IgnoreOrphanedRiskTracking, "ignore orphaned risk tracking (just log them) not matching a concrete risk") + templateFileNameFlag = rootCmd.PersistentFlags().String(templateFileNameFlagName, cfg.TemplateFilename, "background pdf file") generateDataFlowDiagramFlag = rootCmd.PersistentFlags().Bool(generateDataFlowDiagramFlagName, true, "generate data flow diagram") generateDataAssetDiagramFlag = rootCmd.PersistentFlags().Bool(generateDataAssetDiagramFlagName, true, "generate data asset diagram") @@ -108,29 +112,71 @@ func readCommands() *report.GenerateCommands { return commands } -func readConfig(buildTimestamp string) *common.Config { +func readConfig(cmd *cobra.Command, 
buildTimestamp string) *common.Config { cfg := new(common.Config).Defaults(buildTimestamp) - cfg.ServerPort = *serverPortFlag - cfg.ServerFolder = expandPath(*serverDirFlag) + configError := cfg.Load(*configFlag) + if configError != nil { + fmt.Printf("WARNING: failed to load config file %q: %v\n", *configFlag, configError) + } + + flags := cmd.Flags() + if isFlagOverriden(flags, serverPortFlagName) { + cfg.ServerPort = *serverPortFlag + } + if isFlagOverriden(flags, serverDirFlagName) { + cfg.ServerFolder = expandPath(*serverDirFlag) + } - cfg.AppFolder = expandPath(*appDirFlag) - cfg.BinFolder = expandPath(*binDirFlag) - cfg.OutputFolder = expandPath(*outputDirFlag) - cfg.TempFolder = expandPath(*tempDirFlag) + if isFlagOverriden(flags, appDirFlagName) { + cfg.AppFolder = expandPath(*appDirFlag) + } + if isFlagOverriden(flags, binDirFlagName) { + cfg.BinFolder = expandPath(*binDirFlag) + } + if isFlagOverriden(flags, outputFlagName) { + cfg.OutputFolder = expandPath(*outputDirFlag) + } + if isFlagOverriden(flags, tempDirFlagName) { + cfg.TempFolder = expandPath(*tempDirFlag) + } - cfg.Verbose = *verboseFlag + if isFlagOverriden(flags, verboseFlagName) { + cfg.Verbose = *verboseFlag + } - cfg.InputFile = expandPath(*inputFileFlag) - cfg.RAAPlugin = *raaPluginFlag + if isFlagOverriden(flags, inputFileFlagName) { + cfg.InputFile = expandPath(*inputFileFlag) + } + if isFlagOverriden(flags, raaPluginFlagName) { + cfg.RAAPlugin = *raaPluginFlag + } - cfg.RiskRulesPlugins = strings.Split(*customRiskRulesPluginFlag, ",") - cfg.SkipRiskRules = *skipRiskRulesFlag - cfg.IgnoreOrphanedRiskTracking = *ignoreOrphandedRiskTrackingFlag - cfg.DiagramDPI = *diagramDpiFlag - cfg.TemplateFilename = *templateFileNameFlag + if isFlagOverriden(flags, customRiskRulesPluginFlagName) { + cfg.RiskRulesPlugins = strings.Split(*customRiskRulesPluginFlag, ",") + } + if isFlagOverriden(flags, skipRiskRulesFlagName) { + cfg.SkipRiskRules = *skipRiskRulesFlag + } + if isFlagOverriden(flags, 
ignoreOrphandedRiskTrackingFlagName) { + cfg.IgnoreOrphanedRiskTracking = *ignoreOrphandedRiskTrackingFlag + } + if isFlagOverriden(flags, diagramDpiFlagName) { + cfg.DiagramDPI = *diagramDpiFlag + } + if isFlagOverriden(flags, templateFileNameFlagName) { + cfg.TemplateFilename = *templateFileNameFlag + } return cfg } +func isFlagOverriden(flags *pflag.FlagSet, flagName string) bool { + flag := flags.Lookup(flagName) + if flag == nil { + return false + } + return flag.Changed +} + func expandPath(path string) string { home := userHomeDir() if strings.HasPrefix(path, "~") { diff --git a/pkg/common/config.go b/pkg/common/config.go index 944f69af..ef3d25e0 100644 --- a/pkg/common/config.go +++ b/pkg/common/config.go @@ -79,7 +79,7 @@ func (c *Config) Defaults(buildTimestamp string) *Config { RiskRulesPlugins: make([]string, 0), SkipRiskRules: "", ExecuteModelMacro: "", - ServerPort: 0, + ServerPort: DefaultServerPort, GraphvizDPI: DefaultGraphvizDPI, BackupHistoryFilesToKeep: DefaultBackupHistoryFilesToKeep, From d6bb1c046d13b3d4f7524099dd73cf5357f08c41 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Mon, 8 Jan 2024 17:29:25 +0000 Subject: [PATCH 45/68] Move colors package to report package --- pkg/colors/colors.go | 188 --------- pkg/report/colors.go | 483 +++++++++++++++++++++++ pkg/report/excel.go | 73 ++-- pkg/report/graphviz.go | 37 +- pkg/report/report.go | 345 ++++++++-------- pkg/security/types/communication_link.go | 134 ------- pkg/security/types/technical_asset.go | 178 --------- 7 files changed, 709 insertions(+), 729 deletions(-) delete mode 100644 pkg/colors/colors.go create mode 100644 pkg/report/colors.go diff --git a/pkg/colors/colors.go b/pkg/colors/colors.go deleted file mode 100644 index eb68c4f3..00000000 --- a/pkg/colors/colors.go +++ /dev/null @@ -1,188 +0,0 @@ -// TODO: move content of this package to internal because it's only bunch of helper which is actually detail of implementation on how to generate -package colors - -import ( - 
"encoding/hex" - - "github.com/jung-kurt/gofpdf" -) - -const ( - Amber = "#AF780E" - Green = "#008000" - Blue = "#000080" - DarkBlue = "#000060" - Black = "#000000" - Gray = "#444444" - LightGray = "#666666" - MiddleLightGray = "#999999" - MoreLightGray = "#D2D2D2" - VeryLightGray = "#E5E5E5" - ExtremeLightGray = "#F6F6F6" - Pink = "#F987C5" - LightPink = "#FFE7EF" - Red = "#CC0000" - OutOfScopeFancy = "#D5D7FF" - CustomDevelopedParts = "#FFFC97" - ExtremeLightBlue = "#DDFFFF" - LightBlue = "#77FFFF" - Brown = "#8C4C17" -) - -var ( - _ = Green + Blue + MoreLightGray + ExtremeLightGray + LightBlue - _ = ColorOutOfScope - _ = RgbHexColorModelFailure -) - -func DarkenHexColor(hexString string) string { - colorBytes, _ := hex.DecodeString(hexString[1:]) - adjusted := make([]byte, 3) - for i := 0; i < 3; i++ { - if colorBytes[i] > 0x22 { - adjusted[i] = colorBytes[i] - 0x20 - } else { - adjusted[i] = 0x00 - } - } - return "#" + hex.EncodeToString(adjusted) -} - -func BrightenHexColor(hexString string) string { - colorBytes, _ := hex.DecodeString(hexString[1:]) - adjusted := make([]byte, 3) - for i := 0; i < 3; i++ { - if colorBytes[i] < 0xDD { - adjusted[i] = colorBytes[i] + 0x20 - } else { - adjusted[i] = 0xFF - } - } - return "#" + hex.EncodeToString(adjusted) -} - -func ColorCriticalRisk(pdf *gofpdf.Fpdf) { - pdf.SetTextColor(255, 38, 0) -} -func RgbHexColorCriticalRisk() string { - return "#FF2600" -} - -func ColorHighRisk(pdf *gofpdf.Fpdf) { - pdf.SetTextColor(160, 40, 30) -} -func RgbHexColorHighRisk() string { - return "#A0281E" -} - -func ColorElevatedRisk(pdf *gofpdf.Fpdf) { - pdf.SetTextColor(255, 142, 0) -} -func RgbHexColorElevatedRisk() string { - return "#FF8E00" -} - -func ColorMediumRisk(pdf *gofpdf.Fpdf) { - pdf.SetTextColor(200, 120, 50) -} -func RgbHexColorMediumRisk() string { - return "#C87832" -} - -func ColorLowRisk(pdf *gofpdf.Fpdf) { - pdf.SetTextColor(35, 70, 95) -} -func RgbHexColorLowRisk() string { - return "#23465F" -} - -func 
ColorOutOfScope(pdf *gofpdf.Fpdf) { - pdf.SetTextColor(127, 127, 127) -} -func RgbHexColorOutOfScope() string { - return "#7F7F7F" -} - -func ColorRiskStatusUnchecked(pdf *gofpdf.Fpdf) { - pdf.SetTextColor(256, 0, 0) -} -func RgbHexColorRiskStatusUnchecked() string { - return "#FF0000" -} - -func ColorRiskStatusMitigated(pdf *gofpdf.Fpdf) { - pdf.SetTextColor(0, 143, 0) -} -func RgbHexColorRiskStatusMitigated() string { - return "#008F00" -} - -func ColorRiskStatusInProgress(pdf *gofpdf.Fpdf) { - pdf.SetTextColor(0, 0, 256) -} -func RgbHexColorRiskStatusInProgress() string { - return "#0000FF" -} - -func ColorRiskStatusAccepted(pdf *gofpdf.Fpdf) { - pdf.SetTextColor(255, 64, 255) -} -func RgbHexColorRiskStatusAccepted() string { - return "#FF40FF" -} - -func ColorRiskStatusInDiscussion(pdf *gofpdf.Fpdf) { - pdf.SetTextColor(256, 147, 0) -} -func RgbHexColorRiskStatusInDiscussion() string { - return "#FF9300" -} - -func ColorRiskStatusFalsePositive(pdf *gofpdf.Fpdf) { - pdf.SetTextColor(102, 102, 102) -} -func RgbHexColorRiskStatusFalsePositive() string { - return "#666666" -} - -func ColorTwilight(pdf *gofpdf.Fpdf) { - pdf.SetTextColor(58, 82, 200) -} -func RgbHexColorTwilight() string { - return "#3A52C8" -} - -func ColorBusiness(pdf *gofpdf.Fpdf) { - pdf.SetTextColor(83, 27, 147) -} -func RgbHexColorBusiness() string { - return "#531B93" -} - -func ColorArchitecture(pdf *gofpdf.Fpdf) { - pdf.SetTextColor(0, 84, 147) -} -func RgbHexColorArchitecture() string { - return "#005493" -} - -func ColorDevelopment(pdf *gofpdf.Fpdf) { - pdf.SetTextColor(222, 146, 35) -} -func RgbHexColorDevelopment() string { - return "#DE9223" -} - -func ColorOperation(pdf *gofpdf.Fpdf) { - pdf.SetTextColor(148, 127, 80) -} -func RgbHexColorOperation() string { - return "#947F50" -} - -func ColorModelFailure(pdf *gofpdf.Fpdf) { - pdf.SetTextColor(148, 82, 0) -} -func RgbHexColorModelFailure() string { - return "#945200" -} diff --git a/pkg/report/colors.go b/pkg/report/colors.go new file 
mode 100644 index 00000000..46b16155 --- /dev/null +++ b/pkg/report/colors.go @@ -0,0 +1,483 @@ +package report + +import ( + "encoding/hex" + "fmt" + + "github.com/jung-kurt/gofpdf" + "github.com/threagile/threagile/pkg/security/types" +) + +const ( + Amber = "#AF780E" + Green = "#008000" + Blue = "#000080" + DarkBlue = "#000060" + Black = "#000000" + Gray = "#444444" + LightGray = "#666666" + MiddleLightGray = "#999999" + MoreLightGray = "#D2D2D2" + VeryLightGray = "#E5E5E5" + ExtremeLightGray = "#F6F6F6" + Pink = "#F987C5" + LightPink = "#FFE7EF" + Red = "#CC0000" + OutOfScopeFancy = "#D5D7FF" + CustomDevelopedParts = "#FFFC97" + ExtremeLightBlue = "#DDFFFF" + LightBlue = "#77FFFF" + Brown = "#8C4C17" +) + +func DarkenHexColor(hexString string) string { + colorBytes, _ := hex.DecodeString(hexString[1:]) + adjusted := make([]byte, 3) + for i := 0; i < 3; i++ { + if colorBytes[i] > 0x22 { + adjusted[i] = colorBytes[i] - 0x20 + } else { + adjusted[i] = 0x00 + } + } + return "#" + hex.EncodeToString(adjusted) +} + +func BrightenHexColor(hexString string) string { + colorBytes, _ := hex.DecodeString(hexString[1:]) + adjusted := make([]byte, 3) + for i := 0; i < 3; i++ { + if colorBytes[i] < 0xDD { + adjusted[i] = colorBytes[i] + 0x20 + } else { + adjusted[i] = 0xFF + } + } + return "#" + hex.EncodeToString(adjusted) +} + +func ColorCriticalRisk(pdf *gofpdf.Fpdf) { + pdf.SetTextColor(255, 38, 0) +} +func RgbHexColorCriticalRisk() string { + return "#FF2600" +} + +func ColorHighRisk(pdf *gofpdf.Fpdf) { + pdf.SetTextColor(160, 40, 30) +} +func RgbHexColorHighRisk() string { + return "#A0281E" +} + +func ColorElevatedRisk(pdf *gofpdf.Fpdf) { + pdf.SetTextColor(255, 142, 0) +} +func RgbHexColorElevatedRisk() string { + return "#FF8E00" +} + +func ColorMediumRisk(pdf *gofpdf.Fpdf) { + pdf.SetTextColor(200, 120, 50) +} +func RgbHexColorMediumRisk() string { + return "#C87832" +} + +func ColorLowRisk(pdf *gofpdf.Fpdf) { + pdf.SetTextColor(35, 70, 95) +} +func 
RgbHexColorLowRisk() string { + return "#23465F" +} + +func ColorOutOfScope(pdf *gofpdf.Fpdf) { + pdf.SetTextColor(127, 127, 127) +} +func RgbHexColorOutOfScope() string { + return "#7F7F7F" +} + +func ColorRiskStatusUnchecked(pdf *gofpdf.Fpdf) { + pdf.SetTextColor(256, 0, 0) +} +func RgbHexColorRiskStatusUnchecked() string { + return "#FF0000" +} + +func ColorRiskStatusMitigated(pdf *gofpdf.Fpdf) { + pdf.SetTextColor(0, 143, 0) +} +func RgbHexColorRiskStatusMitigated() string { + return "#008F00" +} + +func ColorRiskStatusInProgress(pdf *gofpdf.Fpdf) { + pdf.SetTextColor(0, 0, 256) +} +func RgbHexColorRiskStatusInProgress() string { + return "#0000FF" +} + +func ColorRiskStatusAccepted(pdf *gofpdf.Fpdf) { + pdf.SetTextColor(255, 64, 255) +} +func RgbHexColorRiskStatusAccepted() string { + return "#FF40FF" +} + +func ColorRiskStatusInDiscussion(pdf *gofpdf.Fpdf) { + pdf.SetTextColor(256, 147, 0) +} +func RgbHexColorRiskStatusInDiscussion() string { + return "#FF9300" +} + +func ColorRiskStatusFalsePositive(pdf *gofpdf.Fpdf) { + pdf.SetTextColor(102, 102, 102) +} +func RgbHexColorRiskStatusFalsePositive() string { + return "#666666" +} + +func ColorTwilight(pdf *gofpdf.Fpdf) { + pdf.SetTextColor(58, 82, 200) +} +func RgbHexColorTwilight() string { + return "#3A52C8" +} + +func ColorBusiness(pdf *gofpdf.Fpdf) { + pdf.SetTextColor(83, 27, 147) +} +func RgbHexColorBusiness() string { + return "#531B93" +} + +func ColorArchitecture(pdf *gofpdf.Fpdf) { + pdf.SetTextColor(0, 84, 147) +} +func RgbHexColorArchitecture() string { + return "#005493" +} + +func ColorDevelopment(pdf *gofpdf.Fpdf) { + pdf.SetTextColor(222, 146, 35) +} +func RgbHexColorDevelopment() string { + return "#DE9223" +} + +func ColorOperation(pdf *gofpdf.Fpdf) { + pdf.SetTextColor(148, 127, 80) +} +func RgbHexColorOperation() string { + return "#947F50" +} + +func ColorModelFailure(pdf *gofpdf.Fpdf) { + pdf.SetTextColor(148, 82, 0) +} +func RgbHexColorModelFailure() string { + return "#945200" +} + 
+func determineArrowLineStyle(cl types.CommunicationLink) string { + if len(cl.DataAssetsSent) == 0 && len(cl.DataAssetsReceived) == 0 { + return "dotted" // dotted, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... + } + if cl.Usage == types.DevOps { + return "dashed" + } + return "solid" +} + +// Pen Widths: + +func determineArrowPenWidth(cl types.CommunicationLink, parsedModel *types.ParsedModel) string { + if determineArrowColor(cl, parsedModel) == Pink { + return fmt.Sprintf("%f", 3.0) + } + if determineArrowColor(cl, parsedModel) != Black { + return fmt.Sprintf("%f", 2.5) + } + return fmt.Sprintf("%f", 1.5) +} + +func determineLabelColor(cl types.CommunicationLink, parsedModel *types.ParsedModel) string { + // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here + /* + if dataFlow.Protocol.IsEncrypted() { + return Gray + } else {*/ + // check for red + for _, sentDataAsset := range cl.DataAssetsSent { + if parsedModel.DataAssets[sentDataAsset].Integrity == types.MissionCritical { + return Red + } + } + for _, receivedDataAsset := range cl.DataAssetsReceived { + if parsedModel.DataAssets[receivedDataAsset].Integrity == types.MissionCritical { + return Red + } + } + // check for amber + for _, sentDataAsset := range cl.DataAssetsSent { + if parsedModel.DataAssets[sentDataAsset].Integrity == types.Critical { + return Amber + } + } + for _, receivedDataAsset := range cl.DataAssetsReceived { + if parsedModel.DataAssets[receivedDataAsset].Integrity == types.Critical { + return Amber + } + } + // default + return Gray + +} + +// pink when model forgery attempt (i.e. 
nothing being sent and received) + +func determineArrowColor(cl types.CommunicationLink, parsedModel *types.ParsedModel) string { + // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here + if len(cl.DataAssetsSent) == 0 && len(cl.DataAssetsReceived) == 0 || + cl.Protocol == types.UnknownProtocol { + return Pink // pink, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... + } + if cl.Usage == types.DevOps { + return MiddleLightGray + } else if cl.VPN { + return DarkBlue + } else if cl.IpFiltered { + return Brown + } + // check for red + for _, sentDataAsset := range cl.DataAssetsSent { + if parsedModel.DataAssets[sentDataAsset].Confidentiality == types.StrictlyConfidential { + return Red + } + } + for _, receivedDataAsset := range cl.DataAssetsReceived { + if parsedModel.DataAssets[receivedDataAsset].Confidentiality == types.StrictlyConfidential { + return Red + } + } + // check for amber + for _, sentDataAsset := range cl.DataAssetsSent { + if parsedModel.DataAssets[sentDataAsset].Confidentiality == types.Confidential { + return Amber + } + } + for _, receivedDataAsset := range cl.DataAssetsReceived { + if parsedModel.DataAssets[receivedDataAsset].Confidentiality == types.Confidential { + return Amber + } + } + // default + return Black + /* + } else if dataFlow.Authentication != NoneAuthentication { + return Black + } else { + // check for red + for _, sentDataAsset := range dataFlow.DataAssetsSent { // first check if any red? + if ParsedModelRoot.DataAssets[sentDataAsset].Integrity == MissionCritical { + return Red + } + } + for _, receivedDataAsset := range dataFlow.DataAssetsReceived { // first check if any red? 
+ if ParsedModelRoot.DataAssets[receivedDataAsset].Integrity == MissionCritical { + return Red + } + } + // check for amber + for _, sentDataAsset := range dataFlow.DataAssetsSent { // then check if any amber? + if ParsedModelRoot.DataAssets[sentDataAsset].Integrity == Critical { + return Amber + } + } + for _, receivedDataAsset := range dataFlow.DataAssetsReceived { // then check if any amber? + if ParsedModelRoot.DataAssets[receivedDataAsset].Integrity == Critical { + return Amber + } + } + return Black + } + */ +} + +// red when >= confidential data stored in unencrypted technical asset + +func determineTechnicalAssetLabelColor(ta types.TechnicalAsset, model *types.ParsedModel) string { + // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here + // Check for red + if ta.Integrity == types.MissionCritical { + return Red + } + for _, storedDataAsset := range ta.DataAssetsStored { + if model.DataAssets[storedDataAsset].Integrity == types.MissionCritical { + return Red + } + } + for _, processedDataAsset := range ta.DataAssetsProcessed { + if model.DataAssets[processedDataAsset].Integrity == types.MissionCritical { + return Red + } + } + // Check for amber + if ta.Integrity == types.Critical { + return Amber + } + for _, storedDataAsset := range ta.DataAssetsStored { + if model.DataAssets[storedDataAsset].Integrity == types.Critical { + return Amber + } + } + for _, processedDataAsset := range ta.DataAssetsProcessed { + if model.DataAssets[processedDataAsset].Integrity == types.Critical { + return Amber + } + } + return Black + /* + if what.Encrypted { + return Black + } else { + if what.Confidentiality == StrictlyConfidential { + return Red + } + for _, storedDataAsset := range what.DataAssetsStored { + if ParsedModelRoot.DataAssets[storedDataAsset].Confidentiality == StrictlyConfidential { + return Red + } + } + if what.Confidentiality == Confidential { + return Amber + } + for _, storedDataAsset := range 
what.DataAssetsStored { + if ParsedModelRoot.DataAssets[storedDataAsset].Confidentiality == Confidential { + return Amber + } + } + return Black + } + */ +} + +// red when mission-critical integrity, but still unauthenticated (non-readonly) channels access it +// amber when critical integrity, but still unauthenticated (non-readonly) channels access it +// pink when model forgery attempt (i.e. nothing being processed or stored) + +func determineShapeBorderColor(ta types.TechnicalAsset, parsedModel *types.ParsedModel) string { + // Check for red + if ta.Confidentiality == types.StrictlyConfidential { + return Red + } + for _, storedDataAsset := range ta.DataAssetsStored { + if parsedModel.DataAssets[storedDataAsset].Confidentiality == types.StrictlyConfidential { + return Red + } + } + for _, processedDataAsset := range ta.DataAssetsProcessed { + if parsedModel.DataAssets[processedDataAsset].Confidentiality == types.StrictlyConfidential { + return Red + } + } + // Check for amber + if ta.Confidentiality == types.Confidential { + return Amber + } + for _, storedDataAsset := range ta.DataAssetsStored { + if parsedModel.DataAssets[storedDataAsset].Confidentiality == types.Confidential { + return Amber + } + } + for _, processedDataAsset := range ta.DataAssetsProcessed { + if parsedModel.DataAssets[processedDataAsset].Confidentiality == types.Confidential { + return Amber + } + } + return Black + /* + if what.Integrity == MissionCritical { + for _, dataFlow := range IncomingTechnicalCommunicationLinksMappedByTargetId[what.Id] { + if !dataFlow.Readonly && dataFlow.Authentication == NoneAuthentication { + return Red + } + } + } + + if what.Integrity == Critical { + for _, dataFlow := range IncomingTechnicalCommunicationLinksMappedByTargetId[what.Id] { + if !dataFlow.Readonly && dataFlow.Authentication == NoneAuthentication { + return Amber + } + } + } + + if len(what.DataAssetsProcessed) == 0 && len(what.DataAssetsStored) == 0 { + return Pink // pink, because it's strange 
when too many technical assets process no data... some are ok, but many in a diagram is a sign of model forgery... + } + + return Black + */ +} + +// dotted when model forgery attempt (i.e. nothing being processed or stored) + +func determineShapeBorderLineStyle(ta types.TechnicalAsset) string { + if len(ta.DataAssetsProcessed) == 0 && len(ta.DataAssetsStored) == 0 || ta.OutOfScope { + return "dotted" // dotted, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... + } + return "solid" +} + +// 3 when redundant + +func determineShapePeripheries(ta types.TechnicalAsset) int { + if ta.Redundant { + return 2 + } + return 1 +} + +func determineShapeStyle(ta types.TechnicalAsset) string { + return "filled" +} + +func determineShapeFillColor(ta types.TechnicalAsset, parsedModel *types.ParsedModel) string { + fillColor := VeryLightGray + if len(ta.DataAssetsProcessed) == 0 && len(ta.DataAssetsStored) == 0 || + ta.Technology == types.UnknownTechnology { + fillColor = LightPink // lightPink, because it's strange when too many technical assets process no data... some ok, but many in a diagram ist a sign of model forgery... 
+ } else if len(ta.CommunicationLinks) == 0 && len(parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId[ta.Id]) == 0 { + fillColor = LightPink + } else if ta.Internet { + fillColor = ExtremeLightBlue + } else if ta.OutOfScope { + fillColor = OutOfScopeFancy + } else if ta.CustomDevelopedParts { + fillColor = CustomDevelopedParts + } + switch ta.Machine { + case types.Physical: + fillColor = DarkenHexColor(fillColor) + case types.Container: + fillColor = BrightenHexColor(fillColor) + case types.Serverless: + fillColor = BrightenHexColor(BrightenHexColor(fillColor)) + case types.Virtual: + } + return fillColor +} + +func determineShapeBorderPenWidth(ta types.TechnicalAsset, parsedModel *types.ParsedModel) string { + if determineShapeBorderColor(ta, parsedModel) == Pink { + return fmt.Sprintf("%f", 3.5) + } + if determineShapeBorderColor(ta, parsedModel) != Black { + return fmt.Sprintf("%f", 3.0) + } + return fmt.Sprintf("%f", 2.0) +} diff --git a/pkg/report/excel.go b/pkg/report/excel.go index 315de9fc..38bccb94 100644 --- a/pkg/report/excel.go +++ b/pkg/report/excel.go @@ -6,7 +6,6 @@ import ( "strconv" "strings" - "github.com/threagile/threagile/pkg/colors" "github.com/threagile/threagile/pkg/security/types" "github.com/xuri/excelize/v2" ) @@ -100,15 +99,15 @@ func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) erro return fmt.Errorf("unable to set column width: %w", err) } - // styleSeverityCriticalBold, err := excel.NewStyle(`{"font":{"color":"` + colors.RgbHexColorCriticalRisk() + `","size":12,"bold":true}}`) + // styleSeverityCriticalBold, err := excel.NewStyle(`{"font":{"color":"` + RgbHexColorCriticalRisk() + `","size":12,"bold":true}}`) styleSeverityCriticalBold, err := excel.NewStyle(&excelize.Style{ Font: &excelize.Font{ - Color: colors.RgbHexColorCriticalRisk(), + Color: RgbHexColorCriticalRisk(), Size: 12, Bold: true, }, }) - // styleSeverityCriticalCenter, err := 
excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"` + colors.RgbHexColorCriticalRisk() + `","size":12}}`) + // styleSeverityCriticalCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"` + RgbHexColorCriticalRisk() + `","size":12}}`) styleSeverityCriticalCenter, err := excel.NewStyle(&excelize.Style{ Alignment: &excelize.Alignment{ Horizontal: "center", @@ -116,19 +115,19 @@ func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) erro WrapText: false, }, Font: &excelize.Font{ - Color: colors.RgbHexColorCriticalRisk(), + Color: RgbHexColorCriticalRisk(), Size: 12, }, }) - // styleSeverityHighBold, err := excel.NewStyle(`{"font":{"color":"` + colors.RgbHexColorHighRisk() + `","size":12,"bold":true}}`) + // styleSeverityHighBold, err := excel.NewStyle(`{"font":{"color":"` + RgbHexColorHighRisk() + `","size":12,"bold":true}}`) styleSeverityHighBold, err := excel.NewStyle(&excelize.Style{ Font: &excelize.Font{ - Color: colors.RgbHexColorHighRisk(), + Color: RgbHexColorHighRisk(), Size: 12, Bold: true, }, }) - // styleSeverityHighCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"` + colors.RgbHexColorHighRisk() + `","size":12}}`) + // styleSeverityHighCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"` + RgbHexColorHighRisk() + `","size":12}}`) styleSeverityHighCenter, err := excel.NewStyle(&excelize.Style{ Alignment: &excelize.Alignment{ Horizontal: "center", @@ -136,19 +135,19 @@ func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) erro WrapText: false, }, Font: &excelize.Font{ - Color: colors.RgbHexColorHighRisk(), + Color: RgbHexColorHighRisk(), Size: 12, }, }) - // styleSeverityElevatedBold, err := excel.NewStyle(`{"font":{"color":"` + 
colors.RgbHexColorElevatedRisk() + `","size":12,"bold":true}}`) + // styleSeverityElevatedBold, err := excel.NewStyle(`{"font":{"color":"` + RgbHexColorElevatedRisk() + `","size":12,"bold":true}}`) styleSeverityElevatedBold, err := excel.NewStyle(&excelize.Style{ Font: &excelize.Font{ - Color: colors.RgbHexColorElevatedRisk(), + Color: RgbHexColorElevatedRisk(), Size: 12, Bold: true, }, }) - // styleSeverityElevatedCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"` + colors.RgbHexColorElevatedRisk() + `","size":12}}`) + // styleSeverityElevatedCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"` + RgbHexColorElevatedRisk() + `","size":12}}`) styleSeverityElevatedCenter, err := excel.NewStyle(&excelize.Style{ Alignment: &excelize.Alignment{ Horizontal: "center", @@ -156,19 +155,19 @@ func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) erro WrapText: false, }, Font: &excelize.Font{ - Color: colors.RgbHexColorElevatedRisk(), + Color: RgbHexColorElevatedRisk(), Size: 12, }, }) - // styleSeverityMediumBold, err := excel.NewStyle(`{"font":{"color":"` + colors.RgbHexColorMediumRisk() + `","size":12,"bold":true}}`) + // styleSeverityMediumBold, err := excel.NewStyle(`{"font":{"color":"` + RgbHexColorMediumRisk() + `","size":12,"bold":true}}`) styleSeverityMediumBold, err := excel.NewStyle(&excelize.Style{ Font: &excelize.Font{ - Color: colors.RgbHexColorMediumRisk(), + Color: RgbHexColorMediumRisk(), Size: 12, Bold: true, }, }) - // styleSeverityMediumCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"` + colors.RgbHexColorMediumRisk() + `","size":12}}`) + // styleSeverityMediumCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"` + RgbHexColorMediumRisk() + 
`","size":12}}`) styleSeverityMediumCenter, err := excel.NewStyle(&excelize.Style{ Alignment: &excelize.Alignment{ Horizontal: "center", @@ -176,19 +175,19 @@ func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) erro WrapText: false, }, Font: &excelize.Font{ - Color: colors.RgbHexColorMediumRisk(), + Color: RgbHexColorMediumRisk(), Size: 12, }, }) - // styleSeverityLowBold, err := excel.NewStyle(`{"font":{"color":"` + colors.RgbHexColorLowRisk() + `","size":12,"bold":true}}`) + // styleSeverityLowBold, err := excel.NewStyle(`{"font":{"color":"` + RgbHexColorLowRisk() + `","size":12,"bold":true}}`) styleSeverityLowBold, err := excel.NewStyle(&excelize.Style{ Font: &excelize.Font{ - Color: colors.RgbHexColorLowRisk(), + Color: RgbHexColorLowRisk(), Size: 12, Bold: true, }, }) - // styleSeverityLowCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"` + colors.RgbHexColorLowRisk() + `","size":12}}`) + // styleSeverityLowCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"` + RgbHexColorLowRisk() + `","size":12}}`) styleSeverityLowCenter, err := excel.NewStyle(&excelize.Style{ Alignment: &excelize.Alignment{ Horizontal: "center", @@ -196,12 +195,12 @@ func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) erro WrapText: false, }, Font: &excelize.Font{ - Color: colors.RgbHexColorLowRisk(), + Color: RgbHexColorLowRisk(), Size: 12, }, }) - // styleRedCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"` + colors.RgbHexColorRiskStatusUnchecked() + `","size":12}}`) + // styleRedCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"` + RgbHexColorRiskStatusUnchecked() + `","size":12}}`) styleRedCenter, err := excel.NewStyle(&excelize.Style{ Alignment: 
&excelize.Alignment{ Horizontal: "center", @@ -209,11 +208,11 @@ func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) erro WrapText: false, }, Font: &excelize.Font{ - Color: colors.RgbHexColorLowRisk(), + Color: RgbHexColorLowRisk(), Size: 12, }, }) - // styleGreenCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"` + colors.RgbHexColorRiskStatusMitigated() + `","size":12}}`) + // styleGreenCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"` + RgbHexColorRiskStatusMitigated() + `","size":12}}`) styleGreenCenter, err := excel.NewStyle(&excelize.Style{ Alignment: &excelize.Alignment{ Horizontal: "center", @@ -221,11 +220,11 @@ func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) erro WrapText: false, }, Font: &excelize.Font{ - Color: colors.RgbHexColorRiskStatusMitigated(), + Color: RgbHexColorRiskStatusMitigated(), Size: 12, }, }) - // styleBlueCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"#` + colors.RgbHexColorRiskStatusInProgress() + `","size":12}}`) + // styleBlueCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"#` + RgbHexColorRiskStatusInProgress() + `","size":12}}`) styleBlueCenter, err := excel.NewStyle(&excelize.Style{ Alignment: &excelize.Alignment{ Horizontal: "center", @@ -233,11 +232,11 @@ func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) erro WrapText: false, }, Font: &excelize.Font{ - Color: colors.RgbHexColorRiskStatusInProgress(), + Color: RgbHexColorRiskStatusInProgress(), Size: 12, }, }) - // styleYellowCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"#` + colors.RgbHexColorRiskStatusAccepted() + 
`","size":12}}`) + // styleYellowCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"#` + RgbHexColorRiskStatusAccepted() + `","size":12}}`) styleYellowCenter, err := excel.NewStyle(&excelize.Style{ Alignment: &excelize.Alignment{ Horizontal: "center", @@ -245,11 +244,11 @@ func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) erro WrapText: false, }, Font: &excelize.Font{ - Color: colors.RgbHexColorRiskStatusAccepted(), + Color: RgbHexColorRiskStatusAccepted(), Size: 12, }, }) - // styleOrangeCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"#` + colors.RgbHexColorRiskStatusInDiscussion() + `","size":12}}`) + // styleOrangeCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"#` + RgbHexColorRiskStatusInDiscussion() + `","size":12}}`) styleOrangeCenter, err := excel.NewStyle(&excelize.Style{ Alignment: &excelize.Alignment{ Horizontal: "center", @@ -257,11 +256,11 @@ func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) erro WrapText: false, }, Font: &excelize.Font{ - Color: colors.RgbHexColorRiskStatusInDiscussion(), + Color: RgbHexColorRiskStatusInDiscussion(), Size: 12, }, }) - // styleGrayCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"#` + colors.RgbHexColorRiskStatusFalsePositive() + `","size":12}}`) + // styleGrayCenter, err := excel.NewStyle(`{"alignment":{"horizontal":"center","shrink_to_fit":true,"wrap_text":false},"font":{"color":"#` + RgbHexColorRiskStatusFalsePositive() + `","size":12}}`) styleGrayCenter, err := excel.NewStyle(&excelize.Style{ Alignment: &excelize.Alignment{ Horizontal: "center", @@ -269,7 +268,7 @@ func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) erro WrapText: false, }, Font: 
&excelize.Font{ - Color: colors.RgbHexColorRiskStatusFalsePositive(), + Color: RgbHexColorRiskStatusFalsePositive(), Size: 12, }, }) @@ -316,10 +315,10 @@ func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) erro Size: 10, }, }) - // styleGraySmall, err := excel.NewStyle(`{"font":{"color":"` + colors.RgbHexColorOutOfScope() + `","size":10}}`) + // styleGraySmall, err := excel.NewStyle(`{"font":{"color":"` + RgbHexColorOutOfScope() + `","size":10}}`) styleGraySmall, err := excel.NewStyle(&excelize.Style{ Font: &excelize.Font{ - Color: colors.RgbHexColorOutOfScope(), + Color: RgbHexColorOutOfScope(), Size: 10, }, }) @@ -336,10 +335,10 @@ func WriteRisksExcelToFile(parsedModel *types.ParsedModel, filename string) erro Bold: true, }, }) - // styleMitigation, err := excel.NewStyle(`{"font":{"color":"` + colors.RgbHexColorRiskStatusMitigated() + `","size":10}}`) + // styleMitigation, err := excel.NewStyle(`{"font":{"color":"` + RgbHexColorRiskStatusMitigated() + `","size":10}}`) styleMitigation, err := excel.NewStyle(&excelize.Style{ Font: &excelize.Font{ - Color: colors.RgbHexColorRiskStatusMitigated(), + Color: RgbHexColorRiskStatusMitigated(), Size: 10, }, }) diff --git a/pkg/report/graphviz.go b/pkg/report/graphviz.go index 4ca0aee9..0cdfa708 100644 --- a/pkg/report/graphviz.go +++ b/pkg/report/graphviz.go @@ -12,7 +12,6 @@ import ( "strconv" "strings" - "github.com/threagile/threagile/pkg/colors" "github.com/threagile/threagile/pkg/security/types" ) @@ -114,10 +113,10 @@ func WriteDataFlowDiagramGraphvizDOT(parsedModel *types.ParsedModel, ];`) } snippet.WriteString("\n subgraph cluster_" + hash(trustBoundary.Id) + " {\n") - color, fontColor, bgColor, style, fontname := colors.RgbHexColorTwilight(), colors.RgbHexColorTwilight() /*"#550E0C"*/, "#FAFAFA", "dashed", "Verdana" + color, fontColor, bgColor, style, fontname := RgbHexColorTwilight(), RgbHexColorTwilight() /*"#550E0C"*/, "#FAFAFA", "dashed", "Verdana" penWidth := 4.5 if 
len(trustBoundary.TrustBoundariesNested) > 0 { - //color, fontColor, style, fontname = colors.Blue, colors.Blue, "dashed", "Verdana" + //color, fontColor, style, fontname = Blue, Blue, "dashed", "Verdana" penWidth = 5.5 } if len(trustBoundary.ParentTrustBoundaryID(parsedModel)) > 0 { @@ -226,8 +225,8 @@ func WriteDataFlowDiagramGraphvizDOT(parsedModel *types.ParsedModel, dir = "both" } } - arrowStyle = ` style="` + dataFlow.DetermineArrowLineStyle() + `" penwidth="` + dataFlow.DetermineArrowPenWidth(parsedModel) + `" arrowtail="` + readOrWriteTail + `" arrowhead="` + readOrWriteHead + `" dir="` + dir + `" arrowsize="2.0" ` - arrowColor = ` color="` + dataFlow.DetermineArrowColor(parsedModel) + `"` + arrowStyle = ` style="` + determineArrowLineStyle(dataFlow) + `" penwidth="` + determineArrowPenWidth(dataFlow, parsedModel) + `" arrowtail="` + readOrWriteTail + `" arrowhead="` + readOrWriteHead + `" dir="` + dir + `" arrowsize="2.0" ` + arrowColor = ` color="` + determineArrowColor(dataFlow, parsedModel) + `"` tweaks := "" if dataFlow.DiagramTweakWeight > 0 { tweaks += " weight=\"" + strconv.Itoa(dataFlow.DiagramTweakWeight) + "\" " @@ -237,7 +236,7 @@ func WriteDataFlowDiagramGraphvizDOT(parsedModel *types.ParsedModel, dotContent.WriteString(" " + hash(sourceId) + " -> " + hash(targetId) + ` [` + arrowColor + ` ` + arrowStyle + tweaks + ` constraint=` + strconv.FormatBool(dataFlow.DiagramTweakConstraint) + ` `) if !parsedModel.DiagramTweakSuppressEdgeLabels { - dotContent.WriteString(` xlabel="` + encode(dataFlow.Protocol.String()) + `" fontcolor="` + dataFlow.DetermineLabelColor(parsedModel) + `" `) + dotContent.WriteString(` xlabel="` + encode(dataFlow.Protocol.String()) + `" fontcolor="` + determineLabelColor(dataFlow, parsedModel) + `" `) } dotContent.WriteString(" ];\n") } @@ -463,11 +462,11 @@ func makeDataAssetNode(parsedModel *types.ParsedModel, dataAsset types.DataAsset var color string switch 
dataAsset.IdentifiedDataBreachProbabilityStillAtRisk(parsedModel) { case types.Probable: - color = colors.RgbHexColorHighRisk() + color = RgbHexColorHighRisk() case types.Possible: - color = colors.RgbHexColorMediumRisk() + color = RgbHexColorMediumRisk() case types.Improbable: - color = colors.RgbHexColorLowRisk() + color = RgbHexColorLowRisk() default: color = "#444444" // since black is too dark here as fill color } @@ -479,20 +478,20 @@ func makeDataAssetNode(parsedModel *types.ParsedModel, dataAsset types.DataAsset func makeTechAssetNode(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset, simplified bool) string { if simplified { - color := colors.RgbHexColorOutOfScope() + color := RgbHexColorOutOfScope() if !technicalAsset.OutOfScope { generatedRisks := technicalAsset.GeneratedRisks(parsedModel) switch types.HighestSeverityStillAtRisk(parsedModel, generatedRisks) { case types.CriticalSeverity: - color = colors.RgbHexColorCriticalRisk() + color = RgbHexColorCriticalRisk() case types.HighSeverity: - color = colors.RgbHexColorHighRisk() + color = RgbHexColorHighRisk() case types.ElevatedSeverity: - color = colors.RgbHexColorElevatedRisk() + color = RgbHexColorElevatedRisk() case types.MediumSeverity: - color = colors.RgbHexColorMediumRisk() + color = RgbHexColorMediumRisk() case types.LowSeverity: - color = colors.RgbHexColorLowRisk() + color = RgbHexColorLowRisk() default: color = "#444444" // since black is too dark here as fill color } @@ -540,10 +539,10 @@ func makeTechAssetNode(parsedModel *types.ParsedModel, technicalAsset types.Tech } return " " + hash(technicalAsset.Id) + ` [ - label=<
` + lineBreak + technicalAsset.Technology.String() + `
` + technicalAsset.Size.String() + `
` + encode(title) + `
` + attackerAttractivenessLabel + `
> - shape=` + shape + ` style="` + technicalAsset.DetermineShapeBorderLineStyle() + `,` + technicalAsset.DetermineShapeStyle() + `" penwidth="` + technicalAsset.DetermineShapeBorderPenWidth(parsedModel) + `" fillcolor="` + technicalAsset.DetermineShapeFillColor(parsedModel) + `" - peripheries=` + strconv.Itoa(technicalAsset.DetermineShapePeripheries()) + ` - color="` + technicalAsset.DetermineShapeBorderColor(parsedModel) + "\"\n ]; " + label=<
` + lineBreak + technicalAsset.Technology.String() + `
` + technicalAsset.Size.String() + `
` + encode(title) + `
` + attackerAttractivenessLabel + `
> + shape=` + shape + ` style="` + determineShapeBorderLineStyle(technicalAsset) + `,` + determineShapeStyle(technicalAsset) + `" penwidth="` + determineShapeBorderPenWidth(technicalAsset, parsedModel) + `" fillcolor="` + determineShapeFillColor(technicalAsset, parsedModel) + `" + peripheries=` + strconv.Itoa(determineShapePeripheries(technicalAsset)) + ` + color="` + determineShapeBorderColor(technicalAsset, parsedModel) + "\"\n ]; " } } diff --git a/pkg/report/report.go b/pkg/report/report.go index 14808204..c9e4ca77 100644 --- a/pkg/report/report.go +++ b/pkg/report/report.go @@ -15,7 +15,6 @@ import ( "github.com/jung-kurt/gofpdf" "github.com/jung-kurt/gofpdf/contrib/gofpdi" - "github.com/threagile/threagile/pkg/colors" "github.com/threagile/threagile/pkg/docs" accidental_secret_leak "github.com/threagile/threagile/pkg/security/risks/built-in/accidental-secret-leak" code_backdooring "github.com/threagile/threagile/pkg/security/risks/built-in/code-backdooring" @@ -381,7 +380,7 @@ func (r *pdfReporter) createTableOfContents(parsedModel *types.ParsedModel) { } countStillAtRisk := len(types.ReduceToOnlyStillAtRisk(parsedModel, modelFailures)) if countStillAtRisk > 0 { - colors.ColorModelFailure(r.pdf) + ColorModelFailure(r.pdf) } r.pdf.Text(11, y, " "+"Potential Model Failures: "+strconv.Itoa(countStillAtRisk)+" / "+strconv.Itoa(count)+" "+risksStr) r.pdf.Text(175, y, "{model-failures}") @@ -396,7 +395,7 @@ func (r *pdfReporter) createTableOfContents(parsedModel *types.ParsedModel) { questions = "Question" } if questionsUnanswered(parsedModel) > 0 { - colors.ColorModelFailure(r.pdf) + ColorModelFailure(r.pdf) } r.pdf.Text(11, y, " "+"Questions: "+strconv.Itoa(questionsUnanswered(parsedModel))+" / "+strconv.Itoa(count)+" "+questions) r.pdf.Text(175, y, "{questions}") @@ -426,15 +425,15 @@ func (r *pdfReporter) createTableOfContents(parsedModel *types.ParsedModel) { newRisksStr := types.SortedRisksOfCategory(parsedModel, category) switch 
types.HighestSeverityStillAtRisk(parsedModel, newRisksStr) { case types.CriticalSeverity: - colors.ColorCriticalRisk(r.pdf) + ColorCriticalRisk(r.pdf) case types.HighSeverity: - colors.ColorHighRisk(r.pdf) + ColorHighRisk(r.pdf) case types.ElevatedSeverity: - colors.ColorElevatedRisk(r.pdf) + ColorElevatedRisk(r.pdf) case types.MediumSeverity: - colors.ColorMediumRisk(r.pdf) + ColorMediumRisk(r.pdf) case types.LowSeverity: - colors.ColorLowRisk(r.pdf) + ColorLowRisk(r.pdf) default: r.pdfColorBlack() } @@ -495,15 +494,15 @@ func (r *pdfReporter) createTableOfContents(parsedModel *types.ParsedModel) { } else { switch types.HighestSeverityStillAtRisk(parsedModel, newRisksStr) { case types.CriticalSeverity: - colors.ColorCriticalRisk(r.pdf) + ColorCriticalRisk(r.pdf) case types.HighSeverity: - colors.ColorHighRisk(r.pdf) + ColorHighRisk(r.pdf) case types.ElevatedSeverity: - colors.ColorElevatedRisk(r.pdf) + ColorElevatedRisk(r.pdf) case types.MediumSeverity: - colors.ColorMediumRisk(r.pdf) + ColorMediumRisk(r.pdf) case types.LowSeverity: - colors.ColorLowRisk(r.pdf) + ColorLowRisk(r.pdf) default: r.pdfColorBlack() } @@ -551,11 +550,11 @@ func (r *pdfReporter) createTableOfContents(parsedModel *types.ParsedModel) { } switch dataAsset.IdentifiedDataBreachProbabilityStillAtRisk(parsedModel) { case types.Probable: - colors.ColorHighRisk(r.pdf) + ColorHighRisk(r.pdf) case types.Possible: - colors.ColorMediumRisk(r.pdf) + ColorMediumRisk(r.pdf) case types.Improbable: - colors.ColorLowRisk(r.pdf) + ColorLowRisk(r.pdf) default: r.pdfColorBlack() } @@ -590,7 +589,7 @@ func (r *pdfReporter) createTableOfContents(parsedModel *types.ParsedModel) { r.pageBreakInLists() y = 40 } - colors.ColorTwilight(r.pdf) + ColorTwilight(r.pdf) if !trustBoundary.Type.IsNetworkBoundary() { r.pdfColorLightGray() } @@ -798,47 +797,47 @@ func (r *pdfReporter) createManagementSummary(parsedModel *types.ParsedModel, te r.pdf.CellFormat(17, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, "", 
"0", 0, "", false, 0, "") r.pdf.CellFormat(60, 6, "", "0", 0, "", false, 0, "") - colors.ColorRiskStatusUnchecked(r.pdf) + ColorRiskStatusUnchecked(r.pdf) r.pdf.CellFormat(23, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusUnchecked), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "unchecked", "0", 0, "", false, 0, "") r.pdf.Ln(-1) - colors.ColorCriticalRisk(r.pdf) + ColorCriticalRisk(r.pdf) r.pdf.CellFormat(17, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countCritical), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "critical risk", "0", 0, "", false, 0, "") - colors.ColorRiskStatusInDiscussion(r.pdf) + ColorRiskStatusInDiscussion(r.pdf) r.pdf.CellFormat(23, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusInDiscussion), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "in discussion", "0", 0, "", false, 0, "") r.pdf.Ln(-1) - colors.ColorHighRisk(r.pdf) + ColorHighRisk(r.pdf) r.pdf.CellFormat(17, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countHigh), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "high risk", "0", 0, "", false, 0, "") - colors.ColorRiskStatusAccepted(r.pdf) + ColorRiskStatusAccepted(r.pdf) r.pdf.CellFormat(23, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusAccepted), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "accepted", "0", 0, "", false, 0, "") r.pdf.Ln(-1) - colors.ColorElevatedRisk(r.pdf) + ColorElevatedRisk(r.pdf) r.pdf.CellFormat(17, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countElevated), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "elevated risk", "0", 0, "", false, 0, "") - colors.ColorRiskStatusInProgress(r.pdf) + ColorRiskStatusInProgress(r.pdf) r.pdf.CellFormat(23, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusInProgress), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "in progress", "0", 
0, "", false, 0, "") r.pdf.Ln(-1) - colors.ColorMediumRisk(r.pdf) + ColorMediumRisk(r.pdf) r.pdf.CellFormat(17, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countMedium), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "medium risk", "0", 0, "", false, 0, "") - colors.ColorRiskStatusMitigated(r.pdf) + ColorRiskStatusMitigated(r.pdf) r.pdf.CellFormat(23, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusMitigated), "0", 0, "R", false, 0, "") r.pdf.SetFont("Helvetica", "BI", fontSizeBody) @@ -846,11 +845,11 @@ func (r *pdfReporter) createManagementSummary(parsedModel *types.ParsedModel, te r.pdf.SetFont("Helvetica", "B", fontSizeBody) r.pdf.Ln(-1) - colors.ColorLowRisk(r.pdf) + ColorLowRisk(r.pdf) r.pdf.CellFormat(17, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countLow), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "low risk", "0", 0, "", false, 0, "") - colors.ColorRiskStatusFalsePositive(r.pdf) + ColorRiskStatusFalsePositive(r.pdf) r.pdf.CellFormat(23, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusFalsePositive), "0", 0, "R", false, 0, "") r.pdf.SetFont("Helvetica", "BI", fontSizeBody) @@ -867,28 +866,28 @@ func (r *pdfReporter) createManagementSummary(parsedModel *types.ParsedModel, te Values: []chart.Value{ {Value: float64(countLow), //Label: strconv.Itoa(countLow) + " Low", Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorLowRisk()).WithAlpha(98), - //FontColor: makeColor(colors.RgbHexColorLowRisk()), + FillColor: makeColor(RgbHexColorLowRisk()).WithAlpha(98), + //FontColor: makeColor(RgbHexColorLowRisk()), FontSize: 65}}, {Value: float64(countMedium), //Label: strconv.Itoa(countMedium) + " Medium", Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorMediumRisk()).WithAlpha(98), - //FontColor: makeColor(colors.RgbHexColorMediumRisk()), + FillColor: makeColor(RgbHexColorMediumRisk()).WithAlpha(98), + //FontColor: 
makeColor(RgbHexColorMediumRisk()), FontSize: 65}}, {Value: float64(countElevated), //Label: strconv.Itoa(countElevated) + " Elevated", Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorElevatedRisk()).WithAlpha(98), - //FontColor: makeColor(colors.RgbHexColorElevatedRisk()), + FillColor: makeColor(RgbHexColorElevatedRisk()).WithAlpha(98), + //FontColor: makeColor(RgbHexColorElevatedRisk()), FontSize: 65}}, {Value: float64(countHigh), //Label: strconv.Itoa(countHigh) + " High", Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorHighRisk()).WithAlpha(98), - //FontColor: makeColor(colors.RgbHexColorHighRisk()), + FillColor: makeColor(RgbHexColorHighRisk()).WithAlpha(98), + //FontColor: makeColor(RgbHexColorHighRisk()), FontSize: 65}}, {Value: float64(countCritical), //Label: strconv.Itoa(countCritical) + " Critical", Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorCriticalRisk()).WithAlpha(98), - //FontColor: makeColor(colors.RgbHexColorCriticalRisk()), + FillColor: makeColor(RgbHexColorCriticalRisk()).WithAlpha(98), + //FontColor: makeColor(RgbHexColorCriticalRisk()), FontSize: 65}}, }, } @@ -900,33 +899,33 @@ func (r *pdfReporter) createManagementSummary(parsedModel *types.ParsedModel, te Values: []chart.Value{ {Value: float64(countStatusFalsePositive), //Label: strconv.Itoa(countStatusFalsePositive) + " False Positive", Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), - //FontColor: makeColor(colors.RgbHexColorRiskStatusFalsePositive()), + FillColor: makeColor(RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), + //FontColor: makeColor(RgbHexColorRiskStatusFalsePositive()), FontSize: 65}}, {Value: float64(countStatusMitigated), //Label: strconv.Itoa(countStatusMitigated) + " Mitigated", Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorRiskStatusMitigated()).WithAlpha(98), - //FontColor: makeColor(colors.RgbHexColorRiskStatusMitigated()), + FillColor: 
makeColor(RgbHexColorRiskStatusMitigated()).WithAlpha(98), + //FontColor: makeColor(RgbHexColorRiskStatusMitigated()), FontSize: 65}}, {Value: float64(countStatusInProgress), //Label: strconv.Itoa(countStatusInProgress) + " InProgress", Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorRiskStatusInProgress()).WithAlpha(98), - //FontColor: makeColor(colors.RgbHexColorRiskStatusInProgress()), + FillColor: makeColor(RgbHexColorRiskStatusInProgress()).WithAlpha(98), + //FontColor: makeColor(RgbHexColorRiskStatusInProgress()), FontSize: 65}}, {Value: float64(countStatusAccepted), //Label: strconv.Itoa(countStatusAccepted) + " Accepted", Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorRiskStatusAccepted()).WithAlpha(98), - //FontColor: makeColor(colors.RgbHexColorRiskStatusAccepted()), + FillColor: makeColor(RgbHexColorRiskStatusAccepted()).WithAlpha(98), + //FontColor: makeColor(RgbHexColorRiskStatusAccepted()), FontSize: 65}}, {Value: float64(countStatusInDiscussion), //Label: strconv.Itoa(countStatusInDiscussion) + " InDiscussion", Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), - //FontColor: makeColor(colors.RgbHexColorRiskStatusInDiscussion()), + FillColor: makeColor(RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), + //FontColor: makeColor(RgbHexColorRiskStatusInDiscussion()), FontSize: 65}}, {Value: float64(countStatusUnchecked), //Label: strconv.Itoa(countStatusUnchecked) + " Unchecked", Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorRiskStatusUnchecked()).WithAlpha(98), - //FontColor: makeColor(colors.RgbHexColorRiskStatusUnchecked()), + FillColor: makeColor(RgbHexColorRiskStatusUnchecked()).WithAlpha(98), + //FontColor: makeColor(RgbHexColorRiskStatusUnchecked()), FontSize: 65}}, }, } @@ -987,17 +986,17 @@ func (r *pdfReporter) createRiskMitigationStatus(parsedModel *types.ParsedModel, Width: 130, Values: []chart.Value{ {Value: 
float64(len(types.ReduceToOnlyRiskTrackingUnchecked(parsedModel, risksLow))), Label: types.Unchecked.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksLow))), Label: types.InDiscussion.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksLow))), Label: types.Accepted.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksLow))), Label: types.InProgress.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksLow))), Label: types.Mitigated.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: 
float64(len(types.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksLow))), Label: types.FalsePositive.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, }, }, { @@ -1005,17 +1004,17 @@ func (r *pdfReporter) createRiskMitigationStatus(parsedModel *types.ParsedModel, Width: 130, Values: []chart.Value{ {Value: float64(len(types.ReduceToOnlyRiskTrackingUnchecked(parsedModel, risksMedium))), Label: types.Unchecked.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksMedium))), Label: types.InDiscussion.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksMedium))), Label: types.Accepted.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksMedium))), Label: types.InProgress.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: 
drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksMedium))), Label: types.Mitigated.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksMedium))), Label: types.FalsePositive.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, }, }, { @@ -1023,17 +1022,17 @@ func (r *pdfReporter) createRiskMitigationStatus(parsedModel *types.ParsedModel, Width: 130, Values: []chart.Value{ {Value: float64(len(types.ReduceToOnlyRiskTrackingUnchecked(parsedModel, risksElevated))), Label: types.Unchecked.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksElevated))), Label: types.InDiscussion.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: 
float64(len(types.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksElevated))), Label: types.Accepted.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksElevated))), Label: types.InProgress.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksElevated))), Label: types.Mitigated.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksElevated))), Label: types.FalsePositive.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, }, }, { @@ -1041,17 +1040,17 @@ func (r *pdfReporter) createRiskMitigationStatus(parsedModel *types.ParsedModel, Width: 130, Values: []chart.Value{ {Value: float64(len(types.ReduceToOnlyRiskTrackingUnchecked(parsedModel, risksHigh))), Label: types.Unchecked.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: 
drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksHigh))), Label: types.InDiscussion.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksHigh))), Label: types.Accepted.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksHigh))), Label: types.InProgress.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksHigh))), Label: types.Mitigated.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksHigh))), Label: types.FalsePositive.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), 
StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, }, }, { @@ -1059,17 +1058,17 @@ func (r *pdfReporter) createRiskMitigationStatus(parsedModel *types.ParsedModel, Width: 130, Values: []chart.Value{ {Value: float64(len(types.ReduceToOnlyRiskTrackingUnchecked(parsedModel, risksCritical))), Label: types.Unchecked.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusUnchecked()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingInDiscussion(parsedModel, risksCritical))), Label: types.InDiscussion.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusInDiscussion()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingAccepted(parsedModel, risksCritical))), Label: types.Accepted.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusAccepted()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingInProgress(parsedModel, risksCritical))), Label: types.InProgress.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusInProgress()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: 
float64(len(types.ReduceToOnlyRiskTrackingMitigated(parsedModel, risksCritical))), Label: types.Mitigated.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusMitigated()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, {Value: float64(len(types.ReduceToOnlyRiskTrackingFalsePositive(parsedModel, risksCritical))), Label: types.FalsePositive.Title(), - Style: chart.Style{FillColor: makeColor(colors.RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, + Style: chart.Style{FillColor: makeColor(RgbHexColorRiskStatusFalsePositive()).WithAlpha(98), StrokeColor: drawing.ColorFromHex("999")}}, }, }, }, @@ -1093,34 +1092,34 @@ func (r *pdfReporter) createRiskMitigationStatus(parsedModel *types.ParsedModel, r.pdf.SetFont("Helvetica", "B", fontSizeBody) r.pdf.Ln(20) - colors.ColorRiskStatusUnchecked(r.pdf) + ColorRiskStatusUnchecked(r.pdf) r.pdf.CellFormat(150, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusUnchecked), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "unchecked", "0", 0, "", false, 0, "") r.pdf.Ln(-1) - colors.ColorRiskStatusInDiscussion(r.pdf) + ColorRiskStatusInDiscussion(r.pdf) r.pdf.CellFormat(150, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusInDiscussion), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "in discussion", "0", 0, "", false, 0, "") r.pdf.Ln(-1) - colors.ColorRiskStatusAccepted(r.pdf) + ColorRiskStatusAccepted(r.pdf) r.pdf.CellFormat(150, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusAccepted), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "accepted", "0", 0, "", false, 0, "") r.pdf.Ln(-1) - colors.ColorRiskStatusInProgress(r.pdf) + ColorRiskStatusInProgress(r.pdf) r.pdf.CellFormat(150, 6, "", "0", 0, "", false, 0, 
"") r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusInProgress), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "in progress", "0", 0, "", false, 0, "") r.pdf.Ln(-1) - colors.ColorRiskStatusMitigated(r.pdf) + ColorRiskStatusMitigated(r.pdf) r.pdf.CellFormat(150, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusMitigated), "0", 0, "R", false, 0, "") r.pdf.SetFont("Helvetica", "BI", fontSizeBody) r.pdf.CellFormat(60, 6, "mitigated", "0", 0, "", false, 0, "") r.pdf.SetFont("Helvetica", "B", fontSizeBody) r.pdf.Ln(-1) - colors.ColorRiskStatusFalsePositive(r.pdf) + ColorRiskStatusFalsePositive(r.pdf) r.pdf.CellFormat(150, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countStatusFalsePositive), "0", 0, "R", false, 0, "") r.pdf.SetFont("Helvetica", "BI", fontSizeBody) @@ -1157,28 +1156,28 @@ func (r *pdfReporter) createRiskMitigationStatus(parsedModel *types.ParsedModel, Values: []chart.Value{ {Value: float64(countLow), //Label: strconv.Itoa(countLow) + " Low", Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorLowRisk()).WithAlpha(98), - //FontColor: makeColor(colors.RgbHexColorLowRisk()), + FillColor: makeColor(RgbHexColorLowRisk()).WithAlpha(98), + //FontColor: makeColor(RgbHexColorLowRisk()), FontSize: 65}}, {Value: float64(countMedium), //Label: strconv.Itoa(countMedium) + " Medium", Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorMediumRisk()).WithAlpha(98), - //FontColor: makeColor(colors.RgbHexColorMediumRisk()), + FillColor: makeColor(RgbHexColorMediumRisk()).WithAlpha(98), + //FontColor: makeColor(RgbHexColorMediumRisk()), FontSize: 65}}, {Value: float64(countElevated), //Label: strconv.Itoa(countElevated) + " Elevated", Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorElevatedRisk()).WithAlpha(98), - //FontColor: makeColor(colors.RgbHexColorElevatedRisk()), + FillColor: makeColor(RgbHexColorElevatedRisk()).WithAlpha(98), + //FontColor: 
makeColor(RgbHexColorElevatedRisk()), FontSize: 65}}, {Value: float64(countHigh), //Label: strconv.Itoa(countHigh) + " High", Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorHighRisk()).WithAlpha(98), - //FontColor: makeColor(colors.RgbHexColorHighRisk()), + FillColor: makeColor(RgbHexColorHighRisk()).WithAlpha(98), + //FontColor: makeColor(RgbHexColorHighRisk()), FontSize: 65}}, {Value: float64(countCritical), //Label: strconv.Itoa(countCritical) + " Critical", Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorCriticalRisk()).WithAlpha(98), - //FontColor: makeColor(colors.RgbHexColorCriticalRisk()), + FillColor: makeColor(RgbHexColorCriticalRisk()).WithAlpha(98), + //FontColor: makeColor(RgbHexColorCriticalRisk()), FontSize: 65}}, }, } @@ -1189,19 +1188,19 @@ func (r *pdfReporter) createRiskMitigationStatus(parsedModel *types.ParsedModel, Values: []chart.Value{ {Value: float64(countBusinessSide), Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorBusiness()).WithAlpha(98), + FillColor: makeColor(RgbHexColorBusiness()).WithAlpha(98), FontSize: 65}}, {Value: float64(countArchitecture), Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorArchitecture()).WithAlpha(98), + FillColor: makeColor(RgbHexColorArchitecture()).WithAlpha(98), FontSize: 65}}, {Value: float64(countDevelopment), Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorDevelopment()).WithAlpha(98), + FillColor: makeColor(RgbHexColorDevelopment()).WithAlpha(98), FontSize: 65}}, {Value: float64(countOperation), Style: chart.Style{ - FillColor: makeColor(colors.RgbHexColorOperation()).WithAlpha(98), + FillColor: makeColor(RgbHexColorOperation()).WithAlpha(98), FontSize: 65}}, }, } @@ -1212,7 +1211,7 @@ func (r *pdfReporter) createRiskMitigationStatus(parsedModel *types.ParsedModel, r.pdf.SetFont("Helvetica", "B", fontSizeBody) r.pdf.Ln(8) - colors.ColorCriticalRisk(r.pdf) + ColorCriticalRisk(r.pdf) r.pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") 
r.pdf.CellFormat(10, 6, strconv.Itoa(countCritical), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "unmitigated critical risk", "0", 0, "", false, 0, "") @@ -1220,38 +1219,38 @@ func (r *pdfReporter) createRiskMitigationStatus(parsedModel *types.ParsedModel, r.pdf.CellFormat(10, 6, "", "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "", "0", 0, "", false, 0, "") r.pdf.Ln(-1) - colors.ColorHighRisk(r.pdf) + ColorHighRisk(r.pdf) r.pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countHigh), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "unmitigated high risk", "0", 0, "", false, 0, "") - colors.ColorBusiness(r.pdf) + ColorBusiness(r.pdf) r.pdf.CellFormat(22, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countBusinessSide), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "business side related", "0", 0, "", false, 0, "") r.pdf.Ln(-1) - colors.ColorElevatedRisk(r.pdf) + ColorElevatedRisk(r.pdf) r.pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countElevated), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "unmitigated elevated risk", "0", 0, "", false, 0, "") - colors.ColorArchitecture(r.pdf) + ColorArchitecture(r.pdf) r.pdf.CellFormat(22, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countArchitecture), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "architecture related", "0", 0, "", false, 0, "") r.pdf.Ln(-1) - colors.ColorMediumRisk(r.pdf) + ColorMediumRisk(r.pdf) r.pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countMedium), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "unmitigated medium risk", "0", 0, "", false, 0, "") - colors.ColorDevelopment(r.pdf) + ColorDevelopment(r.pdf) r.pdf.CellFormat(22, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countDevelopment), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "development related", "0", 0, 
"", false, 0, "") r.pdf.Ln(-1) - colors.ColorLowRisk(r.pdf) + ColorLowRisk(r.pdf) r.pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countLow), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "unmitigated low risk", "0", 0, "", false, 0, "") - colors.ColorOperation(r.pdf) + ColorOperation(r.pdf) r.pdf.CellFormat(22, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(10, 6, strconv.Itoa(countOperation), "0", 0, "R", false, 0, "") r.pdf.CellFormat(60, 6, "operations related", "0", 0, "", false, 0, "") @@ -1458,7 +1457,7 @@ func (r *pdfReporter) createModelFailures(parsedModel *types.ParsedModel) { } countStillAtRisk := len(types.ReduceToOnlyStillAtRisk(parsedModel, modelFailures)) if countStillAtRisk > 0 { - colors.ColorModelFailure(r.pdf) + ColorModelFailure(r.pdf) } chapTitle := "Potential Model Failures: " + strconv.Itoa(countStillAtRisk) + " / " + strconv.Itoa(count) + " " + risksStr r.addHeadline(chapTitle, false) @@ -1531,11 +1530,11 @@ func (r *pdfReporter) createRAA(parsedModel *types.ParsedModel, introTextRAA str newRisksStr := technicalAsset.GeneratedRisks(parsedModel) switch types.HighestSeverityStillAtRisk(parsedModel, newRisksStr) { case types.HighSeverity: - colors.ColorHighRisk(r.pdf) + ColorHighRisk(r.pdf) case types.MediumSeverity: - colors.ColorMediumRisk(r.pdf) + ColorMediumRisk(r.pdf) case types.LowSeverity: - colors.ColorLowRisk(r.pdf) + ColorLowRisk(r.pdf) default: r.pdfColorBlack() } @@ -1609,11 +1608,11 @@ func createDataRiskQuickWins() { risks := technicalAsset.GeneratedRisks() switch model.HighestSeverityStillAtRisk(risks) { case model.High: - colors.ColorHighRisk(r.pdf) + ColorHighRisk(r.pdf) case model.Medium: - colors.ColorMediumRisk(r.pdf) + ColorMediumRisk(r.pdf) case model.Low: - colors.ColorLowRisk(r.pdf) + ColorLowRisk(r.pdf) default: r.pdfColorBlack() } @@ -1666,19 +1665,19 @@ func (r *pdfReporter) addCategories(parsedModel *types.ParsedModel, riskCategori var prefix string switch severity { 
case types.CriticalSeverity: - colors.ColorCriticalRisk(r.pdf) + ColorCriticalRisk(r.pdf) prefix = "Critical: " case types.HighSeverity: - colors.ColorHighRisk(r.pdf) + ColorHighRisk(r.pdf) prefix = "High: " case types.ElevatedSeverity: - colors.ColorElevatedRisk(r.pdf) + ColorElevatedRisk(r.pdf) prefix = "Elevated: " case types.MediumSeverity: - colors.ColorMediumRisk(r.pdf) + ColorMediumRisk(r.pdf) prefix = "Medium: " case types.LowSeverity: - colors.ColorLowRisk(r.pdf) + ColorLowRisk(r.pdf) prefix = "Low: " default: r.pdfColorBlack() @@ -1686,15 +1685,15 @@ func (r *pdfReporter) addCategories(parsedModel *types.ParsedModel, riskCategori } switch types.HighestSeverityStillAtRisk(parsedModel, risksStr) { case types.CriticalSeverity: - colors.ColorCriticalRisk(r.pdf) + ColorCriticalRisk(r.pdf) case types.HighSeverity: - colors.ColorHighRisk(r.pdf) + ColorHighRisk(r.pdf) case types.ElevatedSeverity: - colors.ColorElevatedRisk(r.pdf) + ColorElevatedRisk(r.pdf) case types.MediumSeverity: - colors.ColorMediumRisk(r.pdf) + ColorMediumRisk(r.pdf) case types.LowSeverity: - colors.ColorLowRisk(r.pdf) + ColorLowRisk(r.pdf) } if len(types.ReduceToOnlyStillAtRisk(parsedModel, risksStr)) == 0 { r.pdfColorBlack() @@ -2192,7 +2191,7 @@ func (r *pdfReporter) createQuestions(parsedModel *types.ParsedModel) { questions = "Question" } if questionsUnanswered(parsedModel) > 0 { - colors.ColorModelFailure(r.pdf) + ColorModelFailure(r.pdf) } chapTitle := "Questions: " + strconv.Itoa(questionsUnanswered(parsedModel)) + " / " + strconv.Itoa(count) + " " + questions r.addHeadline(chapTitle, false) @@ -2222,7 +2221,7 @@ func (r *pdfReporter) createQuestions(parsedModel *types.ParsedModel) { html.Write(5, ""+uni(question)+"
") html.Write(5, ""+uni(strings.TrimSpace(answer))+"") } else { - colors.ColorModelFailure(r.pdf) + ColorModelFailure(r.pdf) html.Write(5, ""+uni(question)+"
") r.pdfColorLightGray() html.Write(5, "- answer pending -") @@ -2353,15 +2352,15 @@ func (r *pdfReporter) createRiskCategories(parsedModel *types.ParsedModel) { // category color switch types.HighestSeverityStillAtRisk(parsedModel, risksStr) { case types.CriticalSeverity: - colors.ColorCriticalRisk(r.pdf) + ColorCriticalRisk(r.pdf) case types.HighSeverity: - colors.ColorHighRisk(r.pdf) + ColorHighRisk(r.pdf) case types.ElevatedSeverity: - colors.ColorElevatedRisk(r.pdf) + ColorElevatedRisk(r.pdf) case types.MediumSeverity: - colors.ColorMediumRisk(r.pdf) + ColorMediumRisk(r.pdf) case types.LowSeverity: - colors.ColorLowRisk(r.pdf) + ColorLowRisk(r.pdf) default: r.pdfColorBlack() } @@ -2398,12 +2397,12 @@ func (r *pdfReporter) createRiskCategories(parsedModel *types.ParsedModel) { text.WriteString(category.RiskAssessment) html.Write(5, text.String()) text.Reset() - colors.ColorRiskStatusFalsePositive(r.pdf) + ColorRiskStatusFalsePositive(r.pdf) text.WriteString("


False Positives

") text.WriteString(category.FalsePositives) html.Write(5, text.String()) text.Reset() - colors.ColorRiskStatusMitigated(r.pdf) + ColorRiskStatusMitigated(r.pdf) text.WriteString("


Mitigation (" + category.Function.Title() + "): " + category.Action + "

") text.WriteString(category.Mitigation) @@ -2464,7 +2463,7 @@ func (r *pdfReporter) createRiskCategories(parsedModel *types.ParsedModel) { } switch risk.Severity { case types.CriticalSeverity: - colors.ColorCriticalRisk(r.pdf) + ColorCriticalRisk(r.pdf) if !headlineCriticalWritten { r.pdf.SetFont("Helvetica", "", fontSizeBody) r.pdf.SetLeftMargin(oldLeft) @@ -2474,7 +2473,7 @@ func (r *pdfReporter) createRiskCategories(parsedModel *types.ParsedModel) { headlineCriticalWritten = true } case types.HighSeverity: - colors.ColorHighRisk(r.pdf) + ColorHighRisk(r.pdf) if !headlineHighWritten { r.pdf.SetFont("Helvetica", "", fontSizeBody) r.pdf.SetLeftMargin(oldLeft) @@ -2484,7 +2483,7 @@ func (r *pdfReporter) createRiskCategories(parsedModel *types.ParsedModel) { headlineHighWritten = true } case types.ElevatedSeverity: - colors.ColorElevatedRisk(r.pdf) + ColorElevatedRisk(r.pdf) if !headlineElevatedWritten { r.pdf.SetFont("Helvetica", "", fontSizeBody) r.pdf.SetLeftMargin(oldLeft) @@ -2494,7 +2493,7 @@ func (r *pdfReporter) createRiskCategories(parsedModel *types.ParsedModel) { headlineElevatedWritten = true } case types.MediumSeverity: - colors.ColorMediumRisk(r.pdf) + ColorMediumRisk(r.pdf) if !headlineMediumWritten { r.pdf.SetFont("Helvetica", "", fontSizeBody) r.pdf.SetLeftMargin(oldLeft) @@ -2504,7 +2503,7 @@ func (r *pdfReporter) createRiskCategories(parsedModel *types.ParsedModel) { headlineMediumWritten = true } case types.LowSeverity: - colors.ColorLowRisk(r.pdf) + ColorLowRisk(r.pdf) if !headlineLowWritten { r.pdf.SetFont("Helvetica", "", fontSizeBody) r.pdf.SetLeftMargin(oldLeft) @@ -2553,17 +2552,17 @@ func (r *pdfReporter) writeRiskTrackingStatus(parsedModel *types.ParsedModel, ri r.pdf.CellFormat(10, 6, "", "0", 0, "", false, 0, "") switch tracking.Status { case types.Unchecked: - colors.ColorRiskStatusUnchecked(r.pdf) + ColorRiskStatusUnchecked(r.pdf) case types.InDiscussion: - colors.ColorRiskStatusInDiscussion(r.pdf) + ColorRiskStatusInDiscussion(r.pdf) 
case types.Accepted: - colors.ColorRiskStatusAccepted(r.pdf) + ColorRiskStatusAccepted(r.pdf) case types.InProgress: - colors.ColorRiskStatusInProgress(r.pdf) + ColorRiskStatusInProgress(r.pdf) case types.Mitigated: - colors.ColorRiskStatusMitigated(r.pdf) + ColorRiskStatusMitigated(r.pdf) case types.FalsePositive: - colors.ColorRiskStatusFalsePositive(r.pdf) + ColorRiskStatusFalsePositive(r.pdf) default: r.pdfColorBlack() } @@ -2628,15 +2627,15 @@ func (r *pdfReporter) createTechnicalAssets(parsedModel *types.ParsedModel) { } else { switch types.HighestSeverityStillAtRisk(parsedModel, risksStr) { case types.CriticalSeverity: - colors.ColorCriticalRisk(r.pdf) + ColorCriticalRisk(r.pdf) case types.HighSeverity: - colors.ColorHighRisk(r.pdf) + ColorHighRisk(r.pdf) case types.ElevatedSeverity: - colors.ColorElevatedRisk(r.pdf) + ColorElevatedRisk(r.pdf) case types.MediumSeverity: - colors.ColorMediumRisk(r.pdf) + ColorMediumRisk(r.pdf) case types.LowSeverity: - colors.ColorLowRisk(r.pdf) + ColorLowRisk(r.pdf) default: r.pdfColorBlack() } @@ -2698,7 +2697,7 @@ func (r *pdfReporter) createTechnicalAssets(parsedModel *types.ParsedModel) { } switch risk.Severity { case types.CriticalSeverity: - colors.ColorCriticalRisk(r.pdf) + ColorCriticalRisk(r.pdf) if !headlineCriticalWritten { r.pdf.SetFont("Helvetica", "", fontSizeBody) r.pdf.SetLeftMargin(oldLeft + 3) @@ -2706,7 +2705,7 @@ func (r *pdfReporter) createTechnicalAssets(parsedModel *types.ParsedModel) { headlineCriticalWritten = true } case types.HighSeverity: - colors.ColorHighRisk(r.pdf) + ColorHighRisk(r.pdf) if !headlineHighWritten { r.pdf.SetFont("Helvetica", "", fontSizeBody) r.pdf.SetLeftMargin(oldLeft + 3) @@ -2714,7 +2713,7 @@ func (r *pdfReporter) createTechnicalAssets(parsedModel *types.ParsedModel) { headlineHighWritten = true } case types.ElevatedSeverity: - colors.ColorElevatedRisk(r.pdf) + ColorElevatedRisk(r.pdf) if !headlineElevatedWritten { r.pdf.SetFont("Helvetica", "", fontSizeBody) 
r.pdf.SetLeftMargin(oldLeft + 3) @@ -2722,7 +2721,7 @@ func (r *pdfReporter) createTechnicalAssets(parsedModel *types.ParsedModel) { headlineElevatedWritten = true } case types.MediumSeverity: - colors.ColorMediumRisk(r.pdf) + ColorMediumRisk(r.pdf) if !headlineMediumWritten { r.pdf.SetFont("Helvetica", "", fontSizeBody) r.pdf.SetLeftMargin(oldLeft + 3) @@ -2730,7 +2729,7 @@ func (r *pdfReporter) createTechnicalAssets(parsedModel *types.ParsedModel) { headlineMediumWritten = true } case types.LowSeverity: - colors.ColorLowRisk(r.pdf) + ColorLowRisk(r.pdf) if !headlineLowWritten { r.pdf.SetFont("Helvetica", "", fontSizeBody) r.pdf.SetLeftMargin(oldLeft + 3) @@ -3436,11 +3435,11 @@ func (r *pdfReporter) createDataAssets(parsedModel *types.ParsedModel) { r.pdfColorBlack() switch dataAsset.IdentifiedDataBreachProbabilityStillAtRisk(parsedModel) { case types.Probable: - colors.ColorHighRisk(r.pdf) + ColorHighRisk(r.pdf) case types.Possible: - colors.ColorMediumRisk(r.pdf) + ColorMediumRisk(r.pdf) case types.Improbable: - colors.ColorLowRisk(r.pdf) + ColorLowRisk(r.pdf) default: r.pdfColorBlack() } @@ -3471,11 +3470,11 @@ func (r *pdfReporter) createDataAssets(parsedModel *types.ParsedModel) { dataBreachText := probability.String() switch probability { case model.Probable: - colors.ColorHighRisk(r.pdf) + ColorHighRisk(r.pdf) case model.Possible: - colors.ColorMediumRisk(r.pdf) + ColorMediumRisk(r.pdf) case model.Improbable: - colors.ColorLowRisk(r.pdf) + ColorLowRisk(r.pdf) default: r.pdfColorBlack() } @@ -3718,11 +3717,11 @@ func (r *pdfReporter) createDataAssets(parsedModel *types.ParsedModel) { } switch model.HighestSeverityStillAtRisk(techAssetResponsible.GeneratedRisks()) { case model.High: - colors.ColorHighRisk(r.pdf) + ColorHighRisk(r.pdf) case model.Medium: - colors.ColorMediumRisk(r.pdf) + ColorMediumRisk(r.pdf) case model.Low: - colors.ColorLowRisk(r.pdf) + ColorLowRisk(r.pdf) default: r.pdfColorBlack() } @@ -3756,11 +3755,11 @@ func (r *pdfReporter) 
createDataAssets(parsedModel *types.ParsedModel) { riskText := dataBreachProbability.String() switch dataBreachProbability { case types.Probable: - colors.ColorHighRisk(r.pdf) + ColorHighRisk(r.pdf) case types.Possible: - colors.ColorMediumRisk(r.pdf) + ColorMediumRisk(r.pdf) case types.Improbable: - colors.ColorLowRisk(r.pdf) + ColorLowRisk(r.pdf) default: r.pdfColorBlack() } @@ -3803,11 +3802,11 @@ func (r *pdfReporter) createDataAssets(parsedModel *types.ParsedModel) { } switch dataBreachRisk.DataBreachProbability { case types.Probable: - colors.ColorHighRisk(r.pdf) + ColorHighRisk(r.pdf) case types.Possible: - colors.ColorMediumRisk(r.pdf) + ColorMediumRisk(r.pdf) case types.Improbable: - colors.ColorLowRisk(r.pdf) + ColorLowRisk(r.pdf) default: r.pdfColorBlack() } @@ -3847,7 +3846,7 @@ func (r *pdfReporter) createTrustBoundaries(parsedModel *types.ParsedModel) { } else { html.Write(5, "


") } - colors.ColorTwilight(r.pdf) + ColorTwilight(r.pdf) if !trustBoundary.Type.IsNetworkBoundary() { r.pdfColorLightGray() } @@ -3871,7 +3870,7 @@ func (r *pdfReporter) createTrustBoundaries(parsedModel *types.ParsedModel) { r.pdfColorGray() r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") r.pdf.CellFormat(40, 6, "Type:", "0", 0, "", false, 0, "") - colors.ColorTwilight(r.pdf) + ColorTwilight(r.pdf) if !trustBoundary.Type.IsNetworkBoundary() { r.pdfColorLightGray() } diff --git a/pkg/security/types/communication_link.go b/pkg/security/types/communication_link.go index 9adae059..5576e68c 100644 --- a/pkg/security/types/communication_link.go +++ b/pkg/security/types/communication_link.go @@ -5,10 +5,7 @@ Copyright © 2023 NAME HERE package types import ( - "fmt" "sort" - - "github.com/threagile/threagile/pkg/colors" ) type CommunicationLink struct { @@ -130,137 +127,6 @@ func (what CommunicationLink) IsBidirectional() bool { return len(what.DataAssetsSent) > 0 && len(what.DataAssetsReceived) > 0 } -// === Style stuff ======================================= - -// Line Styles: - -// dotted when model forgery attempt (i.e. nothing being sent and received) - -func (what CommunicationLink) DetermineArrowLineStyle() string { - if len(what.DataAssetsSent) == 0 && len(what.DataAssetsReceived) == 0 { - return "dotted" // dotted, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... 
- } - if what.Usage == DevOps { - return "dashed" - } - return "solid" -} - -// Pen Widths: - -func (what CommunicationLink) DetermineArrowPenWidth(parsedModel *ParsedModel) string { - if what.DetermineArrowColor(parsedModel) == colors.Pink { - return fmt.Sprintf("%f", 3.0) - } - if what.DetermineArrowColor(parsedModel) != colors.Black { - return fmt.Sprintf("%f", 2.5) - } - return fmt.Sprintf("%f", 1.5) -} - -func (what CommunicationLink) DetermineLabelColor(parsedModel *ParsedModel) string { - // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here - /* - if dataFlow.Protocol.IsEncrypted() { - return colors.Gray - } else {*/ - // check for red - for _, sentDataAsset := range what.DataAssetsSent { - if parsedModel.DataAssets[sentDataAsset].Integrity == MissionCritical { - return colors.Red - } - } - for _, receivedDataAsset := range what.DataAssetsReceived { - if parsedModel.DataAssets[receivedDataAsset].Integrity == MissionCritical { - return colors.Red - } - } - // check for amber - for _, sentDataAsset := range what.DataAssetsSent { - if parsedModel.DataAssets[sentDataAsset].Integrity == Critical { - return colors.Amber - } - } - for _, receivedDataAsset := range what.DataAssetsReceived { - if parsedModel.DataAssets[receivedDataAsset].Integrity == Critical { - return colors.Amber - } - } - // default - return colors.Gray - -} - -// pink when model forgery attempt (i.e. nothing being sent and received) - -func (what CommunicationLink) DetermineArrowColor(parsedModel *ParsedModel) string { - // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here - if len(what.DataAssetsSent) == 0 && len(what.DataAssetsReceived) == 0 || - what.Protocol == UnknownProtocol { - return colors.Pink // pink, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... 
- } - if what.Usage == DevOps { - return colors.MiddleLightGray - } else if what.VPN { - return colors.DarkBlue - } else if what.IpFiltered { - return colors.Brown - } - // check for red - for _, sentDataAsset := range what.DataAssetsSent { - if parsedModel.DataAssets[sentDataAsset].Confidentiality == StrictlyConfidential { - return colors.Red - } - } - for _, receivedDataAsset := range what.DataAssetsReceived { - if parsedModel.DataAssets[receivedDataAsset].Confidentiality == StrictlyConfidential { - return colors.Red - } - } - // check for amber - for _, sentDataAsset := range what.DataAssetsSent { - if parsedModel.DataAssets[sentDataAsset].Confidentiality == Confidential { - return colors.Amber - } - } - for _, receivedDataAsset := range what.DataAssetsReceived { - if parsedModel.DataAssets[receivedDataAsset].Confidentiality == Confidential { - return colors.Amber - } - } - // default - return colors.Black - /* - } else if dataFlow.Authentication != NoneAuthentication { - return colors.Black - } else { - // check for red - for _, sentDataAsset := range dataFlow.DataAssetsSent { // first check if any red? - if ParsedModelRoot.DataAssets[sentDataAsset].Integrity == MissionCritical { - return colors.Red - } - } - for _, receivedDataAsset := range dataFlow.DataAssetsReceived { // first check if any red? - if ParsedModelRoot.DataAssets[receivedDataAsset].Integrity == MissionCritical { - return colors.Red - } - } - // check for amber - for _, sentDataAsset := range dataFlow.DataAssetsSent { // then check if any amber? - if ParsedModelRoot.DataAssets[sentDataAsset].Integrity == Critical { - return colors.Amber - } - } - for _, receivedDataAsset := range dataFlow.DataAssetsReceived { // then check if any amber? 
- if ParsedModelRoot.DataAssets[receivedDataAsset].Integrity == Critical { - return colors.Amber - } - } - return colors.Black - } - */ -} - type ByTechnicalCommunicationLinkIdSort []CommunicationLink func (what ByTechnicalCommunicationLinkIdSort) Len() int { return len(what) } diff --git a/pkg/security/types/technical_asset.go b/pkg/security/types/technical_asset.go index 19d40bbb..32b33e50 100644 --- a/pkg/security/types/technical_asset.go +++ b/pkg/security/types/technical_asset.go @@ -7,8 +7,6 @@ package types import ( "fmt" "sort" - - "github.com/threagile/threagile/pkg/colors" ) type TechnicalAsset struct { @@ -249,124 +247,6 @@ func (what TechnicalAsset) ProcessesOrStoresDataAsset(dataAssetId string) bool { return false } -// red when >= confidential data stored in unencrypted technical asset - -func (what TechnicalAsset) DetermineLabelColor(model *ParsedModel) string { - // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here - // Check for red - if what.Integrity == MissionCritical { - return colors.Red - } - for _, storedDataAsset := range what.DataAssetsStored { - if model.DataAssets[storedDataAsset].Integrity == MissionCritical { - return colors.Red - } - } - for _, processedDataAsset := range what.DataAssetsProcessed { - if model.DataAssets[processedDataAsset].Integrity == MissionCritical { - return colors.Red - } - } - // Check for amber - if what.Integrity == Critical { - return colors.Amber - } - for _, storedDataAsset := range what.DataAssetsStored { - if model.DataAssets[storedDataAsset].Integrity == Critical { - return colors.Amber - } - } - for _, processedDataAsset := range what.DataAssetsProcessed { - if model.DataAssets[processedDataAsset].Integrity == Critical { - return colors.Amber - } - } - return colors.Black - /* - if what.Encrypted { - return colors.Black - } else { - if what.Confidentiality == StrictlyConfidential { - return colors.Red - } - for _, storedDataAsset := range 
what.DataAssetsStored { - if ParsedModelRoot.DataAssets[storedDataAsset].Confidentiality == StrictlyConfidential { - return colors.Red - } - } - if what.Confidentiality == Confidential { - return colors.Amber - } - for _, storedDataAsset := range what.DataAssetsStored { - if ParsedModelRoot.DataAssets[storedDataAsset].Confidentiality == Confidential { - return colors.Amber - } - } - return colors.Black - } - */ -} - -// red when mission-critical integrity, but still unauthenticated (non-readonly) channels access it -// amber when critical integrity, but still unauthenticated (non-readonly) channels access it -// pink when model forgery attempt (i.e. nothing being processed or stored) - -func (what TechnicalAsset) DetermineShapeBorderColor(parsedModel *ParsedModel) string { - // TODO: Just move into main.go and let the generated risk determine the color, don't duplicate the logic here - // Check for red - if what.Confidentiality == StrictlyConfidential { - return colors.Red - } - for _, storedDataAsset := range what.DataAssetsStored { - if parsedModel.DataAssets[storedDataAsset].Confidentiality == StrictlyConfidential { - return colors.Red - } - } - for _, processedDataAsset := range what.DataAssetsProcessed { - if parsedModel.DataAssets[processedDataAsset].Confidentiality == StrictlyConfidential { - return colors.Red - } - } - // Check for amber - if what.Confidentiality == Confidential { - return colors.Amber - } - for _, storedDataAsset := range what.DataAssetsStored { - if parsedModel.DataAssets[storedDataAsset].Confidentiality == Confidential { - return colors.Amber - } - } - for _, processedDataAsset := range what.DataAssetsProcessed { - if parsedModel.DataAssets[processedDataAsset].Confidentiality == Confidential { - return colors.Amber - } - } - return colors.Black - /* - if what.Integrity == MissionCritical { - for _, dataFlow := range IncomingTechnicalCommunicationLinksMappedByTargetId[what.Id] { - if !dataFlow.Readonly && dataFlow.Authentication == 
NoneAuthentication { - return colors.Red - } - } - } - - if what.Integrity == Critical { - for _, dataFlow := range IncomingTechnicalCommunicationLinksMappedByTargetId[what.Id] { - if !dataFlow.Readonly && dataFlow.Authentication == NoneAuthentication { - return colors.Amber - } - } - } - - if len(what.DataAssetsProcessed) == 0 && len(what.DataAssetsStored) == 0 { - return colors.Pink // pink, because it's strange when too many technical assets process no data... some are ok, but many in a diagram is a sign of model forgery... - } - - return colors.Black - */ -} - /* // Loops over all data assets (stored and processed by this technical asset) and determines for each // data asset, how many percentage of the data risk is reduced when this technical asset has all risks mitigated. @@ -408,28 +288,6 @@ func (what TechnicalAsset) QuickWins() float64 { } */ -// dotted when model forgery attempt (i.e. nothing being processed or stored) - -func (what TechnicalAsset) DetermineShapeBorderLineStyle() string { - if len(what.DataAssetsProcessed) == 0 && len(what.DataAssetsStored) == 0 || what.OutOfScope { - return "dotted" // dotted, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... 
- } - return "solid" -} - -// 3 when redundant - -func (what TechnicalAsset) DetermineShapePeripheries() int { - if what.Redundant { - return 2 - } - return 1 -} - -func (what TechnicalAsset) DetermineShapeStyle() string { - return "filled" -} - func (what TechnicalAsset) GetTrustBoundaryId(model *ParsedModel) string { for _, trustBoundary := range model.TrustBoundaries { for _, techAssetInside := range trustBoundary.TechnicalAssetsInside { @@ -441,32 +299,6 @@ func (what TechnicalAsset) GetTrustBoundaryId(model *ParsedModel) string { return "" } -func (what TechnicalAsset) DetermineShapeFillColor(parsedModel *ParsedModel) string { - fillColor := colors.VeryLightGray - if len(what.DataAssetsProcessed) == 0 && len(what.DataAssetsStored) == 0 || - what.Technology == UnknownTechnology { - fillColor = colors.LightPink // lightPink, because it's strange when too many technical assets process no data... some ok, but many in a diagram ist a sign of model forgery... - } else if len(what.CommunicationLinks) == 0 && len(parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId[what.Id]) == 0 { - fillColor = colors.LightPink - } else if what.Internet { - fillColor = colors.ExtremeLightBlue - } else if what.OutOfScope { - fillColor = colors.OutOfScopeFancy - } else if what.CustomDevelopedParts { - fillColor = colors.CustomDevelopedParts - } - switch what.Machine { - case Physical: - fillColor = colors.DarkenHexColor(fillColor) - case Container: - fillColor = colors.BrightenHexColor(fillColor) - case Serverless: - fillColor = colors.BrightenHexColor(colors.BrightenHexColor(fillColor)) - case Virtual: - } - return fillColor -} - func SortByTechnicalAssetRiskSeverityAndTitleStillAtRisk(assets []TechnicalAsset, parsedModel *ParsedModel) { sort.Slice(assets, func(i, j int) bool { risksLeft := ReduceToOnlyStillAtRisk(parsedModel, assets[i].GeneratedRisks(parsedModel)) @@ -496,16 +328,6 @@ func SortByTechnicalAssetRiskSeverityAndTitleStillAtRisk(assets []TechnicalAsset }) } 
-func (what TechnicalAsset) DetermineShapeBorderPenWidth(parsedModel *ParsedModel) string { - if what.DetermineShapeBorderColor(parsedModel) == colors.Pink { - return fmt.Sprintf("%f", 3.5) - } - if what.DetermineShapeBorderColor(parsedModel) != colors.Black { - return fmt.Sprintf("%f", 3.0) - } - return fmt.Sprintf("%f", 2.0) -} - type ByTechnicalAssetRAAAndTitleSort []TechnicalAsset func (what ByTechnicalAssetRAAAndTitleSort) Len() int { return len(what) } From f42969e9970f2831b599a91cb126a7cb97f545f9 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Mon, 8 Jan 2024 20:44:30 +0000 Subject: [PATCH 46/68] Reshuffle files to avoid too much logic inside parsed model, parsedModel shall be only parsed input model information and not deal with applying risks --- cmd/risk_demo/main.go | 3 +- cmd/threagile/config.json | 3 + cmd/threagile/threagile.yaml | 1354 ++++++++++++++++++++++++ internal/threagile/rules.go | 6 +- pkg/model/parse.go | 7 +- pkg/model/read.go | 65 +- pkg/{security/types => model}/rules.go | 32 +- pkg/{run => model}/runner.go | 12 +- pkg/report/report.go | 5 +- pkg/security/types/custom-risk.go | 27 - pkg/security/types/model.go | 55 - pkg/security/types/trust_boundary.go | 1 - pkg/server/server.go | 5 +- 13 files changed, 1463 insertions(+), 112 deletions(-) create mode 100644 cmd/threagile/config.json create mode 100644 cmd/threagile/threagile.yaml rename pkg/{security/types => model}/rules.go (64%) rename pkg/{run => model}/runner.go (88%) delete mode 100644 pkg/security/types/custom-risk.go diff --git a/cmd/risk_demo/main.go b/cmd/risk_demo/main.go index 2cbc89c5..77471d32 100644 --- a/cmd/risk_demo/main.go +++ b/cmd/risk_demo/main.go @@ -8,6 +8,7 @@ import ( "io" "os" + "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/types" ) @@ -25,7 +26,7 @@ func main() { if *getInfo { rule := new(customRiskRule) category := rule.Category() - riskData, marshalError := json.Marshal(types.CustomRisk{ + riskData, 
marshalError := json.Marshal(model.CustomRisk{ ID: category.Id, Category: category, Tags: rule.SupportedTags(), diff --git a/cmd/threagile/config.json b/cmd/threagile/config.json new file mode 100644 index 00000000..aa2f4ffe --- /dev/null +++ b/cmd/threagile/config.json @@ -0,0 +1,3 @@ +{ + "appFolder": "config-app-folder" +} diff --git a/cmd/threagile/threagile.yaml b/cmd/threagile/threagile.yaml new file mode 100644 index 00000000..77815f77 --- /dev/null +++ b/cmd/threagile/threagile.yaml @@ -0,0 +1,1354 @@ +threagile_version: 1.0.0 + +# NOTE: +# +# For a perfect editing experience within your IDE of choice you can easily +# get model syntax validation and autocompletion (very handy for enum values) +# as well as live templates: Just import the schema.json into your IDE and assign +# it as "schema" to each Threagile YAML file. Also try to import individual parts +# from the live-templates.txt file into your IDE as live editing templates. +# +# You might also want to try the REST API when running in server mode... + + + +title: Some Example Application + +date: 2020-07-01 + +author: + name: John Doe + homepage: www.example.com + + + + +management_summary_comment: > + Just some more custom summary possible here... + +business_criticality: important # values: archive, operational, important, critical, mission-critical + + + + +business_overview: + description: Some more demo text here and even images... + images: +# - custom-image-1.png: Some dummy image 1 +# - custom-image-2.png: Some dummy image 2 + + +technical_overview: + description: Some more demo text here and even images... 
+ images: +# - custom-image-1.png: Some dummy image 1 +# - custom-image-2.png: Some dummy image 2 + + + +questions: # simply use "" as answer to signal "unanswered" + How are the admin clients managed/protected against compromise?: "" + How are the development clients managed/protected against compromise?: > + Managed by XYZ + How are the build pipeline components managed/protected against compromise?: > + Managed by XYZ + + + +abuse_cases: + Denial-of-Service: > + As a hacker I want to disturb the functionality of the backend system in order to cause indirect + financial damage via unusable features. + CPU-Cycle Theft: > + As a hacker I want to steal CPU cycles in order to transform them into money via installed crypto currency miners. + Ransomware: > + As a hacker I want to encrypt the storage and file systems in order to demand ransom. + Identity Theft: > + As a hacker I want to steal identity data in order to reuse credentials and/or keys on other targets of the same company or outside. + PII Theft: > + As a hacker I want to steal PII (Personally Identifiable Information) data in order to blackmail the company and/or damage + their reputation by publishing them. + + ERP-System Compromise: > + As a hacker I want to access the ERP-System in order to steal/modify sensitive business data. + Database Compromise: > + As a hacker I want to access the database backend of the ERP-System in order to steal/modify sensitive + business data. + Contract Filesystem Compromise: > + As a hacker I want to access the filesystem storing the contract PDFs in order to steal/modify contract data. + Cross-Site Scripting Attacks: > + As a hacker I want to execute Cross-Site Scripting (XSS) and similar attacks in order to take over victim sessions and + cause reputational damage. + Denial-of-Service of Enduser Functionality: > + As a hacker I want to disturb the functionality of the enduser parts of the application in order to cause direct financial + damage (lower sales). 
+ Denial-of-Service of ERP/DB Functionality: > + As a hacker I want to disturb the functionality of the ERP system and/or its database in order to cause indirect + financial damage via unusable internal ERP features (not related to customer portal). + + +security_requirements: + Input Validation: Strict input validation is required to reduce the overall attack surface. + Securing Administrative Access: Administrative access must be secured with strong encryption and multi-factor authentication. + EU-DSGVO: Mandatory EU-Datenschutzgrundverordnung + + +# Tags can be used for anything, it's just a tag. Also risk rules can act based on tags if you like. +# Tags can be used for example to name the products used (which is more concrete than the technology types that only specify the type) +tags_available: + - linux + - apache + - mysql + - jboss + - keycloak + - jenkins + - git + - oracle + - some-erp + - vmware + - aws + - aws:ec2 + - aws:s3 + + + + +data_assets: + + + Customer Contracts: &customer-contracts # this example shows the inheritance-like features of YAML + id: customer-contracts + description: Customer Contracts (PDF) + usage: business # values: business, devops + tags: + origin: Customer + owner: Company XYZ + quantity: many # values: very-few, few, many, very-many + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Contract data might contain financial data as well as personally identifiable information (PII). The integrity and + availability of contract data is required for clearing payment disputes. 
+ + + Customer Contract Summaries: + <<: *customer-contracts # here we're referencing the above created asset as base and just overwrite few values + id: contract-summaries + description: Customer Contract Summaries + quantity: very-few # values: very-few, few, many, very-many + confidentiality: restricted # values: public, internal, restricted, confidential, strictly-confidential + integrity: operational # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Just some summaries. + + + Customer Operational Data: + <<: *customer-contracts # here we're referencing the above created asset as base and just overwrite few values + id: customer-operational-data + description: Customer Operational Data + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Customer operational data for using the portal are required to be available to offer the portal functionality + and are used in the backend transactions. + + + Customer Accounts: + <<: *customer-contracts # here we're referencing the above created asset as base and just overwrite few values + id: customer-accounts + description: Customer Accounts (including transient credentials when entered for checking them) + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Customer account data for using the portal are required to be available to offer the portal functionality. + + + Some Internal Business Data: + id: internal-business-data + description: Internal business data of the ERP system used unrelated to the customer-facing processes. 
+ usage: business # values: business, devops + tags: + origin: Company XYZ + owner: Company XYZ + quantity: few # values: very-few, few, many, very-many + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Data used and/or generated during unrelated other usecases of the ERP-system (when used also by Company XYZ for + internal non-customer-portal-related stuff). + + + Client Application Code: &client-application-code # this example shows the inheritance-like features of YAML + id: client-application-code + description: Angular and other client-side code delivered by the application. + usage: devops # values: business, devops + tags: + origin: Company ABC + owner: Company ABC + quantity: very-few # values: very-few, few, many, very-many + confidentiality: public # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The integrity of the public data is critical to avoid reputational damage and the availability is important on the + long-term scale (but not critical) to keep the growth rate of the customer base steady. + + + Server Application Code: + <<: *client-application-code # here we're referencing the above created asset as base and just overwrite few values + id: server-application-code + description: API and other server-side code of the application. 
+ confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: mission-critical # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The integrity of the API code is critical to avoid reputational damage and the availability is important on the + long-term scale (but not critical) to keep the growth rate of the customer base steady. + + + Build Job Config: + id: build-job-config + description: Data for customizing of the build job system. + usage: devops # values: business, devops + tags: + origin: Company XYZ + owner: Company XYZ + quantity: very-few # values: very-few, few, many, very-many + confidentiality: restricted # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Data for customizing of the build job system. + + + Marketing Material: + <<: *client-application-code # here we're referencing the above created asset as base and just overwrite few values + id: marketing-material + description: Website and marketing data to inform potential customers and generate new leads. + integrity: important # values: archive, operational, important, critical, mission-critical + + + ERP Logs: + id: erp-logs + description: Logs generated by the ERP system. 
+ usage: devops # values: business, devops + tags: + origin: Company XYZ + owner: Company XYZ + quantity: many # values: very-few, few, many, very-many + confidentiality: restricted # values: public, internal, restricted, confidential, strictly-confidential + integrity: archive # values: archive, operational, important, critical, mission-critical + availability: archive # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Logs should not contain PII data and are only required for failure analysis, i.e. they are not considered as hard + transactional logs. + + + ERP Customizing Data: + id: erp-customizing + description: Data for customizing of the ERP system. + usage: devops # values: business, devops + tags: + origin: Company XYZ + owner: Company XYZ + quantity: very-few # values: very-few, few, many, very-many + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Data for customizing of the ERP system. + + + Database Customizing and Dumps: + id: db-dumps + description: Data for customizing of the DB system, which might include full database dumps. + usage: devops # values: business, devops + tags: + - oracle + origin: Company XYZ + owner: Company XYZ + quantity: very-few # values: very-few, few, many, very-many + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Data for customizing of the DB system, which might include full database dumps. 
+ + + + + + +technical_assets: + + + Customer Web Client: + id: customer-client + description: Customer Web Client + type: external-entity # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: true + out_of_scope: true + justification_out_of_scope: Owned and managed by enduser customer + size: component # values: system, service, application, component + technology: browser # values: see help + tags: + internet: true + machine: physical # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Customer + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: operational # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The client used by the customer to access the system. 
+ multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - client-application-code + - marketing-material + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + Customer Traffic: + target: load-balancer + description: Link to the load balancer + protocol: https # values: see help + authentication: session-id # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + data_assets_received: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - client-application-code + - marketing-material + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + + + Backoffice Client: + id: backoffice-client + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: Backoffice client + type: external-entity # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: true + out_of_scope: true + justification_out_of_scope: Owned and managed by Company XYZ company + size: component # values: system, service, application, component + technology: desktop # values: see help + tags: + internet: false + machine: physical # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company XYZ + 
confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: important # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The client used by Company XYZ to administer and use the system. + multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-contracts + - internal-business-data + - erp-logs + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + ERP Internal Access: + target: erp-system + description: Link to the ERP system + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation + tags: + - some-erp + vpn: true + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - internal-business-data + data_assets_received: # sequence of IDs to reference + - customer-contracts + - internal-business-data + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + Marketing CMS Editing: + target: marketing-cms + description: Link to the CMS for editing content + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation + tags: + vpn: true + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - marketing-material + data_assets_received: # sequence 
of IDs to reference + - marketing-material + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + + + Backend Admin Client: + id: backend-admin-client + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: Backend admin client + type: external-entity # values: external-entity, process, datastore + usage: devops # values: business, devops + used_as_client_by_human: true + out_of_scope: true + justification_out_of_scope: Owned and managed by ops provider + size: component # values: system, service, application, component + technology: browser # values: see help + tags: + internet: false + machine: physical # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company XYZ + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: operational # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The client used by Company XYZ to administer the system. 
+ multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - erp-logs + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + ERP Web Access: + target: erp-system + description: Link to the ERP system (Web) + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - erp-customizing + data_assets_received: # sequence of IDs to reference + - erp-logs + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + DB Update Access: + target: sql-database + description: Link to the database (JDBC tunneled via SSH) + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - db-dumps + data_assets_received: # sequence of IDs to reference + - db-dumps + - erp-logs + - customer-accounts + - customer-operational-data + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + User Management Access: + target: ldap-auth-server + description: Link to the LDAP auth server for managing users + protocol: ldaps # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: 
+ vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + data_assets_received: # sequence of IDs to reference + - customer-accounts + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + + + Load Balancer: + id: load-balancer + #diagram_tweak_order: 50 # affects left to right positioning (only within a trust boundary) + description: Load Balancer (HA-Proxy) + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: load-balancer # values: see help + tags: + internet: false + machine: physical # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: mission-critical # values: archive, operational, important, critical, mission-critical + availability: mission-critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The correct configuration and reachability of the load balancer is mandatory for all customer and Company XYZ + usages of the portal and ERP system. 
+ multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - internal-business-data + - client-application-code + - marketing-material + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + Web Application Traffic: + target: apache-webserver + description: Link to the web server + protocol: http # values: see help + authentication: session-id # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: enduser-identity-propagation # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + data_assets_received: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - client-application-code + #diagram_tweak_weight: 5 + #diagram_tweak_constraint: false + CMS Content Traffic: + target: marketing-cms + description: Link to the CMS server + protocol: http # values: see help + authentication: none # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: none # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: true + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + data_assets_received: # sequence of IDs to reference + - marketing-material + #diagram_tweak_weight: 5 + #diagram_tweak_constraint: false + + + Apache Webserver: + id: apache-webserver + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: Apache Webserver hosting 
the API code and client-side code + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: application # values: system, service, application, component + technology: web-server # values: see help + tags: + - linux + - apache + - aws:ec2 + internet: false + machine: container # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The correct configuration and reachability of the web server is mandatory for all customer usages of the portal. 
+ multi_tenant: false + redundant: false + custom_developed_parts: true + data_assets_processed: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - internal-business-data + - client-application-code + - server-application-code + data_assets_stored: # sequence of IDs to reference + - client-application-code + - server-application-code + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - json + - file + communication_links: + ERP System Traffic: + target: erp-system + description: Link to the ERP system + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - internal-business-data + data_assets_received: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - internal-business-data + #diagram_tweak_weight: 5 + #diagram_tweak_constraint: false + Auth Credential Check Traffic: + target: identity-provider + description: Link to the identity provider server + protocol: https # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + data_assets_received: # sequence of IDs to reference + + + Identity Provider: + id: identity-provider + #diagram_tweak_order: 0 # affects left to right positioning (only 
within a trust boundary) + description: Identity provider server + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: identity-provider # values: see help + tags: + - linux + - jboss + - keycloak + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The auth data of the application + multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + data_assets_stored: # sequence of IDs to reference + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + LDAP Credential Check Traffic: + target: ldap-auth-server + description: Link to the LDAP server + protocol: ldaps # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + data_assets_received: # sequence of IDs to reference + + + LDAP Auth Server: + id: ldap-auth-server + #diagram_tweak_order: 0 # 
affects left to right positioning (only within a trust boundary) + description: LDAP authentication server + type: datastore # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: identity-store-ldap # values: see help + tags: + - linux + internet: false + machine: physical # values: physical, virtual, container, serverless + encryption: transparent # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The auth data of the application + multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + data_assets_stored: # sequence of IDs to reference + - customer-accounts + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + + + Marketing CMS: + id: marketing-cms + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: CMS for the marketing content + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: application # values: system, service, application, component + technology: cms # values: see help + tags: + - linux + internet: false + machine: container # values: physical, virtual, container, serverless + 
encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: internal # values: public, internal, restricted, confidential, strictly-confidential + integrity: important # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The correct configuration and reachability of the web server is mandatory for all customer usages of the portal. + multi_tenant: false + redundant: false + custom_developed_parts: true + data_assets_processed: # sequence of IDs to reference + - marketing-material + - customer-accounts + data_assets_stored: # sequence of IDs to reference + - marketing-material + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + Auth Traffic: + target: ldap-auth-server + description: Link to the LDAP auth server + protocol: ldap # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: true + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + data_assets_received: # sequence of IDs to reference + - customer-accounts + #diagram_tweak_weight: 5 + #diagram_tweak_constraint: false + + + Backoffice ERP System: + id: erp-system + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: ERP system + type: process # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: system # values: system, 
service, application, component + technology: erp # values: see help + tags: + - linux + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: mission-critical # values: archive, operational, important, critical, mission-critical + availability: mission-critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The ERP system contains business-relevant sensitive data for the leasing processes and eventually also for other + Company XYZ internal processes. + multi_tenant: false + redundant: true + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - customer-contracts + - internal-business-data + - erp-customizing + data_assets_stored: # sequence of IDs to reference + - erp-logs + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - xml + - file + - serialization + communication_links: + Database Traffic: + target: sql-database + description: Link to the DB system + protocol: jdbc # values: see help + authentication: credentials # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - internal-business-data + data_assets_received: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - internal-business-data + 
#diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + NFS Filesystem Access: + target: contract-fileserver + description: Link to the file system + protocol: nfs # values: see help + authentication: none # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: none # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: business # values: business, devops + data_assets_sent: # sequence of IDs to reference + - customer-contracts + data_assets_received: # sequence of IDs to reference + - customer-contracts + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + + + Contract Fileserver: + id: contract-fileserver + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: NFS Filesystem for storing the contract PDFs + type: datastore # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: file-server # values: see help + tags: + - linux + - aws:s3 + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + Contract data might contain financial data as well as personally identifiable information (PII). 
The integrity and + availability of contract data is required for clearing payment disputes. The filesystem is also required to be available + for storing new contracts of freshly generated customers. + multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + data_assets_stored: # sequence of IDs to reference + - customer-contracts + - contract-summaries + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - file + communication_links: + + + Customer Contract Database: + id: sql-database + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: The database behind the ERP system + type: datastore # values: external-entity, process, datastore + usage: business # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: component # values: system, service, application, component + technology: database # values: see help + tags: + - linux + - mysql + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: data-with-symmetric-shared-key # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: strictly-confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: mission-critical # values: archive, operational, important, critical, mission-critical + availability: mission-critical # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The ERP system's database contains business-relevant sensitive data for the leasing processes and eventually also + for other Company XYZ internal processes. 
+ multi_tenant: false + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - db-dumps + data_assets_stored: # sequence of IDs to reference + - customer-accounts + - customer-operational-data + - internal-business-data + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + communication_links: + + + External Development Client: + id: external-dev-client + #diagram_tweak_order: 0 # affects left to right positioning (only within a trust boundary) + description: External developer client + type: external-entity # values: external-entity, process, datastore + usage: devops # values: business, devops + used_as_client_by_human: true + out_of_scope: true + justification_out_of_scope: Owned and managed by external developers + size: system # values: system, service, application, component + technology: devops-client # values: see help + tags: + - linux + internet: true + machine: physical # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: External Developers + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: operational # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The clients used by external developers to create parts of the application code. 
+ multi_tenant: true + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_stored: # sequence of IDs to reference + - client-application-code + - server-application-code + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - file + communication_links: + Git-Repo Code Write Access: + target: git-repo + description: Link to the Git repo + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_received: # sequence of IDs to reference + - client-application-code + - server-application-code + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + Git-Repo Web-UI Access: + target: git-repo + description: Link to the Git repo + protocol: https # values: see help + authentication: token # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_received: # sequence of IDs to reference + - client-application-code + - server-application-code + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + Jenkins Web-UI Access: + target: jenkins-buildserver + description: Link to the Jenkins build server + protocol: https # values: see help + authentication: credentials # 
values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - build-job-config + data_assets_received: # sequence of IDs to reference + - build-job-config + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + + + Git Repository: + id: git-repo + #diagram_tweak_order: 99 # affects left to right positioning (only within a trust boundary) + description: Git repository server + type: process # values: external-entity, process, datastore + usage: devops # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: system # values: system, service, application, component + technology: sourcecode-repository # values: see help + tags: + - linux + - git + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: important # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The code repo pipeline might contain sensitive configuration values like backend credentials, certificates etc. and is + therefore rated as confidential. 
+ multi_tenant: true + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_stored: # sequence of IDs to reference + - client-application-code + - server-application-code + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - file + communication_links: + + + Jenkins Buildserver: + id: jenkins-buildserver + #diagram_tweak_order: 99 # affects left to right positioning (only within a trust boundary) + description: Jenkins buildserver + type: process # values: external-entity, process, datastore + usage: devops # values: business, devops + used_as_client_by_human: false + out_of_scope: false + justification_out_of_scope: + size: system # values: system, service, application, component + technology: build-pipeline # values: see help + tags: + - linux + - jenkins + internet: false + machine: virtual # values: physical, virtual, container, serverless + encryption: none # values: none, transparent, data-with-symmetric-shared-key, data-with-asymmetric-shared-key, data-with-enduser-individual-key + owner: Company ABC + confidentiality: confidential # values: public, internal, restricted, confidential, strictly-confidential + integrity: critical # values: archive, operational, important, critical, mission-critical + availability: important # values: archive, operational, important, critical, mission-critical + justification_cia_rating: > + The build pipeline might contain sensitive configuration values like backend credentials, certificates etc. and is + therefore rated as confidential. The integrity and availability is rated as critical and important due to the risk + of reputation damage and application update unavailability when the build pipeline is compromised. 
+ multi_tenant: true + redundant: false + custom_developed_parts: false + data_assets_processed: # sequence of IDs to reference + - build-job-config + - client-application-code + - server-application-code + - marketing-material + data_assets_stored: # sequence of IDs to reference + - build-job-config + - client-application-code + - server-application-code + - marketing-material + data_formats_accepted: # sequence of formats like: json, xml, serialization, file, csv + - file + - serialization + communication_links: + Git Repo Code Read Access: + target: git-repo + description: Link to the Git repository server + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: true + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + data_assets_received: # sequence of IDs to reference + - client-application-code + - server-application-code + #diagram_tweak_weight: 1 + #diagram_tweak_constraint: false + Application Deployment: + target: apache-webserver + description: Link to the Apache webserver + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - client-application-code + - server-application-code + data_assets_received: # sequence of IDs to reference + CMS Updates: + target: marketing-cms + description: Link to the CMS + protocol: ssh # values: see help + authentication: client-certificate # values: none, credentials, session-id, token, 
client-certificate, two-factor + authorization: technical-user # values: none, technical-user, enduser-identity-propagation + tags: + vpn: false + ip_filtered: false + readonly: false + usage: devops # values: business, devops + data_assets_sent: # sequence of IDs to reference + - marketing-material + data_assets_received: # sequence of IDs to reference + + + + + +trust_boundaries: + + + Web DMZ: + id: web-dmz + description: Web DMZ + type: network-cloud-security-group # values: see help + tags: + technical_assets_inside: # sequence of IDs to reference + - apache-webserver + - marketing-cms + trust_boundaries_nested: # sequence of IDs to reference + + + ERP DMZ: + id: erp-dmz + description: ERP DMZ + type: network-cloud-security-group # values: see help + tags: + - some-erp + technical_assets_inside: # sequence of IDs to reference + - erp-system + - contract-fileserver + - sql-database + trust_boundaries_nested: # sequence of IDs to reference + + + Application Network: + id: application-network + description: Application Network + type: network-cloud-provider # values: see help + tags: + - aws + technical_assets_inside: # sequence of IDs to reference + - load-balancer + trust_boundaries_nested: # sequence of IDs to reference + - web-dmz + - erp-dmz + - auth-env + + + Auth Handling Environment: + id: auth-env + description: Auth Handling Environment + type: execution-environment # values: see help + tags: + technical_assets_inside: # sequence of IDs to reference + - identity-provider + - ldap-auth-server + trust_boundaries_nested: # sequence of IDs to reference + + + Dev Network: + id: dev-network + description: Development Network + type: network-on-prem # values: see help + tags: + technical_assets_inside: # sequence of IDs to reference + - jenkins-buildserver + - git-repo + - backend-admin-client + - backoffice-client + trust_boundaries_nested: # sequence of IDs to reference + + + + + +shared_runtimes: + + + WebApp and Backoffice Virtualization: + id: 
webapp-virtualization + description: WebApp Virtualization + tags: + - vmware + technical_assets_running: # sequence of IDs to reference + - apache-webserver + - marketing-cms + - erp-system + - contract-fileserver + - sql-database + + + + +individual_risk_categories: # used for adding custom manually identified risks + + + Some Individual Risk Example: + id: something-strange + description: Some text describing the risk category... + impact: Some text describing the impact... + asvs: V0 - Something Strange + cheat_sheet: https://example.com + action: Some text describing the action... + mitigation: Some text describing the mitigation... + check: Check if XYZ... + function: business-side # values: business-side, architecture, development, operations + stride: repudiation # values: spoofing, tampering, repudiation, information-disclosure, denial-of-service, elevation-of-privilege + detection_logic: Some text describing the detection logic... + risk_assessment: Some text describing the risk assessment... + false_positives: Some text describing the most common types of false positives... 
+ model_failure_possible_reason: false + cwe: 693 + risks_identified: + Example Individual Risk at Database: + severity: critical # values: low, medium, elevated, high, critical + exploitation_likelihood: likely # values: unlikely, likely, very-likely, frequent + exploitation_impact: medium # values: low, medium, high, very-high + data_breach_probability: probable # values: improbable, possible, probable + data_breach_technical_assets: # list of technical asset IDs which might have data breach + - sql-database + most_relevant_data_asset: + most_relevant_technical_asset: sql-database + most_relevant_communication_link: + most_relevant_trust_boundary: + most_relevant_shared_runtime: + Example Individual Risk at Contract Filesystem: + severity: medium # values: low, medium, elevated, high, critical + exploitation_likelihood: frequent # values: unlikely, likely, very-likely, frequent + exploitation_impact: very-high # values: low, medium, high, very-high + data_breach_probability: improbable # values: improbable, possible, probable + data_breach_technical_assets: # list of technical asset IDs which might have data breach + most_relevant_data_asset: + most_relevant_technical_asset: contract-fileserver + most_relevant_communication_link: + most_relevant_trust_boundary: + most_relevant_shared_runtime: + + + +# NOTE: +# For risk tracking each risk-id needs to be defined (the string with the @ sign in it). These unique risk IDs +# are visible in the PDF report (the small grey string under each risk), the Excel (column "ID"), as well as the JSON responses. +# Some risk IDs have only one @ sign in them, while others have multiple. The idea is to allow for unique but still descriptive IDs. +# Therefore each risk instance creates its individual ID by taking all affected elements causing the risk to be within an @-delimited part. +# Using wildcards (the * sign) for parts delimited by @ signs allows handling groups of certain risks at once. 
Best is to lookup the IDs +# to use in the created Excel file. Alternatively a model macro "seed-risk-tracking" is available that helps in initially +# seeding the risk tracking part here based on already identified and not yet handled risks. +risk_tracking: + + untrusted-deserialization@erp-system: # wildcards "*" between the @ characters are possible + status: accepted # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: Risk accepted as tolerable + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe + + ldap-injection@*@ldap-auth-server@*: # wildcards "*" between the @ characters are possible + status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: The hardening measures were implemented and checked + ticket: XYZ-5678 + date: 2020-01-05 + checked_by: John Doe + + unencrypted-asset@*: # wildcards "*" between the @ characters are possible + status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: The hardening measures were implemented and checked + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe + + missing-authentication-second-factor@*@*@*: # wildcards "*" between the @ characters are possible + status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: The hardening measures were implemented and checked + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe + + missing-hardening@*: # wildcards "*" between the @ characters are possible + status: mitigated # values: unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: The hardening measures were implemented and checked + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe + + dos-risky-access-across-trust-boundary@*@*@*: # wildcards "*" between the @ characters are possible + status: in-progress # values: 
unchecked, in-discussion, accepted, in-progress, mitigated, false-positive + justification: The hardening measures are being implemented and checked + ticket: XYZ-1234 + date: 2020-01-04 + checked_by: John Doe + + + +#diagram_tweak_edge_layout: spline # values: spline, polyline, false, ortho (this suppresses edge labels), curved (this suppresses edge labels and can cause problems with edges) + +#diagram_tweak_suppress_edge_labels: true +#diagram_tweak_layout_left_to_right: true +#diagram_tweak_nodesep: 2 +#diagram_tweak_ranksep: 2 +#diagram_tweak_invisible_connections_between_assets: +# - tech-asset-source-id-A:tech-asset-target-id-B +# - tech-asset-source-id-C:tech-asset-target-id-D +#diagram_tweak_same_rank_assets: +# - tech-asset-source-id-E:tech-asset-target-id-F:tech-asset-source-id-G:tech-asset-target-id-H +# - tech-asset-source-id-M:tech-asset-target-id-N:tech-asset-source-id-O diff --git a/internal/threagile/rules.go b/internal/threagile/rules.go index 136beb3e..2388837a 100644 --- a/internal/threagile/rules.go +++ b/internal/threagile/rules.go @@ -8,8 +8,8 @@ import ( "strings" "github.com/threagile/threagile/pkg/common" + "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/security/risks" - "github.com/threagile/threagile/pkg/security/types" "github.com/spf13/cobra" @@ -26,7 +26,7 @@ var listRiskRules = &cobra.Command{ cmd.Println("----------------------") cmd.Println("Custom risk rules:") cmd.Println("----------------------") - customRiskRules := types.LoadCustomRiskRules(strings.Split(*customRiskRulesPluginFlag, ","), common.DefaultProgressReporter{Verbose: *verboseFlag}) + customRiskRules := model.LoadCustomRiskRules(strings.Split(*customRiskRulesPluginFlag, ","), common.DefaultProgressReporter{Verbose: *verboseFlag}) for id, customRule := range customRiskRules { cmd.Println(id, "-->", customRule.Category.Title, "--> with tags:", customRule.Tags) } @@ -53,7 +53,7 @@ var explainRiskRules = &cobra.Command{ 
cmd.Println("----------------------") cmd.Println("Custom risk rules:") cmd.Println("----------------------") - customRiskRules := types.LoadCustomRiskRules(strings.Split(*customRiskRulesPluginFlag, ","), common.DefaultProgressReporter{Verbose: *verboseFlag}) + customRiskRules := model.LoadCustomRiskRules(strings.Split(*customRiskRulesPluginFlag, ","), common.DefaultProgressReporter{Verbose: *verboseFlag}) for _, customRule := range customRiskRules { cmd.Printf("%v: %v\n", customRule.Category.Id, customRule.Category.Description) } diff --git a/pkg/model/parse.go b/pkg/model/parse.go index 9874106e..a8a7f025 100644 --- a/pkg/model/parse.go +++ b/pkg/model/parse.go @@ -3,15 +3,16 @@ package model import ( "errors" "fmt" - "github.com/threagile/threagile/pkg/input" - "github.com/threagile/threagile/pkg/security/types" "path/filepath" "regexp" "strings" "time" + + "github.com/threagile/threagile/pkg/input" + "github.com/threagile/threagile/pkg/security/types" ) -func ParseModel(modelInput *input.ModelInput, builtinRiskRules map[string]types.RiskRule, customRiskRules map[string]*types.CustomRisk) (*types.ParsedModel, error) { +func ParseModel(modelInput *input.ModelInput, builtinRiskRules map[string]types.RiskRule, customRiskRules map[string]*CustomRisk) (*types.ParsedModel, error) { businessCriticality, err := types.ParseCriticality(modelInput.BusinessCriticality) if err != nil { return nil, errors.New("unknown 'business_criticality' value of application: " + modelInput.BusinessCriticality) diff --git a/pkg/model/read.go b/pkg/model/read.go index aa16d5de..f92b4131 100644 --- a/pkg/model/read.go +++ b/pkg/model/read.go @@ -3,10 +3,10 @@ package model import ( "fmt" "path/filepath" + "strings" "github.com/threagile/threagile/pkg/common" "github.com/threagile/threagile/pkg/input" - "github.com/threagile/threagile/pkg/run" "github.com/threagile/threagile/pkg/security/risks" "github.com/threagile/threagile/pkg/security/types" ) @@ -22,7 +22,7 @@ type ReadResult struct { 
ParsedModel *types.ParsedModel IntroTextRAA string BuiltinRiskRules map[string]types.RiskRule - CustomRiskRules map[string]*types.CustomRisk + CustomRiskRules map[string]*CustomRisk } // TODO: consider about splitting this function into smaller ones for better reusability @@ -34,7 +34,7 @@ func ReadAndAnalyzeModel(config common.Config, progressReporter progressReporter for _, rule := range risks.GetBuiltInRiskRules() { builtinRiskRules[rule.Category().Id] = rule } - customRiskRules := types.LoadCustomRiskRules(config.RiskRulesPlugins, progressReporter) + customRiskRules := LoadCustomRiskRules(config.RiskRulesPlugins, progressReporter) modelInput := new(input.ModelInput).Defaults() loadError := modelInput.Load(config.InputFile) @@ -49,7 +49,7 @@ func ReadAndAnalyzeModel(config common.Config, progressReporter progressReporter introTextRAA := applyRAA(parsedModel, config.BinFolder, config.RAAPlugin, progressReporter) - parsedModel.ApplyRiskGeneration(customRiskRules, builtinRiskRules, + applyRiskGeneration(parsedModel, customRiskRules, builtinRiskRules, config.SkipRiskRules, progressReporter) err := parsedModel.ApplyWildcardRiskTrackingEvaluation(config.IgnoreOrphanedRiskTracking, progressReporter) if err != nil { @@ -70,10 +70,65 @@ func ReadAndAnalyzeModel(config common.Config, progressReporter progressReporter }, nil } +// TODO: refactor skipRiskRules to be a string array instead of a comma-separated string +func applyRiskGeneration(parsedModel *types.ParsedModel, customRiskRules map[string]*CustomRisk, + builtinRiskRules map[string]types.RiskRule, + skipRiskRules string, + progressReporter progressReporter) { + progressReporter.Info("Applying risk generation") + + skippedRules := make(map[string]bool) + if len(skipRiskRules) > 0 { + for _, id := range strings.Split(skipRiskRules, ",") { + skippedRules[id] = true + } + } + + for _, rule := range builtinRiskRules { + parsedModel.ApplyRisk(rule, &skippedRules) + } + + // NOW THE CUSTOM RISK RULES (if any) + for id, 
customRule := range customRiskRules { + _, ok := skippedRules[id] + if ok { + progressReporter.Info("Skipping custom risk rule:", id) + delete(skippedRules, id) + } else { + progressReporter.Info("Executing custom risk rule:", id) + parsedModel.AddToListOfSupportedTags(customRule.Tags) + customRisks := customRule.GenerateRisks(parsedModel) + if len(customRisks) > 0 { + parsedModel.GeneratedRisksByCategory[customRule.Category.Id] = customRisks + } + + progressReporter.Info("Added custom risks:", len(customRisks)) + } + } + + if len(skippedRules) > 0 { + keys := make([]string, 0) + for k := range skippedRules { + keys = append(keys, k) + } + if len(keys) > 0 { + progressReporter.Info("Unknown risk rules to skip:", keys) + } + } + + // save also in map keyed by synthetic risk-id + for _, category := range types.SortedRiskCategories(parsedModel) { + someRisks := types.SortedRisksOfCategory(parsedModel, category) + for _, risk := range someRisks { + parsedModel.GeneratedRisksBySyntheticId[strings.ToLower(risk.SyntheticId)] = risk + } + } +} + func applyRAA(parsedModel *types.ParsedModel, binFolder, raaPlugin string, progressReporter progressReporter) string { progressReporter.Info("Applying RAA calculation:", raaPlugin) - runner, loadError := new(run.Runner).Load(filepath.Join(binFolder, raaPlugin)) + runner, loadError := new(runner).Load(filepath.Join(binFolder, raaPlugin)) if loadError != nil { progressReporter.Warn(fmt.Sprintf("WARNING: raa %q not loaded: %v\n", raaPlugin, loadError)) return "" diff --git a/pkg/security/types/rules.go b/pkg/model/rules.go similarity index 64% rename from pkg/security/types/rules.go rename to pkg/model/rules.go index 91048d39..c50b4050 100644 --- a/pkg/security/types/rules.go +++ b/pkg/model/rules.go @@ -1,16 +1,34 @@ -/* -Copyright © 2023 NAME HERE -*/ - -package types +package model import ( "fmt" + "log" "strings" - "github.com/threagile/threagile/pkg/run" + "github.com/threagile/threagile/pkg/security/types" ) +type CustomRisk 
struct { + ID string + Category types.RiskCategory + Tags []string + Runner *runner +} + +func (r *CustomRisk) GenerateRisks(m *types.ParsedModel) []types.Risk { + if r.Runner == nil { + return nil + } + + risks := make([]types.Risk, 0) + runError := r.Runner.Run(m, &risks, "-generate-risks") + if runError != nil { + log.Fatalf("Failed to generate risks for custom risk rule %q: %v\n", r.Runner.Filename, runError) + } + + return risks +} + func LoadCustomRiskRules(pluginFiles []string, reporter progressReporter) map[string]*CustomRisk { customRiskRuleList := make([]string, 0) customRiskRules := make(map[string]*CustomRisk) @@ -19,7 +37,7 @@ func LoadCustomRiskRules(pluginFiles []string, reporter progressReporter) map[st for _, pluginFile := range pluginFiles { if len(pluginFile) > 0 { - runner, loadError := new(run.Runner).Load(pluginFile) + runner, loadError := new(runner).Load(pluginFile) if loadError != nil { reporter.Error(fmt.Sprintf("WARNING: Custom risk rule %q not loaded: %v\n", pluginFile, loadError)) } diff --git a/pkg/run/runner.go b/pkg/model/runner.go similarity index 88% rename from pkg/run/runner.go rename to pkg/model/runner.go index c0c53538..a8b6da0f 100644 --- a/pkg/run/runner.go +++ b/pkg/model/runner.go @@ -1,5 +1,5 @@ // TODO: consider moving to internal -package run +package model import ( "bytes" @@ -9,7 +9,7 @@ import ( "os/exec" ) -type Runner struct { +type runner struct { Filename string Parameters []string In any @@ -17,8 +17,8 @@ type Runner struct { ErrorOutput string } -func (p *Runner) Load(filename string) (*Runner, error) { - *p = Runner{ +func (p *runner) Load(filename string) (*runner, error) { + *p = runner{ Filename: filename, } @@ -34,8 +34,8 @@ func (p *Runner) Load(filename string) (*Runner, error) { return p, nil } -func (p *Runner) Run(in any, out any, parameters ...string) error { - *p = Runner{ +func (p *runner) Run(in any, out any, parameters ...string) error { + *p = runner{ Filename: p.Filename, Parameters: 
parameters, In: in, diff --git a/pkg/report/report.go b/pkg/report/report.go index c9e4ca77..aead5c1e 100644 --- a/pkg/report/report.go +++ b/pkg/report/report.go @@ -16,6 +16,7 @@ import ( "github.com/jung-kurt/gofpdf" "github.com/jung-kurt/gofpdf/contrib/gofpdi" "github.com/threagile/threagile/pkg/docs" + "github.com/threagile/threagile/pkg/model" accidental_secret_leak "github.com/threagile/threagile/pkg/security/risks/built-in/accidental-secret-leak" code_backdooring "github.com/threagile/threagile/pkg/security/risks/built-in/code-backdooring" container_baseimage_backdooring "github.com/threagile/threagile/pkg/security/risks/built-in/container-baseimage-backdooring" @@ -98,7 +99,7 @@ func (r *pdfReporter) WriteReportPDF(reportFilename string, buildTimestamp string, modelHash string, introTextRAA string, - customRiskRules map[string]*types.CustomRisk, + customRiskRules map[string]*model.CustomRisk, tempFolder string, model *types.ParsedModel) error { r.initReport() @@ -4035,7 +4036,7 @@ func (r *pdfReporter) createSharedRuntimes(parsedModel *types.ParsedModel) { } } -func (r *pdfReporter) createRiskRulesChecked(parsedModel *types.ParsedModel, modelFilename string, skipRiskRules string, buildTimestamp string, modelHash string, customRiskRules map[string]*types.CustomRisk) { +func (r *pdfReporter) createRiskRulesChecked(parsedModel *types.ParsedModel, modelFilename string, skipRiskRules string, buildTimestamp string, modelHash string, customRiskRules map[string]*model.CustomRisk) { r.pdf.SetTextColor(0, 0, 0) title := "Risk Rules Checked by Threagile" r.addHeadline(title, false) diff --git a/pkg/security/types/custom-risk.go b/pkg/security/types/custom-risk.go deleted file mode 100644 index beb66ffe..00000000 --- a/pkg/security/types/custom-risk.go +++ /dev/null @@ -1,27 +0,0 @@ -package types - -import ( - "github.com/threagile/threagile/pkg/run" - "log" -) - -type CustomRisk struct { - ID string - Category RiskCategory - Tags []string - Runner *run.Runner -} - 
-func (r *CustomRisk) GenerateRisks(m *ParsedModel) []Risk { - if r.Runner == nil { - return nil - } - - risks := make([]Risk, 0) - runError := r.Runner.Run(m, &risks, "-generate-risks") - if runError != nil { - log.Fatalf("Failed to generate risks for custom risk rule %q: %v\n", r.Runner.Filename, runError) - } - - return risks -} diff --git a/pkg/security/types/model.go b/pkg/security/types/model.go index 6aac1bb6..ba14f108 100644 --- a/pkg/security/types/model.go +++ b/pkg/security/types/model.go @@ -114,61 +114,6 @@ func (parsedModel *ParsedModel) CheckTags(tags []string, where string) ([]string return tagsUsed, nil } -// TODO: refactor skipRiskRules to be a string array instead of a comma-separated string -func (parsedModel *ParsedModel) ApplyRiskGeneration(customRiskRules map[string]*CustomRisk, - builtinRiskRules map[string]RiskRule, - skipRiskRules string, - progressReporter progressReporter) { - progressReporter.Info("Applying risk generation") - - skippedRules := make(map[string]bool) - if len(skipRiskRules) > 0 { - for _, id := range strings.Split(skipRiskRules, ",") { - skippedRules[id] = true - } - } - - for _, rule := range builtinRiskRules { - parsedModel.ApplyRisk(rule, &skippedRules) - } - - // NOW THE CUSTOM RISK RULES (if any) - for id, customRule := range customRiskRules { - _, ok := skippedRules[id] - if ok { - progressReporter.Info("Skipping custom risk rule:", id) - delete(skippedRules, id) - } else { - progressReporter.Info("Executing custom risk rule:", id) - parsedModel.AddToListOfSupportedTags(customRule.Tags) - customRisks := customRule.GenerateRisks(parsedModel) - if len(customRisks) > 0 { - parsedModel.GeneratedRisksByCategory[customRule.Category.Id] = customRisks - } - - progressReporter.Info("Added custom risks:", len(customRisks)) - } - } - - if len(skippedRules) > 0 { - keys := make([]string, 0) - for k := range skippedRules { - keys = append(keys, k) - } - if len(keys) > 0 { - progressReporter.Info("Unknown risk rules to skip:", 
keys) - } - } - - // save also in map keyed by synthetic risk-id - for _, category := range SortedRiskCategories(parsedModel) { - someRisks := SortedRisksOfCategory(parsedModel, category) - for _, risk := range someRisks { - parsedModel.GeneratedRisksBySyntheticId[strings.ToLower(risk.SyntheticId)] = risk - } - } -} - func (parsedModel *ParsedModel) ApplyWildcardRiskTrackingEvaluation(ignoreOrphanedRiskTracking bool, progressReporter progressReporter) error { progressReporter.Info("Executing risk tracking evaluation") for syntheticRiskIdPattern, riskTracking := range parsedModel.GetDeferredRiskTrackingDueToWildcardMatching() { diff --git a/pkg/security/types/trust_boundary.go b/pkg/security/types/trust_boundary.go index 4a90c66f..2b9cacd5 100644 --- a/pkg/security/types/trust_boundary.go +++ b/pkg/security/types/trust_boundary.go @@ -100,7 +100,6 @@ func (what TrustBoundary) addAssetIDsRecursively(model *ParsedModel, result *[]s } } -// TODO: pass ParsedModelRoot as parameter instead of using global variable func (what TrustBoundary) addTrustBoundaryIDsRecursively(model *ParsedModel, result *[]string) { *result = append(*result, what.Id) parentID := what.ParentTrustBoundaryID(model) diff --git a/pkg/server/server.go b/pkg/server/server.go index cc551236..2801a469 100644 --- a/pkg/server/server.go +++ b/pkg/server/server.go @@ -16,6 +16,7 @@ import ( "sync" "github.com/threagile/threagile/pkg/common" + "github.com/threagile/threagile/pkg/model" "github.com/gin-gonic/gin" "github.com/threagile/threagile/pkg/docs" @@ -34,7 +35,7 @@ type server struct { mapFolderNameToTokenHash map[string]string extremeShortTimeoutsForTesting bool locksByFolderName map[string]*sync.Mutex - customRiskRules map[string]*types.CustomRisk + customRiskRules map[string]*model.CustomRisk } func RunServer(config *common.Config) { @@ -175,7 +176,7 @@ func RunServer(config *common.Config) { router.DELETE("/models/:model-id/shared-runtimes/:shared-runtime-id", s.deleteSharedRuntime) reporter := 
common.DefaultProgressReporter{Verbose: s.config.Verbose} - s.customRiskRules = types.LoadCustomRiskRules(s.config.RiskRulesPlugins, reporter) + s.customRiskRules = model.LoadCustomRiskRules(s.config.RiskRulesPlugins, reporter) fmt.Println("Threagile s running...") _ = router.Run(":" + strconv.Itoa(s.config.ServerPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified From 3329cd53b335727ee27d7d14e9a896577c5fdb75 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Tue, 9 Jan 2024 14:22:30 +0000 Subject: [PATCH 47/68] Use go interfaces to specify built in risk rules, move custom risk rules out of parsed model --- pkg/macros/add-build-pipeline-macro.go | 2 +- pkg/macros/add-vault-macro.go | 2 +- pkg/macros/pretty-print-macro.go | 2 +- pkg/macros/remove-unused-tags-macro.go | 2 +- pkg/macros/seed-risk-tracking-macro.go | 2 +- pkg/macros/seed-tags-macro.go | 2 +- pkg/model/parse.go | 3 +- pkg/model/read.go | 28 +- pkg/report/report.go | 1503 +---------------- .../accidental-secret-leak-rule.go | 26 +- .../code-backdooring-rule.go | 26 +- .../container-baseimage-backdooring-rule.go | 24 +- .../container-platform-escape-rule.go | 24 +- .../cross-site-request-forgery-rule.go | 24 +- .../cross-site-scripting-rule.go | 24 +- ...risky-access-across-trust-boundary-rule.go | 30 +- .../incomplete-model-rule.go | 30 +- .../ldap-injection-rule.go | 30 +- .../missing-authentication-rule.go | 24 +- ...ssing-authentication-second-factor-rule.go | 25 +- .../missing-build-infrastructure-rule.go | 24 +- .../missing-cloud-hardening-rule.go | 86 +- .../missing-file-validation-rule.go | 24 +- .../missing-hardening-rule.go | 36 +- .../missing-identity-propagation-rule.go | 24 +- ...issing-identity-provider-isolation-rule.go | 24 +- .../missing-identity-store-rule.go | 24 +- .../missing-network-segmentation-rule.go | 28 +- .../missing-vault-isolation-rule.go | 24 +- .../missing-vault-rule.go | 24 +- .../missing-waf-rule.go | 24 +- 
.../mixed-targets-on-shared-runtime-rule.go | 26 +- .../path-traversal-rule.go | 30 +- .../push-instead-of-pull-deployment-rule.go | 24 +- .../search-query-injection-rule.go | 30 +- .../server-side-request-forgery-rule.go | 24 +- .../service-registry-poisoning-rule.go | 24 +- .../sql-nosql-injection-rule.go | 24 +- .../unchecked-deployment-rule.go | 24 +- .../unencrypted-asset-rule.go | 30 +- .../unencrypted-communication-rule.go | 30 +- .../unguarded-access-from-internet-rule.go | 24 +- .../unguarded-direct-datastore-access-rule.go | 28 +- .../unnecessary-communication-link-rule.go | 24 +- .../unnecessary-data-asset-rule.go | 24 +- .../unnecessary-data-transfer-rule.go | 32 +- .../unnecessary-technical-asset-rule.go | 24 +- .../untrusted-deserialization-rule.go | 24 +- .../wrong-communication-link-content-rule.go | 32 +- .../wrong-trust-boundary-content.go | 24 +- .../xml-external-entity-rule.go | 24 +- pkg/security/risks/risks.go | 137 +- pkg/security/types/model.go | 20 - pkg/security/types/risk-rule.go | 7 - 54 files changed, 654 insertions(+), 2211 deletions(-) rename pkg/security/risks/{built-in/accidental-secret-leak => builtin}/accidental-secret-leak-rule.go (83%) rename pkg/security/risks/{built-in/code-backdooring => builtin}/code-backdooring-rule.go (88%) rename pkg/security/risks/{built-in/container-baseimage-backdooring => builtin}/container-baseimage-backdooring-rule.go (82%) rename pkg/security/risks/{built-in/container-platform-escape => builtin}/container-platform-escape-rule.go (86%) rename pkg/security/risks/{built-in/cross-site-request-forgery => builtin}/cross-site-request-forgery-rule.go (82%) rename pkg/security/risks/{built-in/cross-site-scripting => builtin}/cross-site-scripting-rule.go (82%) rename pkg/security/risks/{built-in/dos-risky-access-across-trust-boundary => builtin}/dos-risky-access-across-trust-boundary-rule.go (79%) rename pkg/security/risks/{built-in/incomplete-model => builtin}/incomplete-model-rule.go (79%) rename 
pkg/security/risks/{built-in/ldap-injection => builtin}/ldap-injection-rule.go (83%) rename pkg/security/risks/{built-in/missing-authentication => builtin}/missing-authentication-rule.go (86%) rename pkg/security/risks/{built-in/missing-authentication-second-factor => builtin}/missing-authentication-second-factor-rule.go (80%) rename pkg/security/risks/{built-in/missing-build-infrastructure => builtin}/missing-build-infrastructure-rule.go (85%) rename pkg/security/risks/{built-in/missing-cloud-hardening => builtin}/missing-cloud-hardening-rule.go (80%) rename pkg/security/risks/{built-in/missing-file-validation => builtin}/missing-file-validation-rule.go (83%) rename pkg/security/risks/{built-in/missing-hardening => builtin}/missing-hardening-rule.go (73%) rename pkg/security/risks/{built-in/missing-identity-propagation => builtin}/missing-identity-propagation-rule.go (86%) rename pkg/security/risks/{built-in/missing-identity-provider-isolation => builtin}/missing-identity-provider-isolation-rule.go (85%) rename pkg/security/risks/{built-in/missing-identity-store => builtin}/missing-identity-store-rule.go (86%) rename pkg/security/risks/{built-in/missing-network-segmentation => builtin}/missing-network-segmentation-rule.go (85%) rename pkg/security/risks/{built-in/missing-vault-isolation => builtin}/missing-vault-isolation-rule.go (87%) rename pkg/security/risks/{built-in/missing-vault => builtin}/missing-vault-rule.go (86%) rename pkg/security/risks/{built-in/missing-waf => builtin}/missing-waf-rule.go (86%) rename pkg/security/risks/{built-in/mixed-targets-on-shared-runtime => builtin}/mixed-targets-on-shared-runtime-rule.go (85%) rename pkg/security/risks/{built-in/path-traversal => builtin}/path-traversal-rule.go (84%) rename pkg/security/risks/{built-in/push-instead-of-pull-deployment => builtin}/push-instead-of-pull-deployment-rule.go (81%) rename pkg/security/risks/{built-in/search-query-injection => builtin}/search-query-injection-rule.go (85%) rename 
pkg/security/risks/{built-in/server-side-request-forgery => builtin}/server-side-request-forgery-rule.go (87%) rename pkg/security/risks/{built-in/service-registry-poisoning => builtin}/service-registry-poisoning-rule.go (82%) rename pkg/security/risks/{built-in/sql-nosql-injection => builtin}/sql-nosql-injection-rule.go (84%) rename pkg/security/risks/{built-in/unchecked-deployment => builtin}/unchecked-deployment-rule.go (87%) rename pkg/security/risks/{built-in/unencrypted-asset => builtin}/unencrypted-asset-rule.go (83%) rename pkg/security/risks/{built-in/unencrypted-communication => builtin}/unencrypted-communication-rule.go (82%) rename pkg/security/risks/{built-in/unguarded-access-from-internet => builtin}/unguarded-access-from-internet-rule.go (88%) rename pkg/security/risks/{built-in/unguarded-direct-datastore-access => builtin}/unguarded-direct-datastore-access-rule.go (85%) rename pkg/security/risks/{built-in/unnecessary-communication-link => builtin}/unnecessary-communication-link-rule.go (79%) rename pkg/security/risks/{built-in/unnecessary-data-asset => builtin}/unnecessary-data-asset-rule.go (84%) rename pkg/security/risks/{built-in/unnecessary-data-transfer => builtin}/unnecessary-data-transfer-rule.go (83%) rename pkg/security/risks/{built-in/unnecessary-technical-asset => builtin}/unnecessary-technical-asset-rule.go (80%) rename pkg/security/risks/{built-in/untrusted-deserialization => builtin}/untrusted-deserialization-rule.go (85%) rename pkg/security/risks/{built-in/wrong-communication-link-content => builtin}/wrong-communication-link-content-rule.go (80%) rename pkg/security/risks/{built-in/wrong-trust-boundary-content => builtin}/wrong-trust-boundary-content.go (80%) rename pkg/security/risks/{built-in/xml-external-entity => builtin}/xml-external-entity-rule.go (84%) delete mode 100644 pkg/security/types/risk-rule.go diff --git a/pkg/macros/add-build-pipeline-macro.go b/pkg/macros/add-build-pipeline-macro.go index e6500be1..6d486cc3 100644 
--- a/pkg/macros/add-build-pipeline-macro.go +++ b/pkg/macros/add-build-pipeline-macro.go @@ -18,7 +18,7 @@ type addBuildPipeline struct { createNewTrustBoundary bool } -func NewBuildPipeline() Macros { +func NewBuildPipeline() *addBuildPipeline { return &addBuildPipeline{ macroState: make(map[string][]string), questionsAnswered: make([]string, 0), diff --git a/pkg/macros/add-vault-macro.go b/pkg/macros/add-vault-macro.go index 4fe72854..9dddc5f1 100644 --- a/pkg/macros/add-vault-macro.go +++ b/pkg/macros/add-vault-macro.go @@ -33,7 +33,7 @@ var authenticationTypes = []string{ "Credentials (username/password, API-key, secret token, etc.)", } -func NewAddVault() Macros { +func NewAddVault() *addVaultMacro { return &addVaultMacro{ macroState: make(map[string][]string), questionsAnswered: make([]string, 0), diff --git a/pkg/macros/pretty-print-macro.go b/pkg/macros/pretty-print-macro.go index f07a8b58..76c2dcca 100644 --- a/pkg/macros/pretty-print-macro.go +++ b/pkg/macros/pretty-print-macro.go @@ -8,7 +8,7 @@ import ( type prettyPrintMacro struct { } -func NewPrettyPrint() Macros { +func NewPrettyPrint() *prettyPrintMacro { return &prettyPrintMacro{} } diff --git a/pkg/macros/remove-unused-tags-macro.go b/pkg/macros/remove-unused-tags-macro.go index 9de7f4bf..8cab35c3 100644 --- a/pkg/macros/remove-unused-tags-macro.go +++ b/pkg/macros/remove-unused-tags-macro.go @@ -11,7 +11,7 @@ import ( type removeUnusedTagsMacro struct { } -func NewRemoveUnusedTags() Macros { +func NewRemoveUnusedTags() *removeUnusedTagsMacro { return &removeUnusedTagsMacro{} } diff --git a/pkg/macros/seed-risk-tracking-macro.go b/pkg/macros/seed-risk-tracking-macro.go index 91a6df9c..3fdad714 100644 --- a/pkg/macros/seed-risk-tracking-macro.go +++ b/pkg/macros/seed-risk-tracking-macro.go @@ -11,7 +11,7 @@ import ( type seedRiskTrackingMacro struct { } -func NewSeedRiskTracking() Macros { +func NewSeedRiskTracking() *seedRiskTrackingMacro { return &seedRiskTrackingMacro{} } diff --git 
a/pkg/macros/seed-tags-macro.go b/pkg/macros/seed-tags-macro.go index e9c2d29c..54aa990d 100644 --- a/pkg/macros/seed-tags-macro.go +++ b/pkg/macros/seed-tags-macro.go @@ -11,7 +11,7 @@ import ( type seedTagsMacro struct { } -func NewSeedTags() Macros { +func NewSeedTags() *seedTagsMacro { return &seedTagsMacro{} } diff --git a/pkg/model/parse.go b/pkg/model/parse.go index a8a7f025..6fad2fec 100644 --- a/pkg/model/parse.go +++ b/pkg/model/parse.go @@ -9,10 +9,11 @@ import ( "time" "github.com/threagile/threagile/pkg/input" + "github.com/threagile/threagile/pkg/security/risks" "github.com/threagile/threagile/pkg/security/types" ) -func ParseModel(modelInput *input.ModelInput, builtinRiskRules map[string]types.RiskRule, customRiskRules map[string]*CustomRisk) (*types.ParsedModel, error) { +func ParseModel(modelInput *input.ModelInput, builtinRiskRules map[string]risks.RiskRule, customRiskRules map[string]*CustomRisk) (*types.ParsedModel, error) { businessCriticality, err := types.ParseCriticality(modelInput.BusinessCriticality) if err != nil { return nil, errors.New("unknown 'business_criticality' value of application: " + modelInput.BusinessCriticality) diff --git a/pkg/model/read.go b/pkg/model/read.go index f92b4131..c2c79de9 100644 --- a/pkg/model/read.go +++ b/pkg/model/read.go @@ -21,7 +21,7 @@ type ReadResult struct { ModelInput *input.ModelInput ParsedModel *types.ParsedModel IntroTextRAA string - BuiltinRiskRules map[string]types.RiskRule + BuiltinRiskRules map[string]risks.RiskRule CustomRiskRules map[string]*CustomRisk } @@ -30,7 +30,7 @@ func ReadAndAnalyzeModel(config common.Config, progressReporter progressReporter progressReporter.Info("Writing into output directory:", config.OutputFolder) progressReporter.Info("Parsing model:", config.InputFile) - builtinRiskRules := make(map[string]types.RiskRule) + builtinRiskRules := make(map[string]risks.RiskRule) for _, rule := range risks.GetBuiltInRiskRules() { builtinRiskRules[rule.Category().Id] = rule } @@ 
-70,9 +70,29 @@ func ReadAndAnalyzeModel(config common.Config, progressReporter progressReporter }, nil } +func applyRisk(parsedModel *types.ParsedModel, rule risks.RiskRule, skippedRules *map[string]bool) { + id := rule.Category().Id + _, ok := (*skippedRules)[id] + + if ok { + fmt.Printf("Skipping risk rule %q\n", rule.Category().Id) + delete(*skippedRules, rule.Category().Id) + } else { + parsedModel.AddToListOfSupportedTags(rule.SupportedTags()) + generatedRisks := rule.GenerateRisks(parsedModel) + if generatedRisks != nil { + if len(generatedRisks) > 0 { + parsedModel.GeneratedRisksByCategory[rule.Category().Id] = generatedRisks + } + } else { + fmt.Printf("Failed to generate risks for %q\n", id) + } + } +} + // TODO: refactor skipRiskRules to be a string array instead of a comma-separated string func applyRiskGeneration(parsedModel *types.ParsedModel, customRiskRules map[string]*CustomRisk, - builtinRiskRules map[string]types.RiskRule, + builtinRiskRules map[string]risks.RiskRule, skipRiskRules string, progressReporter progressReporter) { progressReporter.Info("Applying risk generation") @@ -85,7 +105,7 @@ func applyRiskGeneration(parsedModel *types.ParsedModel, customRiskRules map[str } for _, rule := range builtinRiskRules { - parsedModel.ApplyRisk(rule, &skippedRules) + applyRisk(parsedModel, rule, &skippedRules) } // NOW THE CUSTOM RISK RULES (if any) diff --git a/pkg/report/report.go b/pkg/report/report.go index aead5c1e..6ef60e87 100644 --- a/pkg/report/report.go +++ b/pkg/report/report.go @@ -17,48 +17,7 @@ import ( "github.com/jung-kurt/gofpdf/contrib/gofpdi" "github.com/threagile/threagile/pkg/docs" "github.com/threagile/threagile/pkg/model" - accidental_secret_leak "github.com/threagile/threagile/pkg/security/risks/built-in/accidental-secret-leak" - code_backdooring "github.com/threagile/threagile/pkg/security/risks/built-in/code-backdooring" - container_baseimage_backdooring 
"github.com/threagile/threagile/pkg/security/risks/built-in/container-baseimage-backdooring" - container_platform_escape "github.com/threagile/threagile/pkg/security/risks/built-in/container-platform-escape" - cross_site_request_forgery "github.com/threagile/threagile/pkg/security/risks/built-in/cross-site-request-forgery" - cross_site_scripting "github.com/threagile/threagile/pkg/security/risks/built-in/cross-site-scripting" - dos_risky_access_across_trust_boundary "github.com/threagile/threagile/pkg/security/risks/built-in/dos-risky-access-across-trust-boundary" - incomplete_model "github.com/threagile/threagile/pkg/security/risks/built-in/incomplete-model" - ldap_injection "github.com/threagile/threagile/pkg/security/risks/built-in/ldap-injection" - missing_authentication "github.com/threagile/threagile/pkg/security/risks/built-in/missing-authentication" - missing_authentication_second_factor "github.com/threagile/threagile/pkg/security/risks/built-in/missing-authentication-second-factor" - missing_build_infrastructure "github.com/threagile/threagile/pkg/security/risks/built-in/missing-build-infrastructure" - missing_cloud_hardening "github.com/threagile/threagile/pkg/security/risks/built-in/missing-cloud-hardening" - missing_file_validation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-file-validation" - missing_hardening "github.com/threagile/threagile/pkg/security/risks/built-in/missing-hardening" - missing_identity_propagation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-identity-propagation" - missing_identity_provider_isolation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-identity-provider-isolation" - missing_identity_store "github.com/threagile/threagile/pkg/security/risks/built-in/missing-identity-store" - missing_network_segmentation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-network-segmentation" - missing_vault 
"github.com/threagile/threagile/pkg/security/risks/built-in/missing-vault" - missing_vault_isolation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-vault-isolation" - missing_waf "github.com/threagile/threagile/pkg/security/risks/built-in/missing-waf" - mixed_targets_on_shared_runtime "github.com/threagile/threagile/pkg/security/risks/built-in/mixed-targets-on-shared-runtime" - path_traversal "github.com/threagile/threagile/pkg/security/risks/built-in/path-traversal" - push_instead_of_pull_deployment "github.com/threagile/threagile/pkg/security/risks/built-in/push-instead-of-pull-deployment" - search_query_injection "github.com/threagile/threagile/pkg/security/risks/built-in/search-query-injection" - server_side_request_forgery "github.com/threagile/threagile/pkg/security/risks/built-in/server-side-request-forgery" - service_registry_poisoning "github.com/threagile/threagile/pkg/security/risks/built-in/service-registry-poisoning" - sql_nosql_injection "github.com/threagile/threagile/pkg/security/risks/built-in/sql-nosql-injection" - unchecked_deployment "github.com/threagile/threagile/pkg/security/risks/built-in/unchecked-deployment" - unencrypted_asset "github.com/threagile/threagile/pkg/security/risks/built-in/unencrypted-asset" - unencrypted_communication "github.com/threagile/threagile/pkg/security/risks/built-in/unencrypted-communication" - unguarded_access_from_internet "github.com/threagile/threagile/pkg/security/risks/built-in/unguarded-access-from-internet" - unguarded_direct_datastore_access "github.com/threagile/threagile/pkg/security/risks/built-in/unguarded-direct-datastore-access" - unnecessary_communication_link "github.com/threagile/threagile/pkg/security/risks/built-in/unnecessary-communication-link" - unnecessary_data_asset "github.com/threagile/threagile/pkg/security/risks/built-in/unnecessary-data-asset" - unnecessary_data_transfer "github.com/threagile/threagile/pkg/security/risks/built-in/unnecessary-data-transfer" - 
unnecessary_technical_asset "github.com/threagile/threagile/pkg/security/risks/built-in/unnecessary-technical-asset" - untrusted_deserialization "github.com/threagile/threagile/pkg/security/risks/built-in/untrusted-deserialization" - wrong_communication_link_content "github.com/threagile/threagile/pkg/security/risks/built-in/wrong-communication-link-content" - wrong_trust_boundary_content "github.com/threagile/threagile/pkg/security/risks/built-in/wrong-trust-boundary-content" - xml_external_entity "github.com/threagile/threagile/pkg/security/risks/built-in/xml-external-entity" + "github.com/threagile/threagile/pkg/security/risks" "github.com/threagile/threagile/pkg/security/types" "github.com/wcharczuk/go-chart" "github.com/wcharczuk/go-chart/drawing" @@ -4143,1433 +4102,41 @@ func (r *pdfReporter) createRiskRulesChecked(parsedModel *types.ParsedModel, mod r.pdf.MultiCell(160, 6, individualRiskCategory.RiskAssessment, "0", "0", false) } - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, accidental_secret_leak.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+accidental_secret_leak.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, accidental_secret_leak.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, accidental_secret_leak.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(accidental_secret_leak.Category().Description), "0", "0", false) - r.pdfColorGray() - 
r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, accidental_secret_leak.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, accidental_secret_leak.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, code_backdooring.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+code_backdooring.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, code_backdooring.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, code_backdooring.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(code_backdooring.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, code_backdooring.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, code_backdooring.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - 
r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, container_baseimage_backdooring.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+container_baseimage_backdooring.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, container_baseimage_backdooring.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, container_baseimage_backdooring.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(container_baseimage_backdooring.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, container_baseimage_backdooring.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, container_baseimage_backdooring.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, container_platform_escape.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+container_platform_escape.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, container_platform_escape.Category().Id, 
"0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, container_platform_escape.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(container_platform_escape.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, container_platform_escape.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, container_platform_escape.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, cross_site_request_forgery.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+cross_site_request_forgery.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, cross_site_request_forgery.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, cross_site_request_forgery.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, 
"Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(cross_site_request_forgery.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, cross_site_request_forgery.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, cross_site_request_forgery.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, cross_site_scripting.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+cross_site_scripting.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, cross_site_scripting.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, cross_site_scripting.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(cross_site_scripting.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, cross_site_scripting.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", 
"0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, cross_site_scripting.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, dos_risky_access_across_trust_boundary.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+dos_risky_access_across_trust_boundary.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, dos_risky_access_across_trust_boundary.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, dos_risky_access_across_trust_boundary.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(dos_risky_access_across_trust_boundary.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, dos_risky_access_across_trust_boundary.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, dos_risky_access_across_trust_boundary.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, incomplete_model.Category().Id) { - skipped = "SKIPPED - " - } else 
{ - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+incomplete_model.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, incomplete_model.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, incomplete_model.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(incomplete_model.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, incomplete_model.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, incomplete_model.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, ldap_injection.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+ldap_injection.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, ldap_injection.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, 
ldap_injection.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(ldap_injection.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, ldap_injection.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, ldap_injection.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, missing_authentication.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+missing_authentication.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, missing_authentication.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_authentication.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(missing_authentication.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - 
r.pdf.MultiCell(160, 6, missing_authentication.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_authentication.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, missing_authentication_second_factor.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+missing_authentication_second_factor.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, missing_authentication_second_factor.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_authentication_second_factor.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(missing_authentication_second_factor.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_authentication_second_factor.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_authentication_second_factor.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - 
r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, missing_build_infrastructure.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+missing_build_infrastructure.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, missing_build_infrastructure.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_build_infrastructure.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(missing_build_infrastructure.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_build_infrastructure.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_build_infrastructure.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, missing_cloud_hardening.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+missing_cloud_hardening.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, missing_cloud_hardening.Category().Id, "0", 0, "", false, 0, "") - 
r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_cloud_hardening.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(missing_cloud_hardening.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_cloud_hardening.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_cloud_hardening.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, missing_file_validation.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+missing_file_validation.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, missing_file_validation.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_file_validation.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - 
r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(missing_file_validation.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_file_validation.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_file_validation.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, missing_hardening.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+missing_hardening.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, missing_hardening.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_hardening.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(missing_hardening.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_hardening.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", 
false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_hardening.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, missing_identity_propagation.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+missing_identity_propagation.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, missing_identity_propagation.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_identity_propagation.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(missing_identity_propagation.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_identity_propagation.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_identity_propagation.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, missing_identity_provider_isolation.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+missing_identity_provider_isolation.Category().Title, "0", 0, "", false, 
0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, missing_identity_provider_isolation.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_identity_provider_isolation.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(missing_identity_provider_isolation.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_identity_provider_isolation.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_identity_provider_isolation.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, missing_identity_store.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+missing_identity_store.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, missing_identity_store.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, 
missing_identity_store.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(missing_identity_store.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_identity_store.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_identity_store.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, missing_network_segmentation.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+missing_network_segmentation.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, missing_network_segmentation.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_network_segmentation.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(missing_network_segmentation.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, 
"Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_network_segmentation.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_network_segmentation.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, missing_vault.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+missing_vault.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, missing_vault.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_vault.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(missing_vault.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_vault.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_vault.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, 
missing_vault_isolation.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+missing_vault_isolation.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, missing_vault_isolation.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_vault_isolation.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(missing_vault_isolation.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_vault_isolation.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_vault_isolation.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, missing_waf.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+missing_waf.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, missing_waf.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - 
r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_waf.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(missing_waf.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_waf.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, missing_waf.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, mixed_targets_on_shared_runtime.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+mixed_targets_on_shared_runtime.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, mixed_targets_on_shared_runtime.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, mixed_targets_on_shared_runtime.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(mixed_targets_on_shared_runtime.Category().Description), "0", "0", false) - r.pdfColorGray() - 
r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, mixed_targets_on_shared_runtime.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, mixed_targets_on_shared_runtime.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, path_traversal.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+path_traversal.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, path_traversal.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, path_traversal.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(path_traversal.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, path_traversal.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, path_traversal.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - 
r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, push_instead_of_pull_deployment.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+push_instead_of_pull_deployment.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, push_instead_of_pull_deployment.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, push_instead_of_pull_deployment.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(push_instead_of_pull_deployment.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, push_instead_of_pull_deployment.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, push_instead_of_pull_deployment.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, search_query_injection.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+search_query_injection.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, search_query_injection.Category().Id, "0", 0, 
"", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, search_query_injection.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(search_query_injection.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, search_query_injection.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, search_query_injection.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, server_side_request_forgery.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+server_side_request_forgery.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, server_side_request_forgery.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, server_side_request_forgery.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 
0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(server_side_request_forgery.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, server_side_request_forgery.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, server_side_request_forgery.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, service_registry_poisoning.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+service_registry_poisoning.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, service_registry_poisoning.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, service_registry_poisoning.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(service_registry_poisoning.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, service_registry_poisoning.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - 
r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, service_registry_poisoning.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, sql_nosql_injection.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+sql_nosql_injection.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, sql_nosql_injection.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, sql_nosql_injection.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(sql_nosql_injection.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, sql_nosql_injection.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, sql_nosql_injection.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, unchecked_deployment.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+unchecked_deployment.Category().Title, 
"0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, unchecked_deployment.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unchecked_deployment.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(unchecked_deployment.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unchecked_deployment.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unchecked_deployment.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, unencrypted_asset.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+unencrypted_asset.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, unencrypted_asset.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unencrypted_asset.Category().STRIDE.Title(), "0", "0", false) - 
r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(unencrypted_asset.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unencrypted_asset.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unencrypted_asset.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, unencrypted_communication.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+unencrypted_communication.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, unencrypted_communication.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unencrypted_communication.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(unencrypted_communication.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, 
unencrypted_communication.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unencrypted_communication.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, unguarded_access_from_internet.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+unguarded_access_from_internet.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, unguarded_access_from_internet.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unguarded_access_from_internet.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(unguarded_access_from_internet.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unguarded_access_from_internet.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unguarded_access_from_internet.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, 
unguarded_direct_datastore_access.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+unguarded_direct_datastore_access.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, unguarded_direct_datastore_access.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unguarded_direct_datastore_access.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(unguarded_direct_datastore_access.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unguarded_direct_datastore_access.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unguarded_direct_datastore_access.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, unnecessary_communication_link.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+unnecessary_communication_link.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, unnecessary_communication_link.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - 
r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unnecessary_communication_link.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(unnecessary_communication_link.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unnecessary_communication_link.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unnecessary_communication_link.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, unnecessary_data_asset.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+unnecessary_data_asset.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, unnecessary_data_asset.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unnecessary_data_asset.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - 
r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(unnecessary_data_asset.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unnecessary_data_asset.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unnecessary_data_asset.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, unnecessary_data_transfer.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+unnecessary_data_transfer.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, unnecessary_data_transfer.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unnecessary_data_transfer.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(unnecessary_data_transfer.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unnecessary_data_transfer.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - 
r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unnecessary_data_transfer.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, unnecessary_technical_asset.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+unnecessary_technical_asset.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, unnecessary_technical_asset.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unnecessary_technical_asset.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(unnecessary_technical_asset.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unnecessary_technical_asset.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, unnecessary_technical_asset.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, untrusted_deserialization.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, 
skipped+untrusted_deserialization.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, untrusted_deserialization.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, untrusted_deserialization.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(untrusted_deserialization.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, untrusted_deserialization.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, untrusted_deserialization.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, wrong_communication_link_content.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+wrong_communication_link_content.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, wrong_communication_link_content.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, 
"") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, wrong_communication_link_content.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(wrong_communication_link_content.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, wrong_communication_link_content.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, wrong_communication_link_content.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, wrong_trust_boundary_content.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" - } - r.pdf.CellFormat(190, 3, skipped+wrong_trust_boundary_content.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, wrong_trust_boundary_content.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, wrong_trust_boundary_content.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(wrong_trust_boundary_content.Category().Description), "0", "0", false) - 
r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, wrong_trust_boundary_content.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, wrong_trust_boundary_content.Category().RiskAssessment, "0", "0", false) - - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "B", fontSizeBody) - if contains(skippedRules, xml_external_entity.Category().Id) { - skipped = "SKIPPED - " - } else { - skipped = "" + for _, rule := range risks.GetBuiltInRiskRules() { + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "B", fontSizeBody) + if contains(skippedRules, rule.Category().Id) { + skipped = "SKIPPED - " + } else { + skipped = "" + } + r.pdf.CellFormat(190, 3, skipped+rule.Category().Title, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeSmall) + r.pdf.CellFormat(190, 6, rule.Category().Id, "0", 0, "", false, 0, "") + r.pdf.Ln(-1) + r.pdf.SetFont("Helvetica", "", fontSizeBody) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, rule.Category().STRIDE.Title(), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, firstParagraph(rule.Category().Description), "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, rule.Category().DetectionLogic, "0", "0", false) + r.pdfColorGray() + r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") + 
r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") + r.pdfColorBlack() + r.pdf.MultiCell(160, 6, rule.Category().RiskAssessment, "0", "0", false) } - r.pdf.CellFormat(190, 3, skipped+xml_external_entity.Category().Title, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeSmall) - r.pdf.CellFormat(190, 6, xml_external_entity.Category().Id, "0", 0, "", false, 0, "") - r.pdf.Ln(-1) - r.pdf.SetFont("Helvetica", "", fontSizeBody) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "STRIDE:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, xml_external_entity.Category().STRIDE.Title(), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Description:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, firstParagraph(xml_external_entity.Category().Description), "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Detection:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, xml_external_entity.Category().DetectionLogic, "0", "0", false) - r.pdfColorGray() - r.pdf.CellFormat(5, 6, "", "0", 0, "", false, 0, "") - r.pdf.CellFormat(25, 6, "Rating:", "0", 0, "", false, 0, "") - r.pdfColorBlack() - r.pdf.MultiCell(160, 6, xml_external_entity.Category().RiskAssessment, "0", "0", false) } func (r *pdfReporter) createTargetDescription(parsedModel *types.ParsedModel, baseFolder string) error { diff --git a/pkg/security/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go b/pkg/security/risks/builtin/accidental-secret-leak-rule.go similarity index 83% rename from pkg/security/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go rename to pkg/security/risks/builtin/accidental-secret-leak-rule.go index 8303a3d4..2da19893 100644 --- 
a/pkg/security/risks/built-in/accidental-secret-leak/accidental-secret-leak-rule.go +++ b/pkg/security/risks/builtin/accidental-secret-leak-rule.go @@ -1,18 +1,16 @@ -package accidental_secret_leak +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type AccidentalSecretLeakRule struct{} + +func NewAccidentalSecretLeakRule() *AccidentalSecretLeakRule { + return &AccidentalSecretLeakRule{} } -func Category() types.RiskCategory { +func (*AccidentalSecretLeakRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "accidental-secret-leak", Title: "Accidental Secret Leak", @@ -38,11 +36,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*AccidentalSecretLeakRule) SupportedTags() []string { return []string{"git", "nexus"} } -func GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { +func (r *AccidentalSecretLeakRule) GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range parsedModel.SortedTechnicalAssetIDs() { techAsset := parsedModel.TechnicalAssets[id] @@ -50,9 +48,9 @@ func GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { (techAsset.Technology == types.SourcecodeRepository || techAsset.Technology == types.ArtifactRegistry) { var risk types.Risk if techAsset.IsTaggedWithAny("git") { - risk = createRisk(parsedModel, techAsset, "Git", "Git Leak Prevention") + risk = r.createRisk(parsedModel, techAsset, "Git", "Git Leak Prevention") } else { - risk = createRisk(parsedModel, techAsset, "", "") + risk = r.createRisk(parsedModel, techAsset, "", "") } risks = append(risks, risk) } @@ -60,7 +58,7 @@ func GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { return risks } -func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset, prefix, details 
string) types.Risk { +func (r *AccidentalSecretLeakRule) createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset, prefix, details string) types.Risk { if len(prefix) > 0 { prefix = " (" + prefix + ")" } @@ -81,7 +79,7 @@ func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAs } // create risk risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/code-backdooring/code-backdooring-rule.go b/pkg/security/risks/builtin/code-backdooring-rule.go similarity index 88% rename from pkg/security/risks/built-in/code-backdooring/code-backdooring-rule.go rename to pkg/security/risks/builtin/code-backdooring-rule.go index 0cee94b6..1e19966f 100644 --- a/pkg/security/risks/built-in/code-backdooring/code-backdooring-rule.go +++ b/pkg/security/risks/builtin/code-backdooring-rule.go @@ -1,18 +1,16 @@ -package code_backdooring +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type CodeBackdooringRule struct{} + +func NewCodeBackdooringRule() *CodeBackdooringRule { + return &CodeBackdooringRule{} } -func Category() types.RiskCategory { +func (*CodeBackdooringRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "code-backdooring", Title: "Code Backdooring", @@ -44,17 +42,17 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*CodeBackdooringRule) SupportedTags() []string { return []string{} } -func GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { +func (r *CodeBackdooringRule) GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range 
parsedModel.SortedTechnicalAssetIDs() { technicalAsset := parsedModel.TechnicalAssets[id] if !technicalAsset.OutOfScope && technicalAsset.Technology.IsDevelopmentRelevant() { if technicalAsset.Internet { - risks = append(risks, createRisk(parsedModel, technicalAsset, true)) + risks = append(risks, r.createRisk(parsedModel, technicalAsset, true)) continue } @@ -64,7 +62,7 @@ func GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { for _, callerLink := range parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] { caller := parsedModel.TechnicalAssets[callerLink.SourceId] if (!callerLink.VPN && caller.Internet) || caller.OutOfScope { - risks = append(risks, createRisk(parsedModel, technicalAsset, true)) + risks = append(risks, r.createRisk(parsedModel, technicalAsset, true)) //riskByLinkAdded = true break } @@ -74,7 +72,7 @@ func GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { return risks } -func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, elevatedRisk bool) types.Risk { +func (r *CodeBackdooringRule) createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, elevatedRisk bool) types.Risk { title := "Code Backdooring risk at " + technicalAsset.Title + "" impact := types.LowImpact if technicalAsset.Technology != types.CodeInspectionPlatform { @@ -109,7 +107,7 @@ func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, e } // create risk risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go b/pkg/security/risks/builtin/container-baseimage-backdooring-rule.go similarity index 82% rename from pkg/security/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go 
rename to pkg/security/risks/builtin/container-baseimage-backdooring-rule.go index cae9fb6d..f77d412d 100644 --- a/pkg/security/risks/built-in/container-baseimage-backdooring/container-baseimage-backdooring-rule.go +++ b/pkg/security/risks/builtin/container-baseimage-backdooring-rule.go @@ -1,18 +1,16 @@ -package container_baseimage_backdooring +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type ContainerBaseImageBackdooringRule struct{} + +func NewContainerBaseImageBackdooringRule() *ContainerBaseImageBackdooringRule { + return &ContainerBaseImageBackdooringRule{} } -func Category() types.RiskCategory { +func (*ContainerBaseImageBackdooringRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "container-baseimage-backdooring", Title: "Container Base Image Backdooring", @@ -39,22 +37,22 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*ContainerBaseImageBackdooringRule) SupportedTags() []string { return []string{} } -func GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { +func (r *ContainerBaseImageBackdooringRule) GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range parsedModel.SortedTechnicalAssetIDs() { technicalAsset := parsedModel.TechnicalAssets[id] if !technicalAsset.OutOfScope && technicalAsset.Machine == types.Container { - risks = append(risks, createRisk(parsedModel, technicalAsset)) + risks = append(risks, r.createRisk(parsedModel, technicalAsset)) } } return risks } -func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { +func (r *ContainerBaseImageBackdooringRule) createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { title := "Container Base Image Backdooring risk at " + 
technicalAsset.Title + "" impact := types.MediumImpact if technicalAsset.HighestConfidentiality(parsedModel) == types.StrictlyConfidential || @@ -63,7 +61,7 @@ func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAs impact = types.HighImpact } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/container-platform-escape/container-platform-escape-rule.go b/pkg/security/risks/builtin/container-platform-escape-rule.go similarity index 86% rename from pkg/security/risks/built-in/container-platform-escape/container-platform-escape-rule.go rename to pkg/security/risks/builtin/container-platform-escape-rule.go index bf4a2cf2..4f55f21b 100644 --- a/pkg/security/risks/built-in/container-platform-escape/container-platform-escape-rule.go +++ b/pkg/security/risks/builtin/container-platform-escape-rule.go @@ -1,18 +1,16 @@ -package container_platform_escape +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type ContainerPlatformEscapeRule struct{} + +func NewContainerPlatformEscapeRule() *ContainerPlatformEscapeRule { + return &ContainerPlatformEscapeRule{} } -func Category() types.RiskCategory { +func (*ContainerPlatformEscapeRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "container-platform-escape", Title: "Container Platform Escape", @@ -44,22 +42,22 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*ContainerPlatformEscapeRule) SupportedTags() []string { return []string{"docker", "kubernetes", "openshift"} } -func GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { +func (r *ContainerPlatformEscapeRule) 
GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range parsedModel.SortedTechnicalAssetIDs() { technicalAsset := parsedModel.TechnicalAssets[id] if !technicalAsset.OutOfScope && technicalAsset.Technology == types.ContainerPlatform { - risks = append(risks, createRisk(parsedModel, technicalAsset)) + risks = append(risks, r.createRisk(parsedModel, technicalAsset)) } } return risks } -func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { +func (r *ContainerPlatformEscapeRule) createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { title := "Container Platform Escape risk at " + technicalAsset.Title + "" impact := types.MediumImpact if technicalAsset.HighestConfidentiality(parsedModel) == types.StrictlyConfidential || @@ -76,7 +74,7 @@ func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAs } // create risk risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go b/pkg/security/risks/builtin/cross-site-request-forgery-rule.go similarity index 82% rename from pkg/security/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go rename to pkg/security/risks/builtin/cross-site-request-forgery-rule.go index 83f5317b..1f19d324 100644 --- a/pkg/security/risks/built-in/cross-site-request-forgery/cross-site-request-forgery-rule.go +++ b/pkg/security/risks/builtin/cross-site-request-forgery-rule.go @@ -1,18 +1,16 @@ -package cross_site_request_forgery +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - 
GenerateRisks: GenerateRisks, - } +type CrossSiteRequestForgeryRule struct{} + +func NewCrossSiteRequestForgeryRule() *CrossSiteRequestForgeryRule { + return &CrossSiteRequestForgeryRule{} } -func Category() types.RiskCategory { +func (*CrossSiteRequestForgeryRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "cross-site-request-forgery", Title: "Cross-Site Request Forgery (CSRF)", @@ -40,11 +38,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*CrossSiteRequestForgeryRule) SupportedTags() []string { return []string{} } -func GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { +func (r *CrossSiteRequestForgeryRule) GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range parsedModel.SortedTechnicalAssetIDs() { technicalAsset := parsedModel.TechnicalAssets[id] @@ -58,14 +56,14 @@ func GenerateRisks(parsedModel *types.ParsedModel) []types.Risk { if incomingFlow.Usage == types.DevOps { likelihood = types.Likely } - risks = append(risks, createRisk(parsedModel, technicalAsset, incomingFlow, likelihood)) + risks = append(risks, r.createRisk(parsedModel, technicalAsset, incomingFlow, likelihood)) } } } return risks } -func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingFlow types.CommunicationLink, likelihood types.RiskExploitationLikelihood) types.Risk { +func (r *CrossSiteRequestForgeryRule) createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingFlow types.CommunicationLink, likelihood types.RiskExploitationLikelihood) types.Risk { sourceAsset := parsedModel.TechnicalAssets[incomingFlow.SourceId] title := "Cross-Site Request Forgery (CSRF) risk at " + technicalAsset.Title + " via " + incomingFlow.Title + " from " + sourceAsset.Title + "" impact := types.LowImpact @@ -73,7 +71,7 @@ func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAs impact = 
types.MediumImpact } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go b/pkg/security/risks/builtin/cross-site-scripting-rule.go similarity index 82% rename from pkg/security/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go rename to pkg/security/risks/builtin/cross-site-scripting-rule.go index 02d4483c..00e87ab1 100644 --- a/pkg/security/risks/built-in/cross-site-scripting/cross-site-scripting-rule.go +++ b/pkg/security/risks/builtin/cross-site-scripting-rule.go @@ -1,18 +1,16 @@ -package cross_site_scripting +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type CrossSiteScriptingRule struct{} + +func NewCrossSiteScriptingRule() *CrossSiteScriptingRule { + return &CrossSiteScriptingRule{} } -func Category() types.RiskCategory { +func (*CrossSiteScriptingRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "cross-site-scripting", Title: "Cross-Site Scripting (XSS)", @@ -38,30 +36,30 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*CrossSiteScriptingRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *CrossSiteScriptingRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if technicalAsset.OutOfScope || !technicalAsset.Technology.IsWebApplication() { // TODO: also mobile clients or rich-clients as long as they use web-view... 
continue } - risks = append(risks, createRisk(input, technicalAsset)) + risks = append(risks, r.createRisk(input, technicalAsset)) } return risks } -func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { +func (r *CrossSiteScriptingRule) createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { title := "Cross-Site Scripting (XSS) risk at " + technicalAsset.Title + "" impact := types.MediumImpact if technicalAsset.HighestConfidentiality(parsedModel) == types.StrictlyConfidential || technicalAsset.HighestIntegrity(parsedModel) == types.MissionCritical { impact = types.HighImpact } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Likely, impact), ExploitationLikelihood: types.Likely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go b/pkg/security/risks/builtin/dos-risky-access-across-trust-boundary-rule.go similarity index 79% rename from pkg/security/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go rename to pkg/security/risks/builtin/dos-risky-access-across-trust-boundary-rule.go index 83a8775f..09fc70b3 100644 --- a/pkg/security/risks/built-in/dos-risky-access-across-trust-boundary/dos-risky-access-across-trust-boundary-rule.go +++ b/pkg/security/risks/builtin/dos-risky-access-across-trust-boundary-rule.go @@ -1,18 +1,16 @@ -package dos_risky_access_across_trust_boundary +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type DosRiskyAccessAcrossTrustBoundaryRule struct{} + +func NewDosRiskyAccessAcrossTrustBoundaryRule() *DosRiskyAccessAcrossTrustBoundaryRule { + return 
&DosRiskyAccessAcrossTrustBoundaryRule{} } -func Category() types.RiskCategory { +func (*DosRiskyAccessAcrossTrustBoundaryRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "dos-risky-access-across-trust-boundary", Title: "DoS-risky Access Across Trust-Boundary", @@ -42,11 +40,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*DosRiskyAccessAcrossTrustBoundaryRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *DosRiskyAccessAcrossTrustBoundaryRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] @@ -58,10 +56,10 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { // Now try to walk a call chain up (1 hop only) to find a caller's caller used by human callersCommLinks := input.IncomingTechnicalCommunicationLinksMappedByTargetId[sourceAsset.Id] for _, callersCommLink := range callersCommLinks { - risks = checkRisk(input, technicalAsset, callersCommLink, sourceAsset.Title, risks) + risks = r.checkRisk(input, technicalAsset, callersCommLink, sourceAsset.Title, risks) } } else { - risks = checkRisk(input, technicalAsset, incomingAccess, "", risks) + risks = r.checkRisk(input, technicalAsset, incomingAccess, "", risks) } } } @@ -69,18 +67,18 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { return risks } -func checkRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingAccess types.CommunicationLink, hopBetween string, risks []types.Risk) []types.Risk { +func (r *DosRiskyAccessAcrossTrustBoundaryRule) checkRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingAccess types.CommunicationLink, hopBetween string, risks []types.Risk) []types.Risk { if incomingAccess.IsAcrossTrustBoundaryNetworkOnly(input) && !incomingAccess.Protocol.IsProcessLocal() && 
incomingAccess.Usage != types.DevOps { highRisk := technicalAsset.Availability == types.MissionCritical && !incomingAccess.VPN && !incomingAccess.IpFiltered && !technicalAsset.Redundant - risks = append(risks, createRisk(technicalAsset, incomingAccess, hopBetween, + risks = append(risks, r.createRisk(technicalAsset, incomingAccess, hopBetween, input.TechnicalAssets[incomingAccess.SourceId], highRisk)) } return risks } -func createRisk(techAsset types.TechnicalAsset, dataFlow types.CommunicationLink, hopBetween string, +func (r *DosRiskyAccessAcrossTrustBoundaryRule) createRisk(techAsset types.TechnicalAsset, dataFlow types.CommunicationLink, hopBetween string, clientOutsideTrustBoundary types.TechnicalAsset, moreRisky bool) types.Risk { impact := types.LowImpact if moreRisky { @@ -90,7 +88,7 @@ func createRisk(techAsset types.TechnicalAsset, dataFlow types.CommunicationLink hopBetween = " forwarded via " + hopBetween + "" } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/incomplete-model/incomplete-model-rule.go b/pkg/security/risks/builtin/incomplete-model-rule.go similarity index 79% rename from pkg/security/risks/built-in/incomplete-model/incomplete-model-rule.go rename to pkg/security/risks/builtin/incomplete-model-rule.go index 9ec239ff..b954d646 100644 --- a/pkg/security/risks/built-in/incomplete-model/incomplete-model-rule.go +++ b/pkg/security/risks/builtin/incomplete-model-rule.go @@ -1,18 +1,16 @@ -package incomplete_model +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type IncompleteModelRule struct{} + +func NewIncompleteModelRule() *IncompleteModelRule { + return 
&IncompleteModelRule{} } -func Category() types.RiskCategory { +func (*IncompleteModelRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "incomplete-model", Title: "Incomplete Model", @@ -34,21 +32,21 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*IncompleteModelRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *IncompleteModelRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope { if technicalAsset.Technology == types.UnknownTechnology { - risks = append(risks, createRiskTechAsset(technicalAsset)) + risks = append(risks, r.createRiskTechAsset(technicalAsset)) } for _, commLink := range technicalAsset.CommunicationLinks { if commLink.Protocol == types.UnknownProtocol { - risks = append(risks, createRiskCommLink(technicalAsset, commLink)) + risks = append(risks, r.createRiskCommLink(technicalAsset, commLink)) } } } @@ -56,10 +54,10 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { return risks } -func createRiskTechAsset(technicalAsset types.TechnicalAsset) types.Risk { +func (r *IncompleteModelRule) createRiskTechAsset(technicalAsset types.TechnicalAsset) types.Risk { title := "Unknown Technology specified at technical asset " + technicalAsset.Title + "" risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, types.LowImpact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: types.LowImpact, @@ -72,10 +70,10 @@ func createRiskTechAsset(technicalAsset types.TechnicalAsset) types.Risk { return risk } -func createRiskCommLink(technicalAsset types.TechnicalAsset, commLink types.CommunicationLink) types.Risk { +func (r *IncompleteModelRule) createRiskCommLink(technicalAsset 
types.TechnicalAsset, commLink types.CommunicationLink) types.Risk { title := "Unknown Protocol specified for communication link " + commLink.Title + " at technical asset " + technicalAsset.Title + "" risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, types.LowImpact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: types.LowImpact, diff --git a/pkg/security/risks/built-in/ldap-injection/ldap-injection-rule.go b/pkg/security/risks/builtin/ldap-injection-rule.go similarity index 83% rename from pkg/security/risks/built-in/ldap-injection/ldap-injection-rule.go rename to pkg/security/risks/builtin/ldap-injection-rule.go index 16ccfced..d6e33593 100644 --- a/pkg/security/risks/built-in/ldap-injection/ldap-injection-rule.go +++ b/pkg/security/risks/builtin/ldap-injection-rule.go @@ -1,18 +1,16 @@ -package ldap_injection +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type LdapInjectionRule struct{} + +func NewLdapInjectionRule() *LdapInjectionRule { + return &LdapInjectionRule{} } -func Category() types.RiskCategory { +func (*LdapInjectionRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "ldap-injection", Title: "LDAP-Injection", @@ -37,7 +35,11 @@ func Category() types.RiskCategory { } } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (*LdapInjectionRule) SupportedTags() []string { + return []string{} +} + +func (r *LdapInjectionRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, technicalAsset := range input.TechnicalAssets { incomingFlows := input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] @@ -50,18 +52,14 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { if incomingFlow.Usage == 
types.DevOps { likelihood = types.Unlikely } - risks = append(risks, createRisk(input, technicalAsset, incomingFlow, likelihood)) + risks = append(risks, r.createRisk(input, technicalAsset, incomingFlow, likelihood)) } } } return risks } -func SupportedTags() []string { - return []string{} -} - -func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingFlow types.CommunicationLink, likelihood types.RiskExploitationLikelihood) types.Risk { +func (r *LdapInjectionRule) createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingFlow types.CommunicationLink, likelihood types.RiskExploitationLikelihood) types.Risk { caller := input.TechnicalAssets[incomingFlow.SourceId] title := "LDAP-Injection risk at " + caller.Title + " against LDAP server " + technicalAsset.Title + "" + " via " + incomingFlow.Title + "" @@ -70,7 +68,7 @@ func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, i impact = types.HighImpact } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/missing-authentication/missing-authentication-rule.go b/pkg/security/risks/builtin/missing-authentication-rule.go similarity index 86% rename from pkg/security/risks/built-in/missing-authentication/missing-authentication-rule.go rename to pkg/security/risks/builtin/missing-authentication-rule.go index c5e919bc..d7015fae 100644 --- a/pkg/security/risks/built-in/missing-authentication/missing-authentication-rule.go +++ b/pkg/security/risks/builtin/missing-authentication-rule.go @@ -1,18 +1,16 @@ -package missing_authentication +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type 
MissingAuthenticationRule struct{} + +func NewMissingAuthenticationRule() *MissingAuthenticationRule { + return &MissingAuthenticationRule{} } -func Category() types.RiskCategory { +func (*MissingAuthenticationRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "missing-authentication", Title: "Missing Authentication", @@ -37,11 +35,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*MissingAuthenticationRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *MissingAuthenticationRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] @@ -71,7 +69,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { impact = types.LowImpact } if commLink.Authentication == types.NoneAuthentication && !commLink.Protocol.IsProcessLocal() { - risks = append(risks, CreateRisk(input, technicalAsset, commLink, commLink, "", impact, types.Likely, false, Category())) + risks = append(risks, r.createRisk(input, technicalAsset, commLink, commLink, "", impact, types.Likely, false, r.Category())) } } } @@ -79,7 +77,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { return risks } -func CreateRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingAccess, incomingAccessOrigin types.CommunicationLink, hopBetween string, +func (r *MissingAuthenticationRule) createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingAccess, incomingAccessOrigin types.CommunicationLink, hopBetween string, impact types.RiskExploitationImpact, likelihood types.RiskExploitationLikelihood, twoFactor bool, category types.RiskCategory) types.Risk { factorString := "" if twoFactor { @@ -89,7 +87,7 @@ func CreateRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, i hopBetween = "forwarded 
via " + hopBetween + " " } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go b/pkg/security/risks/builtin/missing-authentication-second-factor-rule.go similarity index 80% rename from pkg/security/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go rename to pkg/security/risks/builtin/missing-authentication-second-factor-rule.go index 4b1d7e05..9c159187 100644 --- a/pkg/security/risks/built-in/missing-authentication-second-factor/missing-authentication-second-factor-rule.go +++ b/pkg/security/risks/builtin/missing-authentication-second-factor-rule.go @@ -1,19 +1,18 @@ -package missing_authentication_second_factor +package builtin import ( - missing_authentication "github.com/threagile/threagile/pkg/security/risks/built-in/missing-authentication" "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type MissingAuthenticationSecondFactorRule struct { + missingAuthenticationRule *MissingAuthenticationRule +} + +func NewMissingAuthenticationSecondFactorRule(missingAuthenticationRule *MissingAuthenticationRule) *MissingAuthenticationSecondFactorRule { + return &MissingAuthenticationSecondFactorRule{missingAuthenticationRule: missingAuthenticationRule} } -func Category() types.RiskCategory { +func (*MissingAuthenticationSecondFactorRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "missing-authentication-second-factor", Title: "Missing Two-Factor Authentication (2FA)", @@ -38,11 +37,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func 
(*MissingAuthenticationSecondFactorRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *MissingAuthenticationSecondFactorRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] @@ -66,7 +65,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { moreRisky := commLink.HighestConfidentiality(input) >= types.Confidential || commLink.HighestIntegrity(input) >= types.Critical if moreRisky && commLink.Authentication != types.TwoFactor { - risks = append(risks, missing_authentication.CreateRisk(input, technicalAsset, commLink, commLink, "", types.MediumImpact, types.Unlikely, true, Category())) + risks = append(risks, r.missingAuthenticationRule.createRisk(input, technicalAsset, commLink, commLink, "", types.MediumImpact, types.Unlikely, true, r.Category())) } } else if caller.Technology.IsTrafficForwarding() { // Now try to walk a call chain up (1 hop only) to find a caller's caller used by human @@ -80,7 +79,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { moreRisky := callersCommLink.HighestConfidentiality(input) >= types.Confidential || callersCommLink.HighestIntegrity(input) >= types.Critical if moreRisky && callersCommLink.Authentication != types.TwoFactor { - risks = append(risks, missing_authentication.CreateRisk(input, technicalAsset, commLink, callersCommLink, caller.Title, types.MediumImpact, types.Unlikely, true, Category())) + risks = append(risks, r.missingAuthenticationRule.createRisk(input, technicalAsset, commLink, callersCommLink, caller.Title, types.MediumImpact, types.Unlikely, true, r.Category())) } } } diff --git a/pkg/security/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go b/pkg/security/risks/builtin/missing-build-infrastructure-rule.go similarity index 85% rename from 
pkg/security/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go rename to pkg/security/risks/builtin/missing-build-infrastructure-rule.go index 759d9bea..22dea469 100644 --- a/pkg/security/risks/built-in/missing-build-infrastructure/missing-build-infrastructure-rule.go +++ b/pkg/security/risks/builtin/missing-build-infrastructure-rule.go @@ -1,18 +1,16 @@ -package missing_build_infrastructure +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type MissingBuildInfrastructureRule struct{} + +func NewMissingBuildInfrastructureRule() *MissingBuildInfrastructureRule { + return &MissingBuildInfrastructureRule{} } -func Category() types.RiskCategory { +func (*MissingBuildInfrastructureRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "missing-build-infrastructure", Title: "Missing Build Infrastructure", @@ -39,11 +37,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*MissingBuildInfrastructureRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *MissingBuildInfrastructureRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) hasCustomDevelopedParts, hasBuildPipeline, hasSourcecodeRepo, hasDevOpsClient := false, false, false, false impact := types.LowImpact @@ -82,15 +80,15 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { } hasBuildInfrastructure := hasBuildPipeline && hasSourcecodeRepo && hasDevOpsClient if hasCustomDevelopedParts && !hasBuildInfrastructure { - risks = append(risks, createRisk(mostRelevantAsset, impact)) + risks = append(risks, r.createRisk(mostRelevantAsset, impact)) } return risks } -func createRisk(technicalAsset types.TechnicalAsset, impact types.RiskExploitationImpact) 
types.Risk { +func (r *MissingBuildInfrastructureRule) createRisk(technicalAsset types.TechnicalAsset, impact types.RiskExploitationImpact) types.Risk { title := "Missing Build Infrastructure in the threat model (referencing asset " + technicalAsset.Title + " as an example)" risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go b/pkg/security/risks/builtin/missing-cloud-hardening-rule.go similarity index 80% rename from pkg/security/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go rename to pkg/security/risks/builtin/missing-cloud-hardening-rule.go index a3ff56e6..8c32b340 100644 --- a/pkg/security/risks/built-in/missing-cloud-hardening/missing-cloud-hardening-rule.go +++ b/pkg/security/risks/builtin/missing-cloud-hardening-rule.go @@ -1,4 +1,4 @@ -package missing_cloud_hardening +package builtin import ( "sort" @@ -6,15 +6,13 @@ import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type MissingCloudHardeningRule struct{} + +func NewMissingCloudHardeningRule() *MissingCloudHardeningRule { + return &MissingCloudHardeningRule{} } -func Category() types.RiskCategory { +func (*MissingCloudHardeningRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "missing-cloud-hardening", Title: "Missing Cloud Hardening", @@ -47,7 +45,7 @@ func Category() types.RiskCategory { var specificSubTagsAWS = []string{"aws:vpc", "aws:ec2", "aws:s3", "aws:ebs", "aws:apigateway", "aws:lambda", "aws:dynamodb", "aws:rds", "aws:sqs", "aws:iam"} -func SupportedTags() []string { +func (*MissingCloudHardeningRule) SupportedTags() []string { res := []string{ "aws", 
// Amazon AWS "azure", // Microsoft Azure @@ -58,7 +56,7 @@ func SupportedTags() []string { return res } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *MissingCloudHardeningRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) sharedRuntimesWithUnspecificCloudRisks := make(map[string]bool) @@ -84,14 +82,14 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { techAssetIDsWithSubtagSpecificCloudRisks := make(map[string]bool) for _, trustBoundary := range input.TrustBoundaries { - taggedOuterTB := trustBoundary.IsTaggedWithAny(SupportedTags()...) // false = generic cloud risks only // true = cloud-individual risks + taggedOuterTB := trustBoundary.IsTaggedWithAny(r.SupportedTags()...) // false = generic cloud risks only // true = cloud-individual risks if taggedOuterTB || trustBoundary.Type.IsWithinCloud() { - addTrustBoundaryAccordingToBaseTag(trustBoundary, trustBoundariesWithUnspecificCloudRisks, + r.addTrustBoundaryAccordingToBaseTag(trustBoundary, trustBoundariesWithUnspecificCloudRisks, trustBoundaryIDsAWS, trustBoundaryIDsAzure, trustBoundaryIDsGCP, trustBoundaryIDsOCP) for _, techAssetID := range trustBoundary.RecursivelyAllTechnicalAssetIDsInside(input) { added := false tA := input.TechnicalAssets[techAssetID] - if tA.IsTaggedWithAny(SupportedTags()...) { + if tA.IsTaggedWithAny(r.SupportedTags()...) { addAccordingToBaseTag(tA, tA.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) @@ -110,15 +108,15 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { } // now loop over all technical assets, trust boundaries, and shared runtimes model-wide by tag - for _, tA := range input.TechnicalAssetsTaggedWithAny(SupportedTags()...) { + for _, tA := range input.TechnicalAssetsTaggedWithAny(r.SupportedTags()...) 
{ addAccordingToBaseTag(tA, tA.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) } - for _, tB := range input.TrustBoundariesTaggedWithAny(SupportedTags()...) { + for _, tB := range input.TrustBoundariesTaggedWithAny(r.SupportedTags()...) { for _, candidateID := range tB.RecursivelyAllTechnicalAssetIDsInside(input) { tA := input.TechnicalAssets[candidateID] - if tA.IsTaggedWithAny(SupportedTags()...) { + if tA.IsTaggedWithAny(r.SupportedTags()...) { addAccordingToBaseTag(tA, tA.Tags, techAssetIDsWithSubtagSpecificCloudRisks, techAssetIDsAWS, techAssetIDsAzure, techAssetIDsGCP, techAssetIDsOCP) @@ -129,8 +127,8 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { } } } - for _, sR := range input.SharedRuntimesTaggedWithAny(SupportedTags()...) { - addSharedRuntimeAccordingToBaseTag(sR, sharedRuntimesWithUnspecificCloudRisks, + for _, sR := range input.SharedRuntimesTaggedWithAny(r.SupportedTags()...) { + r.addSharedRuntimeAccordingToBaseTag(sR, sharedRuntimesWithUnspecificCloudRisks, sharedRuntimeIDsAWS, sharedRuntimeIDsAzure, sharedRuntimeIDsGCP, sharedRuntimeIDsOCP) for _, candidateID := range sR.TechnicalAssetsRunning { tA := input.TechnicalAssets[candidateID] @@ -190,51 +188,51 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { // first try to add shared runtimes... 
for id := range sharedRuntimeIDsAWS { - risks = append(risks, createRiskForSharedRuntime(input, input.SharedRuntimes[id], "AWS", "CIS Benchmark for AWS")) + risks = append(risks, r.createRiskForSharedRuntime(input, input.SharedRuntimes[id], "AWS", "CIS Benchmark for AWS")) addedAWS = true } for id := range sharedRuntimeIDsAzure { - risks = append(risks, createRiskForSharedRuntime(input, input.SharedRuntimes[id], "Azure", "CIS Benchmark for Microsoft Azure")) + risks = append(risks, r.createRiskForSharedRuntime(input, input.SharedRuntimes[id], "Azure", "CIS Benchmark for Microsoft Azure")) addedAzure = true } for id := range sharedRuntimeIDsGCP { - risks = append(risks, createRiskForSharedRuntime(input, input.SharedRuntimes[id], "GCP", "CIS Benchmark for Google Cloud Computing Platform")) + risks = append(risks, r.createRiskForSharedRuntime(input, input.SharedRuntimes[id], "GCP", "CIS Benchmark for Google Cloud Computing Platform")) addedGCP = true } for id := range sharedRuntimeIDsOCP { - risks = append(risks, createRiskForSharedRuntime(input, input.SharedRuntimes[id], "OCP", "Vendor Best Practices for Oracle Cloud Platform")) + risks = append(risks, r.createRiskForSharedRuntime(input, input.SharedRuntimes[id], "OCP", "Vendor Best Practices for Oracle Cloud Platform")) addedOCP = true } for id := range sharedRuntimesWithUnspecificCloudRisks { - risks = append(risks, createRiskForSharedRuntime(input, input.SharedRuntimes[id], "", "")) + risks = append(risks, r.createRiskForSharedRuntime(input, input.SharedRuntimes[id], "", "")) } // ... 
followed by trust boundaries for the generic risks for id := range trustBoundaryIDsAWS { - risks = append(risks, createRiskForTrustBoundary(input, input.TrustBoundaries[id], "AWS", "CIS Benchmark for AWS")) + risks = append(risks, r.createRiskForTrustBoundary(input, input.TrustBoundaries[id], "AWS", "CIS Benchmark for AWS")) addedAWS = true } for id := range trustBoundaryIDsAzure { - risks = append(risks, createRiskForTrustBoundary(input, input.TrustBoundaries[id], "Azure", "CIS Benchmark for Microsoft Azure")) + risks = append(risks, r.createRiskForTrustBoundary(input, input.TrustBoundaries[id], "Azure", "CIS Benchmark for Microsoft Azure")) addedAzure = true } for id := range trustBoundaryIDsGCP { - risks = append(risks, createRiskForTrustBoundary(input, input.TrustBoundaries[id], "GCP", "CIS Benchmark for Google Cloud Computing Platform")) + risks = append(risks, r.createRiskForTrustBoundary(input, input.TrustBoundaries[id], "GCP", "CIS Benchmark for Google Cloud Computing Platform")) addedGCP = true } for id := range trustBoundaryIDsOCP { - risks = append(risks, createRiskForTrustBoundary(input, input.TrustBoundaries[id], "OCP", "Vendor Best Practices for Oracle Cloud Platform")) + risks = append(risks, r.createRiskForTrustBoundary(input, input.TrustBoundaries[id], "OCP", "Vendor Best Practices for Oracle Cloud Platform")) addedOCP = true } for id := range trustBoundariesWithUnspecificCloudRisks { - risks = append(risks, createRiskForTrustBoundary(input, input.TrustBoundaries[id], "", "")) + risks = append(risks, r.createRiskForTrustBoundary(input, input.TrustBoundaries[id], "", "")) } // just use the most sensitive asset as an example - to only create one general "AWS cloud hardening" risk, not many if !addedAWS { mostRelevantAsset := findMostSensitiveTechnicalAsset(input, techAssetIDsAWS) if !mostRelevantAsset.IsZero() { - risks = append(risks, createRiskForTechnicalAsset(input, mostRelevantAsset, "AWS", "CIS Benchmark for AWS")) + risks = append(risks, 
r.createRiskForTechnicalAsset(input, mostRelevantAsset, "AWS", "CIS Benchmark for AWS")) addedAWS = true } } @@ -242,7 +240,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { if !addedAzure { mostRelevantAsset := findMostSensitiveTechnicalAsset(input, techAssetIDsAzure) if !mostRelevantAsset.IsZero() { - risks = append(risks, createRiskForTechnicalAsset(input, mostRelevantAsset, "Azure", "CIS Benchmark for Microsoft Azure")) + risks = append(risks, r.createRiskForTechnicalAsset(input, mostRelevantAsset, "Azure", "CIS Benchmark for Microsoft Azure")) addedAzure = true } } @@ -250,7 +248,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { if !addedGCP { mostRelevantAsset := findMostSensitiveTechnicalAsset(input, techAssetIDsGCP) if !mostRelevantAsset.IsZero() { - risks = append(risks, createRiskForTechnicalAsset(input, mostRelevantAsset, "GCP", "CIS Benchmark for Google Cloud Computing Platform")) + risks = append(risks, r.createRiskForTechnicalAsset(input, mostRelevantAsset, "GCP", "CIS Benchmark for Google Cloud Computing Platform")) addedGCP = true } } @@ -258,7 +256,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { if !addedOCP { mostRelevantAsset := findMostSensitiveTechnicalAsset(input, techAssetIDsOCP) if !mostRelevantAsset.IsZero() { - risks = append(risks, createRiskForTechnicalAsset(input, mostRelevantAsset, "OCP", "Vendor Best Practices for Oracle Cloud Platform")) + risks = append(risks, r.createRiskForTechnicalAsset(input, mostRelevantAsset, "OCP", "Vendor Best Practices for Oracle Cloud Platform")) addedOCP = true } } @@ -267,10 +265,10 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { for id := range techAssetIDsWithSubtagSpecificCloudRisks { tA := input.TechnicalAssets[id] if tA.IsTaggedWithAnyTraversingUp(input, "aws:ec2") { - risks = append(risks, createRiskForTechnicalAsset(input, tA, "EC2", "CIS Benchmark for Amazon Linux")) + risks = append(risks, r.createRiskForTechnicalAsset(input, tA, 
"EC2", "CIS Benchmark for Amazon Linux")) } if tA.IsTaggedWithAnyTraversingUp(input, "aws:s3") { - risks = append(risks, createRiskForTechnicalAsset(input, tA, "S3", "Security Best Practices for AWS S3")) + risks = append(risks, r.createRiskForTechnicalAsset(input, tA, "S3", "Security Best Practices for AWS S3")) } // TODO add more tag-specific risks like also for aws:lambda etc. here } @@ -278,13 +276,13 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { return risks } -func addTrustBoundaryAccordingToBaseTag(trustBoundary types.TrustBoundary, +func (r *MissingCloudHardeningRule) addTrustBoundaryAccordingToBaseTag(trustBoundary types.TrustBoundary, trustBoundariesWithUnspecificCloudRisks map[string]bool, trustBoundaryIDsAWS map[string]bool, trustBoundaryIDsAzure map[string]bool, trustBoundaryIDsGCP map[string]bool, trustBoundaryIDsOCP map[string]bool) { - if trustBoundary.IsTaggedWithAny(SupportedTags()...) { + if trustBoundary.IsTaggedWithAny(r.SupportedTags()...) { if trustBoundary.IsTaggedWithBaseTag("aws") { trustBoundaryIDsAWS[trustBoundary.Id] = true } @@ -302,13 +300,13 @@ func addTrustBoundaryAccordingToBaseTag(trustBoundary types.TrustBoundary, } } -func addSharedRuntimeAccordingToBaseTag(sharedRuntime types.SharedRuntime, +func (r *MissingCloudHardeningRule) addSharedRuntimeAccordingToBaseTag(sharedRuntime types.SharedRuntime, sharedRuntimesWithUnspecificCloudRisks map[string]bool, sharedRuntimeIDsAWS map[string]bool, sharedRuntimeIDsAzure map[string]bool, sharedRuntimeIDsGCP map[string]bool, sharedRuntimeIDsOCP map[string]bool) { - if sharedRuntime.IsTaggedWithAny(SupportedTags()...) { + if sharedRuntime.IsTaggedWithAny(r.SupportedTags()...) 
{ if sharedRuntime.IsTaggedWithBaseTag("aws") { sharedRuntimeIDsAWS[sharedRuntime.Id] = true } @@ -365,7 +363,7 @@ func findMostSensitiveTechnicalAsset(input *types.ParsedModel, techAssets map[st return mostRelevantAsset } -func createRiskForSharedRuntime(input *types.ParsedModel, sharedRuntime types.SharedRuntime, prefix, details string) types.Risk { +func (r *MissingCloudHardeningRule) createRiskForSharedRuntime(input *types.ParsedModel, sharedRuntime types.SharedRuntime, prefix, details string) types.Risk { if len(prefix) > 0 { prefix = " (" + prefix + ")" } @@ -386,7 +384,7 @@ func createRiskForSharedRuntime(input *types.ParsedModel, sharedRuntime types.Sh } // create risk risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, @@ -399,7 +397,7 @@ func createRiskForSharedRuntime(input *types.ParsedModel, sharedRuntime types.Sh return risk } -func createRiskForTrustBoundary(parsedModel *types.ParsedModel, trustBoundary types.TrustBoundary, prefix, details string) types.Risk { +func (r *MissingCloudHardeningRule) createRiskForTrustBoundary(parsedModel *types.ParsedModel, trustBoundary types.TrustBoundary, prefix, details string) types.Risk { if len(prefix) > 0 { prefix = " (" + prefix + ")" } @@ -420,7 +418,7 @@ func createRiskForTrustBoundary(parsedModel *types.ParsedModel, trustBoundary ty } // create risk risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, @@ -433,7 +431,7 @@ func createRiskForTrustBoundary(parsedModel *types.ParsedModel, trustBoundary ty return risk } -func createRiskForTechnicalAsset(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset, prefix, details string) types.Risk { +func (r *MissingCloudHardeningRule) 
createRiskForTechnicalAsset(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset, prefix, details string) types.Risk { if len(prefix) > 0 { prefix = " (" + prefix + ")" } @@ -454,7 +452,7 @@ func createRiskForTechnicalAsset(parsedModel *types.ParsedModel, technicalAsset } // create risk risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/missing-file-validation/missing-file-validation-rule.go b/pkg/security/risks/builtin/missing-file-validation-rule.go similarity index 83% rename from pkg/security/risks/built-in/missing-file-validation/missing-file-validation-rule.go rename to pkg/security/risks/builtin/missing-file-validation-rule.go index c7b64a8e..5a872e1f 100644 --- a/pkg/security/risks/built-in/missing-file-validation/missing-file-validation-rule.go +++ b/pkg/security/risks/builtin/missing-file-validation-rule.go @@ -1,18 +1,16 @@ -package missing_file_validation +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type MissingFileValidationRule struct{} + +func NewMissingFileValidationRule() *MissingFileValidationRule { + return &MissingFileValidationRule{} } -func Category() types.RiskCategory { +func (*MissingFileValidationRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "missing-file-validation", Title: "Missing File Validation", @@ -38,11 +36,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*MissingFileValidationRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *MissingFileValidationRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks 
:= make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] @@ -51,14 +49,14 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { } for _, format := range technicalAsset.DataFormatsAccepted { if format == types.File { - risks = append(risks, createRisk(input, technicalAsset)) + risks = append(risks, r.createRisk(input, technicalAsset)) } } } return risks } -func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { +func (r *MissingFileValidationRule) createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { title := "Missing File Validation risk at " + technicalAsset.Title + "" impact := types.LowImpact if technicalAsset.HighestConfidentiality(input) == types.StrictlyConfidential || @@ -67,7 +65,7 @@ func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset) t impact = types.MediumImpact } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.VeryLikely, impact), ExploitationLikelihood: types.VeryLikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/missing-hardening/missing-hardening-rule.go b/pkg/security/risks/builtin/missing-hardening-rule.go similarity index 73% rename from pkg/security/risks/built-in/missing-hardening/missing-hardening-rule.go rename to pkg/security/risks/builtin/missing-hardening-rule.go index 29323a75..7847cdad 100644 --- a/pkg/security/risks/built-in/missing-hardening/missing-hardening-rule.go +++ b/pkg/security/risks/builtin/missing-hardening-rule.go @@ -1,4 +1,4 @@ -package missing_hardening +package builtin import ( "strconv" @@ -6,22 +6,20 @@ import ( "github.com/threagile/threagile/pkg/security/types" ) -const raaLimit = 55 -const raaLimitReduced = 40 +type MissingHardeningRule struct { + raaLimit int + raaLimitReduced int +} -func Rule() types.RiskRule { - return types.RiskRule{ - Category: 
Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +func NewMissingHardeningRule() *MissingHardeningRule { + return &MissingHardeningRule{raaLimit: 55, raaLimitReduced: 40} } -func Category() types.RiskCategory { +func (r *MissingHardeningRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "missing-hardening", Title: "Missing Hardening", - Description: "Technical assets with a Relative Attacker Attractiveness (RAA) value of " + strconv.Itoa(raaLimit) + " % or higher should be " + + Description: "Technical assets with a Relative Attacker Attractiveness (RAA) value of " + strconv.Itoa(r.raaLimit) + " % or higher should be " + "explicitly hardened taking best practices and vendor hardening guides into account.", Impact: "If this risk remains unmitigated, attackers might be able to easier attack high-value targets.", ASVS: "V14 - Configuration Verification Requirements", @@ -32,8 +30,8 @@ func Category() types.RiskCategory { Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", Function: types.Operations, STRIDE: types.Tampering, - DetectionLogic: "In-scope technical assets with RAA values of " + strconv.Itoa(raaLimit) + " % or higher. " + - "Generally for high-value targets like data stores, application servers, identity providers and ERP systems this limit is reduced to " + strconv.Itoa(raaLimitReduced) + " %", + DetectionLogic: "In-scope technical assets with RAA values of " + strconv.Itoa(r.raaLimit) + " % or higher. 
" + + "Generally for high-value targets like data stores, application servers, identity providers and ERP systems this limit is reduced to " + strconv.Itoa(r.raaLimitReduced) + " %", RiskAssessment: "The risk rating depends on the sensitivity of the data processed or stored in the technical asset.", FalsePositives: "Usually no false positives.", ModelFailurePossibleReason: false, @@ -41,32 +39,32 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*MissingHardeningRule) SupportedTags() []string { return []string{"tomcat"} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *MissingHardeningRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope { - if technicalAsset.RAA >= raaLimit || (technicalAsset.RAA >= raaLimitReduced && + if technicalAsset.RAA >= float64(r.raaLimit) || (technicalAsset.RAA >= float64(r.raaLimitReduced) && (technicalAsset.Type == types.Datastore || technicalAsset.Technology == types.ApplicationServer || technicalAsset.Technology == types.IdentityProvider || technicalAsset.Technology == types.ERP)) { - risks = append(risks, createRisk(input, technicalAsset)) + risks = append(risks, r.createRisk(input, technicalAsset)) } } } return risks } -func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { +func (r *MissingHardeningRule) createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { title := "Missing Hardening risk at " + technicalAsset.Title + "" impact := types.LowImpact if technicalAsset.HighestConfidentiality(input) == types.StrictlyConfidential || technicalAsset.HighestIntegrity(input) == types.MissionCritical { impact = types.MediumImpact } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: 
types.CalculateSeverity(types.Likely, impact), ExploitationLikelihood: types.Likely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go b/pkg/security/risks/builtin/missing-identity-propagation-rule.go similarity index 86% rename from pkg/security/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go rename to pkg/security/risks/builtin/missing-identity-propagation-rule.go index 5af2c24e..03be30d1 100644 --- a/pkg/security/risks/built-in/missing-identity-propagation/missing-identity-propagation-rule.go +++ b/pkg/security/risks/builtin/missing-identity-propagation-rule.go @@ -1,18 +1,16 @@ -package missing_identity_propagation +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type MissingIdentityPropagationRule struct{} + +func NewMissingIdentityPropagationRule() *MissingIdentityPropagationRule { + return &MissingIdentityPropagationRule{} } -func Category() types.RiskCategory { +func (*MissingIdentityPropagationRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "missing-identity-propagation", Title: "Missing Identity Propagation", @@ -43,11 +41,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*MissingIdentityPropagationRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *MissingIdentityPropagationRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] @@ -77,7 +75,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { highRisk := technicalAsset.Confidentiality == types.StrictlyConfidential || technicalAsset.Integrity == 
types.MissionCritical || technicalAsset.Availability == types.MissionCritical - risks = append(risks, createRisk(input, technicalAsset, commLink, highRisk)) + risks = append(risks, r.createRisk(input, technicalAsset, commLink, highRisk)) } } } @@ -85,13 +83,13 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { return risks } -func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingAccess types.CommunicationLink, moreRisky bool) types.Risk { +func (r *MissingIdentityPropagationRule) createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingAccess types.CommunicationLink, moreRisky bool) types.Risk { impact := types.LowImpact if moreRisky { impact = types.MediumImpact } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go b/pkg/security/risks/builtin/missing-identity-provider-isolation-rule.go similarity index 85% rename from pkg/security/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go rename to pkg/security/risks/builtin/missing-identity-provider-isolation-rule.go index 9c064cb0..1d47be95 100644 --- a/pkg/security/risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go +++ b/pkg/security/risks/builtin/missing-identity-provider-isolation-rule.go @@ -1,18 +1,16 @@ -package missing_identity_provider_isolation +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type MissingIdentityProviderIsolationRule struct{} + +func NewMissingIdentityProviderIsolationRule() 
*MissingIdentityProviderIsolationRule { + return &MissingIdentityProviderIsolationRule{} } -func Category() types.RiskCategory { +func (*MissingIdentityProviderIsolationRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "missing-identity-provider-isolation", Title: "Missing Identity Provider Isolation", @@ -39,11 +37,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*MissingIdentityProviderIsolationRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *MissingIdentityProviderIsolationRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, technicalAsset := range input.TechnicalAssets { if !technicalAsset.OutOfScope && technicalAsset.Technology.IsIdentityRelated() { @@ -67,14 +65,14 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { } } if createRiskEntry { - risks = append(risks, createRisk(technicalAsset, moreImpact, sameExecutionEnv)) + risks = append(risks, r.createRisk(technicalAsset, moreImpact, sameExecutionEnv)) } } } return risks } -func createRisk(techAsset types.TechnicalAsset, moreImpact bool, sameExecutionEnv bool) types.Risk { +func (r *MissingIdentityProviderIsolationRule) createRisk(techAsset types.TechnicalAsset, moreImpact bool, sameExecutionEnv bool) types.Risk { impact := types.HighImpact likelihood := types.Unlikely others := "in the same network segment" @@ -86,7 +84,7 @@ func createRisk(techAsset types.TechnicalAsset, moreImpact bool, sameExecutionEn others = "in the same execution environment" } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/missing-identity-store/missing-identity-store-rule.go b/pkg/security/risks/builtin/missing-identity-store-rule.go similarity index 86% 
rename from pkg/security/risks/built-in/missing-identity-store/missing-identity-store-rule.go rename to pkg/security/risks/builtin/missing-identity-store-rule.go index 2e6767dd..15d98adb 100644 --- a/pkg/security/risks/built-in/missing-identity-store/missing-identity-store-rule.go +++ b/pkg/security/risks/builtin/missing-identity-store-rule.go @@ -1,18 +1,16 @@ -package missing_identity_store +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type MissingIdentityStoreRule struct{} + +func NewMissingIdentityStoreRule() *MissingIdentityStoreRule { + return &MissingIdentityStoreRule{} } -func Category() types.RiskCategory { +func (*MissingIdentityStoreRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "missing-identity-store", Title: "Missing Identity Store", @@ -37,11 +35,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*MissingIdentityStoreRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *MissingIdentityStoreRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, technicalAsset := range input.TechnicalAssets { if !technicalAsset.OutOfScope && @@ -81,15 +79,15 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { } } if riskIdentified { - risks = append(risks, createRisk(mostRelevantAsset, impact)) + risks = append(risks, r.createRisk(mostRelevantAsset, impact)) } return risks } -func createRisk(technicalAsset types.TechnicalAsset, impact types.RiskExploitationImpact) types.Risk { +func (r *MissingIdentityStoreRule) createRisk(technicalAsset types.TechnicalAsset, impact types.RiskExploitationImpact) types.Risk { title := "Missing Identity Store in the threat model (referencing asset " + technicalAsset.Title + " as an 
example)" risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go b/pkg/security/risks/builtin/missing-network-segmentation-rule.go similarity index 85% rename from pkg/security/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go rename to pkg/security/risks/builtin/missing-network-segmentation-rule.go index ab11b08a..659d01bf 100644 --- a/pkg/security/risks/built-in/missing-network-segmentation/missing-network-segmentation-rule.go +++ b/pkg/security/risks/builtin/missing-network-segmentation-rule.go @@ -1,4 +1,4 @@ -package missing_network_segmentation +package builtin import ( "sort" @@ -6,17 +6,15 @@ import ( "github.com/threagile/threagile/pkg/security/types" ) -const raaLimit = 50 +type MissingNetworkSegmentationRule struct { + raaLimit int +} -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +func NewMissingNetworkSegmentationRule() *MissingNetworkSegmentationRule { + return &MissingNetworkSegmentationRule{raaLimit: 50} } -func Category() types.RiskCategory { +func (*MissingNetworkSegmentationRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "missing-network-segmentation", Title: "Missing Network Segmentation", @@ -46,11 +44,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*MissingNetworkSegmentationRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *MissingNetworkSegmentationRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) // first create them in memory (see the link replacement below for nested trust boundaries) - 
otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: @@ -62,7 +60,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { for _, key := range keys { technicalAsset := input.TechnicalAssets[key] if !technicalAsset.OutOfScope && technicalAsset.Technology != types.ReverseProxy && technicalAsset.Technology != types.WAF && technicalAsset.Technology != types.IDS && technicalAsset.Technology != types.IPS && technicalAsset.Technology != types.ServiceRegistry { - if technicalAsset.RAA >= raaLimit && (technicalAsset.Type == types.Datastore || technicalAsset.Confidentiality >= types.Confidential || + if technicalAsset.RAA >= float64(r.raaLimit) && (technicalAsset.Type == types.Datastore || technicalAsset.Confidentiality >= types.Confidential || technicalAsset.Integrity >= types.Critical || technicalAsset.Availability >= types.Critical) { // now check for any other same-network assets of certain types which have no direct connection for _, sparringAssetCandidateId := range keys { // so inner loop again over all assets @@ -74,7 +72,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { !sparringAssetCandidate.Technology.IsCloseToHighValueTargetsTolerated() { highRisk := technicalAsset.Confidentiality == types.StrictlyConfidential || technicalAsset.Integrity == types.MissionCritical || technicalAsset.Availability == types.MissionCritical - risks = append(risks, createRisk(technicalAsset, highRisk)) + risks = append(risks, r.createRisk(technicalAsset, highRisk)) break } } @@ -85,13 +83,13 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { return risks } -func createRisk(techAsset types.TechnicalAsset, moreRisky bool) types.Risk { +func (r *MissingNetworkSegmentationRule) createRisk(techAsset types.TechnicalAsset, moreRisky bool) types.Risk { impact := types.LowImpact if moreRisky { impact = types.MediumImpact } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, 
Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go b/pkg/security/risks/builtin/missing-vault-isolation-rule.go similarity index 87% rename from pkg/security/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go rename to pkg/security/risks/builtin/missing-vault-isolation-rule.go index 5080663f..7a9d6497 100644 --- a/pkg/security/risks/built-in/missing-vault-isolation/missing-vault-isolation-rule.go +++ b/pkg/security/risks/builtin/missing-vault-isolation-rule.go @@ -1,18 +1,16 @@ -package missing_vault_isolation +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type MissingVaultIsolationRule struct{} + +func NewMissingVaultIsolationRule() *MissingVaultIsolationRule { + return &MissingVaultIsolationRule{} } -func Category() types.RiskCategory { +func (*MissingVaultIsolationRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "missing-vault-isolation", Title: "Missing Vault Isolation", @@ -39,11 +37,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*MissingVaultIsolationRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *MissingVaultIsolationRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, technicalAsset := range input.TechnicalAssets { if !technicalAsset.OutOfScope && technicalAsset.Technology == types.Vault { @@ -67,7 +65,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { } } if createRiskEntry { - risks = append(risks, createRisk(technicalAsset, moreImpact, sameExecutionEnv)) + risks = append(risks, 
r.createRisk(technicalAsset, moreImpact, sameExecutionEnv)) } } } @@ -78,7 +76,7 @@ func isVaultStorage(parsedModel *types.ParsedModel, vault types.TechnicalAsset, return storage.Type == types.Datastore && vault.HasDirectConnection(parsedModel, storage.Id) } -func createRisk(techAsset types.TechnicalAsset, moreImpact bool, sameExecutionEnv bool) types.Risk { +func (r *MissingVaultIsolationRule) createRisk(techAsset types.TechnicalAsset, moreImpact bool, sameExecutionEnv bool) types.Risk { impact := types.MediumImpact likelihood := types.Unlikely others := "in the same network segment" @@ -90,7 +88,7 @@ func createRisk(techAsset types.TechnicalAsset, moreImpact bool, sameExecutionEn others = "in the same execution environment" } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/missing-vault/missing-vault-rule.go b/pkg/security/risks/builtin/missing-vault-rule.go similarity index 86% rename from pkg/security/risks/built-in/missing-vault/missing-vault-rule.go rename to pkg/security/risks/builtin/missing-vault-rule.go index 7cd26ca1..575c6eeb 100644 --- a/pkg/security/risks/built-in/missing-vault/missing-vault-rule.go +++ b/pkg/security/risks/builtin/missing-vault-rule.go @@ -1,18 +1,16 @@ -package missing_vault +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type MissingVaultRule struct{} + +func NewMissingVaultRule() *MissingVaultRule { + return &MissingVaultRule{} } -func Category() types.RiskCategory { +func (*MissingVaultRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "missing-vault", Title: "Missing Vault (Secret Storage)", @@ -38,11 +36,11 @@ func Category() types.RiskCategory { 
} } -func SupportedTags() []string { +func (*MissingVaultRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *MissingVaultRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) hasVault := false var mostRelevantAsset types.TechnicalAsset @@ -68,15 +66,15 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { } } if !hasVault { - risks = append(risks, createRisk(mostRelevantAsset, impact)) + risks = append(risks, r.createRisk(mostRelevantAsset, impact)) } return risks } -func createRisk(technicalAsset types.TechnicalAsset, impact types.RiskExploitationImpact) types.Risk { +func (r *MissingVaultRule) createRisk(technicalAsset types.TechnicalAsset, impact types.RiskExploitationImpact) types.Risk { title := "Missing Vault (Secret Storage) in the threat model (referencing asset " + technicalAsset.Title + " as an example)" risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/missing-waf/missing-waf-rule.go b/pkg/security/risks/builtin/missing-waf-rule.go similarity index 86% rename from pkg/security/risks/built-in/missing-waf/missing-waf-rule.go rename to pkg/security/risks/builtin/missing-waf-rule.go index 8ffcc3b3..b39d40c3 100644 --- a/pkg/security/risks/built-in/missing-waf/missing-waf-rule.go +++ b/pkg/security/risks/builtin/missing-waf-rule.go @@ -1,18 +1,16 @@ -package missing_waf +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type MissingWafRule struct{} + +func NewMissingWafRule() *MissingWafRule { + return &MissingWafRule{} } -func Category() types.RiskCategory { +func 
(*MissingWafRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "missing-waf", Title: "Missing Web Application Firewall (WAF)", @@ -37,11 +35,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*MissingWafRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *MissingWafRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, technicalAsset := range input.TechnicalAssets { if !technicalAsset.OutOfScope && @@ -50,7 +48,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { if incomingAccess.IsAcrossTrustBoundaryNetworkOnly(input) && incomingAccess.Protocol.IsPotentialWebAccessProtocol() && input.TechnicalAssets[incomingAccess.SourceId].Technology != types.WAF { - risks = append(risks, createRisk(input, technicalAsset)) + risks = append(risks, r.createRisk(input, technicalAsset)) break } } @@ -59,7 +57,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { return risks } -func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { +func (r *MissingWafRule) createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { title := "Missing Web Application Firewall (WAF) risk at " + technicalAsset.Title + "" likelihood := types.Unlikely impact := types.LowImpact @@ -69,7 +67,7 @@ func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset) t impact = types.MediumImpact } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go b/pkg/security/risks/builtin/mixed-targets-on-shared-runtime-rule.go similarity index 85% rename from 
pkg/security/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go rename to pkg/security/risks/builtin/mixed-targets-on-shared-runtime-rule.go index 6ab3f365..24f07b16 100644 --- a/pkg/security/risks/built-in/mixed-targets-on-shared-runtime/mixed-targets-on-shared-runtime-rule.go +++ b/pkg/security/risks/builtin/mixed-targets-on-shared-runtime-rule.go @@ -1,4 +1,4 @@ -package mixed_targets_on_shared_runtime +package builtin import ( "sort" @@ -6,15 +6,13 @@ import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type MixedTargetsOnSharedRuntimeRule struct{} + +func NewMixedTargetsOnSharedRuntimeRule() *MixedTargetsOnSharedRuntimeRule { + return &MixedTargetsOnSharedRuntimeRule{} } -func Category() types.RiskCategory { +func (*MixedTargetsOnSharedRuntimeRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "mixed-targets-on-shared-runtime", Title: "Mixed Targets on Shared Runtime", @@ -42,11 +40,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*MixedTargetsOnSharedRuntimeRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *MixedTargetsOnSharedRuntimeRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) // as in Go ranging over map is random order, range over them in sorted (hence reproducible) way: keys := make([]string, 0) @@ -62,7 +60,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { for _, technicalAssetId := range sharedRuntime.TechnicalAssetsRunning { technicalAsset := input.TechnicalAssets[technicalAssetId] if len(currentTrustBoundaryId) > 0 && currentTrustBoundaryId != technicalAsset.GetTrustBoundaryId(input) { - risks = append(risks, createRisk(input, sharedRuntime)) + risks = append(risks, 
r.createRisk(input, sharedRuntime)) riskAdded = true break } @@ -75,19 +73,19 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { } } if !riskAdded && hasFrontend && hasBackend { - risks = append(risks, createRisk(input, sharedRuntime)) + risks = append(risks, r.createRisk(input, sharedRuntime)) } } return risks } -func createRisk(input *types.ParsedModel, sharedRuntime types.SharedRuntime) types.Risk { +func (r *MixedTargetsOnSharedRuntimeRule) createRisk(input *types.ParsedModel, sharedRuntime types.SharedRuntime) types.Risk { impact := types.LowImpact if isMoreRisky(input, sharedRuntime) { impact = types.MediumImpact } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/path-traversal/path-traversal-rule.go b/pkg/security/risks/builtin/path-traversal-rule.go similarity index 84% rename from pkg/security/risks/built-in/path-traversal/path-traversal-rule.go rename to pkg/security/risks/builtin/path-traversal-rule.go index fd6b6e45..6bc81197 100644 --- a/pkg/security/risks/built-in/path-traversal/path-traversal-rule.go +++ b/pkg/security/risks/builtin/path-traversal-rule.go @@ -1,18 +1,16 @@ -package path_traversal +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type PathTraversalRule struct{} + +func NewPathTraversalRule() *PathTraversalRule { + return &PathTraversalRule{} } -func Category() types.RiskCategory { +func (*PathTraversalRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "path-traversal", Title: "Path-Traversal", @@ -39,7 +37,11 @@ func Category() types.RiskCategory { } } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func 
(*PathTraversalRule) SupportedTags() []string { + return []string{} +} + +func (r *PathTraversalRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] @@ -55,17 +57,13 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { if incomingFlow.Usage == types.DevOps { likelihood = types.Likely } - risks = append(risks, createRisk(input, technicalAsset, incomingFlow, likelihood)) + risks = append(risks, r.createRisk(input, technicalAsset, incomingFlow, likelihood)) } } return risks } -func SupportedTags() []string { - return []string{} -} - -func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingFlow types.CommunicationLink, likelihood types.RiskExploitationLikelihood) types.Risk { +func (r *PathTraversalRule) createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingFlow types.CommunicationLink, likelihood types.RiskExploitationLikelihood) types.Risk { caller := input.TechnicalAssets[incomingFlow.SourceId] title := "Path-Traversal risk at " + caller.Title + " against filesystem " + technicalAsset.Title + "" + " via " + incomingFlow.Title + "" @@ -74,7 +72,7 @@ func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, i impact = types.HighImpact } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go b/pkg/security/risks/builtin/push-instead-of-pull-deployment-rule.go similarity index 81% rename from pkg/security/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go rename to pkg/security/risks/builtin/push-instead-of-pull-deployment-rule.go index 93bb296a..6320ec5a 
100644 --- a/pkg/security/risks/built-in/push-instead-of-pull-deployment/push-instead-of-pull-deployment-rule.go +++ b/pkg/security/risks/builtin/push-instead-of-pull-deployment-rule.go @@ -1,18 +1,16 @@ -package push_instead_of_pull_deployment +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type PushInsteadPullDeploymentRule struct{} + +func NewPushInsteadPullDeploymentRule() *PushInsteadPullDeploymentRule { + return &PushInsteadPullDeploymentRule{} } -func Category() types.RiskCategory { +func (*PushInsteadPullDeploymentRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "push-instead-of-pull-deployment", Title: "Push instead of Pull Deployment", @@ -39,11 +37,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*PushInsteadPullDeploymentRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *PushInsteadPullDeploymentRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) impact := types.LowImpact for _, buildPipeline := range input.TechnicalAssets { @@ -57,7 +55,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { targetAsset.HighestAvailability(input) >= types.Critical { impact = types.MediumImpact } - risks = append(risks, createRisk(buildPipeline, targetAsset, deploymentLink, impact)) + risks = append(risks, r.createRisk(buildPipeline, targetAsset, deploymentLink, impact)) } } } @@ -65,10 +63,10 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { return risks } -func createRisk(buildPipeline types.TechnicalAsset, deploymentTarget types.TechnicalAsset, deploymentCommLink types.CommunicationLink, impact types.RiskExploitationImpact) types.Risk { +func (r *PushInsteadPullDeploymentRule) createRisk(buildPipeline 
types.TechnicalAsset, deploymentTarget types.TechnicalAsset, deploymentCommLink types.CommunicationLink, impact types.RiskExploitationImpact) types.Risk { title := "Push instead of Pull Deployment at " + deploymentTarget.Title + " via build pipeline asset " + buildPipeline.Title + "" risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/search-query-injection/search-query-injection-rule.go b/pkg/security/risks/builtin/search-query-injection-rule.go similarity index 85% rename from pkg/security/risks/built-in/search-query-injection/search-query-injection-rule.go rename to pkg/security/risks/builtin/search-query-injection-rule.go index 96c1e621..3b449d27 100644 --- a/pkg/security/risks/built-in/search-query-injection/search-query-injection-rule.go +++ b/pkg/security/risks/builtin/search-query-injection-rule.go @@ -1,18 +1,16 @@ -package search_query_injection +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type SearchQueryInjectionRule struct{} + +func NewSearchQueryInjectionRule() *SearchQueryInjectionRule { + return &SearchQueryInjectionRule{} } -func Category() types.RiskCategory { +func (*SearchQueryInjectionRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "search-query-injection", Title: "Search-Query Injection", @@ -40,7 +38,11 @@ func Category() types.RiskCategory { } } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (*SearchQueryInjectionRule) SupportedTags() []string { + return []string{} +} + +func (r *SearchQueryInjectionRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range 
input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] @@ -56,7 +58,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { if incomingFlow.Usage == types.DevOps { likelihood = types.Likely } - risks = append(risks, createRisk(input, technicalAsset, incomingFlow, likelihood)) + risks = append(risks, r.createRisk(input, technicalAsset, incomingFlow, likelihood)) } } } @@ -64,11 +66,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { return risks } -func SupportedTags() []string { - return []string{} -} - -func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingFlow types.CommunicationLink, likelihood types.RiskExploitationLikelihood) types.Risk { +func (r *SearchQueryInjectionRule) createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingFlow types.CommunicationLink, likelihood types.RiskExploitationLikelihood) types.Risk { caller := input.TechnicalAssets[incomingFlow.SourceId] title := "Search Query Injection risk at " + caller.Title + " against search engine server " + technicalAsset.Title + "" + " via " + incomingFlow.Title + "" @@ -79,7 +77,7 @@ func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, i impact = types.LowImpact } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go b/pkg/security/risks/builtin/server-side-request-forgery-rule.go similarity index 87% rename from pkg/security/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go rename to pkg/security/risks/builtin/server-side-request-forgery-rule.go index c0106916..45c3ce27 100644 --- a/pkg/security/risks/built-in/server-side-request-forgery/server-side-request-forgery-rule.go +++ 
b/pkg/security/risks/builtin/server-side-request-forgery-rule.go @@ -1,18 +1,16 @@ -package server_side_request_forgery +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type ServerSideRequestForgeryRule struct{} + +func NewServerSideRequestForgeryRule() *ServerSideRequestForgeryRule { + return &ServerSideRequestForgeryRule{} } -func Category() types.RiskCategory { +func (*ServerSideRequestForgeryRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "server-side-request-forgery", Title: "Server-Side Request Forgery (SSRF)", @@ -39,11 +37,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*ServerSideRequestForgeryRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *ServerSideRequestForgeryRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] @@ -52,14 +50,14 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { } for _, outgoingFlow := range technicalAsset.CommunicationLinks { if outgoingFlow.Protocol.IsPotentialWebAccessProtocol() { - risks = append(risks, createRisk(input, technicalAsset, outgoingFlow)) + risks = append(risks, r.createRisk(input, technicalAsset, outgoingFlow)) } } } return risks } -func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, outgoingFlow types.CommunicationLink) types.Risk { +func (r *ServerSideRequestForgeryRule) createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, outgoingFlow types.CommunicationLink) types.Risk { target := input.TechnicalAssets[outgoingFlow.TargetId] title := "Server-Side Request Forgery (SSRF) risk at " + technicalAsset.Title + " 
server-side web-requesting " + "the target " + target.Title + " via " + outgoingFlow.Title + "" @@ -96,7 +94,7 @@ func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, o likelihood = types.Unlikely } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go b/pkg/security/risks/builtin/service-registry-poisoning-rule.go similarity index 82% rename from pkg/security/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go rename to pkg/security/risks/builtin/service-registry-poisoning-rule.go index d0727f67..76f7acf4 100644 --- a/pkg/security/risks/built-in/service-registry-poisoning/service-registry-poisoning-rule.go +++ b/pkg/security/risks/builtin/service-registry-poisoning-rule.go @@ -1,18 +1,16 @@ -package service_registry_poisoning +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type ServiceRegistryPoisoningRule struct{} + +func NewServiceRegistryPoisoningRule() *ServiceRegistryPoisoningRule { + return &ServiceRegistryPoisoningRule{} } -func Category() types.RiskCategory { +func (*ServiceRegistryPoisoningRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "service-registry-poisoning", Title: "Service Registry Poisoning", @@ -36,23 +34,23 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*ServiceRegistryPoisoningRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *ServiceRegistryPoisoningRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for 
_, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if !technicalAsset.OutOfScope && technicalAsset.Technology == types.ServiceRegistry { incomingFlows := input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] - risks = append(risks, createRisk(input, technicalAsset, incomingFlows)) + risks = append(risks, r.createRisk(input, technicalAsset, incomingFlows)) } } return risks } -func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingFlows []types.CommunicationLink) types.Risk { +func (r *ServiceRegistryPoisoningRule) createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingFlows []types.CommunicationLink) types.Risk { title := "Service Registry Poisoning risk at " + technicalAsset.Title + "" impact := types.LowImpact @@ -67,7 +65,7 @@ func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, i } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go b/pkg/security/risks/builtin/sql-nosql-injection-rule.go similarity index 84% rename from pkg/security/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go rename to pkg/security/risks/builtin/sql-nosql-injection-rule.go index 48f8bd46..48b42300 100644 --- a/pkg/security/risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go +++ b/pkg/security/risks/builtin/sql-nosql-injection-rule.go @@ -1,18 +1,16 @@ -package sql_nosql_injection +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type SqlNoSqlInjectionRule struct{} + +func NewSqlNoSqlInjectionRule() 
*SqlNoSqlInjectionRule { + return &SqlNoSqlInjectionRule{} } -func Category() types.RiskCategory { +func (*SqlNoSqlInjectionRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "sql-nosql-injection", Title: "SQL/NoSQL-Injection", @@ -36,11 +34,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*SqlNoSqlInjectionRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *SqlNoSqlInjectionRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] @@ -51,14 +49,14 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { } if incomingFlow.Protocol.IsPotentialDatabaseAccessProtocol(true) && (technicalAsset.Technology == types.Database || technicalAsset.Technology == types.IdentityStoreDatabase) || (incomingFlow.Protocol.IsPotentialDatabaseAccessProtocol(false)) { - risks = append(risks, createRisk(input, technicalAsset, incomingFlow)) + risks = append(risks, r.createRisk(input, technicalAsset, incomingFlow)) } } } return risks } -func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingFlow types.CommunicationLink) types.Risk { +func (r *SqlNoSqlInjectionRule) createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, incomingFlow types.CommunicationLink) types.Risk { caller := input.TechnicalAssets[incomingFlow.SourceId] title := "SQL/NoSQL-Injection risk at " + caller.Title + " against database " + technicalAsset.Title + "" + " via " + incomingFlow.Title + "" @@ -71,7 +69,7 @@ func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, i likelihood = types.Likely } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: 
impact, diff --git a/pkg/security/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go b/pkg/security/risks/builtin/unchecked-deployment-rule.go similarity index 87% rename from pkg/security/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go rename to pkg/security/risks/builtin/unchecked-deployment-rule.go index 82c175cc..1f864ab0 100644 --- a/pkg/security/risks/built-in/unchecked-deployment/unchecked-deployment-rule.go +++ b/pkg/security/risks/builtin/unchecked-deployment-rule.go @@ -1,18 +1,16 @@ -package unchecked_deployment +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type UncheckedDeploymentRule struct{} + +func NewUncheckedDeploymentRule() *UncheckedDeploymentRule { + return &UncheckedDeploymentRule{} } -func Category() types.RiskCategory { +func (*UncheckedDeploymentRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "unchecked-deployment", Title: "Unchecked Deployment", @@ -39,21 +37,21 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*UncheckedDeploymentRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *UncheckedDeploymentRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, technicalAsset := range input.TechnicalAssets { if technicalAsset.Technology.IsDevelopmentRelevant() { - risks = append(risks, createRisk(input, technicalAsset)) + risks = append(risks, r.createRisk(input, technicalAsset)) } } return risks } -func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { +func (r *UncheckedDeploymentRule) createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { title := "Unchecked Deployment risk at " + technicalAsset.Title 
+ "" // impact is depending on highest rating impact := types.LowImpact @@ -84,7 +82,7 @@ func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset) t } // create risk risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go b/pkg/security/risks/builtin/unencrypted-asset-rule.go similarity index 83% rename from pkg/security/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go rename to pkg/security/risks/builtin/unencrypted-asset-rule.go index cc8bd777..ca4c00b0 100644 --- a/pkg/security/risks/built-in/unencrypted-asset/unencrypted-asset-rule.go +++ b/pkg/security/risks/builtin/unencrypted-asset-rule.go @@ -1,18 +1,16 @@ -package unencrypted_asset +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type UnencryptedAssetRule struct{} + +func NewUnencryptedAssetRule() *UnencryptedAssetRule { + return &UnencryptedAssetRule{} } -func Category() types.RiskCategory { +func (*UnencryptedAssetRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "unencrypted-asset", Title: "Unencrypted Technical Assets", @@ -39,17 +37,17 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*UnencryptedAssetRule) SupportedTags() []string { return []string{} } // check for technical assets that should be encrypted due to their confidentiality -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *UnencryptedAssetRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] - if 
!technicalAsset.OutOfScope && !IsEncryptionWaiver(technicalAsset) && + if !technicalAsset.OutOfScope && !isEncryptionWaiver(technicalAsset) && (technicalAsset.HighestConfidentiality(input) >= types.Confidential || technicalAsset.HighestIntegrity(input) >= types.Critical) { verySensitive := technicalAsset.HighestConfidentiality(input) == types.StrictlyConfidential || @@ -60,10 +58,10 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { if verySensitive { impact = types.HighImpact } - risks = append(risks, createRisk(technicalAsset, impact, requiresEndUserKey)) + risks = append(risks, r.createRisk(technicalAsset, impact, requiresEndUserKey)) } else if requiresEndUserKey && (technicalAsset.Encryption == types.Transparent || technicalAsset.Encryption == types.DataWithSymmetricSharedKey || technicalAsset.Encryption == types.DataWithAsymmetricSharedKey) { - risks = append(risks, createRisk(technicalAsset, types.MediumImpact, requiresEndUserKey)) + risks = append(risks, r.createRisk(technicalAsset, types.MediumImpact, requiresEndUserKey)) } } } @@ -73,19 +71,19 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { // Simple routing assets like 'Reverse Proxy' or 'Load Balancer' usually don't have their own storage and thus have no // encryption requirement for the asset itself (though for the communication, but that's a different rule) -func IsEncryptionWaiver(asset types.TechnicalAsset) bool { +func isEncryptionWaiver(asset types.TechnicalAsset) bool { return asset.Technology == types.ReverseProxy || asset.Technology == types.LoadBalancer || asset.Technology == types.WAF || asset.Technology == types.IDS || asset.Technology == types.IPS || asset.Technology.IsEmbeddedComponent() } -func createRisk(technicalAsset types.TechnicalAsset, impact types.RiskExploitationImpact, requiresEndUserKey bool) types.Risk { +func (r *UnencryptedAssetRule) createRisk(technicalAsset types.TechnicalAsset, impact types.RiskExploitationImpact, requiresEndUserKey bool) 
types.Risk { title := "Unencrypted Technical Asset named " + technicalAsset.Title + "" if requiresEndUserKey { title += " missing end user individual encryption with " + types.DataWithEndUserIndividualKey.String() } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go b/pkg/security/risks/builtin/unencrypted-communication-rule.go similarity index 82% rename from pkg/security/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go rename to pkg/security/risks/builtin/unencrypted-communication-rule.go index 96a71c2b..aee4181c 100644 --- a/pkg/security/risks/built-in/unencrypted-communication/unencrypted-communication-rule.go +++ b/pkg/security/risks/builtin/unencrypted-communication-rule.go @@ -1,18 +1,16 @@ -package unencrypted_communication +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type UnencryptedCommunicationRule struct{} + +func NewUnencryptedCommunicationRule() *UnencryptedCommunicationRule { + return &UnencryptedCommunicationRule{} } -func Category() types.RiskCategory { +func (*UnencryptedCommunicationRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "unencrypted-communication", Title: "Unencrypted Communication", @@ -36,13 +34,13 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*UnencryptedCommunicationRule) SupportedTags() []string { return []string{} } // check for communication links that should be encrypted due to their confidentiality and/or integrity -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *UnencryptedCommunicationRule) 
GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, technicalAsset := range input.TechnicalAssets { for _, dataFlow := range technicalAsset.CommunicationLinks { @@ -57,11 +55,11 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { for _, sentDataAsset := range dataFlow.DataAssetsSent { dataAsset := input.DataAssets[sentDataAsset] if isHighSensitivity(dataAsset) || transferringAuthData { - risks = append(risks, createRisk(input, technicalAsset, dataFlow, true, transferringAuthData)) + risks = append(risks, r.createRisk(input, technicalAsset, dataFlow, true, transferringAuthData)) addedOne = true break } else if !dataFlow.VPN && isMediumSensitivity(dataAsset) { - risks = append(risks, createRisk(input, technicalAsset, dataFlow, false, transferringAuthData)) + risks = append(risks, r.createRisk(input, technicalAsset, dataFlow, false, transferringAuthData)) addedOne = true break } @@ -70,10 +68,10 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { for _, receivedDataAsset := range dataFlow.DataAssetsReceived { dataAsset := input.DataAssets[receivedDataAsset] if isHighSensitivity(dataAsset) || transferringAuthData { - risks = append(risks, createRisk(input, technicalAsset, dataFlow, true, transferringAuthData)) + risks = append(risks, r.createRisk(input, technicalAsset, dataFlow, true, transferringAuthData)) break } else if !dataFlow.VPN && isMediumSensitivity(dataAsset) { - risks = append(risks, createRisk(input, technicalAsset, dataFlow, false, transferringAuthData)) + risks = append(risks, r.createRisk(input, technicalAsset, dataFlow, false, transferringAuthData)) break } } @@ -85,7 +83,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { return risks } -func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, dataFlow types.CommunicationLink, highRisk bool, transferringAuthData bool) types.Risk { +func (r *UnencryptedCommunicationRule) createRisk(input 
*types.ParsedModel, technicalAsset types.TechnicalAsset, dataFlow types.CommunicationLink, highRisk bool, transferringAuthData bool) types.Risk { impact := types.MediumImpact if highRisk { impact = types.HighImpact @@ -104,7 +102,7 @@ func createRisk(input *types.ParsedModel, technicalAsset types.TechnicalAsset, d likelihood = types.Likely } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go b/pkg/security/risks/builtin/unguarded-access-from-internet-rule.go similarity index 88% rename from pkg/security/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go rename to pkg/security/risks/builtin/unguarded-access-from-internet-rule.go index 2c0e240d..9e1ec39d 100644 --- a/pkg/security/risks/built-in/unguarded-access-from-internet/unguarded-access-from-internet-rule.go +++ b/pkg/security/risks/builtin/unguarded-access-from-internet-rule.go @@ -1,4 +1,4 @@ -package unguarded_access_from_internet +package builtin import ( "sort" @@ -6,15 +6,13 @@ import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type UnguardedAccessFromInternetRule struct{} + +func NewUnguardedAccessFromInternetRule() *UnguardedAccessFromInternetRule { + return &UnguardedAccessFromInternetRule{} } -func Category() types.RiskCategory { +func (*UnguardedAccessFromInternetRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "unguarded-access-from-internet", Title: "Unguarded Access From Internet", @@ -48,11 +46,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*UnguardedAccessFromInternetRule) SupportedTags() 
[]string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *UnguardedAccessFromInternetRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] @@ -80,7 +78,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { if sourceAsset.Internet { highRisk := technicalAsset.Confidentiality == types.StrictlyConfidential || technicalAsset.Integrity == types.MissionCritical - risks = append(risks, createRisk(technicalAsset, incomingAccess, + risks = append(risks, r.createRisk(technicalAsset, incomingAccess, input.TechnicalAssets[incomingAccess.SourceId], highRisk)) } } @@ -91,14 +89,14 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { return risks } -func createRisk(dataStore types.TechnicalAsset, dataFlow types.CommunicationLink, +func (r *UnguardedAccessFromInternetRule) createRisk(dataStore types.TechnicalAsset, dataFlow types.CommunicationLink, clientFromInternet types.TechnicalAsset, moreRisky bool) types.Risk { impact := types.LowImpact if moreRisky || dataStore.RAA > 40 { impact = types.MediumImpact } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.VeryLikely, impact), ExploitationLikelihood: types.VeryLikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go b/pkg/security/risks/builtin/unguarded-direct-datastore-access-rule.go similarity index 85% rename from pkg/security/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go rename to pkg/security/risks/builtin/unguarded-direct-datastore-access-rule.go index 2ab941ef..42e1d600 100644 --- a/pkg/security/risks/built-in/unguarded-direct-datastore-access/unguarded-direct-datastore-access-rule.go +++ 
b/pkg/security/risks/builtin/unguarded-direct-datastore-access-rule.go @@ -1,18 +1,16 @@ -package unguarded_direct_datastore_access +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type UnguardedDirectDatastoreAccessRule struct{} + +func NewUnguardedDirectDatastoreAccessRule() *UnguardedDirectDatastoreAccessRule { + return &UnguardedDirectDatastoreAccessRule{} } -func Category() types.RiskCategory { +func (*UnguardedDirectDatastoreAccessRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "unguarded-direct-datastore-access", Title: "Unguarded Direct Datastore Access", @@ -38,13 +36,13 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*UnguardedDirectDatastoreAccessRule) SupportedTags() []string { return []string{} } // check for data stores that should not be accessed directly across trust boundaries -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *UnguardedDirectDatastoreAccessRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] @@ -56,11 +54,11 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { continue } if technicalAsset.Confidentiality >= types.Confidential || technicalAsset.Integrity >= types.Critical { - if incomingAccess.IsAcrossTrustBoundaryNetworkOnly(input) && !FileServerAccessViaFTP(technicalAsset, incomingAccess) && + if incomingAccess.IsAcrossTrustBoundaryNetworkOnly(input) && !fileServerAccessViaFTP(technicalAsset, incomingAccess) && incomingAccess.Usage != types.DevOps && !isSharingSameParentTrustBoundary(input, technicalAsset, sourceAsset) { highRisk := technicalAsset.Confidentiality == types.StrictlyConfidential || technicalAsset.Integrity == 
types.MissionCritical - risks = append(risks, createRisk(technicalAsset, incomingAccess, + risks = append(risks, r.createRisk(technicalAsset, incomingAccess, input.TechnicalAssets[incomingAccess.SourceId], highRisk)) } } @@ -96,18 +94,18 @@ func isSharingSameParentTrustBoundary(input *types.ParsedModel, left, right type return false } -func FileServerAccessViaFTP(technicalAsset types.TechnicalAsset, incomingAccess types.CommunicationLink) bool { +func fileServerAccessViaFTP(technicalAsset types.TechnicalAsset, incomingAccess types.CommunicationLink) bool { return technicalAsset.Technology == types.FileServer && (incomingAccess.Protocol == types.FTP || incomingAccess.Protocol == types.FTPS || incomingAccess.Protocol == types.SFTP) } -func createRisk(dataStore types.TechnicalAsset, dataFlow types.CommunicationLink, clientOutsideTrustBoundary types.TechnicalAsset, moreRisky bool) types.Risk { +func (r *UnguardedDirectDatastoreAccessRule) createRisk(dataStore types.TechnicalAsset, dataFlow types.CommunicationLink, clientOutsideTrustBoundary types.TechnicalAsset, moreRisky bool) types.Risk { impact := types.LowImpact if moreRisky || dataStore.RAA > 40 { impact = types.MediumImpact } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Likely, impact), ExploitationLikelihood: types.Likely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go b/pkg/security/risks/builtin/unnecessary-communication-link-rule.go similarity index 79% rename from pkg/security/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go rename to pkg/security/risks/builtin/unnecessary-communication-link-rule.go index 38646c2b..66ed716b 100644 --- a/pkg/security/risks/built-in/unnecessary-communication-link/unnecessary-communication-link-rule.go +++ b/pkg/security/risks/builtin/unnecessary-communication-link-rule.go @@ 
-1,18 +1,16 @@ -package unnecessary_communication_link +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type UnnecessaryCommunicationLinkRule struct{} + +func NewUnnecessaryCommunicationLinkRule() *UnnecessaryCommunicationLinkRule { + return &UnnecessaryCommunicationLinkRule{} } -func Category() types.RiskCategory { +func (*UnnecessaryCommunicationLinkRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "unnecessary-communication-link", Title: "Unnecessary Communication Link", @@ -34,18 +32,18 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*UnnecessaryCommunicationLinkRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *UnnecessaryCommunicationLinkRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] for _, commLink := range technicalAsset.CommunicationLinks { if len(commLink.DataAssetsSent) == 0 && len(commLink.DataAssetsReceived) == 0 { if !technicalAsset.OutOfScope || !input.TechnicalAssets[commLink.TargetId].OutOfScope { - risks = append(risks, createRisk(technicalAsset, commLink)) + risks = append(risks, r.createRisk(technicalAsset, commLink)) } } } @@ -53,10 +51,10 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { return risks } -func createRisk(technicalAsset types.TechnicalAsset, commLink types.CommunicationLink) types.Risk { +func (r *UnnecessaryCommunicationLinkRule) createRisk(technicalAsset types.TechnicalAsset, commLink types.CommunicationLink) types.Risk { title := "Unnecessary Communication Link titled " + commLink.Title + " at technical asset " + technicalAsset.Title + "" risk := types.Risk{ - CategoryId: 
Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, types.LowImpact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: types.LowImpact, diff --git a/pkg/security/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go b/pkg/security/risks/builtin/unnecessary-data-asset-rule.go similarity index 84% rename from pkg/security/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go rename to pkg/security/risks/builtin/unnecessary-data-asset-rule.go index 4c9d6a5b..a5e7b13d 100644 --- a/pkg/security/risks/built-in/unnecessary-data-asset/unnecessary-data-asset-rule.go +++ b/pkg/security/risks/builtin/unnecessary-data-asset-rule.go @@ -1,4 +1,4 @@ -package unnecessary_data_asset +package builtin import ( "sort" @@ -6,15 +6,13 @@ import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type UnnecessaryDataAssetRule struct{} + +func NewUnnecessaryDataAssetRule() *UnnecessaryDataAssetRule { + return &UnnecessaryDataAssetRule{} } -func Category() types.RiskCategory { +func (*UnnecessaryDataAssetRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "unnecessary-data-asset", Title: "Unnecessary Data Asset", @@ -38,11 +36,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*UnnecessaryDataAssetRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *UnnecessaryDataAssetRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) // first create them in memory - otherwise in Go ranging over map is random order // range over them in sorted (hence re-producible) way: @@ -72,16 +70,16 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { } sort.Strings(keys) for _, unusedDataAssetID := range keys { - 
risks = append(risks, createRisk(input, unusedDataAssetID)) + risks = append(risks, r.createRisk(input, unusedDataAssetID)) } return risks } -func createRisk(input *types.ParsedModel, unusedDataAssetID string) types.Risk { +func (r *UnnecessaryDataAssetRule) createRisk(input *types.ParsedModel, unusedDataAssetID string) types.Risk { unusedDataAsset := input.DataAssets[unusedDataAssetID] title := "Unnecessary Data Asset named " + unusedDataAsset.Title + "" risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, types.LowImpact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: types.LowImpact, diff --git a/pkg/security/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go b/pkg/security/risks/builtin/unnecessary-data-transfer-rule.go similarity index 83% rename from pkg/security/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go rename to pkg/security/risks/builtin/unnecessary-data-transfer-rule.go index 3b18d8b2..0cd99c7b 100644 --- a/pkg/security/risks/built-in/unnecessary-data-transfer/unnecessary-data-transfer-rule.go +++ b/pkg/security/risks/builtin/unnecessary-data-transfer-rule.go @@ -1,4 +1,4 @@ -package unnecessary_data_transfer +package builtin import ( "sort" @@ -6,15 +6,13 @@ import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type UnnecessaryDataTransferRule struct{} + +func NewUnnecessaryDataTransferRule() *UnnecessaryDataTransferRule { + return &UnnecessaryDataTransferRule{} } -func Category() types.RiskCategory { +func (*UnnecessaryDataTransferRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "unnecessary-data-transfer", Title: "Unnecessary Data Transfer", @@ -43,11 +41,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { 
+func (*UnnecessaryDataTransferRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *UnnecessaryDataTransferRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] @@ -60,7 +58,7 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { if targetAsset.Technology.IsUnnecessaryDataTolerated() { continue } - risks = checkRisksAgainstTechnicalAsset(input, risks, technicalAsset, outgoingDataFlow, false) + risks = r.checkRisksAgainstTechnicalAsset(input, risks, technicalAsset, outgoingDataFlow, false) } // incoming data flows commLinks := input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id] @@ -70,13 +68,13 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { if targetAsset.Technology.IsUnnecessaryDataTolerated() { continue } - risks = checkRisksAgainstTechnicalAsset(input, risks, technicalAsset, incomingDataFlow, true) + risks = r.checkRisksAgainstTechnicalAsset(input, risks, technicalAsset, incomingDataFlow, true) } } return risks } -func checkRisksAgainstTechnicalAsset(input *types.ParsedModel, risks []types.Risk, technicalAsset types.TechnicalAsset, +func (r *UnnecessaryDataTransferRule) checkRisksAgainstTechnicalAsset(input *types.ParsedModel, risks []types.Risk, technicalAsset types.TechnicalAsset, dataFlow types.CommunicationLink, inverseDirection bool) []types.Risk { for _, transferredDataAssetId := range dataFlow.DataAssetsSent { if !technicalAsset.ProcessesOrStoresDataAsset(transferredDataAssetId) { @@ -88,7 +86,7 @@ func checkRisksAgainstTechnicalAsset(input *types.ParsedModel, risks []types.Ris commPartnerId = dataFlow.SourceId } commPartnerAsset := input.TechnicalAssets[commPartnerId] - risk := createRisk(technicalAsset, transferredDataAsset, commPartnerAsset) + risk := r.createRisk(technicalAsset, transferredDataAsset, 
commPartnerAsset) if isNewRisk(risks, risk) { risks = append(risks, risk) } @@ -105,7 +103,7 @@ func checkRisksAgainstTechnicalAsset(input *types.ParsedModel, risks []types.Ris commPartnerId = dataFlow.SourceId } commPartnerAsset := input.TechnicalAssets[commPartnerId] - risk := createRisk(technicalAsset, transferredDataAsset, commPartnerAsset) + risk := r.createRisk(technicalAsset, transferredDataAsset, commPartnerAsset) if isNewRisk(risks, risk) { risks = append(risks, risk) } @@ -124,7 +122,7 @@ func isNewRisk(risks []types.Risk, risk types.Risk) bool { return true } -func createRisk(technicalAsset types.TechnicalAsset, dataAssetTransferred types.DataAsset, commPartnerAsset types.TechnicalAsset) types.Risk { +func (r *UnnecessaryDataTransferRule) createRisk(technicalAsset types.TechnicalAsset, dataAssetTransferred types.DataAsset, commPartnerAsset types.TechnicalAsset) types.Risk { moreRisky := dataAssetTransferred.Confidentiality == types.StrictlyConfidential || dataAssetTransferred.Integrity == types.MissionCritical impact := types.LowImpact @@ -135,7 +133,7 @@ func createRisk(technicalAsset types.TechnicalAsset, dataAssetTransferred types. 
title := "Unnecessary Data Transfer of " + dataAssetTransferred.Title + " data at " + technicalAsset.Title + " " + "from/to " + commPartnerAsset.Title + "" risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, impact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go b/pkg/security/risks/builtin/unnecessary-technical-asset-rule.go similarity index 80% rename from pkg/security/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go rename to pkg/security/risks/builtin/unnecessary-technical-asset-rule.go index e84e7602..f2ffa71f 100644 --- a/pkg/security/risks/built-in/unnecessary-technical-asset/unnecessary-technical-asset-rule.go +++ b/pkg/security/risks/builtin/unnecessary-technical-asset-rule.go @@ -1,18 +1,16 @@ -package unnecessary_technical_asset +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type UnnecessaryTechnicalAssetRule struct{} + +func NewUnnecessaryTechnicalAssetRule() *UnnecessaryTechnicalAssetRule { + return &UnnecessaryTechnicalAssetRule{} } -func Category() types.RiskCategory { +func (*UnnecessaryTechnicalAssetRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "unnecessary-technical-asset", Title: "Unnecessary Technical Asset", @@ -35,26 +33,26 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*UnnecessaryTechnicalAssetRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *UnnecessaryTechnicalAssetRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range 
input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] if len(technicalAsset.DataAssetsProcessed) == 0 && len(technicalAsset.DataAssetsStored) == 0 || (len(technicalAsset.CommunicationLinks) == 0 && len(input.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id]) == 0) { - risks = append(risks, createRisk(technicalAsset)) + risks = append(risks, r.createRisk(technicalAsset)) } } return risks } -func createRisk(technicalAsset types.TechnicalAsset) types.Risk { +func (r *UnnecessaryTechnicalAssetRule) createRisk(technicalAsset types.TechnicalAsset) types.Risk { title := "Unnecessary Technical Asset named " + technicalAsset.Title + "" risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, types.LowImpact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: types.LowImpact, diff --git a/pkg/security/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go b/pkg/security/risks/builtin/untrusted-deserialization-rule.go similarity index 85% rename from pkg/security/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go rename to pkg/security/risks/builtin/untrusted-deserialization-rule.go index 528c1637..d23428b4 100644 --- a/pkg/security/risks/built-in/untrusted-deserialization/untrusted-deserialization-rule.go +++ b/pkg/security/risks/builtin/untrusted-deserialization-rule.go @@ -1,18 +1,16 @@ -package untrusted_deserialization +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type UntrustedDeserializationRule struct{} + +func NewUntrustedDeserializationRule() *UntrustedDeserializationRule { + return &UntrustedDeserializationRule{} } -func Category() types.RiskCategory { +func (*UntrustedDeserializationRule) Category() types.RiskCategory 
{ return types.RiskCategory{ Id: "untrusted-deserialization", Title: "Untrusted Deserialization", @@ -40,11 +38,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*UntrustedDeserializationRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *UntrustedDeserializationRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] @@ -73,13 +71,13 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { } } if hasOne { - risks = append(risks, createRisk(input, technicalAsset, acrossTrustBoundary, commLinkTitle)) + risks = append(risks, r.createRisk(input, technicalAsset, acrossTrustBoundary, commLinkTitle)) } } return risks } -func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset, acrossTrustBoundary bool, commLinkTitle string) types.Risk { +func (r *UntrustedDeserializationRule) createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset, acrossTrustBoundary bool, commLinkTitle string) types.Risk { title := "Untrusted Deserialization risk at " + technicalAsset.Title + "" impact := types.HighImpact likelihood := types.Likely @@ -93,7 +91,7 @@ func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAs impact = types.VeryHighImpact } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(likelihood, impact), ExploitationLikelihood: likelihood, ExploitationImpact: impact, diff --git a/pkg/security/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go b/pkg/security/risks/builtin/wrong-communication-link-content-rule.go similarity index 80% rename from pkg/security/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go rename to 
pkg/security/risks/builtin/wrong-communication-link-content-rule.go index dabdfb9f..7f4997c7 100644 --- a/pkg/security/risks/built-in/wrong-communication-link-content/wrong-communication-link-content-rule.go +++ b/pkg/security/risks/builtin/wrong-communication-link-content-rule.go @@ -1,18 +1,16 @@ -package wrong_communication_link_content +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type WrongCommunicationLinkContentRule struct{} + +func NewWrongCommunicationLinkContentRule() *WrongCommunicationLinkContentRule { + return &WrongCommunicationLinkContentRule{} } -func Category() types.RiskCategory { +func (*WrongCommunicationLinkContentRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "wrong-communication-link-content", Title: "Wrong Communication Link Content", @@ -35,38 +33,38 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*WrongCommunicationLinkContentRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *WrongCommunicationLinkContentRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, techAsset := range input.TechnicalAssets { for _, commLink := range techAsset.CommunicationLinks { // check readonly consistency if commLink.Readonly { if len(commLink.DataAssetsReceived) == 0 { - risks = append(risks, createRisk(techAsset, commLink, + risks = append(risks, r.createRisk(techAsset, commLink, "(data assets sent/received not matching the communication link's readonly flag)")) } } else { if len(commLink.DataAssetsSent) == 0 { - risks = append(risks, createRisk(techAsset, commLink, + risks = append(risks, r.createRisk(techAsset, commLink, "(data assets sent/received not matching the communication link's readonly flag)")) } } 
// check for protocol inconsistencies targetAsset := input.TechnicalAssets[commLink.TargetId] if commLink.Protocol == types.InProcessLibraryCall && targetAsset.Technology != types.Library { - risks = append(risks, createRisk(techAsset, commLink, + risks = append(risks, r.createRisk(techAsset, commLink, "(protocol type \""+types.InProcessLibraryCall.String()+"\" does not match target technology type \""+targetAsset.Technology.String()+"\": expected \""+types.Library.String()+"\")")) } if commLink.Protocol == types.LocalFileAccess && targetAsset.Technology != types.LocalFileSystem { - risks = append(risks, createRisk(techAsset, commLink, + risks = append(risks, r.createRisk(techAsset, commLink, "(protocol type \""+types.LocalFileAccess.String()+"\" does not match target technology type \""+targetAsset.Technology.String()+"\": expected \""+types.LocalFileSystem.String()+"\")")) } if commLink.Protocol == types.ContainerSpawning && targetAsset.Machine != types.Container { - risks = append(risks, createRisk(techAsset, commLink, + risks = append(risks, r.createRisk(techAsset, commLink, "(protocol type \""+types.ContainerSpawning.String()+"\" does not match target machine type \""+targetAsset.Machine.String()+"\": expected \""+types.Container.String()+"\")")) } } @@ -74,11 +72,11 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { return risks } -func createRisk(technicalAsset types.TechnicalAsset, commLink types.CommunicationLink, reason string) types.Risk { +func (r *WrongCommunicationLinkContentRule) createRisk(technicalAsset types.TechnicalAsset, commLink types.CommunicationLink, reason string) types.Risk { title := "Wrong Communication Link Content " + reason + " at " + technicalAsset.Title + " " + "regarding communication link " + commLink.Title + "" risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, types.LowImpact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: 
types.LowImpact, diff --git a/pkg/security/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go b/pkg/security/risks/builtin/wrong-trust-boundary-content.go similarity index 80% rename from pkg/security/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go rename to pkg/security/risks/builtin/wrong-trust-boundary-content.go index bd68e8a2..81959f8a 100644 --- a/pkg/security/risks/built-in/wrong-trust-boundary-content/wrong-trust-boundary-content.go +++ b/pkg/security/risks/builtin/wrong-trust-boundary-content.go @@ -1,18 +1,16 @@ -package wrong_trust_boundary_content +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type WrongTrustBoundaryContentRule struct{} + +func NewWrongTrustBoundaryContentRule() *WrongTrustBoundaryContentRule { + return &WrongTrustBoundaryContentRule{} } -func Category() types.RiskCategory { +func (*WrongTrustBoundaryContentRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "wrong-trust-boundary-content", Title: "Wrong Trust Boundary Content", @@ -34,18 +32,18 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*WrongTrustBoundaryContentRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *WrongTrustBoundaryContentRule) GenerateRisks(input *types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, trustBoundary := range input.TrustBoundaries { if trustBoundary.Type == types.NetworkPolicyNamespaceIsolation { for _, techAssetID := range trustBoundary.TechnicalAssetsInside { techAsset := input.TechnicalAssets[techAssetID] if techAsset.Machine != types.Container && techAsset.Machine != types.Serverless { - risks = append(risks, createRisk(techAsset)) + risks = append(risks, r.createRisk(techAsset)) 
} } } @@ -53,10 +51,10 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { return risks } -func createRisk(technicalAsset types.TechnicalAsset) types.Risk { +func (r *WrongTrustBoundaryContentRule) createRisk(technicalAsset types.TechnicalAsset) types.Risk { title := "Wrong Trust Boundary Content (non-container asset inside container trust boundary) at " + technicalAsset.Title + "" risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.Unlikely, types.LowImpact), ExploitationLikelihood: types.Unlikely, ExploitationImpact: types.LowImpact, diff --git a/pkg/security/risks/built-in/xml-external-entity/xml-external-entity-rule.go b/pkg/security/risks/builtin/xml-external-entity-rule.go similarity index 84% rename from pkg/security/risks/built-in/xml-external-entity/xml-external-entity-rule.go rename to pkg/security/risks/builtin/xml-external-entity-rule.go index 01356e2d..f7bf8d09 100644 --- a/pkg/security/risks/built-in/xml-external-entity/xml-external-entity-rule.go +++ b/pkg/security/risks/builtin/xml-external-entity-rule.go @@ -1,18 +1,16 @@ -package xml_external_entity +package builtin import ( "github.com/threagile/threagile/pkg/security/types" ) -func Rule() types.RiskRule { - return types.RiskRule{ - Category: Category, - SupportedTags: SupportedTags, - GenerateRisks: GenerateRisks, - } +type XmlExternalEntityRule struct{} + +func NewXmlExternalEntityRule() *XmlExternalEntityRule { + return &XmlExternalEntityRule{} } -func Category() types.RiskCategory { +func (*XmlExternalEntityRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "xml-external-entity", Title: "XML External Entity (XXE)", @@ -38,11 +36,11 @@ func Category() types.RiskCategory { } } -func SupportedTags() []string { +func (*XmlExternalEntityRule) SupportedTags() []string { return []string{} } -func GenerateRisks(input *types.ParsedModel) []types.Risk { +func (r *XmlExternalEntityRule) GenerateRisks(input 
*types.ParsedModel) []types.Risk { risks := make([]types.Risk, 0) for _, id := range input.SortedTechnicalAssetIDs() { technicalAsset := input.TechnicalAssets[id] @@ -51,14 +49,14 @@ func GenerateRisks(input *types.ParsedModel) []types.Risk { } for _, format := range technicalAsset.DataFormatsAccepted { if format == types.XML { - risks = append(risks, createRisk(input, technicalAsset)) + risks = append(risks, r.createRisk(input, technicalAsset)) } } } return risks } -func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { +func (r *XmlExternalEntityRule) createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAsset) types.Risk { title := "XML External Entity (XXE) risk at " + technicalAsset.Title + "" impact := types.MediumImpact if technicalAsset.HighestConfidentiality(parsedModel) == types.StrictlyConfidential || @@ -67,7 +65,7 @@ func createRisk(parsedModel *types.ParsedModel, technicalAsset types.TechnicalAs impact = types.HighImpact } risk := types.Risk{ - CategoryId: Category().Id, + CategoryId: r.Category().Id, Severity: types.CalculateSeverity(types.VeryLikely, impact), ExploitationLikelihood: types.VeryLikely, ExploitationImpact: impact, diff --git a/pkg/security/risks/risks.go b/pkg/security/risks/risks.go index 3eb521ac..33fae818 100644 --- a/pkg/security/risks/risks.go +++ b/pkg/security/risks/risks.go @@ -1,94 +1,59 @@ package risks import ( - accidentalsecretleak "github.com/threagile/threagile/pkg/security/risks/built-in/accidental-secret-leak" - codebackdooring "github.com/threagile/threagile/pkg/security/risks/built-in/code-backdooring" - containerbaseimagebackdooring "github.com/threagile/threagile/pkg/security/risks/built-in/container-baseimage-backdooring" - containerplatformescape "github.com/threagile/threagile/pkg/security/risks/built-in/container-platform-escape" - crosssiterequestforgery "github.com/threagile/threagile/pkg/security/risks/built-in/cross-site-request-forgery" - 
crosssitescripting "github.com/threagile/threagile/pkg/security/risks/built-in/cross-site-scripting" - dosriskyaccessacrosstrustboundary "github.com/threagile/threagile/pkg/security/risks/built-in/dos-risky-access-across-trust-boundary" - incompletemodel "github.com/threagile/threagile/pkg/security/risks/built-in/incomplete-model" - ldapinjection "github.com/threagile/threagile/pkg/security/risks/built-in/ldap-injection" - missingauthentication "github.com/threagile/threagile/pkg/security/risks/built-in/missing-authentication" - missingauthenticationsecondfactor "github.com/threagile/threagile/pkg/security/risks/built-in/missing-authentication-second-factor" - missingbuildinfrastructure "github.com/threagile/threagile/pkg/security/risks/built-in/missing-build-infrastructure" - missingcloudhardening "github.com/threagile/threagile/pkg/security/risks/built-in/missing-cloud-hardening" - missingfilevalidation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-file-validation" - missinghardening "github.com/threagile/threagile/pkg/security/risks/built-in/missing-hardening" - missingidentitypropagation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-identity-propagation" - missingidentityproviderisolation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-identity-provider-isolation" - missingidentitystore "github.com/threagile/threagile/pkg/security/risks/built-in/missing-identity-store" - missingnetworksegmentation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-network-segmentation" - missingvault "github.com/threagile/threagile/pkg/security/risks/built-in/missing-vault" - missingvaultisolation "github.com/threagile/threagile/pkg/security/risks/built-in/missing-vault-isolation" - missingwaf "github.com/threagile/threagile/pkg/security/risks/built-in/missing-waf" - mixedtargetsonsharedruntime "github.com/threagile/threagile/pkg/security/risks/built-in/mixed-targets-on-shared-runtime" - 
pathtraversal "github.com/threagile/threagile/pkg/security/risks/built-in/path-traversal" - pushinsteadofpulldeployment "github.com/threagile/threagile/pkg/security/risks/built-in/push-instead-of-pull-deployment" - searchqueryinjection "github.com/threagile/threagile/pkg/security/risks/built-in/search-query-injection" - serversiderequestforgery "github.com/threagile/threagile/pkg/security/risks/built-in/server-side-request-forgery" - serviceregistrypoisoning "github.com/threagile/threagile/pkg/security/risks/built-in/service-registry-poisoning" - sqlnosqlinjection "github.com/threagile/threagile/pkg/security/risks/built-in/sql-nosql-injection" - uncheckeddeployment "github.com/threagile/threagile/pkg/security/risks/built-in/unchecked-deployment" - unencryptedasset "github.com/threagile/threagile/pkg/security/risks/built-in/unencrypted-asset" - unencryptedcommunication "github.com/threagile/threagile/pkg/security/risks/built-in/unencrypted-communication" - unguardedaccessfrominternet "github.com/threagile/threagile/pkg/security/risks/built-in/unguarded-access-from-internet" - unguardeddirectdatastoreaccess "github.com/threagile/threagile/pkg/security/risks/built-in/unguarded-direct-datastore-access" - unnecessarycommunicationlink "github.com/threagile/threagile/pkg/security/risks/built-in/unnecessary-communication-link" - unnecessarydataasset "github.com/threagile/threagile/pkg/security/risks/built-in/unnecessary-data-asset" - unnecessarydatatransfer "github.com/threagile/threagile/pkg/security/risks/built-in/unnecessary-data-transfer" - unnecessarytechnicalasset "github.com/threagile/threagile/pkg/security/risks/built-in/unnecessary-technical-asset" - untrusteddeserialization "github.com/threagile/threagile/pkg/security/risks/built-in/untrusted-deserialization" - wrongcommunicationlinkcontent "github.com/threagile/threagile/pkg/security/risks/built-in/wrong-communication-link-content" - wrongtrustboundarycontent 
"github.com/threagile/threagile/pkg/security/risks/built-in/wrong-trust-boundary-content" - xmlexternalentity "github.com/threagile/threagile/pkg/security/risks/built-in/xml-external-entity" + "github.com/threagile/threagile/pkg/security/risks/builtin" "github.com/threagile/threagile/pkg/security/types" ) -func GetBuiltInRiskRules() []types.RiskRule { - return []types.RiskRule{ - accidentalsecretleak.Rule(), - codebackdooring.Rule(), - containerbaseimagebackdooring.Rule(), - containerplatformescape.Rule(), - crosssiterequestforgery.Rule(), - crosssitescripting.Rule(), - dosriskyaccessacrosstrustboundary.Rule(), - incompletemodel.Rule(), - ldapinjection.Rule(), - missingauthentication.Rule(), - missingauthenticationsecondfactor.Rule(), - missingbuildinfrastructure.Rule(), - missingcloudhardening.Rule(), - missingfilevalidation.Rule(), - missinghardening.Rule(), - missingidentitypropagation.Rule(), - missingidentityproviderisolation.Rule(), - missingidentitystore.Rule(), - missingnetworksegmentation.Rule(), - missingvault.Rule(), - missingvaultisolation.Rule(), - missingwaf.Rule(), - mixedtargetsonsharedruntime.Rule(), - pathtraversal.Rule(), - pushinsteadofpulldeployment.Rule(), - searchqueryinjection.Rule(), - serversiderequestforgery.Rule(), - serviceregistrypoisoning.Rule(), - sqlnosqlinjection.Rule(), - uncheckeddeployment.Rule(), - unencryptedasset.Rule(), - unencryptedcommunication.Rule(), - unguardedaccessfrominternet.Rule(), - unguardeddirectdatastoreaccess.Rule(), - unnecessarycommunicationlink.Rule(), - unnecessarydataasset.Rule(), - unnecessarydatatransfer.Rule(), - unnecessarytechnicalasset.Rule(), - untrusteddeserialization.Rule(), - wrongcommunicationlinkcontent.Rule(), - wrongtrustboundarycontent.Rule(), - xmlexternalentity.Rule(), +type RiskRule interface { + Category() types.RiskCategory + SupportedTags() []string + GenerateRisks(*types.ParsedModel) []types.Risk +} + +func GetBuiltInRiskRules() []RiskRule { + return []RiskRule{ + 
builtin.NewAccidentalSecretLeakRule(), + builtin.NewCodeBackdooringRule(), + builtin.NewContainerBaseImageBackdooringRule(), + builtin.NewContainerPlatformEscapeRule(), + builtin.NewCrossSiteRequestForgeryRule(), + builtin.NewCrossSiteScriptingRule(), + builtin.NewDosRiskyAccessAcrossTrustBoundaryRule(), + builtin.NewIncompleteModelRule(), + builtin.NewLdapInjectionRule(), + builtin.NewMissingAuthenticationRule(), + builtin.NewMissingAuthenticationSecondFactorRule(builtin.NewMissingAuthenticationRule()), + builtin.NewMissingBuildInfrastructureRule(), + builtin.NewMissingCloudHardeningRule(), + builtin.NewMissingFileValidationRule(), + builtin.NewMissingHardeningRule(), + builtin.NewMissingIdentityPropagationRule(), + builtin.NewMissingIdentityProviderIsolationRule(), + builtin.NewMissingIdentityStoreRule(), + builtin.NewMissingNetworkSegmentationRule(), + builtin.NewMissingVaultRule(), + builtin.NewMissingVaultIsolationRule(), + builtin.NewMissingWafRule(), + builtin.NewMixedTargetsOnSharedRuntimeRule(), + builtin.NewPathTraversalRule(), + builtin.NewPushInsteadPullDeploymentRule(), + builtin.NewSearchQueryInjectionRule(), + builtin.NewServerSideRequestForgeryRule(), + builtin.NewServiceRegistryPoisoningRule(), + builtin.NewSqlNoSqlInjectionRule(), + builtin.NewUncheckedDeploymentRule(), + builtin.NewUnencryptedAssetRule(), + builtin.NewUnencryptedCommunicationRule(), + builtin.NewUnguardedAccessFromInternetRule(), + builtin.NewUnguardedDirectDatastoreAccessRule(), + builtin.NewUnnecessaryCommunicationLinkRule(), + builtin.NewUnnecessaryDataAssetRule(), + builtin.NewUnnecessaryDataTransferRule(), + builtin.NewUnnecessaryTechnicalAssetRule(), + builtin.NewUntrustedDeserializationRule(), + builtin.NewWrongCommunicationLinkContentRule(), + builtin.NewWrongTrustBoundaryContentRule(), + builtin.NewXmlExternalEntityRule(), } } diff --git a/pkg/security/types/model.go b/pkg/security/types/model.go index ba14f108..ff735391 100644 --- a/pkg/security/types/model.go +++ 
b/pkg/security/types/model.go @@ -78,26 +78,6 @@ func (parsedModel *ParsedModel) HasNotYetAnyDirectNonWildcardRiskTracking(synthe return true } -func (parsedModel *ParsedModel) ApplyRisk(rule RiskRule, skippedRules *map[string]bool) { - id := rule.Category().Id - _, ok := (*skippedRules)[id] - - if ok { - fmt.Printf("Skipping risk rule %q\n", rule.Category().Id) - delete(*skippedRules, rule.Category().Id) - } else { - parsedModel.AddToListOfSupportedTags(rule.SupportedTags()) - generatedRisks := rule.GenerateRisks(parsedModel) - if generatedRisks != nil { - if len(generatedRisks) > 0 { - parsedModel.GeneratedRisksByCategory[rule.Category().Id] = generatedRisks - } - } else { - fmt.Printf("Failed to generate risks for %q\n", id) - } - } -} - func (parsedModel *ParsedModel) CheckTags(tags []string, where string) ([]string, error) { var tagsUsed = make([]string, 0) if tags != nil { diff --git a/pkg/security/types/risk-rule.go b/pkg/security/types/risk-rule.go deleted file mode 100644 index 33fca296..00000000 --- a/pkg/security/types/risk-rule.go +++ /dev/null @@ -1,7 +0,0 @@ -package types - -type RiskRule struct { - Category func() RiskCategory - SupportedTags func() []string - GenerateRisks func(input *ParsedModel) []Risk -} From 77bc54966914cf021ea8007bc568632d523a1086 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Tue, 9 Jan 2024 14:50:11 +0000 Subject: [PATCH 48/68] Tidy up go mod --- go.mod | 14 ++------------ go.sum | 32 +------------------------------- 2 files changed, 3 insertions(+), 43 deletions(-) diff --git a/go.mod b/go.mod index a165e526..c3b9162d 100644 --- a/go.mod +++ b/go.mod @@ -6,6 +6,7 @@ require ( github.com/gin-gonic/gin v1.9.1 github.com/google/uuid v1.5.0 github.com/jung-kurt/gofpdf v1.16.2 + github.com/spf13/pflag v1.0.5 github.com/wcharczuk/go-chart v2.0.1+incompatible github.com/xuri/excelize/v2 v2.8.0 golang.org/x/crypto v0.17.0 @@ -16,34 +17,23 @@ require ( github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // 
indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/gin-contrib/sse v0.1.0 // indirect - github.com/go-ole/go-ole v1.2.6 // indirect github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect github.com/goccy/go-json v0.10.2 // indirect - github.com/google/gops v0.3.28 // indirect + github.com/google/go-cmp v0.5.9 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/leodido/go-urn v1.2.4 // indirect - github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect github.com/pkg/errors v0.8.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect github.com/richardlehane/mscfb v1.0.4 // indirect github.com/richardlehane/msoleps v1.0.3 // indirect - github.com/shirou/gopsutil/v3 v3.23.7 // indirect - github.com/shoenig/go-m1cpu v0.1.6 // indirect - github.com/spf13/pflag v1.0.5 // indirect - github.com/tklauser/go-sysconf v0.3.11 // indirect - github.com/tklauser/numcpus v0.6.0 // indirect github.com/twitchyliquid64/golang-asm v0.15.1 // indirect - github.com/xlab/treeprint v1.2.0 // indirect - github.com/yusufpapurcu/wmi v1.2.3 // indirect golang.org/x/sys v0.15.0 // indirect golang.org/x/text v0.14.0 // indirect - rsc.io/goversion v1.2.0 // indirect ) require ( diff --git a/go.sum b/go.sum index 278ce0b6..79872297 100644 --- a/go.sum +++ b/go.sum @@ -24,8 +24,6 @@ github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= 
github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= -github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= -github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= @@ -37,12 +35,9 @@ github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g= github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= -github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= -github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/gops v0.3.28 h1:2Xr57tqKAmQYRAfG12E+yLcoa2Y42UJo2lOrUFL9ark= -github.com/google/gops v0.3.28/go.mod h1:6f6+Nl8LcHrzJwi8+p0ii+vmBFSlB4f8cOOkTJ7sk4c= github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU= github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= @@ -58,8 +53,6 @@ github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZY github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= 
github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= -github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= -github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -78,8 +71,6 @@ github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= -github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/richardlehane/mscfb v1.0.4 h1:WULscsljNPConisD5hR0+OyZjwK46Pfyr6mPu5ZawpM= github.com/richardlehane/mscfb v1.0.4/go.mod h1:YzVpcZg9czvAuhk9T+a3avCpcFPMUWm7gK3DypaEsUk= github.com/richardlehane/msoleps v1.0.1/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg= @@ -87,11 +78,6 @@ github.com/richardlehane/msoleps v1.0.3 h1:aznSZzrwYRl3rLKRT3gUk9am7T/mLNSnJINvN github.com/richardlehane/msoleps v1.0.3/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w= 
-github.com/shirou/gopsutil/v3 v3.23.7 h1:C+fHO8hfIppoJ1WdsVm1RoI0RwXoNdfTK7yWXV0wVj4= -github.com/shirou/gopsutil/v3 v3.23.7/go.mod h1:c4gnmoRC0hQuaLqvxnx1//VXQ0Ms/X9UnJF8pddY5z4= -github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= -github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= -github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k= github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= @@ -108,18 +94,12 @@ github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= -github.com/tklauser/go-sysconf v0.3.11 h1:89WgdJhk5SNwJfu+GKyYveZ4IaJ7xAkecBo+KdJV0CM= -github.com/tklauser/go-sysconf v0.3.11/go.mod h1:GqXfhXY3kiPa0nAXPDIQIWzJbMCB7AmcWpGR8lSZfqI= -github.com/tklauser/numcpus v0.6.0 h1:kebhY2Qt+3U6RNK7UqpYNA+tJ23IBEGKkB7JQBfDYms= -github.com/tklauser/numcpus v0.6.0/go.mod h1:FEZLMke0lhOUG6w2JadTzp0a+Nl8PF/GFkQ5UVIcaL4= github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= github.com/wcharczuk/go-chart v2.0.1+incompatible h1:0pz39ZAycJFF7ju/1mepnk26RLVLBCWz1STcD3doU0A= github.com/wcharczuk/go-chart v2.0.1+incompatible/go.mod h1:PF5tmL4EIx/7Wf+hEkpCqYi5He4u90sw+0+6FhrryuE= 
-github.com/xlab/treeprint v1.2.0 h1:HzHnuAF1plUN2zGlAFHbSQP2qJ0ZAD3XF5XD7OesXRQ= -github.com/xlab/treeprint v1.2.0/go.mod h1:gj5Gd3gPdKtR1ikdDK6fnFLdmIS0X30kTTuNd/WEJu0= github.com/xuri/efp v0.0.0-20230802181842-ad255f2331ca/go.mod h1:ybY/Jr0T0GTCnYjKqmdwxyxn2BQf2RcQIIvex5QldPI= github.com/xuri/efp v0.0.0-20231025114914-d1ff6096ae53 h1:Chd9DkqERQQuHpXjR/HSV1jLZA6uaoiwwH3vSuF3IW0= github.com/xuri/efp v0.0.0-20231025114914-d1ff6096ae53/go.mod h1:ybY/Jr0T0GTCnYjKqmdwxyxn2BQf2RcQIIvex5QldPI= @@ -129,8 +109,6 @@ github.com/xuri/nfp v0.0.0-20230819163627-dc951e3ffe1a/go.mod h1:WwHg+CVyzlv/TX9 github.com/xuri/nfp v0.0.0-20230919160717-d98342af3f05 h1:qhbILQo1K3mphbwKh1vNm4oGezE1eF9fQWmNiIpSfI4= github.com/xuri/nfp v0.0.0-20230919160717-d98342af3f05/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= -github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= golang.org/x/arch v0.6.0 h1:S0JTfE48HbRj80+4tbvZDYsJ3tGv6BUU3XxyZ7CirAc= golang.org/x/arch v0.6.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= @@ -157,17 +135,13 @@ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys 
v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= @@ -189,8 +163,6 @@ golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= @@ -199,6 +171,4 @@ gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod 
h1:K4uyk7z7BCEPqu6E+C gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50= -rsc.io/goversion v1.2.0 h1:SPn+NLTiAG7w30IRK/DKp1BjvpWabYgxlLp/+kx5J8w= -rsc.io/goversion v1.2.0/go.mod h1:Eih9y/uIBS3ulggl7KNJ09xGSLcuNaLgmvvqa07sgfo= rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= From 8f26d96db032849c63c45829b1dfc7ea2cd8226d Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Tue, 9 Jan 2024 14:58:16 -0800 Subject: [PATCH 49/68] - added list of contributors in addition to author to schema and input structure - added application description to schema and input structure - added contact information to author and contributors to schema and input structure - eliminated globals => TODO: add the added items to the generated report --- cmd/threagile/main.go | 2 +- cmd/threagile/main_test.go | 4 +- internal/threagile/about.go | 77 +++++---- internal/threagile/examples.go | 185 +++++++++++---------- internal/threagile/flags.go | 108 +++++++------ internal/threagile/macros.go | 145 +++++++++-------- internal/threagile/root.go | 215 ++++++++++++------------- internal/threagile/rules.go | 107 ++++++------ internal/threagile/threagile.go | 22 +++ internal/threagile/types.go | 66 ++++---- pkg/input/input.go | 182 +++++++++++---------- pkg/macros/add-build-pipeline-macro.go | 72 ++++----- pkg/macros/add-vault-macro.go | 22 +-- pkg/macros/macros.go | 6 +- pkg/macros/pretty-print-macro.go | 4 +- pkg/macros/remove-unused-tags-macro.go | 4 +- pkg/macros/seed-risk-tracking-macro.go | 8 +- pkg/macros/seed-tags-macro.go | 4 +- pkg/model/parse.go | 2 +- pkg/model/read.go | 4 +- pkg/server/model.go | 20 +-- support/schema.json | 67 +++++++- 22 files changed, 712 insertions(+), 614 deletions(-) create mode 100644 internal/threagile/threagile.go diff --git a/cmd/threagile/main.go 
b/cmd/threagile/main.go index da2d2e53..c3d008ac 100644 --- a/cmd/threagile/main.go +++ b/cmd/threagile/main.go @@ -9,5 +9,5 @@ const ( ) func main() { - threagile.Execute() + new(threagile.Threagile).Init().Execute() } diff --git a/cmd/threagile/main_test.go b/cmd/threagile/main_test.go index 1df0f50d..5664f3c7 100644 --- a/cmd/threagile/main_test.go +++ b/cmd/threagile/main_test.go @@ -15,7 +15,7 @@ import ( func TestParseModelYaml(t *testing.T) { flatModelFile := filepath.Join("..", "..", "test", "all.yaml") - flatModel := *new(input.ModelInput).Defaults() + flatModel := *new(input.Model).Defaults() flatLoadError := flatModel.Load(flatModelFile) if flatLoadError != nil { t.Errorf("unable to parse model yaml %q: %v", flatModelFile, flatLoadError) @@ -32,7 +32,7 @@ func TestParseModelYaml(t *testing.T) { } splitModelFile := filepath.Join("..", "..", "test", "main.yaml") - splitModel := *new(input.ModelInput).Defaults() + splitModel := *new(input.Model).Defaults() splitLoadError := splitModel.Load(splitModelFile) if splitLoadError != nil { t.Errorf("unable to parse model yaml %q: %v", splitModelFile, splitLoadError) diff --git a/internal/threagile/about.go b/internal/threagile/about.go index ab90336f..635b0c71 100644 --- a/internal/threagile/about.go +++ b/internal/threagile/about.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package threagile import ( @@ -13,46 +14,44 @@ import ( "github.com/threagile/threagile/pkg/docs" ) -var versionCmd = &cobra.Command{ - Use: "version", - Short: "Get version information", - Long: "\n" + docs.Logo + "\n\n" + docs.VersionText, -} +func (what *Threagile) initAbout() *Threagile { + what.rootCmd.AddCommand(&cobra.Command{ + Use: "version", + Short: "Get version information", + Long: "\n" + docs.Logo + "\n\n" + docs.VersionText, + }) -var print3rdPartyCmd = &cobra.Command{ - Use: "print-3rd-party-licenses", - Short: "Print 3rd-party license information", - Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\n" + 
docs.ThirdPartyLicenses, -} + what.rootCmd.AddCommand(&cobra.Command{ + Use: "print-3rd-party-licenses", + Short: "Print 3rd-party license information", + Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\n" + docs.ThirdPartyLicenses, + }) -var printLicenseCmd = &cobra.Command{ - Use: "print-license", - Short: "Print license information", - RunE: func(cmd *cobra.Command, args []string) error { - appDir, err := cmd.Flags().GetString(appDirFlagName) - if err != nil { - cmd.Printf("Unable to read app-dir flag: %v", err) - return err - } - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) - if appDir != filepath.Clean(appDir) { - // TODO: do we need this check here? - cmd.Printf("weird app folder %v", appDir) - return errors.New("weird app folder") - } - content, err := os.ReadFile(filepath.Join(appDir, "LICENSE.txt")) - if err != nil { - cmd.Printf("Unable to read license file: %v", err) - return err - } - cmd.Print(string(content)) - cmd.Println() - return nil - }, -} + what.rootCmd.AddCommand(&cobra.Command{ + Use: "print-license", + Short: "Print license information", + RunE: func(cmd *cobra.Command, args []string) error { + appDir, err := cmd.Flags().GetString(appDirFlagName) + if err != nil { + cmd.Printf("Unable to read app-dir flag: %v", err) + return err + } + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + if appDir != filepath.Clean(appDir) { + // TODO: do we need this check here? 
+ cmd.Printf("weird app folder %v", appDir) + return errors.New("weird app folder") + } + content, err := os.ReadFile(filepath.Join(appDir, "LICENSE.txt")) + if err != nil { + cmd.Printf("Unable to read license file: %v", err) + return err + } + cmd.Print(string(content)) + cmd.Println() + return nil + }, + }) -func init() { - rootCmd.AddCommand(versionCmd) - rootCmd.AddCommand(print3rdPartyCmd) - rootCmd.AddCommand(printLicenseCmd) + return what } diff --git a/internal/threagile/examples.go b/internal/threagile/examples.go index 9aa371f3..9e3c26fb 100644 --- a/internal/threagile/examples.go +++ b/internal/threagile/examples.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package threagile import ( @@ -9,106 +10,104 @@ import ( "github.com/threagile/threagile/pkg/examples" ) -var createExampleModelCmd = &cobra.Command{ - Use: "create-example-model", - Short: "Create example threagile model", - Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\njust create an example model named threagile-example-model.yaml in the output directory", - RunE: func(cmd *cobra.Command, args []string) error { - appDir, err := cmd.Flags().GetString(appDirFlagName) - if err != nil { - cmd.Printf("Unable to read app-dir flag: %v", err) - return err - } - outDir, err := cmd.Flags().GetString(outputFlagName) - if err != nil { - cmd.Printf("Unable to read output flag: %v", err) - return err - } +func (what *Threagile) initExamples() *Threagile { + what.rootCmd.AddCommand(&cobra.Command{ + Use: "create-example-model", + Short: "Create example threagile model", + Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\njust create an example model named threagile-example-model.yaml in the output directory", + RunE: func(cmd *cobra.Command, args []string) error { + appDir, err := cmd.Flags().GetString(appDirFlagName) + if err != nil { + cmd.Printf("Unable to read app-dir flag: %v", err) + return err + } + outDir, err := cmd.Flags().GetString(outputFlagName) + if err != nil { + 
cmd.Printf("Unable to read output flag: %v", err) + return err + } - err = examples.CreateExampleModelFile(appDir, outDir) - if err != nil { - cmd.Printf("Unable to copy example model: %v", err) - return err - } + err = examples.CreateExampleModelFile(appDir, outDir) + if err != nil { + cmd.Printf("Unable to copy example model: %v", err) + return err + } - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) - cmd.Println("An example model was created named threagile-example-model.yaml in the output directory.") - cmd.Println() - cmd.Println(docs.Examples) - cmd.Println() - return nil - }, -} + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println("An example model was created named threagile-example-model.yaml in the output directory.") + cmd.Println() + cmd.Println(docs.Examples) + cmd.Println() + return nil + }, + }) -var createStubModelCmd = &cobra.Command{ - Use: "create-stub-model", - Short: "Create stub threagile model", - Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\njust create a minimal stub model named threagile-stub-model.yaml in the output directory", - RunE: func(cmd *cobra.Command, args []string) error { - appDir, err := cmd.Flags().GetString(appDirFlagName) - if err != nil { - cmd.Printf("Unable to read app-dir flag: %v", err) - return err - } - outDir, err := cmd.Flags().GetString(outputFlagName) - if err != nil { - cmd.Printf("Unable to read output flag: %v", err) - return err - } + what.rootCmd.AddCommand(&cobra.Command{ + Use: "create-stub-model", + Short: "Create stub threagile model", + Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\njust create a minimal stub model named threagile-stub-model.yaml in the output directory", + RunE: func(cmd *cobra.Command, args []string) error { + appDir, err := cmd.Flags().GetString(appDirFlagName) + if err != nil { + cmd.Printf("Unable to read app-dir flag: %v", err) + return err + } + outDir, err := cmd.Flags().GetString(outputFlagName) + if err != nil { + cmd.Printf("Unable to 
read output flag: %v", err) + return err + } - err = examples.CreateStubModelFile(appDir, outDir) - if err != nil { - cmd.Printf("Unable to copy stub model: %v", err) - return err - } + err = examples.CreateStubModelFile(appDir, outDir) + if err != nil { + cmd.Printf("Unable to copy stub model: %v", err) + return err + } - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) - cmd.Println("A minimal stub model was created named threagile-stub-model.yaml in the output directory.") - cmd.Println() - cmd.Println(docs.Examples) - cmd.Println() - return nil - }, -} + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println("A minimal stub model was created named threagile-stub-model.yaml in the output directory.") + cmd.Println() + cmd.Println(docs.Examples) + cmd.Println() + return nil + }, + }) -var createEditingSupportCmd = &cobra.Command{ - Use: "create-editing-support", - Short: "Create editing support", - Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\njust create some editing support stuff in the output directory", - RunE: func(cmd *cobra.Command, args []string) error { - appDir, err := cmd.Flags().GetString(appDirFlagName) - if err != nil { - cmd.Printf("Unable to read app-dir flag: %v", err) - return err - } - outDir, err := cmd.Flags().GetString(outputFlagName) - if err != nil { - cmd.Printf("Unable to read output flag: %v", err) - return err - } + what.rootCmd.AddCommand(&cobra.Command{ + Use: "create-editing-support", + Short: "Create editing support", + Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\njust create some editing support stuff in the output directory", + RunE: func(cmd *cobra.Command, args []string) error { + appDir, err := cmd.Flags().GetString(appDirFlagName) + if err != nil { + cmd.Printf("Unable to read app-dir flag: %v", err) + return err + } + outDir, err := cmd.Flags().GetString(outputFlagName) + if err != nil { + cmd.Printf("Unable to read output flag: %v", err) + return err + } - err = 
examples.CreateEditingSupportFiles(appDir, outDir) - if err != nil { - cmd.Printf("Unable to copy editing support files: %v", err) - return err - } + err = examples.CreateEditingSupportFiles(appDir, outDir) + if err != nil { + cmd.Printf("Unable to copy editing support files: %v", err) + return err + } - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) - cmd.Println("The following files were created in the output directory:") - cmd.Println(" - schema.json") - cmd.Println(" - live-templates.txt") - cmd.Println() - cmd.Println("For a perfect editing experience within your IDE of choice you can easily get " + - "model syntax validation and autocompletion (very handy for enum values) as well as live templates: " + - "Just import the schema.json into your IDE and assign it as \"schema\" to each Threagile YAML file. " + - "Also try to import individual parts from the live-templates.txt file into your IDE as live editing templates.") - cmd.Println() - return nil - }, -} + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println("The following files were created in the output directory:") + cmd.Println(" - schema.json") + cmd.Println(" - live-templates.txt") + cmd.Println() + cmd.Println("For a perfect editing experience within your IDE of choice you can easily get " + + "model syntax validation and autocompletion (very handy for enum values) as well as live templates: " + + "Just import the schema.json into your IDE and assign it as \"schema\" to each Threagile YAML file. 
" + + "Also try to import individual parts from the live-templates.txt file into your IDE as live editing templates.") + cmd.Println() + return nil + }, + }) -func init() { - rootCmd.AddCommand(createExampleModelCmd) - rootCmd.AddCommand(createStubModelCmd) - rootCmd.AddCommand(createEditingSupportCmd) + return what } diff --git a/internal/threagile/flags.go b/internal/threagile/flags.go index 0beb318b..3eab3d78 100644 --- a/internal/threagile/flags.go +++ b/internal/threagile/flags.go @@ -1,52 +1,66 @@ /* Copyright © 2023 NAME HERE */ + package threagile -var configFlag *string - -var verboseFlag *bool -var appDirFlag, binDirFlag, outputDirFlag, tempDirFlag *string -var inputFileFlag, raaPluginFlag *string -var serverPortFlag *int -var serverDirFlag *string - -var skipRiskRulesFlag, customRiskRulesPluginFlag *string -var ignoreOrphandedRiskTrackingFlag *bool -var templateFileNameFlag *string -var diagramDpiFlag *int - -var generateDataFlowDiagramFlag, generateDataAssetDiagramFlag, generateRisksJSONFlag, - generateTechnicalAssetsJSONFlag, generateStatsJSONFlag, generateRisksExcelFlag, - generateTagsExcelFlag, generateReportPDFFlag *bool - -const configFlagName = "config" - -const verboseFlagName = "verbose" -const verboseFlagShorthand = "v" - -const appDirFlagName = "app-dir" -const binDirFlagName = "bin-dir" -const outputFlagName = "output" -const tempDirFlagName = "temp-dir" - -const serverDirFlagName = "server-dir" -const serverPortFlagName = "server-port" - -const inputFileFlagName = "model" -const raaPluginFlagName = "raa-run" - -const customRiskRulesPluginFlagName = "custom-risk-rules-plugin" -const diagramDpiFlagName = "diagram-dpi" -const skipRiskRulesFlagName = "skip-risk-rules" -const ignoreOrphandedRiskTrackingFlagName = "ignore-orphaned-risk-tracking" -const templateFileNameFlagName = "background" - -const generateDataFlowDiagramFlagName = "generate-data-flow-diagram" -const generateDataAssetDiagramFlagName = "generate-data-asset-diagram" -const 
generateRisksJSONFlagName = "generate-risks-json" -const generateTechnicalAssetsJSONFlagName = "generate-technical-assets-json" -const generateStatsJSONFlagName = "generate-stats-json" -const generateRisksExcelFlagName = "generate-risks-excel" -const generateTagsExcelFlagName = "generate-tags-excel" -const generateReportPDFFlagName = "generate-report-pdf" +const ( + configFlagName = "config" + + verboseFlagName = "verbose" + verboseFlagShorthand = "v" + + appDirFlagName = "app-dir" + binDirFlagName = "bin-dir" + outputFlagName = "output" + tempDirFlagName = "temp-dir" + + serverDirFlagName = "server-dir" + serverPortFlagName = "server-port" + + inputFileFlagName = "model" + raaPluginFlagName = "raa-run" + + customRiskRulesPluginFlagName = "custom-risk-rules-plugin" + diagramDpiFlagName = "diagram-dpi" + skipRiskRulesFlagName = "skip-risk-rules" + ignoreOrphanedRiskTrackingFlagName = "ignore-orphaned-risk-tracking" + templateFileNameFlagName = "background" + + generateDataFlowDiagramFlagName = "generate-data-flow-diagram" + generateDataAssetDiagramFlagName = "generate-data-asset-diagram" + generateRisksJSONFlagName = "generate-risks-json" + generateTechnicalAssetsJSONFlagName = "generate-technical-assets-json" + generateStatsJSONFlagName = "generate-stats-json" + generateRisksExcelFlagName = "generate-risks-excel" + generateTagsExcelFlagName = "generate-tags-excel" + generateReportPDFFlagName = "generate-report-pdf" +) + +type Flags struct { + configFlag string + verboseFlag bool + appDirFlag string + binDirFlag string + outputDirFlag string + tempDirFlag string + inputFileFlag string + raaPluginFlag string + serverPortFlag int + serverDirFlag string + + skipRiskRulesFlag string + customRiskRulesPluginFlag string + ignoreOrphanedRiskTrackingFlag bool + templateFileNameFlag string + diagramDpiFlag int + + generateDataFlowDiagramFlag bool + generateDataAssetDiagramFlag bool + generateRisksJSONFlag bool + generateTechnicalAssetsJSONFlag bool + generateStatsJSONFlag 
bool + generateRisksExcelFlag bool + generateTagsExcelFlag bool + generateReportPDFFlag bool +} diff --git a/internal/threagile/macros.go b/internal/threagile/macros.go index fe157103..a9b68f0b 100644 --- a/internal/threagile/macros.go +++ b/internal/threagile/macros.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package threagile import ( @@ -14,83 +15,81 @@ import ( "github.com/threagile/threagile/pkg/model" ) -var listMacrosCmd = &cobra.Command{ - Use: "list-model-macros", - Short: "Print model macros", - Run: func(cmd *cobra.Command, args []string) { - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) - cmd.Println("The following model macros are available (can be extended via custom model macros):") - cmd.Println() - /* TODO finish plugin stuff - cmd.Println("Custom model macros:") - for _, macros := range macros.ListCustomMacros() { - details := macros.GetMacroDetails() - cmd.Println(details.ID, "-->", details.Title) - } - cmd.Println() - */ - cmd.Println("----------------------") - cmd.Println("Built-in model macros:") - cmd.Println("----------------------") - for _, macros := range macros.ListBuiltInMacros() { - details := macros.GetMacroDetails() - cmd.Println(details.ID, "-->", details.Title) - } - cmd.Println() - }, -} +func (what *Threagile) initMacros() *Threagile { + what.rootCmd.AddCommand(&cobra.Command{ + Use: "list-model-macros", + Short: "Print model macros", + Run: func(cmd *cobra.Command, args []string) { + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println("The following model macros are available (can be extended via custom model macros):") + cmd.Println() + /* TODO finish plugin stuff + cmd.Println("Custom model macros:") + for _, macros := range macros.ListCustomMacros() { + details := macros.GetMacroDetails() + cmd.Println(details.ID, "-->", details.Title) + } + cmd.Println() + */ + cmd.Println("----------------------") + cmd.Println("Built-in model macros:") + cmd.Println("----------------------") + for _, macros := 
range macros.ListBuiltInMacros() { + details := macros.GetMacroDetails() + cmd.Println(details.ID, "-->", details.Title) + } + cmd.Println() + }, + }) -var explainMacrosCmd = &cobra.Command{ - Use: "explain-model-macros", - Short: "Explain model macros", - Run: func(cmd *cobra.Command, args []string) { - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) - cmd.Println("Explanation for the model macros:") - cmd.Println() - /* TODO finish plugin stuff - cmd.Println("Custom model macros:") - for _, macros := range macros.ListCustomMacros() { - details := macros.GetMacroDetails() - cmd.Println(details.ID, "-->", details.Title) - } - cmd.Println() - */ - cmd.Println("----------------------") - cmd.Println("Built-in model macros:") - cmd.Println("----------------------") - for _, macros := range macros.ListBuiltInMacros() { - details := macros.GetMacroDetails() - cmd.Printf("%v: %v\n", details.ID, details.Title) - } + what.rootCmd.AddCommand(&cobra.Command{ + Use: "explain-model-macros", + Short: "Explain model macros", + Run: func(cmd *cobra.Command, args []string) { + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println("Explanation for the model macros:") + cmd.Println() + /* TODO finish plugin stuff + cmd.Println("Custom model macros:") + for _, macros := range macros.ListCustomMacros() { + details := macros.GetMacroDetails() + cmd.Println(details.ID, "-->", details.Title) + } + cmd.Println() + */ + cmd.Println("----------------------") + cmd.Println("Built-in model macros:") + cmd.Println("----------------------") + for _, macros := range macros.ListBuiltInMacros() { + details := macros.GetMacroDetails() + cmd.Printf("%v: %v\n", details.ID, details.Title) + } - cmd.Println() - }, -} + cmd.Println() + }, + }) -var executeModelMacrosCmd = &cobra.Command{ - Use: "execute-model-macro", - Short: "Execute model macro", - Args: cobra.ExactArgs(1), - RunE: func(cmd *cobra.Command, args []string) error { - cfg := readConfig(cmd, "buildTimestamp") - 
progressReporter := common.DefaultProgressReporter{Verbose: cfg.Verbose} + what.rootCmd.AddCommand(&cobra.Command{ + Use: "execute-model-macro", + Short: "Execute model macro", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + cfg := what.readConfig(cmd, "buildTimestamp") + progressReporter := common.DefaultProgressReporter{Verbose: cfg.Verbose} - r, err := model.ReadAndAnalyzeModel(*cfg, progressReporter) - if err != nil { - return fmt.Errorf("unable to read and analyze model: %v", err) - } + r, err := model.ReadAndAnalyzeModel(*cfg, progressReporter) + if err != nil { + return fmt.Errorf("unable to read and analyze model: %v", err) + } - macrosId := args[0] - err = macros.ExecuteModelMacro(r.ModelInput, cfg.InputFile, r.ParsedModel, macrosId) - if err != nil { - return fmt.Errorf("unable to execute model macro: %v", err) - } - return nil - }, -} + macrosId := args[0] + err = macros.ExecuteModelMacro(r.ModelInput, cfg.InputFile, r.ParsedModel, macrosId) + if err != nil { + return fmt.Errorf("unable to execute model macro: %v", err) + } + return nil + }, + }) -func init() { - rootCmd.AddCommand(listMacrosCmd) - rootCmd.AddCommand(explainMacrosCmd) - rootCmd.AddCommand(executeModelMacrosCmd) + return what } diff --git a/internal/threagile/root.go b/internal/threagile/root.go index ebc2c88a..85f21307 100644 --- a/internal/threagile/root.go +++ b/internal/threagile/root.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package threagile import ( @@ -19,157 +20,153 @@ import ( "github.com/threagile/threagile/pkg/server" ) -var rootCmd = &cobra.Command{ - Use: "threagile", - Short: "\n" + docs.Logo, - Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\n" + docs.Examples, - RunE: func(cmd *cobra.Command, args []string) error { - cfg := readConfig(cmd, "buildTimestamp") - commands := readCommands() - progressReporter := common.DefaultProgressReporter{Verbose: cfg.Verbose} - - r, err := model.ReadAndAnalyzeModel(*cfg, 
progressReporter) - if err != nil { - cmd.Println("Failed to read and analyze model") - return err - } - - err = report.Generate(cfg, r, commands, progressReporter) - if err != nil { - cmd.Println("Failed to generate reports") - cmd.PrintErr(err) - return err - } - return nil - }, - CompletionOptions: cobra.CompletionOptions{ - DisableDefaultCmd: true, - }, -} - -var serverCmd = &cobra.Command{ - Use: "server", - Short: "Run server", - RunE: func(cmd *cobra.Command, args []string) error { - cfg := readConfig(cmd, "buildTimestamp") - server.RunServer(cfg) - return nil - }, -} - -func Execute() { - err := rootCmd.Execute() - if err != nil { - os.Exit(1) +func (what *Threagile) initRoot() *Threagile { + what.rootCmd = &cobra.Command{ + Use: "threagile", + Short: "\n" + docs.Logo, + Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\n" + docs.Examples, + RunE: func(cmd *cobra.Command, args []string) error { + cfg := what.readConfig(cmd, "buildTimestamp") + commands := what.readCommands() + progressReporter := common.DefaultProgressReporter{Verbose: cfg.Verbose} + + r, err := model.ReadAndAnalyzeModel(*cfg, progressReporter) + if err != nil { + cmd.Println("Failed to read and analyze model") + return err + } + + err = report.Generate(cfg, r, commands, progressReporter) + if err != nil { + cmd.Println("Failed to generate reports") + cmd.PrintErr(err) + return err + } + return nil + }, + CompletionOptions: cobra.CompletionOptions{ + DisableDefaultCmd: true, + }, + } + + serverCmd := &cobra.Command{ + Use: "server", + Short: "Run server", + RunE: func(cmd *cobra.Command, args []string) error { + cfg := what.readConfig(cmd, "buildTimestamp") + server.RunServer(cfg) + return nil + }, } -} -func init() { cfg := new(common.Config).Defaults("") - appDirFlag = rootCmd.PersistentFlags().String(appDirFlagName, cfg.AppFolder, "app folder") - binDirFlag = rootCmd.PersistentFlags().String(binDirFlagName, cfg.BinFolder, "binary folder location") - outputDirFlag = 
rootCmd.PersistentFlags().String(outputFlagName, cfg.OutputFolder, "output directory") - tempDirFlag = rootCmd.PersistentFlags().String(tempDirFlagName, cfg.TempFolder, "temporary folder location") - inputFileFlag = rootCmd.PersistentFlags().String(inputFileFlagName, cfg.InputFile, "input model yaml file") - raaPluginFlag = rootCmd.PersistentFlags().String(raaPluginFlagName, cfg.RAAPlugin, "RAA calculation run file name") + what.rootCmd.PersistentFlags().StringVar(&what.flags.appDirFlag, appDirFlagName, cfg.AppFolder, "app folder") + what.rootCmd.PersistentFlags().StringVar(&what.flags.binDirFlag, binDirFlagName, cfg.BinFolder, "binary folder location") + what.rootCmd.PersistentFlags().StringVar(&what.flags.outputDirFlag, outputFlagName, cfg.OutputFolder, "output directory") + what.rootCmd.PersistentFlags().StringVar(&what.flags.tempDirFlag, tempDirFlagName, cfg.TempFolder, "temporary folder location") + + what.rootCmd.PersistentFlags().StringVar(&what.flags.inputFileFlag, inputFileFlagName, cfg.InputFile, "input model yaml file") + what.rootCmd.PersistentFlags().StringVar(&what.flags.raaPluginFlag, raaPluginFlagName, cfg.RAAPlugin, "RAA calculation run file name") + + serverCmd.PersistentFlags().IntVar(&what.flags.serverPortFlag, serverPortFlagName, cfg.ServerPort, "the server port") + serverCmd.PersistentFlags().StringVar(&what.flags.serverDirFlag, serverDirFlagName, cfg.DataFolder, "base folder for server mode (default: "+common.DataDir+")") - serverPortFlag = serverCmd.PersistentFlags().Int(serverPortFlagName, cfg.ServerPort, "the server port") - serverDirFlag = serverCmd.PersistentFlags().String(serverDirFlagName, cfg.DataFolder, "base folder for server mode (default: "+common.DataDir+")") + what.rootCmd.PersistentFlags().BoolVarP(&what.flags.verboseFlag, verboseFlagName, verboseFlagShorthand, cfg.Verbose, "verbose output") - verboseFlag = rootCmd.PersistentFlags().BoolP(verboseFlagName, verboseFlagShorthand, cfg.Verbose, "verbose output") + 
what.rootCmd.PersistentFlags().StringVar(&what.flags.configFlag, configFlagName, "", "config file") - configFlag = rootCmd.PersistentFlags().String(configFlagName, "", "config file") + what.rootCmd.PersistentFlags().StringVar(&what.flags.customRiskRulesPluginFlag, customRiskRulesPluginFlagName, strings.Join(cfg.RiskRulesPlugins, ","), "comma-separated list of plugins file names with custom risk rules to load") + what.rootCmd.PersistentFlags().IntVar(&what.flags.diagramDpiFlag, diagramDpiFlagName, cfg.DiagramDPI, "DPI used to render: maximum is "+fmt.Sprintf("%d", common.MaxGraphvizDPI)+"") + what.rootCmd.PersistentFlags().StringVar(&what.flags.skipRiskRulesFlag, skipRiskRulesFlagName, cfg.SkipRiskRules, "comma-separated list of risk rules (by their ID) to skip") + what.rootCmd.PersistentFlags().BoolVar(&what.flags.ignoreOrphanedRiskTrackingFlag, ignoreOrphanedRiskTrackingFlagName, cfg.IgnoreOrphanedRiskTracking, "ignore orphaned risk tracking (just log them) not matching a concrete risk") + what.rootCmd.PersistentFlags().StringVar(&what.flags.templateFileNameFlag, templateFileNameFlagName, cfg.TemplateFilename, "background pdf file") - customRiskRulesPluginFlag = rootCmd.PersistentFlags().String(customRiskRulesPluginFlagName, strings.Join(cfg.RiskRulesPlugins, ","), "comma-separated list of plugins file names with custom risk rules to load") - diagramDpiFlag = rootCmd.PersistentFlags().Int(diagramDpiFlagName, cfg.DiagramDPI, "DPI used to render: maximum is "+fmt.Sprintf("%d", common.MaxGraphvizDPI)+"") - skipRiskRulesFlag = rootCmd.PersistentFlags().String(skipRiskRulesFlagName, cfg.SkipRiskRules, "comma-separated list of risk rules (by their ID) to skip") - ignoreOrphandedRiskTrackingFlag = rootCmd.PersistentFlags().Bool(ignoreOrphandedRiskTrackingFlagName, cfg.IgnoreOrphanedRiskTracking, "ignore orphaned risk tracking (just log them) not matching a concrete risk") - templateFileNameFlag = rootCmd.PersistentFlags().String(templateFileNameFlagName, 
cfg.TemplateFilename, "background pdf file") + what.rootCmd.PersistentFlags().BoolVar(&what.flags.generateDataFlowDiagramFlag, generateDataFlowDiagramFlagName, true, "generate data flow diagram") + what.rootCmd.PersistentFlags().BoolVar(&what.flags.generateDataAssetDiagramFlag, generateDataAssetDiagramFlagName, true, "generate data asset diagram") + what.rootCmd.PersistentFlags().BoolVar(&what.flags.generateRisksJSONFlag, generateRisksJSONFlagName, true, "generate risks json") + what.rootCmd.PersistentFlags().BoolVar(&what.flags.generateTechnicalAssetsJSONFlag, generateTechnicalAssetsJSONFlagName, true, "generate technical assets json") + what.rootCmd.PersistentFlags().BoolVar(&what.flags.generateStatsJSONFlag, generateStatsJSONFlagName, true, "generate stats json") + what.rootCmd.PersistentFlags().BoolVar(&what.flags.generateRisksExcelFlag, generateRisksExcelFlagName, true, "generate risks excel") + what.rootCmd.PersistentFlags().BoolVar(&what.flags.generateTagsExcelFlag, generateTagsExcelFlagName, true, "generate tags excel") + what.rootCmd.PersistentFlags().BoolVar(&what.flags.generateReportPDFFlag, generateReportPDFFlagName, true, "generate report pdf, including diagrams") - generateDataFlowDiagramFlag = rootCmd.PersistentFlags().Bool(generateDataFlowDiagramFlagName, true, "generate data flow diagram") - generateDataAssetDiagramFlag = rootCmd.PersistentFlags().Bool(generateDataAssetDiagramFlagName, true, "generate data asset diagram") - generateRisksJSONFlag = rootCmd.PersistentFlags().Bool(generateRisksJSONFlagName, true, "generate risks json") - generateTechnicalAssetsJSONFlag = rootCmd.PersistentFlags().Bool(generateTechnicalAssetsJSONFlagName, true, "generate technical assets json") - generateStatsJSONFlag = rootCmd.PersistentFlags().Bool(generateStatsJSONFlagName, true, "generate stats json") - generateRisksExcelFlag = rootCmd.PersistentFlags().Bool(generateRisksExcelFlagName, true, "generate risks excel") - generateTagsExcelFlag = 
rootCmd.PersistentFlags().Bool(generateTagsExcelFlagName, true, "generate tags excel") - generateReportPDFFlag = rootCmd.PersistentFlags().Bool(generateReportPDFFlagName, true, "generate report pdf, including diagrams") + what.rootCmd.AddCommand(serverCmd) - rootCmd.AddCommand(serverCmd) + return what } -func readCommands() *report.GenerateCommands { +func (what *Threagile) readCommands() *report.GenerateCommands { commands := new(report.GenerateCommands).Defaults() - commands.DataFlowDiagram = *generateDataFlowDiagramFlag - commands.DataAssetDiagram = *generateDataAssetDiagramFlag - commands.RisksJSON = *generateRisksJSONFlag - commands.StatsJSON = *generateStatsJSONFlag - commands.TechnicalAssetsJSON = *generateTechnicalAssetsJSONFlag - commands.RisksExcel = *generateRisksExcelFlag - commands.TagsExcel = *generateTagsExcelFlag - commands.ReportPDF = *generateReportPDFFlag + commands.DataFlowDiagram = what.flags.generateDataFlowDiagramFlag + commands.DataAssetDiagram = what.flags.generateDataAssetDiagramFlag + commands.RisksJSON = what.flags.generateRisksJSONFlag + commands.StatsJSON = what.flags.generateStatsJSONFlag + commands.TechnicalAssetsJSON = what.flags.generateTechnicalAssetsJSONFlag + commands.RisksExcel = what.flags.generateRisksExcelFlag + commands.TagsExcel = what.flags.generateTagsExcelFlag + commands.ReportPDF = what.flags.generateReportPDFFlag return commands } -func readConfig(cmd *cobra.Command, buildTimestamp string) *common.Config { +func (what *Threagile) readConfig(cmd *cobra.Command, buildTimestamp string) *common.Config { cfg := new(common.Config).Defaults(buildTimestamp) - configError := cfg.Load(*configFlag) + configError := cfg.Load(what.flags.configFlag) if configError != nil { - fmt.Printf("WARNING: failed to load config file %q: %v\n", *configFlag, configError) + fmt.Printf("WARNING: failed to load config file %q: %v\n", what.flags.configFlag, configError) } flags := cmd.Flags() - if isFlagOverriden(flags, serverPortFlagName) { - 
cfg.ServerPort = *serverPortFlag + if isFlagOverridden(flags, serverPortFlagName) { + cfg.ServerPort = what.flags.serverPortFlag } - if isFlagOverriden(flags, serverDirFlagName) { - cfg.ServerFolder = expandPath(*serverDirFlag) + if isFlagOverridden(flags, serverDirFlagName) { + cfg.ServerFolder = expandPath(what.flags.serverDirFlag) } - if isFlagOverriden(flags, appDirFlagName) { - cfg.AppFolder = expandPath(*appDirFlag) + if isFlagOverridden(flags, appDirFlagName) { + cfg.AppFolder = expandPath(what.flags.appDirFlag) } - if isFlagOverriden(flags, binDirFlagName) { - cfg.BinFolder = expandPath(*binDirFlag) + if isFlagOverridden(flags, binDirFlagName) { + cfg.BinFolder = expandPath(what.flags.binDirFlag) } - if isFlagOverriden(flags, outputFlagName) { - cfg.OutputFolder = expandPath(*outputDirFlag) + if isFlagOverridden(flags, outputFlagName) { + cfg.OutputFolder = expandPath(what.flags.outputDirFlag) } - if isFlagOverriden(flags, tempDirFlagName) { - cfg.TempFolder = expandPath(*tempDirFlag) + if isFlagOverridden(flags, tempDirFlagName) { + cfg.TempFolder = expandPath(what.flags.tempDirFlag) } - if isFlagOverriden(flags, verboseFlagName) { - cfg.Verbose = *verboseFlag + if isFlagOverridden(flags, verboseFlagName) { + cfg.Verbose = what.flags.verboseFlag } - if isFlagOverriden(flags, inputFileFlagName) { - cfg.InputFile = expandPath(*inputFileFlag) + if isFlagOverridden(flags, inputFileFlagName) { + cfg.InputFile = expandPath(what.flags.inputFileFlag) } - if isFlagOverriden(flags, raaPluginFlagName) { - cfg.RAAPlugin = *raaPluginFlag + if isFlagOverridden(flags, raaPluginFlagName) { + cfg.RAAPlugin = what.flags.raaPluginFlag } - if isFlagOverriden(flags, customRiskRulesPluginFlagName) { - cfg.RiskRulesPlugins = strings.Split(*customRiskRulesPluginFlag, ",") + if isFlagOverridden(flags, customRiskRulesPluginFlagName) { + cfg.RiskRulesPlugins = strings.Split(what.flags.customRiskRulesPluginFlag, ",") } - if isFlagOverriden(flags, skipRiskRulesFlagName) { - 
cfg.SkipRiskRules = *skipRiskRulesFlag + if isFlagOverridden(flags, skipRiskRulesFlagName) { + cfg.SkipRiskRules = what.flags.skipRiskRulesFlag } - if isFlagOverriden(flags, ignoreOrphandedRiskTrackingFlagName) { - cfg.IgnoreOrphanedRiskTracking = *ignoreOrphandedRiskTrackingFlag + if isFlagOverridden(flags, ignoreOrphanedRiskTrackingFlagName) { + cfg.IgnoreOrphanedRiskTracking = what.flags.ignoreOrphanedRiskTrackingFlag } - if isFlagOverriden(flags, diagramDpiFlagName) { - cfg.DiagramDPI = *diagramDpiFlag + if isFlagOverridden(flags, diagramDpiFlagName) { + cfg.DiagramDPI = what.flags.diagramDpiFlag } - if isFlagOverriden(flags, templateFileNameFlagName) { - cfg.TemplateFilename = *templateFileNameFlag + if isFlagOverridden(flags, templateFileNameFlagName) { + cfg.TemplateFilename = what.flags.templateFileNameFlag } return cfg } -func isFlagOverriden(flags *pflag.FlagSet, flagName string) bool { +func isFlagOverridden(flags *pflag.FlagSet, flagName string) bool { flag := flags.Lookup(flagName) if flag == nil { return false diff --git a/internal/threagile/rules.go b/internal/threagile/rules.go index 2388837a..376755c8 100644 --- a/internal/threagile/rules.go +++ b/internal/threagile/rules.go @@ -16,62 +16,61 @@ import ( "github.com/threagile/threagile/pkg/docs" ) -var listRiskRules = &cobra.Command{ - Use: "list-risk-rules", - Short: "Print available risk rules", - RunE: func(cmd *cobra.Command, args []string) error { - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) - cmd.Println("The following risk rules are available (can be extended via custom risk rules):") - cmd.Println() - cmd.Println("----------------------") - cmd.Println("Custom risk rules:") - cmd.Println("----------------------") - customRiskRules := model.LoadCustomRiskRules(strings.Split(*customRiskRulesPluginFlag, ","), common.DefaultProgressReporter{Verbose: *verboseFlag}) - for id, customRule := range customRiskRules { - cmd.Println(id, "-->", customRule.Category.Title, "--> with tags:", 
customRule.Tags) - } - cmd.Println() - cmd.Println("--------------------") - cmd.Println("Built-in risk rules:") - cmd.Println("--------------------") - cmd.Println() - for _, rule := range risks.GetBuiltInRiskRules() { - cmd.Println(rule.Category().Id, "-->", rule.Category().Title, "--> with tags:", rule.SupportedTags()) - } +func (what *Threagile) initRules() *Threagile { + what.rootCmd.AddCommand(&cobra.Command{ + Use: "list-risk-rules", + Short: "Print available risk rules", + RunE: func(cmd *cobra.Command, args []string) error { + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println("The following risk rules are available (can be extended via custom risk rules):") + cmd.Println() + cmd.Println("----------------------") + cmd.Println("Custom risk rules:") + cmd.Println("----------------------") + customRiskRules := model.LoadCustomRiskRules(strings.Split(what.flags.customRiskRulesPluginFlag, ","), common.DefaultProgressReporter{Verbose: what.flags.verboseFlag}) + for id, customRule := range customRiskRules { + cmd.Println(id, "-->", customRule.Category.Title, "--> with tags:", customRule.Tags) + } + cmd.Println() + cmd.Println("--------------------") + cmd.Println("Built-in risk rules:") + cmd.Println("--------------------") + cmd.Println() + for _, rule := range risks.GetBuiltInRiskRules() { + cmd.Println(rule.Category().Id, "-->", rule.Category().Title, "--> with tags:", rule.SupportedTags()) + } - return nil - }, -} + return nil + }, + }) -var explainRiskRules = &cobra.Command{ - Use: "explain-risk-rules", - Short: "Detailed explanation of all the risk rules", - RunE: func(cmd *cobra.Command, args []string) error { - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) - cmd.Println("Explanation for risk rules:") - cmd.Println() - cmd.Println("----------------------") - cmd.Println("Custom risk rules:") - cmd.Println("----------------------") - customRiskRules := model.LoadCustomRiskRules(strings.Split(*customRiskRulesPluginFlag, ","), 
common.DefaultProgressReporter{Verbose: *verboseFlag}) - for _, customRule := range customRiskRules { - cmd.Printf("%v: %v\n", customRule.Category.Id, customRule.Category.Description) - } - cmd.Println() - cmd.Println("--------------------") - cmd.Println("Built-in risk rules:") - cmd.Println("--------------------") - cmd.Println() - for _, rule := range risks.GetBuiltInRiskRules() { - cmd.Printf("%v: %v\n", rule.Category().Id, rule.Category().Description) - } - cmd.Println() + what.rootCmd.AddCommand(&cobra.Command{ + Use: "explain-risk-rules", + Short: "Detailed explanation of all the risk rules", + RunE: func(cmd *cobra.Command, args []string) error { + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println("Explanation for risk rules:") + cmd.Println() + cmd.Println("----------------------") + cmd.Println("Custom risk rules:") + cmd.Println("----------------------") + customRiskRules := model.LoadCustomRiskRules(strings.Split(what.flags.customRiskRulesPluginFlag, ","), common.DefaultProgressReporter{Verbose: what.flags.verboseFlag}) + for _, customRule := range customRiskRules { + cmd.Printf("%v: %v\n", customRule.Category.Id, customRule.Category.Description) + } + cmd.Println() + cmd.Println("--------------------") + cmd.Println("Built-in risk rules:") + cmd.Println("--------------------") + cmd.Println() + for _, rule := range risks.GetBuiltInRiskRules() { + cmd.Printf("%v: %v\n", rule.Category().Id, rule.Category().Description) + } + cmd.Println() - return nil - }, -} + return nil + }, + }) -func init() { - rootCmd.AddCommand(listRiskRules) - rootCmd.AddCommand(explainRiskRules) + return what } diff --git a/internal/threagile/threagile.go b/internal/threagile/threagile.go new file mode 100644 index 00000000..fb84ded3 --- /dev/null +++ b/internal/threagile/threagile.go @@ -0,0 +1,22 @@ +package threagile + +import ( + "github.com/spf13/cobra" + "os" +) + +type Threagile struct { + flags Flags + rootCmd *cobra.Command +} + +func (what *Threagile) 
Execute() { + err := what.rootCmd.Execute() + if err != nil { + os.Exit(1) + } +} + +func (what *Threagile) Init() *Threagile { + return what.initRoot().initAbout().initRules().initExamples().initMacros().initTypes() +} diff --git a/internal/threagile/types.go b/internal/threagile/types.go index aa4c1a9a..25472859 100644 --- a/internal/threagile/types.go +++ b/internal/threagile/types.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package threagile import ( @@ -12,40 +13,39 @@ import ( "github.com/threagile/threagile/pkg/security/types" ) -var listTypesCmd = &cobra.Command{ - Use: "list-types", - Short: "Print type information (enum values to be used in models)", - Run: func(cmd *cobra.Command, args []string) { - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) - cmd.Println() - cmd.Println() - cmd.Println("The following types are available (can be extended for custom rules):") - cmd.Println() - for name, values := range types.GetBuiltinTypeValues() { - cmd.Println(fmt.Sprintf(" %v: %v", name, values)) - } - }, -} +func (what *Threagile) initTypes() *Threagile { + what.rootCmd.AddCommand(&cobra.Command{ + Use: "list-types", + Short: "Print type information (enum values to be used in models)", + Run: func(cmd *cobra.Command, args []string) { + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println() + cmd.Println() + cmd.Println("The following types are available (can be extended for custom rules):") + cmd.Println() + for name, values := range types.GetBuiltinTypeValues() { + cmd.Println(fmt.Sprintf(" %v: %v", name, values)) + } + }, + }) -var explainTypesCmd = &cobra.Command{ - Use: "explain-types", - Short: "Print type information (enum values to be used in models)", - Run: func(cmd *cobra.Command, args []string) { - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) - fmt.Println("Explanation for the types:") - cmd.Println() - cmd.Println("The following types are available (can be extended for custom rules):") - cmd.Println() - for name, values 
:= range types.GetBuiltinTypeValues() { - cmd.Println(name) - for _, candidate := range values { - cmd.Printf("\t %v: %v\n", candidate, candidate.Explain()) + what.rootCmd.AddCommand(&cobra.Command{ + Use: "explain-types", + Short: "Print type information (enum values to be used in models)", + Run: func(cmd *cobra.Command, args []string) { + cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + fmt.Println("Explanation for the types:") + cmd.Println() + cmd.Println("The following types are available (can be extended for custom rules):") + cmd.Println() + for name, values := range types.GetBuiltinTypeValues() { + cmd.Println(name) + for _, candidate := range values { + cmd.Printf("\t %v: %v\n", candidate, candidate.Explain()) + } } - } - }, -} + }, + }) -func init() { - rootCmd.AddCommand(listTypesCmd) - rootCmd.AddCommand(explainTypesCmd) + return what } diff --git a/pkg/input/input.go b/pkg/input/input.go index 27c598f1..26a61c1c 100644 --- a/pkg/input/input.go +++ b/pkg/input/input.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package input import ( @@ -17,6 +18,7 @@ import ( type Author struct { Name string `yaml:"name" json:"name"` + Contact string `yaml:"contact" json:"contact"` Homepage string `yaml:"homepage" json:"homepage"` } @@ -25,7 +27,7 @@ type Overview struct { Images []map[string]string `yaml:"images" json:"images"` // yes, array of map here, as array keeps the order of the image keys } -type InputDataAsset struct { +type DataAsset struct { ID string `yaml:"id" json:"id"` Description string `yaml:"description" json:"description"` Usage string `yaml:"usage" json:"usage"` @@ -39,36 +41,36 @@ type InputDataAsset struct { JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` } -type InputTechnicalAsset struct { - ID string `yaml:"id" json:"id"` - Description string `yaml:"description" json:"description"` - Type string `yaml:"type" json:"type"` - Usage string `yaml:"usage" json:"usage"` - 
UsedAsClientByHuman bool `yaml:"used_as_client_by_human" json:"used_as_client_by_human"` - OutOfScope bool `yaml:"out_of_scope" json:"out_of_scope"` - JustificationOutOfScope string `yaml:"justification_out_of_scope" json:"justification_out_of_scope"` - Size string `yaml:"size" json:"size"` - Technology string `yaml:"technology" json:"technology"` - Tags []string `yaml:"tags" json:"tags"` - Internet bool `yaml:"internet" json:"internet"` - Machine string `yaml:"machine" json:"machine"` - Encryption string `yaml:"encryption" json:"encryption"` - Owner string `yaml:"owner" json:"owner"` - Confidentiality string `yaml:"confidentiality" json:"confidentiality"` - Integrity string `yaml:"integrity" json:"integrity"` - Availability string `yaml:"availability" json:"availability"` - JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` - MultiTenant bool `yaml:"multi_tenant" json:"multi_tenant"` - Redundant bool `yaml:"redundant" json:"redundant"` - CustomDevelopedParts bool `yaml:"custom_developed_parts" json:"custom_developed_parts"` - DataAssetsProcessed []string `yaml:"data_assets_processed" json:"data_assets_processed"` - DataAssetsStored []string `yaml:"data_assets_stored" json:"data_assets_stored"` - DataFormatsAccepted []string `yaml:"data_formats_accepted" json:"data_formats_accepted"` - DiagramTweakOrder int `yaml:"diagram_tweak_order" json:"diagram_tweak_order"` - CommunicationLinks map[string]InputCommunicationLink `yaml:"communication_links" json:"communication_links"` +type TechnicalAsset struct { + ID string `yaml:"id" json:"id"` + Description string `yaml:"description" json:"description"` + Type string `yaml:"type" json:"type"` + Usage string `yaml:"usage" json:"usage"` + UsedAsClientByHuman bool `yaml:"used_as_client_by_human" json:"used_as_client_by_human"` + OutOfScope bool `yaml:"out_of_scope" json:"out_of_scope"` + JustificationOutOfScope string `yaml:"justification_out_of_scope" 
json:"justification_out_of_scope"` + Size string `yaml:"size" json:"size"` + Technology string `yaml:"technology" json:"technology"` + Tags []string `yaml:"tags" json:"tags"` + Internet bool `yaml:"internet" json:"internet"` + Machine string `yaml:"machine" json:"machine"` + Encryption string `yaml:"encryption" json:"encryption"` + Owner string `yaml:"owner" json:"owner"` + Confidentiality string `yaml:"confidentiality" json:"confidentiality"` + Integrity string `yaml:"integrity" json:"integrity"` + Availability string `yaml:"availability" json:"availability"` + JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` + MultiTenant bool `yaml:"multi_tenant" json:"multi_tenant"` + Redundant bool `yaml:"redundant" json:"redundant"` + CustomDevelopedParts bool `yaml:"custom_developed_parts" json:"custom_developed_parts"` + DataAssetsProcessed []string `yaml:"data_assets_processed" json:"data_assets_processed"` + DataAssetsStored []string `yaml:"data_assets_stored" json:"data_assets_stored"` + DataFormatsAccepted []string `yaml:"data_formats_accepted" json:"data_formats_accepted"` + DiagramTweakOrder int `yaml:"diagram_tweak_order" json:"diagram_tweak_order"` + CommunicationLinks map[string]CommunicationLink `yaml:"communication_links" json:"communication_links"` } -type InputCommunicationLink struct { +type CommunicationLink struct { Target string `yaml:"target" json:"target"` Description string `yaml:"description" json:"description"` Protocol string `yaml:"protocol" json:"protocol"` @@ -85,14 +87,14 @@ type InputCommunicationLink struct { DiagramTweakConstraint bool `yaml:"diagram_tweak_constraint" json:"diagram_tweak_constraint"` } -type InputSharedRuntime struct { +type SharedRuntime struct { ID string `yaml:"id" json:"id"` Description string `yaml:"description" json:"description"` Tags []string `yaml:"tags" json:"tags"` TechnicalAssetsRunning []string `yaml:"technical_assets_running" json:"technical_assets_running"` } -type 
InputTrustBoundary struct { +type TrustBoundary struct { ID string `yaml:"id" json:"id"` Description string `yaml:"description" json:"description"` Type string `yaml:"type" json:"type"` @@ -101,26 +103,26 @@ type InputTrustBoundary struct { TrustBoundariesNested []string `yaml:"trust_boundaries_nested" json:"trust_boundaries_nested"` } -type InputIndividualRiskCategory struct { - ID string `yaml:"id" json:"id"` - Description string `yaml:"description" json:"description"` - Impact string `yaml:"impact" json:"impact"` - ASVS string `yaml:"asvs" json:"asvs"` - CheatSheet string `yaml:"cheat_sheet" json:"cheat_sheet"` - Action string `yaml:"action" json:"action"` - Mitigation string `yaml:"mitigation" json:"mitigation"` - Check string `yaml:"check" json:"check"` - Function string `yaml:"function" json:"function"` - STRIDE string `yaml:"stride" json:"stride"` - DetectionLogic string `yaml:"detection_logic" json:"detection_logic"` - RiskAssessment string `yaml:"risk_assessment" json:"risk_assessment"` - FalsePositives string `yaml:"false_positives" json:"false_positives"` - ModelFailurePossibleReason bool `yaml:"model_failure_possible_reason" json:"model_failure_possible_reason"` - CWE int `yaml:"cwe" json:"cwe"` - RisksIdentified map[string]InputRiskIdentified `yaml:"risks_identified" json:"risks_identified"` +type IndividualRiskCategory struct { + ID string `yaml:"id" json:"id"` + Description string `yaml:"description" json:"description"` + Impact string `yaml:"impact" json:"impact"` + ASVS string `yaml:"asvs" json:"asvs"` + CheatSheet string `yaml:"cheat_sheet" json:"cheat_sheet"` + Action string `yaml:"action" json:"action"` + Mitigation string `yaml:"mitigation" json:"mitigation"` + Check string `yaml:"check" json:"check"` + Function string `yaml:"function" json:"function"` + STRIDE string `yaml:"stride" json:"stride"` + DetectionLogic string `yaml:"detection_logic" json:"detection_logic"` + RiskAssessment string `yaml:"risk_assessment" json:"risk_assessment"` + 
FalsePositives string `yaml:"false_positives" json:"false_positives"` + ModelFailurePossibleReason bool `yaml:"model_failure_possible_reason" json:"model_failure_possible_reason"` + CWE int `yaml:"cwe" json:"cwe"` + RisksIdentified map[string]RiskIdentified `yaml:"risks_identified" json:"risks_identified"` } -type InputRiskIdentified struct { +type RiskIdentified struct { Severity string `yaml:"severity" json:"severity"` ExploitationLikelihood string `yaml:"exploitation_likelihood" json:"exploitation_likelihood"` ExploitationImpact string `yaml:"exploitation_impact" json:"exploitation_impact"` @@ -133,7 +135,7 @@ type InputRiskIdentified struct { MostRelevantSharedRuntime string `yaml:"most_relevant_shared_runtime" json:"most_relevant_shared_runtime"` } -type InputRiskTracking struct { +type RiskTracking struct { Status string `yaml:"status" json:"status"` Justification string `yaml:"justification" json:"justification"` Ticket string `yaml:"ticket" json:"ticket"` @@ -141,52 +143,54 @@ type InputRiskTracking struct { CheckedBy string `yaml:"checked_by" json:"checked_by"` } -type ModelInput struct { // TODO: Eventually remove this and directly use ParsedModelRoot? But then the error messages for model errors are not quite as good anymore... 
- Includes []string `yaml:"includes,omitempty" json:"includes,omitempty"` - ThreagileVersion string `yaml:"threagile_version" json:"threagile_version"` - Title string `yaml:"title" json:"title"` - Author Author `yaml:"author" json:"author"` - Date string `yaml:"date" json:"date"` - BusinessOverview Overview `yaml:"business_overview" json:"business_overview"` - TechnicalOverview Overview `yaml:"technical_overview" json:"technical_overview"` - BusinessCriticality string `yaml:"business_criticality" json:"business_criticality"` - ManagementSummaryComment string `yaml:"management_summary_comment" json:"management_summary_comment"` - Questions map[string]string `yaml:"questions" json:"questions"` - AbuseCases map[string]string `yaml:"abuse_cases" json:"abuse_cases"` - SecurityRequirements map[string]string `yaml:"security_requirements" json:"security_requirements"` - TagsAvailable []string `yaml:"tags_available,omitempty" json:"tags_available,omitempty"` - DataAssets map[string]InputDataAsset `yaml:"data_assets" json:"data_assets"` - TechnicalAssets map[string]InputTechnicalAsset `yaml:"technical_assets" json:"technical_assets"` - TrustBoundaries map[string]InputTrustBoundary `yaml:"trust_boundaries" json:"trust_boundaries"` - SharedRuntimes map[string]InputSharedRuntime `yaml:"shared_runtimes" json:"shared_runtimes"` - IndividualRiskCategories map[string]InputIndividualRiskCategory `yaml:"individual_risk_categories" json:"individual_risk_categories"` - RiskTracking map[string]InputRiskTracking `yaml:"risk_tracking" json:"risk_tracking"` - DiagramTweakNodesep int `yaml:"diagram_tweak_nodesep" json:"diagram_tweak_nodesep"` - DiagramTweakRanksep int `yaml:"diagram_tweak_ranksep" json:"diagram_tweak_ranksep"` - DiagramTweakEdgeLayout string `yaml:"diagram_tweak_edge_layout" json:"diagram_tweak_edge_layout"` - DiagramTweakSuppressEdgeLabels bool `yaml:"diagram_tweak_suppress_edge_labels" json:"diagram_tweak_suppress_edge_labels"` - DiagramTweakLayoutLeftToRight bool 
`yaml:"diagram_tweak_layout_left_to_right" json:"diagram_tweak_layout_left_to_right"` - DiagramTweakInvisibleConnectionsBetweenAssets []string `yaml:"diagram_tweak_invisible_connections_between_assets,omitempty" json:"diagram_tweak_invisible_connections_between_assets,omitempty"` - DiagramTweakSameRankAssets []string `yaml:"diagram_tweak_same_rank_assets,omitempty" json:"diagram_tweak_same_rank_assets,omitempty"` +type Model struct { // TODO: Eventually remove this and directly use ParsedModelRoot? But then the error messages for model errors are not quite as good anymore... + Includes []string `yaml:"includes,omitempty" json:"includes,omitempty"` + ThreagileVersion string `yaml:"threagile_version" json:"threagile_version"` + Title string `yaml:"title" json:"title"` + Author Author `yaml:"author" json:"author"` + Contributors []Author `yaml:"contributors" json:"contributors"` + Date string `yaml:"date" json:"date"` + AppDescription Overview `yaml:"application_description" json:"application_description"` + BusinessOverview Overview `yaml:"business_overview" json:"business_overview"` + TechnicalOverview Overview `yaml:"technical_overview" json:"technical_overview"` + BusinessCriticality string `yaml:"business_criticality" json:"business_criticality"` + ManagementSummaryComment string `yaml:"management_summary_comment" json:"management_summary_comment"` + Questions map[string]string `yaml:"questions" json:"questions"` + AbuseCases map[string]string `yaml:"abuse_cases" json:"abuse_cases"` + SecurityRequirements map[string]string `yaml:"security_requirements" json:"security_requirements"` + TagsAvailable []string `yaml:"tags_available,omitempty" json:"tags_available,omitempty"` + DataAssets map[string]DataAsset `yaml:"data_assets" json:"data_assets"` + TechnicalAssets map[string]TechnicalAsset `yaml:"technical_assets" json:"technical_assets"` + TrustBoundaries map[string]TrustBoundary `yaml:"trust_boundaries" json:"trust_boundaries"` + SharedRuntimes 
map[string]SharedRuntime `yaml:"shared_runtimes" json:"shared_runtimes"` + IndividualRiskCategories map[string]IndividualRiskCategory `yaml:"individual_risk_categories" json:"individual_risk_categories"` + RiskTracking map[string]RiskTracking `yaml:"risk_tracking" json:"risk_tracking"` + DiagramTweakNodesep int `yaml:"diagram_tweak_nodesep" json:"diagram_tweak_nodesep"` + DiagramTweakRanksep int `yaml:"diagram_tweak_ranksep" json:"diagram_tweak_ranksep"` + DiagramTweakEdgeLayout string `yaml:"diagram_tweak_edge_layout" json:"diagram_tweak_edge_layout"` + DiagramTweakSuppressEdgeLabels bool `yaml:"diagram_tweak_suppress_edge_labels" json:"diagram_tweak_suppress_edge_labels"` + DiagramTweakLayoutLeftToRight bool `yaml:"diagram_tweak_layout_left_to_right" json:"diagram_tweak_layout_left_to_right"` + DiagramTweakInvisibleConnectionsBetweenAssets []string `yaml:"diagram_tweak_invisible_connections_between_assets,omitempty" json:"diagram_tweak_invisible_connections_between_assets,omitempty"` + DiagramTweakSameRankAssets []string `yaml:"diagram_tweak_same_rank_assets,omitempty" json:"diagram_tweak_same_rank_assets,omitempty"` } -func (model *ModelInput) Defaults() *ModelInput { - *model = ModelInput{ +func (model *Model) Defaults() *Model { + *model = Model{ Questions: make(map[string]string), AbuseCases: make(map[string]string), SecurityRequirements: make(map[string]string), - DataAssets: make(map[string]InputDataAsset), - TechnicalAssets: make(map[string]InputTechnicalAsset), - TrustBoundaries: make(map[string]InputTrustBoundary), - SharedRuntimes: make(map[string]InputSharedRuntime), - IndividualRiskCategories: make(map[string]InputIndividualRiskCategory), - RiskTracking: make(map[string]InputRiskTracking), + DataAssets: make(map[string]DataAsset), + TechnicalAssets: make(map[string]TechnicalAsset), + TrustBoundaries: make(map[string]TrustBoundary), + SharedRuntimes: make(map[string]SharedRuntime), + IndividualRiskCategories: make(map[string]IndividualRiskCategory), + 
RiskTracking: make(map[string]RiskTracking), } return model } -func (model *ModelInput) Load(inputFilename string) error { +func (model *Model) Load(inputFilename string) error { modelYaml, readError := os.ReadFile(inputFilename) if readError != nil { log.Fatal("Unable to read model file: ", readError) @@ -227,7 +231,7 @@ func (slice UniqueStringSlice) Merge(otherSlice []string) []string { return valueSlice } -func (model *ModelInput) Merge(dir string, includeFilename string) error { +func (model *Model) Merge(dir string, includeFilename string) error { modelYaml, readError := os.ReadFile(filepath.Join(dir, includeFilename)) if readError != nil { return fmt.Errorf("unable to read model file: %v", readError) @@ -239,7 +243,7 @@ func (model *ModelInput) Merge(dir string, includeFilename string) error { return fmt.Errorf("unable to parse model structure: %v", unmarshalStructureError) } - var includedModel ModelInput + var includedModel Model unmarshalError := yaml.Unmarshal(modelYaml, &includedModel) if unmarshalError != nil { return fmt.Errorf("unable to parse model yaml: %v", unmarshalError) @@ -378,7 +382,7 @@ func (model *ModelInput) Merge(dir string, includeFilename string) error { return nil } -func AddTagToModelInput(modelInput *ModelInput, tag string, dryRun bool, changes *[]string) { +func AddTagToModelInput(modelInput *Model, tag string, dryRun bool, changes *[]string) { tag = NormalizeTag(tag) if !contains(modelInput.TagsAvailable, tag) { *changes = append(*changes, "adding tag: "+tag) diff --git a/pkg/macros/add-build-pipeline-macro.go b/pkg/macros/add-build-pipeline-macro.go index 6d486cc3..27fdf561 100644 --- a/pkg/macros/add-build-pipeline-macro.go +++ b/pkg/macros/add-build-pipeline-macro.go @@ -257,19 +257,19 @@ func (m *addBuildPipeline) GoBack() (message string, validResult bool, err error return "Undo successful", true, nil } -func (m *addBuildPipeline) GetFinalChangeImpact(modelInput *input.ModelInput, model *types.ParsedModel) (changes []string, 
message string, validResult bool, err error) { +func (m *addBuildPipeline) GetFinalChangeImpact(modelInput *input.Model, model *types.ParsedModel) (changes []string, message string, validResult bool, err error) { changeLogCollector := make([]string, 0) message, validResult, err = m.applyChange(modelInput, model, &changeLogCollector, true) return changeLogCollector, message, validResult, err } -func (m *addBuildPipeline) Execute(modelInput *input.ModelInput, model *types.ParsedModel) (message string, validResult bool, err error) { +func (m *addBuildPipeline) Execute(modelInput *input.Model, model *types.ParsedModel) (message string, validResult bool, err error) { changeLogCollector := make([]string, 0) message, validResult, err = m.applyChange(modelInput, model, &changeLogCollector, false) return message, validResult, err } -func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, changeLogCollector *[]string, dryRun bool) (message string, validResult bool, err error) { +func (m *addBuildPipeline) applyChange(modelInput *input.Model, parsedModel *types.ParsedModel, changeLogCollector *[]string, dryRun bool) (message string, validResult bool, err error) { var serverSideTechAssets = make([]string, 0) // ################################################ input.AddTagToModelInput(modelInput, m.macroState["source-repository"][0], dryRun, changeLogCollector) @@ -300,7 +300,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel if _, exists := parsedModel.DataAssets["Sourcecode"]; !exists { //fmt.Println("Adding data asset:", "sourcecode") // ################################################ - dataAsset := input.InputDataAsset{ + dataAsset := input.DataAsset{ ID: "sourcecode", Description: "Sourcecode to build the application components from", Usage: types.DevOps.String(), @@ -322,7 +322,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel if _, exists := 
parsedModel.DataAssets["Deployment"]; !exists { //fmt.Println("Adding data asset:", "deployment") // ################################################ - dataAsset := input.InputDataAsset{ + dataAsset := input.DataAsset{ ID: "deployment", Description: "Deployment unit being installed/shipped", Usage: types.DevOps.String(), @@ -350,8 +350,8 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel encryption = types.Transparent.String() } - commLinks := make(map[string]input.InputCommunicationLink) - commLinks["Sourcecode Repository Traffic"] = input.InputCommunicationLink{ + commLinks := make(map[string]input.CommunicationLink) + commLinks["Sourcecode Repository Traffic"] = input.CommunicationLink{ Target: sourceRepoID, Description: "Sourcecode Repository Traffic", Protocol: types.HTTPS.String(), @@ -367,7 +367,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel DiagramTweakWeight: 0, DiagramTweakConstraint: false, } - commLinks["Build Pipeline Traffic"] = input.InputCommunicationLink{ + commLinks["Build Pipeline Traffic"] = input.CommunicationLink{ Target: buildPipelineID, Description: "Build Pipeline Traffic", Protocol: types.HTTPS.String(), @@ -383,7 +383,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel DiagramTweakWeight: 0, DiagramTweakConstraint: false, } - commLinks["Artifact Registry Traffic"] = input.InputCommunicationLink{ + commLinks["Artifact Registry Traffic"] = input.CommunicationLink{ Target: artifactRegistryID, Description: "Artifact Registry Traffic", Protocol: types.HTTPS.String(), @@ -400,7 +400,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel DiagramTweakConstraint: false, } if m.containerTechUsed { - commLinks["Container Registry Traffic"] = input.InputCommunicationLink{ + commLinks["Container Registry Traffic"] = input.CommunicationLink{ Target: containerRepoID, Description: "Container Registry Traffic", Protocol: 
types.HTTPS.String(), @@ -416,7 +416,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel DiagramTweakWeight: 0, DiagramTweakConstraint: false, } - commLinks["Container Platform Traffic"] = input.InputCommunicationLink{ + commLinks["Container Platform Traffic"] = input.CommunicationLink{ Target: containerPlatformID, Description: "Container Platform Traffic", Protocol: types.HTTPS.String(), @@ -434,7 +434,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel } } if m.codeInspectionUsed { - commLinks["Code Inspection Platform Traffic"] = input.InputCommunicationLink{ + commLinks["Code Inspection Platform Traffic"] = input.CommunicationLink{ Target: codeInspectionPlatformID, Description: "Code Inspection Platform Traffic", Protocol: types.HTTPS.String(), @@ -452,7 +452,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel } } - techAsset := input.InputTechnicalAsset{ + techAsset := input.TechnicalAsset{ ID: id, Description: "Development Client", Type: types.ExternalEntity.String(), @@ -494,7 +494,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel if strings.ToLower(m.macroState["encryption"][0]) == "yes" { encryption = types.Transparent.String() } - techAsset := input.InputTechnicalAsset{ + techAsset := input.TechnicalAsset{ ID: id, Description: m.macroState["source-repository"][0] + " Sourcecode Repository", Type: types.Process.String(), @@ -537,7 +537,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel if strings.ToLower(m.macroState["encryption"][0]) == "yes" { encryption = types.Transparent.String() } - techAsset := input.InputTechnicalAsset{ + techAsset := input.TechnicalAsset{ ID: id, Description: m.macroState["container-registry"][0] + " Container Registry", Type: types.Process.String(), @@ -579,7 +579,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel if 
strings.ToLower(m.macroState["encryption"][0]) == "yes" { encryption = types.Transparent.String() } - techAsset := input.InputTechnicalAsset{ + techAsset := input.TechnicalAsset{ ID: id, Description: m.macroState["container-platform"][0] + " Container Platform", Type: types.Process.String(), @@ -623,8 +623,8 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel encryption = types.Transparent.String() } - commLinks := make(map[string]input.InputCommunicationLink) - commLinks["Sourcecode Repository Traffic"] = input.InputCommunicationLink{ + commLinks := make(map[string]input.CommunicationLink) + commLinks["Sourcecode Repository Traffic"] = input.CommunicationLink{ Target: sourceRepoID, Description: "Sourcecode Repository Traffic", Protocol: types.HTTPS.String(), @@ -640,7 +640,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel DiagramTweakWeight: 0, DiagramTweakConstraint: false, } - commLinks["Artifact Registry Traffic"] = input.InputCommunicationLink{ + commLinks["Artifact Registry Traffic"] = input.CommunicationLink{ Target: artifactRegistryID, Description: "Artifact Registry Traffic", Protocol: types.HTTPS.String(), @@ -657,7 +657,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel DiagramTweakConstraint: false, } if m.containerTechUsed { - commLinks["Container Registry Traffic"] = input.InputCommunicationLink{ + commLinks["Container Registry Traffic"] = input.CommunicationLink{ Target: containerRepoID, Description: "Container Registry Traffic", Protocol: types.HTTPS.String(), @@ -674,7 +674,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel DiagramTweakConstraint: false, } if m.macroState["push-or-pull"][0] == pushOrPull[0] { // Push - commLinks["Container Platform Push"] = input.InputCommunicationLink{ + commLinks["Container Platform Push"] = input.CommunicationLink{ Target: containerPlatformID, Description: "Container Platform 
Push", Protocol: types.HTTPS.String(), @@ -691,7 +691,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel DiagramTweakConstraint: false, } } else { // Pull - commLinkPull := input.InputCommunicationLink{ + commLinkPull := input.CommunicationLink{ Target: containerRepoID, Description: "Container Platform Pull", Protocol: types.HTTPS.String(), @@ -711,7 +711,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel titleOfTargetAsset := m.macroState["container-platform"][0] + " Container Platform" containerPlatform := modelInput.TechnicalAssets[titleOfTargetAsset] if containerPlatform.CommunicationLinks == nil { - containerPlatform.CommunicationLinks = make(map[string]input.InputCommunicationLink) + containerPlatform.CommunicationLinks = make(map[string]input.CommunicationLink) } containerPlatform.CommunicationLinks["Container Platform Pull"] = commLinkPull modelInput.TechnicalAssets[titleOfTargetAsset] = containerPlatform @@ -719,7 +719,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel } } if m.codeInspectionUsed { - commLinks["Code Inspection Platform Traffic"] = input.InputCommunicationLink{ + commLinks["Code Inspection Platform Traffic"] = input.CommunicationLink{ Target: codeInspectionPlatformID, Description: "Code Inspection Platform Traffic", Protocol: types.HTTPS.String(), @@ -743,9 +743,9 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel if !dryRun { containerPlatform := modelInput.TechnicalAssets[m.macroState["container-platform"][0]+" Container Platform"] if containerPlatform.CommunicationLinks == nil { - containerPlatform.CommunicationLinks = make(map[string]input.InputCommunicationLink) + containerPlatform.CommunicationLinks = make(map[string]input.CommunicationLink) } - containerPlatform.CommunicationLinks["Container Spawning ("+deployTargetID+")"] = input.InputCommunicationLink{ + 
containerPlatform.CommunicationLinks["Container Spawning ("+deployTargetID+")"] = input.CommunicationLink{ Target: deployTargetID, Description: "Container Spawning " + deployTargetID, Protocol: types.ContainerSpawning.String(), @@ -765,7 +765,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel } } else { // No Containers used if m.macroState["push-or-pull"][0] == pushOrPull[0] { // Push - commLinks["Deployment Push ("+deployTargetID+")"] = input.InputCommunicationLink{ + commLinks["Deployment Push ("+deployTargetID+")"] = input.CommunicationLink{ Target: deployTargetID, Description: "Deployment Push to " + deployTargetID, Protocol: types.SSH.String(), @@ -783,7 +783,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel } } else { // Pull pullFromWhere := artifactRegistryID - commLinkPull := input.InputCommunicationLink{ + commLinkPull := input.CommunicationLink{ Target: pullFromWhere, Description: "Deployment Pull from " + deployTargetID, Protocol: types.HTTPS.String(), @@ -804,7 +804,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel titleOfTargetAsset := parsedModel.TechnicalAssets[deployTargetID].Title x := modelInput.TechnicalAssets[titleOfTargetAsset] if x.CommunicationLinks == nil { - x.CommunicationLinks = make(map[string]input.InputCommunicationLink) + x.CommunicationLinks = make(map[string]input.CommunicationLink) } x.CommunicationLinks["Deployment Pull ("+deployTargetID+")"] = commLinkPull modelInput.TechnicalAssets[titleOfTargetAsset] = x @@ -832,7 +832,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel } } - techAsset := input.InputTechnicalAsset{ + techAsset := input.TechnicalAsset{ ID: id, Description: m.macroState["build-pipeline"][0] + " Build Pipeline", Type: types.Process.String(), @@ -874,7 +874,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel if 
strings.ToLower(m.macroState["encryption"][0]) == "yes" { encryption = types.Transparent.String() } - techAsset := input.InputTechnicalAsset{ + techAsset := input.TechnicalAsset{ ID: id, Description: m.macroState["artifact-registry"][0] + " Artifact Registry", Type: types.Process.String(), @@ -917,7 +917,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel if strings.ToLower(m.macroState["encryption"][0]) == "yes" { encryption = types.Transparent.String() } - techAsset := input.InputTechnicalAsset{ + techAsset := input.TechnicalAsset{ ID: id, Description: m.macroState["code-inspection-platform"][0] + " Code Inspection Platform", Type: types.Process.String(), @@ -957,7 +957,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel trustBoundaryType := m.macroState["new-trust-boundary-type"][0] //fmt.Println("Adding new trust boundary of type:", trustBoundaryType) title := "DevOps Network" - trustBoundary := input.InputTrustBoundary{ + trustBoundary := input.TrustBoundary{ ID: "devops-network", Description: "DevOps Network", Type: trustBoundaryType, @@ -988,7 +988,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel *changeLogCollector = append(*changeLogCollector, "filling existing trust boundary: "+existingTrustBoundaryToAddTo) if !dryRun { if modelInput.TrustBoundaries == nil { - modelInput.TrustBoundaries = make(map[string]input.InputTrustBoundary) + modelInput.TrustBoundaries = make(map[string]input.TrustBoundary) } tb := modelInput.TrustBoundaries[title] tb.TechnicalAssetsInside = mergedArrays @@ -1004,7 +1004,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel assetsRunning = append(assetsRunning, deployTargetID) } title := m.macroState["container-platform"][0] + " Runtime" - sharedRuntime := input.InputSharedRuntime{ + sharedRuntime := input.SharedRuntime{ ID: containerSharedRuntimeID, Description: title, Tags: 
[]string{input.NormalizeTag(m.macroState["container-platform"][0])}, @@ -1013,7 +1013,7 @@ func (m *addBuildPipeline) applyChange(modelInput *input.ModelInput, parsedModel *changeLogCollector = append(*changeLogCollector, "adding shared runtime: "+containerSharedRuntimeID) if !dryRun { if modelInput.SharedRuntimes == nil { - modelInput.SharedRuntimes = make(map[string]input.InputSharedRuntime) + modelInput.SharedRuntimes = make(map[string]input.SharedRuntime) } modelInput.SharedRuntimes[title] = sharedRuntime } diff --git a/pkg/macros/add-vault-macro.go b/pkg/macros/add-vault-macro.go index 9dddc5f1..cf8edc03 100644 --- a/pkg/macros/add-vault-macro.go +++ b/pkg/macros/add-vault-macro.go @@ -173,25 +173,25 @@ func (m *addVaultMacro) GoBack() (message string, validResult bool, err error) { return "Undo successful", true, nil } -func (m *addVaultMacro) GetFinalChangeImpact(modelInput *input.ModelInput, parsedModel *types.ParsedModel) (changes []string, message string, validResult bool, err error) { +func (m *addVaultMacro) GetFinalChangeImpact(modelInput *input.Model, parsedModel *types.ParsedModel) (changes []string, message string, validResult bool, err error) { changeLogCollector := make([]string, 0) message, validResult, err = m.applyChange(modelInput, parsedModel, &changeLogCollector, true) return changeLogCollector, message, validResult, err } -func (m *addVaultMacro) Execute(modelInput *input.ModelInput, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { +func (m *addVaultMacro) Execute(modelInput *input.Model, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { changeLogCollector := make([]string, 0) message, validResult, err = m.applyChange(modelInput, parsedModel, &changeLogCollector, false) return message, validResult, err } -func (m *addVaultMacro) applyChange(modelInput *input.ModelInput, parsedModel *types.ParsedModel, changeLogCollector *[]string, dryRun bool) (message string, validResult bool, 
err error) { +func (m *addVaultMacro) applyChange(modelInput *input.Model, parsedModel *types.ParsedModel, changeLogCollector *[]string, dryRun bool) (message string, validResult bool, err error) { input.AddTagToModelInput(modelInput, m.macroState["vault-name"][0], dryRun, changeLogCollector) var serverSideTechAssets = make([]string, 0) if _, exists := parsedModel.DataAssets["Configuration Secrets"]; !exists { - dataAsset := input.InputDataAsset{ + dataAsset := input.DataAsset{ ID: "configuration-secrets", Description: "Configuration secrets (like credentials, keys, certificates, etc.) secured and managed by a vault", Usage: types.DevOps.String(), @@ -223,7 +223,7 @@ func (m *addVaultMacro) applyChange(modelInput *input.ModelInput, parsedModel *t } if _, exists := parsedModel.TechnicalAssets[storageID]; !exists { serverSideTechAssets = append(serverSideTechAssets, storageID) - techAsset := input.InputTechnicalAsset{ + techAsset := input.TechnicalAsset{ ID: storageID, Description: "Vault Storage", Type: types.Datastore.String(), @@ -261,10 +261,10 @@ func (m *addVaultMacro) applyChange(modelInput *input.ModelInput, parsedModel *t if _, exists := parsedModel.TechnicalAssets[vaultID]; !exists { serverSideTechAssets = append(serverSideTechAssets, vaultID) - commLinks := make(map[string]input.InputCommunicationLink) + commLinks := make(map[string]input.CommunicationLink) if databaseUsed || filesystemUsed { - accessLink := input.InputCommunicationLink{ + accessLink := input.CommunicationLink{ Target: storageID, Description: "Vault Storage Access", Protocol: types.LocalFileAccess.String(), @@ -297,7 +297,7 @@ func (m *addVaultMacro) applyChange(modelInput *input.ModelInput, parsedModel *t authentication = types.Credentials.String() } for _, clientID := range m.macroState["clients"] { // add a connection from each client - clientAccessCommLink := input.InputCommunicationLink{ + clientAccessCommLink := input.CommunicationLink{ Target: vaultID, Description: "Vault Access 
Traffic (by " + clientID + ")", Protocol: types.HTTPS.String(), @@ -338,7 +338,7 @@ func (m *addVaultMacro) applyChange(modelInput *input.ModelInput, parsedModel *t } } - techAsset := input.InputTechnicalAsset{ + techAsset := input.TechnicalAsset{ ID: vaultID, Description: m.macroState["vault-name"][0] + " Vault", Type: types.Process.String(), @@ -377,7 +377,7 @@ func (m *addVaultMacro) applyChange(modelInput *input.ModelInput, parsedModel *t vaultEnvID := "vault-environment" if filesystemUsed { title := "Vault Environment" - trustBoundary := input.InputTrustBoundary{ + trustBoundary := input.TrustBoundary{ ID: vaultEnvID, Description: "Vault Environment", Type: types.ExecutionEnvironment.String(), @@ -395,7 +395,7 @@ func (m *addVaultMacro) applyChange(modelInput *input.ModelInput, parsedModel *t if m.createNewTrustBoundary { trustBoundaryType := m.macroState["new-trust-boundary-type"][0] title := "Vault Network" - trustBoundary := input.InputTrustBoundary{ + trustBoundary := input.TrustBoundary{ ID: "vault-network", Description: "Vault Network", Type: trustBoundaryType, diff --git a/pkg/macros/macros.go b/pkg/macros/macros.go index 68ff2272..fbc7977b 100644 --- a/pkg/macros/macros.go +++ b/pkg/macros/macros.go @@ -22,8 +22,8 @@ type Macros interface { GetNextQuestion(model *types.ParsedModel) (nextQuestion MacroQuestion, err error) ApplyAnswer(questionID string, answer ...string) (message string, validResult bool, err error) GoBack() (message string, validResult bool, err error) - GetFinalChangeImpact(modelInput *input.ModelInput, model *types.ParsedModel) (changes []string, message string, validResult bool, err error) - Execute(modelInput *input.ModelInput, model *types.ParsedModel) (message string, validResult bool, err error) + GetFinalChangeImpact(modelInput *input.Model, model *types.ParsedModel) (changes []string, message string, validResult bool, err error) + Execute(modelInput *input.Model, model *types.ParsedModel) (message string, validResult bool, err 
error) } func ListBuiltInMacros() []Macros { @@ -54,7 +54,7 @@ func GetMacroByID(id string) (Macros, error) { return nil, errors.New("unknown macro id: " + id) } -func ExecuteModelMacro(modelInput *input.ModelInput, inputFile string, parsedModel *types.ParsedModel, macroID string) error { +func ExecuteModelMacro(modelInput *input.Model, inputFile string, parsedModel *types.ParsedModel, macroID string) error { macros, err := GetMacroByID(macroID) if err != nil { return err diff --git a/pkg/macros/pretty-print-macro.go b/pkg/macros/pretty-print-macro.go index 76c2dcca..0415da3f 100644 --- a/pkg/macros/pretty-print-macro.go +++ b/pkg/macros/pretty-print-macro.go @@ -32,10 +32,10 @@ func (*prettyPrintMacro) GoBack() (message string, validResult bool, err error) return "Cannot go back further", false, nil } -func (*prettyPrintMacro) GetFinalChangeImpact(_ *input.ModelInput, _ *types.ParsedModel) (changes []string, message string, validResult bool, err error) { +func (*prettyPrintMacro) GetFinalChangeImpact(_ *input.Model, _ *types.ParsedModel) (changes []string, message string, validResult bool, err error) { return []string{"pretty-printing the model file"}, "Changeset valid", true, err } -func (*prettyPrintMacro) Execute(_ *input.ModelInput, _ *types.ParsedModel) (message string, validResult bool, err error) { +func (*prettyPrintMacro) Execute(_ *input.Model, _ *types.ParsedModel) (message string, validResult bool, err error) { return "Model pretty printing successful", true, nil } diff --git a/pkg/macros/remove-unused-tags-macro.go b/pkg/macros/remove-unused-tags-macro.go index 8cab35c3..23072ca0 100644 --- a/pkg/macros/remove-unused-tags-macro.go +++ b/pkg/macros/remove-unused-tags-macro.go @@ -35,11 +35,11 @@ func (*removeUnusedTagsMacro) GoBack() (message string, validResult bool, err er return "Cannot go back further", false, nil } -func (*removeUnusedTagsMacro) GetFinalChangeImpact(_ *input.ModelInput, _ *types.ParsedModel) (changes []string, message string, 
validResult bool, err error) { +func (*removeUnusedTagsMacro) GetFinalChangeImpact(_ *input.Model, _ *types.ParsedModel) (changes []string, message string, validResult bool, err error) { return []string{"remove unused tags from the model file"}, "Changeset valid", true, err } -func (*removeUnusedTagsMacro) Execute(modelInput *input.ModelInput, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { +func (*removeUnusedTagsMacro) Execute(modelInput *input.Model, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { tagUsageMap := make(map[string]bool) for _, tag := range parsedModel.TagsAvailable { tagUsageMap[tag] = false // false = tag is not used diff --git a/pkg/macros/seed-risk-tracking-macro.go b/pkg/macros/seed-risk-tracking-macro.go index 3fdad714..e365f2f2 100644 --- a/pkg/macros/seed-risk-tracking-macro.go +++ b/pkg/macros/seed-risk-tracking-macro.go @@ -35,11 +35,11 @@ func (*seedRiskTrackingMacro) GoBack() (message string, validResult bool, err er return "Cannot go back further", false, nil } -func (*seedRiskTrackingMacro) GetFinalChangeImpact(_ *input.ModelInput, _ *types.ParsedModel) (changes []string, message string, validResult bool, err error) { +func (*seedRiskTrackingMacro) GetFinalChangeImpact(_ *input.Model, _ *types.ParsedModel) (changes []string, message string, validResult bool, err error) { return []string{"seed the model file with with initial risk tracking entries for all untracked risks"}, "Changeset valid", true, err } -func (*seedRiskTrackingMacro) Execute(modelInput *input.ModelInput, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { +func (*seedRiskTrackingMacro) Execute(modelInput *input.Model, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { syntheticRiskIDsToCreateTrackingFor := make([]string, 0) for id, risk := range parsedModel.GeneratedRisksBySyntheticId { if !risk.IsRiskTracked(parsedModel) { @@ -48,10 +48,10 @@ func 
(*seedRiskTrackingMacro) Execute(modelInput *input.ModelInput, parsedModel } sort.Strings(syntheticRiskIDsToCreateTrackingFor) if modelInput.RiskTracking == nil { - modelInput.RiskTracking = make(map[string]input.InputRiskTracking) + modelInput.RiskTracking = make(map[string]input.RiskTracking) } for _, id := range syntheticRiskIDsToCreateTrackingFor { - modelInput.RiskTracking[id] = input.InputRiskTracking{ + modelInput.RiskTracking[id] = input.RiskTracking{ Status: types.Unchecked.String(), Justification: "", Ticket: "", diff --git a/pkg/macros/seed-tags-macro.go b/pkg/macros/seed-tags-macro.go index 54aa990d..48d43159 100644 --- a/pkg/macros/seed-tags-macro.go +++ b/pkg/macros/seed-tags-macro.go @@ -35,11 +35,11 @@ func (*seedTagsMacro) GoBack() (message string, validResult bool, err error) { return "Cannot go back further", false, nil } -func (*seedTagsMacro) GetFinalChangeImpact(_ *input.ModelInput, _ *types.ParsedModel) (changes []string, message string, validResult bool, err error) { +func (*seedTagsMacro) GetFinalChangeImpact(_ *input.Model, _ *types.ParsedModel) (changes []string, message string, validResult bool, err error) { return []string{"seed the model file with supported tags from all risk rules"}, "Changeset valid", true, err } -func (*seedTagsMacro) Execute(modelInput *input.ModelInput, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { +func (*seedTagsMacro) Execute(modelInput *input.Model, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { tagMap := make(map[string]bool) for k, v := range parsedModel.AllSupportedTags { tagMap[k] = v diff --git a/pkg/model/parse.go b/pkg/model/parse.go index 6fad2fec..46894830 100644 --- a/pkg/model/parse.go +++ b/pkg/model/parse.go @@ -13,7 +13,7 @@ import ( "github.com/threagile/threagile/pkg/security/types" ) -func ParseModel(modelInput *input.ModelInput, builtinRiskRules map[string]risks.RiskRule, customRiskRules map[string]*CustomRisk) 
(*types.ParsedModel, error) { +func ParseModel(modelInput *input.Model, builtinRiskRules map[string]risks.RiskRule, customRiskRules map[string]*CustomRisk) (*types.ParsedModel, error) { businessCriticality, err := types.ParseCriticality(modelInput.BusinessCriticality) if err != nil { return nil, errors.New("unknown 'business_criticality' value of application: " + modelInput.BusinessCriticality) diff --git a/pkg/model/read.go b/pkg/model/read.go index c2c79de9..814cf8f5 100644 --- a/pkg/model/read.go +++ b/pkg/model/read.go @@ -18,7 +18,7 @@ type progressReporter interface { } type ReadResult struct { - ModelInput *input.ModelInput + ModelInput *input.Model ParsedModel *types.ParsedModel IntroTextRAA string BuiltinRiskRules map[string]risks.RiskRule @@ -36,7 +36,7 @@ func ReadAndAnalyzeModel(config common.Config, progressReporter progressReporter } customRiskRules := LoadCustomRiskRules(config.RiskRulesPlugins, progressReporter) - modelInput := new(input.ModelInput).Defaults() + modelInput := new(input.Model).Defaults() loadError := modelInput.Load(config.InputFile) if loadError != nil { return nil, fmt.Errorf("unable to load model yaml: %v", loadError) diff --git a/pkg/server/model.go b/pkg/server/model.go index c74ec4b5..85fce9d1 100644 --- a/pkg/server/model.go +++ b/pkg/server/model.go @@ -670,7 +670,7 @@ func (s *server) createNewDataAsset(ginContext *gin.Context) { return } if modelInput.DataAssets == nil { - modelInput.DataAssets = make(map[string]input.InputDataAsset) + modelInput.DataAssets = make(map[string]input.DataAsset) } modelInput.DataAssets[payload.Title] = dataAssetInput ok = s.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Data Asset Creation") @@ -683,7 +683,7 @@ func (s *server) createNewDataAsset(ginContext *gin.Context) { } } -func (s *server) populateDataAsset(ginContext *gin.Context, payload payloadDataAsset) (dataAssetInput input.InputDataAsset, ok bool) { +func (s *server) populateDataAsset(ginContext *gin.Context, payload 
payloadDataAsset) (dataAssetInput input.DataAsset, ok bool) { usage, err := types.ParseUsage(payload.Usage) if err != nil { handleErrorInServiceCall(err, ginContext) @@ -709,7 +709,7 @@ func (s *server) populateDataAsset(ginContext *gin.Context, payload payloadDataA handleErrorInServiceCall(err, ginContext) return dataAssetInput, false } - dataAssetInput = input.InputDataAsset{ + dataAssetInput = input.DataAsset{ ID: payload.Id, Description: payload.Description, Usage: usage.String(), @@ -874,7 +874,7 @@ func (s *server) createNewSharedRuntime(ginContext *gin.Context) { return } if modelInput.SharedRuntimes == nil { - modelInput.SharedRuntimes = make(map[string]input.InputSharedRuntime) + modelInput.SharedRuntimes = make(map[string]input.SharedRuntime) } modelInput.SharedRuntimes[payload.Title] = sharedRuntimeInput ok = s.writeModel(ginContext, key, folderNameOfKey, &modelInput, "Shared Runtime Creation") @@ -887,7 +887,7 @@ func (s *server) createNewSharedRuntime(ginContext *gin.Context) { } } -func checkTechnicalAssetsExisting(modelInput input.ModelInput, techAssetIDs []string) (ok bool) { +func checkTechnicalAssetsExisting(modelInput input.Model, techAssetIDs []string) (ok bool) { for _, techAssetID := range techAssetIDs { exists := false for _, val := range modelInput.TechnicalAssets { @@ -903,8 +903,8 @@ func checkTechnicalAssetsExisting(modelInput input.ModelInput, techAssetIDs []st return true } -func populateSharedRuntime(_ *gin.Context, payload payloadSharedRuntime) (sharedRuntimeInput input.InputSharedRuntime, ok bool) { - sharedRuntimeInput = input.InputSharedRuntime{ +func populateSharedRuntime(_ *gin.Context, payload payloadSharedRuntime) (sharedRuntimeInput input.SharedRuntime, ok bool) { + sharedRuntimeInput = input.SharedRuntime{ ID: payload.Id, Description: payload.Description, Tags: lowerCaseAndTrim(payload.Tags), @@ -971,7 +971,7 @@ func (s *server) getSharedRuntimes(ginContext *gin.Context) { } } -func (s *server) readModel(ginContext 
*gin.Context, modelUUID string, key []byte, folderNameOfKey string) (modelInputResult input.ModelInput, yamlText string, ok bool) { +func (s *server) readModel(ginContext *gin.Context, modelUUID string, key []byte, folderNameOfKey string) (modelInputResult input.Model, yamlText string, ok bool) { modelFolder, ok := s.checkModelFolder(ginContext, modelUUID, folderNameOfKey) if !ok { return modelInputResult, yamlText, false @@ -1024,7 +1024,7 @@ func (s *server) readModel(ginContext *gin.Context, modelUUID string, key []byte } buf := new(bytes.Buffer) _, _ = buf.ReadFrom(r) - modelInput := new(input.ModelInput).Defaults() + modelInput := new(input.Model).Defaults() yamlBytes := buf.Bytes() err = yaml.Unmarshal(yamlBytes, &modelInput) if err != nil { @@ -1037,7 +1037,7 @@ func (s *server) readModel(ginContext *gin.Context, modelUUID string, key []byte return *modelInput, string(yamlBytes), true } -func (s *server) writeModel(ginContext *gin.Context, key []byte, folderNameOfKey string, modelInput *input.ModelInput, changeReasonForHistory string) (ok bool) { +func (s *server) writeModel(ginContext *gin.Context, key []byte, folderNameOfKey string, modelInput *input.Model, changeReasonForHistory string) (ok bool) { modelFolder, ok := s.checkModelFolder(ginContext, ginContext.Param("model-id"), folderNameOfKey) if ok { modelInput.ThreagileVersion = docs.ThreagileVersion diff --git a/support/schema.json b/support/schema.json index b2e5eb5e..c83628a6 100644 --- a/support/schema.json +++ b/support/schema.json @@ -32,6 +32,13 @@ "null" ] }, + "contact": { + "description": "Author contact info", + "type": [ + "string", + "null" + ] + }, "homepage": { "description": "Author homepage", "type": [ @@ -44,6 +51,43 @@ "name" ] }, + "contributors": { + "description": "Contributors to the model", + "type": [ + "array", + "null" + ], + "uniqueItems": true, + "items": { + "type": "object", + "properties": { + "name": { + "description": "Contributor name", + "type": [ + "string", + "null" 
+ ] + }, + "contact": { + "description": "Contributor contact info", + "type": [ + "string", + "null" + ] + }, + "homepage": { + "description": "Contributor homepage", + "type": [ + "string", + "null" + ] + } + }, + "required": [ + "name" + ] + } + }, "management_summary_comment": { "description": "Individual management summary for the report", "type": [ @@ -62,6 +106,27 @@ "mission-critical" ] }, + "application_description": { + "description": "General description of the application, its purpose and functionality.", + "type": "object", + "properties": { + "description": { + "description": "Application description for the report", + "type": [ + "string", + "null" + ] + }, + "images": { + "description": "Application images for the report", + "type": [ + "array", + "null" + ], + "uniqueItems": true + } + } + }, "business_overview": { "description": "Individual business overview for the report", "type": "object", @@ -624,7 +689,7 @@ "description": "VPN", "type": "boolean" }, - "ip_filtered": { + "ip_filtered": { "description": "IP filtered", "type": "boolean" }, From 3e61f05aa2617421b251286969587c9461b8480e Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Wed, 10 Jan 2024 11:03:29 -0800 Subject: [PATCH 50/68] made sure to propagate build timestamp --- cmd/threagile/main.go | 4 ++-- internal/threagile/about.go | 7 ++++--- internal/threagile/examples.go | 13 +++++++------ internal/threagile/macros.go | 6 +++--- internal/threagile/root.go | 6 +++--- internal/threagile/rules.go | 5 +++-- internal/threagile/threagile.go | 8 +++++--- internal/threagile/types.go | 4 ++-- pkg/docs/constants.go | 3 ++- 9 files changed, 31 insertions(+), 25 deletions(-) diff --git a/cmd/threagile/main.go b/cmd/threagile/main.go index c3d008ac..f18adc8e 100644 --- a/cmd/threagile/main.go +++ b/cmd/threagile/main.go @@ -1,7 +1,7 @@ package main import ( - threagile "github.com/threagile/threagile/internal/threagile" + "github.com/threagile/threagile/internal/threagile" ) const ( @@ -9,5 +9,5 
@@ const ( ) func main() { - new(threagile.Threagile).Init().Execute() + new(threagile.Threagile).Init(buildTimestamp).Execute() } diff --git a/internal/threagile/about.go b/internal/threagile/about.go index 635b0c71..48d6a477 100644 --- a/internal/threagile/about.go +++ b/internal/threagile/about.go @@ -6,6 +6,7 @@ package threagile import ( "errors" + "fmt" "os" "path/filepath" @@ -18,13 +19,13 @@ func (what *Threagile) initAbout() *Threagile { what.rootCmd.AddCommand(&cobra.Command{ Use: "version", Short: "Get version information", - Long: "\n" + docs.Logo + "\n\n" + docs.VersionText, + Long: "\n" + docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp), }) what.rootCmd.AddCommand(&cobra.Command{ Use: "print-3rd-party-licenses", Short: "Print 3rd-party license information", - Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\n" + docs.ThirdPartyLicenses, + Long: "\n" + docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp) + "\n\n" + docs.ThirdPartyLicenses, }) what.rootCmd.AddCommand(&cobra.Command{ @@ -36,7 +37,7 @@ func (what *Threagile) initAbout() *Threagile { cmd.Printf("Unable to read app-dir flag: %v", err) return err } - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println(docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp)) if appDir != filepath.Clean(appDir) { // TODO: do we need this check here? 
cmd.Printf("weird app folder %v", appDir) diff --git a/internal/threagile/examples.go b/internal/threagile/examples.go index 9e3c26fb..faa63dd8 100644 --- a/internal/threagile/examples.go +++ b/internal/threagile/examples.go @@ -5,6 +5,7 @@ Copyright © 2023 NAME HERE package threagile import ( + "fmt" "github.com/spf13/cobra" "github.com/threagile/threagile/pkg/docs" "github.com/threagile/threagile/pkg/examples" @@ -14,7 +15,7 @@ func (what *Threagile) initExamples() *Threagile { what.rootCmd.AddCommand(&cobra.Command{ Use: "create-example-model", Short: "Create example threagile model", - Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\njust create an example model named threagile-example-model.yaml in the output directory", + Long: "\n" + docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp) + "\n\njust create an example model named threagile-example-model.yaml in the output directory", RunE: func(cmd *cobra.Command, args []string) error { appDir, err := cmd.Flags().GetString(appDirFlagName) if err != nil { @@ -33,7 +34,7 @@ func (what *Threagile) initExamples() *Threagile { return err } - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println(docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp)) cmd.Println("An example model was created named threagile-example-model.yaml in the output directory.") cmd.Println() cmd.Println(docs.Examples) @@ -45,7 +46,7 @@ func (what *Threagile) initExamples() *Threagile { what.rootCmd.AddCommand(&cobra.Command{ Use: "create-stub-model", Short: "Create stub threagile model", - Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\njust create a minimal stub model named threagile-stub-model.yaml in the output directory", + Long: "\n" + docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp) + "\n\njust create a minimal stub model named threagile-stub-model.yaml in the output directory", RunE: func(cmd *cobra.Command, args []string) error { appDir, err := 
cmd.Flags().GetString(appDirFlagName) if err != nil { @@ -64,7 +65,7 @@ func (what *Threagile) initExamples() *Threagile { return err } - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println(docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp)) cmd.Println("A minimal stub model was created named threagile-stub-model.yaml in the output directory.") cmd.Println() cmd.Println(docs.Examples) @@ -76,7 +77,7 @@ func (what *Threagile) initExamples() *Threagile { what.rootCmd.AddCommand(&cobra.Command{ Use: "create-editing-support", Short: "Create editing support", - Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\njust create some editing support stuff in the output directory", + Long: "\n" + docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp) + "\n\njust create some editing support stuff in the output directory", RunE: func(cmd *cobra.Command, args []string) error { appDir, err := cmd.Flags().GetString(appDirFlagName) if err != nil { @@ -95,7 +96,7 @@ func (what *Threagile) initExamples() *Threagile { return err } - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println(docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp)) cmd.Println("The following files were created in the output directory:") cmd.Println(" - schema.json") cmd.Println(" - live-templates.txt") diff --git a/internal/threagile/macros.go b/internal/threagile/macros.go index a9b68f0b..c4a58990 100644 --- a/internal/threagile/macros.go +++ b/internal/threagile/macros.go @@ -20,7 +20,7 @@ func (what *Threagile) initMacros() *Threagile { Use: "list-model-macros", Short: "Print model macros", Run: func(cmd *cobra.Command, args []string) { - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println(docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp)) cmd.Println("The following model macros are available (can be extended via custom model macros):") cmd.Println() /* TODO finish plugin stuff @@ -46,7 
+46,7 @@ func (what *Threagile) initMacros() *Threagile { Use: "explain-model-macros", Short: "Explain model macros", Run: func(cmd *cobra.Command, args []string) { - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println(docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp)) cmd.Println("Explanation for the model macros:") cmd.Println() /* TODO finish plugin stuff @@ -74,7 +74,7 @@ func (what *Threagile) initMacros() *Threagile { Short: "Execute model macro", Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - cfg := what.readConfig(cmd, "buildTimestamp") + cfg := what.readConfig(cmd, what.buildTimestamp) progressReporter := common.DefaultProgressReporter{Verbose: cfg.Verbose} r, err := model.ReadAndAnalyzeModel(*cfg, progressReporter) diff --git a/internal/threagile/root.go b/internal/threagile/root.go index 85f21307..e16aa65a 100644 --- a/internal/threagile/root.go +++ b/internal/threagile/root.go @@ -24,9 +24,9 @@ func (what *Threagile) initRoot() *Threagile { what.rootCmd = &cobra.Command{ Use: "threagile", Short: "\n" + docs.Logo, - Long: "\n" + docs.Logo + "\n\n" + docs.VersionText + "\n\n" + docs.Examples, + Long: "\n" + docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp) + "\n\n" + docs.Examples, RunE: func(cmd *cobra.Command, args []string) error { - cfg := what.readConfig(cmd, "buildTimestamp") + cfg := what.readConfig(cmd, what.buildTimestamp) commands := what.readCommands() progressReporter := common.DefaultProgressReporter{Verbose: cfg.Verbose} @@ -53,7 +53,7 @@ func (what *Threagile) initRoot() *Threagile { Use: "server", Short: "Run server", RunE: func(cmd *cobra.Command, args []string) error { - cfg := what.readConfig(cmd, "buildTimestamp") + cfg := what.readConfig(cmd, what.buildTimestamp) server.RunServer(cfg) return nil }, diff --git a/internal/threagile/rules.go b/internal/threagile/rules.go index 376755c8..ae0fb582 100644 --- a/internal/threagile/rules.go +++ 
b/internal/threagile/rules.go @@ -5,6 +5,7 @@ Copyright © 2023 NAME HERE package threagile import ( + "fmt" "strings" "github.com/threagile/threagile/pkg/common" @@ -21,7 +22,7 @@ func (what *Threagile) initRules() *Threagile { Use: "list-risk-rules", Short: "Print available risk rules", RunE: func(cmd *cobra.Command, args []string) error { - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println(docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp)) cmd.Println("The following risk rules are available (can be extended via custom risk rules):") cmd.Println() cmd.Println("----------------------") @@ -48,7 +49,7 @@ func (what *Threagile) initRules() *Threagile { Use: "explain-risk-rules", Short: "Detailed explanation of all the risk rules", RunE: func(cmd *cobra.Command, args []string) error { - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println(docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp)) cmd.Println("Explanation for risk rules:") cmd.Println() cmd.Println("----------------------") diff --git a/internal/threagile/threagile.go b/internal/threagile/threagile.go index fb84ded3..20d0a22f 100644 --- a/internal/threagile/threagile.go +++ b/internal/threagile/threagile.go @@ -6,8 +6,9 @@ import ( ) type Threagile struct { - flags Flags - rootCmd *cobra.Command + flags Flags + rootCmd *cobra.Command + buildTimestamp string } func (what *Threagile) Execute() { @@ -17,6 +18,7 @@ func (what *Threagile) Execute() { } } -func (what *Threagile) Init() *Threagile { +func (what *Threagile) Init(buildTimestamp string) *Threagile { + what.buildTimestamp = buildTimestamp return what.initRoot().initAbout().initRules().initExamples().initMacros().initTypes() } diff --git a/internal/threagile/types.go b/internal/threagile/types.go index 25472859..3aad1787 100644 --- a/internal/threagile/types.go +++ b/internal/threagile/types.go @@ -18,7 +18,7 @@ func (what *Threagile) initTypes() *Threagile { Use: "list-types", Short: 
"Print type information (enum values to be used in models)", Run: func(cmd *cobra.Command, args []string) { - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println(docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp)) cmd.Println() cmd.Println() cmd.Println("The following types are available (can be extended for custom rules):") @@ -33,7 +33,7 @@ func (what *Threagile) initTypes() *Threagile { Use: "explain-types", Short: "Print type information (enum values to be used in models)", Run: func(cmd *cobra.Command, args []string) { - cmd.Println(docs.Logo + "\n\n" + docs.VersionText) + cmd.Println(docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp)) fmt.Println("Explanation for the types:") cmd.Println() cmd.Println("The following types are available (can be extended for custom rules):") diff --git a/pkg/docs/constants.go b/pkg/docs/constants.go index 98c6cd70..6b52eb28 100644 --- a/pkg/docs/constants.go +++ b/pkg/docs/constants.go @@ -1,6 +1,7 @@ /* Copyright © 2023 NAME HERE */ + package docs const ( @@ -11,7 +12,7 @@ const ( "Docker Images: https://hub.docker.com/r/threagile/threagile\n" + "Sourcecode: https://github.com/threagile\n" + "License: Open-Source (MIT License)" + - "Version: " + ThreagileVersion // TODO: add buildTimestamp + " (" + buildTimestamp + ")" + "Version: " + ThreagileVersion + " (%v)" Examples = "Examples:\n\n" + "If you want to create an example model (via docker) as a starting point to learn about Threagile just run: \n" + " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile create-example-model -output app/work \n\n" + From 89676853b902c203293b487410a215816c1122d3 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Wed, 10 Jan 2024 11:23:47 -0800 Subject: [PATCH 51/68] made sure to omit empty fields what marshaling inpuyt structs --- pkg/input/input.go | 242 ++++++++++++++++++++++----------------------- 1 file changed, 121 insertions(+), 121 deletions(-) diff --git 
a/pkg/input/input.go b/pkg/input/input.go index 26a61c1c..d69d234f 100644 --- a/pkg/input/input.go +++ b/pkg/input/input.go @@ -17,159 +17,159 @@ import ( // === Model Type Stuff ====================================== type Author struct { - Name string `yaml:"name" json:"name"` - Contact string `yaml:"contact" json:"contact"` - Homepage string `yaml:"homepage" json:"homepage"` + Name string `yaml:"name,omitempty" json:"name,omitempty"` + Contact string `yaml:"contact,omitempty" json:"contact,omitempty"` + Homepage string `yaml:"homepage,omitempty" json:"homepage,omitempty"` } type Overview struct { - Description string `yaml:"description" json:"description"` - Images []map[string]string `yaml:"images" json:"images"` // yes, array of map here, as array keeps the order of the image keys + Description string `yaml:"description,omitempty" json:"description,omitempty"` + Images []map[string]string `yaml:"images,omitempty" json:"images,omitempty"` // yes, array of map here, as array keeps the order of the image keys } type DataAsset struct { - ID string `yaml:"id" json:"id"` - Description string `yaml:"description" json:"description"` - Usage string `yaml:"usage" json:"usage"` - Tags []string `yaml:"tags" json:"tags"` - Origin string `yaml:"origin" json:"origin"` - Owner string `yaml:"owner" json:"owner"` - Quantity string `yaml:"quantity" json:"quantity"` - Confidentiality string `yaml:"confidentiality" json:"confidentiality"` - Integrity string `yaml:"integrity" json:"integrity"` - Availability string `yaml:"availability" json:"availability"` - JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` + ID string `yaml:"id,omitempty" json:"id,omitempty"` + Description string `yaml:"description,omitempty" json:"description,omitempty"` + Usage string `yaml:"usage,omitempty" json:"usage,omitempty"` + Tags []string `yaml:"tags,omitempty" json:"tags,omitempty"` + Origin string `yaml:"origin,omitempty" json:"origin,omitempty"` + Owner 
string `yaml:"owner,omitempty" json:"owner,omitempty"` + Quantity string `yaml:"quantity,omitempty" json:"quantity,omitempty"` + Confidentiality string `yaml:"confidentiality,omitempty" json:"confidentiality,omitempty"` + Integrity string `yaml:"integrity,omitempty" json:"integrity,omitempty"` + Availability string `yaml:"availability,omitempty" json:"availability,omitempty"` + JustificationCiaRating string `yaml:"justification_cia_rating,omitempty" json:"justification_cia_rating,omitempty"` } type TechnicalAsset struct { - ID string `yaml:"id" json:"id"` - Description string `yaml:"description" json:"description"` - Type string `yaml:"type" json:"type"` - Usage string `yaml:"usage" json:"usage"` - UsedAsClientByHuman bool `yaml:"used_as_client_by_human" json:"used_as_client_by_human"` - OutOfScope bool `yaml:"out_of_scope" json:"out_of_scope"` - JustificationOutOfScope string `yaml:"justification_out_of_scope" json:"justification_out_of_scope"` - Size string `yaml:"size" json:"size"` - Technology string `yaml:"technology" json:"technology"` - Tags []string `yaml:"tags" json:"tags"` - Internet bool `yaml:"internet" json:"internet"` - Machine string `yaml:"machine" json:"machine"` - Encryption string `yaml:"encryption" json:"encryption"` - Owner string `yaml:"owner" json:"owner"` - Confidentiality string `yaml:"confidentiality" json:"confidentiality"` - Integrity string `yaml:"integrity" json:"integrity"` - Availability string `yaml:"availability" json:"availability"` - JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` - MultiTenant bool `yaml:"multi_tenant" json:"multi_tenant"` - Redundant bool `yaml:"redundant" json:"redundant"` - CustomDevelopedParts bool `yaml:"custom_developed_parts" json:"custom_developed_parts"` - DataAssetsProcessed []string `yaml:"data_assets_processed" json:"data_assets_processed"` - DataAssetsStored []string `yaml:"data_assets_stored" json:"data_assets_stored"` - DataFormatsAccepted []string 
`yaml:"data_formats_accepted" json:"data_formats_accepted"` - DiagramTweakOrder int `yaml:"diagram_tweak_order" json:"diagram_tweak_order"` - CommunicationLinks map[string]CommunicationLink `yaml:"communication_links" json:"communication_links"` + ID string `yaml:"id,omitempty" json:"id,omitempty"` + Description string `yaml:"description,omitempty" json:"description,omitempty"` + Type string `yaml:"type,omitempty" json:"type,omitempty"` + Usage string `yaml:"usage,omitempty" json:"usage,omitempty"` + UsedAsClientByHuman bool `yaml:"used_as_client_by_human,omitempty" json:"used_as_client_by_human,omitempty"` + OutOfScope bool `yaml:"out_of_scope,omitempty" json:"out_of_scope,omitempty"` + JustificationOutOfScope string `yaml:"justification_out_of_scope,omitempty" json:"justification_out_of_scope,omitempty"` + Size string `yaml:"size,omitempty" json:"size,omitempty"` + Technology string `yaml:"technology,omitempty" json:"technology,omitempty"` + Tags []string `yaml:"tags,omitempty" json:"tags,omitempty"` + Internet bool `yaml:"internet,omitempty" json:"internet,omitempty"` + Machine string `yaml:"machine,omitempty" json:"machine,omitempty"` + Encryption string `yaml:"encryption,omitempty" json:"encryption,omitempty"` + Owner string `yaml:"owner,omitempty" json:"owner,omitempty"` + Confidentiality string `yaml:"confidentiality,omitempty" json:"confidentiality,omitempty"` + Integrity string `yaml:"integrity,omitempty" json:"integrity,omitempty"` + Availability string `yaml:"availability,omitempty" json:"availability,omitempty"` + JustificationCiaRating string `yaml:"justification_cia_rating,omitempty" json:"justification_cia_rating,omitempty"` + MultiTenant bool `yaml:"multi_tenant,omitempty" json:"multi_tenant,omitempty"` + Redundant bool `yaml:"redundant,omitempty" json:"redundant,omitempty"` + CustomDevelopedParts bool `yaml:"custom_developed_parts,omitempty" json:"custom_developed_parts,omitempty"` + DataAssetsProcessed []string 
`yaml:"data_assets_processed,omitempty" json:"data_assets_processed,omitempty"` + DataAssetsStored []string `yaml:"data_assets_stored,omitempty" json:"data_assets_stored,omitempty"` + DataFormatsAccepted []string `yaml:"data_formats_accepted,omitempty" json:"data_formats_accepted,omitempty"` + DiagramTweakOrder int `yaml:"diagram_tweak_order,omitempty" json:"diagram_tweak_order,omitempty"` + CommunicationLinks map[string]CommunicationLink `yaml:"communication_links,omitempty" json:"communication_links,omitempty"` } type CommunicationLink struct { - Target string `yaml:"target" json:"target"` - Description string `yaml:"description" json:"description"` - Protocol string `yaml:"protocol" json:"protocol"` - Authentication string `yaml:"authentication" json:"authentication"` - Authorization string `yaml:"authorization" json:"authorization"` - Tags []string `yaml:"tags" json:"tags"` - VPN bool `yaml:"vpn" json:"vpn"` - IpFiltered bool `yaml:"ip_filtered" json:"ip_filtered"` - Readonly bool `yaml:"readonly" json:"readonly"` - Usage string `yaml:"usage" json:"usage"` - DataAssetsSent []string `yaml:"data_assets_sent" json:"data_assets_sent"` - DataAssetsReceived []string `yaml:"data_assets_received" json:"data_assets_received"` - DiagramTweakWeight int `yaml:"diagram_tweak_weight" json:"diagram_tweak_weight"` - DiagramTweakConstraint bool `yaml:"diagram_tweak_constraint" json:"diagram_tweak_constraint"` + Target string `yaml:"target,omitempty" json:"target,omitempty"` + Description string `yaml:"description,omitempty" json:"description,omitempty"` + Protocol string `yaml:"protocol,omitempty" json:"protocol,omitempty"` + Authentication string `yaml:"authentication,omitempty" json:"authentication,omitempty"` + Authorization string `yaml:"authorization,omitempty" json:"authorization,omitempty"` + Tags []string `yaml:"tags,omitempty" json:"tags,omitempty"` + VPN bool `yaml:"vpn,omitempty" json:"vpn,omitempty"` + IpFiltered bool `yaml:"ip_filtered,omitempty" 
json:"ip_filtered,omitempty"` + Readonly bool `yaml:"readonly,omitempty" json:"readonly,omitempty"` + Usage string `yaml:"usage,omitempty" json:"usage,omitempty"` + DataAssetsSent []string `yaml:"data_assets_sent,omitempty" json:"data_assets_sent,omitempty"` + DataAssetsReceived []string `yaml:"data_assets_received,omitempty" json:"data_assets_received,omitempty"` + DiagramTweakWeight int `yaml:"diagram_tweak_weight,omitempty" json:"diagram_tweak_weight,omitempty"` + DiagramTweakConstraint bool `yaml:"diagram_tweak_constraint,omitempty" json:"diagram_tweak_constraint,omitempty"` } type SharedRuntime struct { - ID string `yaml:"id" json:"id"` - Description string `yaml:"description" json:"description"` - Tags []string `yaml:"tags" json:"tags"` - TechnicalAssetsRunning []string `yaml:"technical_assets_running" json:"technical_assets_running"` + ID string `yaml:"id,omitempty" json:"id,omitempty"` + Description string `yaml:"description,omitempty" json:"description,omitempty"` + Tags []string `yaml:"tags,omitempty" json:"tag,omitemptys"` + TechnicalAssetsRunning []string `yaml:"technical_assets_running,omitempty" json:"technical_assets_running,omitempty"` } type TrustBoundary struct { - ID string `yaml:"id" json:"id"` - Description string `yaml:"description" json:"description"` - Type string `yaml:"type" json:"type"` - Tags []string `yaml:"tags" json:"tags"` - TechnicalAssetsInside []string `yaml:"technical_assets_inside" json:"technical_assets_inside"` - TrustBoundariesNested []string `yaml:"trust_boundaries_nested" json:"trust_boundaries_nested"` + ID string `yaml:"id,omitempty" json:"id,omitempty"` + Description string `yaml:"description,omitempty" json:"description,omitempty"` + Type string `yaml:"type,omitempty" json:"type,omitempty"` + Tags []string `yaml:"tags,omitempty" json:"tags,omitempty"` + TechnicalAssetsInside []string `yaml:"technical_assets_inside,omitempty" json:"technical_assets_inside,omitempty"` + TrustBoundariesNested []string 
`yaml:"trust_boundaries_nested,omitempty" json:"trust_boundaries_nested,omitempty"` } type IndividualRiskCategory struct { - ID string `yaml:"id" json:"id"` - Description string `yaml:"description" json:"description"` - Impact string `yaml:"impact" json:"impact"` - ASVS string `yaml:"asvs" json:"asvs"` - CheatSheet string `yaml:"cheat_sheet" json:"cheat_sheet"` - Action string `yaml:"action" json:"action"` - Mitigation string `yaml:"mitigation" json:"mitigation"` - Check string `yaml:"check" json:"check"` - Function string `yaml:"function" json:"function"` - STRIDE string `yaml:"stride" json:"stride"` - DetectionLogic string `yaml:"detection_logic" json:"detection_logic"` - RiskAssessment string `yaml:"risk_assessment" json:"risk_assessment"` - FalsePositives string `yaml:"false_positives" json:"false_positives"` - ModelFailurePossibleReason bool `yaml:"model_failure_possible_reason" json:"model_failure_possible_reason"` - CWE int `yaml:"cwe" json:"cwe"` - RisksIdentified map[string]RiskIdentified `yaml:"risks_identified" json:"risks_identified"` + ID string `yaml:"id,omitempty" json:"id,omitempty"` + Description string `yaml:"description,omitempty" json:"description,omitempty"` + Impact string `yaml:"impact,omitempty" json:"impact,omitempty"` + ASVS string `yaml:"asvs,omitempty" json:"asvs,omitempty"` + CheatSheet string `yaml:"cheat_sheet,omitempty" json:"cheat_sheet,omitempty"` + Action string `yaml:"action,omitempty" json:"action,omitempty"` + Mitigation string `yaml:"mitigation,omitempty" json:"mitigation,omitempty"` + Check string `yaml:"check,omitempty" json:"check,omitempty"` + Function string `yaml:"function,omitempty" json:"function,omitempty"` + STRIDE string `yaml:"stride,omitempty" json:"stride,omitempty"` + DetectionLogic string `yaml:"detection_logic,omitempty" json:"detection_logic,omitempty"` + RiskAssessment string `yaml:"risk_assessment,omitempty" json:"risk_assessment,omitempty"` + FalsePositives string `yaml:"false_positives,omitempty" 
json:"false_positives,omitempty"` + ModelFailurePossibleReason bool `yaml:"model_failure_possible_reason,omitempty" json:"model_failure_possible_reason,omitempty"` + CWE int `yaml:"cwe,omitempty" json:"cwe,omitempty"` + RisksIdentified map[string]RiskIdentified `yaml:"risks_identified,omitempty" json:"risks_identified,omitempty"` } type RiskIdentified struct { - Severity string `yaml:"severity" json:"severity"` - ExploitationLikelihood string `yaml:"exploitation_likelihood" json:"exploitation_likelihood"` - ExploitationImpact string `yaml:"exploitation_impact" json:"exploitation_impact"` - DataBreachProbability string `yaml:"data_breach_probability" json:"data_breach_probability"` - DataBreachTechnicalAssets []string `yaml:"data_breach_technical_assets" json:"data_breach_technical_assets"` - MostRelevantDataAsset string `yaml:"most_relevant_data_asset" json:"most_relevant_data_asset"` - MostRelevantTechnicalAsset string `yaml:"most_relevant_technical_asset" json:"most_relevant_technical_asset"` - MostRelevantCommunicationLink string `yaml:"most_relevant_communication_link" json:"most_relevant_communication_link"` - MostRelevantTrustBoundary string `yaml:"most_relevant_trust_boundary" json:"most_relevant_trust_boundary"` - MostRelevantSharedRuntime string `yaml:"most_relevant_shared_runtime" json:"most_relevant_shared_runtime"` + Severity string `yaml:"severity,omitempty" json:"severity,omitempty"` + ExploitationLikelihood string `yaml:"exploitation_likelihood,omitempty" json:"exploitation_likelihood,omitempty"` + ExploitationImpact string `yaml:"exploitation_impact,omitempty" json:"exploitation_impact,omitempty"` + DataBreachProbability string `yaml:"data_breach_probability,omitempty" json:"data_breach_probability,omitempty"` + DataBreachTechnicalAssets []string `yaml:"data_breach_technical_assets,omitempty" json:"data_breach_technical_assets,omitempty"` + MostRelevantDataAsset string `yaml:"most_relevant_data_asset,omitempty" 
json:"most_relevant_data_asset,omitempty"` + MostRelevantTechnicalAsset string `yaml:"most_relevant_technical_asset,omitempty" json:"most_relevant_technical_asset,omitempty"` + MostRelevantCommunicationLink string `yaml:"most_relevant_communication_link,omitempty" json:"most_relevant_communication_link,omitempty"` + MostRelevantTrustBoundary string `yaml:"most_relevant_trust_boundary,omitempty" json:"most_relevant_trust_boundary,omitempty"` + MostRelevantSharedRuntime string `yaml:"most_relevant_shared_runtime,omitempty" json:"most_relevant_shared_runtime,omitempty"` } type RiskTracking struct { - Status string `yaml:"status" json:"status"` - Justification string `yaml:"justification" json:"justification"` - Ticket string `yaml:"ticket" json:"ticket"` - Date string `yaml:"date" json:"date"` - CheckedBy string `yaml:"checked_by" json:"checked_by"` + Status string `yaml:"status,omitempty" json:"status,omitempty"` + Justification string `yaml:"justification,omitempty" json:"justification,omitempty"` + Ticket string `yaml:"ticket,omitempty" json:"ticket,omitempty"` + Date string `yaml:"date,omitempty" json:"date,omitempty"` + CheckedBy string `yaml:"checked_by,omitempty" json:"checked_by,omitempty"` } type Model struct { // TODO: Eventually remove this and directly use ParsedModelRoot? But then the error messages for model errors are not quite as good anymore... 
Includes []string `yaml:"includes,omitempty" json:"includes,omitempty"` - ThreagileVersion string `yaml:"threagile_version" json:"threagile_version"` - Title string `yaml:"title" json:"title"` - Author Author `yaml:"author" json:"author"` - Contributors []Author `yaml:"contributors" json:"contributors"` - Date string `yaml:"date" json:"date"` - AppDescription Overview `yaml:"application_description" json:"application_description"` - BusinessOverview Overview `yaml:"business_overview" json:"business_overview"` - TechnicalOverview Overview `yaml:"technical_overview" json:"technical_overview"` - BusinessCriticality string `yaml:"business_criticality" json:"business_criticality"` - ManagementSummaryComment string `yaml:"management_summary_comment" json:"management_summary_comment"` - Questions map[string]string `yaml:"questions" json:"questions"` - AbuseCases map[string]string `yaml:"abuse_cases" json:"abuse_cases"` - SecurityRequirements map[string]string `yaml:"security_requirements" json:"security_requirements"` + ThreagileVersion string `yaml:"threagile_version,omitempty" json:"threagile_version,omitempty"` + Title string `yaml:"title,omitempty" json:"title,omitempty"` + Author Author `yaml:"author,omitempty" json:"author,omitempty"` + Contributors []Author `yaml:"contributors,omitempty" json:"contributors,omitempty"` + Date string `yaml:"date,omitempty" json:"date,omitempty"` + AppDescription Overview `yaml:"application_description,omitempty" json:"application_description,omitempty"` + BusinessOverview Overview `yaml:"business_overview,omitempty" json:"business_overview,omitempty"` + TechnicalOverview Overview `yaml:"technical_overview,omitempty" json:"technical_overview,omitempty"` + BusinessCriticality string `yaml:"business_criticality,omitempty" json:"business_criticality,omitempty"` + ManagementSummaryComment string `yaml:"management_summary_comment,omitempty" json:"management_summary_comment,omitempty"` + Questions map[string]string 
`yaml:"questions,omitempty" json:"questions,omitempty"` + AbuseCases map[string]string `yaml:"abuse_cases,omitempty" json:"abuse_cases,omitempty"` + SecurityRequirements map[string]string `yaml:"security_requirements,omitempty" json:"security_requirements,omitempty"` TagsAvailable []string `yaml:"tags_available,omitempty" json:"tags_available,omitempty"` - DataAssets map[string]DataAsset `yaml:"data_assets" json:"data_assets"` - TechnicalAssets map[string]TechnicalAsset `yaml:"technical_assets" json:"technical_assets"` - TrustBoundaries map[string]TrustBoundary `yaml:"trust_boundaries" json:"trust_boundaries"` - SharedRuntimes map[string]SharedRuntime `yaml:"shared_runtimes" json:"shared_runtimes"` - IndividualRiskCategories map[string]IndividualRiskCategory `yaml:"individual_risk_categories" json:"individual_risk_categories"` - RiskTracking map[string]RiskTracking `yaml:"risk_tracking" json:"risk_tracking"` - DiagramTweakNodesep int `yaml:"diagram_tweak_nodesep" json:"diagram_tweak_nodesep"` - DiagramTweakRanksep int `yaml:"diagram_tweak_ranksep" json:"diagram_tweak_ranksep"` - DiagramTweakEdgeLayout string `yaml:"diagram_tweak_edge_layout" json:"diagram_tweak_edge_layout"` - DiagramTweakSuppressEdgeLabels bool `yaml:"diagram_tweak_suppress_edge_labels" json:"diagram_tweak_suppress_edge_labels"` - DiagramTweakLayoutLeftToRight bool `yaml:"diagram_tweak_layout_left_to_right" json:"diagram_tweak_layout_left_to_right"` + DataAssets map[string]DataAsset `yaml:"data_assets,omitempty" json:"data_assets,omitempty"` + TechnicalAssets map[string]TechnicalAsset `yaml:"technical_assets,omitempty" json:"technical_assets,omitempty"` + TrustBoundaries map[string]TrustBoundary `yaml:"trust_boundaries,omitempty" json:"trust_boundaries,omitempty"` + SharedRuntimes map[string]SharedRuntime `yaml:"shared_runtimes,omitempty" json:"shared_runtimes,omitempty"` + IndividualRiskCategories map[string]IndividualRiskCategory `yaml:"individual_risk_categories,omitempty" 
json:"individual_risk_categories,omitempty"` + RiskTracking map[string]RiskTracking `yaml:"risk_tracking,omitempty" json:"risk_tracking,omitempty"` + DiagramTweakNodesep int `yaml:"diagram_tweak_nodesep,omitempty" json:"diagram_tweak_nodesep,omitempty"` + DiagramTweakRanksep int `yaml:"diagram_tweak_ranksep,omitempty" json:"diagram_tweak_ranksep,omitempty"` + DiagramTweakEdgeLayout string `yaml:"diagram_tweak_edge_layout,omitempty" json:"diagram_tweak_edge_layout,omitempty"` + DiagramTweakSuppressEdgeLabels bool `yaml:"diagram_tweak_suppress_edge_labels,omitempty" json:"diagram_tweak_suppress_edge_labels,omitempty"` + DiagramTweakLayoutLeftToRight bool `yaml:"diagram_tweak_layout_left_to_right,omitempty" json:"diagram_tweak_layout_left_to_right,omitempty"` DiagramTweakInvisibleConnectionsBetweenAssets []string `yaml:"diagram_tweak_invisible_connections_between_assets,omitempty" json:"diagram_tweak_invisible_connections_between_assets,omitempty"` DiagramTweakSameRankAssets []string `yaml:"diagram_tweak_same_rank_assets,omitempty" json:"diagram_tweak_same_rank_assets,omitempty"` } From b35e4e9848a37644b1bc4733f1e8e83203b1e242 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Wed, 24 Jan 2024 13:02:22 -0800 Subject: [PATCH 52/68] fixed config file loading --- .gitignore | 1 + go.mod | 12 ++--- go.sum | 23 +++++----- internal/threagile/root.go | 52 +++++----------------- pkg/common/config.go | 89 ++++++++++++++++++++++++++++++++++++++ 5 files changed, 120 insertions(+), 57 deletions(-) diff --git a/.gitignore b/.gitignore index f41a2eac..18ce845d 100644 --- a/.gitignore +++ b/.gitignore @@ -29,3 +29,4 @@ bin/ # IDE stuff .idea/ +/config.json \ No newline at end of file diff --git a/go.mod b/go.mod index c3b9162d..c10b01b5 100644 --- a/go.mod +++ b/go.mod @@ -9,7 +9,7 @@ require ( github.com/spf13/pflag v1.0.5 github.com/wcharczuk/go-chart v2.0.1+incompatible github.com/xuri/excelize/v2 v2.8.0 - golang.org/x/crypto v0.17.0 + golang.org/x/crypto v0.18.0 
gopkg.in/yaml.v3 v3.0.1 ) @@ -27,12 +27,12 @@ require ( github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect - github.com/pkg/errors v0.8.1 // indirect + github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/richardlehane/mscfb v1.0.4 // indirect github.com/richardlehane/msoleps v1.0.3 // indirect github.com/twitchyliquid64/golang-asm v0.15.1 // indirect - golang.org/x/sys v0.15.0 // indirect + golang.org/x/sys v0.16.0 // indirect golang.org/x/text v0.14.0 // indirect ) @@ -53,8 +53,8 @@ require ( github.com/ugorji/go/codec v1.2.12 // indirect github.com/xuri/efp v0.0.0-20231025114914-d1ff6096ae53 // indirect github.com/xuri/nfp v0.0.0-20230919160717-d98342af3f05 // indirect - golang.org/x/arch v0.6.0 // indirect - golang.org/x/image v0.14.0 // indirect - golang.org/x/net v0.19.0 // indirect + golang.org/x/arch v0.7.0 // indirect + golang.org/x/image v0.15.0 // indirect + golang.org/x/net v0.20.0 // indirect google.golang.org/protobuf v1.32.0 // indirect ) diff --git a/go.sum b/go.sum index 79872297..b10c4cfa 100644 --- a/go.sum +++ b/go.sum @@ -67,8 +67,9 @@ github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdU github.com/phpdave11/gofpdi v1.0.7/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= github.com/phpdave11/gofpdi v1.0.13 h1:o61duiW8M9sMlkVXWlvP92sZJtGKENvW3VExs6dZukQ= github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= -github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 
h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/richardlehane/mscfb v1.0.4 h1:WULscsljNPConisD5hR0+OyZjwK46Pfyr6mPu5ZawpM= @@ -110,17 +111,17 @@ github.com/xuri/nfp v0.0.0-20230919160717-d98342af3f05 h1:qhbILQo1K3mphbwKh1vNm4 github.com/xuri/nfp v0.0.0-20230919160717-d98342af3f05/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= -golang.org/x/arch v0.6.0 h1:S0JTfE48HbRj80+4tbvZDYsJ3tGv6BUU3XxyZ7CirAc= -golang.org/x/arch v0.6.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= +golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc= +golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= -golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= -golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= +golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.11.0/go.mod h1:bglhjqbqVuEb9e9+eNR45Jfu7D+T4Qan+NhQk8Ck2P8= -golang.org/x/image v0.14.0 h1:tNgSxAFe3jC4uYqvZdTr84SZoM1KfwdC9SKIFrLjFn4= -golang.org/x/image v0.14.0/go.mod h1:HUYqC05R2ZcZ3ejNQsIHQDQiwWM4JBqmm6MKANTp4LE= +golang.org/x/image v0.15.0 h1:kOELfmgrmJlw4Cdb7g/QGuB3CvDrXbqEIww/pNtNBm8= 
+golang.org/x/image v0.15.0/go.mod h1:HUYqC05R2ZcZ3ejNQsIHQDQiwWM4JBqmm6MKANTp4LE= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -129,8 +130,8 @@ golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= -golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= -golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= +golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo= +golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -143,8 +144,8 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= -golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= +golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term 
v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= diff --git a/internal/threagile/root.go b/internal/threagile/root.go index e16aa65a..cd3f6fec 100644 --- a/internal/threagile/root.go +++ b/internal/threagile/root.go @@ -6,8 +6,6 @@ package threagile import ( "fmt" - "os" - "runtime" "strings" "github.com/spf13/cobra" @@ -22,9 +20,11 @@ import ( func (what *Threagile) initRoot() *Threagile { what.rootCmd = &cobra.Command{ - Use: "threagile", - Short: "\n" + docs.Logo, - Long: "\n" + docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp) + "\n\n" + docs.Examples, + Use: "threagile", + Short: "\n" + docs.Logo, + Long: "\n" + docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp) + "\n\n" + docs.Examples, + SilenceErrors: true, + SilenceUsage: true, RunE: func(cmd *cobra.Command, args []string) error { cfg := what.readConfig(cmd, what.buildTimestamp) commands := what.readCommands() @@ -38,8 +38,7 @@ func (what *Threagile) initRoot() *Threagile { err = report.Generate(cfg, r, commands, progressReporter) if err != nil { - cmd.Println("Failed to generate reports") - cmd.PrintErr(err) + cmd.Printf("Failed to generate reports: %v \n", err) return err } return nil @@ -121,20 +120,20 @@ func (what *Threagile) readConfig(cmd *cobra.Command, buildTimestamp string) *co cfg.ServerPort = what.flags.serverPortFlag } if isFlagOverridden(flags, serverDirFlagName) { - cfg.ServerFolder = expandPath(what.flags.serverDirFlag) + cfg.ServerFolder = cfg.CleanPath(what.flags.serverDirFlag) } if isFlagOverridden(flags, appDirFlagName) { - cfg.AppFolder = expandPath(what.flags.appDirFlag) + cfg.AppFolder = cfg.CleanPath(what.flags.appDirFlag) } if isFlagOverridden(flags, binDirFlagName) { - cfg.BinFolder = 
expandPath(what.flags.binDirFlag) + cfg.BinFolder = cfg.CleanPath(what.flags.binDirFlag) } if isFlagOverridden(flags, outputFlagName) { - cfg.OutputFolder = expandPath(what.flags.outputDirFlag) + cfg.OutputFolder = cfg.CleanPath(what.flags.outputDirFlag) } if isFlagOverridden(flags, tempDirFlagName) { - cfg.TempFolder = expandPath(what.flags.tempDirFlag) + cfg.TempFolder = cfg.CleanPath(what.flags.tempDirFlag) } if isFlagOverridden(flags, verboseFlagName) { @@ -142,7 +141,7 @@ func (what *Threagile) readConfig(cmd *cobra.Command, buildTimestamp string) *co } if isFlagOverridden(flags, inputFileFlagName) { - cfg.InputFile = expandPath(what.flags.inputFileFlag) + cfg.InputFile = cfg.CleanPath(what.flags.inputFileFlag) } if isFlagOverridden(flags, raaPluginFlagName) { cfg.RAAPlugin = what.flags.raaPluginFlag @@ -173,30 +172,3 @@ func isFlagOverridden(flags *pflag.FlagSet, flagName string) bool { } return flag.Changed } - -func expandPath(path string) string { - home := userHomeDir() - if strings.HasPrefix(path, "~") { - path = strings.Replace(path, "~", home, 1) - } - - if strings.HasPrefix(path, "$HOME") { - path = strings.Replace(path, "$HOME", home, -1) - } - - return path -} - -func userHomeDir() string { - switch runtime.GOOS { - case "windows": - home := os.Getenv("HOMEDRIVE") + os.Getenv("HOMEPATH") - if home == "" { - home = os.Getenv("USERPROFILE") - } - return home - - default: - return os.Getenv("HOME") - } -} diff --git a/pkg/common/config.go b/pkg/common/config.go index ef3d25e0..ee0cef86 100644 --- a/pkg/common/config.go +++ b/pkg/common/config.go @@ -4,6 +4,8 @@ import ( "encoding/json" "fmt" "os" + "path/filepath" + "runtime" "strings" ) @@ -135,6 +137,49 @@ func (c *Config) Load(configFilename string) error { c.Merge(config, values) + c.TempFolder = c.CleanPath(c.TempFolder) + tempDirError := os.MkdirAll(c.TempFolder, 0700) + if tempDirError != nil { + return fmt.Errorf("failed to create temp dir %q: %v", c.TempFolder, tempDirError) + } + + 
c.OutputFolder = c.CleanPath(c.OutputFolder) + outDirError := os.MkdirAll(c.OutputFolder, 0700) + if outDirError != nil { + return fmt.Errorf("failed to create output dir %q: %v", c.OutputFolder, outDirError) + } + + c.AppFolder = c.CleanPath(c.AppFolder) + appDirError := c.checkDir(c.AppFolder, "app") + if appDirError != nil { + return appDirError + } + + c.BinFolder = c.CleanPath(c.BinFolder) + binDirError := c.checkDir(c.BinFolder, "bin") + if binDirError != nil { + return binDirError + } + + c.DataFolder = c.CleanPath(c.DataFolder) + dataDirError := c.checkDir(c.DataFolder, "data") + if dataDirError != nil { + return dataDirError + } + + if c.ServerPort > 0 { + c.ServerFolder = c.CleanPath(c.ServerFolder) + serverDirError := c.checkDir(c.ServerFolder, "server") + if serverDirError != nil { + return serverDirError + } + + keyDirError := os.MkdirAll(filepath.Join(c.ServerFolder, c.KeyFolder), 0700) + if keyDirError != nil { + return fmt.Errorf("failed to create key dir %q: %v", filepath.Join(c.ServerFolder, c.KeyFolder), keyDirError) + } + } + return nil } @@ -263,3 +308,47 @@ func (c *Config) Merge(config Config, values map[string]any) { } } } + +func (c *Config) CleanPath(path string) string { + return filepath.Clean(c.ExpandPath(path)) +} + +func (c *Config) checkDir(dir string, name string) error { + dirInfo, dirError := os.Stat(dir) + if dirError != nil { + return fmt.Errorf("%v folder %q not good: %v", name, dir, dirError) + } + + if !dirInfo.IsDir() { + return fmt.Errorf("%v folder %q is not a folder", name, dir) + } + + return nil +} + +func (c *Config) ExpandPath(path string) string { + home := c.UserHomeDir() + if strings.HasPrefix(path, "~") { + path = strings.Replace(path, "~", home, 1) + } + + if strings.HasPrefix(path, "$HOME") { + path = strings.Replace(path, "$HOME", home, -1) + } + + return path +} + +func (c *Config) UserHomeDir() string { + switch runtime.GOOS { + case "windows": + home := os.Getenv("HOMEDRIVE") + os.Getenv("HOMEPATH") + if 
home == "" { + home = os.Getenv("USERPROFILE") + } + return home + + default: + return os.Getenv("HOME") + } +} From dc8d4b028c9159f9776ac5430ff0689aecd352e5 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Tue, 30 Jan 2024 19:51:09 -0800 Subject: [PATCH 53/68] improved model merging for split model yaml import --- .gitignore | 7 +- Makefile | 4 +- cmd/raa/main.go | 53 +- cmd/threagile/main.go | 2 +- cmd/threagile/main_test.go | 30 +- go.mod | 4 + go.sum | 8 + internal/threagile/root.go | 2 +- pkg/common/config.go | 14 +- pkg/input/author.go | 68 + pkg/input/communication-link.go | 99 + pkg/input/data-asset.go | 89 + pkg/input/input.go | 406 -- pkg/input/model.go | 303 ++ pkg/input/overview.go | 20 + pkg/input/risk-category.go | 125 + pkg/input/risk-tracking.go | 59 + pkg/input/risk.go | 86 + pkg/input/shared-runtime.go | 47 + pkg/input/strings.go | 68 + pkg/input/technical-asset.go | 160 + pkg/input/trust-boundary.go | 56 + pkg/macros/add-build-pipeline-macro.go | 12 +- pkg/macros/add-vault-macro.go | 2 +- pkg/macros/macros.go | 2 +- pkg/macros/remove-unused-tags-macro.go | 54 +- pkg/macros/seed-tags-macro.go | 18 +- pkg/model/parse.go | 45 +- pkg/model/runner.go | 3 +- pkg/report/report.go | 9 +- pkg/security/types/authentication.go | 40 +- pkg/security/types/authorization.go | 40 +- pkg/security/types/communication_link.go | 34 +- pkg/security/types/confidentiality.go | 40 +- pkg/security/types/criticality.go | 40 +- pkg/security/types/data_asset.go | 24 +- pkg/security/types/data_breach_probability.go | 40 +- pkg/security/types/data_format.go | 40 +- pkg/security/types/date.go | 40 + pkg/security/types/encryption_style.go | 40 +- pkg/security/types/model.go | 73 +- pkg/security/types/protocol.go | 40 +- pkg/security/types/quantity.go | 40 +- pkg/security/types/risk-category.go | 32 +- pkg/security/types/risk-tracking.go | 16 +- pkg/security/types/risk.go | 28 +- .../types/risk_exploitation_impact.go | 40 +- .../types/risk_exploitation_likelihood.go | 40 +- 
pkg/security/types/risk_function.go | 40 +- pkg/security/types/risk_severity.go | 40 +- pkg/security/types/risk_status.go | 44 +- pkg/security/types/shared_runtime.go | 10 +- pkg/security/types/stride.go | 40 +- pkg/security/types/technical_asset.go | 56 +- pkg/security/types/technical_asset_machine.go | 40 +- pkg/security/types/technical_asset_size.go | 40 +- .../types/technical_asset_technology.go | 40 +- pkg/security/types/technical_asset_type.go | 40 +- pkg/security/types/trust_boundary.go | 14 +- pkg/security/types/trust_boundary_type.go | 40 +- pkg/security/types/usage.go | 40 +- test/all.json | 3711 +++++++++-------- 62 files changed, 4225 insertions(+), 2512 deletions(-) create mode 100644 pkg/input/author.go create mode 100644 pkg/input/communication-link.go create mode 100644 pkg/input/data-asset.go delete mode 100644 pkg/input/input.go create mode 100644 pkg/input/model.go create mode 100644 pkg/input/overview.go create mode 100644 pkg/input/risk-category.go create mode 100644 pkg/input/risk-tracking.go create mode 100644 pkg/input/risk.go create mode 100644 pkg/input/shared-runtime.go create mode 100644 pkg/input/strings.go create mode 100644 pkg/input/technical-asset.go create mode 100644 pkg/input/trust-boundary.go create mode 100644 pkg/security/types/date.go diff --git a/.gitignore b/.gitignore index 18ce845d..f932bbcb 100644 --- a/.gitignore +++ b/.gitignore @@ -24,9 +24,10 @@ stats.json *.out # build artifacts -vendor/ -bin/ +/vendor/ +/bin/ +/out*/ # IDE stuff -.idea/ +/.idea/ /config.json \ No newline at end of file diff --git a/Makefile b/Makefile index 5b55bbd9..ca094ff3 100644 --- a/Makefile +++ b/Makefile @@ -15,7 +15,7 @@ BIN = \ threagile # Commands and Flags -GOFLAGS = -a -ldflags="-s -w -X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" +GOFLAGS = -a -ldflags="-s -w -X main.buildTimestamp=$(shell date '+%Y%m%d%H%M%S')" GO = env GO111MODULE=on go MKDIR = mkdir -p CP = cp -r @@ -27,7 +27,7 @@ RM = rm -rf default: all prep: - env 
GO111MODULE=on go mod vendor + @# env GO111MODULE=on go mod vendor $(MKDIR) bin run_tests: diff --git a/cmd/raa/main.go b/cmd/raa/main.go index 90b7d5f4..c7f67be5 100644 --- a/cmd/raa/main.go +++ b/cmd/raa/main.go @@ -2,7 +2,9 @@ package main import ( "encoding/json" + "flag" "fmt" + "io" "os" "sort" @@ -12,12 +14,32 @@ import ( // used from run caller: func main() { + inputFilename := flag.String("in", "", "input file") + outputFilename := flag.String("out", "", "output file") + flag.Parse() + + var data []byte + var inputError error + if len(*inputFilename) > 0 { + data, inputError = os.ReadFile(*inputFilename) + if inputError != nil { + _, _ = fmt.Fprintf(os.Stderr, "failed to read input file %q: %v\n", *inputFilename, inputError) + os.Exit(-2) + } + } else { + data, inputError = io.ReadAll(os.Stdin) + if inputError != nil { + _, _ = fmt.Fprintf(os.Stderr, "failed to read input from stdin: %v\n", inputError) + os.Exit(-2) + } + } + + // _ = os.WriteFile("raa_in.json", data, 0644) + var input types.ParsedModel - decoder := json.NewDecoder(os.Stdin) - inError := decoder.Decode(&input) - if inError != nil { - _, _ = fmt.Fprintf(os.Stderr, "failed to parse model: %v\n", inError) - _, _ = fmt.Fprintf(os.Stderr, "\n") + parseError := json.Unmarshal(data, &input) + if parseError != nil { + _, _ = fmt.Fprintf(os.Stderr, "failed to parse model: %v\n", parseError) os.Exit(-2) } @@ -28,12 +50,31 @@ func main() { os.Exit(-2) } - _, _ = fmt.Fprint(os.Stdout, string(outData)) + // _ = os.WriteFile("raa_out.json", outData, 0644) + + var outputFile io.Writer = os.Stdout + if len(*outputFilename) > 0 { + file, outputError := os.Open(*outputFilename) + if outputError != nil { + _, _ = fmt.Fprintf(os.Stderr, "failed to open output file %q: %v\n", *outputFilename, outputError) + os.Exit(-2) + } + + defer closeFile(file) + outputFile = file + } + + _, _ = fmt.Fprint(outputFile, string(outData)) _ = text // _, _ = fmt.Fprint(os.Stderr, text) + os.Exit(0) } +func closeFile(file 
io.Closer) { + _ = file.Close() +} + func CalculateRAA(input *types.ParsedModel) string { for techAssetID, techAsset := range input.TechnicalAssets { aa := calculateAttackerAttractiveness(input, techAsset) diff --git a/cmd/threagile/main.go b/cmd/threagile/main.go index f18adc8e..d5bdc947 100644 --- a/cmd/threagile/main.go +++ b/cmd/threagile/main.go @@ -4,7 +4,7 @@ import ( "github.com/threagile/threagile/internal/threagile" ) -const ( +var ( buildTimestamp = "" ) diff --git a/cmd/threagile/main_test.go b/cmd/threagile/main_test.go index 5664f3c7..a531c359 100644 --- a/cmd/threagile/main_test.go +++ b/cmd/threagile/main_test.go @@ -2,10 +2,9 @@ package main import ( "encoding/json" + "fmt" "github.com/akedrou/textdiff" "github.com/threagile/threagile/pkg/input" - "github.com/threagile/threagile/pkg/security/types" - "log" "os" "path/filepath" "sort" @@ -56,23 +55,28 @@ func TestParseModelYaml(t *testing.T) { } func TestParseModelJson(t *testing.T) { - modelFile := filepath.Join("..", "..", "test", "all.json") - modelJson, readError := os.ReadFile(modelFile) - if readError != nil { - t.Error("Unable to read model file: ", readError) + modelFile := filepath.Join("..", "..", "test", "all.yaml") + model := *new(input.Model).Defaults() + flatLoadError := model.Load(modelFile) + if flatLoadError != nil { + t.Errorf("unable to parse model yaml %q: %v", modelFile, flatLoadError) return } - var modelStruct types.ParsedModel - unmarshalError := json.Unmarshal(modelJson, &modelStruct) - if unmarshalError != nil { - log.Fatal("Unable to parse model json: ", unmarshalError) + modelJson, marshalError := json.MarshalIndent(model, "", " ") + if marshalError != nil { + t.Error("Unable to print model json: ", marshalError) return } - _, marshalError := json.Marshal(&modelStruct) - if marshalError != nil { - log.Fatal("Unable to print model json: ", marshalError) + var modelStruct input.Model + unmarshalError := json.Unmarshal(modelJson, &modelStruct) + if unmarshalError != nil { + 
jsonFile := "test.json" + _ = os.WriteFile(jsonFile, modelJson, 0644) + fmt.Printf("Yaml file: %v\n", modelFile) + fmt.Printf("Json file: %v\n", jsonFile) + t.Error("Unable to parse model json: ", unmarshalError) return } } diff --git a/go.mod b/go.mod index c10b01b5..fcc26a36 100644 --- a/go.mod +++ b/go.mod @@ -5,7 +5,9 @@ go 1.20 require ( github.com/gin-gonic/gin v1.9.1 github.com/google/uuid v1.5.0 + github.com/jedib0t/go-pretty/v6 v6.5.4 github.com/jung-kurt/gofpdf v1.16.2 + github.com/mpvl/unique v0.0.0-20150818121801-cbe035fff7de github.com/spf13/pflag v1.0.5 github.com/wcharczuk/go-chart v2.0.1+incompatible github.com/xuri/excelize/v2 v2.8.0 @@ -24,6 +26,7 @@ require ( github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/leodido/go-urn v1.2.4 // indirect + github.com/mattn/go-runewidth v0.0.15 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect @@ -31,6 +34,7 @@ require ( github.com/pmezard/go-difflib v1.0.0 // indirect github.com/richardlehane/mscfb v1.0.4 // indirect github.com/richardlehane/msoleps v1.0.3 // indirect + github.com/rivo/uniseg v0.2.0 // indirect github.com/twitchyliquid64/golang-asm v0.15.1 // indirect golang.org/x/sys v0.16.0 // indirect golang.org/x/text v0.14.0 // indirect diff --git a/go.sum b/go.sum index b10c4cfa..4cf5a53f 100644 --- a/go.sum +++ b/go.sum @@ -42,6 +42,8 @@ github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU= github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/jedib0t/go-pretty/v6 v6.5.4 h1:gOGo0613MoqUcf0xCj+h/V3sHDaZasfv152G6/5l91s= 
+github.com/jedib0t/go-pretty/v6 v6.5.4/go.mod h1:5LQIxa52oJ/DlDSLv0HEkWOFMDGoWkJb9ss5KqPpJBg= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= @@ -55,6 +57,8 @@ github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U= +github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -62,6 +66,8 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= +github.com/mpvl/unique v0.0.0-20150818121801-cbe035fff7de h1:D5x39vF5KCwKQaw+OC9ZPiLVHXz3UFw2+psEX+gYcto= +github.com/mpvl/unique v0.0.0-20150818121801-cbe035fff7de/go.mod h1:kJun4WP5gFuHZgRjZUWWuH1DTxCtxbHDOIJsudS8jzY= github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI= 
github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= github.com/phpdave11/gofpdi v1.0.7/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= @@ -77,6 +83,8 @@ github.com/richardlehane/mscfb v1.0.4/go.mod h1:YzVpcZg9czvAuhk9T+a3avCpcFPMUWm7 github.com/richardlehane/msoleps v1.0.1/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg= github.com/richardlehane/msoleps v1.0.3 h1:aznSZzrwYRl3rLKRT3gUk9am7T/mLNSnJINvN0AQoVM= github.com/richardlehane/msoleps v1.0.3/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg= +github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w= github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= diff --git a/internal/threagile/root.go b/internal/threagile/root.go index cd3f6fec..b1677502 100644 --- a/internal/threagile/root.go +++ b/internal/threagile/root.go @@ -32,7 +32,7 @@ func (what *Threagile) initRoot() *Threagile { r, err := model.ReadAndAnalyzeModel(*cfg, progressReporter) if err != nil { - cmd.Println("Failed to read and analyze model") + cmd.Printf("Failed to read and analyze model: %v", err) return err } diff --git a/pkg/common/config.go b/pkg/common/config.go index ee0cef86..1e392c34 100644 --- a/pkg/common/config.go +++ b/pkg/common/config.go @@ -81,7 +81,7 @@ func (c *Config) Defaults(buildTimestamp string) *Config { RiskRulesPlugins: make([]string, 0), SkipRiskRules: "", ExecuteModelMacro: "", - ServerPort: DefaultServerPort, + ServerPort: 0, //DefaultServerPort, GraphvizDPI: DefaultGraphvizDPI, BackupHistoryFilesToKeep: DefaultBackupHistoryFilesToKeep, @@ -282,6 +282,10 @@ func (c *Config) Merge(config Config, values map[string]any) { 
c.ExecuteModelMacro = config.ExecuteModelMacro break + case strings.ToLower("DiagramDPI"): + c.DiagramDPI = config.DiagramDPI + break + case strings.ToLower("ServerPort"): c.ServerPort = config.ServerPort break @@ -290,6 +294,10 @@ func (c *Config) Merge(config Config, values map[string]any) { c.GraphvizDPI = config.GraphvizDPI break + case strings.ToLower("MaxGraphvizDPI"): + c.MaxGraphvizDPI = config.MaxGraphvizDPI + break + case strings.ToLower("BackupHistoryFilesToKeep"): c.BackupHistoryFilesToKeep = config.BackupHistoryFilesToKeep break @@ -305,6 +313,10 @@ func (c *Config) Merge(config Config, values map[string]any) { case strings.ToLower("IgnoreOrphanedRiskTracking"): c.IgnoreOrphanedRiskTracking = config.IgnoreOrphanedRiskTracking break + + case strings.ToLower("Attractiveness"): + c.Attractiveness = config.Attractiveness + break } } } diff --git a/pkg/input/author.go b/pkg/input/author.go new file mode 100644 index 00000000..834fc347 --- /dev/null +++ b/pkg/input/author.go @@ -0,0 +1,68 @@ +package input + +import ( + "fmt" + "sort" + "strings" +) + +type Author struct { + Name string `yaml:"name,omitempty" json:"name,omitempty"` + Contact string `yaml:"contact,omitempty" json:"contact,omitempty"` + Homepage string `yaml:"homepage,omitempty" json:"homepage,omitempty"` +} + +func (what *Author) Merge(other Author) error { + if len(what.Name) > 0 && !strings.EqualFold(what.Name, other.Name) { + return fmt.Errorf("author name mismatch") + } + + if len(what.Contact) > 0 && !strings.EqualFold(what.Contact, other.Contact) { + return fmt.Errorf("author contact mismatch") + } + + if len(what.Homepage) > 0 && !strings.EqualFold(what.Homepage, other.Homepage) { + return fmt.Errorf("author homepage mismatch") + } + + what.Name = other.Name + what.Contact = other.Contact + what.Homepage = other.Homepage + + return nil +} + +func (what *Author) MergeList(list []Author) ([]Author, error) { + sort.Slice(list, func(i int, j int) bool { + return 
strings.Compare(list[i].Name, list[j].Name) < 0 + }) + + if len(list) < 2 { + return list, nil + } + + first := list[0] + tail, mergeError := what.MergeList(list[1:]) + if mergeError != nil { + return nil, mergeError + } + + newList := make([]Author, 1) + newList[0] = first + for _, second := range tail { + if first.Match(second) { + mergeError = first.Merge(second) + if mergeError != nil { + return nil, mergeError + } + } else { + newList = append(newList, second) + } + } + + return newList, nil +} + +func (what *Author) Match(other Author) bool { + return strings.EqualFold(what.Name, other.Name) +} diff --git a/pkg/input/communication-link.go b/pkg/input/communication-link.go new file mode 100644 index 00000000..01070fde --- /dev/null +++ b/pkg/input/communication-link.go @@ -0,0 +1,99 @@ +package input + +import "fmt" + +type CommunicationLink struct { + Target string `yaml:"target,omitempty" json:"target,omitempty"` + Description string `yaml:"description,omitempty" json:"description,omitempty"` + Protocol string `yaml:"protocol,omitempty" json:"protocol,omitempty"` + Authentication string `yaml:"authentication,omitempty" json:"authentication,omitempty"` + Authorization string `yaml:"authorization,omitempty" json:"authorization,omitempty"` + Tags []string `yaml:"tags,omitempty" json:"tags,omitempty"` + VPN bool `yaml:"vpn,omitempty" json:"vpn,omitempty"` + IpFiltered bool `yaml:"ip_filtered,omitempty" json:"ip_filtered,omitempty"` + Readonly bool `yaml:"readonly,omitempty" json:"readonly,omitempty"` + Usage string `yaml:"usage,omitempty" json:"usage,omitempty"` + DataAssetsSent []string `yaml:"data_assets_sent,omitempty" json:"data_assets_sent,omitempty"` + DataAssetsReceived []string `yaml:"data_assets_received,omitempty" json:"data_assets_received,omitempty"` + DiagramTweakWeight int `yaml:"diagram_tweak_weight,omitempty" json:"diagram_tweak_weight,omitempty"` + DiagramTweakConstraint bool `yaml:"diagram_tweak_constraint,omitempty" 
json:"diagram_tweak_constraint,omitempty"` +} + +func (what *CommunicationLink) Merge(other CommunicationLink) error { + var mergeError error + what.Target, mergeError = new(Strings).MergeSingleton(what.Target, other.Target) + if mergeError != nil { + return fmt.Errorf("failed to merge target: %v", mergeError) + } + + what.Description, mergeError = new(Strings).MergeSingleton(what.Description, other.Description) + if mergeError != nil { + return fmt.Errorf("failed to merge description: %v", mergeError) + } + + what.Protocol, mergeError = new(Strings).MergeSingleton(what.Protocol, other.Protocol) + if mergeError != nil { + return fmt.Errorf("failed to merge protocol: %v", mergeError) + } + + what.Authentication, mergeError = new(Strings).MergeSingleton(what.Authentication, other.Authentication) + if mergeError != nil { + return fmt.Errorf("failed to merge authentication: %v", mergeError) + } + + what.Authorization, mergeError = new(Strings).MergeSingleton(what.Authorization, other.Authorization) + if mergeError != nil { + return fmt.Errorf("failed to merge authorization: %v", mergeError) + } + + what.Tags = new(Strings).MergeUniqueSlice(what.Tags, other.Tags) + + if what.VPN == false { + what.VPN = other.VPN + } + + if what.IpFiltered == false { + what.IpFiltered = other.IpFiltered + } + + if what.Readonly == false { + what.Readonly = other.Readonly + } + + what.Usage, mergeError = new(Strings).MergeSingleton(what.Usage, other.Usage) + if mergeError != nil { + return fmt.Errorf("failed to merge usage: %v", mergeError) + } + + what.DataAssetsSent = new(Strings).MergeUniqueSlice(what.DataAssetsSent, other.DataAssetsSent) + + what.DataAssetsReceived = new(Strings).MergeUniqueSlice(what.DataAssetsReceived, other.DataAssetsReceived) + + if what.DiagramTweakWeight == 0 { + what.DiagramTweakWeight = other.DiagramTweakWeight + } + + if what.DiagramTweakConstraint == false { + what.DiagramTweakConstraint = other.DiagramTweakConstraint + } + + return nil +} + +func (what 
*CommunicationLink) MergeMap(first map[string]CommunicationLink, second map[string]CommunicationLink) (map[string]CommunicationLink, error) { + for mapKey, mapValue := range second { + mapItem, ok := first[mapKey] + if ok { + mergeError := mapItem.Merge(mapValue) + if mergeError != nil { + return first, fmt.Errorf("failed to merge commuinication link %q: %v", mapKey, mergeError) + } + + first[mapKey] = mapItem + } else { + first[mapKey] = mapValue + } + } + + return first, nil +} diff --git a/pkg/input/data-asset.go b/pkg/input/data-asset.go new file mode 100644 index 00000000..e686f390 --- /dev/null +++ b/pkg/input/data-asset.go @@ -0,0 +1,89 @@ +package input + +import "fmt" + +type DataAsset struct { + ID string `yaml:"id,omitempty" json:"id,omitempty"` + Description string `yaml:"description,omitempty" json:"description,omitempty"` + Usage string `yaml:"usage,omitempty" json:"usage,omitempty"` + Tags []string `yaml:"tags,omitempty" json:"tags,omitempty"` + Origin string `yaml:"origin,omitempty" json:"origin,omitempty"` + Owner string `yaml:"owner,omitempty" json:"owner,omitempty"` + Quantity string `yaml:"quantity,omitempty" json:"quantity,omitempty"` + Confidentiality string `yaml:"confidentiality,omitempty" json:"confidentiality,omitempty"` + Integrity string `yaml:"integrity,omitempty" json:"integrity,omitempty"` + Availability string `yaml:"availability,omitempty" json:"availability,omitempty"` + JustificationCiaRating string `yaml:"justification_cia_rating,omitempty" json:"justification_cia_rating,omitempty"` +} + +func (what *DataAsset) Merge(other DataAsset) error { + var mergeError error + what.ID, mergeError = new(Strings).MergeSingleton(what.ID, other.ID) + if mergeError != nil { + return fmt.Errorf("failed to merge id: %v", mergeError) + } + + what.Description, mergeError = new(Strings).MergeSingleton(what.Description, other.Description) + if mergeError != nil { + return fmt.Errorf("failed to merge description: %v", mergeError) + } + + what.Usage, 
mergeError = new(Strings).MergeSingleton(what.Usage, other.Usage) + if mergeError != nil { + return fmt.Errorf("failed to merge usage: %v", mergeError) + } + + what.Tags = new(Strings).MergeUniqueSlice(what.Tags, other.Tags) + + what.Origin, mergeError = new(Strings).MergeSingleton(what.Origin, other.Origin) + if mergeError != nil { + return fmt.Errorf("failed to merge origin: %v", mergeError) + } + + what.Owner, mergeError = new(Strings).MergeSingleton(what.Owner, other.Owner) + if mergeError != nil { + return fmt.Errorf("failed to merge owner: %v", mergeError) + } + + what.Quantity, mergeError = new(Strings).MergeSingleton(what.Quantity, other.Quantity) + if mergeError != nil { + return fmt.Errorf("failed to merge quantity: %v", mergeError) + } + + what.Confidentiality, mergeError = new(Strings).MergeSingleton(what.Confidentiality, other.Confidentiality) + if mergeError != nil { + return fmt.Errorf("failed to merge confidentiality: %v", mergeError) + } + + what.Integrity, mergeError = new(Strings).MergeSingleton(what.Integrity, other.Integrity) + if mergeError != nil { + return fmt.Errorf("failed to merge integrity: %v", mergeError) + } + + what.Availability, mergeError = new(Strings).MergeSingleton(what.Availability, other.Availability) + if mergeError != nil { + return fmt.Errorf("failed to merge availability: %v", mergeError) + } + + what.JustificationCiaRating = new(Strings).MergeMultiline(what.JustificationCiaRating, other.JustificationCiaRating) + + return nil +} + +func (what *DataAsset) MergeMap(first map[string]DataAsset, second map[string]DataAsset) (map[string]DataAsset, error) { + for mapKey, mapValue := range second { + mapItem, ok := first[mapKey] + if ok { + mergeError := mapItem.Merge(mapValue) + if mergeError != nil { + return first, fmt.Errorf("failed to merge data asset %q: %v", mapKey, mergeError) + } + + first[mapKey] = mapItem + } else { + first[mapKey] = mapValue + } + } + + return first, nil +} diff --git a/pkg/input/input.go 
b/pkg/input/input.go deleted file mode 100644 index d69d234f..00000000 --- a/pkg/input/input.go +++ /dev/null @@ -1,406 +0,0 @@ -/* -Copyright © 2023 NAME HERE -*/ - -package input - -import ( - "fmt" - "log" - "os" - "path/filepath" - "strings" - - "gopkg.in/yaml.v3" -) - -// === Model Type Stuff ====================================== - -type Author struct { - Name string `yaml:"name,omitempty" json:"name,omitempty"` - Contact string `yaml:"contact,omitempty" json:"contact,omitempty"` - Homepage string `yaml:"homepage,omitempty" json:"homepage,omitempty"` -} - -type Overview struct { - Description string `yaml:"description,omitempty" json:"description,omitempty"` - Images []map[string]string `yaml:"images,omitempty" json:"images,omitempty"` // yes, array of map here, as array keeps the order of the image keys -} - -type DataAsset struct { - ID string `yaml:"id,omitempty" json:"id,omitempty"` - Description string `yaml:"description,omitempty" json:"description,omitempty"` - Usage string `yaml:"usage,omitempty" json:"usage,omitempty"` - Tags []string `yaml:"tags,omitempty" json:"tags,omitempty"` - Origin string `yaml:"origin,omitempty" json:"origin,omitempty"` - Owner string `yaml:"owner,omitempty" json:"owner,omitempty"` - Quantity string `yaml:"quantity,omitempty" json:"quantity,omitempty"` - Confidentiality string `yaml:"confidentiality,omitempty" json:"confidentiality,omitempty"` - Integrity string `yaml:"integrity,omitempty" json:"integrity,omitempty"` - Availability string `yaml:"availability,omitempty" json:"availability,omitempty"` - JustificationCiaRating string `yaml:"justification_cia_rating,omitempty" json:"justification_cia_rating,omitempty"` -} - -type TechnicalAsset struct { - ID string `yaml:"id,omitempty" json:"id,omitempty"` - Description string `yaml:"description,omitempty" json:"description,omitempty"` - Type string `yaml:"type,omitempty" json:"type,omitempty"` - Usage string `yaml:"usage,omitempty" json:"usage,omitempty"` - UsedAsClientByHuman 
bool `yaml:"used_as_client_by_human,omitempty" json:"used_as_client_by_human,omitempty"` - OutOfScope bool `yaml:"out_of_scope,omitempty" json:"out_of_scope,omitempty"` - JustificationOutOfScope string `yaml:"justification_out_of_scope,omitempty" json:"justification_out_of_scope,omitempty"` - Size string `yaml:"size,omitempty" json:"size,omitempty"` - Technology string `yaml:"technology,omitempty" json:"technology,omitempty"` - Tags []string `yaml:"tags,omitempty" json:"tags,omitempty"` - Internet bool `yaml:"internet,omitempty" json:"internet,omitempty"` - Machine string `yaml:"machine,omitempty" json:"machine,omitempty"` - Encryption string `yaml:"encryption,omitempty" json:"encryption,omitempty"` - Owner string `yaml:"owner,omitempty" json:"owner,omitempty"` - Confidentiality string `yaml:"confidentiality,omitempty" json:"confidentiality,omitempty"` - Integrity string `yaml:"integrity,omitempty" json:"integrity,omitempty"` - Availability string `yaml:"availability,omitempty" json:"availability,omitempty"` - JustificationCiaRating string `yaml:"justification_cia_rating,omitempty" json:"justification_cia_rating,omitempty"` - MultiTenant bool `yaml:"multi_tenant,omitempty" json:"multi_tenant,omitempty"` - Redundant bool `yaml:"redundant,omitempty" json:"redundant,omitempty"` - CustomDevelopedParts bool `yaml:"custom_developed_parts,omitempty" json:"custom_developed_parts,omitempty"` - DataAssetsProcessed []string `yaml:"data_assets_processed,omitempty" json:"data_assets_processed,omitempty"` - DataAssetsStored []string `yaml:"data_assets_stored,omitempty" json:"data_assets_stored,omitempty"` - DataFormatsAccepted []string `yaml:"data_formats_accepted,omitempty" json:"data_formats_accepted,omitempty"` - DiagramTweakOrder int `yaml:"diagram_tweak_order,omitempty" json:"diagram_tweak_order,omitempty"` - CommunicationLinks map[string]CommunicationLink `yaml:"communication_links,omitempty" json:"communication_links,omitempty"` -} - -type CommunicationLink struct { - 
Target string `yaml:"target,omitempty" json:"target,omitempty"` - Description string `yaml:"description,omitempty" json:"description,omitempty"` - Protocol string `yaml:"protocol,omitempty" json:"protocol,omitempty"` - Authentication string `yaml:"authentication,omitempty" json:"authentication,omitempty"` - Authorization string `yaml:"authorization,omitempty" json:"authorization,omitempty"` - Tags []string `yaml:"tags,omitempty" json:"tags,omitempty"` - VPN bool `yaml:"vpn,omitempty" json:"vpn,omitempty"` - IpFiltered bool `yaml:"ip_filtered,omitempty" json:"ip_filtered,omitempty"` - Readonly bool `yaml:"readonly,omitempty" json:"readonly,omitempty"` - Usage string `yaml:"usage,omitempty" json:"usage,omitempty"` - DataAssetsSent []string `yaml:"data_assets_sent,omitempty" json:"data_assets_sent,omitempty"` - DataAssetsReceived []string `yaml:"data_assets_received,omitempty" json:"data_assets_received,omitempty"` - DiagramTweakWeight int `yaml:"diagram_tweak_weight,omitempty" json:"diagram_tweak_weight,omitempty"` - DiagramTweakConstraint bool `yaml:"diagram_tweak_constraint,omitempty" json:"diagram_tweak_constraint,omitempty"` -} - -type SharedRuntime struct { - ID string `yaml:"id,omitempty" json:"id,omitempty"` - Description string `yaml:"description,omitempty" json:"description,omitempty"` - Tags []string `yaml:"tags,omitempty" json:"tag,omitemptys"` - TechnicalAssetsRunning []string `yaml:"technical_assets_running,omitempty" json:"technical_assets_running,omitempty"` -} - -type TrustBoundary struct { - ID string `yaml:"id,omitempty" json:"id,omitempty"` - Description string `yaml:"description,omitempty" json:"description,omitempty"` - Type string `yaml:"type,omitempty" json:"type,omitempty"` - Tags []string `yaml:"tags,omitempty" json:"tags,omitempty"` - TechnicalAssetsInside []string `yaml:"technical_assets_inside,omitempty" json:"technical_assets_inside,omitempty"` - TrustBoundariesNested []string `yaml:"trust_boundaries_nested,omitempty" 
json:"trust_boundaries_nested,omitempty"` -} - -type IndividualRiskCategory struct { - ID string `yaml:"id,omitempty" json:"id,omitempty"` - Description string `yaml:"description,omitempty" json:"description,omitempty"` - Impact string `yaml:"impact,omitempty" json:"impact,omitempty"` - ASVS string `yaml:"asvs,omitempty" json:"asvs,omitempty"` - CheatSheet string `yaml:"cheat_sheet,omitempty" json:"cheat_sheet,omitempty"` - Action string `yaml:"action,omitempty" json:"action,omitempty"` - Mitigation string `yaml:"mitigation,omitempty" json:"mitigation,omitempty"` - Check string `yaml:"check,omitempty" json:"check,omitempty"` - Function string `yaml:"function,omitempty" json:"function,omitempty"` - STRIDE string `yaml:"stride,omitempty" json:"stride,omitempty"` - DetectionLogic string `yaml:"detection_logic,omitempty" json:"detection_logic,omitempty"` - RiskAssessment string `yaml:"risk_assessment,omitempty" json:"risk_assessment,omitempty"` - FalsePositives string `yaml:"false_positives,omitempty" json:"false_positives,omitempty"` - ModelFailurePossibleReason bool `yaml:"model_failure_possible_reason,omitempty" json:"model_failure_possible_reason,omitempty"` - CWE int `yaml:"cwe,omitempty" json:"cwe,omitempty"` - RisksIdentified map[string]RiskIdentified `yaml:"risks_identified,omitempty" json:"risks_identified,omitempty"` -} - -type RiskIdentified struct { - Severity string `yaml:"severity,omitempty" json:"severity,omitempty"` - ExploitationLikelihood string `yaml:"exploitation_likelihood,omitempty" json:"exploitation_likelihood,omitempty"` - ExploitationImpact string `yaml:"exploitation_impact,omitempty" json:"exploitation_impact,omitempty"` - DataBreachProbability string `yaml:"data_breach_probability,omitempty" json:"data_breach_probability,omitempty"` - DataBreachTechnicalAssets []string `yaml:"data_breach_technical_assets,omitempty" json:"data_breach_technical_assets,omitempty"` - MostRelevantDataAsset string `yaml:"most_relevant_data_asset,omitempty" 
json:"most_relevant_data_asset,omitempty"` - MostRelevantTechnicalAsset string `yaml:"most_relevant_technical_asset,omitempty" json:"most_relevant_technical_asset,omitempty"` - MostRelevantCommunicationLink string `yaml:"most_relevant_communication_link,omitempty" json:"most_relevant_communication_link,omitempty"` - MostRelevantTrustBoundary string `yaml:"most_relevant_trust_boundary,omitempty" json:"most_relevant_trust_boundary,omitempty"` - MostRelevantSharedRuntime string `yaml:"most_relevant_shared_runtime,omitempty" json:"most_relevant_shared_runtime,omitempty"` -} - -type RiskTracking struct { - Status string `yaml:"status,omitempty" json:"status,omitempty"` - Justification string `yaml:"justification,omitempty" json:"justification,omitempty"` - Ticket string `yaml:"ticket,omitempty" json:"ticket,omitempty"` - Date string `yaml:"date,omitempty" json:"date,omitempty"` - CheckedBy string `yaml:"checked_by,omitempty" json:"checked_by,omitempty"` -} - -type Model struct { // TODO: Eventually remove this and directly use ParsedModelRoot? But then the error messages for model errors are not quite as good anymore... 
- Includes []string `yaml:"includes,omitempty" json:"includes,omitempty"` - ThreagileVersion string `yaml:"threagile_version,omitempty" json:"threagile_version,omitempty"` - Title string `yaml:"title,omitempty" json:"title,omitempty"` - Author Author `yaml:"author,omitempty" json:"author,omitempty"` - Contributors []Author `yaml:"contributors,omitempty" json:"contributors,omitempty"` - Date string `yaml:"date,omitempty" json:"date,omitempty"` - AppDescription Overview `yaml:"application_description,omitempty" json:"application_description,omitempty"` - BusinessOverview Overview `yaml:"business_overview,omitempty" json:"business_overview,omitempty"` - TechnicalOverview Overview `yaml:"technical_overview,omitempty" json:"technical_overview,omitempty"` - BusinessCriticality string `yaml:"business_criticality,omitempty" json:"business_criticality,omitempty"` - ManagementSummaryComment string `yaml:"management_summary_comment,omitempty" json:"management_summary_comment,omitempty"` - Questions map[string]string `yaml:"questions,omitempty" json:"questions,omitempty"` - AbuseCases map[string]string `yaml:"abuse_cases,omitempty" json:"abuse_cases,omitempty"` - SecurityRequirements map[string]string `yaml:"security_requirements,omitempty" json:"security_requirements,omitempty"` - TagsAvailable []string `yaml:"tags_available,omitempty" json:"tags_available,omitempty"` - DataAssets map[string]DataAsset `yaml:"data_assets,omitempty" json:"data_assets,omitempty"` - TechnicalAssets map[string]TechnicalAsset `yaml:"technical_assets,omitempty" json:"technical_assets,omitempty"` - TrustBoundaries map[string]TrustBoundary `yaml:"trust_boundaries,omitempty" json:"trust_boundaries,omitempty"` - SharedRuntimes map[string]SharedRuntime `yaml:"shared_runtimes,omitempty" json:"shared_runtimes,omitempty"` - IndividualRiskCategories map[string]IndividualRiskCategory `yaml:"individual_risk_categories,omitempty" json:"individual_risk_categories,omitempty"` - RiskTracking 
map[string]RiskTracking `yaml:"risk_tracking,omitempty" json:"risk_tracking,omitempty"` - DiagramTweakNodesep int `yaml:"diagram_tweak_nodesep,omitempty" json:"diagram_tweak_nodesep,omitempty"` - DiagramTweakRanksep int `yaml:"diagram_tweak_ranksep,omitempty" json:"diagram_tweak_ranksep,omitempty"` - DiagramTweakEdgeLayout string `yaml:"diagram_tweak_edge_layout,omitempty" json:"diagram_tweak_edge_layout,omitempty"` - DiagramTweakSuppressEdgeLabels bool `yaml:"diagram_tweak_suppress_edge_labels,omitempty" json:"diagram_tweak_suppress_edge_labels,omitempty"` - DiagramTweakLayoutLeftToRight bool `yaml:"diagram_tweak_layout_left_to_right,omitempty" json:"diagram_tweak_layout_left_to_right,omitempty"` - DiagramTweakInvisibleConnectionsBetweenAssets []string `yaml:"diagram_tweak_invisible_connections_between_assets,omitempty" json:"diagram_tweak_invisible_connections_between_assets,omitempty"` - DiagramTweakSameRankAssets []string `yaml:"diagram_tweak_same_rank_assets,omitempty" json:"diagram_tweak_same_rank_assets,omitempty"` -} - -func (model *Model) Defaults() *Model { - *model = Model{ - Questions: make(map[string]string), - AbuseCases: make(map[string]string), - SecurityRequirements: make(map[string]string), - DataAssets: make(map[string]DataAsset), - TechnicalAssets: make(map[string]TechnicalAsset), - TrustBoundaries: make(map[string]TrustBoundary), - SharedRuntimes: make(map[string]SharedRuntime), - IndividualRiskCategories: make(map[string]IndividualRiskCategory), - RiskTracking: make(map[string]RiskTracking), - } - - return model -} - -func (model *Model) Load(inputFilename string) error { - modelYaml, readError := os.ReadFile(inputFilename) - if readError != nil { - log.Fatal("Unable to read model file: ", readError) - } - - unmarshalError := yaml.Unmarshal(modelYaml, &model) - if unmarshalError != nil { - log.Fatal("Unable to parse model yaml: ", unmarshalError) - } - - for _, includeFile := range model.Includes { - mergeError := 
model.Merge(filepath.Dir(inputFilename), includeFile) - if mergeError != nil { - log.Fatalf("Unable to merge model include %q: %v", includeFile, mergeError) - } - } - - return nil -} - -type UniqueStringSlice []string - -func (slice UniqueStringSlice) Merge(otherSlice []string) []string { - valueMap := make(map[string]bool) - for _, value := range slice { - valueMap[value] = true - } - - for _, value := range otherSlice { - valueMap[value] = true - } - - valueSlice := make(UniqueStringSlice, 0) - for key := range valueMap { - valueSlice = append(valueSlice, key) - } - - return valueSlice -} - -func (model *Model) Merge(dir string, includeFilename string) error { - modelYaml, readError := os.ReadFile(filepath.Join(dir, includeFilename)) - if readError != nil { - return fmt.Errorf("unable to read model file: %v", readError) - } - - var fileStructure map[string]any - unmarshalStructureError := yaml.Unmarshal(modelYaml, &fileStructure) - if unmarshalStructureError != nil { - return fmt.Errorf("unable to parse model structure: %v", unmarshalStructureError) - } - - var includedModel Model - unmarshalError := yaml.Unmarshal(modelYaml, &includedModel) - if unmarshalError != nil { - return fmt.Errorf("unable to parse model yaml: %v", unmarshalError) - } - - for item := range fileStructure { - switch strings.ToLower(item) { - case strings.ToLower("includes"): - for _, includeFile := range includedModel.Includes { - mergeError := model.Merge(filepath.Join(dir, filepath.Dir(includeFilename)), includeFile) - if mergeError != nil { - return fmt.Errorf("unable to merge model include %q: %v", includeFile, mergeError) - } - } - break - - case strings.ToLower("threagile_version"): - model.ThreagileVersion = includedModel.ThreagileVersion - break - - case strings.ToLower("title"): - model.Title = includedModel.Title - break - - case strings.ToLower("author"): - model.Author = includedModel.Author - break - - case strings.ToLower("date"): - model.Date = includedModel.Date - break - - 
case strings.ToLower("business_overview"): - model.BusinessOverview = includedModel.BusinessOverview - break - - case strings.ToLower("technical_overview"): - model.TechnicalOverview = includedModel.TechnicalOverview - break - - case strings.ToLower("business_criticality"): - model.BusinessCriticality = includedModel.BusinessCriticality - break - - case strings.ToLower("management_summary_comment"): - model.ManagementSummaryComment = includedModel.ManagementSummaryComment - break - - case strings.ToLower("questions"): - for mapKey, mapValue := range includedModel.Questions { - model.Questions[mapKey] = mapValue - } - break - - case strings.ToLower("abuse_cases"): - for mapKey, mapValue := range includedModel.AbuseCases { - model.AbuseCases[mapKey] = mapValue - } - break - - case strings.ToLower("security_requirements"): - for mapKey, mapValue := range includedModel.SecurityRequirements { - model.SecurityRequirements[mapKey] = mapValue - } - break - - case strings.ToLower("tags_available"): - model.TagsAvailable = UniqueStringSlice(model.TagsAvailable).Merge(includedModel.TagsAvailable) - break - - case strings.ToLower("data_assets"): - for mapKey, mapValue := range includedModel.DataAssets { - model.DataAssets[mapKey] = mapValue - } - break - - case strings.ToLower("technical_assets"): - for mapKey, mapValue := range includedModel.TechnicalAssets { - model.TechnicalAssets[mapKey] = mapValue - } - break - - case strings.ToLower("trust_boundaries"): - for mapKey, mapValue := range includedModel.TrustBoundaries { - model.TrustBoundaries[mapKey] = mapValue - } - break - - case strings.ToLower("shared_runtimes"): - for mapKey, mapValue := range includedModel.SharedRuntimes { - model.SharedRuntimes[mapKey] = mapValue - } - break - - case strings.ToLower("individual_risk_categories"): - for mapKey, mapValue := range includedModel.IndividualRiskCategories { - model.IndividualRiskCategories[mapKey] = mapValue - } - break - - case strings.ToLower("risk_tracking"): - for 
mapKey, mapValue := range includedModel.RiskTracking { - model.RiskTracking[mapKey] = mapValue - } - break - - case "diagram_tweak_nodesep": - model.DiagramTweakNodesep = includedModel.DiagramTweakNodesep - break - - case "diagram_tweak_ranksep": - model.DiagramTweakRanksep = includedModel.DiagramTweakRanksep - break - - case "diagram_tweak_edge_layout": - model.DiagramTweakEdgeLayout = includedModel.DiagramTweakEdgeLayout - break - - case "diagram_tweak_suppress_edge_labels": - model.DiagramTweakSuppressEdgeLabels = includedModel.DiagramTweakSuppressEdgeLabels - break - - case "diagram_tweak_layout_left_to_right": - model.DiagramTweakLayoutLeftToRight = includedModel.DiagramTweakLayoutLeftToRight - break - - case "diagram_tweak_invisible_connections_between_assets": - model.DiagramTweakInvisibleConnectionsBetweenAssets = append(model.DiagramTweakInvisibleConnectionsBetweenAssets, includedModel.DiagramTweakInvisibleConnectionsBetweenAssets...) - break - - case "diagram_tweak_same_rank_assets": - model.DiagramTweakSameRankAssets = append(model.DiagramTweakSameRankAssets, includedModel.DiagramTweakSameRankAssets...) 
- } - } - - return nil -} - -func AddTagToModelInput(modelInput *Model, tag string, dryRun bool, changes *[]string) { - tag = NormalizeTag(tag) - if !contains(modelInput.TagsAvailable, tag) { - *changes = append(*changes, "adding tag: "+tag) - if !dryRun { - modelInput.TagsAvailable = append(modelInput.TagsAvailable, tag) - } - } -} - -func NormalizeTag(tag string) string { - return strings.TrimSpace(strings.ToLower(tag)) -} - -func contains(a []string, x string) bool { - for _, n := range a { - if x == n { - return true - } - } - return false -} diff --git a/pkg/input/model.go b/pkg/input/model.go new file mode 100644 index 00000000..f44db7b5 --- /dev/null +++ b/pkg/input/model.go @@ -0,0 +1,303 @@ +/* +Copyright © 2023 NAME HERE +*/ + +package input + +import ( + "fmt" + "github.com/mpvl/unique" + "log" + "os" + "path/filepath" + "slices" + "sort" + "strings" + + "gopkg.in/yaml.v3" +) + +// === Model Type Stuff ====================================== + +type Model struct { // TODO: Eventually remove this and directly use ParsedModelRoot? But then the error messages for model errors are not quite as good anymore... 
+ ThreagileVersion string `yaml:"threagile_version,omitempty" json:"threagile_version,omitempty"` + Includes []string `yaml:"includes,omitempty" json:"includes,omitempty"` + Title string `yaml:"title,omitempty" json:"title,omitempty"` + Author Author `yaml:"author,omitempty" json:"author,omitempty"` + Contributors []Author `yaml:"contributors,omitempty" json:"contributors,omitempty"` + Date string `yaml:"date,omitempty" json:"date,omitempty"` + AppDescription Overview `yaml:"application_description,omitempty" json:"application_description,omitempty"` + BusinessOverview Overview `yaml:"business_overview,omitempty" json:"business_overview,omitempty"` + TechnicalOverview Overview `yaml:"technical_overview,omitempty" json:"technical_overview,omitempty"` + BusinessCriticality string `yaml:"business_criticality,omitempty" json:"business_criticality,omitempty"` + ManagementSummaryComment string `yaml:"management_summary_comment,omitempty" json:"management_summary_comment,omitempty"` + SecurityRequirements map[string]string `yaml:"security_requirements,omitempty" json:"security_requirements,omitempty"` + Questions map[string]string `yaml:"questions,omitempty" json:"questions,omitempty"` + AbuseCases map[string]string `yaml:"abuse_cases,omitempty" json:"abuse_cases,omitempty"` + TagsAvailable []string `yaml:"tags_available,omitempty" json:"tags_available,omitempty"` + DataAssets map[string]DataAsset `yaml:"data_assets,omitempty" json:"data_assets,omitempty"` + TechnicalAssets map[string]TechnicalAsset `yaml:"technical_assets,omitempty" json:"technical_assets,omitempty"` + TrustBoundaries map[string]TrustBoundary `yaml:"trust_boundaries,omitempty" json:"trust_boundaries,omitempty"` + SharedRuntimes map[string]SharedRuntime `yaml:"shared_runtimes,omitempty" json:"shared_runtimes,omitempty"` + IndividualRiskCategories map[string]IndividualRiskCategory `yaml:"individual_risk_categories,omitempty" json:"individual_risk_categories,omitempty"` + RiskTracking 
map[string]RiskTracking `yaml:"risk_tracking,omitempty" json:"risk_tracking,omitempty"` + DiagramTweakNodesep int `yaml:"diagram_tweak_nodesep,omitempty" json:"diagram_tweak_nodesep,omitempty"` + DiagramTweakRanksep int `yaml:"diagram_tweak_ranksep,omitempty" json:"diagram_tweak_ranksep,omitempty"` + DiagramTweakEdgeLayout string `yaml:"diagram_tweak_edge_layout,omitempty" json:"diagram_tweak_edge_layout,omitempty"` + DiagramTweakSuppressEdgeLabels bool `yaml:"diagram_tweak_suppress_edge_labels,omitempty" json:"diagram_tweak_suppress_edge_labels,omitempty"` + DiagramTweakLayoutLeftToRight bool `yaml:"diagram_tweak_layout_left_to_right,omitempty" json:"diagram_tweak_layout_left_to_right,omitempty"` + DiagramTweakInvisibleConnectionsBetweenAssets []string `yaml:"diagram_tweak_invisible_connections_between_assets,omitempty" json:"diagram_tweak_invisible_connections_between_assets,omitempty"` + DiagramTweakSameRankAssets []string `yaml:"diagram_tweak_same_rank_assets,omitempty" json:"diagram_tweak_same_rank_assets,omitempty"` +} + +func (model *Model) Defaults() *Model { + *model = Model{ + Questions: make(map[string]string), + AbuseCases: make(map[string]string), + SecurityRequirements: make(map[string]string), + DataAssets: make(map[string]DataAsset), + TechnicalAssets: make(map[string]TechnicalAsset), + TrustBoundaries: make(map[string]TrustBoundary), + SharedRuntimes: make(map[string]SharedRuntime), + IndividualRiskCategories: make(map[string]IndividualRiskCategory), + RiskTracking: make(map[string]RiskTracking), + } + + return model +} + +func (model *Model) Load(inputFilename string) error { + modelYaml, readError := os.ReadFile(inputFilename) + if readError != nil { + log.Fatal("Unable to read model file: ", readError) + } + + unmarshalError := yaml.Unmarshal(modelYaml, &model) + if unmarshalError != nil { + log.Fatal("Unable to parse model yaml: ", unmarshalError) + } + + for _, includeFile := range model.Includes { + mergeError := 
model.Merge(filepath.Dir(inputFilename), includeFile) + if mergeError != nil { + log.Fatalf("Unable to merge model include %q: %v", includeFile, mergeError) + } + } + + return nil +} + +func (model *Model) Merge(dir string, includeFilename string) error { + modelYaml, readError := os.ReadFile(filepath.Join(dir, includeFilename)) + if readError != nil { + return fmt.Errorf("unable to read model file: %v", readError) + } + + var fileStructure map[string]any + unmarshalStructureError := yaml.Unmarshal(modelYaml, &fileStructure) + if unmarshalStructureError != nil { + return fmt.Errorf("unable to parse model structure: %v", unmarshalStructureError) + } + + var includedModel Model + unmarshalError := yaml.Unmarshal(modelYaml, &includedModel) + if unmarshalError != nil { + return fmt.Errorf("unable to parse model yaml: %v", unmarshalError) + } + + var mergeError error + for item := range fileStructure { + switch strings.ToLower(item) { + case strings.ToLower("includes"): + for _, includeFile := range includedModel.Includes { + mergeError = model.Merge(filepath.Join(dir, filepath.Dir(includeFilename)), includeFile) + if mergeError != nil { + return fmt.Errorf("failed to merge model include %q: %v", includeFile, mergeError) + } + } + break + + case strings.ToLower("threagile_version"): + model.ThreagileVersion, mergeError = new(Strings).MergeSingleton(model.ThreagileVersion, includedModel.ThreagileVersion) + if mergeError != nil { + return fmt.Errorf("failed to merge threagile version: %v", mergeError) + } + break + + case strings.ToLower("title"): + model.Title, mergeError = new(Strings).MergeSingleton(model.Title, includedModel.Title) + if mergeError != nil { + return fmt.Errorf("failed to merge title: %v", mergeError) + } + break + + case strings.ToLower("author"): + mergeError = model.Author.Merge(includedModel.Author) + if mergeError != nil { + return fmt.Errorf("failed to merge author: %v", mergeError) + } + break + + case strings.ToLower("contributors"): + 
model.Contributors, mergeError = new(Author).MergeList(append(model.Contributors, includedModel.Contributors...)) + if mergeError != nil { + return fmt.Errorf("failed to merge contributors: %v", mergeError) + } + break + + case strings.ToLower("date"): + model.Date, mergeError = new(Strings).MergeSingleton(model.Date, includedModel.Date) + if mergeError != nil { + return fmt.Errorf("failed to merge date: %v", mergeError) + } + break + + case strings.ToLower("application_description"): + mergeError = model.AppDescription.Merge(includedModel.AppDescription) + if mergeError != nil { + return fmt.Errorf("failed to merge application description: %v", mergeError) + } + break + + case strings.ToLower("business_overview"): + mergeError = model.BusinessOverview.Merge(includedModel.BusinessOverview) + if mergeError != nil { + return fmt.Errorf("failed to merge business overview: %v", mergeError) + } + break + + case strings.ToLower("technical_overview"): + mergeError = model.TechnicalOverview.Merge(includedModel.TechnicalOverview) + if mergeError != nil { + return fmt.Errorf("failed to merge technical overview: %v", mergeError) + } + break + + case strings.ToLower("business_criticality"): + model.BusinessCriticality, mergeError = new(Strings).MergeSingleton(model.BusinessCriticality, includedModel.BusinessCriticality) + if mergeError != nil { + return fmt.Errorf("failed to merge business criticality: %v", mergeError) + } + break + + case strings.ToLower("management_summary_comment"): + model.ManagementSummaryComment = new(Strings).MergeMultiline(model.ManagementSummaryComment, includedModel.ManagementSummaryComment) + break + + case strings.ToLower("security_requirements"): + model.SecurityRequirements, mergeError = new(Strings).MergeMap(model.SecurityRequirements, includedModel.SecurityRequirements) + if mergeError != nil { + return fmt.Errorf("failed to merge security requirements: %v", mergeError) + } + break + + case strings.ToLower("questions"): + model.Questions, mergeError = 
new(Strings).MergeMap(model.Questions, includedModel.Questions) + if mergeError != nil { + return fmt.Errorf("failed to merge questions: %v", mergeError) + } + break + + case strings.ToLower("abuse_cases"): + model.AbuseCases, mergeError = new(Strings).MergeMap(model.AbuseCases, includedModel.AbuseCases) + if mergeError != nil { + return fmt.Errorf("failed to merge abuse cases: %v", mergeError) + } + break + + case strings.ToLower("tags_available"): + model.TagsAvailable = new(Strings).MergeUniqueSlice(model.TagsAvailable, includedModel.TagsAvailable) + break + + case strings.ToLower("data_assets"): + model.DataAssets, mergeError = new(DataAsset).MergeMap(model.DataAssets, includedModel.DataAssets) + if mergeError != nil { + return fmt.Errorf("failed to merge data assets: %v", mergeError) + } + break + + case strings.ToLower("technical_assets"): + model.TechnicalAssets, mergeError = new(TechnicalAsset).MergeMap(model.TechnicalAssets, includedModel.TechnicalAssets) + if mergeError != nil { + return fmt.Errorf("failed to merge technical assets: %v", mergeError) + } + break + + case strings.ToLower("trust_boundaries"): + model.TrustBoundaries, mergeError = new(TrustBoundary).MergeMap(model.TrustBoundaries, includedModel.TrustBoundaries) + if mergeError != nil { + return fmt.Errorf("failed to merge trust boundaries: %v", mergeError) + } + break + + case strings.ToLower("shared_runtimes"): + model.SharedRuntimes, mergeError = new(SharedRuntime).MergeMap(model.SharedRuntimes, includedModel.SharedRuntimes) + if mergeError != nil { + return fmt.Errorf("failed to merge shared runtimes: %v", mergeError) + } + break + + case strings.ToLower("individual_risk_categories"): + model.IndividualRiskCategories, mergeError = new(IndividualRiskCategory).MergeMap(model.IndividualRiskCategories, includedModel.IndividualRiskCategories) + if mergeError != nil { + return fmt.Errorf("failed to merge risk categories: %v", mergeError) + } + break + + case strings.ToLower("risk_tracking"): + 
model.RiskTracking, mergeError = new(RiskTracking).MergeMap(model.RiskTracking, includedModel.RiskTracking) + if mergeError != nil { + return fmt.Errorf("failed to merge risk tracking: %v", mergeError) + } + break + + case "diagram_tweak_nodesep": + model.DiagramTweakNodesep = includedModel.DiagramTweakNodesep + break + + case "diagram_tweak_ranksep": + model.DiagramTweakRanksep = includedModel.DiagramTweakRanksep + break + + case "diagram_tweak_edge_layout": + model.DiagramTweakEdgeLayout = includedModel.DiagramTweakEdgeLayout + break + + case "diagram_tweak_suppress_edge_labels": + model.DiagramTweakSuppressEdgeLabels = includedModel.DiagramTweakSuppressEdgeLabels + break + + case "diagram_tweak_layout_left_to_right": + model.DiagramTweakLayoutLeftToRight = includedModel.DiagramTweakLayoutLeftToRight + break + + case "diagram_tweak_invisible_connections_between_assets": + model.DiagramTweakInvisibleConnectionsBetweenAssets = append(model.DiagramTweakInvisibleConnectionsBetweenAssets, includedModel.DiagramTweakInvisibleConnectionsBetweenAssets...) + sort.Strings(model.DiagramTweakInvisibleConnectionsBetweenAssets) + unique.Strings(&model.DiagramTweakInvisibleConnectionsBetweenAssets) + break + + case "diagram_tweak_same_rank_assets": + model.DiagramTweakSameRankAssets = append(model.DiagramTweakSameRankAssets, includedModel.DiagramTweakSameRankAssets...) 
+ sort.Strings(model.DiagramTweakSameRankAssets) + unique.Strings(&model.DiagramTweakSameRankAssets) + } + } + + return nil +} + +func (model *Model) AddTagToModelInput(tag string, dryRun bool, changes *[]string) { + tag = NormalizeTag(tag) + + if !slices.Contains(model.TagsAvailable, tag) { + *changes = append(*changes, "adding tag: "+tag) + if !dryRun { + model.TagsAvailable = append(model.TagsAvailable, tag) + } + } +} + +func NormalizeTag(tag string) string { + return strings.TrimSpace(strings.ToLower(tag)) +} diff --git a/pkg/input/overview.go b/pkg/input/overview.go new file mode 100644 index 00000000..77820b28 --- /dev/null +++ b/pkg/input/overview.go @@ -0,0 +1,20 @@ +package input + +type Overview struct { + Description string `yaml:"description,omitempty" json:"description,omitempty"` + Images []map[string]string `yaml:"images,omitempty" json:"images,omitempty"` // yes, array of map here, as array keeps the order of the image keys +} + +func (what *Overview) Merge(other Overview) error { + if len(what.Description) > 0 { + if len(other.Description) > 0 { + what.Description += lineSeparator + other.Description + } + } else { + what.Description = other.Description + } + + what.Images = append(what.Images, other.Images...) 
+ + return nil +} diff --git a/pkg/input/risk-category.go b/pkg/input/risk-category.go new file mode 100644 index 00000000..9d94047d --- /dev/null +++ b/pkg/input/risk-category.go @@ -0,0 +1,125 @@ +package input + +import ( + "fmt" +) + +type IndividualRiskCategory struct { + ID string `yaml:"id,omitempty" json:"id,omitempty"` + Description string `yaml:"description,omitempty" json:"description,omitempty"` + Impact string `yaml:"impact,omitempty" json:"impact,omitempty"` + ASVS string `yaml:"asvs,omitempty" json:"asvs,omitempty"` + CheatSheet string `yaml:"cheat_sheet,omitempty" json:"cheat_sheet,omitempty"` + Action string `yaml:"action,omitempty" json:"action,omitempty"` + Mitigation string `yaml:"mitigation,omitempty" json:"mitigation,omitempty"` + Check string `yaml:"check,omitempty" json:"check,omitempty"` + Function string `yaml:"function,omitempty" json:"function,omitempty"` + STRIDE string `yaml:"stride,omitempty" json:"stride,omitempty"` + DetectionLogic string `yaml:"detection_logic,omitempty" json:"detection_logic,omitempty"` + RiskAssessment string `yaml:"risk_assessment,omitempty" json:"risk_assessment,omitempty"` + FalsePositives string `yaml:"false_positives,omitempty" json:"false_positives,omitempty"` + ModelFailurePossibleReason bool `yaml:"model_failure_possible_reason,omitempty" json:"model_failure_possible_reason,omitempty"` + CWE int `yaml:"cwe,omitempty" json:"cwe,omitempty"` + RisksIdentified map[string]RiskIdentified `yaml:"risks_identified,omitempty" json:"risks_identified,omitempty"` +} + +func (what *IndividualRiskCategory) Merge(other IndividualRiskCategory) error { + var mergeError error + what.ID, mergeError = new(Strings).MergeSingleton(what.ID, other.ID) + if mergeError != nil { + return fmt.Errorf("failed to merge id: %v", mergeError) + } + + what.Description, mergeError = new(Strings).MergeSingleton(what.Description, other.Description) + if mergeError != nil { + return fmt.Errorf("failed to merge description: %v", mergeError) + } 
+ + what.Impact, mergeError = new(Strings).MergeSingleton(what.Impact, other.Impact) + if mergeError != nil { + return fmt.Errorf("failed to merge impact: %v", mergeError) + } + + what.ASVS, mergeError = new(Strings).MergeSingleton(what.ASVS, other.ASVS) + if mergeError != nil { + return fmt.Errorf("failed to merge asvs: %v", mergeError) + } + + what.CheatSheet, mergeError = new(Strings).MergeSingleton(what.CheatSheet, other.CheatSheet) + if mergeError != nil { + return fmt.Errorf("failed to merge cheat_sheet: %v", mergeError) + } + + what.Action, mergeError = new(Strings).MergeSingleton(what.Action, other.Action) + if mergeError != nil { + return fmt.Errorf("failed to merge action: %v", mergeError) + } + + what.Mitigation, mergeError = new(Strings).MergeSingleton(what.Mitigation, other.Mitigation) + if mergeError != nil { + return fmt.Errorf("failed to merge mitigation: %v", mergeError) + } + + what.Check, mergeError = new(Strings).MergeSingleton(what.Check, other.Check) + if mergeError != nil { + return fmt.Errorf("failed to merge check: %v", mergeError) + } + + what.Function, mergeError = new(Strings).MergeSingleton(what.Function, other.Function) + if mergeError != nil { + return fmt.Errorf("failed to merge function: %v", mergeError) + } + + what.STRIDE, mergeError = new(Strings).MergeSingleton(what.STRIDE, other.STRIDE) + if mergeError != nil { + return fmt.Errorf("failed to merge STRIDE: %v", mergeError) + } + + what.DetectionLogic, mergeError = new(Strings).MergeSingleton(what.DetectionLogic, other.DetectionLogic) + if mergeError != nil { + return fmt.Errorf("failed to merge detection_logic: %v", mergeError) + } + + what.RiskAssessment, mergeError = new(Strings).MergeSingleton(what.RiskAssessment, other.RiskAssessment) + if mergeError != nil { + return fmt.Errorf("failed to merge risk_assessment: %v", mergeError) + } + + what.FalsePositives, mergeError = new(Strings).MergeSingleton(what.FalsePositives, other.FalsePositives) + if mergeError != nil { + return 
fmt.Errorf("failed to merge false_positives: %v", mergeError) + } + + if what.ModelFailurePossibleReason == false { + what.ModelFailurePossibleReason = other.ModelFailurePossibleReason + } + + if what.CWE == 0 { + what.CWE = other.CWE + } + + what.RisksIdentified, mergeError = new(RiskIdentified).MergeMap(what.RisksIdentified, other.RisksIdentified) + if mergeError != nil { + return fmt.Errorf("failed to merge identified risks: %v", mergeError) + } + + return nil +} + +func (what *IndividualRiskCategory) MergeMap(first map[string]IndividualRiskCategory, second map[string]IndividualRiskCategory) (map[string]IndividualRiskCategory, error) { + for mapKey, mapValue := range second { + mapItem, ok := first[mapKey] + if ok { + mergeError := mapItem.Merge(mapValue) + if mergeError != nil { + return first, fmt.Errorf("failed to merge risk category %q: %v", mapKey, mergeError) + } + + first[mapKey] = mapItem + } else { + first[mapKey] = mapValue + } + } + + return first, nil +} diff --git a/pkg/input/risk-tracking.go b/pkg/input/risk-tracking.go new file mode 100644 index 00000000..59946047 --- /dev/null +++ b/pkg/input/risk-tracking.go @@ -0,0 +1,59 @@ +package input + +import "fmt" + +type RiskTracking struct { + Status string `yaml:"status,omitempty" json:"status,omitempty"` + Justification string `yaml:"justification,omitempty" json:"justification,omitempty"` + Ticket string `yaml:"ticket,omitempty" json:"ticket,omitempty"` + Date string `yaml:"date,omitempty" json:"date,omitempty"` + CheckedBy string `yaml:"checked_by,omitempty" json:"checked_by,omitempty"` +} + +func (what *RiskTracking) Merge(other RiskTracking) error { + var mergeError error + what.Status, mergeError = new(Strings).MergeSingleton(what.Status, other.Status) + if mergeError != nil { + return fmt.Errorf("failed to merge status: %v", mergeError) + } + + what.Justification, mergeError = new(Strings).MergeSingleton(what.Justification, other.Justification) + if mergeError != nil { + return 
fmt.Errorf("failed to merge justification: %v", mergeError) + } + + what.Ticket, mergeError = new(Strings).MergeSingleton(what.Ticket, other.Ticket) + if mergeError != nil { + return fmt.Errorf("failed to merge ticket: %v", mergeError) + } + + what.Date, mergeError = new(Strings).MergeSingleton(what.Date, other.Date) + if mergeError != nil { + return fmt.Errorf("failed to merge date: %v", mergeError) + } + + what.CheckedBy, mergeError = new(Strings).MergeSingleton(what.CheckedBy, other.CheckedBy) + if mergeError != nil { + return fmt.Errorf("failed to merge checked_by: %v", mergeError) + } + + return nil +} + +func (what *RiskTracking) MergeMap(first map[string]RiskTracking, second map[string]RiskTracking) (map[string]RiskTracking, error) { + for mapKey, mapValue := range second { + mapItem, ok := first[mapKey] + if ok { + mergeError := mapItem.Merge(mapValue) + if mergeError != nil { + return first, fmt.Errorf("failed to merge risk tracking %q: %v", mapKey, mergeError) + } + + first[mapKey] = mapItem + } else { + first[mapKey] = mapValue + } + } + + return first, nil +} diff --git a/pkg/input/risk.go b/pkg/input/risk.go new file mode 100644 index 00000000..d36e5174 --- /dev/null +++ b/pkg/input/risk.go @@ -0,0 +1,86 @@ +package input + +import "fmt" + +type RiskIdentified struct { + Severity string `yaml:"severity,omitempty" json:"severity,omitempty"` + ExploitationLikelihood string `yaml:"exploitation_likelihood,omitempty" json:"exploitation_likelihood,omitempty"` + ExploitationImpact string `yaml:"exploitation_impact,omitempty" json:"exploitation_impact,omitempty"` + DataBreachProbability string `yaml:"data_breach_probability,omitempty" json:"data_breach_probability,omitempty"` + DataBreachTechnicalAssets []string `yaml:"data_breach_technical_assets,omitempty" json:"data_breach_technical_assets,omitempty"` + MostRelevantDataAsset string `yaml:"most_relevant_data_asset,omitempty" json:"most_relevant_data_asset,omitempty"` + MostRelevantTechnicalAsset string 
`yaml:"most_relevant_technical_asset,omitempty" json:"most_relevant_technical_asset,omitempty"` + MostRelevantCommunicationLink string `yaml:"most_relevant_communication_link,omitempty" json:"most_relevant_communication_link,omitempty"` + MostRelevantTrustBoundary string `yaml:"most_relevant_trust_boundary,omitempty" json:"most_relevant_trust_boundary,omitempty"` + MostRelevantSharedRuntime string `yaml:"most_relevant_shared_runtime,omitempty" json:"most_relevant_shared_runtime,omitempty"` +} + +func (what *RiskIdentified) Merge(other RiskIdentified) error { + var mergeError error + what.Severity, mergeError = new(Strings).MergeSingleton(what.Severity, other.Severity) + if mergeError != nil { + return fmt.Errorf("failed to merge severity: %v", mergeError) + } + + what.ExploitationLikelihood, mergeError = new(Strings).MergeSingleton(what.ExploitationLikelihood, other.ExploitationLikelihood) + if mergeError != nil { + return fmt.Errorf("failed to merge exploitation_likelihood: %v", mergeError) + } + + what.ExploitationImpact, mergeError = new(Strings).MergeSingleton(what.ExploitationImpact, other.ExploitationImpact) + if mergeError != nil { + return fmt.Errorf("failed to merge exploitation_impact: %v", mergeError) + } + + what.DataBreachProbability, mergeError = new(Strings).MergeSingleton(what.DataBreachProbability, other.DataBreachProbability) + if mergeError != nil { + return fmt.Errorf("failed to merge data_breach_probability: %v", mergeError) + } + + what.DataBreachTechnicalAssets = new(Strings).MergeUniqueSlice(what.DataBreachTechnicalAssets, other.DataBreachTechnicalAssets) + + what.MostRelevantDataAsset, mergeError = new(Strings).MergeSingleton(what.MostRelevantDataAsset, other.MostRelevantDataAsset) + if mergeError != nil { + return fmt.Errorf("failed to merge most_relevant_data_asset: %v", mergeError) + } + + what.MostRelevantTechnicalAsset, mergeError = new(Strings).MergeSingleton(what.MostRelevantTechnicalAsset, other.MostRelevantTechnicalAsset) + if mergeError != nil { + 
return fmt.Errorf("failed to merge most_relevant_technical_asset: %v", mergeError) + } + + what.MostRelevantCommunicationLink, mergeError = new(Strings).MergeSingleton(what.MostRelevantCommunicationLink, other.MostRelevantCommunicationLink) + if mergeError != nil { + return fmt.Errorf("failed to merge most_relevant_communication_link: %v", mergeError) + } + + what.MostRelevantTrustBoundary, mergeError = new(Strings).MergeSingleton(what.MostRelevantTrustBoundary, other.MostRelevantTrustBoundary) + if mergeError != nil { + return fmt.Errorf("failed to merge most_relevant_trust_boundary: %v", mergeError) + } + + what.MostRelevantSharedRuntime, mergeError = new(Strings).MergeSingleton(what.MostRelevantSharedRuntime, other.MostRelevantSharedRuntime) + if mergeError != nil { + return fmt.Errorf("failed to merge most_relevant_shared_runtime: %v", mergeError) + } + + return nil +} + +func (what *RiskIdentified) MergeMap(first map[string]RiskIdentified, second map[string]RiskIdentified) (map[string]RiskIdentified, error) { + for mapKey, mapValue := range second { + mapItem, ok := first[mapKey] + if ok { + mergeError := mapItem.Merge(mapValue) + if mergeError != nil { + return first, fmt.Errorf("failed to merge risk %q: %v", mapKey, mergeError) + } + + first[mapKey] = mapItem + } else { + first[mapKey] = mapValue + } + } + + return first, nil +} diff --git a/pkg/input/shared-runtime.go b/pkg/input/shared-runtime.go new file mode 100644 index 00000000..caa43c2e --- /dev/null +++ b/pkg/input/shared-runtime.go @@ -0,0 +1,47 @@ +package input + +import "fmt" + +type SharedRuntime struct { + ID string `yaml:"id,omitempty" json:"id,omitempty"` + Description string `yaml:"description,omitempty" json:"description,omitempty"` + Tags []string `yaml:"tags,omitempty" json:"tags,omitempty"` + TechnicalAssetsRunning []string `yaml:"technical_assets_running,omitempty" json:"technical_assets_running,omitempty"` +} + +func (what *SharedRuntime) Merge(other SharedRuntime) error { + var 
mergeError error + what.ID, mergeError = new(Strings).MergeSingleton(what.ID, other.ID) + if mergeError != nil { + return fmt.Errorf("failed to merge id: %v", mergeError) + } + + what.Description, mergeError = new(Strings).MergeSingleton(what.Description, other.Description) + if mergeError != nil { + return fmt.Errorf("failed to merge description: %v", mergeError) + } + + what.Tags = new(Strings).MergeUniqueSlice(what.Tags, other.Tags) + + what.TechnicalAssetsRunning = new(Strings).MergeUniqueSlice(what.TechnicalAssetsRunning, other.TechnicalAssetsRunning) + + return nil +} + +func (what *SharedRuntime) MergeMap(first map[string]SharedRuntime, second map[string]SharedRuntime) (map[string]SharedRuntime, error) { + for mapKey, mapValue := range second { + mapItem, ok := first[mapKey] + if ok { + mergeError := mapItem.Merge(mapValue) + if mergeError != nil { + return first, fmt.Errorf("failed to merge shared runtime %q: %v", mapKey, mergeError) + } + + first[mapKey] = mapItem + } else { + first[mapKey] = mapValue + } + } + + return first, nil +} diff --git a/pkg/input/strings.go b/pkg/input/strings.go new file mode 100644 index 00000000..5a2d27f9 --- /dev/null +++ b/pkg/input/strings.go @@ -0,0 +1,68 @@ +package input + +import ( + "fmt" + "github.com/mpvl/unique" + "sort" + "strings" +) + +const ( + lineSeparator = "\n

\n" +) + +type Strings struct { +} + +func (what *Strings) MergeSingleton(first string, second string) (string, error) { + if len(first) > 0 { + if len(second) > 0 { + if !strings.EqualFold(first, second) { + return first, fmt.Errorf("conflicting string values: %q versus %q", first, second) + } + } + + return first, nil + } + + return second, nil +} + +func (what *Strings) MergeMultiline(first string, second string) string { + text := first + if len(first) > 0 { + if len(second) > 0 { + text = text + lineSeparator + second + } + } else { + text = second + } + + return text +} + +func (what *Strings) MergeMap(first map[string]string, second map[string]string) (map[string]string, error) { + for mapKey, mapValue := range second { + _, ok := first[mapKey] + if ok { + return nil, fmt.Errorf("duplicate item %q", mapKey) + } + + first[mapKey] = mapValue + } + + return first, nil +} + +func (what *Strings) MergeUniqueSlice(first []string, second []string) []string { + slice := append(first, second...) 
+ + for n := range slice { + slice[n] = strings.TrimSpace(strings.ToLower(slice[n])) + } + + sort.Strings(slice) + unique.Strings(&slice) + + return slice +} diff --git a/pkg/input/technical-asset.go b/pkg/input/technical-asset.go new file mode 100644 index 00000000..cfb9a7cf --- /dev/null +++ b/pkg/input/technical-asset.go @@ -0,0 +1,160 @@ +package input + +import "fmt" + +type TechnicalAsset struct { + ID string `yaml:"id,omitempty" json:"id,omitempty"` + Description string `yaml:"description,omitempty" json:"description,omitempty"` + Type string `yaml:"type,omitempty" json:"type,omitempty"` + Usage string `yaml:"usage,omitempty" json:"usage,omitempty"` + UsedAsClientByHuman bool `yaml:"used_as_client_by_human,omitempty" json:"used_as_client_by_human,omitempty"` + OutOfScope bool `yaml:"out_of_scope,omitempty" json:"out_of_scope,omitempty"` + JustificationOutOfScope string `yaml:"justification_out_of_scope,omitempty" json:"justification_out_of_scope,omitempty"` + Size string `yaml:"size,omitempty" json:"size,omitempty"` + Technology string `yaml:"technology,omitempty" json:"technology,omitempty"` + Tags []string `yaml:"tags,omitempty" json:"tags,omitempty"` + Internet bool `yaml:"internet,omitempty" json:"internet,omitempty"` + Machine string `yaml:"machine,omitempty" json:"machine,omitempty"` + Encryption string `yaml:"encryption,omitempty" json:"encryption,omitempty"` + Owner string `yaml:"owner,omitempty" json:"owner,omitempty"` + Confidentiality string `yaml:"confidentiality,omitempty" json:"confidentiality,omitempty"` + Integrity string `yaml:"integrity,omitempty" json:"integrity,omitempty"` + Availability string `yaml:"availability,omitempty" json:"availability,omitempty"` + JustificationCiaRating string `yaml:"justification_cia_rating,omitempty" json:"justification_cia_rating,omitempty"` + MultiTenant bool `yaml:"multi_tenant,omitempty" json:"multi_tenant,omitempty"` + Redundant bool `yaml:"redundant,omitempty" json:"redundant,omitempty"` + 
CustomDevelopedParts bool `yaml:"custom_developed_parts,omitempty" json:"custom_developed_parts,omitempty"` + DataAssetsProcessed []string `yaml:"data_assets_processed,omitempty" json:"data_assets_processed,omitempty"` + DataAssetsStored []string `yaml:"data_assets_stored,omitempty" json:"data_assets_stored,omitempty"` + DataFormatsAccepted []string `yaml:"data_formats_accepted,omitempty" json:"data_formats_accepted,omitempty"` + DiagramTweakOrder int `yaml:"diagram_tweak_order,omitempty" json:"diagram_tweak_order,omitempty"` + CommunicationLinks map[string]CommunicationLink `yaml:"communication_links,omitempty" json:"communication_links,omitempty"` +} + +func (what *TechnicalAsset) Merge(other TechnicalAsset) error { + var mergeError error + what.ID, mergeError = new(Strings).MergeSingleton(what.ID, other.ID) + if mergeError != nil { + return fmt.Errorf("failed to merge id: %v", mergeError) + } + + what.Description, mergeError = new(Strings).MergeSingleton(what.Description, other.Description) + if mergeError != nil { + return fmt.Errorf("failed to merge description: %v", mergeError) + } + + what.Type, mergeError = new(Strings).MergeSingleton(what.Type, other.Type) + if mergeError != nil { + return fmt.Errorf("failed to merge type: %v", mergeError) + } + + what.Usage, mergeError = new(Strings).MergeSingleton(what.Usage, other.Usage) + if mergeError != nil { + return fmt.Errorf("failed to merge usage: %v", mergeError) + } + + if what.UsedAsClientByHuman == false { + what.UsedAsClientByHuman = other.UsedAsClientByHuman + } + + if what.OutOfScope == false { + what.OutOfScope = other.OutOfScope + } + + what.JustificationOutOfScope = new(Strings).MergeMultiline(what.JustificationOutOfScope, other.JustificationOutOfScope) + + what.Size, mergeError = new(Strings).MergeSingleton(what.Size, other.Size) + if mergeError != nil { + return fmt.Errorf("failed to merge size: %v", mergeError) + } + + what.Technology, mergeError = new(Strings).MergeSingleton(what.Technology, 
other.Technology) + if mergeError != nil { + return fmt.Errorf("failed to merge technology: %v", mergeError) + } + + what.Tags = new(Strings).MergeUniqueSlice(what.Tags, other.Tags) + + if what.Internet == false { + what.Internet = other.Internet + } + + what.Machine, mergeError = new(Strings).MergeSingleton(what.Machine, other.Machine) + if mergeError != nil { + return fmt.Errorf("failed to merge machine: %v", mergeError) + } + + what.Encryption, mergeError = new(Strings).MergeSingleton(what.Encryption, other.Encryption) + if mergeError != nil { + return fmt.Errorf("failed to merge encryption: %v", mergeError) + } + + what.Owner, mergeError = new(Strings).MergeSingleton(what.Owner, other.Owner) + if mergeError != nil { + return fmt.Errorf("failed to merge owner: %v", mergeError) + } + + what.Confidentiality, mergeError = new(Strings).MergeSingleton(what.Confidentiality, other.Confidentiality) + if mergeError != nil { + return fmt.Errorf("failed to merge confidentiality: %v", mergeError) + } + + what.Integrity, mergeError = new(Strings).MergeSingleton(what.Integrity, other.Integrity) + if mergeError != nil { + return fmt.Errorf("failed to merge integrity: %v", mergeError) + } + + what.Availability, mergeError = new(Strings).MergeSingleton(what.Availability, other.Availability) + if mergeError != nil { + return fmt.Errorf("failed to merge availability: %v", mergeError) + } + + what.JustificationCiaRating = new(Strings).MergeMultiline(what.JustificationCiaRating, other.JustificationCiaRating) + + if what.MultiTenant == false { + what.MultiTenant = other.MultiTenant + } + + if what.Redundant == false { + what.Redundant = other.Redundant + } + + if what.CustomDevelopedParts == false { + what.CustomDevelopedParts = other.CustomDevelopedParts + } + + what.DataAssetsProcessed = new(Strings).MergeUniqueSlice(what.DataAssetsProcessed, other.DataAssetsProcessed) + + what.DataAssetsStored = new(Strings).MergeUniqueSlice(what.DataAssetsStored, other.DataAssetsStored) + + 
what.DataFormatsAccepted = new(Strings).MergeUniqueSlice(what.DataFormatsAccepted, other.DataFormatsAccepted) + + if what.DiagramTweakOrder == 0 { + what.DiagramTweakOrder = other.DiagramTweakOrder + } + + what.CommunicationLinks, mergeError = new(CommunicationLink).MergeMap(what.CommunicationLinks, other.CommunicationLinks) + if mergeError != nil { + return fmt.Errorf("failed to merge communication_links: %v", mergeError) + } + + return nil +} + +func (what *TechnicalAsset) MergeMap(first map[string]TechnicalAsset, second map[string]TechnicalAsset) (map[string]TechnicalAsset, error) { + for mapKey, mapValue := range second { + mapItem, ok := first[mapKey] + if ok { + mergeError := mapItem.Merge(mapValue) + if mergeError != nil { + return first, fmt.Errorf("failed to merge technical asset %q: %v", mapKey, mergeError) + } + + first[mapKey] = mapItem + } else { + first[mapKey] = mapValue + } + } + + return first, nil +} diff --git a/pkg/input/trust-boundary.go b/pkg/input/trust-boundary.go new file mode 100644 index 00000000..48c507e0 --- /dev/null +++ b/pkg/input/trust-boundary.go @@ -0,0 +1,56 @@ +package input + +import "fmt" + +type TrustBoundary struct { + ID string `yaml:"id,omitempty" json:"id,omitempty"` + Description string `yaml:"description,omitempty" json:"description,omitempty"` + Type string `yaml:"type,omitempty" json:"type,omitempty"` + Tags []string `yaml:"tags,omitempty" json:"tags,omitempty"` + TechnicalAssetsInside []string `yaml:"technical_assets_inside,omitempty" json:"technical_assets_inside,omitempty"` + TrustBoundariesNested []string `yaml:"trust_boundaries_nested,omitempty" json:"trust_boundaries_nested,omitempty"` +} + +func (what *TrustBoundary) Merge(other TrustBoundary) error { + var mergeError error + what.ID, mergeError = new(Strings).MergeSingleton(what.ID, other.ID) + if mergeError != nil { + return fmt.Errorf("failed to merge id: %v", mergeError) + } + + what.Description, mergeError = new(Strings).MergeSingleton(what.Description, 
other.Description) + if mergeError != nil { + return fmt.Errorf("failed to merge description: %v", mergeError) + } + + what.Type, mergeError = new(Strings).MergeSingleton(what.Type, other.Type) + if mergeError != nil { + return fmt.Errorf("failed to merge type: %v", mergeError) + } + + what.Tags = new(Strings).MergeUniqueSlice(what.Tags, other.Tags) + + what.TechnicalAssetsInside = new(Strings).MergeUniqueSlice(what.TechnicalAssetsInside, other.TechnicalAssetsInside) + + what.TrustBoundariesNested = new(Strings).MergeUniqueSlice(what.TrustBoundariesNested, other.TrustBoundariesNested) + + return nil +} + +func (what *TrustBoundary) MergeMap(first map[string]TrustBoundary, second map[string]TrustBoundary) (map[string]TrustBoundary, error) { + for mapKey, mapValue := range second { + mapItem, ok := first[mapKey] + if ok { + mergeError := mapItem.Merge(mapValue) + if mergeError != nil { + return first, fmt.Errorf("failed to merge trust boundary %q: %v", mapKey, mergeError) + } + + first[mapKey] = mapItem + } else { + first[mapKey] = mapValue + } + } + + return first, nil +} diff --git a/pkg/macros/add-build-pipeline-macro.go b/pkg/macros/add-build-pipeline-macro.go index 27fdf561..d032d344 100644 --- a/pkg/macros/add-build-pipeline-macro.go +++ b/pkg/macros/add-build-pipeline-macro.go @@ -272,15 +272,15 @@ func (m *addBuildPipeline) Execute(modelInput *input.Model, model *types.ParsedM func (m *addBuildPipeline) applyChange(modelInput *input.Model, parsedModel *types.ParsedModel, changeLogCollector *[]string, dryRun bool) (message string, validResult bool, err error) { var serverSideTechAssets = make([]string, 0) // ################################################ - input.AddTagToModelInput(modelInput, m.macroState["source-repository"][0], dryRun, changeLogCollector) - input.AddTagToModelInput(modelInput, m.macroState["build-pipeline"][0], dryRun, changeLogCollector) - input.AddTagToModelInput(modelInput, m.macroState["artifact-registry"][0], dryRun, 
changeLogCollector) + modelInput.AddTagToModelInput(m.macroState["source-repository"][0], dryRun, changeLogCollector) + modelInput.AddTagToModelInput(m.macroState["build-pipeline"][0], dryRun, changeLogCollector) + modelInput.AddTagToModelInput(m.macroState["artifact-registry"][0], dryRun, changeLogCollector) if m.containerTechUsed { - input.AddTagToModelInput(modelInput, m.macroState["container-registry"][0], dryRun, changeLogCollector) - input.AddTagToModelInput(modelInput, m.macroState["container-platform"][0], dryRun, changeLogCollector) + modelInput.AddTagToModelInput(m.macroState["container-registry"][0], dryRun, changeLogCollector) + modelInput.AddTagToModelInput(m.macroState["container-platform"][0], dryRun, changeLogCollector) } if m.codeInspectionUsed { - input.AddTagToModelInput(modelInput, m.macroState["code-inspection-platform"][0], dryRun, changeLogCollector) + modelInput.AddTagToModelInput(m.macroState["code-inspection-platform"][0], dryRun, changeLogCollector) } sourceRepoID := types.MakeID(m.macroState["source-repository"][0]) + "-sourcecode-repository" diff --git a/pkg/macros/add-vault-macro.go b/pkg/macros/add-vault-macro.go index cf8edc03..f879f620 100644 --- a/pkg/macros/add-vault-macro.go +++ b/pkg/macros/add-vault-macro.go @@ -186,7 +186,7 @@ func (m *addVaultMacro) Execute(modelInput *input.Model, parsedModel *types.Pars } func (m *addVaultMacro) applyChange(modelInput *input.Model, parsedModel *types.ParsedModel, changeLogCollector *[]string, dryRun bool) (message string, validResult bool, err error) { - input.AddTagToModelInput(modelInput, m.macroState["vault-name"][0], dryRun, changeLogCollector) + modelInput.AddTagToModelInput(m.macroState["vault-name"][0], dryRun, changeLogCollector) var serverSideTechAssets = make([]string, 0) diff --git a/pkg/macros/macros.go b/pkg/macros/macros.go index fbc7977b..4fb1ffb2 100644 --- a/pkg/macros/macros.go +++ b/pkg/macros/macros.go @@ -31,7 +31,7 @@ func ListBuiltInMacros() []Macros { 
NewBuildPipeline(), NewAddVault(), NewPrettyPrint(), - NewRemoveUnusedTags(), + newRemoveUnusedTags(), NewSeedRiskTracking(), NewSeedTags(), } diff --git a/pkg/macros/remove-unused-tags-macro.go b/pkg/macros/remove-unused-tags-macro.go index 23072ca0..28c6b3eb 100644 --- a/pkg/macros/remove-unused-tags-macro.go +++ b/pkg/macros/remove-unused-tags-macro.go @@ -1,6 +1,7 @@ package macros import ( + "github.com/mpvl/unique" "sort" "strconv" @@ -11,7 +12,7 @@ import ( type removeUnusedTagsMacro struct { } -func NewRemoveUnusedTags() *removeUnusedTagsMacro { +func newRemoveUnusedTags() *removeUnusedTagsMacro { return &removeUnusedTagsMacro{} } @@ -40,45 +41,24 @@ func (*removeUnusedTagsMacro) GetFinalChangeImpact(_ *input.Model, _ *types.Pars } func (*removeUnusedTagsMacro) Execute(modelInput *input.Model, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { - tagUsageMap := make(map[string]bool) - for _, tag := range parsedModel.TagsAvailable { - tagUsageMap[tag] = false // false = tag is not used + modelInput.TagsAvailable = parsedModel.TagsAvailable + for _, asset := range parsedModel.DataAssets { + modelInput.TagsAvailable = append(modelInput.TagsAvailable, asset.Tags...) } - for _, dA := range parsedModel.DataAssets { - for _, tag := range dA.Tags { - tagUsageMap[tag] = true // true = tag is used + for _, asset := range parsedModel.TechnicalAssets { + modelInput.TagsAvailable = append(modelInput.TagsAvailable, asset.Tags...) + for _, link := range asset.CommunicationLinks { + modelInput.TagsAvailable = append(modelInput.TagsAvailable, link.Tags...) 
} } - for _, tA := range parsedModel.TechnicalAssets { - for _, tag := range tA.Tags { - tagUsageMap[tag] = true // true = tag is used - } - for _, cL := range tA.CommunicationLinks { - for _, tag := range cL.Tags { - tagUsageMap[tag] = true // true = tag is used - } - } - } - for _, tB := range parsedModel.TrustBoundaries { - for _, tag := range tB.Tags { - tagUsageMap[tag] = true // true = tag is used - } + for _, boundary := range parsedModel.TrustBoundaries { + modelInput.TagsAvailable = append(modelInput.TagsAvailable, boundary.Tags...) } - for _, sR := range parsedModel.SharedRuntimes { - for _, tag := range sR.Tags { - tagUsageMap[tag] = true // true = tag is used - } - } - counter := 0 - tagsSorted := make([]string, 0) - for tag, used := range tagUsageMap { - if used { - tagsSorted = append(tagsSorted, tag) - } else { - counter++ - } + for _, runtime := range parsedModel.SharedRuntimes { + modelInput.TagsAvailable = append(modelInput.TagsAvailable, runtime.Tags...) } - sort.Strings(tagsSorted) - modelInput.TagsAvailable = tagsSorted - return "Model file removal of " + strconv.Itoa(counter) + " unused tags successful", true, nil + count := len(modelInput.TagsAvailable) + unique.Strings(&modelInput.TagsAvailable) + sort.Strings(modelInput.TagsAvailable) + return "Model file removal of " + strconv.Itoa(count-len(modelInput.TagsAvailable)) + " unused tags successful", true, nil } diff --git a/pkg/macros/seed-tags-macro.go b/pkg/macros/seed-tags-macro.go index 48d43159..88cf50b4 100644 --- a/pkg/macros/seed-tags-macro.go +++ b/pkg/macros/seed-tags-macro.go @@ -1,6 +1,7 @@ package macros import ( + "github.com/mpvl/unique" "sort" "strconv" @@ -40,18 +41,11 @@ func (*seedTagsMacro) GetFinalChangeImpact(_ *input.Model, _ *types.ParsedModel) } func (*seedTagsMacro) Execute(modelInput *input.Model, parsedModel *types.ParsedModel) (message string, validResult bool, err error) { - tagMap := make(map[string]bool) - for k, v := range parsedModel.AllSupportedTags { - 
tagMap[k] = v + modelInput.TagsAvailable = parsedModel.TagsAvailable + for tag := range parsedModel.AllSupportedTags { + modelInput.TagsAvailable = append(modelInput.TagsAvailable, tag) } - for _, tagFromModel := range parsedModel.TagsAvailable { - tagMap[tagFromModel] = true - } - tagsSorted := make([]string, 0) - for tag := range tagMap { - tagsSorted = append(tagsSorted, tag) - } - sort.Strings(tagsSorted) - modelInput.TagsAvailable = tagsSorted + unique.Strings(&modelInput.TagsAvailable) + sort.Strings(modelInput.TagsAvailable) return "Model file seeding with " + strconv.Itoa(len(parsedModel.AllSupportedTags)) + " tags successful", true, nil } diff --git a/pkg/model/parse.go b/pkg/model/parse.go index 46894830..5bf86072 100644 --- a/pkg/model/parse.go +++ b/pkg/model/parse.go @@ -24,21 +24,24 @@ func ParseModel(modelInput *input.Model, builtinRiskRules map[string]risks.RiskR var parseError error reportDate, parseError = time.Parse("2006-01-02", modelInput.Date) if parseError != nil { - return nil, errors.New("unable to parse 'date' value of model file") + return nil, errors.New("unable to parse 'date' value of model file (expected format: '2006-01-02')") } } parsedModel := types.ParsedModel{ - Author: modelInput.Author, + ThreagileVersion: modelInput.ThreagileVersion, Title: modelInput.Title, - Date: reportDate, - ManagementSummaryComment: modelInput.ManagementSummaryComment, - BusinessCriticality: businessCriticality, + Author: modelInput.Author, + Contributors: modelInput.Contributors, + Date: types.Date{Time: reportDate}, + AppDescription: removePathElementsFromImageFiles(modelInput.AppDescription), BusinessOverview: removePathElementsFromImageFiles(modelInput.BusinessOverview), TechnicalOverview: removePathElementsFromImageFiles(modelInput.TechnicalOverview), + BusinessCriticality: businessCriticality, + ManagementSummaryComment: modelInput.ManagementSummaryComment, + SecurityRequirements: modelInput.SecurityRequirements, Questions: modelInput.Questions, 
AbuseCases: modelInput.AbuseCases, - SecurityRequirements: modelInput.SecurityRequirements, TagsAvailable: lowerCaseAndTrim(modelInput.TagsAvailable), DiagramTweakNodesep: modelInput.DiagramTweakNodesep, DiagramTweakRanksep: modelInput.DiagramTweakRanksep, @@ -50,11 +53,11 @@ func ParseModel(modelInput *input.Model, builtinRiskRules map[string]risks.RiskR } parsedModel.CommunicationLinks = make(map[string]types.CommunicationLink) + parsedModel.AllSupportedTags = make(map[string]bool) parsedModel.IncomingTechnicalCommunicationLinksMappedByTargetId = make(map[string][]types.CommunicationLink) parsedModel.DirectContainingTrustBoundaryMappedByTechnicalAssetId = make(map[string]types.TrustBoundary) parsedModel.GeneratedRisksByCategory = make(map[string][]types.Risk) parsedModel.GeneratedRisksBySyntheticId = make(map[string]types.Risk) - parsedModel.AllSupportedTags = make(map[string]bool) if parsedModel.DiagramTweakNodesep == 0 { parsedModel.DiagramTweakNodesep = 2 @@ -447,7 +450,7 @@ func ParseModel(modelInput *input.Model, builtinRiskRules map[string]risks.RiskR } // Individual Risk Categories (just used as regular risk categories) =============================================================================== - parsedModel.IndividualRiskCategories = make(map[string]types.RiskCategory) + // parsedModel.IndividualRiskCategories = make(map[string]types.RiskCategory) for title, individualCategory := range modelInput.IndividualRiskCategories { id := fmt.Sprintf("%v", individualCategory.ID) @@ -564,7 +567,7 @@ func ParseModel(modelInput *input.Model, builtinRiskRules map[string]risks.RiskR } } - individualRiskInstance := types.Risk{ + parsedModel.GeneratedRisksByCategory[cat.Id] = append(parsedModel.GeneratedRisksByCategory[cat.Id], types.Risk{ SyntheticId: createSyntheticId(cat.Id, mostRelevantDataAssetId, mostRelevantTechnicalAssetId, mostRelevantCommunicationLinkId, mostRelevantTrustBoundaryId, mostRelevantSharedRuntimeId), Title: fmt.Sprintf("%v", title), CategoryId: 
cat.Id, @@ -578,8 +581,7 @@ func ParseModel(modelInput *input.Model, builtinRiskRules map[string]risks.RiskR MostRelevantSharedRuntimeId: mostRelevantSharedRuntimeId, DataBreachProbability: dataBreachProbability, DataBreachTechnicalAssetIDs: dataBreachTechnicalAssetIDs, - } - parsedModel.GeneratedRisksByCategory[cat.Id] = append(parsedModel.GeneratedRisksByCategory[cat.Id], individualRiskInstance) + }) } } } @@ -609,7 +611,7 @@ func ParseModel(modelInput *input.Model, builtinRiskRules map[string]risks.RiskR Justification: justification, CheckedBy: checkedBy, Ticket: ticket, - Date: date, + Date: types.Date{Time: date}, Status: status, } @@ -626,6 +628,25 @@ func ParseModel(modelInput *input.Model, builtinRiskRules map[string]risks.RiskR } } + /* + data, _ := json.MarshalIndent(parsedModel, "", " ") + _ = os.WriteFile(filepath.Join("all.json"), data, 0644) + */ + + /** + inYamlData, _ := yaml.Marshal(modelInput) + _ = os.WriteFile(filepath.Join("in.yaml"), inYamlData, 0644) + + inJsonData, _ := json.MarshalIndent(modelInput, "", " ") + _ = os.WriteFile(filepath.Join("in.json"), inJsonData, 0644) + + outYamlData, _ := yaml.Marshal(parsedModel) + _ = os.WriteFile(filepath.Join("out.yaml"), outYamlData, 0644) + + outJsonData, _ := json.MarshalIndent(parsedModel, "", " ") + _ = os.WriteFile(filepath.Join("out.json"), outJsonData, 0644) + /**/ + return &parsedModel, nil } diff --git a/pkg/model/runner.go b/pkg/model/runner.go index a8b6da0f..81b61168 100644 --- a/pkg/model/runner.go +++ b/pkg/model/runner.go @@ -76,13 +76,12 @@ func (p *runner) Run(in any, out any, parameters ...string) error { } waitError := plugin.Wait() + p.ErrorOutput = stderrBuf.String() if waitError != nil { return fmt.Errorf("%v: %v", waitError, p.ErrorOutput) } - p.ErrorOutput = stderrBuf.String() stdout := stdoutBuf.Bytes() - unmarshalError := json.Unmarshal(stdout, &p.Out) if unmarshalError != nil { return unmarshalError diff --git a/pkg/report/report.go b/pkg/report/report.go index 
6ef60e87..e0a936f5 100644 --- a/pkg/report/report.go +++ b/pkg/report/report.go @@ -61,6 +61,13 @@ func (r *pdfReporter) WriteReportPDF(reportFilename string, customRiskRules map[string]*model.CustomRisk, tempFolder string, model *types.ParsedModel) error { + defer func() { + value := recover() + if value != nil { + fmt.Printf("error creating PDF report: %v", value) + } + }() + r.initReport() r.createPdfAndInitMetadata(model) r.parseBackgroundTemplate(templateFilename) @@ -177,7 +184,7 @@ func (r *pdfReporter) createCover(parsedModel *types.ParsedModel) { r.pdf.SetFont("Helvetica", "", 12) reportDate := parsedModel.Date if reportDate.IsZero() { - reportDate = time.Now() + reportDate = types.Date{Time: time.Now()} } r.pdf.Text(40.7, 145, reportDate.Format("2 January 2006")) r.pdf.Text(40.7, 153, uni(parsedModel.Author.Name)) diff --git a/pkg/security/types/authentication.go b/pkg/security/types/authentication.go index adba1e13..95db6aeb 100644 --- a/pkg/security/types/authentication.go +++ b/pkg/security/types/authentication.go @@ -8,6 +8,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -69,13 +70,42 @@ func (what Authentication) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *Authentication) UnmarshalJSON([]byte) error { +func (what *Authentication) UnmarshalJSON(data []byte) error { + var text string + unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what Authentication) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *Authentication) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what Authentication) find(value string) (Authentication, error) { for index, 
description := range AuthenticationTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = Authentication(index) - return nil + if strings.EqualFold(value, description.Name) { + return Authentication(index), nil } } - return fmt.Errorf("unknown authentication value %q", int(*what)) + return Authentication(0), fmt.Errorf("unknown authentication value %q", value) } diff --git a/pkg/security/types/authorization.go b/pkg/security/types/authorization.go index 3e376e88..eb6c0a87 100644 --- a/pkg/security/types/authorization.go +++ b/pkg/security/types/authorization.go @@ -8,6 +8,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -56,13 +57,42 @@ func (what Authorization) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *Authorization) UnmarshalJSON([]byte) error { +func (what *Authorization) UnmarshalJSON(data []byte) error { + var text string + unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what Authorization) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *Authorization) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what Authorization) find(value string) (Authorization, error) { for index, description := range AuthorizationTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = Authorization(index) - return nil + if strings.EqualFold(value, description.Name) { + return Authorization(index), nil } } - return fmt.Errorf("unknown authorization value %q", int(*what)) + return Authorization(0), fmt.Errorf("unknown authorization value %q", value) } diff --git 
a/pkg/security/types/communication_link.go b/pkg/security/types/communication_link.go index 5576e68c..cabf4255 100644 --- a/pkg/security/types/communication_link.go +++ b/pkg/security/types/communication_link.go @@ -9,23 +9,23 @@ import ( ) type CommunicationLink struct { - Id string `json:"id,omitempty"` - SourceId string `json:"source_id,omitempty"` - TargetId string `json:"target_id,omitempty"` - Title string `json:"title,omitempty"` - Description string `json:"description,omitempty"` - Protocol Protocol `json:"protocol,omitempty"` - Tags []string `json:"tags,omitempty"` - VPN bool `json:"vpn,omitempty"` - IpFiltered bool `json:"ip_filtered,omitempty"` - Readonly bool `json:"readonly,omitempty"` - Authentication Authentication `json:"authentication,omitempty"` - Authorization Authorization `json:"authorization,omitempty"` - Usage Usage `json:"usage,omitempty"` - DataAssetsSent []string `json:"data_assets_sent,omitempty"` - DataAssetsReceived []string `json:"data_assets_received,omitempty"` - DiagramTweakWeight int `json:"diagram_tweak_weight,omitempty"` - DiagramTweakConstraint bool `json:"diagram_tweak_constraint,omitempty"` + Id string `json:"id,omitempty" yaml:"id,omitempty"` + SourceId string `json:"source_id,omitempty" yaml:"source_id,omitempty"` + TargetId string `json:"target_id,omitempty" yaml:"target_id,omitempty"` + Title string `json:"title,omitempty" yaml:"title,omitempty"` + Description string `json:"description,omitempty" yaml:"description,omitempty"` + Protocol Protocol `json:"protocol,omitempty" yaml:"protocol,omitempty"` + Tags []string `json:"tags,omitempty" yaml:"tags,omitempty"` + VPN bool `json:"vpn,omitempty" yaml:"vpn,omitempty"` + IpFiltered bool `json:"ip_filtered,omitempty" yaml:"ip_filtered,omitempty"` + Readonly bool `json:"readonly,omitempty" yaml:"readonly,omitempty"` + Authentication Authentication `json:"authentication,omitempty" yaml:"authentication,omitempty"` + Authorization Authorization `json:"authorization,omitempty" 
yaml:"authorization,omitempty"` + Usage Usage `json:"usage,omitempty" yaml:"usage,omitempty"` + DataAssetsSent []string `json:"data_assets_sent,omitempty" yaml:"data_assets_sent,omitempty"` + DataAssetsReceived []string `json:"data_assets_received,omitempty" yaml:"data_assets_received,omitempty"` + DiagramTweakWeight int `json:"diagram_tweak_weight,omitempty" yaml:"diagram_tweak_weight,omitempty"` + DiagramTweakConstraint bool `json:"diagram_tweak_constraint,omitempty" yaml:"diagram_tweak_constraint,omitempty"` } func (what CommunicationLink) IsTaggedWithAny(tags ...string) bool { diff --git a/pkg/security/types/confidentiality.go b/pkg/security/types/confidentiality.go index 0be71d3d..fad041ea 100644 --- a/pkg/security/types/confidentiality.go +++ b/pkg/security/types/confidentiality.go @@ -8,6 +8,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -96,13 +97,42 @@ func (what Confidentiality) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *Confidentiality) UnmarshalJSON([]byte) error { +func (what *Confidentiality) UnmarshalJSON(data []byte) error { + var text string + unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what Confidentiality) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *Confidentiality) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what Confidentiality) find(value string) (Confidentiality, error) { for index, description := range ConfidentialityTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = Confidentiality(index) - return nil + if strings.EqualFold(value, description.Name) { + return 
Confidentiality(index), nil } } - return fmt.Errorf("unknown confidentiality value %q", int(*what)) + return Confidentiality(0), fmt.Errorf("unknown confidentiality value %q", value) } diff --git a/pkg/security/types/criticality.go b/pkg/security/types/criticality.go index a3a1511a..a9e221b7 100644 --- a/pkg/security/types/criticality.go +++ b/pkg/security/types/criticality.go @@ -8,6 +8,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -96,13 +97,42 @@ func (what Criticality) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *Criticality) UnmarshalJSON([]byte) error { +func (what *Criticality) UnmarshalJSON(data []byte) error { + var text string + unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what Criticality) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *Criticality) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what Criticality) find(value string) (Criticality, error) { for index, description := range CriticalityTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = Criticality(index) - return nil + if strings.EqualFold(value, description.Name) { + return Criticality(index), nil } } - return fmt.Errorf("unknown criticality value %q", int(*what)) + return Criticality(0), fmt.Errorf("unknown criticality value %q", value) } diff --git a/pkg/security/types/data_asset.go b/pkg/security/types/data_asset.go index 899a0c63..a95f80ca 100644 --- a/pkg/security/types/data_asset.go +++ b/pkg/security/types/data_asset.go @@ -9,18 +9,18 @@ import ( ) type DataAsset struct { - Id string `yaml:"id" json:"id"` // TODO: tag 
here still required? - Title string `yaml:"title" json:"title"` // TODO: tag here still required? - Description string `yaml:"description" json:"description"` // TODO: tag here still required? - Usage Usage `yaml:"usage" json:"usage"` - Tags []string `yaml:"tags" json:"tags"` - Origin string `yaml:"origin" json:"origin"` - Owner string `yaml:"owner" json:"owner"` - Quantity Quantity `yaml:"quantity" json:"quantity"` - Confidentiality Confidentiality `yaml:"confidentiality" json:"confidentiality"` - Integrity Criticality `yaml:"integrity" json:"integrity"` - Availability Criticality `yaml:"availability" json:"availability"` - JustificationCiaRating string `yaml:"justification_cia_rating" json:"justification_cia_rating"` + Id string `yaml:"id,omitempty" json:"id,omitempty"` // TODO: tag here still required? + Title string `yaml:"title,omitempty" json:"title,omitempty"` // TODO: tag here still required? + Description string `yaml:"description,omitempty" json:"description,omitempty"` // TODO: tag here still required? 
+ Usage Usage `yaml:"usage,omitempty" json:"usage,omitempty"` + Tags []string `yaml:"tags,omitempty" json:"tags,omitempty"` + Origin string `yaml:"origin,omitempty" json:"origin,omitempty"` + Owner string `yaml:"owner,omitempty" json:"owner,omitempty"` + Quantity Quantity `yaml:"quantity,omitempty" json:"quantity,omitempty"` + Confidentiality Confidentiality `yaml:"confidentiality,omitempty" json:"confidentiality,omitempty"` + Integrity Criticality `yaml:"integrity,omitempty" json:"integrity,omitempty"` + Availability Criticality `yaml:"availability,omitempty" json:"availability,omitempty"` + JustificationCiaRating string `yaml:"justification_cia_rating,omitempty" json:"justification_cia_rating,omitempty"` } func (what DataAsset) IsTaggedWithAny(tags ...string) bool { diff --git a/pkg/security/types/data_breach_probability.go b/pkg/security/types/data_breach_probability.go index bf007a49..1bd01a1f 100644 --- a/pkg/security/types/data_breach_probability.go +++ b/pkg/security/types/data_breach_probability.go @@ -8,6 +8,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -64,13 +65,42 @@ func (what DataBreachProbability) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *DataBreachProbability) UnmarshalJSON([]byte) error { +func (what *DataBreachProbability) UnmarshalJSON(data []byte) error { + var text string + unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what DataBreachProbability) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *DataBreachProbability) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what DataBreachProbability) find(value string) 
(DataBreachProbability, error) { for index, description := range DataBreachProbabilityTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = DataBreachProbability(index) - return nil + if strings.EqualFold(value, description.Name) { + return DataBreachProbability(index), nil } } - return fmt.Errorf("unknown data breach probability value %q", int(*what)) + return DataBreachProbability(0), fmt.Errorf("unknown data breach probability value %q", value) } diff --git a/pkg/security/types/data_format.go b/pkg/security/types/data_format.go index 121c3c90..52b1aaae 100644 --- a/pkg/security/types/data_format.go +++ b/pkg/security/types/data_format.go @@ -8,6 +8,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -71,15 +72,44 @@ func (what DataFormat) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *DataFormat) UnmarshalJSON([]byte) error { +func (what *DataFormat) UnmarshalJSON(data []byte) error { + var text string + unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what DataFormat) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *DataFormat) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what DataFormat) find(value string) (DataFormat, error) { for index, description := range DataFormatTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = DataFormat(index) - return nil + if strings.EqualFold(value, description.Name) { + return DataFormat(index), nil } } - return fmt.Errorf("unknown data format value %q", int(*what)) + return DataFormat(0), fmt.Errorf("unknown data format value 
%q", value) } type ByDataFormatAcceptedSort []DataFormat diff --git a/pkg/security/types/date.go b/pkg/security/types/date.go new file mode 100644 index 00000000..d800963f --- /dev/null +++ b/pkg/security/types/date.go @@ -0,0 +1,40 @@ +package types + +import ( + "gopkg.in/yaml.v3" + "time" +) + +type Date struct { + time.Time +} + +func (what Date) MarshalJSON() ([]byte, error) { + return []byte(what.Format(`"2006-01-02"`)), nil +} + +func (what *Date) UnmarshalJSON(bytes []byte) error { + date, parseError := time.Parse(`"2006-01-02"`, string(bytes)) + if parseError != nil { + return parseError + } + + what.Time = date + + return nil +} + +func (what Date) MarshalYAML() (interface{}, error) { + return what.Format(`2006-01-02`), nil +} + +func (what *Date) UnmarshalYAML(node *yaml.Node) error { + date, parseError := time.Parse(`2006-01-02`, node.Value) + if parseError != nil { + return parseError + } + + what.Time = date + + return nil +} diff --git a/pkg/security/types/encryption_style.go b/pkg/security/types/encryption_style.go index 257b1080..32afa9f2 100644 --- a/pkg/security/types/encryption_style.go +++ b/pkg/security/types/encryption_style.go @@ -8,6 +8,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -66,13 +67,42 @@ func (what EncryptionStyle) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *EncryptionStyle) UnmarshalJSON([]byte) error { +func (what *EncryptionStyle) UnmarshalJSON(data []byte) error { + var text string + unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what EncryptionStyle) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *EncryptionStyle) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return 
findError + } + + *what = value + return nil +} + +func (what EncryptionStyle) find(value string) (EncryptionStyle, error) { for index, description := range EncryptionStyleTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = EncryptionStyle(index) - return nil + if strings.EqualFold(value, description.Name) { + return EncryptionStyle(index), nil } } - return fmt.Errorf("unknown encryption style value %q", int(*what)) + return EncryptionStyle(0), fmt.Errorf("unknown encryption style value %q", value) } diff --git a/pkg/security/types/model.go b/pkg/security/types/model.go index ff735391..97128f7a 100644 --- a/pkg/security/types/model.go +++ b/pkg/security/types/model.go @@ -7,51 +7,54 @@ package types import ( "errors" "fmt" + "github.com/threagile/threagile/pkg/input" "regexp" + "slices" "sort" "strings" - "time" - - "github.com/threagile/threagile/pkg/input" ) // TODO: move model out of types package and // rename parsedModel to model or something like this to emphasize that it's just a model // maybe type ParsedModel struct { - Author input.Author `json:"author" yaml:"author"` - Title string `json:"title,omitempty" yaml:"title"` - Date time.Time `json:"date" yaml:"date"` - ManagementSummaryComment string `json:"management_summary_comment,omitempty" yaml:"management_summary_comment"` - BusinessOverview input.Overview `json:"business_overview" yaml:"business_overview"` - TechnicalOverview input.Overview `json:"technical_overview" yaml:"technical_overview"` - BusinessCriticality Criticality `json:"business_criticality,omitempty" yaml:"business_criticality"` - SecurityRequirements map[string]string `json:"security_requirements,omitempty" yaml:"security_requirements"` - Questions map[string]string `json:"questions,omitempty" yaml:"questions"` - AbuseCases map[string]string `json:"abuse_cases,omitempty" yaml:"abuse_cases"` - TagsAvailable []string `json:"tags_available,omitempty" yaml:"tags_available"` - DataAssets 
map[string]DataAsset `json:"data_assets,omitempty" yaml:"data_assets"` - TechnicalAssets map[string]TechnicalAsset `json:"technical_assets,omitempty" yaml:"technical_assets"` - TrustBoundaries map[string]TrustBoundary `json:"trust_boundaries,omitempty" yaml:"trust_boundaries"` - SharedRuntimes map[string]SharedRuntime `json:"shared_runtimes,omitempty" yaml:"shared_runtimes"` - IndividualRiskCategories map[string]RiskCategory `json:"individual_risk_categories,omitempty" yaml:"individual_risk_categories"` - BuiltInRiskCategories map[string]RiskCategory `json:"built_in_risk_categories,omitempty" yaml:"built_in_risk_categories"` - RiskTracking map[string]RiskTracking `json:"risk_tracking,omitempty" yaml:"risk_tracking"` - CommunicationLinks map[string]CommunicationLink `json:"communication_links,omitempty" yaml:"communication_links"` - AllSupportedTags map[string]bool `json:"all_supported_tags,omitempty" yaml:"all_supported_tags"` - DiagramTweakNodesep int `json:"diagram_tweak_nodesep,omitempty" yaml:"diagram_tweak_nodesep"` - DiagramTweakRanksep int `json:"diagram_tweak_ranksep,omitempty" yaml:"diagram_tweak_ranksep"` - DiagramTweakEdgeLayout string `json:"diagram_tweak_edge_layout,omitempty" yaml:"diagram_tweak_edge_layout"` - DiagramTweakSuppressEdgeLabels bool `json:"diagram_tweak_suppress_edge_labels,omitempty" yaml:"diagram_tweak_suppress_edge_labels"` - DiagramTweakLayoutLeftToRight bool `json:"diagram_tweak_layout_left_to_right,omitempty" yaml:"diagram_tweak_layout_left_to_right"` - DiagramTweakInvisibleConnectionsBetweenAssets []string `json:"diagram_tweak_invisible_connections_between_assets,omitempty" yaml:"diagram_tweak_invisible_connections_between_assets"` - DiagramTweakSameRankAssets []string `json:"diagram_tweak_same_rank_assets,omitempty" yaml:"diagram_tweak_same_rank_assets"` + ThreagileVersion string `yaml:"threagile_version,omitempty" json:"threagile_version,omitempty"` + Includes []string `yaml:"includes,omitempty" json:"includes,omitempty"` + 
Title string `json:"title,omitempty" yaml:"title,omitempty"` + Author input.Author `json:"author,omitempty" yaml:"author,omitempty"` + Contributors []input.Author `yaml:"contributors,omitempty" json:"contributors,omitempty"` + Date Date `json:"date,omitempty" yaml:"date,omitempty"` + AppDescription input.Overview `yaml:"application_description,omitempty" json:"application_description,omitempty"` + BusinessOverview input.Overview `json:"business_overview,omitempty" yaml:"business_overview,omitempty"` + TechnicalOverview input.Overview `json:"technical_overview,omitempty" yaml:"technical_overview,omitempty"` + BusinessCriticality Criticality `json:"business_criticality,omitempty" yaml:"business_criticality,omitempty"` + ManagementSummaryComment string `json:"management_summary_comment,omitempty" yaml:"management_summary_comment,omitempty"` + SecurityRequirements map[string]string `json:"security_requirements,omitempty" yaml:"security_requirements,omitempty"` + Questions map[string]string `json:"questions,omitempty" yaml:"questions,omitempty"` + AbuseCases map[string]string `json:"abuse_cases,omitempty" yaml:"abuse_cases,omitempty"` + TagsAvailable []string `json:"tags_available,omitempty" yaml:"tags_available,omitempty"` + DataAssets map[string]DataAsset `json:"data_assets,omitempty" yaml:"data_assets,omitempty"` + TechnicalAssets map[string]TechnicalAsset `json:"technical_assets,omitempty" yaml:"technical_assets,omitempty"` + TrustBoundaries map[string]TrustBoundary `json:"trust_boundaries,omitempty" yaml:"trust_boundaries,omitempty"` + SharedRuntimes map[string]SharedRuntime `json:"shared_runtimes,omitempty" yaml:"shared_runtimes,omitempty"` + IndividualRiskCategories map[string]RiskCategory `json:"individual_risk_categories,omitempty" yaml:"individual_risk_categories,omitempty"` + BuiltInRiskCategories map[string]RiskCategory `json:"built_in_risk_categories,omitempty" yaml:"built_in_risk_categories,omitempty"` + RiskTracking map[string]RiskTracking 
`json:"risk_tracking,omitempty" yaml:"risk_tracking,omitempty"` + CommunicationLinks map[string]CommunicationLink `json:"communication_links,omitempty" yaml:"communication_links,omitempty"` + AllSupportedTags map[string]bool `json:"all_supported_tags,omitempty" yaml:"all_supported_tags,omitempty"` + DiagramTweakNodesep int `json:"diagram_tweak_nodesep,omitempty" yaml:"diagram_tweak_nodesep,omitempty"` + DiagramTweakRanksep int `json:"diagram_tweak_ranksep,omitempty" yaml:"diagram_tweak_ranksep,omitempty"` + DiagramTweakEdgeLayout string `json:"diagram_tweak_edge_layout,omitempty" yaml:"diagram_tweak_edge_layout,omitempty"` + DiagramTweakSuppressEdgeLabels bool `json:"diagram_tweak_suppress_edge_labels,omitempty" yaml:"diagram_tweak_suppress_edge_labels,omitempty"` + DiagramTweakLayoutLeftToRight bool `json:"diagram_tweak_layout_left_to_right,omitempty" yaml:"diagram_tweak_layout_left_to_right,omitempty"` + DiagramTweakInvisibleConnectionsBetweenAssets []string `json:"diagram_tweak_invisible_connections_between_assets,omitempty" yaml:"diagram_tweak_invisible_connections_between_assets,omitempty"` + DiagramTweakSameRankAssets []string `json:"diagram_tweak_same_rank_assets,omitempty" yaml:"diagram_tweak_same_rank_assets,omitempty"` // TODO: those are generated based on items above and needs to be private - IncomingTechnicalCommunicationLinksMappedByTargetId map[string][]CommunicationLink `json:"incoming_technical_communication_links_mapped_by_target_id,omitempty" yaml:"incoming_technical_communication_links_mapped_by_target_id"` - DirectContainingTrustBoundaryMappedByTechnicalAssetId map[string]TrustBoundary `json:"direct_containing_trust_boundary_mapped_by_technical_asset_id,omitempty" yaml:"direct_containing_trust_boundary_mapped_by_technical_asset_id"` - GeneratedRisksByCategory map[string][]Risk `json:"generated_risks_by_category,omitempty" yaml:"generated_risks_by_category"` - GeneratedRisksBySyntheticId map[string]Risk 
`json:"generated_risks_by_synthetic_id,omitempty" yaml:"generated_risks_by_synthetic_id"` + IncomingTechnicalCommunicationLinksMappedByTargetId map[string][]CommunicationLink `json:"incoming_technical_communication_links_mapped_by_target_id,omitempty" yaml:"incoming_technical_communication_links_mapped_by_target_id,omitempty"` + DirectContainingTrustBoundaryMappedByTechnicalAssetId map[string]TrustBoundary `json:"direct_containing_trust_boundary_mapped_by_technical_asset_id,omitempty" yaml:"direct_containing_trust_boundary_mapped_by_technical_asset_id,omitempty"` + GeneratedRisksByCategory map[string][]Risk `json:"generated_risks_by_category,omitempty" yaml:"generated_risks_by_category,omitempty"` + GeneratedRisksBySyntheticId map[string]Risk `json:"generated_risks_by_synthetic_id,omitempty" yaml:"generated_risks_by_synthetic_id,omitempty"` } func (parsedModel *ParsedModel) AddToListOfSupportedTags(tags []string) { @@ -157,7 +160,7 @@ func (parsedModel *ParsedModel) CheckRiskTracking(ignoreOrphanedRiskTracking boo } func (parsedModel *ParsedModel) CheckTagExists(referencedTag, where string) error { - if !contains(parsedModel.TagsAvailable, referencedTag) { + if !slices.Contains(parsedModel.TagsAvailable, referencedTag) { return errors.New("missing referenced tag in overall tag list at " + where + ": " + referencedTag) } return nil diff --git a/pkg/security/types/protocol.go b/pkg/security/types/protocol.go index e4ebd9aa..532d26bc 100644 --- a/pkg/security/types/protocol.go +++ b/pkg/security/types/protocol.go @@ -8,6 +8,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -213,13 +214,42 @@ func (what Protocol) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *Protocol) UnmarshalJSON([]byte) error { +func (what *Protocol) UnmarshalJSON(data []byte) error { + var text string + unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := 
what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what Protocol) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *Protocol) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what Protocol) find(value string) (Protocol, error) { for index, description := range ProtocolTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = Protocol(index) - return nil + if strings.EqualFold(value, description.Name) { + return Protocol(index), nil } } - return fmt.Errorf("unknown protocol value %q", int(*what)) + return Protocol(0), fmt.Errorf("unknown protocol value %q", value) } diff --git a/pkg/security/types/quantity.go b/pkg/security/types/quantity.go index d10ad872..f4942267 100644 --- a/pkg/security/types/quantity.go +++ b/pkg/security/types/quantity.go @@ -8,6 +8,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -68,13 +69,42 @@ func (what Quantity) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *Quantity) UnmarshalJSON([]byte) error { +func (what *Quantity) UnmarshalJSON(data []byte) error { + var text string + unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what Quantity) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *Quantity) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what Quantity) find(value string) (Quantity, error) { for index, description := range QuantityTypeDescription { - if 
strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = Quantity(index) - return nil + if strings.EqualFold(value, description.Name) { + return Quantity(index), nil } } - return fmt.Errorf("unknown quantity value %q", int(*what)) + return Quantity(0), fmt.Errorf("unknown quantity value %q", value) } diff --git a/pkg/security/types/risk-category.go b/pkg/security/types/risk-category.go index 232a7092..58a17b2c 100644 --- a/pkg/security/types/risk-category.go +++ b/pkg/security/types/risk-category.go @@ -2,20 +2,20 @@ package types type RiskCategory struct { // TODO: refactor all "Id" here and elsewhere to "ID" - Id string `json:"id,omitempty"` - Title string `json:"title,omitempty"` - Description string `json:"description,omitempty"` - Impact string `json:"impact,omitempty"` - ASVS string `json:"asvs,omitempty"` - CheatSheet string `json:"cheat_sheet,omitempty"` - Action string `json:"action,omitempty"` - Mitigation string `json:"mitigation,omitempty"` - Check string `json:"check,omitempty"` - DetectionLogic string `json:"detection_logic,omitempty"` - RiskAssessment string `json:"risk_assessment,omitempty"` - FalsePositives string `json:"false_positives,omitempty"` - Function RiskFunction `json:"function,omitempty"` - STRIDE STRIDE `json:"stride,omitempty"` - ModelFailurePossibleReason bool `json:"model_failure_possible_reason,omitempty"` - CWE int `json:"cwe,omitempty"` + Id string `json:"id,omitempty" yaml:"id,omitempty"` + Title string `json:"title,omitempty" yaml:"title,omitempty"` + Description string `json:"description,omitempty" yaml:"description,omitempty"` + Impact string `json:"impact,omitempty" yaml:"impact,omitempty"` + ASVS string `json:"asvs,omitempty" yaml:"asvs,omitempty"` + CheatSheet string `json:"cheat_sheet,omitempty" yaml:"cheat_sheet,omitempty"` + Action string `json:"action,omitempty" yaml:"action,omitempty"` + Mitigation string `json:"mitigation,omitempty" yaml:"mitigation,omitempty"` + Check string 
`json:"check,omitempty" yaml:"check,omitempty"` + DetectionLogic string `json:"detection_logic,omitempty" yaml:"detection_logic,omitempty"` + RiskAssessment string `json:"risk_assessment,omitempty" yaml:"risk_assessment,omitempty"` + FalsePositives string `json:"false_positives,omitempty" yaml:"false_positives,omitempty"` + Function RiskFunction `json:"function,omitempty" yaml:"function,omitempty"` + STRIDE STRIDE `json:"stride,omitempty" yaml:"stride,omitempty"` + ModelFailurePossibleReason bool `json:"model_failure_possible_reason,omitempty" yaml:"model_failure_possible_reason,omitempty"` + CWE int `json:"cwe,omitempty" yaml:"cwe,omitempty"` } diff --git a/pkg/security/types/risk-tracking.go b/pkg/security/types/risk-tracking.go index 526d462e..fc464226 100644 --- a/pkg/security/types/risk-tracking.go +++ b/pkg/security/types/risk-tracking.go @@ -1,14 +1,10 @@ package types -import ( - "time" -) - type RiskTracking struct { - SyntheticRiskId string `json:"synthetic_risk_id,omitempty"` - Justification string `json:"justification,omitempty"` - Ticket string `json:"ticket,omitempty"` - CheckedBy string `json:"checked_by,omitempty"` - Status RiskStatus `json:"status,omitempty"` - Date time.Time `json:"date"` + SyntheticRiskId string `json:"synthetic_risk_id,omitempty" yaml:"synthetic_risk_id,omitempty"` + Justification string `json:"justification,omitempty" yaml:"justification,omitempty"` + Ticket string `json:"ticket,omitempty" yaml:"ticket,omitempty"` + CheckedBy string `json:"checked_by,omitempty" yaml:"checked_by,omitempty"` + Status RiskStatus `json:"status,omitempty" yaml:"status,omitempty"` + Date Date `json:"date,omitempty" yaml:"date,omitempty"` } diff --git a/pkg/security/types/risk.go b/pkg/security/types/risk.go index dba2f3c0..c98863ed 100644 --- a/pkg/security/types/risk.go +++ b/pkg/security/types/risk.go @@ -1,20 +1,20 @@ package types type Risk struct { - CategoryId string `yaml:"category" json:"category"` // used for better JSON marshalling, is 
assigned in risk evaluation phase automatically - RiskStatus RiskStatus `yaml:"risk_status" json:"risk_status"` // used for better JSON marshalling, is assigned in risk evaluation phase automatically - Severity RiskSeverity `yaml:"severity" json:"severity"` - ExploitationLikelihood RiskExploitationLikelihood `yaml:"exploitation_likelihood" json:"exploitation_likelihood"` - ExploitationImpact RiskExploitationImpact `yaml:"exploitation_impact" json:"exploitation_impact"` - Title string `yaml:"title" json:"title"` - SyntheticId string `yaml:"synthetic_id" json:"synthetic_id"` - MostRelevantDataAssetId string `yaml:"most_relevant_data_asset" json:"most_relevant_data_asset"` - MostRelevantTechnicalAssetId string `yaml:"most_relevant_technical_asset" json:"most_relevant_technical_asset"` - MostRelevantTrustBoundaryId string `yaml:"most_relevant_trust_boundary" json:"most_relevant_trust_boundary"` - MostRelevantSharedRuntimeId string `yaml:"most_relevant_shared_runtime" json:"most_relevant_shared_runtime"` - MostRelevantCommunicationLinkId string `yaml:"most_relevant_communication_link" json:"most_relevant_communication_link"` - DataBreachProbability DataBreachProbability `yaml:"data_breach_probability" json:"data_breach_probability"` - DataBreachTechnicalAssetIDs []string `yaml:"data_breach_technical_assets" json:"data_breach_technical_assets"` + CategoryId string `yaml:"category,omitempty" json:"category,omitempty"` // used for better JSON marshalling, is assigned in risk evaluation phase automatically + RiskStatus RiskStatus `yaml:"risk_status,omitempty" json:"risk_status,omitempty"` // used for better JSON marshalling, is assigned in risk evaluation phase automatically + Severity RiskSeverity `yaml:"severity,omitempty" json:"severity,omitempty"` + ExploitationLikelihood RiskExploitationLikelihood `yaml:"exploitation_likelihood,omitempty" json:"exploitation_likelihood,omitempty"` + ExploitationImpact RiskExploitationImpact `yaml:"exploitation_impact,omitempty" 
json:"exploitation_impact,omitempty"` + Title string `yaml:"title,omitempty" json:"title,omitempty"` + SyntheticId string `yaml:"synthetic_id,omitempty" json:"synthetic_id,omitempty"` + MostRelevantDataAssetId string `yaml:"most_relevant_data_asset,omitempty" json:"most_relevant_data_asset,omitempty"` + MostRelevantTechnicalAssetId string `yaml:"most_relevant_technical_asset,omitempty" json:"most_relevant_technical_asset,omitempty"` + MostRelevantTrustBoundaryId string `yaml:"most_relevant_trust_boundary,omitempty" json:"most_relevant_trust_boundary,omitempty"` + MostRelevantSharedRuntimeId string `yaml:"most_relevant_shared_runtime,omitempty" json:"most_relevant_shared_runtime,omitempty"` + MostRelevantCommunicationLinkId string `yaml:"most_relevant_communication_link,omitempty" json:"most_relevant_communication_link,omitempty"` + DataBreachProbability DataBreachProbability `yaml:"data_breach_probability,omitempty" json:"data_breach_probability,omitempty"` + DataBreachTechnicalAssetIDs []string `yaml:"data_breach_technical_assets,omitempty" json:"data_breach_technical_assets,omitempty"` // TODO: refactor all "Id" here to "ID"? 
} diff --git a/pkg/security/types/risk_exploitation_impact.go b/pkg/security/types/risk_exploitation_impact.go index c2a42cc9..3fb7f6c3 100644 --- a/pkg/security/types/risk_exploitation_impact.go +++ b/pkg/security/types/risk_exploitation_impact.go @@ -8,6 +8,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -70,13 +71,42 @@ func (what RiskExploitationImpact) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *RiskExploitationImpact) UnmarshalJSON([]byte) error { +func (what *RiskExploitationImpact) UnmarshalJSON(data []byte) error { + var text string + unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what RiskExploitationImpact) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *RiskExploitationImpact) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what RiskExploitationImpact) find(value string) (RiskExploitationImpact, error) { for index, description := range RiskExploitationImpactTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = RiskExploitationImpact(index) - return nil + if strings.EqualFold(value, description.Name) { + return RiskExploitationImpact(index), nil } } - return fmt.Errorf("unknown risk exploitation impact value %q", int(*what)) + return RiskExploitationImpact(0), fmt.Errorf("unknown risk exploitation impact value %q", value) } diff --git a/pkg/security/types/risk_exploitation_likelihood.go b/pkg/security/types/risk_exploitation_likelihood.go index 4357eb40..d9db35b9 100644 --- a/pkg/security/types/risk_exploitation_likelihood.go +++ b/pkg/security/types/risk_exploitation_likelihood.go @@ -8,6 +8,7 @@ 
import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -70,13 +71,42 @@ func (what RiskExploitationLikelihood) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *RiskExploitationLikelihood) UnmarshalJSON([]byte) error { +func (what *RiskExploitationLikelihood) UnmarshalJSON(data []byte) error { + var text string + unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what RiskExploitationLikelihood) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *RiskExploitationLikelihood) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what RiskExploitationLikelihood) find(value string) (RiskExploitationLikelihood, error) { for index, description := range RiskExploitationLikelihoodTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = RiskExploitationLikelihood(index) - return nil + if strings.EqualFold(value, description.Name) { + return RiskExploitationLikelihood(index), nil } } - return fmt.Errorf("unknown risk exploration likelihood value %q", int(*what)) + return RiskExploitationLikelihood(0), fmt.Errorf("unknown risk exploration likelihood value %q", value) } diff --git a/pkg/security/types/risk_function.go b/pkg/security/types/risk_function.go index 55e5ba89..8a843591 100644 --- a/pkg/security/types/risk_function.go +++ b/pkg/security/types/risk_function.go @@ -8,6 +8,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -63,13 +64,42 @@ func (what RiskFunction) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *RiskFunction) UnmarshalJSON([]byte) error { +func (what *RiskFunction) 
UnmarshalJSON(data []byte) error { + var text string + unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what RiskFunction) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *RiskFunction) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what RiskFunction) find(value string) (RiskFunction, error) { for index, description := range RiskFunctionTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = RiskFunction(index) - return nil + if strings.EqualFold(value, description.Name) { + return RiskFunction(index), nil } } - return fmt.Errorf("unknown risk function %q", int(*what)) + return RiskFunction(0), fmt.Errorf("unknown risk function value %q", value) } diff --git a/pkg/security/types/risk_severity.go b/pkg/security/types/risk_severity.go index 13d6396a..41a665b8 100644 --- a/pkg/security/types/risk_severity.go +++ b/pkg/security/types/risk_severity.go @@ -8,6 +8,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -69,13 +70,42 @@ func (what RiskSeverity) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *RiskSeverity) UnmarshalJSON([]byte) error { +func (what *RiskSeverity) UnmarshalJSON(data []byte) error { + var text string + unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what RiskSeverity) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *RiskSeverity) UnmarshalYAML(node *yaml.Node) error { + value, findError 
:= what.find(node.Value) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what RiskSeverity) find(value string) (RiskSeverity, error) { for index, description := range RiskSeverityTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = RiskSeverity(index) - return nil + if strings.EqualFold(value, description.Name) { + return RiskSeverity(index), nil } } - return fmt.Errorf("unknown risk severity value %q", int(*what)) + return RiskSeverity(0), fmt.Errorf("unknown risk severity value %q", value) } diff --git a/pkg/security/types/risk_status.go b/pkg/security/types/risk_status.go index 012f156d..1f024219 100644 --- a/pkg/security/types/risk_status.go +++ b/pkg/security/types/risk_status.go @@ -8,6 +8,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -65,21 +66,50 @@ func (what RiskStatus) Title() string { return [...]string{"Unchecked", "in Discussion", "Accepted", "in Progress", "Mitigated", "False Positive"}[what] } +func (what RiskStatus) IsStillAtRisk() bool { + return what == Unchecked || what == InDiscussion || what == Accepted || what == InProgress +} + func (what RiskStatus) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what RiskStatus) IsStillAtRisk() bool { - return what == Unchecked || what == InDiscussion || what == Accepted || what == InProgress +func (what *RiskStatus) UnmarshalJSON(data []byte) error { + var text string + unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what RiskStatus) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *RiskStatus) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return findError + } + + *what = value 
+ return nil } -func (what *RiskStatus) UnmarshalJSON([]byte) error { +func (what RiskStatus) find(value string) (RiskStatus, error) { for index, description := range RiskStatusTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = RiskStatus(index) - return nil + if strings.EqualFold(value, description.Name) { + return RiskStatus(index), nil } } - return fmt.Errorf("unknown risk status value %q", int(*what)) + return RiskStatus(0), fmt.Errorf("unknown risk status value %q", value) } diff --git a/pkg/security/types/shared_runtime.go b/pkg/security/types/shared_runtime.go index 1d6d0ec6..c23873a6 100644 --- a/pkg/security/types/shared_runtime.go +++ b/pkg/security/types/shared_runtime.go @@ -9,11 +9,11 @@ import ( ) type SharedRuntime struct { - Id string `json:"id,omitempty"` - Title string `json:"title,omitempty"` - Description string `json:"description,omitempty"` - Tags []string `json:"tags,omitempty"` - TechnicalAssetsRunning []string `json:"technical_assets_running,omitempty"` + Id string `json:"id,omitempty" yaml:"id,omitempty"` + Title string `json:"title,omitempty" yaml:"title,omitempty"` + Description string `json:"description,omitempty" yaml:"description,omitempty"` + Tags []string `json:"tags,omitempty" yaml:"tags,omitempty"` + TechnicalAssetsRunning []string `json:"technical_assets_running,omitempty" yaml:"technical_assets_running,omitempty"` } func (what SharedRuntime) IsTaggedWithAny(tags ...string) bool { diff --git a/pkg/security/types/stride.go b/pkg/security/types/stride.go index f6f09cde..c8015e54 100644 --- a/pkg/security/types/stride.go +++ b/pkg/security/types/stride.go @@ -8,6 +8,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -69,13 +70,42 @@ func (what STRIDE) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *STRIDE) UnmarshalJSON([]byte) error { +func (what *STRIDE) UnmarshalJSON(data []byte) error { + var text string + 
unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what STRIDE) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *STRIDE) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what STRIDE) find(value string) (STRIDE, error) { for index, description := range StrideTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = STRIDE(index) - return nil + if strings.EqualFold(value, description.Name) { + return STRIDE(index), nil } } - return fmt.Errorf("unknown STRIDE value %q", int(*what)) + return STRIDE(0), fmt.Errorf("unknown STRIDE value %q", value) } diff --git a/pkg/security/types/technical_asset.go b/pkg/security/types/technical_asset.go index 32b33e50..2c12b5fd 100644 --- a/pkg/security/types/technical_asset.go +++ b/pkg/security/types/technical_asset.go @@ -10,35 +10,35 @@ import ( ) type TechnicalAsset struct { - Id string `json:"id,omitempty"` - Title string `json:"title,omitempty"` - Description string `json:"description,omitempty"` - Usage Usage `json:"usage,omitempty"` - Type TechnicalAssetType `json:"type,omitempty"` - Size TechnicalAssetSize `json:"size,omitempty"` - Technology TechnicalAssetTechnology `json:"technology,omitempty"` - Machine TechnicalAssetMachine `json:"machine,omitempty"` - Internet bool `json:"internet,omitempty"` - MultiTenant bool `json:"multi_tenant,omitempty"` - Redundant bool `json:"redundant,omitempty"` - CustomDevelopedParts bool `json:"custom_developed_parts,omitempty"` - OutOfScope bool `json:"out_of_scope,omitempty"` - UsedAsClientByHuman bool `json:"used_as_client_by_human,omitempty"` - Encryption EncryptionStyle `json:"encryption,omitempty"` - 
JustificationOutOfScope string `json:"justification_out_of_scope,omitempty"` - Owner string `json:"owner,omitempty"` - Confidentiality Confidentiality `json:"confidentiality,omitempty"` - Integrity Criticality `json:"integrity,omitempty"` - Availability Criticality `json:"availability,omitempty"` - JustificationCiaRating string `json:"justification_cia_rating,omitempty"` - Tags []string `json:"tags,omitempty"` - DataAssetsProcessed []string `json:"data_assets_processed,omitempty"` - DataAssetsStored []string `json:"data_assets_stored,omitempty"` - DataFormatsAccepted []DataFormat `json:"data_formats_accepted,omitempty"` - CommunicationLinks []CommunicationLink `json:"communication_links,omitempty"` - DiagramTweakOrder int `json:"diagram_tweak_order,omitempty"` + Id string `json:"id,omitempty" yaml:"id,omitempty"` + Title string `json:"title,omitempty" yaml:"title,omitempty"` + Description string `json:"description,omitempty" yaml:"description,omitempty"` + Usage Usage `json:"usage,omitempty" yaml:"usage,omitempty"` + Type TechnicalAssetType `json:"type,omitempty" yaml:"type,omitempty"` + Size TechnicalAssetSize `json:"size,omitempty" yaml:"size,omitempty"` + Technology TechnicalAssetTechnology `json:"technology,omitempty" yaml:"technology,omitempty"` + Machine TechnicalAssetMachine `json:"machine,omitempty" yaml:"machine,omitempty"` + Internet bool `json:"internet,omitempty" yaml:"internet,omitempty"` + MultiTenant bool `json:"multi_tenant,omitempty" yaml:"multi_tenant,omitempty"` + Redundant bool `json:"redundant,omitempty" yaml:"redundant,omitempty"` + CustomDevelopedParts bool `json:"custom_developed_parts,omitempty" yaml:"custom_developed_parts,omitempty"` + OutOfScope bool `json:"out_of_scope,omitempty" yaml:"out_of_scope,omitempty"` + UsedAsClientByHuman bool `json:"used_as_client_by_human,omitempty" yaml:"used_as_client_by_human,omitempty"` + Encryption EncryptionStyle `json:"encryption,omitempty" yaml:"encryption,omitempty"` + JustificationOutOfScope string 
`json:"justification_out_of_scope,omitempty" yaml:"justification_out_of_scope,omitempty"` + Owner string `json:"owner,omitempty" yaml:"owner,omitempty"` + Confidentiality Confidentiality `json:"confidentiality,omitempty" yaml:"confidentiality,omitempty"` + Integrity Criticality `json:"integrity,omitempty" yaml:"integrity,omitempty"` + Availability Criticality `json:"availability,omitempty" yaml:"availability,omitempty"` + JustificationCiaRating string `json:"justification_cia_rating,omitempty" yaml:"justification_cia_rating,omitempty"` + Tags []string `json:"tags,omitempty" yaml:"tags,omitempty"` + DataAssetsProcessed []string `json:"data_assets_processed,omitempty" yaml:"data_assets_processed,omitempty"` + DataAssetsStored []string `json:"data_assets_stored,omitempty" yaml:"data_assets_stored,omitempty"` + DataFormatsAccepted []DataFormat `json:"data_formats_accepted,omitempty" yaml:"data_formats_accepted,omitempty"` + CommunicationLinks []CommunicationLink `json:"communication_links,omitempty" yaml:"communication_links,omitempty"` + DiagramTweakOrder int `json:"diagram_tweak_order,omitempty" yaml:"diagram_tweak_order,omitempty"` // will be set by separate calculation step: - RAA float64 `json:"raa,omitempty"` + RAA float64 `json:"raa,omitempty" yaml:"raa,omitempty"` } func (what TechnicalAsset) IsTaggedWithAny(tags ...string) bool { diff --git a/pkg/security/types/technical_asset_machine.go b/pkg/security/types/technical_asset_machine.go index 8536104f..f8463805 100644 --- a/pkg/security/types/technical_asset_machine.go +++ b/pkg/security/types/technical_asset_machine.go @@ -8,6 +8,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -58,13 +59,42 @@ func (what TechnicalAssetMachine) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *TechnicalAssetMachine) UnmarshalJSON([]byte) error { +func (what *TechnicalAssetMachine) UnmarshalJSON(data []byte) error { + var text string + unmarshalError := 
json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what TechnicalAssetMachine) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *TechnicalAssetMachine) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what TechnicalAssetMachine) find(value string) (TechnicalAssetMachine, error) { for index, description := range TechnicalAssetMachineTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = TechnicalAssetMachine(index) - return nil + if strings.EqualFold(value, description.Name) { + return TechnicalAssetMachine(index), nil } } - return fmt.Errorf("unknown technical asset machine value %q", int(*what)) + return TechnicalAssetMachine(0), fmt.Errorf("unknown technical asset machine value %q", value) } diff --git a/pkg/security/types/technical_asset_size.go b/pkg/security/types/technical_asset_size.go index 178bf5ef..65ecbc95 100644 --- a/pkg/security/types/technical_asset_size.go +++ b/pkg/security/types/technical_asset_size.go @@ -8,6 +8,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -59,13 +60,42 @@ func (what TechnicalAssetSize) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *TechnicalAssetSize) UnmarshalJSON([]byte) error { +func (what *TechnicalAssetSize) UnmarshalJSON(data []byte) error { + var text string + unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what TechnicalAssetSize) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func 
(what *TechnicalAssetSize) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what TechnicalAssetSize) find(value string) (TechnicalAssetSize, error) { for index, description := range TechnicalAssetSizeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = TechnicalAssetSize(index) - return nil + if strings.EqualFold(value, description.Name) { + return TechnicalAssetSize(index), nil } } - return fmt.Errorf("unknown technical asset size value %q", int(*what)) + return TechnicalAssetSize(0), fmt.Errorf("unknown technical asset size value %q", value) } diff --git a/pkg/security/types/technical_asset_technology.go b/pkg/security/types/technical_asset_technology.go index 2dc272c8..5083cf88 100644 --- a/pkg/security/types/technical_asset_technology.go +++ b/pkg/security/types/technical_asset_technology.go @@ -8,6 +8,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -297,13 +298,42 @@ func (what TechnicalAssetTechnology) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *TechnicalAssetTechnology) UnmarshalJSON([]byte) error { +func (what *TechnicalAssetTechnology) UnmarshalJSON(data []byte) error { + var text string + unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what TechnicalAssetTechnology) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *TechnicalAssetTechnology) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what TechnicalAssetTechnology) find(value string) (TechnicalAssetTechnology, error) { for index, 
description := range TechnicalAssetTechnologyTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = TechnicalAssetTechnology(index) - return nil + if strings.EqualFold(value, description.Name) { + return TechnicalAssetTechnology(index), nil } } - return fmt.Errorf("unknown technical asset technology value %q", int(*what)) + return TechnicalAssetTechnology(0), fmt.Errorf("unknown technical asset technology value %q", value) } diff --git a/pkg/security/types/technical_asset_type.go b/pkg/security/types/technical_asset_type.go index 513a47f4..02f585c2 100644 --- a/pkg/security/types/technical_asset_type.go +++ b/pkg/security/types/technical_asset_type.go @@ -8,6 +8,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -56,13 +57,42 @@ func (what TechnicalAssetType) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *TechnicalAssetType) UnmarshalJSON([]byte) error { +func (what *TechnicalAssetType) UnmarshalJSON(data []byte) error { + var text string + unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what TechnicalAssetType) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *TechnicalAssetType) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what TechnicalAssetType) find(value string) (TechnicalAssetType, error) { for index, description := range TechnicalAssetTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = TechnicalAssetType(index) - return nil + if strings.EqualFold(value, description.Name) { + return TechnicalAssetType(index), nil } } - return fmt.Errorf("unknown technical 
asset type value %q", int(*what)) + return TechnicalAssetType(0), fmt.Errorf("unknown technical asset type value %q", value) } diff --git a/pkg/security/types/trust_boundary.go b/pkg/security/types/trust_boundary.go index 2b9cacd5..15c1d8c7 100644 --- a/pkg/security/types/trust_boundary.go +++ b/pkg/security/types/trust_boundary.go @@ -9,13 +9,13 @@ import ( ) type TrustBoundary struct { - Id string `json:"id,omitempty"` - Title string `json:"title,omitempty"` - Description string `json:"description,omitempty"` - Type TrustBoundaryType `json:"type,omitempty"` - Tags []string `json:"tags,omitempty"` - TechnicalAssetsInside []string `json:"technical_assets_inside,omitempty"` - TrustBoundariesNested []string `json:"trust_boundaries_nested,omitempty"` + Id string `json:"id,omitempty" yaml:"id,omitempty"` + Title string `json:"title,omitempty" yaml:"title,omitempty"` + Description string `json:"description,omitempty" yaml:"description,omitempty"` + Type TrustBoundaryType `json:"type,omitempty" yaml:"type,omitempty"` + Tags []string `json:"tags,omitempty" yaml:"tags,omitempty"` + TechnicalAssetsInside []string `json:"technical_assets_inside,omitempty" yaml:"technical_assets_inside,omitempty"` + TrustBoundariesNested []string `json:"trust_boundaries_nested,omitempty" yaml:"trust_boundaries_nested,omitempty"` } func (what TrustBoundary) RecursivelyAllTechnicalAssetIDsInside(model *ParsedModel) []string { diff --git a/pkg/security/types/trust_boundary_type.go b/pkg/security/types/trust_boundary_type.go index d08da6e8..9a9a005d 100644 --- a/pkg/security/types/trust_boundary_type.go +++ b/pkg/security/types/trust_boundary_type.go @@ -7,6 +7,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -76,13 +77,42 @@ func (what TrustBoundaryType) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *TrustBoundaryType) UnmarshalJSON([]byte) error { +func (what *TrustBoundaryType) UnmarshalJSON(data []byte) error { + var text 
string + unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what TrustBoundaryType) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *TrustBoundaryType) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what TrustBoundaryType) find(value string) (TrustBoundaryType, error) { for index, description := range TrustBoundaryTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = TrustBoundaryType(index) - return nil + if strings.EqualFold(value, description.Name) { + return TrustBoundaryType(index), nil } } - return fmt.Errorf("unknown trust boundary type value %q", int(*what)) + return TrustBoundaryType(0), fmt.Errorf("unknown trust boundary type value %q", value) } diff --git a/pkg/security/types/usage.go b/pkg/security/types/usage.go index 86653106..d0eee3d3 100644 --- a/pkg/security/types/usage.go +++ b/pkg/security/types/usage.go @@ -7,6 +7,7 @@ import ( "encoding/json" "errors" "fmt" + "gopkg.in/yaml.v3" "strings" ) @@ -57,13 +58,42 @@ func (what Usage) MarshalJSON() ([]byte, error) { return json.Marshal(what.String()) } -func (what *Usage) UnmarshalJSON([]byte) error { +func (what *Usage) UnmarshalJSON(data []byte) error { + var text string + unmarshalError := json.Unmarshal(data, &text) + if unmarshalError != nil { + return unmarshalError + } + + value, findError := what.find(text) + if findError != nil { + return findError + } + + *what = value + return nil +} + +func (what Usage) MarshalYAML() (interface{}, error) { + return what.String(), nil +} + +func (what *Usage) UnmarshalYAML(node *yaml.Node) error { + value, findError := what.find(node.Value) + if findError != nil { + return 
findError + } + + *what = value + return nil +} + +func (what Usage) find(value string) (Usage, error) { for index, description := range UsageTypeDescription { - if strings.ToLower(what.String()) == strings.ToLower(description.Name) { - *what = Usage(index) - return nil + if strings.EqualFold(value, description.Name) { + return Usage(index), nil } } - return fmt.Errorf("unknown usage type value %q", int(*what)) + return Usage(0), fmt.Errorf("unknown usage type value %q", value) } diff --git a/test/all.json b/test/all.json index f1b92a8c..42123fa5 100644 --- a/test/all.json +++ b/test/all.json @@ -7,14 +7,12 @@ "date": "2020-07-01T00:00:00Z", "management_summary_comment": "Just some \u003cb\u003emore\u003c/b\u003e custom summary possible here...\n", "business_overview": { - "description": "Some more \u003ci\u003edemo text\u003c/i\u003e here and even images...", - "images": null + "description": "Some more \u003ci\u003edemo text\u003c/i\u003e here and even images..." }, "technical_overview": { - "description": "Some more \u003ci\u003edemo text\u003c/i\u003e here and even images...", - "images": null + "description": "Some more \u003ci\u003edemo text\u003c/i\u003e here and even images..." 
}, - "business_criticality": 2, + "business_criticality": "important", "security_requirements": { "EU-DSGVO": "Mandatory EU-Datenschutzgrundverordnung", "Input Validation": "Strict input validation is required to reduce the overall attack surface.", @@ -58,202 +56,167 @@ "id": "build-job-config", "title": "Build Job Config", "description": "Data for customizing of the build job system.", - "usage": 1, - "tags": [], + "usage": "devops", "origin": "Company XYZ", "owner": "Company XYZ", - "quantity": 0, - "confidentiality": 2, - "integrity": 3, - "availability": 1, + "confidentiality": "restricted", + "integrity": "critical", + "availability": "operational", "justification_cia_rating": "Data for customizing of the build job system.\n" }, "client-application-code": { "id": "client-application-code", "title": "Client Application Code", "description": "Angular and other client-side code delivered by the application.", - "usage": 1, - "tags": [], + "usage": "devops", "origin": "Company ABC", "owner": "Company ABC", - "quantity": 0, - "confidentiality": 0, - "integrity": 3, - "availability": 2, + "integrity": "critical", + "availability": "important", "justification_cia_rating": "The integrity of the public data is critical to avoid reputational damage and the availability is important on the long-term scale (but not critical) to keep the growth rate of the customer base steady.\n" }, "contract-summaries": { "id": "contract-summaries", "title": "Customer Contract Summaries", "description": "Customer Contract Summaries", - "usage": 0, - "tags": [], "origin": "Customer", "owner": "Company XYZ", - "quantity": 0, - "confidentiality": 2, - "integrity": 1, - "availability": 1, + "confidentiality": "restricted", + "integrity": "operational", + "availability": "operational", "justification_cia_rating": "Just some summaries.\n" }, "customer-accounts": { "id": "customer-accounts", "title": "Customer Accounts", "description": "Customer Accounts (including transient credentials when 
entered for checking them)", - "usage": 0, - "tags": [], "origin": "Customer", "owner": "Company XYZ", - "quantity": 2, - "confidentiality": 4, - "integrity": 3, - "availability": 3, + "quantity": "many", + "confidentiality": "strictly-confidential", + "integrity": "critical", + "availability": "critical", "justification_cia_rating": "Customer account data for using the portal are required to be available to offer the portal functionality.\n" }, "customer-contracts": { "id": "customer-contracts", "title": "Customer Contracts", "description": "Customer Contracts (PDF)", - "usage": 0, - "tags": [], "origin": "Customer", "owner": "Company XYZ", - "quantity": 2, - "confidentiality": 3, - "integrity": 3, - "availability": 1, + "quantity": "many", + "confidentiality": "confidential", + "integrity": "critical", + "availability": "operational", "justification_cia_rating": "Contract data might contain financial data as well as personally identifiable information (PII). The integrity and availability of contract data is required for clearing payment disputes.\n" }, "customer-operational-data": { "id": "customer-operational-data", "title": "Customer Operational Data", "description": "Customer Operational Data", - "usage": 0, - "tags": [], "origin": "Customer", "owner": "Company XYZ", - "quantity": 2, - "confidentiality": 3, - "integrity": 3, - "availability": 3, + "quantity": "many", + "confidentiality": "confidential", + "integrity": "critical", + "availability": "critical", "justification_cia_rating": "Customer operational data for using the portal are required to be available to offer the portal functionality and are used in the backend transactions.\n" }, "db-dumps": { "id": "db-dumps", "title": "Database Customizing and Dumps", "description": "Data for customizing of the DB system, which might include full database dumps.", - "usage": 1, + "usage": "devops", "tags": [ "oracle" ], "origin": "Company XYZ", "owner": "Company XYZ", - "quantity": 0, - "confidentiality": 4, - 
"integrity": 3, - "availability": 3, + "confidentiality": "strictly-confidential", + "integrity": "critical", + "availability": "critical", "justification_cia_rating": "Data for customizing of the DB system, which might include full database dumps.\n" }, "erp-customizing": { "id": "erp-customizing", "title": "ERP Customizing Data", "description": "Data for customizing of the ERP system.", - "usage": 1, - "tags": [], + "usage": "devops", "origin": "Company XYZ", "owner": "Company XYZ", - "quantity": 0, - "confidentiality": 3, - "integrity": 3, - "availability": 3, + "confidentiality": "confidential", + "integrity": "critical", + "availability": "critical", "justification_cia_rating": "Data for customizing of the ERP system.\n" }, "erp-logs": { "id": "erp-logs", "title": "ERP Logs", "description": "Logs generated by the ERP system.", - "usage": 1, - "tags": [], + "usage": "devops", "origin": "Company XYZ", "owner": "Company XYZ", - "quantity": 2, - "confidentiality": 2, - "integrity": 0, - "availability": 0, + "quantity": "many", + "confidentiality": "restricted", "justification_cia_rating": "Logs should not contain PII data and are only required for failure analysis, i.e. 
they are not considered as hard transactional logs.\n" }, "internal-business-data": { "id": "internal-business-data", "title": "Some Internal Business Data", "description": "Internal business data of the ERP system used unrelated to the customer-facing processes.", - "usage": 0, - "tags": [], "origin": "Company XYZ", "owner": "Company XYZ", - "quantity": 1, - "confidentiality": 4, - "integrity": 3, - "availability": 3, + "quantity": "few", + "confidentiality": "strictly-confidential", + "integrity": "critical", + "availability": "critical", "justification_cia_rating": "Data used and/or generated during unrelated other usecases of the ERP-system (when used also by Company XYZ for internal non-customer-portal-related stuff).\n" }, "marketing-material": { "id": "marketing-material", "title": "Marketing Material", "description": "Website and marketing data to inform potential customers and generate new leads.", - "usage": 1, - "tags": [], + "usage": "devops", "origin": "Company ABC", "owner": "Company ABC", - "quantity": 0, - "confidentiality": 0, - "integrity": 2, - "availability": 2, + "integrity": "important", + "availability": "important", "justification_cia_rating": "The integrity of the public data is critical to avoid reputational damage and the availability is important on the long-term scale (but not critical) to keep the growth rate of the customer base steady.\n" }, "server-application-code": { "id": "server-application-code", "title": "Server Application Code", "description": "API and other server-side code of the application.", - "usage": 1, - "tags": [], + "usage": "devops", "origin": "Company ABC", "owner": "Company ABC", - "quantity": 0, - "confidentiality": 1, - "integrity": 4, - "availability": 2, + "confidentiality": "internal", + "integrity": "mission-critical", + "availability": "important", "justification_cia_rating": "The integrity of the API code is critical to avoid reputational damage and the availability is important on the long-term scale 
(but not critical) to keep the growth rate of the customer base steady.\n" } }, "technical_assets": { "apache-webserver": { - "Id": "apache-webserver", - "Title": "Apache Webserver", - "Description": "Apache Webserver hosting the API code and client-side code", - "Usage": 0, - "Type": 1, - "Size": 2, - "Technology": 6, - "Machine": 2, - "Internet": false, - "MultiTenant": false, - "Redundant": false, - "CustomDevelopedParts": true, - "OutOfScope": false, - "UsedAsClientByHuman": false, - "Encryption": 0, - "JustificationOutOfScope": "", - "Owner": "Company ABC", - "Confidentiality": 1, - "Integrity": 3, - "Availability": 3, - "JustificationCiaRating": "The correct configuration and reachability of the web server is mandatory for all customer usages of the portal.\n", - "Tags": [ + "id": "apache-webserver", + "title": "Apache Webserver", + "description": "Apache Webserver hosting the API code and client-side code", + "type": "process", + "size": "application", + "technology": "web-server", + "machine": "container", + "custom_developed_parts": true, + "owner": "Company ABC", + "confidentiality": "internal", + "integrity": "critical", + "availability": "critical", + "justification_cia_rating": "The correct configuration and reachability of the web server is mandatory for all customer usages of the portal.\n", + "tags": [ "linux", "apache", "aws:ec2" ], - "DataAssetsProcessed": [ + "data_assets_processed": [ "customer-accounts", "customer-operational-data", "customer-contracts", @@ -261,840 +224,632 @@ "client-application-code", "server-application-code" ], - "DataAssetsStored": [ + "data_assets_stored": [ "client-application-code", "server-application-code" ], - "DataFormatsAccepted": [ - 0, - 3 + "data_formats_accepted": [ + "json", + "file" ], - "CommunicationLinks": [ + "communication_links": [ { - "Id": "apache-webserver\u003eerp-system-traffic", - "SourceId": "apache-webserver", - "TargetId": "erp-system", - "Title": "ERP System Traffic", - "Description": "Link 
to the ERP system", - "Protocol": 2, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 3, - "Authorization": 1, - "Usage": 0, - "DataAssetsSent": [ + "id": "apache-webserver\u003eerp-system-traffic", + "source_id": "apache-webserver", + "target_id": "erp-system", + "title": "ERP System Traffic", + "description": "Link to the ERP system", + "protocol": "https", + "authentication": "token", + "authorization": "technical-user", + "data_assets_sent": [ "customer-accounts", "customer-operational-data", "internal-business-data" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-accounts", "customer-operational-data", "customer-contracts", "internal-business-data" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, { - "Id": "apache-webserver\u003eauth-credential-check-traffic", - "SourceId": "apache-webserver", - "TargetId": "identity-provider", - "Title": "Auth Credential Check Traffic", - "Description": "Link to the identity provider server", - "Protocol": 2, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 1, - "Authorization": 1, - "Usage": 0, - "DataAssetsSent": [ + "id": "apache-webserver\u003eauth-credential-check-traffic", + "source_id": "apache-webserver", + "target_id": "identity-provider", + "title": "Auth Credential Check Traffic", + "description": "Link to the identity provider server", + "protocol": "https", + "authentication": "credentials", + "authorization": "technical-user", + "data_assets_sent": [ "customer-accounts" ], - "DataAssetsReceived": null, - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } - ], - "DiagramTweakOrder": 0, - "RAA": 0 + ] }, "backend-admin-client": { - "Id": "backend-admin-client", - "Title": "Backend Admin Client", - "Description": "Backend admin client", - "Usage": 1, - 
"Type": 0, - "Size": 3, - "Technology": 2, - "Machine": 0, - "Internet": false, - "MultiTenant": false, - "Redundant": false, - "CustomDevelopedParts": false, - "OutOfScope": true, - "UsedAsClientByHuman": true, - "Encryption": 0, - "JustificationOutOfScope": "Owned and managed by ops provider", - "Owner": "Company XYZ", - "Confidentiality": 1, - "Integrity": 1, - "Availability": 1, - "JustificationCiaRating": "The client used by Company XYZ to administer the system.\n", - "Tags": [], - "DataAssetsProcessed": [ + "id": "backend-admin-client", + "title": "Backend Admin Client", + "description": "Backend admin client", + "usage": "devops", + "size": "component", + "technology": "browser", + "out_of_scope": true, + "used_as_client_by_human": true, + "justification_out_of_scope": "Owned and managed by ops provider", + "owner": "Company XYZ", + "confidentiality": "internal", + "integrity": "operational", + "availability": "operational", + "justification_cia_rating": "The client used by Company XYZ to administer the system.\n", + "data_assets_processed": [ "erp-logs" ], - "DataAssetsStored": [], - "DataFormatsAccepted": [], - "CommunicationLinks": [ - { - "Id": "backend-admin-client\u003edb-update-access", - "SourceId": "backend-admin-client", - "TargetId": "sql-database", - "Title": "DB Update Access", - "Description": "Link to the database (JDBC tunneled via SSH)", - "Protocol": 20, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 4, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ - "db-dumps" - ], - "DataAssetsReceived": [ - "db-dumps", - "erp-logs", - "customer-accounts", - "customer-operational-data" - ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true - }, + "communication_links": [ { - "Id": "backend-admin-client\u003euser-management-access", - "SourceId": "backend-admin-client", - "TargetId": "ldap-auth-server", - "Title": "User Management Access", - "Description": "Link to the LDAP auth server 
for managing users", - "Protocol": 33, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 1, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": "backend-admin-client\u003euser-management-access", + "source_id": "backend-admin-client", + "target_id": "ldap-auth-server", + "title": "User Management Access", + "description": "Link to the LDAP auth server for managing users", + "protocol": "ldaps", + "authentication": "credentials", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ "customer-accounts" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-accounts" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, { - "Id": "backend-admin-client\u003eerp-web-access", - "SourceId": "backend-admin-client", - "TargetId": "erp-system", - "Title": "ERP Web Access", - "Description": "Link to the ERP system (Web)", - "Protocol": 2, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 3, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": "backend-admin-client\u003eerp-web-access", + "source_id": "backend-admin-client", + "target_id": "erp-system", + "title": "ERP Web Access", + "description": "Link to the ERP system (Web)", + "protocol": "https", + "authentication": "token", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ "erp-customizing" ], - "DataAssetsReceived": [ + "data_assets_received": [ "erp-logs" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true + }, + { + "id": "backend-admin-client\u003edb-update-access", + "source_id": "backend-admin-client", + "target_id": "sql-database", + "title": "DB Update Access", + "description": "Link to the database (JDBC tunneled via SSH)", + "protocol": "ssh", + "authentication": 
"client-certificate", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ + "db-dumps" + ], + "data_assets_received": [ + "db-dumps", + "erp-logs", + "customer-accounts", + "customer-operational-data" + ], + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } - ], - "DiagramTweakOrder": 0, - "RAA": 0 + ] }, "backoffice-client": { - "Id": "backoffice-client", - "Title": "Backoffice Client", - "Description": "Backoffice client", - "Usage": 0, - "Type": 0, - "Size": 3, - "Technology": 3, - "Machine": 0, - "Internet": false, - "MultiTenant": false, - "Redundant": false, - "CustomDevelopedParts": false, - "OutOfScope": true, - "UsedAsClientByHuman": true, - "Encryption": 0, - "JustificationOutOfScope": "Owned and managed by Company XYZ company", - "Owner": "Company XYZ", - "Confidentiality": 3, - "Integrity": 2, - "Availability": 2, - "JustificationCiaRating": "The client used by Company XYZ to administer and use the system.\n", - "Tags": [], - "DataAssetsProcessed": [ + "id": "backoffice-client", + "title": "Backoffice Client", + "description": "Backoffice client", + "size": "component", + "technology": "desktop", + "out_of_scope": true, + "used_as_client_by_human": true, + "justification_out_of_scope": "Owned and managed by Company XYZ company", + "owner": "Company XYZ", + "confidentiality": "confidential", + "integrity": "important", + "availability": "important", + "justification_cia_rating": "The client used by Company XYZ to administer and use the system.\n", + "data_assets_processed": [ "customer-contracts", "internal-business-data", "erp-logs" ], - "DataAssetsStored": [], - "DataFormatsAccepted": [], - "CommunicationLinks": [ + "communication_links": [ { - "Id": "backoffice-client\u003eerp-internal-access", - "SourceId": "backoffice-client", - "TargetId": "erp-system", - "Title": "ERP Internal Access", - "Description": "Link to the ERP system", - "Protocol": 2, - "Tags": [ + "id": 
"backoffice-client\u003eerp-internal-access", + "source_id": "backoffice-client", + "target_id": "erp-system", + "title": "ERP Internal Access", + "description": "Link to the ERP system", + "protocol": "https", + "tags": [ "some-erp" ], - "VPN": true, - "IpFiltered": false, - "Readonly": false, - "Authentication": 3, - "Authorization": 2, - "Usage": 0, - "DataAssetsSent": [ + "vpn": true, + "authentication": "token", + "authorization": "enduser-identity-propagation", + "data_assets_sent": [ "internal-business-data" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-contracts", "internal-business-data" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, { - "Id": "backoffice-client\u003emarketing-cms-editing", - "SourceId": "backoffice-client", - "TargetId": "marketing-cms", - "Title": "Marketing CMS Editing", - "Description": "Link to the CMS for editing content", - "Protocol": 2, - "Tags": [], - "VPN": true, - "IpFiltered": false, - "Readonly": false, - "Authentication": 3, - "Authorization": 2, - "Usage": 0, - "DataAssetsSent": [ + "id": "backoffice-client\u003emarketing-cms-editing", + "source_id": "backoffice-client", + "target_id": "marketing-cms", + "title": "Marketing CMS Editing", + "description": "Link to the CMS for editing content", + "protocol": "https", + "vpn": true, + "authentication": "token", + "authorization": "enduser-identity-propagation", + "data_assets_sent": [ "marketing-material" ], - "DataAssetsReceived": [ + "data_assets_received": [ "marketing-material" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } - ], - "DiagramTweakOrder": 0, - "RAA": 0 + ] }, "contract-fileserver": { - "Id": "contract-fileserver", - "Title": "Contract Fileserver", - "Description": "NFS Filesystem for storing the contract PDFs", - "Usage": 0, - "Type": 2, - "Size": 3, - "Technology": 10, - 
"Machine": 1, - "Internet": false, - "MultiTenant": false, - "Redundant": false, - "CustomDevelopedParts": false, - "OutOfScope": false, - "UsedAsClientByHuman": false, - "Encryption": 0, - "JustificationOutOfScope": "", - "Owner": "Company ABC", - "Confidentiality": 3, - "Integrity": 3, - "Availability": 2, - "JustificationCiaRating": "Contract data might contain financial data as well as personally identifiable information (PII). The integrity and availability of contract data is required for clearing payment disputes. The filesystem is also required to be available for storing new contracts of freshly generated customers.\n", - "Tags": [ + "id": "contract-fileserver", + "title": "Contract Fileserver", + "description": "NFS Filesystem for storing the contract PDFs", + "type": "datastore", + "size": "component", + "technology": "file-server", + "machine": "virtual", + "owner": "Company ABC", + "confidentiality": "confidential", + "integrity": "critical", + "availability": "important", + "justification_cia_rating": "Contract data might contain financial data as well as personally identifiable information (PII). The integrity and availability of contract data is required for clearing payment disputes. 
The filesystem is also required to be available for storing new contracts of freshly generated customers.\n", + "tags": [ "linux", "aws:s3" ], - "DataAssetsProcessed": [], - "DataAssetsStored": [ + "data_assets_stored": [ "customer-contracts", "contract-summaries" ], - "DataFormatsAccepted": [ - 3 - ], - "CommunicationLinks": [], - "DiagramTweakOrder": 0, - "RAA": 0 + "data_formats_accepted": [ + "file" + ] }, "customer-client": { - "Id": "customer-client", - "Title": "Customer Web Client", - "Description": "Customer Web Client", - "Usage": 0, - "Type": 0, - "Size": 3, - "Technology": 2, - "Machine": 0, - "Internet": true, - "MultiTenant": false, - "Redundant": false, - "CustomDevelopedParts": false, - "OutOfScope": true, - "UsedAsClientByHuman": true, - "Encryption": 0, - "JustificationOutOfScope": "Owned and managed by enduser customer", - "Owner": "Customer", - "Confidentiality": 1, - "Integrity": 1, - "Availability": 1, - "JustificationCiaRating": "The client used by the customer to access the system.\n", - "Tags": [], - "DataAssetsProcessed": [ + "id": "customer-client", + "title": "Customer Web Client", + "description": "Customer Web Client", + "size": "component", + "technology": "browser", + "internet": true, + "out_of_scope": true, + "used_as_client_by_human": true, + "justification_out_of_scope": "Owned and managed by enduser customer", + "owner": "Customer", + "confidentiality": "internal", + "integrity": "operational", + "availability": "operational", + "justification_cia_rating": "The client used by the customer to access the system.\n", + "data_assets_processed": [ "customer-accounts", "customer-operational-data", "customer-contracts", "client-application-code", "marketing-material" ], - "DataAssetsStored": [], - "DataFormatsAccepted": [], - "CommunicationLinks": [ + "communication_links": [ { - "Id": "customer-client\u003ecustomer-traffic", - "SourceId": "customer-client", - "TargetId": "load-balancer", - "Title": "Customer Traffic", - "Description": 
"Link to the load balancer", - "Protocol": 2, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 2, - "Authorization": 2, - "Usage": 0, - "DataAssetsSent": [ + "id": "customer-client\u003ecustomer-traffic", + "source_id": "customer-client", + "target_id": "load-balancer", + "title": "Customer Traffic", + "description": "Link to the load balancer", + "protocol": "https", + "authentication": "session-id", + "authorization": "enduser-identity-propagation", + "data_assets_sent": [ "customer-accounts", "customer-operational-data" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-accounts", "customer-operational-data", "customer-contracts", "client-application-code", "marketing-material" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } - ], - "DiagramTweakOrder": 0, - "RAA": 0 + ] }, "erp-system": { - "Id": "erp-system", - "Title": "Backoffice ERP System", - "Description": "ERP system", - "Usage": 0, - "Type": 1, - "Size": 0, - "Technology": 12, - "Machine": 1, - "Internet": false, - "MultiTenant": false, - "Redundant": true, - "CustomDevelopedParts": false, - "OutOfScope": false, - "UsedAsClientByHuman": false, - "Encryption": 0, - "JustificationOutOfScope": "", - "Owner": "Company ABC", - "Confidentiality": 4, - "Integrity": 4, - "Availability": 4, - "JustificationCiaRating": "The ERP system contains business-relevant sensitive data for the leasing processes and eventually also for other Company XYZ internal processes.\n", - "Tags": [ + "id": "erp-system", + "title": "Backoffice ERP System", + "description": "ERP system", + "type": "process", + "technology": "erp", + "machine": "virtual", + "redundant": true, + "owner": "Company ABC", + "confidentiality": "strictly-confidential", + "integrity": "mission-critical", + "availability": "mission-critical", + "justification_cia_rating": "The ERP system contains business-relevant 
sensitive data for the leasing processes and eventually also for other Company XYZ internal processes.\n", + "tags": [ "linux" ], - "DataAssetsProcessed": [ + "data_assets_processed": [ "customer-accounts", "customer-operational-data", "customer-contracts", "internal-business-data", "erp-customizing" ], - "DataAssetsStored": [ + "data_assets_stored": [ "erp-logs" ], - "DataFormatsAccepted": [ - 1, - 3, - 2 + "data_formats_accepted": [ + "xml", + "file", + "serialization" ], - "CommunicationLinks": [ - { - "Id": "erp-system\u003enfs-filesystem-access", - "SourceId": "erp-system", - "TargetId": "contract-fileserver", - "Title": "NFS Filesystem Access", - "Description": "Link to the file system", - "Protocol": 35, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 0, - "Authorization": 0, - "Usage": 0, - "DataAssetsSent": [ - "customer-contracts" - ], - "DataAssetsReceived": [ - "customer-contracts" - ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true - }, + "communication_links": [ { - "Id": "erp-system\u003edatabase-traffic", - "SourceId": "erp-system", - "TargetId": "sql-database", - "Title": "Database Traffic", - "Description": "Link to the DB system", - "Protocol": 8, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 1, - "Authorization": 1, - "Usage": 0, - "DataAssetsSent": [ + "id": "erp-system\u003edatabase-traffic", + "source_id": "erp-system", + "target_id": "sql-database", + "title": "Database Traffic", + "description": "Link to the DB system", + "protocol": "jdbc", + "authentication": "credentials", + "authorization": "technical-user", + "data_assets_sent": [ "customer-accounts", "customer-operational-data", "internal-business-data" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-accounts", "customer-operational-data", "internal-business-data" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + 
"diagram_tweak_constraint": true + }, + { + "id": "erp-system\u003enfs-filesystem-access", + "source_id": "erp-system", + "target_id": "contract-fileserver", + "title": "NFS Filesystem Access", + "description": "Link to the file system", + "protocol": "nfs", + "data_assets_sent": [ + "customer-contracts" + ], + "data_assets_received": [ + "customer-contracts" + ], + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } - ], - "DiagramTweakOrder": 0, - "RAA": 0 + ] }, "external-dev-client": { - "Id": "external-dev-client", - "Title": "External Development Client", - "Description": "External developer client", - "Usage": 1, - "Type": 0, - "Size": 0, - "Technology": 5, - "Machine": 0, - "Internet": true, - "MultiTenant": true, - "Redundant": false, - "CustomDevelopedParts": false, - "OutOfScope": true, - "UsedAsClientByHuman": true, - "Encryption": 0, - "JustificationOutOfScope": "Owned and managed by external developers", - "Owner": "External Developers", - "Confidentiality": 3, - "Integrity": 3, - "Availability": 1, - "JustificationCiaRating": "The clients used by external developers to create parts of the application code.\n", - "Tags": [ + "id": "external-dev-client", + "title": "External Development Client", + "description": "External developer client", + "usage": "devops", + "technology": "devops-client", + "internet": true, + "multi_tenant": true, + "out_of_scope": true, + "used_as_client_by_human": true, + "justification_out_of_scope": "Owned and managed by external developers", + "owner": "External Developers", + "confidentiality": "confidential", + "integrity": "critical", + "availability": "operational", + "justification_cia_rating": "The clients used by external developers to create parts of the application code.\n", + "tags": [ "linux" ], - "DataAssetsProcessed": [ + "data_assets_processed": [ "client-application-code", "server-application-code" ], - "DataAssetsStored": [ + "data_assets_stored": [ "client-application-code", 
"server-application-code" ], - "DataFormatsAccepted": [ - 3 + "data_formats_accepted": [ + "file" ], - "CommunicationLinks": [ - { - "Id": "external-dev-client\u003ejenkins-web-ui-access", - "SourceId": "external-dev-client", - "TargetId": "jenkins-buildserver", - "Title": "Jenkins Web-UI Access", - "Description": "Link to the Jenkins build server", - "Protocol": 2, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 1, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ - "build-job-config" - ], - "DataAssetsReceived": [ - "build-job-config" - ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true - }, + "communication_links": [ { - "Id": "external-dev-client\u003egit-repo-code-write-access", - "SourceId": "external-dev-client", - "TargetId": "git-repo", - "Title": "Git-Repo Code Write Access", - "Description": "Link to the Git repo", - "Protocol": 20, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 4, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": "external-dev-client\u003egit-repo-code-write-access", + "source_id": "external-dev-client", + "target_id": "git-repo", + "title": "Git-Repo Code Write Access", + "description": "Link to the Git repo", + "protocol": "ssh", + "authentication": "client-certificate", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ "client-application-code", "server-application-code" ], - "DataAssetsReceived": [ + "data_assets_received": [ "client-application-code", "server-application-code" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, { - "Id": "external-dev-client\u003egit-repo-web-ui-access", - "SourceId": "external-dev-client", - "TargetId": "git-repo", - "Title": "Git-Repo Web-UI Access", - "Description": "Link to the Git repo", - "Protocol": 2, - "Tags": [], - "VPN": false, - "IpFiltered": false, - 
"Readonly": false, - "Authentication": 3, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": "external-dev-client\u003egit-repo-web-ui-access", + "source_id": "external-dev-client", + "target_id": "git-repo", + "title": "Git-Repo Web-UI Access", + "description": "Link to the Git repo", + "protocol": "https", + "authentication": "token", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ "client-application-code", "server-application-code" ], - "DataAssetsReceived": [ + "data_assets_received": [ "client-application-code", "server-application-code" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true + }, + { + "id": "external-dev-client\u003ejenkins-web-ui-access", + "source_id": "external-dev-client", + "target_id": "jenkins-buildserver", + "title": "Jenkins Web-UI Access", + "description": "Link to the Jenkins build server", + "protocol": "https", + "authentication": "credentials", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ + "build-job-config" + ], + "data_assets_received": [ + "build-job-config" + ], + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } - ], - "DiagramTweakOrder": 0, - "RAA": 0 + ] }, "git-repo": { - "Id": "git-repo", - "Title": "Git Repository", - "Description": "Git repository server", - "Usage": 1, - "Type": 1, - "Size": 0, - "Technology": 23, - "Machine": 1, - "Internet": false, - "MultiTenant": true, - "Redundant": false, - "CustomDevelopedParts": false, - "OutOfScope": false, - "UsedAsClientByHuman": false, - "Encryption": 0, - "JustificationOutOfScope": "", - "Owner": "Company ABC", - "Confidentiality": 3, - "Integrity": 2, - "Availability": 2, - "JustificationCiaRating": "The code repo pipeline might contain sensitive configuration values like backend credentials, certificates etc. 
and is therefore rated as confidential.\n", - "Tags": [ + "id": "git-repo", + "title": "Git Repository", + "description": "Git repository server", + "usage": "devops", + "type": "process", + "technology": "sourcecode-repository", + "machine": "virtual", + "multi_tenant": true, + "owner": "Company ABC", + "confidentiality": "confidential", + "integrity": "important", + "availability": "important", + "justification_cia_rating": "The code repo pipeline might contain sensitive configuration values like backend credentials, certificates etc. and is therefore rated as confidential.\n", + "tags": [ "linux", "git" ], - "DataAssetsProcessed": [ + "data_assets_processed": [ "client-application-code", "server-application-code" ], - "DataAssetsStored": [ + "data_assets_stored": [ "client-application-code", "server-application-code" ], - "DataFormatsAccepted": [ - 3 - ], - "CommunicationLinks": [], - "DiagramTweakOrder": 0, - "RAA": 0 + "data_formats_accepted": [ + "file" + ] }, "identity-provider": { - "Id": "identity-provider", - "Title": "Identity Provider", - "Description": "Identity provider server", - "Usage": 0, - "Type": 1, - "Size": 3, - "Technology": 31, - "Machine": 1, - "Internet": false, - "MultiTenant": false, - "Redundant": false, - "CustomDevelopedParts": false, - "OutOfScope": false, - "UsedAsClientByHuman": false, - "Encryption": 0, - "JustificationOutOfScope": "", - "Owner": "Company ABC", - "Confidentiality": 3, - "Integrity": 3, - "Availability": 3, - "JustificationCiaRating": "The auth data of the application\n", - "Tags": [ + "id": "identity-provider", + "title": "Identity Provider", + "description": "Identity provider server", + "type": "process", + "size": "component", + "technology": "identity-provider", + "machine": "virtual", + "owner": "Company ABC", + "confidentiality": "confidential", + "integrity": "critical", + "availability": "critical", + "justification_cia_rating": "The auth data of the application\n", + "tags": [ "linux", "jboss", "keycloak" 
], - "DataAssetsProcessed": [ + "data_assets_processed": [ "customer-accounts" ], - "DataAssetsStored": [], - "DataFormatsAccepted": [], - "CommunicationLinks": [ + "communication_links": [ { - "Id": "identity-provider\u003eldap-credential-check-traffic", - "SourceId": "identity-provider", - "TargetId": "ldap-auth-server", - "Title": "LDAP Credential Check Traffic", - "Description": "Link to the LDAP server", - "Protocol": 33, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 1, - "Authorization": 1, - "Usage": 0, - "DataAssetsSent": [ + "id": "identity-provider\u003eldap-credential-check-traffic", + "source_id": "identity-provider", + "target_id": "ldap-auth-server", + "title": "LDAP Credential Check Traffic", + "description": "Link to the LDAP server", + "protocol": "ldaps", + "authentication": "credentials", + "authorization": "technical-user", + "data_assets_sent": [ "customer-accounts" ], - "DataAssetsReceived": null, - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } - ], - "DiagramTweakOrder": 0, - "RAA": 0 + ] }, "jenkins-buildserver": { - "Id": "jenkins-buildserver", - "Title": "Jenkins Buildserver", - "Description": "Jenkins buildserver", - "Usage": 1, - "Type": 1, - "Size": 0, - "Technology": 22, - "Machine": 1, - "Internet": false, - "MultiTenant": true, - "Redundant": false, - "CustomDevelopedParts": false, - "OutOfScope": false, - "UsedAsClientByHuman": false, - "Encryption": 0, - "JustificationOutOfScope": "", - "Owner": "Company ABC", - "Confidentiality": 3, - "Integrity": 3, - "Availability": 2, - "JustificationCiaRating": "The build pipeline might contain sensitive configuration values like backend credentials, certificates etc. and is therefore rated as confidential. 
The integrity and availability is rated as critical and important due to the risk of reputation damage and application update unavailability when the build pipeline is compromised.\n", - "Tags": [ + "id": "jenkins-buildserver", + "title": "Jenkins Buildserver", + "description": "Jenkins buildserver", + "usage": "devops", + "type": "process", + "technology": "build-pipeline", + "machine": "virtual", + "multi_tenant": true, + "owner": "Company ABC", + "confidentiality": "confidential", + "integrity": "critical", + "availability": "important", + "justification_cia_rating": "The build pipeline might contain sensitive configuration values like backend credentials, certificates etc. and is therefore rated as confidential. The integrity and availability is rated as critical and important due to the risk of reputation damage and application update unavailability when the build pipeline is compromised.\n", + "tags": [ "linux", "jenkins" ], - "DataAssetsProcessed": [ + "data_assets_processed": [ "build-job-config", "client-application-code", "server-application-code", "marketing-material" ], - "DataAssetsStored": [ + "data_assets_stored": [ "build-job-config", "client-application-code", "server-application-code", "marketing-material" ], - "DataFormatsAccepted": [ - 3, - 2 + "data_formats_accepted": [ + "file", + "serialization" ], - "CommunicationLinks": [ + "communication_links": [ { - "Id": "jenkins-buildserver\u003egit-repo-code-read-access", - "SourceId": "jenkins-buildserver", - "TargetId": "git-repo", - "Title": "Git Repo Code Read Access", - "Description": "Link to the Git repository server", - "Protocol": 20, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": true, - "Authentication": 4, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": null, - "DataAssetsReceived": [ + "id": "jenkins-buildserver\u003egit-repo-code-read-access", + "source_id": "jenkins-buildserver", + "target_id": "git-repo", + "title": "Git Repo Code Read Access", + 
"description": "Link to the Git repository server", + "protocol": "ssh", + "readonly": true, + "authentication": "client-certificate", + "authorization": "technical-user", + "usage": "devops", + "data_assets_received": [ "client-application-code", "server-application-code" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, { - "Id": "jenkins-buildserver\u003eapplication-deployment", - "SourceId": "jenkins-buildserver", - "TargetId": "apache-webserver", - "Title": "Application Deployment", - "Description": "Link to the Apache webserver", - "Protocol": 20, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 4, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": "jenkins-buildserver\u003eapplication-deployment", + "source_id": "jenkins-buildserver", + "target_id": "apache-webserver", + "title": "Application Deployment", + "description": "Link to the Apache webserver", + "protocol": "ssh", + "authentication": "client-certificate", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ "client-application-code", "server-application-code" ], - "DataAssetsReceived": null, - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, { - "Id": "jenkins-buildserver\u003ecms-updates", - "SourceId": "jenkins-buildserver", - "TargetId": "marketing-cms", - "Title": "CMS Updates", - "Description": "Link to the CMS", - "Protocol": 20, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 4, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": "jenkins-buildserver\u003ecms-updates", + "source_id": "jenkins-buildserver", + "target_id": "marketing-cms", + "title": "CMS Updates", + "description": "Link to the CMS", + "protocol": "ssh", + "authentication": "client-certificate", + "authorization": 
"technical-user", + "usage": "devops", + "data_assets_sent": [ "marketing-material" ], - "DataAssetsReceived": null, - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } - ], - "DiagramTweakOrder": 0, - "RAA": 0 + ] }, "ldap-auth-server": { - "Id": "ldap-auth-server", - "Title": "LDAP Auth Server", - "Description": "LDAP authentication server", - "Usage": 0, - "Type": 2, - "Size": 3, - "Technology": 32, - "Machine": 0, - "Internet": false, - "MultiTenant": false, - "Redundant": false, - "CustomDevelopedParts": false, - "OutOfScope": false, - "UsedAsClientByHuman": false, - "Encryption": 1, - "JustificationOutOfScope": "", - "Owner": "Company ABC", - "Confidentiality": 3, - "Integrity": 3, - "Availability": 3, - "JustificationCiaRating": "The auth data of the application\n", - "Tags": [ + "id": "ldap-auth-server", + "title": "LDAP Auth Server", + "description": "LDAP authentication server", + "type": "datastore", + "size": "component", + "technology": "identity-store-ldap", + "encryption": "transparent", + "owner": "Company ABC", + "confidentiality": "confidential", + "integrity": "critical", + "availability": "critical", + "justification_cia_rating": "The auth data of the application\n", + "tags": [ "linux" ], - "DataAssetsProcessed": [ + "data_assets_processed": [ "customer-accounts" ], - "DataAssetsStored": [ + "data_assets_stored": [ "customer-accounts" - ], - "DataFormatsAccepted": [], - "CommunicationLinks": [], - "DiagramTweakOrder": 0, - "RAA": 0 + ] }, "load-balancer": { - "Id": "load-balancer", - "Title": "Load Balancer", - "Description": "Load Balancer (HA-Proxy)", - "Usage": 0, - "Type": 1, - "Size": 3, - "Technology": 21, - "Machine": 0, - "Internet": false, - "MultiTenant": false, - "Redundant": false, - "CustomDevelopedParts": false, - "OutOfScope": false, - "UsedAsClientByHuman": false, - "Encryption": 0, - "JustificationOutOfScope": "", - "Owner": "Company ABC", - 
"Confidentiality": 1, - "Integrity": 4, - "Availability": 4, - "JustificationCiaRating": "The correct configuration and reachability of the load balancer is mandatory for all customer and Company XYZ usages of the portal and ERP system.\n", - "Tags": [], - "DataAssetsProcessed": [ + "id": "load-balancer", + "title": "Load Balancer", + "description": "Load Balancer (HA-Proxy)", + "type": "process", + "size": "component", + "technology": "load-balancer", + "owner": "Company ABC", + "confidentiality": "internal", + "integrity": "mission-critical", + "availability": "mission-critical", + "justification_cia_rating": "The correct configuration and reachability of the load balancer is mandatory for all customer and Company XYZ usages of the portal and ERP system.\n", + "data_assets_processed": [ "customer-accounts", "customer-operational-data", "customer-contracts", @@ -1102,243 +857,192 @@ "client-application-code", "marketing-material" ], - "DataAssetsStored": [], - "DataFormatsAccepted": [], - "CommunicationLinks": [ + "communication_links": [ { - "Id": "load-balancer\u003ecms-content-traffic", - "SourceId": "load-balancer", - "TargetId": "marketing-cms", - "Title": "CMS Content Traffic", - "Description": "Link to the CMS server", - "Protocol": 1, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": true, - "Authentication": 0, - "Authorization": 0, - "Usage": 0, - "DataAssetsSent": null, - "DataAssetsReceived": [ - "marketing-material" - ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true - }, - { - "Id": "load-balancer\u003eweb-application-traffic", - "SourceId": "load-balancer", - "TargetId": "apache-webserver", - "Title": "Web Application Traffic", - "Description": "Link to the web server", - "Protocol": 1, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 2, - "Authorization": 2, - "Usage": 0, - "DataAssetsSent": [ + "id": "load-balancer\u003eweb-application-traffic", + "source_id": 
"load-balancer", + "target_id": "apache-webserver", + "title": "Web Application Traffic", + "description": "Link to the web server", + "protocol": "http", + "authentication": "session-id", + "authorization": "enduser-identity-propagation", + "data_assets_sent": [ "customer-accounts", "customer-operational-data" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-accounts", "customer-operational-data", "customer-contracts", "client-application-code" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true + }, + { + "id": "load-balancer\u003ecms-content-traffic", + "source_id": "load-balancer", + "target_id": "marketing-cms", + "title": "CMS Content Traffic", + "description": "Link to the CMS server", + "protocol": "http", + "readonly": true, + "data_assets_received": [ + "marketing-material" + ], + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } - ], - "DiagramTweakOrder": 0, - "RAA": 0 + ] }, "marketing-cms": { - "Id": "marketing-cms", - "Title": "Marketing CMS", - "Description": "CMS for the marketing content", - "Usage": 0, - "Type": 1, - "Size": 2, - "Technology": 13, - "Machine": 2, - "Internet": false, - "MultiTenant": false, - "Redundant": false, - "CustomDevelopedParts": true, - "OutOfScope": false, - "UsedAsClientByHuman": false, - "Encryption": 0, - "JustificationOutOfScope": "", - "Owner": "Company ABC", - "Confidentiality": 1, - "Integrity": 2, - "Availability": 2, - "JustificationCiaRating": "The correct configuration and reachability of the web server is mandatory for all customer usages of the portal.\n", - "Tags": [ + "id": "marketing-cms", + "title": "Marketing CMS", + "description": "CMS for the marketing content", + "type": "process", + "size": "application", + "technology": "cms", + "machine": "container", + "custom_developed_parts": true, + "owner": "Company ABC", + "confidentiality": "internal", + "integrity": "important", + "availability": 
"important", + "justification_cia_rating": "The correct configuration and reachability of the web server is mandatory for all customer usages of the portal.\n", + "tags": [ "linux" ], - "DataAssetsProcessed": [ + "data_assets_processed": [ "marketing-material", "customer-accounts" ], - "DataAssetsStored": [ + "data_assets_stored": [ "marketing-material" ], - "DataFormatsAccepted": [], - "CommunicationLinks": [ + "communication_links": [ { - "Id": "marketing-cms\u003eauth-traffic", - "SourceId": "marketing-cms", - "TargetId": "ldap-auth-server", - "Title": "Auth Traffic", - "Description": "Link to the LDAP auth server", - "Protocol": 32, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": true, - "Authentication": 1, - "Authorization": 1, - "Usage": 0, - "DataAssetsSent": [ + "id": "marketing-cms\u003eauth-traffic", + "source_id": "marketing-cms", + "target_id": "ldap-auth-server", + "title": "Auth Traffic", + "description": "Link to the LDAP auth server", + "protocol": "ldap", + "readonly": true, + "authentication": "credentials", + "authorization": "technical-user", + "data_assets_sent": [ "customer-accounts" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-accounts" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } - ], - "DiagramTweakOrder": 0, - "RAA": 0 + ] }, "sql-database": { - "Id": "sql-database", - "Title": "Customer Contract Database", - "Description": "The database behind the ERP system", - "Usage": 0, - "Type": 2, - "Size": 3, - "Technology": 9, - "Machine": 1, - "Internet": false, - "MultiTenant": false, - "Redundant": false, - "CustomDevelopedParts": false, - "OutOfScope": false, - "UsedAsClientByHuman": false, - "Encryption": 2, - "JustificationOutOfScope": "", - "Owner": "Company ABC", - "Confidentiality": 4, - "Integrity": 4, - "Availability": 4, - "JustificationCiaRating": "The ERP system's database contains business-relevant sensitive 
data for the leasing processes and eventually also for other Company XYZ internal processes.\n", - "Tags": [ + "id": "sql-database", + "title": "Customer Contract Database", + "description": "The database behind the ERP system", + "type": "datastore", + "size": "component", + "technology": "database", + "machine": "virtual", + "encryption": "data-with-symmetric-shared-key", + "owner": "Company ABC", + "confidentiality": "strictly-confidential", + "integrity": "mission-critical", + "availability": "mission-critical", + "justification_cia_rating": "The ERP system's database contains business-relevant sensitive data for the leasing processes and eventually also for other Company XYZ internal processes.\n", + "tags": [ "linux", "mysql" ], - "DataAssetsProcessed": [ + "data_assets_processed": [ "db-dumps" ], - "DataAssetsStored": [ + "data_assets_stored": [ "customer-accounts", "customer-operational-data", "internal-business-data" - ], - "DataFormatsAccepted": [], - "CommunicationLinks": [], - "DiagramTweakOrder": 0, - "RAA": 0 + ] } }, "trust_boundaries": { "application-network": { - "Id": "application-network", - "Title": "Application Network", - "Description": "Application Network", - "Type": 3, - "Tags": [ + "id": "application-network", + "title": "Application Network", + "description": "Application Network", + "type": "network-cloud-provider", + "tags": [ "aws" ], - "TechnicalAssetsInside": [ + "technical_assets_inside": [ "load-balancer" ], - "TrustBoundariesNested": [ + "trust_boundaries_nested": [ "web-dmz", "erp-dmz", "auth-env" ] }, "auth-env": { - "Id": "auth-env", - "Title": "Auth Handling Environment", - "Description": "Auth Handling Environment", - "Type": 6, - "Tags": [], - "TechnicalAssetsInside": [ + "id": "auth-env", + "title": "Auth Handling Environment", + "description": "Auth Handling Environment", + "type": "execution-environment", + "technical_assets_inside": [ "identity-provider", "ldap-auth-server" - ], - "TrustBoundariesNested": [] + ] }, 
"dev-network": { - "Id": "dev-network", - "Title": "Dev Network", - "Description": "Development Network", - "Type": 0, - "Tags": [], - "TechnicalAssetsInside": [ + "id": "dev-network", + "title": "Dev Network", + "description": "Development Network", + "technical_assets_inside": [ "jenkins-buildserver", "git-repo", "backend-admin-client", "backoffice-client" - ], - "TrustBoundariesNested": [] + ] }, "erp-dmz": { - "Id": "erp-dmz", - "Title": "ERP DMZ", - "Description": "ERP DMZ", - "Type": 4, - "Tags": [ + "id": "erp-dmz", + "title": "ERP DMZ", + "description": "ERP DMZ", + "type": "network-cloud-security-group", + "tags": [ "some-erp" ], - "TechnicalAssetsInside": [ + "technical_assets_inside": [ "erp-system", "contract-fileserver", "sql-database" - ], - "TrustBoundariesNested": [] + ] }, "web-dmz": { - "Id": "web-dmz", - "Title": "Web DMZ", - "Description": "Web DMZ", - "Type": 4, - "Tags": [], - "TechnicalAssetsInside": [ + "id": "web-dmz", + "title": "Web DMZ", + "description": "Web DMZ", + "type": "network-cloud-security-group", + "technical_assets_inside": [ "apache-webserver", "marketing-cms" - ], - "TrustBoundariesNested": [] + ] } }, "shared_runtimes": { "webapp-virtualization": { - "Id": "webapp-virtualization", - "Title": "WebApp and Backoffice Virtualization", - "Description": "WebApp Virtualization", - "Tags": [ + "id": "webapp-virtualization", + "title": "WebApp and Backoffice Virtualization", + "description": "WebApp Virtualization", + "tags": [ "vmware" ], - "TechnicalAssetsRunning": [ + "technical_assets_running": [ "apache-webserver", "marketing-cms", "erp-system", @@ -1349,552 +1053,1177 @@ }, "individual_risk_categories": { "something-strange": { - "Id": "something-strange", - "Title": "Some Individual Risk Example", - "Description": "Some text describing the risk category...", - "Impact": "Some text describing the impact...", - "ASVS": "V0 - Something Strange", - "CheatSheet": "https://example.com", - "Action": "Some text describing the 
action...", - "Mitigation": "Some text describing the mitigation...", - "Check": "Check if XYZ...", - "DetectionLogic": "Some text describing the detection logic...", - "RiskAssessment": "Some text describing the risk assessment...", - "FalsePositives": "Some text describing the most common types of false positives...", - "Function": "business-side", - "STRIDE": "repudiation", - "ModelFailurePossibleReason": false, - "CWE": 693 + "id": "something-strange", + "title": "Some Individual Risk Example", + "description": "Some text describing the risk category...", + "impact": "Some text describing the impact...", + "asvs": "V0 - Something Strange", + "cheat_sheet": "https://example.com", + "action": "Some text describing the action...", + "mitigation": "Some text describing the mitigation...", + "check": "Check if XYZ...", + "detection_logic": "Some text describing the detection logic...", + "risk_assessment": "Some text describing the risk assessment...", + "false_positives": "Some text describing the most common types of false positives...", + "stride": "repudiation", + "cwe": 693 + } + }, + "built_in_risk_categories": { + "accidental-secret-leak": { + "id": "accidental-secret-leak", + "title": "Accidental Secret Leak", + "description": "Sourcecode repositories (including their histories) as well as artifact registries can accidentally contain secrets like checked-in or packaged-in passwords, API tokens, certificates, crypto keys, etc.", + "impact": "If this risk is unmitigated, attackers which have access to affected sourcecode repositories or artifact registries might find secrets accidentally checked-in.", + "asvs": "V14 - Configuration Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", + "action": "Build Pipeline Hardening", + "mitigation": "Establish measures preventing accidental check-in or package-in of secrets into sourcecode repositories and artifact registries. 
This starts by using good .gitignore and .dockerignore files, but does not stop there. See for example tools like \u003ci\u003e\"git-secrets\" or \"Talisman\"\u003c/i\u003e to have check-in preventive measures for secrets. Consider also to regularly scan your repositories for secrets accidentally checked-in using scanning tools like \u003ci\u003e\"gitleaks\" or \"gitrob\"\u003c/i\u003e.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope sourcecode repositories and artifact registries.", + "risk_assessment": "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", + "false_positives": "Usually no false positives.", + "function": "operations", + "stride": "information-disclosure", + "cwe": 200 + }, + "code-backdooring": { + "id": "code-backdooring", + "title": "Code Backdooring", + "description": "For each build-pipeline component Code Backdooring risks might arise where attackers compromise the build-pipeline in order to let backdoored artifacts be shipped into production. 
Aside from direct code backdooring this includes backdooring of dependencies and even of more lower-level build infrastructure, like backdooring compilers (similar to what the XcodeGhost malware did) or dependencies.", + "impact": "If this risk remains unmitigated, attackers might be able to execute code on and completely takeover production environments.", + "asvs": "V10 - Malicious Code Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Vulnerable_Dependency_Management_Cheat_Sheet.html", + "action": "Build Pipeline Hardening", + "mitigation": "Reduce the attack surface of backdooring the build pipeline by not directly exposing the build pipeline components on the public internet and also not exposing it in front of unmanaged (out-of-scope) developer clients.Also consider the use of code signing to prevent code modifications.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope development relevant technical assets which are either accessed by out-of-scope unmanaged developer clients and/or are directly accessed by any kind of internet-located (non-VPN) component or are themselves directly located on the internet.", + "risk_assessment": "The risk rating depends on the confidentiality and integrity rating of the code being handled and deployed as well as the placement/calling of this technical asset on/from the internet.", + "false_positives": "When the build-pipeline and sourcecode-repo is not exposed to the internet and considered fully trusted (which implies that all accessing clients are also considered fully trusted in terms of their patch management and applied hardening, which must be equivalent to a managed developer client environment) this can be considered a false positive after individual review.", + "function": "operations", + "stride": "tampering", + "cwe": 912 + }, + "container-baseimage-backdooring": { + "id": 
"container-baseimage-backdooring", + "title": "Container Base Image Backdooring", + "description": "When a technical asset is built using container technologies, Base Image Backdooring risks might arise where base images and other layers used contain vulnerable components or backdoors.\u003cbr\u003e\u003cbr\u003eSee for example: \u003ca href=\"https://techcrunch.com/2018/06/15/tainted-crypto-mining-containers-pulled-from-docker-hub/\"\u003ehttps://techcrunch.com/2018/06/15/tainted-crypto-mining-containers-pulled-from-docker-hub/\u003c/a\u003e", + "impact": "If this risk is unmitigated, attackers might be able to deeply persist in the target system by executing code in deployed containers.", + "asvs": "V10 - Malicious Code Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Docker_Security_Cheat_Sheet.html", + "action": "Container Infrastructure Hardening", + "mitigation": "Apply hardening of all container infrastructures (see for example the \u003ci\u003eCIS-Benchmarks for Docker and Kubernetes\u003c/i\u003e and the \u003ci\u003eDocker Bench for Security\u003c/i\u003e). Use only trusted base images of the original vendors, verify digital signatures and apply image creation best practices. Also consider using Google's \u003ci\u003eDistroless\u003c/i\u003e base images or otherwise very small base images. Regularly execute container image scans with tools checking the layers for vulnerable components.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS/CSVS applied?", + "detection_logic": "In-scope technical assets running as containers.", + "risk_assessment": "The risk rating depends on the sensitivity of the technical asset itself and of the data assets.", + "false_positives": "Fully trusted (i.e. 
reviewed and cryptographically signed or similar) base images of containers can be considered as false positives after individual review.", + "function": "operations", + "stride": "tampering", + "cwe": 912 + }, + "container-platform-escape": { + "id": "container-platform-escape", + "title": "Container Platform Escape", + "description": "Container platforms are especially interesting targets for attackers as they host big parts of a containerized runtime infrastructure. When not configured and operated with security best practices in mind, attackers might exploit a vulnerability inside a container and escape towards the platform as highly privileged users. These scenarios might give attackers capabilities to attack every other container as owning the container platform (via container escape attacks) equals to owning every container.", + "impact": "If this risk is unmitigated, attackers which have successfully compromised a container (via other vulnerabilities) might be able to deeply persist in the target system by executing code in many deployed containers and the container platform itself.", + "asvs": "V14 - Configuration Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Docker_Security_Cheat_Sheet.html", + "action": "Container Infrastructure Hardening", + "mitigation": "Apply hardening of all container infrastructures. 
\u003cp\u003eSee for example the \u003ci\u003eCIS-Benchmarks for Docker and Kubernetes\u003c/i\u003e as well as the \u003ci\u003eDocker Bench for Security\u003c/i\u003e ( \u003ca href=\"https://github.com/docker/docker-bench-security\"\u003ehttps://github.com/docker/docker-bench-security\u003c/a\u003e ) or \u003ci\u003eInSpec Checks for Docker and Kubernetes\u003c/i\u003e ( \u003ca href=\"https://github.com/dev-sec/cis-docker-benchmark\"\u003ehttps://github.com/dev-sec/cis-docker-benchmark\u003c/a\u003e and \u003ca href=\"https://github.com/dev-sec/cis-kubernetes-benchmark\"\u003ehttps://github.com/dev-sec/cis-kubernetes-benchmark\u003c/a\u003e ). Use only trusted base images, verify digital signatures and apply image creation best practices. Also consider using Google's \u003ci\u003eDistroless\u003c/i\u003e base images or otherwise very small base images. Apply namespace isolation and node affinity to separate pods from each other in terms of access and nodes the same style as you separate data.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS or CSVS chapter applied?", + "detection_logic": "In-scope container platforms.", + "risk_assessment": "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", + "false_positives": "Container platforms not running parts of the target architecture can be considered as false positives after individual review.", + "function": "operations", + "stride": "elevation-of-privilege", + "cwe": 1008 + }, + "cross-site-request-forgery": { + "id": "cross-site-request-forgery", + "title": "Cross-Site Request Forgery (CSRF)", + "description": "When a web application is accessed via web protocols Cross-Site Request Forgery (CSRF) risks might arise.", + "impact": "If this risk remains unmitigated, attackers might be able to trick logged-in victim users into unwanted actions within the web application by visiting an attacker controlled web 
site.", + "asvs": "V4 - Access Control Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html", + "action": "CSRF Prevention", + "mitigation": "Try to use anti-CSRF tokens or the double-submit patterns (at least for logged-in requests). When your authentication scheme depends on cookies (like session or token cookies), consider marking them with the same-site flag. When a third-party product is used instead of custom developed software, check if the product applies the proper mitigation and ensure a reasonable patch-level.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope web applications accessed via typical web access protocols.", + "risk_assessment": "The risk rating depends on the integrity rating of the data sent across the communication link.", + "false_positives": "Web applications passing the authentication state via custom headers instead of cookies can eventually be false positives. Also when the web application is not accessed via a browser-like component (i.e. not by a human user initiating the request that gets passed through all components until it reaches the web application) this can be considered a false positive.", + "function": "development", + "cwe": 352 + }, + "cross-site-scripting": { + "id": "cross-site-scripting", + "title": "Cross-Site Scripting (XSS)", + "description": "For each web application Cross-Site Scripting (XSS) risks might arise. 
In terms of the overall risk level take other applications running on the same domain into account as well.", + "impact": "If this risk remains unmitigated, attackers might be able to access individual victim sessions and steal or modify user data.", + "asvs": "V5 - Validation, Sanitization and Encoding Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Cross_Site_Scripting_Prevention_Cheat_Sheet.html", + "action": "XSS Prevention", + "mitigation": "Try to encode all values sent back to the browser and also handle DOM-manipulations in a safe way to avoid DOM-based XSS. When a third-party product is used instead of custom developed software, check if the product applies the proper mitigation and ensure a reasonable patch-level.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope web applications.", + "risk_assessment": "The risk rating depends on the sensitivity of the data processed or stored in the web application.", + "false_positives": "When the technical asset is not accessed via a browser-like component (i.e. not by a human user initiating the request that gets passed through all components until it reaches the web application) this can be considered a false positive.", + "function": "development", + "stride": "tampering", + "cwe": 79 + }, + "dos-risky-access-across-trust-boundary": { + "id": "dos-risky-access-across-trust-boundary", + "title": "DoS-risky Access Across Trust-Boundary", + "description": "Assets accessed across trust boundaries with critical or mission-critical availability rating are more prone to Denial-of-Service (DoS) risks.", + "impact": "If this risk remains unmitigated, attackers might be able to disturb the availability of important parts of the system.", + "asvs": "V1 - Architecture, Design and Threat Modeling Requirements", + "cheat_sheet": 
"https://cheatsheetseries.owasp.org/cheatsheets/Denial_of_Service_Cheat_Sheet.html", + "action": "Anti-DoS Measures", + "mitigation": "Apply anti-DoS techniques like throttling and/or per-client load blocking with quotas. Also for maintenance access routes consider applying a VPN instead of public reachable interfaces. Generally applying redundancy on the targeted technical asset reduces the risk of DoS.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope technical assets (excluding load-balancer) with availability rating of critical or higher which have incoming data-flows across a network trust-boundary (excluding devops usage).", + "risk_assessment": "Matching technical assets with availability rating of critical or higher are at low risk. When the availability rating is mission-critical and neither a VPN nor IP filter for the incoming data-flow nor redundancy for the asset is applied, the risk-rating is considered medium.", + "false_positives": "When the accessed target operations are not time- or resource-consuming.", + "function": "operations", + "stride": "denial-of-service", + "cwe": 400 + }, + "incomplete-model": { + "id": "incomplete-model", + "title": "Incomplete Model", + "description": "When the threat model contains unknown technologies or transfers data over unknown protocols, this is an indicator for an incomplete model.", + "impact": "If this risk is unmitigated, other risks might not be noticed as the model is incomplete.", + "asvs": "V1 - Architecture, Design and Threat Modeling Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Threat_Modeling_Cheat_Sheet.html", + "action": "Threat Modeling Completeness", + "mitigation": "Try to find out what technology or protocol is used instead of specifying that it is unknown.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "All 
technical assets and communication links with technology type or protocol type specified as unknown.", + "risk_assessment": "low", + "false_positives": "Usually no false positives as this looks like an incomplete model.", + "function": "architecture", + "stride": "information-disclosure", + "model_failure_possible_reason": true, + "cwe": 1008 + }, + "ldap-injection": { + "id": "ldap-injection", + "title": "LDAP-Injection", + "description": "When an LDAP server is accessed LDAP-Injection risks might arise. The risk rating depends on the sensitivity of the LDAP server itself and of the data assets processed or stored.", + "impact": "If this risk remains unmitigated, attackers might be able to modify LDAP queries and access more data from the LDAP server than allowed.", + "asvs": "V5 - Validation, Sanitization and Encoding Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/LDAP_Injection_Prevention_Cheat_Sheet.html", + "action": "LDAP-Injection Prevention", + "mitigation": "Try to use libraries that properly encode LDAP meta characters in searches and queries to access the LDAP server in order to stay safe from LDAP-Injection vulnerabilities. 
When a third-party product is used instead of custom developed software, check if the product applies the proper mitigation and ensure a reasonable patch-level.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope clients accessing LDAP servers via typical LDAP access protocols.", + "risk_assessment": "The risk rating depends on the sensitivity of the LDAP server itself and of the data assets processed or stored.", + "false_positives": "LDAP server queries by search values not consisting of parts controllable by the caller can be considered as false positives after individual review.", + "function": "development", + "stride": "tampering", + "cwe": 90 + }, + "missing-authentication": { + "id": "missing-authentication", + "title": "Missing Authentication", + "description": "Technical assets (especially multi-tenant systems) should authenticate incoming requests when the asset processes or stores sensitive data. ", + "impact": "If this risk is unmitigated, attackers might be able to access or modify sensitive data in an unauthenticated way.", + "asvs": "V2 - Authentication Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Authentication_Cheat_Sheet.html", + "action": "Authentication of Incoming Requests", + "mitigation": "Apply an authentication method to the technical asset. To protect highly sensitive data consider the use of two-factor authentication for human users.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope technical assets (except load-balancer, reverse-proxy, service-registry, waf, ids, and ips and in-process calls) should authenticate incoming requests when the asset processes or stores sensitive data. 
This is especially the case for all multi-tenant assets (there even non-sensitive ones).", + "risk_assessment": "The risk rating (medium or high) depends on the sensitivity of the data sent across the communication link. Monitoring callers are exempted from this risk.", + "false_positives": "Technical assets which do not process requests regarding functionality or data linked to end-users (customers) can be considered as false positives after individual review.", + "function": "architecture", + "stride": "elevation-of-privilege", + "cwe": 306 + }, + "missing-authentication-second-factor": { + "id": "missing-authentication-second-factor", + "title": "Missing Two-Factor Authentication (2FA)", + "description": "Technical assets (especially multi-tenant systems) should authenticate incoming requests with two-factor (2FA) authentication when the asset processes or stores highly sensitive data (in terms of confidentiality, integrity, and availability) and is accessed by humans.", + "impact": "If this risk is unmitigated, attackers might be able to access or modify highly sensitive data without strong authentication.", + "asvs": "V2 - Authentication Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Multifactor_Authentication_Cheat_Sheet.html", + "action": "Authentication with Second Factor (2FA)", + "mitigation": "Apply an authentication method to the technical asset protecting highly sensitive data via two-factor authentication for human users.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope technical assets (except load-balancer, reverse-proxy, waf, ids, and ips) should authenticate incoming requests via two-factor authentication (2FA) when the asset processes or stores highly sensitive data (in terms of confidentiality, integrity, and availability) and is accessed by a client used by a human user.", + "risk_assessment": "medium", + 
"false_positives": "Technical assets which do not process requests regarding functionality or data linked to end-users (customers) can be considered as false positives after individual review.", + "stride": "elevation-of-privilege", + "cwe": 308 + }, + "missing-build-infrastructure": { + "id": "missing-build-infrastructure", + "title": "Missing Build Infrastructure", + "description": "The modeled architecture does not contain a build infrastructure (devops-client, sourcecode-repo, build-pipeline, etc.), which might be the risk of a model missing critical assets (and thus not seeing their risks). If the architecture contains custom-developed parts, the pipeline where code gets developed and built needs to be part of the model.", + "impact": "If this risk is unmitigated, attackers might be able to exploit risks unseen in this threat model due to critical build infrastructure components missing in the model.", + "asvs": "V1 - Architecture, Design and Threat Modeling Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", + "action": "Build Pipeline Hardening", + "mitigation": "Include the build infrastructure in the model.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "Models with in-scope custom-developed parts missing in-scope development (code creation) and build infrastructure components (devops-client, sourcecode-repo, build-pipeline, etc.).", + "risk_assessment": "The risk rating depends on the highest sensitivity of the in-scope assets running custom-developed parts.", + "false_positives": "Models not having any custom-developed parts can be considered as false positives after individual review.", + "function": "architecture", + "stride": "tampering", + "model_failure_possible_reason": true, + "cwe": 1127 + }, + "missing-cloud-hardening": { + "id": "missing-cloud-hardening", + "title": "Missing Cloud Hardening", + 
"description": "Cloud components should be hardened according to the cloud vendor best practices. This affects their configuration, auditing, and further areas.", + "impact": "If this risk is unmitigated, attackers might access cloud components in an unintended way.", + "asvs": "V1 - Architecture, Design and Threat Modeling Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", + "action": "Cloud Hardening", + "mitigation": "Apply hardening of all cloud components and services, taking special care to follow the individual risk descriptions (which depend on the cloud provider tags in the model). \u003cbr\u003e\u003cbr\u003eFor \u003cb\u003eAmazon Web Services (AWS)\u003c/b\u003e: Follow the \u003ci\u003eCIS Benchmark for Amazon Web Services\u003c/i\u003e (see also the automated checks of cloud audit tools like \u003ci\u003e\"PacBot\", \"CloudSploit\", \"CloudMapper\", \"ScoutSuite\", or \"Prowler AWS CIS Benchmark Tool\"\u003c/i\u003e). \u003cbr\u003eFor EC2 and other servers running Amazon Linux, follow the \u003ci\u003eCIS Benchmark for Amazon Linux\u003c/i\u003e and switch to IMDSv2. \u003cbr\u003eFor S3 buckets follow the \u003ci\u003eSecurity Best Practices for Amazon S3\u003c/i\u003e at \u003ca href=\"https://docs.aws.amazon.com/AmazonS3/latest/dev/security-best-practices.html\"\u003ehttps://docs.aws.amazon.com/AmazonS3/latest/dev/security-best-practices.html\u003c/a\u003e to avoid accidental leakage. 
\u003cbr\u003eAlso take a look at some of these tools: \u003ca href=\"https://github.com/toniblyx/my-arsenal-of-aws-security-tools\"\u003ehttps://github.com/toniblyx/my-arsenal-of-aws-security-tools\u003c/a\u003e \u003cbr\u003e\u003cbr\u003eFor \u003cb\u003eMicrosoft Azure\u003c/b\u003e: Follow the \u003ci\u003eCIS Benchmark for Microsoft Azure\u003c/i\u003e (see also the automated checks of cloud audit tools like \u003ci\u003e\"CloudSploit\" or \"ScoutSuite\"\u003c/i\u003e).\u003cbr\u003e\u003cbr\u003eFor \u003cb\u003eGoogle Cloud Platform\u003c/b\u003e: Follow the \u003ci\u003eCIS Benchmark for Google Cloud Computing Platform\u003c/i\u003e (see also the automated checks of cloud audit tools like \u003ci\u003e\"CloudSploit\" or \"ScoutSuite\"\u003c/i\u003e). \u003cbr\u003e\u003cbr\u003eFor \u003cb\u003eOracle Cloud Platform\u003c/b\u003e: Follow the hardening best practices (see also the automated checks of cloud audit tools like \u003ci\u003e\"CloudSploit\"\u003c/i\u003e).", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope cloud components (either residing in cloud trust boundaries or more specifically tagged with cloud provider types).", + "risk_assessment": "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", + "false_positives": "Cloud components not running parts of the target architecture can be considered as false positives after individual review.", + "function": "operations", + "stride": "tampering", + "cwe": 1008 + }, + "missing-file-validation": { + "id": "missing-file-validation", + "title": "Missing File Validation", + "description": "When a technical asset accepts files, these input files should be strictly validated about filename and type.", + "impact": "If this risk is unmitigated, attackers might be able to provide malicious files to the application.", + "asvs": "V12 - File and Resources Verification 
Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/File_Upload_Cheat_Sheet.html", + "action": "File Validation", + "mitigation": "Filter by file extension and discard (if feasible) the name provided. Whitelist the accepted file types and determine the mime-type on the server-side (for example via \"Apache Tika\" or similar checks). If the file is retrievable by end users and/or backoffice employees, consider performing scans for popular malware (if the files can be retrieved much later than they were uploaded, also apply a fresh malware scan during retrieval to scan with newer signatures of popular malware). Also enforce limits on maximum file size to avoid denial-of-service like scenarios.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope technical assets with custom-developed code accepting file data formats.", + "risk_assessment": "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", + "false_positives": "Fully trusted (i.e. 
cryptographically signed or similar) files can be considered as false positives after individual review.", + "function": "development", + "cwe": 434 + }, + "missing-hardening": { + "id": "missing-hardening", + "title": "Missing Hardening", + "description": "Technical assets with a Relative Attacker Attractiveness (RAA) value of 55 % or higher should be explicitly hardened taking best practices and vendor hardening guides into account.", + "impact": "If this risk remains unmitigated, attackers might be able to easier attack high-value targets.", + "asvs": "V14 - Configuration Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", + "action": "System Hardening", + "mitigation": "Try to apply all hardening best practices (like CIS benchmarks, OWASP recommendations, vendor recommendations, DevSec Hardening Framework, DBSAT for Oracle databases, and others).", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope technical assets with RAA values of 55 % or higher. Generally for high-value targets like data stores, application servers, identity providers and ERP systems this limit is reduced to 40 %", + "risk_assessment": "The risk rating depends on the sensitivity of the data processed or stored in the technical asset.", + "false_positives": "Usually no false positives.", + "function": "operations", + "stride": "tampering", + "cwe": 16 + }, + "missing-identity-propagation": { + "id": "missing-identity-propagation", + "title": "Missing Identity Propagation", + "description": "Technical assets (especially multi-tenant systems), which usually process data for end users should authorize every request based on the identity of the end user when the data flow is authenticated (i.e. non-public). 
For DevOps usages at least a technical-user authorization is required.", + "impact": "If this risk is unmitigated, attackers might be able to access or modify foreign data after a successful compromise of a component within the system due to missing resource-based authorization checks.", + "asvs": "V4 - Access Control Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Access_Control_Cheat_Sheet.html", + "action": "Identity Propagation and Resource-based Authorization", + "mitigation": "When processing requests for end users if possible authorize in the backend against the propagated identity of the end user. This can be achieved in passing JWTs or similar tokens and checking them in the backend services. For DevOps usages apply at least a technical-user authorization.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope service-like technical assets which usually process data based on end user requests, if authenticated (i.e. non-public), should authorize incoming requests based on the propagated end user identity when their rating is sensitive. This is especially the case for all multi-tenant assets (there even less-sensitive rated ones). 
DevOps usages are exempted from this risk.", + "risk_assessment": "The risk rating (medium or high) depends on the confidentiality, integrity, and availability rating of the technical asset.", + "false_positives": "Technical assets which do not process requests regarding functionality or data linked to end-users (customers) can be considered as false positives after individual review.", + "function": "architecture", + "stride": "elevation-of-privilege", + "cwe": 284 + }, + "missing-identity-provider-isolation": { + "id": "missing-identity-provider-isolation", + "title": "Missing Identity Provider Isolation", + "description": "Highly sensitive identity provider assets and their identity data stores should be isolated from other assets by their own network segmentation trust-boundary (execution-environment boundaries do not count as network isolation).", + "impact": "If this risk is unmitigated, attackers successfully attacking other components of the system might have an easy path towards highly sensitive identity provider assets and their identity data stores, as they are not separated by network segmentation.", + "asvs": "V1 - Architecture, Design and Threat Modeling Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", + "action": "Network Segmentation", + "mitigation": "Apply a network segmentation trust-boundary around the highly sensitive identity provider assets and their identity data stores.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope identity provider assets and their identity data stores when surrounded by other (not identity-related) assets (without a network trust-boundary in-between). This risk is especially prevalent when other non-identity related assets are within the same execution environment (i.e. same database or same application server).", + "risk_assessment": "Default is high impact. 
The impact is increased to very-high when the asset missing the trust-boundary protection is rated as strictly-confidential or mission-critical.", + "false_positives": "When all assets within the network segmentation trust-boundary are hardened and protected to the same extent as if all were identity providers with data of highest sensitivity.", + "function": "operations", + "stride": "elevation-of-privilege", + "cwe": 1008 + }, + "missing-identity-store": { + "id": "missing-identity-store", + "title": "Missing Identity Store", + "description": "The modeled architecture does not contain an identity store, which might be the risk of a model missing critical assets (and thus not seeing their risks).", + "impact": "If this risk is unmitigated, attackers might be able to exploit risks unseen in this threat model in the identity provider/store that is currently missing in the model.", + "asvs": "V2 - Authentication Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Authentication_Cheat_Sheet.html", + "action": "Identity Store", + "mitigation": "Include an identity store in the model if the application has a login.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "Models with authenticated data-flows authorized via end user identity missing an in-scope identity store.", + "risk_assessment": "The risk rating depends on the sensitivity of the end user-identity authorized technical assets and their data assets processed and stored.", + "false_positives": "Models only offering data/services without any real authentication need can be considered as false positives after individual review.", + "function": "architecture", + "model_failure_possible_reason": true, + "cwe": 287 + }, + "missing-network-segmentation": { + "id": "missing-network-segmentation", + "title": "Missing Network Segmentation", + "description": "Highly sensitive assets and/or data stores residing 
in the same network segment as other lower sensitive assets (like webservers or content management systems etc.) should be better protected by a network segmentation trust-boundary.", + "impact": "If this risk is unmitigated, attackers successfully attacking other components of the system might have an easy path towards more valuable targets, as they are not separated by network segmentation.", + "asvs": "V1 - Architecture, Design and Threat Modeling Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", + "action": "Network Segmentation", + "mitigation": "Apply a network segmentation trust-boundary around the highly sensitive assets and/or data stores.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope technical assets with high sensitivity and RAA values as well as data stores when surrounded by assets (without a network trust-boundary in-between) which are of type client-system, web-server, web-application, cms, web-service-rest, web-service-soap, build-pipeline, sourcecode-repository, monitoring, or similar and there is no direct connection between these (hence no requirement to be so close to each other).", + "risk_assessment": "Default is low risk. 
The risk is increased to medium when the asset missing the trust-boundary protection is rated as strictly-confidential or mission-critical.", + "false_positives": "When all assets within the network segmentation trust-boundary are hardened and protected to the same extent as if all were containing/processing highly sensitive data.", + "function": "operations", + "stride": "elevation-of-privilege", + "cwe": 1008 + }, + "missing-vault": { + "id": "missing-vault", + "title": "Missing Vault (Secret Storage)", + "description": "In order to avoid the risk of secret leakage via config files (when attacked through vulnerabilities being able to read files like Path-Traversal and others), it is best practice to use a separate hardened process with proper authentication, authorization, and audit logging to access config secrets (like credentials, private keys, client certificates, etc.). This component is usually some kind of Vault.", + "impact": "If this risk is unmitigated, attackers might be able to easier steal config secrets (like credentials, private keys, client certificates, etc.) 
once a vulnerability to access files is present and exploited.", + "asvs": "V6 - Stored Cryptography Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Cryptographic_Storage_Cheat_Sheet.html", + "action": "Vault (Secret Storage)", + "mitigation": "Consider using a Vault (Secret Storage) to securely store and access config secrets (like credentials, private keys, client certificates, etc.).", + "check": "Is a Vault (Secret Storage) in place?", + "detection_logic": "Models without a Vault (Secret Storage).", + "risk_assessment": "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", + "false_positives": "Models where no technical assets have any kind of sensitive config data to protect can be considered as false positives after individual review.", + "function": "architecture", + "stride": "information-disclosure", + "model_failure_possible_reason": true, + "cwe": 522 + }, + "missing-vault-isolation": { + "id": "missing-vault-isolation", + "title": "Missing Vault Isolation", + "description": "Highly sensitive vault assets and their data stores should be isolated from other assets by their own network segmentation trust-boundary (execution-environment boundaries do not count as network isolation).", + "impact": "If this risk is unmitigated, attackers successfully attacking other components of the system might have an easy path towards highly sensitive vault assets and their data stores, as they are not separated by network segmentation.", + "asvs": "V1 - Architecture, Design and Threat Modeling Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", + "action": "Network Segmentation", + "mitigation": "Apply a network segmentation trust-boundary around the highly sensitive vault assets and their data stores.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter 
applied?", + "detection_logic": "In-scope vault assets when surrounded by other (not vault-related) assets (without a network trust-boundary in-between). This risk is especially prevalent when other non-vault related assets are within the same execution environment (i.e. same database or same application server).", + "risk_assessment": "Default is medium impact. The impact is increased to high when the asset missing the trust-boundary protection is rated as strictly-confidential or mission-critical.", + "false_positives": "When all assets within the network segmentation trust-boundary are hardened and protected to the same extent as if all were vaults with data of highest sensitivity.", + "function": "operations", + "stride": "elevation-of-privilege", + "cwe": 1008 + }, + "missing-waf": { + "id": "missing-waf", + "title": "Missing Web Application Firewall (WAF)", + "description": "To have a first line of filtering defense, security architectures with web-services or web-applications should include a WAF in front of them. Even though a WAF is not a replacement for security (all components must be secure even without a WAF) it adds another layer of defense to the overall system by delaying some attacks and having easier attack alerting through it.", + "impact": "If this risk is unmitigated, attackers might be able to apply standard attack pattern tests at great speed without any filtering.", + "asvs": "V1 - Architecture, Design and Threat Modeling Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Virtual_Patching_Cheat_Sheet.html", + "action": "Web Application Firewall (WAF)", + "mitigation": "Consider placing a Web Application Firewall (WAF) in front of the web-services and/or web-applications. For cloud environments many cloud providers offer pre-configured WAFs. 
Even reverse proxies can be enhanced by a WAF component via ModSecurity plugins.", + "check": "Is a Web Application Firewall (WAF) in place?", + "detection_logic": "In-scope web-services and/or web-applications accessed across a network trust boundary not having a Web Application Firewall (WAF) in front of them.", + "risk_assessment": "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", + "false_positives": "Targets only accessible via WAFs or reverse proxies containing a WAF component (like ModSecurity) can be considered as false positives after individual review.", + "function": "operations", + "stride": "tampering", + "cwe": 1008 + }, + "mixed-targets-on-shared-runtime": { + "id": "mixed-targets-on-shared-runtime", + "title": "Mixed Targets on Shared Runtime", + "description": "Different attacker targets (like frontend and backend/datastore components) should not be running on the same shared (underlying) runtime.", + "impact": "If this risk is unmitigated, attackers successfully attacking other components of the system might have an easy path towards more valuable targets, as they are running on the same shared runtime.", + "asvs": "V1 - Architecture, Design and Threat Modeling Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", + "action": "Runtime Separation", + "mitigation": "Use separate runtime environments for running different target components or apply similar separation styles to prevent load- or breach-related problems originating from one more attacker-facing asset impacts also the other more critical rated backend/datastore assets.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "Shared runtime running technical assets of different trust-boundaries is at risk. 
Also mixing backend/datastore with frontend components on the same shared runtime is considered a risk.", + "risk_assessment": "The risk rating (low or medium) depends on the confidentiality, integrity, and availability rating of the technical asset running on the shared runtime.", + "false_positives": "When all assets running on the shared runtime are hardened and protected to the same extent as if all were containing/processing highly sensitive data.", + "function": "operations", + "stride": "elevation-of-privilege", + "cwe": 1008 + }, + "path-traversal": { + "id": "path-traversal", + "title": "Path-Traversal", + "description": "When a filesystem is accessed Path-Traversal or Local-File-Inclusion (LFI) risks might arise. The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed or stored.", + "impact": "If this risk is unmitigated, attackers might be able to read sensitive files (configuration data, key/credential files, deployment files, business data files, etc.) from the filesystem of affected components.", + "asvs": "V12 - File and Resources Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Input_Validation_Cheat_Sheet.html", + "action": "Path-Traversal Prevention", + "mitigation": "Before accessing the file cross-check that it resides in the expected folder and is of the expected type and filename/suffix. Try to use a mapping if possible instead of directly accessing by a filename which is (partly or fully) provided by the caller. 
When a third-party product is used instead of custom developed software, check if the product applies the proper mitigation and ensure a reasonable patch-level.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "Filesystems accessed by in-scope callers.", + "risk_assessment": "The risk rating depends on the sensitivity of the data stored inside the technical asset.", + "false_positives": "File accesses by filenames not consisting of parts controllable by the caller can be considered as false positives after individual review.", + "function": "development", + "stride": "information-disclosure", + "cwe": 22 + }, + "push-instead-of-pull-deployment": { + "id": "push-instead-of-pull-deployment", + "title": "Push instead of Pull Deployment", + "description": "When comparing push-based vs. pull-based deployments from a security perspective, pull-based deployments improve the overall security of the deployment targets. Every exposed interface of a production system to accept a deployment increases the attack surface of the production system, thus a pull-based approach exposes less attack surface relevant interfaces.", + "impact": "If this risk is unmitigated, attackers might have more potential target vectors for attacks, as the overall attack surface is unnecessarily increased.", + "asvs": "V1 - Architecture, Design and Threat Modeling Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", + "action": "Build Pipeline Hardening", + "mitigation": "Try to prefer pull-based deployments (like GitOps scenarios offer) over push-based deployments to reduce the attack surface of the production system.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "Models with build pipeline components accessing in-scope targets of deployment (in a non-readonly way) which are not 
build-related components themselves.", + "risk_assessment": "The risk rating depends on the highest sensitivity of the deployment targets running custom-developed parts.", + "false_positives": "Communication links that are not deployment paths can be considered as false positives after individual review.", + "function": "architecture", + "stride": "tampering", + "model_failure_possible_reason": true, + "cwe": 1127 + }, + "search-query-injection": { + "id": "search-query-injection", + "title": "Search-Query Injection", + "description": "When a search engine server is accessed Search-Query Injection risks might arise.\u003cbr\u003e\u003cbr\u003eSee for example \u003ca href=\"https://github.com/veracode-research/solr-injection\"\u003ehttps://github.com/veracode-research/solr-injection\u003c/a\u003e and \u003ca href=\"https://github.com/veracode-research/solr-injection/blob/master/slides/DEFCON-27-Michael-Stepankin-Apache-Solr-Injection.pdf\"\u003ehttps://github.com/veracode-research/solr-injection/blob/master/slides/DEFCON-27-Michael-Stepankin-Apache-Solr-Injection.pdf\u003c/a\u003e for more details (here related to Solr, but in general showcasing the topic of search query injections).", + "impact": "If this risk remains unmitigated, attackers might be able to read more data from the search index and eventually further escalate towards a deeper system penetration via code executions.", + "asvs": "V5 - Validation, Sanitization and Encoding Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Injection_Prevention_Cheat_Sheet.html", + "action": "Search-Query Injection Prevention", + "mitigation": "Try to use libraries that properly encode search query meta characters in searches and don't expose the query unfiltered to the caller. 
When a third-party product is used instead of custom developed software, check if the product applies the proper mitigation and ensure a reasonable patch-level.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope clients accessing search engine servers via typical search access protocols.", + "risk_assessment": "The risk rating depends on the sensitivity of the search engine server itself and of the data assets processed or stored.", + "false_positives": "Server engine queries by search values not consisting of parts controllable by the caller can be considered as false positives after individual review.", + "function": "development", + "stride": "tampering", + "cwe": 74 + }, + "server-side-request-forgery": { + "id": "server-side-request-forgery", + "title": "Server-Side Request Forgery (SSRF)", + "description": "When a server system (i.e. not a client) is accessing other server systems via typical web protocols Server-Side Request Forgery (SSRF) or Local-File-Inclusion (LFI) or Remote-File-Inclusion (RFI) risks might arise. ", + "impact": "If this risk is unmitigated, attackers might be able to access sensitive services or files of network-reachable components by modifying outgoing calls of affected components.", + "asvs": "V12 - File and Resources Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Server_Side_Request_Forgery_Prevention_Cheat_Sheet.html", + "action": "SSRF Prevention", + "mitigation": "Try to avoid constructing the outgoing target URL with caller controllable values. Alternatively use a mapping (whitelist) when accessing outgoing URLs instead of creating them including caller controllable values. 
When a third-party product is used instead of custom developed software, check if the product applies the proper mitigation and ensure a reasonable patch-level.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope non-client systems accessing (using outgoing communication links) targets with either HTTP or HTTPS protocol.", + "risk_assessment": "The risk rating (low or medium) depends on the sensitivity of the data assets receivable via web protocols from targets within the same network trust-boundary as well as on the sensitivity of the data assets receivable via web protocols from the target asset itself. Also for cloud-based environments the exploitation impact is at least medium, as cloud backend services can be attacked via SSRF.", + "false_positives": "Servers not sending outgoing web requests can be considered as false positives after review.", + "function": "development", + "stride": "information-disclosure", + "cwe": 918 + }, + "service-registry-poisoning": { + "id": "service-registry-poisoning", + "title": "Service Registry Poisoning", + "description": "When a service registry is used for discovery of trusted service endpoints Service Registry Poisoning risks might arise.", + "impact": "If this risk remains unmitigated, attackers might be able to poison the service registry with malicious service endpoints or malicious lookup and config data leading to breach of sensitive data.", + "asvs": "V10 - Malicious Code Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Access_Control_Cheat_Sheet.html", + "action": "Service Registry Integrity Check", + "mitigation": "Try to strengthen the access control of the service registry and apply cross-checks to detect maliciously poisoned lookup data.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope service registries.", + 
"risk_assessment": "The risk rating depends on the sensitivity of the technical assets accessing the service registry as well as the data assets processed or stored.", + "false_positives": "Service registries not used for service discovery can be considered as false positives after individual review.", + "function": "architecture", + "cwe": 693 + }, + "sql-nosql-injection": { + "id": "sql-nosql-injection", + "title": "SQL/NoSQL-Injection", + "description": "When a database is accessed via database access protocols SQL/NoSQL-Injection risks might arise. The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed or stored.", + "impact": "If this risk is unmitigated, attackers might be able to modify SQL/NoSQL queries to steal and modify data and eventually further escalate towards a deeper system penetration via code executions.", + "asvs": "V5 - Validation, Sanitization and Encoding Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/SQL_Injection_Prevention_Cheat_Sheet.html", + "action": "SQL/NoSQL-Injection Prevention", + "mitigation": "Try to use parameter binding to be safe from injection vulnerabilities. 
When a third-party product is used instead of custom developed software, check if the product applies the proper mitigation and ensure a reasonable patch-level.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "Database accessed via typical database access protocols by in-scope clients.", + "risk_assessment": "The risk rating depends on the sensitivity of the data stored inside the database.", + "false_positives": "Database accesses by queries not consisting of parts controllable by the caller can be considered as false positives after individual review.", + "function": "development", + "stride": "tampering", + "cwe": 89 + }, + "unchecked-deployment": { + "id": "unchecked-deployment", + "title": "Unchecked Deployment", + "description": "For each build-pipeline component Unchecked Deployment risks might arise when the build-pipeline does not include established DevSecOps best-practices. DevSecOps best-practices scan as part of CI/CD pipelines for vulnerabilities in source- or byte-code, dependencies, container layers, and dynamically against running test systems. 
There are several open-source and commercial tools existing in the categories DAST, SAST, and IAST.", + "impact": "If this risk remains unmitigated, vulnerabilities in custom-developed software or their dependencies might not be identified during continuous deployment cycles.", + "asvs": "V14 - Configuration Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Vulnerable_Dependency_Management_Cheat_Sheet.html", + "action": "Build Pipeline Hardening", + "mitigation": "Apply DevSecOps best-practices and use scanning tools to identify vulnerabilities in source- or byte-code,dependencies, container layers, and optionally also via dynamic scans against running test systems.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "All development-relevant technical assets.", + "risk_assessment": "The risk rating depends on the highest rating of the technical assets and data assets processed by deployment-receiving targets.", + "false_positives": "When the build-pipeline does not build any software components it can be considered a false positive after individual review.", + "function": "architecture", + "stride": "tampering", + "cwe": 1127 + }, + "unencrypted-asset": { + "id": "unencrypted-asset", + "title": "Unencrypted Technical Assets", + "description": "Due to the confidentiality rating of the technical asset itself and/or the processed data assets this technical asset must be encrypted. 
The risk rating depends on the sensitivity technical asset itself and of the data assets stored.", + "impact": "If this risk is unmitigated, attackers might be able to access unencrypted data when successfully compromising sensitive components.", + "asvs": "V6 - Stored Cryptography Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Cryptographic_Storage_Cheat_Sheet.html", + "action": "Encryption of Technical Asset", + "mitigation": "Apply encryption to the technical asset.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope unencrypted technical assets (excluding reverse-proxy, load-balancer, waf, ids, ips and embedded components like library) storing data assets rated at least as confidential or critical. For technical assets storing data assets rated as strictly-confidential or mission-critical the encryption must be of type data-with-enduser-individual-key.", + "risk_assessment": "Depending on the confidentiality rating of the stored data-assets either medium or high risk.", + "false_positives": "When all sensitive data stored within the asset is already fully encrypted on document or data level.", + "function": "operations", + "stride": "information-disclosure", + "cwe": 311 + }, + "unencrypted-communication": { + "id": "unencrypted-communication", + "title": "Unencrypted Communication", + "description": "Due to the confidentiality and/or integrity rating of the data assets transferred over the communication link this connection must be encrypted.", + "impact": "If this risk is unmitigated, network attackers might be able to to eavesdrop on unencrypted sensitive data sent between components.", + "asvs": "V9 - Communication Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Transport_Layer_Protection_Cheat_Sheet.html", + "action": "Encryption of Communication Links", + "mitigation": "Apply transport 
layer encryption to the communication link.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "Unencrypted technical communication links of in-scope technical assets (excluding monitoring traffic as well as local-file-access and in-process-library-call) transferring sensitive data.", + "risk_assessment": "Depending on the confidentiality rating of the transferred data-assets either medium or high risk.", + "false_positives": "When all sensitive data sent over the communication link is already fully encrypted on document or data level. Also intra-container/pod communication can be considered false positive when container orchestration platform handles encryption.", + "function": "operations", + "stride": "information-disclosure", + "cwe": 319 + }, + "unguarded-access-from-internet": { + "id": "unguarded-access-from-internet", + "title": "Unguarded Access From Internet", + "description": "Internet-exposed assets must be guarded by a protecting service, application, or reverse-proxy.", + "impact": "If this risk is unmitigated, attackers might be able to directly attack sensitive systems without any hardening components in-between due to them being directly exposed on the internet.", + "asvs": "V1 - Architecture, Design and Threat Modeling Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", + "action": "Encapsulation of Technical Asset", + "mitigation": "Encapsulate the asset behind a guarding service, application, or reverse-proxy. For admin maintenance a bastion-host should be used as a jump-server. 
For file transfer a store-and-forward-host should be used as an indirect file exchange platform.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope technical assets (excluding load-balancer) with confidentiality rating of confidential (or higher) or with integrity rating of critical (or higher) when accessed directly from the internet. All web-server, web-application, reverse-proxy, waf, and gateway assets are exempted from this risk when they do not consist of custom developed code and the data-flow only consists of HTTP or FTP protocols. Access from monitoring systems as well as VPN-protected connections are exempted.", + "risk_assessment": "The matching technical assets are at low risk. When either the confidentiality rating is strictly-confidential or the integrity rating is mission-critical, the risk-rating is considered medium. For assets with RAA values higher than 40 % the risk-rating increases.", + "false_positives": "When other means of filtering client requests are applied equivalent of reverse-proxy, waf, or gateway components.", + "function": "architecture", + "stride": "elevation-of-privilege", + "cwe": 501 + }, + "unguarded-direct-datastore-access": { + "id": "unguarded-direct-datastore-access", + "title": "Unguarded Direct Datastore Access", + "description": "Data stores accessed across trust boundaries must be guarded by some protecting service or application.", + "impact": "If this risk is unmitigated, attackers might be able to directly attack sensitive data stores without any protecting components in-between.", + "asvs": "V1 - Architecture, Design and Threat Modeling Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", + "action": "Encapsulation of Datastore", + "mitigation": "Encapsulate the datastore access behind a guarding service or application.", + "check": "Are recommendations from the 
linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope technical assets of type datastore (except identity-store-ldap when accessed from identity-provider and file-server when accessed via file transfer protocols) with confidentiality rating of confidential (or higher) or with integrity rating of critical (or higher) which have incoming data-flows from assets outside across a network trust-boundary. DevOps config and deployment access is excluded from this risk.", + "risk_assessment": "The matching technical assets are at low risk. When either the confidentiality rating is strictly-confidential or the integrity rating is mission-critical, the risk-rating is considered medium. For assets with RAA values higher than 40 % the risk-rating increases.", + "false_positives": "When the caller is considered fully trusted as if it was part of the datastore itself.", + "function": "architecture", + "stride": "elevation-of-privilege", + "cwe": 501 + }, + "unnecessary-communication-link": { + "id": "unnecessary-communication-link", + "title": "Unnecessary Communication Link", + "description": "When a technical communication link does not send or receive any data assets, this is an indicator for an unnecessary communication link (or for an incomplete model).", + "impact": "If this risk is unmitigated, attackers might be able to target unnecessary communication links.", + "asvs": "V1 - Architecture, Design and Threat Modeling Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", + "action": "Attack Surface Reduction", + "mitigation": "Try to avoid using technical communication links that do not send or receive anything.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope technical assets' technical communication links not sending or receiving any data assets.", + "risk_assessment": "low", + 
"false_positives": "Usually no false positives as this looks like an incomplete model.", + "function": "architecture", + "stride": "elevation-of-privilege", + "model_failure_possible_reason": true, + "cwe": 1008 + }, + "unnecessary-data-asset": { + "id": "unnecessary-data-asset", + "title": "Unnecessary Data Asset", + "description": "When a data asset is not processed or stored by any data assets and also not transferred by any communication links, this is an indicator for an unnecessary data asset (or for an incomplete model).", + "impact": "If this risk is unmitigated, attackers might be able to access unnecessary data assets using other vulnerabilities.", + "asvs": "V1 - Architecture, Design and Threat Modeling Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", + "action": "Attack Surface Reduction", + "mitigation": "Try to avoid having data assets that are not required/used.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "Modelled data assets not processed or stored by any data assets and also not transferred by any communication links.", + "risk_assessment": "low", + "false_positives": "Usually no false positives as this looks like an incomplete model.", + "function": "architecture", + "stride": "elevation-of-privilege", + "model_failure_possible_reason": true, + "cwe": 1008 + }, + "unnecessary-data-transfer": { + "id": "unnecessary-data-transfer", + "title": "Unnecessary Data Transfer", + "description": "When a technical asset sends or receives data assets, which it neither processes or stores this is an indicator for unnecessarily transferred data (or for an incomplete model). 
When the unnecessarily transferred data assets are sensitive, this poses an unnecessary risk of an increased attack surface.", + "impact": "If this risk is unmitigated, attackers might be able to target unnecessarily transferred data.", + "asvs": "V1 - Architecture, Design and Threat Modeling Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", + "action": "Attack Surface Reduction", + "mitigation": "Try to avoid sending or receiving sensitive data assets which are not required (i.e. neither processed or stored) by the involved technical asset.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope technical assets sending or receiving sensitive data assets which are neither processed nor stored by the technical asset are flagged with this risk. The risk rating (low or medium) depends on the confidentiality, integrity, and availability rating of the technical asset. Monitoring data is exempted from this risk.", + "risk_assessment": "The risk assessment is depending on the confidentiality and integrity rating of the transferred data asset either low or medium.", + "false_positives": "Technical assets missing the model entries of either processing or storing the mentioned data assets can be considered as false positives (incomplete models) after individual review. These should then be addressed by completing the model so that all necessary data assets are processed and/or stored by the technical asset involved.", + "function": "architecture", + "stride": "elevation-of-privilege", + "model_failure_possible_reason": true, + "cwe": 1008 + }, + "unnecessary-technical-asset": { + "id": "unnecessary-technical-asset", + "title": "Unnecessary Technical Asset", + "description": "When a technical asset does not process or store any data assets, this is an indicator for an unnecessary technical asset (or for an incomplete model). 
This is also the case if the asset has no communication links (either outgoing or incoming).", + "impact": "If this risk is unmitigated, attackers might be able to target unnecessary technical assets.", + "asvs": "V1 - Architecture, Design and Threat Modeling Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", + "action": "Attack Surface Reduction", + "mitigation": "Try to avoid using technical assets that do not process or store anything.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "Technical assets not processing or storing any data assets.", + "risk_assessment": "low", + "false_positives": "Usually no false positives as this looks like an incomplete model.", + "function": "architecture", + "stride": "elevation-of-privilege", + "model_failure_possible_reason": true, + "cwe": 1008 + }, + "untrusted-deserialization": { + "id": "untrusted-deserialization", + "title": "Untrusted Deserialization", + "description": "When a technical asset accepts data in a specific serialized form (like Java or .NET serialization), Untrusted Deserialization risks might arise.\u003cbr\u003e\u003cbr\u003eSee \u003ca href=\"https://christian-schneider.net/JavaDeserializationSecurityFAQ.html\"\u003ehttps://christian-schneider.net/JavaDeserializationSecurityFAQ.html\u003c/a\u003e for more details.", + "impact": "If this risk is unmitigated, attackers might be able to execute code on target systems by exploiting untrusted deserialization endpoints.", + "asvs": "V5 - Validation, Sanitization and Encoding Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Deserialization_Cheat_Sheet.html", + "action": "Prevention of Deserialization of Untrusted Data", + "mitigation": "Try to avoid the deserialization of untrusted data (even of data within the same trust-boundary as long as it is sent across a remote 
connection) in order to stay safe from Untrusted Deserialization vulnerabilities. Alternatively a strict whitelisting approach of the classes/types/values to deserialize might help as well. When a third-party product is used instead of custom developed software, check if the product applies the proper mitigation and ensure a reasonable patch-level.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope technical assets accepting serialization data formats (including EJB and RMI protocols).", + "risk_assessment": "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", + "false_positives": "Fully trusted (i.e. cryptographically signed or similar) data deserialized can be considered as false positives after individual review.", + "function": "architecture", + "stride": "tampering", + "cwe": 502 + }, + "wrong-communication-link-content": { + "id": "wrong-communication-link-content", + "title": "Wrong Communication Link Content", + "description": "When a communication link is defined as readonly, but does not receive any data asset, or when it is defined as not readonly, but does not send any data asset, it is likely to be a model failure.", + "impact": "If this potential model error is not fixed, some risks might not be visible.", + "asvs": "V1 - Architecture, Design and Threat Modeling Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Threat_Modeling_Cheat_Sheet.html", + "action": "Model Consistency", + "mitigation": "Try to model the correct readonly flag and/or data sent/received of communication links. 
Also try to use communication link types matching the target technology/machine types.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "Communication links with inconsistent data assets being sent/received not matching their readonly flag or otherwise inconsistent protocols not matching the target technology type.", + "risk_assessment": "low", + "false_positives": "Usually no false positives as this looks like an incomplete model.", + "function": "architecture", + "stride": "information-disclosure", + "model_failure_possible_reason": true, + "cwe": 1008 + }, + "wrong-trust-boundary-content": { + "id": "wrong-trust-boundary-content", + "title": "Wrong Trust Boundary Content", + "description": "When a trust boundary of type network-policy-namespace-isolation contains non-container assets it is likely to be a model failure.", + "impact": "If this potential model error is not fixed, some risks might not be visible.", + "asvs": "V1 - Architecture, Design and Threat Modeling Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/Threat_Modeling_Cheat_Sheet.html", + "action": "Model Consistency", + "mitigation": "Try to model the correct types of trust boundaries and data assets.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "Trust boundaries which should only contain containers, but have different assets inside.", + "risk_assessment": "low", + "false_positives": "Usually no false positives as this looks like an incomplete model.", + "function": "architecture", + "stride": "elevation-of-privilege", + "model_failure_possible_reason": true, + "cwe": 1008 + }, + "xml-external-entity": { + "id": "xml-external-entity", + "title": "XML External Entity (XXE)", + "description": "When a technical asset accepts data in XML format, XML External Entity (XXE) risks might arise.", + "impact": "If this risk is 
unmitigated, attackers might be able to read sensitive files (configuration data, key/credential files, deployment files, business data files, etc.) form the filesystem of affected components and/or access sensitive services or files of other components.", + "asvs": "V14 - Configuration Verification Requirements", + "cheat_sheet": "https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html", + "action": "XML Parser Hardening", + "mitigation": "Apply hardening of all XML parser instances in order to stay safe from XML External Entity (XXE) vulnerabilities. When a third-party product is used instead of custom developed software, check if the product applies the proper mitigation and ensure a reasonable patch-level.", + "check": "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", + "detection_logic": "In-scope technical assets accepting XML data formats.", + "risk_assessment": "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored. Also for cloud-based environments the exploitation impact is at least medium, as cloud backend services can be attacked via SSRF (and XXE vulnerabilities are often also SSRF vulnerabilities).", + "false_positives": "Fully trusted (i.e. 
cryptographically signed or similar) XML data can be considered as false positives after individual review.", + "function": "development", + "stride": "information-disclosure", + "cwe": 611 } }, "risk_tracking": { "dos-risky-access-across-trust-boundary@*@*@*": { - "SyntheticRiskId": "dos-risky-access-across-trust-boundary@*@*@*", - "Justification": "The hardening measures are being implemented and checked", - "Ticket": "XYZ-1234", - "CheckedBy": "John Doe", - "Status": "in-progress", - "Date": "2020-01-04T00:00:00Z" + "synthetic_risk_id": "dos-risky-access-across-trust-boundary@*@*@*", + "justification": "The hardening measures are being implemented and checked", + "ticket": "XYZ-1234", + "checked_by": "John Doe", + "status": "in-progress", + "date": "2020-01-04T00:00:00Z" }, "ldap-injection@*@ldap-auth-server@*": { - "SyntheticRiskId": "ldap-injection@*@ldap-auth-server@*", - "Justification": "The hardening measures were implemented and checked", - "Ticket": "XYZ-5678", - "CheckedBy": "John Doe", - "Status": "mitigated", - "Date": "2020-01-05T00:00:00Z" + "synthetic_risk_id": "ldap-injection@*@ldap-auth-server@*", + "justification": "The hardening measures were implemented and checked", + "ticket": "XYZ-5678", + "checked_by": "John Doe", + "status": "mitigated", + "date": "2020-01-05T00:00:00Z" }, "missing-authentication-second-factor@*@*@*": { - "SyntheticRiskId": "missing-authentication-second-factor@*@*@*", - "Justification": "The hardening measures were implemented and checked", - "Ticket": "XYZ-1234", - "CheckedBy": "John Doe", - "Status": "mitigated", - "Date": "2020-01-04T00:00:00Z" + "synthetic_risk_id": "missing-authentication-second-factor@*@*@*", + "justification": "The hardening measures were implemented and checked", + "ticket": "XYZ-1234", + "checked_by": "John Doe", + "status": "mitigated", + "date": "2020-01-04T00:00:00Z" }, "missing-hardening@*": { - "SyntheticRiskId": "missing-hardening@*", - "Justification": "The hardening measures were 
implemented and checked", - "Ticket": "XYZ-1234", - "CheckedBy": "John Doe", - "Status": "mitigated", - "Date": "2020-01-04T00:00:00Z" + "synthetic_risk_id": "missing-hardening@*", + "justification": "The hardening measures were implemented and checked", + "ticket": "XYZ-1234", + "checked_by": "John Doe", + "status": "mitigated", + "date": "2020-01-04T00:00:00Z" }, "unencrypted-asset@*": { - "SyntheticRiskId": "unencrypted-asset@*", - "Justification": "The hardening measures were implemented and checked", - "Ticket": "XYZ-1234", - "CheckedBy": "John Doe", - "Status": "mitigated", - "Date": "2020-01-04T00:00:00Z" + "synthetic_risk_id": "unencrypted-asset@*", + "justification": "The hardening measures were implemented and checked", + "ticket": "XYZ-1234", + "checked_by": "John Doe", + "status": "mitigated", + "date": "2020-01-04T00:00:00Z" }, "untrusted-deserialization@erp-system": { - "SyntheticRiskId": "untrusted-deserialization@erp-system", - "Justification": "Risk accepted as tolerable", - "Ticket": "XYZ-1234", - "CheckedBy": "John Doe", - "Status": "accepted", - "Date": "2020-01-04T00:00:00Z" + "synthetic_risk_id": "untrusted-deserialization@erp-system", + "justification": "Risk accepted as tolerable", + "ticket": "XYZ-1234", + "checked_by": "John Doe", + "status": "accepted", + "date": "2020-01-04T00:00:00Z" } }, "communication_links": { "apache-webserver\u003eauth-credential-check-traffic": { - "Id": "apache-webserver\u003eauth-credential-check-traffic", - "SourceId": "apache-webserver", - "TargetId": "identity-provider", - "Title": "Auth Credential Check Traffic", - "Description": "Link to the identity provider server", - "Protocol": 2, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 1, - "Authorization": 1, - "Usage": 0, - "DataAssetsSent": [ + "id": "apache-webserver\u003eauth-credential-check-traffic", + "source_id": "apache-webserver", + "target_id": "identity-provider", + "title": "Auth Credential Check 
Traffic", + "description": "Link to the identity provider server", + "protocol": "https", + "authentication": "credentials", + "authorization": "technical-user", + "data_assets_sent": [ "customer-accounts" ], - "DataAssetsReceived": null, - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, "apache-webserver\u003eerp-system-traffic": { - "Id": "apache-webserver\u003eerp-system-traffic", - "SourceId": "apache-webserver", - "TargetId": "erp-system", - "Title": "ERP System Traffic", - "Description": "Link to the ERP system", - "Protocol": 2, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 3, - "Authorization": 1, - "Usage": 0, - "DataAssetsSent": [ + "id": "apache-webserver\u003eerp-system-traffic", + "source_id": "apache-webserver", + "target_id": "erp-system", + "title": "ERP System Traffic", + "description": "Link to the ERP system", + "protocol": "https", + "authentication": "token", + "authorization": "technical-user", + "data_assets_sent": [ "customer-accounts", "customer-operational-data", "internal-business-data" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-accounts", "customer-operational-data", "customer-contracts", "internal-business-data" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, "backend-admin-client\u003edb-update-access": { - "Id": "backend-admin-client\u003edb-update-access", - "SourceId": "backend-admin-client", - "TargetId": "sql-database", - "Title": "DB Update Access", - "Description": "Link to the database (JDBC tunneled via SSH)", - "Protocol": 20, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 4, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": "backend-admin-client\u003edb-update-access", + "source_id": "backend-admin-client", + "target_id": 
"sql-database", + "title": "DB Update Access", + "description": "Link to the database (JDBC tunneled via SSH)", + "protocol": "ssh", + "authentication": "client-certificate", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ "db-dumps" ], - "DataAssetsReceived": [ + "data_assets_received": [ "db-dumps", "erp-logs", "customer-accounts", "customer-operational-data" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, "backend-admin-client\u003eerp-web-access": { - "Id": "backend-admin-client\u003eerp-web-access", - "SourceId": "backend-admin-client", - "TargetId": "erp-system", - "Title": "ERP Web Access", - "Description": "Link to the ERP system (Web)", - "Protocol": 2, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 3, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": "backend-admin-client\u003eerp-web-access", + "source_id": "backend-admin-client", + "target_id": "erp-system", + "title": "ERP Web Access", + "description": "Link to the ERP system (Web)", + "protocol": "https", + "authentication": "token", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ "erp-customizing" ], - "DataAssetsReceived": [ + "data_assets_received": [ "erp-logs" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, "backend-admin-client\u003euser-management-access": { - "Id": "backend-admin-client\u003euser-management-access", - "SourceId": "backend-admin-client", - "TargetId": "ldap-auth-server", - "Title": "User Management Access", - "Description": "Link to the LDAP auth server for managing users", - "Protocol": 33, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 1, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": 
"backend-admin-client\u003euser-management-access", + "source_id": "backend-admin-client", + "target_id": "ldap-auth-server", + "title": "User Management Access", + "description": "Link to the LDAP auth server for managing users", + "protocol": "ldaps", + "authentication": "credentials", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ "customer-accounts" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-accounts" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, "backoffice-client\u003eerp-internal-access": { - "Id": "backoffice-client\u003eerp-internal-access", - "SourceId": "backoffice-client", - "TargetId": "erp-system", - "Title": "ERP Internal Access", - "Description": "Link to the ERP system", - "Protocol": 2, - "Tags": [ + "id": "backoffice-client\u003eerp-internal-access", + "source_id": "backoffice-client", + "target_id": "erp-system", + "title": "ERP Internal Access", + "description": "Link to the ERP system", + "protocol": "https", + "tags": [ "some-erp" ], - "VPN": true, - "IpFiltered": false, - "Readonly": false, - "Authentication": 3, - "Authorization": 2, - "Usage": 0, - "DataAssetsSent": [ + "vpn": true, + "authentication": "token", + "authorization": "enduser-identity-propagation", + "data_assets_sent": [ "internal-business-data" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-contracts", "internal-business-data" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, "backoffice-client\u003emarketing-cms-editing": { - "Id": "backoffice-client\u003emarketing-cms-editing", - "SourceId": "backoffice-client", - "TargetId": "marketing-cms", - "Title": "Marketing CMS Editing", - "Description": "Link to the CMS for editing content", - "Protocol": 2, - "Tags": [], - "VPN": true, - "IpFiltered": false, - "Readonly": false, - 
"Authentication": 3, - "Authorization": 2, - "Usage": 0, - "DataAssetsSent": [ + "id": "backoffice-client\u003emarketing-cms-editing", + "source_id": "backoffice-client", + "target_id": "marketing-cms", + "title": "Marketing CMS Editing", + "description": "Link to the CMS for editing content", + "protocol": "https", + "vpn": true, + "authentication": "token", + "authorization": "enduser-identity-propagation", + "data_assets_sent": [ "marketing-material" ], - "DataAssetsReceived": [ + "data_assets_received": [ "marketing-material" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, "customer-client\u003ecustomer-traffic": { - "Id": "customer-client\u003ecustomer-traffic", - "SourceId": "customer-client", - "TargetId": "load-balancer", - "Title": "Customer Traffic", - "Description": "Link to the load balancer", - "Protocol": 2, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 2, - "Authorization": 2, - "Usage": 0, - "DataAssetsSent": [ + "id": "customer-client\u003ecustomer-traffic", + "source_id": "customer-client", + "target_id": "load-balancer", + "title": "Customer Traffic", + "description": "Link to the load balancer", + "protocol": "https", + "authentication": "session-id", + "authorization": "enduser-identity-propagation", + "data_assets_sent": [ "customer-accounts", "customer-operational-data" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-accounts", "customer-operational-data", "customer-contracts", "client-application-code", "marketing-material" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, "erp-system\u003edatabase-traffic": { - "Id": "erp-system\u003edatabase-traffic", - "SourceId": "erp-system", - "TargetId": "sql-database", - "Title": "Database Traffic", - "Description": "Link to the DB system", - "Protocol": 8, - "Tags": [], - 
"VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 1, - "Authorization": 1, - "Usage": 0, - "DataAssetsSent": [ + "id": "erp-system\u003edatabase-traffic", + "source_id": "erp-system", + "target_id": "sql-database", + "title": "Database Traffic", + "description": "Link to the DB system", + "protocol": "jdbc", + "authentication": "credentials", + "authorization": "technical-user", + "data_assets_sent": [ "customer-accounts", "customer-operational-data", "internal-business-data" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-accounts", "customer-operational-data", "internal-business-data" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, "erp-system\u003enfs-filesystem-access": { - "Id": "erp-system\u003enfs-filesystem-access", - "SourceId": "erp-system", - "TargetId": "contract-fileserver", - "Title": "NFS Filesystem Access", - "Description": "Link to the file system", - "Protocol": 35, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 0, - "Authorization": 0, - "Usage": 0, - "DataAssetsSent": [ + "id": "erp-system\u003enfs-filesystem-access", + "source_id": "erp-system", + "target_id": "contract-fileserver", + "title": "NFS Filesystem Access", + "description": "Link to the file system", + "protocol": "nfs", + "data_assets_sent": [ "customer-contracts" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-contracts" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, "external-dev-client\u003egit-repo-code-write-access": { - "Id": "external-dev-client\u003egit-repo-code-write-access", - "SourceId": "external-dev-client", - "TargetId": "git-repo", - "Title": "Git-Repo Code Write Access", - "Description": "Link to the Git repo", - "Protocol": 20, - "Tags": [], - "VPN": false, - "IpFiltered": false, - 
"Readonly": false, - "Authentication": 4, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": "external-dev-client\u003egit-repo-code-write-access", + "source_id": "external-dev-client", + "target_id": "git-repo", + "title": "Git-Repo Code Write Access", + "description": "Link to the Git repo", + "protocol": "ssh", + "authentication": "client-certificate", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ "client-application-code", "server-application-code" ], - "DataAssetsReceived": [ + "data_assets_received": [ "client-application-code", "server-application-code" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, "external-dev-client\u003egit-repo-web-ui-access": { - "Id": "external-dev-client\u003egit-repo-web-ui-access", - "SourceId": "external-dev-client", - "TargetId": "git-repo", - "Title": "Git-Repo Web-UI Access", - "Description": "Link to the Git repo", - "Protocol": 2, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 3, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": "external-dev-client\u003egit-repo-web-ui-access", + "source_id": "external-dev-client", + "target_id": "git-repo", + "title": "Git-Repo Web-UI Access", + "description": "Link to the Git repo", + "protocol": "https", + "authentication": "token", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ "client-application-code", "server-application-code" ], - "DataAssetsReceived": [ + "data_assets_received": [ "client-application-code", "server-application-code" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, "external-dev-client\u003ejenkins-web-ui-access": { - "Id": "external-dev-client\u003ejenkins-web-ui-access", - "SourceId": "external-dev-client", - "TargetId": "jenkins-buildserver", - "Title": 
"Jenkins Web-UI Access", - "Description": "Link to the Jenkins build server", - "Protocol": 2, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 1, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": "external-dev-client\u003ejenkins-web-ui-access", + "source_id": "external-dev-client", + "target_id": "jenkins-buildserver", + "title": "Jenkins Web-UI Access", + "description": "Link to the Jenkins build server", + "protocol": "https", + "authentication": "credentials", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ "build-job-config" ], - "DataAssetsReceived": [ + "data_assets_received": [ "build-job-config" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, "identity-provider\u003eldap-credential-check-traffic": { - "Id": "identity-provider\u003eldap-credential-check-traffic", - "SourceId": "identity-provider", - "TargetId": "ldap-auth-server", - "Title": "LDAP Credential Check Traffic", - "Description": "Link to the LDAP server", - "Protocol": 33, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 1, - "Authorization": 1, - "Usage": 0, - "DataAssetsSent": [ + "id": "identity-provider\u003eldap-credential-check-traffic", + "source_id": "identity-provider", + "target_id": "ldap-auth-server", + "title": "LDAP Credential Check Traffic", + "description": "Link to the LDAP server", + "protocol": "ldaps", + "authentication": "credentials", + "authorization": "technical-user", + "data_assets_sent": [ "customer-accounts" ], - "DataAssetsReceived": null, - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, "jenkins-buildserver\u003eapplication-deployment": { - "Id": "jenkins-buildserver\u003eapplication-deployment", - "SourceId": "jenkins-buildserver", - "TargetId": "apache-webserver", - 
"Title": "Application Deployment", - "Description": "Link to the Apache webserver", - "Protocol": 20, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 4, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": "jenkins-buildserver\u003eapplication-deployment", + "source_id": "jenkins-buildserver", + "target_id": "apache-webserver", + "title": "Application Deployment", + "description": "Link to the Apache webserver", + "protocol": "ssh", + "authentication": "client-certificate", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ "client-application-code", "server-application-code" ], - "DataAssetsReceived": null, - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, "jenkins-buildserver\u003ecms-updates": { - "Id": "jenkins-buildserver\u003ecms-updates", - "SourceId": "jenkins-buildserver", - "TargetId": "marketing-cms", - "Title": "CMS Updates", - "Description": "Link to the CMS", - "Protocol": 20, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 4, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": "jenkins-buildserver\u003ecms-updates", + "source_id": "jenkins-buildserver", + "target_id": "marketing-cms", + "title": "CMS Updates", + "description": "Link to the CMS", + "protocol": "ssh", + "authentication": "client-certificate", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ "marketing-material" ], - "DataAssetsReceived": null, - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, "jenkins-buildserver\u003egit-repo-code-read-access": { - "Id": "jenkins-buildserver\u003egit-repo-code-read-access", - "SourceId": "jenkins-buildserver", - "TargetId": "git-repo", - "Title": "Git Repo Code Read Access", - "Description": "Link to the Git repository 
server", - "Protocol": 20, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": true, - "Authentication": 4, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": null, - "DataAssetsReceived": [ + "id": "jenkins-buildserver\u003egit-repo-code-read-access", + "source_id": "jenkins-buildserver", + "target_id": "git-repo", + "title": "Git Repo Code Read Access", + "description": "Link to the Git repository server", + "protocol": "ssh", + "readonly": true, + "authentication": "client-certificate", + "authorization": "technical-user", + "usage": "devops", + "data_assets_received": [ "client-application-code", "server-application-code" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, "load-balancer\u003ecms-content-traffic": { - "Id": "load-balancer\u003ecms-content-traffic", - "SourceId": "load-balancer", - "TargetId": "marketing-cms", - "Title": "CMS Content Traffic", - "Description": "Link to the CMS server", - "Protocol": 1, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": true, - "Authentication": 0, - "Authorization": 0, - "Usage": 0, - "DataAssetsSent": null, - "DataAssetsReceived": [ + "id": "load-balancer\u003ecms-content-traffic", + "source_id": "load-balancer", + "target_id": "marketing-cms", + "title": "CMS Content Traffic", + "description": "Link to the CMS server", + "protocol": "http", + "readonly": true, + "data_assets_received": [ "marketing-material" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, "load-balancer\u003eweb-application-traffic": { - "Id": "load-balancer\u003eweb-application-traffic", - "SourceId": "load-balancer", - "TargetId": "apache-webserver", - "Title": "Web Application Traffic", - "Description": "Link to the web server", - "Protocol": 1, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 2, - 
"Authorization": 2, - "Usage": 0, - "DataAssetsSent": [ + "id": "load-balancer\u003eweb-application-traffic", + "source_id": "load-balancer", + "target_id": "apache-webserver", + "title": "Web Application Traffic", + "description": "Link to the web server", + "protocol": "http", + "authentication": "session-id", + "authorization": "enduser-identity-propagation", + "data_assets_sent": [ "customer-accounts", "customer-operational-data" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-accounts", "customer-operational-data", "customer-contracts", "client-application-code" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, "marketing-cms\u003eauth-traffic": { - "Id": "marketing-cms\u003eauth-traffic", - "SourceId": "marketing-cms", - "TargetId": "ldap-auth-server", - "Title": "Auth Traffic", - "Description": "Link to the LDAP auth server", - "Protocol": 32, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": true, - "Authentication": 1, - "Authorization": 1, - "Usage": 0, - "DataAssetsSent": [ + "id": "marketing-cms\u003eauth-traffic", + "source_id": "marketing-cms", + "target_id": "ldap-auth-server", + "title": "Auth Traffic", + "description": "Link to the LDAP auth server", + "protocol": "ldap", + "readonly": true, + "authentication": "credentials", + "authorization": "technical-user", + "data_assets_sent": [ "customer-accounts" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-accounts" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } }, "diagram_tweak_nodesep": 2, @@ -1902,706 +2231,576 @@ "incoming_technical_communication_links_mapped_by_target_id": { "apache-webserver": [ { - "Id": "jenkins-buildserver\u003eapplication-deployment", - "SourceId": "jenkins-buildserver", - "TargetId": "apache-webserver", - "Title": "Application Deployment", - "Description": 
"Link to the Apache webserver", - "Protocol": 20, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 4, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ - "client-application-code", - "server-application-code" - ], - "DataAssetsReceived": null, - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true - }, - { - "Id": "load-balancer\u003eweb-application-traffic", - "SourceId": "load-balancer", - "TargetId": "apache-webserver", - "Title": "Web Application Traffic", - "Description": "Link to the web server", - "Protocol": 1, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 2, - "Authorization": 2, - "Usage": 0, - "DataAssetsSent": [ + "id": "load-balancer\u003eweb-application-traffic", + "source_id": "load-balancer", + "target_id": "apache-webserver", + "title": "Web Application Traffic", + "description": "Link to the web server", + "protocol": "http", + "authentication": "session-id", + "authorization": "enduser-identity-propagation", + "data_assets_sent": [ "customer-accounts", "customer-operational-data" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-accounts", "customer-operational-data", "customer-contracts", "client-application-code" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true + }, + { + "id": "jenkins-buildserver\u003eapplication-deployment", + "source_id": "jenkins-buildserver", + "target_id": "apache-webserver", + "title": "Application Deployment", + "description": "Link to the Apache webserver", + "protocol": "ssh", + "authentication": "client-certificate", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ + "client-application-code", + "server-application-code" + ], + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } ], "contract-fileserver": [ { - "Id": "erp-system\u003enfs-filesystem-access", - "SourceId": 
"erp-system", - "TargetId": "contract-fileserver", - "Title": "NFS Filesystem Access", - "Description": "Link to the file system", - "Protocol": 35, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 0, - "Authorization": 0, - "Usage": 0, - "DataAssetsSent": [ + "id": "erp-system\u003enfs-filesystem-access", + "source_id": "erp-system", + "target_id": "contract-fileserver", + "title": "NFS Filesystem Access", + "description": "Link to the file system", + "protocol": "nfs", + "data_assets_sent": [ "customer-contracts" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-contracts" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } ], "erp-system": [ { - "Id": "backoffice-client\u003eerp-internal-access", - "SourceId": "backoffice-client", - "TargetId": "erp-system", - "Title": "ERP Internal Access", - "Description": "Link to the ERP system", - "Protocol": 2, - "Tags": [ + "id": "backend-admin-client\u003eerp-web-access", + "source_id": "backend-admin-client", + "target_id": "erp-system", + "title": "ERP Web Access", + "description": "Link to the ERP system (Web)", + "protocol": "https", + "authentication": "token", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ + "erp-customizing" + ], + "data_assets_received": [ + "erp-logs" + ], + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true + }, + { + "id": "backoffice-client\u003eerp-internal-access", + "source_id": "backoffice-client", + "target_id": "erp-system", + "title": "ERP Internal Access", + "description": "Link to the ERP system", + "protocol": "https", + "tags": [ "some-erp" ], - "VPN": true, - "IpFiltered": false, - "Readonly": false, - "Authentication": 3, - "Authorization": 2, - "Usage": 0, - "DataAssetsSent": [ + "vpn": true, + "authentication": "token", + "authorization": "enduser-identity-propagation", + "data_assets_sent": [ 
"internal-business-data" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-contracts", "internal-business-data" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, { - "Id": "backend-admin-client\u003eerp-web-access", - "SourceId": "backend-admin-client", - "TargetId": "erp-system", - "Title": "ERP Web Access", - "Description": "Link to the ERP system (Web)", - "Protocol": 2, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 3, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ - "erp-customizing" - ], - "DataAssetsReceived": [ - "erp-logs" - ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true - }, - { - "Id": "apache-webserver\u003eerp-system-traffic", - "SourceId": "apache-webserver", - "TargetId": "erp-system", - "Title": "ERP System Traffic", - "Description": "Link to the ERP system", - "Protocol": 2, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 3, - "Authorization": 1, - "Usage": 0, - "DataAssetsSent": [ + "id": "apache-webserver\u003eerp-system-traffic", + "source_id": "apache-webserver", + "target_id": "erp-system", + "title": "ERP System Traffic", + "description": "Link to the ERP system", + "protocol": "https", + "authentication": "token", + "authorization": "technical-user", + "data_assets_sent": [ "customer-accounts", "customer-operational-data", "internal-business-data" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-accounts", "customer-operational-data", "customer-contracts", "internal-business-data" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } ], "git-repo": [ { - "Id": "external-dev-client\u003egit-repo-code-write-access", - "SourceId": "external-dev-client", - "TargetId": "git-repo", - "Title": "Git-Repo Code Write Access", - "Description": 
"Link to the Git repo", - "Protocol": 20, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 4, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": "external-dev-client\u003egit-repo-code-write-access", + "source_id": "external-dev-client", + "target_id": "git-repo", + "title": "Git-Repo Code Write Access", + "description": "Link to the Git repo", + "protocol": "ssh", + "authentication": "client-certificate", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ "client-application-code", "server-application-code" ], - "DataAssetsReceived": [ + "data_assets_received": [ "client-application-code", "server-application-code" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, { - "Id": "external-dev-client\u003egit-repo-web-ui-access", - "SourceId": "external-dev-client", - "TargetId": "git-repo", - "Title": "Git-Repo Web-UI Access", - "Description": "Link to the Git repo", - "Protocol": 2, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 3, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": "external-dev-client\u003egit-repo-web-ui-access", + "source_id": "external-dev-client", + "target_id": "git-repo", + "title": "Git-Repo Web-UI Access", + "description": "Link to the Git repo", + "protocol": "https", + "authentication": "token", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ "client-application-code", "server-application-code" ], - "DataAssetsReceived": [ + "data_assets_received": [ "client-application-code", "server-application-code" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, { - "Id": "jenkins-buildserver\u003egit-repo-code-read-access", - "SourceId": "jenkins-buildserver", - "TargetId": "git-repo", - "Title": "Git Repo 
Code Read Access", - "Description": "Link to the Git repository server", - "Protocol": 20, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": true, - "Authentication": 4, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": null, - "DataAssetsReceived": [ + "id": "jenkins-buildserver\u003egit-repo-code-read-access", + "source_id": "jenkins-buildserver", + "target_id": "git-repo", + "title": "Git Repo Code Read Access", + "description": "Link to the Git repository server", + "protocol": "ssh", + "readonly": true, + "authentication": "client-certificate", + "authorization": "technical-user", + "usage": "devops", + "data_assets_received": [ "client-application-code", "server-application-code" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } ], "identity-provider": [ { - "Id": "apache-webserver\u003eauth-credential-check-traffic", - "SourceId": "apache-webserver", - "TargetId": "identity-provider", - "Title": "Auth Credential Check Traffic", - "Description": "Link to the identity provider server", - "Protocol": 2, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 1, - "Authorization": 1, - "Usage": 0, - "DataAssetsSent": [ + "id": "apache-webserver\u003eauth-credential-check-traffic", + "source_id": "apache-webserver", + "target_id": "identity-provider", + "title": "Auth Credential Check Traffic", + "description": "Link to the identity provider server", + "protocol": "https", + "authentication": "credentials", + "authorization": "technical-user", + "data_assets_sent": [ "customer-accounts" ], - "DataAssetsReceived": null, - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } ], "jenkins-buildserver": [ { - "Id": "external-dev-client\u003ejenkins-web-ui-access", - "SourceId": "external-dev-client", - "TargetId": "jenkins-buildserver", - "Title": "Jenkins 
Web-UI Access", - "Description": "Link to the Jenkins build server", - "Protocol": 2, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 1, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": "external-dev-client\u003ejenkins-web-ui-access", + "source_id": "external-dev-client", + "target_id": "jenkins-buildserver", + "title": "Jenkins Web-UI Access", + "description": "Link to the Jenkins build server", + "protocol": "https", + "authentication": "credentials", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ "build-job-config" ], - "DataAssetsReceived": [ + "data_assets_received": [ "build-job-config" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } ], "ldap-auth-server": [ { - "Id": "marketing-cms\u003eauth-traffic", - "SourceId": "marketing-cms", - "TargetId": "ldap-auth-server", - "Title": "Auth Traffic", - "Description": "Link to the LDAP auth server", - "Protocol": 32, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": true, - "Authentication": 1, - "Authorization": 1, - "Usage": 0, - "DataAssetsSent": [ - "customer-accounts" - ], - "DataAssetsReceived": [ + "id": "identity-provider\u003eldap-credential-check-traffic", + "source_id": "identity-provider", + "target_id": "ldap-auth-server", + "title": "LDAP Credential Check Traffic", + "description": "Link to the LDAP server", + "protocol": "ldaps", + "authentication": "credentials", + "authorization": "technical-user", + "data_assets_sent": [ "customer-accounts" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, { - "Id": "backend-admin-client\u003euser-management-access", - "SourceId": "backend-admin-client", - "TargetId": "ldap-auth-server", - "Title": "User Management Access", - "Description": "Link to the LDAP auth server for managing users", - 
"Protocol": 33, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 1, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": "marketing-cms\u003eauth-traffic", + "source_id": "marketing-cms", + "target_id": "ldap-auth-server", + "title": "Auth Traffic", + "description": "Link to the LDAP auth server", + "protocol": "ldap", + "readonly": true, + "authentication": "credentials", + "authorization": "technical-user", + "data_assets_sent": [ "customer-accounts" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-accounts" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, { - "Id": "identity-provider\u003eldap-credential-check-traffic", - "SourceId": "identity-provider", - "TargetId": "ldap-auth-server", - "Title": "LDAP Credential Check Traffic", - "Description": "Link to the LDAP server", - "Protocol": 33, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 1, - "Authorization": 1, - "Usage": 0, - "DataAssetsSent": [ + "id": "backend-admin-client\u003euser-management-access", + "source_id": "backend-admin-client", + "target_id": "ldap-auth-server", + "title": "User Management Access", + "description": "Link to the LDAP auth server for managing users", + "protocol": "ldaps", + "authentication": "credentials", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ "customer-accounts" ], - "DataAssetsReceived": null, - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "data_assets_received": [ + "customer-accounts" + ], + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } ], "load-balancer": [ { - "Id": "customer-client\u003ecustomer-traffic", - "SourceId": "customer-client", - "TargetId": "load-balancer", - "Title": "Customer Traffic", - "Description": "Link to the load balancer", - "Protocol": 2, - "Tags": [], - "VPN": false, - 
"IpFiltered": false, - "Readonly": false, - "Authentication": 2, - "Authorization": 2, - "Usage": 0, - "DataAssetsSent": [ + "id": "customer-client\u003ecustomer-traffic", + "source_id": "customer-client", + "target_id": "load-balancer", + "title": "Customer Traffic", + "description": "Link to the load balancer", + "protocol": "https", + "authentication": "session-id", + "authorization": "enduser-identity-propagation", + "data_assets_sent": [ "customer-accounts", "customer-operational-data" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-accounts", "customer-operational-data", "customer-contracts", "client-application-code", "marketing-material" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } ], "marketing-cms": [ { - "Id": "jenkins-buildserver\u003ecms-updates", - "SourceId": "jenkins-buildserver", - "TargetId": "marketing-cms", - "Title": "CMS Updates", - "Description": "Link to the CMS", - "Protocol": 20, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 4, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": "load-balancer\u003ecms-content-traffic", + "source_id": "load-balancer", + "target_id": "marketing-cms", + "title": "CMS Content Traffic", + "description": "Link to the CMS server", + "protocol": "http", + "readonly": true, + "data_assets_received": [ "marketing-material" ], - "DataAssetsReceived": null, - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, { - "Id": "backoffice-client\u003emarketing-cms-editing", - "SourceId": "backoffice-client", - "TargetId": "marketing-cms", - "Title": "Marketing CMS Editing", - "Description": "Link to the CMS for editing content", - "Protocol": 2, - "Tags": [], - "VPN": true, - "IpFiltered": false, - "Readonly": false, - "Authentication": 3, - "Authorization": 2, - "Usage": 0, - 
"DataAssetsSent": [ + "id": "backoffice-client\u003emarketing-cms-editing", + "source_id": "backoffice-client", + "target_id": "marketing-cms", + "title": "Marketing CMS Editing", + "description": "Link to the CMS for editing content", + "protocol": "https", + "vpn": true, + "authentication": "token", + "authorization": "enduser-identity-propagation", + "data_assets_sent": [ "marketing-material" ], - "DataAssetsReceived": [ + "data_assets_received": [ "marketing-material" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, { - "Id": "load-balancer\u003ecms-content-traffic", - "SourceId": "load-balancer", - "TargetId": "marketing-cms", - "Title": "CMS Content Traffic", - "Description": "Link to the CMS server", - "Protocol": 1, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": true, - "Authentication": 0, - "Authorization": 0, - "Usage": 0, - "DataAssetsSent": null, - "DataAssetsReceived": [ + "id": "jenkins-buildserver\u003ecms-updates", + "source_id": "jenkins-buildserver", + "target_id": "marketing-cms", + "title": "CMS Updates", + "description": "Link to the CMS", + "protocol": "ssh", + "authentication": "client-certificate", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ "marketing-material" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } ], "sql-database": [ { - "Id": "backend-admin-client\u003edb-update-access", - "SourceId": "backend-admin-client", - "TargetId": "sql-database", - "Title": "DB Update Access", - "Description": "Link to the database (JDBC tunneled via SSH)", - "Protocol": 20, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 4, - "Authorization": 1, - "Usage": 1, - "DataAssetsSent": [ + "id": "backend-admin-client\u003edb-update-access", + "source_id": "backend-admin-client", + "target_id": 
"sql-database", + "title": "DB Update Access", + "description": "Link to the database (JDBC tunneled via SSH)", + "protocol": "ssh", + "authentication": "client-certificate", + "authorization": "technical-user", + "usage": "devops", + "data_assets_sent": [ "db-dumps" ], - "DataAssetsReceived": [ + "data_assets_received": [ "db-dumps", "erp-logs", "customer-accounts", "customer-operational-data" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true }, { - "Id": "erp-system\u003edatabase-traffic", - "SourceId": "erp-system", - "TargetId": "sql-database", - "Title": "Database Traffic", - "Description": "Link to the DB system", - "Protocol": 8, - "Tags": [], - "VPN": false, - "IpFiltered": false, - "Readonly": false, - "Authentication": 1, - "Authorization": 1, - "Usage": 0, - "DataAssetsSent": [ + "id": "erp-system\u003edatabase-traffic", + "source_id": "erp-system", + "target_id": "sql-database", + "title": "Database Traffic", + "description": "Link to the DB system", + "protocol": "jdbc", + "authentication": "credentials", + "authorization": "technical-user", + "data_assets_sent": [ "customer-accounts", "customer-operational-data", "internal-business-data" ], - "DataAssetsReceived": [ + "data_assets_received": [ "customer-accounts", "customer-operational-data", "internal-business-data" ], - "DiagramTweakWeight": 1, - "DiagramTweakConstraint": true + "diagram_tweak_weight": 1, + "diagram_tweak_constraint": true } ] }, "direct_containing_trust_boundary_mapped_by_technical_asset_id": { "apache-webserver": { - "Id": "web-dmz", - "Title": "Web DMZ", - "Description": "Web DMZ", - "Type": 4, - "Tags": [], - "TechnicalAssetsInside": [ + "id": "web-dmz", + "title": "Web DMZ", + "description": "Web DMZ", + "type": "network-cloud-security-group", + "technical_assets_inside": [ "apache-webserver", "marketing-cms" - ], - "TrustBoundariesNested": [] + ] }, "backend-admin-client": { - "Id": "dev-network", - 
"Title": "Dev Network", - "Description": "Development Network", - "Type": 0, - "Tags": [], - "TechnicalAssetsInside": [ + "id": "dev-network", + "title": "Dev Network", + "description": "Development Network", + "technical_assets_inside": [ "jenkins-buildserver", "git-repo", "backend-admin-client", "backoffice-client" - ], - "TrustBoundariesNested": [] + ] }, "backoffice-client": { - "Id": "dev-network", - "Title": "Dev Network", - "Description": "Development Network", - "Type": 0, - "Tags": [], - "TechnicalAssetsInside": [ + "id": "dev-network", + "title": "Dev Network", + "description": "Development Network", + "technical_assets_inside": [ "jenkins-buildserver", "git-repo", "backend-admin-client", "backoffice-client" - ], - "TrustBoundariesNested": [] + ] }, "contract-fileserver": { - "Id": "erp-dmz", - "Title": "ERP DMZ", - "Description": "ERP DMZ", - "Type": 4, - "Tags": [ + "id": "erp-dmz", + "title": "ERP DMZ", + "description": "ERP DMZ", + "type": "network-cloud-security-group", + "tags": [ "some-erp" ], - "TechnicalAssetsInside": [ + "technical_assets_inside": [ "erp-system", "contract-fileserver", "sql-database" - ], - "TrustBoundariesNested": [] + ] }, "erp-system": { - "Id": "erp-dmz", - "Title": "ERP DMZ", - "Description": "ERP DMZ", - "Type": 4, - "Tags": [ + "id": "erp-dmz", + "title": "ERP DMZ", + "description": "ERP DMZ", + "type": "network-cloud-security-group", + "tags": [ "some-erp" ], - "TechnicalAssetsInside": [ + "technical_assets_inside": [ "erp-system", "contract-fileserver", "sql-database" - ], - "TrustBoundariesNested": [] + ] }, "git-repo": { - "Id": "dev-network", - "Title": "Dev Network", - "Description": "Development Network", - "Type": 0, - "Tags": [], - "TechnicalAssetsInside": [ + "id": "dev-network", + "title": "Dev Network", + "description": "Development Network", + "technical_assets_inside": [ "jenkins-buildserver", "git-repo", "backend-admin-client", "backoffice-client" - ], - "TrustBoundariesNested": [] + ] }, 
"identity-provider": { - "Id": "auth-env", - "Title": "Auth Handling Environment", - "Description": "Auth Handling Environment", - "Type": 6, - "Tags": [], - "TechnicalAssetsInside": [ + "id": "auth-env", + "title": "Auth Handling Environment", + "description": "Auth Handling Environment", + "type": "execution-environment", + "technical_assets_inside": [ "identity-provider", "ldap-auth-server" - ], - "TrustBoundariesNested": [] + ] }, "jenkins-buildserver": { - "Id": "dev-network", - "Title": "Dev Network", - "Description": "Development Network", - "Type": 0, - "Tags": [], - "TechnicalAssetsInside": [ + "id": "dev-network", + "title": "Dev Network", + "description": "Development Network", + "technical_assets_inside": [ "jenkins-buildserver", "git-repo", "backend-admin-client", "backoffice-client" - ], - "TrustBoundariesNested": [] + ] }, "ldap-auth-server": { - "Id": "auth-env", - "Title": "Auth Handling Environment", - "Description": "Auth Handling Environment", - "Type": 6, - "Tags": [], - "TechnicalAssetsInside": [ + "id": "auth-env", + "title": "Auth Handling Environment", + "description": "Auth Handling Environment", + "type": "execution-environment", + "technical_assets_inside": [ "identity-provider", "ldap-auth-server" - ], - "TrustBoundariesNested": [] + ] }, "load-balancer": { - "Id": "application-network", - "Title": "Application Network", - "Description": "Application Network", - "Type": 3, - "Tags": [ + "id": "application-network", + "title": "Application Network", + "description": "Application Network", + "type": "network-cloud-provider", + "tags": [ "aws" ], - "TechnicalAssetsInside": [ + "technical_assets_inside": [ "load-balancer" ], - "TrustBoundariesNested": [ + "trust_boundaries_nested": [ "web-dmz", "erp-dmz", "auth-env" ] }, "marketing-cms": { - "Id": "web-dmz", - "Title": "Web DMZ", - "Description": "Web DMZ", - "Type": 4, - "Tags": [], - "TechnicalAssetsInside": [ + "id": "web-dmz", + "title": "Web DMZ", + "description": "Web DMZ", + "type": 
"network-cloud-security-group", + "technical_assets_inside": [ "apache-webserver", "marketing-cms" - ], - "TrustBoundariesNested": [] + ] }, "sql-database": { - "Id": "erp-dmz", - "Title": "ERP DMZ", - "Description": "ERP DMZ", - "Type": 4, - "Tags": [ + "id": "erp-dmz", + "title": "ERP DMZ", + "description": "ERP DMZ", + "type": "network-cloud-security-group", + "tags": [ "some-erp" ], - "TechnicalAssetsInside": [ + "technical_assets_inside": [ "erp-system", "contract-fileserver", "sql-database" - ], - "TrustBoundariesNested": [] + ] } }, "generated_risks_by_category": { "something-strange": [ { - "category": "", - "risk_status": "unchecked", + "category": "something-strange", "severity": "critical", "exploitation_likelihood": "likely", "exploitation_impact": "medium", "title": "\u003cb\u003eExample Individual Risk\u003c/b\u003e at \u003cb\u003eDatabase\u003c/b\u003e", "synthetic_id": "something-strange@sql-database", - "most_relevant_data_asset": "", "most_relevant_technical_asset": "sql-database", - "most_relevant_trust_boundary": "", - "most_relevant_shared_runtime": "", - "most_relevant_communication_link": "", "data_breach_probability": "probable", "data_breach_technical_assets": [ "sql-database" ] }, { - "category": "", - "risk_status": "unchecked", + "category": "something-strange", "severity": "medium", "exploitation_likelihood": "frequent", "exploitation_impact": "very-high", "title": "\u003cb\u003eExample Individual Risk\u003c/b\u003e at \u003cb\u003eContract Filesystem\u003c/b\u003e", "synthetic_id": "something-strange@contract-fileserver", - "most_relevant_data_asset": "", - "most_relevant_technical_asset": "contract-fileserver", - "most_relevant_trust_boundary": "", - "most_relevant_shared_runtime": "", - "most_relevant_communication_link": "", - "data_breach_probability": "improbable", - "data_breach_technical_assets": null + "most_relevant_technical_asset": "contract-fileserver" } ] } From 3d59ffcc3a36d17cf64e807c35480930d43ac416 Mon Sep 17 00:00:00 
2001 From: Joerg Reichelt Date: Wed, 31 Jan 2024 13:59:13 -0800 Subject: [PATCH 54/68] improved string list and multi-line merging --- Dockerfile | 6 +++--- go.mod | 3 --- go.sum | 6 ------ pkg/input/strings.go | 18 +++++++----------- test/technical_assets_devops.yaml | 1 - 5 files changed, 10 insertions(+), 24 deletions(-) diff --git a/Dockerfile b/Dockerfile index e039dee4..db99e4b4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -25,9 +25,9 @@ COPY --from=clone /app/threagile /app RUN go version RUN go test ./... -RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o raa_calc raa/raa/raa.go -RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o raa_dummy raa/dummy/dummy.go -RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o risk_demo_rule risks/custom/demo/demo-rule.go +RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o raa_calc cmd/raa/main.go +RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o raa_dummy cmd/raa_dummy/main.go +RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o risk_demo_rule cmd/risk_demo/main.go RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o threagile # add the -race parameter to go build call in order to instrument with race condition detector: https://blog.golang.org/race-detector # NOTE: copy files with final name to send to final build diff --git a/go.mod b/go.mod index fcc26a36..82fafb5d 100644 --- a/go.mod +++ b/go.mod @@ -5,7 +5,6 @@ go 1.20 require ( github.com/gin-gonic/gin v1.9.1 github.com/google/uuid v1.5.0 - github.com/jedib0t/go-pretty/v6 v6.5.4 github.com/jung-kurt/gofpdf v1.16.2 github.com/mpvl/unique v0.0.0-20150818121801-cbe035fff7de github.com/spf13/pflag v1.0.5 @@ -26,7 +25,6 @@ require ( github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/json-iterator/go v1.1.12 // indirect 
github.com/leodido/go-urn v1.2.4 // indirect - github.com/mattn/go-runewidth v0.0.15 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect @@ -34,7 +32,6 @@ require ( github.com/pmezard/go-difflib v1.0.0 // indirect github.com/richardlehane/mscfb v1.0.4 // indirect github.com/richardlehane/msoleps v1.0.3 // indirect - github.com/rivo/uniseg v0.2.0 // indirect github.com/twitchyliquid64/golang-asm v0.15.1 // indirect golang.org/x/sys v0.16.0 // indirect golang.org/x/text v0.14.0 // indirect diff --git a/go.sum b/go.sum index 4cf5a53f..658908f0 100644 --- a/go.sum +++ b/go.sum @@ -42,8 +42,6 @@ github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU= github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= -github.com/jedib0t/go-pretty/v6 v6.5.4 h1:gOGo0613MoqUcf0xCj+h/V3sHDaZasfv152G6/5l91s= -github.com/jedib0t/go-pretty/v6 v6.5.4/go.mod h1:5LQIxa52oJ/DlDSLv0HEkWOFMDGoWkJb9ss5KqPpJBg= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= @@ -57,8 +55,6 @@ github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mattn/go-runewidth v0.0.15 
h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U= -github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -83,8 +79,6 @@ github.com/richardlehane/mscfb v1.0.4/go.mod h1:YzVpcZg9czvAuhk9T+a3avCpcFPMUWm7 github.com/richardlehane/msoleps v1.0.1/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg= github.com/richardlehane/msoleps v1.0.3 h1:aznSZzrwYRl3rLKRT3gUk9am7T/mLNSnJINvN0AQoVM= github.com/richardlehane/msoleps v1.0.3/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg= -github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= -github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w= github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= diff --git a/pkg/input/strings.go b/pkg/input/strings.go index 5a2d27f9..d1dfeea3 100644 --- a/pkg/input/strings.go +++ b/pkg/input/strings.go @@ -2,8 +2,7 @@ package input import ( "fmt" - "github.com/mpvl/unique" - "sort" + "slices" "strings" ) @@ -31,7 +30,7 @@ func (what *Strings) MergeSingleton(first string, second string) (string, error) func (what *Strings) MergeMultiline(first string, second string) string { text := first if len(first) > 0 { - if len(second) > 0 { + if len(second) > 0 && !strings.EqualFold(first, second) { text = text + lineSeparator + second } } else { @@ -55,14 +54,11 @@ func (what *Strings) MergeMap(first map[string]string, 
second map[string]string) } func (what *Strings) MergeUniqueSlice(first []string, second []string) []string { - slice := append(first, second...) - - for n := range slice { - slice[n] = strings.TrimSpace(strings.ToLower(slice[n])) + for _, item := range second { + if !slices.Contains(first, item) { + first = append(first, item) + } } - sort.Strings(slice) - unique.Strings(&slice) - - return slice + return first } diff --git a/test/technical_assets_devops.yaml b/test/technical_assets_devops.yaml index 3107c76c..a6c0aa49 100644 --- a/test/technical_assets_devops.yaml +++ b/test/technical_assets_devops.yaml @@ -1,6 +1,5 @@ includes: - technical_assets_clients.yaml - - technical_assets: From ac5b4c4723d3d26807a3b708100bcea011acc63b Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Wed, 31 Jan 2024 14:06:45 -0800 Subject: [PATCH 55/68] fixed docker run for local docker file --- Dockerfile | 2 +- Dockerfile.local | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index db99e4b4..bd52e7c9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -78,4 +78,4 @@ USER 1000:1000 ENV PATH=/app:$PATH GIN_MODE=release ENTRYPOINT ["/app/threagile"] -CMD ["-help"] +CMD ["help"] diff --git a/Dockerfile.local b/Dockerfile.local index 4c5b4cc0..8fed2028 100644 --- a/Dockerfile.local +++ b/Dockerfile.local @@ -78,4 +78,4 @@ USER 1000:1000 ENV PATH=/app:$PATH GIN_MODE=release ENTRYPOINT ["/app/threagile"] -CMD ["-help"] +CMD ["help"] From 6e236051a59a0ea8bd6655a49dea2a890402027f Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Wed, 31 Jan 2024 15:50:05 -0800 Subject: [PATCH 56/68] - added gosec workflow - resolved issues flagged by gosec --- .github/workflows/gosec-analysis.yml | 24 ++++++++++++++++++++++++ Makefile | 10 +++++++--- cmd/raa_dummy/main.go | 10 ++++++++-- internal/threagile/about.go | 2 +- pkg/common/config.go | 2 +- pkg/examples/examples.go | 4 ++-- pkg/input/model.go | 4 ++-- pkg/macros/macros.go | 5 +++-- pkg/model/runner.go | 2 +- 
pkg/report/graphviz.go | 16 ++++++++-------- pkg/report/json.go | 6 +++--- pkg/report/report.go | 4 ++-- pkg/server/execute.go | 5 +++-- pkg/server/model.go | 6 +++--- pkg/server/report.go | 16 ++++++++-------- pkg/server/zip.go | 14 ++++++++++---- 16 files changed, 86 insertions(+), 44 deletions(-) create mode 100644 .github/workflows/gosec-analysis.yml diff --git a/.github/workflows/gosec-analysis.yml b/.github/workflows/gosec-analysis.yml new file mode 100644 index 00000000..d4926df1 --- /dev/null +++ b/.github/workflows/gosec-analysis.yml @@ -0,0 +1,24 @@ +name: Security Static Analysis + +on: + push: + branches: [master] + pull_request: + # The branches below must be a subset of the branches above + branches: [master] + schedule: + - cron: '0 0 1 * *' + +jobs: + analyze: + name: GoSec + runs-on: ubuntu-latest + env: + GO111MODULE: on + steps: + - name: Checkout Source + uses: actions/checkout@v4 + - name: Run Gosec + uses: securego/gosec@master + with: + args: ./... diff --git a/Makefile b/Makefile index ca094ff3..c16cea52 100644 --- a/Makefile +++ b/Makefile @@ -20,12 +20,15 @@ GO = env GO111MODULE=on go MKDIR = mkdir -p CP = cp -r RM = rm -rf +GOSEC = /opt/homebrew/bin/gosec # Targets -.phony: all run_tests install clean uninstall +.phony: all prep run_tests clean tidy install uninstall gosec default: all +all: prep run_tests $(addprefix bin/,$(BIN)) + prep: @# env GO111MODULE=on go mod vendor $(MKDIR) bin @@ -33,8 +36,6 @@ prep: run_tests: $(GO) test ./... -all: prep run_tests $(addprefix bin/,$(BIN)) - clean: $(RM) bin vendor @@ -56,6 +57,9 @@ uninstall: $(RM) $(addprefix $(BIN_DIR)/,$(BIN)) $(RM) $(ASSET_DIR) +gosec: + $(GOSEC) ./... 
+ bin/raa_calc: cmd/raa/main.go $(GO) build $(GOFLAGS) -o $@ $< diff --git a/cmd/raa_dummy/main.go b/cmd/raa_dummy/main.go index ff50c92e..2e974b0e 100644 --- a/cmd/raa_dummy/main.go +++ b/cmd/raa_dummy/main.go @@ -2,12 +2,14 @@ package main import ( "bufio" + "crypto/rand" "encoding/json" "fmt" "github.com/threagile/threagile/pkg/security/types" "io" - "math/rand" + "math/big" "os" + "time" ) // JUST A DUMMY TO HAVE AN ALTERNATIVE PLUGIN TO USE/TEST @@ -44,7 +46,11 @@ func main() { func CalculateRAA(input *types.ParsedModel) string { for techAssetID, techAsset := range input.TechnicalAssets { - techAsset.RAA = float64(rand.Intn(100)) + nBig, randError := rand.Int(rand.Reader, big.NewInt(100)) + if randError != nil { + nBig.SetInt64(time.Now().UnixMilli()) + } + techAsset.RAA = float64(nBig.Int64()) fmt.Println("Using dummy RAA random calculation (just to test the usage of other shared object files as plugins)") input.TechnicalAssets[techAssetID] = techAsset } diff --git a/internal/threagile/about.go b/internal/threagile/about.go index 48d6a477..9d43f8dc 100644 --- a/internal/threagile/about.go +++ b/internal/threagile/about.go @@ -43,7 +43,7 @@ func (what *Threagile) initAbout() *Threagile { cmd.Printf("weird app folder %v", appDir) return errors.New("weird app folder") } - content, err := os.ReadFile(filepath.Join(appDir, "LICENSE.txt")) + content, err := os.ReadFile(filepath.Clean(filepath.Join(appDir, "LICENSE.txt"))) if err != nil { cmd.Printf("Unable to read license file: %v", err) return err diff --git a/pkg/common/config.go b/pkg/common/config.go index 1e392c34..56e83c33 100644 --- a/pkg/common/config.go +++ b/pkg/common/config.go @@ -118,7 +118,7 @@ func (c *Config) Load(configFilename string) error { return nil } - data, readError := os.ReadFile(configFilename) + data, readError := os.ReadFile(filepath.Clean(configFilename)) if readError != nil { return readError } diff --git a/pkg/examples/examples.go b/pkg/examples/examples.go index 5d42d6ec..c335ee2d 
100644 --- a/pkg/examples/examples.go +++ b/pkg/examples/examples.go @@ -59,13 +59,13 @@ func copyFile(src, dst string) (int64, error) { return 0, fmt.Errorf("%s is not a regular file", src) } - source, err := os.Open(src) + source, err := os.Open(filepath.Clean(src)) if err != nil { return 0, err } defer func() { _ = source.Close() }() - destination, err := os.Create(dst) + destination, err := os.Create(filepath.Clean(dst)) if err != nil { return 0, err } diff --git a/pkg/input/model.go b/pkg/input/model.go index f44db7b5..2d6db1c2 100644 --- a/pkg/input/model.go +++ b/pkg/input/model.go @@ -67,7 +67,7 @@ func (model *Model) Defaults() *Model { } func (model *Model) Load(inputFilename string) error { - modelYaml, readError := os.ReadFile(inputFilename) + modelYaml, readError := os.ReadFile(filepath.Clean(inputFilename)) if readError != nil { log.Fatal("Unable to read model file: ", readError) } @@ -88,7 +88,7 @@ func (model *Model) Load(inputFilename string) error { } func (model *Model) Merge(dir string, includeFilename string) error { - modelYaml, readError := os.ReadFile(filepath.Join(dir, includeFilename)) + modelYaml, readError := os.ReadFile(filepath.Clean(filepath.Join(dir, includeFilename))) if readError != nil { return fmt.Errorf("unable to read model file: %v", readError) } diff --git a/pkg/macros/macros.go b/pkg/macros/macros.go index 4fb1ffb2..c1f3bcaa 100644 --- a/pkg/macros/macros.go +++ b/pkg/macros/macros.go @@ -9,6 +9,7 @@ import ( "fmt" "io" "os" + "path/filepath" "strconv" "strings" @@ -288,13 +289,13 @@ func copyFile(src, dst string) (int64, error) { return 0, fmt.Errorf("%s is not a regular file", src) } - source, err := os.Open(src) + source, err := os.Open(filepath.Clean(src)) if err != nil { return 0, err } defer func() { _ = source.Close() }() - destination, err := os.Create(dst) + destination, err := os.Create(filepath.Clean(dst)) if err != nil { return 0, err } diff --git a/pkg/model/runner.go b/pkg/model/runner.go index 
81b61168..3cd9c211 100644 --- a/pkg/model/runner.go +++ b/pkg/model/runner.go @@ -42,7 +42,7 @@ func (p *runner) Run(in any, out any, parameters ...string) error { Out: out, } - plugin := exec.Command(p.Filename, p.Parameters...) + plugin := exec.Command(p.Filename, p.Parameters...) // #nosec G204 stdin, stdinError := plugin.StdinPipe() if stdinError != nil { return stdinError diff --git a/pkg/report/graphviz.go b/pkg/report/graphviz.go index 0cdfa708..7ffc2d18 100644 --- a/pkg/report/graphviz.go +++ b/pkg/report/graphviz.go @@ -259,7 +259,7 @@ func WriteDataFlowDiagramGraphvizDOT(parsedModel *types.ParsedModel, //fmt.Println(dotContent.String()) // Write the DOT file - file, err := os.Create(diagramFilenameDOT) + file, err := os.Create(filepath.Clean(diagramFilenameDOT)) if err != nil { return nil, fmt.Errorf("Error creating %s: %v", diagramFilenameDOT, err) } @@ -292,14 +292,14 @@ func GenerateDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string, if err != nil { return fmt.Errorf("Error reading %s: %v", dotFile.Name(), err) } - err = os.WriteFile(tmpFileDOT.Name(), inputDOT, 0644) + err = os.WriteFile(tmpFileDOT.Name(), inputDOT, 0600) if err != nil { return fmt.Errorf("Error creating %s: %v", tmpFileDOT.Name(), err) } // exec - cmd := exec.Command("dot", "-Tpng", tmpFileDOT.Name(), "-o", tmpFilePNG.Name()) + cmd := exec.Command("dot", "-Tpng", tmpFileDOT.Name(), "-o", tmpFilePNG.Name()) // #nosec G204 cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr err = cmd.Run() @@ -311,7 +311,7 @@ func GenerateDataFlowDiagramGraphvizImage(dotFile *os.File, targetDir string, if err != nil { return fmt.Errorf("Error copying into resulting file %s: %v", tmpFilePNG.Name(), err) } - err = os.WriteFile(filepath.Join(targetDir, dataFlowDiagramFilenamePNG), inputPNG, 0644) + err = os.WriteFile(filepath.Join(targetDir, dataFlowDiagramFilenamePNG), inputPNG, 0600) if err != nil { return fmt.Errorf("Error creating %s: %v", filepath.Join(targetDir, dataFlowDiagramFilenamePNG), 
err) } @@ -446,7 +446,7 @@ func WriteDataAssetDiagramGraphvizDOT(parsedModel *types.ParsedModel, diagramFil dotContent.WriteString("}") // Write the DOT file - file, err := os.Create(diagramFilenameDOT) + file, err := os.Create(filepath.Clean(diagramFilenameDOT)) if err != nil { return nil, fmt.Errorf("Error creating %s: %v", diagramFilenameDOT, err) } @@ -567,13 +567,13 @@ func GenerateDataAssetDiagramGraphvizImage(dotFile *os.File, targetDir string, if err != nil { return fmt.Errorf("Error reading %s: %v", dotFile.Name(), err) } - err = os.WriteFile(tmpFileDOT.Name(), inputDOT, 0644) + err = os.WriteFile(tmpFileDOT.Name(), inputDOT, 0600) if err != nil { return fmt.Errorf("Error creating %s: %v", tmpFileDOT.Name(), err) } // exec - cmd := exec.Command("dot", "-Tpng", tmpFileDOT.Name(), "-o", tmpFilePNG.Name()) + cmd := exec.Command("dot", "-Tpng", tmpFileDOT.Name(), "-o", tmpFilePNG.Name()) // #nosec G204 cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr err = cmd.Run() @@ -585,7 +585,7 @@ func GenerateDataAssetDiagramGraphvizImage(dotFile *os.File, targetDir string, if err != nil { return fmt.Errorf("Error copying into resulting file %s: %v", tmpFilePNG.Name(), err) } - err = os.WriteFile(filepath.Join(targetDir, dataAssetDiagramFilenamePNG), inputPNG, 0644) + err = os.WriteFile(filepath.Join(targetDir, dataAssetDiagramFilenamePNG), inputPNG, 0600) if err != nil { return fmt.Errorf("Error creating %s: %v", filepath.Join(targetDir, dataAssetDiagramFilenamePNG), err) } diff --git a/pkg/report/json.go b/pkg/report/json.go index 290d1333..8a27d655 100644 --- a/pkg/report/json.go +++ b/pkg/report/json.go @@ -22,7 +22,7 @@ func WriteRisksJSON(parsedModel *types.ParsedModel, filename string) error { if err != nil { return fmt.Errorf("failed to marshal risks to JSON: %w", err) } - err = os.WriteFile(filename, jsonBytes, 0644) + err = os.WriteFile(filename, jsonBytes, 0600) if err != nil { return fmt.Errorf("failed to write risks to JSON file: %w", err) } @@ -36,7 +36,7 @@ 
func WriteTechnicalAssetsJSON(parsedModel *types.ParsedModel, filename string) e if err != nil { return fmt.Errorf("failed to marshal technical assets to JSON: %w", err) } - err = os.WriteFile(filename, jsonBytes, 0644) + err = os.WriteFile(filename, jsonBytes, 0600) if err != nil { return fmt.Errorf("failed to write technical assets to JSON file: %w", err) } @@ -48,7 +48,7 @@ func WriteStatsJSON(parsedModel *types.ParsedModel, filename string) error { if err != nil { return fmt.Errorf("failed to marshal stats to JSON: %w", err) } - err = os.WriteFile(filename, jsonBytes, 0644) + err = os.WriteFile(filename, jsonBytes, 0600) if err != nil { return fmt.Errorf("failed to write stats to JSON file: %w", err) } diff --git a/pkg/report/report.go b/pkg/report/report.go index e0a936f5..907056f2 100644 --- a/pkg/report/report.go +++ b/pkg/report/report.go @@ -1172,8 +1172,8 @@ func (r *pdfReporter) createRiskMitigationStatus(parsedModel *types.ParsedModel, }, } - r.embedPieChart(pieChartRemainingRiskSeverity, 15.0, 216, tempFolder) - r.embedPieChart(pieChartRemainingRisksByFunction, 110.0, 216, tempFolder) + _ = r.embedPieChart(pieChartRemainingRiskSeverity, 15.0, 216, tempFolder) + _ = r.embedPieChart(pieChartRemainingRisksByFunction, 110.0, 216, tempFolder) r.pdf.SetFont("Helvetica", "B", fontSizeBody) r.pdf.Ln(8) diff --git a/pkg/server/execute.go b/pkg/server/execute.go index e0dcbae8..6fc34fd7 100644 --- a/pkg/server/execute.go +++ b/pkg/server/execute.go @@ -132,7 +132,7 @@ func (s *server) execute(ginContext *gin.Context, dryRun bool) (yamlContent []by s.doItViaRuntimeCall(yamlFile, tmpOutputDir, true, true, true, true, true, true, true, true, dpi) } - yamlContent, err = os.ReadFile(yamlFile) + yamlContent, err = os.ReadFile(filepath.Clean(yamlFile)) if err != nil { handleErrorInServiceCall(err, ginContext) return yamlContent, false @@ -214,7 +214,8 @@ func (s *server) doItViaRuntimeCall(modelFile string, outputDir string, if nameError != nil { panic(nameError) } - 
cmd = exec.Command(self, args...) + + cmd = exec.Command(self, args...) // #nosec G204 out, err := cmd.CombinedOutput() if err != nil { panic(errors.New(string(out))) diff --git a/pkg/server/model.go b/pkg/server/model.go index 85fce9d1..af8a2a23 100644 --- a/pkg/server/model.go +++ b/pkg/server/model.go @@ -994,7 +994,7 @@ func (s *server) readModel(ginContext *gin.Context, modelUUID string, key []byte return modelInputResult, yamlText, false } - fileBytes, err := os.ReadFile(filepath.Join(modelFolder, s.config.InputFile)) + fileBytes, err := os.ReadFile(filepath.Clean(filepath.Join(modelFolder, s.config.InputFile))) if err != nil { log.Println(err) ginContext.JSON(http.StatusInternalServerError, gin.H{ @@ -1264,7 +1264,7 @@ func (s *server) writeModelYAML(ginContext *gin.Context, yaml string, key []byte return false } } - f, err := os.Create(filepath.Join(modelFolder, s.config.InputFile)) + f, err := os.Create(filepath.Clean(filepath.Join(modelFolder, s.config.InputFile))) if err != nil { log.Println(err) ginContext.JSON(http.StatusInternalServerError, gin.H{ @@ -1303,7 +1303,7 @@ func (s *server) backupModelToHistory(modelFolder string, changeReasonForHistory return err } } - inputModel, err := os.ReadFile(filepath.Join(modelFolder, s.config.InputFile)) + inputModel, err := os.ReadFile(filepath.Clean(filepath.Join(modelFolder, s.config.InputFile))) if err != nil { return err } diff --git a/pkg/server/report.go b/pkg/server/report.go index b4f804bf..f8349200 100644 --- a/pkg/server/report.go +++ b/pkg/server/report.go @@ -108,42 +108,42 @@ func (s *server) streamResponse(ginContext *gin.Context, responseType responseTy handleErrorInServiceCall(err, ginContext) return } - ginContext.File(filepath.Join(tmpOutputDir, s.config.DataFlowDiagramFilenamePNG)) + ginContext.File(filepath.Clean(filepath.Join(tmpOutputDir, s.config.DataFlowDiagramFilenamePNG))) } else if responseType == dataAssetDiagram { s.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, true, 
false, false, false, false, false, false, dpi) if err != nil { handleErrorInServiceCall(err, ginContext) return } - ginContext.File(filepath.Join(tmpOutputDir, s.config.DataAssetDiagramFilenamePNG)) + ginContext.File(filepath.Clean(filepath.Join(tmpOutputDir, s.config.DataAssetDiagramFilenamePNG))) } else if responseType == reportPDF { s.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, true, false, false, false, false, false, dpi) if err != nil { handleErrorInServiceCall(err, ginContext) return } - ginContext.FileAttachment(filepath.Join(tmpOutputDir, s.config.ReportFilename), s.config.ReportFilename) + ginContext.FileAttachment(filepath.Clean(filepath.Join(tmpOutputDir, s.config.ReportFilename)), s.config.ReportFilename) } else if responseType == risksExcel { s.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, true, false, false, false, false, dpi) if err != nil { handleErrorInServiceCall(err, ginContext) return } - ginContext.FileAttachment(filepath.Join(tmpOutputDir, s.config.ExcelRisksFilename), s.config.ExcelRisksFilename) + ginContext.FileAttachment(filepath.Clean(filepath.Join(tmpOutputDir, s.config.ExcelRisksFilename)), s.config.ExcelRisksFilename) } else if responseType == tagsExcel { s.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, true, false, false, false, dpi) if err != nil { handleErrorInServiceCall(err, ginContext) return } - ginContext.FileAttachment(filepath.Join(tmpOutputDir, s.config.ExcelTagsFilename), s.config.ExcelTagsFilename) + ginContext.FileAttachment(filepath.Clean(filepath.Join(tmpOutputDir, s.config.ExcelTagsFilename)), s.config.ExcelTagsFilename) } else if responseType == risksJSON { s.doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, false, false, false, false, false, true, false, false, dpi) if err != nil { handleErrorInServiceCall(err, ginContext) return } - jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, s.config.JsonRisksFilename)) + 
jsonData, err := os.ReadFile(filepath.Clean(filepath.Join(tmpOutputDir, s.config.JsonRisksFilename))) if err != nil { handleErrorInServiceCall(err, ginContext) return @@ -155,7 +155,7 @@ func (s *server) streamResponse(ginContext *gin.Context, responseType responseTy handleErrorInServiceCall(err, ginContext) return } - jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, s.config.JsonTechnicalAssetsFilename)) + jsonData, err := os.ReadFile(filepath.Clean(filepath.Join(tmpOutputDir, s.config.JsonTechnicalAssetsFilename))) if err != nil { handleErrorInServiceCall(err, ginContext) return @@ -167,7 +167,7 @@ func (s *server) streamResponse(ginContext *gin.Context, responseType responseTy handleErrorInServiceCall(err, ginContext) return } - jsonData, err := os.ReadFile(filepath.Join(tmpOutputDir, s.config.JsonStatsFilename)) + jsonData, err := os.ReadFile(filepath.Clean(filepath.Join(tmpOutputDir, s.config.JsonStatsFilename))) if err != nil { handleErrorInServiceCall(err, ginContext) return diff --git a/pkg/server/zip.go b/pkg/server/zip.go index 93278b2d..32401189 100644 --- a/pkg/server/zip.go +++ b/pkg/server/zip.go @@ -16,7 +16,7 @@ import ( // Param 1: filename is the output zip file's name. // Param 2: files is a list of files to add to the zip. func zipFiles(filename string, files []string) error { - newZipFile, err := os.Create(filename) + newZipFile, err := os.Create(filepath.Clean(filename)) if err != nil { return err } @@ -47,7 +47,7 @@ func unzip(src string, dest string) ([]string, error) { for _, f := range r.File { // Store filename/path for returning and using later on - path := filepath.Join(dest, f.Name) + path := filepath.Clean(filepath.Join(dest, filepath.Clean(f.Name))) // Check for ZipSlip. 
More Info: http://bit.ly/2MsjAWE if !strings.HasPrefix(path, filepath.Clean(dest)+string(os.PathSeparator)) { return filenames, fmt.Errorf("%s: illegal file path", path) @@ -69,11 +69,17 @@ func unzip(src string, dest string) ([]string, error) { if err != nil { return filenames, err } + + if f.FileInfo().Size() == 0 { + _ = outFile.Close() + continue + } + rc, err := f.Open() if err != nil { return filenames, err } - _, err = io.Copy(outFile, rc) + _, err = io.CopyN(outFile, rc, f.FileInfo().Size()) // Close the file without defer to close before next iteration of loop _ = outFile.Close() _ = rc.Close() @@ -85,7 +91,7 @@ func unzip(src string, dest string) ([]string, error) { } func addFileToZip(zipWriter *zip.Writer, filename string) error { - fileToZip, err := os.Open(filename) + fileToZip, err := os.Open(filepath.Clean(filename)) if err != nil { return err } From a020fb4fcb89d862cca022841105e9f6864b83b2 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Wed, 31 Jan 2024 15:55:06 -0800 Subject: [PATCH 57/68] trying to persuade Anchore to use the local code base rather than the remote repo --- .github/workflows/anchore-analysis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/anchore-analysis.yml b/.github/workflows/anchore-analysis.yml index 98370108..3e13304d 100644 --- a/.github/workflows/anchore-analysis.yml +++ b/.github/workflows/anchore-analysis.yml @@ -24,7 +24,7 @@ jobs: - name: Checkout the code uses: actions/checkout@v4 - name: Build the Docker image - run: docker build . --file Dockerfile --tag localbuild/testimage:latest + run: docker build . 
--file Dockerfile.local --tag localbuild/testimage:latest - name: Run the local Anchore scan action itself with GitHub Advanced Security code scanning integration enabled uses: anchore/scan-action@v3 with: From b6bb2fb5bce38a11bc2086a6f4ab1640f734d381 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Wed, 31 Jan 2024 15:58:22 -0800 Subject: [PATCH 58/68] upgraded codeql upload action to v2 according to deprecation notice --- .github/workflows/anchore-analysis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/anchore-analysis.yml b/.github/workflows/anchore-analysis.yml index 3e13304d..d8f7a6c1 100644 --- a/.github/workflows/anchore-analysis.yml +++ b/.github/workflows/anchore-analysis.yml @@ -32,6 +32,6 @@ jobs: dockerfile-path: "Dockerfile" acs-report-enable: true - name: Upload Anchore Scan Report - uses: github/codeql-action/upload-sarif@v1 + uses: github/codeql-action/upload-sarif@v2 with: sarif_file: results.sarif From f03ea816ff5c5613969412a5cb60e9d3c9b20df6 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Wed, 31 Jan 2024 16:01:50 -0800 Subject: [PATCH 59/68] upgraded codeql upload action to v3 according to new deprecation notice for v2 lol --- .github/workflows/anchore-analysis.yml | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/.github/workflows/anchore-analysis.yml b/.github/workflows/anchore-analysis.yml index d8f7a6c1..e138fb64 100644 --- a/.github/workflows/anchore-analysis.yml +++ b/.github/workflows/anchore-analysis.yml @@ -27,11 +27,7 @@ jobs: run: docker build . 
--file Dockerfile.local --tag localbuild/testimage:latest - name: Run the local Anchore scan action itself with GitHub Advanced Security code scanning integration enabled uses: anchore/scan-action@v3 - with: - image-reference: "localbuild/testimage:latest" - dockerfile-path: "Dockerfile" - acs-report-enable: true - name: Upload Anchore Scan Report - uses: github/codeql-action/upload-sarif@v2 + uses: github/codeql-action/upload-sarif@v3 with: sarif_file: results.sarif From eda85183f3186f08dff9202f95cc032f26286b8a Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Thu, 1 Feb 2024 20:33:41 -0800 Subject: [PATCH 60/68] manually merged pull request #53 --- .dockerignore | 7 +++- .github/workflows/anchore-analysis.yml | 30 ++++++++++------ Dockerfile.local | 50 +++++++++++++------------- 3 files changed, 50 insertions(+), 37 deletions(-) diff --git a/.dockerignore b/.dockerignore index 5cd47d71..e41bfc15 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,4 +1,9 @@ **/.git **/.gitignore **/.DS_Store -**/*.tmp \ No newline at end of file +**/*.tmp + +.github +.dockerignore +Dockerfile +Dockerfile.* diff --git a/.github/workflows/anchore-analysis.yml b/.github/workflows/anchore-analysis.yml index e138fb64..7daa5e31 100644 --- a/.github/workflows/anchore-analysis.yml +++ b/.github/workflows/anchore-analysis.yml @@ -21,13 +21,23 @@ jobs: Anchore-Build-Scan: runs-on: ubuntu-latest steps: - - name: Checkout the code - uses: actions/checkout@v4 - - name: Build the Docker image - run: docker build . 
--file Dockerfile.local --tag localbuild/testimage:latest - - name: Run the local Anchore scan action itself with GitHub Advanced Security code scanning integration enabled - uses: anchore/scan-action@v3 - - name: Upload Anchore Scan Report - uses: github/codeql-action/upload-sarif@v3 - with: - sarif_file: results.sarif + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build the Docker image + uses: docker/build-push-action@v5 + with: + tags: localbuild/threagile:latest + file: Dockerfile.local + push: false + load: true + + - name: Scan image + uses: anchore/scan-action@v3 + with: + image: "localbuild/threagile:latest" + + - name: Upload Anchore Scan Report + uses: github/codeql-action/upload-sarif@v3 + with: + sarif_file: results.sarif diff --git a/Dockerfile.local b/Dockerfile.local index 8fed2028..416cc703 100644 --- a/Dockerfile.local +++ b/Dockerfile.local @@ -1,47 +1,41 @@ -# Used for local manual test builds +# Used for local manual test builds as well as Anchore scans in github workflow ###### -## Stage 1: Clone the Git repository +## Stage 1: Build application with Go's build tools ###### -FROM alpine/git as clone +FROM docker.io/library/golang:alpine as build WORKDIR /app -COPY . /app/threagile +# Add build dependencies (gcc, c stdlib) +RUN apk add --no-cache build-base +COPY go.mod go.sum ./ +RUN go mod download +COPY . . - - -###### -## Stage 2: Build application with Go's build tools -###### -FROM golang as build -WORKDIR /app - +# Set build-time variables +ARG GOOS=linux ENV GO111MODULE=on -# https://stackoverflow.com/questions/36279253/go-compiled-binary-wont-run-in-an-alpine-docker-container-on-ubuntu-host -#ENV CGO_ENABLED=0 # cannot be set as otherwise plugins don't run -COPY --from=clone /app/threagile /app - RUN go version RUN go test ./... 
-RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o raa_calc cmd/raa/main.go -RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o raa_dummy cmd/raa_dummy/main.go -RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o risk_demo_rule cmd/risk_demo/main.go -RUN GOOS=linux go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o threagile cmd/threagile/main.go + +RUN go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o raa_calc cmd/raa/main.go +RUN go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o raa_dummy cmd/raa_dummy/main.go +RUN go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o risk_demo_rule cmd/risk_demo/main.go +RUN go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o threagile cmd/threagile/main.go + # add the -race parameter to go build call in order to instrument with race condition detector: https://blog.golang.org/race-detector # NOTE: copy files with final name to send to final build + RUN cp /app/demo/example/threagile.yaml /app/demo/example/threagile-example-model.yaml RUN cp /app/demo/stub/threagile.yaml /app/demo/stub/threagile-stub-model.yaml - - ###### -## Stage 3: Make final small image +## Stage 2: Make final small image ###### -FROM alpine as deploy -WORKDIR /app +FROM docker.io/library/alpine:latest as deploy # label used in other scripts to filter LABEL type="threagile" @@ -56,6 +50,10 @@ RUN apk add libc6-compat # RUN mkdir -p /lib64 && ln -s /lib/libc.musl-x86_64.so.1 /lib64/ld-linux-x86-64.so.2 # clean apk cache RUN rm -rf /var/cache/apk/* +# add non-privileged user +RUN adduser --disabled-password --gecos "" --home "$(pwd)" --no-create-home threagile + +WORKDIR /app RUN mkdir -p /app /data RUN chown -R 1000:1000 /app /data @@ -73,7 +71,7 @@ COPY --from=build --chown=1000:1000 /app/demo/example/threagile-example-model.ya COPY --from=build --chown=1000:1000 
/app/demo/stub/threagile-stub-model.yaml /app/ COPY --from=build --chown=1000:1000 /app/server /app/server -USER 1000:1000 +USER threagile ENV PATH=/app:$PATH GIN_MODE=release From 6af115d3a9cc92694a62db3418f5725d0a9f00ab Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Thu, 1 Feb 2024 23:02:58 -0800 Subject: [PATCH 61/68] resolved grype issues: - forced package update for packages with available upstream fixes - configured grype to ignore issues in packages without available fixes --- .grype.yaml | 47 ++++++++++++++++++++++++++++++++++++ Dockerfile.local | 62 +++++++++++++++++++++++++++--------------------- go.mod | 6 ++--- go.sum | 6 +++++ 4 files changed, 91 insertions(+), 30 deletions(-) create mode 100644 .grype.yaml diff --git a/.grype.yaml b/.grype.yaml new file mode 100644 index 00000000..56dfefe4 --- /dev/null +++ b/.grype.yaml @@ -0,0 +1,47 @@ +ignore: + - vulnerability: CVE-2023-42363 + reason: + busybox(1.36.1-r15) - no upstream fix available + busybox-binsh(1.36.1-r15) - no upstream fix available + - vulnerability: CVE-2023-42364 + reason: + busybox(1.36.1-r15) - no upstream fix available + busybox-binsh(1.36.1-r15) - no upstream fix available + - vulnerability: CVE-2023-42365 + reason: + busybox(1.36.1-r15) - no upstream fix available + busybox-binsh(1.36.1-r15) - no upstream fix available + - vulnerability: CVE-2023-42366 + reason: + busybox(1.36.1-r15) - no upstream fix available + busybox-binsh(1.36.1-r15) - no upstream fix available + - vulnerability: CVE-2014-9157 + reason: + graphviz(8.0.5-r1) - no upstream fix available + graphviz-libs(8.0.5-r1) - no upstream fix available + - vulnerability: CVE-2023-37769 + reason: pixman(0.42.2-r1) - no upstream fix available + - vulnerability: CVE-2023-43789 + reason: libxpm(3.5.16-r1) - no upstream fix available + - vulnerability: CVE-2023-42363 + reason: ssl_client(1.36.1-r5) - no upstream fix available + - vulnerability: CVE-2023-42364 + reason: ssl_client(1.36.1-r5) - no upstream fix available + - 
vulnerability: CVE-2023-42365 + reason: ssl_client(1.36.1-r5) - no upstream fix available + - vulnerability: CVE-2015-7313 + reason: tiff( 4.5.1-r0) - no upstream fix available + - vulnerability: CVE-2023-3164 + reason: tiff( 4.5.1-r0) - no upstream fix available + - vulnerability: CVE-2023-40745 + reason: tiff( 4.5.1-r0) - no upstream fix available + - vulnerability: CVE-2023-41175 + reason: tiff( 4.5.1-r0) - no upstream fix available + - vulnerability: CVE-2023-6228 + reason: tiff( 4.5.1-r0) - no upstream fix available + - vulnerability: CVE-2023-6277 + reason: tiff( 4.5.1-r0) - no upstream fix available + - vulnerability: CVE-2023-52355 + reason: tiff( 4.5.1-r0) - no upstream fix available + - vulnerability: CVE-2023-52356 + reason: tiff( 4.5.1-r0) - no upstream fix available diff --git a/Dockerfile.local b/Dockerfile.local index 416cc703..6003a64c 100644 --- a/Dockerfile.local +++ b/Dockerfile.local @@ -4,22 +4,21 @@ ## Stage 1: Build application with Go's build tools ###### FROM docker.io/library/golang:alpine as build + +COPY . /app WORKDIR /app -# Add build dependencies (gcc, c stdlib) -RUN apk add --no-cache build-base -COPY go.mod go.sum ./ +ARG GOOS=linux +ENV GO111MODULE=on +# download dependencies RUN go mod download -COPY . . # Set build-time variables -ARG GOOS=linux -ENV GO111MODULE=on - RUN go version RUN go test ./... 
+# build binaries RUN go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o raa_calc cmd/raa/main.go RUN go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o raa_dummy cmd/raa_dummy/main.go RUN go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o risk_demo_rule cmd/risk_demo/main.go @@ -28,6 +27,7 @@ RUN go build -ldflags="-X main.buildTimestamp=$(date '+%Y%m%d%H%M%S')" -o threag # add the -race parameter to go build call in order to instrument with race condition detector: https://blog.golang.org/race-detector # NOTE: copy files with final name to send to final build +# copy assets RUN cp /app/demo/example/threagile.yaml /app/demo/example/threagile-example-model.yaml RUN cp /app/demo/stub/threagile.yaml /app/demo/stub/threagile-stub-model.yaml @@ -35,45 +35,53 @@ RUN cp /app/demo/stub/threagile.yaml /app/demo/stub/threagile-stub-model.yaml ###### ## Stage 2: Make final small image ###### -FROM docker.io/library/alpine:latest as deploy +#FROM alpine:edge as finalize +FROM alpine:latest as finalize # label used in other scripts to filter LABEL type="threagile" -# add certificates +# update vulnerable packages +RUN apk add libcrypto3=3.1.4-r5 +RUN apk add libssl3=3.1.4-r5 + +# add certificates, graphviz, fonts RUN apk add --update --no-cache ca-certificates -# add graphviz, fonts RUN apk add --update --no-cache graphviz ttf-freefont + # https://stackoverflow.com/questions/66963068/docker-alpine-executable-binary-not-found-even-if-in-path RUN apk add libc6-compat + # https://stackoverflow.com/questions/34729748/installed-go-binary-not-found-in-path-on-alpine-linux-docker # RUN mkdir -p /lib64 && ln -s /lib/libc.musl-x86_64.so.1 /lib64/ld-linux-x86-64.so.2 + # clean apk cache RUN rm -rf /var/cache/apk/* -# add non-privileged user -RUN adduser --disabled-password --gecos "" --home "$(pwd)" --no-create-home threagile +# add non-privileged user WORKDIR /app +RUN adduser --disabled-password --gecos "" --home "$(pwd)" 
--no-create-home threagile RUN mkdir -p /app /data -RUN chown -R 1000:1000 /app /data - -COPY --from=build --chown=1000:1000 /app/threagile /app/ -COPY --from=build --chown=1000:1000 /app/raa_calc /app/ -COPY --from=build --chown=1000:1000 /app/raa_dummy /app/ -COPY --from=build --chown=1000:1000 /app/risk_demo_rule /app/ -COPY --from=build --chown=1000:1000 /app/LICENSE.txt /app/ -COPY --from=build --chown=1000:1000 /app/report/template/background.pdf /app/ -COPY --from=build --chown=1000:1000 /app/support/openapi.yaml /app/ -COPY --from=build --chown=1000:1000 /app/support/schema.json /app/ -COPY --from=build --chown=1000:1000 /app/support/live-templates.txt /app/ -COPY --from=build --chown=1000:1000 /app/demo/example/threagile-example-model.yaml /app/ -COPY --from=build --chown=1000:1000 /app/demo/stub/threagile-stub-model.yaml /app/ -COPY --from=build --chown=1000:1000 /app/server /app/server +RUN chown -R threagile:threagile /app /data USER threagile -ENV PATH=/app:$PATH GIN_MODE=release +COPY --from=build --chown=threagile:threagile /app/threagile /app/ +COPY --from=build --chown=threagile:threagile /app/raa_calc /app/ +COPY --from=build --chown=threagile:threagile /app/raa_dummy /app/ +COPY --from=build --chown=threagile:threagile /app/risk_demo_rule /app/ +COPY --from=build --chown=threagile:threagile /app/LICENSE.txt /app/ +COPY --from=build --chown=threagile:threagile /app/report/template/background.pdf /app/ +COPY --from=build --chown=threagile:threagile /app/support/openapi.yaml /app/ +COPY --from=build --chown=threagile:threagile /app/support/schema.json /app/ +COPY --from=build --chown=threagile:threagile /app/support/live-templates.txt /app/ +COPY --from=build --chown=threagile:threagile /app/demo/example/threagile-example-model.yaml /app/ +COPY --from=build --chown=threagile:threagile /app/demo/stub/threagile-stub-model.yaml /app/ +COPY --from=build --chown=threagile:threagile /app/server /app/server + +ENV PATH=/app:$PATH +ENV GIN_MODE=release 
ENTRYPOINT ["/app/threagile"] CMD ["help"] diff --git a/go.mod b/go.mod index 82fafb5d..b8fcc2fe 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,7 @@ go 1.20 require ( github.com/gin-gonic/gin v1.9.1 - github.com/google/uuid v1.5.0 + github.com/google/uuid v1.6.0 github.com/jung-kurt/gofpdf v1.16.2 github.com/mpvl/unique v0.0.0-20150818121801-cbe035fff7de github.com/spf13/pflag v1.0.5 @@ -24,7 +24,7 @@ require ( github.com/google/go-cmp v0.5.9 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/json-iterator/go v1.1.12 // indirect - github.com/leodido/go-urn v1.2.4 // indirect + github.com/leodido/go-urn v1.3.0 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect @@ -43,7 +43,7 @@ require ( github.com/bytedance/sonic v1.10.2 // indirect github.com/chenzhuoyu/iasm v0.9.1 // indirect github.com/gabriel-vasile/mimetype v1.4.3 // indirect - github.com/go-playground/validator/v10 v10.16.0 // indirect + github.com/go-playground/validator/v10 v10.17.0 // indirect github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect github.com/klauspost/cpuid/v2 v2.2.6 // indirect github.com/mattn/go-isatty v0.0.20 // indirect diff --git a/go.sum b/go.sum index 658908f0..aad09b65 100644 --- a/go.sum +++ b/go.sum @@ -31,6 +31,8 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE= github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= +github.com/go-playground/validator/v10 v10.17.0 h1:SmVVlfAOtlZncTxRuinDPomC2DkXJ4E5T9gDA0AIH74= +github.com/go-playground/validator/v10 v10.17.0/go.mod 
h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g= @@ -40,6 +42,8 @@ github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeN github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU= github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= @@ -53,6 +57,8 @@ github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZY github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= +github.com/leodido/go-urn v1.3.0 h1:jX8FDLfW4ThVXctBNZ+3cIWnCSnrACDV73r76dy0aQQ= +github.com/leodido/go-urn v1.3.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= From a7a61c642d10cd48618e2ca99a83061f44858c27 Mon Sep 17 
00:00:00 2001 From: Joerg Reichelt Date: Thu, 1 Feb 2024 23:10:11 -0800 Subject: [PATCH 62/68] ignoring grype build errors --- .github/workflows/anchore-analysis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/anchore-analysis.yml b/.github/workflows/anchore-analysis.yml index 7daa5e31..acdef9cd 100644 --- a/.github/workflows/anchore-analysis.yml +++ b/.github/workflows/anchore-analysis.yml @@ -36,6 +36,7 @@ jobs: uses: anchore/scan-action@v3 with: image: "localbuild/threagile:latest" + fail-build: false - name: Upload Anchore Scan Report uses: github/codeql-action/upload-sarif@v3 From 033fc8ad191df8b6f5e9df21310d7e5ed1226373 Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Fri, 2 Feb 2024 17:31:55 +0000 Subject: [PATCH 63/68] Stored, sent and received data assets are always processed --- cmd/raa/main.go | 2 + pkg/model/parse.go | 84 +++++++++++++++---- pkg/report/colors.go | 15 +--- .../builtin/accidental-secret-leak-rule.go | 2 +- .../builtin/container-platform-escape-rule.go | 2 +- .../builtin/cross-site-scripting-rule.go | 2 +- .../risks/builtin/ldap-injection-rule.go | 4 +- .../builtin/missing-authentication-rule.go | 4 +- .../builtin/missing-cloud-hardening-rule.go | 2 +- .../builtin/missing-file-validation-rule.go | 2 +- .../risks/builtin/missing-hardening-rule.go | 2 +- .../builtin/missing-identity-store-rule.go | 2 +- .../risks/builtin/missing-vault-rule.go | 2 +- .../risks/builtin/missing-waf-rule.go | 2 +- .../risks/builtin/path-traversal-rule.go | 2 +- .../builtin/search-query-injection-rule.go | 2 +- .../service-registry-poisoning-rule.go | 2 +- .../risks/builtin/sql-nosql-injection-rule.go | 2 +- .../risks/builtin/unencrypted-asset-rule.go | 1 + .../builtin/unnecessary-data-asset-rule.go | 4 +- .../builtin/unnecessary-data-transfer-rule.go | 4 +- .../unnecessary-technical-asset-rule.go | 2 +- .../builtin/untrusted-deserialization-rule.go | 2 +- .../risks/builtin/xml-external-entity-rule.go | 2 +- 
pkg/security/types/data_asset.go | 23 ----- pkg/security/types/technical_asset.go | 26 +----- support/schema.json | 6 +- 27 files changed, 100 insertions(+), 105 deletions(-) diff --git a/cmd/raa/main.go b/cmd/raa/main.go index c7f67be5..6ff37d65 100644 --- a/cmd/raa/main.go +++ b/cmd/raa/main.go @@ -167,12 +167,14 @@ func calculateAttackerAttractiveness(input *types.ParsedModel, techAsset types.T score += dataAsset.Integrity.AttackerAttractivenessForProcessedOrStoredData() * dataAsset.Quantity.QuantityFactor() score += dataAsset.Availability.AttackerAttractivenessForProcessedOrStoredData() } + // NOTE: Assuming all stored data is also processed, this effectively scores stored data twice for _, dataAssetStored := range techAsset.DataAssetsStored { dataAsset := input.DataAssets[dataAssetStored] score += dataAsset.Confidentiality.AttackerAttractivenessForProcessedOrStoredData() * dataAsset.Quantity.QuantityFactor() score += dataAsset.Integrity.AttackerAttractivenessForProcessedOrStoredData() * dataAsset.Quantity.QuantityFactor() score += dataAsset.Availability.AttackerAttractivenessForProcessedOrStoredData() } + // NOTE: To send or receive data effectively is processing that data and it's questionable if the attractiveness increases further for _, dataFlow := range techAsset.CommunicationLinks { for _, dataAssetSent := range dataFlow.DataAssetsSent { dataAsset := input.DataAssets[dataAssetSent] diff --git a/pkg/model/parse.go b/pkg/model/parse.go index 5bf86072..79401f49 100644 --- a/pkg/model/parse.go +++ b/pkg/model/parse.go @@ -129,29 +129,35 @@ func ParseModel(modelInput *input.Model, builtinRiskRules map[string]risks.RiskR return nil, errors.New("unknown 'usage' value of technical asset '" + title + "': " + asset.Usage) } - var dataAssetsProcessed = make([]string, 0) - if asset.DataAssetsProcessed != nil { - dataAssetsProcessed = make([]string, len(asset.DataAssetsProcessed)) - for i, parsedProcessedAsset := range asset.DataAssetsProcessed { - referencedAsset := 
fmt.Sprintf("%v", parsedProcessedAsset) + var dataAssetsStored = make([]string, 0) + if asset.DataAssetsStored != nil { + for _, parsedStoredAssets := range asset.DataAssetsStored { + referencedAsset := fmt.Sprintf("%v", parsedStoredAssets) + if contains(dataAssetsStored, referencedAsset) { + continue + } + err := parsedModel.CheckDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") if err != nil { return nil, err } - dataAssetsProcessed[i] = referencedAsset + dataAssetsStored = append(dataAssetsStored, referencedAsset) } } - var dataAssetsStored = make([]string, 0) - if asset.DataAssetsStored != nil { - dataAssetsStored = make([]string, len(asset.DataAssetsStored)) - for i, parsedStoredAssets := range asset.DataAssetsStored { - referencedAsset := fmt.Sprintf("%v", parsedStoredAssets) + var dataAssetsProcessed = dataAssetsStored + if asset.DataAssetsProcessed != nil { + for _, parsedProcessedAsset := range asset.DataAssetsProcessed { + referencedAsset := fmt.Sprintf("%v", parsedProcessedAsset) + if contains(dataAssetsProcessed, referencedAsset) { + continue + } + err := parsedModel.CheckDataAssetTargetExists(referencedAsset, "technical asset '"+title+"'") if err != nil { return nil, err } - dataAssetsStored[i] = referencedAsset + dataAssetsProcessed = append(dataAssetsProcessed, referencedAsset) } } @@ -227,22 +233,36 @@ func ParseModel(modelInput *input.Model, builtinRiskRules map[string]risks.RiskR if commLink.DataAssetsSent != nil { for _, dataAssetSent := range commLink.DataAssetsSent { referencedAsset := fmt.Sprintf("%v", dataAssetSent) - err := parsedModel.CheckDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") - if err != nil { - return nil, err + if !contains(dataAssetsSent, referencedAsset) { + err := parsedModel.CheckDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") + if err != nil { + return nil, err + } + + 
dataAssetsSent = append(dataAssetsSent, referencedAsset) + if !contains(dataAssetsProcessed, referencedAsset) { + dataAssetsProcessed = append(dataAssetsProcessed, referencedAsset) + } } - dataAssetsSent = append(dataAssetsSent, referencedAsset) } } if commLink.DataAssetsReceived != nil { for _, dataAssetReceived := range commLink.DataAssetsReceived { referencedAsset := fmt.Sprintf("%v", dataAssetReceived) + if contains(dataAssetsReceived, referencedAsset) { + continue + } + err := parsedModel.CheckDataAssetTargetExists(referencedAsset, "communication link '"+commLinkTitle+"' of technical asset '"+title+"'") if err != nil { return nil, err } dataAssetsReceived = append(dataAssetsReceived, referencedAsset) + + if !contains(dataAssetsProcessed, referencedAsset) { + dataAssetsProcessed = append(dataAssetsProcessed, referencedAsset) + } } } @@ -334,6 +354,29 @@ func ParseModel(modelInput *input.Model, builtinRiskRules map[string]risks.RiskR } } + // A target of a communication link implicitly processes all data assets that are sent to or received by that target + for id, techAsset := range parsedModel.TechnicalAssets { + for _, commLink := range techAsset.CommunicationLinks { + if commLink.TargetId == id { + continue + } + targetTechAsset := parsedModel.TechnicalAssets[commLink.TargetId] + dataAssetsProcessedByTarget := targetTechAsset.DataAssetsProcessed + for _, dataAssetSent := range commLink.DataAssetsSent { + if !contains(dataAssetsProcessedByTarget, dataAssetSent) { + dataAssetsProcessedByTarget = append(dataAssetsProcessedByTarget, dataAssetSent) + } + } + for _, dataAssetReceived := range commLink.DataAssetsReceived { + if !contains(dataAssetsProcessedByTarget, dataAssetReceived) { + dataAssetsProcessedByTarget = append(dataAssetsProcessedByTarget, dataAssetReceived) + } + } + targetTechAsset.DataAssetsProcessed = dataAssetsProcessedByTarget + parsedModel.TechnicalAssets[commLink.TargetId] = targetTechAsset + } + } + // Trust Boundaries 
=============================================================================== checklistToAvoidAssetBeingModeledInMultipleTrustBoundaries := make(map[string]bool) parsedModel.TrustBoundaries = make(map[string]types.TrustBoundary) @@ -713,3 +756,12 @@ func lowerCaseAndTrim(tags []string) []string { } return tags } + +func contains(a []string, x string) bool { + for _, n := range a { + if x == n { + return true + } + } + return false +} diff --git a/pkg/report/colors.go b/pkg/report/colors.go index 46b16155..e8f3fdb2 100644 --- a/pkg/report/colors.go +++ b/pkg/report/colors.go @@ -367,18 +367,12 @@ func determineTechnicalAssetLabelColor(ta types.TechnicalAsset, model *types.Par // red when mission-critical integrity, but still unauthenticated (non-readonly) channels access it // amber when critical integrity, but still unauthenticated (non-readonly) channels access it -// pink when model forgery attempt (i.e. nothing being processed or stored) - +// pink when model forgery attempt (i.e. nothing being processed) func determineShapeBorderColor(ta types.TechnicalAsset, parsedModel *types.ParsedModel) string { // Check for red if ta.Confidentiality == types.StrictlyConfidential { return Red } - for _, storedDataAsset := range ta.DataAssetsStored { - if parsedModel.DataAssets[storedDataAsset].Confidentiality == types.StrictlyConfidential { - return Red - } - } for _, processedDataAsset := range ta.DataAssetsProcessed { if parsedModel.DataAssets[processedDataAsset].Confidentiality == types.StrictlyConfidential { return Red @@ -388,11 +382,6 @@ func determineShapeBorderColor(ta types.TechnicalAsset, parsedModel *types.Parse if ta.Confidentiality == types.Confidential { return Amber } - for _, storedDataAsset := range ta.DataAssetsStored { - if parsedModel.DataAssets[storedDataAsset].Confidentiality == types.Confidential { - return Amber - } - } for _, processedDataAsset := range ta.DataAssetsProcessed { if parsedModel.DataAssets[processedDataAsset].Confidentiality == 
types.Confidential { return Amber @@ -427,7 +416,7 @@ func determineShapeBorderColor(ta types.TechnicalAsset, parsedModel *types.Parse // dotted when model forgery attempt (i.e. nothing being processed or stored) func determineShapeBorderLineStyle(ta types.TechnicalAsset) string { - if len(ta.DataAssetsProcessed) == 0 && len(ta.DataAssetsStored) == 0 || ta.OutOfScope { + if len(ta.DataAssetsProcessed) == 0 || ta.OutOfScope { return "dotted" // dotted, because it's strange when too many technical communication links transfer no data... some ok, but many in a diagram ist a sign of model forgery... } return "solid" diff --git a/pkg/security/risks/builtin/accidental-secret-leak-rule.go b/pkg/security/risks/builtin/accidental-secret-leak-rule.go index 2da19893..fe0afee3 100644 --- a/pkg/security/risks/builtin/accidental-secret-leak-rule.go +++ b/pkg/security/risks/builtin/accidental-secret-leak-rule.go @@ -29,7 +29,7 @@ func (*AccidentalSecretLeakRule) Category() types.RiskCategory { Function: types.Operations, STRIDE: types.InformationDisclosure, DetectionLogic: "In-scope sourcecode repositories and artifact registries.", - RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", + RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed.", FalsePositives: "Usually no false positives.", ModelFailurePossibleReason: false, CWE: 200, diff --git a/pkg/security/risks/builtin/container-platform-escape-rule.go b/pkg/security/risks/builtin/container-platform-escape-rule.go index 4f55f21b..3d32c99f 100644 --- a/pkg/security/risks/builtin/container-platform-escape-rule.go +++ b/pkg/security/risks/builtin/container-platform-escape-rule.go @@ -34,7 +34,7 @@ func (*ContainerPlatformEscapeRule) Category() types.RiskCategory { Function: types.Operations, STRIDE: types.ElevationOfPrivilege, DetectionLogic: "In-scope container platforms.", - 
RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", + RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed.", FalsePositives: "Container platforms not running parts of the target architecture can be considered " + "as false positives after individual review.", ModelFailurePossibleReason: false, diff --git a/pkg/security/risks/builtin/cross-site-scripting-rule.go b/pkg/security/risks/builtin/cross-site-scripting-rule.go index 00e87ab1..829de3af 100644 --- a/pkg/security/risks/builtin/cross-site-scripting-rule.go +++ b/pkg/security/risks/builtin/cross-site-scripting-rule.go @@ -27,7 +27,7 @@ func (*CrossSiteScriptingRule) Category() types.RiskCategory { Function: types.Development, STRIDE: types.Tampering, DetectionLogic: "In-scope web applications.", - RiskAssessment: "The risk rating depends on the sensitivity of the data processed or stored in the web application.", + RiskAssessment: "The risk rating depends on the sensitivity of the data processed in the web application.", FalsePositives: "When the technical asset " + "is not accessed via a browser-like component (i.e not by a human user initiating the request that " + "gets passed through all components until it reaches the web application) this can be considered a false positive.", diff --git a/pkg/security/risks/builtin/ldap-injection-rule.go b/pkg/security/risks/builtin/ldap-injection-rule.go index d6e33593..5f04a719 100644 --- a/pkg/security/risks/builtin/ldap-injection-rule.go +++ b/pkg/security/risks/builtin/ldap-injection-rule.go @@ -15,7 +15,7 @@ func (*LdapInjectionRule) Category() types.RiskCategory { Id: "ldap-injection", Title: "LDAP-Injection", Description: "When an LDAP server is accessed LDAP-Injection risks might arise. 
" + - "The risk rating depends on the sensitivity of the LDAP server itself and of the data assets processed or stored.", + "The risk rating depends on the sensitivity of the LDAP server itself and of the data assets processed.", Impact: "If this risk remains unmitigated, attackers might be able to modify LDAP queries and access more data from the LDAP server than allowed.", ASVS: "V5 - Validation, Sanitization and Encoding Verification Requirements", CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/LDAP_Injection_Prevention_Cheat_Sheet.html", @@ -27,7 +27,7 @@ func (*LdapInjectionRule) Category() types.RiskCategory { Function: types.Development, STRIDE: types.Tampering, DetectionLogic: "In-scope clients accessing LDAP servers via typical LDAP access protocols.", - RiskAssessment: "The risk rating depends on the sensitivity of the LDAP server itself and of the data assets processed or stored.", + RiskAssessment: "The risk rating depends on the sensitivity of the LDAP server itself and of the data assets processed.", FalsePositives: "LDAP server queries by search values not consisting of parts controllable by the caller can be considered " + "as false positives after individual review.", ModelFailurePossibleReason: false, diff --git a/pkg/security/risks/builtin/missing-authentication-rule.go b/pkg/security/risks/builtin/missing-authentication-rule.go index d7015fae..faf334c5 100644 --- a/pkg/security/risks/builtin/missing-authentication-rule.go +++ b/pkg/security/risks/builtin/missing-authentication-rule.go @@ -14,7 +14,7 @@ func (*MissingAuthenticationRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "missing-authentication", Title: "Missing Authentication", - Description: "Technical assets (especially multi-tenant systems) should authenticate incoming requests when the asset processes or stores sensitive data. 
", + Description: "Technical assets (especially multi-tenant systems) should authenticate incoming requests when the asset processes sensitive data. ", Impact: "If this risk is unmitigated, attackers might be able to access or modify sensitive data in an unauthenticated way.", ASVS: "V2 - Authentication Verification Requirements", CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Authentication_Cheat_Sheet.html", @@ -24,7 +24,7 @@ func (*MissingAuthenticationRule) Category() types.RiskCategory { Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", Function: types.Architecture, STRIDE: types.ElevationOfPrivilege, - DetectionLogic: "In-scope technical assets (except " + types.LoadBalancer.String() + ", " + types.ReverseProxy.String() + ", " + types.ServiceRegistry.String() + ", " + types.WAF.String() + ", " + types.IDS.String() + ", and " + types.IPS.String() + " and in-process calls) should authenticate incoming requests when the asset processes or stores " + + DetectionLogic: "In-scope technical assets (except " + types.LoadBalancer.String() + ", " + types.ReverseProxy.String() + ", " + types.ServiceRegistry.String() + ", " + types.WAF.String() + ", " + types.IDS.String() + ", and " + types.IPS.String() + " and in-process calls) should authenticate incoming requests when the asset processes " + "sensitive data. This is especially the case for all multi-tenant assets (there even non-sensitive ones).", RiskAssessment: "The risk rating (medium or high) " + "depends on the sensitivity of the data sent across the communication link. 
Monitoring callers are exempted from this risk.", diff --git a/pkg/security/risks/builtin/missing-cloud-hardening-rule.go b/pkg/security/risks/builtin/missing-cloud-hardening-rule.go index 8c32b340..b6f6b06a 100644 --- a/pkg/security/risks/builtin/missing-cloud-hardening-rule.go +++ b/pkg/security/risks/builtin/missing-cloud-hardening-rule.go @@ -35,7 +35,7 @@ func (*MissingCloudHardeningRule) Category() types.RiskCategory { Function: types.Operations, STRIDE: types.Tampering, DetectionLogic: "In-scope cloud components (either residing in cloud trust boundaries or more specifically tagged with cloud provider types).", - RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", + RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed.", FalsePositives: "Cloud components not running parts of the target architecture can be considered " + "as false positives after individual review.", ModelFailurePossibleReason: false, diff --git a/pkg/security/risks/builtin/missing-file-validation-rule.go b/pkg/security/risks/builtin/missing-file-validation-rule.go index 5a872e1f..2a7c388d 100644 --- a/pkg/security/risks/builtin/missing-file-validation-rule.go +++ b/pkg/security/risks/builtin/missing-file-validation-rule.go @@ -28,7 +28,7 @@ func (*MissingFileValidationRule) Category() types.RiskCategory { Function: types.Development, STRIDE: types.Spoofing, DetectionLogic: "In-scope technical assets with custom-developed code accepting file data formats.", - RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", + RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed.", FalsePositives: "Fully trusted (i.e. 
cryptographically signed or similar) files can be considered " + "as false positives after individual review.", ModelFailurePossibleReason: false, diff --git a/pkg/security/risks/builtin/missing-hardening-rule.go b/pkg/security/risks/builtin/missing-hardening-rule.go index 7847cdad..60d2f6e5 100644 --- a/pkg/security/risks/builtin/missing-hardening-rule.go +++ b/pkg/security/risks/builtin/missing-hardening-rule.go @@ -32,7 +32,7 @@ func (r *MissingHardeningRule) Category() types.RiskCategory { STRIDE: types.Tampering, DetectionLogic: "In-scope technical assets with RAA values of " + strconv.Itoa(r.raaLimit) + " % or higher. " + "Generally for high-value targets like data stores, application servers, identity providers and ERP systems this limit is reduced to " + strconv.Itoa(r.raaLimitReduced) + " %", - RiskAssessment: "The risk rating depends on the sensitivity of the data processed or stored in the technical asset.", + RiskAssessment: "The risk rating depends on the sensitivity of the data processed in the technical asset.", FalsePositives: "Usually no false positives.", ModelFailurePossibleReason: false, CWE: 16, diff --git a/pkg/security/risks/builtin/missing-identity-store-rule.go b/pkg/security/risks/builtin/missing-identity-store-rule.go index 15d98adb..9bdb1d2d 100644 --- a/pkg/security/risks/builtin/missing-identity-store-rule.go +++ b/pkg/security/risks/builtin/missing-identity-store-rule.go @@ -27,7 +27,7 @@ func (*MissingIdentityStoreRule) Category() types.RiskCategory { STRIDE: types.Spoofing, DetectionLogic: "Models with authenticated data-flows authorized via end user identity missing an in-scope identity store.", RiskAssessment: "The risk rating depends on the sensitivity of the end user-identity authorized technical assets and " + - "their data assets processed and stored.", + "their data assets processed.", FalsePositives: "Models only offering data/services without any real authentication need " + "can be considered as false positives after 
individual review.", ModelFailurePossibleReason: true, diff --git a/pkg/security/risks/builtin/missing-vault-rule.go b/pkg/security/risks/builtin/missing-vault-rule.go index 575c6eeb..61e8e1c1 100644 --- a/pkg/security/risks/builtin/missing-vault-rule.go +++ b/pkg/security/risks/builtin/missing-vault-rule.go @@ -28,7 +28,7 @@ func (*MissingVaultRule) Category() types.RiskCategory { Function: types.Architecture, STRIDE: types.InformationDisclosure, DetectionLogic: "Models without a Vault (Secret Storage).", - RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", + RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed.", FalsePositives: "Models where no technical assets have any kind of sensitive config data to protect " + "can be considered as false positives after individual review.", ModelFailurePossibleReason: true, diff --git a/pkg/security/risks/builtin/missing-waf-rule.go b/pkg/security/risks/builtin/missing-waf-rule.go index b39d40c3..f2c1132b 100644 --- a/pkg/security/risks/builtin/missing-waf-rule.go +++ b/pkg/security/risks/builtin/missing-waf-rule.go @@ -27,7 +27,7 @@ func (*MissingWafRule) Category() types.RiskCategory { Function: types.Operations, STRIDE: types.Tampering, DetectionLogic: "In-scope web-services and/or web-applications accessed across a network trust boundary not having a Web Application Firewall (WAF) in front of them.", - RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", + RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed.", FalsePositives: "Targets only accessible via WAFs or reverse proxies containing a WAF component (like ModSecurity) can be considered " + "as false positives after individual review.", 
ModelFailurePossibleReason: false, diff --git a/pkg/security/risks/builtin/path-traversal-rule.go b/pkg/security/risks/builtin/path-traversal-rule.go index 6bc81197..061b1dfb 100644 --- a/pkg/security/risks/builtin/path-traversal-rule.go +++ b/pkg/security/risks/builtin/path-traversal-rule.go @@ -15,7 +15,7 @@ func (*PathTraversalRule) Category() types.RiskCategory { Id: "path-traversal", Title: "Path-Traversal", Description: "When a filesystem is accessed Path-Traversal or Local-File-Inclusion (LFI) risks might arise. " + - "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed or stored.", + "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed.", Impact: "If this risk is unmitigated, attackers might be able to read sensitive files (configuration data, key/credential files, deployment files, " + "business data files, etc.) from the filesystem of affected components.", ASVS: "V12 - File and Resources Verification Requirements", diff --git a/pkg/security/risks/builtin/search-query-injection-rule.go b/pkg/security/risks/builtin/search-query-injection-rule.go index 3b449d27..749df40e 100644 --- a/pkg/security/risks/builtin/search-query-injection-rule.go +++ b/pkg/security/risks/builtin/search-query-injection-rule.go @@ -30,7 +30,7 @@ func (*SearchQueryInjectionRule) Category() types.RiskCategory { Function: types.Development, STRIDE: types.Tampering, DetectionLogic: "In-scope clients accessing search engine servers via typical search access protocols.", - RiskAssessment: "The risk rating depends on the sensitivity of the search engine server itself and of the data assets processed or stored.", + RiskAssessment: "The risk rating depends on the sensitivity of the search engine server itself and of the data assets processed.", FalsePositives: "Server engine queries by search values not consisting of parts controllable by the caller can be considered " + "as false 
positives after individual review.", ModelFailurePossibleReason: false, diff --git a/pkg/security/risks/builtin/service-registry-poisoning-rule.go b/pkg/security/risks/builtin/service-registry-poisoning-rule.go index 76f7acf4..94be965c 100644 --- a/pkg/security/risks/builtin/service-registry-poisoning-rule.go +++ b/pkg/security/risks/builtin/service-registry-poisoning-rule.go @@ -26,7 +26,7 @@ func (*ServiceRegistryPoisoningRule) Category() types.RiskCategory { STRIDE: types.Spoofing, DetectionLogic: "In-scope service registries.", RiskAssessment: "The risk rating depends on the sensitivity of the technical assets accessing the service registry " + - "as well as the data assets processed or stored.", + "as well as the data assets processed.", FalsePositives: "Service registries not used for service discovery " + "can be considered as false positives after individual review.", ModelFailurePossibleReason: false, diff --git a/pkg/security/risks/builtin/sql-nosql-injection-rule.go b/pkg/security/risks/builtin/sql-nosql-injection-rule.go index 48b42300..f2684333 100644 --- a/pkg/security/risks/builtin/sql-nosql-injection-rule.go +++ b/pkg/security/risks/builtin/sql-nosql-injection-rule.go @@ -15,7 +15,7 @@ func (*SqlNoSqlInjectionRule) Category() types.RiskCategory { Id: "sql-nosql-injection", Title: "SQL/NoSQL-Injection", Description: "When a database is accessed via database access protocols SQL/NoSQL-Injection risks might arise. 
" + - "The risk rating depends on the sensitivity technical asset itself and of the data assets processed or stored.", + "The risk rating depends on the sensitivity technical asset itself and of the data assets processed.", Impact: "If this risk is unmitigated, attackers might be able to modify SQL/NoSQL queries to steal and modify data and eventually further escalate towards a deeper system penetration via code executions.", ASVS: "V5 - Validation, Sanitization and Encoding Verification Requirements", CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/SQL_Injection_Prevention_Cheat_Sheet.html", diff --git a/pkg/security/risks/builtin/unencrypted-asset-rule.go b/pkg/security/risks/builtin/unencrypted-asset-rule.go index ca4c00b0..3b5d64dc 100644 --- a/pkg/security/risks/builtin/unencrypted-asset-rule.go +++ b/pkg/security/risks/builtin/unencrypted-asset-rule.go @@ -30,6 +30,7 @@ func (*UnencryptedAssetRule) Category() types.RiskCategory { "storing data assets rated at least as " + types.Confidential.String() + " or " + types.Critical.String() + ". 
" + "For technical assets storing data assets rated as " + types.StrictlyConfidential.String() + " or " + types.MissionCritical.String() + " the " + "encryption must be of type " + types.DataWithEndUserIndividualKey.String() + ".", + // NOTE: the risk assesment does not only consider the CIs of the *stored* data-assets RiskAssessment: "Depending on the confidentiality rating of the stored data-assets either medium or high risk.", FalsePositives: "When all sensitive data stored within the asset is already fully encrypted on document or data level.", ModelFailurePossibleReason: false, diff --git a/pkg/security/risks/builtin/unnecessary-data-asset-rule.go b/pkg/security/risks/builtin/unnecessary-data-asset-rule.go index a5e7b13d..dcb6cc3d 100644 --- a/pkg/security/risks/builtin/unnecessary-data-asset-rule.go +++ b/pkg/security/risks/builtin/unnecessary-data-asset-rule.go @@ -16,7 +16,7 @@ func (*UnnecessaryDataAssetRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "unnecessary-data-asset", Title: "Unnecessary Data Asset", - Description: "When a data asset is not processed or stored by any data assets and also not transferred by any " + + Description: "When a data asset is not processed by any data assets and also not transferred by any " + "communication links, this is an indicator for an unnecessary data asset (or for an incomplete model).", Impact: "If this risk is unmitigated, attackers might be able to access unnecessary data assets using " + "other vulnerabilities.", @@ -27,7 +27,7 @@ func (*UnnecessaryDataAssetRule) Category() types.RiskCategory { Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", Function: types.Architecture, STRIDE: types.ElevationOfPrivilege, - DetectionLogic: "Modelled data assets not processed or stored by any data assets and also not transferred by any " + + DetectionLogic: "Modelled data assets not processed by any data assets and also not transferred by any " + "communication 
links.", RiskAssessment: types.LowSeverity.String(), FalsePositives: "Usually no false positives as this looks like an incomplete model.", diff --git a/pkg/security/risks/builtin/unnecessary-data-transfer-rule.go b/pkg/security/risks/builtin/unnecessary-data-transfer-rule.go index 0cd99c7b..51a36a48 100644 --- a/pkg/security/risks/builtin/unnecessary-data-transfer-rule.go +++ b/pkg/security/risks/builtin/unnecessary-data-transfer-rule.go @@ -24,7 +24,7 @@ func (*UnnecessaryDataTransferRule) Category() types.RiskCategory { CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html", Action: "Attack Surface Reduction", Mitigation: "Try to avoid sending or receiving sensitive data assets which are not required (i.e. neither " + - "processed or stored) by the involved technical asset.", + "processed) by the involved technical asset.", Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?", Function: types.Architecture, STRIDE: types.ElevationOfPrivilege, @@ -35,7 +35,7 @@ func (*UnnecessaryDataTransferRule) Category() types.RiskCategory { "either " + types.LowSeverity.String() + " or " + types.MediumSeverity.String() + ".", FalsePositives: "Technical assets missing the model entries of either processing or storing the mentioned data assets " + "can be considered as false positives (incomplete models) after individual review. 
These should then be addressed by " + - "completing the model so that all necessary data assets are processed and/or stored by the technical asset involved.", + "completing the model so that all necessary data assets are processed by the technical asset involved.", ModelFailurePossibleReason: true, CWE: 1008, } diff --git a/pkg/security/risks/builtin/unnecessary-technical-asset-rule.go b/pkg/security/risks/builtin/unnecessary-technical-asset-rule.go index f2ffa71f..c075407e 100644 --- a/pkg/security/risks/builtin/unnecessary-technical-asset-rule.go +++ b/pkg/security/risks/builtin/unnecessary-technical-asset-rule.go @@ -14,7 +14,7 @@ func (*UnnecessaryTechnicalAssetRule) Category() types.RiskCategory { return types.RiskCategory{ Id: "unnecessary-technical-asset", Title: "Unnecessary Technical Asset", - Description: "When a technical asset does not process or store any data assets, this is " + + Description: "When a technical asset does not process any data assets, this is " + "an indicator for an unnecessary technical asset (or for an incomplete model). 
" + "This is also the case if the asset has no communication links (either outgoing or incoming).", Impact: "If this risk is unmitigated, attackers might be able to target unnecessary technical assets.", diff --git a/pkg/security/risks/builtin/untrusted-deserialization-rule.go b/pkg/security/risks/builtin/untrusted-deserialization-rule.go index d23428b4..d6e7aeb7 100644 --- a/pkg/security/risks/builtin/untrusted-deserialization-rule.go +++ b/pkg/security/risks/builtin/untrusted-deserialization-rule.go @@ -30,7 +30,7 @@ func (*UntrustedDeserializationRule) Category() types.RiskCategory { Function: types.Architecture, STRIDE: types.Tampering, DetectionLogic: "In-scope technical assets accepting serialization data formats (including EJB and RMI protocols).", - RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored.", + RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed.", FalsePositives: "Fully trusted (i.e. cryptographically signed or similar) data deserialized can be considered " + "as false positives after individual review.", ModelFailurePossibleReason: false, diff --git a/pkg/security/risks/builtin/xml-external-entity-rule.go b/pkg/security/risks/builtin/xml-external-entity-rule.go index f7bf8d09..e548ea19 100644 --- a/pkg/security/risks/builtin/xml-external-entity-rule.go +++ b/pkg/security/risks/builtin/xml-external-entity-rule.go @@ -27,7 +27,7 @@ func (*XmlExternalEntityRule) Category() types.RiskCategory { Function: types.Development, STRIDE: types.InformationDisclosure, DetectionLogic: "In-scope technical assets accepting XML data formats.", - RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed and stored. " + + RiskAssessment: "The risk rating depends on the sensitivity of the technical asset itself and of the data assets processed. 
" + "Also for cloud-based environments the exploitation impact is at least medium, as cloud backend services can be attacked via SSRF (and XXE vulnerabilities are often also SSRF vulnerabilities).", FalsePositives: "Fully trusted (i.e. cryptographically signed or similar) XML data can be considered " + "as false positives after individual review.", diff --git a/pkg/security/types/data_asset.go b/pkg/security/types/data_asset.go index a95f80ca..7fe1c46d 100644 --- a/pkg/security/types/data_asset.go +++ b/pkg/security/types/data_asset.go @@ -92,9 +92,6 @@ func (what DataAsset) IsDataBreachPotentialStillAtRisk(parsedModel *ParsedModel) if contains(parsedModel.TechnicalAssets[techAsset].DataAssetsProcessed, what.Id) { return true } - if contains(parsedModel.TechnicalAssets[techAsset].DataAssetsStored, what.Id) { - return true - } } } return false @@ -110,12 +107,6 @@ func (what DataAsset) IdentifiedDataBreachProbability(parsedModel *ParsedModel) break } } - if contains(parsedModel.TechnicalAssets[techAsset].DataAssetsStored, what.Id) { - if risk.DataBreachProbability > highestProbability { - highestProbability = risk.DataBreachProbability - break - } - } } } return highestProbability @@ -131,12 +122,6 @@ func (what DataAsset) IdentifiedDataBreachProbabilityStillAtRisk(parsedModel *Pa break } } - if contains(parsedModel.TechnicalAssets[techAsset].DataAssetsStored, what.Id) { - if risk.DataBreachProbability > highestProbability { - highestProbability = risk.DataBreachProbability - break - } - } } } return highestProbability @@ -150,10 +135,6 @@ func (what DataAsset) IdentifiedDataBreachProbabilityRisksStillAtRisk(parsedMode result = append(result, risk) break } - if contains(parsedModel.TechnicalAssets[techAsset].DataAssetsStored, what.Id) { - result = append(result, risk) - break - } } } return result @@ -167,10 +148,6 @@ func (what DataAsset) IdentifiedDataBreachProbabilityRisks(parsedModel *ParsedMo result = append(result, risk) break } - if 
contains(parsedModel.TechnicalAssets[techAsset].DataAssetsStored, what.Id) { - result = append(result, risk) - break - } } } return result diff --git a/pkg/security/types/technical_asset.go b/pkg/security/types/technical_asset.go index 2c12b5fd..eda65cca 100644 --- a/pkg/security/types/technical_asset.go +++ b/pkg/security/types/technical_asset.go @@ -110,12 +110,6 @@ func (what TechnicalAsset) HighestConfidentiality(parsedModel *ParsedModel) Conf highest = dataAsset.Confidentiality } } - for _, dataId := range what.DataAssetsStored { - dataAsset := parsedModel.DataAssets[dataId] - if dataAsset.Confidentiality > highest { - highest = dataAsset.Confidentiality - } - } return highest } @@ -163,12 +157,6 @@ func (what TechnicalAsset) HighestIntegrity(model *ParsedModel) Criticality { highest = dataAsset.Integrity } } - for _, dataId := range what.DataAssetsStored { - dataAsset := model.DataAssets[dataId] - if dataAsset.Integrity > highest { - highest = dataAsset.Integrity - } - } return highest } @@ -180,12 +168,6 @@ func (what TechnicalAsset) HighestAvailability(model *ParsedModel) Criticality { highest = dataAsset.Availability } } - for _, dataId := range what.DataAssetsStored { - dataAsset := model.DataAssets[dataId] - if dataAsset.Availability > highest { - highest = dataAsset.Availability - } - } return highest } @@ -238,13 +220,7 @@ func (what TechnicalAsset) IsZero() bool { } func (what TechnicalAsset) ProcessesOrStoresDataAsset(dataAssetId string) bool { - if contains(what.DataAssetsProcessed, dataAssetId) { - return true - } - if contains(what.DataAssetsStored, dataAssetId) { - return true - } - return false + return contains(what.DataAssetsProcessed, dataAssetId) } /* diff --git a/support/schema.json b/support/schema.json index c83628a6..bbbafcb2 100644 --- a/support/schema.json +++ b/support/schema.json @@ -535,7 +535,7 @@ "type": "boolean" }, "data_assets_processed": { - "description": "Data assets processed", + "description": "Data assets processed; all 
data assets stored or sent or received via a communication link (be it as a source or a target) are implicitly also processed and do not need to be listed here.", "type": [ "array", "null" @@ -745,9 +745,7 @@ "vpn", "ip_filtered", "readonly", - "usage", - "data_assets_sent", - "data_assets_received" + "usage" ] } } From 60bb6142f37d418fba25c11f6e965c263ddd9afa Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Fri, 2 Feb 2024 12:09:48 -0800 Subject: [PATCH 64/68] manually merged pr #5: Infer CIA ratings of tech assets #19 --- .github/workflows/codeql-analysis.yml | 8 ++++---- pkg/model/parse.go | 20 ++++++++++++++++++++ 2 files changed, 24 insertions(+), 4 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index a21762c5..de325f0c 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -28,7 +28,7 @@ jobs: uses: actions/checkout@v4 with: # We must fetch at least the immediate parents so that if this is - # a pull request then we can checkout the head. + # a pull request then we can check out the head. fetch-depth: 2 - name: Install Go @@ -39,14 +39,14 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@v2 + uses: github/codeql-action/autobuild@v3 # ℹ️ Command-line programs to run using the OS shell. 
# 📚 https://git.io/JvXDl @@ -60,4 +60,4 @@ jobs: # make release - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 diff --git a/pkg/model/parse.go b/pkg/model/parse.go index 79401f49..cc4168c8 100644 --- a/pkg/model/parse.go +++ b/pkg/model/parse.go @@ -354,6 +354,26 @@ func ParseModel(modelInput *input.Model, builtinRiskRules map[string]risks.RiskR } } + // If CIA is lower than that of its data assets, it is implicitly set to the highest CIA value of its data assets + for id, techAsset := range parsedModel.TechnicalAssets { + dataAssetConfidentiality := techAsset.HighestConfidentiality(&parsedModel) + if techAsset.Confidentiality < dataAssetConfidentiality { + techAsset.Confidentiality = dataAssetConfidentiality + } + + dataAssetIntegrity := techAsset.HighestIntegrity(&parsedModel) + if techAsset.Integrity < dataAssetIntegrity { + techAsset.Integrity = dataAssetIntegrity + } + + dataAssetAvailability := techAsset.HighestAvailability(&parsedModel) + if techAsset.Availability < dataAssetAvailability { + techAsset.Availability = dataAssetAvailability + } + + parsedModel.TechnicalAssets[id] = techAsset + } + // A target of a communication link implicitly processes all data assets that are sent to or received by that target for id, techAsset := range parsedModel.TechnicalAssets { for _, commLink := range techAsset.CommunicationLinks { From be0dda07a4dc16e319880e6615eae7e738c937a4 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Fri, 2 Feb 2024 14:20:10 -0800 Subject: [PATCH 65/68] made separate command 'analyze' to analyze a model --- internal/threagile/about.go | 7 ++-- internal/threagile/analyze.go | 41 ++++++++++++++++++++++ internal/threagile/examples.go | 7 ++-- internal/threagile/macros.go | 4 +-- internal/threagile/root.go | 62 ++++++++------------------------- internal/threagile/rules.go | 4 +-- internal/threagile/server.go | 33 ++++++++++++++++++ internal/threagile/threagile.go | 2 +- 
internal/threagile/types.go | 5 +-- pkg/common/config.go | 10 ++++-- pkg/common/consts.go | 34 +++++++----------- pkg/docs/constants.go | 12 ++++--- 12 files changed, 132 insertions(+), 89 deletions(-) create mode 100644 internal/threagile/analyze.go create mode 100644 internal/threagile/server.go diff --git a/internal/threagile/about.go b/internal/threagile/about.go index 9d43f8dc..97c45f44 100644 --- a/internal/threagile/about.go +++ b/internal/threagile/about.go @@ -7,6 +7,7 @@ package threagile import ( "errors" "fmt" + "github.com/threagile/threagile/pkg/common" "os" "path/filepath" @@ -17,19 +18,19 @@ import ( func (what *Threagile) initAbout() *Threagile { what.rootCmd.AddCommand(&cobra.Command{ - Use: "version", + Use: common.PrintVersionCommand, Short: "Get version information", Long: "\n" + docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp), }) what.rootCmd.AddCommand(&cobra.Command{ - Use: "print-3rd-party-licenses", + Use: common.Print3rdPartyCommand, Short: "Print 3rd-party license information", Long: "\n" + docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp) + "\n\n" + docs.ThirdPartyLicenses, }) what.rootCmd.AddCommand(&cobra.Command{ - Use: "print-license", + Use: common.PrintLicenseCommand, Short: "Print license information", RunE: func(cmd *cobra.Command, args []string) error { appDir, err := cmd.Flags().GetString(appDirFlagName) diff --git a/internal/threagile/analyze.go b/internal/threagile/analyze.go new file mode 100644 index 00000000..e3ed3b43 --- /dev/null +++ b/internal/threagile/analyze.go @@ -0,0 +1,41 @@ +package threagile + +import ( + "github.com/spf13/cobra" + "github.com/threagile/threagile/pkg/common" + "github.com/threagile/threagile/pkg/model" + "github.com/threagile/threagile/pkg/report" +) + +func (what *Threagile) initAnalyze() *Threagile { + analyze := &cobra.Command{ + Use: common.AnalyzeModelCommand, + Short: "Analyze model", + Aliases: []string{"analyze", "analyse", "run", 
"analyse-model"}, + RunE: func(cmd *cobra.Command, args []string) error { + cfg := what.readConfig(cmd, what.buildTimestamp) + commands := what.readCommands() + progressReporter := common.DefaultProgressReporter{Verbose: cfg.Verbose} + + r, err := model.ReadAndAnalyzeModel(*cfg, progressReporter) + if err != nil { + cmd.Printf("Failed to read and analyze model: %v", err) + return err + } + + err = report.Generate(cfg, r, commands, progressReporter) + if err != nil { + cmd.Printf("Failed to generate reports: %v \n", err) + return err + } + return nil + }, + CompletionOptions: cobra.CompletionOptions{ + DisableDefaultCmd: true, + }, + } + + what.rootCmd.AddCommand(analyze) + + return what +} diff --git a/internal/threagile/examples.go b/internal/threagile/examples.go index faa63dd8..4b48486e 100644 --- a/internal/threagile/examples.go +++ b/internal/threagile/examples.go @@ -7,13 +7,14 @@ package threagile import ( "fmt" "github.com/spf13/cobra" + "github.com/threagile/threagile/pkg/common" "github.com/threagile/threagile/pkg/docs" "github.com/threagile/threagile/pkg/examples" ) func (what *Threagile) initExamples() *Threagile { what.rootCmd.AddCommand(&cobra.Command{ - Use: "create-example-model", + Use: common.CreateExampleModelCommand, Short: "Create example threagile model", Long: "\n" + docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp) + "\n\njust create an example model named threagile-example-model.yaml in the output directory", RunE: func(cmd *cobra.Command, args []string) error { @@ -44,7 +45,7 @@ func (what *Threagile) initExamples() *Threagile { }) what.rootCmd.AddCommand(&cobra.Command{ - Use: "create-stub-model", + Use: common.CreateStubModelCommand, Short: "Create stub threagile model", Long: "\n" + docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp) + "\n\njust create a minimal stub model named threagile-stub-model.yaml in the output directory", RunE: func(cmd *cobra.Command, args []string) error { @@ -75,7 
+76,7 @@ func (what *Threagile) initExamples() *Threagile { }) what.rootCmd.AddCommand(&cobra.Command{ - Use: "create-editing-support", + Use: common.CreateEditingSupportCommand, Short: "Create editing support", Long: "\n" + docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp) + "\n\njust create some editing support stuff in the output directory", RunE: func(cmd *cobra.Command, args []string) error { diff --git a/internal/threagile/macros.go b/internal/threagile/macros.go index c4a58990..439d9bc9 100644 --- a/internal/threagile/macros.go +++ b/internal/threagile/macros.go @@ -17,7 +17,7 @@ import ( func (what *Threagile) initMacros() *Threagile { what.rootCmd.AddCommand(&cobra.Command{ - Use: "list-model-macros", + Use: common.ListModelMacrosCommand, Short: "Print model macros", Run: func(cmd *cobra.Command, args []string) { cmd.Println(docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp)) @@ -43,7 +43,7 @@ func (what *Threagile) initMacros() *Threagile { }) what.rootCmd.AddCommand(&cobra.Command{ - Use: "explain-model-macros", + Use: common.ExplainModelMacrosCommand, Short: "Explain model macros", Run: func(cmd *cobra.Command, args []string) { cmd.Println(docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp)) diff --git a/internal/threagile/root.go b/internal/threagile/root.go index b1677502..ef83131f 100644 --- a/internal/threagile/root.go +++ b/internal/threagile/root.go @@ -13,73 +13,41 @@ import ( "github.com/threagile/threagile/pkg/common" "github.com/threagile/threagile/pkg/docs" - "github.com/threagile/threagile/pkg/model" "github.com/threagile/threagile/pkg/report" - "github.com/threagile/threagile/pkg/server" ) func (what *Threagile) initRoot() *Threagile { what.rootCmd = &cobra.Command{ Use: "threagile", + Version: docs.ThreagileVersion, Short: "\n" + docs.Logo, Long: "\n" + docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp) + "\n\n" + docs.Examples, SilenceErrors: true, 
SilenceUsage: true, - RunE: func(cmd *cobra.Command, args []string) error { - cfg := what.readConfig(cmd, what.buildTimestamp) - commands := what.readCommands() - progressReporter := common.DefaultProgressReporter{Verbose: cfg.Verbose} - - r, err := model.ReadAndAnalyzeModel(*cfg, progressReporter) - if err != nil { - cmd.Printf("Failed to read and analyze model: %v", err) - return err - } - - err = report.Generate(cfg, r, commands, progressReporter) - if err != nil { - cmd.Printf("Failed to generate reports: %v \n", err) - return err - } - return nil - }, CompletionOptions: cobra.CompletionOptions{ DisableDefaultCmd: true, }, } - serverCmd := &cobra.Command{ - Use: "server", - Short: "Run server", - RunE: func(cmd *cobra.Command, args []string) error { - cfg := what.readConfig(cmd, what.buildTimestamp) - server.RunServer(cfg) - return nil - }, - } - - cfg := new(common.Config).Defaults("") - - what.rootCmd.PersistentFlags().StringVar(&what.flags.appDirFlag, appDirFlagName, cfg.AppFolder, "app folder") - what.rootCmd.PersistentFlags().StringVar(&what.flags.binDirFlag, binDirFlagName, cfg.BinFolder, "binary folder location") - what.rootCmd.PersistentFlags().StringVar(&what.flags.outputDirFlag, outputFlagName, cfg.OutputFolder, "output directory") - what.rootCmd.PersistentFlags().StringVar(&what.flags.tempDirFlag, tempDirFlagName, cfg.TempFolder, "temporary folder location") + defaultConfig := new(common.Config).Defaults(what.buildTimestamp) - what.rootCmd.PersistentFlags().StringVar(&what.flags.inputFileFlag, inputFileFlagName, cfg.InputFile, "input model yaml file") - what.rootCmd.PersistentFlags().StringVar(&what.flags.raaPluginFlag, raaPluginFlagName, cfg.RAAPlugin, "RAA calculation run file name") + what.rootCmd.PersistentFlags().StringVar(&what.flags.appDirFlag, appDirFlagName, defaultConfig.AppFolder, "app folder") + what.rootCmd.PersistentFlags().StringVar(&what.flags.binDirFlag, binDirFlagName, defaultConfig.BinFolder, "binary folder location") + 
what.rootCmd.PersistentFlags().StringVar(&what.flags.outputDirFlag, outputFlagName, defaultConfig.OutputFolder, "output directory") + what.rootCmd.PersistentFlags().StringVar(&what.flags.tempDirFlag, tempDirFlagName, defaultConfig.TempFolder, "temporary folder location") - serverCmd.PersistentFlags().IntVar(&what.flags.serverPortFlag, serverPortFlagName, cfg.ServerPort, "the server port") - serverCmd.PersistentFlags().StringVar(&what.flags.serverDirFlag, serverDirFlagName, cfg.DataFolder, "base folder for server mode (default: "+common.DataDir+")") + what.rootCmd.PersistentFlags().StringVar(&what.flags.inputFileFlag, inputFileFlagName, defaultConfig.InputFile, "input model yaml file") + what.rootCmd.PersistentFlags().StringVar(&what.flags.raaPluginFlag, raaPluginFlagName, defaultConfig.RAAPlugin, "RAA calculation run file name") - what.rootCmd.PersistentFlags().BoolVarP(&what.flags.verboseFlag, verboseFlagName, verboseFlagShorthand, cfg.Verbose, "verbose output") + what.rootCmd.PersistentFlags().BoolVarP(&what.flags.verboseFlag, verboseFlagName, verboseFlagShorthand, defaultConfig.Verbose, "verbose output") what.rootCmd.PersistentFlags().StringVar(&what.flags.configFlag, configFlagName, "", "config file") - what.rootCmd.PersistentFlags().StringVar(&what.flags.customRiskRulesPluginFlag, customRiskRulesPluginFlagName, strings.Join(cfg.RiskRulesPlugins, ","), "comma-separated list of plugins file names with custom risk rules to load") - what.rootCmd.PersistentFlags().IntVar(&what.flags.diagramDpiFlag, diagramDpiFlagName, cfg.DiagramDPI, "DPI used to render: maximum is "+fmt.Sprintf("%d", common.MaxGraphvizDPI)+"") - what.rootCmd.PersistentFlags().StringVar(&what.flags.skipRiskRulesFlag, skipRiskRulesFlagName, cfg.SkipRiskRules, "comma-separated list of risk rules (by their ID) to skip") - what.rootCmd.PersistentFlags().BoolVar(&what.flags.ignoreOrphanedRiskTrackingFlag, ignoreOrphanedRiskTrackingFlagName, cfg.IgnoreOrphanedRiskTracking, "ignore orphaned risk tracking 
(just log them) not matching a concrete risk") - what.rootCmd.PersistentFlags().StringVar(&what.flags.templateFileNameFlag, templateFileNameFlagName, cfg.TemplateFilename, "background pdf file") + what.rootCmd.PersistentFlags().StringVar(&what.flags.customRiskRulesPluginFlag, customRiskRulesPluginFlagName, strings.Join(defaultConfig.RiskRulesPlugins, ","), "comma-separated list of plugins file names with custom risk rules to load") + what.rootCmd.PersistentFlags().IntVar(&what.flags.diagramDpiFlag, diagramDpiFlagName, defaultConfig.DiagramDPI, "DPI used to render: maximum is "+fmt.Sprintf("%d", common.MaxGraphvizDPI)+"") + what.rootCmd.PersistentFlags().StringVar(&what.flags.skipRiskRulesFlag, skipRiskRulesFlagName, defaultConfig.SkipRiskRules, "comma-separated list of risk rules (by their ID) to skip") + what.rootCmd.PersistentFlags().BoolVar(&what.flags.ignoreOrphanedRiskTrackingFlag, ignoreOrphanedRiskTrackingFlagName, defaultConfig.IgnoreOrphanedRiskTracking, "ignore orphaned risk tracking (just log them) not matching a concrete risk") + what.rootCmd.PersistentFlags().StringVar(&what.flags.templateFileNameFlag, templateFileNameFlagName, defaultConfig.TemplateFilename, "background pdf file") what.rootCmd.PersistentFlags().BoolVar(&what.flags.generateDataFlowDiagramFlag, generateDataFlowDiagramFlagName, true, "generate data flow diagram") what.rootCmd.PersistentFlags().BoolVar(&what.flags.generateDataAssetDiagramFlag, generateDataAssetDiagramFlagName, true, "generate data asset diagram") @@ -90,8 +58,6 @@ func (what *Threagile) initRoot() *Threagile { what.rootCmd.PersistentFlags().BoolVar(&what.flags.generateTagsExcelFlag, generateTagsExcelFlagName, true, "generate tags excel") what.rootCmd.PersistentFlags().BoolVar(&what.flags.generateReportPDFFlag, generateReportPDFFlagName, true, "generate report pdf, including diagrams") - what.rootCmd.AddCommand(serverCmd) - return what } diff --git a/internal/threagile/rules.go b/internal/threagile/rules.go index 
ae0fb582..b2bbdb3e 100644 --- a/internal/threagile/rules.go +++ b/internal/threagile/rules.go @@ -19,7 +19,7 @@ import ( func (what *Threagile) initRules() *Threagile { what.rootCmd.AddCommand(&cobra.Command{ - Use: "list-risk-rules", + Use: common.ListRiskRulesCommand, Short: "Print available risk rules", RunE: func(cmd *cobra.Command, args []string) error { cmd.Println(docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp)) @@ -46,7 +46,7 @@ func (what *Threagile) initRules() *Threagile { }) what.rootCmd.AddCommand(&cobra.Command{ - Use: "explain-risk-rules", + Use: common.ExplainRiskRulesCommand, Short: "Detailed explanation of all the risk rules", RunE: func(cmd *cobra.Command, args []string) error { cmd.Println(docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp)) diff --git a/internal/threagile/server.go b/internal/threagile/server.go new file mode 100644 index 00000000..aba7f6f8 --- /dev/null +++ b/internal/threagile/server.go @@ -0,0 +1,33 @@ +package threagile + +import ( + "github.com/spf13/cobra" + "github.com/threagile/threagile/pkg/common" + "github.com/threagile/threagile/pkg/server" +) + +func (what *Threagile) initServer() *Threagile { + defaultConfig := new(common.Config).Defaults(what.buildTimestamp) + + serverCmd := &cobra.Command{ + Use: "server", + Short: "Run server", + RunE: func(cmd *cobra.Command, args []string) error { + cfg := what.readConfig(cmd, what.buildTimestamp) + cfg.ServerMode = true + serverError := cfg.CheckServerFolder() + if serverError != nil { + return serverError + } + server.RunServer(cfg) + return nil + }, + } + + serverCmd.PersistentFlags().IntVar(&what.flags.serverPortFlag, serverPortFlagName, defaultConfig.ServerPort, "server port") + serverCmd.PersistentFlags().StringVar(&what.flags.serverDirFlag, serverDirFlagName, defaultConfig.DataFolder, "base folder for server mode (default: "+common.DataDir+")") + + what.rootCmd.AddCommand(serverCmd) + + return what +} diff --git 
a/internal/threagile/threagile.go b/internal/threagile/threagile.go index 20d0a22f..4a97deb4 100644 --- a/internal/threagile/threagile.go +++ b/internal/threagile/threagile.go @@ -20,5 +20,5 @@ func (what *Threagile) Execute() { func (what *Threagile) Init(buildTimestamp string) *Threagile { what.buildTimestamp = buildTimestamp - return what.initRoot().initAbout().initRules().initExamples().initMacros().initTypes() + return what.initRoot().initAbout().initRules().initExamples().initMacros().initTypes().initAnalyze().initServer() } diff --git a/internal/threagile/types.go b/internal/threagile/types.go index 3aad1787..f6f50ffd 100644 --- a/internal/threagile/types.go +++ b/internal/threagile/types.go @@ -6,6 +6,7 @@ package threagile import ( "fmt" + "github.com/threagile/threagile/pkg/common" "github.com/spf13/cobra" @@ -15,7 +16,7 @@ import ( func (what *Threagile) initTypes() *Threagile { what.rootCmd.AddCommand(&cobra.Command{ - Use: "list-types", + Use: common.ListTypesCommand, Short: "Print type information (enum values to be used in models)", Run: func(cmd *cobra.Command, args []string) { cmd.Println(docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp)) @@ -30,7 +31,7 @@ func (what *Threagile) initTypes() *Threagile { }) what.rootCmd.AddCommand(&cobra.Command{ - Use: "explain-types", + Use: common.ExplainTypesCommand, Short: "Print type information (enum values to be used in models)", Run: func(cmd *cobra.Command, args []string) { cmd.Println(docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp)) diff --git a/pkg/common/config.go b/pkg/common/config.go index 56e83c33..61130aba 100644 --- a/pkg/common/config.go +++ b/pkg/common/config.go @@ -39,6 +39,7 @@ type Config struct { SkipRiskRules string ExecuteModelMacro string + ServerMode bool DiagramDPI int ServerPort int GraphvizDPI int @@ -81,7 +82,8 @@ func (c *Config) Defaults(buildTimestamp string) *Config { RiskRulesPlugins: make([]string, 0), SkipRiskRules: "", 
ExecuteModelMacro: "", - ServerPort: 0, //DefaultServerPort, + ServerMode: false, + ServerPort: DefaultServerPort, GraphvizDPI: DefaultGraphvizDPI, BackupHistoryFilesToKeep: DefaultBackupHistoryFilesToKeep, @@ -167,7 +169,11 @@ func (c *Config) Load(configFilename string) error { return dataDirError } - if c.ServerPort > 0 { + return c.CheckServerFolder() +} + +func (c *Config) CheckServerFolder() error { + if c.ServerMode { c.ServerFolder = c.CleanPath(c.ServerFolder) serverDirError := c.checkDir(c.ServerFolder, "server") if serverDirError != nil { diff --git a/pkg/common/consts.go b/pkg/common/consts.go index c57ee68e..b24c3c13 100644 --- a/pkg/common/consts.go +++ b/pkg/common/consts.go @@ -33,25 +33,17 @@ const ( ) const ( - ServerPortCommand = "server-port" - CreateExampleModelCommand = "create-example-model" - CreateStubModelCommand = "create-stub-model" - CreateEditingSupportCommand = "create-editing-support" - GenerateDataFlowDiagramCommand = "generate-data-flow-diagram" - GenerateDataAssetDiagramCommand = "generate-data-asset-diagram" - GenerateRisksJSONCommand = "generate-risks-json" - GenerateTechnicalAssetsJSONCommand = "generate-technical-assets-json" - GenerateStatsJSONCommand = "generate-stats-json" - GenerateRisksExcelCommand = "generate-risks-excel" - GenerateTagsExcelCommand = "generate-tags-excel" - GenerateReportPDFCommand = "generate-report-pdf" - PrintVersionCommand = "version" - ListTypesCommand = "list-types" - ListRiskRulesCommand = "list-risk-rules" - ListModelMacrosCommand = "list-model-macros" - ExplainTypesCommand = "explain-types" - ExplainRiskRulesCommand = "explain-risk-rules" - ExplainModelMacrosCommand = "explain-model-macros" - Print3rdPartyCommand = "print-3rd-party-licenses" - PrintLicenseCommand = "print-license" + AnalyzeModelCommand = "analyze-model" + CreateExampleModelCommand = "create-example-model" + CreateStubModelCommand = "create-stub-model" + CreateEditingSupportCommand = "create-editing-support" + PrintVersionCommand 
= "version" + ListTypesCommand = "list-types" + ListRiskRulesCommand = "list-risk-rules" + ListModelMacrosCommand = "list-model-macros" + ExplainTypesCommand = "explain-types" + ExplainRiskRulesCommand = "explain-risk-rules" + ExplainModelMacrosCommand = "explain-model-macros" + Print3rdPartyCommand = "print-3rd-party-licenses" + PrintLicenseCommand = "print-license" ) diff --git a/pkg/docs/constants.go b/pkg/docs/constants.go index 6b52eb28..32225cb7 100644 --- a/pkg/docs/constants.go +++ b/pkg/docs/constants.go @@ -4,6 +4,8 @@ Copyright © 2023 NAME HERE package docs +import "github.com/threagile/threagile/pkg/common" + const ( ThreagileVersion = "1.0.0" // Also update into example and stub model files and openapi.yaml Logo = " _____ _ _ _ \n |_ _| |__ _ __ ___ __ _ __ _(_) | ___ \n | | | '_ \\| '__/ _ \\/ _` |/ _` | | |/ _ \\\n | | | | | | | | __/ (_| | (_| | | | __/\n |_| |_| |_|_| \\___|\\__,_|\\__, |_|_|\\___|\n |___/ " + @@ -15,19 +17,19 @@ const ( "Version: " + ThreagileVersion + " (%v)" Examples = "Examples:\n\n" + "If you want to create an example model (via docker) as a starting point to learn about Threagile just run: \n" + - " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile create-example-model -output app/work \n\n" + + " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile " + common.CreateExampleModelCommand + " -output app/work \n\n" + "If you want to create a minimal stub model (via docker) as a starting point for your own model just run: \n" + - " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile create-stub-model -output app/work \n\n" + + " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile " + common.CreateStubModelCommand + " -output app/work \n\n" + "If you want to execute Threagile on a model yaml file (via docker): \n" + " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile -verbose -model -output app/work \n\n" + "If you want to run Threagile as a server (REST API) on some port 
(here 8080): \n" + " docker run --rm -it --shm-size=256m -p 8080:8080 --name --mount 'type=volume,src=threagile-storage,dst=/data,readonly=false' threagile/threagile server --server-port 8080 \n\n" + "If you want to find out about the different enum values usable in the model yaml file: \n" + - " docker run --rm -it threagile/threagile list-types\n\n" + + " docker run --rm -it threagile/threagile " + common.ListTypesCommand + "\n\n" + "If you want to use some nice editing help (syntax validation, autocompletion, and live templates) in your favourite IDE: " + - " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile create-editing-support -output app/work\n\n" + + " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile " + common.CreateEditingSupportCommand + " -output app/work\n\n" + "If you want to list all available model macros (which are macros capable of reading a model yaml file, asking you questions in a wizard-style and then update the model yaml file accordingly): \n" + - " docker run --rm -it threagile/threagile list-model-macros \n\n" + + " docker run --rm -it threagile/threagile " + common.ListModelMacrosCommand + " \n\n" + "If you want to execute a certain model macro on the model yaml file (here the macro add-build-pipeline): \n" + " docker run --rm -it -v \"$(pwd)\":app/work threagile/threagile -model app/work/threagile.yaml -output app/work execute-model-macro add-build-pipeline" ThirdPartyLicenses = " - golang (Google Go License): https://golang.org/LICENSE\n" + From d45d4b6669239184a202f4f532296a10671154e2 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Fri, 2 Feb 2024 14:58:17 -0800 Subject: [PATCH 66/68] added interactive mode --- go.mod | 3 + go.sum | 9 ++ internal/threagile/flags.go | 24 +++--- internal/threagile/quit.go | 25 ++++++ internal/threagile/root.go | 140 ++++++++++++++++++++++++++++++++ internal/threagile/threagile.go | 2 +- pkg/common/config.go | 1 + pkg/common/consts.go | 1 + 8 files changed, 194 
insertions(+), 11 deletions(-) create mode 100644 internal/threagile/quit.go diff --git a/go.mod b/go.mod index b8fcc2fe..6c80e9ac 100644 --- a/go.mod +++ b/go.mod @@ -15,7 +15,9 @@ require ( ) require ( + github.com/buildkite/shellwords v0.0.0-20180315110454-59467a9b8e10 // indirect github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect + github.com/chzyer/readline v1.5.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/gin-contrib/sse v0.1.0 // indirect github.com/go-playground/locales v0.14.1 // indirect @@ -25,6 +27,7 @@ require ( github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/leodido/go-urn v1.3.0 // indirect + github.com/mattn/go-shellwords v1.0.12 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect diff --git a/go.sum b/go.sum index aad09b65..b5317e53 100644 --- a/go.sum +++ b/go.sum @@ -3,6 +3,8 @@ github.com/akedrou/textdiff v0.0.0-20230423230343-2ebdcebdccc1/go.mod h1:PJwvxBp github.com/blend/go-sdk v1.20220411.3 h1:GFV4/FQX5UzXLPwWV03gP811pj7B8J2sbuq+GJQofXc= github.com/blend/go-sdk v1.20220411.3/go.mod h1:7lnH8fTi6U4i1fArEXRyOIY2E1X4MALg09qsQqY1+ak= github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= +github.com/buildkite/shellwords v0.0.0-20180315110454-59467a9b8e10 h1:XwHQ5xDtYPdtBbVPyRO6UZoWZe8/mbKUb076f8x7RvI= +github.com/buildkite/shellwords v0.0.0-20180315110454-59467a9b8e10/go.mod h1:gv0DYOzHEsKgo31lTCDGauIg4DTTGn41Bzp+t3wSOlk= github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM= github.com/bytedance/sonic v1.10.2 h1:GQebETVBxYB7JGWJtLBi07OVzWwt+8dWA00gEVW2ZFE= @@ -14,6 +16,10 @@ github.com/chenzhuoyu/base64x 
v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpV github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= github.com/chenzhuoyu/iasm v0.9.1 h1:tUHQJXo3NhBqw6s33wkGn9SP3bvrWLdlVIJ3hQBL7P0= github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= +github.com/chzyer/logex v1.2.1/go.mod h1:JLbx6lG2kDbNRFnfkgvh4eRJRPX1QCoOIWomwysCBrQ= +github.com/chzyer/readline v1.5.1 h1:upd/6fQk4src78LMRzh5vItIt361/o4uq553V8B5sGI= +github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObkaSkeBlk= +github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8= github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= @@ -61,6 +67,8 @@ github.com/leodido/go-urn v1.3.0 h1:jX8FDLfW4ThVXctBNZ+3cIWnCSnrACDV73r76dy0aQQ= github.com/leodido/go-urn v1.3.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-shellwords v1.0.12 h1:M2zGm7EW6UQJvDeQxo4T51eKPurbeFbe8WtebGE2xrk= +github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -146,6 +154,7 @@ golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod 
h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= diff --git a/internal/threagile/flags.go b/internal/threagile/flags.go index 3eab3d78..42554c36 100644 --- a/internal/threagile/flags.go +++ b/internal/threagile/flags.go @@ -7,6 +7,9 @@ package threagile const ( configFlagName = "config" + interactiveFlagName = "interactive" + interactiveFlagShorthand = "i" + verboseFlagName = "verbose" verboseFlagShorthand = "v" @@ -38,16 +41,17 @@ const ( ) type Flags struct { - configFlag string - verboseFlag bool - appDirFlag string - binDirFlag string - outputDirFlag string - tempDirFlag string - inputFileFlag string - raaPluginFlag string - serverPortFlag int - serverDirFlag string + configFlag string + verboseFlag bool + interactiveFlag bool + appDirFlag string + binDirFlag string + outputDirFlag string + tempDirFlag string + inputFileFlag string + raaPluginFlag string + serverPortFlag int + serverDirFlag string skipRiskRulesFlag string customRiskRulesPluginFlag string diff --git a/internal/threagile/quit.go b/internal/threagile/quit.go new file mode 100644 index 00000000..44aa8655 --- /dev/null +++ b/internal/threagile/quit.go @@ -0,0 +1,25 @@ +package threagile + +import ( + "github.com/spf13/cobra" + "github.com/threagile/threagile/pkg/common" + "os" +) + +func (what *Threagile) initQuit() *Threagile { + analyze := &cobra.Command{ + Use: common.QuitCommand, + Short: 
"quit client", + Aliases: []string{"exit", "bye", "x", "q"}, + Run: func(cmd *cobra.Command, args []string) { + os.Exit(0) + }, + CompletionOptions: cobra.CompletionOptions{ + DisableDefaultCmd: true, + }, + } + + what.rootCmd.AddCommand(analyze) + + return what +} diff --git a/internal/threagile/root.go b/internal/threagile/root.go index ef83131f..6b123bb4 100644 --- a/internal/threagile/root.go +++ b/internal/threagile/root.go @@ -6,6 +6,10 @@ package threagile import ( "fmt" + "github.com/chzyer/readline" + "github.com/mattn/go-shellwords" + "os" + "path/filepath" "strings" "github.com/spf13/cobra" @@ -16,6 +20,25 @@ import ( "github.com/threagile/threagile/pkg/report" ) +const ( + UsageTemplate = `Usage:{{if .Runnable}} + {{.UseLine}}{{end}}{{if .HasAvailableSubCommands}} + {{.CommandPath}} [command]{{end}}{{if gt (len .Aliases) 0}} + +Aliases: + {{.NameAndAliases}}{{end}}{{if .HasExample}} + +Examples: +{{.Example}}{{end}}{{if .HasAvailableSubCommands}} + +Available Commands:{{range .Commands}}{{if (or .IsAvailableCommand (eq .Name "help"))}} + {{rpad .Name .NamePadding }} {{.Short}}{{end}}{{end}}{{end}}{{if .HasHelpSubCommands}} + +Additional help topics:{{range .Commands}}{{if .IsAdditionalHelpTopicCommand}} + {{rpad .CommandPath .CommandPathPadding}} {{.Short}}{{end}}{{end}}{{end}} +` +) + func (what *Threagile) initRoot() *Threagile { what.rootCmd = &cobra.Command{ Use: "threagile", @@ -24,6 +47,7 @@ func (what *Threagile) initRoot() *Threagile { Long: "\n" + docs.Logo + "\n\n" + fmt.Sprintf(docs.VersionText, what.buildTimestamp) + "\n\n" + docs.Examples, SilenceErrors: true, SilenceUsage: true, + Run: what.run, CompletionOptions: cobra.CompletionOptions{ DisableDefaultCmd: true, }, @@ -39,6 +63,7 @@ func (what *Threagile) initRoot() *Threagile { what.rootCmd.PersistentFlags().StringVar(&what.flags.inputFileFlag, inputFileFlagName, defaultConfig.InputFile, "input model yaml file") what.rootCmd.PersistentFlags().StringVar(&what.flags.raaPluginFlag, 
raaPluginFlagName, defaultConfig.RAAPlugin, "RAA calculation run file name") + what.rootCmd.PersistentFlags().BoolVarP(&what.flags.interactiveFlag, interactiveFlagName, interactiveFlagShorthand, defaultConfig.Interactive, "interactive mode") what.rootCmd.PersistentFlags().BoolVarP(&what.flags.verboseFlag, verboseFlagName, verboseFlagShorthand, defaultConfig.Verbose, "verbose output") what.rootCmd.PersistentFlags().StringVar(&what.flags.configFlag, configFlagName, "", "config file") @@ -61,6 +86,121 @@ func (what *Threagile) initRoot() *Threagile { return what } +func (what *Threagile) run(*cobra.Command, []string) { + if !what.flags.interactiveFlag { + return + } + + what.rootCmd.Use = "\b" + completer := readline.NewPrefixCompleter() + for _, child := range what.rootCmd.Commands() { + what.cobraToReadline(completer, child) + } + + dir, homeError := os.UserHomeDir() + if homeError != nil { + return + } + + shell, readlineError := readline.NewEx(&readline.Config{ + Prompt: "\033[31m>>\033[0m ", + HistoryFile: filepath.Join(dir, ".threagile_history"), + HistoryLimit: 1000, + AutoComplete: completer, + InterruptPrompt: "^C", + EOFPrompt: "quit", + HistorySearchFold: true, + }) + + if readlineError != nil { + return + } + + defer func() { _ = shell.Close() }() + + for { + line, readError := shell.Readline() + if readError != nil { + return + } + + if len(strings.TrimSpace(line)) == 0 { + continue + } + + params, parseError := shellwords.Parse(line) + if parseError != nil { + fmt.Printf("failed to parse command line: %s", parseError.Error()) + continue + } + + cmd, args, findError := what.rootCmd.Find(params) + if findError != nil { + fmt.Printf("failed to find command: %s", findError.Error()) + continue + } + + if cmd == nil || cmd == what.rootCmd { + fmt.Printf("failed to find command") + continue + } + + flagsError := cmd.ParseFlags(args) + if flagsError != nil { + fmt.Printf("invalid flags: %s", flagsError.Error()) + continue + } + + if !cmd.DisableFlagParsing { + 
args = cmd.Flags().Args() + } + + argsError := cmd.ValidateArgs(args) + if argsError != nil { + _ = cmd.Help() + continue + } + + if cmd.Run != nil { + cmd.Run(cmd, args) + continue + } + + if cmd.RunE != nil { + runError := cmd.RunE(cmd, args) + if runError != nil { + fmt.Printf("error: %v \n", runError) + } + continue + } + + _ = cmd.Help() + continue + } +} + +func (c *Threagile) cobraToReadline(node readline.PrefixCompleterInterface, cmd *cobra.Command) { + cmd.SetUsageTemplate(UsageTemplate) + cmd.Use = c.usage(cmd) + pcItem := readline.PcItem(cmd.Use) + node.SetChildren(append(node.GetChildren(), pcItem)) + + for _, child := range cmd.Commands() { + c.cobraToReadline(pcItem, child) + } +} + +func (c *Threagile) usage(cmd *cobra.Command) string { + words := make([]string, 0, len(cmd.ArgAliases)+1) + words = append(words, cmd.Use) + + for _, name := range cmd.ArgAliases { + words = append(words, "["+name+"]") + } + + return strings.Join(words, " ") +} + func (what *Threagile) readCommands() *report.GenerateCommands { commands := new(report.GenerateCommands).Defaults() commands.DataFlowDiagram = what.flags.generateDataFlowDiagramFlag diff --git a/internal/threagile/threagile.go b/internal/threagile/threagile.go index 4a97deb4..a3a5939a 100644 --- a/internal/threagile/threagile.go +++ b/internal/threagile/threagile.go @@ -20,5 +20,5 @@ func (what *Threagile) Execute() { func (what *Threagile) Init(buildTimestamp string) *Threagile { what.buildTimestamp = buildTimestamp - return what.initRoot().initAbout().initRules().initExamples().initMacros().initTypes().initAnalyze().initServer() + return what.initRoot().initAbout().initRules().initExamples().initMacros().initTypes().initAnalyze().initServer().initQuit() } diff --git a/pkg/common/config.go b/pkg/common/config.go index 61130aba..8fe33471 100644 --- a/pkg/common/config.go +++ b/pkg/common/config.go @@ -12,6 +12,7 @@ import ( type Config struct { BuildTimestamp string Verbose bool + Interactive bool AppFolder 
string BinFolder string diff --git a/pkg/common/consts.go b/pkg/common/consts.go index b24c3c13..f2b833a2 100644 --- a/pkg/common/consts.go +++ b/pkg/common/consts.go @@ -33,6 +33,7 @@ const ( ) const ( + QuitCommand = "quit" AnalyzeModelCommand = "analyze-model" CreateExampleModelCommand = "create-example-model" CreateStubModelCommand = "create-stub-model" From 428119aec17d6e42acb79ba064e0e875dacc383e Mon Sep 17 00:00:00 2001 From: Yevhen Zavhorodnii Date: Fri, 2 Feb 2024 22:59:31 +0000 Subject: [PATCH 67/68] Add tests (red) to test previous commit --- pkg/model/parse_test.go | 158 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 158 insertions(+) create mode 100644 pkg/model/parse_test.go diff --git a/pkg/model/parse_test.go b/pkg/model/parse_test.go new file mode 100644 index 00000000..67c2bb10 --- /dev/null +++ b/pkg/model/parse_test.go @@ -0,0 +1,158 @@ +/* +Copyright © 2023 NAME HERE +*/ + +package model + +import ( + "testing" + + "github.com/google/uuid" + + "github.com/stretchr/testify/assert" + "github.com/threagile/threagile/pkg/input" + "github.com/threagile/threagile/pkg/security/risks" + "github.com/threagile/threagile/pkg/security/types" +) + +func TestDefaultInputNotFail(t *testing.T) { + parsedModel, err := ParseModel(createInputModel(map[string]input.TechnicalAsset{}), map[string]risks.RiskRule{}, map[string]*CustomRisk{}) + + assert.NoError(t, err) + assert.NotNil(t, parsedModel) +} + +func TestInferConfidentiality_NotSet_NoOthers_ExpectTODO(t *testing.T) { + ta := map[string]input.TechnicalAsset{} + + taUndefinedConfidentiality := createDefaultTechnicalAsset() + taUndefinedConfidentiality.Confidentiality = "" + ta[taUndefinedConfidentiality.ID] = taUndefinedConfidentiality + + parsedModel, err := ParseModel(createInputModel(ta), map[string]risks.RiskRule{}, map[string]*CustomRisk{}) + + assert.NoError(t, err) + // TODO: rename test and check if everyone agree that by default it should be public if there are no other assets + 
assert.Equal(t, types.Public, parsedModel.TechnicalAssets[taUndefinedConfidentiality.ID].Confidentiality) +} + +func TestInferConfidentiality_NotSet_ExpectHighestConfidentiality(t *testing.T) { + ta := map[string]input.TechnicalAsset{} + + taUndefinedConfidentiality := createDefaultTechnicalAsset() + taUndefinedConfidentiality.Confidentiality = "" + ta[taUndefinedConfidentiality.ID] = taUndefinedConfidentiality + + taLowerConfidentiality := createDefaultTechnicalAsset() + taLowerConfidentiality.Confidentiality = "restricted" + ta[taLowerConfidentiality.ID] = taLowerConfidentiality + + taHigherConfidentiality := createDefaultTechnicalAsset() + taHigherConfidentiality.Confidentiality = "confidential" + ta[taLowerConfidentiality.ID] = taHigherConfidentiality + + parsedModel, err := ParseModel(createInputModel(ta), map[string]risks.RiskRule{}, map[string]*CustomRisk{}) + + assert.NoError(t, err) + assert.Equal(t, types.Confidential, parsedModel.TechnicalAssets[taUndefinedConfidentiality.ID].Confidentiality) + assert.Equal(t, types.Confidential, parsedModel.TechnicalAssets[taLowerConfidentiality.ID].Confidentiality) + assert.Equal(t, types.Confidential, parsedModel.TechnicalAssets[taHigherConfidentiality.ID].Confidentiality) +} + +func TestInferIntegrity_NotSet_NoOthers_ExpectTODO(t *testing.T) { + ta := map[string]input.TechnicalAsset{} + + taUndefinedIntegrity := createDefaultTechnicalAsset() + taUndefinedIntegrity.Integrity = "" + ta[taUndefinedIntegrity.ID] = taUndefinedIntegrity + + parsedModel, err := ParseModel(createInputModel(ta), map[string]risks.RiskRule{}, map[string]*CustomRisk{}) + + assert.NoError(t, err) + // TODO: rename test and check if everyone agree that by default it should be archive if there are no other assets + assert.Equal(t, types.Archive, parsedModel.TechnicalAssets[taUndefinedIntegrity.ID].Integrity) +} + +func TestInferIntegrity_NotSet_ExpectHighestIntegrity(t *testing.T) { + ta := map[string]input.TechnicalAsset{} + + taUndefinedIntegrity 
:= createDefaultTechnicalAsset() + taUndefinedIntegrity.Integrity = "" + ta[taUndefinedIntegrity.ID] = taUndefinedIntegrity + + taLowerIntegrity := createDefaultTechnicalAsset() + taLowerIntegrity.Integrity = "important" + ta[taLowerIntegrity.ID] = taLowerIntegrity + + taHigherConfidentiality := createDefaultTechnicalAsset() + taHigherConfidentiality.Confidentiality = "critical" + ta[taHigherConfidentiality.ID] = taHigherConfidentiality + + parsedModel, err := ParseModel(createInputModel(ta), map[string]risks.RiskRule{}, map[string]*CustomRisk{}) + + assert.NoError(t, err) + assert.Equal(t, types.Critical, parsedModel.TechnicalAssets[taUndefinedIntegrity.ID].Integrity) + assert.Equal(t, types.Important, parsedModel.TechnicalAssets[taLowerIntegrity.ID].Integrity) + assert.Equal(t, types.Critical, parsedModel.TechnicalAssets[taHigherConfidentiality.ID].Integrity) +} + +func TestInferAvailability_NotSet_NoOthers_ExpectTODO(t *testing.T) { + ta := map[string]input.TechnicalAsset{} + + taUndefinedIntegrity := createDefaultTechnicalAsset() + taUndefinedIntegrity.Integrity = "" + ta[taUndefinedIntegrity.ID] = taUndefinedIntegrity + + parsedModel, err := ParseModel(createInputModel(ta), map[string]risks.RiskRule{}, map[string]*CustomRisk{}) + + assert.NoError(t, err) + assert.Equal(t, types.Archive, parsedModel.TechnicalAssets[taUndefinedIntegrity.ID].Integrity) +} + +func TestInferAvailability_NotSet_ExpectHighestAvailability(t *testing.T) { + ta := map[string]input.TechnicalAsset{} + + taUndefinedAvailability := createDefaultTechnicalAsset() + taUndefinedAvailability.Availability = "" + ta[taUndefinedAvailability.ID] = taUndefinedAvailability + + taLowerAvailability := createDefaultTechnicalAsset() + taLowerAvailability.Availability = "important" + ta[taLowerAvailability.ID] = taLowerAvailability + + taHigherAvailability := createDefaultTechnicalAsset() + taHigherAvailability.Availability = "critical" + ta[taHigherAvailability.ID] = taHigherAvailability + + parsedModel, 
err := ParseModel(createInputModel(ta), map[string]risks.RiskRule{}, map[string]*CustomRisk{}) + + assert.NoError(t, err) + assert.Equal(t, types.Critical, parsedModel.TechnicalAssets[taUndefinedAvailability.ID].Availability) + assert.Equal(t, types.Important, parsedModel.TechnicalAssets[taLowerAvailability.ID].Availability) + assert.Equal(t, types.Critical, parsedModel.TechnicalAssets[taHigherAvailability.ID].Availability) +} + +func createInputModel(technicalAssets map[string]input.TechnicalAsset) *input.Model { + return &input.Model{ + TechnicalAssets: technicalAssets, + + // set some dummy values to bypass validation + BusinessCriticality: "archive", + } +} + +func createDefaultTechnicalAsset() input.TechnicalAsset { + return input.TechnicalAsset{ + ID: uuid.New().String(), + // those values are required to bypass validation + Usage: "business", + Type: "process", + Size: "system", + Technology: "unknown-technology", + Encryption: "none", + Machine: "virtual", + Confidentiality: "public", + Integrity: "archive", + Availability: "archive", + } +} From 5f76e14a1cc60d0f539c36fa9f8417373de77898 Mon Sep 17 00:00:00 2001 From: Joerg Reichelt Date: Fri, 2 Feb 2024 15:52:00 -0800 Subject: [PATCH 68/68] fixed inference tests --- pkg/model/parse_test.go | 178 +++++++++++++++++++++++----------------- 1 file changed, 104 insertions(+), 74 deletions(-) diff --git a/pkg/model/parse_test.go b/pkg/model/parse_test.go index 67c2bb10..4718e3dc 100644 --- a/pkg/model/parse_test.go +++ b/pkg/model/parse_test.go @@ -16,132 +16,151 @@ import ( ) func TestDefaultInputNotFail(t *testing.T) { - parsedModel, err := ParseModel(createInputModel(map[string]input.TechnicalAsset{}), map[string]risks.RiskRule{}, map[string]*CustomRisk{}) + parsedModel, err := ParseModel(createInputModel(make(map[string]input.TechnicalAsset), make(map[string]input.DataAsset)), make(map[string]risks.RiskRule), make(map[string]*CustomRisk)) assert.NoError(t, err) assert.NotNil(t, parsedModel) } func 
TestInferConfidentiality_NotSet_NoOthers_ExpectTODO(t *testing.T) { - ta := map[string]input.TechnicalAsset{} + ta := make(map[string]input.TechnicalAsset) + da := make(map[string]input.DataAsset) - taUndefinedConfidentiality := createDefaultTechnicalAsset() - taUndefinedConfidentiality.Confidentiality = "" - ta[taUndefinedConfidentiality.ID] = taUndefinedConfidentiality - - parsedModel, err := ParseModel(createInputModel(ta), map[string]risks.RiskRule{}, map[string]*CustomRisk{}) + _, err := ParseModel(createInputModel(ta, da), make(map[string]risks.RiskRule), make(map[string]*CustomRisk)) + // TODO: rename test and check if everyone agree that by default it should be public if there are no other assets assert.NoError(t, err) - // TODO: rename test and check if everyone agree that by default it should be public if there are no other assets - assert.Equal(t, types.Public, parsedModel.TechnicalAssets[taUndefinedConfidentiality.ID].Confidentiality) } -func TestInferConfidentiality_NotSet_ExpectHighestConfidentiality(t *testing.T) { - ta := map[string]input.TechnicalAsset{} +func TestInferConfidentiality_ExpectHighestConfidentiality(t *testing.T) { + ta := make(map[string]input.TechnicalAsset) + da := make(map[string]input.DataAsset) + + daConfidentialConfidentiality := createDataAsset(types.Confidential, types.Critical, types.Critical) + da[daConfidentialConfidentiality.ID] = daConfidentialConfidentiality + + daRestrictedConfidentiality := createDataAsset(types.Restricted, types.Important, types.Important) + da[daRestrictedConfidentiality.ID] = daRestrictedConfidentiality - taUndefinedConfidentiality := createDefaultTechnicalAsset() - taUndefinedConfidentiality.Confidentiality = "" - ta[taUndefinedConfidentiality.ID] = taUndefinedConfidentiality + daPublicConfidentiality := createDataAsset(types.Public, types.Archive, types.Archive) + da[daPublicConfidentiality.ID] = daPublicConfidentiality - taLowerConfidentiality := createDefaultTechnicalAsset() - 
taLowerConfidentiality.Confidentiality = "restricted" - ta[taLowerConfidentiality.ID] = taLowerConfidentiality + taWithConfidentialConfidentialityDataAsset := createTechnicalAsset(types.Internal, types.Operational, types.Operational) + taWithConfidentialConfidentialityDataAsset.DataAssetsProcessed = append(taWithConfidentialConfidentialityDataAsset.DataAssetsProcessed, daConfidentialConfidentiality.ID) + ta[taWithConfidentialConfidentialityDataAsset.ID] = taWithConfidentialConfidentialityDataAsset - taHigherConfidentiality := createDefaultTechnicalAsset() - taHigherConfidentiality.Confidentiality = "confidential" - ta[taLowerConfidentiality.ID] = taHigherConfidentiality + taWithRestrictedConfidentialityDataAsset := createTechnicalAsset(types.Internal, types.Operational, types.Operational) + taWithRestrictedConfidentialityDataAsset.DataAssetsProcessed = append(taWithRestrictedConfidentialityDataAsset.DataAssetsProcessed, daRestrictedConfidentiality.ID) + ta[taWithRestrictedConfidentialityDataAsset.ID] = taWithRestrictedConfidentialityDataAsset - parsedModel, err := ParseModel(createInputModel(ta), map[string]risks.RiskRule{}, map[string]*CustomRisk{}) + taWithPublicConfidentialityDataAsset := createTechnicalAsset(types.Internal, types.Operational, types.Operational) + taWithPublicConfidentialityDataAsset.DataAssetsProcessed = append(taWithPublicConfidentialityDataAsset.DataAssetsProcessed, daPublicConfidentiality.ID) + ta[taWithPublicConfidentialityDataAsset.ID] = taWithPublicConfidentialityDataAsset + + parsedModel, err := ParseModel(createInputModel(ta, da), make(map[string]risks.RiskRule), make(map[string]*CustomRisk)) assert.NoError(t, err) - assert.Equal(t, types.Confidential, parsedModel.TechnicalAssets[taUndefinedConfidentiality.ID].Confidentiality) - assert.Equal(t, types.Confidential, parsedModel.TechnicalAssets[taLowerConfidentiality.ID].Confidentiality) - assert.Equal(t, types.Confidential, 
parsedModel.TechnicalAssets[taHigherConfidentiality.ID].Confidentiality) + assert.Equal(t, types.Confidential, parsedModel.TechnicalAssets[taWithConfidentialConfidentialityDataAsset.ID].Confidentiality) + assert.Equal(t, types.Restricted, parsedModel.TechnicalAssets[taWithRestrictedConfidentialityDataAsset.ID].Confidentiality) + assert.Equal(t, types.Internal, parsedModel.TechnicalAssets[taWithPublicConfidentialityDataAsset.ID].Confidentiality) } func TestInferIntegrity_NotSet_NoOthers_ExpectTODO(t *testing.T) { - ta := map[string]input.TechnicalAsset{} - - taUndefinedIntegrity := createDefaultTechnicalAsset() - taUndefinedIntegrity.Integrity = "" - ta[taUndefinedIntegrity.ID] = taUndefinedIntegrity + ta := make(map[string]input.TechnicalAsset) + da := make(map[string]input.DataAsset) - parsedModel, err := ParseModel(createInputModel(ta), map[string]risks.RiskRule{}, map[string]*CustomRisk{}) + _, err := ParseModel(createInputModel(ta, da), make(map[string]risks.RiskRule), make(map[string]*CustomRisk)) + // TODO: rename test and check if everyone agree that by default it should be public if there are no other assets assert.NoError(t, err) - // TODO: rename test and check if everyone agree that by default it should be archive if there are no other assets - assert.Equal(t, types.Archive, parsedModel.TechnicalAssets[taUndefinedIntegrity.ID].Integrity) } -func TestInferIntegrity_NotSet_ExpectHighestIntegrity(t *testing.T) { - ta := map[string]input.TechnicalAsset{} +func TestInferIntegrity_ExpectHighestIntegrity(t *testing.T) { + ta := make(map[string]input.TechnicalAsset) + da := make(map[string]input.DataAsset) + + daCriticalIntegrity := createDataAsset(types.Confidential, types.Critical, types.Critical) + da[daCriticalIntegrity.ID] = daCriticalIntegrity + + daImportantIntegrity := createDataAsset(types.Restricted, types.Important, types.Important) + da[daImportantIntegrity.ID] = daImportantIntegrity - taUndefinedIntegrity := createDefaultTechnicalAsset() - 
taUndefinedIntegrity.Integrity = "" - ta[taUndefinedIntegrity.ID] = taUndefinedIntegrity + daArchiveIntegrity := createDataAsset(types.Public, types.Archive, types.Archive) + da[daArchiveIntegrity.ID] = daArchiveIntegrity - taLowerIntegrity := createDefaultTechnicalAsset() - taLowerIntegrity.Integrity = "important" - ta[taLowerIntegrity.ID] = taLowerIntegrity + taWithCriticalIntegrityDataAsset := createTechnicalAsset(types.Internal, types.Operational, types.Operational) + taWithCriticalIntegrityDataAsset.DataAssetsProcessed = append(taWithCriticalIntegrityDataAsset.DataAssetsProcessed, daCriticalIntegrity.ID) + ta[taWithCriticalIntegrityDataAsset.ID] = taWithCriticalIntegrityDataAsset - taHigherConfidentiality := createDefaultTechnicalAsset() - taHigherConfidentiality.Confidentiality = "critical" - ta[taHigherConfidentiality.ID] = taHigherConfidentiality + taWithImportantIntegrityDataAsset := createTechnicalAsset(types.Internal, types.Operational, types.Operational) + taWithImportantIntegrityDataAsset.DataAssetsProcessed = append(taWithImportantIntegrityDataAsset.DataAssetsProcessed, daImportantIntegrity.ID) + ta[taWithImportantIntegrityDataAsset.ID] = taWithImportantIntegrityDataAsset - parsedModel, err := ParseModel(createInputModel(ta), map[string]risks.RiskRule{}, map[string]*CustomRisk{}) + taWithArchiveIntegrityDataAsset := createTechnicalAsset(types.Internal, types.Operational, types.Operational) + taWithArchiveIntegrityDataAsset.DataAssetsProcessed = append(taWithArchiveIntegrityDataAsset.DataAssetsProcessed, daArchiveIntegrity.ID) + ta[taWithArchiveIntegrityDataAsset.ID] = taWithArchiveIntegrityDataAsset + + parsedModel, err := ParseModel(createInputModel(ta, da), make(map[string]risks.RiskRule), make(map[string]*CustomRisk)) assert.NoError(t, err) - assert.Equal(t, types.Critical, parsedModel.TechnicalAssets[taUndefinedIntegrity.ID].Integrity) - assert.Equal(t, types.Important, parsedModel.TechnicalAssets[taLowerIntegrity.ID].Integrity) - assert.Equal(t, 
types.Critical, parsedModel.TechnicalAssets[taHigherConfidentiality.ID].Integrity) + assert.Equal(t, types.Critical, parsedModel.TechnicalAssets[taWithCriticalIntegrityDataAsset.ID].Integrity) + assert.Equal(t, types.Important, parsedModel.TechnicalAssets[taWithImportantIntegrityDataAsset.ID].Integrity) + assert.Equal(t, types.Operational, parsedModel.TechnicalAssets[taWithArchiveIntegrityDataAsset.ID].Integrity) } func TestInferAvailability_NotSet_NoOthers_ExpectTODO(t *testing.T) { - ta := map[string]input.TechnicalAsset{} - - taUndefinedIntegrity := createDefaultTechnicalAsset() - taUndefinedIntegrity.Integrity = "" - ta[taUndefinedIntegrity.ID] = taUndefinedIntegrity + ta := make(map[string]input.TechnicalAsset) + da := make(map[string]input.DataAsset) - parsedModel, err := ParseModel(createInputModel(ta), map[string]risks.RiskRule{}, map[string]*CustomRisk{}) + _, err := ParseModel(createInputModel(ta, da), make(map[string]risks.RiskRule), make(map[string]*CustomRisk)) assert.NoError(t, err) - assert.Equal(t, types.Archive, parsedModel.TechnicalAssets[taUndefinedIntegrity.ID].Integrity) } -func TestInferAvailability_NotSet_ExpectHighestAvailability(t *testing.T) { - ta := map[string]input.TechnicalAsset{} +func TestInferAvailability_ExpectHighestAvailability(t *testing.T) { + ta := make(map[string]input.TechnicalAsset) + da := make(map[string]input.DataAsset) + + daCriticalAvailability := createDataAsset(types.Confidential, types.Critical, types.Critical) + da[daCriticalAvailability.ID] = daCriticalAvailability + + daImportantAvailability := createDataAsset(types.Restricted, types.Important, types.Important) + da[daImportantAvailability.ID] = daImportantAvailability + + daArchiveAvailability := createDataAsset(types.Public, types.Archive, types.Archive) + da[daArchiveAvailability.ID] = daArchiveAvailability - taUndefinedAvailability := createDefaultTechnicalAsset() - taUndefinedAvailability.Availability = "" - ta[taUndefinedAvailability.ID] = 
taUndefinedAvailability + taWithCriticalAvailabilityDataAsset := createTechnicalAsset(types.Internal, types.Operational, types.Operational) + taWithCriticalAvailabilityDataAsset.DataAssetsProcessed = append(taWithCriticalAvailabilityDataAsset.DataAssetsProcessed, daCriticalAvailability.ID) + ta[taWithCriticalAvailabilityDataAsset.ID] = taWithCriticalAvailabilityDataAsset - taLowerAvailability := createDefaultTechnicalAsset() - taLowerAvailability.Availability = "important" - ta[taLowerAvailability.ID] = taLowerAvailability + taWithImportantAvailabilityDataAsset := createTechnicalAsset(types.Internal, types.Operational, types.Operational) + taWithImportantAvailabilityDataAsset.DataAssetsProcessed = append(taWithImportantAvailabilityDataAsset.DataAssetsProcessed, daImportantAvailability.ID) + ta[taWithImportantAvailabilityDataAsset.ID] = taWithImportantAvailabilityDataAsset - taHigherAvailability := createDefaultTechnicalAsset() - taHigherAvailability.Availability = "critical" - ta[taHigherAvailability.ID] = taHigherAvailability + taWithArchiveAvailabilityDataAsset := createTechnicalAsset(types.Internal, types.Operational, types.Operational) + taWithArchiveAvailabilityDataAsset.DataAssetsProcessed = append(taWithArchiveAvailabilityDataAsset.DataAssetsProcessed, daArchiveAvailability.ID) + ta[taWithArchiveAvailabilityDataAsset.ID] = taWithArchiveAvailabilityDataAsset - parsedModel, err := ParseModel(createInputModel(ta), map[string]risks.RiskRule{}, map[string]*CustomRisk{}) + parsedModel, err := ParseModel(createInputModel(ta, da), make(map[string]risks.RiskRule), make(map[string]*CustomRisk)) assert.NoError(t, err) - assert.Equal(t, types.Critical, parsedModel.TechnicalAssets[taUndefinedAvailability.ID].Availability) - assert.Equal(t, types.Important, parsedModel.TechnicalAssets[taLowerAvailability.ID].Availability) - assert.Equal(t, types.Critical, parsedModel.TechnicalAssets[taHigherAvailability.ID].Availability) + assert.Equal(t, types.Critical, 
parsedModel.TechnicalAssets[taWithCriticalAvailabilityDataAsset.ID].Availability) + assert.Equal(t, types.Important, parsedModel.TechnicalAssets[taWithImportantAvailabilityDataAsset.ID].Availability) + assert.Equal(t, types.Operational, parsedModel.TechnicalAssets[taWithArchiveAvailabilityDataAsset.ID].Availability) } -func createInputModel(technicalAssets map[string]input.TechnicalAsset) *input.Model { +func createInputModel(technicalAssets map[string]input.TechnicalAsset, dataAssets map[string]input.DataAsset) *input.Model { return &input.Model{ TechnicalAssets: technicalAssets, + DataAssets: dataAssets, // set some dummy values to bypass validation BusinessCriticality: "archive", } } -func createDefaultTechnicalAsset() input.TechnicalAsset { +func createTechnicalAsset(confidentiality types.Confidentiality, integrity types.Criticality, availability types.Criticality) input.TechnicalAsset { return input.TechnicalAsset{ ID: uuid.New().String(), // those values are required to bypass validation @@ -151,8 +170,19 @@ func createDefaultTechnicalAsset() input.TechnicalAsset { Technology: "unknown-technology", Encryption: "none", Machine: "virtual", - Confidentiality: "public", - Integrity: "archive", - Availability: "archive", + Confidentiality: confidentiality.String(), + Integrity: integrity.String(), + Availability: availability.String(), + } +} + +func createDataAsset(confidentiality types.Confidentiality, integrity types.Criticality, availability types.Criticality) input.DataAsset { + return input.DataAsset{ + ID: uuid.New().String(), + Usage: "business", + Quantity: "few", + Confidentiality: confidentiality.String(), + Integrity: integrity.String(), + Availability: availability.String(), } }