diff --git a/.gitignore b/.gitignore index 02c23c4..8748ddc 100644 --- a/.gitignore +++ b/.gitignore @@ -67,3 +67,4 @@ claude.md # Puppeteer cache .local-chromium/ +.lcm/ diff --git a/package-lock.json b/package-lock.json index 76f6098..fc1759e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,21 +1,23 @@ { "name": "isms-build", - "version": "1.34.1", + "version": "1.35.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "isms-build", - "version": "1.34.1", + "version": "1.35.0", "dependencies": { "bcryptjs": "^2.4.3", "better-sqlite3": "^12.6.2", "dotenv": "^16.4.5", "express": "^4.18.2", "jsonwebtoken": "^9.0.2", + "knex": "^3.2.9", "multer": "^2.1.1", "nodemailer": "^8.0.1", "pdf-parse": "1.1.1", + "pg": "^8.20.0", "qrcode": "^1.5.4", "xml2js": "^0.6.2" }, @@ -2476,6 +2478,12 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "license": "MIT" }, + "node_modules/colorette": { + "version": "2.0.19", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.19.tgz", + "integrity": "sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==", + "license": "MIT" + }, "node_modules/combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -2489,6 +2497,15 @@ "node": ">= 0.8" } }, + "node_modules/commander": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz", + "integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==", + "license": "MIT", + "engines": { + "node": ">=14" + } + }, "node_modules/component-emitter": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.1.tgz", @@ -2970,7 +2987,6 @@ "version": "3.2.0", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", "integrity": 
"sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", - "dev": true, "license": "MIT", "engines": { "node": ">=6" @@ -3014,6 +3030,15 @@ "source-map": "~0.6.1" } }, + "node_modules/esm": { + "version": "3.2.25", + "resolved": "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz", + "integrity": "sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/esprima": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", @@ -3485,7 +3510,6 @@ "version": "0.1.0", "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", - "dev": true, "license": "MIT", "engines": { "node": ">=8.0.0" @@ -3557,6 +3581,12 @@ "dev": true, "license": "MIT" }, + "node_modules/getopts": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/getopts/-/getopts-2.3.0.tgz", + "integrity": "sha512-5eDf9fuSXwxBL6q5HX+dhDj+dslFGWzU5thZ9kNKUkcPtaPdatmUFKwHFrLb/uf/WpA4BHET+AX3Scl56cAjpA==", + "license": "MIT" + }, "node_modules/github-from-package": { "version": "0.0.0", "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", @@ -3882,6 +3912,15 @@ "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", "license": "ISC" }, + "node_modules/interpret": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-2.2.0.tgz", + "integrity": "sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, "node_modules/ip-address": { "version": "10.1.0", "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.1.0.tgz", @@ -3908,6 +3947,21 @@ 
"dev": true, "license": "MIT" }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -4786,6 +4840,86 @@ "safe-buffer": "^5.0.1" } }, + "node_modules/knex": { + "version": "3.2.9", + "resolved": "https://registry.npmjs.org/knex/-/knex-3.2.9.tgz", + "integrity": "sha512-dtAILTjBMaG8YloP5oBxohDIKyIsdQ/TkcVvSjhsksvsjeH63Y0PADyuMDfNZKbVT3Rlx3vEYVBlecbPT/KerA==", + "license": "MIT", + "dependencies": { + "colorette": "2.0.19", + "commander": "^10.0.0", + "debug": "4.3.4", + "escalade": "^3.1.1", + "esm": "^3.2.25", + "get-package-type": "^0.1.0", + "getopts": "2.3.0", + "interpret": "^2.2.0", + "lodash": "^4.17.21", + "pg-connection-string": "2.6.2", + "rechoir": "^0.8.0", + "resolve-from": "^5.0.0", + "tarn": "^3.0.2", + "tildify": "2.0.0" + }, + "bin": { + "knex": "bin/cli.js" + }, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "pg-query-stream": "^4.14.0" + }, + "peerDependenciesMeta": { + "better-sqlite3": { + "optional": true + }, + "mysql": { + "optional": true + }, + "mysql2": { + "optional": true + }, + "pg": { + "optional": true + }, + "pg-native": { + "optional": true + }, + "pg-query-stream": { + "optional": true + }, + "sqlite3": { + "optional": true + }, + "tedious": { + "optional": true + } + } + }, + "node_modules/knex/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": 
"sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "license": "MIT", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/knex/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "license": "MIT" + }, "node_modules/leven": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", @@ -4815,6 +4949,12 @@ "node": ">=8" } }, + "node_modules/lodash": { + "version": "4.18.1", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.18.1.tgz", + "integrity": "sha512-dMInicTPVE8d1e5otfwmmjlxkZoUpiVLwyeTdUsi/Caj/gfzzblBcCE5sRHV/AsjuCmxWrte2TNGSYuCeCq+0Q==", + "license": "MIT" + }, "node_modules/lodash.includes": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", @@ -5514,6 +5654,12 @@ "node": ">=8" } }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "license": "MIT" + }, "node_modules/path-scurry": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", @@ -5579,6 +5725,101 @@ "dev": true, "license": "MIT" }, + "node_modules/pg": { + "version": "8.20.0", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.20.0.tgz", + "integrity": "sha512-ldhMxz2r8fl/6QkXnBD3CR9/xg694oT6DZQ2s6c/RI28OjtSOpxnPrUCGOBJ46RCUxcWdx3p6kw/xnDHjKvaRA==", + "license": "MIT", + "dependencies": { + "pg-connection-string": "^2.12.0", + "pg-pool": "^3.13.0", + "pg-protocol": "^1.13.0", + "pg-types": "2.2.0", + "pgpass": "1.0.5" + }, + 
"engines": { + "node": ">= 16.0.0" + }, + "optionalDependencies": { + "pg-cloudflare": "^1.3.0" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-cloudflare": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.3.0.tgz", + "integrity": "sha512-6lswVVSztmHiRtD6I8hw4qP/nDm1EJbKMRhf3HCYaqud7frGysPv7FYJ5noZQdhQtN2xJnimfMtvQq21pdbzyQ==", + "license": "MIT", + "optional": true + }, + "node_modules/pg-connection-string": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.6.2.tgz", + "integrity": "sha512-ch6OwaeaPYcova4kKZ15sbJ2hKb/VP48ZD2gE7i1J+L4MspCtBMAx8nMgz7bksc7IojCIIWuEhHibSMFH8m8oA==", + "license": "MIT" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "license": "ISC", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.13.0", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.13.0.tgz", + "integrity": "sha512-gB+R+Xud1gLFuRD/QgOIgGOBE2KCQPaPwkzBBGC9oG69pHTkhQeIuejVIk3/cnDyX39av2AxomQiyPT13WKHQA==", + "license": "MIT", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.13.0.tgz", + "integrity": "sha512-zzdvXfS6v89r6v7OcFCHfHlyG/wvry1ALxZo4LqgUoy7W9xhBDMaqOuMiF3qEV45VqsN6rdlcehHrfDtlCPc8w==", + "license": "MIT" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "license": "MIT", + "dependencies": { + 
"pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pg/node_modules/pg-connection-string": { + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.12.0.tgz", + "integrity": "sha512-U7qg+bpswf3Cs5xLzRqbXbQl85ng0mfSV/J0nnA31MCLgvEaAo7CIhmeyrmJpOr7o+zm0rXK+hNnT5l9RHkCkQ==", + "license": "MIT" + }, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "license": "MIT", + "dependencies": { + "split2": "^4.1.0" + } + }, "node_modules/picocolors": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", @@ -5631,6 +5872,45 @@ "node": ">=10.13.0" } }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.1.tgz", + "integrity": "sha512-5+5HqXnsZPE65IJZSMkZtURARZelel2oXUEO8rH83VS/hxH5vv1uHquPg5wZs8yMAfdv971IU+kcPUczi7NVBQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + 
"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "license": "MIT", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/prebuild-install": { "version": "7.1.3", "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.3.tgz", @@ -6072,6 +6352,18 @@ "node": ">= 6" } }, + "node_modules/rechoir": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.8.0.tgz", + "integrity": "sha512-/vxpCXddiX8NGfGO/mTafwjq4aFa/71pvamip0++IQk3zG8cbCj0fifNPrjjF1XMXUne91jL9OoxmdykoEtifQ==", + "license": "MIT", + "dependencies": { + "resolve": "^1.20.0" + }, + "engines": { + "node": ">= 10.13.0" + } + }, "node_modules/require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", @@ -6087,6 +6379,26 @@ "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", "license": "ISC" }, + "node_modules/resolve": { + "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/resolve-cwd": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", @@ -6104,7 +6416,6 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", "integrity": 
"sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", - "dev": true, "license": "MIT", "engines": { "node": ">=8" @@ -6462,6 +6773,15 @@ "source-map": "^0.6.0" } }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "license": "ISC", + "engines": { + "node": ">= 10.x" + } + }, "node_modules/sprintf-js": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", @@ -6807,6 +7127,18 @@ "node": ">=8" } }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/synckit": { "version": "0.11.12", "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.11.12.tgz", @@ -6851,6 +7183,15 @@ "node": ">=6" } }, + "node_modules/tarn": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/tarn/-/tarn-3.0.2.tgz", + "integrity": "sha512-51LAVKUSZSVfI05vjPESNc5vwqqZpbXCsU+/+wxlOrUjk2SnFTt97v9ZgQrD4YmxYW1Px6w2KjaDitCfkvgxMQ==", + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, "node_modules/teex": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/teex/-/teex-1.0.1.tgz", @@ -6932,6 +7273,15 @@ "b4a": "^1.6.4" } }, + "node_modules/tildify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tildify/-/tildify-2.0.0.tgz", + "integrity": "sha512-Cc+OraorugtXNfs50hU9KS369rFXCfgGLpfCfvlc+Ud5u6VWmUQsOAa9HbTvheQdYnrdJqqv1e5oIqXppMYnSw==", + "license": "MIT", + "engines": { + "node": 
">=8" + } + }, "node_modules/tmpl": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", @@ -7335,6 +7685,15 @@ "node": ">=4.0" } }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "license": "MIT", + "engines": { + "node": ">=0.4" + } + }, "node_modules/y18n": { "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", diff --git a/package.json b/package.json index b0e7995..daa1ea9 100644 --- a/package.json +++ b/package.json @@ -17,9 +17,11 @@ "dotenv": "^16.4.5", "express": "^4.18.2", "jsonwebtoken": "^9.0.2", + "knex": "^3.2.9", "multer": "^2.1.1", "nodemailer": "^8.0.1", "pdf-parse": "1.1.1", + "pg": "^8.20.0", "qrcode": "^1.5.4", "xml2js": "^0.6.2" }, diff --git a/server/db/ackStore.js b/server/db/ackStore.js index 364d8b6..363277b 100644 --- a/server/db/ackStore.js +++ b/server/db/ackStore.js @@ -1,4 +1,5 @@ // © 2026 Claude Hecker — ISMS Builder — AGPL-3.0 +const STORAGE_BACKEND = (process.env.STORAGE_BACKEND || 'json').toLowerCase() // Policy Acknowledgement Store // Manages policy distribution campaigns and individual acknowledgements. 
@@ -249,7 +250,7 @@ function getSummary() { } } -module.exports = { +const _jsonExports = { getDistributions, getDistribution, getDistributionWithStats, @@ -266,3 +267,11 @@ module.exports = { deleteAck, getSummary, } + +if (STORAGE_BACKEND !== 'json') { + const _knex = require('./stores/ackStore') + _knex.init().catch(e => console.error('[ackStore] Knex init:', e.message)) + module.exports = _knex +} else { + module.exports = _jsonExports +} diff --git a/server/db/assetStore.js b/server/db/assetStore.js index 1c6bc3c..906d535 100644 --- a/server/db/assetStore.js +++ b/server/db/assetStore.js @@ -1,6 +1,8 @@ // © 2026 Claude Hecker — ISMS Builder V 1.29 — AGPL-3.0 'use strict' +const STORAGE_BACKEND = (process.env.STORAGE_BACKEND || 'json').toLowerCase() + const fs = require('fs') const path = require('path') @@ -178,4 +180,12 @@ function getSummary() { } } -module.exports = { getAll, getById, create, update, remove, getSummary, ASSET_TYPES, CATEGORIES } +const _jsonExports = { getAll, getById, create, update, remove, getSummary, ASSET_TYPES, CATEGORIES } + +if (STORAGE_BACKEND !== 'json') { + const _knex = require('./stores/assetStore') + _knex.init().catch(e => console.error('[assetStore] Knex init:', e.message)) + module.exports = _knex +} else { + module.exports = _jsonExports +} diff --git a/server/db/auditStore.js b/server/db/auditStore.js index 4047689..1b0b765 100644 --- a/server/db/auditStore.js +++ b/server/db/auditStore.js @@ -2,6 +2,8 @@ // Append-only audit log store. // Entries are stored in data/audit-log.json, capped at MAX_ENTRIES (rolling). 
+const STORAGE_BACKEND = (process.env.STORAGE_BACKEND || 'json').toLowerCase() + const fs = require('fs') const path = require('path') function uuidv4() { @@ -79,4 +81,12 @@ function clear() { save([]) } -module.exports = { append, query, clear } +const _jsonExports = { append, query, clear } + +if (STORAGE_BACKEND !== 'json') { + const _knex = require('./stores/auditStore') + _knex.init().catch(e => console.error('[auditStore] Knex init:', e.message)) + module.exports = _knex +} else { + module.exports = _jsonExports +} diff --git a/server/db/bcmStore.js b/server/db/bcmStore.js index 29b0f21..d580048 100644 --- a/server/db/bcmStore.js +++ b/server/db/bcmStore.js @@ -1,5 +1,8 @@ // © 2026 Claude Hecker — ISMS Builder V 1.29 — AGPL-3.0 'use strict' + +const STORAGE_BACKEND = (process.env.STORAGE_BACKEND || 'json').toLowerCase() + const fs = require('fs') const path = require('path') @@ -261,9 +264,17 @@ function getSummary() { } } -module.exports = { +const _jsonExports = { getBia, getBiaById, createBia, updateBia, deleteBia, getPlans, getPlanById, createPlan, updatePlan, deletePlan, getExercises, getExerciseById, createExercise, updateExercise, deleteExercise, getSummary, } + +if (STORAGE_BACKEND !== 'json') { + const _knex = require('./stores/bcmStore') + _knex.init().catch(e => console.error('[bcmStore] Knex init:', e.message)) + module.exports = _knex +} else { + module.exports = _jsonExports +} diff --git a/server/db/customListsStore.js b/server/db/customListsStore.js index d5cc188..45add5e 100644 --- a/server/db/customListsStore.js +++ b/server/db/customListsStore.js @@ -1,4 +1,7 @@ // © 2026 Claude Hecker — ISMS Builder V 1.29 — AGPL-3.0 + +const STORAGE_BACKEND = (process.env.STORAGE_BACKEND || 'json').toLowerCase() + // Persistent store for editable dropdown lists used throughout the application. // Data saved to data/custom-lists.json; falls back to defaults if file missing. 
@@ -89,4 +92,12 @@ function resetList(listId) { return DEFAULTS[listId] } -module.exports = { getAll, getList, setList, resetList, ALLOWED_LIST_IDS, DEFAULTS } +const _jsonExports = { getAll, getList, setList, resetList, ALLOWED_LIST_IDS, DEFAULTS } + +if (STORAGE_BACKEND !== 'json') { + const _knex = require('./stores/customListsStore') + _knex.init().catch(e => console.error('[customListsStore] Knex init:', e.message)) + module.exports = _knex +} else { + module.exports = _jsonExports +} diff --git a/server/db/entityStore.js b/server/db/entityStore.js index 0df0c74..0a2d732 100644 --- a/server/db/entityStore.js +++ b/server/db/entityStore.js @@ -1,6 +1,9 @@ // © 2026 Claude Hecker — ISMS Builder V 1.29 — AGPL-3.0 // Entity Store – Konzernstruktur (Holding + Gesellschaften) // Persistenz: data/entities.json + +const STORAGE_BACKEND = (process.env.STORAGE_BACKEND || 'json').toLowerCase() + const fs = require('fs') const path = require('path') @@ -37,14 +40,13 @@ function genId() { return `entity_${Date.now()}` } let store = load() -module.exports = { +const _jsonExports = { init: () => { store = load() }, getAll: () => store.filter(e => e.active !== false), getById: (id) => store.find(e => e.id === id) || null, - // Hierarchische Baumstruktur getTree: () => { const all = store.filter(e => e.active !== false) const map = {} @@ -84,10 +86,17 @@ module.exports = { delete: (id) => { const idx = store.findIndex(e => e.id === id) if (idx < 0) return false - // Soft-delete: deaktivieren statt löschen (Referenzintegrität) store[idx].active = false store[idx].updatedAt = nowISO() save(store) return true } } + +if (STORAGE_BACKEND !== 'json') { + const _knex = require('./stores/entityStore') + _knex.init().catch(e => console.error('[entityStore] Knex init:', e.message)) + module.exports = _knex +} else { + module.exports = _jsonExports +} diff --git a/server/db/findingStore.js b/server/db/findingStore.js index dba49b2..7eceb03 100644 --- a/server/db/findingStore.js +++ 
b/server/db/findingStore.js @@ -3,6 +3,8 @@ // Datenmodell gemäß internal_audit_report_rules.md (IST/SOLL/Risiko/Empfehlung) 'use strict' +const STORAGE_BACKEND = (process.env.STORAGE_BACKEND || 'json').toLowerCase() + const fs = require('fs') const path = require('path') @@ -211,9 +213,17 @@ function autopurge(days = 30) { return before - cleaned.length } -module.exports = { +const _jsonExports = { getAll, getById, create, update, remove, permanentDelete, restore, getDeleted, addAction, updateAction, deleteAction, getSummary, autopurge, SEVERITIES, STATUSES, ACT_STATUS, } + +if (STORAGE_BACKEND !== 'json') { + const _knex = require('./stores/findingStore') + _knex.init().catch(e => console.error('[findingStore] Knex init:', e.message)) + module.exports = _knex +} else { + module.exports = _jsonExports +} diff --git a/server/db/gdprStore.js b/server/db/gdprStore.js index 2be91aa..c2be2e8 100644 --- a/server/db/gdprStore.js +++ b/server/db/gdprStore.js @@ -1,6 +1,8 @@ // © 2026 Claude Hecker — ISMS Builder V 1.29 — AGPL-3.0 'use strict' +const STORAGE_BACKEND = (process.env.STORAGE_BACKEND || 'json').toLowerCase() + const fs = require('fs') const path = require('path') @@ -719,7 +721,7 @@ const deletionLog = { } } -module.exports = { +const _jsonExports = { vvt, av, dsfa, incidents, dsar, toms, dsb, deletionLog, getSummary, VVT_LEGAL_BASES, VVT_STATUSES, @@ -729,3 +731,11 @@ module.exports = { DSAR_TYPES, DSAR_STATUSES, TOM_CATEGORIES, TOM_STATUSES } + +if (STORAGE_BACKEND !== 'json') { + const _knex = require('./stores/gdprStore') + _knex.init().catch(e => console.error('[gdprStore] Knex init:', e.message)) + module.exports = _knex +} else { + module.exports = _jsonExports +} diff --git a/server/db/goalsStore.js b/server/db/goalsStore.js index 68dc5d3..9b94fcf 100644 --- a/server/db/goalsStore.js +++ b/server/db/goalsStore.js @@ -1,6 +1,8 @@ // © 2026 Claude Hecker — ISMS Builder V 1.29 — AGPL-3.0 'use strict' +const STORAGE_BACKEND = (process.env.STORAGE_BACKEND 
|| 'json').toLowerCase() + const fs = require('fs') const path = require('path') @@ -193,8 +195,16 @@ function getCalendarEvents() { return events } -module.exports = { +const _jsonExports = { getAll, getById, create, update, delete: del, permanentDelete, restore, getDeleted, getSummary, getCalendarEvents, CATEGORIES, STATUSES, PRIORITIES } + +if (STORAGE_BACKEND !== 'json') { + const _knex = require('./stores/goalsStore') + _knex.init().catch(e => console.error('[goalsStore] Knex init:', e.message)) + module.exports = _knex +} else { + module.exports = _jsonExports +} diff --git a/server/db/governanceStore.js b/server/db/governanceStore.js index a1615fc..7ff55cf 100644 --- a/server/db/governanceStore.js +++ b/server/db/governanceStore.js @@ -1,5 +1,8 @@ // © 2026 Claude Hecker — ISMS Builder V 1.29 — AGPL-3.0 'use strict' + +const STORAGE_BACKEND = (process.env.STORAGE_BACKEND || 'json').toLowerCase() + const fs = require('fs') const path = require('path') @@ -286,9 +289,17 @@ function getSummary() { } } -module.exports = { +const _jsonExports = { getReviews, getReviewById, createReview, updateReview, deleteReview, getActions, getActionById, createAction, updateAction, deleteAction, getMeetings, getMeetingById, createMeeting, updateMeeting, deleteMeeting, getSummary, } + +if (STORAGE_BACKEND !== 'json') { + const _knex = require('./stores/governanceStore') + _knex.init().catch(e => console.error('[governanceStore] Knex init:', e.message)) + module.exports = _knex +} else { + module.exports = _jsonExports +} diff --git a/server/db/guidanceStore.js b/server/db/guidanceStore.js index 7213861..aa03dba 100644 --- a/server/db/guidanceStore.js +++ b/server/db/guidanceStore.js @@ -1,6 +1,8 @@ // © 2026 Claude Hecker — ISMS Builder V 1.29 — AGPL-3.0 'use strict' +const STORAGE_BACKEND = (process.env.STORAGE_BACKEND || 'json').toLowerCase() + const fs = require('fs') const path = require('path') @@ -3887,7 +3889,7 @@ function seedIsoNotice() { } } -module.exports = { +const 
_jsonExports = { getAll, getByCategory, search, getById, create, update, delete: del, permanentDelete, restore, getDeleted, getFilePath, VALID_CATEGORIES, seedArchitectureDocs, @@ -3898,3 +3900,94 @@ module.exports = { seedIsoNotice, seedSystemhandbuch, } + +if (STORAGE_BACKEND !== 'json') { + const _knex = require('./stores/guidanceStore') + _knex.init().catch(e => console.error('[guidanceStore] Knex init:', e.message)) + + async function _knexSeedArchitectureDocs() { + const lang = _getDemoLang() + for (const entry of ARCH_SEED) { + const title = typeof entry.title === 'object' ? (entry.title[lang] || entry.title.en) : entry.title + if (!fs.existsSync(entry.srcFile)) continue + let content = fs.readFileSync(entry.srcFile, 'utf8') + if (entry.wrapCode) content = '```' + entry.wrapCode + '\n' + content + '\n```' + await _knex.upsertSeed(entry.seedId, { + id: 'guid_arch_' + entry.seedId, category: entry.category || 'admin-intern', + type: 'markdown', title, content, seedLang: lang, + minRole: entry.minRole !== undefined ? entry.minRole : 'admin', + }) + } + } + + async function _knexSeedDemoDoc() { + const lang = _getDemoLang() + const { title, content } = DEMO_DOC[lang] || DEMO_DOC.en + await _knex.upsertSeed(DEMO_GUIDE_SEED_ID, { + id: 'guid_demo_overview', category: 'systemhandbuch', type: 'markdown', + title, content, pinOrder: 1, minRole: null, seedLang: lang, + }) + } + + async function _knexSeedRoleGuides() { + const lang = _getDemoLang() + const langGuides = ROLE_GUIDES_I18N[lang] || ROLE_GUIDES_I18N.en + for (const guide of ROLE_GUIDES) { + const override = langGuides ? langGuides[guide.seedId] : null + const title = override ? override.title : guide.title + const content = override ? 
override.content : guide.content + await _knex.upsertSeed(guide.seedId, { + id: guide.id, category: 'rollen', type: 'markdown', + title, content, pinOrder: guide.pinOrder, minRole: guide.minRole, + }) + } + } + + async function _knexSeedSoaGuide() { + const lang = _getDemoLang() + const data = SOA_GUIDE[lang] || SOA_GUIDE.en + await _knex.upsertSeed(SOA_GUIDE_SEED_ID, { + id: 'guid_soa_audit_guide', category: 'soa-audit', type: 'markdown', + pinOrder: 1, minRole: null, ...data, + }) + } + + async function _knexSeedPolicyGuide() { + const lang = _getDemoLang() + const data = POLICY_GUIDE[lang] || POLICY_GUIDE.en + await _knex.upsertSeed(POLICY_GUIDE_SEED_ID, { + id: 'guid_policy_prozesse_guide', category: 'policy-prozesse', type: 'markdown', + pinOrder: 1, minRole: null, ...data, + }) + } + + async function _knexSeedIsoNotice() { + const lang = _getDemoLang() + const { title, content } = ISO_NOTICE[lang] || ISO_NOTICE.en + await _knex.upsertSeed(ISO_NOTICE_SEED_ID, { + id: 'guid_iso_controls_notice', category: 'systemhandbuch', type: 'markdown', + pinOrder: 2, minRole: null, title, content, seedLang: lang, + }) + } + + async function _knexSeedSystemhandbuch() { + await _knex.upsertSeed(SYSHANDBUCH_SEED_ID, { + id: 'guid_syshandbuch_quickref', category: 'systemhandbuch', type: 'markdown', + pinOrder: 3, minRole: null, title: 'Systemhandbuch ISMS Build', + content: SYSHANDBUCH_CONTENT, + }) + } + + module.exports = { + ..._knex, + seedArchitectureDocs: _knexSeedArchitectureDocs, + seedDemoDoc: _knexSeedDemoDoc, + seedRoleGuides: _knexSeedRoleGuides, + seedSoaGuide: _knexSeedSoaGuide, + seedPolicyGuide: _knexSeedPolicyGuide, + seedIsoNotice: _knexSeedIsoNotice, + seedSystemhandbuch: _knexSeedSystemhandbuch, + } +} else { + module.exports = _jsonExports +} diff --git a/server/db/knexDatabase.js b/server/db/knexDatabase.js new file mode 100644 index 0000000..01e8b2c --- /dev/null +++ b/server/db/knexDatabase.js @@ -0,0 +1,478 @@ +// © 2026 Claude Hecker — ISMS Builder — 
AGPL-3.0 +'use strict' + +const knex = require('knex') +const path = require('path') +const fs = require('fs') + +let _knex = null +let _initPromise = null + +function getDb() { + if (!_initPromise) throw new Error('[knexDatabase] Not initialized. Call init() first.') + if (!_knex) throw new Error('[knexDatabase] Still initializing. Await init() first.') + return _knex +} + +function clientType() { + return _knex ? _knex.client.config.client : null +} + +function _buildConfig() { + const backend = (process.env.STORAGE_BACKEND || 'json').toLowerCase() + + if (backend === 'sqlite') { + const dbDir = path.join(__dirname, '../../data') + if (!fs.existsSync(dbDir)) fs.mkdirSync(dbDir, { recursive: true }) + return { + client: 'better-sqlite3', + connection: { filename: path.join(dbDir, 'isms.db') }, + useNullAsDefault: true, + } + } + + if (backend === 'mariadb' || backend === 'mysql') { + return { + client: 'mysql2', + connection: process.env.DATABASE_URL || { + host: process.env.DB_HOST || 'localhost', + port: parseInt(process.env.DB_PORT || '3306', 10), + user: process.env.DB_USER || 'isms', + password: process.env.DB_PASS || '', + database: process.env.DB_NAME || 'isms_builder', + ssl: process.env.DB_SSL === 'true' ? { rejectUnauthorized: true } : undefined, + }, + pool: { min: 0, max: 10 }, + } + } + + if (backend === 'postgres' || backend === 'pg') { + return { + client: 'pg', + connection: process.env.DATABASE_URL || { + host: process.env.DB_HOST || 'localhost', + port: parseInt(process.env.DB_PORT || '5432', 10), + user: process.env.DB_USER || 'isms', + password: process.env.DB_PASS || '', + database: process.env.DB_NAME || 'isms_builder', + ssl: process.env.DB_SSL === 'true' ? 
{ rejectUnauthorized: false } : undefined, + }, + pool: { min: 0, max: 10 }, + } + } + + throw new Error(`[knexDatabase] Unsupported STORAGE_BACKEND: ${backend}`) +} + +function autoId(t) { + if (clientType() === 'pg') { + t.specificType('id', 'INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY') + } else { + t.increments('id').primary() + } +} + +const TABLES = { + templates(t) { + t.string('id', 120).primary() + t.string('type', 80).notNullable() + t.string('language', 10).notNullable().defaultTo('de') + t.string('title', 512).notNullable().defaultTo('') + t.text('content').notNullable().defaultTo('') + t.integer('version').notNullable().defaultTo(1) + t.string('status', 30).notNullable().defaultTo('draft') + t.string('owner', 120).nullable() + t.string('next_review_date', 20).nullable() + t.string('parent_id', 120).nullable() + t.integer('sort_order').notNullable().defaultTo(0) + t.string('created_at', 30).notNullable() + t.string('updated_at', 30).notNullable() + t.text('linked_controls').notNullable().defaultTo('[]') + t.text('applicable_entities').notNullable().defaultTo('[]') + t.text('attachments').notNullable().defaultTo('[]') + t.text('history').notNullable().defaultTo('[]') + t.text('status_history').notNullable().defaultTo('[]') + t.string('deleted_at', 30).nullable() + t.string('deleted_by', 120).nullable() + t.index('type', 'idx_template_type') + t.index('status', 'idx_template_status') + t.index('parent_id', 'idx_template_parent') + t.index('deleted_at', 'idx_template_deleted') + }, + + training(t) { + t.string('id', 120).primary() + t.string('title', 512).notNullable().defaultTo('') + t.text('description').notNullable().defaultTo('') + t.string('category', 80).notNullable().defaultTo('other') + t.string('status', 30).notNullable().defaultTo('planned') + t.string('due_date', 20).nullable() + t.string('completed_date', 20).nullable() + t.string('instructor', 120).notNullable().defaultTo('') + t.text('assignees').notNullable().defaultTo('') + 
t.text('applicable_entities').notNullable().defaultTo('[]') + t.text('evidence').notNullable().defaultTo('') + t.boolean('mandatory').notNullable().defaultTo(false) + t.string('created_by', 120).notNullable().defaultTo('system') + t.string('created_at', 30).notNullable() + t.string('updated_at', 30).notNullable() + t.string('deleted_at', 30).nullable() + }, + + entities(t) { + t.string('id', 120).primary() + t.string('name', 256).notNullable() + t.string('short', 30).notNullable().defaultTo('') + t.string('type', 50).notNullable().defaultTo('subsidiary') + t.string('parent_id', 120).nullable() + t.string('created_at', 30).notNullable() + t.string('updated_at', 30).notNullable() + }, + + soa_controls(t) { + t.string('id', 120).primary() + t.string('framework', 50).notNullable().defaultTo('ISO27001') + t.string('control_id', 50).notNullable().defaultTo('') + t.string('title', 512).notNullable().defaultTo('') + t.text('description').notNullable().defaultTo('') + t.string('theme', 80).notNullable().defaultTo('') + t.boolean('applicable').notNullable().defaultTo(true) + t.string('status', 50).notNullable().defaultTo('not_started') + t.text('justification').notNullable().defaultTo('') + t.text('evidence').notNullable().defaultTo('') + t.string('owner', 120).notNullable().defaultTo('') + t.text('applicable_entities').notNullable().defaultTo('[]') + t.text('linked_templates').notNullable().defaultTo('[]') + t.string('updated_by', 120).notNullable().defaultTo('system') + t.boolean('is_custom').notNullable().defaultTo(false) + t.string('created_at', 30).notNullable() + t.string('updated_at', 30).notNullable() + t.index('framework', 'idx_soa_framework') + }, + + guidance(t) { + t.string('id', 120).primary() + t.string('title', 512).notNullable().defaultTo('') + t.string('category', 80).notNullable().defaultTo('systemhandbuch') + t.string('type', 30).notNullable().defaultTo('markdown') + t.text('content').notNullable().defaultTo('') + t.string('file_name', 256).nullable() + 
t.string('file_type', 80).nullable() + t.integer('file_size').nullable() + t.integer('version').notNullable().defaultTo(1) + t.string('min_role', 30).nullable() + t.text('linked_controls').notNullable().defaultTo('[]') + t.text('linked_policies').notNullable().defaultTo('[]') + t.integer('pin_order').nullable() + t.string('seed_id', 120).nullable() + t.string('created_by', 120).notNullable().defaultTo('system') + t.string('created_at', 30).notNullable() + t.string('updated_at', 30).notNullable() + t.string('deleted_at', 30).nullable() + t.string('deleted_by', 120).nullable() + t.index('category', 'idx_guidance_category') + t.index('seed_id', 'idx_guidance_seed') + }, + + risks(t) { + t.string('id', 120).primary() + t.string('title', 512).notNullable().defaultTo('') + t.text('description').notNullable().defaultTo('') + t.string('category', 80).notNullable().defaultTo('other') + t.integer('likelihood').notNullable().defaultTo(2) + t.integer('impact').notNullable().defaultTo(2) + t.integer('risk_score').notNullable().defaultTo(4) + t.string('status', 30).notNullable().defaultTo('open') + t.string('owner', 120).notNullable().defaultTo('') + t.text('applicable_entities').notNullable().defaultTo('[]') + t.text('treatments').notNullable().defaultTo('[]') + t.string('created_by', 120).notNullable().defaultTo('system') + t.string('created_at', 30).notNullable() + t.string('updated_at', 30).notNullable() + t.string('deleted_at', 30).nullable() + }, + + gdpr_entries(t) { + t.string('id', 120).primary() + t.string('gdpr_type', 30).notNullable().defaultTo('vvt') + t.text('data').notNullable().defaultTo('{}') + t.string('created_by', 120).notNullable().defaultTo('system') + t.string('created_at', 30).notNullable() + t.string('updated_at', 30).notNullable() + t.string('deleted_at', 30).nullable() + t.index('gdpr_type', 'idx_gdpr_type') + }, + + gdpr_deletion_log(t) { + t.string('id', 120).primary() + t.text('data').notNullable().defaultTo('{}') + t.string('deleted_by', 
120).notNullable().defaultTo('system') + t.string('deleted_at', 30).notNullable() + }, + + rbac_users(t) { + t.string('id', 120).primary() + t.string('username', 120).notNullable().unique() + t.string('email', 256).notNullable() + t.string('domain', 80).notNullable().defaultTo('Global') + t.string('role', 30).notNullable().defaultTo('reader') + t.text('functions').notNullable().defaultTo('[]') + t.string('password_hash', 256).notNullable() + t.string('totp_secret', 256).nullable() + t.boolean('totp_enabled').notNullable().defaultTo(false) + t.boolean('totp_verified').notNullable().defaultTo(false) + t.text('sections').notNullable().defaultTo('[]') + t.string('created_at', 30).notNullable() + t.string('updated_at', 30).notNullable() + t.index('username', 'idx_rbac_username') + }, + + org_settings(t) { + t.string('key_name', 120).primary() + t.text('value').notNullable() + }, + + audit_log(t) { + autoId(t) + t.string('ts', 30).notNullable() + t.string('user_email', 256).nullable() + t.string('action', 80).notNullable() + t.string('resource', 80).nullable() + t.string('resource_id', 120).nullable() + t.text('detail').nullable() + t.index('ts', 'idx_audit_ts') + t.index('user_email', 'idx_audit_user') + t.index('resource', 'idx_audit_resource') + }, + + goals(t) { + t.string('id', 120).primary() + t.string('title', 512).notNullable().defaultTo('') + t.text('description').notNullable().defaultTo('') + t.string('category', 80).notNullable().defaultTo('other') + t.string('status', 30).notNullable().defaultTo('active') + t.string('priority', 20).notNullable().defaultTo('medium') + t.float('target_value').nullable() + t.float('current_value').nullable() + t.string('unit', 30).nullable() + t.string('due_date', 20).nullable() + t.string('review_date', 20).nullable() + t.string('owner', 120).notNullable().defaultTo('') + t.text('applicable_entities').notNullable().defaultTo('[]') + t.text('linked_controls').notNullable().defaultTo('[]') + t.string('created_by', 
120).notNullable().defaultTo('system') + t.string('created_at', 30).notNullable() + t.string('updated_at', 30).notNullable() + t.string('deleted_at', 30).nullable() + }, + + assets(t) { + t.string('id', 120).primary() + t.string('name', 512).notNullable().defaultTo('') + t.text('description').notNullable().defaultTo('') + t.string('category', 80).notNullable().defaultTo('other') + t.string('classification', 50).notNullable().defaultTo('internal') + t.string('criticality', 30).notNullable().defaultTo('medium') + t.string('owner', 120).notNullable().defaultTo('') + t.string('location', 256).notNullable().defaultTo('') + t.string('eol_date', 20).nullable() + t.string('status', 30).notNullable().defaultTo('active') + t.text('applicable_entities').notNullable().defaultTo('[]') + t.text('linked_controls').notNullable().defaultTo('[]') + t.string('created_by', 120).notNullable().defaultTo('system') + t.string('created_at', 30).notNullable() + t.string('updated_at', 30).notNullable() + t.string('deleted_at', 30).nullable() + }, + + suppliers(t) { + t.string('id', 120).primary() + t.string('name', 512).notNullable().defaultTo('') + t.string('category', 80).notNullable().defaultTo('other') + t.string('contact', 256).notNullable().defaultTo('') + t.string('risk_level', 20).notNullable().defaultTo('medium') + t.string('status', 30).notNullable().defaultTo('active') + t.string('contract_end', 20).nullable() + t.string('next_audit', 20).nullable() + t.text('notes').notNullable().defaultTo('') + t.text('applicable_entities').notNullable().defaultTo('[]') + t.text('linked_controls').notNullable().defaultTo('[]') + t.string('created_by', 120).notNullable().defaultTo('system') + t.string('created_at', 30).notNullable() + t.string('updated_at', 30).notNullable() + t.string('deleted_at', 30).nullable() + }, + + bcm_entries(t) { + t.string('id', 120).primary() + t.string('bcm_type', 30).notNullable().defaultTo('bia') + t.text('data').notNullable().defaultTo('{}') + 
t.string('created_by', 120).notNullable().defaultTo('system') + t.string('created_at', 30).notNullable() + t.string('updated_at', 30).notNullable() + t.string('deleted_at', 30).nullable() + t.index('bcm_type', 'idx_bcm_type') + }, + + legal_entries(t) { + t.string('id', 120).primary() + t.string('legal_type', 30).notNullable().defaultTo('contract') + t.text('data').notNullable().defaultTo('{}') + t.string('created_by', 120).notNullable().defaultTo('system') + t.string('created_at', 30).notNullable() + t.string('updated_at', 30).notNullable() + t.string('deleted_at', 30).nullable() + t.index('legal_type', 'idx_legal_type') + }, + + governance_entries(t) { + t.string('id', 120).primary() + t.string('gov_type', 30).notNullable().defaultTo('review') + t.text('data').notNullable().defaultTo('{}') + t.string('created_by', 120).notNullable().defaultTo('system') + t.string('created_at', 30).notNullable() + t.string('updated_at', 30).notNullable() + t.string('deleted_at', 30).nullable() + t.index('gov_type', 'idx_gov_type') + }, + + public_incidents(t) { + t.string('id', 120).primary() + t.string('ref', 30).notNullable() + t.text('data').notNullable().defaultTo('{}') + t.string('submitted_at', 30).notNullable() + t.string('deleted_at', 30).nullable() + }, + + findings(t) { + t.string('id', 120).primary() + t.text('data').notNullable().defaultTo('{}') + t.string('created_by', 120).notNullable().defaultTo('system') + t.string('created_at', 30).notNullable() + t.string('updated_at', 30).notNullable() + t.string('deleted_at', 30).nullable() + }, + + org_units(t) { + t.string('id', 120).primary() + t.text('data').notNullable().defaultTo('{}') + t.string('created_at', 30).notNullable() + t.string('updated_at', 30).notNullable() + }, + + custom_lists(t) { + t.string('list_id', 120).primary() + t.text('items').notNullable().defaultTo('[]') + }, + + policy_distributions(t) { + t.string('id', 120).primary() + t.string('template_id', 120).notNullable().defaultTo('') + 
t.string('template_title', 512).notNullable().defaultTo('') + t.string('template_type', 80).notNullable().defaultTo('Policy') + t.integer('template_version').notNullable().defaultTo(1) + t.string('mode', 30).notNullable().defaultTo('manual') + t.string('target_group', 256).notNullable().defaultTo('') + t.string('due_date', 20).nullable() + t.text('email_list').notNullable().defaultTo('[]') + t.text('notes').notNullable().defaultTo('') + t.string('status', 30).notNullable().defaultTo('active') + t.string('created_at', 30).notNullable() + t.string('created_by', 120).notNullable().defaultTo('system') + t.string('email_sent_at', 30).nullable() + t.integer('email_sent_count').notNullable().defaultTo(0) + t.index('template_id', 'idx_pdistro_template') + t.index('status', 'idx_pdistro_status') + }, + + policy_acks(t) { + t.string('id', 120).primary() + t.string('distribution_id', 120).notNullable() + t.string('recipient_email', 256).notNullable().defaultTo('') + t.string('recipient_name', 256).notNullable().defaultTo('') + t.string('token', 120).nullable() + t.string('acknowledged_at', 30).nullable() + t.string('ip_address', 45).nullable() + t.string('method', 30).notNullable().defaultTo('manual') + t.text('notes').notNullable().defaultTo('') + t.string('added_by', 120).notNullable().defaultTo('system') + t.index('distribution_id', 'idx_pack_dist') + t.index('token', 'idx_pack_token') + }, +} + +async function initSchema(k) { + for (const [name, builder] of Object.entries(TABLES)) { + if (!(await k.schema.hasTable(name))) { + await k.schema.createTable(name, builder) + } + } + if (clientType() === 'mysql2') { + await _promoteTextToMedium(k) + } +} + +async function _promoteTextToMedium(k) { + for (const tableName of Object.keys(TABLES)) { + const info = await k.raw(`SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME = ? 
AND DATA_TYPE = 'text'`, [tableName]) + const cols = (info[0] || info).map(r => r.COLUMN_NAME) + for (const col of cols) { + await k.raw(`ALTER TABLE \`${tableName}\` MODIFY \`${col}\` MEDIUMTEXT`) + } + } +} + +async function ensureColumns(k) { + const patches = [ + { table: 'soa_controls', col: 'status', fn: (t) => t.string('status', 50).notNullable().defaultTo('not_started') }, + { table: 'soa_controls', col: 'updated_by', fn: (t) => t.string('updated_by', 120).notNullable().defaultTo('system') }, + { table: 'soa_controls', col: 'is_custom', fn: (t) => t.boolean('is_custom').notNullable().defaultTo(false) }, + { table: 'guidance', col: 'type', fn: (t) => t.string('type', 30).notNullable().defaultTo('markdown') }, + { table: 'guidance', col: 'version', fn: (t) => t.integer('version').notNullable().defaultTo(1) }, + { table: 'guidance', col: 'min_role', fn: (t) => t.string('min_role', 30).nullable() }, + { table: 'guidance', col: 'linked_controls', fn: (t) => t.text('linked_controls').notNullable().defaultTo('[]') }, + { table: 'guidance', col: 'linked_policies', fn: (t) => t.text('linked_policies').notNullable().defaultTo('[]') }, + { table: 'guidance', col: 'pin_order', fn: (t) => t.integer('pin_order').nullable() }, + { table: 'guidance', col: 'seed_id', fn: (t) => t.string('seed_id', 120).nullable() }, + { table: 'guidance', col: 'deleted_by', fn: (t) => t.string('deleted_by', 120).nullable() }, + { table: 'rbac_users', col: 'username', fn: (t) => t.string('username', 120).notNullable().unique() }, + { table: 'rbac_users', col: 'domain', fn: (t) => t.string('domain', 80).notNullable().defaultTo('Global') }, + { table: 'rbac_users', col: 'totp_verified', fn: (t) => t.boolean('totp_verified').notNullable().defaultTo(false) }, + { table: 'rbac_users', col: 'sections', fn: (t) => t.text('sections').notNullable().defaultTo('[]') }, + { table: 'assets', col: 'data', fn: (t) => t.text('data').notNullable().defaultTo('{}') }, + { table: 'training', col: 'data', fn: 
(t) => t.text('data').notNullable().defaultTo('{}') }, + { table: 'goals', col: 'data', fn: (t) => t.text('data').notNullable().defaultTo('{}') }, + { table: 'suppliers', col: 'data', fn: (t) => t.text('data').notNullable().defaultTo('{}') }, + { table: 'risks', col: 'deleted_by', fn: (t) => t.string('deleted_by', 120).nullable() }, + ] + + for (const { table, col, fn } of patches) { + if (await k.schema.hasTable(table) && !(await k.schema.hasColumn(table, col))) { + await k.schema.alterTable(table, fn) + } + } +} + +async function init() { + if (_initPromise) return _initPromise + _initPromise = _doInit() + return _initPromise +} + +async function _doInit() { + const config = _buildConfig() + _knex = knex(config) + await initSchema(_knex) + await ensureColumns(_knex) + return _knex +} + +async function destroy() { + if (_knex) { + await _knex.destroy() + _knex = null + } + _initPromise = null +} + +module.exports = { getDb, init, destroy, clientType } diff --git a/server/db/legalStore.js b/server/db/legalStore.js index fdd1f38..f36581c 100644 --- a/server/db/legalStore.js +++ b/server/db/legalStore.js @@ -1,5 +1,6 @@ // © 2026 Claude Hecker — ISMS Builder V 1.29 — AGPL-3.0 'use strict' +const STORAGE_BACKEND = (process.env.STORAGE_BACKEND || 'json').toLowerCase() const fs = require('fs') const path = require('path') @@ -352,10 +353,17 @@ function getSummary() { } } -module.exports = { +const _jsonExports = { contracts, ndas, privacyPolicies, getSummary, FILES_DIR, CONTRACT_TYPES, CONTRACT_STATUSES, NDA_TYPES, NDA_STATUSES, POLICY_TYPES, POLICY_STATUSES } + +if (STORAGE_BACKEND !== 'json') { + const _knex = require('./stores/legalStore') + module.exports = _knex +} else { + module.exports = _jsonExports +} diff --git a/server/db/orgSettingsStore.js b/server/db/orgSettingsStore.js index f9b3516..47a86b3 100644 --- a/server/db/orgSettingsStore.js +++ b/server/db/orgSettingsStore.js @@ -1,4 +1,7 @@ // © 2026 Claude Hecker — ISMS Builder V 1.29 — AGPL-3.0 + +const 
STORAGE_BACKEND = (process.env.STORAGE_BACKEND || 'json').toLowerCase() + // Persistent store for organisation-wide settings and role-specific config. // Data saved to data/org-settings.json @@ -215,4 +218,12 @@ function update(patch) { return updated } -module.exports = { get, update, DEFAULTS } +const _jsonExports = { get, update, DEFAULTS } + +if (STORAGE_BACKEND !== 'json') { + const _knex = require('./stores/orgSettingsStore') + _knex.init().catch(e => console.error('[orgSettingsStore] Knex init:', e.message)) + module.exports = _knex +} else { + module.exports = _jsonExports +} diff --git a/server/db/orgUnitStore.js b/server/db/orgUnitStore.js index 0ea4459..9e0598c 100644 --- a/server/db/orgUnitStore.js +++ b/server/db/orgUnitStore.js @@ -3,6 +3,9 @@ // Data saved to data/org-units.json 'use strict' + +const STORAGE_BACKEND = (process.env.STORAGE_BACKEND || 'json').toLowerCase() + const fs = require('fs') const path = require('path') @@ -136,4 +139,12 @@ function remove(id) { return true } -module.exports = { getAll, getById, create, update, remove } +const _jsonExports = { getAll, getById, create, update, remove } + +if (STORAGE_BACKEND !== 'json') { + const _knex = require('./stores/orgUnitStore') + _knex.init().catch(e => console.error('[orgUnitStore] Knex init:', e.message)) + module.exports = _knex +} else { + module.exports = _jsonExports +} diff --git a/server/db/publicIncidentStore.js b/server/db/publicIncidentStore.js index 1a4434f..b1eb106 100644 --- a/server/db/publicIncidentStore.js +++ b/server/db/publicIncidentStore.js @@ -1,4 +1,7 @@ // © 2026 Claude Hecker — ISMS Builder V 1.29 — AGPL-3.0 + +const STORAGE_BACKEND = (process.env.STORAGE_BACKEND || 'json').toLowerCase() + // Public Incident Store – Vorfall-Meldungen ohne Login (Login-Seite) // Persistenz: data/public-incidents.json @@ -118,4 +121,12 @@ function getDeleted() { // Keep remove as alias for permanentDelete for backward compatibility const remove = permanentDelete -module.exports = 
{ getAll, getById, create, update, delete: del, permanentDelete, restore, getDeleted, remove, INCIDENT_TYPES } +const _jsonExports = { getAll, getById, create, update, delete: del, permanentDelete, restore, getDeleted, remove, INCIDENT_TYPES } + +if (STORAGE_BACKEND !== 'json') { + const _knex = require('./stores/publicIncidentStore') + _knex.init().catch(e => console.error('[publicIncidentStore] Knex init:', e.message)) + module.exports = _knex +} else { + module.exports = _jsonExports +} diff --git a/server/db/riskStore.js b/server/db/riskStore.js index 123c124..2a8e2b0 100644 --- a/server/db/riskStore.js +++ b/server/db/riskStore.js @@ -1,6 +1,8 @@ // © 2026 Claude Hecker — ISMS Builder V 1.29 — AGPL-3.0 'use strict' +const STORAGE_BACKEND = (process.env.STORAGE_BACKEND || 'json').toLowerCase() + const fs = require('fs') const path = require('path') @@ -245,4 +247,12 @@ function getSummary() { return { total: risks.length, byLevel, byCategory, byStatus, openTreatments, top5 } } -module.exports = { getAll, getById, create, update, delete: del, permanentDelete, restore, getDeleted, getReviewPending, approve, addTreatment, updateTreatment, deleteTreatment, getCalendarEvents, getSummary, CATEGORIES, TREATMENT_OPTS, STATUSES } +const _jsonExports = { getAll, getById, create, update, delete: del, permanentDelete, restore, getDeleted, getReviewPending, approve, addTreatment, updateTreatment, deleteTreatment, getCalendarEvents, getSummary, CATEGORIES, TREATMENT_OPTS, STATUSES } + +if (STORAGE_BACKEND !== 'json') { + const _knex = require('./stores/riskStore') + _knex.init().catch(e => console.error('[riskStore] Knex init:', e.message)) + module.exports = _knex +} else { + module.exports = _jsonExports +} diff --git a/server/db/soaStore.js b/server/db/soaStore.js index 515e1da..24a56e9 100644 --- a/server/db/soaStore.js +++ b/server/db/soaStore.js @@ -10,6 +10,7 @@ // // Persistenz: data/soa.json // Control-IDs sind framework-präfixiert (z.B. 
ISO-5.1, BSI-ISMS.1, NIS2-a, EUCS-1, EUAI-9) +const STORAGE_BACKEND = (process.env.STORAGE_BACKEND || 'json').toLowerCase() const fs = require('fs') const path = require('path') @@ -322,7 +323,7 @@ function save(data) { let store = load() -module.exports = { +const _jsonExports = { init: () => { store = load() }, getFrameworks: () => Object.values(FRAMEWORKS), @@ -368,7 +369,6 @@ module.exports = { return store[controlId] }, - // Zusammenfassung – optional pro Framework getSummary: (framework) => { const frameworks = framework ? [framework] : Object.keys(FRAMEWORKS) const result = {} @@ -395,7 +395,6 @@ module.exports = { return framework ? result[framework] : result }, - // Create a custom control (framework = 'CUSTOM', id = 'CUSTOM-') createCustomControl: (body, { changedBy } = {}) => { const title = (body.title || '').trim() if (!title) throw new Error('title required') @@ -421,7 +420,6 @@ module.exports = { return store[id] }, - // Update a custom control's editable metadata updateCustomControl: (id, body, { changedBy } = {}) => { if (!store[id] || !store[id].isCustom) return null const allowed = ['title', 'theme', 'description', 'owner', 'applicable', 'status', 'justification', 'linkedTemplates', 'applicableEntities'] @@ -434,7 +432,6 @@ module.exports = { return store[id] }, - // Delete a custom control — only allowed when not yet linked to any templates deleteCustomControl: (id) => { const ctrl = store[id] if (!ctrl) return { ok: false, reason: 'not_found' } @@ -448,3 +445,11 @@ module.exports = { FRAMEWORKS, IMPLEMENTATION_STATUSES } + +if (STORAGE_BACKEND !== 'json') { + const _knex = require('./stores/soaStore') + _knex.init().catch(e => console.error('[soaStore] Knex init:', e.message)) + module.exports = _knex +} else { + module.exports = _jsonExports +} diff --git a/server/db/stores/ackStore.js b/server/db/stores/ackStore.js new file mode 100644 index 0000000..dfeee7a --- /dev/null +++ b/server/db/stores/ackStore.js @@ -0,0 +1,300 @@ +// © 2026 Claude 
Hecker — ISMS Builder — AGPL-3.0 +'use strict' + +const { getDb } = require('../knexDatabase') + +function uuidv4() { + return `${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 10)}-${Math.random().toString(36).slice(2, 10)}` +} + +function nowISO() { return new Date().toISOString() } + +function _parse(val, fallback) { + if (!val) return fallback + try { return JSON.parse(val) } catch { return fallback } +} + +function rowToDist(row) { + if (!row) return null + return { + id: row.id, + templateId: row.template_id, + templateTitle: row.template_title || '', + templateType: row.template_type || 'Policy', + templateVersion: row.template_version || 1, + mode: row.mode || 'manual', + targetGroup: row.target_group || '', + dueDate: row.due_date || null, + emailList: _parse(row.email_list, []), + notes: row.notes || '', + status: row.status || 'active', + createdAt: row.created_at, + createdBy: row.created_by || 'system', + emailSentAt: row.email_sent_at || null, + emailSentCount: row.email_sent_count || 0, + } +} + +function rowToAck(row) { + if (!row) return null + return { + id: row.id, + distributionId: row.distribution_id, + recipientEmail: row.recipient_email || '', + recipientName: row.recipient_name || '', + token: row.token || null, + acknowledgedAt: row.acknowledged_at || null, + ipAddress: row.ip_address || null, + method: row.method || 'manual', + notes: row.notes || '', + addedBy: row.added_by || 'system', + } +} + +async function init() { await getDb() } + +async function getDistributions() { + const db = getDb() + const rows = await db('policy_distributions').orderBy('created_at', 'desc') + return rows.map(rowToDist) +} + +async function getDistribution(id) { + const db = getDb() + const row = await db('policy_distributions').where('id', id).first() + return row ? 
rowToDist(row) : null +} + +async function _withStats(dist) { + const db = getDb() + const acks = (await db('policy_acks').where('distribution_id', dist.id)).map(rowToAck) + const confirmed = acks.filter(a => a.acknowledgedAt) + const pending = dist.mode === 'email_campaign' + ? dist.emailList.filter(e => !acks.find(a => a.recipientEmail === e && a.acknowledgedAt)) + : [] + return { + ...dist, + stats: { + total: dist.mode === 'email_campaign' ? dist.emailList.length : acks.length, + confirmed: confirmed.length, + pending: pending.length, + }, + } +} + +async function getDistributionWithStats(id) { + const dist = await getDistribution(id) + if (!dist) return null + return await _withStats(dist) +} + +async function getAllDistributionsWithStats() { + const dists = await getDistributions() + const result = [] + for (const d of dists) result.push(await _withStats(d)) + return result +} + +async function createDistribution({ templateId, templateTitle, templateType, templateVersion, mode, targetGroup, dueDate, emailList, notes, createdBy }) { + const id = uuidv4() + const now = nowISO() + const db = getDb() + await db('policy_distributions').insert({ + id, + template_id: templateId, + template_title: templateTitle || '', + template_type: templateType || 'Policy', + template_version: templateVersion || 1, + mode: mode || 'manual', + target_group: targetGroup || '', + due_date: dueDate || null, + email_list: JSON.stringify(emailList || []), + notes: notes || '', + status: 'active', + created_at: now, + created_by: createdBy || 'system', + email_sent_at: null, + email_sent_count: 0, + }) + return await getDistribution(id) +} + +async function updateDistribution(id, patch) { + const db = getDb() + const existing = await db('policy_distributions').where('id', id).first() + if (!existing) return null + const row = {} + const map = { + templateTitle: 'template_title', + templateType: 'template_type', + templateVersion: 'template_version', + mode: 'mode', + targetGroup: 
'target_group', + dueDate: 'due_date', + notes: 'notes', + status: 'status', + emailSentAt: 'email_sent_at', + emailSentCount: 'email_sent_count', + } + for (const [jsKey, dbKey] of Object.entries(map)) { + if (patch[jsKey] !== undefined) row[dbKey] = patch[jsKey] + } + if (patch.emailList !== undefined) row.email_list = JSON.stringify(patch.emailList) + await db('policy_distributions').where('id', id).update(row) + return await getDistribution(id) +} + +async function deleteDistribution(id) { + const db = getDb() + const existing = await db('policy_distributions').where('id', id).first() + if (!existing) return false + await db('policy_acks').where('distribution_id', id).del() + await db('policy_distributions').where('id', id).del() + return true +} + +async function getAcksForDistribution(distributionId) { + const db = getDb() + const rows = await db('policy_acks').where('distribution_id', distributionId) + return rows.map(rowToAck) +} + +async function getAckByToken(token) { + const db = getDb() + const row = await db('policy_acks').where('token', token).first() + return row ? 
rowToAck(row) : null +} + +async function prepareEmailAcks(distributionId, emailList) { + const db = getDb() + const existing = await db('policy_acks').where('distribution_id', distributionId) + const existingEmails = new Set(existing.map(a => a.recipient_email)) + let added = 0 + const inserts = [] + for (const email of emailList) { + if (existingEmails.has(email)) continue + inserts.push({ + id: uuidv4(), + distribution_id: distributionId, + recipient_email: email, + recipient_name: '', + token: uuidv4(), + acknowledged_at: null, + ip_address: null, + method: 'email_link', + }) + added++ + } + if (inserts.length > 0) await db('policy_acks').insert(inserts) + return added +} + +async function confirmByToken(token, { recipientName, ipAddress } = {}) { + const db = getDb() + const row = await db('policy_acks').where('token', token).first() + if (!row) return null + if (row.acknowledged_at) return rowToAck(row) + const patch = { acknowledged_at: nowISO(), ip_address: ipAddress || null } + if (recipientName) patch.recipient_name = recipientName + await db('policy_acks').where('token', token).update(patch) + return await getAckByToken(token) +} + +async function addManualAck({ distributionId, recipientEmail, recipientName, acknowledgedAt, notes, addedBy }) { + const dist = await getDistribution(distributionId) + if (!dist) return null + const id = uuidv4() + const db = getDb() + await db('policy_acks').insert({ + id, + distribution_id: distributionId, + recipient_email: recipientEmail || '', + recipient_name: recipientName || '', + token: null, + acknowledged_at: acknowledgedAt || nowISO(), + ip_address: null, + method: 'manual', + notes: notes || '', + added_by: addedBy || 'system', + }) + return await getAckByToken(null).then(() => rowToAck({ + id, distribution_id: distributionId, + recipient_email: recipientEmail || '', + recipient_name: recipientName || '', + token: null, + acknowledged_at: acknowledgedAt || nowISO(), + ip_address: null, + method: 'manual', + 
notes: notes || '', + added_by: addedBy || 'system', + })) +} + +async function importAcks(distributionId, rows, addedBy) { + const dist = await getDistribution(distributionId) + if (!dist) return { imported: 0, skipped: 0 } + const db = getDb() + let imported = 0, skipped = 0 + const inserts = [] + for (const row of rows) { + if (!row.email) { skipped++; continue } + inserts.push({ + id: uuidv4(), + distribution_id: distributionId, + recipient_email: row.email, + recipient_name: row.name || '', + token: null, + acknowledged_at: row.acknowledgedAt || nowISO(), + ip_address: null, + method: 'csv_import', + added_by: addedBy || 'system', + }) + imported++ + } + if (inserts.length > 0) await db('policy_acks').insert(inserts) + return { imported, skipped } +} + +async function deleteAck(id) { + const db = getDb() + const existing = await db('policy_acks').where('id', id).first() + if (!existing) return false + await db('policy_acks').where('id', id).del() + return true +} + +async function getSummary() { + const db = getDb() + const dists = (await db('policy_distributions').where('status', 'active')).map(rowToDist) + let totalPending = 0 + for (const d of dists) { + if (d.mode === 'email_campaign') { + const acks = (await db('policy_acks').where('distribution_id', d.id)).map(rowToAck) + const confirmed = new Set(acks.filter(a => a.acknowledgedAt).map(a => a.recipientEmail)) + totalPending += d.emailList.filter(e => !confirmed.has(e)).length + } + } + return { + activeDistributions: dists.length, + pendingAcks: totalPending, + } +} + +module.exports = { + init, + getDistributions, + getDistribution, + getDistributionWithStats, + getAllDistributionsWithStats, + createDistribution, + updateDistribution, + deleteDistribution, + getAcksForDistribution, + getAckByToken, + prepareEmailAcks, + confirmByToken, + addManualAck, + importAcks, + deleteAck, + getSummary, +} diff --git a/server/db/stores/assetStore.js b/server/db/stores/assetStore.js new file mode 100644 index 
0000000..35435ba --- /dev/null +++ b/server/db/stores/assetStore.js @@ -0,0 +1,180 @@ +'use strict' + +const { getDb, init: initDb } = require('../knexDatabase') + +const ASSET_TYPES = { + hardware_server: 'Server', hardware_workstation: 'Workstation / PC', + hardware_laptop: 'Laptop / Notebook', hardware_mobile: 'Mobilgerät', + hardware_network: 'Netzwerk-Equipment', hardware_ics_ot: 'ICS/OT-Anlage', + hardware_building: 'Gebäudetechnik (BAS/GLT)', hardware_other: 'Hardware (Sonstige)', + software_app: 'Anwendungssoftware', software_os: 'Betriebssystem', + software_cloud: 'Cloud-Dienst (IaaS/PaaS)', software_saas: 'SaaS-Anwendung', + software_other: 'Software (Sonstige)', data_database: 'Datenbank', + data_document: 'Dokumentensammlung', data_backup: 'Backup / Archiv', + data_other: 'Daten (Sonstige)', service_internal: 'Interner Dienst', + service_cloud: 'Cloud-Service (extern)', service_external: 'Externer Dienstleister', + facility_office: 'Bürogebäude', facility_datacenter: 'Rechenzentrum / Serverraum', + facility_production: 'Produktionsstätte / Werk', facility_other: 'Einrichtung (Sonstige)', +} + +const CATEGORIES = { + hardware: 'Hardware', software: 'Software', + data: 'Daten / Informationen', service: 'Dienste', facility: 'Einrichtungen', +} + +function nowISO() { return new Date().toISOString() } +function makeId() { return `asset_${require('crypto').randomBytes(4).toString('hex')}` } +function _json(val, fallback) { if (!val) return fallback; try { return JSON.parse(val) } catch { return fallback } } + +function rowToAsset(row) { + if (!row) return null + const d = _json(row.data, {}) + return { + id: row.id, name: row.name, description: row.description, + category: row.category, type: d.type || '', + classification: row.classification, criticality: row.criticality, + owner: row.owner, ownerEmail: d.ownerEmail || '', + custodian: d.custodian || '', entityId: d.entityId || '', + location: row.location, status: row.status, + vendor: d.vendor || '', 
version: d.version || '', + serialNumber: d.serialNumber || '', purchaseDate: d.purchaseDate || '', + endOfLifeDate: row.eol_date || '', + tags: d.tags || [], notes: d.notes || '', + linkedControls: _json(row.linked_controls, []), + linkedPolicies: d.linkedPolicies || [], + applicableEntities: _json(row.applicable_entities, []), + createdBy: row.created_by, createdAt: row.created_at, + updatedAt: row.updated_at, deletedAt: row.deleted_at || null, + updatedBy: d.updatedBy || '', deletedBy: d.deletedBy || '', + } +} + +function packData(a) { + return JSON.stringify({ + type: a.type, ownerEmail: a.ownerEmail, custodian: a.custodian, + entityId: a.entityId, vendor: a.vendor, version: a.version, + serialNumber: a.serialNumber, purchaseDate: a.purchaseDate, + tags: a.tags || [], notes: a.notes || '', + linkedPolicies: a.linkedPolicies || [], + updatedBy: a.updatedBy || '', deletedBy: a.deletedBy || '', + }) +} + +module.exports = { + init: async () => { await initDb() }, + + getAll: async ({ category, type, classification, criticality, status, entityId } = {}) => { + const q = getDb()('assets').whereNull('deleted_at') + if (category) q.where('category', category) + if (classification) q.where('classification', classification) + if (criticality) q.where('criticality', criticality) + if (status) q.where('status', status) + let list = (await q).map(rowToAsset) + if (type) list = list.filter(i => i.type === type) + if (entityId) list = list.filter(i => i.entityId === entityId) + return list + }, + + getById: async (id) => { + const row = await getDb()('assets').where('id', id).whereNull('deleted_at').first() + return rowToAsset(row) + }, + + create: async (data, { createdBy } = {}) => { + const a = { + id: makeId(), + name: data.name || '', category: data.category || 'hardware', + type: data.type || '', description: data.description || '', + owner: data.owner || '', ownerEmail: data.ownerEmail || '', + custodian: data.custodian || '', entityId: data.entityId || '', + 
location: data.location || '', + classification: data.classification || 'internal', + criticality: data.criticality || 'medium', + status: data.status || 'active', + vendor: data.vendor || '', version: data.version || '', + serialNumber: data.serialNumber || '', purchaseDate: data.purchaseDate || '', + endOfLifeDate: data.endOfLifeDate || '', + tags: Array.isArray(data.tags) ? data.tags : (data.tags ? String(data.tags).split(',').map(t => t.trim()).filter(Boolean) : []), + notes: data.notes || '', + linkedControls: Array.isArray(data.linkedControls) ? data.linkedControls : [], + linkedPolicies: Array.isArray(data.linkedPolicies) ? data.linkedPolicies : [], + createdBy: createdBy || 'system', + } + const now = nowISO() + await getDb()('assets').insert({ + id: a.id, name: a.name, description: a.description, + category: a.category, classification: a.classification, + criticality: a.criticality, owner: a.owner, location: a.location, + eol_date: a.endOfLifeDate || null, status: a.status, + applicable_entities: JSON.stringify(data.applicableEntities || []), + linked_controls: JSON.stringify(a.linkedControls), + data: packData(a), created_by: a.createdBy, created_at: now, updated_at: now, + }) + return { ...a, createdAt: now, updatedAt: now } + }, + + update: async (id, patch, { changedBy } = {}) => { + const row = await getDb()('assets').where('id', id).whereNull('deleted_at').first() + if (!row) return null + const a = rowToAsset(row) + const allowed = ['name','category','type','description','owner','ownerEmail','custodian','entityId', + 'location','classification','criticality','status','vendor','version','serialNumber', + 'purchaseDate','endOfLifeDate','tags','notes','linkedControls','linkedPolicies'] + for (const k of allowed) { + if (patch[k] !== undefined) a[k] = patch[k] + } + if (patch.tags !== undefined && !Array.isArray(a.tags)) { + a.tags = String(a.tags).split(',').map(t => t.trim()).filter(Boolean) + } + a.updatedAt = nowISO() + if (changedBy) a.updatedBy = 
changedBy + await getDb()('assets').where('id', id).update({ + name: a.name, description: a.description, category: a.category, + classification: a.classification, criticality: a.criticality, + owner: a.owner, location: a.location, status: a.status, + eol_date: a.endOfLifeDate || null, + linked_controls: JSON.stringify(a.linkedControls || []), + data: packData(a), updated_at: a.updatedAt, + }) + return a + }, + + remove: async (id) => { + const affected = await getDb()('assets').where('id', id).whereNull('deleted_at') + .update({ deleted_at: nowISO() }) + return affected > 0 + }, + + getSummary: async () => { + const rows = await getDb()('assets').whereNull('deleted_at') + const list = rows.map(rowToAsset) + const now = new Date() + const in90 = new Date(now.getTime() + 90 * 86400000) + const byCategory = { hardware: 0, software: 0, data: 0, service: 0, facility: 0 } + const byClassification = { public: 0, internal: 0, confidential: 0, strictly_confidential: 0 } + const byCriticality = { low: 0, medium: 0, high: 0, critical: 0 } + let unclassified = 0, criticalUnclassified = 0, endOfLifeSoon = 0 + for (const a of list) { + if (byCategory[a.category] !== undefined) byCategory[a.category]++ + if (byClassification[a.classification] !== undefined) byClassification[a.classification]++ + else if (a.classification) unclassified++ + if (byCriticality[a.criticality] !== undefined) byCriticality[a.criticality]++ + if (!a.classification || a.classification === 'public') unclassified++ + if ((a.criticality === 'critical' || a.criticality === 'high') && (!a.classification || a.classification === 'public')) criticalUnclassified++ + if (a.endOfLifeDate) { + const eol = new Date(a.endOfLifeDate) + if (eol >= now && eol <= in90) endOfLifeSoon++ + } + } + return { + total: list.length, + active: list.filter(i => i.status === 'active').length, + decommissioned: list.filter(i => i.status === 'decommissioned').length, + planned: list.filter(i => i.status === 'planned').length, + unclassified, byCategory, 
byClassification, byCriticality, + criticalUnclassified, endOfLifeSoon, + } + }, + + ASSET_TYPES, CATEGORIES, +} diff --git a/server/db/stores/auditStore.js b/server/db/stores/auditStore.js new file mode 100644 index 0000000..fbe41ab --- /dev/null +++ b/server/db/stores/auditStore.js @@ -0,0 +1,67 @@ +'use strict' + +const { getDb, init: initDb } = require('../knexDatabase') + +const MAX_ENTRIES = 2000 + +function nowISO() { return new Date().toISOString() } + +module.exports = { + init: async () => { await initDb() }, + + append: async ({ user, action, resource, resourceId = '', detail = '' }) => { + const db = getDb() + await db('audit_log').insert({ + ts: nowISO(), + user_email: user || 'system', + action, + resource: resource || null, + resource_id: String(resourceId), + detail: detail || null, + }) + const clientType = db.client.config.client + let count + if (clientType === 'pg') { + const row = await db.raw('SELECT CURRVAL(pg_get_serial_sequence(\'audit_log\', \'id\')) as n') + count = Number(row.rows?.[0]?.n || 0) + } else { + const row = await db('audit_log').max('id as n').first() + count = row?.n || 0 + } + if (count > MAX_ENTRIES) { + await db.raw('DELETE FROM audit_log WHERE id <= ?', [count - MAX_ENTRIES]) + } + }, + + query: async ({ user, action, resource, from, to, limit = 200, offset = 0 } = {}) => { + const q = getDb()('audit_log') + if (user) q.where('user_email', 'like', `%${user}%`) + if (action) q.where('action', action) + if (resource) q.where('resource', resource) + if (from) q.where('ts', '>=', from) + if (to) q.where('ts', '<=', to) + + const totalQ = q.clone() + const countResult = await totalQ.count('* as cnt').first() + const total = Number(countResult?.cnt || 0) + + q.orderBy('ts', 'desc').offset(offset).limit(limit) + const rows = await q + return { + total, + entries: rows.map(r => ({ + id: r.id, + ts: r.ts, + user: r.user_email, + action: r.action, + resource: r.resource, + resourceId: r.resource_id, + detail: r.detail, + })), + } + }, + + clear: 
async () => { + await getDb()('audit_log').del() + }, +} diff --git a/server/db/stores/bcmStore.js b/server/db/stores/bcmStore.js new file mode 100644 index 0000000..cdbe88d --- /dev/null +++ b/server/db/stores/bcmStore.js @@ -0,0 +1,180 @@ +'use strict' + +const { getDb, init: initDb } = require('../knexDatabase') + +function nowISO() { return new Date().toISOString() } +function makeId(prefix) { return `${prefix}_${require('crypto').randomBytes(4).toString('hex')}` } +function _json(val, fallback) { if (!val) return fallback; try { return JSON.parse(val) } catch { return fallback } } + +function rowToItem(row) { + if (!row) return null + return { id: row.id, ..._json(row.data, {}), createdBy: row.created_by, createdAt: row.created_at, updatedAt: row.updated_at, deletedAt: row.deleted_at || null } +} + +module.exports = { + init: async () => { await initDb() }, + + getBia: async () => { + const rows = await getDb()('bcm_entries').where('bcm_type', 'bia').whereNull('deleted_at') + return rows.map(rowToItem) + }, + getBiaById: async (id) => { + const row = await getDb()('bcm_entries').where('id', id).where('bcm_type', 'bia').whereNull('deleted_at').first() + return rowToItem(row) + }, + createBia: async (fields, { createdBy } = {}) => { + const id = makeId('bia') + const now = nowISO() + const item = { + id, title: fields.title || 'Ohne Titel', + processOwner: fields.processOwner || '', department: fields.department || '', + criticality: fields.criticality || 'medium', + rto: typeof fields.rto === 'number' ? fields.rto : (parseFloat(fields.rto) || 0), + rpo: typeof fields.rpo === 'number' ? fields.rpo : (parseFloat(fields.rpo) || 0), + mtpd: typeof fields.mtpd === 'number' ? fields.mtpd : (parseFloat(fields.mtpd) || 0), + dependencies: Array.isArray(fields.dependencies) ? fields.dependencies : [], + affectedSystems: Array.isArray(fields.affectedSystems) ? 
fields.affectedSystems : [], + status: fields.status || 'draft', lastReviewDate: fields.lastReviewDate || '', + notes: fields.notes || '', entityId: fields.entityId || '', + linkedControls: Array.isArray(fields.linkedControls) ? fields.linkedControls : [], + linkedPolicies: Array.isArray(fields.linkedPolicies) ? fields.linkedPolicies : [], + createdBy: createdBy || 'system', createdAt: now, updatedAt: now, deletedAt: null, + } + await getDb()('bcm_entries').insert({ + id, bcm_type: 'bia', data: JSON.stringify(item), + created_by: createdBy || 'system', created_at: now, updated_at: now, + }) + return item + }, + updateBia: async (id, patch, { changedBy } = {}) => { + const row = await getDb()('bcm_entries').where('id', id).where('bcm_type', 'bia').whereNull('deleted_at').first() + if (!row) return null + const item = rowToItem(row) + const allowed = ['title','processOwner','department','criticality','rto','rpo','mtpd', + 'dependencies','affectedSystems','status','lastReviewDate','notes','entityId','linkedControls','linkedPolicies'] + for (const k of allowed) { if (patch[k] !== undefined) item[k] = patch[k] } + item.updatedAt = nowISO() + if (changedBy) item.updatedBy = changedBy + await getDb()('bcm_entries').where('id', id).update({ data: JSON.stringify(item), updated_at: item.updatedAt }) + return item + }, + deleteBia: async (id) => { + const affected = await getDb()('bcm_entries').where('id', id).where('bcm_type', 'bia').whereNull('deleted_at').update({ deleted_at: nowISO() }) + return affected > 0 + }, + + getPlans: async () => { + const rows = await getDb()('bcm_entries').where('bcm_type', 'plan').whereNull('deleted_at') + return rows.map(rowToItem) + }, + getPlanById: async (id) => { + const row = await getDb()('bcm_entries').where('id', id).where('bcm_type', 'plan').whereNull('deleted_at').first() + return rowToItem(row) + }, + createPlan: async (fields, { createdBy } = {}) => { + const id = makeId('bcp') + const now = nowISO() + const item = { + id, title: 
fields.title || 'Ohne Titel', type: fields.type || 'bcp', + scope: fields.scope || '', planOwner: fields.planOwner || '', + status: fields.status || 'draft', version: fields.version || '1.0', + lastTested: fields.lastTested || '', nextTest: fields.nextTest || '', + testResult: fields.testResult || 'not_tested', + linkedBiaIds: Array.isArray(fields.linkedBiaIds) ? fields.linkedBiaIds : [], + procedures: fields.procedures || '', entityId: fields.entityId || '', + linkedControls: Array.isArray(fields.linkedControls) ? fields.linkedControls : [], + linkedPolicies: Array.isArray(fields.linkedPolicies) ? fields.linkedPolicies : [], + createdBy: createdBy || 'system', createdAt: now, updatedAt: now, deletedAt: null, + } + await getDb()('bcm_entries').insert({ + id, bcm_type: 'plan', data: JSON.stringify(item), + created_by: createdBy || 'system', created_at: now, updated_at: now, + }) + return item + }, + updatePlan: async (id, patch, { changedBy } = {}) => { + const row = await getDb()('bcm_entries').where('id', id).where('bcm_type', 'plan').whereNull('deleted_at').first() + if (!row) return null + const item = rowToItem(row) + const allowed = ['title','type','scope','planOwner','status','version','lastTested', + 'nextTest','testResult','linkedBiaIds','procedures','entityId','linkedControls','linkedPolicies'] + for (const k of allowed) { if (patch[k] !== undefined) item[k] = patch[k] } + item.updatedAt = nowISO() + if (changedBy) item.updatedBy = changedBy + await getDb()('bcm_entries').where('id', id).update({ data: JSON.stringify(item), updated_at: item.updatedAt }) + return item + }, + deletePlan: async (id) => { + const affected = await getDb()('bcm_entries').where('id', id).where('bcm_type', 'plan').whereNull('deleted_at').update({ deleted_at: nowISO() }) + return affected > 0 + }, + + getExercises: async () => { + const rows = await getDb()('bcm_entries').where('bcm_type', 'exercise').whereNull('deleted_at') + return rows.map(rowToItem) + }, + getExerciseById: 
async (id) => { + const row = await getDb()('bcm_entries').where('id', id).where('bcm_type', 'exercise').whereNull('deleted_at').first() + return rowToItem(row) + }, + createExercise: async (fields, { createdBy } = {}) => { + const id = makeId('bex') + const now = nowISO() + const item = { + id, title: fields.title || 'Ohne Titel', type: fields.type || 'tabletop', + date: fields.date || '', conductor: fields.conductor || '', + participants: Array.isArray(fields.participants) ? fields.participants : [], + linkedPlanId: fields.linkedPlanId || '', result: fields.result || 'planned', + findings: fields.findings || '', actions: fields.actions || '', + nextExercise: fields.nextExercise || '', + linkedControls: Array.isArray(fields.linkedControls) ? fields.linkedControls : [], + linkedPolicies: Array.isArray(fields.linkedPolicies) ? fields.linkedPolicies : [], + createdBy: createdBy || 'system', createdAt: now, updatedAt: now, deletedAt: null, + } + await getDb()('bcm_entries').insert({ + id, bcm_type: 'exercise', data: JSON.stringify(item), + created_by: createdBy || 'system', created_at: now, updated_at: now, + }) + return item + }, + updateExercise: async (id, patch, { changedBy } = {}) => { + const row = await getDb()('bcm_entries').where('id', id).where('bcm_type', 'exercise').whereNull('deleted_at').first() + if (!row) return null + const item = rowToItem(row) + const allowed = ['title','type','date','conductor','participants','linkedPlanId', + 'result','findings','actions','nextExercise','linkedControls','linkedPolicies'] + for (const k of allowed) { if (patch[k] !== undefined) item[k] = patch[k] } + item.updatedAt = nowISO() + if (changedBy) item.updatedBy = changedBy + await getDb()('bcm_entries').where('id', id).update({ data: JSON.stringify(item), updated_at: item.updatedAt }) + return item + }, + deleteExercise: async (id) => { + const affected = await getDb()('bcm_entries').where('id', id).where('bcm_type', 'exercise').whereNull('deleted_at').update({ 
deleted_at: nowISO() }) + return affected > 0 + }, + + getSummary: async () => { + const db = getDb() + const biaRows = (await db('bcm_entries').where('bcm_type', 'bia').whereNull('deleted_at')).map(rowToItem) + const planRows = (await db('bcm_entries').where('bcm_type', 'plan').whereNull('deleted_at')).map(rowToItem) + const exRows = (await db('bcm_entries').where('bcm_type', 'exercise').whereNull('deleted_at')).map(rowToItem) + const today = new Date().toISOString().slice(0, 10) + const in30 = new Date(Date.now() + 30 * 86400000).toISOString().slice(0, 10) + const biaCritical = biaRows.filter(b => b.criticality === 'critical').length + const linkedBiaIds = new Set(planRows.flatMap(p => p.linkedBiaIds || [])) + const withoutPlan = biaRows.filter(b => !linkedBiaIds.has(b.id)).length + const plansApproved = planRows.filter(p => p.status === 'approved' || p.status === 'tested').length + const plansTested = planRows.filter(p => p.status === 'tested' || (p.testResult && p.testResult !== 'not_tested')).length + const overdueTest = planRows.filter(p => p.nextTest && p.nextTest < today).length + const nextTestSoon = planRows.filter(p => p.nextTest && p.nextTest >= today && p.nextTest <= in30).length + const upcoming = exRows.filter(e => e.result === 'planned' && e.date && e.date >= today).length + const pastResults = exRows.filter(e => e.result !== 'planned' && e.date && e.date < today).sort((a, b) => new Date(b.date) - new Date(a.date)) + const lastResult = pastResults.length ? 
pastResults[0].result : null + return { + bia: { total: biaRows.length, critical: biaCritical, withoutPlan }, + plans: { total: planRows.length, approved: plansApproved, tested: plansTested, overdueTest, nextTestSoon }, + exercises: { total: exRows.length, upcoming, lastResult }, + } + }, +} diff --git a/server/db/stores/customListsStore.js b/server/db/stores/customListsStore.js new file mode 100644 index 0000000..2abf013 --- /dev/null +++ b/server/db/stores/customListsStore.js @@ -0,0 +1,100 @@ +'use strict' + +const { getDb, init: initDb } = require('../knexDatabase') + +const DEFAULTS = { + templateTypes: ['Policy', 'Procedure', 'Risk Policy', 'SoA', 'Incident', 'Release'], + riskCategories: [ + { id: 'technical', label: 'Technical', icon: 'ph-cpu' }, + { id: 'organizational', label: 'Organizational', icon: 'ph-users' }, + { id: 'physical', label: 'Physical', icon: 'ph-building' }, + { id: 'legal', label: 'Legal', icon: 'ph-scales' }, + ], + riskTreatments: [ + { id: 'reduce', label: 'Reduce' }, + { id: 'accept', label: 'Accept' }, + { id: 'avoid', label: 'Avoid' }, + { id: 'transfer', label: 'Transfer' }, + ], + gdprDataCategories: ['name', 'email', 'phone', 'address', 'health', 'biometric', 'financial', 'location', 'other'], + gdprSubjectTypes: [ + { id: 'customers', label: 'Customers' }, + { id: 'employees', label: 'Employees' }, + { id: 'contractors', label: 'Contractors' }, + { id: 'website_visitors', label: 'Website Visitors' }, + { id: 'minors', label: 'Minors' }, + ], + incidentTypes: [ + { id: 'malware', label: 'Malware / Malicious Software' }, + { id: 'phishing', label: 'Phishing / Scam' }, + { id: 'data_theft', label: 'Data Theft / Data Leak' }, + { id: 'unauthorized_access', label: 'Unauthorized Access' }, + { id: 'ransomware', label: 'Ransomware' }, + { id: 'social_engineering', label: 'Social Engineering' }, + { id: 'other', label: 'Other' }, + ], +} + +const ALLOWED_LIST_IDS = Object.keys(DEFAULTS) + +function _json(val, fallback) { + if (!val) 
return fallback + try { return JSON.parse(val) } catch { return fallback } +} + +module.exports = { + init: async () => { + await initDb() + const db = getDb() + for (const listId of ALLOWED_LIST_IDS) { + const row = await db('custom_lists').where('list_id', listId).first() + if (!row) { + await db('custom_lists').insert({ + list_id: listId, + items: JSON.stringify(DEFAULTS[listId]), + }) + } + } + }, + + getAll: async () => { + const rows = await getDb()('custom_lists') + const result = {} + for (const row of rows) { + result[row.list_id] = _json(row.items, DEFAULTS[row.list_id]) + } + for (const key of ALLOWED_LIST_IDS) { + if (!(key in result)) result[key] = DEFAULTS[key] + } + return result + }, + + getList: async (listId) => { + if (!ALLOWED_LIST_IDS.includes(listId)) return null + const row = await getDb()('custom_lists').where('list_id', listId).first() + return row ? _json(row.items, DEFAULTS[listId]) : DEFAULTS[listId] + }, + + setList: async (listId, items) => { + if (!ALLOWED_LIST_IDS.includes(listId)) return null + const db = getDb() + const row = await db('custom_lists').where('list_id', listId).first() + if (row) { + await db('custom_lists').where('list_id', listId).update({ items: JSON.stringify(items) }) + } else { + await db('custom_lists').insert({ list_id: listId, items: JSON.stringify(items) }) + } + return items + }, + + resetList: async (listId) => { + if (!ALLOWED_LIST_IDS.includes(listId)) return null + await getDb()('custom_lists').where('list_id', listId).update({ + items: JSON.stringify(DEFAULTS[listId]), + }) + return DEFAULTS[listId] + }, + + ALLOWED_LIST_IDS, + DEFAULTS, +} diff --git a/server/db/stores/entityStore.js b/server/db/stores/entityStore.js new file mode 100644 index 0000000..63a07fa --- /dev/null +++ b/server/db/stores/entityStore.js @@ -0,0 +1,117 @@ +'use strict' + +const { getDb, init: initDb } = require('../knexDatabase') + +const SEED = [ + { id: 'entity_holding', name: 'Holding GmbH', type: 'holding', parent: null, 
shortCode: 'HLD', active: true }, + { id: 'entity_sub1', name: 'Gesellschaft Alpha GmbH', type: 'subsidiary', parent: 'entity_holding', shortCode: 'ALP', active: true }, + { id: 'entity_sub2', name: 'Gesellschaft Beta GmbH', type: 'subsidiary', parent: 'entity_holding', shortCode: 'BET', active: true }, + { id: 'entity_sub3', name: 'Gesellschaft Gamma GmbH', type: 'subsidiary', parent: 'entity_holding', shortCode: 'GAM', active: true }, +] + +function nowISO() { return new Date().toISOString() } +function genId() { return `entity_${Date.now()}` } + +function rowToEntity(row) { + if (!row) return null + return { + id: row.id, + name: row.name, + short: row.short, + type: row.type, + parent: row.parent_id || null, + shortCode: row.short, + active: true, + createdAt: row.created_at, + updatedAt: row.updated_at, + } +} + +module.exports = { + init: async () => { + await initDb() + const db = getDb() + for (const s of SEED) { + const exists = await db('entities').where('id', s.id).first() + if (!exists) { + const now = nowISO() + await db('entities').insert({ + id: s.id, + name: s.name, + short: s.shortCode || '', + type: s.type, + parent_id: s.parent || null, + created_at: now, + updated_at: now, + }) + } + } + }, + + getAll: async () => { + const rows = await getDb()('entities') + return rows.map(rowToEntity).filter(e => e.active !== false) + }, + + getById: async (id) => { + const row = await getDb()('entities').where('id', id).first() + return rowToEntity(row) + }, + + getTree: async () => { + const rows = await getDb()('entities') + const all = rows.map(rowToEntity).filter(e => e.active !== false) + const map = {} + for (const e of all) map[e.id] = { ...e, children: [] } + const roots = [] + for (const e of all) { + if (e.parent && map[e.parent]) { + map[e.parent].children.push(map[e.id]) + } else { + roots.push(map[e.id]) + } + } + return roots + }, + + create: async ({ name, type = 'subsidiary', parent = 'entity_holding', shortCode = '', active = true }) => { + 
const id = genId() + const now = nowISO() + await getDb()('entities').insert({ + id, + name, + short: shortCode || '', + type, + parent_id: parent || null, + created_at: now, + updated_at: now, + }) + return { id, name, type, parent: parent || null, shortCode, active, createdAt: now, updatedAt: now } + }, + + update: async (id, fields) => { + const row = await getDb()('entities').where('id', id).first() + if (!row) return null + const e = rowToEntity(row) + const allowed = ['name', 'type', 'parent', 'shortCode', 'active'] + for (const key of allowed) { + if (fields[key] !== undefined) e[key] = fields[key] + } + const now = nowISO() + await getDb()('entities').where('id', id).update({ + name: e.name, + short: e.shortCode || e.short || '', + type: e.type, + parent_id: e.parent || null, + updated_at: now, + }) + return { ...e, updatedAt: now } + }, + + delete: async (id) => { + const row = await getDb()('entities').where('id', id).first() + if (!row) return false + await getDb()('entities').where('id', id).del() + return true + }, +} diff --git a/server/db/stores/findingStore.js b/server/db/stores/findingStore.js new file mode 100644 index 0000000..204681f --- /dev/null +++ b/server/db/stores/findingStore.js @@ -0,0 +1,181 @@ +'use strict' + +const { getDb, init: initDb } = require('../knexDatabase') + +function nowISO() { return new Date().toISOString() } +function makeId() { return `finding_${Date.now()}_${Math.random().toString(36).slice(2, 6)}` } +function makeActId() { return `act_${Date.now()}_${Math.random().toString(36).slice(2, 6)}` } +function _json(val, fallback) { if (!val) return fallback; try { return JSON.parse(val) } catch { return fallback } } + +const SEVERITIES = ['critical', 'high', 'medium', 'low', 'observation'] +const STATUSES = ['open', 'in_progress', 'resolved', 'accepted'] +const ACT_STATUS = ['open', 'in_progress', 'done'] + +function rowToFinding(row) { + if (!row) return null + return { id: row.id, ..._json(row.data, {}), createdBy: 
row.created_by, createdAt: row.created_at, updatedAt: row.updated_at, deletedAt: row.deleted_at || null } +} + +async function nextRef() { + const year = new Date().getFullYear() + const rows = await getDb()('findings') + const thisYear = rows.filter(f => { const d = _json(f.data, {}); return d.ref && d.ref.startsWith(`FIND-${year}-`) }) + const nums = thisYear.map(f => parseInt((_json(f.data, {}).ref || '').split('-')[2], 10)).filter(n => !isNaN(n)) + const next = nums.length ? Math.max(...nums) + 1 : 1 + return `FIND-${year}-${String(next).padStart(4, '0')}` +} + +module.exports = { + init: async () => { await initDb() }, + SEVERITIES, STATUSES, ACT_STATUS, + + getAll: async ({ status, severity, auditor } = {}) => { + const rows = await getDb()('findings').whereNull('deleted_at').orderBy('created_at', 'desc') + let list = rows.map(rowToFinding) + if (status) list = list.filter(f => f.status === status) + if (severity) list = list.filter(f => f.severity === severity) + if (auditor) list = list.filter(f => f.auditor === auditor) + return list + }, + + getById: async (id) => { + const row = await getDb()('findings').where('id', id).whereNull('deleted_at').first() + return rowToFinding(row) + }, + + create: async (fields, createdBy) => { + const id = makeId() + const ref = await nextRef() + const now = nowISO() + const f = { + ref, + title: fields.title || 'Neue Feststellung', + severity: SEVERITIES.includes(fields.severity) ? fields.severity : 'medium', + status: STATUSES.includes(fields.status) ? fields.status : 'open', + observation: fields.observation || '', requirement: fields.requirement || '', + impact: fields.impact || '', recommendation: fields.recommendation || '', + auditor: fields.auditor || '', auditedArea: fields.auditedArea || '', + auditPeriodFrom: fields.auditPeriodFrom || null, auditPeriodTo: fields.auditPeriodTo || null, + linkedControls: Array.isArray(fields.linkedControls) ? 
fields.linkedControls : [], + linkedPolicies: Array.isArray(fields.linkedPolicies) ? fields.linkedPolicies : [], + linkedRisks: Array.isArray(fields.linkedRisks) ? fields.linkedRisks : [], + actions: [], deletedBy: null, + } + await getDb()('findings').insert({ + id, data: JSON.stringify(f), created_by: createdBy || 'system', created_at: now, updated_at: now, + }) + return { id, ...f, createdBy: createdBy || 'system', createdAt: now, updatedAt: now, deletedAt: null } + }, + + update: async (id, fields, updatedBy) => { + const row = await getDb()('findings').where('id', id).whereNull('deleted_at').first() + if (!row) return null + const f = rowToFinding(row) + const allowed = ['title','severity','status','observation','requirement','impact', + 'recommendation','auditor','auditedArea','auditPeriodFrom','auditPeriodTo', + 'linkedControls','linkedPolicies','linkedRisks'] + for (const k of allowed) { if (fields[k] !== undefined) f[k] = fields[k] } + if (fields.severity && !SEVERITIES.includes(fields.severity)) f.severity = 'medium' + if (fields.status && !STATUSES.includes(fields.status)) f.status = 'open' + f.updatedAt = nowISO() + await getDb()('findings').where('id', id).update({ data: JSON.stringify(f), updated_at: f.updatedAt }) + return f + }, + + remove: async (id, deletedBy) => { + const row = await getDb()('findings').where('id', id).whereNull('deleted_at').first() + if (!row) return false + const f = rowToFinding(row) + f.deletedAt = nowISO() + f.deletedBy = deletedBy || 'system' + await getDb()('findings').where('id', id).update({ deleted_at: nowISO(), data: JSON.stringify(f) }) + return true + }, + + permanentDelete: async (id) => { + const affected = await getDb()('findings').where('id', id).del() + return affected > 0 + }, + + restore: async (id) => { + const row = await getDb()('findings').where('id', id).first() + if (!row) return null + const f = rowToFinding(row) + delete f.deletedAt + delete f.deletedBy + f.updatedAt = nowISO() + await 
getDb()('findings').where('id', id).update({ deleted_at: null, data: JSON.stringify(f), updated_at: f.updatedAt }) + return f + }, + + getDeleted: async () => { + const rows = await getDb()('findings').whereNotNull('deleted_at') + return rows.map(rowToFinding) + }, + + addAction: async (findingId, fields, updatedBy) => { + const row = await getDb()('findings').where('id', findingId).whereNull('deleted_at').first() + if (!row) return null + const f = rowToFinding(row) + const action = { + id: makeActId(), description: fields.description || '', + responsible: fields.responsible || '', dueDate: fields.dueDate || null, + status: ACT_STATUS.includes(fields.status) ? fields.status : 'open', + updatedAt: nowISO(), updatedBy: updatedBy || 'system', + } + f.actions = f.actions || [] + f.actions.push(action) + f.updatedAt = nowISO() + await getDb()('findings').where('id', findingId).update({ data: JSON.stringify(f), updated_at: f.updatedAt }) + return action + }, + + updateAction: async (findingId, actionId, fields, updatedBy) => { + const row = await getDb()('findings').where('id', findingId).whereNull('deleted_at').first() + if (!row) return null + const f = rowToFinding(row) + const action = (f.actions || []).find(a => a.id === actionId) + if (!action) return null + if (fields.description !== undefined) action.description = fields.description + if (fields.responsible !== undefined) action.responsible = fields.responsible + if (fields.dueDate !== undefined) action.dueDate = fields.dueDate + if (fields.status && ACT_STATUS.includes(fields.status)) action.status = fields.status + action.updatedAt = nowISO() + action.updatedBy = updatedBy || 'system' + f.updatedAt = nowISO() + await getDb()('findings').where('id', findingId).update({ data: JSON.stringify(f), updated_at: f.updatedAt }) + return action + }, + + deleteAction: async (findingId, actionId) => { + const row = await getDb()('findings').where('id', findingId).whereNull('deleted_at').first() + if (!row) return false + 
const f = rowToFinding(row) + const before = (f.actions || []).length + f.actions = (f.actions || []).filter(a => a.id !== actionId) + f.updatedAt = nowISO() + await getDb()('findings').where('id', findingId).update({ data: JSON.stringify(f), updated_at: f.updatedAt }) + return f.actions.length < before + }, + + getSummary: async () => { + const rows = await getDb()('findings').whereNull('deleted_at') + const list = rows.map(rowToFinding) + const bySeverity = { critical: 0, high: 0, medium: 0, low: 0, observation: 0 } + const byStatus = { open: 0, in_progress: 0, resolved: 0, accepted: 0 } + for (const f of list) { + if (bySeverity[f.severity] !== undefined) bySeverity[f.severity]++ + if (byStatus[f.status] !== undefined) byStatus[f.status]++ + } + const openActions = list.reduce((n, f) => n + (f.actions || []).filter(a => a.status !== 'done').length, 0) + const now = new Date() + const overdueActions = list.reduce((n, f) => n + (f.actions || []).filter(a => a.status !== 'done' && a.dueDate && new Date(a.dueDate) < now).length, 0) + return { total: list.length, bySeverity, byStatus, openActions, overdueActions } + }, + + autopurge: async (days = 30) => { + const cutoff = new Date(Date.now() - days * 86400000).toISOString() + const affected = await getDb()('findings').whereNotNull('deleted_at').where('deleted_at', '<', cutoff).del() + return affected + }, +} diff --git a/server/db/stores/gdprStore.js b/server/db/stores/gdprStore.js new file mode 100644 index 0000000..373ead5 --- /dev/null +++ b/server/db/stores/gdprStore.js @@ -0,0 +1,435 @@ +'use strict' + +const { getDb, init: initDb } = require('../knexDatabase') + +function nowISO() { return new Date().toISOString() } +function makeId(prefix) { return `${prefix}_${Date.now()}_${Math.random().toString(36).slice(2,6)}` } +function _json(val, fallback) { if (!val) return fallback; try { return JSON.parse(val) } catch { return fallback } } +function _addDays(isoStr, days) { const d = new Date(isoStr); 
// Enumerations accepted by the validating create/update paths below.
const VVT_LEGAL_BASES = ['consent','contract','legal_obligation','vital_interests','public_task','legitimate_interest']
const VVT_STATUSES = ['draft','approved','archived']
const AV_STATUSES = ['draft','negotiation','signed','active','terminated']
const DSFA_STATUSES = ['draft','review','approved','archived']
const RESIDUAL_RISKS = ['low','medium','high','critical']
const DSFA_DECISIONS = ['proceed','modify','reject','']
const INCIDENT_TYPES = ['unauthorized_access','loss','deletion','theft','ransomware','other']
const INCIDENT_STATUSES = ['detected','contained','reported','closed']
const INCIDENT_RISKS = ['low','medium','high']
const DSAR_TYPES = ['access','rectification','erasure','restriction','portability','objection','review_automated']
const DSAR_STATUSES = ['received','in_progress','extended','completed','refused']
const TOM_CATEGORIES = ['access','encryption','logging','network','application','backup','organizational','training','retention']
const TOM_STATUSES = ['planned','in_progress','implemented','verified']
const TOM_RISKS = ['low','medium','high','critical']

/**
 * Map a gdpr_entries row to the API shape: the JSON `data` payload is
 * flattened into the object; audit columns win over payload keys.
 * @param {object|null} row - knex row or null/undefined.
 * @returns {object|null} API object, or null for a missing row.
 */
function rowToItem(row) {
  if (!row) return null
  return {
    id: row.id,
    ..._json(row.data, {}),
    createdBy: row.created_by,
    createdAt: row.created_at,
    updatedAt: row.updated_at,
    deletedAt: row.deleted_at || null,
  }
}

/**
 * CRUD scaffold for one `gdpr_type` discriminator value in gdpr_entries.
 * Public: getAll/getById. Underscored methods are building blocks the
 * concrete sub-stores re-export under friendlier names.
 */
function _makeSubStore(type) {
  return {
    // All live entries of this type; `entity` narrows to entries that list
    // the entity (or apply globally because applicableEntities is empty).
    getAll: async ({ entity } = {}) => {
      const rows = await getDb()('gdpr_entries').where('gdpr_type', type).whereNull('deleted_at')
      let list = rows.map(rowToItem)
      if (entity) list = list.filter((v) => !v.applicableEntities?.length || v.applicableEntities.includes(entity))
      return list
    },
    getById: async (id) => {
      const row = await getDb()('gdpr_entries').where('id', id).where('gdpr_type', type).whereNull('deleted_at').first()
      return rowToItem(row)
    },
    // Insert a fully-built item; `item` is returned unchanged for chaining.
    _createEntry: async (id, item, createdBy) => {
      const now = nowISO()
      await getDb()('gdpr_entries').insert({ id, gdpr_type: type, data: JSON.stringify(item), created_by: createdBy || 'system', created_at: now, updated_at: now })
      return item
    },
    // Whitelist-based partial update; null when the entry is missing.
    _updateEntry: async (id, fields, updatableKeys) => {
      const row = await getDb()('gdpr_entries').where('id', id).where('gdpr_type', type).first()
      if (!row) return null
      const item = rowToItem(row)
      for (const k of updatableKeys) { if (fields[k] !== undefined) item[k] = fields[k] }
      item.updatedAt = nowISO()
      await getDb()('gdpr_entries').where('id', id).update({ data: JSON.stringify(item), updated_at: item.updatedAt })
      return item
    },
    // Soft delete: sets the column and mirrors deletedAt/deletedBy in the payload.
    _deleteEntry: async (id, deletedBy) => {
      const row = await getDb()('gdpr_entries').where('id', id).where('gdpr_type', type).first()
      if (!row) return false
      const item = rowToItem(row)
      item.deletedAt = nowISO()
      item.deletedBy = deletedBy || null
      await getDb()('gdpr_entries').where('id', id).update({ deleted_at: nowISO(), data: JSON.stringify(item) })
      return true
    },
    _permanentDelete: async (id) => {
      const affected = await getDb()('gdpr_entries').where('id', id).where('gdpr_type', type).del()
      return affected > 0
    },
    // Undo a soft delete.
    _restore: async (id) => {
      // FIX: scope the lookup by gdpr_type like every sibling method
      // (previously any id matched regardless of type), and return the
      // cleaned payload instead of re-parsing the stale pre-update row.data,
      // which still carried deletedBy.
      const row = await getDb()('gdpr_entries').where('id', id).where('gdpr_type', type).first()
      if (!row) return null
      const item = rowToItem(row)
      delete item.deletedAt
      delete item.deletedBy
      await getDb()('gdpr_entries').where('id', id).update({ deleted_at: null, data: JSON.stringify(item) })
      return { ...item, deletedAt: null }
    },
    _getDeleted: async () => {
      const rows = await getDb()('gdpr_entries').where('gdpr_type', type).whereNotNull('deleted_at')
      return rows.map(rowToItem)
    },
  }
}
const vvt = (() => {
  // Records of processing activities (Art. 30 GDPR).
  const base = _makeSubStore('vvt')
  return {
    getAll: base.getAll, getById: base.getById, getDeleted: base._getDeleted,

    /**
     * Create a VVT entry. Unknown enum values fall back to safe defaults
     * ('contract' legal basis, 'draft' status); array fields coerce to [].
     */
    create: async (fields, createdBy) => {
      const id = makeId('vvt')
      const item = {
        id,
        title: fields.title || 'Ohne Titel',
        purpose: fields.purpose || '',
        legalBasis: VVT_LEGAL_BASES.includes(fields.legalBasis) ? fields.legalBasis : 'contract',
        legalBasisNote: fields.legalBasisNote || '',
        dataCategories: Array.isArray(fields.dataCategories) ? fields.dataCategories : [],
        dataSubjectTypes: Array.isArray(fields.dataSubjectTypes) ? fields.dataSubjectTypes : [],
        recipients: Array.isArray(fields.recipients) ? fields.recipients : [],
        internationalTransfer: !!fields.internationalTransfer,
        transferMechanism: fields.transferMechanism || '',
        retentionPeriod: fields.retentionPeriod || '',
        // FIX: explicit radix — parseInt without one was relied on before.
        retentionMonths: fields.retentionMonths ? parseInt(fields.retentionMonths, 10) : null,
        deletionProcedure: fields.deletionProcedure || '',
        isHighRisk: !!fields.isHighRisk,
        automatedDecision: !!fields.automatedDecision,
        linkedAv: Array.isArray(fields.linkedAv) ? fields.linkedAv : [],
        linkedToms: Array.isArray(fields.linkedToms) ? fields.linkedToms : [],
        applicableEntities: Array.isArray(fields.applicableEntities) ? fields.applicableEntities : [],
        status: VVT_STATUSES.includes(fields.status) ? fields.status : 'draft',
        owner: fields.owner || '',
        linkedControls: Array.isArray(fields.linkedControls) ? fields.linkedControls : [],
        linkedPolicies: Array.isArray(fields.linkedPolicies) ? fields.linkedPolicies : [],
        createdBy: createdBy || 'system', createdAt: nowISO(), updatedAt: nowISO(),
      }
      return base._createEntry(id, item, createdBy)
    },

    // Whitelist-based partial update.
    update: async (id, fields) => base._updateEntry(id, fields, [
      'title','purpose','legalBasis','legalBasisNote','dataCategories','dataSubjectTypes',
      'recipients','internationalTransfer','transferMechanism','retentionPeriod','retentionMonths',
      'deletionProcedure','isHighRisk','automatedDecision','linkedAv','linkedToms',
      'applicableEntities','status','owner','linkedControls','linkedPolicies',
    ]),

    delete: base._deleteEntry, permanentDelete: base._permanentDelete, restore: base._restore,
  }
})()
const av = (() => {
  // Processor (Auftragsverarbeitung) contracts, incl. the Art. 28 checklist.
  const base = _makeSubStore('av')
  // Checklist keys in their canonical (serialization) order.
  const ART28_KEYS = ['instructionsOnly','confidentiality','security','subProcessorApproval',
    'assistanceRights','deletionReturn','auditRights','cooperation']
  const checklistFrom = (src) => {
    const out = {}
    for (const k of ART28_KEYS) out[k] = !!(src && src[k])
    return out
  }
  return {
    getAll: base.getAll, getById: base.getById, getDeleted: base._getDeleted,

    // New AV contract; enum/array inputs are normalized, everything else
    // defaults to empty strings / null.
    create: async (fields, createdBy) => {
      const id = makeId('av')
      const item = {
        id,
        title: fields.title || 'Ohne Titel',
        processorName: fields.processorName || '',
        processorCountry: fields.processorCountry || '',
        processorContactEmail: fields.processorContactEmail || '',
        processingScope: fields.processingScope || '',
        linkedVvt: Array.isArray(fields.linkedVvt) ? fields.linkedVvt : [],
        signatureDate: fields.signatureDate || null,
        effectiveUntil: fields.effectiveUntil || null,
        subProcessors: Array.isArray(fields.subProcessors) ? fields.subProcessors : [],
        status: AV_STATUSES.includes(fields.status) ? fields.status : 'draft',
        applicableEntities: Array.isArray(fields.applicableEntities) ? fields.applicableEntities : [],
        transferMechanism: fields.transferMechanism || '',
        art28Checklist: checklistFrom(fields.art28Checklist),
        filePath: fields.filePath || null,
        filename: fields.filename || null,
        notes: fields.notes || '',
        linkedControls: Array.isArray(fields.linkedControls) ? fields.linkedControls : [],
        linkedPolicies: Array.isArray(fields.linkedPolicies) ? fields.linkedPolicies : [],
        createdBy: createdBy || 'system', createdAt: nowISO(), updatedAt: nowISO(),
      }
      return base._createEntry(id, item, createdBy)
    },

    update: async (id, fields) => base._updateEntry(id, fields, [
      'title','processorName','processorCountry','processorContactEmail','processingScope',
      'linkedVvt','signatureDate','effectiveUntil','subProcessors','status',
      'applicableEntities','transferMechanism','art28Checklist','notes','filePath','filename',
      'linkedControls','linkedPolicies',
    ]),

    delete: base._deleteEntry, permanentDelete: base._permanentDelete, restore: base._restore,
  }
})()
const dsfa = (() => {
  // Data-protection impact assessments (Art. 35 GDPR).
  const base = _makeSubStore('dsfa')
  const toList = (v) => (Array.isArray(v) ? v : [])
  const pick = (allowed, value, fallback) => (allowed.includes(value) ? value : fallback)
  return {
    getAll: base.getAll, getById: base.getById, getDeleted: base._getDeleted,

    create: async (fields, createdBy) => {
      const id = makeId('dsfa')
      const item = {
        id,
        title: fields.title || 'Ohne Titel',
        linkedVvtId: fields.linkedVvtId || '',
        processingDescription: fields.processingDescription || '',
        necessityAssessment: fields.necessityAssessment || '',
        risks: toList(fields.risks),
        existingControls: fields.existingControls || '',
        residualRisk: pick(RESIDUAL_RISKS, fields.residualRisk, 'medium'),
        dpoConsulted: !!fields.dpoConsulted,
        dpoOpinion: fields.dpoOpinion || '',
        saConsultationRequired: !!fields.saConsultationRequired,
        decision: pick(DSFA_DECISIONS, fields.decision, ''),
        decisionJustification: fields.decisionJustification || '',
        applicableEntities: toList(fields.applicableEntities),
        status: pick(DSFA_STATUSES, fields.status, 'draft'),
        owner: fields.owner || '',
        approvedBy: fields.approvedBy || null,
        approvedAt: fields.approvedAt || null,
        linkedControls: toList(fields.linkedControls),
        linkedPolicies: toList(fields.linkedPolicies),
        createdBy: createdBy || 'system', createdAt: nowISO(), updatedAt: nowISO(),
      }
      return base._createEntry(id, item, createdBy)
    },

    update: async (id, fields) => base._updateEntry(id, fields, [
      'title','linkedVvtId','processingDescription','necessityAssessment','risks',
      'existingControls','residualRisk','dpoConsulted','dpoOpinion',
      'saConsultationRequired','decision','decisionJustification',
      'applicableEntities','status','owner','approvedBy','approvedAt',
      'linkedControls','linkedPolicies',
    ]),

    delete: base._deleteEntry, permanentDelete: base._permanentDelete, restore: base._restore,
  }
})()
const incidents = (() => {
  // Personal-data breach records (Art. 33/34 GDPR).
  const base = _makeSubStore('incident')
  return {
    getAll: base.getAll, getById: base.getById, getDeleted: base._getDeleted,

    /**
     * Record a breach. discoveredAt defaults to now; enum fields fall back
     * to 'other' / 'medium' / 'detected'.
     */
    create: async (fields, createdBy) => {
      const id = makeId('inc')
      const item = {
        id,
        title: fields.title || 'Datenpanne',
        discoveredAt: fields.discoveredAt || nowISO(),
        incidentType: INCIDENT_TYPES.includes(fields.incidentType) ? fields.incidentType : 'other',
        dataCategories: Array.isArray(fields.dataCategories) ? fields.dataCategories : [],
        dataSubjectTypes: Array.isArray(fields.dataSubjectTypes) ? fields.dataSubjectTypes : [],
        // FIX: explicit radix — parseInt without one was relied on before.
        estimatedAffected: fields.estimatedAffected ? parseInt(fields.estimatedAffected, 10) : null,
        containmentMeasures: fields.containmentMeasures || '',
        rootCause: fields.rootCause || '',
        riskLevel: INCIDENT_RISKS.includes(fields.riskLevel) ? fields.riskLevel : 'medium',
        saNotificationRequired: !!fields.saNotificationRequired,
        saNotifiedAt: fields.saNotifiedAt || null,
        saReference: fields.saReference || '',
        dsNotificationRequired: !!fields.dsNotificationRequired,
        dsNotifiedAt: fields.dsNotifiedAt || null,
        remediationMeasures: fields.remediationMeasures || '',
        linkedVvt: Array.isArray(fields.linkedVvt) ? fields.linkedVvt : [],
        applicableEntities: Array.isArray(fields.applicableEntities) ? fields.applicableEntities : [],
        status: INCIDENT_STATUSES.includes(fields.status) ? fields.status : 'detected',
        reportedBy: createdBy || 'system', createdAt: nowISO(), updatedAt: nowISO(),
      }
      return base._createEntry(id, item, createdBy)
    },

    update: async (id, fields) => base._updateEntry(id, fields, [
      'title','discoveredAt','incidentType','dataCategories','dataSubjectTypes',
      'estimatedAffected','containmentMeasures','rootCause','riskLevel',
      'saNotificationRequired','saNotifiedAt','saReference',
      'dsNotificationRequired','dsNotifiedAt','remediationMeasures',
      'linkedVvt','applicableEntities','status',
    ]),

    delete: base._deleteEntry, permanentDelete: base._permanentDelete, restore: base._restore,
  }
})()
const dsar = (() => {
  // Data-subject requests (Art. 15-22); response deadline = receipt + 30 days.
  const base = _makeSubStore('dsar')
  return {
    getAll: base.getAll, getById: base.getById, getDeleted: base._getDeleted,

    create: async (fields, createdBy) => {
      const id = makeId('dsar')
      const receivedAt = fields.receivedAt || nowISO()
      const item = {
        id,
        requestType: DSAR_TYPES.includes(fields.requestType) ? fields.requestType : 'access',
        dataSubjectName: fields.dataSubjectName || '',
        dataSubjectEmail: fields.dataSubjectEmail || '',
        receivedAt,
        deadline: _addDays(receivedAt, 30),
        extendedDeadline: null,
        identityVerified: !!fields.identityVerified,
        affectedVvt: Array.isArray(fields.affectedVvt) ? fields.affectedVvt : [],
        response: fields.response || '',
        status: DSAR_STATUSES.includes(fields.status) ? fields.status : 'received',
        refusalReason: fields.refusalReason || '',
        completedAt: fields.completedAt || null,
        handledBy: fields.handledBy || createdBy || '',
        applicableEntities: Array.isArray(fields.applicableEntities) ? fields.applicableEntities : [],
        createdAt: nowISO(), updatedAt: nowISO(),
      }
      return base._createEntry(id, item, createdBy)
    },

    // Custom update: derives extendedDeadline / completedAt from status changes.
    update: async (id, fields) => {
      const row = await getDb()('gdpr_entries').where('id', id).where('gdpr_type', 'dsar').first()
      if (!row) return null
      const item = rowToItem(row)
      const updatable = ['requestType','dataSubjectName','dataSubjectEmail','receivedAt','deadline',
        'extendedDeadline','identityVerified','affectedVvt','response','status',
        'refusalReason','completedAt','handledBy','applicableEntities']
      for (const key of updatable) {
        if (fields[key] !== undefined) item[key] = fields[key]
      }
      // Extension pushes the deadline to 90 days after receipt (set once).
      if (fields.status === 'extended' && !item.extendedDeadline) item.extendedDeadline = _addDays(item.receivedAt, 90)
      if (fields.status === 'completed' && !item.completedAt) item.completedAt = nowISO()
      item.updatedAt = nowISO()
      await getDb()('gdpr_entries').where('id', id).update({ data: JSON.stringify(item), updated_at: item.updatedAt })
      return item
    },

    delete: base._deleteEntry, permanentDelete: base._permanentDelete, restore: base._restore,
  }
})()
const toms = (() => {
  // Technical & organisational measures (Art. 32 GDPR).
  const base = _makeSubStore('tom')
  return {
    // Like the generic getAll, plus an optional exact-match category filter.
    getAll: async ({ entity, category } = {}) => {
      const rows = await getDb()('gdpr_entries').where('gdpr_type', 'tom').whereNull('deleted_at')
      let list = rows.map(rowToItem)
      if (entity) list = list.filter((t) => !t.applicableEntities?.length || t.applicableEntities.includes(entity))
      if (category) list = list.filter((t) => t.category === category)
      return list
    },
    getById: base.getById, getDeleted: base._getDeleted,

    create: async (fields, createdBy) => {
      const id = makeId('tom')
      const item = {
        id,
        title: fields.title || 'Ohne Titel',
        category: TOM_CATEGORIES.includes(fields.category) ? fields.category : 'organizational',
        description: fields.description || '',
        implementation: fields.implementation || '',
        status: TOM_STATUSES.includes(fields.status) ? fields.status : 'planned',
        owner: fields.owner || '',
        evidenceNote: fields.evidenceNote || '',
        retentionRule: fields.retentionRule || '',
        linkedVvt: Array.isArray(fields.linkedVvt) ? fields.linkedVvt : [],
        riskLevel: TOM_RISKS.includes(fields.riskLevel) ? fields.riskLevel : 'medium',
        reviewDate: fields.reviewDate || null,
        applicableEntities: Array.isArray(fields.applicableEntities) ? fields.applicableEntities : [],
        linkedControls: Array.isArray(fields.linkedControls) ? fields.linkedControls : [],
        linkedPolicies: Array.isArray(fields.linkedPolicies) ? fields.linkedPolicies : [],
        createdBy: createdBy || 'system', createdAt: nowISO(), updatedAt: nowISO(),
      }
      return base._createEntry(id, item, createdBy)
    },

    update: async (id, fields) => base._updateEntry(id, fields, [
      'title','category','description','implementation','status','owner',
      'evidenceNote','retentionRule','linkedVvt','riskLevel','reviewDate','applicableEntities',
      'linkedControls','linkedPolicies',
    ]),

    delete: base._deleteEntry, permanentDelete: base._permanentDelete, restore: base._restore,
  }
})()

// Singleton record describing the appointed data-protection officer (DSB).
const dsb = {
  get: async () => {
    const row = await getDb()('gdpr_entries').where('gdpr_type', 'dsb').first()
    return row ? _json(row.data, {}) : {}
  },
  // Upsert: first update creates the 'dsb_singleton' row.
  update: async (fields) => {
    const row = await getDb()('gdpr_entries').where('gdpr_type', 'dsb').first()
    const current = row ? _json(row.data, {}) : {}
    for (const key of ['type','name','email','phone','appointmentDate','contractEnd','notes','filePath','filename']) {
      if (fields[key] !== undefined) current[key] = fields[key]
    }
    current.updatedAt = nowISO()
    if (row) {
      await getDb()('gdpr_entries').where('id', row.id).update({ data: JSON.stringify(current), updated_at: nowISO() })
    } else {
      await getDb()('gdpr_entries').insert({
        id: 'dsb_singleton', gdpr_type: 'dsb', data: JSON.stringify(current),
        created_by: 'system', created_at: nowISO(), updated_at: nowISO(),
      })
    }
    return current
  },
}
// Immutable audit trail of confirmed erasures (gdpr_deletion_log rows).
const deletionLog = {
  getAll: async () => {
    const rows = await getDb()('gdpr_deletion_log')
    return rows.map((r) => _json(r.data, {}))
  },

  // Shared scan: live VVT entries with a retention period that have not yet
  // been confirmed as deleted, annotated with their computed due date.
  // (This logic was previously duplicated verbatim in getDue and getUpcoming.)
  _pending: async () => {
    const vvtRows = (await getDb()('gdpr_entries').where('gdpr_type', 'vvt').whereNull('deleted_at')).map(rowToItem)
    const loggedRows = await getDb()('gdpr_deletion_log')
    const logged = new Set(loggedRows.map((r) => _json(r.data, {}).vvtId))
    const now = new Date()
    return vvtRows
      .filter((v) => v.retentionMonths && !logged.has(v.id))
      .map((v) => {
        // Retention months are approximated as 30-day blocks (as before).
        const created = new Date(v.createdAt || now)
        const due = new Date(created.getTime() + v.retentionMonths * 30 * 86400000)
        return { ...v, _due: due, deletionDue: due.toISOString().slice(0, 10) }
      })
  },

  // Entries whose retention period has already elapsed.
  getDue: async () => {
    const now = new Date()
    const pending = await deletionLog._pending()
    return pending.filter((v) => v._due <= now).map(({ _due, ...v }) => v)
  },

  // Entries becoming due within the next `daysAhead` days.
  getUpcoming: async (daysAhead = 90) => {
    const now = new Date()
    const cutoff = new Date(now.getTime() + daysAhead * 86400000)
    const pending = await deletionLog._pending()
    return pending.filter((v) => v._due > now && v._due <= cutoff).map(({ _due, ...v }) => v)
  },

  // Record a confirmed erasure; the log row itself is never soft-deleted.
  confirm: async (fields, confirmedBy) => {
    const id = makeId('del')
    const now = nowISO()
    const entry = {
      id, vvtId: fields.vvtId, vvtTitle: fields.vvtTitle || '',
      confirmedAt: now, confirmedBy: confirmedBy || 'system',
      method: fields.method || 'manual', evidence: fields.evidence || '', note: fields.note || '',
    }
    await getDb()('gdpr_deletion_log').insert({ id, data: JSON.stringify(entry), deleted_by: confirmedBy || 'system', deleted_at: now })
    return entry
  },
}

/**
 * Dashboard aggregate across all GDPR sub-stores, optionally scoped to one
 * legal entity. Sub-store reads are independent, so they run in parallel.
 */
async function getSummary(entityId) {
  const opts = entityId ? { entity: entityId } : {}
  const [vvtList, avList, dsaList, incList, tomList, dsbData] = await Promise.all([
    vvt.getAll(opts), av.getAll(opts), dsar.getAll(opts),
    incidents.getAll(opts), toms.getAll(opts), dsb.get(),
  ])
  const now = new Date()
  const dsarClosed = (d) => ['completed', 'refused'].includes(d.status)
  return {
    vvt: {
      total: vvtList.length,
      highRisk: vvtList.filter((v) => v.isHighRisk).length,
      noLegal: vvtList.filter((v) => !v.legalBasis).length,
    },
    av: {
      active: avList.filter((a) => a.status === 'active' || a.status === 'signed').length,
      total: avList.length,
    },
    dsar: {
      open: dsaList.filter((d) => !dsarClosed(d)).length,
      overdue: dsaList.filter((d) => {
        if (dsarClosed(d)) return false
        const dl = d.extendedDeadline || d.deadline
        return dl && new Date(dl) < now
      }).length,
    },
    incidents: {
      open: incList.filter((i) => i.status !== 'closed').length,
      // Art. 33: supervisory authority must be notified within 72 hours.
      missed72h: incList.filter((i) => {
        if (i.status === 'closed' || !i.saNotificationRequired || i.saNotifiedAt) return false
        return now - new Date(i.discoveredAt) > 72 * 3600 * 1000
      }).length,
    },
    toms: {
      implemented: tomList.filter((t) => t.status === 'implemented' || t.status === 'verified').length,
      total: tomList.length,
    },
    dsbSet: !!dsbData.name,
  }
}
Date(i.discoveredAt)) > 72 * 3600 * 1000 + }).length, + }, + toms: { implemented: tomList.filter(t => t.status === 'implemented' || t.status === 'verified').length, total: tomList.length }, + dsbSet: !!(dsbData.name), + } +} + +module.exports = { + init: async () => { await initDb() }, + vvt, av, dsfa, incidents, dsar, toms, dsb, deletionLog, getSummary, + VVT_LEGAL_BASES, VVT_STATUSES, AV_STATUSES, + DSFA_STATUSES, RESIDUAL_RISKS, INCIDENT_TYPES, INCIDENT_STATUSES, + DSAR_TYPES, DSAR_STATUSES, TOM_CATEGORIES, TOM_STATUSES, +} diff --git a/server/db/stores/goalsStore.js b/server/db/stores/goalsStore.js new file mode 100644 index 0000000..5560b80 --- /dev/null +++ b/server/db/stores/goalsStore.js @@ -0,0 +1,208 @@ +'use strict' + +const { getDb, init: initDb } = require('../knexDatabase') + +function nowISO() { return new Date().toISOString() } +function makeId() { return `goal_${Date.now()}_${Math.random().toString(36).slice(2,6)}` } +function makeKpiId() { return `kpi_${Date.now()}_${Math.random().toString(36).slice(2,4)}` } +function _json(val, fallback) { if (!val) return fallback; try { return JSON.parse(val) } catch { return fallback } } + +const CATEGORIES = [ + { id: 'confidentiality', label: 'Vertraulichkeit' }, + { id: 'integrity', label: 'Integrität' }, + { id: 'availability', label: 'Verfügbarkeit' }, + { id: 'compliance', label: 'Compliance' }, + { id: 'operational', label: 'Betrieblich' }, + { id: 'technical', label: 'Technisch' }, + { id: 'organizational', label: 'Organisatorisch' }, +] + +const STATUSES = [ + { id: 'planned', label: 'Geplant' }, { id: 'active', label: 'Aktiv' }, + { id: 'achieved', label: 'Erreicht' }, { id: 'missed', label: 'Verfehlt' }, + { id: 'cancelled', label: 'Abgebrochen' }, +] + +const PRIORITIES = [ + { id: 'low', label: 'Niedrig' }, { id: 'medium', label: 'Mittel' }, + { id: 'high', label: 'Hoch' }, { id: 'critical', label: 'Kritisch' }, +] + +function calcProgress(goal) { + const kpis = (goal.kpis || []).filter(k => 
/**
 * Derived completion percentage for a goal.
 * KPIs with a positive targetValue are averaged as
 * min(100, round(current/target*100)); with no measurable KPI the manually
 * maintained `progress` field is returned instead.
 * @param {object} goal - goal object (kpis/progress optional).
 * @returns {number} integer percentage 0..100.
 */
function calcProgress(goal) {
  const measurable = (goal.kpis || []).filter((k) => k.targetValue > 0)
  if (!measurable.length) return goal.progress || 0
  const total = measurable.reduce((sum, k) => {
    // FIX: a KPI without a numeric currentValue used to poison the average
    // with NaN; missing/non-numeric values now count as 0.
    const current = Number(k.currentValue) || 0
    return sum + Math.min(100, Math.round((current / k.targetValue) * 100))
  }, 0)
  return Math.round(total / measurable.length)
}

/**
 * goals table row -> API object. Scalar columns map 1:1; the JSON `data`
 * column supplies the KPI list, manual progress, notes, attachments etc.
 * Returns null for a missing row.
 */
function rowToGoal(row) {
  if (!row) return null
  const d = _json(row.data, {})
  return {
    id: row.id, title: row.title, description: row.description,
    category: row.category, status: row.status, priority: row.priority,
    owner: row.owner, targetValue: row.target_value, currentValue: row.current_value,
    unit: row.unit, dueDate: row.due_date, reviewDate: row.review_date,
    targetDate: d.targetDate || row.due_date || null,
    progress: d.progress || 0,
    kpis: d.kpis || [],
    linkedControls: _json(row.linked_controls, []),
    linkedPolicies: d.linkedPolicies || [],
    applicableEntities: _json(row.applicable_entities, []),
    notes: d.notes || '', attachments: d.attachments || [],
    deletedBy: d.deletedBy || '',
    createdBy: row.created_by, createdAt: row.created_at,
    updatedAt: row.updated_at, deletedAt: row.deleted_at || null,
    progressCalc: 0, // callers fill this in via calcProgress
  }
}

// Serialize the JSON-only portion of a goal for the `data` column.
function packData(g) {
  return JSON.stringify({
    targetDate: g.targetDate, progress: g.progress,
    kpis: g.kpis || [], linkedPolicies: g.linkedPolicies || [],
    notes: g.notes || '', attachments: g.attachments || [],
    deletedBy: g.deletedBy || '',
  })
}
rowToGoal(row) + g.progressCalc = calcProgress(g) + return g + }, + + create: async (fields, createdBy) => { + const id = makeId() + const now = nowISO() + const item = { + id, title: fields.title || 'Ohne Titel', + description: fields.description || '', + category: CATEGORIES.some(c => c.id === fields.category) ? fields.category : 'organizational', + status: STATUSES.some(s => s.id === fields.status) ? fields.status : 'planned', + priority: PRIORITIES.some(p => p.id === fields.priority) ? fields.priority : 'medium', + owner: fields.owner || createdBy || '', + targetDate: fields.targetDate || null, + reviewDate: fields.reviewDate || null, + progress: parseInt(fields.progress) || 0, + kpis: Array.isArray(fields.kpis) ? fields.kpis.map(k => ({ ...k, id: k.id || makeKpiId() })) : [], + linkedControls: Array.isArray(fields.linkedControls) ? fields.linkedControls : [], + linkedPolicies: Array.isArray(fields.linkedPolicies) ? fields.linkedPolicies : [], + applicableEntities: Array.isArray(fields.applicableEntities) ? 
fields.applicableEntities : [], + notes: fields.notes || '', attachments: [], + createdBy: createdBy || 'system', + } + await getDb()('goals').insert({ + id, title: item.title, description: item.description, + category: item.category, status: item.status, priority: item.priority, + owner: item.owner, target_value: null, current_value: null, unit: null, + due_date: item.targetDate, review_date: item.reviewDate, + applicable_entities: JSON.stringify(item.applicableEntities), + linked_controls: JSON.stringify(item.linkedControls), + data: packData(item), + created_by: item.createdBy, created_at: now, updated_at: now, + }) + const result = { ...item, createdAt: now, updatedAt: now, deletedAt: null } + result.progressCalc = calcProgress(result) + return result + }, + + update: async (id, fields) => { + const row = await getDb()('goals').where('id', id).whereNull('deleted_at').first() + if (!row) return null + const g = rowToGoal(row) + const updatable = ['title','description','category','status','priority','owner', + 'targetDate','reviewDate','progress','kpis','linkedControls','linkedPolicies','applicableEntities','notes'] + for (const k of updatable) { + if (fields[k] !== undefined) g[k] = fields[k] + } + if (fields.kpis) g.kpis = fields.kpis.map(k => ({ ...k, id: k.id || makeKpiId() })) + g.updatedAt = nowISO() + await getDb()('goals').where('id', id).update({ + title: g.title, description: g.description, + category: g.category, status: g.status, priority: g.priority, + owner: g.owner, due_date: g.targetDate, review_date: g.reviewDate, + applicable_entities: JSON.stringify(g.applicableEntities || []), + linked_controls: JSON.stringify(g.linkedControls || []), + data: packData(g), updated_at: g.updatedAt, + }) + g.progressCalc = calcProgress(g) + return g + }, + + delete: async (id, deletedBy) => { + const row = await getDb()('goals').where('id', id).first() + if (!row) return false + const d = _json(row.data, {}) + d.deletedBy = deletedBy || '' + await 
getDb()('goals').where('id', id).update({ + deleted_at: nowISO(), data: JSON.stringify(d), + }) + return true + }, + + permanentDelete: async (id) => { + const affected = await getDb()('goals').where('id', id).del() + return affected > 0 + }, + + restore: async (id) => { + const row = await getDb()('goals').where('id', id).first() + if (!row) return null + const d = _json(row.data, {}) + delete d.deletedBy + await getDb()('goals').where('id', id).update({ + deleted_at: null, data: JSON.stringify(d), updated_at: nowISO(), + }) + const g = rowToGoal({ ...row, deleted_at: null }) + g.progressCalc = calcProgress(g) + return g + }, + + getDeleted: async () => { + const rows = await getDb()('goals').whereNotNull('deleted_at') + return rows.map(r => { const g = rowToGoal(r); g.progressCalc = calcProgress(g); return g }) + }, + + getSummary: async () => { + const rows = await getDb()('goals').whereNull('deleted_at') + const list = rows.map(r => { const g = rowToGoal(r); g.progressCalc = calcProgress(g); return g }) + const now = new Date() + return { + total: list.length, + active: list.filter(g => g.status === 'active').length, + achieved: list.filter(g => g.status === 'achieved').length, + planned: list.filter(g => g.status === 'planned').length, + overdue: list.filter(g => g.targetDate && new Date(g.targetDate) < now && !['achieved','cancelled'].includes(g.status)).length, + avgProgress: list.length ? 
Math.round(list.reduce((s, g) => s + g.progressCalc, 0) / list.length) : 0, + } + }, + + getCalendarEvents: async () => { + const rows = await getDb()('goals').whereNull('deleted_at') + const list = rows.map(r => { const g = rowToGoal(r); g.progressCalc = calcProgress(g); return g }) + const events = [] + for (const g of list) { + if (['cancelled','achieved'].includes(g.status)) continue + if (g.targetDate) events.push({ date: g.targetDate, type: 'goal_due', label: `Ziel fällig: ${g.title}`, goalId: g.id, title: g.title }) + if (g.reviewDate) events.push({ date: g.reviewDate, type: 'goal_review', label: `Ziel-Review: ${g.title}`, goalId: g.id, title: g.title }) + } + return events + }, + + CATEGORIES, STATUSES, PRIORITIES, +} diff --git a/server/db/stores/governanceStore.js b/server/db/stores/governanceStore.js new file mode 100644 index 0000000..0e8ad01 --- /dev/null +++ b/server/db/stores/governanceStore.js @@ -0,0 +1,183 @@ +'use strict' + +const { getDb, init: initDb } = require('../knexDatabase') + +function nowISO() { return new Date().toISOString() } +function makeId(prefix) { return `${prefix}_${require('crypto').randomBytes(4).toString('hex')}` } +function _json(val, fallback) { if (!val) return fallback; try { return JSON.parse(val) } catch { return fallback } } + +function rowToItem(row) { + if (!row) return null + return { id: row.id, ..._json(row.data, {}), createdBy: row.created_by, createdAt: row.created_at, updatedAt: row.updated_at, deletedAt: row.deleted_at || null } +} + +module.exports = { + init: async () => { await initDb() }, + + getReviews: async () => { + const rows = await getDb()('governance_entries').where('gov_type', 'review').whereNull('deleted_at') + return rows.map(rowToItem) + }, + getReviewById: async (id) => { + const row = await getDb()('governance_entries').where('id', id).where('gov_type', 'review').whereNull('deleted_at').first() + return rowToItem(row) + }, + createReview: async (fields, { createdBy } = {}) => { + const id = 
makeId('mgmt') + const now = nowISO() + const item = { + id, title: fields.title || 'Ohne Titel', type: fields.type || 'annual', + date: fields.date || '', nextReviewDate: fields.nextReviewDate || '', + status: fields.status || 'planned', chair: fields.chair || '', + participants: fields.participants || '', + inputAuditResults: fields.inputAuditResults || '', + inputStakeholderFeedback: fields.inputStakeholderFeedback || '', + inputPerformance: fields.inputPerformance || '', + inputNonconformities: fields.inputNonconformities || '', + inputPreviousActions: fields.inputPreviousActions || '', + inputRisksOpportunities: fields.inputRisksOpportunities || '', + inputExternalChanges: fields.inputExternalChanges || '', + decisions: fields.decisions || '', improvements: fields.improvements || '', + resourceNeeds: fields.resourceNeeds || '', notes: fields.notes || '', + linkedControls: Array.isArray(fields.linkedControls) ? fields.linkedControls : [], + linkedPolicies: Array.isArray(fields.linkedPolicies) ? 
fields.linkedPolicies : [], + createdBy: createdBy || 'system', createdAt: now, updatedAt: now, deletedAt: null, + } + await getDb()('governance_entries').insert({ id, gov_type: 'review', data: JSON.stringify(item), created_by: createdBy || 'system', created_at: now, updated_at: now }) + return item + }, + updateReview: async (id, patch, { changedBy } = {}) => { + const row = await getDb()('governance_entries').where('id', id).where('gov_type', 'review').whereNull('deleted_at').first() + if (!row) return null + const item = rowToItem(row) + const allowed = ['title','type','date','nextReviewDate','status','chair','participants', + 'inputAuditResults','inputStakeholderFeedback','inputPerformance','inputNonconformities', + 'inputPreviousActions','inputRisksOpportunities','inputExternalChanges', + 'decisions','improvements','resourceNeeds','notes','linkedControls','linkedPolicies'] + for (const k of allowed) { if (patch[k] !== undefined) item[k] = patch[k] } + item.updatedAt = nowISO() + if (changedBy) item.updatedBy = changedBy + await getDb()('governance_entries').where('id', id).update({ data: JSON.stringify(item), updated_at: item.updatedAt }) + return item + }, + deleteReview: async (id) => { + const affected = await getDb()('governance_entries').where('id', id).where('gov_type', 'review').whereNull('deleted_at').update({ deleted_at: nowISO() }) + return affected > 0 + }, + + getActions: async () => { + const rows = await getDb()('governance_entries').where('gov_type', 'action').whereNull('deleted_at') + return rows.map(rowToItem) + }, + getActionById: async (id) => { + const row = await getDb()('governance_entries').where('id', id).where('gov_type', 'action').whereNull('deleted_at').first() + return rowToItem(row) + }, + createAction: async (fields, { createdBy } = {}) => { + const id = makeId('gact') + const now = nowISO() + const item = { + id, title: fields.title || 'Ohne Titel', description: fields.description || '', + source: fields.source || 
'management_review', sourceRef: fields.sourceRef || '', + owner: fields.owner || '', ownerEmail: fields.ownerEmail || '', + dueDate: fields.dueDate || '', completedDate: fields.completedDate || '', + priority: fields.priority || 'medium', status: fields.status || 'open', + progress: typeof fields.progress === 'number' ? fields.progress : 0, + notes: fields.notes || '', entityId: fields.entityId || '', + linkedControls: Array.isArray(fields.linkedControls) ? fields.linkedControls : [], + linkedPolicies: Array.isArray(fields.linkedPolicies) ? fields.linkedPolicies : [], + createdBy: createdBy || 'system', createdAt: now, updatedAt: now, deletedAt: null, + } + await getDb()('governance_entries').insert({ id, gov_type: 'action', data: JSON.stringify(item), created_by: createdBy || 'system', created_at: now, updated_at: now }) + return item + }, + updateAction: async (id, patch, { changedBy } = {}) => { + const row = await getDb()('governance_entries').where('id', id).where('gov_type', 'action').whereNull('deleted_at').first() + if (!row) return null + const item = rowToItem(row) + const allowed = ['title','description','source','sourceRef','owner','ownerEmail', + 'dueDate','completedDate','priority','status','progress','notes','entityId','linkedControls','linkedPolicies'] + for (const k of allowed) { if (patch[k] !== undefined) item[k] = patch[k] } + if (patch.status === 'completed' && !item.completedDate) item.completedDate = nowISO().slice(0, 10) + item.updatedAt = nowISO() + if (changedBy) item.updatedBy = changedBy + await getDb()('governance_entries').where('id', id).update({ data: JSON.stringify(item), updated_at: item.updatedAt }) + return item + }, + deleteAction: async (id) => { + const affected = await getDb()('governance_entries').where('id', id).where('gov_type', 'action').whereNull('deleted_at').update({ deleted_at: nowISO() }) + return affected > 0 + }, + + getMeetings: async () => { + const rows = await getDb()('governance_entries').where('gov_type', 
'meeting').whereNull('deleted_at') + return rows.map(rowToItem) + }, + getMeetingById: async (id) => { + const row = await getDb()('governance_entries').where('id', id).where('gov_type', 'meeting').whereNull('deleted_at').first() + return rowToItem(row) + }, + createMeeting: async (fields, { createdBy } = {}) => { + const id = makeId('meet') + const now = nowISO() + const item = { + id, title: fields.title || 'Ohne Titel', committee: fields.committee || 'isms_committee', + date: fields.date || '', location: fields.location || '', + chair: fields.chair || '', secretary: fields.secretary || '', + participants: fields.participants || '', agenda: fields.agenda || '', + decisions: fields.decisions || '', nextMeetingDate: fields.nextMeetingDate || '', + approved: fields.approved === true, approvedBy: fields.approvedBy || '', + notes: fields.notes || '', + linkedControls: Array.isArray(fields.linkedControls) ? fields.linkedControls : [], + linkedPolicies: Array.isArray(fields.linkedPolicies) ? 
fields.linkedPolicies : [], + createdBy: createdBy || 'system', createdAt: now, updatedAt: now, deletedAt: null, + } + await getDb()('governance_entries').insert({ id, gov_type: 'meeting', data: JSON.stringify(item), created_by: createdBy || 'system', created_at: now, updated_at: now }) + return item + }, + updateMeeting: async (id, patch, { changedBy } = {}) => { + const row = await getDb()('governance_entries').where('id', id).where('gov_type', 'meeting').whereNull('deleted_at').first() + if (!row) return null + const item = rowToItem(row) + const allowed = ['title','committee','date','location','chair','secretary','participants', + 'agenda','decisions','nextMeetingDate','approved','approvedBy','notes','linkedControls','linkedPolicies'] + for (const k of allowed) { if (patch[k] !== undefined) item[k] = patch[k] } + item.updatedAt = nowISO() + if (changedBy) item.updatedBy = changedBy + await getDb()('governance_entries').where('id', id).update({ data: JSON.stringify(item), updated_at: item.updatedAt }) + return item + }, + deleteMeeting: async (id) => { + const affected = await getDb()('governance_entries').where('id', id).where('gov_type', 'meeting').whereNull('deleted_at').update({ deleted_at: nowISO() }) + return affected > 0 + }, + + getSummary: async () => { + const db = getDb() + const reviews = (await db('governance_entries').where('gov_type', 'review').whereNull('deleted_at')).map(rowToItem) + const actions = (await db('governance_entries').where('gov_type', 'action').whereNull('deleted_at')).map(rowToItem) + const meetings = (await db('governance_entries').where('gov_type', 'meeting').whereNull('deleted_at')).map(rowToItem) + const today = new Date().toISOString().slice(0, 10) + const in90 = new Date(Date.now() + 90 * 86400000).toISOString().slice(0, 10) + return { + reviews: { + total: reviews.length, planned: reviews.filter(r => r.status === 'planned').length, + completed: reviews.filter(r => r.status === 'completed').length, + approved: 
reviews.filter(r => r.status === 'approved').length, + nextPlanned: reviews.filter(r => r.status === 'planned' && r.date).map(r => r.date).sort()[0] || null, + }, + actions: { + total: actions.length, open: actions.filter(a => a.status === 'open').length, + inProgress: actions.filter(a => a.status === 'in_progress').length, + completed: actions.filter(a => a.status === 'completed').length, + overdue: actions.filter(a => (a.status === 'open' || a.status === 'in_progress') && a.dueDate && a.dueDate < today).length, + critical: actions.filter(a => a.priority === 'critical' && (a.status === 'open' || a.status === 'in_progress')).length, + }, + meetings: { + total: meetings.length, + upcoming: meetings.filter(m => m.date && m.date >= today && m.date <= in90).length, + lastMeeting: meetings.filter(m => m.date && m.date < today).map(m => m.date).sort().slice(-1)[0] || null, + }, + } + }, +} diff --git a/server/db/stores/guidanceStore.js b/server/db/stores/guidanceStore.js new file mode 100644 index 0000000..d4e2dcc --- /dev/null +++ b/server/db/stores/guidanceStore.js @@ -0,0 +1,265 @@ +// © 2026 Claude Hecker — ISMS Builder — AGPL-3.0 +'use strict' + +const { getDb } = require('../knexDatabase') + +const VALID_CATEGORIES = ['systemhandbuch', 'rollen', 'policy-prozesse', 'soa-audit', 'admin-intern'] + +const ROLE_RANK = { reader: 1, revision: 1, editor: 2, dept_head: 2, qmb: 2, contentowner: 3, auditor: 3, admin: 4 } + +function nowISO() { return new Date().toISOString() } + +function makeId() { + return 'guid_' + Date.now() + '_' + Math.random().toString(36).slice(2, 7) +} + +function _roleRank(role) { return ROLE_RANK[(role || '').toLowerCase()] || 1 } + +function _parse(val, fallback) { + if (!val) return fallback + try { return JSON.parse(val) } catch { return fallback } +} + +function rowToDoc(row) { + if (!row) return null + return { + id: row.id, + category: row.category, + title: row.title, + type: row.type || 'markdown', + content: row.content || '', + filename: 
row.file_name || null, + filePath: null, + version: row.version || 1, + minRole: row.min_role || null, + linkedControls: _parse(row.linked_controls, []), + linkedPolicies: _parse(row.linked_policies, []), + pinOrder: row.pin_order || null, + seedId: row.seed_id || null, + createdBy: row.created_by || 'system', + createdAt: row.created_at, + updatedAt: row.updated_at, + deletedAt: row.deleted_at || null, + deletedBy: row.deleted_by || null, + } +} + +function publicDoc(doc) { + if (!doc) return null + const { filePath, ...rest } = doc + return rest +} + +function _visibleFor(doc, userRank) { + if (!doc.minRole) return true + return userRank >= (_roleRank(doc.minRole)) +} + +async function init() { await getDb() } + +async function getAll(userRank) { + const rank = userRank != null ? userRank : 1 + const db = getDb() + const rows = await db('guidance').whereNull('deleted_at') + return rows.map(rowToDoc).filter(d => _visibleFor(d, rank)).map(publicDoc) +} + +async function search(query, userRank) { + if (!query || !query.trim()) return [] + const rank = userRank != null ? userRank : 1 + const db = getDb() + const q = query.trim().toLowerCase() + const rows = await db('guidance').whereNull('deleted_at') + return rows + .map(rowToDoc) + .filter(d => _visibleFor(d, rank)) + .filter(d => { + if (d.title && d.title.toLowerCase().includes(q)) return true + if (d.content && d.content.toLowerCase().includes(q)) return true + return false + }) + .map(d => { + const pub = publicDoc(d) + if (d.content && d.content.toLowerCase().includes(q)) { + const idx = d.content.toLowerCase().indexOf(q) + const start = Math.max(0, idx - 60) + const end = Math.min(d.content.length, idx + q.length + 60) + pub.excerpt = (start > 0 ? '…' : '') + d.content.slice(start, end).replace(/\n/g, ' ') + (end < d.content.length ? '…' : '') + } + return pub + }) +} + +async function getByCategory(cat, userRank) { + const rank = userRank != null ? 
userRank : 1 + const db = getDb() + const rows = await db('guidance').where('category', cat).whereNull('deleted_at') + return rows + .map(rowToDoc) + .filter(d => _visibleFor(d, rank)) + .sort((a, b) => { + const ap = a.pinOrder != null ? a.pinOrder : Infinity + const bp = b.pinOrder != null ? b.pinOrder : Infinity + if (ap !== bp) return ap - bp + return new Date(a.createdAt) - new Date(b.createdAt) + }) + .map(publicDoc) +} + +async function getById(id) { + const db = getDb() + const row = await db('guidance').where('id', id).whereNull('deleted_at').first() + return row ? rowToDoc(row) : null +} + +async function create({ category, title, type, content, filename, filePath, createdBy, minRole, linkedControls, seedId, linkedPolicies }) { + if (!VALID_CATEGORIES.includes(category)) throw new Error('Invalid category') + const id = makeId() + const now = nowISO() + const db = getDb() + const insertData = { + id, + category, + title: title || 'Ohne Titel', + type: type || 'markdown', + content: content || '', + file_name: filename || null, + version: 1, + min_role: minRole || null, + linked_controls: JSON.stringify(Array.isArray(linkedControls) ? linkedControls : []), + linked_policies: JSON.stringify(Array.isArray(linkedPolicies) ? 
linkedPolicies : []), + seed_id: seedId || null, + created_by: createdBy || 'system', + created_at: now, + updated_at: now, + } + await db('guidance').insert(insertData) + const doc = await getById(id) + return publicDoc(doc) +} + +async function update(id, fields) { + const db = getDb() + const existing = await db('guidance').where('id', id).first() + if (!existing) return null + const patch = { updated_at: nowISO() } + if (fields.title !== undefined) patch.title = fields.title + if (fields.category !== undefined && VALID_CATEGORIES.includes(fields.category)) patch.category = fields.category + if (fields.content !== undefined) patch.content = fields.content + if (fields.filename !== undefined) patch.file_name = fields.filename + if (fields.linkedControls !== undefined) patch.linked_controls = JSON.stringify(Array.isArray(fields.linkedControls) ? fields.linkedControls : []) + if (fields.linkedPolicies !== undefined) patch.linked_policies = JSON.stringify(Array.isArray(fields.linkedPolicies) ? 
fields.linkedPolicies : []) + if (fields.pinOrder !== undefined) patch.pin_order = fields.pinOrder + if (fields.minRole !== undefined) patch.min_role = fields.minRole + const currentVersion = existing.version || 1 + patch.version = currentVersion + 1 + await db('guidance').where('id', id).update(patch) + const doc = await getById(id) + return publicDoc(doc) +} + +async function del(id, deletedBy) { + const db = getDb() + const existing = await db('guidance').where('id', id).first() + if (!existing) return false + await db('guidance').where('id', id).update({ + deleted_at: nowISO(), + deleted_by: deletedBy || null, + }) + return true +} + +async function permanentDelete(id) { + const db = getDb() + const existing = await db('guidance').where('id', id).first() + if (!existing) return false + await db('guidance').where('id', id).del() + return true +} + +async function restore(id) { + const db = getDb() + const existing = await db('guidance').where('id', id).first() + if (!existing) return null + await db('guidance').where('id', id).update({ + deleted_at: null, + deleted_by: null, + }) + const doc = await getById(id) + return publicDoc(doc) +} + +async function getDeleted() { + const db = getDb() + const rows = await db('guidance').whereNotNull('deleted_at') + return rows.map(rowToDoc).map(publicDoc) +} + +async function getFilePath(id) { + return null +} + +async function upsertSeed(seedId, docData) { + const db = getDb() + const lang = docData.seedLang || 'en' + const existing = await db('guidance').where('seed_id', seedId).whereNull('deleted_at').first() + if (!existing) { + const id = makeId() + const now = nowISO() + await db('guidance').insert({ + id, + title: docData.title || '', + category: docData.category || 'systemhandbuch', + type: docData.type || 'markdown', + content: docData.content || '', + version: 1, + min_role: docData.minRole || null, + linked_controls: JSON.stringify(docData.linkedControls || []), + linked_policies: 
JSON.stringify(docData.linkedPolicies || []), + pin_order: docData.pinOrder || null, + seed_id: seedId, + created_by: 'system', + created_at: now, + updated_at: now, + }) + return true + } + if (existing.seed_id) { + const existingDoc = rowToDoc(existing) + if (existingDoc.seedLang !== lang) { + await db('guidance').where('id', existing.id).update({ + title: docData.title || existing.title, + content: docData.content || existing.content, + updated_at: nowISO(), + }) + return true + } + } + let changed = false + if ((existing.pin_order == null) && docData.pinOrder != null) { + await db('guidance').where('id', existing.id).update({ pin_order: docData.pinOrder }) + changed = true + } + if (existing.category && docData.category && existing.category !== docData.category) { + await db('guidance').where('id', existing.id).update({ category: docData.category }) + changed = true + } + return changed +} + +module.exports = { + init, + getAll, + getByCategory, + search, + getById, + create, + update, + delete: del, + permanentDelete, + restore, + getDeleted, + getFilePath, + VALID_CATEGORIES, + upsertSeed, +} diff --git a/server/db/stores/legalStore.js b/server/db/stores/legalStore.js new file mode 100644 index 0000000..f463542 --- /dev/null +++ b/server/db/stores/legalStore.js @@ -0,0 +1,342 @@ +'use strict' + +const path = require('path') +const fs = require('fs') + +const FILES_DIR = path.join(process.env.DATA_DIR || path.join(__dirname, '../../../data'), 'legal', 'files') +if (!fs.existsSync(FILES_DIR)) fs.mkdirSync(FILES_DIR, { recursive: true }) + +const { getDb, init: initDb } = require('../knexDatabase') + +function nowISO() { return new Date().toISOString() } +function makeId(prefix) { return `${prefix}_${Date.now()}_${Math.random().toString(36).slice(2,6)}` } +function _json(val, fallback) { if (!val) return fallback; try { return JSON.parse(val) } catch { return fallback } } + +const CONTRACT_TYPES = ['service','supply','nda','framework','other'] +const 
CONTRACT_STATUSES = ['draft','review','active','expired','terminated'] +const NDA_TYPES = ['bilateral','unilateral_recv','unilateral_give'] +const NDA_STATUSES = ['draft','signed','expired','terminated'] +const POLICY_TYPES = ['privacy_notice','cookie','consent_form','employee','internal','other'] +const POLICY_STATUSES = ['draft','review','published','archived'] + +function rowToItem(row) { + if (!row) return null + return { id: row.id, ..._json(row.data, {}), createdBy: row.created_by, createdAt: row.created_at, updatedAt: row.updated_at, deletedAt: row.deleted_at || null } +} + +const contracts = { + getAll: async ({ status, type, entity } = {}) => { + const q = getDb()('legal_entries').where('legal_type', 'contract').whereNull('deleted_at') + let list = (await q).map(rowToItem) + if (status) list = list.filter(c => c.status === status) + if (type) list = list.filter(c => c.contractType === type) + if (entity) list = list.filter(c => !c.applicableEntities?.length || c.applicableEntities.includes(entity)) + return list + }, + getById: async (id) => { + const row = await getDb()('legal_entries').where('id', id).where('legal_type', 'contract').whereNull('deleted_at').first() + return rowToItem(row) + }, + create: async (fields, createdBy) => { + const id = makeId('contract') + const now = nowISO() + const item = { + id, title: fields.title || 'Ohne Titel', + contractType: CONTRACT_TYPES.includes(fields.contractType) ? fields.contractType : 'other', + counterparty: fields.counterparty || '', description: fields.description || '', + startDate: fields.startDate || null, endDate: fields.endDate || null, + autoRenew: !!fields.autoRenew, + noticePeriodDays: parseInt(fields.noticePeriodDays) || null, + owner: fields.owner || createdBy || '', + status: CONTRACT_STATUSES.includes(fields.status) ? fields.status : 'draft', + value: fields.value || '', currency: fields.currency || 'EUR', + linkedVvt: Array.isArray(fields.linkedVvt) ? 
fields.linkedVvt : [], + applicableEntities: Array.isArray(fields.applicableEntities) ? fields.applicableEntities : [], + notes: fields.notes || '', + linkedControls: Array.isArray(fields.linkedControls) ? fields.linkedControls : [], + linkedPolicies: Array.isArray(fields.linkedPolicies) ? fields.linkedPolicies : [], + attachments: [], + createdBy: createdBy || 'system', createdAt: now, updatedAt: now, + } + await getDb()('legal_entries').insert({ id, legal_type: 'contract', data: JSON.stringify(item), created_by: createdBy || 'system', created_at: now, updated_at: now }) + return item + }, + update: async (id, fields) => { + const row = await getDb()('legal_entries').where('id', id).where('legal_type', 'contract').first() + if (!row) return null + const item = rowToItem(row) + const updatable = ['title','contractType','counterparty','description','startDate','endDate', + 'autoRenew','noticePeriodDays','owner','status','value','currency','linkedVvt','applicableEntities','notes', + 'linkedControls','linkedPolicies'] + for (const k of updatable) { if (fields[k] !== undefined) item[k] = fields[k] } + item.updatedAt = nowISO() + await getDb()('legal_entries').where('id', id).update({ data: JSON.stringify(item), updated_at: item.updatedAt }) + return item + }, + delete: async (id, deletedBy) => { + const row = await getDb()('legal_entries').where('id', id).where('legal_type', 'contract').first() + if (!row) return false + const item = rowToItem(row) + item.deletedAt = nowISO() + item.deletedBy = deletedBy || null + await getDb()('legal_entries').where('id', id).update({ deleted_at: nowISO(), data: JSON.stringify(item) }) + return true + }, + permanentDelete: async (id) => { + const affected = await getDb()('legal_entries').where('id', id).where('legal_type', 'contract').del() + return affected > 0 + }, + restore: async (id) => { + const row = await getDb()('legal_entries').where('id', id).first() + if (!row) return null + const item = rowToItem(row) + delete 
item.deletedAt + delete item.deletedBy + await getDb()('legal_entries').where('id', id).update({ deleted_at: null, data: JSON.stringify(item) }) + return rowToItem({ ...row, deleted_at: null }) + }, + getDeleted: async () => { + const rows = await getDb()('legal_entries').where('legal_type', 'contract').whereNotNull('deleted_at') + return rows.map(rowToItem) + }, + addAttachment: async (id, meta) => { + const row = await getDb()('legal_entries').where('id', id).where('legal_type', 'contract').first() + if (!row) return null + const item = rowToItem(row) + if (!item.attachments) item.attachments = [] + item.attachments.push(meta) + item.updatedAt = nowISO() + await getDb()('legal_entries').where('id', id).update({ data: JSON.stringify(item), updated_at: item.updatedAt }) + return item + }, + removeAttachment: async (id, attId) => { + const row = await getDb()('legal_entries').where('id', id).where('legal_type', 'contract').first() + if (!row) return null + const item = rowToItem(row) + const attIdx = (item.attachments || []).findIndex(a => a.id === attId) + if (attIdx === -1) return null + const [att] = item.attachments.splice(attIdx, 1) + item.updatedAt = nowISO() + await getDb()('legal_entries').where('id', id).update({ data: JSON.stringify(item), updated_at: item.updatedAt }) + return att + }, + getExpiring: async (daysAhead = 60) => { + const rows = await getDb()('legal_entries').where('legal_type', 'contract').whereNull('deleted_at') + const list = rows.map(rowToItem).filter(c => ['active','review'].includes(c.status) && c.endDate) + const now = new Date() + const cut = new Date(now.getTime() + daysAhead * 86400000) + return list.filter(c => { + const end = new Date(c.endDate) + const notice = c.noticePeriodDays || 0 + const noticeDate = new Date(end.getTime() - notice * 86400000) + return noticeDate <= cut + }).map(c => { + const end = new Date(c.endDate) + const notice = c.noticePeriodDays || 0 + return { ...c, noticeDate: new Date(end.getTime() - notice * 
86400000).toISOString().slice(0, 10) } + }) + }, +} + +const ndas = { + getAll: async ({ status, entity } = {}) => { + const q = getDb()('legal_entries').where('legal_type', 'nda').whereNull('deleted_at') + let list = (await q).map(rowToItem) + if (status) list = list.filter(n => n.status === status) + if (entity) list = list.filter(n => !n.applicableEntities?.length || n.applicableEntities.includes(entity)) + return list + }, + getById: async (id) => { + const row = await getDb()('legal_entries').where('id', id).where('legal_type', 'nda').whereNull('deleted_at').first() + return rowToItem(row) + }, + create: async (fields, createdBy) => { + const id = makeId('nda') + const now = nowISO() + const item = { + id, title: fields.title || 'NDA', + ndaType: NDA_TYPES.includes(fields.ndaType) ? fields.ndaType : 'bilateral', + counterparty: fields.counterparty || '', signingDate: fields.signingDate || null, + expiryDate: fields.expiryDate || null, scope: fields.scope || '', + owner: fields.owner || createdBy || '', + status: NDA_STATUSES.includes(fields.status) ? fields.status : 'draft', + applicableEntities: Array.isArray(fields.applicableEntities) ? fields.applicableEntities : [], + notes: fields.notes || '', + linkedControls: Array.isArray(fields.linkedControls) ? fields.linkedControls : [], + linkedPolicies: Array.isArray(fields.linkedPolicies) ? 
fields.linkedPolicies : [], + attachments: [], + createdBy: createdBy || 'system', createdAt: now, updatedAt: now, + } + await getDb()('legal_entries').insert({ id, legal_type: 'nda', data: JSON.stringify(item), created_by: createdBy || 'system', created_at: now, updated_at: now }) + return item + }, + update: async (id, fields) => { + const row = await getDb()('legal_entries').where('id', id).where('legal_type', 'nda').first() + if (!row) return null + const item = rowToItem(row) + const updatable = ['title','ndaType','counterparty','signingDate','expiryDate','scope','owner','status','applicableEntities','notes','linkedControls','linkedPolicies'] + for (const k of updatable) { if (fields[k] !== undefined) item[k] = fields[k] } + item.updatedAt = nowISO() + await getDb()('legal_entries').where('id', id).update({ data: JSON.stringify(item), updated_at: item.updatedAt }) + return item + }, + delete: async (id, deletedBy) => { + const row = await getDb()('legal_entries').where('id', id).where('legal_type', 'nda').first() + if (!row) return false + const item = rowToItem(row) + item.deletedAt = nowISO() + item.deletedBy = deletedBy || null + await getDb()('legal_entries').where('id', id).update({ deleted_at: nowISO(), data: JSON.stringify(item) }) + return true + }, + permanentDelete: async (id) => { + const affected = await getDb()('legal_entries').where('id', id).where('legal_type', 'nda').del() + return affected > 0 + }, + restore: async (id) => { + const row = await getDb()('legal_entries').where('id', id).first() + if (!row) return null + const item = rowToItem(row) + delete item.deletedAt + delete item.deletedBy + await getDb()('legal_entries').where('id', id).update({ deleted_at: null, data: JSON.stringify(item) }) + return rowToItem({ ...row, deleted_at: null }) + }, + getDeleted: async () => { + const rows = await getDb()('legal_entries').where('legal_type', 'nda').whereNotNull('deleted_at') + return rows.map(rowToItem) + }, + addAttachment: async (id, meta) 
=> { + const row = await getDb()('legal_entries').where('id', id).where('legal_type', 'nda').first() + if (!row) return null + const item = rowToItem(row) + if (!item.attachments) item.attachments = [] + item.attachments.push(meta) + item.updatedAt = nowISO() + await getDb()('legal_entries').where('id', id).update({ data: JSON.stringify(item), updated_at: item.updatedAt }) + return item + }, + removeAttachment: async (id, attId) => { + const row = await getDb()('legal_entries').where('id', id).where('legal_type', 'nda').first() + if (!row) return null + const item = rowToItem(row) + const attIdx = (item.attachments || []).findIndex(a => a.id === attId) + if (attIdx === -1) return null + const [att] = item.attachments.splice(attIdx, 1) + item.updatedAt = nowISO() + await getDb()('legal_entries').where('id', id).update({ data: JSON.stringify(item), updated_at: item.updatedAt }) + return att + }, +} + +const privacyPolicies = { + getAll: async ({ status, entity } = {}) => { + const q = getDb()('legal_entries').where('legal_type', 'privacy_policy').whereNull('deleted_at') + let list = (await q).map(rowToItem) + if (status) list = list.filter(p => p.status === status) + if (entity) list = list.filter(p => !p.applicableEntities?.length || p.applicableEntities.includes(entity)) + return list + }, + getById: async (id) => { + const row = await getDb()('legal_entries').where('id', id).where('legal_type', 'privacy_policy').whereNull('deleted_at').first() + return rowToItem(row) + }, + create: async (fields, createdBy) => { + const id = makeId('policy') + const now = nowISO() + const item = { + id, title: fields.title || 'Ohne Titel', + policyType: POLICY_TYPES.includes(fields.policyType) ? 
fields.policyType : 'other', + description: fields.description || '', content: fields.content || '', + publishedAt: fields.publishedAt || null, nextReviewDate: fields.nextReviewDate || null, + url: fields.url || '', owner: fields.owner || createdBy || '', + status: POLICY_STATUSES.includes(fields.status) ? fields.status : 'draft', + version: 1, + applicableEntities: Array.isArray(fields.applicableEntities) ? fields.applicableEntities : [], + notes: fields.notes || '', + linkedControls: Array.isArray(fields.linkedControls) ? fields.linkedControls : [], + linkedPolicies: Array.isArray(fields.linkedPolicies) ? fields.linkedPolicies : [], + attachments: [], + createdBy: createdBy || 'system', createdAt: now, updatedAt: now, + } + await getDb()('legal_entries').insert({ id, legal_type: 'privacy_policy', data: JSON.stringify(item), created_by: createdBy || 'system', created_at: now, updated_at: now }) + return item + }, + update: async (id, fields) => { + const row = await getDb()('legal_entries').where('id', id).where('legal_type', 'privacy_policy').first() + if (!row) return null + const item = rowToItem(row) + const updatable = ['title','policyType','description','content','publishedAt','nextReviewDate', + 'url','owner','status','applicableEntities','notes','linkedControls','linkedPolicies'] + for (const k of updatable) { if (fields[k] !== undefined) item[k] = fields[k] } + if (fields.status && fields.status !== item.status) item.version = (item.version || 0) + 1 + item.updatedAt = nowISO() + await getDb()('legal_entries').where('id', id).update({ data: JSON.stringify(item), updated_at: item.updatedAt }) + return item + }, + delete: async (id, deletedBy) => { + const row = await getDb()('legal_entries').where('id', id).where('legal_type', 'privacy_policy').first() + if (!row) return false + const item = rowToItem(row) + item.deletedAt = nowISO() + item.deletedBy = deletedBy || null + await getDb()('legal_entries').where('id', id).update({ deleted_at: nowISO(), data: 
JSON.stringify(item) }) + return true + }, + permanentDelete: async (id) => { + const affected = await getDb()('legal_entries').where('id', id).where('legal_type', 'privacy_policy').del() + return affected > 0 + }, + restore: async (id) => { + const row = await getDb()('legal_entries').where('id', id).first() + if (!row) return null + const item = rowToItem(row) + delete item.deletedAt + delete item.deletedBy + await getDb()('legal_entries').where('id', id).update({ deleted_at: null, data: JSON.stringify(item) }) + return rowToItem({ ...row, deleted_at: null }) + }, + getDeleted: async () => { + const rows = await getDb()('legal_entries').where('legal_type', 'privacy_policy').whereNotNull('deleted_at') + return rows.map(rowToItem) + }, + addAttachment: async (id, meta) => { + const row = await getDb()('legal_entries').where('id', id).where('legal_type', 'privacy_policy').first() + if (!row) return null + const item = rowToItem(row) + if (!item.attachments) item.attachments = [] + item.attachments.push(meta) + item.updatedAt = nowISO() + await getDb()('legal_entries').where('id', id).update({ data: JSON.stringify(item), updated_at: item.updatedAt }) + return item + }, + removeAttachment: async (id, attId) => { + const row = await getDb()('legal_entries').where('id', id).where('legal_type', 'privacy_policy').first() + if (!row) return null + const item = rowToItem(row) + const attIdx = (item.attachments || []).findIndex(a => a.id === attId) + if (attIdx === -1) return null + const [att] = item.attachments.splice(attIdx, 1) + item.updatedAt = nowISO() + await getDb()('legal_entries').where('id', id).update({ data: JSON.stringify(item), updated_at: item.updatedAt }) + return att + }, +} + +async function getSummary() { + const db = getDb() + const cList = (await db('legal_entries').where('legal_type', 'contract').whereNull('deleted_at')).map(rowToItem) + const nList = (await db('legal_entries').where('legal_type', 'nda').whereNull('deleted_at')).map(rowToItem) + const 
pList = (await db('legal_entries').where('legal_type', 'privacy_policy').whereNull('deleted_at')).map(rowToItem) + return { + contracts: { total: cList.length, active: cList.filter(c => c.status === 'active').length, expiring: (await contracts.getExpiring(60)).length }, + ndas: { total: nList.length, signed: nList.filter(n => n.status === 'signed').length }, + policies: { total: pList.length, published: pList.filter(p => p.status === 'published').length, draft: pList.filter(p => p.status === 'draft').length }, + } +} + +module.exports = { + contracts, ndas, privacyPolicies, getSummary, + FILES_DIR, + CONTRACT_TYPES, CONTRACT_STATUSES, NDA_TYPES, NDA_STATUSES, POLICY_TYPES, POLICY_STATUSES, +} diff --git a/server/db/stores/orgSettingsStore.js b/server/db/stores/orgSettingsStore.js new file mode 100644 index 0000000..6b66b62 --- /dev/null +++ b/server/db/stores/orgSettingsStore.js @@ -0,0 +1,156 @@ +'use strict' + +const { getDb, init: initDb } = require('../knexDatabase') + +const DEFAULTS = { + orgName: '', + orgShort: '', + ismsScope: '', + logoText: '', + require2FA: false, + aiEnabled: true, + aiOllamaUrl: '', + aiEmbedModel: '', + cisoName: '', + cisoEmail: '', + gdpoName: '', + gdpoEmail: '', + icsContact: '', + cisoSettings: { + escalationEmail: '', + incidentResponseSLA: 24, + reportableThreshold: 'high', + reportableTypes: ['ransomware', 'data_theft', 'unauthorized_access'], + }, + gdpoSettings: { + dsarDeadlineDays: 30, + dsarExtendedDays: 90, + timer72hEnabled: true, + supervisoryAuthority: '', + supervisoryContact: '', + dsarDefaultResponse: '', + }, + modules: { + soa: true, guidance: true, goals: true, risk: true, legal: true, + incident: true, gdpr: true, training: true, reports: true, calendar: true, + assets: true, governance: true, bcm: true, suppliers: true, + }, + soaFrameworks: { + ISO27001: true, BSI: true, NIS2: true, EUCS: true, EUAI: true, + ISO9000: true, ISO9001: true, CRA: true, + }, + icsSettings: { + otResponsible: '', 
otResponsibleEmail: '', otScope: '', + otStandard: 'iec62443', otNis2Sector: '', otKritisRelevant: false, + otNetworkSegmentation: 'planned', otPatchCycleWeeks: 12, + otMaintenanceWindow: '', otEmergencyContact: '', + }, + revisionSettings: { + revResponsible: '', revResponsibleEmail: '', revScope: '', + revCycleMonths: 12, revLastAuditDate: '', revNextAuditDate: '', + revReportsTo: 'gf', revExternalSupport: '', + }, + smtpSettings: { + host: '', port: 587, secure: false, user: '', pass: '', from: '', + }, + splashScreen: { + enabled: true, duration: 7, + }, + languageConfig: { + available: ['de', 'en', 'fr', 'nl'], default: 'en', + }, + navOrder: ['dashboard','soa','guidance','goals','risk','legal','incident','gdpr','training','assets','governance','bcm','suppliers','reports','calendar','settings','admin'], + emailNotifications: { + enabled: false, adminEmail: '', risks: true, dsar: true, + gdprIncidents: true, deletionLog: true, bcm: true, contracts: true, + templateReview: true, supplierAudits: true, + }, + policyAckMode: 'manual', + qmSettings: { + qmResponsible: '', qmResponsibleEmail: '', qmScope: '', + qmStandard: 'iso9001', qmCertBody: '', qmCertValidUntil: '', + qmLastAuditDate: '', qmNextAuditDate: '', qmRecertDate: '', + }, +} + +const DEEP_KEYS = [ + 'modules', 'soaFrameworks', 'cisoSettings', 'gdpoSettings', 'icsSettings', + 'revisionSettings', 'qmSettings', 'emailNotifications', 'smtpSettings', +] + +function _deepMerge(current, patch) { + const result = { ...current, ...patch } + for (const key of DEEP_KEYS) { + result[key] = { ...(current[key] || {}), ...(patch[key] || {}) } + } + if (Array.isArray(patch.navOrder)) result.navOrder = patch.navOrder + if (patch.languageConfig) { + result.languageConfig = { + ...(current.languageConfig || {}), + ...patch.languageConfig, + available: Array.isArray(patch.languageConfig.available) + ? 
patch.languageConfig.available + : (current.languageConfig?.available || DEFAULTS.languageConfig.available), + } + } + return result +} + +function _json(val, fallback) { + if (!val) return fallback + try { return JSON.parse(val) } catch { return fallback } +} + +function _defaultsDeep() { + const d = { ...DEFAULTS } + for (const key of DEEP_KEYS) d[key] = { ...DEFAULTS[key] } + d.navOrder = DEFAULTS.navOrder.slice() + d.languageConfig = { ...DEFAULTS.languageConfig, available: [...DEFAULTS.languageConfig.available] } + return d +} + +module.exports = { + init: async () => { await initDb() }, + + get: async () => { + const rows = await getDb()('org_settings') + const stored = {} + for (const row of rows) { + stored[row.key_name] = _json(row.value, null) + } + return _deepMerge(_defaultsDeep(), stored) + }, + + update: async (patch) => { + const db = getDb() + const flatPatch = { ...patch } + const nestedToStore = {} + for (const key of DEEP_KEYS) { + if (flatPatch[key] !== undefined) { + nestedToStore[key] = flatPatch[key] + delete flatPatch[key] + } + } + if (flatPatch.languageConfig) { + nestedToStore.languageConfig = flatPatch.languageConfig + delete flatPatch.languageConfig + } + if (flatPatch.navOrder) { + nestedToStore.navOrder = flatPatch.navOrder + delete flatPatch.navOrder + } + for (const [key, value] of Object.entries(flatPatch)) { + await db('org_settings') + .insert({ key_name: key, value: JSON.stringify(value) }) + .onConflict('key_name').merge() + } + for (const [key, value] of Object.entries(nestedToStore)) { + await db('org_settings') + .insert({ key_name: key, value: JSON.stringify(value) }) + .onConflict('key_name').merge() + } + return module.exports.get() + }, + + DEFAULTS, +} diff --git a/server/db/stores/orgUnitStore.js b/server/db/stores/orgUnitStore.js new file mode 100644 index 0000000..ec20f13 --- /dev/null +++ b/server/db/stores/orgUnitStore.js @@ -0,0 +1,149 @@ +'use strict' + +const { getDb, init: initDb } = require('../knexDatabase') 
+ +const SEED = [ + { + id: 'ou-cio', + name: 'CIO', + type: 'cio', + parentId: null, + head: '', + email: '', + description: 'Chief Information Officer — strategic IT governance and oversight', + }, + { + id: 'ou-groupit', + name: 'GroupIT', + type: 'group', + parentId: 'ou-cio', + head: '', + email: '', + description: 'IT Stabsstelle — central infrastructure operations.', + }, + { + id: 'ou-groupapp', + name: 'GroupApp', + type: 'group', + parentId: 'ou-cio', + head: '', + email: '', + description: 'Applications Programming & Deployment.', + }, + { + id: 'ou-localit', + name: 'Local IT', + type: 'local', + parentId: 'ou-groupit', + head: '', + email: '', + description: 'Local IT teams at subsidiary level.', + }, +] + +function nowISO() { return new Date().toISOString() } + +function _json(val, fallback) { + if (!val) return fallback + try { return JSON.parse(val) } catch { return fallback } +} + +function rowToUnit(row) { + if (!row) return null + const d = _json(row.data, {}) + return { + id: row.id, + name: d.name || '', + type: d.type || 'group', + parentId: d.parentId || null, + head: d.head || '', + email: d.email || '', + description: d.description || '', + createdAt: row.created_at, + updatedAt: row.updated_at, + } +} + +module.exports = { + init: async () => { + await initDb() + const db = getDb() + for (const s of SEED) { + const exists = await db('org_units').where('id', s.id).first() + if (!exists) { + const now = nowISO() + await db('org_units').insert({ + id: s.id, + data: JSON.stringify({ + name: s.name, + type: s.type, + parentId: s.parentId, + head: s.head, + email: s.email, + description: s.description, + }), + created_at: now, + updated_at: now, + }) + } + } + }, + + getAll: async () => { + const rows = await getDb()('org_units') + return rows.map(rowToUnit) + }, + + getById: async (id) => { + const row = await getDb()('org_units').where('id', id).first() + return rowToUnit(row) + }, + + create: async (body) => { + const id = 'ou-' + Date.now() 
+ const now = nowISO() + const name = (body.name || '').trim() + if (!name) throw new Error('name required') + const data = { + name, + type: body.type || 'group', + parentId: body.parentId || null, + head: (body.head || '').trim(), + email: (body.email || '').trim(), + description: (body.description || '').trim(), + } + await getDb()('org_units').insert({ + id, + data: JSON.stringify(data), + created_at: now, + updated_at: now, + }) + return { id, ...data, createdAt: now, updatedAt: now } + }, + + update: async (id, body) => { + const row = await getDb()('org_units').where('id', id).first() + if (!row) return null + const current = rowToUnit(row) + const now = nowISO() + const data = { + name: body.name !== undefined ? (body.name || '').trim() : current.name, + type: body.type !== undefined ? body.type : current.type, + parentId: body.parentId !== undefined ? (body.parentId || null) : current.parentId, + head: body.head !== undefined ? (body.head || '').trim() : current.head, + email: body.email !== undefined ? (body.email || '').trim() : current.email, + description: body.description !== undefined ? 
(body.description || '').trim() : current.description, + } + if (!data.name) throw new Error('name required') + await getDb()('org_units').where('id', id).update({ + data: JSON.stringify(data), + updated_at: now, + }) + return { id, ...data, createdAt: current.createdAt, updatedAt: now } + }, + + remove: async (id) => { + const affected = await getDb()('org_units').where('id', id).del() + return affected > 0 + }, +} diff --git a/server/db/stores/publicIncidentStore.js b/server/db/stores/publicIncidentStore.js new file mode 100644 index 0000000..9dd6d10 --- /dev/null +++ b/server/db/stores/publicIncidentStore.js @@ -0,0 +1,109 @@ +'use strict' + +const { getDb, init: initDb } = require('../knexDatabase') + +const INCIDENT_TYPES = ['malware','phishing','data_theft','ransomware','unauthorized_access','social_engineering','other'] +const CLEANED_UP_VALUES = ['yes','no','partial'] + +function nowISO() { return new Date().toISOString() } +function _json(val, fallback) { if (!val) return fallback; try { return JSON.parse(val) } catch { return fallback } } + +function rowToIncident(row) { + if (!row) return null + return { id: row.id, refNumber: row.ref, ..._json(row.data, {}), createdAt: row.submitted_at, deletedAt: row.deleted_at || null } +} + +async function nextRefNumber() { + const year = new Date().getFullYear() + const prefix = `INC-${year}-` + const rows = await getDb()('public_incidents').where('ref', 'like', `${prefix}%`) + const maxSeq = rows.map(r => parseInt(r.ref.replace(prefix, ''), 10) || 0).reduce((a, b) => Math.max(a, b), 0) + return `${prefix}${String(maxSeq + 1).padStart(4, '0')}` +} + +module.exports = { + init: async () => { await initDb() }, + INCIDENT_TYPES, + + getAll: async ({ status } = {}) => { + const q = getDb()('public_incidents').whereNull('deleted_at') + if (status) q.whereRaw("data LIKE ?", [`%"status":"${status}"%`]) + const rows = await q.orderBy('submitted_at', 'desc') + let list = rows.map(rowToIncident) + if (status) list = 
list.filter(i => i.status === status) + return list + }, + + getById: async (id) => { + const row = await getDb()('public_incidents').where('id', id).whereNull('deleted_at').first() + return rowToIncident(row) + }, + + create: async (data) => { + const { randomUUID } = require('crypto') + const id = randomUUID() + const ref = await nextRefNumber() + const now = nowISO() + const incident = { + refNumber: ref, status: 'new', + email: (data.email || '').trim(), entityName: (data.entityName || '').trim(), + incidentType: INCIDENT_TYPES.includes(data.incidentType) ? data.incidentType : 'other', + description: (data.description || '').trim(), measuresTaken: (data.measuresTaken || '').trim(), + localContact: (data.localContact || '').trim(), + cleanedUp: CLEANED_UP_VALUES.includes(data.cleanedUp) ? data.cleanedUp : 'no', + assignedTo: null, reportable: null, cisoNotes: '', + updatedAt: null, updatedBy: null, + } + await getDb()('public_incidents').insert({ + id, ref, data: JSON.stringify(incident), submitted_at: now, + }) + return { id, ...incident, createdAt: now } + }, + + update: async (id, patch, updatedBy) => { + const row = await getDb()('public_incidents').where('id', id).first() + if (!row) return null + const inc = rowToIncident(row) + const allowed = ['status','assignedTo','reportable','cisoNotes'] + for (const k of allowed) { if (k in patch) inc[k] = patch[k] } + inc.updatedAt = nowISO() + inc.updatedBy = updatedBy || null + await getDb()('public_incidents').where('id', id).update({ data: JSON.stringify(inc) }) + return inc + }, + + delete: async (id, deletedBy) => { + const row = await getDb()('public_incidents').where('id', id).first() + if (!row) return false + const inc = rowToIncident(row) + inc.deletedAt = nowISO() + inc.deletedBy = deletedBy || null + await getDb()('public_incidents').where('id', id).update({ deleted_at: nowISO(), data: JSON.stringify(inc) }) + return true + }, + + permanentDelete: async (id) => { + const affected = await 
getDb()('public_incidents').where('id', id).del() + return affected > 0 + }, + + restore: async (id) => { + const row = await getDb()('public_incidents').where('id', id).first() + if (!row) return null + const inc = rowToIncident(row) + delete inc.deletedAt + delete inc.deletedBy + await getDb()('public_incidents').where('id', id).update({ deleted_at: null, data: JSON.stringify(inc) }) + return rowToIncident({ ...row, deleted_at: null }) + }, + + getDeleted: async () => { + const rows = await getDb()('public_incidents').whereNotNull('deleted_at') + return rows.map(rowToIncident) + }, + + remove: async (id) => { + const affected = await getDb()('public_incidents').where('id', id).del() + return affected > 0 + }, +} diff --git a/server/db/stores/riskStore.js b/server/db/stores/riskStore.js new file mode 100644 index 0000000..18e89d9 --- /dev/null +++ b/server/db/stores/riskStore.js @@ -0,0 +1,297 @@ +'use strict' + +const { getDb, init: initDb } = require('../knexDatabase') + +function nowISO() { return new Date().toISOString() } +function makeId(prefix) { return `${prefix}_${Date.now()}_${Math.random().toString(36).slice(2,6)}` } +function _json(val, fallback) { if (!val) return fallback; try { return JSON.parse(val) } catch { return fallback } } + +const CATEGORIES = ['technical','organizational','physical','legal'] +const TREATMENT_OPTS = ['accept','reduce','avoid','transfer'] +const STATUSES = ['open','in_treatment','accepted','closed'] + +function riskLevel(score) { + if (score <= 4) return 'low' + if (score <= 9) return 'medium' + if (score <= 14) return 'high' + return 'critical' +} + +function calcScore(r) { + const p = Math.min(5, Math.max(1, parseInt(r.probability) || 1)) + const i = Math.min(5, Math.max(1, parseInt(r.impact) || 1)) + return p * i +} + +function rowToRisk(row) { + if (!row) return null + const extra = _json(row.treatments, {}) + const score = calcScore({ probability: row.likelihood, impact: row.impact }) + return { + id: row.id, + title: 
row.title, + description: row.description, + category: row.category, + threat: extra.threat || '', + vulnerability: extra.vulnerability || '', + probability: row.likelihood, + impact: row.impact, + treatmentOption: extra.treatmentOption || 'reduce', + mitigationNotes: extra.mitigationNotes || '', + owner: row.owner, + dueDate: extra.dueDate || null, + reviewDate: extra.reviewDate || null, + status: row.status, + linkedControls: extra.linkedControls || [], + linkedTemplates: extra.linkedTemplates || [], + applicableEntities: _json(row.applicable_entities, []), + treatmentPlans: extra.treatmentPlans || [], + needsReview: extra.needsReview || false, + source: extra.source || null, + scanRef: extra.scanRef || null, + cvssScore: extra.cvssScore != null ? extra.cvssScore : null, + cveIds: extra.cveIds || [], + deletedBy: row.deleted_by || null, + approvedBy: extra.approvedBy || null, + approvedAt: extra.approvedAt || null, + createdBy: row.created_by, + createdAt: row.created_at, + updatedAt: row.updated_at, + deletedAt: row.deleted_at || null, + score, + riskLevel: riskLevel(score), + } +} + +function packExtra(r) { + return JSON.stringify({ + threat: r.threat, vulnerability: r.vulnerability, + treatmentOption: r.treatmentOption, mitigationNotes: r.mitigationNotes, + dueDate: r.dueDate, reviewDate: r.reviewDate, + linkedControls: r.linkedControls || [], linkedTemplates: r.linkedTemplates || [], + treatmentPlans: r.treatmentPlans || [], + needsReview: r.needsReview || false, source: r.source, scanRef: r.scanRef, + cvssScore: r.cvssScore, cveIds: r.cveIds || [], + approvedBy: r.approvedBy, approvedAt: r.approvedAt, + }) +} + +module.exports = { + init: async () => { await initDb() }, + + getAll: async ({ category, status, entity } = {}) => { + const q = getDb()('risks').whereNull('deleted_at') + if (category) q.where('category', category) + if (status) q.where('status', status) + const rows = await q.orderBy('created_at', 'desc') + let list = rows.map(rowToRisk) + if 
(entity) list = list.filter(r => !r.applicableEntities?.length || r.applicableEntities.includes(entity)) + return list + }, + + getById: async (id) => { + const row = await getDb()('risks').where('id', id).whereNull('deleted_at').first() + return rowToRisk(row) + }, + + create: async (fields, createdBy) => { + const probability = Math.min(5, Math.max(1, parseInt(fields.probability) || 1)) + const impact = Math.min(5, Math.max(1, parseInt(fields.impact) || 1)) + const r = { + id: makeId('risk'), + title: fields.title || 'Ohne Titel', + description: fields.description || '', + category: CATEGORIES.includes(fields.category) ? fields.category : 'technical', + threat: fields.threat || '', + vulnerability: fields.vulnerability || '', + probability, + impact, + treatmentOption: TREATMENT_OPTS.includes(fields.treatmentOption) ? fields.treatmentOption : 'reduce', + mitigationNotes: fields.mitigationNotes || '', + owner: fields.owner || '', + dueDate: fields.dueDate || null, + reviewDate: fields.reviewDate || null, + status: STATUSES.includes(fields.status) ? fields.status : 'open', + linkedControls: Array.isArray(fields.linkedControls) ? fields.linkedControls : [], + linkedTemplates: Array.isArray(fields.linkedTemplates) ? fields.linkedTemplates : [], + applicableEntities: Array.isArray(fields.applicableEntities) ? fields.applicableEntities : [], + treatmentPlans: [], + needsReview: fields.needsReview === true, + source: fields.source || null, + scanRef: fields.scanRef || null, + cvssScore: fields.cvssScore != null ? Number(fields.cvssScore) : null, + cveIds: Array.isArray(fields.cveIds) ? 
fields.cveIds : [], + createdBy: createdBy || 'system', + } + const score = calcScore(r) + const now = nowISO() + await getDb()('risks').insert({ + id: r.id, title: r.title, description: r.description, + category: r.category, likelihood: r.probability, impact: r.impact, + risk_score: score, status: r.status, owner: r.owner, + applicable_entities: JSON.stringify(r.applicableEntities), + treatments: packExtra(r), + created_by: r.createdBy, created_at: now, updated_at: now, + }) + return { ...r, score, riskLevel: riskLevel(score), createdAt: now, updatedAt: now } + }, + + update: async (id, fields) => { + const row = await getDb()('risks').where('id', id).whereNull('deleted_at').first() + if (!row) return null + const r = rowToRisk(row) + const updatable = ['title','description','category','threat','vulnerability','probability', + 'impact','treatmentOption','mitigationNotes','owner','dueDate','reviewDate','status', + 'linkedControls','linkedTemplates','applicableEntities', + 'needsReview','source','scanRef','cvssScore','cveIds'] + for (const k of updatable) { + if (fields[k] !== undefined) r[k] = fields[k] + } + if (fields.probability) r.probability = Math.min(5, Math.max(1, parseInt(fields.probability) || 1)) + if (fields.impact) r.impact = Math.min(5, Math.max(1, parseInt(fields.impact) || 1)) + const now = nowISO() + r.updatedAt = now + const score = calcScore(r) + await getDb()('risks').where('id', id).update({ + title: r.title, description: r.description, category: r.category, + likelihood: r.probability, impact: r.impact, risk_score: score, + status: r.status, owner: r.owner, + applicable_entities: JSON.stringify(r.applicableEntities || []), + treatments: packExtra(r), updated_at: now, + }) + return { ...r, score, riskLevel: riskLevel(score) } + }, + + delete: async (id, deletedBy) => { + const affected = await getDb()('risks').where('id', id).whereNull('deleted_at') + .update({ deleted_at: nowISO(), deleted_by: deletedBy || null }) + return affected > 0 + }, + 
+ permanentDelete: async (id) => { + const affected = await getDb()('risks').where('id', id).del() + return affected > 0 + }, + + restore: async (id) => { + const row = await getDb()('risks').where('id', id).first() + if (!row) return null + await getDb()('risks').where('id', id).update({ deleted_at: null, deleted_by: null, updated_at: nowISO() }) + return rowToRisk({ ...row, deleted_at: null, deleted_by: null }) + }, + + getDeleted: async () => { + const rows = await getDb()('risks').whereNotNull('deleted_at').orderBy('deleted_at', 'desc') + return rows.map(rowToRisk) + }, + + getReviewPending: async () => { + const rows = await getDb()('risks').whereNull('deleted_at') + return rows.map(rowToRisk).filter(r => r.needsReview) + }, + + approve: async (id, approvedBy) => { + const row = await getDb()('risks').where('id', id).whereNull('deleted_at').first() + if (!row) return null + const r = rowToRisk(row) + r.needsReview = false + r.approvedBy = approvedBy || 'system' + r.approvedAt = nowISO() + r.updatedAt = nowISO() + await getDb()('risks').where('id', id).update({ + treatments: packExtra(r), updated_at: r.updatedAt, + }) + return r + }, + + addTreatment: async (riskId, fields, createdBy) => { + const row = await getDb()('risks').where('id', riskId).whereNull('deleted_at').first() + if (!row) return null + const r = rowToRisk(row) + const tp = { + id: makeId('tp'), + title: fields.title || 'Maßnahme', + description: fields.description || '', + responsible: fields.responsible || '', + dueDate: fields.dueDate || null, + status: ['open','in_progress','completed'].includes(fields.status) ? 
fields.status : 'open', + createdAt: nowISO(), + updatedAt: nowISO(), + createdBy: createdBy || 'system', + } + r.treatmentPlans = r.treatmentPlans || [] + r.treatmentPlans.push(tp) + r.updatedAt = nowISO() + await getDb()('risks').where('id', riskId).update({ + treatments: packExtra(r), updated_at: r.updatedAt, + }) + return tp + }, + + updateTreatment: async (riskId, tpId, fields) => { + const row = await getDb()('risks').where('id', riskId).whereNull('deleted_at').first() + if (!row) return null + const r = rowToRisk(row) + const tp = (r.treatmentPlans || []).find(t => t.id === tpId) + if (!tp) return null + if (fields.title !== undefined) tp.title = fields.title + if (fields.description !== undefined) tp.description = fields.description + if (fields.responsible !== undefined) tp.responsible = fields.responsible + if (fields.dueDate !== undefined) tp.dueDate = fields.dueDate + if (fields.status !== undefined) tp.status = fields.status + tp.updatedAt = nowISO() + r.updatedAt = nowISO() + await getDb()('risks').where('id', riskId).update({ + treatments: packExtra(r), updated_at: r.updatedAt, + }) + return tp + }, + + deleteTreatment: async (riskId, tpId) => { + const row = await getDb()('risks').where('id', riskId).whereNull('deleted_at').first() + if (!row) return false + const r = rowToRisk(row) + const before = (r.treatmentPlans || []).length + r.treatmentPlans = (r.treatmentPlans || []).filter(t => t.id !== tpId) + if (r.treatmentPlans.length === before) return false + r.updatedAt = nowISO() + await getDb()('risks').where('id', riskId).update({ + treatments: packExtra(r), updated_at: r.updatedAt, + }) + return true + }, + + getCalendarEvents: async () => { + const rows = await getDb()('risks').whereNull('deleted_at') + const events = [] + for (const row of rows) { + const r = rowToRisk(row) + if (r.dueDate) events.push({ date: r.dueDate, type: 'risk_due', label: `Fälligkeit: ${r.title}`, riskId: r.id, riskTitle: r.title }) + if (r.reviewDate) events.push({ 
date: r.reviewDate, type: 'risk_review', label: `Review: ${r.title}`, riskId: r.id, riskTitle: r.title }) + for (const tp of r.treatmentPlans || []) { + if (tp.dueDate) events.push({ date: tp.dueDate, type: 'treatment_due', label: `Maßnahme: ${tp.title}`, riskId: r.id, riskTitle: r.title, tpId: tp.id }) + } + } + events.sort((a, b) => new Date(a.date) - new Date(b.date)) + return events + }, + + getSummary: async () => { + const rows = await getDb()('risks').whereNull('deleted_at') + const risks = rows.map(rowToRisk) + const byLevel = { low: 0, medium: 0, high: 0, critical: 0 } + const byCategory = { technical: 0, organizational: 0, physical: 0, legal: 0 } + const byStatus = { open: 0, in_treatment: 0, accepted: 0, closed: 0 } + let openTreatments = 0 + for (const r of risks) { + byLevel[r.riskLevel] = (byLevel[r.riskLevel] || 0) + 1 + byCategory[r.category] = (byCategory[r.category] || 0) + 1 + byStatus[r.status] = (byStatus[r.status] || 0) + 1 + openTreatments += (r.treatmentPlans || []).filter(t => t.status !== 'completed').length + } + const top5 = [...risks].sort((a, b) => b.score - a.score).slice(0, 5) + return { total: risks.length, byLevel, byCategory, byStatus, openTreatments, top5 } + }, + + CATEGORIES, TREATMENT_OPTS, STATUSES, +} diff --git a/server/db/stores/soaStore.js b/server/db/stores/soaStore.js new file mode 100644 index 0000000..730c34e --- /dev/null +++ b/server/db/stores/soaStore.js @@ -0,0 +1,213 @@ +// © 2026 Claude Hecker — ISMS Builder — AGPL-3.0 +'use strict' + +const { getDb } = require('../knexDatabase') + +const FRAMEWORKS = { + ISO27001: { id: 'ISO27001', label: 'ISO 27001:2022', color: '#4f8cff' }, + BSI: { id: 'BSI', label: 'BSI IT-Grundschutz', color: '#f0b429' }, + NIS2: { id: 'NIS2', label: 'EU NIS2', color: '#34d399' }, + EUCS: { id: 'EUCS', label: 'EU Cloud (EUCS)', color: '#a78bfa' }, + EUAI: { id: 'EUAI', label: 'EU AI Act', color: '#fb923c' }, + ISO9000: { id: 'ISO9000', label: 'ISO 9000:2015', color: '#2dd4bf' }, + ISO9001: 
{ id: 'ISO9001', label: 'ISO 9001:2015', color: '#f472b6' }, + CRA: { id: 'CRA', label: 'EU Cyber Resilience Act', color: '#e11d48' }, + CUSTOM: { id: 'CUSTOM', label: 'Custom Controls', color: '#64748b' }, +} + +const IMPLEMENTATION_STATUSES = ['not_started', 'partial', 'implemented', 'optimized'] + +function _parse(val, fallback) { + if (!val) return fallback + try { return JSON.parse(val) } catch { return fallback } +} + +function rowToObj(row) { + if (!row) return null + return { + id: row.id, + framework: row.framework, + controlId: row.control_id || row.id, + theme: row.theme, + title: row.title, + description: row.description || '', + applicable: !!row.applicable, + status: row.status || 'not_started', + owner: row.owner || '', + justification: row.justification || '', + evidence: row.evidence || '', + applicableEntities: _parse(row.applicable_entities, []), + linkedTemplates: _parse(row.linked_templates, []), + updatedAt: row.updated_at, + updatedBy: row.updated_by || 'system', + isCustom: !!row.is_custom, + createdAt: row.created_at, + } +} + +async function init() { + await getDb() +} + +async function getFrameworks() { + return Object.values(FRAMEWORKS) +} + +async function getAll({ framework, theme } = {}) { + const db = getDb() + let q = db('soa_controls') + if (framework) q = q.where('framework', framework) + if (theme) q = q.where('theme', theme) + const rows = await q + return rows.map(rowToObj) +} + +async function getById(id) { + const db = getDb() + const row = await db('soa_controls').where('id', id).first() + return row ? 
rowToObj(row) : null +} + +async function update(id, fields, { changedBy } = {}) { + const db = getDb() + const existing = await db('soa_controls').where('id', id).first() + if (!existing) return null + const patch = { updated_at: new Date().toISOString(), updated_by: changedBy || 'unknown' } + const allowed = ['applicable', 'status', 'owner', 'justification', 'linkedTemplates', 'applicableEntities'] + if (fields.applicable !== undefined) patch.applicable = !!fields.applicable + if (fields.status !== undefined) patch.status = fields.status + if (fields.owner !== undefined) patch.owner = fields.owner + if (fields.justification !== undefined) patch.justification = fields.justification + if (fields.linkedTemplates !== undefined) patch.linked_templates = JSON.stringify(fields.linkedTemplates) + if (fields.applicableEntities !== undefined) patch.applicable_entities = JSON.stringify(fields.applicableEntities) + await db('soa_controls').where('id', id).update(patch) + return await getById(id) +} + +async function addLinkedTemplate(controlId, templateId) { + const ctrl = await getById(controlId) + if (!ctrl) return null + const lt = ctrl.linkedTemplates || [] + if (!lt.includes(templateId)) { + lt.push(templateId) + const db = getDb() + await db('soa_controls').where('id', controlId).update({ + linked_templates: JSON.stringify(lt), + updated_at: new Date().toISOString(), + }) + } + return await getById(controlId) +} + +async function removeLinkedTemplate(controlId, templateId) { + const ctrl = await getById(controlId) + if (!ctrl) return null + const lt = (ctrl.linkedTemplates || []).filter(t => t !== templateId) + const db = getDb() + await db('soa_controls').where('id', controlId).update({ + linked_templates: JSON.stringify(lt), + updated_at: new Date().toISOString(), + }) + return await getById(controlId) +} + +async function getSummary(framework) { + const frameworks = framework ? 
[framework] : Object.keys(FRAMEWORKS) + const db = getDb() + const result = {} + for (const fw of frameworks) { + const rows = await db('soa_controls').where('framework', fw) + const controls = rows.map(rowToObj) + const applicable = controls.filter(c => c.applicable) + const byStatus = { not_started: 0, partial: 0, implemented: 0, optimized: 0 } + for (const c of applicable) { + if (byStatus[c.status] !== undefined) byStatus[c.status]++ + } + result[fw] = { + framework: fw, + label: FRAMEWORKS[fw]?.label || fw, + color: FRAMEWORKS[fw]?.color || '#888', + total: controls.length, + applicable: applicable.length, + notApplicable: controls.length - applicable.length, + byStatus, + implementationRate: applicable.length > 0 + ? Math.round((byStatus.implemented + byStatus.optimized) / applicable.length * 100) + : 0, + } + } + return framework ? result[framework] : result +} + +async function createCustomControl(body, { changedBy } = {}) { + const title = (body.title || '').trim() + if (!title) throw new Error('title required') + const now = new Date().toISOString() + const id = 'CUSTOM-' + Date.now() + const db = getDb() + await db('soa_controls').insert({ + id, + framework: 'CUSTOM', + control_id: id, + title, + description: (body.description || '').trim(), + theme: (body.theme || 'Custom').trim(), + applicable: true, + status: 'not_started', + owner: (body.owner || '').trim(), + justification: (body.justification || '').trim(), + evidence: '', + applicable_entities: '[]', + linked_templates: '[]', + updated_by: changedBy || 'unknown', + is_custom: true, + created_at: now, + updated_at: now, + }) + return await getById(id) +} + +async function updateCustomControl(id, body, { changedBy } = {}) { + const existing = await getById(id) + if (!existing || !existing.isCustom) return null + const patch = { updated_at: new Date().toISOString(), updated_by: changedBy || 'unknown' } + const allowed = ['title', 'theme', 'description', 'owner', 'applicable', 'status', 
'justification', 'linkedTemplates', 'applicableEntities'] + if (body.title !== undefined) patch.title = body.title + if (body.theme !== undefined) patch.theme = body.theme + if (body.description !== undefined) patch.description = body.description + if (body.owner !== undefined) patch.owner = body.owner + if (body.applicable !== undefined) patch.applicable = !!body.applicable + if (body.status !== undefined) patch.status = body.status + if (body.justification !== undefined) patch.justification = body.justification + if (body.linkedTemplates !== undefined) patch.linked_templates = JSON.stringify(body.linkedTemplates) + if (body.applicableEntities !== undefined) patch.applicable_entities = JSON.stringify(body.applicableEntities) + const db = getDb() + await db('soa_controls').where('id', id).update(patch) + return await getById(id) +} + +async function deleteCustomControl(id) { + const ctrl = await getById(id) + if (!ctrl) return { ok: false, reason: 'not_found' } + if (!ctrl.isCustom) return { ok: false, reason: 'not_custom' } + if ((ctrl.linkedTemplates || []).length > 0) return { ok: false, reason: 'has_links' } + const db = getDb() + await db('soa_controls').where('id', id).del() + return { ok: true } +} + +module.exports = { + init, + getFrameworks, + getAll, + getById, + update, + addLinkedTemplate, + removeLinkedTemplate, + getSummary, + createCustomControl, + updateCustomControl, + deleteCustomControl, + FRAMEWORKS, + IMPLEMENTATION_STATUSES, +} diff --git a/server/db/stores/supplierStore.js b/server/db/stores/supplierStore.js new file mode 100644 index 0000000..515b248 --- /dev/null +++ b/server/db/stores/supplierStore.js @@ -0,0 +1,186 @@ +'use strict' + +const { getDb, init: initDb } = require('../knexDatabase') + +function nowISO() { return new Date().toISOString() } +function makeId() { return `sup_${require('crypto').randomBytes(4).toString('hex')}` } +function _json(val, fallback) { if (!val) return fallback; try { return JSON.parse(val) } catch { 
return fallback } } + +function rowToSupplier(row) { + if (!row) return null + const d = _json(row.data, {}) + return { + id: row.id, name: row.name, category: row.category, + type: d.type || 'other', criticality: row.risk_level, + status: row.status, contact: row.contact, + country: d.country || '', contactName: d.contactName || '', + contactEmail: d.contactEmail || '', website: d.website || '', + description: d.description || '', products: d.products || '', + dataAccess: d.dataAccess || false, + dataCategories: d.dataCategories || [], + securityRequirements: d.securityRequirements || [], + lastAuditDate: d.lastAuditDate || '', nextAuditDate: row.next_audit || '', + auditResult: d.auditResult || 'not_scheduled', + contractId: d.contractId || '', avContractId: d.avContractId || '', + riskScore: d.riskScore || 0, + notes: row.notes, linkedControls: _json(row.linked_controls, []), + linkedPolicies: d.linkedPolicies || [], + applicableEntities: _json(row.applicable_entities, []), + contractEnd: row.contract_end || '', + deletedBy: d.deletedBy || '', updatedBy: d.updatedBy || '', + createdBy: row.created_by, createdAt: row.created_at, + updatedAt: row.updated_at, deletedAt: row.deleted_at || null, + } +} + +function packData(s) { + return JSON.stringify({ + type: s.type, country: s.country, contactName: s.contactName, + contactEmail: s.contactEmail, website: s.website, + description: s.description, products: s.products, + dataAccess: s.dataAccess, dataCategories: s.dataCategories || [], + securityRequirements: s.securityRequirements || [], + lastAuditDate: s.lastAuditDate, auditResult: s.auditResult, + contractId: s.contractId, avContractId: s.avContractId, + riskScore: s.riskScore, linkedPolicies: s.linkedPolicies || [], + deletedBy: s.deletedBy || '', updatedBy: s.updatedBy || '', + }) +} + +module.exports = { + init: async () => { await initDb() }, + + getAll: async ({ status, criticality, type } = {}) => { + const q = getDb()('suppliers').whereNull('deleted_at') + 
if (status) q.where('status', status) + if (criticality) q.where('risk_level', criticality) + let list = (await q).map(rowToSupplier) + if (type) list = list.filter(i => i.type === type) + return list + }, + + getById: async (id) => { + const row = await getDb()('suppliers').where('id', id).whereNull('deleted_at').first() + return rowToSupplier(row) + }, + + create: async (fields, { createdBy } = {}) => { + const id = makeId() + const now = nowISO() + const item = { + id, name: fields.name || '', type: fields.type || 'other', + criticality: fields.criticality || 'medium', status: fields.status || 'active', + country: fields.country || '', contactName: fields.contactName || '', + contactEmail: fields.contactEmail || '', website: fields.website || '', + description: fields.description || '', products: fields.products || '', + dataAccess: typeof fields.dataAccess === 'boolean' ? fields.dataAccess : !!fields.dataAccess, + dataCategories: Array.isArray(fields.dataCategories) ? fields.dataCategories : [], + securityRequirements: Array.isArray(fields.securityRequirements) ? fields.securityRequirements : [], + lastAuditDate: fields.lastAuditDate || '', nextAuditDate: fields.nextAuditDate || '', + auditResult: fields.auditResult || 'not_scheduled', + contractId: fields.contractId || '', avContractId: fields.avContractId || '', + riskScore: typeof fields.riskScore === 'number' ? fields.riskScore : (parseInt(fields.riskScore) || 0), + notes: fields.notes || '', + linkedControls: Array.isArray(fields.linkedControls) ? fields.linkedControls : [], + linkedPolicies: Array.isArray(fields.linkedPolicies) ? fields.linkedPolicies : [], + applicableEntities: Array.isArray(fields.applicableEntities) ? 
fields.applicableEntities : [], + contractEnd: fields.contractEnd || '', + createdBy: createdBy || 'system', + } + await getDb()('suppliers').insert({ + id, name: item.name, category: item.type, + contact: item.contactName, risk_level: item.criticality, + status: item.status, contract_end: item.contractEnd || null, + next_audit: item.nextAuditDate || null, notes: item.notes, + applicable_entities: JSON.stringify(item.applicableEntities), + linked_controls: JSON.stringify(item.linkedControls), + data: packData(item), created_by: item.createdBy, + created_at: now, updated_at: now, + }) + return { ...item, createdAt: now, updatedAt: now, deletedAt: null } + }, + + update: async (id, patch, { changedBy } = {}) => { + const row = await getDb()('suppliers').where('id', id).whereNull('deleted_at').first() + if (!row) return null + const s = rowToSupplier(row) + const allowed = ['name','type','criticality','status','country','contactName','contactEmail', + 'website','description','products','dataAccess','dataCategories','securityRequirements', + 'lastAuditDate','nextAuditDate','auditResult','contractId','avContractId', + 'riskScore','notes','linkedControls','linkedPolicies','applicableEntities','contractEnd'] + for (const k of allowed) { + if (patch[k] !== undefined) s[k] = patch[k] + } + s.updatedAt = nowISO() + if (changedBy) s.updatedBy = changedBy + await getDb()('suppliers').where('id', id).update({ + name: s.name, category: s.type, contact: s.contactName, + risk_level: s.criticality, status: s.status, + contract_end: s.contractEnd || null, next_audit: s.nextAuditDate || null, + notes: s.notes, + applicable_entities: JSON.stringify(s.applicableEntities || []), + linked_controls: JSON.stringify(s.linkedControls || []), + data: packData(s), updated_at: s.updatedAt, + }) + return s + }, + + remove: async (id, { deletedBy } = {}) => { + const row = await getDb()('suppliers').where('id', id).whereNull('deleted_at').first() + if (!row) return false + const d = 
_json(row.data, {}) + d.deletedBy = deletedBy || '' + await getDb()('suppliers').where('id', id).update({ + deleted_at: nowISO(), data: JSON.stringify(d), + }) + return true + }, + + permanentDelete: async (id) => { + const affected = await getDb()('suppliers').where('id', id).del() + return affected > 0 + }, + + restore: async (id) => { + const row = await getDb()('suppliers').where('id', id).first() + if (!row) return null + const d = _json(row.data, {}) + delete d.deletedBy + await getDb()('suppliers').where('id', id).update({ + deleted_at: null, data: JSON.stringify(d), updated_at: nowISO(), + }) + return rowToSupplier({ ...row, deleted_at: null }) + }, + + getDeleted: async () => { + const rows = await getDb()('suppliers').whereNotNull('deleted_at') + return rows.map(rowToSupplier) + }, + + getSummary: async () => { + const rows = await getDb()('suppliers').whereNull('deleted_at') + const list = rows.map(rowToSupplier) + const today = new Date().toISOString().slice(0, 10) + const in30 = new Date(Date.now() + 30 * 86400000).toISOString().slice(0, 10) + return { + total: list.length, + critical: list.filter(i => i.criticality === 'critical').length, + byStatus: { + active: list.filter(i => i.status === 'active').length, + under_review: list.filter(i => i.status === 'under_review').length, + inactive: list.filter(i => i.status === 'inactive').length, + terminated: list.filter(i => i.status === 'terminated').length, + }, + withDataAccess: list.filter(i => i.dataAccess).length, + upcomingAudits: list.filter(i => i.nextAuditDate && i.nextAuditDate >= today && i.nextAuditDate <= in30).length, + overdueAudits: list.filter(i => i.nextAuditDate && i.nextAuditDate < today).length, + } + }, + + getUpcomingAudits: async (days = 30) => { + const today = new Date().toISOString().slice(0, 10) + const cutoff = new Date(Date.now() + days * 86400000).toISOString().slice(0, 10) + const rows = await getDb()('suppliers').whereNull('deleted_at') + return 
rows.map(rowToSupplier).filter(i => i.nextAuditDate && i.nextAuditDate >= today && i.nextAuditDate <= cutoff) + }, +} diff --git a/server/db/stores/templateStore.js b/server/db/stores/templateStore.js new file mode 100644 index 0000000..e1fcc5f --- /dev/null +++ b/server/db/stores/templateStore.js @@ -0,0 +1,340 @@ +// © 2026 Claude Hecker — ISMS Builder — AGPL-3.0 +'use strict' + +const { getDb, init: initDb } = require('../knexDatabase') + +const VALID_STATUSES = ['draft', 'review', 'approved', 'archived'] +const TRANSITIONS = { + draft: [{ to: 'review', minRole: 'editor' }], + review: [{ to: 'approved', minRole: 'contentowner' }, + { to: 'draft', minRole: 'editor' }], + approved: [{ to: 'review', minRole: 'contentowner' }, + { to: 'archived', minRole: 'contentowner' }], + archived: [{ to: 'draft', minRole: 'admin' }], +} +const ROLE_RANK = { reader: 1, revision: 1, editor: 2, dept_head: 2, qmb: 2, contentowner: 3, auditor: 3, admin: 4 } + +function nowISO() { return new Date().toISOString() } +function generateId(type) { return `${type}_${Date.now()}` } + +function rowToTemplate(row) { + if (!row) return null + return { + id: row.id, + type: row.type, + language: row.language, + title: row.title, + content: row.content, + version: row.version, + status: row.status, + owner: row.owner || null, + nextReviewDate: row.next_review_date || null, + parentId: row.parent_id || null, + sortOrder: row.sort_order || 0, + createdAt: row.created_at, + updatedAt: row.updated_at, + deletedAt: row.deleted_at || null, + deletedBy: row.deleted_by || null, + linkedControls: _json(row.linked_controls, []), + applicableEntities: _json(row.applicable_entities, []), + attachments: _json(row.attachments, []), + history: _json(row.history, []), + statusHistory: _json(row.status_history, []), + } +} + +function _json(val, fallback) { + if (!val) return fallback + try { return JSON.parse(val) } catch { return fallback } +} + +module.exports = { + init: async () => { await initDb() }, + + 
getTemplates: async ({ type, language, status } = {}) => { + const q = getDb()('templates').whereNull('deleted_at') + if (type) q.where('type', type) + if (language) q.where('language', language) + if (status) q.where('status', status) + q.orderBy('sort_order', 'asc').orderBy('title', 'asc') + const rows = await q + return rows.map(rowToTemplate) + }, + + getTemplate: async (type, id) => { + const row = await getDb()('templates') + .where('type', type) + .where('id', id) + .whereNull('deleted_at') + .first() + return rowToTemplate(row) + }, + + createTemplate: async ({ type, language, title, content, owner, parentId }) => { + const id = generateId(type) + const now = nowISO() + await getDb()('templates').insert({ + id, + type, + language: language || 'de', + title: title || '', + content: content || '', + version: 1, + status: 'draft', + owner: owner || null, + next_review_date: null, + parent_id: parentId || null, + sort_order: 0, + created_at: now, + updated_at: now, + linked_controls: '[]', + applicable_entities: '[]', + attachments: '[]', + history: JSON.stringify([{ version: 1, content: content || '', updatedAt: now }]), + status_history: JSON.stringify([{ status: 'draft', changedBy: owner || 'system', changedAt: now }]), + }) + return module.exports.getTemplate(type, id) + }, + + updateTemplate: async (type, id, { title, content, owner, applicableEntities, linkedControls, parentId, nextReviewDate }) => { + const row = await getDb()('templates') + .where('type', type).where('id', id).whereNull('deleted_at').first() + if (!row) return null + const t = rowToTemplate(row) + + if (title !== undefined) t.title = title + if (typeof content === 'string') t.content = content + if (owner !== undefined) t.owner = owner + if (Array.isArray(applicableEntities)) t.applicableEntities = applicableEntities + if (Array.isArray(linkedControls)) t.linkedControls = linkedControls + if (parentId !== undefined) t.parentId = parentId || null + if (nextReviewDate !== undefined) 
t.nextReviewDate = nextReviewDate || null + + t.version += 1 + t.updatedAt = nowISO() + t.history.push({ version: t.version, content: t.content, updatedAt: t.updatedAt }) + + await getDb()('templates').where('type', type).where('id', id).update({ + title: t.title, + content: t.content, + version: t.version, + owner: t.owner, + next_review_date: t.nextReviewDate, + parent_id: t.parentId, + updated_at: t.updatedAt, + linked_controls: JSON.stringify(t.linkedControls), + applicable_entities: JSON.stringify(t.applicableEntities), + history: JSON.stringify(t.history), + status_history: JSON.stringify(t.statusHistory), + }) + return t + }, + + addLinkedControl: async (templateType, templateId, controlId) => { + const row = await getDb()('templates') + .where('type', templateType).where('id', templateId).whereNull('deleted_at').first() + if (!row) return null + const t = rowToTemplate(row) + if (!t.linkedControls.includes(controlId)) { + t.linkedControls.push(controlId) + t.updatedAt = nowISO() + await getDb()('templates').where('type', templateType).where('id', templateId).update({ + linked_controls: JSON.stringify(t.linkedControls), + updated_at: t.updatedAt, + }) + } + return t + }, + + removeLinkedControl: async (templateType, templateId, controlId) => { + const row = await getDb()('templates') + .where('type', templateType).where('id', templateId).whereNull('deleted_at').first() + if (!row) return null + const t = rowToTemplate(row) + t.linkedControls = t.linkedControls.filter(c => c !== controlId) + t.updatedAt = nowISO() + await getDb()('templates').where('type', templateType).where('id', templateId).update({ + linked_controls: JSON.stringify(t.linkedControls), + updated_at: t.updatedAt, + }) + return t + }, + + setStatus: async (type, id, { status: newStatus, changedBy, role }) => { + const row = await getDb()('templates') + .where('type', type).where('id', id).whereNull('deleted_at').first() + if (!row) return { ok: false, error: 'Not found' } + if 
(!VALID_STATUSES.includes(newStatus)) return { ok: false, error: 'Invalid status' } + + const t = rowToTemplate(row) + const currentStatus = t.status || 'draft' + if (currentStatus === newStatus) return { ok: false, error: 'Already in this status' } + + const allowed = (TRANSITIONS[currentStatus] || []).find(tr => tr.to === newStatus) + if (!allowed) return { ok: false, error: `Transition ${currentStatus} → ${newStatus} not allowed` } + + const userRank = ROLE_RANK[role?.toLowerCase()] || 0 + const requiredRank = ROLE_RANK[allowed.minRole] || 0 + if (userRank < requiredRank) { + return { ok: false, error: `Role '${role}' insufficient. Requires '${allowed.minRole}'` } + } + + const now = nowISO() + t.status = newStatus + t.updatedAt = now + if (!Array.isArray(t.statusHistory)) t.statusHistory = [] + t.statusHistory.push({ status: newStatus, changedBy: changedBy || 'unknown', changedAt: now }) + + await getDb()('templates').where('type', type).where('id', id).update({ + status: newStatus, + updated_at: now, + status_history: JSON.stringify(t.statusHistory), + }) + return { ok: true, template: t } + }, + + deleteTemplate: async (type, id, deletedBy) => { + const affected = await getDb()('templates') + .where('type', type).where('id', id).whereNull('deleted_at') + .update({ deleted_at: nowISO(), deleted_by: deletedBy || null }) + return affected > 0 + }, + + permanentDeleteTemplate: async (type, id) => { + const affected = await getDb()('templates').where('type', type).where('id', id).del() + return affected > 0 + }, + + restoreTemplate: async (type, id) => { + const row = await getDb()('templates').where('type', type).where('id', id).first() + if (!row) return null + await getDb()('templates').where('type', type).where('id', id).update({ + deleted_at: null, + deleted_by: null, + updated_at: nowISO(), + }) + return rowToTemplate({ ...row, deleted_at: null, deleted_by: null }) + }, + + getDeletedTemplates: async () => { + const rows = await getDb()('templates') + 
.whereNotNull('deleted_at') + .orderBy('deleted_at', 'desc') + return rows.map(rowToTemplate) + }, + + getHistory: async (type, id) => { + const row = await getDb()('templates').where('type', type).where('id', id) + .first('history') + return row ? _json(row.history, []) : null + }, + + getStatusHistory: async (type, id) => { + const row = await getDb()('templates').where('type', type).where('id', id) + .first('status_history') + return row ? _json(row.status_history, []) : null + }, + + getTemplateTree: async (type, language) => { + const q = getDb()('templates').whereNull('deleted_at') + if (type) q.where('type', type) + if (language) q.where('language', language) + const list = (await q).map(rowToTemplate) + + const byId = {} + list.forEach(t => { byId[t.id] = { ...t, children: [] } }) + const roots = [] + list.forEach(t => { + const pid = t.parentId || null + if (pid && byId[pid]) byId[pid].children.push(byId[t.id]) + else roots.push(byId[t.id]) + }) + function sortLevel(nodes) { + nodes.sort((a, b) => ((a.sortOrder || 0) - (b.sortOrder || 0)) || a.title.localeCompare(b.title, 'de')) + nodes.forEach(n => sortLevel(n.children)) + } + sortLevel(roots) + return roots + }, + + moveTemplate: async (type, id, { parentId, sortOrder }) => { + const row = await getDb()('templates').where('type', type).where('id', id).first() + if (!row) return null + + if (parentId) { + let cursor = parentId + const visited = new Set() + while (cursor) { + if (cursor === id) return { error: 'circular' } + if (visited.has(cursor)) break + visited.add(cursor) + const p = await getDb()('templates').where('id', cursor).first('parent_id') + cursor = p?.parent_id || null + } + } + await getDb()('templates').where('type', type).where('id', id).update({ + parent_id: parentId || null, + sort_order: sortOrder ?? 
row.sort_order, + updated_at: nowISO(), + }) + return { ok: true } + }, + + reorderTemplates: async (updates) => { + for (const { id, sortOrder } of updates) { + await getDb()('templates').where('id', id).update({ + sort_order: sortOrder, + updated_at: nowISO(), + }) + } + return true + }, + + getTemplateBreadcrumb: async (type, id) => { + const crumbs = [] + let currentId = id + const visited = new Set() + while (currentId) { + if (visited.has(currentId)) break + visited.add(currentId) + const row = await getDb()('templates').where('id', currentId) + .first('id', 'title', 'type', 'parent_id') + if (!row) break + crumbs.unshift({ id: row.id, title: row.title, type: row.type }) + currentId = row.parent_id || null + } + return crumbs + }, + + addAttachment: async (type, id, attachmentMeta) => { + const row = await getDb()('templates') + .where('type', type).where('id', id).whereNull('deleted_at').first() + if (!row) return null + const t = rowToTemplate(row) + t.attachments.push(attachmentMeta) + t.updatedAt = nowISO() + await getDb()('templates').where('type', type).where('id', id).update({ + attachments: JSON.stringify(t.attachments), + updated_at: t.updatedAt, + }) + return t + }, + + removeAttachment: async (type, id, attId) => { + const row = await getDb()('templates') + .where('type', type).where('id', id).whereNull('deleted_at').first() + if (!row) return null + const t = rowToTemplate(row) + const att = t.attachments.find(a => a.id === attId) || null + t.attachments = t.attachments.filter(a => a.id !== attId) + t.updatedAt = nowISO() + await getDb()('templates').where('type', type).where('id', id).update({ + attachments: JSON.stringify(t.attachments), + updated_at: t.updatedAt, + }) + return { template: t, attachment: att } + }, + + TRANSITIONS, + VALID_STATUSES, +} diff --git a/server/db/stores/trainingStore.js b/server/db/stores/trainingStore.js new file mode 100644 index 0000000..0b50fbd --- /dev/null +++ b/server/db/stores/trainingStore.js @@ -0,0 +1,202 
@@ +'use strict' + +const { getDb, init: initDb } = require('../knexDatabase') + +function nowISO() { return new Date().toISOString() } +function makeId() { return `training_${Date.now()}_${Math.random().toString(36).slice(2,6)}` } +function _json(val, fallback) { if (!val) return fallback; try { return JSON.parse(val) } catch { return fallback } } + +const CATEGORIES = ['security_awareness','iso27001','gdpr','technical','management','other'] +const STATUSES = ['planned','in_progress','completed','cancelled'] + +function isOverdue(item) { + if (item.status === 'completed' || item.status === 'cancelled') return false + if (!item.dueDate) return false + return new Date(item.dueDate) < new Date() +} + +function rowToTraining(row) { + if (!row) return null + const d = _json(row.data, {}) + return { + id: row.id, title: row.title, description: row.description, + category: row.category, status: row.status, + dueDate: row.due_date, completedDate: row.completed_date, + instructor: row.instructor, assignees: row.assignees, + applicableEntities: _json(row.applicable_entities, []), + evidence: row.evidence, mandatory: !!row.mandatory, + linkedControls: d.linkedControls || [], + linkedPolicies: d.linkedPolicies || [], + deletedBy: d.deletedBy || '', + createdBy: row.created_by, createdAt: row.created_at, + updatedAt: row.updated_at, deletedAt: row.deleted_at || null, + overdue: isOverdue({ status: row.status, dueDate: row.due_date }), + } +} + +module.exports = { + init: async () => { + await initDb() + const db = getDb() + const count = await db('training').count('id as cnt').first() + if (!count?.cnt) { + const now = nowISO() + await db('training').insert([ + { + id: makeId(), title: 'Security Awareness Grundlagen', + description: 'Jährliche Pflichtschulung für alle Mitarbeitenden.', + category: 'security_awareness', status: 'planned', + due_date: new Date(Date.now() + 30 * 86400000).toISOString().slice(0, 10), + completed_date: null, instructor: 'IT-Security Team', + 
assignees: 'Alle Mitarbeitenden', applicable_entities: '[]', + evidence: '', mandatory: 1, data: '{}', + created_by: 'system', created_at: now, updated_at: now, + }, + { + id: makeId(), title: 'DSGVO-Schulung für neue Mitarbeitende', + description: 'Einführung in Datenschutzpflichten gem. Art. 39 DSGVO.', + category: 'gdpr', status: 'completed', + due_date: new Date(Date.now() - 10 * 86400000).toISOString().slice(0, 10), + completed_date: new Date(Date.now() - 12 * 86400000).toISOString().slice(0, 10), + instructor: 'Datenschutzbeauftragter', assignees: 'HR-Abteilung, neue MA Q1', + applicable_entities: '[]', + evidence: 'Teilnehmerliste und Attestat im SharePoint abgelegt.', + mandatory: 1, data: '{}', + created_by: 'system', created_at: now, updated_at: now, + }, + { + id: makeId(), title: 'ISO 27001 Lead Auditor Zertifizierung', + description: 'Externe Zertifizierungsschulung für ISMS-Verantwortliche.', + category: 'iso27001', status: 'in_progress', + due_date: new Date(Date.now() + 60 * 86400000).toISOString().slice(0, 10), + completed_date: null, instructor: 'TÜV Rheinland', + assignees: 'ISMS-Team (3 Personen)', applicable_entities: '[]', + evidence: '', mandatory: 0, data: '{}', + created_by: 'system', created_at: now, updated_at: now, + }, + ]) + } + }, + + getAll: async ({ status, category, entity } = {}) => { + const q = getDb()('training').whereNull('deleted_at') + if (status) q.where('status', status) + if (category) q.where('category', category) + let list = (await q).map(rowToTraining) + if (entity) list = list.filter(i => !i.applicableEntities?.length || i.applicableEntities.includes(entity)) + return list + }, + + getById: async (id) => { + const row = await getDb()('training').where('id', id).whereNull('deleted_at').first() + return rowToTraining(row) + }, + + getSummary: async () => { + const rows = await getDb()('training').whereNull('deleted_at') + const list = rows.map(rowToTraining) + const total = list.length + const completed = list.filter(i 
=> i.status === 'completed').length + return { + total, + planned: list.filter(i => i.status === 'planned').length, + inProgress: list.filter(i => i.status === 'in_progress').length, + completed, + cancelled: list.filter(i => i.status === 'cancelled').length, + overdue: list.filter(i => i.overdue).length, + completionRate: total > 0 ? Math.round((completed / total) * 100) : 0, + } + }, + + create: async (fields, createdBy) => { + const id = makeId() + const now = nowISO() + const item = { + id, title: fields.title || 'Ohne Titel', + description: fields.description || '', + category: CATEGORIES.includes(fields.category) ? fields.category : 'other', + status: STATUSES.includes(fields.status) ? fields.status : 'planned', + dueDate: fields.dueDate || null, + completedDate: fields.completedDate || null, + instructor: fields.instructor || '', + assignees: fields.assignees || '', + applicableEntities: Array.isArray(fields.applicableEntities) ? fields.applicableEntities : [], + evidence: fields.evidence || '', + mandatory: fields.mandatory === true, + linkedControls: Array.isArray(fields.linkedControls) ? fields.linkedControls : [], + linkedPolicies: Array.isArray(fields.linkedPolicies) ? fields.linkedPolicies : [], + createdBy: createdBy || 'system', + } + await getDb()('training').insert({ + id, title: item.title, description: item.description, + category: item.category, status: item.status, + due_date: item.dueDate, completed_date: item.completedDate, + instructor: item.instructor, assignees: item.assignees, + applicable_entities: JSON.stringify(item.applicableEntities), + evidence: item.evidence, mandatory: item.mandatory ? 
1 : 0, + data: JSON.stringify({ linkedControls: item.linkedControls, linkedPolicies: item.linkedPolicies }), + created_by: item.createdBy, created_at: now, updated_at: now, + }) + return { ...item, createdAt: now, updatedAt: now, overdue: isOverdue(item) } + }, + + update: async (id, fields) => { + const row = await getDb()('training').where('id', id).whereNull('deleted_at').first() + if (!row) return null + const item = rowToTraining(row) + const allowed = ['title','description','category','status','dueDate','completedDate', + 'instructor','assignees','applicableEntities','evidence','mandatory','linkedControls','linkedPolicies'] + for (const k of allowed) { + if (fields[k] !== undefined) item[k] = fields[k] + } + if (fields.status === 'completed' && !item.completedDate) { + item.completedDate = nowISO().slice(0, 10) + } + item.updatedAt = nowISO() + await getDb()('training').where('id', id).update({ + title: item.title, description: item.description, + category: item.category, status: item.status, + due_date: item.dueDate, completed_date: item.completedDate, + instructor: item.instructor, assignees: item.assignees, + applicable_entities: JSON.stringify(item.applicableEntities || []), + evidence: item.evidence, mandatory: item.mandatory ? 
1 : 0, + data: JSON.stringify({ linkedControls: item.linkedControls || [], linkedPolicies: item.linkedPolicies || [] }), + updated_at: item.updatedAt, + }) + return { ...item, overdue: isOverdue(item) } + }, + + delete: async (id, deletedBy) => { + const row = await getDb()('training').where('id', id).first() + if (!row) return false + const d = _json(row.data, {}) + d.deletedBy = deletedBy || '' + await getDb()('training').where('id', id).update({ + deleted_at: nowISO(), data: JSON.stringify(d), + }) + return true + }, + + permanentDelete: async (id) => { + const affected = await getDb()('training').where('id', id).del() + return affected > 0 + }, + + restore: async (id) => { + const row = await getDb()('training').where('id', id).first() + if (!row) return null + const d = _json(row.data, {}) + delete d.deletedBy + await getDb()('training').where('id', id).update({ + deleted_at: null, data: JSON.stringify(d), updated_at: nowISO(), + }) + return rowToTraining({ ...row, deleted_at: null }) + }, + + getDeleted: async () => { + const rows = await getDb()('training').whereNotNull('deleted_at') + return rows.map(rowToTraining) + }, + + CATEGORIES, STATUSES, +} diff --git a/server/db/supplierStore.js b/server/db/supplierStore.js index 84ac170..a3da6eb 100644 --- a/server/db/supplierStore.js +++ b/server/db/supplierStore.js @@ -1,5 +1,6 @@ // © 2026 Claude Hecker — ISMS Builder V 1.29 — AGPL-3.0 'use strict' +const STORAGE_BACKEND = (process.env.STORAGE_BACKEND || 'json').toLowerCase() const fs = require('fs') const path = require('path') @@ -152,7 +153,7 @@ function getUpcomingAudits(days = 30) { ) } -module.exports = { +const _jsonExports = { getAll, getById, create, @@ -164,3 +165,11 @@ module.exports = { getSummary, getUpcomingAudits, } + +if (STORAGE_BACKEND !== 'json') { + const _knex = require('./stores/supplierStore') + _knex.init().catch(e => console.error('[supplierStore] Knex init:', e.message)) + module.exports = _knex +} else { + module.exports = 
_jsonExports +} diff --git a/server/db/trainingStore.js b/server/db/trainingStore.js index 65a638e..82ef10e 100644 --- a/server/db/trainingStore.js +++ b/server/db/trainingStore.js @@ -1,5 +1,6 @@ // © 2026 Claude Hecker — ISMS Builder V 1.29 — AGPL-3.0 'use strict' +const STORAGE_BACKEND = (process.env.STORAGE_BACKEND || 'json').toLowerCase() const fs = require('fs') const path = require('path') @@ -163,4 +164,12 @@ if (!fs.existsSync(DATA_FILE)) { save(seed) } -module.exports = { getAll, getById, getSummary, create, update, delete: del, permanentDelete, restore, getDeleted, CATEGORIES, STATUSES } +const _jsonExports = { getAll, getById, getSummary, create, update, delete: del, permanentDelete, restore, getDeleted, CATEGORIES, STATUSES } + +if (STORAGE_BACKEND !== 'json') { + const _knex = require('./stores/trainingStore') + _knex.init().catch(e => console.error('[trainingStore] Knex init:', e.message)) + module.exports = _knex +} else { + module.exports = _jsonExports +} diff --git a/server/routes/ackPublic.js b/server/routes/ackPublic.js index 6b7bbec..7b5b5e1 100644 --- a/server/routes/ackPublic.js +++ b/server/routes/ackPublic.js @@ -7,8 +7,8 @@ const ackStore = require('../db/ackStore') const storage = require('../storage') // ── GET /ack/:token — Bestätigungsseite anzeigen ────────────────────────────── -router.get('/ack/:token', (req, res) => { - const ack = ackStore.getAckByToken(req.params.token) +router.get('/ack/:token', async (req, res) => { + const ack = await ackStore.getAckByToken(req.params.token) if (!ack) { return res.status(404).send(` ISMS Builder @@ -17,14 +17,14 @@ router.get('/ack/:token', (req, res) => { `) } - const dist = ackStore.getDistribution(ack.distributionId) + const dist = await ackStore.getDistribution(ack.distributionId) if (!dist) return res.status(404).send('Verteilrunde nicht gefunden') // Policy-Inhalt laden let policyContent = '' let policyTitle = dist.templateTitle try { - const all = storage.getTemplates({}) || [] + const 
all = await storage.getTemplates({}) || [] const tmpl = all.find(t => t.id === dist.templateId) if (tmpl) { policyTitle = tmpl.title || dist.templateTitle @@ -113,18 +113,18 @@ router.get('/ack/:token', (req, res) => { }) // ── POST /ack/:token — Bestätigung speichern ────────────────────────────────── -router.post('/ack/:token', express.urlencoded({ extended: false }), (req, res) => { +router.post('/ack/:token', express.urlencoded({ extended: false }), async (req, res) => { const ip = req.headers['x-forwarded-for']?.split(',')[0]?.trim() || req.socket.remoteAddress || '' const name = (req.body.recipientName || '').trim().slice(0, 200) - const ack = ackStore.confirmByToken(req.params.token, { recipientName: name, ipAddress: ip }) + const ack = await ackStore.confirmByToken(req.params.token, { recipientName: name, ipAddress: ip }) if (!ack) { return res.status(404).send(` ISMS Builder

Link ungültig

Dieser Link ist nicht mehr gültig.

`) } - const dist = ackStore.getDistribution(ack.distributionId) || {} + const dist = await ackStore.getDistribution(ack.distributionId) || {} res.send(` diff --git a/server/routes/acknowledgements.js b/server/routes/acknowledgements.js index 329ed12..5db73f1 100644 --- a/server/routes/acknowledgements.js +++ b/server/routes/acknowledgements.js @@ -11,8 +11,9 @@ const storage = require('../storage') // ── Helper ──────────────────────────────────────────────────────────────────── -function getMode() { - return orgSettings.get().policyAckMode || 'manual' +async function getMode() { + const settings = await orgSettings.get() + return settings.policyAckMode || 'manual' } function buildTokenUrl(req, token) { @@ -22,7 +23,7 @@ function buildTokenUrl(req, token) { } async function sendCampaignMails(dist, req) { - const acks = ackStore.getAcksForDistribution(dist.id) + const acks = await ackStore.getAcksForDistribution(dist.id) let sent = 0 for (const ack of acks) { if (ack.acknowledgedAt) continue // already confirmed — skip @@ -49,47 +50,47 @@ async function sendCampaignMails(dist, req) { } // ── GET /admin/ack-settings — aktueller Modus ───────────────────────────────── -router.get('/admin/ack-settings', requireAuth, authorize('admin'), (req, res) => { - res.json({ policyAckMode: getMode() }) +router.get('/admin/ack-settings', requireAuth, authorize('admin'), async (req, res) => { + res.json({ policyAckMode: await getMode() }) }) // ── PUT /admin/ack-settings — Modus ändern (nur admin) ─────────────────────── -router.put('/admin/ack-settings', requireAuth, authorize('admin'), (req, res) => { +router.put('/admin/ack-settings', requireAuth, authorize('admin'), async (req, res) => { const { policyAckMode } = req.body const VALID = ['email_campaign', 'manual', 'distribution_only'] if (!VALID.includes(policyAckMode)) return res.status(400).json({ error: 'Ungültiger Modus' }) - orgSettings.update({ policyAckMode }) + await orgSettings.update({ policyAckMode }) res.json({ ok: 
true, policyAckMode }) }) // ── GET /distributions — alle Verteilrunden (contentowner+) ────────────────── -router.get('/distributions', requireAuth, authorize('contentowner'), (req, res) => { - res.json(ackStore.getAllDistributionsWithStats()) +router.get('/distributions', requireAuth, authorize('contentowner'), async (req, res) => { + res.json(await ackStore.getAllDistributionsWithStats()) }) // ── GET /distributions/summary — KPI für Dashboard ─────────────────────────── -router.get('/distributions/summary', requireAuth, authorize('reader'), (req, res) => { - res.json(ackStore.getSummary()) +router.get('/distributions/summary', requireAuth, authorize('reader'), async (req, res) => { + res.json(await ackStore.getSummary()) }) // ── GET /distributions/:id — Detail + Stats ─────────────────────────────────── -router.get('/distributions/:id', requireAuth, authorize('contentowner'), (req, res) => { - const dist = ackStore.getDistributionWithStats(req.params.id) +router.get('/distributions/:id', requireAuth, authorize('contentowner'), async (req, res) => { + const dist = await ackStore.getDistributionWithStats(req.params.id) if (!dist) return res.status(404).json({ error: 'Nicht gefunden' }) res.json(dist) }) // ── POST /distributions — neue Verteilrunde anlegen ────────────────────────── router.post('/distributions', requireAuth, authorize('contentowner'), async (req, res) => { - const { templateId, dueDate, targetGroup, emailList, notes } = req.body + const { templateId, dueDate, targetGroup, emailList, notes, mode } = req.body if (!templateId) return res.status(400).json({ error: 'templateId fehlt' }) - const mode = getMode() + const effectiveMode = mode || await getMode() // Vorlage laden für Titel/Typ/Version let templateTitle = '', templateType = 'Policy', templateVersion = 1 try { - const all = storage.getTemplates({}) || [] + const all = await storage.getTemplates({}) || [] const tmpl = all.find(t => t.id === templateId) if (tmpl) { templateTitle = tmpl.title 
|| '' @@ -101,68 +102,67 @@ router.post('/distributions', requireAuth, authorize('contentowner'), async (req } } catch {} - const dist = ackStore.createDistribution({ + const dist = await ackStore.createDistribution({ templateId, templateTitle, templateType, templateVersion, - mode, - targetGroup: targetGroup || '', - dueDate: dueDate || null, - emailList: mode === 'email_campaign' ? (emailList || []) : [], - notes: notes || '', - createdBy: req.user?.username || req.user?.email || 'system', + mode: effectiveMode, + targetGroup: targetGroup || '', + dueDate: dueDate || null, + emailList: effectiveMode === 'email_campaign' ? (emailList || []) : [], + notes: notes || '', + createdBy: req.user?.username || req.user?.email || 'system', }) - // Für E-Mail-Modus: Ack-Records vorbereiten - if (mode === 'email_campaign' && dist.emailList.length > 0) { - ackStore.prepareEmailAcks(dist.id, dist.emailList) + if (effectiveMode === 'email_campaign' && dist.emailList.length > 0) { + await ackStore.prepareEmailAcks(dist.id, dist.emailList) } - res.status(201).json(ackStore.getDistributionWithStats(dist.id)) + res.status(201).json(await ackStore.getDistributionWithStats(dist.id)) }) // ── PUT /distributions/:id — bearbeiten (status, dueDate, notes) ───────────── -router.put('/distributions/:id', requireAuth, authorize('contentowner'), (req, res) => { +router.put('/distributions/:id', requireAuth, authorize('contentowner'), async (req, res) => { const allowed = ['status', 'dueDate', 'targetGroup', 'notes'] const patch = {} for (const k of allowed) { if (req.body[k] !== undefined) patch[k] = req.body[k] } - const updated = ackStore.updateDistribution(req.params.id, patch) + const updated = await ackStore.updateDistribution(req.params.id, patch) if (!updated) return res.status(404).json({ error: 'Nicht gefunden' }) - res.json(ackStore.getDistributionWithStats(updated.id)) + res.json(await ackStore.getDistributionWithStats(updated.id)) }) // ── DELETE /distributions/:id — löschen 
(admin) ────────────────────────────── -router.delete('/distributions/:id', requireAuth, authorize('admin'), (req, res) => { - const ok = ackStore.deleteDistribution(req.params.id) +router.delete('/distributions/:id', requireAuth, authorize('admin'), async (req, res) => { + const ok = await ackStore.deleteDistribution(req.params.id) if (!ok) return res.status(404).json({ error: 'Nicht gefunden' }) res.json({ ok: true }) }) // ── POST /distributions/:id/send — E-Mails versenden (email_campaign) ───────── router.post('/distributions/:id/send', requireAuth, authorize('contentowner'), async (req, res) => { - const dist = ackStore.getDistribution(req.params.id) + const dist = await ackStore.getDistribution(req.params.id) if (!dist) return res.status(404).json({ error: 'Nicht gefunden' }) if (dist.mode !== 'email_campaign') return res.status(400).json({ error: 'Nur für E-Mail-Kampagnen' }) // Ggf. neue E-Mail-Adressen aus Anfrage hinzufügen if (Array.isArray(req.body.emailList) && req.body.emailList.length > 0) { const merged = [...new Set([...dist.emailList, ...req.body.emailList])] - ackStore.updateDistribution(dist.id, { emailList: merged }) - ackStore.prepareEmailAcks(dist.id, merged) + await ackStore.updateDistribution(dist.id, { emailList: merged }) + await ackStore.prepareEmailAcks(dist.id, merged) } else { - ackStore.prepareEmailAcks(dist.id, dist.emailList) + await ackStore.prepareEmailAcks(dist.id, dist.emailList) } - const fresh = ackStore.getDistribution(dist.id) + const fresh = await ackStore.getDistribution(dist.id) const sent = await sendCampaignMails(fresh, req) - ackStore.updateDistribution(dist.id, { emailSentAt: new Date().toISOString(), emailSentCount: (fresh.emailSentCount || 0) + sent }) + await ackStore.updateDistribution(dist.id, { emailSentAt: new Date().toISOString(), emailSentCount: (fresh.emailSentCount || 0) + sent }) res.json({ ok: true, sent }) }) // ── POST /distributions/:id/remind — Erinnerung an nicht bestätigte ─────────── 
router.post('/distributions/:id/remind', requireAuth, authorize('contentowner'), async (req, res) => { - const dist = ackStore.getDistribution(req.params.id) + const dist = await ackStore.getDistribution(req.params.id) if (!dist) return res.status(404).json({ error: 'Nicht gefunden' }) if (dist.mode !== 'email_campaign') return res.status(400).json({ error: 'Nur für E-Mail-Kampagnen' }) const sent = await sendCampaignMails(dist, req) @@ -170,18 +170,18 @@ router.post('/distributions/:id/remind', requireAuth, authorize('contentowner'), }) // ── GET /distributions/:id/acks — alle Bestätigungen ───────────────────────── -router.get('/distributions/:id/acks', requireAuth, authorize('contentowner'), (req, res) => { - const dist = ackStore.getDistribution(req.params.id) +router.get('/distributions/:id/acks', requireAuth, authorize('contentowner'), async (req, res) => { + const dist = await ackStore.getDistribution(req.params.id) if (!dist) return res.status(404).json({ error: 'Nicht gefunden' }) - res.json(ackStore.getAcksForDistribution(req.params.id)) + res.json(await ackStore.getAcksForDistribution(req.params.id)) }) // ── POST /distributions/:id/acks — manuelle Bestätigung hinzufügen ───────────── -router.post('/distributions/:id/acks', requireAuth, authorize('contentowner'), (req, res) => { - const dist = ackStore.getDistribution(req.params.id) +router.post('/distributions/:id/acks', requireAuth, authorize('contentowner'), async (req, res) => { + const dist = await ackStore.getDistribution(req.params.id) if (!dist) return res.status(404).json({ error: 'Nicht gefunden' }) const { recipientEmail, recipientName, acknowledgedAt, notes } = req.body - const ack = ackStore.addManualAck({ + const ack = await ackStore.addManualAck({ distributionId: req.params.id, recipientEmail: recipientEmail || '', recipientName: recipientName || '', @@ -193,12 +193,12 @@ router.post('/distributions/:id/acks', requireAuth, authorize('contentowner'), ( }) // ── POST 
/distributions/:id/acks/import — CSV-Import ───────────────────────── -router.post('/distributions/:id/acks/import', requireAuth, authorize('contentowner'), (req, res) => { - const dist = ackStore.getDistribution(req.params.id) +router.post('/distributions/:id/acks/import', requireAuth, authorize('contentowner'), async (req, res) => { + const dist = await ackStore.getDistribution(req.params.id) if (!dist) return res.status(404).json({ error: 'Nicht gefunden' }) const { rows } = req.body // [{ email, name, acknowledgedAt }] if (!Array.isArray(rows)) return res.status(400).json({ error: 'rows[] erwartet' }) - const result = ackStore.importAcks( + const result = await ackStore.importAcks( req.params.id, rows, req.user?.username || req.user?.email || 'system' @@ -207,17 +207,17 @@ router.post('/distributions/:id/acks/import', requireAuth, authorize('contentown }) // ── DELETE /distributions/:id/acks/:ackId — einzelne Bestätigung löschen ────── -router.delete('/distributions/:id/acks/:ackId', requireAuth, authorize('admin'), (req, res) => { - const ok = ackStore.deleteAck(req.params.ackId) +router.delete('/distributions/:id/acks/:ackId', requireAuth, authorize('admin'), async (req, res) => { + const ok = await ackStore.deleteAck(req.params.ackId) if (!ok) return res.status(404).json({ error: 'Nicht gefunden' }) res.json({ ok: true }) }) // ── GET /distributions/:id/export/csv — CSV-Export ─────────────────────────── -router.get('/distributions/:id/export/csv', requireAuth, authorize('contentowner'), (req, res) => { - const dist = ackStore.getDistributionWithStats(req.params.id) +router.get('/distributions/:id/export/csv', requireAuth, authorize('contentowner'), async (req, res) => { + const dist = await ackStore.getDistributionWithStats(req.params.id) if (!dist) return res.status(404).json({ error: 'Nicht gefunden' }) - const acks = ackStore.getAcksForDistribution(req.params.id) + const acks = await ackStore.getAcksForDistribution(req.params.id) const header = 
'E-Mail;Name;Bestätigt am;Methode\n' const rows = acks.map(a => diff --git a/server/routes/admin.js b/server/routes/admin.js index bbfcd0c..018bba5 100644 --- a/server/routes/admin.js +++ b/server/routes/admin.js @@ -127,7 +127,7 @@ router.post('/admin/users', requireAuth, authorize('admin'), async (req, res) => try { const user = await require('../rbacStore').createUser({ username, email, domain, role, functions, password }) const fnStr = (functions||[]).join(', ') || '—' - auditStore.append({ user: req.user, action: 'create', resource: 'user', resourceId: username, detail: `Rolle: ${role} | Funktionen: ${fnStr}` }) + await auditStore.append({ user: req.user, action: 'create', resource: 'user', resourceId: username, detail: `Rolle: ${role} | Funktionen: ${fnStr}` }) res.status(201).json(user) } catch (e) { res.status(409).json({ error: e.message }) @@ -141,72 +141,73 @@ router.put('/admin/users/:username', requireAuth, authorize('admin'), async (req const updated = await require('../rbacStore').updateUser(username, { email, domain, role, functions, password: password || undefined }) if (!updated) return res.status(404).json({ error: 'Not found' }) const fnStr = (functions||[]).join(', ') || '—' - auditStore.append({ user: req.user, action: 'update', resource: 'user', resourceId: username, detail: role ? `Neue Rolle: ${role} | Funktionen: ${fnStr}` : 'Profil aktualisiert' }) + await auditStore.append({ user: req.user, action: 'update', resource: 'user', resourceId: username, detail: role ? 
`Neue Rolle: ${role} | Funktionen: ${fnStr}` : 'Profil aktualisiert' }) res.json(updated) }) -router.delete('/admin/users/:username', requireAuth, authorize('admin'), (req, res) => { +router.delete('/admin/users/:username', requireAuth, authorize('admin'), async (req, res) => { const { username } = req.params if (username === req.user) return res.status(400).json({ error: 'Eigenen Account nicht löschbar' }) const ok = require('../rbacStore').deleteUser(username) if (!ok) return res.status(404).json({ error: 'Not found' }) - auditStore.append({ user: req.user, action: 'delete', resource: 'user', resourceId: username }) + await auditStore.append({ user: req.user, action: 'delete', resource: 'user', resourceId: username }) res.json({ deleted: true }) }) // ── Custom editable lists ── -router.get('/admin/lists', requireAuth, (req, res) => { - res.json(customListsStore.getAll()) +router.get('/admin/lists', requireAuth, async (req, res) => { + res.json(await customListsStore.getAll()) }) -router.put('/admin/list/:listId', requireAuth, authorize('admin'), (req, res) => { +router.put('/admin/list/:listId', requireAuth, authorize('admin'), async (req, res) => { const { listId } = req.params const items = req.body if (!Array.isArray(items)) return res.status(400).json({ error: 'Body must be an array' }) - const result = customListsStore.setList(listId, items) + const result = await customListsStore.setList(listId, items) if (result === null) return res.status(404).json({ error: 'Unknown list id' }) res.json(result) }) -router.post('/admin/list/:listId/reset', requireAuth, authorize('admin'), (req, res) => { - const result = customListsStore.resetList(req.params.listId) +router.post('/admin/list/:listId/reset', requireAuth, authorize('admin'), async (req, res) => { + const result = await customListsStore.resetList(req.params.listId) if (result === null) return res.status(404).json({ error: 'Unknown list id' }) res.json(result) }) // ── Organisationseinstellungen ── 
-router.get('/admin/org-settings', requireAuth, authorize('reader'), (req, res) => { - res.json(orgSettingsStore.get()) +router.get('/admin/org-settings', requireAuth, authorize('reader'), async (req, res) => { + res.json(await orgSettingsStore.get()) }) -router.put('/admin/org-settings', requireAuth, authorize('admin'), (req, res) => { - const updated = orgSettingsStore.update(req.body) - auditStore.append({ user: req.user, action: 'settings', resource: 'org', detail: 'Organisationseinstellungen aktualisiert' }) +router.put('/admin/org-settings', requireAuth, authorize('admin'), async (req, res) => { + const updated = await orgSettingsStore.update(req.body) + await auditStore.append({ user: req.user, action: 'settings', resource: 'org', detail: 'Organisationseinstellungen aktualisiert' }) res.json(updated) }) // Modul-Konfiguration -router.get('/admin/modules', requireAuth, authorize('reader'), (req, res) => { - res.json(orgSettingsStore.get().modules || {}) +router.get('/admin/modules', requireAuth, authorize('reader'), async (req, res) => { + const s = await orgSettingsStore.get() + res.json(s.modules || {}) }) -router.put('/admin/modules', requireAuth, authorize('admin'), (req, res) => { - const updated = orgSettingsStore.update({ modules: req.body }) - auditStore.append({ user: req.user, action: 'settings', resource: 'modules', detail: 'Modul-Konfiguration aktualisiert' }) +router.put('/admin/modules', requireAuth, authorize('admin'), async (req, res) => { + const updated = await orgSettingsStore.update({ modules: req.body }) + await auditStore.append({ user: req.user, action: 'settings', resource: 'modules', detail: 'Modul-Konfiguration aktualisiert' }) res.json(updated.modules) }) // 2FA-Enforcement -router.get('/admin/security', requireAuth, authorize('reader'), (req, res) => { - const s = orgSettingsStore.get() +router.get('/admin/security', requireAuth, authorize('reader'), async (req, res) => { + const s = await orgSettingsStore.get() res.json({ 
require2FA: s.require2FA === true }) }) -router.put('/admin/security', requireAuth, authorize('admin'), (req, res) => { +router.put('/admin/security', requireAuth, authorize('admin'), async (req, res) => { const { require2FA } = req.body - const updated = orgSettingsStore.update({ require2FA: !!require2FA }) - auditStore.append({ user: req.user, action: 'settings', resource: 'security', detail: `2FA-Pflicht: ${updated.require2FA ? 'aktiviert' : 'deaktiviert'}` }) + const updated = await orgSettingsStore.update({ require2FA: !!require2FA }) + await auditStore.append({ user: req.user, action: 'settings', resource: 'security', detail: `2FA-Pflicht: ${updated.require2FA ? 'aktiviert' : 'deaktiviert'}` }) res.json({ require2FA: updated.require2FA }) }) // Rollenspezifische Einstellungen -router.get('/admin/role-settings', requireAuth, authorize('contentowner'), (req, res) => { - const s = orgSettingsStore.get() +router.get('/admin/role-settings', requireAuth, authorize('contentowner'), async (req, res) => { + const s = await orgSettingsStore.get() res.json({ cisoSettings: s.cisoSettings, gdpoSettings: s.gdpoSettings, @@ -215,24 +216,24 @@ router.get('/admin/role-settings', requireAuth, authorize('contentowner'), (req, qmSettings: s.qmSettings, }) }) -router.put('/admin/role-settings', requireAuth, authorize('contentowner'), (req, res) => { - const updated = orgSettingsStore.update(req.body) - auditStore.append({ user: req.user, action: 'settings', resource: 'org', detail: 'Rolleneinstellungen aktualisiert' }) +router.put('/admin/role-settings', requireAuth, authorize('contentowner'), async (req, res) => { + const updated = await orgSettingsStore.update(req.body) + await auditStore.append({ user: req.user, action: 'settings', resource: 'org', detail: 'Rolleneinstellungen aktualisiert' }) res.json(updated) }) // ── Audit-Log ── -router.get('/admin/audit-log', requireAuth, authorize('admin'), (req, res) => { +router.get('/admin/audit-log', requireAuth, authorize('admin'), 
async (req, res) => { const { user, action, resource, from, to, limit, offset } = req.query - res.json(auditStore.query({ + res.json(await auditStore.query({ user, action, resource, from, to, limit: limit ? parseInt(limit) : 200, offset: offset ? parseInt(offset) : 0, })) }) -router.delete('/admin/audit-log', requireAuth, authorize('admin'), (req, res) => { - auditStore.clear() - auditStore.append({ user: req.user, action: 'delete', resource: 'audit', detail: 'Audit-Log geleert' }) +router.delete('/admin/audit-log', requireAuth, authorize('admin'), async (req, res) => { + await auditStore.clear() + await auditStore.append({ user: req.user, action: 'delete', resource: 'audit', detail: 'Audit-Log geleert' }) res.json({ ok: true }) }) @@ -242,7 +243,7 @@ router.post('/admin/email/test', requireAuth, authorize('admin'), async (req, re if (!to) return res.status(400).json({ error: 'Empfängeradresse fehlt' }) try { await mailer.sendTestMail(to) - auditStore.append({ user: req.user, action: 'settings', resource: 'org', detail: `Test-Mail an ${to} gesendet` }) + await auditStore.append({ user: req.user, action: 'settings', resource: 'org', detail: `Test-Mail an ${to} gesendet` }) res.json({ ok: true }) } catch (e) { res.status(500).json({ error: e.message }) @@ -279,8 +280,8 @@ router.get('/api/storage-info', requireAuth, authorize('admin'), (req, res) => { }) // ── KI-Einstellungen ── -router.get('/admin/ai-settings', requireAuth, authorize('admin'), (req, res) => { - const cfg = orgSettingsStore.get() +router.get('/admin/ai-settings', requireAuth, authorize('admin'), async (req, res) => { + const cfg = await orgSettingsStore.get() res.json({ aiEnabled: cfg.aiEnabled ?? 
true, aiOllamaUrl: cfg.aiOllamaUrl || '', @@ -294,8 +295,8 @@ router.put('/admin/ai-settings', requireAuth, authorize('admin'), async (req, re if (typeof aiEnabled === 'boolean') patch.aiEnabled = aiEnabled if (typeof aiOllamaUrl === 'string') patch.aiOllamaUrl = aiOllamaUrl.trim() if (typeof aiEmbedModel === 'string') patch.aiEmbedModel = aiEmbedModel.trim() - const updated = orgSettingsStore.update(patch) - auditStore.append({ user: req.user, action: 'update', resource: 'ai-settings', detail: `aiEnabled=${updated.aiEnabled}` }) + const updated = await orgSettingsStore.update(patch) + await auditStore.append({ user: req.user, action: 'update', resource: 'ai-settings', detail: `aiEnabled=${updated.aiEnabled}` }) res.json({ ok: true, aiEnabled: updated.aiEnabled, aiOllamaUrl: updated.aiOllamaUrl, aiEmbedModel: updated.aiEmbedModel }) }) @@ -318,7 +319,7 @@ router.get('/admin/export', requireAuth, authorize('admin'), async (req, res) => try { bundle.gdpr[f] = JSON.parse(fs.readFileSync(path.join(gdprDir, f), 'utf8')) } catch {} } } - auditStore.append({ user: req.user, action: 'export', resource: 'org', detail: 'Vollexport durchgeführt' }) + await auditStore.append({ user: req.user, action: 'export', resource: 'org', detail: 'Vollexport durchgeführt' }) res.setHeader('Content-Disposition', `attachment; filename="isms-export-${new Date().toISOString().slice(0,10)}.json"`) res.setHeader('Content-Type', 'application/json') res.json(bundle) @@ -327,7 +328,7 @@ router.get('/admin/export', requireAuth, authorize('admin'), async (req, res) => } }) -router.post('/admin/maintenance/cleanup', requireAuth, authorize('admin'), (req, res) => { +router.post('/admin/maintenance/cleanup', requireAuth, authorize('admin'), async (req, res) => { const results = { removed: [], errors: [] } try { const attachDir = path.join(__dirname, '../../data/template-files') @@ -344,7 +345,7 @@ router.post('/admin/maintenance/cleanup', requireAuth, authorize('admin'), (req, }) } } catch (e) { 
results.errors.push('template-files: ' + e.message) } - auditStore.append({ user: req.user, action: 'delete', resource: 'org', detail: `Bereinigung: ${results.removed.length} Dateien entfernt` }) + await auditStore.append({ user: req.user, action: 'delete', resource: 'org', detail: `Bereinigung: ${results.removed.length} Dateien entfernt` }) res.json(results) }) @@ -432,7 +433,7 @@ router.post('/admin/demo-reset', requireAuth, authorize('admin'), async (req, re console.warn('[demo-reset] .env konnte nicht auf SQLite umgestellt werden:', e.message) } - auditStore.append({ user: req.user, action: 'demo_reset', resource: 'org', detail: 'Demo-Reset — alle Moduldaten geleert, Benutzer zurückgesetzt, STORAGE_BACKEND=sqlite gesetzt' }) + await auditStore.append({ user: req.user, action: 'demo_reset', resource: 'org', detail: 'Demo-Reset — alle Moduldaten geleert, Benutzer zurückgesetzt, STORAGE_BACKEND=sqlite gesetzt' }) res.setHeader('Content-Disposition', `attachment; filename="isms-demo-export-${new Date().toISOString().slice(0,10)}.json"`) res.setHeader('Content-Type', 'application/json') // restartRequired im Header mitgeben damit das Frontend informieren kann @@ -498,15 +499,15 @@ router.post('/admin/demo-import', requireAuth, authorize('admin'), express.json( // Re-seed guidance docs (architecture, demo-overview, role guides, soa-guide, policy-guide) try { const gs = require('../db/guidanceStore') - gs.seedArchitectureDocs() - gs.seedDemoDoc() - gs.seedRoleGuides() - gs.seedSoaGuide() - gs.seedPolicyGuide() - gs.seedIsoNotice() + await gs.seedArchitectureDocs() + await gs.seedDemoDoc() + await gs.seedRoleGuides() + await gs.seedSoaGuide() + await gs.seedPolicyGuide() + await gs.seedIsoNotice() } catch {} - auditStore.append({ user: req.user, action: 'demo_import', resource: 'org', detail: 'Demo-Daten importiert — alice/bob wiederhergestellt' }) + await auditStore.append({ user: req.user, action: 'demo_import', resource: 'org', detail: 'Demo-Daten importiert — 
alice/bob wiederhergestellt' }) res.json({ ok: true, restoredAt: nowISO() }) } catch (e) { res.status(500).json({ error: e.message }) @@ -514,19 +515,19 @@ router.post('/admin/demo-import', requireAuth, authorize('admin'), express.json( }) // ── Seed-Sprache aktualisieren (ohne Bundle-Import) ────────────────────────── -router.put('/admin/seed-lang', requireAuth, authorize('admin'), express.json(), (req, res) => { +router.put('/admin/seed-lang', requireAuth, authorize('admin'), express.json(), async (req, res) => { const { lang } = req.body || {} const SUPPORTED = ['de', 'en', 'fr', 'nl'] if (!SUPPORTED.includes(lang)) return res.status(400).json({ error: 'Unsupported language' }) fs.writeFileSync(DEMO_LANG_FILE, JSON.stringify({ lang, setAt: nowISO() })) // Re-run all seed functions so guidance docs are immediately updated const gs = require('../db/guidanceStore') - try { gs.seedDemoDoc() } catch {} - try { gs.seedRoleGuides() } catch {} - try { gs.seedSoaGuide() } catch {} - try { gs.seedPolicyGuide() } catch {} - try { gs.seedIsoNotice() } catch {} - try { gs.seedArchitectureDocs() } catch {} + try { await gs.seedDemoDoc() } catch {} + try { await gs.seedRoleGuides() } catch {} + try { await gs.seedSoaGuide() } catch {} + try { await gs.seedPolicyGuide() } catch {} + try { await gs.seedIsoNotice() } catch {} + try { await gs.seedArchitectureDocs() } catch {} res.json({ ok: true, lang }) }) @@ -593,15 +594,15 @@ router.post('/admin/demo-load-bundle', requireAuth, authorize('admin'), express. 
// Re-seed guidance docs (architecture, demo-overview, role guides, soa-guide, policy-guide) try { const gs = require('../db/guidanceStore') - gs.seedArchitectureDocs() - gs.seedDemoDoc() - gs.seedRoleGuides() - gs.seedSoaGuide() - gs.seedPolicyGuide() - gs.seedIsoNotice() + await gs.seedArchitectureDocs() + await gs.seedDemoDoc() + await gs.seedRoleGuides() + await gs.seedSoaGuide() + await gs.seedPolicyGuide() + await gs.seedIsoNotice() } catch {} - auditStore.append({ user: req.user, action: 'demo_import', resource: 'org', detail: `Demo-Bundle '${lang}' geladen` }) + await auditStore.append({ user: req.user, action: 'demo_import', resource: 'org', detail: `Demo-Bundle '${lang}' geladen` }) res.json({ ok: true, lang, loadedAt: nowISO() }) } catch (e) { res.status(500).json({ error: e.message }) diff --git a/server/routes/ai.js b/server/routes/ai.js index fe86018..acc47ca 100644 --- a/server/routes/ai.js +++ b/server/routes/ai.js @@ -38,7 +38,7 @@ router.get('/api/ai/search', requireAuth, async (req, res) => { ? 
await embeddingStore.search(q) : await lexicalSearch.search(q) - auditStore.append({ + await auditStore.append({ user: req.user, action: 'ai_search', resource: 'ai', @@ -55,7 +55,7 @@ router.post('/api/ai/reindex', requireAuth, authorize('admin'), async (req, res) if (!aiEnabled()) return res.status(503).json({ error: 'KI-Integration deaktiviert' }) try { const stats = await embeddingStore.reindexAll() - auditStore.append({ + await auditStore.append({ user: req.user, action: 'ai_reindex', resource: 'ai', diff --git a/server/routes/assets.js b/server/routes/assets.js index 9d6ebec..4df7523 100644 --- a/server/routes/assets.js +++ b/server/routes/assets.js @@ -6,39 +6,39 @@ const { requireAuth, authorize } = require('../auth') const assetStore = require('../db/assetStore') const embeddingStore = require('../ai/embeddingStore') -router.get('/assets/summary', requireAuth, authorize('reader'), (req, res) => { - res.json(assetStore.getSummary()) +router.get('/assets/summary', requireAuth, authorize('reader'), async (req, res) => { + res.json(await assetStore.getSummary()) }) -router.get('/assets', requireAuth, authorize('reader'), (req, res) => { - res.json(assetStore.getAll(req.query)) +router.get('/assets', requireAuth, authorize('reader'), async (req, res) => { + res.json(await assetStore.getAll(req.query)) }) -router.get('/assets/:id', requireAuth, authorize('reader'), (req, res) => { - const a = assetStore.getById(req.params.id) +router.get('/assets/:id', requireAuth, authorize('reader'), async (req, res) => { + const a = await assetStore.getById(req.params.id) if (!a) return res.status(404).json({ error: 'Not found' }) res.json(a) }) -router.post('/assets', requireAuth, authorize('editor'), (req, res) => { - const asset = assetStore.create(req.body, { createdBy: req.user }) - require('../db/auditStore').append({ user: req.user, action: 'create', resource: 'asset', detail: asset.name }) +router.post('/assets', requireAuth, authorize('editor'), async (req, res) => { 
+ const asset = await assetStore.create(req.body, { createdBy: req.user }) + await require('../db/auditStore').append({ user: req.user, action: 'create', resource: 'asset', detail: asset.name }) embeddingStore.indexDoc({ ...asset, title: asset.name }, 'Asset', '#assets').catch(() => {}) res.status(201).json(asset) }) -router.put('/assets/:id', requireAuth, authorize('editor'), (req, res) => { - const updated = assetStore.update(req.params.id, req.body, { changedBy: req.user }) +router.put('/assets/:id', requireAuth, authorize('editor'), async (req, res) => { + const updated = await assetStore.update(req.params.id, req.body, { changedBy: req.user }) if (!updated) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'update', resource: 'asset', detail: updated.name }) + await require('../db/auditStore').append({ user: req.user, action: 'update', resource: 'asset', detail: updated.name }) embeddingStore.indexDoc({ ...updated, title: updated.name }, 'Asset', '#assets').catch(() => {}) res.json(updated) }) -router.delete('/assets/:id', requireAuth, authorize('admin'), (req, res) => { - const ok = assetStore.remove(req.params.id) +router.delete('/assets/:id', requireAuth, authorize('admin'), async (req, res) => { + const ok = await assetStore.remove(req.params.id) if (!ok) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'asset', detail: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'asset', detail: req.params.id }) res.json({ ok: true }) }) diff --git a/server/routes/bcm.js b/server/routes/bcm.js index 16872f4..7697326 100644 --- a/server/routes/bcm.js +++ b/server/routes/bcm.js @@ -8,85 +8,85 @@ const multer = require('multer') const { requireAuth, authorize } = require('../auth') const bcmStore = require('../db/bcmStore') -router.get('/bcm/summary', requireAuth, 
authorize('reader'), (req, res) => { - res.json(bcmStore.getSummary()) +router.get('/bcm/summary', requireAuth, authorize('reader'), async (req, res) => { + res.json(await bcmStore.getSummary()) }) // BIA -router.get('/bcm/bia', requireAuth, authorize('reader'), (req, res) => { - res.json(bcmStore.getBia()) +router.get('/bcm/bia', requireAuth, authorize('reader'), async (req, res) => { + res.json(await bcmStore.getBia()) }) -router.get('/bcm/bia/:id', requireAuth, authorize('reader'), (req, res) => { - const b = bcmStore.getBiaById(req.params.id) +router.get('/bcm/bia/:id', requireAuth, authorize('reader'), async (req, res) => { + const b = await bcmStore.getBiaById(req.params.id) if (!b) return res.status(404).json({ error: 'Not found' }) res.json(b) }) -router.post('/bcm/bia', requireAuth, authorize('editor'), (req, res) => { - const b = bcmStore.createBia(req.body, { createdBy: req.user }) - require('../db/auditStore').append({ user: req.user, action: 'create', resource: 'bcm_bia', detail: b.title }) +router.post('/bcm/bia', requireAuth, authorize('editor'), async (req, res) => { + const b = await bcmStore.createBia(req.body, { createdBy: req.user }) + await require('../db/auditStore').append({ user: req.user, action: 'create', resource: 'bcm_bia', detail: b.title }) res.status(201).json(b) }) -router.put('/bcm/bia/:id', requireAuth, authorize('editor'), (req, res) => { - const b = bcmStore.updateBia(req.params.id, req.body, { changedBy: req.user }) +router.put('/bcm/bia/:id', requireAuth, authorize('editor'), async (req, res) => { + const b = await bcmStore.updateBia(req.params.id, req.body, { changedBy: req.user }) if (!b) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'update', resource: 'bcm_bia', detail: b.title }) + await require('../db/auditStore').append({ user: req.user, action: 'update', resource: 'bcm_bia', detail: b.title }) res.json(b) }) -router.delete('/bcm/bia/:id', requireAuth, 
authorize('admin'), (req, res) => { - if (!bcmStore.deleteBia(req.params.id)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'bcm_bia', detail: req.params.id }) +router.delete('/bcm/bia/:id', requireAuth, authorize('admin'), async (req, res) => { + if (!(await bcmStore.deleteBia(req.params.id))) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'bcm_bia', detail: req.params.id }) res.json({ ok: true }) }) // Plans -router.get('/bcm/plans', requireAuth, authorize('reader'), (req, res) => { - res.json(bcmStore.getPlans()) +router.get('/bcm/plans', requireAuth, authorize('reader'), async (req, res) => { + res.json(await bcmStore.getPlans()) }) -router.get('/bcm/plans/:id', requireAuth, authorize('reader'), (req, res) => { - const p = bcmStore.getPlanById(req.params.id) +router.get('/bcm/plans/:id', requireAuth, authorize('reader'), async (req, res) => { + const p = await bcmStore.getPlanById(req.params.id) if (!p) return res.status(404).json({ error: 'Not found' }) res.json(p) }) -router.post('/bcm/plans', requireAuth, authorize('editor'), (req, res) => { - const p = bcmStore.createPlan(req.body, { createdBy: req.user }) - require('../db/auditStore').append({ user: req.user, action: 'create', resource: 'bcm_plan', detail: p.title }) +router.post('/bcm/plans', requireAuth, authorize('editor'), async (req, res) => { + const p = await bcmStore.createPlan(req.body, { createdBy: req.user }) + await require('../db/auditStore').append({ user: req.user, action: 'create', resource: 'bcm_plan', detail: p.title }) res.status(201).json(p) }) -router.put('/bcm/plans/:id', requireAuth, authorize('editor'), (req, res) => { - const p = bcmStore.updatePlan(req.params.id, req.body, { changedBy: req.user }) +router.put('/bcm/plans/:id', requireAuth, authorize('editor'), async (req, res) => { + const p = await 
bcmStore.updatePlan(req.params.id, req.body, { changedBy: req.user }) if (!p) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'update', resource: 'bcm_plan', detail: p.title }) + await require('../db/auditStore').append({ user: req.user, action: 'update', resource: 'bcm_plan', detail: p.title }) res.json(p) }) -router.delete('/bcm/plans/:id', requireAuth, authorize('admin'), (req, res) => { - if (!bcmStore.deletePlan(req.params.id)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'bcm_plan', detail: req.params.id }) +router.delete('/bcm/plans/:id', requireAuth, authorize('admin'), async (req, res) => { + if (!(await bcmStore.deletePlan(req.params.id))) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'bcm_plan', detail: req.params.id }) res.json({ ok: true }) }) // Exercises -router.get('/bcm/exercises', requireAuth, authorize('reader'), (req, res) => { - res.json(bcmStore.getExercises()) +router.get('/bcm/exercises', requireAuth, authorize('reader'), async (req, res) => { + res.json(await bcmStore.getExercises()) }) -router.get('/bcm/exercises/:id', requireAuth, authorize('reader'), (req, res) => { - const e = bcmStore.getExerciseById(req.params.id) +router.get('/bcm/exercises/:id', requireAuth, authorize('reader'), async (req, res) => { + const e = await bcmStore.getExerciseById(req.params.id) if (!e) return res.status(404).json({ error: 'Not found' }) res.json(e) }) -router.post('/bcm/exercises', requireAuth, authorize('editor'), (req, res) => { - const e = bcmStore.createExercise(req.body, { createdBy: req.user }) - require('../db/auditStore').append({ user: req.user, action: 'create', resource: 'bcm_exercise', detail: e.title }) +router.post('/bcm/exercises', requireAuth, authorize('editor'), async (req, res) => { + 
const e = await bcmStore.createExercise(req.body, { createdBy: req.user }) + await require('../db/auditStore').append({ user: req.user, action: 'create', resource: 'bcm_exercise', detail: e.title }) res.status(201).json(e) }) -router.put('/bcm/exercises/:id', requireAuth, authorize('editor'), (req, res) => { - const e = bcmStore.updateExercise(req.params.id, req.body, { changedBy: req.user }) +router.put('/bcm/exercises/:id', requireAuth, authorize('editor'), async (req, res) => { + const e = await bcmStore.updateExercise(req.params.id, req.body, { changedBy: req.user }) if (!e) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'update', resource: 'bcm_exercise', detail: e.title }) + await require('../db/auditStore').append({ user: req.user, action: 'update', resource: 'bcm_exercise', detail: e.title }) res.json(e) }) -router.delete('/bcm/exercises/:id', requireAuth, authorize('admin'), (req, res) => { - if (!bcmStore.deleteExercise(req.params.id)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'bcm_exercise', detail: req.params.id }) +router.delete('/bcm/exercises/:id', requireAuth, authorize('admin'), async (req, res) => { + if (!(await bcmStore.deleteExercise(req.params.id))) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'bcm_exercise', detail: req.params.id }) res.json({ ok: true }) }) @@ -109,10 +109,10 @@ router.post('/bcm/:collection/:id/upload', requireAuth, authorize('editor'), (re const UPDATERS = { bia: 'updateBia', plans: 'updatePlan', exercises: 'updateExercise' } const col = req.params.collection if (!GETTERS[col]) return res.status(400).json({ error: 'Invalid collection' }) - bcmUpload.single('file')(req, res, err => { + bcmUpload.single('file')(req, res, async err => { if (err) return 
res.status(400).json({ error: err.message }) if (!req.file) return res.status(400).json({ error: 'No file uploaded' }) - const item = bcmStore[GETTERS[col]](req.params.id) + const item = await bcmStore[GETTERS[col]](req.params.id) if (!item) { fs.unlinkSync(req.file.path); return res.status(404).json({ error: 'Not found' }) } const attachment = { id: Date.now().toString(36), @@ -123,16 +123,16 @@ router.post('/bcm/:collection/:id/upload', requireAuth, authorize('editor'), (re uploadedAt: new Date().toISOString(), } const attachments = [...(item.attachments || []), attachment] - bcmStore[UPDATERS[col]](req.params.id, { attachments }) + await bcmStore[UPDATERS[col]](req.params.id, { attachments }) res.json({ ok: true, attachment }) }) }) -router.get('/bcm/:collection/:id/files/:fileId', requireAuth, authorize('reader'), (req, res) => { +router.get('/bcm/:collection/:id/files/:fileId', requireAuth, authorize('reader'), async (req, res) => { const GETTERS = { bia: 'getBiaById', plans: 'getPlanById', exercises: 'getExerciseById' } const col = req.params.collection if (!GETTERS[col]) return res.status(400).json({ error: 'Invalid collection' }) - const item = bcmStore[GETTERS[col]](req.params.id) + const item = await bcmStore[GETTERS[col]](req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) const att = (item.attachments || []).find(a => a.id === req.params.fileId) if (!att) return res.status(404).json({ error: 'Attachment not found' }) @@ -142,19 +142,19 @@ router.get('/bcm/:collection/:id/files/:fileId', requireAuth, authorize('reader' res.sendFile(path.resolve(filePath)) }) -router.delete('/bcm/:collection/:id/files/:fileId', requireAuth, authorize('editor'), (req, res) => { +router.delete('/bcm/:collection/:id/files/:fileId', requireAuth, authorize('editor'), async (req, res) => { const GETTERS = { bia: 'getBiaById', plans: 'getPlanById', exercises: 'getExerciseById' } const UPDATERS = { bia: 'updateBia', plans: 'updatePlan', exercises: 
'updateExercise' } const col = req.params.collection if (!GETTERS[col]) return res.status(400).json({ error: 'Invalid collection' }) - const item = bcmStore[GETTERS[col]](req.params.id) + const item = await bcmStore[GETTERS[col]](req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) const att = (item.attachments || []).find(a => a.id === req.params.fileId) if (!att) return res.status(404).json({ error: 'Attachment not found' }) const filePath = path.join(BCM_FILES_DIR, att.storedName) if (fs.existsSync(filePath)) fs.unlinkSync(filePath) const attachments = (item.attachments || []).filter(a => a.id !== req.params.fileId) - bcmStore[UPDATERS[col]](req.params.id, { attachments }) + await bcmStore[UPDATERS[col]](req.params.id, { attachments }) res.json({ ok: true }) }) diff --git a/server/routes/calendar.js b/server/routes/calendar.js index 3ac1e71..8fcc1af 100644 --- a/server/routes/calendar.js +++ b/server/routes/calendar.js @@ -16,7 +16,7 @@ router.get('/calendar', requireAuth, authorize('reader'), async (req, res) => { // Risiken + Behandlungspläne try { - const riskEvts = riskStore.getCalendarEvents() + const riskEvts = await riskStore.getCalendarEvents() events.push(...riskEvts) } catch {} @@ -50,7 +50,7 @@ router.get('/calendar', requireAuth, authorize('reader'), async (req, res) => { // Legal: ablaufende Verträge try { const legalStore = require('../db/legalStore') - const expiring = legalStore.contracts.getExpiring(60) + const expiring = await legalStore.contracts.getExpiring(60) for (const c of expiring) { events.push({ date: c.noticeDate, @@ -65,7 +65,7 @@ router.get('/calendar', requireAuth, authorize('reader'), async (req, res) => { // GDPR VVT: bald fällige Löschfristen try { const gdprStore = require('../db/gdprStore') - const upcoming = gdprStore.deletionLog.getUpcoming(90) + const upcoming = await gdprStore.deletionLog.getUpcoming(90) for (const v of upcoming) { events.push({ date: v.deletionDue, @@ -75,7 +75,7 @@ 
router.get('/calendar', requireAuth, authorize('reader'), async (req, res) => { title: v.title }) } - const overdue = gdprStore.deletionLog.getDue() + const overdue = await gdprStore.deletionLog.getDue() for (const v of overdue) { events.push({ date: v.deletionDue, @@ -89,13 +89,13 @@ router.get('/calendar', requireAuth, authorize('reader'), async (req, res) => { // Sicherheitsziele try { - const goalEvts = goalsStore.getCalendarEvents() + const goalEvts = await goalsStore.getCalendarEvents() events.push(...goalEvts) } catch {} // Assets EoL try { - const assets = assetStore.getAll({ status: 'active' }) + const assets = await assetStore.getAll({ status: 'active' }) for (const a of assets) { if (a.endOfLifeDate) events.push({ date: a.endOfLifeDate, @@ -109,15 +109,15 @@ router.get('/calendar', requireAuth, authorize('reader'), async (req, res) => { // Governance: Management Reviews, Maßnahmen, Sitzungen try { - for (const r of govStore.getReviews()) { + for (const r of await govStore.getReviews()) { if (r.date) events.push({ date: r.date, type: 'management_review', title: r.title || 'Management Review', ref: r.id, severity: 'normal' }) if (r.nextReviewDate) events.push({ date: r.nextReviewDate, type: 'management_review', title: `Nächstes Review: ${r.title || 'Management Review'}`, ref: r.id, severity: 'normal' }) } - for (const a of govStore.getActions()) { + for (const a of await govStore.getActions()) { if (a.dueDate && a.status !== 'completed' && a.status !== 'cancelled') events.push({ date: a.dueDate, type: 'governance_action', title: `Maßnahme: ${a.title}`, ref: a.id, severity: a.priority === 'critical' || a.priority === 'high' ? 
'high' : 'normal' }) } - for (const m of govStore.getMeetings()) { + for (const m of await govStore.getMeetings()) { if (m.date) events.push({ date: m.date, type: 'committee_meeting', title: m.title || 'Ausschusssitzung', ref: m.id, severity: 'normal' }) if (m.nextMeetingDate) events.push({ date: m.nextMeetingDate, type: 'committee_meeting', title: `Nächste Sitzung: ${m.committee || ''}`, ref: m.id, severity: 'normal' }) } @@ -125,11 +125,11 @@ router.get('/calendar', requireAuth, authorize('reader'), async (req, res) => { // BCM: Übungen und Plan-Tests try { - for (const ex of bcmStore.getExercises()) { + for (const ex of await bcmStore.getExercises()) { if (ex.result === 'planned' && ex.date) events.push({ date: ex.date, type: 'bcm_exercise', title: `BCM-Übung: ${ex.title}`, ref: ex.id, severity: 'normal' }) } - for (const pl of bcmStore.getPlans()) { + for (const pl of await bcmStore.getPlans()) { if (pl.nextTest) events.push({ date: pl.nextTest, type: 'bcm_plan_test', title: `Plan-Test fällig: ${pl.title}`, ref: pl.id, severity: pl.nextTest < new Date().toISOString().slice(0,10) ? 
'high' : 'normal' }) } @@ -138,7 +138,7 @@ router.get('/calendar', requireAuth, authorize('reader'), async (req, res) => { // Lieferanten: anstehende Audits try { const supplierStore = require('../db/supplierStore') - const upcoming = supplierStore.getUpcomingAudits(60) + const upcoming = await supplierStore.getUpcomingAudits(60) for (const s of upcoming) { events.push({ date: s.nextAuditDate, @@ -154,7 +154,7 @@ router.get('/calendar', requireAuth, authorize('reader'), async (req, res) => { try { const findingStore = require('../db/findingStore') const today = new Date().toISOString().slice(0, 10) - for (const f of findingStore.getAll()) { + for (const f of await findingStore.getAll()) { for (const a of (f.actions || [])) { if (!a.dueDate || a.status === 'done') continue events.push({ diff --git a/server/routes/findings.js b/server/routes/findings.js index c480882..02aef95 100644 --- a/server/routes/findings.js +++ b/server/routes/findings.js @@ -9,74 +9,74 @@ const audit = require('../db/auditStore') const embeddingStore = require('../ai/embeddingStore') // ── Listings & Summary ──────────────────────────────────────────────────────── -router.get('/findings/summary', requireAuth, authorize('reader'), (req, res) => { - res.json(store.getSummary()) +router.get('/findings/summary', requireAuth, authorize('reader'), async (req, res) => { + res.json(await store.getSummary()) }) -router.get('/findings', requireAuth, authorize('reader'), (req, res) => { +router.get('/findings', requireAuth, authorize('reader'), async (req, res) => { const { status, severity, auditor } = req.query - res.json(store.getAll({ status, severity, auditor })) + res.json(await store.getAll({ status, severity, auditor })) }) -router.get('/findings/:id', requireAuth, authorize('reader'), (req, res) => { - const f = store.getById(req.params.id) +router.get('/findings/:id', requireAuth, authorize('reader'), async (req, res) => { + const f = await store.getById(req.params.id) if (!f) return 
res.status(404).json({ error: 'Not found' }) res.json(f) }) // ── CRUD ────────────────────────────────────────────────────────────────────── -router.post('/findings', requireAuth, authorize('auditor'), (req, res) => { - const f = store.create(req.body, req.user) - audit.append({ user: req.user, action: 'create', resource: 'finding', resourceId: f.id }) - embeddingStore.indexDoc(f, 'Audit-Feststellung', '#reports').catch(() => {}) +router.post('/findings', requireAuth, authorize('auditor'), async (req, res) => { + const f = await store.create(req.body, req.user) + await audit.append({ user: req.user, action: 'create', resource: 'finding', resourceId: f.id }) + await embeddingStore.indexDoc(f, 'Audit-Feststellung', '#reports') res.status(201).json(f) }) -router.put('/findings/:id', requireAuth, authorize('auditor'), (req, res) => { - const f = store.update(req.params.id, req.body, req.user) +router.put('/findings/:id', requireAuth, authorize('auditor'), async (req, res) => { + const f = await store.update(req.params.id, req.body, req.user) if (!f) return res.status(404).json({ error: 'Not found' }) - audit.append({ user: req.user, action: 'update', resource: 'finding', resourceId: f.id }) - embeddingStore.indexDoc(f, 'Audit-Feststellung', '#reports').catch(() => {}) + await audit.append({ user: req.user, action: 'update', resource: 'finding', resourceId: f.id }) + await embeddingStore.indexDoc(f, 'Audit-Feststellung', '#reports') res.json(f) }) -router.delete('/findings/:id', requireAuth, authorize('auditor'), (req, res) => { - const ok = store.remove(req.params.id, req.user) +router.delete('/findings/:id', requireAuth, authorize('auditor'), async (req, res) => { + const ok = await store.remove(req.params.id, req.user) if (!ok) return res.status(404).json({ error: 'Not found' }) - audit.append({ user: req.user, action: 'delete', resource: 'finding', resourceId: req.params.id }) + await audit.append({ user: req.user, action: 'delete', resource: 'finding', resourceId: 
req.params.id }) res.json({ deleted: true }) }) -router.delete('/findings/:id/permanent', requireAuth, authorize('admin'), (req, res) => { - const ok = store.permanentDelete(req.params.id) +router.delete('/findings/:id/permanent', requireAuth, authorize('admin'), async (req, res) => { + const ok = await store.permanentDelete(req.params.id) if (!ok) return res.status(404).json({ error: 'Not found' }) - audit.append({ user: req.user, action: 'permanent_delete', resource: 'finding', resourceId: req.params.id }) - embeddingStore.removeDoc(req.params.id) + await audit.append({ user: req.user, action: 'permanent_delete', resource: 'finding', resourceId: req.params.id }) + await embeddingStore.removeDoc(req.params.id) res.json({ deleted: true, permanent: true }) }) -router.post('/findings/:id/restore', requireAuth, authorize('admin'), (req, res) => { - const f = store.restore(req.params.id) +router.post('/findings/:id/restore', requireAuth, authorize('admin'), async (req, res) => { + const f = await store.restore(req.params.id) if (!f) return res.status(404).json({ error: 'Not found' }) - audit.append({ user: req.user, action: 'restore', resource: 'finding', resourceId: f.id }) + await audit.append({ user: req.user, action: 'restore', resource: 'finding', resourceId: f.id }) res.json(f) }) // ── Maßnahmenplan (Actions) ─────────────────────────────────────────────────── -router.post('/findings/:id/actions', requireAuth, authorize('auditor'), (req, res) => { - const action = store.addAction(req.params.id, req.body, req.user) +router.post('/findings/:id/actions', requireAuth, authorize('auditor'), async (req, res) => { + const action = await store.addAction(req.params.id, req.body, req.user) if (!action) return res.status(404).json({ error: 'Finding not found' }) res.status(201).json(action) }) -router.put('/findings/:id/actions/:actionId', requireAuth, authorize('editor'), (req, res) => { - const action = store.updateAction(req.params.id, req.params.actionId, req.body, 
req.user) +router.put('/findings/:id/actions/:actionId', requireAuth, authorize('editor'), async (req, res) => { + const action = await store.updateAction(req.params.id, req.params.actionId, req.body, req.user) if (!action) return res.status(404).json({ error: 'Not found' }) res.json(action) }) -router.delete('/findings/:id/actions/:actionId', requireAuth, authorize('auditor'), (req, res) => { - const ok = store.deleteAction(req.params.id, req.params.actionId) +router.delete('/findings/:id/actions/:actionId', requireAuth, authorize('auditor'), async (req, res) => { + const ok = await store.deleteAction(req.params.id, req.params.actionId) if (!ok) return res.status(404).json({ error: 'Not found' }) res.json({ deleted: true }) }) diff --git a/server/routes/gdpr.js b/server/routes/gdpr.js index 15d0f05..ec3f269 100644 --- a/server/routes/gdpr.js +++ b/server/routes/gdpr.js @@ -35,27 +35,27 @@ function authorizeEditorOrAbove(req, res, next) { } // Dashboard -router.get('/gdpr/dashboard', requireAuth, authorize('reader'), (req, res) => { - res.json(gdprStore.getSummary(req.query.entity || null)) +router.get('/gdpr/dashboard', requireAuth, authorize('reader'), async (req, res) => { + res.json(await gdprStore.getSummary(req.query.entity || null)) }) // DSB (Singleton) -router.get('/gdpr/dsb', requireAuth, authorize('reader'), (req, res) => { - res.json(gdprStore.dsb.get()) +router.get('/gdpr/dsb', requireAuth, authorize('reader'), async (req, res) => { + res.json(await gdprStore.dsb.get()) }) -router.put('/gdpr/dsb', requireAuth, authorizeContentOwner, (req, res) => { - res.json(gdprStore.dsb.update(req.body)) +router.put('/gdpr/dsb', requireAuth, authorizeContentOwner, async (req, res) => { + res.json(await gdprStore.dsb.update(req.body)) }) router.post('/gdpr/dsb/upload', requireAuth, authorizeContentOwner, (req, res) => { - gdprUpload.single('file')(req, res, (err) => { + gdprUpload.single('file')(req, res, async (err) => { if (err) return res.status(400).json({ error: 
err.message }) if (!req.file) return res.status(400).json({ error: 'No file uploaded' }) - const updated = gdprStore.dsb.update({ filePath: req.file.path, filename: req.file.originalname }) + const updated = await gdprStore.dsb.update({ filePath: req.file.path, filename: req.file.originalname }) res.json(updated) }) }) -router.get('/gdpr/dsb/file', requireAuth, authorize('reader'), (req, res) => { - const data = gdprStore.dsb.get() +router.get('/gdpr/dsb/file', requireAuth, authorize('reader'), async (req, res) => { + const data = await gdprStore.dsb.get() if (!data.filePath || !fs.existsSync(data.filePath)) return res.status(404).json({ error: 'File not found' }) const ext = data.filename ? path.extname(data.filename).toLowerCase() : '.bin' const mime = ext === '.pdf' ? 'application/pdf' : 'application/octet-stream' @@ -65,28 +65,28 @@ router.get('/gdpr/dsb/file', requireAuth, authorize('reader'), (req, res) => { }) // Löschprotokoll (Art. 17 DSGVO) -router.get('/gdpr/deletion-log', requireAuth, authorize('reader'), (req, res) => { - res.json(gdprStore.deletionLog.getAll()) +router.get('/gdpr/deletion-log', requireAuth, authorize('reader'), async (req, res) => { + res.json(await gdprStore.deletionLog.getAll()) }) -router.get('/gdpr/deletion-log/due', requireAuth, authorize('reader'), (req, res) => { - res.json(gdprStore.deletionLog.getDue()) +router.get('/gdpr/deletion-log/due', requireAuth, authorize('reader'), async (req, res) => { + res.json(await gdprStore.deletionLog.getDue()) }) -router.get('/gdpr/deletion-log/upcoming', requireAuth, authorize('reader'), (req, res) => { +router.get('/gdpr/deletion-log/upcoming', requireAuth, authorize('reader'), async (req, res) => { const days = parseInt(req.query.days) || 90 - res.json(gdprStore.deletionLog.getUpcoming(days)) + res.json(await gdprStore.deletionLog.getUpcoming(days)) }) -router.post('/gdpr/deletion-log', requireAuth, authorize('contentowner'), (req, res) => { - const entry = 
gdprStore.deletionLog.confirm(req.body, req.user) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'gdpr_vvt_deletion', resourceId: req.body.vvtId, detail: `Löschung bestätigt: ${req.body.vvtTitle}` }) +router.post('/gdpr/deletion-log', requireAuth, authorize('contentowner'), async (req, res) => { + const entry = await gdprStore.deletionLog.confirm(req.body, req.user) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'gdpr_vvt_deletion', resourceId: req.body.vvtId, detail: `Löschung bestätigt: ${req.body.vvtTitle}` }) res.status(201).json(entry) }) // VVT -router.get('/gdpr/vvt', requireAuth, authorize('reader'), (req, res) => { - res.json(gdprStore.vvt.getAll({ entity: req.query.entity || null })) +router.get('/gdpr/vvt', requireAuth, authorize('reader'), async (req, res) => { + res.json(await gdprStore.vvt.getAll({ entity: req.query.entity || null })) }) -router.get('/gdpr/vvt/export/csv', requireAuth, authorize('reader'), (req, res) => { - const list = gdprStore.vvt.getAll({ entity: req.query.entity || null }) +router.get('/gdpr/vvt/export/csv', requireAuth, authorize('reader'), async (req, res) => { + const list = await gdprStore.vvt.getAll({ entity: req.query.entity || null }) const header = ['ID','Titel','Zweck','Rechtsgrundlage','Status','Verantwortlich','Aufbewahrung (Monate)','Hochrisiko','Erstellt am'] const rows = list.map(v => [v.id, v.title, v.purpose, v.legalBasis, v.status, v.owner, v.retentionMonths || '', v.isHighRisk ? 'Ja' : 'Nein', v.createdAt ? 
v.createdAt.slice(0,10) : '']) @@ -98,47 +98,47 @@ router.get('/gdpr/vvt/export/csv', requireAuth, authorize('reader'), (req, res) res.setHeader('Content-Disposition', `attachment; filename="gdpr-vvt-${new Date().toISOString().slice(0,10)}.csv"`) res.send('\uFEFF' + csv) }) -router.get('/gdpr/vvt/:id', requireAuth, authorize('reader'), (req, res) => { - const item = gdprStore.vvt.getById(req.params.id) +router.get('/gdpr/vvt/:id', requireAuth, authorize('reader'), async (req, res) => { + const item = await gdprStore.vvt.getById(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) res.json(item) }) -router.post('/gdpr/vvt', requireAuth, authorizeEditorOrAbove, (req, res) => { - res.status(201).json(gdprStore.vvt.create(req.body, req.user)) +router.post('/gdpr/vvt', requireAuth, authorizeEditorOrAbove, async (req, res) => { + res.status(201).json(await gdprStore.vvt.create(req.body, req.user)) }) -router.put('/gdpr/vvt/:id', requireAuth, authorizeEditorOrAbove, (req, res) => { - const item = gdprStore.vvt.update(req.params.id, req.body) +router.put('/gdpr/vvt/:id', requireAuth, authorizeEditorOrAbove, async (req, res) => { + const item = await gdprStore.vvt.update(req.params.id, req.body) if (!item) return res.status(404).json({ error: 'Not found' }) res.json(item) }) -router.delete('/gdpr/vvt/:id', requireAuth, authorize('admin'), (req, res) => { - if (!gdprStore.vvt.delete(req.params.id, req.user)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'gdpr_vvt', resourceId: req.params.id }) +router.delete('/gdpr/vvt/:id', requireAuth, authorize('admin'), async (req, res) => { + if (!await gdprStore.vvt.delete(req.params.id, req.user)) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'gdpr_vvt', resourceId: req.params.id }) res.json({ deleted: true }) }) 
-router.delete('/gdpr/vvt/:id/permanent', requireAuth, authorize('admin'), (req, res) => { - if (!gdprStore.vvt.permanentDelete(req.params.id)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'gdpr_vvt', resourceId: req.params.id }) +router.delete('/gdpr/vvt/:id/permanent', requireAuth, authorize('admin'), async (req, res) => { + if (!await gdprStore.vvt.permanentDelete(req.params.id)) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'gdpr_vvt', resourceId: req.params.id }) res.json({ deleted: true, permanent: true }) }) -router.post('/gdpr/vvt/:id/restore', requireAuth, authorize('admin'), (req, res) => { - const item = gdprStore.vvt.restore(req.params.id) +router.post('/gdpr/vvt/:id/restore', requireAuth, authorize('admin'), async (req, res) => { + const item = await gdprStore.vvt.restore(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'gdpr_vvt', resourceId: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'gdpr_vvt', resourceId: req.params.id }) res.json(item) }) // AV-Verträge -router.get('/gdpr/av', requireAuth, authorize('reader'), (req, res) => { - res.json(gdprStore.av.getAll({ entity: req.query.entity || null })) +router.get('/gdpr/av', requireAuth, authorize('reader'), async (req, res) => { + res.json(await gdprStore.av.getAll({ entity: req.query.entity || null })) }) -router.get('/gdpr/av/:id', requireAuth, authorize('reader'), (req, res) => { - const item = gdprStore.av.getById(req.params.id) +router.get('/gdpr/av/:id', requireAuth, authorize('reader'), async (req, res) => { + const item = await gdprStore.av.getById(req.params.id) if (!item) return res.status(404).json({ error: 'Not 
found' }) res.json(item) }) -router.get('/gdpr/av/:id/file', requireAuth, authorize('reader'), (req, res) => { - const item = gdprStore.av.getById(req.params.id) +router.get('/gdpr/av/:id/file', requireAuth, authorize('reader'), async (req, res) => { + const item = await gdprStore.av.getById(req.params.id) if (!item || !item.filePath || !fs.existsSync(item.filePath)) return res.status(404).json({ error: 'File not found' }) const ext = item.filename ? path.extname(item.filename).toLowerCase() : '.bin' const mime = ext === '.pdf' ? 'application/pdf' : 'application/octet-stream' @@ -147,176 +147,176 @@ router.get('/gdpr/av/:id/file', requireAuth, authorize('reader'), (req, res) => res.sendFile(path.resolve(item.filePath)) }) router.post('/gdpr/av/upload', requireAuth, authorizeContentOwner, (req, res) => { - gdprUpload.single('file')(req, res, (err) => { + gdprUpload.single('file')(req, res, async (err) => { if (err) return res.status(400).json({ error: err.message }) if (!req.file) return res.status(400).json({ error: 'No file uploaded' }) const { avId } = req.body if (avId) { - const updated = gdprStore.av.update(avId, { filePath: req.file.path, filename: req.file.originalname }) + const updated = await gdprStore.av.update(avId, { filePath: req.file.path, filename: req.file.originalname }) if (!updated) { fs.unlink(req.file.path, () => {}); return res.status(404).json({ error: 'AV not found' }) } return res.json(updated) } res.json({ filePath: req.file.path, filename: req.file.originalname }) }) }) -router.post('/gdpr/av', requireAuth, authorizeContentOwner, (req, res) => { - res.status(201).json(gdprStore.av.create(req.body, req.user)) +router.post('/gdpr/av', requireAuth, authorizeContentOwner, async (req, res) => { + res.status(201).json(await gdprStore.av.create(req.body, req.user)) }) -router.put('/gdpr/av/:id', requireAuth, authorizeContentOwner, (req, res) => { - const item = gdprStore.av.update(req.params.id, req.body) +router.put('/gdpr/av/:id', 
requireAuth, authorizeContentOwner, async (req, res) => { + const item = await gdprStore.av.update(req.params.id, req.body) if (!item) return res.status(404).json({ error: 'Not found' }) res.json(item) }) -router.delete('/gdpr/av/:id', requireAuth, authorize('admin'), (req, res) => { - if (!gdprStore.av.delete(req.params.id, req.user)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'gdpr_av', resourceId: req.params.id }) +router.delete('/gdpr/av/:id', requireAuth, authorize('admin'), async (req, res) => { + if (!await gdprStore.av.delete(req.params.id, req.user)) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'gdpr_av', resourceId: req.params.id }) res.json({ deleted: true }) }) -router.delete('/gdpr/av/:id/permanent', requireAuth, authorize('admin'), (req, res) => { - if (!gdprStore.av.permanentDelete(req.params.id)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'gdpr_av', resourceId: req.params.id }) +router.delete('/gdpr/av/:id/permanent', requireAuth, authorize('admin'), async (req, res) => { + if (!await gdprStore.av.permanentDelete(req.params.id)) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'gdpr_av', resourceId: req.params.id }) res.json({ deleted: true, permanent: true }) }) -router.post('/gdpr/av/:id/restore', requireAuth, authorize('admin'), (req, res) => { - const item = gdprStore.av.restore(req.params.id) +router.post('/gdpr/av/:id/restore', requireAuth, authorize('admin'), async (req, res) => { + const item = await gdprStore.av.restore(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, 
action: 'restore', resource: 'gdpr_av', resourceId: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'gdpr_av', resourceId: req.params.id }) res.json(item) }) // DSFA -router.get('/gdpr/dsfa', requireAuth, authorize('reader'), (req, res) => { - res.json(gdprStore.dsfa.getAll({ entity: req.query.entity || null })) +router.get('/gdpr/dsfa', requireAuth, authorize('reader'), async (req, res) => { + res.json(await gdprStore.dsfa.getAll({ entity: req.query.entity || null })) }) -router.get('/gdpr/dsfa/:id', requireAuth, authorize('reader'), (req, res) => { - const item = gdprStore.dsfa.getById(req.params.id) +router.get('/gdpr/dsfa/:id', requireAuth, authorize('reader'), async (req, res) => { + const item = await gdprStore.dsfa.getById(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) res.json(item) }) -router.post('/gdpr/dsfa', requireAuth, authorizeContentOwner, (req, res) => { - res.status(201).json(gdprStore.dsfa.create(req.body, req.user)) +router.post('/gdpr/dsfa', requireAuth, authorizeContentOwner, async (req, res) => { + res.status(201).json(await gdprStore.dsfa.create(req.body, req.user)) }) -router.put('/gdpr/dsfa/:id', requireAuth, authorizeContentOwner, (req, res) => { - const item = gdprStore.dsfa.update(req.params.id, req.body) +router.put('/gdpr/dsfa/:id', requireAuth, authorizeContentOwner, async (req, res) => { + const item = await gdprStore.dsfa.update(req.params.id, req.body) if (!item) return res.status(404).json({ error: 'Not found' }) res.json(item) }) -router.delete('/gdpr/dsfa/:id', requireAuth, authorize('admin'), (req, res) => { - if (!gdprStore.dsfa.delete(req.params.id, req.user)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'gdpr_dsfa', resourceId: req.params.id }) +router.delete('/gdpr/dsfa/:id', requireAuth, authorize('admin'), async (req, res) => { + if (!await 
gdprStore.dsfa.delete(req.params.id, req.user)) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'gdpr_dsfa', resourceId: req.params.id }) res.json({ deleted: true }) }) -router.delete('/gdpr/dsfa/:id/permanent', requireAuth, authorize('admin'), (req, res) => { - if (!gdprStore.dsfa.permanentDelete(req.params.id)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'gdpr_dsfa', resourceId: req.params.id }) +router.delete('/gdpr/dsfa/:id/permanent', requireAuth, authorize('admin'), async (req, res) => { + if (!await gdprStore.dsfa.permanentDelete(req.params.id)) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'gdpr_dsfa', resourceId: req.params.id }) res.json({ deleted: true, permanent: true }) }) -router.post('/gdpr/dsfa/:id/restore', requireAuth, authorize('admin'), (req, res) => { - const item = gdprStore.dsfa.restore(req.params.id) +router.post('/gdpr/dsfa/:id/restore', requireAuth, authorize('admin'), async (req, res) => { + const item = await gdprStore.dsfa.restore(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'gdpr_dsfa', resourceId: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'gdpr_dsfa', resourceId: req.params.id }) res.json(item) }) // Incidents (Datenpannen) -router.get('/gdpr/incidents', requireAuth, authorize('reader'), (req, res) => { - res.json(gdprStore.incidents.getAll({ entity: req.query.entity || null })) +router.get('/gdpr/incidents', requireAuth, authorize('reader'), async (req, res) => { + res.json(await gdprStore.incidents.getAll({ entity: req.query.entity || null })) }) 
-router.get('/gdpr/incidents/:id', requireAuth, authorize('reader'), (req, res) => { - const item = gdprStore.incidents.getById(req.params.id) +router.get('/gdpr/incidents/:id', requireAuth, authorize('reader'), async (req, res) => { + const item = await gdprStore.incidents.getById(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) res.json(item) }) -router.post('/gdpr/incidents', requireAuth, authorizeAuditorOrAbove, (req, res) => { - res.status(201).json(gdprStore.incidents.create(req.body, req.user)) +router.post('/gdpr/incidents', requireAuth, authorizeAuditorOrAbove, async (req, res) => { + res.status(201).json(await gdprStore.incidents.create(req.body, req.user)) }) -router.put('/gdpr/incidents/:id', requireAuth, authorizeAuditorOrAbove, (req, res) => { - const item = gdprStore.incidents.update(req.params.id, req.body) +router.put('/gdpr/incidents/:id', requireAuth, authorizeAuditorOrAbove, async (req, res) => { + const item = await gdprStore.incidents.update(req.params.id, req.body) if (!item) return res.status(404).json({ error: 'Not found' }) res.json(item) }) -router.delete('/gdpr/incidents/:id', requireAuth, authorize('admin'), (req, res) => { - if (!gdprStore.incidents.delete(req.params.id, req.user)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'gdpr_incident', resourceId: req.params.id }) +router.delete('/gdpr/incidents/:id', requireAuth, authorize('admin'), async (req, res) => { + if (!await gdprStore.incidents.delete(req.params.id, req.user)) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'gdpr_incident', resourceId: req.params.id }) res.json({ deleted: true }) }) -router.delete('/gdpr/incidents/:id/permanent', requireAuth, authorize('admin'), (req, res) => { - if (!gdprStore.incidents.permanentDelete(req.params.id)) return 
res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'gdpr_incident', resourceId: req.params.id }) +router.delete('/gdpr/incidents/:id/permanent', requireAuth, authorize('admin'), async (req, res) => { + if (!await gdprStore.incidents.permanentDelete(req.params.id)) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'gdpr_incident', resourceId: req.params.id }) res.json({ deleted: true, permanent: true }) }) -router.post('/gdpr/incidents/:id/restore', requireAuth, authorize('admin'), (req, res) => { - const item = gdprStore.incidents.restore(req.params.id) +router.post('/gdpr/incidents/:id/restore', requireAuth, authorize('admin'), async (req, res) => { + const item = await gdprStore.incidents.restore(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'gdpr_incident', resourceId: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'gdpr_incident', resourceId: req.params.id }) res.json(item) }) // DSAR (Betroffenenrechte) -router.get('/gdpr/dsar', requireAuth, authorizeEditorOrAbove, (req, res) => { - res.json(gdprStore.dsar.getAll({ entity: req.query.entity || null })) +router.get('/gdpr/dsar', requireAuth, authorizeEditorOrAbove, async (req, res) => { + res.json(await gdprStore.dsar.getAll({ entity: req.query.entity || null })) }) -router.get('/gdpr/dsar/:id', requireAuth, authorizeEditorOrAbove, (req, res) => { - const item = gdprStore.dsar.getById(req.params.id) +router.get('/gdpr/dsar/:id', requireAuth, authorizeEditorOrAbove, async (req, res) => { + const item = await gdprStore.dsar.getById(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) res.json(item) }) -router.post('/gdpr/dsar', 
requireAuth, authorizeEditorOrAbove, (req, res) => { - res.status(201).json(gdprStore.dsar.create(req.body, req.user)) +router.post('/gdpr/dsar', requireAuth, authorizeEditorOrAbove, async (req, res) => { + res.status(201).json(await gdprStore.dsar.create(req.body, req.user)) }) -router.put('/gdpr/dsar/:id', requireAuth, authorizeEditorOrAbove, (req, res) => { - const item = gdprStore.dsar.update(req.params.id, req.body) +router.put('/gdpr/dsar/:id', requireAuth, authorizeEditorOrAbove, async (req, res) => { + const item = await gdprStore.dsar.update(req.params.id, req.body) if (!item) return res.status(404).json({ error: 'Not found' }) res.json(item) }) -router.delete('/gdpr/dsar/:id', requireAuth, authorize('admin'), (req, res) => { - if (!gdprStore.dsar.delete(req.params.id, req.user)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'gdpr_dsar', resourceId: req.params.id }) +router.delete('/gdpr/dsar/:id', requireAuth, authorize('admin'), async (req, res) => { + if (!await gdprStore.dsar.delete(req.params.id, req.user)) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'gdpr_dsar', resourceId: req.params.id }) res.json({ deleted: true }) }) -router.delete('/gdpr/dsar/:id/permanent', requireAuth, authorize('admin'), (req, res) => { - if (!gdprStore.dsar.permanentDelete(req.params.id)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'gdpr_dsar', resourceId: req.params.id }) +router.delete('/gdpr/dsar/:id/permanent', requireAuth, authorize('admin'), async (req, res) => { + if (!await gdprStore.dsar.permanentDelete(req.params.id)) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'gdpr_dsar', 
resourceId: req.params.id }) res.json({ deleted: true, permanent: true }) }) -router.post('/gdpr/dsar/:id/restore', requireAuth, authorize('admin'), (req, res) => { - const item = gdprStore.dsar.restore(req.params.id) +router.post('/gdpr/dsar/:id/restore', requireAuth, authorize('admin'), async (req, res) => { + const item = await gdprStore.dsar.restore(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'gdpr_dsar', resourceId: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'gdpr_dsar', resourceId: req.params.id }) res.json(item) }) // TOMs -router.get('/gdpr/toms', requireAuth, authorize('reader'), (req, res) => { - res.json(gdprStore.toms.getAll({ entity: req.query.entity || null, category: req.query.category || null })) +router.get('/gdpr/toms', requireAuth, authorize('reader'), async (req, res) => { + res.json(await gdprStore.toms.getAll({ entity: req.query.entity || null, category: req.query.category || null })) }) -router.get('/gdpr/toms/:id', requireAuth, authorize('reader'), (req, res) => { - const item = gdprStore.toms.getById(req.params.id) +router.get('/gdpr/toms/:id', requireAuth, authorize('reader'), async (req, res) => { + const item = await gdprStore.toms.getById(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) res.json(item) }) -router.post('/gdpr/toms', requireAuth, authorizeContentOwner, (req, res) => { - res.status(201).json(gdprStore.toms.create(req.body, req.user)) +router.post('/gdpr/toms', requireAuth, authorizeContentOwner, async (req, res) => { + res.status(201).json(await gdprStore.toms.create(req.body, req.user)) }) -router.put('/gdpr/toms/:id', requireAuth, authorizeContentOwner, (req, res) => { - const item = gdprStore.toms.update(req.params.id, req.body) +router.put('/gdpr/toms/:id', requireAuth, authorizeContentOwner, async (req, res) => 
{ + const item = await gdprStore.toms.update(req.params.id, req.body) if (!item) return res.status(404).json({ error: 'Not found' }) res.json(item) }) -router.delete('/gdpr/toms/:id', requireAuth, authorize('admin'), (req, res) => { - if (!gdprStore.toms.delete(req.params.id, req.user)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'gdpr_toms', resourceId: req.params.id }) +router.delete('/gdpr/toms/:id', requireAuth, authorize('admin'), async (req, res) => { + if (!await gdprStore.toms.delete(req.params.id, req.user)) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'gdpr_toms', resourceId: req.params.id }) res.json({ deleted: true }) }) -router.delete('/gdpr/toms/:id/permanent', requireAuth, authorize('admin'), (req, res) => { - if (!gdprStore.toms.permanentDelete(req.params.id)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'gdpr_toms', resourceId: req.params.id }) +router.delete('/gdpr/toms/:id/permanent', requireAuth, authorize('admin'), async (req, res) => { + if (!await gdprStore.toms.permanentDelete(req.params.id)) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'gdpr_toms', resourceId: req.params.id }) res.json({ deleted: true, permanent: true }) }) -router.post('/gdpr/toms/:id/restore', requireAuth, authorize('admin'), (req, res) => { - const item = gdprStore.toms.restore(req.params.id) +router.post('/gdpr/toms/:id/restore', requireAuth, authorize('admin'), async (req, res) => { + const item = await gdprStore.toms.restore(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'restore', 
resource: 'gdpr_toms', resourceId: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'gdpr_toms', resourceId: req.params.id }) res.json(item) }) diff --git a/server/routes/goals.js b/server/routes/goals.js index 82d916c..479478f 100644 --- a/server/routes/goals.js +++ b/server/routes/goals.js @@ -6,48 +6,48 @@ const { requireAuth, authorize } = require('../auth') const goalsStore = require('../db/goalsStore') const embeddingStore = require('../ai/embeddingStore') -router.get('/goals/summary', requireAuth, authorize('reader'), (req, res) => { - res.json(goalsStore.getSummary()) +router.get('/goals/summary', requireAuth, authorize('reader'), async (req, res) => { + res.json(await goalsStore.getSummary()) }) -router.get('/goals', requireAuth, authorize('reader'), (req, res) => { +router.get('/goals', requireAuth, authorize('reader'), async (req, res) => { const { status, category, entity } = req.query - res.json(goalsStore.getAll({ status, category, entity })) + res.json(await goalsStore.getAll({ status, category, entity })) }) -router.get('/goals/:id', requireAuth, authorize('reader'), (req, res) => { - const g = goalsStore.getById(req.params.id) +router.get('/goals/:id', requireAuth, authorize('reader'), async (req, res) => { + const g = await goalsStore.getById(req.params.id) if (!g) return res.status(404).json({ error: 'Not found' }) res.json(g) }) -router.post('/goals', requireAuth, authorize('editor'), (req, res) => { - const g = goalsStore.create(req.body, req.user) - embeddingStore.indexDoc(g, 'Sicherheitsziel', '#goals').catch(() => {}) +router.post('/goals', requireAuth, authorize('editor'), async (req, res) => { + const g = await goalsStore.create(req.body, req.user) + await embeddingStore.indexDoc(g, 'Sicherheitsziel', '#goals') res.status(201).json(g) }) -router.put('/goals/:id', requireAuth, authorize('editor'), (req, res) => { - const g = goalsStore.update(req.params.id, req.body) 
+router.put('/goals/:id', requireAuth, authorize('editor'), async (req, res) => { + const g = await goalsStore.update(req.params.id, req.body) if (!g) return res.status(404).json({ error: 'Not found' }) - embeddingStore.indexDoc(g, 'Sicherheitsziel', '#goals').catch(() => {}) + await embeddingStore.indexDoc(g, 'Sicherheitsziel', '#goals') res.json(g) }) -router.delete('/goals/:id', requireAuth, authorize('admin'), (req, res) => { - const ok = goalsStore.delete(req.params.id, req.user) +router.delete('/goals/:id', requireAuth, authorize('admin'), async (req, res) => { + const ok = await goalsStore.delete(req.params.id, req.user) if (!ok) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'goal', resourceId: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'goal', resourceId: req.params.id }) res.json({ deleted: true }) }) -router.delete('/goals/:id/permanent', requireAuth, authorize('admin'), (req, res) => { - const ok = goalsStore.permanentDelete(req.params.id) +router.delete('/goals/:id/permanent', requireAuth, authorize('admin'), async (req, res) => { + const ok = await goalsStore.permanentDelete(req.params.id) if (!ok) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'goal', resourceId: req.params.id }) - embeddingStore.removeDoc(req.params.id) + await require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'goal', resourceId: req.params.id }) + await embeddingStore.removeDoc(req.params.id) res.json({ deleted: true, permanent: true }) }) -router.post('/goals/:id/restore', requireAuth, authorize('admin'), (req, res) => { - const item = goalsStore.restore(req.params.id) +router.post('/goals/:id/restore', requireAuth, authorize('admin'), async (req, res) => { + const item = await 
goalsStore.restore(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'goal', resourceId: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'goal', resourceId: req.params.id }) res.json(item) }) diff --git a/server/routes/governance.js b/server/routes/governance.js index d8f417d..9b28a64 100644 --- a/server/routes/governance.js +++ b/server/routes/governance.js @@ -8,85 +8,85 @@ const multer = require('multer') const { requireAuth, authorize } = require('../auth') const govStore = require('../db/governanceStore') -router.get('/governance/summary', requireAuth, authorize('reader'), (req, res) => { - res.json(govStore.getSummary()) +router.get('/governance/summary', requireAuth, authorize('reader'), async (req, res) => { + res.json(await govStore.getSummary()) }) // Reviews -router.get('/governance/reviews', requireAuth, authorize('reader'), (req, res) => { - res.json(govStore.getReviews()) +router.get('/governance/reviews', requireAuth, authorize('reader'), async (req, res) => { + res.json(await govStore.getReviews()) }) -router.get('/governance/reviews/:id', requireAuth, authorize('reader'), (req, res) => { - const r = govStore.getReviewById(req.params.id) +router.get('/governance/reviews/:id', requireAuth, authorize('reader'), async (req, res) => { + const r = await govStore.getReviewById(req.params.id) if (!r) return res.status(404).json({ error: 'Not found' }) res.json(r) }) -router.post('/governance/reviews', requireAuth, authorize('editor'), (req, res) => { - const r = govStore.createReview(req.body, { createdBy: req.user }) - require('../db/auditStore').append({ user: req.user, action: 'create', resource: 'governance_review', detail: r.title }) +router.post('/governance/reviews', requireAuth, authorize('editor'), async (req, res) => { + const r = await govStore.createReview(req.body, { 
createdBy: req.user }) + await require('../db/auditStore').append({ user: req.user, action: 'create', resource: 'governance_review', detail: r.title }) res.status(201).json(r) }) -router.put('/governance/reviews/:id', requireAuth, authorize('editor'), (req, res) => { - const r = govStore.updateReview(req.params.id, req.body, { changedBy: req.user }) +router.put('/governance/reviews/:id', requireAuth, authorize('editor'), async (req, res) => { + const r = await govStore.updateReview(req.params.id, req.body, { changedBy: req.user }) if (!r) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'update', resource: 'governance_review', detail: r.title }) + await require('../db/auditStore').append({ user: req.user, action: 'update', resource: 'governance_review', detail: r.title }) res.json(r) }) -router.delete('/governance/reviews/:id', requireAuth, authorize('admin'), (req, res) => { - if (!govStore.deleteReview(req.params.id)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'governance_review', detail: req.params.id }) +router.delete('/governance/reviews/:id', requireAuth, authorize('admin'), async (req, res) => { + if (!(await govStore.deleteReview(req.params.id))) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'governance_review', detail: req.params.id }) res.json({ ok: true }) }) // Actions -router.get('/governance/actions', requireAuth, authorize('reader'), (req, res) => { - res.json(govStore.getActions()) +router.get('/governance/actions', requireAuth, authorize('reader'), async (req, res) => { + res.json(await govStore.getActions()) }) -router.get('/governance/actions/:id', requireAuth, authorize('reader'), (req, res) => { - const a = govStore.getActionById(req.params.id) +router.get('/governance/actions/:id', requireAuth, 
authorize('reader'), async (req, res) => { + const a = await govStore.getActionById(req.params.id) if (!a) return res.status(404).json({ error: 'Not found' }) res.json(a) }) -router.post('/governance/actions', requireAuth, authorize('editor'), (req, res) => { - const a = govStore.createAction(req.body, { createdBy: req.user }) - require('../db/auditStore').append({ user: req.user, action: 'create', resource: 'governance_action', detail: a.title }) +router.post('/governance/actions', requireAuth, authorize('editor'), async (req, res) => { + const a = await govStore.createAction(req.body, { createdBy: req.user }) + await require('../db/auditStore').append({ user: req.user, action: 'create', resource: 'governance_action', detail: a.title }) res.status(201).json(a) }) -router.put('/governance/actions/:id', requireAuth, authorize('editor'), (req, res) => { - const a = govStore.updateAction(req.params.id, req.body, { changedBy: req.user }) +router.put('/governance/actions/:id', requireAuth, authorize('editor'), async (req, res) => { + const a = await govStore.updateAction(req.params.id, req.body, { changedBy: req.user }) if (!a) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'update', resource: 'governance_action', detail: a.title }) + await require('../db/auditStore').append({ user: req.user, action: 'update', resource: 'governance_action', detail: a.title }) res.json(a) }) -router.delete('/governance/actions/:id', requireAuth, authorize('admin'), (req, res) => { - if (!govStore.deleteAction(req.params.id)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'governance_action', detail: req.params.id }) +router.delete('/governance/actions/:id', requireAuth, authorize('admin'), async (req, res) => { + if (!(await govStore.deleteAction(req.params.id))) return res.status(404).json({ error: 'Not found' }) + await 
require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'governance_action', detail: req.params.id }) res.json({ ok: true }) }) // Meetings -router.get('/governance/meetings', requireAuth, authorize('reader'), (req, res) => { - res.json(govStore.getMeetings()) +router.get('/governance/meetings', requireAuth, authorize('reader'), async (req, res) => { + res.json(await govStore.getMeetings()) }) -router.get('/governance/meetings/:id', requireAuth, authorize('reader'), (req, res) => { - const m = govStore.getMeetingById(req.params.id) +router.get('/governance/meetings/:id', requireAuth, authorize('reader'), async (req, res) => { + const m = await govStore.getMeetingById(req.params.id) if (!m) return res.status(404).json({ error: 'Not found' }) res.json(m) }) -router.post('/governance/meetings', requireAuth, authorize('editor'), (req, res) => { - const m = govStore.createMeeting(req.body, { createdBy: req.user }) - require('../db/auditStore').append({ user: req.user, action: 'create', resource: 'governance_meeting', detail: m.title }) +router.post('/governance/meetings', requireAuth, authorize('editor'), async (req, res) => { + const m = await govStore.createMeeting(req.body, { createdBy: req.user }) + await require('../db/auditStore').append({ user: req.user, action: 'create', resource: 'governance_meeting', detail: m.title }) res.status(201).json(m) }) -router.put('/governance/meetings/:id', requireAuth, authorize('editor'), (req, res) => { - const m = govStore.updateMeeting(req.params.id, req.body, { changedBy: req.user }) +router.put('/governance/meetings/:id', requireAuth, authorize('editor'), async (req, res) => { + const m = await govStore.updateMeeting(req.params.id, req.body, { changedBy: req.user }) if (!m) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'update', resource: 'governance_meeting', detail: m.title }) + await require('../db/auditStore').append({ user: 
req.user, action: 'update', resource: 'governance_meeting', detail: m.title }) res.json(m) }) -router.delete('/governance/meetings/:id', requireAuth, authorize('admin'), (req, res) => { - if (!govStore.deleteMeeting(req.params.id)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'governance_meeting', detail: req.params.id }) +router.delete('/governance/meetings/:id', requireAuth, authorize('admin'), async (req, res) => { + if (!(await govStore.deleteMeeting(req.params.id))) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'governance_meeting', detail: req.params.id }) res.json({ ok: true }) }) @@ -109,10 +109,10 @@ router.post('/governance/:collection/:id/upload', requireAuth, authorize('editor const UPDS = { reviews: 'updateReview', actions: 'updateAction', meetings: 'updateMeeting' } const col = req.params.collection if (!COLS[col]) return res.status(400).json({ error: 'Invalid collection' }) - govUpload.single('file')(req, res, err => { + govUpload.single('file')(req, res, async err => { if (err) return res.status(400).json({ error: err.message }) if (!req.file) return res.status(400).json({ error: 'No file uploaded' }) - const item = govStore[COLS[col]](req.params.id) + const item = await govStore[COLS[col]](req.params.id) if (!item) { fs.unlinkSync(req.file.path); return res.status(404).json({ error: 'Not found' }) } const attachment = { id: Date.now().toString(36), @@ -123,16 +123,16 @@ router.post('/governance/:collection/:id/upload', requireAuth, authorize('editor uploadedAt: new Date().toISOString(), } const attachments = [...(item.attachments || []), attachment] - govStore[UPDS[col]](req.params.id, { attachments }) + await govStore[UPDS[col]](req.params.id, { attachments }) res.json({ ok: true, attachment }) }) }) -router.get('/governance/:collection/:id/files/:fileId', 
requireAuth, authorize('reader'), (req, res) => { +router.get('/governance/:collection/:id/files/:fileId', requireAuth, authorize('reader'), async (req, res) => { const COLS = { reviews: 'getReviewById', actions: 'getActionById', meetings: 'getMeetingById' } const col = req.params.collection if (!COLS[col]) return res.status(400).json({ error: 'Invalid collection' }) - const item = govStore[COLS[col]](req.params.id) + const item = await govStore[COLS[col]](req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) const att = (item.attachments || []).find(a => a.id === req.params.fileId) if (!att) return res.status(404).json({ error: 'Attachment not found' }) @@ -142,19 +142,19 @@ router.get('/governance/:collection/:id/files/:fileId', requireAuth, authorize(' res.sendFile(path.resolve(filePath)) }) -router.delete('/governance/:collection/:id/files/:fileId', requireAuth, authorize('editor'), (req, res) => { +router.delete('/governance/:collection/:id/files/:fileId', requireAuth, authorize('editor'), async (req, res) => { const COLS = { reviews: 'getReviewById', actions: 'getActionById', meetings: 'getMeetingById' } const UPDS = { reviews: 'updateReview', actions: 'updateAction', meetings: 'updateMeeting' } const col = req.params.collection if (!COLS[col]) return res.status(400).json({ error: 'Invalid collection' }) - const item = govStore[COLS[col]](req.params.id) + const item = await govStore[COLS[col]](req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) const att = (item.attachments || []).find(a => a.id === req.params.fileId) if (!att) return res.status(404).json({ error: 'Attachment not found' }) const filePath = path.join(GOV_FILES_DIR, att.storedName) if (fs.existsSync(filePath)) fs.unlinkSync(filePath) const attachments = (item.attachments || []).filter(a => a.id !== req.params.fileId) - govStore[UPDS[col]](req.params.id, { attachments }) + await govStore[UPDS[col]](req.params.id, { attachments }) res.json({ ok: 
true }) }) diff --git a/server/routes/guidance.js b/server/routes/guidance.js index 2041bec..20492e1 100644 --- a/server/routes/guidance.js +++ b/server/routes/guidance.js @@ -23,35 +23,35 @@ const guidanceUpload = multer({ const DEMO_LANG_FILE = path.join(__dirname, '../../data/.demo_lang_set') const SUPPORTED_LANGS = ['de', 'en', 'fr', 'nl'] -function _syncSeedLang(lang) { +async function _syncSeedLang(lang) { try { let current = 'en' try { current = JSON.parse(fs.readFileSync(DEMO_LANG_FILE, 'utf8')).lang || 'en' } catch {} if (current === lang) return fs.writeFileSync(DEMO_LANG_FILE, JSON.stringify({ lang, setAt: new Date().toISOString() })) - try { guidanceStore.seedDemoDoc() } catch {} - try { guidanceStore.seedRoleGuides() } catch {} - try { guidanceStore.seedSoaGuide() } catch {} - try { guidanceStore.seedPolicyGuide() } catch {} - try { guidanceStore.seedIsoNotice() } catch {} - try { guidanceStore.seedSystemhandbuch() } catch {} - try { guidanceStore.seedArchitectureDocs() } catch {} + try { await guidanceStore.seedDemoDoc() } catch {} + try { await guidanceStore.seedRoleGuides() } catch {} + try { await guidanceStore.seedSoaGuide() } catch {} + try { await guidanceStore.seedPolicyGuide() } catch {} + try { await guidanceStore.seedIsoNotice() } catch {} + try { await guidanceStore.seedSystemhandbuch() } catch {} + try { await guidanceStore.seedArchitectureDocs() } catch {} } catch {} } -router.get('/guidance', requireAuth, authorize('reader'), (req, res) => { +router.get('/guidance', requireAuth, authorize('reader'), async (req, res) => { const { category, lang, search } = req.query - if (lang && SUPPORTED_LANGS.includes(lang)) _syncSeedLang(lang) + if (lang && SUPPORTED_LANGS.includes(lang)) await _syncSeedLang(lang) const rank = req.roleRank || 1 - if (search) return res.json(guidanceStore.search(search, rank)) - if (category) return res.json(guidanceStore.getByCategory(category, rank)) - res.json(guidanceStore.getAll(rank)) + if (search) return 
res.json(await guidanceStore.search(search, rank)) + if (category) return res.json(await guidanceStore.getByCategory(category, rank)) + res.json(await guidanceStore.getAll(rank)) }) -router.get('/guidance/:id/file', requireAuth, authorize('reader'), (req, res) => { - const filePath = guidanceStore.getFilePath(req.params.id) +router.get('/guidance/:id/file', requireAuth, authorize('reader'), async (req, res) => { + const filePath = await guidanceStore.getFilePath(req.params.id) if (!filePath || !fs.existsSync(filePath)) return res.status(404).json({ error: 'File not found' }) - const doc = guidanceStore.getById(req.params.id) + const doc = await guidanceStore.getById(req.params.id) const ext = doc?.filename ? path.extname(doc.filename).toLowerCase() : '.bin' const mime = ext === '.pdf' ? 'application/pdf' : 'application/octet-stream' res.setHeader('Content-Type', mime) @@ -59,8 +59,8 @@ router.get('/guidance/:id/file', requireAuth, authorize('reader'), (req, res) => res.sendFile(path.resolve(filePath)) }) -router.get('/guidance/:id', requireAuth, authorize('reader'), (req, res) => { - const doc = guidanceStore.getById(req.params.id) +router.get('/guidance/:id', requireAuth, authorize('reader'), async (req, res) => { + const doc = await guidanceStore.getById(req.params.id) if (!doc) return res.status(404).json({ error: 'Not found' }) const RRANK = { reader: 1, editor: 2, dept_head: 2, contentowner: 3, auditor: 3, admin: 4 } if (doc.minRole && (req.roleRank || 1) < (RRANK[doc.minRole] || 1)) { @@ -70,12 +70,12 @@ router.get('/guidance/:id', requireAuth, authorize('reader'), (req, res) => { res.json(rest) }) -router.post('/guidance', requireAuth, authorize('contentowner'), (req, res) => { +router.post('/guidance', requireAuth, authorize('contentowner'), async (req, res) => { const { category, title, type, content, linkedControls } = req.body if (!category || !title) return res.status(400).json({ error: 'category and title are required' }) try { - const doc = 
guidanceStore.create({ category, title, type: type || 'markdown', content, linkedControls, createdBy: req.user }) - embeddingStore.indexDoc(doc, 'Systemhandbuch', '#guidance').catch(() => {}) + const doc = await guidanceStore.create({ category, title, type: type || 'markdown', content, linkedControls, createdBy: req.user }) + await embeddingStore.indexDoc(doc, 'Systemhandbuch', '#guidance').catch(() => {}) res.status(201).json(doc) } catch (e) { res.status(400).json({ error: e.message }) @@ -83,7 +83,7 @@ router.post('/guidance', requireAuth, authorize('contentowner'), (req, res) => { }) router.post('/guidance/upload', requireAuth, authorize('contentowner'), (req, res) => { - guidanceUpload.single('file')(req, res, (err) => { + guidanceUpload.single('file')(req, res, async (err) => { if (err) return res.status(400).json({ error: err.message }) if (!req.file) return res.status(400).json({ error: 'No file uploaded' }) const { category, title } = req.body @@ -94,7 +94,7 @@ router.post('/guidance/upload', requireAuth, authorize('contentowner'), (req, re const ext = path.extname(req.file.originalname).toLowerCase() const type = ext === '.pdf' ? 
'pdf' : 'docx' try { - const doc = guidanceStore.create({ + const doc = await guidanceStore.create({ category, title, type, @@ -111,33 +111,33 @@ router.post('/guidance/upload', requireAuth, authorize('contentowner'), (req, re }) }) -router.put('/guidance/:id', requireAuth, authorize('contentowner'), (req, res) => { +router.put('/guidance/:id', requireAuth, authorize('contentowner'), async (req, res) => { const { title, category, content, linkedControls } = req.body - const updated = guidanceStore.update(req.params.id, { title, category, content, linkedControls }) + const updated = await guidanceStore.update(req.params.id, { title, category, content, linkedControls }) if (!updated) return res.status(404).json({ error: 'Not found' }) - embeddingStore.indexDoc(updated, 'Systemhandbuch', '#guidance').catch(() => {}) + await embeddingStore.indexDoc(updated, 'Systemhandbuch', '#guidance').catch(() => {}) res.json(updated) }) -router.delete('/guidance/:id', requireAuth, authorize('admin'), (req, res) => { - const ok = guidanceStore.delete(req.params.id, req.user) +router.delete('/guidance/:id', requireAuth, authorize('admin'), async (req, res) => { + const ok = await guidanceStore.delete(req.params.id, req.user) if (!ok) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'guidance', resourceId: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'guidance', resourceId: req.params.id }) res.json({ deleted: true }) }) -router.delete('/guidance/:id/permanent', requireAuth, authorize('admin'), (req, res) => { - const ok = guidanceStore.permanentDelete(req.params.id) +router.delete('/guidance/:id/permanent', requireAuth, authorize('admin'), async (req, res) => { + const ok = await guidanceStore.permanentDelete(req.params.id) if (!ok) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, 
action: 'permanent_delete', resource: 'guidance', resourceId: req.params.id }) - embeddingStore.removeDoc(req.params.id) + await require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'guidance', resourceId: req.params.id }) + await embeddingStore.removeDoc(req.params.id) res.json({ deleted: true, permanent: true }) }) -router.post('/guidance/:id/restore', requireAuth, authorize('admin'), (req, res) => { - const item = guidanceStore.restore(req.params.id) +router.post('/guidance/:id/restore', requireAuth, authorize('admin'), async (req, res) => { + const item = await guidanceStore.restore(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'guidance', resourceId: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'guidance', resourceId: req.params.id }) res.json(item) }) diff --git a/server/routes/legal.js b/server/routes/legal.js index 674c17b..dd53cfc 100644 --- a/server/routes/legal.js +++ b/server/routes/legal.js @@ -9,7 +9,7 @@ const { requireAuth, authorize } = require('../auth') const legalStore = require('../db/legalStore') const LEGAL_FILES_DIR = legalStore.FILES_DIR -if (!fs.existsSync(LEGAL_FILES_DIR)) fs.mkdirSync(LEGAL_FILES_DIR, { recursive: true }) +if (LEGAL_FILES_DIR && !fs.existsSync(LEGAL_FILES_DIR)) fs.mkdirSync(LEGAL_FILES_DIR, { recursive: true }) const legalAttachUpload = multer({ dest: LEGAL_FILES_DIR, @@ -24,10 +24,10 @@ const legalAttachUpload = multer({ function legalAttachRoutes(resourceKey, store) { router.post(`/legal/${resourceKey}/:id/attachments`, requireAuth, authorize('contentowner'), (req, res) => { - legalAttachUpload.single('file')(req, res, (err) => { + legalAttachUpload.single('file')(req, res, async (err) => { if (err) return res.status(400).json({ error: err.message }) if (!req.file) return res.status(400).json({ error: 
'Keine Datei hochgeladen' }) - const item = store.getById(req.params.id) + const item = await store.getById(req.params.id) if (!item) { fs.unlink(req.file.path, () => {}); return res.status(404).json({ error: 'Not found' }) } const meta = { id: `att_${Date.now()}`, @@ -38,12 +38,12 @@ function legalAttachRoutes(resourceKey, store) { uploadedAt: new Date().toISOString(), filePath: req.file.path } - store.addAttachment(req.params.id, meta) + await store.addAttachment(req.params.id, meta) res.status(201).json(meta) }) }) - router.get(`/legal/${resourceKey}/:id/attachments/:attId/file`, requireAuth, authorize('reader'), (req, res) => { - const item = store.getById(req.params.id) + router.get(`/legal/${resourceKey}/:id/attachments/:attId/file`, requireAuth, authorize('reader'), async (req, res) => { + const item = await store.getById(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) const att = (item.attachments || []).find(a => a.id === req.params.attId) if (!att || !att.filePath || !fs.existsSync(att.filePath)) return res.status(404).json({ error: 'Datei nicht gefunden' }) @@ -53,8 +53,8 @@ function legalAttachRoutes(resourceKey, store) { res.setHeader('Content-Disposition', `attachment; filename="${att.originalName}"`) res.sendFile(path.resolve(att.filePath)) }) - router.delete(`/legal/${resourceKey}/:id/attachments/:attId`, requireAuth, authorize('contentowner'), (req, res) => { - const att = store.removeAttachment(req.params.id, req.params.attId) + router.delete(`/legal/${resourceKey}/:id/attachments/:attId`, requireAuth, authorize('contentowner'), async (req, res) => { + const att = await store.removeAttachment(req.params.id, req.params.attId) if (!att) return res.status(404).json({ error: 'Not found' }) if (att.filePath) fs.unlink(att.filePath, () => {}) res.json({ deleted: true }) @@ -65,8 +65,8 @@ legalAttachRoutes('contracts', legalStore.contracts) legalAttachRoutes('ndas', legalStore.ndas) legalAttachRoutes('policies', 
legalStore.privacyPolicies) -router.get('/legal/summary', requireAuth, authorize('reader'), (req, res) => { - res.json(legalStore.getSummary()) +router.get('/legal/summary', requireAuth, authorize('reader'), async (req, res) => { + res.json(await legalStore.getSummary()) }) // ── CSV-Hilfsfunktion ──────────────────────────────────────────────── @@ -79,8 +79,8 @@ function toCsv(rows) { } // Verträge -router.get('/legal/contracts/export/csv', requireAuth, authorize('reader'), (req, res) => { - const list = legalStore.contracts.getAll(req.query) +router.get('/legal/contracts/export/csv', requireAuth, authorize('reader'), async (req, res) => { + const list = await legalStore.contracts.getAll(req.query) const header = ['ID','Titel','Typ','Vertragspartner','Status','Laufzeitbeginn','Laufzeitende','Automatische Verlängerung','Kündigungsfrist (Tage)','Wert','Währung','Owner','Notizen','Erstellt am'] const rows = list.map(c => [ c.id, c.title, c.contractType, c.counterparty, c.status, @@ -94,45 +94,45 @@ router.get('/legal/contracts/export/csv', requireAuth, authorize('reader'), (req res.send('\uFEFF' + toCsv([header, ...rows])) }) -router.get('/legal/contracts', requireAuth, authorize('reader'), (req, res) => { - res.json(legalStore.contracts.getAll(req.query)) +router.get('/legal/contracts', requireAuth, authorize('reader'), async (req, res) => { + res.json(await legalStore.contracts.getAll(req.query)) }) -router.get('/legal/contracts/expiring', requireAuth, authorize('reader'), (req, res) => { - res.json(legalStore.contracts.getExpiring(parseInt(req.query.days) || 60)) +router.get('/legal/contracts/expiring', requireAuth, authorize('reader'), async (req, res) => { + res.json(await legalStore.contracts.getExpiring(parseInt(req.query.days) || 60)) }) -router.get('/legal/contracts/:id', requireAuth, authorize('reader'), (req, res) => { - const item = legalStore.contracts.getById(req.params.id) +router.get('/legal/contracts/:id', requireAuth, authorize('reader'), async 
(req, res) => { + const item = await legalStore.contracts.getById(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) res.json(item) }) -router.post('/legal/contracts', requireAuth, authorize('contentowner'), (req, res) => { - res.status(201).json(legalStore.contracts.create(req.body, req.user)) +router.post('/legal/contracts', requireAuth, authorize('contentowner'), async (req, res) => { + res.status(201).json(await legalStore.contracts.create(req.body, req.user)) }) -router.put('/legal/contracts/:id', requireAuth, authorize('contentowner'), (req, res) => { - const item = legalStore.contracts.update(req.params.id, req.body) +router.put('/legal/contracts/:id', requireAuth, authorize('contentowner'), async (req, res) => { + const item = await legalStore.contracts.update(req.params.id, req.body) if (!item) return res.status(404).json({ error: 'Not found' }) res.json(item) }) -router.delete('/legal/contracts/:id', requireAuth, authorize('admin'), (req, res) => { - if (!legalStore.contracts.delete(req.params.id, req.user)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'legal_contract', resourceId: req.params.id }) +router.delete('/legal/contracts/:id', requireAuth, authorize('admin'), async (req, res) => { + if (!await legalStore.contracts.delete(req.params.id, req.user)) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'legal_contract', resourceId: req.params.id }) res.json({ deleted: true }) }) -router.delete('/legal/contracts/:id/permanent', requireAuth, authorize('admin'), (req, res) => { - if (!legalStore.contracts.permanentDelete(req.params.id)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'legal_contract', resourceId: req.params.id }) 
+router.delete('/legal/contracts/:id/permanent', requireAuth, authorize('admin'), async (req, res) => { + if (!await legalStore.contracts.permanentDelete(req.params.id)) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'legal_contract', resourceId: req.params.id }) res.json({ deleted: true, permanent: true }) }) -router.post('/legal/contracts/:id/restore', requireAuth, authorize('admin'), (req, res) => { - const item = legalStore.contracts.restore(req.params.id) +router.post('/legal/contracts/:id/restore', requireAuth, authorize('admin'), async (req, res) => { + const item = await legalStore.contracts.restore(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'legal_contract', resourceId: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'legal_contract', resourceId: req.params.id }) res.json(item) }) // NDAs -router.get('/legal/ndas/export/csv', requireAuth, authorize('reader'), (req, res) => { - const list = legalStore.ndas.getAll(req.query) +router.get('/legal/ndas/export/csv', requireAuth, authorize('reader'), async (req, res) => { + const list = await legalStore.ndas.getAll(req.query) const header = ['ID','Titel','Typ','Vertragspartner','Status','Unterzeichnet am','Läuft ab','Umfang','Owner','Notizen','Erstellt am'] const rows = list.map(n => [ n.id, n.title, n.ndaType, n.counterparty, n.status, @@ -145,42 +145,42 @@ router.get('/legal/ndas/export/csv', requireAuth, authorize('reader'), (req, res res.send('\uFEFF' + toCsv([header, ...rows])) }) -router.get('/legal/ndas', requireAuth, authorize('reader'), (req, res) => { - res.json(legalStore.ndas.getAll(req.query)) +router.get('/legal/ndas', requireAuth, authorize('reader'), async (req, res) => { + res.json(await 
legalStore.ndas.getAll(req.query)) }) -router.get('/legal/ndas/:id', requireAuth, authorize('reader'), (req, res) => { - const item = legalStore.ndas.getById(req.params.id) +router.get('/legal/ndas/:id', requireAuth, authorize('reader'), async (req, res) => { + const item = await legalStore.ndas.getById(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) res.json(item) }) -router.post('/legal/ndas', requireAuth, authorize('contentowner'), (req, res) => { - res.status(201).json(legalStore.ndas.create(req.body, req.user)) +router.post('/legal/ndas', requireAuth, authorize('contentowner'), async (req, res) => { + res.status(201).json(await legalStore.ndas.create(req.body, req.user)) }) -router.put('/legal/ndas/:id', requireAuth, authorize('contentowner'), (req, res) => { - const item = legalStore.ndas.update(req.params.id, req.body) +router.put('/legal/ndas/:id', requireAuth, authorize('contentowner'), async (req, res) => { + const item = await legalStore.ndas.update(req.params.id, req.body) if (!item) return res.status(404).json({ error: 'Not found' }) res.json(item) }) -router.delete('/legal/ndas/:id', requireAuth, authorize('admin'), (req, res) => { - if (!legalStore.ndas.delete(req.params.id, req.user)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'legal_nda', resourceId: req.params.id }) +router.delete('/legal/ndas/:id', requireAuth, authorize('admin'), async (req, res) => { + if (!await legalStore.ndas.delete(req.params.id, req.user)) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'legal_nda', resourceId: req.params.id }) res.json({ deleted: true }) }) -router.delete('/legal/ndas/:id/permanent', requireAuth, authorize('admin'), (req, res) => { - if (!legalStore.ndas.permanentDelete(req.params.id)) return res.status(404).json({ error: 'Not found' }) - 
require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'legal_nda', resourceId: req.params.id }) +router.delete('/legal/ndas/:id/permanent', requireAuth, authorize('admin'), async (req, res) => { + if (!await legalStore.ndas.permanentDelete(req.params.id)) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'legal_nda', resourceId: req.params.id }) res.json({ deleted: true, permanent: true }) }) -router.post('/legal/ndas/:id/restore', requireAuth, authorize('admin'), (req, res) => { - const item = legalStore.ndas.restore(req.params.id) +router.post('/legal/ndas/:id/restore', requireAuth, authorize('admin'), async (req, res) => { + const item = await legalStore.ndas.restore(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'legal_nda', resourceId: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'legal_nda', resourceId: req.params.id }) res.json(item) }) // Privacy Policies -router.get('/legal/policies/export/csv', requireAuth, authorize('reader'), (req, res) => { - const list = legalStore.privacyPolicies.getAll(req.query) +router.get('/legal/policies/export/csv', requireAuth, authorize('reader'), async (req, res) => { + const list = await legalStore.privacyPolicies.getAll(req.query) const header = ['ID','Titel','Typ','Status','Version','Veröffentlicht am','Nächstes Review','URL','Owner','Notizen','Erstellt am'] const rows = list.map(p => [ p.id, p.title, p.policyType, p.status, p.version || 1, @@ -193,36 +193,36 @@ router.get('/legal/policies/export/csv', requireAuth, authorize('reader'), (req, res.send('\uFEFF' + toCsv([header, ...rows])) }) -router.get('/legal/policies', requireAuth, authorize('reader'), (req, res) => { - 
res.json(legalStore.privacyPolicies.getAll(req.query)) +router.get('/legal/policies', requireAuth, authorize('reader'), async (req, res) => { + res.json(await legalStore.privacyPolicies.getAll(req.query)) }) -router.get('/legal/policies/:id', requireAuth, authorize('reader'), (req, res) => { - const item = legalStore.privacyPolicies.getById(req.params.id) +router.get('/legal/policies/:id', requireAuth, authorize('reader'), async (req, res) => { + const item = await legalStore.privacyPolicies.getById(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) res.json(item) }) -router.post('/legal/policies', requireAuth, authorize('contentowner'), (req, res) => { - res.status(201).json(legalStore.privacyPolicies.create(req.body, req.user)) +router.post('/legal/policies', requireAuth, authorize('contentowner'), async (req, res) => { + res.status(201).json(await legalStore.privacyPolicies.create(req.body, req.user)) }) -router.put('/legal/policies/:id', requireAuth, authorize('contentowner'), (req, res) => { - const item = legalStore.privacyPolicies.update(req.params.id, req.body) +router.put('/legal/policies/:id', requireAuth, authorize('contentowner'), async (req, res) => { + const item = await legalStore.privacyPolicies.update(req.params.id, req.body) if (!item) return res.status(404).json({ error: 'Not found' }) res.json(item) }) -router.delete('/legal/policies/:id', requireAuth, authorize('admin'), (req, res) => { - if (!legalStore.privacyPolicies.delete(req.params.id, req.user)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'legal_policy', resourceId: req.params.id }) +router.delete('/legal/policies/:id', requireAuth, authorize('admin'), async (req, res) => { + if (!await legalStore.privacyPolicies.delete(req.params.id, req.user)) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 
'delete', resource: 'legal_policy', resourceId: req.params.id }) res.json({ deleted: true }) }) -router.delete('/legal/policies/:id/permanent', requireAuth, authorize('admin'), (req, res) => { - if (!legalStore.privacyPolicies.permanentDelete(req.params.id)) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'legal_policy', resourceId: req.params.id }) +router.delete('/legal/policies/:id/permanent', requireAuth, authorize('admin'), async (req, res) => { + if (!await legalStore.privacyPolicies.permanentDelete(req.params.id)) return res.status(404).json({ error: 'Not found' }) + await require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'legal_policy', resourceId: req.params.id }) res.json({ deleted: true, permanent: true }) }) -router.post('/legal/policies/:id/restore', requireAuth, authorize('admin'), (req, res) => { - const item = legalStore.privacyPolicies.restore(req.params.id) +router.post('/legal/policies/:id/restore', requireAuth, authorize('admin'), async (req, res) => { + const item = await legalStore.privacyPolicies.restore(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'legal_policy', resourceId: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'legal_policy', resourceId: req.params.id }) res.json(item) }) diff --git a/server/routes/orgUnits.js b/server/routes/orgUnits.js index b3a6f00..4e30be6 100644 --- a/server/routes/orgUnits.js +++ b/server/routes/orgUnits.js @@ -7,42 +7,42 @@ const { requireAuth, authorize } = require('../auth') const orgUnitStore = require('../db/orgUnitStore') const auditStore = require('../db/auditStore') -router.get('/org-units', requireAuth, authorize('reader'), (req, res) => { - res.json(orgUnitStore.getAll()) 
+router.get('/org-units', requireAuth, authorize('reader'), async (req, res) => { + res.json(await orgUnitStore.getAll()) }) -router.get('/org-units/:id', requireAuth, authorize('reader'), (req, res) => { - const u = orgUnitStore.getById(req.params.id) +router.get('/org-units/:id', requireAuth, authorize('reader'), async (req, res) => { + const u = await orgUnitStore.getById(req.params.id) if (!u) return res.status(404).json({ error: 'Not found' }) res.json(u) }) -router.post('/org-units', requireAuth, authorize('admin'), (req, res) => { +router.post('/org-units', requireAuth, authorize('admin'), async (req, res) => { try { - const unit = orgUnitStore.create(req.body) - auditStore.append({ user: req.user, action: 'create', resource: 'org-unit', detail: unit.name }) + const unit = await orgUnitStore.create(req.body) + await auditStore.append({ user: req.user, action: 'create', resource: 'org-unit', detail: unit.name }) res.status(201).json(unit) } catch (e) { res.status(400).json({ error: e.message }) } }) -router.put('/org-units/:id', requireAuth, authorize('admin'), (req, res) => { +router.put('/org-units/:id', requireAuth, authorize('admin'), async (req, res) => { try { - const updated = orgUnitStore.update(req.params.id, req.body) + const updated = await orgUnitStore.update(req.params.id, req.body) if (!updated) return res.status(404).json({ error: 'Not found' }) - auditStore.append({ user: req.user, action: 'update', resource: 'org-unit', detail: updated.name }) + await auditStore.append({ user: req.user, action: 'update', resource: 'org-unit', detail: updated.name }) res.json(updated) } catch (e) { res.status(400).json({ error: e.message }) } }) -router.delete('/org-units/:id', requireAuth, authorize('admin'), (req, res) => { +router.delete('/org-units/:id', requireAuth, authorize('admin'), async (req, res) => { try { - const ok = orgUnitStore.remove(req.params.id) + const ok = await orgUnitStore.remove(req.params.id) if (!ok) return res.status(404).json({ 
error: 'Not found' }) - auditStore.append({ user: req.user, action: 'delete', resource: 'org-unit', detail: req.params.id }) + await auditStore.append({ user: req.user, action: 'delete', resource: 'org-unit', detail: req.params.id }) res.json({ ok: true }) } catch (e) { res.status(500).json({ error: e.message }) diff --git a/server/routes/public.js b/server/routes/public.js index e64cace..aa12437 100644 --- a/server/routes/public.js +++ b/server/routes/public.js @@ -6,74 +6,74 @@ const { requireAuth, authorize } = require('../auth') const publicIncidentStore = require('../db/publicIncidentStore') // Splash-Konfiguration öffentlich abrufbar (Login-Seite braucht sie vor Auth) -router.get('/public/splash', (req, res) => { +router.get('/public/splash', async (req, res) => { try { const orgSettingsStore = require('../db/orgSettingsStore') - const s = orgSettingsStore.get() + const s = await orgSettingsStore.get() const sp = s.splashScreen || {} res.json({ enabled: sp.enabled !== false, duration: Math.min(30, Math.max(1, Number(sp.duration) || 7)) }) } catch { res.json({ enabled: true, duration: 7 }) } }) // Öffentliche Gesellschaftsliste (kein Login nötig) -router.get('/public/entities', (req, res) => { +router.get('/public/entities', async (req, res) => { try { const entityStore = require('../db/entityStore') - res.json(entityStore.getAll().map(e => ({ id: e.id, name: e.name }))) + res.json((await entityStore.getAll()).map(e => ({ id: e.id, name: e.name }))) } catch { res.json([]) } }) // Jeder kann einen Vorfall melden -router.post('/public/incident', (req, res) => { +router.post('/public/incident', async (req, res) => { const { email, entityName, incidentType, description, measuresTaken, localContact, cleanedUp } = req.body || {} if (!email || !incidentType || !description) { return res.status(400).json({ error: 'email, incidentType und description sind Pflichtfelder.' 
}) } - const incident = publicIncidentStore.create({ email, entityName, incidentType, description, measuresTaken, localContact, cleanedUp }) + const incident = await publicIncidentStore.create({ email, entityName, incidentType, description, measuresTaken, localContact, cleanedUp }) res.status(201).json({ ok: true, refNumber: incident.refNumber, id: incident.id }) }) // CISO / contentowner: Liste aller gemeldeten Vorfälle -router.get('/public/incidents', requireAuth, authorize('contentowner'), (req, res) => { +router.get('/public/incidents', requireAuth, authorize('contentowner'), async (req, res) => { const { status } = req.query - res.json(publicIncidentStore.getAll(status ? { status } : {})) + res.json(await publicIncidentStore.getAll(status ? { status } : {})) }) // CISO / contentowner: Einzelnen Vorfall abrufen -router.get('/public/incident/:id', requireAuth, authorize('contentowner'), (req, res) => { - const item = publicIncidentStore.getById(req.params.id) +router.get('/public/incident/:id', requireAuth, authorize('contentowner'), async (req, res) => { + const item = await publicIncidentStore.getById(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) res.json(item) }) // CISO / contentowner: Vorfall zuweisen / aktualisieren -router.put('/public/incident/:id', requireAuth, authorize('contentowner'), (req, res) => { - const updated = publicIncidentStore.update(req.params.id, req.body, req.user) +router.put('/public/incident/:id', requireAuth, authorize('contentowner'), async (req, res) => { + const updated = await publicIncidentStore.update(req.params.id, req.body, req.user) if (!updated) return res.status(404).json({ error: 'Not found' }) res.json(updated) }) // Admin: Vorfall in Papierkorb verschieben (Soft-Delete) -router.delete('/public/incident/:id', requireAuth, authorize('admin'), (req, res) => { - const ok = publicIncidentStore.delete(req.params.id, req.user) +router.delete('/public/incident/:id', requireAuth, 
authorize('admin'), async (req, res) => { + const ok = await publicIncidentStore.delete(req.params.id, req.user) if (!ok) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'public-incident', resourceId: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'public-incident', resourceId: req.params.id }) res.json({ deleted: true }) }) // Admin: Vorfall endgültig löschen -router.delete('/public/incident/:id/permanent', requireAuth, authorize('admin'), (req, res) => { - const ok = publicIncidentStore.permanentDelete(req.params.id) +router.delete('/public/incident/:id/permanent', requireAuth, authorize('admin'), async (req, res) => { + const ok = await publicIncidentStore.permanentDelete(req.params.id) if (!ok) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'public-incident', resourceId: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'public-incident', resourceId: req.params.id }) res.json({ deleted: true, permanent: true }) }) // Admin: Vorfall wiederherstellen -router.post('/public/incident/:id/restore', requireAuth, authorize('admin'), (req, res) => { - const item = publicIncidentStore.restore(req.params.id) +router.post('/public/incident/:id/restore', requireAuth, authorize('admin'), async (req, res) => { + const item = await publicIncidentStore.restore(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'public-incident', resourceId: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'public-incident', resourceId: req.params.id }) res.json(item) }) diff --git a/server/routes/risks.js 
b/server/routes/risks.js index 38dd8ec..4f7b484 100644 --- a/server/routes/risks.js +++ b/server/routes/risks.js @@ -11,87 +11,87 @@ function authorizeAuditor(req, res, next) { res.status(403).json({ error: 'Mindestens auditor- oder contentowner-Rolle erforderlich' }) } -router.get('/risks/calendar', requireAuth, authorize('reader'), (req, res) => { - res.json(riskStore.getCalendarEvents()) +router.get('/risks/calendar', requireAuth, authorize('reader'), async (req, res) => { + res.json(await riskStore.getCalendarEvents()) }) -router.get('/risks/summary', requireAuth, authorize('reader'), (req, res) => { - res.json(riskStore.getSummary()) +router.get('/risks/summary', requireAuth, authorize('reader'), async (req, res) => { + res.json(await riskStore.getSummary()) }) -router.get('/risks/review-pending', requireAuth, authorize('reader'), (req, res) => { - res.json(riskStore.getReviewPending()) +router.get('/risks/review-pending', requireAuth, authorize('reader'), async (req, res) => { + res.json(await riskStore.getReviewPending()) }) -router.get('/risks', requireAuth, authorize('reader'), (req, res) => { +router.get('/risks', requireAuth, authorize('reader'), async (req, res) => { const { category, status, entity } = req.query - res.json(riskStore.getAll({ category, status, entity })) + res.json(await riskStore.getAll({ category, status, entity })) }) -router.get('/risks/:id', requireAuth, authorize('reader'), (req, res) => { - const r = riskStore.getById(req.params.id) +router.get('/risks/:id', requireAuth, authorize('reader'), async (req, res) => { + const r = await riskStore.getById(req.params.id) if (!r) return res.status(404).json({ error: 'Not found' }) res.json(r) }) -router.post('/risks', requireAuth, authorizeAuditor, (req, res) => { - const r = riskStore.create(req.body, req.user) - require('../db/auditStore').append({ user: req.user, action: 'create', resource: 'risk', resourceId: r?.id, detail: req.body.title || '' }) - embeddingStore.indexDoc(r, 
'Risiko', '#risks').catch(() => {}) +router.post('/risks', requireAuth, authorizeAuditor, async (req, res) => { + const r = await riskStore.create(req.body, req.user) + await require('../db/auditStore').append({ user: req.user, action: 'create', resource: 'risk', resourceId: r?.id, detail: req.body.title || '' }) + await embeddingStore.indexDoc(r, 'Risiko', '#risks') res.status(201).json(r) }) -router.put('/risks/:id', requireAuth, (req, res) => { - const existing = riskStore.getById(req.params.id) +router.put('/risks/:id', requireAuth, async (req, res) => { + const existing = await riskStore.getById(req.params.id) if (!existing) return res.status(404).json({ error: 'Not found' }) const canManageRisk = (req.roleRank || 0) >= 3 const isOwner = existing.owner && existing.owner === req.user if (!canManageRisk && !isOwner) { return res.status(403).json({ error: 'Mindestens auditor- oder contentowner-Rolle oder eingetragener Owner erforderlich' }) } - const r = riskStore.update(req.params.id, req.body) - require('../db/auditStore').append({ user: req.user, action: 'update', resource: 'risk', resourceId: req.params.id, detail: existing.title || '' }) - embeddingStore.indexDoc(r, 'Risiko', '#risks').catch(() => {}) + const r = await riskStore.update(req.params.id, req.body) + await require('../db/auditStore').append({ user: req.user, action: 'update', resource: 'risk', resourceId: req.params.id, detail: existing.title || '' }) + await embeddingStore.indexDoc(r, 'Risiko', '#risks') res.json(r) }) -router.delete('/risks/:id', requireAuth, authorize('admin'), (req, res) => { - const ok = riskStore.delete(req.params.id, req.user) +router.delete('/risks/:id', requireAuth, authorize('admin'), async (req, res) => { + const ok = await riskStore.delete(req.params.id, req.user) if (!ok) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'risk', resourceId: req.params.id }) + await 
require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'risk', resourceId: req.params.id }) res.json({ deleted: true }) }) -router.delete('/risks/:id/permanent', requireAuth, authorize('admin'), (req, res) => { - const ok = riskStore.permanentDelete(req.params.id) +router.delete('/risks/:id/permanent', requireAuth, authorize('admin'), async (req, res) => { + const ok = await riskStore.permanentDelete(req.params.id) if (!ok) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'risk', resourceId: req.params.id }) - embeddingStore.removeDoc(req.params.id) + await require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'risk', resourceId: req.params.id }) + await embeddingStore.removeDoc(req.params.id) res.json({ deleted: true, permanent: true }) }) -router.post('/risks/:id/restore', requireAuth, authorize('admin'), (req, res) => { - const item = riskStore.restore(req.params.id) +router.post('/risks/:id/restore', requireAuth, authorize('admin'), async (req, res) => { + const item = await riskStore.restore(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'risk', resourceId: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'risk', resourceId: req.params.id }) res.json(item) }) // ── Review-Queue ── -router.post('/risks/:id/approve', requireAuth, authorizeAuditor, (req, res) => { - const r = riskStore.approve(req.params.id, req.user) +router.post('/risks/:id/approve', requireAuth, authorizeAuditor, async (req, res) => { + const r = await riskStore.approve(req.params.id, req.user) if (!r) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'approve', resource: 'risk', resourceId: r.id, 
detail: r.title }) + await require('../db/auditStore').append({ user: req.user, action: 'approve', resource: 'risk', resourceId: r.id, detail: r.title }) res.json(r) }) -router.post('/risks/:id/treatments', requireAuth, authorizeAuditor, (req, res) => { - const tp = riskStore.addTreatment(req.params.id, req.body, req.user) +router.post('/risks/:id/treatments', requireAuth, authorizeAuditor, async (req, res) => { + const tp = await riskStore.addTreatment(req.params.id, req.body, req.user) if (!tp) return res.status(404).json({ error: 'Risk not found' }) res.status(201).json(tp) }) -router.put('/risks/:id/treatments/:tpId', requireAuth, authorizeAuditor, (req, res) => { - const tp = riskStore.updateTreatment(req.params.id, req.params.tpId, req.body) +router.put('/risks/:id/treatments/:tpId', requireAuth, authorizeAuditor, async (req, res) => { + const tp = await riskStore.updateTreatment(req.params.id, req.params.tpId, req.body) if (!tp) return res.status(404).json({ error: 'Not found' }) res.json(tp) }) -router.delete('/risks/:id/treatments/:tpId', requireAuth, authorizeAuditor, (req, res) => { - const ok = riskStore.deleteTreatment(req.params.id, req.params.tpId) +router.delete('/risks/:id/treatments/:tpId', requireAuth, authorizeAuditor, async (req, res) => { + const ok = await riskStore.deleteTreatment(req.params.id, req.params.tpId) if (!ok) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'risk_treatment', resourceId: req.params.tpId, detail: `riskId: ${req.params.id}` }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'risk_treatment', resourceId: req.params.tpId, detail: `riskId: ${req.params.id}` }) res.json({ deleted: true }) }) diff --git a/server/routes/scanImport.js b/server/routes/scanImport.js index d05157b..9e15ad4 100644 --- a/server/routes/scanImport.js +++ b/server/routes/scanImport.js @@ -81,7 +81,7 @@ 
router.post('/admin/scan-import/upload', // Status-Datei aktualisieren saveState({ lastImport: new Date().toISOString(), ...result, scanRef, parseMethod, importedBy: req.user }) - audit.append({ + await audit.append({ user: req.user, action: 'scan_import', resource: 'risk', diff --git a/server/routes/soa.js b/server/routes/soa.js index 45562c9..1e47a70 100644 --- a/server/routes/soa.js +++ b/server/routes/soa.js @@ -14,25 +14,25 @@ const storage = require('../storage') const DATA_DIR = process.env.DATA_DIR || path.join(__dirname, '../../data') // Verfügbare Frameworks auflisten -router.get('/soa/frameworks', requireAuth, authorize('reader'), (req, res) => { - const activeFw = orgSettingsStore.get().soaFrameworks || {} - const all = soaStore.getFrameworks() +router.get('/soa/frameworks', requireAuth, authorize('reader'), async (req, res) => { + const activeFw = (await orgSettingsStore.get()).soaFrameworks || {} + const all = await soaStore.getFrameworks() const hasConfig = Object.values(activeFw).some(v => v === false) res.json(hasConfig ? 
all.filter(fw => activeFw[fw.id] !== false) : all) }) // Alle Controls -router.get('/soa', requireAuth, authorize('reader'), (req, res) => { +router.get('/soa', requireAuth, authorize('reader'), async (req, res) => { const { framework, theme } = req.query - res.json(soaStore.getAll({ framework, theme })) + res.json(await soaStore.getAll({ framework, theme })) }) // Zusammenfassung -router.get('/soa/summary', requireAuth, authorize('reader'), (req, res) => { +router.get('/soa/summary', requireAuth, authorize('reader'), async (req, res) => { const { framework } = req.query - const activeFw = orgSettingsStore.get().soaFrameworks || {} + const activeFw = (await orgSettingsStore.get()).soaFrameworks || {} const hasConfig = Object.values(activeFw).some(v => v === false) - const full = soaStore.getSummary(framework || null) + const full = await soaStore.getSummary(framework || null) if (!framework && hasConfig) { const filtered = {} for (const [k, v] of Object.entries(full)) { @@ -44,24 +44,24 @@ router.get('/soa/summary', requireAuth, authorize('reader'), (req, res) => { }) // Framework-Selektion lesen -router.get('/admin/soa-frameworks', requireAuth, authorize('reader'), (req, res) => { - res.json(orgSettingsStore.get().soaFrameworks || {}) +router.get('/admin/soa-frameworks', requireAuth, authorize('reader'), async (req, res) => { + res.json((await orgSettingsStore.get()).soaFrameworks || {}) }) // Framework-Selektion speichern -router.put('/admin/soa-frameworks', requireAuth, authorize('admin'), (req, res) => { - const updated = orgSettingsStore.update({ soaFrameworks: req.body }) - auditStore.append({ user: req.user, action: 'settings', resource: 'soa-frameworks', detail: 'SoA Framework-Auswahl aktualisiert' }) +router.put('/admin/soa-frameworks', requireAuth, authorize('admin'), async (req, res) => { + const updated = await orgSettingsStore.update({ soaFrameworks: req.body }) + await auditStore.append({ user: req.user, action: 'settings', resource: 'soa-frameworks', 
detail: 'SoA Framework-Auswahl aktualisiert' }) res.json(updated.soaFrameworks) }) // Einzelnen Control aktualisieren -router.put('/soa/:id', requireAuth, authorize('editor'), (req, res) => { +router.put('/soa/:id', requireAuth, authorize('editor'), async (req, res) => { const { id } = req.params const { applicable, status, owner, justification, linkedTemplates, applicableEntities } = req.body if (Array.isArray(linkedTemplates)) { - const existing = soaStore.getById(id) + const existing = await soaStore.getById(id) if (existing) { const prevTemplates = existing.linkedTemplates || [] const added = linkedTemplates.filter(t => !prevTemplates.includes(t)) @@ -77,26 +77,26 @@ router.put('/soa/:id', requireAuth, authorize('editor'), (req, res) => { } } - const updated = soaStore.update(id, { applicable, status, owner, justification, linkedTemplates, applicableEntities }, { changedBy: req.user }) + const updated = await soaStore.update(id, { applicable, status, owner, justification, linkedTemplates, applicableEntities }, { changedBy: req.user }) if (!updated) return res.status(404).json({ error: 'Control not found' }) res.json(updated) }) // Cross-Mapping: alle Gruppen -router.get('/soa/crossmap', requireAuth, authorize('reader'), (req, res) => { - res.json(crossmapStore.getAll()) +router.get('/soa/crossmap', requireAuth, authorize('reader'), async (req, res) => { + res.json(await crossmapStore.getAll()) }) // Cross-Mapping: verwandte Controls -router.get('/soa/:id/crossmap', requireAuth, authorize('reader'), (req, res) => { +router.get('/soa/:id/crossmap', requireAuth, authorize('reader'), async (req, res) => { const { id } = req.params - res.json(crossmapStore.getRelated(id)) + res.json(await crossmapStore.getRelated(id)) }) // JSON-Export -router.get('/soa/export', requireAuth, authorize('reader'), (req, res) => { - const all = soaStore.getAll() - const summary = soaStore.getSummary() +router.get('/soa/export', requireAuth, authorize('reader'), async (req, res) => { + 
const all = await soaStore.getAll() + const summary = await soaStore.getSummary() res.setHeader('Content-Disposition', 'attachment; filename="soa-export.json"') res.json({ exportedAt: new Date().toISOString(), summary, controls: all }) }) @@ -123,7 +123,7 @@ router.get('/soa/import-controls/status', requireAuth, authorize('reader'), (req }) // Import: write iso-controls.json and reload the store -router.post('/soa/import-controls', requireAuth, authorize('admin'), express.json({ limit: '10mb' }), (req, res) => { +router.post('/soa/import-controls', requireAuth, authorize('admin'), express.json({ limit: '10mb' }), async (req, res) => { const controls = req.body if (!Array.isArray(controls)) return res.status(400).json({ error: 'Expected JSON array of controls' }) // Validate: each must have id, theme, title, framework @@ -131,38 +131,38 @@ router.post('/soa/import-controls', requireAuth, authorize('admin'), express.jso if (valid.length === 0) return res.status(400).json({ error: 'No valid controls found' }) const isoFile = path.join(DATA_DIR, 'iso-controls.json') fs.writeFileSync(isoFile, JSON.stringify(valid, null, 2)) - soaStore.init() // reload - auditStore.append({ user: req.user, action: 'import', resource: 'iso-controls', detail: `Imported ${valid.length} ISO controls` }) + await soaStore.init() // reload + await auditStore.append({ user: req.user, action: 'import', resource: 'iso-controls', detail: `Imported ${valid.length} ISO controls` }) res.json({ imported: valid.length }) }) // ── Custom Controls ─────────────────────────────────────────────────────────── -router.post('/soa/custom', requireAuth, authorize('contentowner'), (req, res) => { +router.post('/soa/custom', requireAuth, authorize('contentowner'), async (req, res) => { try { - const ctrl = soaStore.createCustomControl(req.body, { changedBy: req.user }) - auditStore.append({ user: req.user, action: 'create', resource: 'custom-control', detail: ctrl.title }) + const ctrl = await 
soaStore.createCustomControl(req.body, { changedBy: req.user }) + await auditStore.append({ user: req.user, action: 'create', resource: 'custom-control', detail: ctrl.title }) res.status(201).json(ctrl) } catch (e) { res.status(400).json({ error: e.message }) } }) -router.put('/soa/custom/:id', requireAuth, authorize('contentowner'), (req, res) => { - const updated = soaStore.updateCustomControl(req.params.id, req.body, { changedBy: req.user }) +router.put('/soa/custom/:id', requireAuth, authorize('contentowner'), async (req, res) => { + const updated = await soaStore.updateCustomControl(req.params.id, req.body, { changedBy: req.user }) if (!updated) return res.status(404).json({ error: 'Not found or not a custom control' }) - auditStore.append({ user: req.user, action: 'update', resource: 'custom-control', detail: updated.title }) + await auditStore.append({ user: req.user, action: 'update', resource: 'custom-control', detail: updated.title }) res.json(updated) }) -router.delete('/soa/custom/:id', requireAuth, authorize('contentowner'), (req, res) => { - const result = soaStore.deleteCustomControl(req.params.id) +router.delete('/soa/custom/:id', requireAuth, authorize('contentowner'), async (req, res) => { + const result = await soaStore.deleteCustomControl(req.params.id) if (!result.ok) { if (result.reason === 'not_found') return res.status(404).json({ error: 'Not found' }) if (result.reason === 'not_custom') return res.status(403).json({ error: 'Cannot delete built-in controls' }) if (result.reason === 'has_links') return res.status(409).json({ error: 'Control is linked to templates — unlink first' }) } - auditStore.append({ user: req.user, action: 'delete', resource: 'custom-control', detail: req.params.id }) + await auditStore.append({ user: req.user, action: 'delete', resource: 'custom-control', detail: req.params.id }) res.json({ ok: true }) }) diff --git a/server/routes/suppliers.js b/server/routes/suppliers.js index dce2c85..81a872f 100644 --- 
a/server/routes/suppliers.js +++ b/server/routes/suppliers.js @@ -6,71 +6,71 @@ const { requireAuth, authorize } = require('../auth') const supplierStore = require('../db/supplierStore') const embeddingStore = require('../ai/embeddingStore') -router.get('/suppliers/summary', requireAuth, authorize('reader'), (req, res) => { - res.json(supplierStore.getSummary()) +router.get('/suppliers/summary', requireAuth, authorize('reader'), async (req, res) => { + res.json(await supplierStore.getSummary()) }) -router.get('/suppliers', requireAuth, authorize('reader'), (req, res) => { - res.json(supplierStore.getAll(req.query)) +router.get('/suppliers', requireAuth, authorize('reader'), async (req, res) => { + res.json(await supplierStore.getAll(req.query)) }) -router.get('/suppliers/:id', requireAuth, authorize('reader'), (req, res) => { - const s = supplierStore.getById(req.params.id) +router.get('/suppliers/:id', requireAuth, authorize('reader'), async (req, res) => { + const s = await supplierStore.getById(req.params.id) if (!s) return res.status(404).json({ error: 'Not found' }) res.json(s) }) -router.post('/suppliers', requireAuth, authorize('editor'), (req, res) => { +router.post('/suppliers', requireAuth, authorize('editor'), async (req, res) => { try { - const item = supplierStore.create(req.body, { createdBy: req.user }) - require('../db/auditStore').append({ user: req.user, action: 'create', resource: 'supplier', detail: item.name }) - embeddingStore.indexDoc({ ...item, title: item.name }, 'Lieferant', '#suppliers').catch(() => {}) + const item = await supplierStore.create(req.body, { createdBy: req.user }) + await require('../db/auditStore').append({ user: req.user, action: 'create', resource: 'supplier', detail: item.name }) + await embeddingStore.indexDoc({ ...item, title: item.name }, 'Lieferant', '#suppliers') res.status(201).json(item) } catch (e) { res.status(500).json({ error: e.message }) } }) -router.put('/suppliers/:id', requireAuth, authorize('editor'), 
(req, res) => { +router.put('/suppliers/:id', requireAuth, authorize('editor'), async (req, res) => { try { - const updated = supplierStore.update(req.params.id, req.body, { changedBy: req.user }) + const updated = await supplierStore.update(req.params.id, req.body, { changedBy: req.user }) if (!updated) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'update', resource: 'supplier', detail: updated.name }) - embeddingStore.indexDoc({ ...updated, title: updated.name }, 'Lieferant', '#suppliers').catch(() => {}) + await require('../db/auditStore').append({ user: req.user, action: 'update', resource: 'supplier', detail: updated.name }) + await embeddingStore.indexDoc({ ...updated, title: updated.name }, 'Lieferant', '#suppliers') res.json(updated) } catch (e) { res.status(500).json({ error: e.message }) } }) -router.delete('/suppliers/:id/permanent', requireAuth, authorize('admin'), (req, res) => { +router.delete('/suppliers/:id/permanent', requireAuth, authorize('admin'), async (req, res) => { try { - const ok = supplierStore.permanentDelete(req.params.id) + const ok = await supplierStore.permanentDelete(req.params.id) if (!ok) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'supplier', detail: req.params.id }) - embeddingStore.removeDoc(req.params.id) + await require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'supplier', detail: req.params.id }) + await embeddingStore.removeDoc(req.params.id) res.json({ ok: true }) } catch (e) { res.status(500).json({ error: e.message }) } }) -router.post('/suppliers/:id/restore', requireAuth, authorize('admin'), (req, res) => { +router.post('/suppliers/:id/restore', requireAuth, authorize('admin'), async (req, res) => { try { - const item = supplierStore.restore(req.params.id) + const item = await 
supplierStore.restore(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'supplier', detail: item.name }) + await require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'supplier', detail: item.name }) res.json(item) } catch (e) { res.status(500).json({ error: e.message }) } }) -router.delete('/suppliers/:id', requireAuth, authorize('admin'), (req, res) => { +router.delete('/suppliers/:id', requireAuth, authorize('admin'), async (req, res) => { try { - const ok = supplierStore.remove(req.params.id, { deletedBy: req.user }) + const ok = await supplierStore.remove(req.params.id, { deletedBy: req.user }) if (!ok) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'supplier', detail: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'supplier', detail: req.params.id }) res.json({ ok: true }) } catch (e) { res.status(500).json({ error: e.message }) diff --git a/server/routes/training.js b/server/routes/training.js index d66c8e2..75c6ffa 100644 --- a/server/routes/training.js +++ b/server/routes/training.js @@ -6,46 +6,46 @@ const { requireAuth, authorize } = require('../auth') const trainingStore = require('../db/trainingStore') const embeddingStore = require('../ai/embeddingStore') -router.get('/training/summary', requireAuth, authorize('reader'), (req, res) => { - res.json(trainingStore.getSummary()) +router.get('/training/summary', requireAuth, authorize('reader'), async (req, res) => { + res.json(await trainingStore.getSummary()) }) -router.get('/training', requireAuth, authorize('reader'), (req, res) => { +router.get('/training', requireAuth, authorize('reader'), async (req, res) => { const { status, category, entity } = req.query - res.json(trainingStore.getAll({ status, category, entity })) 
+ res.json(await trainingStore.getAll({ status, category, entity })) }) -router.get('/training/:id', requireAuth, authorize('reader'), (req, res) => { - const item = trainingStore.getById(req.params.id) +router.get('/training/:id', requireAuth, authorize('reader'), async (req, res) => { + const item = await trainingStore.getById(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) res.json(item) }) -router.post('/training', requireAuth, authorize('editor'), (req, res) => { - const item = trainingStore.create(req.body, req.user) - embeddingStore.indexDoc(item, 'Schulung', '#training').catch(() => {}) +router.post('/training', requireAuth, authorize('editor'), async (req, res) => { + const item = await trainingStore.create(req.body, req.user) + await embeddingStore.indexDoc(item, 'Schulung', '#training') res.status(201).json(item) }) -router.put('/training/:id', requireAuth, authorize('editor'), (req, res) => { - const item = trainingStore.update(req.params.id, req.body) +router.put('/training/:id', requireAuth, authorize('editor'), async (req, res) => { + const item = await trainingStore.update(req.params.id, req.body) if (!item) return res.status(404).json({ error: 'Not found' }) - embeddingStore.indexDoc(item, 'Schulung', '#training').catch(() => {}) + await embeddingStore.indexDoc(item, 'Schulung', '#training') res.json(item) }) -router.delete('/training/:id', requireAuth, authorize('admin'), (req, res) => { - const ok = trainingStore.delete(req.params.id, req.user) +router.delete('/training/:id', requireAuth, authorize('admin'), async (req, res) => { + const ok = await trainingStore.delete(req.params.id, req.user) if (!ok) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'training', resourceId: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'delete', resource: 'training', resourceId: req.params.id }) res.json({ 
deleted: true }) }) -router.delete('/training/:id/permanent', requireAuth, authorize('admin'), (req, res) => { - const ok = trainingStore.permanentDelete(req.params.id) +router.delete('/training/:id/permanent', requireAuth, authorize('admin'), async (req, res) => { + const ok = await trainingStore.permanentDelete(req.params.id) if (!ok) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'training', resourceId: req.params.id }) - embeddingStore.removeDoc(req.params.id) + await require('../db/auditStore').append({ user: req.user, action: 'permanent_delete', resource: 'training', resourceId: req.params.id }) + await embeddingStore.removeDoc(req.params.id) res.json({ deleted: true, permanent: true }) }) -router.post('/training/:id/restore', requireAuth, authorize('admin'), (req, res) => { - const item = trainingStore.restore(req.params.id) +router.post('/training/:id/restore', requireAuth, authorize('admin'), async (req, res) => { + const item = await trainingStore.restore(req.params.id) if (!item) return res.status(404).json({ error: 'Not found' }) - require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'training', resourceId: req.params.id }) + await require('../db/auditStore').append({ user: req.user, action: 'restore', resource: 'training', resourceId: req.params.id }) res.json(item) }) diff --git a/server/routes/trash.js b/server/routes/trash.js index 198ca3e..3448883 100644 --- a/server/routes/trash.js +++ b/server/routes/trash.js @@ -5,7 +5,7 @@ const router = express.Router() const { requireAuth, authorize } = require('../auth') const storage = require('../storage') -router.get('/trash', requireAuth, authorize('admin'), (req, res) => { +router.get('/trash', requireAuth, authorize('admin'), async (req, res) => { const riskStore = require('../db/riskStore') const goalsStore = require('../db/goalsStore') const guidanceStore = 
require('../db/guidanceStore') @@ -18,7 +18,7 @@ router.get('/trash', requireAuth, authorize('admin'), (req, res) => { // Templates try { - const deletedTmpl = storage.getDeletedTemplates?.() || [] + const deletedTmpl = await storage.getDeletedTemplates?.() || [] deletedTmpl.forEach(t => items.push({ module: 'template', moduleLabel: 'Template', id: t.id, title: t.title || t.id, @@ -30,7 +30,7 @@ router.get('/trash', requireAuth, authorize('admin'), (req, res) => { // Risks try { - const deletedRisks = riskStore.getDeleted?.() || [] + const deletedRisks = await riskStore.getDeleted?.() || [] deletedRisks.forEach(r => items.push({ module: 'risk', moduleLabel: 'Risiko', id: r.id, title: r.title || r.id, @@ -42,7 +42,7 @@ router.get('/trash', requireAuth, authorize('admin'), (req, res) => { // Goals try { - const deletedGoals = goalsStore.getDeleted?.() || [] + const deletedGoals = await goalsStore.getDeleted?.() || [] deletedGoals.forEach(g => items.push({ module: 'goal', moduleLabel: 'Sicherheitsziel', id: g.id, title: g.title || g.id, @@ -54,7 +54,7 @@ router.get('/trash', requireAuth, authorize('admin'), (req, res) => { // Guidance try { - const deletedGuidance = guidanceStore.getDeleted?.() || [] + const deletedGuidance = await guidanceStore.getDeleted?.() || [] deletedGuidance.forEach(d => items.push({ module: 'guidance', moduleLabel: 'Guidance-Dokument', id: d.id, title: d.title || d.id, @@ -66,7 +66,7 @@ router.get('/trash', requireAuth, authorize('admin'), (req, res) => { // Training try { - const deletedTraining = trainingStore.getDeleted?.() || [] + const deletedTraining = await trainingStore.getDeleted?.() || [] deletedTraining.forEach(t => items.push({ module: 'training', moduleLabel: 'Schulung', id: t.id, title: t.title || t.id, @@ -78,7 +78,7 @@ router.get('/trash', requireAuth, authorize('admin'), (req, res) => { // Legal: Contracts try { - const deletedContracts = legalStore.contracts.getDeleted?.() || [] + const deletedContracts = await 
legalStore.contracts.getDeleted?.() || [] deletedContracts.forEach(c => items.push({ module: 'legal_contract', moduleLabel: 'Vertrag', id: c.id, title: c.title || c.id, @@ -90,7 +90,7 @@ router.get('/trash', requireAuth, authorize('admin'), (req, res) => { // Legal: NDAs try { - const deletedNdas = legalStore.ndas.getDeleted?.() || [] + const deletedNdas = await legalStore.ndas.getDeleted?.() || [] deletedNdas.forEach(n => items.push({ module: 'legal_nda', moduleLabel: 'NDA', id: n.id, title: n.title || n.id, @@ -102,7 +102,7 @@ router.get('/trash', requireAuth, authorize('admin'), (req, res) => { // Legal: Privacy Policies try { - const deletedPolicies = legalStore.privacyPolicies.getDeleted?.() || [] + const deletedPolicies = await legalStore.privacyPolicies.getDeleted?.() || [] deletedPolicies.forEach(p => items.push({ module: 'legal_policy', moduleLabel: 'Datenschutzrichtlinie', id: p.id, title: p.title || p.id, @@ -114,7 +114,7 @@ router.get('/trash', requireAuth, authorize('admin'), (req, res) => { // GDPR: VVT try { - const deletedVvt = gdprStore.vvt.getDeleted?.() || [] + const deletedVvt = await gdprStore.vvt.getDeleted?.() || [] deletedVvt.forEach(v => items.push({ module: 'gdpr_vvt', moduleLabel: 'VVT-Eintrag', id: v.id, title: v.title || v.id, @@ -126,7 +126,7 @@ router.get('/trash', requireAuth, authorize('admin'), (req, res) => { // GDPR: AV try { - const deletedAv = gdprStore.av.getDeleted?.() || [] + const deletedAv = await gdprStore.av.getDeleted?.() || [] deletedAv.forEach(a => items.push({ module: 'gdpr_av', moduleLabel: 'AV-Vertrag', id: a.id, title: a.title || a.id, @@ -138,7 +138,7 @@ router.get('/trash', requireAuth, authorize('admin'), (req, res) => { // GDPR: DSFA try { - const deletedDsfa = gdprStore.dsfa.getDeleted?.() || [] + const deletedDsfa = await gdprStore.dsfa.getDeleted?.() || [] deletedDsfa.forEach(d => items.push({ module: 'gdpr_dsfa', moduleLabel: 'DSFA', id: d.id, title: d.title || d.id, @@ -150,7 +150,7 @@ 
router.get('/trash', requireAuth, authorize('admin'), (req, res) => { // GDPR: Incidents try { - const deletedIncidents = gdprStore.incidents.getDeleted?.() || [] + const deletedIncidents = await gdprStore.incidents.getDeleted?.() || [] deletedIncidents.forEach(i => items.push({ module: 'gdpr_incident', moduleLabel: 'GDPR-Datenpanne', id: i.id, title: i.title || i.id, @@ -162,7 +162,7 @@ router.get('/trash', requireAuth, authorize('admin'), (req, res) => { // GDPR: DSAR try { - const deletedDsar = gdprStore.dsar.getDeleted?.() || [] + const deletedDsar = await gdprStore.dsar.getDeleted?.() || [] deletedDsar.forEach(d => items.push({ module: 'gdpr_dsar', moduleLabel: 'DSAR-Anfrage', id: d.id, title: `${d.requestType}: ${d.dataSubjectName || d.id}`, @@ -174,7 +174,7 @@ router.get('/trash', requireAuth, authorize('admin'), (req, res) => { // GDPR: TOMs try { - const deletedToms = gdprStore.toms.getDeleted?.() || [] + const deletedToms = await gdprStore.toms.getDeleted?.() || [] deletedToms.forEach(t => items.push({ module: 'gdpr_toms', moduleLabel: 'TOM', id: t.id, title: t.title || t.id, @@ -186,7 +186,7 @@ router.get('/trash', requireAuth, authorize('admin'), (req, res) => { // Public Incidents try { - const deletedPub = pubStore.getDeleted?.() || [] + const deletedPub = await pubStore.getDeleted?.() || [] deletedPub.forEach(i => items.push({ module: 'public_incident', moduleLabel: 'Öff. 
Vorfall-Meldung', id: i.id, title: `${i.refNumber}: ${i.incidentType}`, @@ -199,7 +199,7 @@ router.get('/trash', requireAuth, authorize('admin'), (req, res) => { // Suppliers try { const supplierStore = require('../db/supplierStore') - const deletedSuppliers = supplierStore.getDeleted?.() || [] + const deletedSuppliers = await supplierStore.getDeleted?.() || [] deletedSuppliers.forEach(s => items.push({ module: 'supplier', moduleLabel: 'Lieferant', id: s.id, title: s.name || s.id, @@ -212,7 +212,7 @@ router.get('/trash', requireAuth, authorize('admin'), (req, res) => { // Findings try { const findingStore = require('../db/findingStore') - const deletedFindings = findingStore.getDeleted?.() || [] + const deletedFindings = await findingStore.getDeleted?.() || [] deletedFindings.forEach(f => items.push({ module: 'finding', moduleLabel: 'Audit-Feststellung', id: f.id, title: `${f.ref}: ${f.title || f.id}`, diff --git a/server/storage.js b/server/storage.js index 975e956..79f93cc 100644 --- a/server/storage.js +++ b/server/storage.js @@ -5,7 +5,22 @@ const backend = (process.env.STORAGE_BACKEND || 'json').toLowerCase() let store = null -if (backend === 'mariadb' || backend === 'mysql') { +if (backend !== 'json') { + try { + const knexTemplateStore = require('./db/stores/templateStore') + knexTemplateStore.init().then(() => { + console.log('[storage] Backend:', backend, '— Knex template store ready') + }).catch(e => { + console.error('[storage] Knex template store init failed:', e.message) + }) + store = knexTemplateStore + console.log('[storage] Backend:', backend, '(Knex)') + } catch (e) { + console.warn('[storage] Knex template store failed to load. 
Falling back to legacy store.', e.message) + } +} + +if (!store && (backend === 'mariadb' || backend === 'mysql')) { try { const mariadbStore = require('./db/mariadbStore') mariadbStore.init().then(() => { diff --git a/tests/acknowledgements.test.js b/tests/acknowledgements.test.js index 14cf9f9..ff7bb17 100644 --- a/tests/acknowledgements.test.js +++ b/tests/acknowledgements.test.js @@ -6,9 +6,15 @@ const path = require('path') let app, request, dataDir -beforeAll(() => { +beforeAll(async () => { dataDir = createTestDataDir() process.env.DATA_DIR = dataDir + process.env.JWT_SECRET = 'jest-test-ack' + process.env.NODE_ENV = 'test' + if (process.env.STORAGE_BACKEND && process.env.STORAGE_BACKEND !== 'json') { + const knexDb = require('../server/db/knexDatabase') + await knexDb.init() + } app = require('../server/index') request = require('supertest') }) diff --git a/tests/findings.test.js b/tests/findings.test.js index 226d8a0..d861bf9 100644 --- a/tests/findings.test.js +++ b/tests/findings.test.js @@ -4,9 +4,15 @@ const { createTestDataDir, removeTestDataDir } = require('./setup/testEnv') let app, request, dataDir -beforeAll(() => { +beforeAll(async () => { dataDir = createTestDataDir() process.env.DATA_DIR = dataDir + process.env.JWT_SECRET = 'jest-test-findings' + process.env.NODE_ENV = 'test' + if (process.env.STORAGE_BACKEND && process.env.STORAGE_BACKEND !== 'json') { + const knexDb = require('../server/db/knexDatabase') + await knexDb.init() + } app = require('../server/index') request = require('supertest') }) diff --git a/tools/migrate-json-to-knex.js b/tools/migrate-json-to-knex.js new file mode 100644 index 0000000..078d791 --- /dev/null +++ b/tools/migrate-json-to-knex.js @@ -0,0 +1,438 @@ +#!/usr/bin/env node +'use strict' + +const path = require('path') +const fs = require('fs') + +const DATA_DIR = path.resolve(process.env.DATA_DIR || path.join(__dirname, '../data')) + +require('dotenv').config({ path: path.join(__dirname, '../.env') }) + +const { 
init, getDb, destroy } = require('../server/db/knexDatabase') + +const gdprDir = path.join(DATA_DIR, 'gdpr') + +function readJson(file, fallback) { + const p = path.join(DATA_DIR, file) + if (!fs.existsSync(p)) return fallback !== undefined ? fallback : [] + try { return JSON.parse(fs.readFileSync(p, 'utf8')) } catch { return fallback !== undefined ? fallback : [] } +} + +function readGdpr(file) { return readJson(path.join('gdpr', file)) } + +function arr(v) { return JSON.stringify(Array.isArray(v) ? v : []) } +function str(v) { return v != null ? String(v) : '' } +function num(v) { return v != null ? Number(v) : null } +function now() { return new Date().toISOString() } +function jstr(v) { return v != null ? JSON.stringify(v) : '' } + +let migrated = 0 +let skipped = 0 +let errors = 0 + +async function batchInsert(knex, table, rows, label) { + if (!rows.length) { console.log(` ${label}: 0 records – skipped`); return } + console.log(` ${label}: ${rows.length} records`) + for (const row of rows) { + try { + const exists = await knex(table).where({ id: row.id }).first() + if (exists) { skipped++; continue } + await knex(table).insert(row) + migrated++ + } catch (e) { + if (e.message?.includes('duplicate') || e.message?.includes('UNIQUE') || e.code === 'ER_DUP_ENTRY') { + skipped++ + } else { + errors++ + console.error(` ERROR ${table}/${row.id}: ${e.message?.slice(0, 120)}`) + } + } + } +} + +async function batchInsertKey(knex, table, rows, key, label) { + if (!rows.length) { console.log(` ${label}: 0 records – skipped`); return } + console.log(` ${label}: ${rows.length} records`) + for (const row of rows) { + try { + const exists = await knex(table).where(key(row)).first() + if (exists) { skipped++; continue } + await knex(table).insert(row) + migrated++ + } catch (e) { + if (e.message?.includes('duplicate') || e.message?.includes('UNIQUE') || e.code === 'ER_DUP_ENTRY') { + skipped++ + } else { + errors++ + console.error(` ERROR ${table}: ${e.message?.slice(0, 
120)}`) + } + } + } +} + +async function main() { + console.log(`Data dir: ${DATA_DIR}`) + console.log(`Initializing Knex (${process.env.STORAGE_BACKEND || 'sqlite'})…`) + const knex = await init() + console.log('Schema ready.\n') + + try { + // ── Templates ───────────────────────────────────────────────────────────── + const templates = readJson('templates.json', []) + await batchInsert(knex, 'templates', templates.map(t => ({ + id: t.id, + type: t.type || 'policy', + language: t.language || 'de', + title: str(t.title), + content: str(t.content), + version: t.version || 1, + status: t.status || 'draft', + owner: t.owner || null, + next_review_date: t.reviewDate || t.nextReviewDate || null, + parent_id: t.parentId || null, + sort_order: t.sortOrder || 0, + created_at: t.createdAt || now(), + updated_at: t.updatedAt || now(), + linked_controls: arr(t.linkedControls), + applicable_entities: arr(t.applicableEntities), + attachments: arr(t.attachments), + history: arr(t.history), + status_history: arr(t.statusHistory), + deleted_at: t.deletedAt || null, + deleted_by: t.deletedBy || null, + })), 'Templates') + + // ── Training ────────────────────────────────────────────────────────────── + const training = readJson('training.json', []) + await batchInsert(knex, 'training', training.map(t => ({ + id: t.id, + title: str(t.title), + description: str(t.description), + category: str(t.category), + status: str(t.status), + due_date: t.dueDate || null, + completed_date: t.completedDate || null, + instructor: str(t.instructor), + assignees: str(t.assignees), + applicable_entities: arr(t.applicableEntities), + evidence: str(t.evidence), + mandatory: t.mandatory ? 
1 : 0, + created_by: str(t.createdBy), + created_at: t.createdAt || now(), + updated_at: t.updatedAt || now(), + deleted_at: t.deletedAt || null, + })), 'Training') + + // ── Entities ────────────────────────────────────────────────────────────── + const entities = readJson('entities.json', []) + await batchInsert(knex, 'entities', entities.map(e => ({ + id: e.id, + name: str(e.name), + short: str(e.short || e.shortCode || ''), + type: str(e.type), + parent_id: e.parent || e.parentId || null, + created_at: e.createdAt || now(), + updated_at: e.updatedAt || now(), + })), 'Entities') + + // ── Risks ───────────────────────────────────────────────────────────────── + const risks = readJson('risks.json', []) + await batchInsert(knex, 'risks', risks.map(r => ({ + id: r.id, + title: str(r.title), + description: str(r.description), + category: str(r.category), + likelihood: num(r.probability || r.likelihood) || 2, + impact: num(r.impact) || 2, + risk_score: num(r.riskScore) || (num(r.probability || r.likelihood || 2) * num(r.impact || 2)), + status: str(r.status), + owner: str(r.owner), + applicable_entities: arr(r.applicableEntities), + treatments: jstr(r.treatmentPlans || r.treatments) || '[]', + created_by: str(r.createdBy), + created_at: r.createdAt || now(), + updated_at: r.updatedAt || now(), + deleted_at: r.deletedAt || null, + })), 'Risks') + + // ── Goals ───────────────────────────────────────────────────────────────── + const goals = readJson('goals.json', []) + await batchInsert(knex, 'goals', goals.map(g => ({ + id: g.id, + title: str(g.title), + description: str(g.description), + category: str(g.category || ''), + status: str(g.status), + priority: str(g.priority || 'medium'), + unit: g.unit || null, + due_date: g.dueDate || null, + review_date: g.reviewDate || null, + owner: str(g.owner), + applicable_entities: arr(g.applicableEntities), + linked_controls: arr(g.linkedControls), + created_by: str(g.createdBy), + created_at: g.createdAt || now(), + 
updated_at: g.updatedAt || now(), + deleted_at: g.deletedAt || null, + })), 'Goals') + + // ── Assets ──────────────────────────────────────────────────────────────── + const assets = readJson('assets.json', []) + await batchInsert(knex, 'assets', assets.map(a => ({ + id: a.id, + name: str(a.name), + description: str(a.description), + category: str(a.category), + classification: str(a.classification), + criticality: str(a.criticality), + owner: str(a.owner), + location: str(a.location), + eol_date: a.endOfLifeDate || a.eolDate || null, + status: str(a.status), + applicable_entities: arr(a.applicableEntities), + linked_controls: arr(a.linkedControls), + created_by: str(a.createdBy), + created_at: a.createdAt || now(), + updated_at: a.updatedAt || now(), + deleted_at: a.deletedAt || null, + })), 'Assets') + + // ── Suppliers ───────────────────────────────────────────────────────────── + const suppliers = readJson('suppliers.json', []) + await batchInsert(knex, 'suppliers', suppliers.map(s => ({ + id: s.id, + name: str(s.name), + category: str(s.category || s.type || ''), + contact: str(s.contact || s.contactName || ''), + risk_level: str(s.riskLevel || s.criticality || ''), + status: str(s.status), + contract_end: s.contractEnd || null, + next_audit: s.nextAuditDate || s.nextAudit || null, + notes: str(s.notes), + applicable_entities: arr(s.applicableEntities), + linked_controls: arr(s.linkedControls), + created_by: str(s.createdBy), + created_at: s.createdAt || now(), + updated_at: s.updatedAt || now(), + deleted_at: s.deletedAt || null, + })), 'Suppliers') + + // ── Guidance ────────────────────────────────────────────────────────────── + const guidance = readJson('guidance.json', []) + await batchInsert(knex, 'guidance', guidance.map(g => ({ + id: g.id, + title: str(g.title), + category: str(g.category), + type: str(g.type || ''), + content: str(g.content), + file_name: g.fileName || g.file_name || null, + file_type: g.fileType || g.file_type || null, + 
file_size: g.fileSize || g.file_size || null, + version: g.version || 1, + min_role: g.minRole || g.min_role || null, + linked_controls: arr(g.linkedControls || g.linked_controls), + linked_policies: arr(g.linkedPolicies || g.linked_policies), + pin_order: g.pinOrder || g.pin_order || 0, + seed_id: g.seedId || g.seed_id || null, + created_by: str(g.createdBy || g.created_by), + created_at: g.createdAt || now(), + updated_at: g.updatedAt || now(), + deleted_at: g.deletedAt || null, + deleted_by: g.deletedBy || null, + })), 'Guidance') + + // ── SOA Controls ────────────────────────────────────────────────────────── + const soaData = readJson('soa.json', {}) + const soaRows = [] + for (const [ctrlId, ctrl] of Object.entries(soaData)) { + if (!ctrl || typeof ctrl !== 'object' || !ctrl.id) continue + soaRows.push({ + id: ctrl.id || ctrlId, + framework: str(ctrl.framework || ctrlId.split('-').slice(0, 2).join('-')), + control_id: ctrlId, + title: str(ctrl.title || ctrl.name || ''), + description: str(ctrl.description || ''), + theme: str(ctrl.theme || ''), + applicable: ctrl.applicable ? 1 : 0, + status: str(ctrl.status || ''), + justification: str(ctrl.justification || ''), + evidence: jstr(ctrl.evidence) || '', + owner: str(ctrl.owner || ''), + applicable_entities: arr(ctrl.applicableEntities), + linked_templates: arr(ctrl.linkedTemplates || ctrl.templates), + updated_by: str(ctrl.updatedBy || ''), + is_custom: ctrl.isCustom ? 
1 : 0, + created_at: ctrl.createdAt || now(), + updated_at: ctrl.updatedAt || now(), + }) + } + await batchInsert(knex, 'soa_controls', soaRows, 'SOA Controls') + + // ── Findings (JSON-blob) ────────────────────────────────────────────────── + const findings = readJson('findings.json', []) + await batchInsert(knex, 'findings', findings.map(f => ({ + id: f.id, + data: JSON.stringify(f), + created_by: str(f.createdBy), + created_at: f.createdAt || now(), + updated_at: f.updatedAt || now(), + deleted_at: f.deletedAt || null, + })), 'Findings') + + // ── Public Incidents (JSON-blob) ────────────────────────────────────────── + const pubIncidents = readJson('public-incidents.json', []) + await batchInsert(knex, 'public_incidents', pubIncidents.map(i => ({ + id: i.id, + ref: str(i.refNumber || ''), + data: JSON.stringify(i), + submitted_at: i.createdAt || now(), + deleted_at: i.deletedAt || null, + })), 'Public Incidents') + + // ── Org Units (JSON-blob) ───────────────────────────────────────────────── + const orgUnits = readJson('org-units.json', []) + await batchInsert(knex, 'org_units', orgUnits.map(u => ({ + id: u.id, + data: JSON.stringify(u), + created_at: u.createdAt || now(), + updated_at: u.updatedAt || now(), + })), 'Org Units') + + // ── BCM (JSON-blob) ─────────────────────────────────────────────────────── + const bcm = readJson('bcm.json', {}) + const bcmRows = [] + for (const bia of (bcm.bia || [])) { + bcmRows.push({ id: bia.id, bcm_type: 'bia', data: JSON.stringify(bia), created_by: str(bia.createdBy), created_at: bia.createdAt || now(), updated_at: bia.updatedAt || now(), deleted_at: bia.deletedAt || null }) + } + for (const plan of (bcm.plans || [])) { + bcmRows.push({ id: plan.id, bcm_type: 'plan', data: JSON.stringify(plan), created_by: str(plan.createdBy), created_at: plan.createdAt || now(), updated_at: plan.updatedAt || now(), deleted_at: plan.deletedAt || null }) + } + for (const ex of (bcm.exercises || [])) { + bcmRows.push({ id: ex.id, 
bcm_type: 'exercise', data: JSON.stringify(ex), created_by: str(ex.createdBy), created_at: ex.createdAt || now(), updated_at: ex.updatedAt || now(), deleted_at: ex.deletedAt || null }) + } + await batchInsert(knex, 'bcm_entries', bcmRows, 'BCM') + + // ── Governance (JSON-blob) ──────────────────────────────────────────────── + const gov = readJson('governance.json', {}) + const govRows = [] + for (const r of (gov.reviews || [])) { + govRows.push({ id: r.id, gov_type: 'review', data: JSON.stringify(r), created_by: str(r.createdBy), created_at: r.createdAt || now(), updated_at: r.updatedAt || now(), deleted_at: r.deletedAt || null }) + } + for (const a of (gov.actions || [])) { + govRows.push({ id: a.id, gov_type: 'action', data: JSON.stringify(a), created_by: str(a.createdBy), created_at: a.createdAt || now(), updated_at: a.updatedAt || now(), deleted_at: a.deletedAt || null }) + } + for (const m of (gov.meetings || [])) { + govRows.push({ id: m.id, gov_type: 'meeting', data: JSON.stringify(m), created_by: str(m.createdBy), created_at: m.createdAt || now(), updated_at: m.updatedAt || now(), deleted_at: m.deletedAt || null }) + } + await batchInsert(knex, 'governance_entries', govRows, 'Governance') + + // ── GDPR (JSON-blob by type) ────────────────────────────────────────────── + const gdprMapping = [ + ['vvt', 'vvt'], + ['av', 'av'], + ['incidents', 'incident'], + ['toms', 'tom'], + ['dsar', 'dsar'], + ['dsfa', 'dsfa'], + ['dsb', 'dsb'], + ['deletionLog', 'deletion_log'], + ] + const gdprRows = [] + for (const [file, gdprType] of gdprMapping) { + const items = readGdpr(`${file}.json`) + const list = Array.isArray(items) ? 
items : [items] + for (const item of list) { + if (!item || !item.id) continue + gdprRows.push({ + id: item.id, + gdpr_type: gdprType, + data: JSON.stringify(item), + created_by: str(item.createdBy), + created_at: item.createdAt || now(), + updated_at: item.updatedAt || now(), + deleted_at: item.deletedAt || null, + }) + } + } + await batchInsert(knex, 'gdpr_entries', gdprRows, 'GDPR') + + // ── RBAC Users ──────────────────────────────────────────────────────────── + const rbacData = readJson('rbac_users.json', {}) + const rbacRows = Object.entries(rbacData).map(([key, u]) => ({ + id: key, + username: u.username || key, + email: str(u.email), + domain: str(u.domain || ''), + role: u.role || 'user', + functions: jstr(u.functions) || null, + password_hash: u.passwordHash || '', + totp_secret: u.totpSecret || null, + totp_enabled: u.totpEnabled ? 1 : 0, + totp_verified: u.totpVerified ? 1 : 0, + sections: jstr(u.sections) || null, + created_at: u.createdAt || now(), + updated_at: u.updatedAt || now(), + })) + await batchInsert(knex, 'rbac_users', rbacRows, 'RBAC Users') + + // ── Org Settings (key-value) ────────────────────────────────────────────── + const orgSettings = readJson('org-settings.json', {}) + const settingsRows = Object.entries(orgSettings).map(([key, value]) => ({ + key_name: key, + value: JSON.stringify(value), + })) + await batchInsertKey(knex, 'org_settings', settingsRows, r => ({ key_name: r.key_name }), 'Org Settings') + + // ── Crossmap ────────────────────────────────────────────────────────────── + const crossmap = readJson('crossmap.json', []) + await batchInsertKey(knex, 'custom_lists', crossmap.map(c => ({ + list_id: `crossmap::${c.id}`, + items: JSON.stringify(c.controls || []), + })), r => ({ list_id: r.list_id }), 'Crossmap') + + // ── Policy Distributions ────────────────────────────────────────────────── + const dists = readJson('policy-distributions.json', []) + await batchInsert(knex, 'policy_distributions', dists.map(d => ({ + id: 
d.id, + template_id: d.templateId || '', + template_title: str(d.templateTitle || ''), + template_type: str(d.templateType || ''), + template_version: d.templateVersion || 1, + mode: d.mode || 'manual', + target_group: str(d.targetGroup || ''), + due_date: d.dueDate || null, + email_list: jstr(d.emailList) || '[]', + notes: str(d.notes || ''), + status: d.status || 'active', + created_at: d.createdAt || now(), + created_by: str(d.createdBy || ''), + email_sent_at: d.emailSentAt || null, + email_sent_count: d.emailSentCount || 0, + })), 'Policy Distributions') + + // ── Policy Acks ─────────────────────────────────────────────────────────── + const acks = readJson('policy-acks.json', []) + await batchInsert(knex, 'policy_acks', acks.map(a => ({ + id: a.id, + distribution_id: a.distributionId || '', + recipient_email: str(a.recipientEmail || ''), + recipient_name: str(a.recipientName || ''), + token: a.token || '', + acknowledged_at: a.acknowledgedAt || null, + ip_address: str(a.ipAddress || ''), + method: str(a.method || ''), + notes: str(a.notes || ''), + added_by: str(a.addedBy || ''), + })), 'Policy Acks') + + // ── Summary ─────────────────────────────────────────────────────────────── + console.log(`\nMigration complete: ${migrated} inserted, ${skipped} skipped, ${errors} errors.`) + } finally { + await destroy() + } +} + +main().catch(e => { + console.error('Migration failed:', e.message) + process.exit(1) +})