
Merge branch 'master' into feature/automation-grouping

Commit f44c401686 by deanhannigan, 2024-07-05 11:43:10 +01:00, committed by GitHub
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
89 changed files with 3234 additions and 2746 deletions

@@ -214,6 +214,7 @@ jobs:
echo "pro_commit=$pro_commit"
echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
echo "base_commit=$base_commit"
echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"
base_commit_excluding_merges=$(git log --no-merges -n 1 --format=format:%H $base_commit)
echo "base_commit_excluding_merges=$base_commit_excluding_merges"
@@ -230,7 +231,7 @@ jobs:
base_commit_excluding_merges='${{ steps.get_pro_commits.outputs.base_commit_excluding_merges }}'
pro_commit='${{ steps.get_pro_commits.outputs.pro_commit }}'
any_commit=$(git log --no-merges $base_commit...$pro_commit)
any_commit=$(git log --no-merges $base_commit_excluding_merges...$pro_commit)
if [ -n "$any_commit" ]; then
echo $any_commit

@@ -10,7 +10,7 @@
},
"dependencies": {
"bulma": "^0.9.3",
"next": "12.1.0",
"next": "14.1.1",
"node-fetch": "^3.2.10",
"sass": "^1.52.3",
"react": "17.0.2",

@@ -46,10 +46,10 @@
resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45"
integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==
"@next/env@12.1.0":
version "12.1.0"
resolved "https://registry.yarnpkg.com/@next/env/-/env-12.1.0.tgz#73713399399b34aa5a01771fb73272b55b22c314"
integrity sha512-nrIgY6t17FQ9xxwH3jj0a6EOiQ/WDHUos35Hghtr+SWN/ntHIQ7UpuvSi0vaLzZVHQWaDupKI+liO5vANcDeTQ==
"@next/env@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/env/-/env-14.1.1.tgz#80150a8440eb0022a73ba353c6088d419b908bac"
integrity sha512-7CnQyD5G8shHxQIIg3c7/pSeYFeMhsNbpU/bmvH7ZnDql7mNRgg8O2JZrhrc/soFnfBnKP4/xXNiiSIPn2w8gA==
"@next/eslint-plugin-next@12.1.0":
version "12.1.0"
@@ -58,60 +58,50 @@
dependencies:
glob "7.1.7"
"@next/swc-android-arm64@12.1.0":
version "12.1.0"
resolved "https://registry.yarnpkg.com/@next/swc-android-arm64/-/swc-android-arm64-12.1.0.tgz#865ba3a9afc204ff2bdeea49dd64d58705007a39"
integrity sha512-/280MLdZe0W03stA69iL+v6I+J1ascrQ6FrXBlXGCsGzrfMaGr7fskMa0T5AhQIVQD4nA/46QQWxG//DYuFBcA==
"@next/swc-darwin-arm64@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.1.1.tgz#b74ba7c14af7d05fa2848bdeb8ee87716c939b64"
integrity sha512-yDjSFKQKTIjyT7cFv+DqQfW5jsD+tVxXTckSe1KIouKk75t1qZmj/mV3wzdmFb0XHVGtyRjDMulfVG8uCKemOQ==
"@next/swc-darwin-arm64@12.1.0":
version "12.1.0"
resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-12.1.0.tgz#08e8b411b8accd095009ed12efbc2f1d4d547135"
integrity sha512-R8vcXE2/iONJ1Unf5Ptqjk6LRW3bggH+8drNkkzH4FLEQkHtELhvcmJwkXcuipyQCsIakldAXhRbZmm3YN1vXg==
"@next/swc-darwin-x64@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-14.1.1.tgz#82c3e67775e40094c66e76845d1a36cc29c9e78b"
integrity sha512-KCQmBL0CmFmN8D64FHIZVD9I4ugQsDBBEJKiblXGgwn7wBCSe8N4Dx47sdzl4JAg39IkSN5NNrr8AniXLMb3aw==
"@next/swc-darwin-x64@12.1.0":
version "12.1.0"
resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-12.1.0.tgz#fcd684497a76e8feaca88db3c394480ff0b007cd"
integrity sha512-ieAz0/J0PhmbZBB8+EA/JGdhRHBogF8BWaeqR7hwveb6SYEIJaDNQy0I+ZN8gF8hLj63bEDxJAs/cEhdnTq+ug==
"@next/swc-linux-arm64-gnu@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.1.1.tgz#4f4134457b90adc5c3d167d07dfb713c632c0caa"
integrity sha512-YDQfbWyW0JMKhJf/T4eyFr4b3tceTorQ5w2n7I0mNVTFOvu6CGEzfwT3RSAQGTi/FFMTFcuspPec/7dFHuP7Eg==
"@next/swc-linux-arm-gnueabihf@12.1.0":
version "12.1.0"
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm-gnueabihf/-/swc-linux-arm-gnueabihf-12.1.0.tgz#9ec6380a27938a5799aaa6035c205b3c478468a7"
integrity sha512-njUd9hpl6o6A5d08dC0cKAgXKCzm5fFtgGe6i0eko8IAdtAPbtHxtpre3VeSxdZvuGFh+hb0REySQP9T1ttkog==
"@next/swc-linux-arm64-musl@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.1.1.tgz#594bedafaeba4a56db23a48ffed2cef7cd09c31a"
integrity sha512-fiuN/OG6sNGRN/bRFxRvV5LyzLB8gaL8cbDH5o3mEiVwfcMzyE5T//ilMmaTrnA8HLMS6hoz4cHOu6Qcp9vxgQ==
"@next/swc-linux-arm64-gnu@12.1.0":
version "12.1.0"
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-12.1.0.tgz#7f4196dff1049cea479607c75b81033ae2dbd093"
integrity sha512-OqangJLkRxVxMhDtcb7Qn1xjzFA3s50EIxY7mljbSCLybU+sByPaWAHY4px97ieOlr2y4S0xdPKkQ3BCAwyo6Q==
"@next/swc-linux-x64-gnu@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.1.1.tgz#cb4e75f1ff2b9bcadf2a50684605928ddfc58528"
integrity sha512-rv6AAdEXoezjbdfp3ouMuVqeLjE1Bin0AuE6qxE6V9g3Giz5/R3xpocHoAi7CufRR+lnkuUjRBn05SYJ83oKNQ==
"@next/swc-linux-arm64-musl@12.1.0":
version "12.1.0"
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-12.1.0.tgz#b445f767569cdc2dddee785ca495e1a88c025566"
integrity sha512-hB8cLSt4GdmOpcwRe2UzI5UWn6HHO/vLkr5OTuNvCJ5xGDwpPXelVkYW/0+C3g5axbDW2Tym4S+MQCkkH9QfWA==
"@next/swc-linux-x64-musl@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.1.1.tgz#15f26800df941b94d06327f674819ab64b272e25"
integrity sha512-YAZLGsaNeChSrpz/G7MxO3TIBLaMN8QWMr3X8bt6rCvKovwU7GqQlDu99WdvF33kI8ZahvcdbFsy4jAFzFX7og==
"@next/swc-linux-x64-gnu@12.1.0":
version "12.1.0"
resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-12.1.0.tgz#67610e9be4fbc987de7535f1bcb17e45fe12f90e"
integrity sha512-OKO4R/digvrVuweSw/uBM4nSdyzsBV5EwkUeeG4KVpkIZEe64ZwRpnFB65bC6hGwxIBnTv5NMSnJ+0K/WmG78A==
"@next/swc-win32-arm64-msvc@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.1.1.tgz#060c134fa7fa843666e3e8574972b2b723773dd9"
integrity sha512-1L4mUYPBMvVDMZg1inUYyPvFSduot0g73hgfD9CODgbr4xiTYe0VOMTZzaRqYJYBA9mana0x4eaAaypmWo1r5A==
"@next/swc-linux-x64-musl@12.1.0":
version "12.1.0"
resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-12.1.0.tgz#ea19a23db08a9f2e34ac30401f774cf7d1669d31"
integrity sha512-JohhgAHZvOD3rQY7tlp7NlmvtvYHBYgY0x5ZCecUT6eCCcl9lv6iV3nfu82ErkxNk1H893fqH0FUpznZ/H3pSw==
"@next/swc-win32-ia32-msvc@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.1.1.tgz#5c06889352b1f77e3807834a0d0afd7e2d2d1da2"
integrity sha512-jvIE9tsuj9vpbbXlR5YxrghRfMuG0Qm/nZ/1KDHc+y6FpnZ/apsgh+G6t15vefU0zp3WSpTMIdXRUsNl/7RSuw==
"@next/swc-win32-arm64-msvc@12.1.0":
version "12.1.0"
resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-12.1.0.tgz#eadf054fc412085659b98e145435bbba200b5283"
integrity sha512-T/3gIE6QEfKIJ4dmJk75v9hhNiYZhQYAoYm4iVo1TgcsuaKLFa+zMPh4056AHiG6n9tn2UQ1CFE8EoybEsqsSw==
"@next/swc-win32-ia32-msvc@12.1.0":
version "12.1.0"
resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-12.1.0.tgz#68faeae10c89f698bf9d28759172b74c9c21bda1"
integrity sha512-iwnKgHJdqhIW19H9PRPM9j55V6RdcOo6rX+5imx832BCWzkDbyomWnlzBfr6ByUYfhohb8QuH4hSGEikpPqI0Q==
"@next/swc-win32-x64-msvc@12.1.0":
version "12.1.0"
resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-12.1.0.tgz#d27e7e76c87a460a4da99c5bfdb1618dcd6cd064"
integrity sha512-aBvcbMwuanDH4EMrL2TthNJy+4nP59Bimn8egqv6GHMVj0a44cU6Au4PjOhLNqEh9l+IpRGBqMTzec94UdC5xg==
"@next/swc-win32-x64-msvc@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.1.1.tgz#d38c63a8f9b7f36c1470872797d3735b4a9c5c52"
integrity sha512-S6K6EHDU5+1KrBDLko7/c1MNy/Ya73pIAmvKeFwsF4RmBFJSO7/7YeD4FnZ4iBdzE69PpQ4sOMU9ORKeNuxe8A==
"@nodelib/fs.scandir@2.1.5":
version "2.1.5"
@@ -139,6 +129,13 @@
resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.1.0.tgz#7f698254aadf921e48dda8c0a6b304026b8a9323"
integrity sha512-JLo+Y592QzIE+q7Dl2pMUtt4q8SKYI5jDrZxrozEQxnGVOyYE+GWK9eLkwTaeN9DDctlaRAQ3TBmzZ1qdLE30A==
"@swc/helpers@0.5.2":
version "0.5.2"
resolved "https://registry.yarnpkg.com/@swc/helpers/-/helpers-0.5.2.tgz#85ea0c76450b61ad7d10a37050289eded783c27d"
integrity sha512-E4KcWTpoLHqwPHLxidpOqQbcrZVgi0rsmmZXUle1jXmJfuIf/UWpczUJ7MZZ5tlxytgJXyp0w4PGkkeLiuIdZw==
dependencies:
tslib "^2.4.0"
"@types/json5@^0.0.29":
version "0.0.29"
resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee"
@@ -344,6 +341,13 @@ bulma@^0.9.3:
resolved "https://registry.yarnpkg.com/bulma/-/bulma-0.9.3.tgz#ddccb7436ebe3e21bf47afe01d3c43a296b70243"
integrity sha512-0d7GNW1PY4ud8TWxdNcP6Cc8Bu7MxcntD/RRLGWuiw/s0a9P+XlH/6QoOIrmbj6o8WWJzJYhytiu9nFjTszk1g==
busboy@1.6.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/busboy/-/busboy-1.6.0.tgz#966ea36a9502e43cdb9146962523b92f531f6893"
integrity sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==
dependencies:
streamsearch "^1.1.0"
call-bind@^1.0.0, call-bind@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c"
@@ -357,10 +361,10 @@ callsites@^3.0.0:
resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73"
integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==
caniuse-lite@^1.0.30001283:
version "1.0.30001314"
resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001314.tgz#65c7f9fb7e4594fca0a333bec1d8939662377596"
integrity sha512-0zaSO+TnCHtHJIbpLroX7nsD+vYuOVjl3uzFbJO1wMVbuveJA0RK2WcQA9ZUIOiO0/ArMiMgHJLxfEZhQiC0kw==
caniuse-lite@^1.0.30001579:
version "1.0.30001640"
resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001640.tgz#32c467d4bf1f1a0faa63fc793c2ba81169e7652f"
integrity sha512-lA4VMpW0PSUrFnkmVuEKBUovSWKhj7puyCg8StBChgu298N1AtuF1sKWEvfDuimSEDbhlb/KqPKC3fs1HbuQUA==
chalk@^4.0.0:
version "4.1.2"
@@ -385,6 +389,11 @@ chalk@^4.0.0:
optionalDependencies:
fsevents "~2.3.2"
client-only@0.0.1:
version "0.0.1"
resolved "https://registry.yarnpkg.com/client-only/-/client-only-0.0.1.tgz#38bba5d403c41ab150bff64a95c85013cf73bca1"
integrity sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==
color-convert@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3"
@@ -909,6 +918,11 @@ globby@^11.0.4:
merge2 "^1.4.1"
slash "^3.0.0"
graceful-fs@^4.2.11:
version "4.2.11"
resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3"
integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==
has-bigints@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.1.tgz#64fe6acb020673e3b78db035a5af69aa9d07b113"
@@ -1221,38 +1235,38 @@ ms@^2.1.1:
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
nanoid@^3.1.30:
version "3.3.1"
resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.1.tgz#6347a18cac88af88f58af0b3594b723d5e99bb35"
integrity sha512-n6Vs/3KGyxPQd6uO0eH4Bv0ojGSUvuLlIHtC3Y0kEO23YRge8H9x1GCzLn28YX0H66pMkxuaeESFq4tKISKwdw==
nanoid@^3.3.6:
version "3.3.7"
resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.7.tgz#d0c301a691bc8d54efa0a2226ccf3fe2fd656bd8"
integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==
natural-compare@^1.4.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=
next@12.1.0:
version "12.1.0"
resolved "https://registry.yarnpkg.com/next/-/next-12.1.0.tgz#c33d753b644be92fc58e06e5a214f143da61dd5d"
integrity sha512-s885kWvnIlxsUFHq9UGyIyLiuD0G3BUC/xrH0CEnH5lHEWkwQcHOORgbDF0hbrW9vr/7am4ETfX4A7M6DjrE7Q==
next@14.1.1:
version "14.1.1"
resolved "https://registry.yarnpkg.com/next/-/next-14.1.1.tgz#92bd603996c050422a738e90362dff758459a171"
integrity sha512-McrGJqlGSHeaz2yTRPkEucxQKe5Zq7uPwyeHNmJaZNY4wx9E9QdxmTp310agFRoMuIYgQrCrT3petg13fSVOww==
dependencies:
"@next/env" "12.1.0"
caniuse-lite "^1.0.30001283"
postcss "8.4.5"
styled-jsx "5.0.0"
use-subscription "1.5.1"
"@next/env" "14.1.1"
"@swc/helpers" "0.5.2"
busboy "1.6.0"
caniuse-lite "^1.0.30001579"
graceful-fs "^4.2.11"
postcss "8.4.31"
styled-jsx "5.1.1"
optionalDependencies:
"@next/swc-android-arm64" "12.1.0"
"@next/swc-darwin-arm64" "12.1.0"
"@next/swc-darwin-x64" "12.1.0"
"@next/swc-linux-arm-gnueabihf" "12.1.0"
"@next/swc-linux-arm64-gnu" "12.1.0"
"@next/swc-linux-arm64-musl" "12.1.0"
"@next/swc-linux-x64-gnu" "12.1.0"
"@next/swc-linux-x64-musl" "12.1.0"
"@next/swc-win32-arm64-msvc" "12.1.0"
"@next/swc-win32-ia32-msvc" "12.1.0"
"@next/swc-win32-x64-msvc" "12.1.0"
"@next/swc-darwin-arm64" "14.1.1"
"@next/swc-darwin-x64" "14.1.1"
"@next/swc-linux-arm64-gnu" "14.1.1"
"@next/swc-linux-arm64-musl" "14.1.1"
"@next/swc-linux-x64-gnu" "14.1.1"
"@next/swc-linux-x64-musl" "14.1.1"
"@next/swc-win32-arm64-msvc" "14.1.1"
"@next/swc-win32-ia32-msvc" "14.1.1"
"@next/swc-win32-x64-msvc" "14.1.1"
node-domexception@^1.0.0:
version "1.0.0"
@@ -1413,14 +1427,14 @@ picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.3:
resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42"
integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==
postcss@8.4.5:
version "8.4.5"
resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.5.tgz#bae665764dfd4c6fcc24dc0fdf7e7aa00cc77f95"
integrity sha512-jBDboWM8qpaqwkMwItqTQTiFikhs/67OYVvblFFTM7MrZjt6yMKd6r2kgXizEbTTljacm4NldIlZnhbjr84QYg==
postcss@8.4.31:
version "8.4.31"
resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.31.tgz#92b451050a9f914da6755af352bdc0192508656d"
integrity sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==
dependencies:
nanoid "^3.1.30"
nanoid "^3.3.6"
picocolors "^1.0.0"
source-map-js "^1.0.1"
source-map-js "^1.0.2"
prelude-ls@^1.2.1:
version "1.2.1"
@@ -1594,11 +1608,21 @@ slash@^3.0.0:
resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634"
integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==
"source-map-js@>=0.6.2 <2.0.0", source-map-js@^1.0.1:
"source-map-js@>=0.6.2 <2.0.0":
version "1.0.2"
resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c"
integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==
source-map-js@^1.0.2:
version "1.2.0"
resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.0.tgz#16b809c162517b5b8c3e7dcd315a2a5c2612b2af"
integrity sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==
streamsearch@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764"
integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==
string.prototype.matchall@^4.0.6:
version "4.0.6"
resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.6.tgz#5abb5dabc94c7b0ea2380f65ba610b3a544b15fa"
@@ -1646,10 +1670,12 @@ strip-json-comments@^3.1.0, strip-json-comments@^3.1.1:
resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006"
integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==
styled-jsx@5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/styled-jsx/-/styled-jsx-5.0.0.tgz#816b4b92e07b1786c6b7111821750e0ba4d26e77"
integrity sha512-qUqsWoBquEdERe10EW8vLp3jT25s/ssG1/qX5gZ4wu15OZpmSMFI2v+fWlRhLfykA5rFtlJ1ME8A8pm/peV4WA==
styled-jsx@5.1.1:
version "5.1.1"
resolved "https://registry.yarnpkg.com/styled-jsx/-/styled-jsx-5.1.1.tgz#839a1c3aaacc4e735fed0781b8619ea5d0009d1f"
integrity sha512-pW7uC1l4mBZ8ugbiZrcIsiIvVx1UmTfw7UkC3Um2tmfUq9Bhk8IiyEIPl6F8agHgjzku6j0xQEZbfA5uSgSaCw==
dependencies:
client-only "0.0.1"
supports-color@^7.1.0:
version "7.2.0"
@@ -1690,6 +1716,11 @@ tslib@^1.8.1:
resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00"
integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==
tslib@^2.4.0:
version "2.6.3"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.3.tgz#0438f810ad7a9edcde7a241c3d80db693c8cbfe0"
integrity sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==
tsutils@^3.21.0:
version "3.21.0"
resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623"
@@ -1709,10 +1740,10 @@ type-fest@^0.20.2:
resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4"
integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==
typescript@5.2.2:
version "5.2.2"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.2.2.tgz#5ebb5e5a5b75f085f22bc3f8460fba308310fa78"
integrity sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==
typescript@5.5.2:
version "5.5.2"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.5.2.tgz#c26f023cb0054e657ce04f72583ea2d85f8d0507"
integrity sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew==
unbox-primitive@^1.0.1:
version "1.0.1"
@@ -1731,13 +1762,6 @@ uri-js@^4.2.2:
dependencies:
punycode "^2.1.0"
use-subscription@1.5.1:
version "1.5.1"
resolved "https://registry.yarnpkg.com/use-subscription/-/use-subscription-1.5.1.tgz#73501107f02fad84c6dd57965beb0b75c68c42d1"
integrity sha512-Xv2a1P/yReAjAbhylMfFplFKj9GssgTwN7RlcTxBujFQcloStWNDQdc4g4NRWH9xS4i/FDk04vQBptAXoF3VcA==
dependencies:
object-assign "^4.1.1"
v8-compile-cache@^2.0.3:
version "2.3.0"
resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee"

@@ -96,10 +96,13 @@ EXPOSE 5984 4369 9100
CMD ["/opt/couchdb/bin/couchdb"]
FROM base as runner
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH
ENV COUCHDB_USER admin
ENV COUCHDB_PASSWORD admin
EXPOSE 5984
EXPOSE 4984
RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \
wget -O - https://packages.adoptium.net/artifactory/api/gpg/key/public | apt-key add - && \
@@ -125,7 +128,12 @@ ADD clouseau/log4j.properties clouseau/clouseau.ini ./
WORKDIR /opt/couchdb
ADD couch/vm.args couch/local.ini ./etc/
# setup SQS
WORKDIR /opt/sqs
ADD sqs ./
RUN chmod +x ./install.sh && ./install.sh
WORKDIR /
ADD runner.sh ./bbcouch-runner.sh
RUN chmod +x ./bbcouch-runner.sh /opt/clouseau/bin/clouseau
CMD ["./bbcouch-runner.sh"]
RUN chmod +x ./bbcouch-runner.sh /opt/clouseau/bin/clouseau /opt/sqs/sqs
CMD ["./bbcouch-runner.sh"]

@@ -1,139 +0,0 @@
# Modified from https://github.com/apache/couchdb-docker/blob/main/3.3.3/Dockerfile
#
# Everything in this `base` image is adapted from the official `couchdb` image's
# Dockerfile. Only modifications related to upgrading from Debian bullseye to
# bookworm have been included. The `runner` image contains Budibase's
# customisations to the image, e.g. adding Clouseau.
FROM node:20-slim AS base
# Add CouchDB user account to make sure the IDs are assigned consistently
RUN groupadd -g 5984 -r couchdb && useradd -u 5984 -d /opt/couchdb -g couchdb couchdb
# be sure GPG and apt-transport-https are available and functional
RUN set -ex; \
apt-get update; \
apt-get install -y --no-install-recommends \
apt-transport-https \
ca-certificates \
dirmngr \
gnupg \
; \
rm -rf /var/lib/apt/lists/*
# grab tini for signal handling and zombie reaping
# see https://github.com/apache/couchdb-docker/pull/28#discussion_r141112407
RUN set -eux; \
apt-get update; \
apt-get install -y --no-install-recommends tini; \
rm -rf /var/lib/apt/lists/*; \
tini --version
# http://docs.couchdb.org/en/latest/install/unix.html#installing-the-apache-couchdb-packages
ENV GPG_COUCH_KEY \
# gpg: rsa8192 205-01-19 The Apache Software Foundation (Package repository signing key) <root@apache.org>
390EF70BB1EA12B2773962950EE62FB37A00258D
RUN set -eux; \
apt-get update; \
apt-get install -y curl; \
export GNUPGHOME="$(mktemp -d)"; \
curl -fL -o keys.asc https://couchdb.apache.org/repo/keys.asc; \
gpg --batch --import keys.asc; \
gpg --batch --export "${GPG_COUCH_KEY}" > /usr/share/keyrings/couchdb-archive-keyring.gpg; \
command -v gpgconf && gpgconf --kill all || :; \
rm -rf "$GNUPGHOME"; \
apt-key list; \
apt purge -y --autoremove curl; \
rm -rf /var/lib/apt/lists/*
ENV COUCHDB_VERSION 3.3.3
RUN . /etc/os-release; \
echo "deb [signed-by=/usr/share/keyrings/couchdb-archive-keyring.gpg] https://apache.jfrog.io/artifactory/couchdb-deb/ ${VERSION_CODENAME} main" | \
tee /etc/apt/sources.list.d/couchdb.list >/dev/null
# https://github.com/apache/couchdb-pkg/blob/master/debian/README.Debian
RUN set -eux; \
apt-get update; \
\
echo "couchdb couchdb/mode select none" | debconf-set-selections; \
# we DO want recommends this time
DEBIAN_FRONTEND=noninteractive apt-get install -y --allow-downgrades --allow-remove-essential --allow-change-held-packages \
couchdb="$COUCHDB_VERSION"~bookworm \
; \
# Undo symlinks to /var/log and /var/lib
rmdir /var/lib/couchdb /var/log/couchdb; \
rm /opt/couchdb/data /opt/couchdb/var/log; \
mkdir -p /opt/couchdb/data /opt/couchdb/var/log; \
chown couchdb:couchdb /opt/couchdb/data /opt/couchdb/var/log; \
chmod 777 /opt/couchdb/data /opt/couchdb/var/log; \
# Remove file that sets logging to a file
rm /opt/couchdb/etc/default.d/10-filelog.ini; \
# Check we own everything in /opt/couchdb. Matches the command in dockerfile_entrypoint.sh
find /opt/couchdb \! \( -user couchdb -group couchdb \) -exec chown -f couchdb:couchdb '{}' +; \
# Setup directories and permissions for config. Technically these could be 555 and 444 respectively
# but we keep them as 755 and 644 for consistency with CouchDB defaults and the dockerfile_entrypoint.sh.
find /opt/couchdb/etc -type d ! -perm 0755 -exec chmod -f 0755 '{}' +; \
find /opt/couchdb/etc -type f ! -perm 0644 -exec chmod -f 0644 '{}' +; \
# only local.d needs to be writable for the docker_entrypoint.sh
chmod -f 0777 /opt/couchdb/etc/local.d; \
# apt clean-up
rm -rf /var/lib/apt/lists/*;
# Add configuration
COPY --chown=couchdb:couchdb couch/10-docker-default.ini /opt/couchdb/etc/default.d/
# COPY --chown=couchdb:couchdb vm.args /opt/couchdb/etc/
COPY docker-entrypoint.sh /usr/local/bin
RUN ln -s usr/local/bin/docker-entrypoint.sh /docker-entrypoint.sh # backwards compat
ENTRYPOINT ["tini", "--", "/docker-entrypoint.sh"]
VOLUME /opt/couchdb/data
# 5984: Main CouchDB endpoint
# 4369: Erlang portmap daemon (epmd)
# 9100: CouchDB cluster communication port
EXPOSE 5984 4369 9100
CMD ["/opt/couchdb/bin/couchdb"]
FROM base as runner
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH
ENV COUCHDB_USER admin
ENV COUCHDB_PASSWORD admin
EXPOSE 5984
EXPOSE 4984
RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \
wget -O - https://packages.adoptium.net/artifactory/api/gpg/key/public | apt-key add - && \
apt-add-repository 'deb http://security.debian.org/debian-security bookworm-security/updates main' && \
apt-add-repository 'deb http://archive.debian.org/debian stretch-backports main' && \
apt-add-repository 'deb https://packages.adoptium.net/artifactory/deb bookworm main' && \
apt-get update && apt-get install -y --no-install-recommends temurin-8-jdk && \
rm -rf /var/lib/apt/lists/
# setup clouseau
WORKDIR /
RUN wget https://github.com/cloudant-labs/clouseau/releases/download/2.21.0/clouseau-2.21.0-dist.zip && \
unzip clouseau-2.21.0-dist.zip && \
mv clouseau-2.21.0 /opt/clouseau && \
rm clouseau-2.21.0-dist.zip
WORKDIR /opt/clouseau
RUN mkdir ./bin
ADD clouseau/clouseau ./bin/
ADD clouseau/log4j.properties clouseau/clouseau.ini ./
# setup CouchDB
WORKDIR /opt/couchdb
ADD couch/vm.args couch/local.ini ./etc/
# setup SQS
WORKDIR /opt/sqs
ADD sqs ./
RUN chmod +x ./install.sh && ./install.sh
WORKDIR /
ADD runner.v2.sh ./bbcouch-runner.sh
RUN chmod +x ./bbcouch-runner.sh /opt/clouseau/bin/clouseau /opt/sqs/sqs
CMD ["./bbcouch-runner.sh"]

@@ -70,9 +70,12 @@ sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/clouseau/clouse
/opt/clouseau/bin/clouseau > /dev/stdout 2>&1 &
# Start CouchDB.
/docker-entrypoint.sh /opt/couchdb/bin/couchdb &
/docker-entrypoint.sh /opt/couchdb/bin/couchdb > /dev/stdout 2>&1 &
# Wati for CouchDB to start up.
# Start SQS. Use 127.0.0.1 instead of localhost to avoid IPv6 issues.
/opt/sqs/sqs --server "http://127.0.0.1:5984" --data-dir ${DATA_DIR}/sqs --bind-address=0.0.0.0 > /dev/stdout 2>&1 &
# Wait for CouchDB to start up.
while [[ $(curl -s -w "%{http_code}\n" http://localhost:5984/_up -o /dev/null) -ne 200 ]]; do
echo 'Waiting for CouchDB to start...';
sleep 5;
@@ -82,4 +85,4 @@ done
# function correctly, so we create them here.
curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_users
curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_replicator
sleep infinity
sleep infinity

@@ -1,88 +0,0 @@
#!/bin/bash
DATA_DIR=${DATA_DIR:-/data}
COUCHDB_ERLANG_COOKIE=${COUCHDB_ERLANG_COOKIE:-B9CFC32C-3458-4A86-8448-B3C753991CA7}
mkdir -p ${DATA_DIR}
mkdir -p ${DATA_DIR}/couch/{dbs,views}
mkdir -p ${DATA_DIR}/search
chown -R couchdb:couchdb ${DATA_DIR}/couch
echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
# Azure AppService uses /home for persistent data & SSH on port 2222
DATA_DIR="${DATA_DIR:-/home}"
WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
mkdir -p $DATA_DIR/{search,minio,couch}
mkdir -p $DATA_DIR/couch/{dbs,views}
chown -R couchdb:couchdb $DATA_DIR/couch/
apt update
apt-get install -y openssh-server
echo "root:Docker!" | chpasswd
mkdir -p /tmp
chmod +x /tmp/ssh_setup.sh \
&& (sleep 1;/tmp/ssh_setup.sh 2>&1 > /dev/null)
cp /etc/sshd_config /etc/ssh/sshd_config
/etc/init.d/ssh restart
sed -i "s#DATA_DIR#/home#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/home#g" /opt/couchdb/etc/local.ini
elif [[ "${TARGETBUILD}" = "single" ]]; then
# In the single image build, the Dockerfile specifies /data as a volume
# mount, so we use that for all persistent data.
sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
elif [[ "${TARGETBUILD}" = "docker-compose" ]]; then
# We remove the database_dir and view_index_dir settings from the local.ini
# in docker-compose because it will default to /opt/couchdb/data which is what
# our docker-compose was using prior to us switching to using our own CouchDB
# image.
sed -i "s#^database_dir.*\$##g" /opt/couchdb/etc/local.ini
sed -i "s#^view_index_dir.*\$##g" /opt/couchdb/etc/local.ini
sed -i "s#^dir=.*\$#dir=/opt/couchdb/data#g" /opt/clouseau/clouseau.ini
elif [[ -n $KUBERNETES_SERVICE_HOST ]]; then
# In Kubernetes the directory /opt/couchdb/data has a persistent volume
# mount for storing database data.
sed -i "s#^dir=.*\$#dir=/opt/couchdb/data#g" /opt/clouseau/clouseau.ini
# We remove the database_dir and view_index_dir settings from the local.ini
# in Kubernetes because it will default to /opt/couchdb/data which is what
# our Helm chart was using prior to us switching to using our own CouchDB
# image.
sed -i "s#^database_dir.*\$##g" /opt/couchdb/etc/local.ini
sed -i "s#^view_index_dir.*\$##g" /opt/couchdb/etc/local.ini
# We remove the -name setting from the vm.args file in Kubernetes because
# it will default to the pod FQDN, which is what's required for clustering
# to work.
sed -i "s/^-name .*$//g" /opt/couchdb/etc/vm.args
else
# For all other builds, we use /data for persistent data.
sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
fi
sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/couchdb/etc/vm.args
sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/clouseau/clouseau.ini
# Start Clouseau. Budibase won't function correctly without Clouseau running, it
# powers the search API endpoints which are used to do all sorts, including
# populating app grids.
/opt/clouseau/bin/clouseau > /dev/stdout 2>&1 &
# Start CouchDB.
/docker-entrypoint.sh /opt/couchdb/bin/couchdb > /dev/stdout 2>&1 &
# Start SQS. Use 127.0.0.1 instead of localhost to avoid IPv6 issues.
/opt/sqs/sqs --server "http://127.0.0.1:5984" --data-dir ${DATA_DIR}/sqs --bind-address=0.0.0.0 > /dev/stdout 2>&1 &
# Wait for CouchDB to start up.
while [[ $(curl -s -w "%{http_code}\n" http://localhost:5984/_up -o /dev/null) -ne 200 ]]; do
echo 'Waiting for CouchDB to start...';
sleep 5;
done
# CouchDB needs the `_users` and `_replicator` databases to exist before it will
# function correctly, so we create them here.
curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_users
curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_replicator
sleep infinity

@@ -1,6 +1,6 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "2.29.5",
"version": "2.29.13",
"npmClient": "yarn",
"packages": [
"packages/*",
@@ -22,4 +22,4 @@
"loadEnvFiles": false
}
}
}
}

@@ -33,10 +33,10 @@
"scripts": {
"get-past-client-version": "node scripts/getPastClientVersion.js",
"setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev",
"build": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream",
"build:apps": "yarn build --scope @budibase/server --scope @budibase/worker",
"build": "DISABLE_V8_COMPILE_CACHE=1 NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream",
"build:apps": "DISABLE_V8_COMPILE_CACHE=1 yarn build --scope @budibase/server --scope @budibase/worker",
"build:oss": "DISABLE_V8_COMPILE_CACHE=1 NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --ignore @budibase/account-portal-server --ignore @budibase/account-portal-ui",
"build:cli": "yarn build --scope @budibase/cli",
"build:oss": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --ignore @budibase/account-portal-server --ignore @budibase/account-portal-ui",
"build:account-portal": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --scope @budibase/account-portal-server --scope @budibase/account-portal-ui",
"build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
"check:types": "lerna run --concurrency 2 check:types --ignore @budibase/account-portal-server",
@@ -77,7 +77,6 @@
"build:docker:single:sqs": "./scripts/build-single-image-sqs.sh",
"build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
"publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.3.3 --push ./hosting/couchdb",
"publish:docker:couch-sqs": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile.v2 -t budibase/couchdb:v3.3.3-sqs --push ./hosting/couchdb",
"publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
"release:helm": "node scripts/releaseHelmChart",
"env:multi:enable": "lerna run --stream env:multi:enable",

@@ -1 +1 @@
Subproject commit ff16525b73c5751d344f5c161a682609c0a993f2
Subproject commit b03e584e465f620b49a1b688ff4afc973e6c0758

@@ -22,10 +22,9 @@
},
"dependencies": {
"@budibase/nano": "10.1.5",
"@budibase/pouchdb-replication-stream": "1.2.10",
"@budibase/pouchdb-replication-stream": "1.2.11",
"@budibase/shared-core": "0.0.0",
"@budibase/types": "0.0.0",
"@govtechsg/passport-openidconnect": "^1.0.2",
"aws-cloudfront-sign": "3.0.2",
"aws-sdk": "2.1030.0",
"bcrypt": "5.1.0",

@@ -80,6 +80,11 @@ export function DatabaseWithConnection(
connection: string,
opts?: DatabaseOpts
) {
if (!dbName || !connection) {
throw new Error(
"Unable to create database without database name or connection"
)
}
const db = new DatabaseImpl(dbName, opts, connection)
return new DDInstrumentedDatabase(db)
}

@@ -1,6 +1,7 @@
import env from "../../environment"
export const getCouchInfo = (connection?: string) => {
// clean out any auth credentials
const urlInfo = getUrlInfo(connection)
let username
let password
@@ -23,9 +24,16 @@ export const getCouchInfo = (connection?: string) => {
throw new Error("CouchDB password not set")
}
const authCookie = Buffer.from(`${username}:${password}`).toString("base64")
let sqlUrl = env.COUCH_DB_SQL_URL
if (!sqlUrl && urlInfo.url) {
const parsed = new URL(urlInfo.url)
// attempt to connect on default port
sqlUrl = urlInfo.url.replace(parsed.port, "4984")
}
return {
url: urlInfo.url!,
sqlUrl: env.COUCH_DB_SQL_URL,
// clean out any auth credentials
sqlUrl: getUrlInfo(sqlUrl).url,
auth: {
username: username,
password: password,
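For context on the hunk above: when COUCH_DB_SQL_URL is unset, the new code derives the SQL URL from the main CouchDB URL by switching the port to 4984, then strips any embedded credentials before returning it. A minimal TypeScript sketch of that flow, assuming only the standard WHATWG URL API (deriveSqlUrl is a hypothetical name, not part of this codebase):

// Sketch: derive the SQL (SQS) URL from the CouchDB URL and clean out
// any auth credentials, mirroring the behaviour tested in the spec below.
function deriveSqlUrl(couchUrl: string, sqlUrlFromEnv?: string): string {
  let sqlUrl = sqlUrlFromEnv
  if (!sqlUrl) {
    const parsed = new URL(couchUrl)
    parsed.port = "4984" // attempt to connect on the default SQS port
    sqlUrl = parsed.toString()
  }
  const cleaned = new URL(sqlUrl)
  cleaned.username = "" // strip user:password from the URL
  cleaned.password = ""
  return cleaned.toString().replace(/\/$/, "")
}

// deriveSqlUrl("http://user:test@localhost:5984") === "http://localhost:4984"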

@@ -0,0 +1,22 @@
import env from "../../environment"
import { getCouchInfo } from "../couch"
const MAIN_COUCH_URL = "http://user:test@localhost:5984"
describe("connections", () => {
beforeAll(() => {
env._set("COUCH_DB_SQL_URL", "https://user:test@localhost:4984")
})
it("should strip URL credentials", () => {
const response = getCouchInfo(MAIN_COUCH_URL)
expect(response.url).toBe("http://localhost:5984")
expect(response.sqlUrl).toBe("https://localhost:4984")
})
it("should return separate auth credentials", () => {
const response = getCouchInfo(MAIN_COUCH_URL)
expect(response.auth.username).toBe("user")
expect(response.auth.password).toBe("test")
})
})

@@ -200,8 +200,28 @@ const environment = {
},
ROLLING_LOG_MAX_SIZE: process.env.ROLLING_LOG_MAX_SIZE || "10M",
DISABLE_SCIM_CALLS: process.env.DISABLE_SCIM_CALLS,
BB_ADMIN_USER_EMAIL: process.env.BB_ADMIN_USER_EMAIL,
BB_ADMIN_USER_PASSWORD: process.env.BB_ADMIN_USER_PASSWORD,
OPENAI_API_KEY: process.env.OPENAI_API_KEY,
}
type EnvironmentKey = keyof typeof environment
export const SECRETS: EnvironmentKey[] = [
"API_ENCRYPTION_KEY",
"BB_ADMIN_USER_PASSWORD",
"COUCH_DB_PASSWORD",
"COUCH_DB_SQL_URL",
"COUCH_DB_URL",
"GOOGLE_CLIENT_SECRET",
"INTERNAL_API_KEY_FALLBACK",
"INTERNAL_API_KEY",
"JWT_SECRET",
"MINIO_ACCESS_KEY",
"MINIO_SECRET_KEY",
"OPENAI_API_KEY",
"REDIS_PASSWORD",
]
// clean up any environment variable edge cases
for (let [key, value] of Object.entries(environment)) {
// handle the edge case of "0" to disable an environment variable
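Worth noting about the hunk above: SECRETS is typed as EnvironmentKey[], where EnvironmentKey is keyof typeof environment, so the compiler rejects any entry that is not a real key of the environment object. A minimal sketch of the same pattern, using a hypothetical two-key environment:

// `keyof typeof` keeps the secret list in sync with the environment object.
const env = {
  JWT_SECRET: process.env.JWT_SECRET,
  OPENAI_API_KEY: process.env.OPENAI_API_KEY,
}
type EnvKey = keyof typeof env
const SECRET_KEYS: EnvKey[] = [
  "JWT_SECRET",
  "OPENAI_API_KEY",
  // "NOT_A_KEY", // compile error: not a key of env
]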

@@ -1,6 +1,7 @@
import { APIError } from "@budibase/types"
import * as errors from "../errors"
import environment from "../environment"
import { stringContainsSecret } from "../security/secrets"
export async function errorHandling(ctx: any, next: any) {
try {
@@ -17,11 +18,19 @@ export async function errorHandling(ctx: any, next: any) {
let error: APIError = {
message: err.message,
status: status,
status,
validationErrors: err.validation,
error: errors.getPublicError(err),
}
if (stringContainsSecret(JSON.stringify(error))) {
error = {
message: "Unexpected error",
status,
error: "Unexpected error",
}
}
if (environment.isTest() && ctx.headers["x-budibase-include-stacktrace"]) {
// @ts-ignore
error.stack = err.stack

@@ -0,0 +1,20 @@
import environment, { SECRETS } from "../environment"
export function stringContainsSecret(str: string) {
if (str.includes("-----BEGIN PRIVATE KEY-----")) {
return true
}
for (const key of SECRETS) {
const value = environment[key]
if (typeof value !== "string" || value === "") {
continue
}
if (str.includes(value)) {
return true
}
}
return false
}
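A short usage sketch under the same assumptions (redactIfLeaky is a hypothetical helper, not part of this commit); the errorHandling hunk above applies exactly this check to the serialized API error before it leaves the server:

import { stringContainsSecret } from "../security/secrets"

// If the serialized payload contains any configured secret, replace the
// whole thing rather than trying to scrub it field by field.
function redactIfLeaky(error: { message: string; status: number; error?: any }) {
  if (stringContainsSecret(JSON.stringify(error))) {
    return { message: "Unexpected error", status: error.status, error: "Unexpected error" }
  }
  return error
}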

@@ -0,0 +1,35 @@
import { randomUUID } from "crypto"
import environment, { SECRETS } from "../../environment"
import { stringContainsSecret } from "../secrets"
describe("secrets", () => {
describe("stringContainsSecret", () => {
it.each(SECRETS)("detects that a string contains a secret in: %s", key => {
const needle = randomUUID()
const haystack = `this is a secret: ${needle}`
const old = environment[key]
environment._set(key, needle)
try {
expect(stringContainsSecret(haystack)).toBe(true)
} finally {
environment._set(key, old)
}
})
it.each(SECRETS)(
"detects that a string does not contain a secret in: %s",
key => {
const needle = randomUUID()
const haystack = `this does not contain a secret`
const old = environment[key]
environment._set(key, needle)
try {
expect(stringContainsSecret(haystack)).toBe(false)
} finally {
environment._set(key, old)
}
}
)
})
})

@@ -221,7 +221,7 @@ export class UserDB {
const tenantId = getTenantId()
const db = getGlobalDB()
let { email, _id, userGroups = [], roles } = user
const { email, _id, userGroups = [], roles } = user
if (!email && !_id) {
throw new Error("_id or email is required")
@@ -231,11 +231,10 @@
if (_id) {
// try to get existing user from db
try {
dbUser = (await db.get(_id)) as User
if (email && dbUser.email !== email) {
throw "Email address cannot be changed"
dbUser = await usersCore.getById(_id)
if (email && dbUser.email !== email && !opts.allowChangingEmail) {
throw new Error("Email address cannot be changed")
}
email = dbUser.email
} catch (e: any) {
if (e.status === 404) {
// do nothing, save this new user with the id specified - required for SSO auth
@@ -271,13 +270,13 @@
// make sure we set the _id field for a new user
// Also if this is a new user, associate groups with them
let groupPromises = []
const groupPromises = []
if (!_id) {
_id = builtUser._id!
if (userGroups.length > 0) {
for (let groupId of userGroups) {
groupPromises.push(UserDB.groups.addUsers(groupId, [_id!]))
groupPromises.push(
UserDB.groups.addUsers(groupId, [builtUser._id!])
)
}
}
}
@@ -288,6 +287,11 @@
builtUser._rev = response.rev
await eventHelpers.handleSaveEvents(builtUser, dbUser)
if (dbUser && builtUser.email !== dbUser.email) {
// Remove the platform email reference if the email changed
await platform.users.removeUser({ email: dbUser.email } as User)
}
await platform.users.addUser(
tenantId,
builtUser._id!,

@@ -0,0 +1,188 @@
import { User, UserStatus } from "@budibase/types"
import { DBTestConfiguration, generator, structures } from "../../../tests"
import { UserDB } from "../db"
import { searchExistingEmails } from "../lookup"
const db = UserDB
const config = new DBTestConfiguration()
const quotas = {
addUsers: jest
.fn()
.mockImplementation(
(_change: number, _creatorsChange: number, cb?: () => Promise<any>) =>
cb && cb()
),
removeUsers: jest
.fn()
.mockImplementation(
(_change: number, _creatorsChange: number, cb?: () => Promise<any>) =>
cb && cb()
),
}
const groups = {
addUsers: jest.fn(),
getBulk: jest.fn(),
getGroupBuilderAppIds: jest.fn(),
}
const features = { isSSOEnforced: jest.fn(), isAppBuildersEnabled: jest.fn() }
describe("UserDB", () => {
beforeAll(() => {
db.init(quotas, groups, features)
})
describe("save", () => {
describe("create", () => {
it("creating a new user will persist it", async () => {
const email = generator.email({})
const user: User = structures.users.user({
email,
tenantId: config.getTenantId(),
})
await config.doInTenant(async () => {
const saveUserResponse = await db.save(user)
const persistedUser = await db.getUserByEmail(email)
expect(persistedUser).toEqual({
...user,
_id: saveUserResponse._id,
_rev: expect.stringMatching(/^1-\w+/),
password: expect.not.stringMatching(user.password!),
status: UserStatus.ACTIVE,
createdAt: Date.now(),
updatedAt: new Date().toISOString(),
})
})
})
it("the same email cannot be used twice in the same tenant", async () => {
const email = generator.email({})
const user: User = structures.users.user({
email,
tenantId: config.getTenantId(),
})
await config.doInTenant(() => db.save(user))
await config.doInTenant(() =>
expect(db.save(user)).rejects.toThrow(
`Email already in use: '${email}'`
)
)
})
it("the same email cannot be used twice in different tenants", async () => {
const email = generator.email({})
const user: User = structures.users.user({
email,
tenantId: config.getTenantId(),
})
await config.doInTenant(() => db.save(user))
config.newTenant()
await config.doInTenant(() =>
expect(db.save(user)).rejects.toThrow(
`Email already in use: '${email}'`
)
)
})
})
describe("update", () => {
let user: User
beforeEach(async () => {
user = await config.doInTenant(() =>
db.save(
structures.users.user({
email: generator.email({}),
tenantId: config.getTenantId(),
})
)
)
})
it("can update user properties", async () => {
await config.doInTenant(async () => {
const updatedName = generator.first()
user.firstName = updatedName
await db.save(user)
const persistedUser = await db.getUserByEmail(user.email)
expect(persistedUser).toEqual(
expect.objectContaining({
_id: user._id,
email: user.email,
firstName: updatedName,
lastName: user.lastName,
})
)
})
})
it("email cannot be updated by default", async () => {
await config.doInTenant(async () => {
await expect(
db.save({ ...user, email: generator.email({}) })
).rejects.toThrow("Email address cannot be changed")
})
})
it("email can be updated if specified", async () => {
await config.doInTenant(async () => {
const newEmail = generator.email({})
await db.save(
{ ...user, email: newEmail },
{ allowChangingEmail: true }
)
const persistedUser = await db.getUserByEmail(newEmail)
expect(persistedUser).toEqual(
expect.objectContaining({
_id: user._id,
email: newEmail,
lastName: user.lastName,
_rev: expect.stringMatching(/^2-\w+/),
})
)
})
})
it("updating emails frees previous emails", async () => {
await config.doInTenant(async () => {
const previousEmail = user.email
const newEmail = generator.email({})
expect(await searchExistingEmails([previousEmail, newEmail])).toEqual(
[previousEmail]
)
await db.save(
{ ...user, email: newEmail },
{ allowChangingEmail: true }
)
expect(await searchExistingEmails([previousEmail, newEmail])).toEqual(
[newEmail]
)
await db.save(
structures.users.user({
email: previousEmail,
tenantId: config.getTenantId(),
})
)
expect(await searchExistingEmails([previousEmail, newEmail])).toEqual(
[previousEmail, newEmail]
)
})
})
})
})
})

@@ -1,33 +1,25 @@
<script>
import Tooltip from "./Tooltip.svelte"
import Icon from "../Icon/Icon.svelte"
import AbsTooltip from "./AbsTooltip.svelte"
export let tooltip = ""
export let size = "M"
export let disabled = true
let showTooltip = false
</script>
<!-- svelte-ignore a11y-no-static-element-interactions -->
<div class:container={!!tooltip}>
<slot />
{#if tooltip}
<div class="icon-container">
<div
class="icon"
class:icon-small={size === "M" || size === "S"}
on:mouseover={() => (showTooltip = true)}
on:mouseleave={() => (showTooltip = false)}
on:focus
>
<Icon name="InfoOutline" size="S" {disabled} />
</div>
{#if showTooltip}
<div class="tooltip">
<Tooltip textWrapping={true} direction={"bottom"} text={tooltip} />
<AbsTooltip text={tooltip}>
<div
class="icon"
class:icon-small={size === "M" || size === "S"}
on:focus
>
<Icon name="InfoOutline" size="S" {disabled} hoverable />
</div>
{/if}
</AbsTooltip>
</div>
{/if}
</div>
@@ -44,14 +36,6 @@
margin-left: 5px;
margin-right: 5px;
}
.tooltip {
position: absolute;
display: flex;
justify-content: center;
top: 15px;
z-index: 200;
width: 160px;
}
.icon {
transform: scale(0.75);
}

@@ -74,7 +74,7 @@
"lodash": "4.17.21",
"posthog-js": "^1.118.0",
"remixicon": "2.5.0",
"sanitize-html": "^2.7.0",
"sanitize-html": "^2.13.0",
"shortid": "2.2.15",
"svelte-dnd-action": "^0.9.8",
"svelte-loading-spinners": "^0.1.1",

@@ -112,7 +112,7 @@
This action cannot be undone.
</ConfirmDialog>
<Modal bind:this={testDataModal} width="30%">
<Modal bind:this={testDataModal} width="30%" zIndex={5}>
<TestDataModal />
</Modal>
@@ -148,7 +148,6 @@
.header.scrolling {
background: var(--background);
border-bottom: var(--border-light);
border-left: var(--border-light);
z-index: 1;
}

@@ -8,11 +8,63 @@
import { automationStore, selectedAutomation } from "stores/builder"
import AutomationBlockSetup from "../../SetupPanel/AutomationBlockSetup.svelte"
import { cloneDeep } from "lodash/fp"
import { memo } from "@budibase/frontend-core"
import { AutomationEventType } from "@budibase/types"
let failedParse = null
let trigger = {}
let schemaProperties = {}
const rowTriggers = [
AutomationEventType.ROW_DELETE,
AutomationEventType.ROW_UPDATE,
AutomationEventType.ROW_SAVE,
]
/**
* Parses the automation test data and ensures it is valid
* @param {object} testData contains all config for the test
* @returns {object} valid testData
* @todo Parse *all* data for each trigger type and relay adequate feedback
*/
const parseTestData = testData => {
const autoTrigger = $selectedAutomation?.definition?.trigger
const { tableId } = autoTrigger?.inputs || {}
// Ensure the tableId matches the trigger table for row trigger automations
if (
rowTriggers.includes(autoTrigger?.event) &&
testData?.row?.tableId !== tableId
) {
return {
// Reset Core fields
row: { tableId },
meta: {},
id: "",
revision: "",
}
} else {
// Leave the core data as it is
return testData
}
}
/**
* Before executing a test run, relay if an automation is in a valid state
* @param {object} trigger The automation trigger config
* @returns {boolean} validation status
* @todo Parse *all* trigger types relay adequate feedback
*/
const isTriggerValid = trigger => {
if (rowTriggers.includes(trigger?.event) && !trigger?.inputs?.tableId) {
return false
}
return true
}
const memoTestData = memo(parseTestData($selectedAutomation.testData))
$: memoTestData.set(parseTestData($selectedAutomation.testData))
$: {
// clone the trigger so we're not mutating the reference
trigger = cloneDeep($selectedAutomation.definition.trigger)
@@ -20,34 +72,45 @@
// get the outputs so we can define the fields
let schema = Object.entries(trigger.schema?.outputs?.properties || {})
if (trigger?.event === "app:trigger") {
if (trigger?.event === AutomationEventType.APP_TRIGGER) {
schema = [["fields", { customType: "fields" }]]
}
schemaProperties = schema
}
// check to see if there is existing test data in the store
$: testData = $selectedAutomation.testData || {}
// Check the schema to see if required fields have been entered
$: isError = !trigger.schema.outputs.required.every(
required => testData[required] || required !== "row"
)
$: isError =
!isTriggerValid(trigger) ||
!trigger.schema.outputs.required.every(
required => $memoTestData?.[required] || required !== "row"
)
function parseTestJSON(e) {
let jsonUpdate
try {
const obj = JSON.parse(e.detail)
jsonUpdate = JSON.parse(e.detail)
failedParse = null
automationStore.actions.addTestDataToAutomation(obj)
} catch (e) {
failedParse = "Invalid JSON"
return false
}
if (rowTriggers.includes(trigger?.event)) {
const tableId = trigger?.inputs?.tableId
// Reset the tableId as it must match the trigger
if (jsonUpdate?.row?.tableId !== tableId) {
jsonUpdate.row.tableId = tableId
}
}
automationStore.actions.addTestDataToAutomation(jsonUpdate)
}
const testAutomation = async () => {
try {
await automationStore.actions.test($selectedAutomation, testData)
await automationStore.actions.test($selectedAutomation, $memoTestData)
$automationStore.showTestPanel = true
} catch (error) {
notifications.error(error)
@@ -85,7 +148,7 @@
{#if selectedValues}
<div class="tab-content-padding">
<AutomationBlockSetup
{testData}
testData={$memoTestData}
{schemaProperties}
isTestModal
block={trigger}

@@ -1,19 +1,28 @@
<script>
import { createEventDispatcher } from "svelte"
import RowSelectorTypes from "./RowSelectorTypes.svelte"
import PropField from "./PropField.svelte"
import DrawerBindableInput from "../../common/bindings/DrawerBindableInput.svelte"
import ModalBindableInput from "../../common/bindings/ModalBindableInput.svelte"
import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
import { DatePicker, Select } from "@budibase/bbui"
import { FieldType } from "@budibase/types"
const dispatch = createEventDispatcher()
export let value
export let value = {}
export let bindings
export let block
export let isTestModal
let schemaFields
const { STRING, NUMBER, ARRAY } = FieldType
let schemaFields = []
let editableValue
$: editableValue = { ...value }
$: {
let fields = {}
for (const [key, type] of Object.entries(block?.inputs?.fields ?? {})) {
fields = {
...fields,
@@ -25,8 +34,8 @@
},
}
if (value[key] === type) {
value[key] = INITIAL_VALUES[type.toUpperCase()]
if (editableValue[key] === type) {
editableValue[key] = INITIAL_VALUES[type.toUpperCase()]
}
}
@@ -38,77 +47,58 @@
NUMBER: null,
DATETIME: null,
STRING: "",
OPTIONS: [],
ARRAY: [],
ARRAY: "",
}
const coerce = (value, type) => {
const re = new RegExp(/{{([^{].*?)}}/g)
if (re.test(value)) {
return value
const onChange = (e, field) => {
if (e.detail !== editableValue[field]) {
editableValue[field] = e.detail
dispatch("change", editableValue)
}
if (type === "boolean") {
if (typeof value === "boolean") {
return value
}
return value === "true"
}
if (type === "number") {
if (typeof value === "number") {
return value
}
return Number(value)
}
if (type === "options") {
return [value]
}
if (type === "array") {
if (Array.isArray(value)) {
return value
}
return value.split(",").map(x => x.trim())
}
if (type === "link") {
if (Array.isArray(value)) {
return value
}
return [value]
}
return value
}
const onChange = (e, field, type) => {
value[field] = coerce(e.detail, type)
dispatch("change", value)
}
</script>
{#if schemaFields.length && isTestModal}
<div class="schema-fields">
{#if schemaFields?.length && isTestModal}
<div class="fields">
{#each schemaFields as [field, schema]}
<RowSelectorTypes
{isTestModal}
{field}
{schema}
{bindings}
{value}
{onChange}
/>
<PropField label={field}>
{#if [STRING, NUMBER, ARRAY].includes(schema.type)}
<svelte:component
this={isTestModal ? ModalBindableInput : DrawerBindableInput}
panel={AutomationBindingPanel}
value={editableValue[field]}
on:change={e => onChange(e, field)}
type="string"
{bindings}
allowJS={true}
updateOnChange={false}
title={schema.name}
autocomplete="off"
/>
{:else if schema.type === "boolean"}
<Select
on:change={e => onChange(e, field)}
value={editableValue[field]}
options={[
{ label: "True", value: "true" },
{ label: "False", value: "false" },
]}
/>
{:else if schema.type === "datetime"}
<DatePicker
value={editableValue[field]}
on:change={e => onChange(e, field)}
/>
{/if}
</PropField>
{/each}
</div>
{/if}
<style>
.schema-fields {
display: grid;
grid-gap: var(--spacing-s);
margin-top: var(--spacing-s);
}
.schema-fields :global(label) {
text-transform: capitalize;
.fields {
display: flex;
flex-direction: column;
gap: var(--spacing-m);
}
</style>

@@ -0,0 +1,60 @@
<script>
import { Label } from "@budibase/bbui"
export let label
export let labelTooltip
export let fullWidth = false
export let componentWidth = 320
</script>
<div
class="prop-field"
class:fullWidth
style={`--comp-width: ${componentWidth}px;`}
>
<div class="prop-label" title={label}>
<Label tooltip={labelTooltip}>{label}</Label>
</div>
<div class="prop-control">
<slot />
</div>
</div>
<style>
.prop-field {
display: grid;
grid-template-columns: 1fr var(--comp-width);
}
.prop-field.fullWidth {
grid-template-columns: 1fr;
}
.prop-field.fullWidth .prop-label {
margin-bottom: var(--spacing-s);
}
.prop-label {
display: flex;
align-items: center;
overflow: hidden;
}
.prop-label :global(> div) {
width: 100%;
}
.prop-label :global(> div > label) {
text-overflow: ellipsis;
white-space: nowrap;
overflow: hidden;
}
.prop-control {
margin-left: var(--spacing-s);
}
.prop-field.fullWidth .prop-control {
margin-left: 0px;
}
</style>

@@ -1,28 +1,43 @@
<script>
import { tables } from "stores/builder"
import { Select, Checkbox, Label } from "@budibase/bbui"
import {
ActionButton,
Popover,
Icon,
TooltipPosition,
TooltipType,
} from "@budibase/bbui"
import { createEventDispatcher } from "svelte"
import { FieldType } from "@budibase/types"
import RowSelectorTypes from "./RowSelectorTypes.svelte"
import DrawerBindableSlot from "../../common/bindings/DrawerBindableSlot.svelte"
import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
import { TableNames } from "constants"
import { FIELDS } from "constants/backend"
import { capitalise } from "helpers"
import { memo } from "@budibase/frontend-core"
import PropField from "./PropField.svelte"
import { cloneDeep, isPlainObject, mergeWith } from "lodash"
const dispatch = createEventDispatcher()
export let value
export let row
export let meta
export let bindings
export let isTestModal
export let isUpdateRow
$: parsedBindings = bindings.map(binding => {
let clone = Object.assign({}, binding)
clone.icon = "ShareAndroid"
return clone
const typeToField = Object.values(FIELDS).reduce((acc, field) => {
acc[field.type] = field
return acc
}, {})
const memoStore = memo({
row,
meta,
})
let table
// Row Schema Fields
let schemaFields
let attachmentTypes = [
FieldType.ATTACHMENTS,
@@ -30,32 +45,123 @@
FieldType.SIGNATURE_SINGLE,
]
$: {
table = $tables.list.find(table => table._id === value?.tableId)
let customPopover
let popoverAnchor
let editableRow = {}
let editableFields = {}
// Just sorting attachment types to the bottom here for a cleaner UX
schemaFields = Object.entries(table?.schema ?? {}).sort(
([, schemaA], [, schemaB]) =>
(schemaA.type === "attachment") - (schemaB.type === "attachment")
)
// Avoid unnecessary updates
$: memoStore.set({
row,
meta,
})
schemaFields.forEach(([, schema]) => {
if (!schema.autocolumn && !value[schema.name]) {
value[schema.name] = ""
}
})
}
const onChangeTable = e => {
value["tableId"] = e.detail
dispatch("change", value)
}
$: parsedBindings = bindings.map(binding => {
let clone = Object.assign({}, binding)
clone.icon = "ShareAndroid"
return clone
})
const coerce = (value, type) => {
const re = new RegExp(/{{([^{].*?)}}/g)
if (re.test(value)) {
return value
$: tableId = $memoStore?.row?.tableId
$: initData(tableId, $memoStore?.meta?.fields, $memoStore?.row)
const initData = (tableId, metaFields, row) => {
if (!tableId) {
return
}
// Refresh the editable fields
editableFields = cloneDeep(metaFields || {})
// Refresh all the row data
editableRow = cloneDeep(row || {})
table = $tables.list.find(table => table._id === tableId)
if (table) {
editableRow["tableId"] = tableId
schemaFields = Object.entries(table?.schema ?? {})
.filter(entry => {
const [, field] = entry
return field.type !== "formula" && !field.autocolumn
})
.sort(([nameA], [nameB]) => {
return nameA < nameB ? -1 : 1
})
// Parse out any data not in the schema.
for (const column in editableFields) {
if (!Object.hasOwn(table?.schema, column)) {
delete editableFields[column]
}
}
}
// Go through the table schema and build out the editable content
for (const entry of schemaFields) {
const [key, fieldSchema] = entry
const emptyField =
editableRow[key] == null || editableRow[key]?.length === 0
// Put non-empty elements into the update and add their key to the fields list.
if (!emptyField && !Object.hasOwn(editableFields, key)) {
editableFields = {
...editableFields,
[key]: {},
}
}
// Legacy - clearRelationships
// Init the field and add it to the update.
if (emptyField) {
if (editableFields[key]?.clearRelationships === true) {
const emptyField = coerce(
!Object.hasOwn($memoStore?.row, key) ? "" : $memoStore?.row[key],
fieldSchema.type
)
// remove this and place the field in the editable row.
delete editableFields[key]?.clearRelationships
// Default the field
editableRow = {
...editableRow,
[key]: emptyField,
}
} else {
// Purge from the update as its presence is not necessary.
delete editableRow[key]
}
}
}
// Parse all known row schema keys
const schemaKeys = [
"tableId",
...schemaFields.map(entry => {
const [key] = entry
return key
}),
]
// Purge any row keys that are not present in the schema.
for (const rowKey of Object.keys(editableRow)) {
if (!schemaKeys.includes(rowKey)) {
delete editableRow[rowKey]
delete editableFields[rowKey]
}
}
}
// Coerce a raw field value to the target schema type; handlebars binding strings are returned untouched
const coerce = (value, type) => {
const re = /{{([^{].*?)}}/g
if (typeof value === "string" && re.test(value)) {
return value
}
if (type === "number") {
if (typeof value === "number") {
return value
@ -66,6 +172,9 @@
return value
}
if (type === "array") {
if (!value) {
return []
}
if (Array.isArray(value)) {
return value
}
@ -73,7 +182,9 @@
}
if (type === "link") {
if (Array.isArray(value)) {
if (!value) {
return []
} else if (Array.isArray(value)) {
return value
}
return value.split(",").map(x => x.trim())
@ -86,130 +197,176 @@
return value
}
const onChange = (e, field, type) => {
let newValue = {
...value,
[field]: coerce(e.detail, type),
}
dispatch("change", newValue)
const isFullWidth = type => {
return (
attachmentTypes.includes(type) ||
type === FieldType.JSON ||
type === FieldType.LONGFORM
)
}
const onChangeSetting = (field, key, value) => {
let newField = {}
newField[field] = {
[key]: value,
const onChange = update => {
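// Deep-merge the incoming update over the current editable row/meta state.
// The customizer strips null values from merged objects, so a null acts as
// a deletion marker for a field.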
const customizer = (objValue, srcValue) => {
if (isPlainObject(objValue) && isPlainObject(srcValue)) {
const result = mergeWith({}, objValue, srcValue, customizer)
let outcome = Object.keys(result).reduce((acc, key) => {
if (result[key] !== null) {
acc[key] = result[key]
}
return acc
}, {})
return outcome
}
return srcValue
}
let updatedFields = {
...meta?.fields,
...newField,
}
dispatch("change", {
key: "meta",
fields: updatedFields,
})
const result = mergeWith(
{},
{
row: editableRow,
meta: {
fields: editableFields,
},
},
update,
customizer
)
dispatch("change", result)
}
// Ensure any nullish tableId values get set to empty string so
// that the select works
$: if (value?.tableId == null) value = { tableId: "" }
</script>
<div class="schema-fields">
<Label>Table</Label>
<div class="field-width">
<Select
on:change={onChangeTable}
value={value.tableId}
options={$tables.list.filter(table => table._id !== TableNames.USERS)}
getOptionLabel={table => table.name}
getOptionValue={table => table._id}
/>
</div>
</div>
{#if schemaFields.length}
{#each schemaFields as [field, schema]}
{#if !schema.autocolumn}
<div class:schema-fields={!attachmentTypes.includes(schema.type)}>
<Label>{field}</Label>
<div class:field-width={!attachmentTypes.includes(schema.type)}>
{#if isTestModal}
{#each schemaFields || [] as [field, schema]}
{#if !schema.autocolumn && Object.hasOwn(editableFields, field)}
<PropField label={field} fullWidth={isFullWidth(schema.type)}>
<div class="prop-control-wrap">
{#if isTestModal}
<RowSelectorTypes
{isTestModal}
{field}
{schema}
bindings={parsedBindings}
value={editableRow}
meta={{
fields: editableFields,
}}
{onChange}
/>
{:else}
<DrawerBindableSlot
title={$memoStore?.row?.title || field}
panel={AutomationBindingPanel}
type={schema.type}
{schema}
value={editableRow[field]}
on:change={e =>
onChange({
row: {
[field]: e.detail,
},
})}
{bindings}
allowJS={true}
updateOnChange={false}
drawerLeft="260px"
>
<RowSelectorTypes
{isTestModal}
{field}
{schema}
bindings={parsedBindings}
{value}
{onChange}
value={editableRow}
meta={{
fields: editableFields,
}}
onChange={change => onChange(change)}
/>
{:else}
<DrawerBindableSlot
title={value.title || field}
panel={AutomationBindingPanel}
type={schema.type}
{schema}
value={value[field]}
on:change={e => onChange(e, field)}
{bindings}
allowJS={true}
updateOnChange={false}
drawerLeft="260px"
>
<RowSelectorTypes
{isTestModal}
{field}
{schema}
bindings={parsedBindings}
{value}
{onChange}
useAttachmentBinding={meta?.fields?.[field]
?.useAttachmentBinding}
{onChangeSetting}
/>
</DrawerBindableSlot>
{/if}
{#if isUpdateRow && schema.type === "link"}
<div class="checkbox-field">
<Checkbox
value={meta.fields?.[field]?.clearRelationships}
text={"Clear relationships if empty?"}
size={"S"}
on:change={e =>
onChangeSetting(field, "clearRelationships", e.detail)}
/>
</div>
{/if}
</div>
</DrawerBindableSlot>
{/if}
</div>
{/if}
{/each}
</PropField>
{/if}
{/each}
{#if table && schemaFields}
{#key editableFields}
<div
class="add-fields-btn"
class:empty={Object.keys(editableFields).length === 0}
bind:this={popoverAnchor}
>
<ActionButton
icon="Add"
fullWidth
on:click={() => {
customPopover.show()
}}
disabled={!schemaFields}
>Add fields
</ActionButton>
</div>
{/key}
{/if}
<Popover
align="center"
bind:this={customPopover}
anchor={popoverAnchor}
useAnchorWidth
maxHeight={300}
resizable={false}
offset={10}
>
<!-- svelte-ignore a11y-click-events-have-key-events -->
<!-- svelte-ignore a11y-no-noninteractive-element-interactions -->
<ul class="spectrum-Menu" role="listbox">
{#each schemaFields || [] as [field, schema]}
{#if !schema.autocolumn}
<li
class="table_field spectrum-Menu-item"
class:is-selected={Object.hasOwn(editableFields, field)}
on:click={() => {
if (Object.hasOwn(editableFields, field)) {
delete editableFields[field]
onChange({
meta: { fields: editableFields },
row: { [field]: null },
})
} else {
editableFields[field] = {}
onChange({ meta: { fields: editableFields } })
}
}}
>
<Icon
name={typeToField?.[schema.type]?.icon}
color={"var(--spectrum-global-color-gray-600)"}
tooltip={capitalise(schema.type)}
tooltipType={TooltipType.Info}
tooltipPosition={TooltipPosition.Left}
/>
<div class="field_name spectrum-Menu-itemLabel">{field}</div>
<svg
class="spectrum-Icon spectrum-UIIcon-Checkmark100 spectrum-Menu-checkmark spectrum-Menu-itemIcon"
focusable="false"
aria-hidden="true"
>
<use xlink:href="#spectrum-css-icon-Checkmark100" />
</svg>
</li>
{/if}
{/each}
</ul>
</Popover>
<style>
.field-width {
width: 320px;
.table_field {
display: flex;
padding: var(--spacing-s) var(--spacing-l);
gap: var(--spacing-s);
}
.schema-fields {
display: flex;
justify-content: space-between;
align-items: center;
flex-direction: row;
align-items: center;
gap: 10px;
flex: 1;
margin-bottom: 10px;
}
.schema-fields :global(label) {
text-transform: capitalize;
}
.checkbox-field {
padding-bottom: var(--spacing-s);
padding-left: 1px;
padding-top: var(--spacing-s);
}
.checkbox-field :global(label) {
text-transform: none;
/* Override for the general json field icon positioning */
.prop-control-wrap :global(.icon.json-slot-icon) {
right: 1px !important;
}
</style>

View file

@ -11,17 +11,18 @@
import DrawerBindableInput from "../../common/bindings/DrawerBindableInput.svelte"
import ModalBindableInput from "../../common/bindings/ModalBindableInput.svelte"
import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
import Editor from "components/integration/QueryEditor.svelte"
import CodeEditor from "components/common/CodeEditor/CodeEditor.svelte"
import KeyValueBuilder from "components/integration/KeyValueBuilder.svelte"
export let onChange
export let field
export let schema
export let value
export let meta
export let bindings
export let isTestModal
export let useAttachmentBinding
export let onChangeSetting
$: fieldData = value[field]
$: parsedBindings = bindings.map(binding => {
let clone = Object.assign({}, binding)
@ -35,14 +36,15 @@
FieldType.SIGNATURE_SINGLE,
]
let previousBindingState = useAttachmentBinding
function schemaHasOptions(schema) {
return !!schema.constraints?.inclusion?.length
}
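// Convert a stored attachment value (single object or array) into the
// { url: filename } map that KeyValueBuilder expects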
function handleAttachmentParams(keyValueObj) {
let params = {}
if (!keyValueObj) {
return null
}
if (!Array.isArray(keyValueObj) && keyValueObj) {
keyValueObj = [keyValueObj]
@ -50,45 +52,68 @@
if (keyValueObj.length) {
for (let param of keyValueObj) {
params[param.url] = param.filename
params[param.url || ""] = param.filename || ""
}
}
return params
}
async function handleToggleChange(toggleField, event) {
if (event.detail === true) {
value[toggleField] = []
} else {
value[toggleField] = ""
}
previousBindingState = event.detail
onChangeSetting(toggleField, "useAttachmentBinding", event.detail)
onChange({ detail: value[toggleField] }, toggleField)
}
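// Normalise KeyValueBuilder entries into attachment objects - single
// attachment/signature fields yield one object (or null), multi-value
// fields yield an array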
const handleMediaUpdate = e => {
const media = e.detail || []
const isSingle =
schema.type === FieldType.ATTACHMENT_SINGLE ||
schema.type === FieldType.SIGNATURE_SINGLE
const parsedMedia = media.map(({ name, value }) => ({
url: name,
filename: value,
}))
$: if (useAttachmentBinding !== previousBindingState) {
if (useAttachmentBinding) {
value[field] = []
} else {
value[field] = ""
if (isSingle) {
const [singleMedia] = parsedMedia
// Return only the first entry
return singleMedia
? {
url: singleMedia.url,
filename: singleMedia.filename,
}
: null
}
previousBindingState = useAttachmentBinding
// Return the entire array
return parsedMedia
}
</script>
{#if schemaHasOptions(schema) && schema.type !== "array"}
<Select
on:change={e => onChange(e, field)}
value={value[field]}
on:change={e =>
onChange({
row: {
[field]: e.detail,
},
})}
value={fieldData}
options={schema.constraints.inclusion}
/>
{:else if schema.type === "datetime"}
<DatePicker value={value[field]} on:change={e => onChange(e, field)} />
<DatePicker
value={fieldData}
on:change={e =>
onChange({
row: {
[field]: e.detail,
},
})}
/>
{:else if schema.type === "boolean"}
<Select
on:change={e => onChange(e, field)}
value={value[field]}
on:change={e =>
onChange({
row: {
[field]: e.detail,
},
})}
value={fieldData}
options={[
{ label: "True", value: "true" },
{ label: "False", value: "false" },
@ -96,83 +121,111 @@
/>
{:else if schemaHasOptions(schema) && schema.type === "array"}
<Multiselect
bind:value={value[field]}
value={fieldData}
options={schema.constraints.inclusion}
on:change={e => onChange(e, field)}
on:change={e =>
onChange({
row: {
[field]: e.detail,
},
})}
/>
{:else if schema.type === "longform"}
<TextArea bind:value={value[field]} on:change={e => onChange(e, field)} />
<TextArea
value={fieldData}
on:change={e =>
onChange({
row: {
[field]: e.detail,
},
})}
/>
{:else if schema.type === "json"}
<span>
<Editor
editorHeight="150"
mode="json"
on:change={e => {
if (e.detail?.value !== value[field]) {
onChange(e, field, schema.type)
}
}}
value={value[field]}
/>
<div class="field-wrap json-field">
<CodeEditor
value={fieldData}
on:change={e => {
onChange({
row: {
[field]: e.detail,
},
})
}}
/>
</div>
</span>
{:else if schema.type === "link"}
<LinkedRowSelector
linkedRows={value[field]}
linkedRows={fieldData}
{schema}
on:change={e => onChange(e, field)}
on:change={e =>
onChange({
row: {
[field]: e.detail,
},
})}
useLabel={false}
/>
{:else if schema.type === "bb_reference" || schema.type === "bb_reference_single"}
<LinkedRowSelector
linkedRows={value[field]}
linkedRows={fieldData}
{schema}
linkedTableId={"ta_users"}
on:change={e => onChange(e, field)}
on:change={e =>
onChange({
row: {
[field]: e.detail,
},
})}
useLabel={false}
/>
{:else if attachmentTypes.includes(schema.type)}
<div class="attachment-field-container">
<div class="toggle-container">
<Toggle
value={useAttachmentBinding}
value={meta?.fields?.[field]?.useAttachmentBinding}
text={"Use bindings"}
size={"XS"}
on:change={e => handleToggleChange(field, e)}
on:change={e => {
onChange({
row: {
[field]: null,
},
meta: {
fields: {
[field]: {
useAttachmentBinding: e.detail,
},
},
},
})
}}
/>
</div>
{#if !useAttachmentBinding}
{#if !meta?.fields?.[field]?.useAttachmentBinding}
<div class="attachment-field-spacing">
<KeyValueBuilder
on:change={async e => {
onChange(
{
detail:
schema.type === FieldType.ATTACHMENT_SINGLE ||
schema.type === FieldType.SIGNATURE_SINGLE
? e.detail.length > 0
? {
url: e.detail[0].name,
filename: e.detail[0].value,
}
: {}
: e.detail.map(({ name, value }) => ({
url: name,
filename: value,
})),
on:change={e => {
onChange({
row: {
[field]: handleMediaUpdate(e),
},
field
)
})
}}
object={handleAttachmentParams(value[field])}
object={handleAttachmentParams(fieldData)}
allowJS
{bindings}
keyBindings
customButtonText={"Add attachment"}
customButtonText={schema.type === FieldType.SIGNATURE_SINGLE
? "Add signature"
: "Add attachment"}
keyPlaceholder={"URL"}
valuePlaceholder={"Filename"}
actionButtonDisabled={(schema.type === FieldType.ATTACHMENT_SINGLE ||
schema.type === FieldType.SIGNATURE) &&
Object.keys(value[field]).length >= 1}
schema.type === FieldType.SIGNATURE_SINGLE) &&
fieldData}
/>
</div>
{:else}
@ -180,8 +233,13 @@
<svelte:component
this={isTestModal ? ModalBindableInput : DrawerBindableInput}
panel={AutomationBindingPanel}
value={value[field]}
on:change={e => onChange(e, field)}
value={fieldData}
on:change={e =>
onChange({
row: {
[field]: e.detail,
},
})}
type="string"
bindings={parsedBindings}
allowJS={true}
@ -195,20 +253,41 @@
<svelte:component
this={isTestModal ? ModalBindableInput : DrawerBindableInput}
panel={AutomationBindingPanel}
value={value[field]}
on:change={e => onChange(e, field)}
value={fieldData}
on:change={e =>
onChange({
row: {
[field]: e.detail,
},
})}
type="string"
bindings={parsedBindings}
allowJS={true}
updateOnChange={false}
title={schema.name}
autocomplete="off"
/>
{/if}
<style>
.attachment-field-spacing,
.json-input-spacing {
margin-top: var(--spacing-s);
margin-bottom: var(--spacing-l);
.attachment-field-spacing {
border: 1px solid var(--spectrum-global-color-gray-400);
border-radius: 4px;
padding: var(--spacing-s);
}
.field-wrap.json-field {
height: 120px;
}
.field-wrap {
box-sizing: border-box;
border: 1px solid var(--spectrum-global-color-gray-400);
border-radius: 4px;
}
.field-wrap :global(.cm-editor),
.field-wrap :global(.cm-scroller) {
border-radius: 4px;
}
</style>

View file

@ -8,6 +8,7 @@
export let value
export let isTrigger
export let disabled = false
$: filteredTables = $tables.list.filter(table => {
return !isTrigger || table._id !== TableNames.USERS
@ -25,4 +26,5 @@
options={filteredTables}
getOptionLabel={table => table.name}
getOptionValue={table => table._id}
{disabled}
/>

View file

@ -23,6 +23,7 @@
export let disableBindings = false
export let forceModal = false
export let context = null
export let autocomplete
const dispatch = createEventDispatcher()
@ -71,6 +72,7 @@
on:blur={onBlur}
{placeholder}
{updateOnChange}
{autocomplete}
/>
{#if !disabled && !disableBindings}
<div

View file

@ -0,0 +1,12 @@
export { default as BindableCombobox } from "./BindableCombobox.svelte"
export { default as BindingPanel } from "./BindingPanel.svelte"
export { default as BindingSidePanel } from "./BindingSidePanel.svelte"
export { default as DrawerBindableCombobox } from "./DrawerBindableCombobox.svelte"
export { default as ClientBindingPanel } from "./ClientBindingPanel.svelte"
export { default as DrawerBindableInput } from "./DrawerBindableInput.svelte"
export { default as DrawerBindableSlot } from "./DrawerBindableSlot.svelte"
export { default as EvaluationSidePanel } from "./EvaluationSidePanel.svelte"
export { default as ModalBindableInput } from "./ModalBindableInput.svelte"
export { default as ServerBindingPanel } from "./ServerBindingPanel.svelte"
export { default as SnippetDrawer } from "./SnippetDrawer.svelte"
export { default as SnippetSidePanel } from "./SnippetSidePanel.svelte"

View file

@ -11,7 +11,7 @@
notifications,
} from "@budibase/bbui"
import { AUTH_TYPE_LABELS, AUTH_TYPES } from "./authTypes"
import BindableCombobox from "components/common/bindings/BindableCombobox.svelte"
import { BindableCombobox } from "components/common/bindings"
import { getAuthBindings, getEnvironmentBindings } from "dataBinding"
import { environment, licensing, auth } from "stores/portal"
import CreateEditVariableModal from "components/portal/environment/CreateEditVariableModal.svelte"

View file

@ -5,7 +5,17 @@
export let row
</script>
{value}
<span title={value} class="email">
{value}
</span>
{#if row.scimInfo?.isSync}
<ActiveDirectoryInfo iconSize="XS" />
{/if}
<style>
.email {
text-overflow: ellipsis;
overflow: hidden;
white-space: nowrap;
}
</style>

View file

@ -157,7 +157,8 @@ const automationActions = store => ({
)
}
},
updateBlockInputs: async (block, data) => {
processBlockInputs: async (block, data) => {
// Create new modified block
let newBlock = {
...block,
@ -184,6 +185,14 @@ const automationActions = store => ({
// Don't save if no changes were made
if (JSON.stringify(newAutomation) === JSON.stringify(automation)) {
return false
}
return newAutomation
},
updateBlockInputs: async (block, data) => {
const newAutomation = await store.actions.processBlockInputs(block, data)
if (newAutomation === false) {
return
}
await store.actions.save(newAutomation)

View file

@ -82,7 +82,7 @@ export default defineConfig(({ mode }) => {
...(isProduction ? [] : devOnlyPlugins),
],
optimizeDeps: {
exclude: ["@roxi/routify"],
exclude: ["@roxi/routify", "fsevents"],
},
resolve: {
dedupe: ["@roxi/routify"],

View file

@ -30,7 +30,7 @@
"node-fetch": "2.6.7",
"posthog-node": "1.3.0",
"pouchdb": "7.3.0",
"pouchdb-replication-stream": "1.2.9",
"@budibase/pouchdb-replication-stream": "1.2.11",
"randomstring": "1.1.5",
"tar": "6.2.1",
"yaml": "^2.1.1"

View file

@ -17,7 +17,7 @@ export function getPouch(url?: string) {
prefix: url,
}
}
const replicationStream = require("pouchdb-replication-stream")
const replicationStream = require("@budibase/pouchdb-replication-stream")
PouchDB.plugin(replicationStream.plugin)
// @ts-ignore
PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)

View file

@ -30,7 +30,7 @@
"downloadjs": "1.4.7",
"html5-qrcode": "^2.2.1",
"leaflet": "^1.7.1",
"sanitize-html": "^2.7.0",
"sanitize-html": "^2.13.0",
"screenfull": "^6.0.1",
"shortid": "^2.2.15",
"svelte-spa-router": "^4.0.1",

View file

@ -12,6 +12,6 @@
"dayjs": "^1.10.8",
"lodash": "4.17.21",
"shortid": "2.2.15",
"socket.io-client": "^4.6.1"
"socket.io-client": "^4.7.5"
}
}

View file

@ -31,7 +31,7 @@
if (
tables.find(
table =>
table._id === datasource.tableId &&
table._id === datasource?.tableId &&
table.sourceId === DEFAULT_BB_DATASOURCE_ID
) &&
!schemaFields.some(field => field.name === "_id")

@ -1 +1 @@
Subproject commit e8f2c5a14780e1f61ec3896821ba5f93d486eb72
Subproject commit 11379517b76264a7f938c2d520bd259f586edada

View file

@ -14,7 +14,6 @@
"@rollup/plugin-commonjs": "^25.0.7",
"@rollup/plugin-node-resolve": "^15.2.3",
"rollup": "^4.9.6",
"rollup-plugin-terser": "^7.0.2",
"rollup-plugin-polyfill-node": "^0.13.0"
}
}

View file

@ -1,23 +0,0 @@
// eslint-disable-next-line @typescript-eslint/no-unused-vars
module SendgridMock {
class Email {
constructor() {
// @ts-ignore
this.apiKey = null
}
setApiKey(apiKey: any) {
// @ts-ignore
this.apiKey = apiKey
}
async send(msg: any) {
if (msg.to === "invalid@example.com") {
throw "Invalid"
}
return msg
}
}
module.exports = new Email()
}

View file

@ -58,7 +58,7 @@
"@bull-board/api": "5.10.2",
"@bull-board/koa": "5.10.2",
"@elastic/elasticsearch": "7.10.0",
"@google-cloud/firestore": "6.8.0",
"@google-cloud/firestore": "7.8.0",
"@koa/router": "8.0.8",
"@socket.io/redis-adapter": "^8.2.1",
"@types/xml2js": "^0.4.14",
@ -81,7 +81,7 @@
"google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.2",
"ioredis": "5.3.2",
"isolated-vm": "^4.7.2",
"jimp": "0.22.10",
"jimp": "0.22.12",
"joi": "17.6.0",
"js-yaml": "4.1.0",
"jsonschema": "1.4.0",
@ -94,7 +94,7 @@
"koa2-ratelimit": "1.1.1",
"lodash": "4.17.21",
"memorystream": "0.3.1",
"mongodb": "^6.3.0",
"mongodb": "6.7.0",
"mssql": "10.0.1",
"mysql2": "3.9.8",
"node-fetch": "2.6.7",
@ -109,7 +109,7 @@
"serialize-error": "^7.0.1",
"server-destroy": "1.0.1",
"snowflake-promise": "^4.5.0",
"socket.io": "4.6.2",
"socket.io": "4.7.5",
"tar": "6.2.1",
"to-json-schema": "0.2.5",
"uuid": "^8.3.2",

View file

@ -72,15 +72,23 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
const row = await sdk.rows.external.getRow(tableId, updatedId, {
relationships: true,
})
const enrichedRow = await outputProcessing(table, row, {
squash: true,
preserveLinks: true,
})
const [enrichedRow, oldRow] = await Promise.all([
outputProcessing(table, row, {
squash: true,
preserveLinks: true,
}),
outputProcessing(table, beforeRow, {
squash: true,
preserveLinks: true,
}),
])
return {
...response,
row: enrichedRow,
table,
oldRow: beforeRow,
oldRow,
}
}

View file

@ -25,7 +25,9 @@ export async function searchView(
ctx.throw(400, `This method only supports viewsV2`)
}
const viewFields = Object.keys(view.schema || {})
const viewFields = Object.entries(view.schema || {})
.filter(([_, value]) => value.visible)
.map(([key]) => key)
const { body } = ctx.request
// Enrich saved query with ephemeral query params.

View file

@ -33,11 +33,6 @@ async function parseSchema(view: CreateViewRequest) {
p[fieldName] = fieldSchema
return p
}, {} as Record<string, RequiredKeys<ViewUIFieldMetadata>>)
for (let [key, column] of Object.entries(finalViewSchema)) {
if (!column.visible && !column.readonly) {
delete finalViewSchema[key]
}
}
return finalViewSchema
}

View file

@ -2166,4 +2166,47 @@ describe.each([
})
}
)
describe.each([
"名前", // Japanese for "name"
"Benutzer-ID", // German for "user ID", includes a hyphen
"numéro", // French for "number", includes an accent
"år", // Swedish for "year", includes a ring above
"naïve", // English word borrowed from French, includes an umlaut
"الاسم", // Arabic for "name"
"оплата", // Russian for "payment"
"पता", // Hindi for "address"
"用戶名", // Chinese for "username"
"çalışma_zamanı", // Turkish for "runtime", includes an underscore and a cedilla
"preço", // Portuguese for "price", includes a cedilla
"사용자명", // Korean for "username"
"usuario_ñoño", // Spanish, uses an underscore and includes "ñ"
"файл", // Bulgarian for "file"
"δεδομένα", // Greek for "data"
"geändert_am", // German for "modified on", includes an umlaut
"ব্যবহারকারীর_নাম", // Bengali for "user name", includes an underscore
"São_Paulo", // Portuguese, includes an underscore and a tilde
"età", // Italian for "age", includes an accent
"ชื่อผู้ใช้", // Thai for "username"
])("non-ascii column name: %s", name => {
beforeAll(async () => {
table = await createTable({
[name]: {
name,
type: FieldType.STRING,
},
})
await createRows([{ [name]: "a" }, { [name]: "b" }])
})
it("should be able to query a column with non-ascii characters", async () => {
await expectSearch({
query: {
equal: {
[`1:${name}`]: "a",
},
},
}).toContainExactly([{ [name]: "a" }])
})
})
})

View file

@ -218,6 +218,10 @@ describe.each([
order: 1,
width: 100,
},
Category: {
visible: false,
icon: "ic",
},
},
id: createdView.id,
version: 2,
@ -269,9 +273,8 @@ describe.each([
...newView,
schema: {
id: { visible: true },
Price: {
visible: true,
},
Price: { visible: true },
Category: { visible: false },
},
id: expect.any(String),
version: 2,
@ -759,6 +762,7 @@ describe.each([
order: 1,
width: 100,
},
Category: { visible: false, icon: "ic" },
},
id: view.id,
version: 2,
@ -873,30 +877,23 @@ describe.each([
await db.getDB(config.appId!).put(tableToUpdate)
view = await config.api.viewV2.get(view.id)
await config.api.viewV2.update({
...view,
schema: {
...view.schema,
Price: {
visible: false,
await config.api.viewV2.update(
{
...view,
schema: {
...view.schema,
Price: {
visible: false,
},
},
},
})
expect(await config.api.viewV2.get(view.id)).toEqual(
expect.objectContaining({
schema: {
id: expect.objectContaining({
visible: false,
}),
Price: expect.objectContaining({
visible: false,
}),
Category: expect.objectContaining({
visible: true,
}),
{
status: 400,
body: {
message: 'You can\'t hide "id" because it is a required field.',
status: 400,
},
})
}
)
})
})
@ -938,7 +935,6 @@ describe.each([
Category: { visible: true },
},
})
expect(res.schema?.Price).toBeUndefined()
const view = await config.api.viewV2.get(res.id)
const updatedTable = await config.api.table.get(table._id!)
@ -1205,6 +1201,7 @@ describe.each([
],
schema: {
id: { visible: true },
one: { visible: false },
two: { visible: true },
},
})

View file

@ -49,7 +49,6 @@ export async function checkMissingMigrations(
const queue = getAppMigrationQueue()
if (
queue &&
latestMigration &&
getTimestamp(currentVersion) < getTimestamp(latestMigration)
) {

View file

@ -10,6 +10,6 @@ export const MIGRATIONS: AppMigration[] = [
{
id: "20240604153647_initial_sqs",
func: m20240604153647_initial_sqs,
disabled: !env.SQS_SEARCH_ENABLE,
disabled: !(env.SQS_MIGRATION_ENABLE || env.SQS_SEARCH_ENABLE),
},
]

View file

@ -40,7 +40,7 @@ const migration = async () => {
// only do initial search if environment is using SQS already
// initial search makes sure that all the indexes have been created
// and are ready to use, avoiding any initial waits for large tables
if (env.SQS_SEARCH_ENABLE) {
if (env.SQS_MIGRATION_ENABLE || env.SQS_SEARCH_ENABLE) {
const tables = await sdk.tables.getAllInternalTables()
// do these one by one - running in parallel could cause problems
for (let table of tables) {

View file

@ -66,64 +66,69 @@ function oldLinkDocument(): Omit<LinkDocument, "tableId"> {
}
}
async function sqsDisabled(cb: () => Promise<void>) {
await config.withEnv({ SQS_SEARCH_ENABLE: "" }, cb)
type SQSEnvVar = "SQS_MIGRATION_ENABLE" | "SQS_SEARCH_ENABLE"
async function sqsDisabled(envVar: SQSEnvVar, cb: () => Promise<void>) {
await config.withEnv({ [envVar]: "" }, cb)
}
async function sqsEnabled(cb: () => Promise<void>) {
await config.withEnv({ SQS_SEARCH_ENABLE: "1" }, cb)
async function sqsEnabled(envVar: SQSEnvVar, cb: () => Promise<void>) {
await config.withEnv({ [envVar]: "1" }, cb)
}
beforeAll(async () => {
await sqsDisabled(async () => {
await config.init()
const table = await config.api.table.save(basicTable())
tableId = table._id!
const db = dbCore.getDB(config.appId!)
// old link document
await db.put(oldLinkDocument())
})
})
describe("SQS migration", () => {
it("test migration runs as expected against an older DB", async () => {
const db = dbCore.getDB(config.appId!)
// confirm nothing exists initially
await sqsDisabled(async () => {
let error: any | undefined
try {
await db.get(SQLITE_DESIGN_DOC_ID)
} catch (err: any) {
error = err
}
expect(error).toBeDefined()
expect(error.status).toBe(404)
})
await sqsEnabled(async () => {
await processMigrations(config.appId!, MIGRATIONS)
const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
expect(designDoc.sql.tables).toBeDefined()
const mainTableDef = designDoc.sql.tables[tableId]
expect(mainTableDef).toBeDefined()
expect(mainTableDef.fields[prefix("name")]).toEqual({
field: "name",
type: SQLiteType.TEXT,
describe.each(["SQS_MIGRATION_ENABLE", "SQS_SEARCH_ENABLE"] as SQSEnvVar[])(
"SQS migration with (%s)",
envVar => {
beforeAll(async () => {
await sqsDisabled(envVar, async () => {
await config.init()
const table = await config.api.table.save(basicTable())
tableId = table._id!
const db = dbCore.getDB(config.appId!)
// old link document
await db.put(oldLinkDocument())
})
expect(mainTableDef.fields[prefix("description")]).toEqual({
field: "description",
type: SQLiteType.TEXT,
})
const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
expect(linkDoc.tableId).toEqual(
generateJunctionTableID(tableId1, tableId2)
)
// should have swapped the documents
expect(linkDoc.doc1.tableId).toEqual(tableId2)
expect(linkDoc.doc1.rowId).toEqual(rowId2)
expect(linkDoc.doc2.tableId).toEqual(tableId1)
expect(linkDoc.doc2.rowId).toEqual(rowId1)
})
})
})
it("test migration runs as expected against an older DB", async () => {
const db = dbCore.getDB(config.appId!)
// confirm nothing exists initially
await sqsDisabled(envVar, async () => {
let error: any | undefined
try {
await db.get(SQLITE_DESIGN_DOC_ID)
} catch (err: any) {
error = err
}
expect(error).toBeDefined()
expect(error.status).toBe(404)
})
await sqsEnabled(envVar, async () => {
await processMigrations(config.appId!, MIGRATIONS)
const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
expect(designDoc.sql.tables).toBeDefined()
const mainTableDef = designDoc.sql.tables[tableId]
expect(mainTableDef).toBeDefined()
expect(mainTableDef.fields[prefix("name")]).toEqual({
field: "name",
type: SQLiteType.TEXT,
})
expect(mainTableDef.fields[prefix("description")]).toEqual({
field: "description",
type: SQLiteType.TEXT,
})
const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
expect(linkDoc.tableId).toEqual(
generateJunctionTableID(tableId1, tableId2)
)
// should have swapped the documents
expect(linkDoc.doc1.tableId).toEqual(tableId2)
expect(linkDoc.doc1.rowId).toEqual(rowId2)
expect(linkDoc.doc2.tableId).toEqual(tableId1)
expect(linkDoc.doc2.rowId).toEqual(rowId1)
})
})
}
)

View file

@ -11,26 +11,26 @@ export type AppMigrationJob = {
appId: string
}
let appMigrationQueue: queue.Queue<AppMigrationJob> | undefined
// always create app migration queue - so that events can be pushed and read from it
// across the different api and automation services
const appMigrationQueue = queue.createQueue<AppMigrationJob>(
queue.JobQueue.APP_MIGRATION,
{
jobOptions: {
attempts: MAX_ATTEMPTS,
removeOnComplete: true,
removeOnFail: true,
},
maxStalledCount: MAX_ATTEMPTS,
removeStalledCb: async (job: Job) => {
logging.logAlert(
`App migration failed, queue job ID: ${job.id} - reason: ${job.failedReason}`
)
},
}
)
export function init() {
appMigrationQueue = queue.createQueue<AppMigrationJob>(
queue.JobQueue.APP_MIGRATION,
{
jobOptions: {
attempts: MAX_ATTEMPTS,
removeOnComplete: true,
removeOnFail: true,
},
maxStalledCount: MAX_ATTEMPTS,
removeStalledCb: async (job: Job) => {
logging.logAlert(
`App migration failed, queue job ID: ${job.id} - reason: ${job.failedReason}`
)
},
}
)
return appMigrationQueue.process(MIGRATION_CONCURRENCY, processMessage)
}

View file

@ -100,7 +100,10 @@ export function getError(err: any) {
}
export function guardAttachment(attachmentObject: any) {
if (!("url" in attachmentObject) || !("filename" in attachmentObject)) {
if (
attachmentObject &&
(!("url" in attachmentObject) || !("filename" in attachmentObject))
) {
const providedKeys = Object.keys(attachmentObject).join(", ")
throw new Error(
`Attachments must have both "url" and "filename" keys. You have provided: ${providedKeys}`
@ -135,7 +138,9 @@ export async function sendAutomationAttachmentsToStorage(
}
for (const [prop, attachments] of Object.entries(attachmentRows)) {
if (Array.isArray(attachments)) {
if (!attachments) {
continue
} else if (Array.isArray(attachments)) {
if (attachments.length) {
row[prop] = await Promise.all(
attachments.map(attachment => generateAttachmentRow(attachment))

View file

@ -1,4 +1,5 @@
import * as automationUtils from "./automationUtils"
import { isPlainObject } from "lodash"
type ObjValue = {
[key: string]: string | ObjValue
@ -18,6 +19,10 @@ function replaceBindingsRecursive(
value: string | ObjValue,
loopStepNumber: number
) {
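// Nullish values cannot contain bindings - return them untouched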
if (value === null || value === undefined) {
return value
}
if (typeof value === "object") {
for (const [innerKey, innerValue] of Object.entries(value)) {
if (typeof innerValue === "string") {
@ -25,7 +30,11 @@ function replaceBindingsRecursive(
innerValue,
`steps.${loopStepNumber}`
)
} else if (typeof innerValue === "object") {
} else if (
innerValue &&
isPlainObject(innerValue) &&
Object.keys(innerValue).length > 0
) {
value[innerKey] = replaceBindingsRecursive(innerValue, loopStepNumber)
}
}

View file

@ -7,8 +7,8 @@ import {
AutomationStepType,
AutomationIOType,
} from "@budibase/types"
import { env } from "@budibase/backend-core"
import * as automationUtils from "../automationUtils"
import environment from "../../environment"
enum Model {
GPT_35_TURBO = "gpt-3.5-turbo",
@ -60,7 +60,7 @@ export const definition: AutomationStepSchema = {
}
export async function run({ inputs }: AutomationStepInput) {
if (!environment.OPENAI_API_KEY) {
if (!env.OPENAI_API_KEY) {
return {
success: false,
response:
@ -77,7 +77,7 @@ export async function run({ inputs }: AutomationStepInput) {
try {
const openai = new OpenAI({
apiKey: environment.OPENAI_API_KEY,
apiKey: env.OPENAI_API_KEY,
})
const completion = await openai.chat.completions.create({

View file

@ -82,39 +82,73 @@ export async function run({ inputs, appId, emitter }: AutomationStepInput) {
}
const tableId = inputs.row.tableId
// clear any undefined, null or empty string properties so that they aren't updated
for (let propKey of Object.keys(inputs.row)) {
const clearRelationships =
inputs.meta?.fields?.[propKey]?.clearRelationships
if (
(inputs.row[propKey] == null || inputs.row[propKey]?.length === 0) &&
!clearRelationships
) {
delete inputs.row[propKey]
}
// Base update
let rowUpdate: Record<string, any>
// Legacy
// Find previously set values and add them to the update. Ensure empty relationships
// are added to the update if clearRelationships is true
const legacyUpdated = Object.keys(inputs.row || {}).reduce(
(acc: Record<string, any>, key: string) => {
const isEmpty = inputs.row[key] == null || inputs.row[key]?.length === 0
const fieldConfig = inputs.meta?.fields || {}
if (isEmpty) {
if (
Object.hasOwn(fieldConfig, key) &&
fieldConfig[key].clearRelationships === true
) {
// Explicitly clear the field on update
acc[key] = []
}
} else {
// Keep non-empty values
acc[key] = inputs.row[key]
}
return acc
},
{}
)
// The source of truth for inclusion in the update is: inputs.meta?.fields
const parsedUpdate = Object.keys(inputs.meta?.fields || {}).reduce(
(acc: Record<string, any>, key: string) => {
const fieldConfig = inputs.meta?.fields?.[key] || {}
// Ignore legacy config.
if (Object.hasOwn(fieldConfig, "clearRelationships")) {
return acc
}
acc[key] =
!inputs.row[key] || inputs.row[key]?.length === 0 ? "" : inputs.row[key]
return acc
},
{}
)
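// Combine both update sources - values from the legacy clearRelationships
// path take precedence over the meta-driven fields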
rowUpdate = {
tableId,
...parsedUpdate,
...legacyUpdated,
}
try {
if (tableId) {
inputs.row = await automationUtils.cleanUpRow(
inputs.row.tableId,
inputs.row
)
rowUpdate = await automationUtils.cleanUpRow(tableId, rowUpdate)
inputs.row = await automationUtils.sendAutomationAttachmentsToStorage(
inputs.row.tableId,
inputs.row
rowUpdate = await automationUtils.sendAutomationAttachmentsToStorage(
tableId,
rowUpdate
)
}
// have to clean up the row, remove the table from it
const ctx: any = buildCtx(appId, emitter, {
body: {
...inputs.row,
...rowUpdate,
_id: inputs.rowId,
},
params: {
rowId: inputs.rowId,
tableId: tableId,
tableId,
},
})
await rowController.patch(ctx)

View file

@ -4,6 +4,7 @@ import { loopAutomation } from "../../tests/utilities/structures"
import { context } from "@budibase/backend-core"
import * as setup from "./utilities"
import { Table } from "@budibase/types"
import * as loopUtils from "../loopUtils"
import { LoopInput, LoopStepType } from "../../definitions/automations"
describe("Attempt to run a basic loop automation", () => {
@ -51,4 +52,98 @@ describe("Attempt to run a basic loop automation", () => {
})
expect(resp.steps[2].outputs.iterations).toBe(1)
})
describe("replaceFakeBindings", () => {
it("should replace loop bindings in nested objects", () => {
const originalStepInput = {
schema: {
name: {
type: "string",
constraints: {
type: "string",
length: { maximum: null },
presence: false,
},
name: "name",
display: { type: "Text" },
},
},
row: {
tableId: "ta_aaad4296e9f74b12b1b90ef7a84afcad",
name: "{{ loop.currentItem.pokemon }}",
},
}
const loopStepNumber = 3
const result = loopUtils.replaceFakeBindings(
originalStepInput,
loopStepNumber
)
expect(result).toEqual({
schema: {
name: {
type: "string",
constraints: {
type: "string",
length: { maximum: null },
presence: false,
},
name: "name",
display: { type: "Text" },
},
},
row: {
tableId: "ta_aaad4296e9f74b12b1b90ef7a84afcad",
name: "{{ steps.3.currentItem.pokemon }}",
},
})
})
it("should handle null values in nested objects", () => {
const originalStepInput = {
nullValue: null,
nestedNull: {
someKey: null,
},
validValue: "{{ loop.someValue }}",
}
const loopStepNumber = 2
const result = loopUtils.replaceFakeBindings(
originalStepInput,
loopStepNumber
)
expect(result).toEqual({
nullValue: null,
nestedNull: {
someKey: null,
},
validValue: "{{ steps.2.someValue }}",
})
})
it("should handle empty objects and arrays", () => {
const originalStepInput = {
emptyObject: {},
emptyArray: [],
nestedEmpty: {
emptyObj: {},
emptyArr: [],
},
}
const loopStepNumber = 1
const result = loopUtils.replaceFakeBindings(
originalStepInput,
loopStepNumber
)
expect(result).toEqual(originalStepInput)
})
})
})

View file

@ -1,6 +1,4 @@
const setup = require("./utilities")
import environment from "../../environment"
import { getConfig, runStep, afterAll as _afterAll } from "./utilities"
import { OpenAI } from "openai"
jest.mock("openai", () => ({
@ -26,42 +24,41 @@ const mockedOpenAI = OpenAI as jest.MockedClass<typeof OpenAI>
const OPENAI_PROMPT = "What is the meaning of life?"
describe("test the openai action", () => {
let config = setup.getConfig()
let config = getConfig()
let resetEnv: () => void | undefined
beforeAll(async () => {
await config.init()
})
beforeEach(() => {
environment.OPENAI_API_KEY = "abc123"
resetEnv = config.setCoreEnv({ OPENAI_API_KEY: "abc123" })
})
afterAll(setup.afterAll)
afterEach(() => {
resetEnv()
})
afterAll(_afterAll)
it("should present the correct error message when the OPENAI_API_KEY variable isn't set", async () => {
delete environment.OPENAI_API_KEY
let res = await setup.runStep("OPENAI", {
prompt: OPENAI_PROMPT,
await config.withCoreEnv({ OPENAI_API_KEY: "" }, async () => {
let res = await runStep("OPENAI", { prompt: OPENAI_PROMPT })
expect(res.response).toEqual(
"OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable."
)
expect(res.success).toBeFalsy()
})
expect(res.response).toEqual(
"OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable."
)
expect(res.success).toBeFalsy()
})
it("should be able to receive a response from ChatGPT given a prompt", async () => {
const res = await setup.runStep("OPENAI", {
prompt: OPENAI_PROMPT,
})
const res = await runStep("OPENAI", { prompt: OPENAI_PROMPT })
expect(res.response).toEqual("This is a test")
expect(res.success).toBeTruthy()
})
it("should present the correct error message when a prompt is not provided", async () => {
const res = await setup.runStep("OPENAI", {
prompt: null,
})
const res = await runStep("OPENAI", { prompt: null })
expect(res.response).toEqual(
"Budibase OpenAI Automation Failed: No prompt supplied"
)
@ -84,7 +81,7 @@ describe("test the openai action", () => {
} as any)
)
const res = await setup.runStep("OPENAI", {
const res = await runStep("OPENAI", {
prompt: OPENAI_PROMPT,
})

View file

@ -4,11 +4,12 @@ import {
AutomationStepType,
AutomationTriggerSchema,
AutomationTriggerStepId,
AutomationEventType,
} from "@budibase/types"
export const definition: AutomationTriggerSchema = {
name: "App Action",
event: "app:trigger",
event: AutomationEventType.APP_TRIGGER,
icon: "Apps",
tagline: "Automation fired from the frontend",
description: "Trigger an automation from an action inside your app",

View file

@ -4,11 +4,12 @@ import {
AutomationStepType,
AutomationTriggerSchema,
AutomationTriggerStepId,
AutomationEventType,
} from "@budibase/types"
export const definition: AutomationTriggerSchema = {
name: "Cron Trigger",
event: "cron:trigger",
event: AutomationEventType.CRON_TRIGGER,
icon: "Clock",
tagline: "Cron Trigger (<b>{{inputs.cron}}</b>)",
description: "Triggers automation on a cron schedule.",

View file

@ -4,11 +4,12 @@ import {
AutomationStepType,
AutomationTriggerSchema,
AutomationTriggerStepId,
AutomationEventType,
} from "@budibase/types"
export const definition: AutomationTriggerSchema = {
name: "Row Deleted",
event: "row:delete",
event: AutomationEventType.ROW_DELETE,
icon: "TableRowRemoveCenter",
tagline: "Row is deleted from {{inputs.enriched.table.name}}",
description: "Fired when a row is deleted from your database",

View file

@ -4,11 +4,12 @@ import {
AutomationStepType,
AutomationTriggerSchema,
AutomationTriggerStepId,
AutomationEventType,
} from "@budibase/types"
export const definition: AutomationTriggerSchema = {
name: "Row Created",
event: "row:save",
event: AutomationEventType.ROW_SAVE,
icon: "TableRowAddBottom",
tagline: "Row is added to {{inputs.enriched.table.name}}",
description: "Fired when a row is added to your database",

View file

@ -4,11 +4,12 @@ import {
AutomationStepType,
AutomationTriggerSchema,
AutomationTriggerStepId,
AutomationEventType,
} from "@budibase/types"
export const definition: AutomationTriggerSchema = {
name: "Row Updated",
event: "row:update",
event: AutomationEventType.ROW_UPDATE,
icon: "Refresh",
tagline: "Row is updated in {{inputs.enriched.table.name}}",
description: "Fired when a row is updated in your database",

View file

@ -4,11 +4,12 @@ import {
AutomationStepType,
AutomationTriggerSchema,
AutomationTriggerStepId,
AutomationEventType,
} from "@budibase/types"
export const definition: AutomationTriggerSchema = {
name: "Webhook",
event: "web:trigger",
event: AutomationEventType.WEBHOOK_TRIGGER,
icon: "Send",
tagline: "Webhook endpoint is hit",
description: "Trigger an automation when a HTTP POST webhook is hit",

View file

@ -7,12 +7,13 @@ import { automationQueue } from "./bullboard"
import { checkTestFlag } from "../utilities/redis"
import * as utils from "./utils"
import env from "../environment"
import { context, db as dbCore } from "@budibase/backend-core"
import { context, logging, db as dbCore } from "@budibase/backend-core"
import {
Automation,
Row,
AutomationData,
AutomationJob,
AutomationEventType,
UpdatedRowEventEmitter,
} from "@budibase/types"
import { executeInThread } from "../threads/automation"
@ -65,34 +66,41 @@ async function queueRelevantRowAutomations(
automationTrigger?.inputs &&
automationTrigger.inputs.tableId === event.row.tableId
) {
await automationQueue.add({ automation, event }, JOB_OPTS)
try {
await automationQueue.add({ automation, event }, JOB_OPTS)
} catch (e) {
logging.logAlert("Failed to queue automation", e)
}
}
}
})
}
emitter.on("row:save", async function (event: UpdatedRowEventEmitter) {
emitter.on(
AutomationEventType.ROW_SAVE,
async function (event: UpdatedRowEventEmitter) {
/* istanbul ignore next */
if (!event || !event.row || !event.row.tableId) {
return
}
await queueRelevantRowAutomations(event, AutomationEventType.ROW_SAVE)
}
)
emitter.on(AutomationEventType.ROW_UPDATE, async function (event) {
/* istanbul ignore next */
if (!event || !event.row || !event.row.tableId) {
return
}
await queueRelevantRowAutomations(event, "row:save")
await queueRelevantRowAutomations(event, AutomationEventType.ROW_UPDATE)
})
emitter.on("row:update", async function (event) {
emitter.on(AutomationEventType.ROW_DELETE, async function (event) {
/* istanbul ignore next */
if (!event || !event.row || !event.row.tableId) {
return
}
await queueRelevantRowAutomations(event, "row:update")
})
emitter.on("row:delete", async function (event) {
/* istanbul ignore next */
if (!event || !event.row || !event.row.tableId) {
return
}
await queueRelevantRowAutomations(event, "row:delete")
await queueRelevantRowAutomations(event, AutomationEventType.ROW_DELETE)
})
export async function externalTrigger(
@ -118,7 +126,6 @@ export async function externalTrigger(
}
params.fields = coercedFields
}
const data: AutomationData = { automation, event: params as any }
if (getResponses) {
data.event = {

View file

@ -75,16 +75,12 @@ const environment = {
AUTOMATION_MAX_ITERATIONS:
parseIntSafe(process.env.AUTOMATION_MAX_ITERATIONS) ||
DEFAULTS.AUTOMATION_MAX_ITERATIONS,
SENDGRID_API_KEY: process.env.SENDGRID_API_KEY,
DYNAMO_ENDPOINT: process.env.DYNAMO_ENDPOINT,
QUERY_THREAD_TIMEOUT: QUERY_THREAD_TIMEOUT,
AUTOMATION_THREAD_TIMEOUT:
parseIntSafe(process.env.AUTOMATION_THREAD_TIMEOUT) ||
DEFAULT_AUTOMATION_TIMEOUT,
BB_ADMIN_USER_EMAIL: process.env.BB_ADMIN_USER_EMAIL,
BB_ADMIN_USER_PASSWORD: process.env.BB_ADMIN_USER_PASSWORD,
PLUGINS_DIR: process.env.PLUGINS_DIR || DEFAULTS.PLUGINS_DIR,
OPENAI_API_KEY: process.env.OPENAI_API_KEY,
MAX_IMPORT_SIZE_MB: process.env.MAX_IMPORT_SIZE_MB,
SESSION_EXPIRY_SECONDS: process.env.SESSION_EXPIRY_SECONDS,
// SQL
@ -92,6 +88,7 @@ const environment = {
SQL_LOGGING_ENABLE: process.env.SQL_LOGGING_ENABLE,
SQL_ALIASING_DISABLE: process.env.SQL_ALIASING_DISABLE,
SQS_SEARCH_ENABLE: process.env.SQS_SEARCH_ENABLE,
SQS_MIGRATION_ENABLE: process.env.SQS_MIGRATION_ENABLE,
// flags
ALLOW_DEV_AUTOMATIONS: process.env.ALLOW_DEV_AUTOMATIONS,
DISABLE_THREADING: process.env.DISABLE_THREADING,

View file

@ -71,6 +71,9 @@ class CouchDBIntegration implements IntegrationBase {
private readonly client: Database
constructor(config: CouchDBConfig) {
if (!config.url || !config.database) {
throw new Error("Unable to connect without URL or database")
}
this.client = dbCore.DatabaseWithConnection(config.database, config.url)
}
@ -79,45 +82,30 @@ class CouchDBIntegration implements IntegrationBase {
connected: false,
}
try {
const result = await this.query("exists", "validation error", {})
response.connected = result === true
response.connected = await this.client.exists()
} catch (e: any) {
response.error = e.message as string
}
return response
}
async query(
command: string,
errorMsg: string,
query: { json?: object; id?: string }
) {
try {
return await (this.client as any)[command](query.id || query.json)
} catch (err) {
console.error(errorMsg, err)
throw err
}
}
private parse(query: { json: string | object }) {
return typeof query.json === "string" ? JSON.parse(query.json) : query.json
}
async create(query: { json: string | object }) {
const parsed = this.parse(query)
return this.query("post", "Error writing to couchDB", { json: parsed })
return await this.client.put(parsed)
}
async read(query: { json: string | object }) {
const parsed = this.parse(query)
const result = await this.query("allDocs", "Error querying couchDB", {
json: {
include_docs: true,
...parsed,
},
})
return result.rows.map((row: { doc: object }) => row.doc)
const params = {
include_docs: true,
...parsed,
}
const result = await this.client.allDocs(params)
return result.rows.map(row => row.doc)
}
async update(query: { json: string | object }) {
@ -126,22 +114,15 @@ class CouchDBIntegration implements IntegrationBase {
const oldDoc = await this.get({ id: parsed._id })
parsed._rev = oldDoc._rev
}
return this.query("put", "Error updating couchDB document", {
json: parsed,
})
return await this.client.put(parsed)
}
async get(query: { id: string }) {
return this.query("get", "Error retrieving couchDB document by ID", {
id: query.id,
})
return await this.client.get(query.id)
}
async delete(query: { id: string }) {
const doc = await this.query("get", "Cannot find doc to be deleted", query)
return this.query("remove", "Error deleting couchDB document", {
json: doc,
})
return await this.client.remove(query.id)
}
}

View file

@ -6,7 +6,6 @@ jest.mock("@budibase/backend-core", () => {
...core.db,
DatabaseWithConnection: function () {
return {
post: jest.fn(),
allDocs: jest.fn().mockReturnValue({ rows: [] }),
put: jest.fn(),
get: jest.fn().mockReturnValue({ _rev: "a" }),
@ -43,7 +42,7 @@ describe("CouchDB Integration", () => {
await config.integration.create({
json: JSON.stringify(doc),
})
expect(config.integration.client.post).toHaveBeenCalledWith(doc)
expect(config.integration.client.put).toHaveBeenCalledWith(doc)
})
it("calls the read method with the correct params", async () => {
@ -80,7 +79,6 @@ describe("CouchDB Integration", () => {
it("calls the delete method with the correct params", async () => {
const id = "1234"
await config.integration.delete({ id })
expect(config.integration.client.get).toHaveBeenCalledWith(id)
expect(config.integration.client.remove).toHaveBeenCalled()
expect(config.integration.client.remove).toHaveBeenCalledWith(id)
})
})

View file

@ -18,7 +18,11 @@ import {
buildInternalRelationships,
sqlOutputProcessing,
} from "../../../../api/controllers/row/utils"
import { mapToUserColumn, USER_COLUMN_PREFIX } from "../../tables/internal/sqs"
import {
decodeNonAscii,
mapToUserColumn,
USER_COLUMN_PREFIX,
} from "../../tables/internal/sqs"
import sdk from "../../../index"
import {
context,
@ -150,7 +154,8 @@ function reverseUserColumnMapping(rows: Row[]) {
if (index !== -1) {
// cut out the prefix
const newKey = key.slice(0, index) + key.slice(index + prefixLength)
finalRow[newKey] = row[key]
const decoded = decodeNonAscii(newKey)
finalRow[decoded] = row[key]
} else {
finalRow[key] = row[key]
}

View file

@ -64,10 +64,29 @@ function buildRelationshipDefinitions(
export const USER_COLUMN_PREFIX = "data_"
// SQS does not support non-ASCII characters in column names, so we need to
// replace them with unicode escape sequences.
function encodeNonAscii(str: string): string {
return str
.split("")
.map(char => {
return char.charCodeAt(0) > 127
? "\\u" + char.charCodeAt(0).toString(16).padStart(4, "0")
: char
})
.join("")
}
export function decodeNonAscii(str: string): string {
return str.replace(/\\u([0-9a-fA-F]{4})/g, (match, p1) =>
String.fromCharCode(parseInt(p1, 16))
)
}
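// For example, encodeNonAscii("café") returns the literal string "caf\u00e9",
// and decodeNonAscii turns it back into "café"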
// Utility function to denote that columns in SQLite are mapped to avoid overlap issues.
// Overlaps can occur due to case insensitivity and clashes with columns which Budibase itself requires.
export function mapToUserColumn(key: string) {
return `${USER_COLUMN_PREFIX}${key}`
return `${USER_COLUMN_PREFIX}${encodeNonAscii(key)}`
}
// this can generate relationship tables as part of the mapping

View file

@ -160,14 +160,10 @@ export function enrichSchema(
for (const key of Object.keys(schema)) {
// if nothing specified in view, then it is not visible
const ui = view.schema?.[key] || { visible: false }
if (ui.visible === false) {
schema[key].visible = false
} else {
schema[key] = {
...schema[key],
...ui,
order: anyViewOrder ? ui?.order ?? undefined : schema[key].order,
}
schema[key] = {
...schema[key],
...ui,
order: anyViewOrder ? ui?.order ?? undefined : schema[key].order,
}
}

View file

@ -8,6 +8,7 @@ import {
tenancy,
users,
cache,
env as coreEnv,
} from "@budibase/backend-core"
import { watch } from "../watch"
import * as automations from "../automations"
@ -132,8 +133,8 @@ export async function startup(
// check and create admin user if required
// this must be run after the api has been initialised due to
// the app user sync
const bbAdminEmail = env.BB_ADMIN_USER_EMAIL,
bbAdminPassword = env.BB_ADMIN_USER_PASSWORD
const bbAdminEmail = coreEnv.BB_ADMIN_USER_EMAIL,
bbAdminPassword = coreEnv.BB_ADMIN_USER_PASSWORD
if (
env.SELF_HOSTED &&
!env.MULTI_TENANCY &&

View file

@ -14,20 +14,27 @@ describe("check BB_ADMIN environment variables", () => {
await tenancy.doInTenant(tenancy.DEFAULT_TENANT_ID, async () => {
await config.withEnv(
{
BB_ADMIN_USER_EMAIL: EMAIL,
BB_ADMIN_USER_PASSWORD: PASSWORD,
MULTI_TENANCY: "0",
SELF_HOSTED: "1",
},
async () => {
await startup({ rerun: true })
const user = await users.getGlobalUserByEmail(EMAIL, {
cleanup: false,
})
expect(user).toBeDefined()
expect(user?.password).toBeDefined()
expect(await utils.compare(PASSWORD, user?.password!)).toEqual(true)
}
() =>
config.withCoreEnv(
{
BB_ADMIN_USER_EMAIL: EMAIL,
BB_ADMIN_USER_PASSWORD: PASSWORD,
},
async () => {
await startup({ rerun: true })
const user = await users.getGlobalUserByEmail(EMAIL, {
cleanup: false,
})
expect(user).toBeDefined()
expect(user?.password).toBeDefined()
expect(await utils.compare(PASSWORD, user?.password!)).toEqual(
true
)
}
)
)
})
})

View file

@ -290,7 +290,7 @@ export default class TestConfiguration {
* that can be called to reset the environment variables to their original values.
*/
setCoreEnv(newEnvVars: Partial<typeof coreEnv>): () => void {
const oldEnv = cloneDeep(env)
const oldEnv = cloneDeep(coreEnv)
let key: keyof typeof newEnvVars
for (key in newEnvVars) {

View file

@ -24,6 +24,7 @@ import {
Query,
Webhook,
WebhookActionType,
AutomationEventType,
} from "@budibase/types"
import { LoopInput, LoopStepType } from "../../definitions/automations"
import { merge } from "lodash"
@ -305,7 +306,7 @@ export function loopAutomation(
trigger: {
id: "a",
type: "TRIGGER",
event: "row:save",
event: AutomationEventType.ROW_SAVE,
stepId: AutomationTriggerStepId.ROW_SAVED,
inputs: {
tableId,
@ -347,7 +348,7 @@ export function collectAutomation(tableId?: string): Automation {
trigger: {
id: "a",
type: "TRIGGER",
event: "row:save",
event: AutomationEventType.ROW_SAVE,
stepId: AutomationTriggerStepId.ROW_SAVED,
inputs: {
tableId,

View file

@ -50,6 +50,13 @@ export const TYPE_TRANSFORM_MAP: any = {
[undefined]: undefined,
parse: parseArrayString,
},
[FieldType.BB_REFERENCE]: {
//@ts-ignore
[null]: [],
//@ts-ignore
[undefined]: undefined,
parse: parseArrayString,
},
[FieldType.STRING]: {
"": null,
//@ts-ignore
@ -113,6 +120,9 @@ export const TYPE_TRANSFORM_MAP: any = {
[undefined]: undefined,
parse: parseArrayString,
},
[FieldType.ATTACHMENT_SINGLE]: {
"": null,
},
[FieldType.BOOLEAN]: {
"": null,
//@ts-ignore

View file

@ -209,10 +209,22 @@ describe("rowProcessor - inputProcessing", () => {
const { row } = await inputProcessing(userId, table, newRow)
if (userValue === undefined) {
// The 'user' field is omitted
expect(row).toEqual({
name: "Jack",
})
} else {
// The update is processed if null or "". 'user' is changed to an empty array.
expect(row).toEqual({
name: "Jack",
user: [],
})
}
expect(
bbReferenceProcessor.processInputBBReferences
).not.toHaveBeenCalled()
expect(row).toEqual(newRow)
}
)

View file

@ -34,6 +34,7 @@
"devDependencies": {
"@rollup/plugin-commonjs": "^17.1.0",
"@rollup/plugin-inject": "^5.0.5",
"@rollup/plugin-json": "^4.1.0",
"@rollup/plugin-typescript": "8.3.0",
"doctrine": "^3.0.0",
"jest": "29.7.0",

View file

@ -1,16 +1,23 @@
const HELPER_LIBRARY = "@budibase/handlebars-helpers"
const helpers = require(HELPER_LIBRARY)
const { HelperFunctionBuiltin } = require("../src/helpers/constants")
const fs = require("fs")
import { HelperFunctionBuiltin } from "../src/helpers/constants"
import { readFileSync, writeFileSync } from "fs"
import { marked } from "marked"
import { join, dirname } from "path"
const helpers = require("@budibase/handlebars-helpers")
const doctrine = require("doctrine")
const marked = require("marked")
type HelperInfo = {
acceptsInline?: boolean
acceptsBlock?: boolean
example?: string
description: string
tags?: any[]
}
/**
* full list of supported helpers can be found here:
* https://github.com/budibase/handlebars-helpers
*/
const { join } = require("path")
const path = require("path")
const COLLECTIONS = [
"math",
@ -23,7 +30,7 @@ const COLLECTIONS = [
"uuid",
]
const FILENAME = join(__dirname, "..", "src", "manifest.json")
const outputJSON = {}
const outputJSON: any = {}
const ADDED_HELPERS = {
date: {
date: {
@ -43,7 +50,7 @@ const ADDED_HELPERS = {
},
}
function fixSpecialCases(name, obj) {
function fixSpecialCases(name: string, obj: any) {
const args = obj.args
if (name === "ifNth") {
args[0] = "a"
@ -61,7 +68,7 @@ function fixSpecialCases(name, obj) {
return obj
}
function lookForward(lines, funcLines, idx) {
function lookForward(lines: string[], funcLines: string[], idx: number) {
const funcLen = funcLines.length
for (let i = idx, j = 0; i < idx + funcLen; ++i, j++) {
if (!lines[i].includes(funcLines[j])) {
@ -71,7 +78,7 @@ function lookForward(lines, funcLines, idx) {
return true
}
function getCommentInfo(file, func) {
function getCommentInfo(file: string, func: string): HelperInfo {
const lines = file.split("\n")
const funcLines = func.split("\n")
let comment = null
@ -98,7 +105,13 @@ function getCommentInfo(file, func) {
if (comment == null) {
return { description: "" }
}
const docs = doctrine.parse(comment, { unwrap: true })
const docs: {
acceptsInline?: boolean
acceptsBlock?: boolean
example: string
description: string
tags: any[]
} = doctrine.parse(comment, { unwrap: true })
// some hacky fixes
docs.description = docs.description.replace(/\n/g, " ")
docs.description = docs.description.replace(/[ ]{2,}/g, " ")
@ -120,7 +133,7 @@ function getCommentInfo(file, func) {
return docs
}
const excludeFunctions = { string: ["raw"] }
const excludeFunctions: Record<string, string[]> = { string: ["raw"] }
/**
* This script is very specific in purpose: it parses the handlebars-helpers files to attempt to extract information about them.
@ -128,11 +141,13 @@ const excludeFunctions = { string: ["raw"] }
function run() {
const foundNames: string[] = []
for (let collection of COLLECTIONS) {
const collectionFile = fs.readFileSync(
`${path.dirname(require.resolve(HELPER_LIBRARY))}/lib/${collection}.js`,
const collectionFile = readFileSync(
`${dirname(
require.resolve("@budibase/handlebars-helpers")
)}/lib/${collection}.js`,
"utf8"
)
const collectionInfo = {}
const collectionInfo: any = {}
// collect information about helper
let hbsHelperInfo = helpers[collection]()
for (let entry of Object.entries(hbsHelperInfo)) {
@ -181,7 +196,7 @@ function run() {
helper.description = marked.parse(helper.description)
}
}
fs.writeFileSync(FILENAME, JSON.stringify(outputJSON, null, 2))
writeFileSync(FILENAME, JSON.stringify(outputJSON, null, 2))
}
run()

View file

@ -17,7 +17,6 @@
"devDependencies": {
"@budibase/nano": "10.1.5",
"@types/koa": "2.13.4",
"@types/pouchdb": "6.4.0",
"@types/redlock": "4.0.7",
"rimraf": "3.0.2",
"typescript": "5.5.2"

View file

@ -255,6 +255,15 @@ export type BucketedContent = AutomationAttachmentContent & {
path: string
}
export enum AutomationEventType {
ROW_SAVE = "row:save",
ROW_UPDATE = "row:update",
ROW_DELETE = "row:delete",
APP_TRIGGER = "app:trigger",
CRON_TRIGGER = "cron:trigger",
WEBHOOK_TRIGGER = "web:trigger",
}
export type UpdatedRowEventEmitter = {
row: Row
oldRow: Row

View file

@ -3,4 +3,5 @@ export interface SaveUserOpts {
requirePassword?: boolean
currentUserId?: string
skipPasswordValidation?: boolean
allowChangingEmail?: boolean
}

View file

@ -42,7 +42,7 @@
"@budibase/string-templates": "0.0.0",
"@budibase/types": "0.0.0",
"@koa/router": "8.0.8",
"@techpass/passport-openidconnect": "0.3.2",
"@techpass/passport-openidconnect": "0.3.3",
"@types/global-agent": "2.1.1",
"aws-sdk": "2.1030.0",
"bcrypt": "5.1.0",
@ -69,8 +69,6 @@
"pouchdb": "7.3.0",
"pouchdb-all-dbs": "1.1.1",
"server-destroy": "1.0.1",
"undici": "^6.0.1",
"undici-types": "^6.0.1",
"knex": "2.4.2"
},
"devDependencies": {

View file

@ -3,12 +3,6 @@ import env from "../../../environment"
import { env as coreEnv } from "@budibase/backend-core"
import nodeFetch from "node-fetch"
// When we come to move to SQS fully and move away from Clouseau, we will need
// to flip this to true (or remove it entirely). This will then be used to
// determine if we should show the maintenance page that links to the SQS
// migration docs.
const sqsRequired = false
let sqsAvailable: boolean
async function isSqsAvailable() {
// We cache this value for the duration of the Node process because we don't
@ -30,7 +24,7 @@ async function isSqsAvailable() {
}
async function isSqsMissing() {
return sqsRequired && !(await isSqsAvailable())
return env.SQS_SEARCH_ENABLE && !(await isSqsAvailable())
}
export const fetch = async (ctx: Ctx) => {

View file

@ -574,6 +574,41 @@ describe("scim", () => {
expect(events.user.updated).toHaveBeenCalledTimes(1)
})
it("an existing user's email can be updated", async () => {
const newEmail = structures.generator.email()
const body: ScimUpdateRequest = {
schemas: ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
Operations: [
{
op: "Replace",
path: 'emails[type eq "work"].value',
value: newEmail,
},
],
}
const response = await patchScimUser({ id: user.id, body })
const expectedScimUser: ScimUserResponse = {
...user,
emails: [
{
value: newEmail,
type: "work",
primary: true,
},
],
}
expect(response).toEqual(expectedScimUser)
const persistedUser = await config.api.scimUsersAPI.find(user.id)
expect(persistedUser).toEqual(expectedScimUser)
expect((await config.api.users.getUser(user.id)).body).toEqual(
expect.objectContaining({ _id: user.id, email: newEmail })
)
})
})
describe("DELETE /api/global/scim/v2/users/:id", () => {

yarn.lock

File diff suppressed because it is too large