
Merge remote-tracking branch 'origin/develop' into feature/new-app-publish-workflow

Dean 2022-04-21 14:10:58 +01:00
commit 91a6f8645c
113 changed files with 3990 additions and 8515 deletions

View file

@ -12,6 +12,9 @@ on:
- master
- develop
env:
BRANCH: ${{ github.event.pull_request.head.ref }}
jobs:
build:
runs-on: ubuntu-latest
@ -27,6 +30,18 @@ jobs:
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
# Add @budibase/pro to filesystem
- name: Checkout pro
uses: actions/checkout@v2
with:
repository: budibase/budibase-pro
ref: ${{ env.BRANCH }}
path: './pro'
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
- name: Setup pro
run: mv pro ../budibase-pro && cd ../budibase-pro && yarn setup
- run: yarn
- run: yarn bootstrap
- run: yarn lint

View file

@ -25,10 +25,27 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Extract branch name
shell: bash
run: echo "##[set-output name=branch;]$(echo ${GITHUB_REF_NAME})"
id: extract_branch
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
with:
node-version: 14.x
# Add @budibase/pro to filesystem
- name: Checkout pro
uses: actions/checkout@v2
with:
repository: budibase/budibase-pro
ref: ${{ steps.extract_branch.outputs.branch }}
path: './pro'
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
- name: Setup pro
run: mv pro ../budibase-pro && cd ../budibase-pro && yarn setup
- run: yarn
- run: yarn bootstrap
- run: yarn lint
@ -46,12 +63,27 @@ jobs:
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
run: |
# setup the username and email. I tend to use 'GitHub Actions Bot' with no email by default
git config user.name "Budibase Staging Release Bot"
git config user.email "<>"
# setup the username and email.
git config --global user.name "Budibase Staging Release Bot"
git config --global user.email "<>"
echo //registry.npmjs.org/:_authToken=${NPM_TOKEN} >> .npmrc
yarn release:develop
- name: Get the latest budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Publish @budibase/pro package to NPM
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
VERSION: ${{ steps.previoustag.outputs.tag }}
run: |
cd ../budibase-pro
echo //registry.npmjs.org/:_authToken=${NPM_TOKEN} >> .npmrc
yarn release:develop $RELEASE_VERSION
- name: Build/release Docker images
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
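
For reference, the new version step above reads the monorepo version out of lerna.json with jq; the equivalent lookup in Node (illustrative only) is a one-liner:

// illustrative Node equivalent of the workflow's jq lookup
const { version } = require("./lerna.json")
console.log(version) // e.g. "1.0.105-alpha.24"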

View file

@ -98,10 +98,6 @@ spec:
value: http://worker-service:{{ .Values.services.worker.port }}
- name: PLATFORM_URL
value: {{ .Values.globals.platformUrl | quote }}
- name: USE_QUOTAS
value: {{ .Values.globals.useQuotas | quote }}
- name: EXCLUDE_QUOTAS_TENANTS
value: {{ .Values.globals.excludeQuotasTenants | quote }}
- name: ACCOUNT_PORTAL_URL
value: {{ .Values.globals.accountPortalUrl | quote }}
- name: ACCOUNT_PORTAL_API_KEY

View file

@ -93,15 +93,13 @@ globals:
logLevel: info
selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup
multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs
useQuotas: "0"
excludeQuotasTenants: "" # comma seperated list of tenants to exclude from quotas
accountPortalUrl: ""
accountPortalApiKey: ""
cookieDomain: ""
platformUrl: ""
httpMigrations: "0"
google:
clientId: ""
clientId: ""
secret: ""
automationMaxIterations: "500"

View file

@ -1,5 +1,5 @@
{
"version": "1.0.105-alpha.23",
"version": "1.0.105-alpha.24",
"npmClient": "yarn",
"packages": [
"packages/*"

View file

@ -21,10 +21,8 @@
},
"scripts": {
"setup": "node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev",
"bootstrap": "lerna link && lerna bootstrap",
"bootstrap": "lerna link && lerna bootstrap && ./scripts/link-dependencies.sh",
"build": "lerna run build",
"publishdev": "lerna run publishdev",
"publishnpm": "yarn build && lerna publish --force-publish",
"release": "lerna publish patch --yes --force-publish",
"release:develop": "lerna publish prerelease --yes --force-publish --dist-tag develop",
"restore": "yarn run clean && yarn run bootstrap && yarn run build",
@ -32,7 +30,6 @@
"nuke:packages": "yarn run restore",
"nuke:docker": "lerna run --parallel dev:stack:nuke",
"clean": "lerna clean",
"kill-port": "kill-port 4001",
"kill-builder": "kill-port 3000",
"kill-server": "kill-port 4001 4002",
"kill-all": "yarn run kill-builder && yarn run kill-server",

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "1.0.105-alpha.23",
"version": "1.0.105-alpha.24",
"description": "Budibase backend core libraries used in server and worker",
"main": "src/index.js",
"author": "Budibase",

View file

@ -13,6 +13,7 @@ exports.Cookies = {
exports.Headers = {
API_KEY: "x-budibase-api-key",
LICENSE_KEY: "x-budibase-license-key",
API_VER: "x-budibase-api-version",
APP_ID: "x-budibase-app-id",
TYPE: "x-budibase-type",

View file

@ -23,6 +23,7 @@ exports.StaticDatabases = {
docs: {
apiKeys: "apikeys",
usageQuota: "usage_quota",
licenseInfo: "license_info",
},
},
// contains information about tenancy and so on

View file

@ -27,6 +27,7 @@ const UNICODE_MAX = "\ufff0"
exports.ViewNames = {
USER_BY_EMAIL: "by_email",
BY_API_KEY: "by_api_key",
USER_BY_BUILDERS: "by_builders",
}
exports.StaticDatabases = StaticDatabases
@ -429,34 +430,9 @@ async function getScopedConfig(db, params) {
return configDoc && configDoc.config ? configDoc.config : configDoc
}
function generateNewUsageQuotaDoc() {
return {
_id: StaticDatabases.GLOBAL.docs.usageQuota,
quotaReset: Date.now() + 2592000000,
usageQuota: {
automationRuns: 0,
rows: 0,
storage: 0,
apps: 0,
users: 0,
views: 0,
emails: 0,
},
usageLimits: {
automationRuns: 1000,
rows: 4000,
apps: 4,
storage: 1000,
users: 10,
emails: 50,
},
}
}
exports.Replication = Replication
exports.getScopedConfig = getScopedConfig
exports.generateConfigID = generateConfigID
exports.getConfigParams = getConfigParams
exports.getScopedFullConfig = getScopedFullConfig
exports.generateNewUsageQuotaDoc = generateNewUsageQuotaDoc
exports.generateDevInfoID = generateDevInfoID

View file

@ -56,10 +56,34 @@ exports.createApiKeyView = async () => {
await db.put(designDoc)
}
exports.createUserBuildersView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get("_design/database")
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
map: `function(doc) {
if (doc.builder && doc.builder.global === true) {
emit(doc._id, doc._id)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewNames.USER_BY_BUILDERS]: view,
}
await db.put(designDoc)
}
exports.queryGlobalView = async (viewName, params, db = null) => {
const CreateFuncByName = {
[ViewNames.USER_BY_EMAIL]: exports.createUserEmailView,
[ViewNames.BY_API_KEY]: exports.createApiKeyView,
[ViewNames.USER_BY_BUILDERS]: exports.createUserBuildersView,
}
// can pass DB in if working with something specific
if (!db) {
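
For context, the map function above emits one row per global builder, so the view can be queried directly once it exists. A minimal sketch, assuming getGlobalDB is exposed from the tenancy module the same way views.js uses it:

// illustrative only - query the new by_builders view directly
const { getGlobalDB } = require("@budibase/backend-core/tenancy")

async function listBuilderIds() {
  const db = getGlobalDB()
  // "database" is the design doc used above, "by_builders" the new view name
  const resp = await db.query("database/by_builders", { include_docs: false })
  return resp.rows.map(row => row.id)
}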

View file

@ -28,6 +28,7 @@ module.exports = {
SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED),
COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,
PLATFORM_URL: process.env.PLATFORM_URL,
TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS,
isTest,
_set(key, value) {
process.env[key] = value

View file

@ -0,0 +1,11 @@
class BudibaseError extends Error {
constructor(message, type, code) {
super(message)
this.type = type
this.code = code
}
}
module.exports = {
BudibaseError,
}

View file

@ -0,0 +1,41 @@
const licensing = require("./licensing")
const codes = {
...licensing.codes,
}
const types = {
...licensing.types,
}
const context = {
...licensing.context,
}
const getPublicError = err => {
let error
if (err.code || err.type) {
// add generic error information
error = {
code: err.code,
type: err.type,
}
if (err.code && context[err.code]) {
error = {
...error,
// get any additional context from this error
...context[err.code](err),
}
}
}
return error
}
module.exports = {
codes,
types,
UsageLimitError: licensing.UsageLimitError,
getPublicError,
}
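
A hedged sketch of how getPublicError is meant to be consumed, for example by a Koa error handler in the worker; the middleware shape and response fields are illustrative assumptions, not part of this diff:

// illustrative Koa-style error middleware built on the new helper
const { errors } = require("@budibase/backend-core")

const errorHandler = async (ctx, next) => {
  try {
    await next()
  } catch (err) {
    ctx.status = err.status || 500
    ctx.body = {
      message: err.message,
      // undefined for plain errors; populated with code/type/limitName
      // for typed errors such as UsageLimitError
      error: errors.getPublicError(err),
    }
  }
}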

View file

@ -0,0 +1,32 @@
const { BudibaseError } = require("./base")
const types = {
LICENSE_ERROR: "license_error",
}
const codes = {
USAGE_LIMIT_EXCEEDED: "usage_limit_exceeded",
}
const context = {
[codes.USAGE_LIMIT_EXCEEDED]: err => {
return {
limitName: err.limitName,
}
},
}
class UsageLimitError extends BudibaseError {
constructor(message, limitName) {
super(message, types.LICENSE_ERROR, codes.USAGE_LIMIT_EXCEEDED)
this.limitName = limitName
this.status = 400
}
}
module.exports = {
types,
codes,
context,
UsageLimitError,
}
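
And a short sketch of raising the typed error from a quota check; the function and limit name below are hypothetical:

// hypothetical quota check that raises the new typed error
const { errors } = require("@budibase/backend-core")

function assertRowQuota(used, limit) {
  if (used >= limit) {
    // getPublicError(err) would then expose
    // { code: "usage_limit_exceeded", type: "license_error", limitName: "rows" }
    throw new errors.UsageLimitError("Row quota exceeded", "rows")
  }
}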

View file

@ -0,0 +1,52 @@
const env = require("../environment")
const tenancy = require("../tenancy")
/**
* Read the TENANT_FEATURE_FLAGS env var and return an array of features flags for each tenant.
* The env var is formatted as:
* tenant1:feature1:feature2,tenant2:feature1
*/
const getFeatureFlags = () => {
if (!env.TENANT_FEATURE_FLAGS) {
return
}
const tenantFeatureFlags = {}
env.TENANT_FEATURE_FLAGS.split(",").forEach(tenantToFeatures => {
const [tenantId, ...features] = tenantToFeatures.split(":")
features.forEach(feature => {
if (!tenantFeatureFlags[tenantId]) {
tenantFeatureFlags[tenantId] = []
}
tenantFeatureFlags[tenantId].push(feature)
})
})
return tenantFeatureFlags
}
const TENANT_FEATURE_FLAGS = getFeatureFlags()
exports.isEnabled = featureFlag => {
const tenantId = tenancy.getTenantId()
return (
TENANT_FEATURE_FLAGS &&
TENANT_FEATURE_FLAGS[tenantId] &&
TENANT_FEATURE_FLAGS[tenantId].includes(featureFlag)
)
}
exports.getTenantFeatureFlags = tenantId => {
if (TENANT_FEATURE_FLAGS && TENANT_FEATURE_FLAGS[tenantId]) {
return TENANT_FEATURE_FLAGS[tenantId]
}
return []
}
exports.FeatureFlag = {
LICENSING: "LICENSING",
}
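
For example, the env var format described above could enable licensing for a single tenant; the tenant IDs below are made up, and the check assumes it runs inside a request where the tenant context is already set:

// hypothetical configuration:
//   TENANT_FEATURE_FLAGS="acme:LICENSING,globex:LICENSING:OTHER_FLAG"
const { featureFlags } = require("@budibase/backend-core")

// inside a request handled for tenant "acme"
if (featureFlags.isEnabled(featureFlags.FeatureFlag.LICENSING)) {
  // expose licensing endpoints / UI for this tenant
}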

View file

@ -15,4 +15,9 @@ module.exports = {
auth: require("../auth"),
constants: require("../constants"),
migrations: require("../migrations"),
errors: require("./errors"),
env: require("./environment"),
accounts: require("./cloud/accounts"),
tenancy: require("./tenancy"),
featureFlags: require("./featureFlags"),
}

View file

@ -2,24 +2,27 @@ const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
const { authenticateThirdParty } = require("./third-party-common")
async function authenticate(accessToken, refreshToken, profile, done) {
const thirdPartyUser = {
provider: profile.provider, // should always be 'google'
providerType: "google",
userId: profile.id,
profile: profile,
email: profile._json.email,
oauth2: {
accessToken: accessToken,
refreshToken: refreshToken,
},
}
const buildVerifyFn = async saveUserFn => {
return (accessToken, refreshToken, profile, done) => {
const thirdPartyUser = {
provider: profile.provider, // should always be 'google'
providerType: "google",
userId: profile.id,
profile: profile,
email: profile._json.email,
oauth2: {
accessToken: accessToken,
refreshToken: refreshToken,
},
}
return authenticateThirdParty(
thirdPartyUser,
true, // require local accounts to exist
done
)
return authenticateThirdParty(
thirdPartyUser,
true, // require local accounts to exist
done,
saveUserFn
)
}
}
/**
@ -27,11 +30,7 @@ async function authenticate(accessToken, refreshToken, profile, done) {
* from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
* @returns Dynamically configured Passport Google Strategy
*/
exports.strategyFactory = async function (
config,
callbackUrl,
verify = authenticate
) {
exports.strategyFactory = async function (config, callbackUrl, saveUserFn) {
try {
const { clientID, clientSecret } = config
@ -41,6 +40,7 @@ exports.strategyFactory = async function (
)
}
const verify = buildVerifyFn(saveUserFn)
return new GoogleStrategy(
{
clientID: config.clientID,
@ -58,4 +58,4 @@ exports.strategyFactory = async function (
}
}
// expose for testing
exports.authenticate = authenticate
exports.buildVerifyFn = buildVerifyFn
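
A rough sketch of the caller side implied by the new signature: the worker passes its own user-save function into the factory instead of the strategy importing saveUser itself. The import paths, koa-passport usage and saveUser implementation here are assumptions:

// illustrative caller wiring a save function into the Google strategy
const passport = require("koa-passport") // assumption: worker uses koa-passport
// module path assumed; the factory itself is defined in the diff above
const { strategyFactory } = require("@budibase/backend-core/middleware/passport/google")
const { saveUser } = require("./users") // hypothetical save implementation

async function configureGoogleAuth(config, callbackUrl) {
  // saveUserFn is forwarded through buildVerifyFn to authenticateThirdParty
  const strategy = await strategyFactory(config, callbackUrl, saveUser)
  passport.use(strategy)
}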

View file

@ -2,46 +2,49 @@ const fetch = require("node-fetch")
const OIDCStrategy = require("@techpass/passport-openidconnect").Strategy
const { authenticateThirdParty } = require("./third-party-common")
/**
* @param {*} issuer The identity provider base URL
* @param {*} sub The user ID
* @param {*} profile The user profile information. Created by passport from the /userinfo response
* @param {*} jwtClaims The parsed id_token claims
* @param {*} accessToken The access_token for contacting the identity provider - may or may not be a JWT
* @param {*} refreshToken The refresh_token for obtaining a new access_token - usually not a JWT
* @param {*} idToken The id_token - always a JWT
* @param {*} params The response body from requesting an access_token
* @param {*} done The passport callback: err, user, info
*/
async function authenticate(
issuer,
sub,
profile,
jwtClaims,
accessToken,
refreshToken,
idToken,
params,
done
) {
const thirdPartyUser = {
// store the issuer info to enable sync in future
provider: issuer,
providerType: "oidc",
userId: profile.id,
profile: profile,
email: getEmail(profile, jwtClaims),
oauth2: {
accessToken: accessToken,
refreshToken: refreshToken,
},
}
return authenticateThirdParty(
thirdPartyUser,
false, // don't require local accounts to exist
const buildVerifyFn = saveUserFn => {
/**
* @param {*} issuer The identity provider base URL
* @param {*} sub The user ID
* @param {*} profile The user profile information. Created by passport from the /userinfo response
* @param {*} jwtClaims The parsed id_token claims
* @param {*} accessToken The access_token for contacting the identity provider - may or may not be a JWT
* @param {*} refreshToken The refresh_token for obtaining a new access_token - usually not a JWT
* @param {*} idToken The id_token - always a JWT
* @param {*} params The response body from requesting an access_token
* @param {*} done The passport callback: err, user, info
*/
return async (
issuer,
sub,
profile,
jwtClaims,
accessToken,
refreshToken,
idToken,
params,
done
)
) => {
const thirdPartyUser = {
// store the issuer info to enable sync in future
provider: issuer,
providerType: "oidc",
userId: profile.id,
profile: profile,
email: getEmail(profile, jwtClaims),
oauth2: {
accessToken: accessToken,
refreshToken: refreshToken,
},
}
return authenticateThirdParty(
thirdPartyUser,
false, // don't require local accounts to exist
done,
saveUserFn
)
}
}
/**
@ -86,7 +89,7 @@ function validEmail(value) {
* from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
* @returns Dynamically configured Passport OIDC Strategy
*/
exports.strategyFactory = async function (config, callbackUrl) {
exports.strategyFactory = async function (config, callbackUrl, saveUserFn) {
try {
const { clientID, clientSecret, configUrl } = config
@ -106,6 +109,7 @@ exports.strategyFactory = async function (config, callbackUrl) {
const body = await response.json()
const verify = buildVerifyFn(saveUserFn)
return new OIDCStrategy(
{
issuer: body.issuer,
@ -116,7 +120,7 @@ exports.strategyFactory = async function (config, callbackUrl) {
clientSecret: clientSecret,
callbackURL: callbackUrl,
},
authenticate
verify
)
} catch (err) {
console.error(err)
@ -125,4 +129,4 @@ exports.strategyFactory = async function (config, callbackUrl) {
}
// expose for testing
exports.authenticate = authenticate
exports.buildVerifyFn = buildVerifyFn

View file

@ -58,8 +58,10 @@ describe("google", () => {
it("delegates authentication to third party common", async () => {
const google = require("../google")
const mockSaveUserFn = jest.fn()
const authenticate = await google.buildVerifyFn(mockSaveUserFn)
await google.authenticate(
await authenticate(
data.accessToken,
data.refreshToken,
profile,
@ -69,7 +71,8 @@ describe("google", () => {
expect(authenticateThirdParty).toHaveBeenCalledWith(
user,
true,
mockDone)
mockDone,
mockSaveUserFn)
})
})
})

View file

@ -83,8 +83,10 @@ describe("oidc", () => {
async function doAuthenticate() {
const oidc = require("../oidc")
const mockSaveUserFn = jest.fn()
const authenticate = await oidc.buildVerifyFn(mockSaveUserFn)
await oidc.authenticate(
await authenticate(
issuer,
sub,
profile,

View file

@ -1,7 +1,6 @@
const env = require("../../environment")
const jwt = require("jsonwebtoken")
const { generateGlobalUserID } = require("../../db/utils")
const { saveUser } = require("../../utils")
const { authError } = require("./utils")
const { newid } = require("../../hashing")
const { createASession } = require("../../security/sessions")
@ -16,8 +15,11 @@ exports.authenticateThirdParty = async function (
thirdPartyUser,
requireLocalAccount = true,
done,
saveUserFn = saveUser
saveUserFn
) {
if (!saveUserFn) {
throw new Error("Save user function must be provided")
}
if (!thirdPartyUser.provider) {
return authError(done, "third party user provider required")
}

View file

@ -17,6 +17,7 @@ exports.Databases = {
FLAGS: "flags",
APP_METADATA: "appMetadata",
QUERY_VARS: "queryVars",
LICENSES: "license",
}
exports.SEPARATOR = SEPARATOR

View file

@ -176,6 +176,13 @@ exports.getGlobalUserByEmail = async email => {
})
}
exports.getBuildersCount = async () => {
const builders = await queryGlobalView(ViewNames.USER_BY_BUILDERS, {
include_docs: false,
})
return builders ? builders.length : 0
}
exports.saveUser = async (
user,
tenantId,
@ -289,4 +296,5 @@ exports.platformLogout = async ({ ctx, userId, keepActiveSession }) => {
userId,
sessions.map(({ sessionId }) => sessionId)
)
await userCache.invalidateUser(userId)
}
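
getBuildersCount backs the new licensing checks; a hedged sketch of gating a new builder seat on it (the surrounding function, limit value and import path are hypothetical):

// hypothetical guard built on the new helper
const { getBuildersCount } = require("@budibase/backend-core/utils") // path assumed
const { errors } = require("@budibase/backend-core")

async function assertBuilderSeatAvailable(maxBuilders) {
  const count = await getBuildersCount()
  if (count >= maxBuilders) {
    throw new errors.UsageLimitError("Builder limit reached", "builders")
  }
}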

View file

@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
"version": "1.0.105-alpha.23",
"version": "1.0.105-alpha.24",
"license": "MPL-2.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",
@ -38,7 +38,7 @@
],
"dependencies": {
"@adobe/spectrum-css-workflow-icons": "^1.2.1",
"@budibase/string-templates": "^1.0.105-alpha.23",
"@budibase/string-templates": "^1.0.105-alpha.24",
"@spectrum-css/actionbutton": "^1.0.1",
"@spectrum-css/actiongroup": "^1.0.1",
"@spectrum-css/avatar": "^3.0.2",

View file

@ -1,8 +1,9 @@
<script>
export let wide = false
export let maxWidth = "80ch"
</script>
<div class:wide>
<div style="--max-width: {maxWidth}" class:wide>
<slot />
</div>
@ -12,7 +13,7 @@
flex-direction: column;
justify-content: flex-start;
align-items: stretch;
max-width: 80ch;
max-width: var(--max-width);
margin: 0 auto;
padding: calc(var(--spacing-xl) * 2);
min-height: calc(100% - var(--spacing-xl) * 4);

View file

@ -16,11 +16,11 @@
easing: easing,
})
$: if (value) $progress = value
$: if (value || value === 0) $progress = value
</script>
<div
class:spectrum-ProgressBar--indeterminate={!value}
class:spectrum-ProgressBar--indeterminate={!value && value !== 0}
class:spectrum-ProgressBar--sideLabel={sideLabel}
class="spectrum-ProgressBar spectrum-ProgressBar--size{size}"
value={$progress}
@ -28,7 +28,7 @@
aria-valuenow={$progress}
aria-valuemin="0"
aria-valuemax="100"
style={width ? `width: ${width}px;` : ""}
style={width ? `width: ${width};` : ""}
>
{#if $$slots}
<div
@ -37,7 +37,7 @@
<slot />
</div>
{/if}
{#if value}
{#if value || value === 0}
<div
class="spectrum-FieldLabel spectrum-ProgressBar-percentage spectrum-FieldLabel--size{size}"
>
@ -47,7 +47,7 @@
<div class="spectrum-ProgressBar-track">
<div
class="spectrum-ProgressBar-fill"
style={value ? `width: ${$progress}%` : ""}
style={value || value === 0 ? `width: ${$progress}%` : ""}
/>
</div>
<div class="spectrum-ProgressBar-label" hidden="" />

View file

@ -5,12 +5,14 @@
export let serif = false
export let weight = null
export let textAlign = null
export let color = null
</script>
<p
style={`
${weight ? `font-weight:${weight};` : ""}
${textAlign ? `text-align:${textAlign};` : ""}
${color ? `color:${color};` : ""}
`}
class="spectrum-Body spectrum-Body--size{size}"
class:spectrum-Body--serif={serif}

View file

@ -5,12 +5,13 @@
export let size = "M"
export let textAlign
export let noPadding = false
export let weight = "default" // light, heavy, default
</script>
<h1
style={textAlign ? `text-align:${textAlign}` : ``}
class:noPadding
class="spectrum-Heading spectrum-Heading--size{size}"
class="spectrum-Heading spectrum-Heading--size{size} spectrum-Heading--{weight}"
>
<slot />
</h1>

File diff suppressed because it is too large.

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
"version": "1.0.105-alpha.23",
"version": "1.0.105-alpha.24",
"license": "GPL-3.0",
"private": true,
"scripts": {
@ -65,10 +65,10 @@
}
},
"dependencies": {
"@budibase/bbui": "^1.0.105-alpha.23",
"@budibase/client": "^1.0.105-alpha.23",
"@budibase/frontend-core": "^1.0.105-alpha.23",
"@budibase/string-templates": "^1.0.105-alpha.23",
"@budibase/bbui": "^1.0.105-alpha.24",
"@budibase/client": "^1.0.105-alpha.24",
"@budibase/frontend-core": "^1.0.105-alpha.24",
"@budibase/string-templates": "^1.0.105-alpha.24",
"@sentry/browser": "5.19.1",
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",

View file

@ -0,0 +1,14 @@
import { auth } from "../stores/portal"
import { get } from "svelte/store"
export const FEATURE_FLAGS = {
LICENSING: "LICENSING",
}
export const isEnabled = featureFlag => {
const user = get(auth).user
if (user?.featureFlags?.includes(featureFlag)) {
return true
}
return false
}

View file

@ -14,7 +14,7 @@
notifications.success("Invitation accepted successfully")
$goto("../auth/login")
} catch (error) {
notifications.error("Error accepting invitation")
notifications.error(error.message)
}
}
</script>

View file

@ -20,6 +20,7 @@
import ChangePasswordModal from "components/settings/ChangePasswordModal.svelte"
import UpdateAPIKeyModal from "components/settings/UpdateAPIKeyModal.svelte"
import Logo from "assets/bb-emblem.svg"
import { isEnabled, FEATURE_FLAGS } from "../../../helpers/featureFlags"
let loaded = false
let userInfoModal
@ -54,10 +55,17 @@
if (!$adminStore.cloud) {
menu = menu.concat([
{
title: "Updates",
title: "Update",
href: "/builder/portal/settings/update",
},
])
if (isEnabled(FEATURE_FLAGS.LICENSING)) {
menu = menu.concat({
title: "Upgrade",
href: "/builder/portal/settings/upgrade",
})
}
}
} else {
menu = menu.concat([

View file

@ -26,7 +26,7 @@
})
notifications.success("Successfully created user")
} catch (error) {
notifications.error("Error creating user")
notifications.error(error.message)
}
}
</script>

View file

@ -0,0 +1,151 @@
<script>
import {
Layout,
Heading,
Body,
Divider,
Link,
Button,
Input,
Label,
notifications,
} from "@budibase/bbui"
import { auth, admin } from "stores/portal"
import { redirect } from "@roxi/routify"
import { processStringSync } from "@budibase/string-templates"
import { API } from "api"
import { onMount } from "svelte"
$: license = $auth.user.license
$: upgradeUrl = `${$admin.accountPortalUrl}/portal/upgrade`
$: activateDisabled = !licenseKey || licenseKeyDisabled
let licenseInfo
let licenseKeyDisabled = false
let licenseKeyType = "text"
let licenseKey = ""
// Make sure page can't be visited directly in cloud
$: {
if ($admin.cloud) {
$redirect("../../portal")
}
}
const activate = async () => {
await API.activateLicenseKey({ licenseKey })
await auth.getSelf()
await setLicenseInfo()
notifications.success("Successfully activated")
}
const refresh = async () => {
try {
await API.refreshLicense()
await auth.getSelf()
notifications.success("Refreshed license")
} catch (err) {
console.error(err)
notifications.error("Error refreshing license")
}
}
// deactivate the license key field if there is a license key set
$: {
if (licenseInfo?.licenseKey) {
licenseKey = "**********************************************"
licenseKeyType = "password"
licenseKeyDisabled = true
activateDisabled = true
}
}
const setLicenseInfo = async () => {
licenseInfo = await API.getLicenseInfo()
}
onMount(async () => {
await setLicenseInfo()
})
</script>
{#if $auth.isAdmin}
<Layout noPadding>
<Layout gap="XS" noPadding>
<Heading size="M">Upgrade</Heading>
<Body size="M">
{#if license.plan.type === "free"}
Upgrade your budibase installation to unlock additional features. To
subscribe to a plan visit your <Link size="L" href={upgradeUrl}
>Account</Link
>.
{:else}
To manage your plan visit your <Link size="L" href={upgradeUrl}
>Account</Link
>.
{/if}
</Body>
</Layout>
<Divider size="S" />
<Layout gap="XS" noPadding>
<Heading size="S">Activate</Heading>
<Body size="S">Enter your license key below to activate your plan</Body>
</Layout>
<Layout noPadding>
<div class="fields">
<div class="field">
<Label size="L">License Key</Label>
<Input
thin
bind:value={licenseKey}
type={licenseKeyType}
disabled={licenseKeyDisabled}
/>
</div>
</div>
<div>
<Button cta on:click={activate} disabled={activateDisabled}
>Activate</Button
>
</div>
</Layout>
<Divider size="S" />
<Layout gap="L" noPadding>
<Layout gap="S" noPadding>
<Heading size="S">Plan</Heading>
<Layout noPadding gap="XXS">
<Body size="S">You are currently on the {license.plan.type} plan</Body
>
<Body size="XS">
{processStringSync(
"Updated {{ duration time 'millisecond' }} ago",
{
time:
new Date().getTime() -
new Date(license.refreshedAt).getTime(),
}
)}
</Body>
</Layout>
</Layout>
<div>
<Button secondary on:click={refresh}>Refresh</Button>
</div>
</Layout>
</Layout>
{/if}
<style>
.fields {
display: grid;
grid-gap: var(--spacing-m);
}
.field {
display: grid;
grid-template-columns: 100px 1fr;
grid-gap: var(--spacing-l);
align-items: center;
}
</style>

File diff suppressed because it is too large.

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
"version": "1.0.105-alpha.23",
"version": "1.0.105-alpha.24",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {

View file

@ -1,14 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "launch",
"name": "Publish Dev",
"program": "${workspaceFolder}/scripts/publishDev.js"
}
]
}

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/client",
"version": "1.0.105-alpha.23",
"version": "1.0.105-alpha.24",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",
@ -19,9 +19,9 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
"@budibase/bbui": "^1.0.105-alpha.23",
"@budibase/frontend-core": "^1.0.105-alpha.23",
"@budibase/string-templates": "^1.0.105-alpha.23",
"@budibase/bbui": "^1.0.105-alpha.24",
"@budibase/frontend-core": "^1.0.105-alpha.24",
"@budibase/string-templates": "^1.0.105-alpha.24",
"@spectrum-css/button": "^3.0.3",
"@spectrum-css/card": "^3.0.3",
"@spectrum-css/divider": "^1.0.3",

File diff suppressed because it is too large.

View file

@ -1,12 +1,12 @@
{
"name": "@budibase/frontend-core",
"version": "1.0.105-alpha.23",
"version": "1.0.105-alpha.24",
"description": "Budibase frontend core libraries used in builder and client",
"author": "Budibase",
"license": "MPL-2.0",
"svelte": "src/index.js",
"dependencies": {
"@budibase/bbui": "^1.0.105-alpha.23",
"@budibase/bbui": "^1.0.105-alpha.24",
"lodash": "^4.17.21",
"svelte": "^3.46.2"
}

View file

@ -22,6 +22,7 @@ import { buildTemplateEndpoints } from "./templates"
import { buildUserEndpoints } from "./user"
import { buildSelfEndpoints } from "./self"
import { buildViewEndpoints } from "./views"
import { buildLicensingEndpoints } from "./licensing"
const defaultAPIClientConfig = {
/**
@ -233,5 +234,6 @@ export const createAPIClient = config => {
...buildUserEndpoints(API),
...buildViewEndpoints(API),
...buildSelfEndpoints(API),
...buildLicensingEndpoints(API),
}
}

View file

@ -0,0 +1,30 @@
export const buildLicensingEndpoints = API => ({
/**
* Activates a self hosted license key
*/
activateLicenseKey: async data => {
return API.post({
url: `/api/global/license/activate`,
body: data,
})
},
/**
* Get the license info - metadata about the license including the
* obfuscated license key.
*/
getLicenseInfo: async () => {
return API.get({
url: "/api/global/license/info",
})
},
/**
* Refreshes the license cache
*/
refreshLicense: async () => {
return API.post({
url: "/api/global/license/refresh",
})
},
})
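
A minimal usage sketch of the new endpoints, mirroring the Upgrade settings page shown earlier in this diff; the key value is obviously a placeholder:

// illustrative usage from builder code
import { API } from "api"

async function activate(licenseKey) {
  try {
    await API.activateLicenseKey({ licenseKey }) // POST /api/global/license/activate
    return await API.getLicenseInfo() // GET /api/global/license/info
  } catch (err) {
    console.error("License activation failed", err)
  }
}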

View file

@ -1,5 +1,5 @@
{
"watch": ["src", "../backend-core"],
"watch": ["src", "../backend-core", "../../../budibase-pro/packages/pro"],
"ext": "js,ts,json",
"ignore": ["src/**/*.spec.ts", "src/**/*.spec.js"],
"exec": "ts-node src/index.ts"

View file

@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
"version": "1.0.105-alpha.23",
"version": "1.0.105-alpha.24",
"description": "Budibase Web Server",
"main": "src/index.ts",
"repository": {
@ -9,7 +9,7 @@
"url": "https://github.com/Budibase/budibase.git"
},
"scripts": {
"build": "rimraf dist/ && tsc && mv dist/src/* dist/ && rimraf dist/src/ && yarn postbuild",
"build": "rimraf dist/ && tsc -p tsconfig.build.json && mv dist/src/* dist/ && rimraf dist/src/ && yarn postbuild",
"postbuild": "copyfiles -u 1 src/**/*.svelte dist/ && copyfiles -u 1 src/**/*.hbs dist/ && copyfiles -u 1 src/**/*.json dist/",
"test": "jest --coverage --maxWorkers=2",
"test:watch": "jest --watch",
@ -68,16 +68,17 @@
"license": "GPL-3.0",
"dependencies": {
"@apidevtools/swagger-parser": "^10.0.3",
"@budibase/backend-core": "^1.0.105-alpha.23",
"@budibase/client": "^1.0.105-alpha.23",
"@budibase/string-templates": "^1.0.105-alpha.23",
"@budibase/backend-core": "^1.0.105-alpha.24",
"@budibase/client": "^1.0.105-alpha.24",
"@budibase/string-templates": "^1.0.105-alpha.24",
"@bull-board/api": "^3.7.0",
"@bull-board/koa": "^3.7.0",
"@elastic/elasticsearch": "7.10.0",
"@google-cloud/firestore": "^5.0.2",
"@koa/router": "8.0.0",
"@sendgrid/mail": "7.1.1",
"@sentry/node": "^6.0.0",
"@sentry/node": "6.17.7",
"@types/koa__router": "^8.0.11",
"airtable": "0.10.1",
"arangojs": "7.2.0",
"aws-sdk": "^2.767.0",
@ -143,13 +144,14 @@
"@types/apidoc": "^0.50.0",
"@types/bull": "^3.15.1",
"@types/google-spreadsheet": "^3.1.5",
"@types/jest": "^26.0.23",
"@types/jest": "^27.4.1",
"@types/koa": "^2.13.3",
"@types/koa-router": "^7.4.2",
"@types/lodash": "4.14.180",
"@types/node": "^15.12.4",
"@types/oracledb": "^5.2.1",
"@types/redis": "^4.0.11",
"@typescript-eslint/parser": "5.12.0",
"apidoc": "^0.50.2",
"babel-jest": "^27.0.2",
"copyfiles": "^2.4.1",
@ -168,7 +170,7 @@
"swagger-jsdoc": "^6.1.0",
"ts-jest": "^27.0.3",
"ts-node": "^10.0.0",
"typescript": "^4.3.5",
"typescript": "^4.5.5",
"update-dotenv": "^1.1.1"
},
"optionalDependencies": {

View file

@ -42,6 +42,8 @@ async function init() {
REDIS_URL: "localhost:6379",
WORKER_URL: "http://localhost:4002",
INTERNAL_API_KEY: "budibase",
ACCOUNT_PORTAL_URL: "http://localhost:10001",
ACCOUNT_PORTAL_API_KEY: "budibase",
JWT_SECRET: "testsecret",
REDIS_PASSWORD: "budibase",
MINIO_ACCESS_KEY: "budibase",

View file

@ -1,29 +1,29 @@
const env = require("../../environment")
const packageJson = require("../../../package.json")
const {
import env from "../../environment"
import packageJson from "../../../package.json"
import {
createLinkView,
createRoutingView,
createAllSearchIndex,
} = require("../../db/views/staticViews")
const {
} from "../../db/views/staticViews"
import {
getTemplateStream,
createApp,
deleteApp,
} = require("../../utilities/fileSystem")
const {
} from "../../utilities/fileSystem"
import {
generateAppID,
getLayoutParams,
getScreenParams,
generateDevAppID,
DocumentTypes,
AppStatus,
} = require("../../db/utils")
} from "../../db/utils"
const {
BUILTIN_ROLE_IDS,
AccessController,
} = require("@budibase/backend-core/roles")
const { BASE_LAYOUTS } = require("../../constants/layouts")
const { cloneDeep } = require("lodash/fp")
import { BASE_LAYOUTS } from "../../constants/layouts"
import { cloneDeep } from "lodash/fp"
const { processObject } = require("@budibase/string-templates")
const {
getAllApps,
@ -31,24 +31,27 @@ const {
getProdAppID,
Replication,
} = require("@budibase/backend-core/db")
const { USERS_TABLE_SCHEMA } = require("../../constants")
const { removeAppFromUserRoles } = require("../../utilities/workerRequests")
const { clientLibraryPath, stringToReadStream } = require("../../utilities")
const { getAllLocks } = require("../../utilities/redis")
const {
import { USERS_TABLE_SCHEMA } from "../../constants"
import { removeAppFromUserRoles } from "../../utilities/workerRequests"
import { clientLibraryPath, stringToReadStream } from "../../utilities"
import { getAllLocks } from "../../utilities/redis"
import {
updateClientLibrary,
backupClientLibrary,
revertClientLibrary,
} = require("../../utilities/fileSystem/clientLibrary")
} from "../../utilities/fileSystem/clientLibrary"
const { getTenantId, isMultiTenant } = require("@budibase/backend-core/tenancy")
const { syncGlobalUsers } = require("./user")
import { syncGlobalUsers } from "./user"
const { app: appCache } = require("@budibase/backend-core/cache")
const { cleanupAutomations } = require("../../automations/utils")
import { cleanupAutomations } from "../../automations/utils"
const {
getAppDB,
getProdAppDB,
updateAppId,
} = require("@budibase/backend-core/context")
import { getUniqueRows } from "../../utilities/usageQuota/rows"
import { quotas } from "@budibase/pro"
import { errors } from "@budibase/backend-core"
const URL_REGEX_SLASH = /\/|\\/g
@ -61,7 +64,7 @@ async function getLayouts() {
include_docs: true,
})
)
).rows.map(row => row.doc)
).rows.map((row: any) => row.doc)
}
async function getScreens() {
@ -72,16 +75,16 @@ async function getScreens() {
include_docs: true,
})
)
).rows.map(row => row.doc)
).rows.map((row: any) => row.doc)
}
function getUserRoleId(ctx) {
function getUserRoleId(ctx: any) {
return !ctx.user.role || !ctx.user.role._id
? BUILTIN_ROLE_IDS.PUBLIC
: ctx.user.role._id
}
exports.getAppUrl = ctx => {
export const getAppUrl = (ctx: any) => {
// construct the url
let url
if (ctx.request.body.url) {
@ -97,29 +100,34 @@ exports.getAppUrl = ctx => {
return url
}
const checkAppUrl = (ctx, apps, url, currentAppId) => {
const checkAppUrl = (ctx: any, apps: any, url: any, currentAppId?: string) => {
if (currentAppId) {
apps = apps.filter(app => app.appId !== currentAppId)
apps = apps.filter((app: any) => app.appId !== currentAppId)
}
if (apps.some(app => app.url === url)) {
if (apps.some((app: any) => app.url === url)) {
ctx.throw(400, "App URL is already in use.")
}
}
const checkAppName = (ctx, apps, name, currentAppId) => {
const checkAppName = (
ctx: any,
apps: any,
name: any,
currentAppId?: string
) => {
// TODO: Replace with Joi
if (!name) {
ctx.throw(400, "Name is required")
}
if (currentAppId) {
apps = apps.filter(app => app.appId !== currentAppId)
apps = apps.filter((app: any) => app.appId !== currentAppId)
}
if (apps.some(app => app.name === name)) {
if (apps.some((app: any) => app.name === name)) {
ctx.throw(400, "App name is already in use.")
}
}
async function createInstance(template) {
async function createInstance(template: any) {
const tenantId = isMultiTenant() ? getTenantId() : null
const baseAppId = generateAppID(tenantId)
const appId = generateDevAppID(baseAppId)
@ -160,7 +168,7 @@ async function createInstance(template) {
return { _id: appId }
}
exports.fetch = async ctx => {
export const fetch = async (ctx: any) => {
const dev = ctx.query && ctx.query.status === AppStatus.DEV
const all = ctx.query && ctx.query.status === AppStatus.ALL
const apps = await getAllApps({ dev, all })
@ -172,7 +180,7 @@ exports.fetch = async ctx => {
if (app.status !== "development") {
continue
}
const lock = locks.find(lock => lock.appId === app.appId)
const lock = locks.find((lock: any) => lock.appId === app.appId)
if (lock) {
app.lockedBy = lock.user
} else {
@ -185,7 +193,7 @@ exports.fetch = async ctx => {
ctx.body = apps
}
exports.fetchAppDefinition = async ctx => {
export const fetchAppDefinition = async (ctx: any) => {
const layouts = await getLayouts()
const userRoleId = getUserRoleId(ctx)
const accessController = new AccessController()
@ -200,7 +208,7 @@ exports.fetchAppDefinition = async ctx => {
}
}
exports.fetchAppPackage = async ctx => {
export const fetchAppPackage = async (ctx: any) => {
const db = getAppDB()
const application = await db.get(DocumentTypes.APP_METADATA)
const layouts = await getLayouts()
@ -221,7 +229,7 @@ exports.fetchAppPackage = async ctx => {
}
}
exports.create = async ctx => {
const performAppCreate = async (ctx: any) => {
const apps = await getAllApps({ dev: true })
const name = ctx.request.body.name
checkAppName(ctx, apps, name)
@ -229,7 +237,7 @@ exports.create = async ctx => {
checkAppUrl(ctx, apps, url)
const { useTemplate, templateKey, templateString } = ctx.request.body
const instanceConfig = {
const instanceConfig: any = {
useTemplate,
key: templateKey,
templateString,
@ -280,13 +288,41 @@ exports.create = async ctx => {
}
await appCache.invalidateAppMetadata(appId, newApplication)
ctx.status = 200
return newApplication
}
const appPostCreate = async (ctx: any, appId: string) => {
// app import & template creation
if (ctx.request.body.useTemplate === "true") {
const rows = await getUniqueRows([appId])
const rowCount = rows ? rows.length : 0
if (rowCount) {
try {
await quotas.addRows(rowCount)
} catch (err: any) {
if (err.code && err.code === errors.codes.USAGE_LIMIT_EXCEEDED) {
// this import resulted in row usage exceeding the quota
// delete the app
// skip pre and post steps as no rows have been added to quotas yet
ctx.params.appId = appId
await destroyApp(ctx)
}
throw err
}
}
}
}
export const create = async (ctx: any) => {
const newApplication = await quotas.addApp(() => performAppCreate(ctx))
await appPostCreate(ctx, newApplication.appId)
ctx.body = newApplication
ctx.status = 200
}
// This endpoint currently operates as a PATCH rather than a PUT
// Thus name and url fields are handled only if present
exports.update = async ctx => {
export const update = async (ctx: any) => {
const apps = await getAllApps({ dev: true })
// validation
const name = ctx.request.body.name
@ -304,7 +340,7 @@ exports.update = async ctx => {
ctx.body = data
}
exports.updateClient = async ctx => {
export const updateClient = async (ctx: any) => {
// Get current app version
const db = getAppDB()
const application = await db.get(DocumentTypes.APP_METADATA)
@ -326,7 +362,7 @@ exports.updateClient = async ctx => {
ctx.body = data
}
exports.revertClient = async ctx => {
export const revertClient = async (ctx: any) => {
// Check app can be reverted
const db = getAppDB()
const application = await db.get(DocumentTypes.APP_METADATA)
@ -349,7 +385,7 @@ exports.revertClient = async ctx => {
ctx.body = data
}
exports.delete = async ctx => {
const destroyApp = async (ctx: any) => {
let appId = ctx.params.appId
let isUnpublish = ctx.query && ctx.query.unpublish
@ -360,6 +396,12 @@ exports.delete = async ctx => {
const db = isUnpublish ? getProdAppDB() : getAppDB()
const result = await db.destroy()
if (isUnpublish) {
await quotas.removePublishedApp()
} else {
await quotas.removeApp()
}
/* istanbul ignore next */
if (!env.isTest() && !isUnpublish) {
await deleteApp(appId)
@ -370,12 +412,30 @@ exports.delete = async ctx => {
// make sure the app/role doesn't stick around after the app has been deleted
await removeAppFromUserRoles(ctx, appId)
await appCache.invalidateAppMetadata(appId)
return result
}
const preDestroyApp = async (ctx: any) => {
const rows = await getUniqueRows([ctx.params.appId])
ctx.rowCount = rows.length
}
const postDestroyApp = async (ctx: any) => {
const rowCount = ctx.rowCount
if (rowCount) {
await quotas.removeRows(rowCount)
}
}
export const destroy = async (ctx: any) => {
await preDestroyApp(ctx)
const result = await destroyApp(ctx)
await postDestroyApp(ctx)
ctx.status = 200
ctx.body = result
}
exports.sync = async (ctx, next) => {
export const sync = async (ctx: any, next: any) => {
const appId = ctx.params.appId
if (!isDevAppID(appId)) {
ctx.throw(400, "This action cannot be performed for production apps")
@ -405,7 +465,7 @@ exports.sync = async (ctx, next) => {
let error
try {
await replication.replicate({
filter: function (doc) {
filter: function (doc: any) {
return doc._id !== DocumentTypes.APP_METADATA
},
})
@ -425,7 +485,7 @@ exports.sync = async (ctx, next) => {
}
}
const updateAppPackage = async (appPackage, appId) => {
const updateAppPackage = async (appPackage: any, appId: any) => {
const db = getAppDB()
const application = await db.get(DocumentTypes.APP_METADATA)
@ -444,7 +504,7 @@ const updateAppPackage = async (appPackage, appId) => {
return response
}
const createEmptyAppPackage = async (ctx, app) => {
const createEmptyAppPackage = async (ctx: any, app: any) => {
const db = getAppDB()
let screensAndLayouts = []

View file

@ -1,20 +1,18 @@
const Deployment = require("./Deployment")
const {
import Deployment from "./Deployment"
import {
Replication,
getProdAppID,
getDevelopmentAppID,
} = require("@budibase/backend-core/db")
const { DocumentTypes, getAutomationParams } = require("../../../db/utils")
const {
disableAllCrons,
enableCronTrigger,
} = require("../../../automations/utils")
const { app: appCache } = require("@budibase/backend-core/cache")
const {
} from "@budibase/backend-core/db"
import { DocumentTypes, getAutomationParams } from "../../../db/utils"
import { disableAllCrons, enableCronTrigger } from "../../../automations/utils"
import { app as appCache } from "@budibase/backend-core/cache"
import {
getAppId,
getAppDB,
getProdAppDB,
} = require("@budibase/backend-core/context")
} from "@budibase/backend-core/context"
import { quotas } from "@budibase/pro"
// the max time we can wait for an invalidation to complete before considering it failed
const MAX_PENDING_TIME_MS = 30 * 60000
@ -25,9 +23,10 @@ const DeploymentStatus = {
}
// checks that deployments are in a good state, any pending will be updated
async function checkAllDeployments(deployments) {
async function checkAllDeployments(deployments: any) {
let updated = false
for (let deployment of Object.values(deployments.history)) {
let deployment: any
for (deployment of Object.values(deployments.history)) {
// check that no deployments have crashed etc and are now stuck
if (
deployment.status === DeploymentStatus.PENDING &&
@ -41,7 +40,7 @@ async function checkAllDeployments(deployments) {
return { updated, deployments }
}
async function storeDeploymentHistory(deployment) {
async function storeDeploymentHistory(deployment: any) {
const deploymentJSON = deployment.getJSON()
const db = getAppDB()
@ -70,7 +69,7 @@ async function storeDeploymentHistory(deployment) {
return deployment
}
async function initDeployedApp(prodAppId) {
async function initDeployedApp(prodAppId: any) {
const db = getProdAppDB()
console.log("Reading automation docs")
const automations = (
@ -79,7 +78,7 @@ async function initDeployedApp(prodAppId) {
include_docs: true,
})
)
).rows.map(row => row.doc)
).rows.map((row: any) => row.doc)
console.log("You have " + automations.length + " automations")
const promises = []
console.log("Disabling prod crons..")
@ -93,16 +92,17 @@ async function initDeployedApp(prodAppId) {
console.log("Enabled cron triggers for deployed app..")
}
async function deployApp(deployment) {
async function deployApp(deployment: any) {
try {
const appId = getAppId()
const devAppId = getDevelopmentAppID(appId)
const productionAppId = getProdAppID(appId)
const replication = new Replication({
const config: any = {
source: devAppId,
target: productionAppId,
})
}
const replication = new Replication(config)
console.log("Replication object created")
@ -122,7 +122,7 @@ async function deployApp(deployment) {
console.log("Deployed app initialised, setting deployment to successful")
deployment.setStatus(DeploymentStatus.SUCCESS)
await storeDeploymentHistory(deployment)
} catch (err) {
} catch (err: any) {
deployment.setStatus(DeploymentStatus.FAILURE, err.message)
await storeDeploymentHistory(deployment)
throw {
@ -132,14 +132,11 @@ async function deployApp(deployment) {
}
}
exports.fetchDeployments = async function (ctx) {
export async function fetchDeployments(ctx: any) {
try {
const db = getAppDB()
const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS)
const { updated, deployments } = await checkAllDeployments(
deploymentDoc,
ctx.user
)
const { updated, deployments } = await checkAllDeployments(deploymentDoc)
if (updated) {
await db.put(deployments)
}
@ -149,7 +146,7 @@ exports.fetchDeployments = async function (ctx) {
}
}
exports.deploymentProgress = async function (ctx) {
export async function deploymentProgress(ctx: any) {
try {
const db = getAppDB()
const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS)
@ -162,7 +159,20 @@ exports.deploymentProgress = async function (ctx) {
}
}
exports.deployApp = async function (ctx) {
const isFirstDeploy = async () => {
try {
const db = getProdAppDB()
await db.get(DocumentTypes.APP_METADATA)
} catch (e: any) {
if (e.status === 404) {
return true
}
throw e
}
return false
}
const _deployApp = async function (ctx: any) {
let deployment = new Deployment()
console.log("Deployment object created")
deployment.setStatus(DeploymentStatus.PENDING)
@ -171,7 +181,14 @@ exports.deployApp = async function (ctx) {
console.log("Stored deployment history")
console.log("Deploying app...")
await deployApp(deployment)
if (await isFirstDeploy()) {
await quotas.addPublishedApp(() => deployApp(deployment))
} else {
await deployApp(deployment)
}
ctx.body = deployment
}
export { _deployApp as deployApp }

View file

@ -1,7 +1,7 @@
const { getAllApps } = require("@budibase/backend-core/db")
const { updateAppId } = require("@budibase/backend-core/context")
import { search as stringSearch } from "./utils"
import { default as controller } from "../application"
import * as controller from "../application"
import { Application } from "../../../definitions/common"
function fixAppID(app: Application, params: any) {
@ -59,7 +59,7 @@ export async function destroy(ctx: any, next: any) {
// get the app before deleting it
await setResponseApp(ctx)
const body = ctx.body
await controller.delete(ctx)
await controller.destroy(ctx)
// overwrite the body again
ctx.body = body
await next()

View file

@ -1,5 +1,5 @@
import { search as stringSearch } from "./utils"
import { default as queryController } from "../query"
import * as queryController from "../query"
export async function search(ctx: any, next: any) {
await queryController.fetch(ctx)

View file

@ -1,4 +1,4 @@
import { default as rowController } from "../row"
import * as rowController from "../row"
import { addRev } from "./utils"
import { Row } from "../../../definitions/common"
import { convertBookmark } from "../../../utilities"

View file

@ -1,22 +1,19 @@
const {
generateQueryID,
getQueryParams,
isProdAppID,
} = require("../../../db/utils")
const { BaseQueryVerbs } = require("../../../constants")
const { Thread, ThreadType } = require("../../../threads")
const { save: saveDatasource } = require("../datasource")
const { RestImporter } = require("./import")
const { invalidateDynamicVariables } = require("../../../threads/utils")
const environment = require("../../../environment")
const { getAppDB } = require("@budibase/backend-core/context")
import { generateQueryID, getQueryParams, isProdAppID } from "../../../db/utils"
import { BaseQueryVerbs } from "../../../constants"
import { Thread, ThreadType } from "../../../threads"
import { save as saveDatasource } from "../datasource"
import { RestImporter } from "./import"
import { invalidateDynamicVariables } from "../../../threads/utils"
import { QUERY_THREAD_TIMEOUT } from "../../../environment"
import { getAppDB } from "@budibase/backend-core/context"
import { quotas } from "@budibase/pro"
const Runner = new Thread(ThreadType.QUERY, {
timeoutMs: environment.QUERY_THREAD_TIMEOUT || 10000,
timeoutMs: QUERY_THREAD_TIMEOUT || 10000,
})
// simple function to append "readable" to all read queries
function enrichQueries(input) {
function enrichQueries(input: any) {
const wasArray = Array.isArray(input)
const queries = wasArray ? input : [input]
for (let query of queries) {
@ -27,7 +24,7 @@ function enrichQueries(input) {
return wasArray ? queries : queries[0]
}
exports.fetch = async function (ctx) {
export async function fetch(ctx: any) {
const db = getAppDB()
const body = await db.allDocs(
@ -36,10 +33,10 @@ exports.fetch = async function (ctx) {
})
)
ctx.body = enrichQueries(body.rows.map(row => row.doc))
ctx.body = enrichQueries(body.rows.map((row: any) => row.doc))
}
exports.import = async ctx => {
const _import = async (ctx: any) => {
const body = ctx.request.body
const data = body.data
@ -49,7 +46,7 @@ exports.import = async ctx => {
let datasourceId
if (!body.datasourceId) {
// construct new datasource
const info = await importer.getInfo()
const info: any = await importer.getInfo()
let datasource = {
type: "datasource",
source: "REST",
@ -77,8 +74,9 @@ exports.import = async ctx => {
}
ctx.status = 200
}
export { _import as import }
exports.save = async function (ctx) {
export async function save(ctx: any) {
const db = getAppDB()
const query = ctx.request.body
@ -93,7 +91,7 @@ exports.save = async function (ctx) {
ctx.message = `Query ${query.name} saved successfully.`
}
exports.find = async function (ctx) {
export async function find(ctx: any) {
const db = getAppDB()
const query = enrichQueries(await db.get(ctx.params.queryId))
// remove properties that could be dangerous in real app
@ -104,7 +102,7 @@ exports.find = async function (ctx) {
ctx.body = query
}
exports.preview = async function (ctx) {
export async function preview(ctx: any) {
const db = getAppDB()
const datasource = await db.get(ctx.request.body.datasourceId)
@ -114,16 +112,18 @@ exports.preview = async function (ctx) {
ctx.request.body
try {
const { rows, keys, info, extra } = await Runner.run({
appId: ctx.appId,
datasource,
queryVerb,
fields,
parameters,
transformer,
queryId,
})
const runFn = () =>
Runner.run({
appId: ctx.appId,
datasource,
queryVerb,
fields,
parameters,
transformer,
queryId,
})
const { rows, keys, info, extra } = await quotas.addQuery(runFn)
ctx.body = {
rows,
schemaFields: [...new Set(keys)],
@ -135,7 +135,7 @@ exports.preview = async function (ctx) {
}
}
async function execute(ctx, opts = { rowsOnly: false }) {
async function execute(ctx: any, opts = { rowsOnly: false }) {
const db = getAppDB()
const query = await db.get(ctx.params.queryId)
@ -153,16 +153,19 @@ async function execute(ctx, opts = { rowsOnly: false }) {
// call the relevant CRUD method on the integration class
try {
const { rows, pagination, extra } = await Runner.run({
appId: ctx.appId,
datasource,
queryVerb: query.queryVerb,
fields: query.fields,
pagination: ctx.request.body.pagination,
parameters: enrichedParameters,
transformer: query.transformer,
queryId: ctx.params.queryId,
})
const runFn = () =>
Runner.run({
appId: ctx.appId,
datasource,
queryVerb: query.queryVerb,
fields: query.fields,
pagination: ctx.request.body.pagination,
parameters: enrichedParameters,
transformer: query.transformer,
queryId: ctx.params.queryId,
})
const { rows, pagination, extra } = await quotas.addQuery(runFn)
if (opts && opts.rowsOnly) {
ctx.body = rows
} else {
@ -173,15 +176,15 @@ async function execute(ctx, opts = { rowsOnly: false }) {
}
}
exports.executeV1 = async function (ctx) {
export async function executeV1(ctx: any) {
return execute(ctx, { rowsOnly: true })
}
exports.executeV2 = async function (ctx) {
export async function executeV2(ctx: any) {
return execute(ctx, { rowsOnly: false })
}
const removeDynamicVariables = async queryId => {
const removeDynamicVariables = async (queryId: any) => {
const db = getAppDB()
const query = await db.get(queryId)
const datasource = await db.get(query.datasourceId)
@ -190,19 +193,19 @@ const removeDynamicVariables = async queryId => {
if (dynamicVariables) {
// delete dynamic variables from the datasource
datasource.config.dynamicVariables = dynamicVariables.filter(
dv => dv.queryId !== queryId
(dv: any) => dv.queryId !== queryId
)
await db.put(datasource)
// invalidate the deleted variables
const variablesToDelete = dynamicVariables.filter(
dv => dv.queryId === queryId
(dv: any) => dv.queryId === queryId
)
await invalidateDynamicVariables(variablesToDelete)
}
}
exports.destroy = async function (ctx) {
export async function destroy(ctx: any) {
const db = getAppDB()
await removeDynamicVariables(ctx.params.queryId)
await db.remove(ctx.params.queryId, ctx.params.revId)

View file

@ -52,7 +52,7 @@ interface RunConfig {
module External {
function buildFilters(
id: string | undefined,
id: string | undefined | string[],
filters: SearchFilters,
table: Table
) {

View file

@ -1,15 +1,16 @@
const internal = require("./internal")
const external = require("./external")
const { isExternalTable } = require("../../../integrations/utils")
import { quotas } from "@budibase/pro"
import internal from "./internal"
import external from "./external"
import { isExternalTable } from "../../../integrations/utils"
function pickApi(tableId) {
function pickApi(tableId: any) {
if (isExternalTable(tableId)) {
return external
}
return internal
}
function getTableId(ctx) {
function getTableId(ctx: any) {
if (ctx.request.body && ctx.request.body.tableId) {
return ctx.request.body.tableId
}
@ -21,13 +22,13 @@ function getTableId(ctx) {
}
}
exports.patch = async ctx => {
export async function patch(ctx: any): Promise<any> {
const appId = ctx.appId
const tableId = getTableId(ctx)
const body = ctx.request.body
// if it doesn't have an _id then its save
if (body && !body._id) {
return exports.save(ctx)
return save(ctx)
}
try {
const { row, table } = await pickApi(tableId).patch(ctx)
@ -41,13 +42,13 @@ exports.patch = async ctx => {
}
}
exports.save = async function (ctx) {
const saveRow = async (ctx: any) => {
const appId = ctx.appId
const tableId = getTableId(ctx)
const body = ctx.request.body
// if it has an ID already then its a patch
if (body && body._id) {
return exports.patch(ctx)
return patch(ctx)
}
try {
const { row, table } = await pickApi(tableId).save(ctx)
@ -60,7 +61,11 @@ exports.save = async function (ctx) {
}
}
exports.fetchView = async function (ctx) {
export async function save(ctx: any) {
await quotas.addRow(() => saveRow(ctx))
}
export async function fetchView(ctx: any) {
const tableId = getTableId(ctx)
try {
ctx.body = await pickApi(tableId).fetchView(ctx)
@ -69,7 +74,7 @@ exports.fetchView = async function (ctx) {
}
}
exports.fetch = async function (ctx) {
export async function fetch(ctx: any) {
const tableId = getTableId(ctx)
try {
ctx.body = await pickApi(tableId).fetch(ctx)
@ -78,7 +83,7 @@ exports.fetch = async function (ctx) {
}
}
exports.find = async function (ctx) {
export async function find(ctx: any) {
const tableId = getTableId(ctx)
try {
ctx.body = await pickApi(tableId).find(ctx)
@ -87,19 +92,21 @@ exports.find = async function (ctx) {
}
}
exports.destroy = async function (ctx) {
export async function destroy(ctx: any) {
const appId = ctx.appId
const inputs = ctx.request.body
const tableId = getTableId(ctx)
let response, row
if (inputs.rows) {
let { rows } = await pickApi(tableId).bulkDestroy(ctx)
await quotas.removeRows(rows.length)
response = rows
for (let row of rows) {
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, row)
}
} else {
let resp = await pickApi(tableId).destroy(ctx)
await quotas.removeRow()
response = resp.response
row = resp.row
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, row)
@ -110,7 +117,7 @@ exports.destroy = async function (ctx) {
ctx.body = response
}
exports.search = async ctx => {
export async function search(ctx: any) {
const tableId = getTableId(ctx)
try {
ctx.status = 200
@ -120,7 +127,7 @@ exports.search = async ctx => {
}
}
exports.validate = async function (ctx) {
export async function validate(ctx: any) {
const tableId = getTableId(ctx)
try {
ctx.body = await pickApi(tableId).validate(ctx)
@ -129,7 +136,7 @@ exports.validate = async function (ctx) {
}
}
exports.fetchEnrichedRow = async function (ctx) {
export async function fetchEnrichedRow(ctx: any) {
const tableId = getTableId(ctx)
try {
ctx.body = await pickApi(tableId).fetchEnrichedRow(ctx)
@ -138,7 +145,7 @@ exports.fetchEnrichedRow = async function (ctx) {
}
}
exports.export = async function (ctx) {
export const exportRows = async (ctx: any) => {
const tableId = getTableId(ctx)
try {
ctx.body = await pickApi(tableId).exportRows(ctx)

View file

@ -1,19 +1,19 @@
const linkRows = require("../../../db/linkedRows")
const { getRowParams, generateTableID } = require("../../../db/utils")
const { FieldTypes } = require("../../../constants")
const {
import { updateLinks, EventType } from "../../../db/linkedRows"
import { getRowParams, generateTableID } from "../../../db/utils"
import { FieldTypes } from "../../../constants"
import {
TableSaveFunctions,
hasTypeChanged,
getTable,
handleDataImport,
} = require("./utils")
const usageQuota = require("../../../utilities/usageQuota")
} from "./utils"
const { getAppDB } = require("@budibase/backend-core/context")
const env = require("../../../environment")
const { cleanupAttachments } = require("../../../utilities/rowProcessor")
const { runStaticFormulaChecks } = require("./bulkFormula")
import { isTest } from "../../../environment"
import { cleanupAttachments } from "../../../utilities/rowProcessor"
import { runStaticFormulaChecks } from "./bulkFormula"
import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"
exports.save = async function (ctx) {
export async function save(ctx: any) {
const db = getAppDB()
const { dataImport, ...rest } = ctx.request.body
let tableToSave = {
@ -80,10 +80,8 @@ exports.save = async function (ctx) {
// update linked rows
try {
const linkResp = await linkRows.updateLinks({
eventType: oldTable
? linkRows.EventType.TABLE_UPDATED
: linkRows.EventType.TABLE_SAVE,
const linkResp: any = await updateLinks({
eventType: oldTable ? EventType.TABLE_UPDATED : EventType.TABLE_SAVE,
table: tableToSave,
oldTable: oldTable,
})
@ -105,11 +103,11 @@ exports.save = async function (ctx) {
tableToSave = await tableSaveFunctions.after(tableToSave)
// has to run after, make sure it has _id
await runStaticFormulaChecks(tableToSave, { oldTable })
await runStaticFormulaChecks(tableToSave, { oldTable, deletion: null })
return tableToSave
}
exports.destroy = async function (ctx) {
export async function destroy(ctx: any) {
const db = getAppDB()
const tableToDelete = await db.get(ctx.params.tableId)
@ -119,12 +117,14 @@ exports.destroy = async function (ctx) {
include_docs: true,
})
)
await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true })))
await usageQuota.update(usageQuota.Properties.ROW, -rows.rows.length)
await db.bulkDocs(
rows.rows.map((row: any) => ({ ...row.doc, _deleted: true }))
)
await quotas.removeRows(rows.rows.length)
// update linked rows
await linkRows.updateLinks({
eventType: linkRows.EventType.TABLE_DELETE,
await updateLinks({
eventType: EventType.TABLE_DELETE,
table: tableToDelete,
})
@ -132,10 +132,10 @@ exports.destroy = async function (ctx) {
await db.remove(tableToDelete)
// remove table search index
if (!env.isTest()) {
if (!isTest()) {
const currentIndexes = await db.getIndexes()
const existingIndex = currentIndexes.indexes.find(
existing => existing.name === `search:${ctx.params.tableId}`
(existing: any) => existing.name === `search:${ctx.params.tableId}`
)
if (existingIndex) {
await db.deleteIndex(existingIndex)
@ -143,12 +143,15 @@ exports.destroy = async function (ctx) {
}
// has to run after, make sure it has _id
await runStaticFormulaChecks(tableToDelete, { deletion: true })
await runStaticFormulaChecks(tableToDelete, {
oldTable: null,
deletion: true,
})
await cleanupAttachments(tableToDelete, { rows })
return tableToDelete
}
exports.bulkImport = async function (ctx) {
export async function bulkImport(ctx: any) {
const table = await getTable(ctx.params.tableId)
const { dataImport } = ctx.request.body
await handleDataImport(ctx.user, table, dataImport)

View file

@ -1,34 +1,34 @@
const csvParser = require("../../../utilities/csvParser")
const {
import { transform } from "../../../utilities/csvParser"
import {
getRowParams,
generateRowID,
InternalTables,
getTableParams,
BudibaseInternalDB,
} = require("../../../db/utils")
const { isEqual } = require("lodash")
const { AutoFieldSubTypes, FieldTypes } = require("../../../constants")
const {
} from "../../../db/utils"
import { isEqual } from "lodash"
import { AutoFieldSubTypes, FieldTypes } from "../../../constants"
import {
inputProcessing,
cleanupAttachments,
} = require("../../../utilities/rowProcessor")
const {
} from "../../../utilities/rowProcessor"
import {
USERS_TABLE_SCHEMA,
SwitchableTypes,
CanSwitchTypes,
} = require("../../../constants")
const {
} from "../../../constants"
import {
isExternalTable,
breakExternalTableId,
isSQL,
} = require("../../../integrations/utils")
const { getViews, saveView } = require("../view/utils")
const viewTemplate = require("../view/viewBuilder")
const usageQuota = require("../../../utilities/usageQuota")
} from "../../../integrations/utils"
import { getViews, saveView } from "../view/utils"
import viewTemplate from "../view/viewBuilder"
const { getAppDB } = require("@budibase/backend-core/context")
const { cloneDeep } = require("lodash/fp")
import { cloneDeep } from "lodash/fp"
import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"
exports.clearColumns = async (table, columnNames) => {
export async function clearColumns(table: any, columnNames: any) {
const db = getAppDB()
const rows = await db.allDocs(
getRowParams(table._id, null, {
@ -36,18 +36,18 @@ exports.clearColumns = async (table, columnNames) => {
})
)
return db.bulkDocs(
rows.rows.map(({ doc }) => {
columnNames.forEach(colName => delete doc[colName])
rows.rows.map(({ doc }: any) => {
columnNames.forEach((colName: any) => delete doc[colName])
return doc
})
)
}
exports.checkForColumnUpdates = async (oldTable, updatedTable) => {
export async function checkForColumnUpdates(oldTable: any, updatedTable: any) {
const db = getAppDB()
let updatedRows = []
const rename = updatedTable._rename
let deletedColumns = []
let deletedColumns: any = []
if (oldTable && oldTable.schema && updatedTable.schema) {
deletedColumns = Object.keys(oldTable.schema).filter(
colName => updatedTable.schema[colName] == null
@ -61,14 +61,14 @@ exports.checkForColumnUpdates = async (oldTable, updatedTable) => {
include_docs: true,
})
)
const rawRows = rows.rows.map(({ doc }) => doc)
updatedRows = rawRows.map(row => {
const rawRows = rows.rows.map(({ doc }: any) => doc)
updatedRows = rawRows.map((row: any) => {
row = cloneDeep(row)
if (rename) {
row[rename.updated] = row[rename.old]
delete row[rename.old]
} else if (deletedColumns.length !== 0) {
deletedColumns.forEach(colName => delete row[colName])
deletedColumns.forEach((colName: any) => delete row[colName])
}
return row
})
@ -76,14 +76,14 @@ exports.checkForColumnUpdates = async (oldTable, updatedTable) => {
// cleanup any attachments from object storage for deleted attachment columns
await cleanupAttachments(updatedTable, { oldTable, rows: rawRows })
// Update views
await exports.checkForViewUpdates(updatedTable, rename, deletedColumns)
await checkForViewUpdates(updatedTable, rename, deletedColumns)
delete updatedTable._rename
}
return { rows: updatedRows, table: updatedTable }
}
// makes sure the passed in table isn't going to reset the auto ID
exports.makeSureTableUpToDate = (table, tableToSave) => {
export function makeSureTableUpToDate(table: any, tableToSave: any) {
if (!table) {
return tableToSave
}
@ -91,7 +91,9 @@ exports.makeSureTableUpToDate = (table, tableToSave) => {
tableToSave._rev = table._rev
// make sure auto IDs are always updated - these are internal
// so the client may not know they have changed
for (let [field, column] of Object.entries(table.schema)) {
let field: any
let column: any
for ([field, column] of Object.entries(table.schema)) {
if (
column.autocolumn &&
column.subtype === AutoFieldSubTypes.AUTO_ID &&
@ -103,30 +105,32 @@ exports.makeSureTableUpToDate = (table, tableToSave) => {
return tableToSave
}
exports.handleDataImport = async (user, table, dataImport) => {
export async function handleDataImport(user: any, table: any, dataImport: any) {
if (!dataImport || !dataImport.csvString) {
return table
}
const db = getAppDB()
// Populate the table with rows imported from CSV in a bulk update
const data = await csvParser.transform({
const data = await transform({
...dataImport,
existingTable: table,
})
let finalData = []
let finalData: any = []
for (let i = 0; i < data.length; i++) {
let row = data[i]
row._id = generateRowID(table._id)
row.tableId = table._id
const processed = inputProcessing(user, table, row, {
const processed: any = inputProcessing(user, table, row, {
noAutoRelationships: true,
})
table = processed.table
row = processed.row
for (let [fieldName, schema] of Object.entries(table.schema)) {
let fieldName: any
let schema: any
for ([fieldName, schema] of Object.entries(table.schema)) {
// check whether the options need to be updated for inclusion as part of the data import
if (
schema.type === FieldTypes.OPTIONS &&
@ -143,17 +147,13 @@ exports.handleDataImport = async (user, table, dataImport) => {
finalData.push(row)
}
await usageQuota.update(usageQuota.Properties.ROW, finalData.length, {
dryRun: true,
})
await db.bulkDocs(finalData)
await usageQuota.update(usageQuota.Properties.ROW, finalData.length)
await quotas.addRows(finalData.length, () => db.bulkDocs(finalData))
let response = await db.put(table)
table._rev = response._rev
return table
}
exports.handleSearchIndexes = async table => {
export async function handleSearchIndexes(table: any) {
const db = getAppDB()
// create relevant search indexes
if (table.indexes && table.indexes.length > 0) {
@ -161,12 +161,12 @@ exports.handleSearchIndexes = async table => {
const indexName = `search:${table._id}`
const existingIndex = currentIndexes.indexes.find(
existing => existing.name === indexName
(existing: any) => existing.name === indexName
)
if (existingIndex) {
const currentFields = existingIndex.def.fields.map(
field => Object.keys(field)[0]
(field: any) => Object.keys(field)[0]
)
// if index fields have changed, delete the original index
@ -197,7 +197,7 @@ exports.handleSearchIndexes = async table => {
return table
}
exports.checkStaticTables = table => {
export function checkStaticTables(table: any) {
// check user schema has all required elements
if (table._id === InternalTables.USER_METADATA) {
for (let [key, schema] of Object.entries(USERS_TABLE_SCHEMA.schema)) {
@ -211,7 +211,13 @@ exports.checkStaticTables = table => {
}
class TableSaveFunctions {
constructor({ user, oldTable, dataImport }) {
db: any
user: any
oldTable: any
dataImport: any
rows: any
constructor({ user, oldTable, dataImport }: any) {
this.db = getAppDB()
this.user = user
this.oldTable = oldTable
@ -221,25 +227,25 @@ class TableSaveFunctions {
}
// before anything is done
async before(table) {
async before(table: any) {
if (this.oldTable) {
table = exports.makeSureTableUpToDate(this.oldTable, table)
table = makeSureTableUpToDate(this.oldTable, table)
}
table = exports.checkStaticTables(table)
table = checkStaticTables(table)
return table
}
// when confirmed valid
async mid(table) {
let response = await exports.checkForColumnUpdates(this.oldTable, table)
async mid(table: any) {
let response = await checkForColumnUpdates(this.oldTable, table)
this.rows = this.rows.concat(response.rows)
return table
}
// after saving
async after(table) {
table = await exports.handleSearchIndexes(table)
table = await exports.handleDataImport(this.user, table, this.dataImport)
async after(table: any) {
table = await handleSearchIndexes(table)
table = await handleDataImport(this.user, table, this.dataImport)
return table
}
@ -248,21 +254,21 @@ class TableSaveFunctions {
}
}
exports.getAllInternalTables = async () => {
export async function getAllInternalTables() {
const db = getAppDB()
const internalTables = await db.allDocs(
getTableParams(null, {
include_docs: true,
})
)
return internalTables.rows.map(tableDoc => ({
return internalTables.rows.map((tableDoc: any) => ({
...tableDoc.doc,
type: "internal",
sourceId: BudibaseInternalDB._id,
}))
}
exports.getAllExternalTables = async datasourceId => {
export async function getAllExternalTables(datasourceId: any) {
const db = getAppDB()
const datasource = await db.get(datasourceId)
if (!datasource || !datasource.entities) {
@ -271,24 +277,28 @@ exports.getAllExternalTables = async datasourceId => {
return datasource.entities
}
exports.getExternalTable = async (datasourceId, tableName) => {
const entities = await exports.getAllExternalTables(datasourceId)
export async function getExternalTable(datasourceId: any, tableName: any) {
const entities = await getAllExternalTables(datasourceId)
return entities[tableName]
}
exports.getTable = async tableId => {
export async function getTable(tableId: any) {
const db = getAppDB()
if (isExternalTable(tableId)) {
let { datasourceId, tableName } = breakExternalTableId(tableId)
const datasource = await db.get(datasourceId)
const table = await exports.getExternalTable(datasourceId, tableName)
const table = await getExternalTable(datasourceId, tableName)
return { ...table, sql: isSQL(datasource) }
} else {
return db.get(tableId)
}
}
exports.checkForViewUpdates = async (table, rename, deletedColumns) => {
export async function checkForViewUpdates(
table: any,
rename: any,
deletedColumns: any
) {
const views = await getViews()
const tableViews = views.filter(view => view.meta.tableId === table._id)
@ -312,7 +322,7 @@ exports.checkForViewUpdates = async (table, rename, deletedColumns) => {
// Update filters if required
if (view.meta.filters) {
view.meta.filters.forEach(filter => {
view.meta.filters.forEach((filter: any) => {
if (filter.key === rename.old) {
filter.key = rename.updated
needsUpdated = true
@ -320,7 +330,7 @@ exports.checkForViewUpdates = async (table, rename, deletedColumns) => {
})
}
} else if (deletedColumns) {
deletedColumns.forEach(column => {
deletedColumns.forEach((column: any) => {
// Remove calculation statement if required
if (view.meta.field === column) {
delete view.meta.field
@ -338,7 +348,7 @@ exports.checkForViewUpdates = async (table, rename, deletedColumns) => {
// Remove filters referencing deleted field if required
if (view.meta.filters && view.meta.filters.length) {
const initialLength = view.meta.filters.length
view.meta.filters = view.meta.filters.filter(filter => {
view.meta.filters = view.meta.filters.filter((filter: any) => {
return filter.key !== column
})
if (initialLength !== view.meta.filters.length) {
@ -360,16 +370,20 @@ exports.checkForViewUpdates = async (table, rename, deletedColumns) => {
}
}
exports.generateForeignKey = (column, relatedTable) => {
export function generateForeignKey(column: any, relatedTable: any) {
return `fk_${relatedTable.name}_${column.fieldName}`
}
exports.generateJunctionTableName = (column, table, relatedTable) => {
export function generateJunctionTableName(
column: any,
table: any,
relatedTable: any
) {
return `jt_${table.name}_${relatedTable.name}_${column.name}_${column.fieldName}`
}
exports.foreignKeyStructure = (keyName, meta = null) => {
const structure = {
export function foreignKeyStructure(keyName: any, meta = null) {
const structure: any = {
type: FieldTypes.NUMBER,
constraints: {},
name: keyName,
@ -380,7 +394,7 @@ exports.foreignKeyStructure = (keyName, meta = null) => {
return structure
}
exports.areSwitchableTypes = (type1, type2) => {
export function areSwitchableTypes(type1: any, type2: any) {
if (
SwitchableTypes.indexOf(type1) === -1 &&
SwitchableTypes.indexOf(type2) === -1
@ -397,21 +411,24 @@ exports.areSwitchableTypes = (type1, type2) => {
return false
}
exports.hasTypeChanged = (table, oldTable) => {
export function hasTypeChanged(table: any, oldTable: any) {
if (!oldTable) {
return false
}
for (let [key, field] of Object.entries(oldTable.schema)) {
let key: any
let field: any
for ([key, field] of Object.entries(oldTable.schema)) {
const oldType = field.type
if (!table.schema[key]) {
continue
}
const newType = table.schema[key].type
if (oldType !== newType && !exports.areSwitchableTypes(oldType, newType)) {
if (oldType !== newType && !areSwitchableTypes(oldType, newType)) {
return true
}
}
return false
}
exports.TableSaveFunctions = TableSaveFunctions
const _TableSaveFunctions = TableSaveFunctions
export { _TableSaveFunctions as TableSaveFunctions }

View file

@ -4,12 +4,14 @@ const {
auditLog,
buildTenancyMiddleware,
} = require("@budibase/backend-core/auth")
const { errors } = require("@budibase/backend-core")
const currentApp = require("../middleware/currentapp")
const compress = require("koa-compress")
const zlib = require("zlib")
const { mainRoutes, staticRoutes, publicRoutes } = require("./routes")
const pkg = require("../../package.json")
const env = require("../environment")
const { middleware: pro } = require("@budibase/pro")
const router = new Router()
@ -52,6 +54,7 @@ router
})
)
.use(currentApp)
.use(pro.licensing())
.use(auditLog)
// error handling middleware
@ -60,10 +63,12 @@ router.use(async (ctx, next) => {
await next()
} catch (err) {
ctx.status = err.status || err.statusCode || 500
const error = errors.getPublicError(err)
ctx.body = {
message: err.message,
status: ctx.status,
validationErrors: err.validation,
error,
}
if (env.NODE_ENV !== "jest") {
ctx.log.error(err)

View file

@ -1,14 +1,13 @@
const Router = require("@koa/router")
const controller = require("../controllers/application")
const authorized = require("../../middleware/authorized")
const { BUILDER } = require("@budibase/backend-core/permissions")
const usage = require("../../middleware/usageQuota")
import Router from "@koa/router"
import * as controller from "../controllers/application"
import authorized from "../../middleware/authorized"
import { BUILDER } from "@budibase/backend-core/permissions"
const router = Router()
const router = new Router()
router
.post("/api/applications/:appId/sync", authorized(BUILDER), controller.sync)
.post("/api/applications", authorized(BUILDER), usage, controller.create)
.post("/api/applications", authorized(BUILDER), controller.create)
.get("/api/applications/:appId/definition", controller.fetchAppDefinition)
.get("/api/applications", controller.fetch)
.get("/api/applications/:appId/appPackage", controller.fetchAppPackage)
@ -23,11 +22,6 @@ router
authorized(BUILDER),
controller.revertClient
)
.delete(
"/api/applications/:appId",
authorized(BUILDER),
usage,
controller.delete
)
.delete("/api/applications/:appId", authorized(BUILDER), controller.destroy)
module.exports = router
export default router

View file

@ -1,62 +0,0 @@
const authRoutes = require("./auth")
const layoutRoutes = require("./layout")
const screenRoutes = require("./screen")
const userRoutes = require("./user")
const applicationRoutes = require("./application")
const tableRoutes = require("./table")
const rowRoutes = require("./row")
const viewRoutes = require("./view")
const staticRoutes = require("./static")
const componentRoutes = require("./component")
const automationRoutes = require("./automation")
const webhookRoutes = require("./webhook")
const roleRoutes = require("./role")
const deployRoutes = require("./deploy")
const apiKeysRoutes = require("./apikeys")
const templatesRoutes = require("./templates")
const analyticsRoutes = require("./analytics")
const routingRoutes = require("./routing")
const integrationRoutes = require("./integration")
const permissionRoutes = require("./permission")
const datasourceRoutes = require("./datasource")
const queryRoutes = require("./query")
const backupRoutes = require("./backup")
const metadataRoutes = require("./metadata")
const devRoutes = require("./dev")
const cloudRoutes = require("./cloud")
const migrationRoutes = require("./migrations")
const publicRoutes = require("./public")
exports.mainRoutes = [
authRoutes,
deployRoutes,
layoutRoutes,
screenRoutes,
userRoutes,
applicationRoutes,
automationRoutes,
viewRoutes,
componentRoutes,
roleRoutes,
apiKeysRoutes,
templatesRoutes,
analyticsRoutes,
webhookRoutes,
routingRoutes,
integrationRoutes,
permissionRoutes,
datasourceRoutes,
queryRoutes,
backupRoutes,
metadataRoutes,
devRoutes,
cloudRoutes,
// these need to be handled last as they still use /api/:tableId
// this could be breaking as koa may recognise other routes as this
tableRoutes,
rowRoutes,
migrationRoutes,
]
exports.publicRoutes = publicRoutes
exports.staticRoutes = staticRoutes

View file

@ -0,0 +1,60 @@
import authRoutes from "./auth"
import layoutRoutes from "./layout"
import screenRoutes from "./screen"
import userRoutes from "./user"
import applicationRoutes from "./application"
import tableRoutes from "./table"
import rowRoutes from "./row"
import viewRoutes from "./view"
import componentRoutes from "./component"
import automationRoutes from "./automation"
import webhookRoutes from "./webhook"
import roleRoutes from "./role"
import deployRoutes from "./deploy"
import apiKeysRoutes from "./apikeys"
import templatesRoutes from "./templates"
import analyticsRoutes from "./analytics"
import routingRoutes from "./routing"
import integrationRoutes from "./integration"
import permissionRoutes from "./permission"
import datasourceRoutes from "./datasource"
import queryRoutes from "./query"
import backupRoutes from "./backup"
import metadataRoutes from "./metadata"
import devRoutes from "./dev"
import cloudRoutes from "./cloud"
import migrationRoutes from "./migrations"
export { default as staticRoutes } from "./static"
export { default as publicRoutes } from "./public"
export const mainRoutes = [
authRoutes,
deployRoutes,
layoutRoutes,
screenRoutes,
userRoutes,
applicationRoutes,
automationRoutes,
viewRoutes,
componentRoutes,
roleRoutes,
apiKeysRoutes,
templatesRoutes,
analyticsRoutes,
webhookRoutes,
routingRoutes,
integrationRoutes,
permissionRoutes,
datasourceRoutes,
queryRoutes,
backupRoutes,
metadataRoutes,
devRoutes,
cloudRoutes,
// these need to be handled last as they still use /api/:tableId
// this could be breaking as koa may recognise other routes as this
tableRoutes,
rowRoutes,
migrationRoutes,
]

View file

@ -3,7 +3,6 @@ import queryEndpoints from "./queries"
import tableEndpoints from "./tables"
import rowEndpoints from "./rows"
import userEndpoints from "./users"
import usage from "../../../middleware/usageQuota"
import authorized from "../../../middleware/authorized"
import publicApi from "../../../middleware/publicApi"
import { paramResource, paramSubResource } from "../../../middleware/resourceId"
@ -114,8 +113,6 @@ function applyRoutes(
// add the authorization middleware, using the correct perm type
addMiddleware(endpoints.read, authorized(permType, PermissionLevels.READ))
addMiddleware(endpoints.write, authorized(permType, PermissionLevels.WRITE))
// add the usage quota middleware
addMiddleware(endpoints.write, usage)
// add the output mapper middleware
addMiddleware(endpoints.read, mapperMiddleware, { output: true })
addMiddleware(endpoints.write, mapperMiddleware, { output: true })
@ -130,4 +127,4 @@ applyRoutes(queryEndpoints, PermissionTypes.QUERY, "queryId")
// needs to be applied last for routing purposes, don't override other endpoints
applyRoutes(rowEndpoints, PermissionTypes.TABLE, "tableId", "rowId")
module.exports = publicRouter
export default publicRouter

View file

@ -1,18 +1,14 @@
const Router = require("@koa/router")
const rowController = require("../controllers/row")
const authorized = require("../../middleware/authorized")
const usage = require("../../middleware/usageQuota")
const {
paramResource,
paramSubResource,
} = require("../../middleware/resourceId")
import Router from "@koa/router"
import * as rowController from "../controllers/row"
import authorized from "../../middleware/authorized"
import { paramResource, paramSubResource } from "../../middleware/resourceId"
const {
PermissionLevels,
PermissionTypes,
} = require("@budibase/backend-core/permissions")
const { internalSearchValidator } = require("./utils/validators")
const router = Router()
const router = new Router()
router
/**
@ -180,7 +176,6 @@ router
"/api/:tableId/rows",
paramResource("tableId"),
authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
usage,
rowController.save
)
/**
@ -195,7 +190,6 @@ router
"/api/:tableId/rows",
paramResource("tableId"),
authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
usage,
rowController.patch
)
/**
@ -248,7 +242,6 @@ router
"/api/:tableId/rows",
paramResource("tableId"),
authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
usage,
rowController.destroy
)
@ -269,8 +262,7 @@ router
"/api/:tableId/rows/exportRows",
paramResource("tableId"),
authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
usage,
rowController.export
rowController.exportRows
)
module.exports = router
export default router

View file

@ -1,19 +1,19 @@
const Router = require("@koa/router")
const controller = require("../controllers/static")
const { budibaseTempDir } = require("../../utilities/budibaseDir")
const authorized = require("../../middleware/authorized")
const {
import Router from "@koa/router"
import * as controller from "../controllers/static"
import { budibaseTempDir } from "../../utilities/budibaseDir"
import authorized from "../../middleware/authorized"
import {
BUILDER,
PermissionTypes,
PermissionLevels,
} = require("@budibase/backend-core/permissions")
const env = require("../../environment")
const { paramResource } = require("../../middleware/resourceId")
} from "@budibase/backend-core/permissions"
import * as env from "../../environment"
import { paramResource } from "../../middleware/resourceId"
const router = Router()
const router = new Router()
/* istanbul ignore next */
router.param("file", async (file, ctx, next) => {
router.param("file", async (file: any, ctx: any, next: any) => {
ctx.file = file && file.includes(".") ? file : "index.html"
if (!ctx.file.startsWith("budibase-client")) {
return next()
@ -52,4 +52,4 @@ router
controller.getSignedUploadURL
)
module.exports = router
export default router

View file

@ -1,31 +1,38 @@
const rowController = require("../../../controllers/row")
const appController = require("../../../controllers/application")
const { AppStatus } = require("../../../../db/utils")
const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles")
const { TENANT_ID } = require("../../../../tests/utilities/structures")
const { getAppDB, doInAppContext } = require("@budibase/backend-core/context")
const env = require("../../../../environment")
import * as rowController from "../../../controllers/row"
import * as appController from "../../../controllers/application"
import { AppStatus } from "../../../../db/utils"
import { BUILTIN_ROLE_IDS } from "@budibase/backend-core/roles"
import { TENANT_ID } from "../../../../tests/utilities/structures"
import { getAppDB, doInAppContext } from "@budibase/backend-core/context"
import * as env from "../../../../environment"
function Request(appId, params) {
this.appId = appId
this.params = params
this.request = {}
class Request {
appId: any
params: any
request: any
body: any
constructor(appId: any, params: any) {
this.appId = appId
this.params = params
this.request = {}
}
}
function runRequest(appId, controlFunc, request) {
function runRequest(appId: any, controlFunc: any, request?: any) {
return doInAppContext(appId, async () => {
return controlFunc(request)
})
}
exports.getAllTableRows = async config => {
export const getAllTableRows = async (config: any) => {
const req = new Request(config.appId, { tableId: config.table._id })
await runRequest(config.appId, rowController.fetch, req)
return req.body
}
exports.clearAllApps = async (tenantId = TENANT_ID) => {
const req = { query: { status: AppStatus.DEV }, user: { tenantId } }
export const clearAllApps = async (tenantId = TENANT_ID) => {
const req: any = { query: { status: AppStatus.DEV }, user: { tenantId } }
await appController.fetch(req)
const apps = req.body
if (!apps || apps.length <= 0) {
@ -34,11 +41,11 @@ exports.clearAllApps = async (tenantId = TENANT_ID) => {
for (let app of apps) {
const { appId } = app
const req = new Request(null, { appId })
await runRequest(appId, appController.delete, req)
await runRequest(appId, appController.destroy, req)
}
}
exports.clearAllAutomations = async config => {
export const clearAllAutomations = async (config: any) => {
const automations = await config.getAllAutomations()
for (let auto of automations) {
await doInAppContext(config.appId, async () => {
@ -47,7 +54,12 @@ exports.clearAllAutomations = async config => {
}
}
exports.createRequest = (request, method, url, body) => {
export const createRequest = (
request: any,
method: any,
url: any,
body: any
) => {
let req
if (method === "POST") req = request.post(url).send(body)
@ -59,7 +71,12 @@ exports.createRequest = (request, method, url, body) => {
return req
}
exports.checkBuilderEndpoint = async ({ config, method, url, body }) => {
export const checkBuilderEndpoint = async ({
config,
method,
url,
body,
}: any) => {
const headers = await config.login({
userId: "us_fail",
builder: false,
@ -71,14 +88,14 @@ exports.checkBuilderEndpoint = async ({ config, method, url, body }) => {
.expect(403)
}
exports.checkPermissionsEndpoint = async ({
export const checkPermissionsEndpoint = async ({
config,
method,
url,
body,
passRole,
failRole,
}) => {
}: any) => {
const passHeader = await config.login({
roleId: passRole,
prodApp: true,
@ -106,11 +123,11 @@ exports.checkPermissionsEndpoint = async ({
.expect(403)
}
exports.getDB = () => {
export const getDB = () => {
return getAppDB()
}
exports.testAutomation = async (config, automation) => {
export const testAutomation = async (config: any, automation: any) => {
return runRequest(automation.appId, async () => {
return await config.request
.post(`/api/automations/${automation._id}/test`)
@ -126,7 +143,7 @@ exports.testAutomation = async (config, automation) => {
})
}
exports.runInProd = async func => {
export const runInProd = async (func: any) => {
const nodeEnv = env.NODE_ENV
const workerId = env.JEST_WORKER_ID
env._set("NODE_ENV", "PRODUCTION")

View file

@ -1,6 +1,5 @@
// need to load environment first
import { ExtendableContext } from "koa"
import * as env from "./environment"
const CouchDB = require("./db")
require("@budibase/backend-core").init(CouchDB)
@ -15,7 +14,7 @@ const automations = require("./automations/index")
const Sentry = require("@sentry/node")
const fileSystem = require("./utilities/fileSystem")
const bullboard = require("./automations/bullboard")
const redis = require("./utilities/redis")
import redis from "./utilities/redis"
import * as migrations from "./migrations"
const app = new Koa()

View file

@ -1,9 +1,8 @@
const rowController = require("../../api/controllers/row")
const automationUtils = require("../automationUtils")
const usage = require("../../utilities/usageQuota")
const { buildCtx } = require("./utils")
import { save } from "../../api/controllers/row"
import { cleanUpRow, getError } from "../automationUtils"
import { buildCtx } from "./utils"
exports.definition = {
export const definition = {
name: "Create Row",
tagline: "Create a {{inputs.enriched.table.name}} row",
icon: "TableRowAddBottom",
@ -59,7 +58,7 @@ exports.definition = {
},
}
exports.run = async function ({ inputs, appId, emitter }) {
export async function run({ inputs, appId, emitter }: any) {
if (inputs.row == null || inputs.row.tableId == null) {
return {
success: false,
@ -69,7 +68,7 @@ exports.run = async function ({ inputs, appId, emitter }) {
}
}
// have to clean up the row, remove the table from it
const ctx = buildCtx(appId, emitter, {
const ctx: any = buildCtx(appId, emitter, {
body: inputs.row,
params: {
tableId: inputs.row.tableId,
@ -77,13 +76,8 @@ exports.run = async function ({ inputs, appId, emitter }) {
})
try {
inputs.row = await automationUtils.cleanUpRow(
inputs.row.tableId,
inputs.row
)
await usage.update(usage.Properties.ROW, 1, { dryRun: true })
await rowController.save(ctx)
await usage.update(usage.Properties.ROW, 1)
inputs.row = await cleanUpRow(inputs.row.tableId, inputs.row)
await save(ctx)
return {
row: inputs.row,
response: ctx.body,
@ -94,7 +88,7 @@ exports.run = async function ({ inputs, appId, emitter }) {
} catch (err) {
return {
success: false,
response: automationUtils.getError(err),
response: getError(err),
}
}
}

View file

@ -1,9 +1,8 @@
const rowController = require("../../api/controllers/row")
const usage = require("../../utilities/usageQuota")
const { buildCtx } = require("./utils")
const automationUtils = require("../automationUtils")
import { destroy } from "../../api/controllers/row"
import { buildCtx } from "./utils"
import { getError } from "../automationUtils"
exports.definition = {
export const definition = {
description: "Delete a row from your database",
icon: "TableRowRemoveCenter",
name: "Delete Row",
@ -48,7 +47,7 @@ exports.definition = {
},
}
exports.run = async function ({ inputs, appId, emitter }) {
export async function run({ inputs, appId, emitter }: any) {
if (inputs.id == null) {
return {
success: false,
@ -58,7 +57,7 @@ exports.run = async function ({ inputs, appId, emitter }) {
}
}
let ctx = buildCtx(appId, emitter, {
let ctx: any = buildCtx(appId, emitter, {
body: {
_id: inputs.id,
_rev: inputs.revision,
@ -69,8 +68,7 @@ exports.run = async function ({ inputs, appId, emitter }) {
})
try {
await usage.update(usage.Properties.ROW, -1)
await rowController.destroy(ctx)
await destroy(ctx)
return {
response: ctx.body,
row: ctx.row,
@ -79,7 +77,7 @@ exports.run = async function ({ inputs, appId, emitter }) {
} catch (err) {
return {
success: false,
response: automationUtils.getError(err),
response: getError(err),
}
}
}

View file

@ -1,4 +1,3 @@
jest.mock("../../utilities/usageQuota")
jest.mock("../../threads/automation")
jest.mock("../../utilities/redis", () => ({
init: jest.fn(),

View file

@ -1,10 +1,8 @@
jest.mock("../../utilities/usageQuota")
const usageQuota = require("../../utilities/usageQuota")
const setup = require("./utilities")
import * as setup from "./utilities"
describe("test the create row action", () => {
let table, row
let table: any
let row: any
let config = setup.getConfig()
beforeEach(async () => {
@ -36,20 +34,11 @@ describe("test the create row action", () => {
row: {
tableId: "invalid",
invalid: "invalid",
}
},
})
expect(res.success).toEqual(false)
})
it("check usage quota attempts", async () => {
await setup.runInProd(async () => {
await setup.runStep(setup.actions.CREATE_ROW.stepId, {
row
})
expect(usageQuota.update).toHaveBeenCalledWith("rows", 1)
})
})
it("should check invalid inputs return an error", async () => {
const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {})
expect(res.success).toEqual(false)

View file

@ -1,10 +1,9 @@
jest.mock("../../utilities/usageQuota")
const usageQuota = require("../../utilities/usageQuota")
const setup = require("./utilities")
describe("test the delete row action", () => {
let table, row, inputs
let table: any
let row: any
let inputs: any
let config = setup.getConfig()
beforeEach(async () => {
@ -37,7 +36,6 @@ describe("test the delete row action", () => {
it("check usage quota attempts", async () => {
await setup.runInProd(async () => {
await setup.runStep(setup.actions.DELETE_ROW.stepId, inputs)
expect(usageQuota.update).toHaveBeenCalledWith("rows", -1)
})
})

View file

@ -18,7 +18,6 @@ exports.afterAll = () => {
exports.runInProd = async fn => {
env._set("NODE_ENV", "production")
env._set("USE_QUOTAS", 1)
let error
try {
await fn()
@ -26,7 +25,6 @@ exports.runInProd = async fn => {
error = err
}
env._set("NODE_ENV", "jest")
env._set("USE_QUOTAS", null)
if (error) {
throw error
}

View file

@ -1,26 +1,32 @@
const { Thread, ThreadType } = require("../threads")
const { definitions } = require("./triggerInfo")
const webhooks = require("../api/controllers/webhook")
const CouchDB = require("../db")
const { queue } = require("./bullboard")
const newid = require("../db/newid")
const { updateEntityMetadata } = require("../utilities")
const { MetadataTypes, WebhookType } = require("../constants")
const { getProdAppID } = require("@budibase/backend-core/db")
const { cloneDeep } = require("lodash/fp")
const { getAppDB, getAppId } = require("@budibase/backend-core/context")
import { Thread, ThreadType } from "../threads"
import { definitions } from "./triggerInfo"
import * as webhooks from "../api/controllers/webhook"
import CouchDB from "../db"
import { queue } from "./bullboard"
import newid from "../db/newid"
import { updateEntityMetadata } from "../utilities"
import { MetadataTypes, WebhookType } from "../constants"
import { getProdAppID } from "@budibase/backend-core/db"
import { cloneDeep } from "lodash/fp"
import { getAppDB, getAppId } from "@budibase/backend-core/context"
import { tenancy } from "@budibase/backend-core"
import { quotas } from "@budibase/pro"
const WH_STEP_ID = definitions.WEBHOOK.stepId
const CRON_STEP_ID = definitions.CRON.stepId
const Runner = new Thread(ThreadType.AUTOMATION)
exports.processEvent = async job => {
export async function processEvent(job: any) {
try {
// need to actually await these so that an error can be captured properly
console.log(
`${job.data.automation.appId} automation ${job.data.automation._id} running`
)
return await Runner.run(job)
// need to actually await these so that an error can be captured properly
const tenantId = tenancy.getTenantIDFromAppID(job.data.event.appId)
return await tenancy.doInTenant(tenantId, async () => {
const runFn = () => Runner.run(job)
return quotas.addAutomation(runFn)
})
} catch (err) {
const errJson = JSON.stringify(err)
console.error(
@ -31,11 +37,15 @@ exports.processEvent = async job => {
}
}
exports.updateTestHistory = async (appId, automation, history) => {
export async function updateTestHistory(
appId: any,
automation: any,
history: any
) {
return updateEntityMetadata(
MetadataTypes.AUTOMATION_TEST_HISTORY,
automation._id,
metadata => {
(metadata: any) => {
if (metadata && Array.isArray(metadata.history)) {
metadata.history.push(history)
} else {
@ -48,7 +58,7 @@ exports.updateTestHistory = async (appId, automation, history) => {
)
}
exports.removeDeprecated = definitions => {
export function removeDeprecated(definitions: any) {
const base = cloneDeep(definitions)
for (let key of Object.keys(base)) {
if (base[key].deprecated) {
@ -59,13 +69,15 @@ exports.removeDeprecated = definitions => {
}
// end the repetition and the job itself
exports.disableAllCrons = async appId => {
export async function disableAllCrons(appId: any) {
const promises = []
const jobs = await queue.getRepeatableJobs()
for (let job of jobs) {
if (job.key.includes(`${appId}_cron`)) {
promises.push(queue.removeRepeatableByKey(job.key))
promises.push(queue.removeJobs(job.id))
if (job.id) {
promises.push(queue.removeJobs(job.id))
}
}
}
return Promise.all(promises)
@ -76,9 +88,9 @@ exports.disableAllCrons = async appId => {
* @param {string} appId The ID of the app in which we are checking for webhooks
* @param {object|undefined} automation The automation object to be updated.
*/
exports.enableCronTrigger = async (appId, automation) => {
export async function enableCronTrigger(appId: any, automation: any) {
const trigger = automation ? automation.definition.trigger : null
function isCronTrigger(auto) {
function isCronTrigger(auto: any) {
return (
auto &&
auto.definition.trigger &&
@ -89,7 +101,7 @@ exports.enableCronTrigger = async (appId, automation) => {
if (isCronTrigger(automation)) {
// make a job ID rather than letting Bull decide, makes it easier to handle on the way out
const jobId = `${appId}_cron_${newid()}`
const job = await queue.add(
const job: any = await queue.add(
{
automation,
event: { appId, timestamp: Date.now() },
@ -117,13 +129,13 @@ exports.enableCronTrigger = async (appId, automation) => {
* @returns {Promise<object|undefined>} After this is complete the new automation object may have been updated and should be
* written to DB (this does not write to DB as it would be wasteful to repeat).
*/
exports.checkForWebhooks = async ({ oldAuto, newAuto }) => {
export async function checkForWebhooks({ oldAuto, newAuto }: any) {
const appId = getAppId()
const oldTrigger = oldAuto ? oldAuto.definition.trigger : null
const newTrigger = newAuto ? newAuto.definition.trigger : null
const triggerChanged =
oldTrigger && newTrigger && oldTrigger.id !== newTrigger.id
function isWebhookTrigger(auto) {
function isWebhookTrigger(auto: any) {
return (
auto &&
auto.definition.trigger &&
@ -159,7 +171,7 @@ exports.checkForWebhooks = async ({ oldAuto, newAuto }) => {
(!isWebhookTrigger(oldAuto) || triggerChanged) &&
isWebhookTrigger(newAuto)
) {
const ctx = {
const ctx: any = {
appId,
request: {
body: new webhooks.Webhook(
@ -189,6 +201,6 @@ exports.checkForWebhooks = async ({ oldAuto, newAuto }) => {
* @param appId {string} the app that is being removed.
* @return {Promise<void>} clean is complete if this succeeds.
*/
exports.cleanupAutomations = async appId => {
await exports.disableAllCrons(appId)
export async function cleanupAutomations(appId: any) {
await disableAllCrons(appId)
}

View file

@ -38,8 +38,6 @@ module.exports = {
ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
USE_QUOTAS: process.env.USE_QUOTAS,
EXCLUDE_QUOTAS_TENANTS: process.env.EXCLUDE_QUOTAS_TENANTS,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,

View file

@ -1,8 +1,8 @@
const {
import {
getUserRoleHierarchy,
getRequiredResourceRole,
BUILTIN_ROLE_IDS,
} = require("@budibase/backend-core/roles")
} from "@budibase/backend-core/roles"
const {
PermissionTypes,
PermissionLevels,
@ -13,7 +13,7 @@ const { isWebhookEndpoint } = require("./utils")
const { buildCsrfMiddleware } = require("@budibase/backend-core/auth")
const { getAppId } = require("@budibase/backend-core/context")
function hasResource(ctx) {
function hasResource(ctx: any) {
return ctx.resourceId != null
}
@ -25,7 +25,12 @@ const csrf = buildCsrfMiddleware()
* - Builders can access all resources.
* - Otherwise the user must have the required role.
*/
const checkAuthorized = async (ctx, resourceRoles, permType, permLevel) => {
const checkAuthorized = async (
ctx: any,
resourceRoles: any,
permType: any,
permLevel: any
) => {
// check if this is a builder api and the user is not a builder
const isBuilder = ctx.user && ctx.user.builder && ctx.user.builder.global
const isBuilderApi = permType === PermissionTypes.BUILDER
@ -40,10 +45,10 @@ const checkAuthorized = async (ctx, resourceRoles, permType, permLevel) => {
}
const checkAuthorizedResource = async (
ctx,
resourceRoles,
permType,
permLevel
ctx: any,
resourceRoles: any,
permType: any,
permLevel: any
) => {
// get the user's roles
const roleId = ctx.roleId || BUILTIN_ROLE_IDS.PUBLIC
@ -54,7 +59,9 @@ const checkAuthorizedResource = async (
// check if the user has the required role
if (resourceRoles.length > 0) {
// deny access if the user doesn't have the required resource role
const found = userRoles.find(role => resourceRoles.indexOf(role._id) !== -1)
const found = userRoles.find(
(role: any) => resourceRoles.indexOf(role._id) !== -1
)
if (!found) {
ctx.throw(403, permError)
}
@ -64,9 +71,8 @@ const checkAuthorizedResource = async (
}
}
module.exports =
(permType, permLevel = null, opts = { schema: false }) =>
async (ctx, next) => {
export = (permType: any, permLevel: any = null, opts = { schema: false }) =>
async (ctx: any, next: any) => {
// webhooks don't need authentication, each webhook is unique
// also internal requests (between services) don't need authorization
if (isWebhookEndpoint(ctx) || ctx.internal) {
@ -82,8 +88,8 @@ module.exports =
await builderMiddleware(ctx, permType)
// get the resource roles
let resourceRoles = [],
otherLevelRoles
let resourceRoles: any = []
let otherLevelRoles: any = []
const otherLevel =
permLevel === PermissionLevels.READ
? PermissionLevels.WRITE

View file

@ -1,134 +0,0 @@
jest.mock("../../db")
jest.mock("../../utilities/usageQuota")
jest.mock("@budibase/backend-core/tenancy", () => ({
getTenantId: () => "testing123"
}))
const usageQuotaMiddleware = require("../usageQuota")
const usageQuota = require("../../utilities/usageQuota")
const CouchDB = require("../../db")
const env = require("../../environment")
class TestConfiguration {
constructor() {
this.throw = jest.fn()
this.next = jest.fn()
this.middleware = usageQuotaMiddleware
this.ctx = {
throw: this.throw,
next: this.next,
appId: "test",
request: {
body: {}
},
req: {
method: "POST",
url: "/applications"
}
}
usageQuota.useQuotas = () => true
}
executeMiddleware() {
return this.middleware(this.ctx, this.next)
}
setProd(bool) {
if (bool) {
env.isDev = () => false
env.isProd = () => true
this.ctx.user = { tenantId: "test" }
} else {
env.isDev = () => true
env.isProd = () => false
}
}
setMethod(method) {
this.ctx.req.method = method
}
setUrl(url) {
this.ctx.req.url = url
}
setBody(body) {
this.ctx.request.body = body
}
setFiles(files) {
this.ctx.request.files = { file: files }
}
}
describe("usageQuota middleware", () => {
let config
beforeEach(() => {
config = new TestConfiguration()
})
it("skips the middleware if there is no usage property or method", async () => {
await config.executeMiddleware()
expect(config.next).toHaveBeenCalled()
})
it("passes through to next middleware if document already exists", async () => {
config.setProd(true)
config.setBody({
_id: "test",
_rev: "test",
})
CouchDB.mockImplementationOnce(() => ({
get: async () => true
}))
await config.executeMiddleware()
expect(config.next).toHaveBeenCalled()
})
it("throws if request has _id, but the document no longer exists", async () => {
config.setBody({
_id: "123",
_rev: "test",
})
config.setProd(true)
CouchDB.mockImplementationOnce(() => ({
get: async () => {
throw new Error()
}
}))
await config.executeMiddleware()
expect(config.throw).toHaveBeenCalledWith(404, `${config.ctx.request.body._id} does not exist`)
})
it("calculates and persists the correct usage quota for the relevant action", async () => {
config.setUrl("/rows")
await config.executeMiddleware()
expect(usageQuota.update).toHaveBeenCalledWith("rows", 1)
expect(config.next).toHaveBeenCalled()
})
// it("calculates the correct file size from a file upload call and adds it to quota", async () => {
// config.setUrl("/upload")
// config.setProd(true)
// config.setFiles([
// {
// size: 100
// },
// {
// size: 10000
// },
// ])
// await config.executeMiddleware()
// expect(usageQuota.update).toHaveBeenCalledWith("storage", 10100)
// expect(config.next).toHaveBeenCalled()
// })
})

View file

@ -1,164 +0,0 @@
const usageQuota = require("../utilities/usageQuota")
const { getUniqueRows } = require("../utilities/usageQuota/rows")
const {
isExternalTable,
isRowId: isExternalRowId,
} = require("../integrations/utils")
const { getAppDB } = require("@budibase/backend-core/context")
// currently only counting new writes and deletes
const METHOD_MAP = {
POST: 1,
DELETE: -1,
}
const DOMAIN_MAP = {
rows: usageQuota.Properties.ROW,
// upload: usageQuota.Properties.UPLOAD, // doesn't work yet
// views: usageQuota.Properties.VIEW, // doesn't work yet
// users: usageQuota.Properties.USER, // doesn't work yet
applications: usageQuota.Properties.APPS,
// this will not be updated by endpoint calls
// instead it will be updated by triggerInfo
// automationRuns: usageQuota.Properties.AUTOMATION, // doesn't work yet
}
function getProperty(url) {
for (let domain of Object.keys(DOMAIN_MAP)) {
if (url.indexOf(domain) !== -1) {
return DOMAIN_MAP[domain]
}
}
}
module.exports = async (ctx, next) => {
if (!usageQuota.useQuotas()) {
return next()
}
let usage = METHOD_MAP[ctx.req.method]
const property = getProperty(ctx.req.url)
if (usage == null || property == null) {
return next()
}
// post request could be a save of a pre-existing entry
if (ctx.request.body && ctx.request.body._id && ctx.request.body._rev) {
const usageId = ctx.request.body._id
try {
if (ctx.appId) {
const db = getAppDB()
await db.get(usageId)
}
return next()
} catch (err) {
if (
isExternalTable(usageId) ||
(ctx.request.body.tableId &&
isExternalTable(ctx.request.body.tableId)) ||
isExternalRowId(usageId)
) {
return next()
} else {
ctx.throw(404, `${usageId} does not exist`)
}
}
}
// update usage for uploads to be the total size
if (property === usageQuota.Properties.UPLOAD) {
const files =
ctx.request.files.file.length > 1
? Array.from(ctx.request.files.file)
: [ctx.request.files.file]
usage = files.map(file => file.size).reduce((total, size) => total + size)
}
try {
await performRequest(ctx, next, property, usage)
} catch (err) {
ctx.throw(400, err)
}
}
const performRequest = async (ctx, next, property, usage) => {
const usageContext = {
skipNext: false,
skipUsage: false,
[usageQuota.Properties.APPS]: {},
}
if (usage === -1) {
if (PRE_DELETE[property]) {
await PRE_DELETE[property](ctx, usageContext)
}
} else {
if (PRE_CREATE[property]) {
await PRE_CREATE[property](ctx, usageContext)
}
}
// run the request
if (!usageContext.skipNext) {
await usageQuota.update(property, usage, { dryRun: true })
await next()
}
if (usage === -1) {
if (POST_DELETE[property]) {
await POST_DELETE[property](ctx, usageContext)
}
} else {
if (POST_CREATE[property]) {
await POST_CREATE[property](ctx, usageContext)
}
}
// update the usage
if (!usageContext.skipUsage) {
await usageQuota.update(property, usage)
}
}
const appPreDelete = async (ctx, usageContext) => {
if (ctx.query.unpublish) {
// don't run usage decrement for unpublish
usageContext.skipUsage = true
return
}
// store the row count to delete
const rows = await getUniqueRows([ctx.appId])
if (rows.length) {
usageContext[usageQuota.Properties.APPS] = { rowCount: rows.length }
}
}
const appPostDelete = async (ctx, usageContext) => {
// delete the app rows from usage
const rowCount = usageContext[usageQuota.Properties.APPS].rowCount
if (rowCount) {
await usageQuota.update(usageQuota.Properties.ROW, -rowCount)
}
}
const appPostCreate = async ctx => {
// app import & template creation
if (ctx.request.body.useTemplate === "true") {
const rows = await getUniqueRows([ctx.response.body.appId])
const rowCount = rows ? rows.length : 0
await usageQuota.update(usageQuota.Properties.ROW, rowCount)
}
}
const PRE_DELETE = {
[usageQuota.Properties.APPS]: appPreDelete,
}
const POST_DELETE = {
[usageQuota.Properties.APPS]: appPostDelete,
}
const PRE_CREATE = {}
const POST_CREATE = {
[usageQuota.Properties.APPS]: appPostCreate,
}

View file

@ -0,0 +1,15 @@
const { createUserBuildersView } = require("@budibase/backend-core/db")
import * as syncDevelopers from "./usageQuotas/syncDevelopers"
/**
* Date:
* March 2022
*
* Description:
* Create the builder users view and sync the developer count
*/
export const run = async (db: any) => {
await createUserBuildersView(db)
await syncDevelopers.run()
}

View file

@ -0,0 +1,13 @@
import * as syncPublishedApps from "./usageQuotas/syncPublishedApps"
/**
* Date:
* March 2022
*
* Description:
* Sync the published apps count
*/
export const run = async (db: any) => {
await syncPublishedApps.run()
}

View file

@ -1,4 +1,3 @@
const env = require("../../../environment")
const TestConfig = require("../../../tests/utilities/TestConfiguration")
const syncApps = jest.fn()
@ -14,7 +13,6 @@ describe("run", () => {
beforeEach(async () => {
await config.init()
env._set("USE_QUOTAS", 1)
})
afterAll(config.end)

View file

@ -1,8 +1,3 @@
const { useQuotas } = require("../../../utilities/usageQuota")
export const runQuotaMigration = async (migration: Function) => {
if (!useQuotas()) {
return
}
await migration()
}

View file

@ -1,9 +1,8 @@
import { getGlobalDB, getTenantId } from "@budibase/backend-core/tenancy"
import { getTenantId } from "@budibase/backend-core/tenancy"
import { getAllApps } from "@budibase/backend-core/db"
import { getUsageQuotaDoc } from "../../../utilities/usageQuota"
import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"
export const run = async () => {
const db = getGlobalDB()
// get app count
// @ts-ignore
const devApps = await getAllApps({ dev: true })
@ -12,7 +11,5 @@ export const run = async () => {
// sync app count
const tenantId = getTenantId()
console.log(`[Tenant: ${tenantId}] Syncing app count: ${appCount}`)
const usageDoc = await getUsageQuotaDoc(db)
usageDoc.usageQuota.apps = appCount
await db.put(usageDoc)
await quotas.setUsage(appCount, StaticQuotaName.APPS, QuotaUsageType.STATIC)
}

View file

@ -0,0 +1,19 @@
import { getTenantId } from "@budibase/backend-core/tenancy"
import { utils } from "@budibase/backend-core"
import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"
export const run = async () => {
// get developer count
const developerCount = await utils.getBuildersCount()
// sync developer count
const tenantId = getTenantId()
console.log(
`[Tenant: ${tenantId}] Syncing developer count: ${developerCount}`
)
await quotas.setUsage(
developerCount,
StaticQuotaName.DEVELOPERS,
QuotaUsageType.STATIC
)
}

View file

@ -0,0 +1,21 @@
import { getTenantId } from "@budibase/backend-core/tenancy"
import { getAllApps } from "@budibase/backend-core/db"
import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"
export const run = async () => {
// get app count
const opts: any = { dev: false }
const prodApps = await getAllApps(opts)
const prodAppCount = prodApps ? prodApps.length : 0
// sync app count
const tenantId = getTenantId()
console.log(
`[Tenant: ${tenantId}] Syncing published app count: ${prodAppCount}`
)
await quotas.setUsage(
prodAppCount,
StaticQuotaName.PUBLISHED_APPS,
QuotaUsageType.STATIC
)
}

View file

@ -1,10 +1,9 @@
import { getGlobalDB, getTenantId } from "@budibase/backend-core/tenancy"
import { getTenantId } from "@budibase/backend-core/tenancy"
import { getAllApps } from "@budibase/backend-core/db"
import { getUsageQuotaDoc } from "../../../utilities/usageQuota"
import { getUniqueRows } from "../../../utilities/usageQuota/rows"
import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"
export const run = async () => {
const db = getGlobalDB()
// get all rows in all apps
// @ts-ignore
const allApps = await getAllApps({ all: true })
@ -16,7 +15,5 @@ export const run = async () => {
// sync row count
const tenantId = getTenantId()
console.log(`[Tenant: ${tenantId}] Syncing row count: ${rowCount}`)
const usageDoc = await getUsageQuotaDoc(db)
usageDoc.usageQuota.rows = rowCount
await db.put(usageDoc)
await quotas.setUsage(rowCount, StaticQuotaName.ROWS, QuotaUsageType.STATIC)
}
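Each of these quota migrations (apps, developers, published apps, rows) follows the same recompute-then-set shape: derive the true count from source data, log it per tenant, and overwrite the stored static usage via quotas.setUsage. A small generic sketch of that shape — the helper name and callbacks are illustrative, only the setUsage call mirrors the diff above:

// generic sync-migration shape, sketched with illustrative callbacks
async function syncStaticUsage(
  label: string,
  count: () => Promise<number>,
  setUsage: (value: number) => Promise<void>
) {
  const value = await count()
  console.log(`Syncing ${label} count: ${value}`)
  await setUsage(value)
}

// e.g. for rows, mirroring syncRows above (arguments illustrative):
// await syncStaticUsage("row", countAllRows, v =>
//   quotas.setUsage(v, StaticQuotaName.ROWS, QuotaUsageType.STATIC)
// )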

View file

@ -1,37 +0,0 @@
const { getGlobalDB } = require("@budibase/backend-core/tenancy")
const TestConfig = require("../../../../tests/utilities/TestConfiguration")
const { getUsageQuotaDoc, update, Properties } = require("../../../../utilities/usageQuota")
const syncApps = require("../syncApps")
const env = require("../../../../environment")
describe("syncApps", () => {
let config = new TestConfig(false)
beforeEach(async () => {
await config.init()
env._set("USE_QUOTAS", 1)
})
afterAll(config.end)
it("runs successfully", async () => {
// create the usage quota doc and mock usages
const db = getGlobalDB()
await getUsageQuotaDoc(db)
await update(Properties.APPS, 3)
let usageDoc = await getUsageQuotaDoc(db)
expect(usageDoc.usageQuota.apps).toEqual(3)
// create an extra app to test the migration
await config.createApp("quota-test")
// migrate
await syncApps.run()
// assert the migration worked
usageDoc = await getUsageQuotaDoc(db)
expect(usageDoc.usageQuota.apps).toEqual(2)
})
})

View file

@ -0,0 +1,32 @@
import TestConfig from "../../../../tests/utilities/TestConfiguration"
import * as syncApps from "../syncApps"
import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"
describe("syncApps", () => {
let config = new TestConfig(false)
beforeEach(async () => {
await config.init()
})
afterAll(config.end)
it("runs successfully", async () => {
// create the usage quota doc and mock usages
await quotas.getQuotaUsage()
await quotas.setUsage(3, StaticQuotaName.APPS, QuotaUsageType.STATIC)
let usageDoc = await quotas.getQuotaUsage()
expect(usageDoc.usageQuota.apps).toEqual(3)
// create an extra app to test the migration
await config.createApp("quota-test")
// migrate
await syncApps.run()
// assert the migration worked
usageDoc = await quotas.getQuotaUsage()
expect(usageDoc.usageQuota.apps).toEqual(2)
})
})

View file

@ -1,43 +0,0 @@
const { getGlobalDB } = require("@budibase/backend-core/tenancy")
const TestConfig = require("../../../../tests/utilities/TestConfiguration")
const { getUsageQuotaDoc, update, Properties } = require("../../../../utilities/usageQuota")
const syncRows = require("../syncRows")
const env = require("../../../../environment")
describe("syncRows", () => {
let config = new TestConfig(false)
beforeEach(async () => {
await config.init()
env._set("USE_QUOTAS", 1)
})
afterAll(config.end)
it("runs successfully", async () => {
// create the usage quota doc and mock usages
const db = getGlobalDB()
await getUsageQuotaDoc(db)
await update(Properties.ROW, 300)
let usageDoc = await getUsageQuotaDoc(db)
expect(usageDoc.usageQuota.rows).toEqual(300)
// app 1
await config.createTable()
await config.createRow()
// app 2
await config.createApp("second-app")
await config.createTable()
await config.createRow()
await config.createRow()
// migrate
await syncRows.run()
// assert the migration worked
usageDoc = await getUsageQuotaDoc(db)
expect(usageDoc.usageQuota.rows).toEqual(3)
})
})

View file

@ -0,0 +1,38 @@
import TestConfig from "../../../../tests/utilities/TestConfiguration"
import * as syncRows from "../syncRows"
import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"
describe("syncRows", () => {
let config = new TestConfig(false)
beforeEach(async () => {
await config.init()
})
afterAll(config.end)
it("runs successfully", async () => {
// create the usage quota doc and mock usages
await quotas.getQuotaUsage()
await quotas.setUsage(300, StaticQuotaName.ROWS, QuotaUsageType.STATIC)
let usageDoc = await quotas.getQuotaUsage()
expect(usageDoc.usageQuota.rows).toEqual(300)
// app 1
await config.createTable()
await config.createRow()
// app 2
await config.createApp("second-app")
await config.createTable()
await config.createRow()
await config.createRow()
// migrate
await syncRows.run()
// assert the migration worked
usageDoc = await quotas.getQuotaUsage()
expect(usageDoc.usageQuota.rows).toEqual(3)
})
})

View file

@ -8,6 +8,8 @@ const {
import * as userEmailViewCasing from "./functions/userEmailViewCasing"
import * as quota1 from "./functions/quotas1"
import * as appUrls from "./functions/appUrls"
import * as developerQuota from "./functions/developerQuota"
import * as publishedAppsQuota from "./functions/publishedAppsQuota"
export interface Migration {
type: string
@ -27,7 +29,7 @@ export interface Migration {
*/
export interface MigrationOptions {
tenantIds?: string[]
forced?: {
force?: {
[type: string]: string[]
}
}
@ -49,6 +51,16 @@ export const MIGRATIONS: Migration[] = [
opts: { all: true },
fn: appUrls.run,
},
{
type: MIGRATION_TYPES.GLOBAL,
name: "developer_quota",
fn: developerQuota.run,
},
{
type: MIGRATION_TYPES.GLOBAL,
name: "published_apps_quota",
fn: publishedAppsQuota.run,
},
]
export const migrate = async (options?: MigrationOptions) => {
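
A minimal sketch of how the force option in MigrationOptions might be passed when invoking migrate for a specific migration; the tenant ID and the type key below are illustrative assumptions, not values taken from this diff:

// re-run the developer_quota migration for one tenant (illustrative only)
await migrate({
  tenantIds: ["my-tenant"],              // hypothetical tenant ID
  force: { global: ["developer_quota"] }, // keyed by migration type, per MigrationOptions
})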

View file

@ -1,3 +1,7 @@
declare module "@budibase/backend-core"
declare module "@budibase/backend-core/tenancy"
declare module "@budibase/backend-core/db"
declare module "@budibase/backend-core/context"
declare module "@budibase/backend-core/cache"
declare module "@budibase/backend-core/permissions"
declare module "@budibase/backend-core/roles"

View file

@ -1,12 +1,12 @@
const workerFarm = require("worker-farm")
const env = require("../environment")
import workerFarm from "worker-farm"
import * as env from "../environment"
const ThreadType = {
export const ThreadType = {
QUERY: "query",
AUTOMATION: "automation",
}
function typeToFile(type) {
function typeToFile(type: any) {
let filename = null
switch (type) {
case ThreadType.QUERY:
@ -21,8 +21,13 @@ function typeToFile(type) {
return require.resolve(filename)
}
class Thread {
constructor(type, opts = { timeoutMs: null, count: 1 }) {
export class Thread {
type: any
count: any
disableThreading: any
workers: any
constructor(type: any, opts: any = { timeoutMs: null, count: 1 }) {
this.type = type
this.count = opts.count ? opts.count : 1
this.disableThreading =
@ -31,7 +36,7 @@ class Thread {
this.count === 0 ||
env.isInThread()
if (!this.disableThreading) {
const workerOpts = {
const workerOpts: any = {
autoStart: true,
maxConcurrentWorkers: this.count,
}
@ -42,7 +47,7 @@ class Thread {
}
}
run(data) {
run(data: any) {
return new Promise((resolve, reject) => {
let fncToCall
// if in test then don't use threading
@ -51,7 +56,7 @@ class Thread {
} else {
fncToCall = this.workers
}
fncToCall(data, (err, response) => {
fncToCall(data, (err: any, response: any) => {
if (err) {
reject(err)
} else {
@ -61,6 +66,3 @@ class Thread {
})
}
}
module.exports.Thread = Thread
module.exports.ThreadType = ThreadType
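
A minimal usage sketch for the Thread class; the import path, timeout and payload shape are illustrative assumptions rather than values from this diff:

import { Thread, ThreadType } from "../threads"   // hypothetical import path

const queryThread = new Thread(ThreadType.QUERY, { timeoutMs: 10000, count: 1 })
// run() wraps the worker-farm callback in a promise, resolving with the worker response
const response = await queryThread.run({ query: "..." })   // payload shape is an assumption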

View file

@ -66,7 +66,8 @@ class InMemoryQueue {
* @param {object} msg A message to be transported over the queue, this should be
* a JSON message as this is required by Bull.
*/
add(msg) {
// eslint-disable-next-line no-unused-vars
add(msg, repeat) {
if (typeof msg !== "object") {
throw "Queue only supports carrying JSON."
}
@ -90,6 +91,11 @@ class InMemoryQueue {
return []
}
// eslint-disable-next-line no-unused-vars
removeJobs(pattern) {
// no-op
}
/**
* Implemented for tests
*/
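
A minimal usage sketch for this in-memory queue stub; the constructor call and message contents are assumptions, since neither appears in the lines above:

const queue = new InMemoryQueue()   // hypothetical constructor, arguments omitted
// messages must be plain objects; the optional second argument mirrors Bull's repeat option and is ignored here
queue.add({ automation: "run-now" })
// present only to satisfy Bull's API surface; a no-op in this stub
queue.removeJobs("*")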

View file

@ -1,72 +0,0 @@
const getTenantId = jest.fn()
jest.mock("@budibase/backend-core/tenancy", () => ({
getTenantId
}))
const usageQuota = require("../../usageQuota")
const env = require("../../../environment")
class TestConfiguration {
constructor() {
this.enableQuotas()
}
enableQuotas = () => {
env.USE_QUOTAS = 1
}
disableQuotas = () => {
env.USE_QUOTAS = null
}
setTenantId = (tenantId) => {
getTenantId.mockReturnValue(tenantId)
}
setExcludedTenants = (tenants) => {
env.EXCLUDE_QUOTAS_TENANTS = tenants
}
reset = () => {
this.disableQuotas()
this.setExcludedTenants(null)
}
}
describe("usageQuota", () => {
let config
beforeEach(() => {
config = new TestConfiguration()
})
afterEach(() => {
config.reset()
})
describe("useQuotas", () => {
it("works when no settings have been provided", () => {
config.reset()
expect(usageQuota.useQuotas()).toBe(false)
})
it("honours USE_QUOTAS setting", () => {
config.disableQuotas()
expect(usageQuota.useQuotas()).toBe(false)
config.enableQuotas()
expect(usageQuota.useQuotas()).toBe(true)
})
it("honours EXCLUDE_QUOTAS_TENANTS setting", () => {
config.setTenantId("test")
// tenantId is in the list
config.setExcludedTenants("test, test2, test2")
expect(usageQuota.useQuotas()).toBe(false)
config.setExcludedTenants("test,test2,test2")
expect(usageQuota.useQuotas()).toBe(false)
// tenantId is not in the list
config.setTenantId("other")
expect(usageQuota.useQuotas()).toBe(true)
})
})
})

View file

@ -1,93 +0,0 @@
const env = require("../../environment")
const { getGlobalDB, getTenantId } = require("@budibase/backend-core/tenancy")
const {
StaticDatabases,
generateNewUsageQuotaDoc,
} = require("@budibase/backend-core/db")
exports.useQuotas = () => {
// check if quotas are enabled
if (env.USE_QUOTAS) {
// check if there are any tenants without limits
if (env.EXCLUDE_QUOTAS_TENANTS) {
const excludedTenants = env.EXCLUDE_QUOTAS_TENANTS.replace(
/\s/g,
""
).split(",")
const tenantId = getTenantId()
if (excludedTenants.includes(tenantId)) {
return false
}
}
return true
}
return false
}
exports.Properties = {
ROW: "rows",
UPLOAD: "storage", // doesn't work yet
VIEW: "views", // doesn't work yet
USER: "users", // doesn't work yet
AUTOMATION: "automationRuns", // doesn't work yet
APPS: "apps",
EMAILS: "emails", // doesn't work yet
}
exports.getUsageQuotaDoc = async db => {
let quota
try {
quota = await db.get(StaticDatabases.GLOBAL.docs.usageQuota)
} catch (err) {
// doc doesn't exist. Create it
quota = generateNewUsageQuotaDoc()
const response = await db.put(quota)
quota._rev = response.rev
}
return quota
}
/**
* Given a specified tenantId this will add to the usage object for the specified property.
* @param {string} property The property which is to be added to (within the nested usageQuota object).
* @param {number} usage The amount (this can be negative) to adjust the number by.
* @param {object} opts optional - options such as dryRun, to check what update will do.
* @returns {Promise<void>} When this completes the API key will now be up to date - the quota period may have
* also been reset after this call.
*/
exports.update = async (property, usage, opts = { dryRun: false }) => {
if (!exports.useQuotas()) {
return
}
try {
const db = getGlobalDB()
const quota = await exports.getUsageQuotaDoc(db)
// increment the quota
quota.usageQuota[property] += usage
if (
quota.usageQuota[property] > quota.usageLimits[property] &&
usage > 0 // allow for decrementing usage when the quota is already exceeded
) {
throw new Error(
`You have exceeded your usage quota of ${quota.usageLimits[property]} ${property}.`
)
}
if (quota.usageQuota[property] < 0) {
// never go negative if the quota has previously been exceeded
quota.usageQuota[property] = 0
}
// update the usage quotas
if (!opts.dryRun) {
await db.put(quota)
}
} catch (err) {
console.error(`Error updating usage quotas for ${property}`, err)
throw err
}
}
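
For comparison, a rough sketch (an assumption, not the actual @budibase/pro implementation) of how the same clamp-at-zero adjustment could be expressed with the quotas API exercised in the tests above:

import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"

const adjustRowUsage = async (delta: number) => {
  const usage = await quotas.getQuotaUsage()
  // never let stored usage go negative, matching the behaviour of the helper above
  const next = Math.max(0, usage.usageQuota.rows + delta)
  await quotas.setUsage(next, StaticQuotaName.ROWS, QuotaUsageType.STATIC)
}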

View file

@ -0,0 +1,10 @@
{
// Used for building with tsc
"extends": "./tsconfig.json",
"exclude": [
"node_modules",
"**/*.json",
"**/*.spec.js",
"**/*.spec.ts"
]
}
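
This build-only config is presumably consumed via TypeScript's project flag, along the lines of tsc -p tsconfig.build.json, so spec files are excluded from emitted output while staying type-checked in the editor; the exact build script is an assumption and is not shown in this diff.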

View file

@ -19,7 +19,7 @@
"exclude": [
"node_modules",
"**/*.json",
"**/*.spec.ts",
"**/*.spec.js"
"**/*.spec.js",
// "**/*.spec.ts" // don't exclude spec.ts files for editor support
]
}

File diff suppressed because it is too large

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/string-templates",
"version": "1.0.105-alpha.23",
"version": "1.0.105-alpha.24",
"description": "Handlebars wrapper for Budibase templating.",
"main": "src/index.cjs",
"module": "dist/bundle.mjs",
@ -43,7 +43,7 @@
"rollup-plugin-node-globals": "^1.4.0",
"rollup-plugin-node-resolve": "^5.2.0",
"rollup-plugin-terser": "^7.0.2",
"typescript": "^4.1.3"
"typescript": "^4.5.5"
},
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"
}

View file

@ -4289,10 +4289,10 @@ typeof-article@^0.1.1:
dependencies:
kind-of "^3.1.0"
typescript@^4.1.3:
version "4.4.4"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.4.tgz#2cd01a1a1f160704d3101fd5a58ff0f9fcb8030c"
integrity sha512-DqGhF5IKoBl8WNf8C1gu8q0xZSInh9j1kJJMqT3a94w1JzVaBU4EXOSMrz9yDqMT0xt3selp83fuFMQ0uzv6qA==
typescript@^4.5.5:
version "4.5.5"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.5.5.tgz#d8c953832d28924a9e3d37c73d729c846c5896f3"
integrity sha512-TCTIul70LyWe6IJWT8QSYeA54WQe8EjQFU4wY52Fasj5UKx88LNYKCgBEHcOMOrFF1rKGbD8v/xcNWVUq9SymA==
uglify-js@^3.1.4:
version "3.14.3"

Some files were not shown because too many files have changed in this diff