
Merge branch 'develop' of github.com:Budibase/budibase into plugins-dev-experience

mike12345567 2022-08-31 15:53:32 +01:00
commit 5411f3c8e3
132 changed files with 3873 additions and 1479 deletions

View file

@ -68,16 +68,28 @@ jobs:
]
env:
KUBECONFIG_FILE: '${{ secrets.RELEASE_KUBECONFIG }}'
- name: Set the base64 kubeconfig
run: echo 'RELEASE_KUBECONFIG=${{ secrets.RELEASE_KUBECONFIG }}' | base64
- name: Re roll the services
- name: Re roll app-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ env.RELEASE_KUBECONFIG }}
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment proxy-service -n budibase && kubectl rollout restart deployment app-service -n budibase && kubectl rollout restart deployment worker-service -n budibase
args: rollout restart deployment app-service -n budibase
- name: Re roll proxy-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment proxy-service -n budibase
- name: Re roll worker-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment worker-service -n budibase
- name: Discord Webhook Action
uses: tsickert/discord-webhook@v4.0.0

View file

@ -121,15 +121,26 @@ jobs:
env:
KUBECONFIG_FILE: '${{ secrets.RELEASE_KUBECONFIG }}'
- name: Set the base64 kubeconfig
run: echo 'RELEASE_KUBECONFIG=${{ secrets.RELEASE_KUBECONFIG }}' | base64
- name: Re roll the services
- name: Re roll app-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ env.RELEASE_KUBECONFIG }}
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment proxy-service -n budibase && kubectl rollout restart deployment app-service -n budibase && kubectl rollout restart deployment worker-service -n budibase
args: rollout restart deployment app-service -n budibase
- name: Re roll proxy-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment proxy-service -n budibase
- name: Re roll worker-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment worker-service -n budibase
- name: Discord Webhook Action
uses: tsickert/discord-webhook@v4.0.0

View file

@ -4,7 +4,7 @@
"singleQuote": false,
"trailingComma": "es5",
"arrowParens": "avoid",
"jsxBracketSameLine": false,
"bracketSameLine": false,
"plugins": ["prettier-plugin-svelte"],
"svelteSortOrder": "options-scripts-markup-styles"
}

View file

@ -134,6 +134,22 @@ spec:
- name: PLUGINS_DIR
value: {{ .Values.globals.pluginsDir | quote }}
{{ end }}
{{ if .Values.services.apps.nodeDebug }}
- name: NODE_DEBUG
value: {{ .Values.services.apps.nodeDebug | quote }}
{{ end }}
{{ if .Values.globals.elasticApmEnabled }}
- name: ELASTIC_APM_ENABLED
value: {{ .Values.globals.elasticApmEnabled | quote }}
{{ end }}
{{ if .Values.globals.elasticApmSecretToken }}
- name: ELASTIC_APM_SECRET_TOKEN
value: {{ .Values.globals.elasticApmSecretToken | quote }}
{{ end }}
{{ if .Values.globals.elasticApmServerUrl }}
- name: ELASTIC_APM_SERVER_URL
value: {{ .Values.globals.elasticApmServerUrl | quote }}
{{ end }}
image: budibase/apps:{{ .Values.globals.appVersion }}
imagePullPolicy: Always

View file

@ -27,6 +27,8 @@ spec:
spec:
containers:
- env:
- name: BUDIBASE_ENVIRONMENT
value: {{ .Values.globals.budibaseEnv }}
- name: DEPLOYMENT_ENVIRONMENT
value: "kubernetes"
- name: CLUSTER_PORT
@ -125,6 +127,19 @@ spec:
value: {{ .Values.globals.google.secret | quote }}
- name: TENANT_FEATURE_FLAGS
value: {{ .Values.globals.tenantFeatureFlags | quote }}
{{ if .Values.globals.elasticApmEnabled }}
- name: ELASTIC_APM_ENABLED
value: {{ .Values.globals.elasticApmEnabled | quote }}
{{ end }}
{{ if .Values.globals.elasticApmSecretToken }}
- name: ELASTIC_APM_SECRET_TOKEN
value: {{ .Values.globals.elasticApmSecretToken | quote }}
{{ end }}
{{ if .Values.globals.elasticApmServerUrl }}
- name: ELASTIC_APM_SERVER_URL
value: {{ .Values.globals.elasticApmServerUrl | quote }}
{{ end }}
image: budibase/worker:{{ .Values.globals.appVersion }}
imagePullPolicy: Always
livenessProbe:

View file

@ -114,6 +114,10 @@ globals:
smtp:
enabled: false
# elasticApmEnabled:
# elasticApmSecretToken:
# elasticApmServerUrl:
services:
budibaseVersion: latest
dns: cluster.local
@ -126,6 +130,7 @@ services:
port: 4002
replicaCount: 1
logLevel: info
# nodeDebug: "" # set the value of NODE_DEBUG
worker:
port: 4003

View file

@ -15,7 +15,10 @@ http {
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
'"$http_user_agent" "$http_x_forwarded_for" '
'response_time=$upstream_response_time proxy_host=$proxy_host upstream_addr=$upstream_addr';
access_log /var/log/nginx/access.log main;
map $http_upgrade $connection_upgrade {
default "upgrade";

View file

@ -33,7 +33,10 @@ http {
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
'"$http_user_agent" "$http_x_forwarded_for" '
'response_time=$upstream_response_time proxy_host=$proxy_host upstream_addr=$upstream_addr';
access_log /var/log/nginx/access.log main;
map $http_upgrade $connection_upgrade {
default "upgrade";
@ -85,6 +88,10 @@ http {
proxy_pass http://$apps:4002;
}
location /preview {
proxy_pass http://$apps:4002;
}
location = / {
proxy_pass http://$apps:4002;
}
@ -94,6 +101,7 @@ http {
proxy_pass http://$watchtower:8080;
}
{{/if}}
location ~ ^/(builder|app_) {
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;

View file

@ -1,5 +1,5 @@
{
"version": "1.2.44-alpha.1",
"version": "1.2.58-alpha.3",
"npmClient": "yarn",
"packages": [
"packages/*"

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "1.2.44-alpha.1",
"version": "1.2.58-alpha.3",
"description": "Budibase backend core libraries used in server and worker",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
@ -20,7 +20,7 @@
"test:watch": "jest --watchAll"
},
"dependencies": {
"@budibase/types": "1.2.44-alpha.1",
"@budibase/types": "1.2.58-alpha.3",
"@techpass/passport-openidconnect": "0.3.2",
"aws-sdk": "2.1030.0",
"bcrypt": "5.0.1",

View file

@ -1,11 +1,11 @@
const passport = require("koa-passport")
const LocalStrategy = require("passport-local").Strategy
const JwtStrategy = require("passport-jwt").Strategy
const { getGlobalDB } = require("./tenancy")
import { getGlobalDB } from "./tenancy"
const refresh = require("passport-oauth2-refresh")
const { Configs } = require("./constants")
const { getScopedConfig } = require("./db/utils")
const {
import { Configs } from "./constants"
import { getScopedConfig } from "./db/utils"
import {
jwt,
local,
authenticated,
@ -13,7 +13,6 @@ const {
oidc,
auditLog,
tenancy,
appTenancy,
authError,
ssoCallbackUrl,
csrf,
@ -22,32 +21,36 @@ const {
builderOnly,
builderOrAdmin,
joiValidator,
} = require("./middleware")
const { invalidateUser } = require("./cache/user")
} from "./middleware"
import { invalidateUser } from "./cache/user"
import { User } from "@budibase/types"
// Strategies
passport.use(new LocalStrategy(local.options, local.authenticate))
passport.use(new JwtStrategy(jwt.options, jwt.authenticate))
passport.serializeUser((user, done) => done(null, user))
passport.serializeUser((user: User, done: any) => done(null, user))
passport.deserializeUser(async (user, done) => {
passport.deserializeUser(async (user: User, done: any) => {
const db = getGlobalDB()
try {
const user = await db.get(user._id)
return done(null, user)
const dbUser = await db.get(user._id)
return done(null, dbUser)
} catch (err) {
console.error(`User not found`, err)
return done(null, false, { message: "User not found" })
}
})
async function refreshOIDCAccessToken(db, chosenConfig, refreshToken) {
async function refreshOIDCAccessToken(
db: any,
chosenConfig: any,
refreshToken: string
) {
const callbackUrl = await oidc.getCallbackUrl(db, chosenConfig)
let enrichedConfig
let strategy
let enrichedConfig: any
let strategy: any
try {
enrichedConfig = await oidc.fetchStrategyConfig(chosenConfig, callbackUrl)
@ -70,22 +73,28 @@ async function refreshOIDCAccessToken(db, chosenConfig, refreshToken) {
refresh.requestNewAccessToken(
Configs.OIDC,
refreshToken,
(err, accessToken, refreshToken, params) => {
(err: any, accessToken: string, refreshToken: any, params: any) => {
resolve({ err, accessToken, refreshToken, params })
}
)
})
}
async function refreshGoogleAccessToken(db, config, refreshToken) {
async function refreshGoogleAccessToken(
db: any,
config: any,
refreshToken: any
) {
let callbackUrl = await google.getCallbackUrl(db, config)
let strategy
try {
strategy = await google.strategyFactory(config, callbackUrl)
} catch (err) {
} catch (err: any) {
console.error(err)
throw new Error("Error constructing OIDC refresh strategy", err)
throw new Error(
`Error constructing OIDC refresh strategy: message=${err.message}`
)
}
refresh.use(strategy)
@ -94,14 +103,18 @@ async function refreshGoogleAccessToken(db, config, refreshToken) {
refresh.requestNewAccessToken(
Configs.GOOGLE,
refreshToken,
(err, accessToken, refreshToken, params) => {
(err: any, accessToken: string, refreshToken: string, params: any) => {
resolve({ err, accessToken, refreshToken, params })
}
)
})
}
async function refreshOAuthToken(refreshToken, configType, configId) {
async function refreshOAuthToken(
refreshToken: string,
configType: string,
configId: string
) {
const db = getGlobalDB()
const config = await getScopedConfig(db, {
@ -113,7 +126,7 @@ async function refreshOAuthToken(refreshToken, configType, configId) {
let refreshResponse
if (configType === Configs.OIDC) {
// configId - retrieved from cookie.
chosenConfig = config.configs.filter(c => c.uuid === configId)[0]
chosenConfig = config.configs.filter((c: any) => c.uuid === configId)[0]
if (!chosenConfig) {
throw new Error("Invalid OIDC configuration")
}
@ -134,7 +147,7 @@ async function refreshOAuthToken(refreshToken, configType, configId) {
return refreshResponse
}
async function updateUserOAuth(userId, oAuthConfig) {
async function updateUserOAuth(userId: string, oAuthConfig: any) {
const details = {
accessToken: oAuthConfig.accessToken,
refreshToken: oAuthConfig.refreshToken,
@ -162,14 +175,13 @@ async function updateUserOAuth(userId, oAuthConfig) {
}
}
module.exports = {
export = {
buildAuthMiddleware: authenticated,
passport,
google,
oidc,
jwt: require("jsonwebtoken"),
buildTenancyMiddleware: tenancy,
buildAppTenancyMiddleware: appTenancy,
auditLog,
authError,
buildCsrfMiddleware: csrf,
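
The auth module conversion keeps refreshOAuthToken and updateUserOAuth with the same call shape, now typed. A rough sketch of how the OIDC refresh flow might be driven by a consumer; the import style, and the assumption that both helpers remain exported further down the file (outside this hunk), are not confirmed by the diff.

// Sketch only: refresh tokens for a user whose OIDC config id was read from
// their session cookie. Configs.OIDC comes from "./constants" as above.
import { Configs } from "./constants"
const auth = require("./auth") // assumed entry point

async function refreshForUser(userId: string, refreshToken: string, configId: string) {
  const { err, accessToken, refreshToken: newRefreshToken } =
    await auth.refreshOAuthToken(refreshToken, Configs.OIDC, configId)
  if (err) {
    throw new Error(`OAuth refresh failed: ${JSON.stringify(err)}`)
  }
  // persist the rotated tokens against the user document
  await auth.updateUserOAuth(userId, { accessToken, refreshToken: newRefreshToken })
}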

View file

@ -18,6 +18,7 @@ export enum ViewName {
LINK = "by_link",
ROUTING = "screen_routes",
AUTOMATION_LOGS = "automation_logs",
ACCOUNT_BY_EMAIL = "account_by_email",
}
export const DeprecatedViews = {
@ -41,6 +42,7 @@ export enum DocumentType {
MIGRATIONS = "migrations",
DEV_INFO = "devinfo",
AUTOMATION_LOG = "log_au",
ACCOUNT_METADATA = "acc_metadata",
}
export const StaticDatabases = {

View file

@ -5,6 +5,8 @@ const {
SEPARATOR,
} = require("./utils")
const { getGlobalDB } = require("../tenancy")
const { StaticDatabases } = require("./constants")
const { doWithDB } = require("./")
const DESIGN_DB = "_design/database"
@ -56,6 +58,31 @@ exports.createNewUserEmailView = async () => {
await db.put(designDoc)
}
exports.createAccountEmailView = async () => {
await doWithDB(StaticDatabases.PLATFORM_INFO.name, async db => {
let designDoc
try {
designDoc = await db.get(DESIGN_DB)
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
// if using variables in a map function need to inject them before use
map: `function(doc) {
if (doc._id.startsWith("${DocumentType.ACCOUNT_METADATA}${SEPARATOR}")) {
emit(doc.email.toLowerCase(), doc._id)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.ACCOUNT_BY_EMAIL]: view,
}
await db.put(designDoc)
})
}
exports.createUserAppView = async () => {
const db = getGlobalDB()
let designDoc
@ -128,6 +155,39 @@ exports.createUserBuildersView = async () => {
await db.put(designDoc)
}
exports.queryView = async (viewName, params, db, CreateFuncByName) => {
try {
let response = (await db.query(`database/${viewName}`, params)).rows
response = response.map(resp =>
params.include_docs ? resp.doc : resp.value
)
if (params.arrayResponse) {
return response
} else {
return response.length <= 1 ? response[0] : response
}
} catch (err) {
if (err != null && err.name === "not_found") {
const createFunc = CreateFuncByName[viewName]
await removeDeprecated(db, viewName)
await createFunc()
return exports.queryView(viewName, params, db, CreateFuncByName)
} else {
throw err
}
}
}
exports.queryPlatformView = async (viewName, params) => {
const CreateFuncByName = {
[ViewName.ACCOUNT_BY_EMAIL]: exports.createAccountEmailView,
}
return doWithDB(StaticDatabases.PLATFORM_INFO.name, async db => {
return exports.queryView(viewName, params, db, CreateFuncByName)
})
}
exports.queryGlobalView = async (viewName, params, db = null) => {
const CreateFuncByName = {
[ViewName.USER_BY_EMAIL]: exports.createNewUserEmailView,
@ -139,20 +199,5 @@ exports.queryGlobalView = async (viewName, params, db = null) => {
if (!db) {
db = getGlobalDB()
}
try {
let response = (await db.query(`database/${viewName}`, params)).rows
response = response.map(resp =>
params.include_docs ? resp.doc : resp.value
)
return response.length <= 1 ? response[0] : response
} catch (err) {
if (err != null && err.name === "not_found") {
const createFunc = CreateFuncByName[viewName]
await removeDeprecated(db, viewName)
await createFunc()
return exports.queryGlobalView(viewName, params)
} else {
throw err
}
}
return exports.queryView(viewName, params, db, CreateFuncByName)
}
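
queryPlatformView reuses the generic queryView helper against the shared platform database and will (re)create the account_by_email design view on demand, the same self-healing behaviour queryGlobalView already had. A minimal sketch of looking up an account document by email; the import paths are assumptions.

// Sketch: the view emits the lowercased email as the key, so lookups should
// lowercase the input too. include_docs returns the full account metadata doc.
const { queryPlatformView } = require("./views") // path assumed
const { ViewName } = require("./utils")          // re-exported constants, assumed

async function getAccountDocByEmail(email: string) {
  return queryPlatformView(ViewName.ACCOUNT_BY_EMAIL, {
    key: email.toLowerCase(),
    include_docs: true,
  })
}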

View file

@ -8,4 +8,5 @@ import { processors } from "./processors"
export const shutdown = () => {
processors.shutdown()
console.log("Events shutdown")
}

View file

@ -17,6 +17,7 @@ import constants from "./constants"
import * as dbConstants from "./db/constants"
import logging from "./logging"
import pino from "./pino"
import * as middleware from "./middleware"
// mimic the outer package exports
import * as db from "./pkg/db"
@ -57,6 +58,7 @@ const core = {
roles,
...pino,
...errorClasses,
middleware,
}
export = core

View file

@ -65,7 +65,7 @@ async function checkApiKey(apiKey: string, populateUser?: Function) {
* The tenancy modules should not be used here and it should be assumed that the tenancy context
* has not yet been populated.
*/
module.exports = (
export = (
noAuthPatterns = [],
opts: { publicAllowed: boolean; populateUser?: Function } = {
publicAllowed: false,

View file

@ -13,7 +13,8 @@ const adminOnly = require("./adminOnly")
const builderOrAdmin = require("./builderOrAdmin")
const builderOnly = require("./builderOnly")
const joiValidator = require("./joi-validator")
module.exports = {
const pkg = {
google,
oidc,
jwt,
@ -33,3 +34,5 @@ module.exports = {
builderOrAdmin,
joiValidator,
}
export = pkg

View file

@ -13,10 +13,13 @@ function validate(schema, property) {
params = ctx.request[property]
}
schema = schema.append({
createdAt: Joi.any().optional(),
updatedAt: Joi.any().optional(),
})
// not all schemas have the append property e.g. array schemas
if (schema.append) {
schema = schema.append({
createdAt: Joi.any().optional(),
updatedAt: Joi.any().optional(),
})
}
const { error } = schema.validate(params)
if (error) {
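
The new guard matters because only Joi object schemas expose append; array schemas (used where an endpoint accepts a list body) have no such method and the old code would throw. A tiny illustration of the difference, assuming plain Joi:

import Joi from "joi"

const objectSchema = Joi.object({ name: Joi.string() })
const arraySchema = Joi.array().items(Joi.object({ name: Joi.string() }))

// object schemas can be extended with the audit fields
const extended = objectSchema.append({ createdAt: Joi.any().optional() })

// array schemas cannot, so the middleware now skips the append step for them
console.log(typeof extended.validate)  // "function"
console.log(typeof arraySchema.append) // "undefined"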

View file

@ -70,15 +70,13 @@ const PUBLIC_BUCKETS = [
* @constructor
*/
export const ObjectStore = (bucket: any) => {
AWS.config.update({
accessKeyId: env.MINIO_ACCESS_KEY,
secretAccessKey: env.MINIO_SECRET_KEY,
region: env.AWS_REGION,
})
const config: any = {
s3ForcePathStyle: true,
signatureVersion: "v4",
apiVersion: "2006-03-01",
accessKeyId: env.MINIO_ACCESS_KEY,
secretAccessKey: env.MINIO_SECRET_KEY,
region: env.AWS_REGION,
}
if (bucket) {
config.params = {

View file

@ -3,17 +3,27 @@ const { v4: uuidv4 } = require("uuid")
const { logWarn } = require("../logging")
const env = require("../environment")
interface Session {
key: string
userId: string
interface CreateSession {
sessionId: string
lastAccessedAt: string
createdAt: string
tenantId: string
csrfToken?: string
value: string
}
type SessionKey = { key: string }[]
interface Session extends CreateSession {
userId: string
lastAccessedAt: string
createdAt: string
// make optional attributes required
csrfToken: string
}
interface SessionKey {
key: string
}
interface ScannedSession {
value: Session
}
// a week in seconds
const EXPIRY_SECONDS = 86400 * 7
@ -22,14 +32,14 @@ function makeSessionID(userId: string, sessionId: string) {
return `${userId}/${sessionId}`
}
export async function getSessionsForUser(userId: string) {
export async function getSessionsForUser(userId: string): Promise<Session[]> {
if (!userId) {
console.trace("Cannot get sessions for undefined userId")
return []
}
const client = await redis.getSessionClient()
const sessions = await client.scan(userId)
return sessions.map((session: Session) => session.value)
const sessions: ScannedSession[] = await client.scan(userId)
return sessions.map(session => session.value)
}
export async function invalidateSessions(
@ -39,33 +49,32 @@ export async function invalidateSessions(
try {
const reason = opts?.reason || "unknown"
let sessionIds: string[] = opts.sessionIds || []
let sessions: SessionKey
let sessionKeys: SessionKey[]
// If no sessionIds, get all the sessions for the user
if (sessionIds.length === 0) {
sessions = await getSessionsForUser(userId)
sessions.forEach(
(session: any) =>
(session.key = makeSessionID(session.userId, session.sessionId))
)
const sessions = await getSessionsForUser(userId)
sessionKeys = sessions.map(session => ({
key: makeSessionID(session.userId, session.sessionId),
}))
} else {
// use the passed array of sessionIds
sessionIds = Array.isArray(sessionIds) ? sessionIds : [sessionIds]
sessions = sessionIds.map((sessionId: string) => ({
sessionKeys = sessionIds.map(sessionId => ({
key: makeSessionID(userId, sessionId),
}))
}
if (sessions && sessions.length > 0) {
if (sessionKeys && sessionKeys.length > 0) {
const client = await redis.getSessionClient()
const promises = []
for (let session of sessions) {
promises.push(client.delete(session.key))
for (let sessionKey of sessionKeys) {
promises.push(client.delete(sessionKey.key))
}
if (!env.isTest()) {
logWarn(
`Invalidating sessions for ${userId} (reason: ${reason}) - ${sessions
.map(session => session.key)
`Invalidating sessions for ${userId} (reason: ${reason}) - ${sessionKeys
.map(sessionKey => sessionKey.key)
.join(", ")}`
)
}
@ -76,22 +85,26 @@ export async function invalidateSessions(
}
}
export async function createASession(userId: string, session: Session) {
export async function createASession(
userId: string,
createSession: CreateSession
) {
// invalidate all other sessions
await invalidateSessions(userId, { reason: "creation" })
const client = await redis.getSessionClient()
const sessionId = session.sessionId
if (!session.csrfToken) {
session.csrfToken = uuidv4()
}
session = {
...session,
const sessionId = createSession.sessionId
const csrfToken = createSession.csrfToken ? createSession.csrfToken : uuidv4()
const key = makeSessionID(userId, sessionId)
const session: Session = {
...createSession,
csrfToken,
createdAt: new Date().toISOString(),
lastAccessedAt: new Date().toISOString(),
userId,
}
await client.store(makeSessionID(userId, sessionId), session, EXPIRY_SECONDS)
await client.store(key, session, EXPIRY_SECONDS)
}
export async function updateSessionTTL(session: Session) {
@ -106,7 +119,10 @@ export async function endSession(userId: string, sessionId: string) {
await client.delete(makeSessionID(userId, sessionId))
}
export async function getSession(userId: string, sessionId: string) {
export async function getSession(
userId: string,
sessionId: string
): Promise<Session> {
if (!userId || !sessionId) {
throw new Error(`Invalid session details - ${userId} - ${sessionId}`)
}
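
With the split into CreateSession and Session, callers now pass only the identifying fields and createASession fills in userId, the timestamps and a CSRF token before writing to Redis. A rough usage sketch; the import path and the hard-coded session id are illustrative assumptions.

import { createASession, getSession } from "./security/sessions" // path assumed

async function startSession(userId: string, tenantId: string) {
  const sessionId = "3f9c-example" // normally a freshly generated UUID
  // csrfToken is optional here; a uuid is generated when it is omitted
  await createASession(userId, { sessionId, tenantId })
  // the stored Session has createdAt, lastAccessedAt and userId populated
  return getSession(userId, sessionId)
}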

View file

@ -11,7 +11,6 @@ const { UNICODE_MAX } = require("./db/constants")
* Given an email address this will use a view to search through
* all the users to find one with this email address.
* @param {string} email the email to lookup the user by.
* @return {Promise<object|null>}
*/
exports.getGlobalUserByEmail = async email => {
if (email == null) {

View file

@ -0,0 +1,7 @@
export const getAccount = jest.fn()
export const getAccountByTenantId = jest.fn()
jest.mock("../../../src/cloud/accounts", () => ({
getAccount,
getAccountByTenantId,
}))

View file

@ -1,2 +0,0 @@
exports.MOCK_DATE = new Date("2020-01-01T00:00:00.000Z")
exports.MOCK_DATE_TIMESTAMP = 1577836800000

View file

@ -0,0 +1,2 @@
export const MOCK_DATE = new Date("2020-01-01T00:00:00.000Z")
export const MOCK_DATE_TIMESTAMP = 1577836800000

View file

@ -1,9 +0,0 @@
const posthog = require("./posthog")
const events = require("./events")
const date = require("./date")
module.exports = {
posthog,
date,
events,
}

View file

@ -0,0 +1,4 @@
import "./posthog"
import "./events"
export * as accounts from "./accounts"
export * as date from "./date"

View file

@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
"version": "1.2.44-alpha.1",
"version": "1.2.58-alpha.3",
"license": "MPL-2.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",
@ -38,7 +38,7 @@
],
"dependencies": {
"@adobe/spectrum-css-workflow-icons": "^1.2.1",
"@budibase/string-templates": "1.2.44-alpha.1",
"@budibase/string-templates": "1.2.58-alpha.3",
"@spectrum-css/actionbutton": "^1.0.1",
"@spectrum-css/actiongroup": "^1.0.1",
"@spectrum-css/avatar": "^3.0.2",

View file

@ -67,6 +67,13 @@
// If time only set date component to 2000-01-01
if (timeOnly) {
// Classic flatpickr causing issues.
// When selecting a value for the first time for a "time only" field,
// the time is always offset by 1 hour for some reason (regardless of time
// zone) so we need to correct it.
if (!value && newValue) {
newValue = new Date(dates[0].getTime() + 60 * 60 * 1000).toISOString()
}
newValue = `2000-01-01T${newValue.split("T")[1]}`
}

View file

@ -139,7 +139,13 @@
<div class="title">
<div class="filename">
{#if selectedUrl}
<Link href={selectedUrl}>{selectedImage.name}</Link>
<Link
target="_blank"
download={selectedImage.name}
href={selectedUrl}
>
{selectedImage.name}
</Link>
{:else}
{selectedImage.name}
{/if}

View file

@ -10,6 +10,7 @@
export let disabled = false
export let getOptionLabel = option => option
export let getOptionValue = option => option
export let getOptionTitle = option => option
const dispatch = createEventDispatcher()
const onChange = e => dispatch("change", e.target.value)
@ -19,7 +20,7 @@
{#if options && Array.isArray(options)}
{#each options as option}
<div
title={getOptionLabel(option)}
title={getOptionTitle(option)}
class="spectrum-Radio spectrum-FieldGroup-item spectrum-Radio--emphasized"
class:is-invalid={!!error}
>

View file

@ -12,6 +12,7 @@
export let direction = "vertical"
export let getOptionLabel = option => extractProperty(option, "label")
export let getOptionValue = option => extractProperty(option, "value")
export let getOptionTitle = option => extractProperty(option, "label")
const dispatch = createEventDispatcher()
const onChange = e => {
@ -35,6 +36,7 @@
{direction}
{getOptionLabel}
{getOptionValue}
{getOptionTitle}
on:change={onChange}
/>
</Field>

View file

@ -8,12 +8,14 @@
export let secondary = false
export let overBackground = false
export let target
export let download
</script>
<a
on:click
{href}
{target}
{download}
class:spectrum-Link--primary={primary}
class:spectrum-Link--secondary={secondary}
class:spectrum-Link--overBackground={overBackground}

View file

@ -15,14 +15,24 @@
{#each attachments as attachment}
{#if isImage(attachment.extension)}
<Link quiet target="_blank" href={attachment.url}>
<Link
quiet
target="_blank"
download={attachment.name}
href={attachment.url}
>
<div class="center" title={attachment.name}>
<img src={attachment.url} alt={attachment.extension} />
</div>
</Link>
{:else}
<div class="file" title={attachment.name}>
<Link quiet target="_blank" href={attachment.url}>
<Link
quiet
target="_blank"
download={attachment.name}
href={attachment.url}
>
{attachment.extension}
</Link>
</div>

View file

@ -102,7 +102,7 @@ filterTests(['all'], () => {
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 6000 })
cy.wait(500)
cy.get(interact.APP_TABLE_STATUS, { timeout: 1000 }).eq(0).contains("Unpublished")
cy.get(interact.APP_TABLE_STATUS, { timeout: 10000 }).eq(0).contains("Unpublished")
})
})

View file

@ -175,7 +175,10 @@ filterTests(["all"], () => {
cy.get("@query").its("response.statusCode").should("eq", 200)
cy.get("@query").its("response.body").should("not.be.empty")
// Save query
cy.intercept("POST", "**/queries").as("saveQuery")
cy.get(".spectrum-Button").contains("Save Query").click({ force: true })
cy.wait("@saveQuery")
cy.get("@saveQuery").its("response.statusCode").should("eq", 200)
cy.get(".nav-item").should("contain", queryName)
})

View file

@ -252,7 +252,8 @@ filterTests(["all"], () => {
.contains("Delete Query")
.click({ force: true })
// Confirm deletion
cy.reload({ timeout: 5000 })
cy.reload()
cy.get(".nav-item", { timeout: 30000 }).contains(datasource).click({ force: true })
cy.get(".nav-item", { timeout: 1000 }).should("not.contain", queryRename)
})

View file

@ -48,6 +48,7 @@ filterTests(['smoke', 'all'], () => {
cy.get(interact.AREA_LABEL_REVERT).click({ force: true })
})
cy.get(interact.SPECTRUM_DIALOG_GRID).within(() => {
cy.get("input").type("Cypress Tests")
// Click Revert
cy.get(interact.SPECTRUM_BUTTON).contains("Revert").click({ force: true })
cy.wait(2000) // Wait for app to finish reverting

View file

@ -448,10 +448,7 @@ Cypress.Commands.add("createTable", (tableName, initialTable) => {
.contains("Continue")
.click({ force: true })
})
cy.get(".spectrum-Modal", { timeout: 10000 }).should(
"not.contain",
"Add data source"
)
cy.get(".spectrum-Modal").contains("Create Table", { timeout: 10000 })
cy.get(".spectrum-Modal", { timeout: 2000 }).within(() => {
cy.get("input", { timeout: 2000 }).first().type(tableName).blur()
cy.get(".spectrum-ButtonGroup").contains("Create").click()
@ -742,8 +739,15 @@ Cypress.Commands.add("deleteAllScreens", () => {
Cypress.Commands.add("navigateToFrontend", () => {
// Clicks on Design tab and then the Home nav item
cy.wait(500)
cy.intercept("**/preview").as("preview")
cy.contains("Design").click()
cy.get(".spectrum-Search", { timeout: 2000 }).type("/")
cy.wait("@preview")
cy.get("@preview").then(res => {
if (res.statusCode != 200) {
cy.reload()
}
})
cy.get(".spectrum-Search", { timeout: 20000 }).type("/")
cy.get(".nav-item", { timeout: 2000 }).contains("home").click({ force: true })
})

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
"version": "1.2.44-alpha.1",
"version": "1.2.58-alpha.3",
"license": "GPL-3.0",
"private": true,
"scripts": {
@ -69,10 +69,10 @@
}
},
"dependencies": {
"@budibase/bbui": "1.2.44-alpha.1",
"@budibase/client": "1.2.44-alpha.1",
"@budibase/frontend-core": "1.2.44-alpha.1",
"@budibase/string-templates": "1.2.44-alpha.1",
"@budibase/bbui": "1.2.58-alpha.3",
"@budibase/client": "1.2.58-alpha.3",
"@budibase/frontend-core": "1.2.58-alpha.3",
"@budibase/string-templates": "1.2.58-alpha.3",
"@sentry/browser": "5.19.1",
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",
@ -121,4 +121,4 @@
"vite": "^3.0.8"
},
"gitHead": "115189f72a850bfb52b65ec61d932531bf327072"
}
}

View file

@ -14,7 +14,7 @@
$: {
let fields = {}
for (const [key, type] of Object.entries(block?.inputs?.fields)) {
for (const [key, type] of Object.entries(block?.inputs?.fields ?? {})) {
fields = {
...fields,
[key]: {

View file

@ -467,6 +467,7 @@
options={relationshipOptions}
getOptionLabel={option => option.name}
getOptionValue={option => option.value}
getOptionTitle={option => option.alt}
/>
{/if}
<Input

View file

@ -184,7 +184,7 @@
$goto("./navigation")
}
} else if (type === "request-add-component") {
$goto(`./components/${$selectedComponent?._id}/new`)
toggleAddComponent()
} else if (type === "highlight-setting") {
store.actions.settings.highlight(data.setting)
@ -228,9 +228,8 @@
if (isAddingComponent) {
$goto(`../${$selectedScreen._id}/components/${$selectedComponent?._id}`)
} else {
$goto(
`../${$selectedScreen._id}/components/${$selectedComponent?._id}/new`
)
const id = $selectedComponent?._id || $selectedScreen?.props?._id
$goto(`../${$selectedScreen._id}/components/${id}/new`)
}
}

View file

@ -2,9 +2,18 @@
import { store } from "builderStore"
import { ActionMenu, MenuItem, Icon } from "@budibase/bbui"
export let component
$: noPaste = !$store.componentToPaste
const keyboardEvent = (key, ctrlKey = false) => {
// Ensure this component is selected first
if (component._id !== $store.selectedComponentId) {
store.update(state => {
state.selectedComponentId = component._id
return state
})
}
document.dispatchEvent(new KeyboardEvent("keydown", { key, ctrlKey }))
}
</script>

View file

@ -0,0 +1,73 @@
<script>
import { Body, ModalContent, Table } from "@budibase/bbui"
import { onMount } from "svelte"
export let userData
export let deleteUsersResponse
let successCount
let failureCount
let title
let unsuccessfulUsers
let message
const setTitle = () => {
if (successCount) {
title = `${successCount} users deleted`
} else {
title = "Oops!"
}
}
const setMessage = () => {
if (successCount) {
message = "However there was a problem deleting some users."
} else {
message = "There was a problem deleting some users."
}
}
const setUsers = () => {
unsuccessfulUsers = deleteUsersResponse.unsuccessful.map(user => {
return {
email: user.email,
reason: user.reason,
}
})
}
onMount(() => {
successCount = deleteUsersResponse.successful.length
failureCount = deleteUsersResponse.unsuccessful.length
setTitle()
setMessage()
setUsers()
})
const schema = {
email: {},
reason: {},
}
</script>
<ModalContent
size="M"
{title}
confirmText="Close"
showCloseIcon={false}
showCancelButton={false}
>
<Body size="XS">
{message}
</Body>
<Table
{schema}
data={unsuccessfulUsers}
allowEditColumns={false}
allowEditRows={false}
allowSelectRows={false}
/>
</ModalContent>
<style>
</style>

View file

@ -62,7 +62,7 @@
csvString = e.target.result
files = fileArray
userEmails = csvString.split("\n")
userEmails = csvString.split(/\r?\n/)
})
reader.readAsText(fileArray[0])
}
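
Switching the split from "\n" to /\r?\n/ handles CSV files saved with Windows line endings, which otherwise leave a trailing carriage return on every email. A small illustration:

const csvString = "a@example.com\r\nb@example.com\r\n"

console.log(csvString.split("\n"))    // [ "a@example.com\r", "b@example.com\r", "" ]
console.log(csvString.split(/\r?\n/)) // [ "a@example.com", "b@example.com", "" ]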

View file

@ -2,24 +2,78 @@
import { Body, ModalContent, Table, Icon } from "@budibase/bbui"
import PasswordCopyRenderer from "./PasswordCopyRenderer.svelte"
import { parseToCsv } from "helpers/data/utils"
import { onMount } from "svelte"
export let userData
export let createUsersResponse
$: mappedData = userData.map(user => {
return {
email: user.email,
password: user.password,
let hasSuccess
let hasFailure
let title
let failureMessage
let userDataIndex
let successfulUsers
let unsuccessfulUsers
const setTitle = () => {
if (hasSuccess) {
title = "Users created!"
} else if (hasFailure) {
title = "Oops!"
}
}
const setFailureMessage = () => {
if (hasSuccess) {
failureMessage = "However there was a problem creating some users."
} else {
failureMessage = "There was a problem creating some users."
}
}
const setUsers = () => {
userDataIndex = userData.reduce((prev, current) => {
prev[current.email] = current
return prev
}, {})
successfulUsers = createUsersResponse.successful.map(user => {
return {
email: user.email,
password: userDataIndex[user.email].password,
}
})
unsuccessfulUsers = createUsersResponse.unsuccessful.map(user => {
return {
email: user.email,
reason: user.reason,
}
})
}
onMount(() => {
hasSuccess = createUsersResponse.successful.length
hasFailure = createUsersResponse.unsuccessful.length
setTitle()
setFailureMessage()
setUsers()
})
const schema = {
const successSchema = {
email: {},
password: {},
}
const failedSchema = {
email: {},
reason: {},
}
const downloadCsvFile = () => {
const fileName = "passwords.csv"
const content = parseToCsv(["email", "password"], mappedData)
const content = parseToCsv(["email", "password"], successfulUsers)
download(fileName, content)
}
@ -42,36 +96,52 @@
</script>
<ModalContent
size="S"
title="Accounts created!"
size="M"
{title}
confirmText="Done"
showCancelButton={false}
cancelText="Cancel"
showCloseIcon={false}
>
<Body size="XS">
All your new users can be accessed through the autogenerated passwords. Take
note of these passwords or download the CSV file.
</Body>
{#if hasFailure}
<Body size="XS">
{failureMessage}
</Body>
<Table
schema={failedSchema}
data={unsuccessfulUsers}
allowEditColumns={false}
allowEditRows={false}
allowSelectRows={false}
/>
{/if}
{#if hasSuccess}
<Body size="XS">
All your new users can be accessed through the autogenerated passwords.
Take note of these passwords or download the CSV file.
</Body>
<div class="container" on:click={downloadCsvFile}>
<div class="inner">
<Icon name="Download" />
<div class="container" on:click={downloadCsvFile}>
<div class="inner">
<Icon name="Download" />
<div style="margin-left: var(--spacing-m)">
<Body size="XS">Passwords CSV</Body>
<div style="margin-left: var(--spacing-m)">
<Body size="XS">Passwords CSV</Body>
</div>
</div>
</div>
</div>
<Table
{schema}
data={mappedData}
allowEditColumns={false}
allowEditRows={false}
allowSelectRows={false}
customRenderers={[{ column: "password", component: PasswordCopyRenderer }]}
/>
<Table
schema={successSchema}
data={successfulUsers}
allowEditColumns={false}
allowEditRows={false}
allowSelectRows={false}
customRenderers={[
{ column: "password", component: PasswordCopyRenderer },
]}
/>
{/if}
</ModalContent>
<style>

View file

@ -23,6 +23,7 @@
import { goto } from "@roxi/routify"
import OnboardingTypeModal from "./_components/OnboardingTypeModal.svelte"
import PasswordModal from "./_components/PasswordModal.svelte"
import DeletionFailureModal from "./_components/DeletionFailureModal.svelte"
import ImportUsersModal from "./_components/ImportUsersModal.svelte"
import { createPaginationStore } from "helpers/pagination"
import { get } from "svelte/store"
@ -33,7 +34,8 @@
inviteConfirmationModal,
onboardingTypeModal,
passwordModal,
importUsersModal
importUsersModal,
deletionFailureModal
let pageInfo = createPaginationStore()
let prevEmail = undefined,
searchEmail = undefined
@ -55,6 +57,8 @@
apps: {},
}
$: userData = []
$: createUsersResponse = { successful: [], unsuccessful: [] }
$: deleteUsersResponse = { successful: [], unsuccessful: [] }
$: page = $pageInfo.page
$: fetchUsers(page, searchEmail)
$: {
@ -116,8 +120,9 @@
newUsers.push(user)
}
if (!newUsers.length)
if (!newUsers.length) {
notifications.info("Duplicated! There are no new users to add.")
}
return { ...userData, users: newUsers }
}
@ -144,7 +149,9 @@
async function createUser() {
try {
await users.create(await removingDuplicities(userData))
createUsersResponse = await users.create(
await removingDuplicities(userData)
)
notifications.success("Successfully created user")
await groups.actions.init()
passwordModal.show()
@ -176,8 +183,15 @@
notifications.error("You cannot delete yourself")
return
}
await users.bulkDelete(ids)
notifications.success(`Successfully deleted ${selectedRows.length} rows`)
deleteUsersResponse = await users.bulkDelete(ids)
if (deleteUsersResponse.unsuccessful?.length) {
deletionFailureModal.show()
} else {
notifications.success(
`Successfully deleted ${selectedRows.length} users`
)
}
selectedRows = []
await fetchUsers(page, searchEmail)
} catch (error) {
@ -284,7 +298,11 @@
</Modal>
<Modal bind:this={passwordModal}>
<PasswordModal userData={userData.users} />
<PasswordModal {createUsersResponse} userData={userData.users} />
</Modal>
<Modal bind:this={deletionFailureModal}>
<DeletionFailureModal {deleteUsersResponse} />
</Modal>
<Modal bind:this={importUsersModal}>

View file

@ -63,10 +63,14 @@ export function createUsersStore() {
return body
})
await API.createUsers({ users: mappedUsers, groups: data.groups })
const response = await API.createUsers({
users: mappedUsers,
groups: data.groups,
})
// re-search from first page
await search()
return response
}
async function del(id) {
@ -79,7 +83,7 @@ export function createUsersStore() {
}
async function bulkDelete(userIds) {
await API.deleteUsers(userIds)
return API.deleteUsers(userIds)
}
async function save(user) {

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
"version": "1.2.44-alpha.1",
"version": "1.2.58-alpha.3",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {
@ -26,9 +26,9 @@
"outputPath": "build"
},
"dependencies": {
"@budibase/backend-core": "1.2.44-alpha.1",
"@budibase/string-templates": "1.2.44-alpha.1",
"@budibase/types": "1.2.44-alpha.1",
"@budibase/backend-core": "1.2.58-alpha.3",
"@budibase/string-templates": "1.2.58-alpha.3",
"@budibase/types": "1.2.58-alpha.3",
"axios": "0.21.2",
"chalk": "4.1.0",
"cli-progress": "3.11.2",

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/client",
"version": "1.2.44-alpha.1",
"version": "1.2.58-alpha.3",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",
@ -19,9 +19,9 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
"@budibase/bbui": "1.2.44-alpha.1",
"@budibase/frontend-core": "1.2.44-alpha.1",
"@budibase/string-templates": "1.2.44-alpha.1",
"@budibase/bbui": "1.2.58-alpha.3",
"@budibase/frontend-core": "1.2.58-alpha.3",
"@budibase/string-templates": "1.2.58-alpha.3",
"@spectrum-css/button": "^3.0.3",
"@spectrum-css/card": "^3.0.3",
"@spectrum-css/divider": "^1.0.3",

View file

@ -77,6 +77,7 @@
{direction}
on:change={handleChange}
getOptionLabel={flatOptions ? x => x : x => x.label}
getOptionTitle={flatOptions ? x => x : x => x.label}
getOptionValue={flatOptions ? x => x : x => x.value}
/>
{/if}

View file

@ -1,12 +1,12 @@
{
"name": "@budibase/frontend-core",
"version": "1.2.44-alpha.1",
"version": "1.2.58-alpha.3",
"description": "Budibase frontend core libraries used in builder and client",
"author": "Budibase",
"license": "MPL-2.0",
"svelte": "src/index.js",
"dependencies": {
"@budibase/bbui": "1.2.44-alpha.1",
"@budibase/bbui": "1.2.58-alpha.3",
"lodash": "^4.17.21",
"svelte": "^3.46.2"
}

View file

@ -72,7 +72,7 @@ const cleanupQuery = query => {
continue
}
for (let [key, value] of Object.entries(query[filterField])) {
if (!value || value === "") {
if (value == null || value === "") {
delete query[filterField][key]
}
}
@ -186,7 +186,7 @@ export const runLuceneQuery = (docs, query) => {
return docs
}
// make query consistent first
// Make query consistent first
query = cleanupQuery(query)
// Iterates over a set of filters and evaluates a fail function against a doc
@ -218,7 +218,12 @@ export const runLuceneQuery = (docs, query) => {
// Process a range match
const rangeMatch = match("range", (docValue, testValue) => {
return !docValue || docValue < testValue.low || docValue > testValue.high
return (
docValue == null ||
docValue === "" ||
docValue < testValue.low ||
docValue > testValue.high
)
})
// Process an equal match (fails if the value is different)
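
The reworked range matcher now rejects empty strings as well as null/undefined document values, so unset fields no longer pass a range filter. A short sketch of the expected behaviour, assuming runLuceneQuery is imported from frontend-core's lucene utilities:

import { runLuceneQuery } from "./utils/lucene" // path assumed

const docs = [
  { id: 1, age: 25 },
  { id: 2, age: "" },   // previously slipped through, now filtered out
  { id: 3, age: null }, // already filtered out before this change
]

const query = { range: { age: { low: 18, high: 65 } } }

// expected output under the new behaviour: [ 1 ]
console.log(runLuceneQuery(docs, query).map(doc => doc.id))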

View file

@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
"version": "1.2.44-alpha.1",
"version": "1.2.58-alpha.3",
"description": "Budibase Web Server",
"main": "src/index.ts",
"repository": {
@ -77,11 +77,11 @@
"license": "GPL-3.0",
"dependencies": {
"@apidevtools/swagger-parser": "10.0.3",
"@budibase/backend-core": "1.2.44-alpha.1",
"@budibase/client": "1.2.44-alpha.1",
"@budibase/pro": "1.2.44-alpha.1",
"@budibase/string-templates": "1.2.44-alpha.1",
"@budibase/types": "1.2.44-alpha.1",
"@budibase/backend-core": "1.2.58-alpha.3",
"@budibase/client": "1.2.58-alpha.3",
"@budibase/pro": "1.2.58-alpha.3",
"@budibase/string-templates": "1.2.58-alpha.3",
"@budibase/types": "1.2.58-alpha.3",
"@bull-board/api": "3.7.0",
"@bull-board/koa": "3.9.4",
"@elastic/elasticsearch": "7.10.0",
@ -100,6 +100,7 @@
"curlconverter": "3.21.0",
"dotenv": "8.2.0",
"download": "8.0.0",
"elastic-apm-node": "3.38.0",
"fix-path": "3.0.0",
"form-data": "4.0.0",
"fs-extra": "8.1.0",

View file

@ -4,50 +4,54 @@ const { getAppDB } = require("@budibase/backend-core/context")
const { getGlobalDB } = require("@budibase/backend-core/tenancy")
exports.fetchAppComponentDefinitions = async function (ctx) {
const db = getAppDB()
const app = await db.get(DocumentType.APP_METADATA)
try {
const db = getAppDB()
const app = await db.get(DocumentType.APP_METADATA)
let componentManifests = await Promise.all(
app.componentLibraries.map(async library => {
let manifest = await getComponentLibraryManifest(library)
return {
manifest,
library,
}
})
)
const definitions = {}
for (let { manifest, library } of componentManifests) {
for (let key of Object.keys(manifest)) {
if (key === "features") {
definitions[key] = manifest[key]
} else {
const fullComponentName = `${library}/${key}`.toLowerCase()
definitions[fullComponentName] = {
component: fullComponentName,
...manifest[key],
let componentManifests = await Promise.all(
app.componentLibraries.map(async library => {
let manifest = await getComponentLibraryManifest(library)
return {
manifest,
library,
}
})
)
const definitions = {}
for (let { manifest, library } of componentManifests) {
for (let key of Object.keys(manifest)) {
if (key === "features") {
definitions[key] = manifest[key]
} else {
const fullComponentName = `${library}/${key}`.toLowerCase()
definitions[fullComponentName] = {
component: fullComponentName,
...manifest[key],
}
}
}
}
// Add custom components
const globalDB = getGlobalDB()
const response = await globalDB.allDocs(
getPluginParams(null, {
include_docs: true,
})
)
response.rows
.map(row => row.doc)
.filter(plugin => plugin.schema.type === "component")
.forEach(plugin => {
const fullComponentName = `plugin/${plugin.name}/${plugin.version}`
definitions[fullComponentName] = {
component: fullComponentName,
...plugin.schema.schema,
}
})
ctx.body = definitions
} catch (err) {
console.error(`component-definitions=failed`, err)
}
// Add custom components
const globalDB = getGlobalDB()
const response = await globalDB.allDocs(
getPluginParams(null, {
include_docs: true,
})
)
response.rows
.map(row => row.doc)
.filter(plugin => plugin.schema.type === "component")
.forEach(plugin => {
const fullComponentName = `plugin/${plugin.name}/${plugin.version}`
definitions[fullComponentName] = {
component: fullComponentName,
...plugin.schema.schema,
}
})
ctx.body = definitions
}

View file

@ -375,6 +375,7 @@ exports.exportRows = async ctx => {
const table = await db.get(ctx.params.tableId)
const rowIds = ctx.request.body.rows
let format = ctx.query.format
const { columns } = ctx.request.body
let response = (
await db.allDocs({
include_docs: true,
@ -382,7 +383,20 @@ exports.exportRows = async ctx => {
})
).rows.map(row => row.doc)
let rows = await outputProcessing(table, response)
let result = await outputProcessing(table, response)
let rows = []
// Filter data to only specified columns if required
if (columns && columns.length) {
for (let i = 0; i < result.length; i++) {
rows[i] = {}
for (let column of columns) {
rows[i][column] = result[i][column]
}
}
} else {
rows = result
}
let headers = Object.keys(rows[0])
const exporter = exporters[format]
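
exportRows now reads an optional columns array from the request body and trims each exported row down to just those keys, falling back to all columns when none are given. A hedged sketch of calling the endpoint from a script; the URL shape matches the new tests further down, while the auth cookie handling is an assumption.

// Assumes Node 18+ (global fetch) and a valid session cookie.
async function exportIdsOnly(baseUrl: string, tableId: string, rowIds: string[], cookie: string) {
  const res = await fetch(`${baseUrl}/api/${tableId}/rows/exportRows?format=json`, {
    method: "POST",
    headers: { "Content-Type": "application/json", cookie },
    // only the _id column will appear in the exported rows
    body: JSON.stringify({ rows: rowIds, columns: ["_id"] }),
  })
  return res.json()
}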

View file

@ -3,7 +3,12 @@ const setup = require("./utilities")
const { basicRow } = setup.structures
const { doInAppContext } = require("@budibase/backend-core/context")
const { doInTenant } = require("@budibase/backend-core/tenancy")
const { quotas, QuotaUsageType, StaticQuotaName, MonthlyQuotaName } = require("@budibase/pro")
const {
quotas,
QuotaUsageType,
StaticQuotaName,
MonthlyQuotaName,
} = require("@budibase/pro")
describe("/rows", () => {
let request = setup.getRequest()
@ -23,23 +28,30 @@ describe("/rows", () => {
await request
.get(`/api/${table._id}/rows/${id}`)
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect("Content-Type", /json/)
.expect(status)
const getRowUsage = async () => {
return config.doInContext(null, () => quotas.getCurrentUsageValue(QuotaUsageType.STATIC, StaticQuotaName.ROWS))
return config.doInContext(null, () =>
quotas.getCurrentUsageValue(QuotaUsageType.STATIC, StaticQuotaName.ROWS)
)
}
const getQueryUsage = async () => {
return config.doInContext(null, () => quotas.getCurrentUsageValue(QuotaUsageType.MONTHLY, MonthlyQuotaName.QUERIES))
return config.doInContext(null, () =>
quotas.getCurrentUsageValue(
QuotaUsageType.MONTHLY,
MonthlyQuotaName.QUERIES
)
)
}
const assertRowUsage = async (expected) => {
const assertRowUsage = async expected => {
const usage = await getRowUsage()
expect(usage).toBe(expected)
}
const assertQueryUsage = async (expected) => {
const assertQueryUsage = async expected => {
const usage = await getQueryUsage()
expect(usage).toBe(expected)
}
@ -76,10 +88,12 @@ describe("/rows", () => {
name: "Updated Name",
})
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect("Content-Type", /json/)
.expect(200)
expect(res.res.statusMessage).toEqual(`${table.name} updated successfully.`)
expect(res.res.statusMessage).toEqual(
`${table.name} updated successfully.`
)
expect(res.body.name).toEqual("Updated Name")
// await assertRowUsage(rowUsage)
// await assertQueryUsage(queryUsage + 1)
@ -92,7 +106,7 @@ describe("/rows", () => {
const res = await request
.get(`/api/${table._id}/rows/${existing._id}`)
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect("Content-Type", /json/)
.expect(200)
expect(res.body).toEqual({
@ -110,7 +124,7 @@ describe("/rows", () => {
const newRow = {
tableId: table._id,
name: "Second Contact",
status: "new"
status: "new",
}
await config.createRow()
await config.createRow(newRow)
@ -119,7 +133,7 @@ describe("/rows", () => {
const res = await request
.get(`/api/${table._id}/rows`)
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.length).toBe(2)
@ -135,17 +149,36 @@ describe("/rows", () => {
await request
.get(`/api/${table._id}/rows/not-a-valid-id`)
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect("Content-Type", /json/)
.expect(404)
await assertQueryUsage(queryUsage) // no change
})
it("row values are coerced", async () => {
const str = {type:"string", constraints: { type: "string", presence: false }}
const attachment = {type:"attachment", constraints: { type: "array", presence: false }}
const bool = {type:"boolean", constraints: { type: "boolean", presence: false }}
const number = {type:"number", constraints: { type: "number", presence: false }}
const datetime = {type:"datetime", constraints: { type: "string", presence: false, datetime: {earliest:"", latest: ""} }}
const str = {
type: "string",
constraints: { type: "string", presence: false },
}
const attachment = {
type: "attachment",
constraints: { type: "array", presence: false },
}
const bool = {
type: "boolean",
constraints: { type: "boolean", presence: false },
}
const number = {
type: "number",
constraints: { type: "number", presence: false },
}
const datetime = {
type: "datetime",
constraints: {
type: "string",
presence: false,
datetime: { earliest: "", latest: "" },
},
}
table = await config.createTable({
name: "TestTable2",
@ -171,9 +204,9 @@ describe("/rows", () => {
boolUndefined: bool,
boolString: bool,
boolBool: bool,
attachmentNull : attachment,
attachmentUndefined : attachment,
attachmentEmpty : attachment,
attachmentNull: attachment,
attachmentUndefined: attachment,
attachmentEmpty: attachment,
},
})
@ -198,9 +231,9 @@ describe("/rows", () => {
boolString: "true",
boolBool: true,
tableId: table._id,
attachmentNull : null,
attachmentUndefined : undefined,
attachmentEmpty : "",
attachmentNull: null,
attachmentUndefined: undefined,
attachmentEmpty: "",
}
const id = (await config.createRow(row))._id
@ -218,7 +251,9 @@ describe("/rows", () => {
expect(saved.datetimeEmptyString).toBe(null)
expect(saved.datetimeNull).toBe(null)
expect(saved.datetimeUndefined).toBe(undefined)
expect(saved.datetimeString).toBe(new Date(row.datetimeString).toISOString())
expect(saved.datetimeString).toBe(
new Date(row.datetimeString).toISOString()
)
expect(saved.datetimeDate).toBe(row.datetimeDate.toISOString())
expect(saved.boolNull).toBe(null)
expect(saved.boolEmpty).toBe(null)
@ -247,10 +282,12 @@ describe("/rows", () => {
name: "Updated Name",
})
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect("Content-Type", /json/)
.expect(200)
expect(res.res.statusMessage).toEqual(`${table.name} updated successfully.`)
expect(res.res.statusMessage).toEqual(
`${table.name} updated successfully.`
)
expect(res.body.name).toEqual("Updated Name")
expect(res.body.description).toEqual(existing.description)
@ -292,16 +329,14 @@ describe("/rows", () => {
const res = await request
.delete(`/api/${table._id}/rows`)
.send({
rows: [
createdRow
]
rows: [createdRow],
})
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect("Content-Type", /json/)
.expect(200)
expect(res.body[0]._id).toEqual(createdRow._id)
await assertRowUsage(rowUsage -1)
await assertQueryUsage(queryUsage +1)
await assertRowUsage(rowUsage - 1)
await assertQueryUsage(queryUsage + 1)
})
})
@ -314,9 +349,9 @@ describe("/rows", () => {
.post(`/api/${table._id}/rows/validate`)
.send({ name: "ivan" })
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.valid).toBe(true)
expect(Object.keys(res.body.errors)).toEqual([])
await assertRowUsage(rowUsage)
@ -331,9 +366,9 @@ describe("/rows", () => {
.post(`/api/${table._id}/rows/validate`)
.send({ name: 1 })
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.valid).toBe(false)
expect(Object.keys(res.body.errors)).toEqual(["name"])
await assertRowUsage(rowUsage)
@ -351,19 +386,16 @@ describe("/rows", () => {
const res = await request
.delete(`/api/${table._id}/rows`)
.send({
rows: [
row1,
row2,
]
rows: [row1, row2],
})
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.length).toEqual(2)
await loadRow(row1._id, 404)
await assertRowUsage(rowUsage - 2)
await assertQueryUsage(queryUsage +1)
await assertQueryUsage(queryUsage + 1)
})
})
@ -376,12 +408,12 @@ describe("/rows", () => {
const res = await request
.get(`/api/views/${table._id}`)
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.length).toEqual(1)
expect(res.body[0]._id).toEqual(row._id)
await assertRowUsage(rowUsage)
await assertQueryUsage(queryUsage +1)
await assertQueryUsage(queryUsage + 1)
})
it("should throw an error if view doesn't exist", async () => {
@ -406,7 +438,7 @@ describe("/rows", () => {
const res = await request
.get(`/api/views/${view.name}`)
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.length).toEqual(1)
expect(res.body[0]._id).toEqual(row._id)
@ -418,21 +450,24 @@ describe("/rows", () => {
describe("fetchEnrichedRows", () => {
it("should allow enriching some linked rows", async () => {
const { table, firstRow, secondRow } = await doInTenant(setup.structures.TENANT_ID, async () => {
const table = await config.createLinkedTable()
const firstRow = await config.createRow({
name: "Test Contact",
description: "original description",
tableId: table._id
})
const secondRow = await config.createRow({
name: "Test 2",
description: "og desc",
link: [{_id: firstRow._id}],
tableId: table._id,
})
return { table, firstRow, secondRow }
})
const { table, firstRow, secondRow } = await doInTenant(
setup.structures.TENANT_ID,
async () => {
const table = await config.createLinkedTable()
const firstRow = await config.createRow({
name: "Test Contact",
description: "original description",
tableId: table._id,
})
const secondRow = await config.createRow({
name: "Test 2",
description: "og desc",
link: [{ _id: firstRow._id }],
tableId: table._id,
})
return { table, firstRow, secondRow }
}
)
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
@ -440,7 +475,7 @@ describe("/rows", () => {
const resBasic = await request
.get(`/api/${table._id}/rows/${secondRow._id}`)
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect("Content-Type", /json/)
.expect(200)
expect(resBasic.body.link[0]._id).toBe(firstRow._id)
expect(resBasic.body.link[0].primaryDisplay).toBe("Test Contact")
@ -449,14 +484,14 @@ describe("/rows", () => {
const resEnriched = await request
.get(`/api/${table._id}/${secondRow._id}/enrich`)
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect("Content-Type", /json/)
.expect(200)
expect(resEnriched.body.link.length).toBe(1)
expect(resEnriched.body.link[0]._id).toBe(firstRow._id)
expect(resEnriched.body.link[0].name).toBe("Test Contact")
expect(resEnriched.body.link[0].description).toBe("original description")
await assertRowUsage(rowUsage)
await assertQueryUsage(queryUsage +2)
await assertQueryUsage(queryUsage + 2)
})
})
@ -466,9 +501,11 @@ describe("/rows", () => {
const row = await config.createRow({
name: "test",
description: "test",
attachment: [{
key: `${config.getAppId()}/attachments/test/thing.csv`,
}],
attachment: [
{
key: `${config.getAppId()}/attachments/test/thing.csv`,
},
],
tableId: table._id,
})
// the environment needs configured for this
@ -482,4 +519,49 @@ describe("/rows", () => {
})
})
})
describe("exportData", () => {
it("should allow exporting all columns", async () => {
const existing = await config.createRow()
const res = await request
.post(`/api/${table._id}/rows/exportRows?format=json`)
.set(config.defaultHeaders())
.send({
rows: [existing._id],
})
.expect("Content-Type", /json/)
.expect(200)
const results = JSON.parse(res.text)
expect(results.length).toEqual(1)
const row = results[0]
// Ensure all original columns were exported
expect(Object.keys(row).length).toBeGreaterThanOrEqual(
Object.keys(existing).length
)
Object.keys(existing).forEach(key => {
expect(row[key]).toEqual(existing[key])
})
})
it("should allow exporting only certain columns", async () => {
const existing = await config.createRow()
const res = await request
.post(`/api/${table._id}/rows/exportRows?format=json`)
.set(config.defaultHeaders())
.send({
rows: [existing._id],
columns: ["_id"],
})
.expect("Content-Type", /json/)
.expect(200)
const results = JSON.parse(res.text)
expect(results.length).toEqual(1)
const row = results[0]
// Ensure only the _id column was exported
expect(Object.keys(row).length).toEqual(1)
expect(row._id).toEqual(existing._id)
})
})
})

View file

@ -1,6 +1,15 @@
// need to load environment first
import { ExtendableContext } from "koa"
import * as env from "./environment"
// enable APM if configured
if (process.env.ELASTIC_APM_ENABLED) {
const apm = require("elastic-apm-node").start({
serviceName: process.env.SERVICE,
environment: process.env.BUDIBASE_ENVIRONMENT,
})
}
import { ExtendableContext } from "koa"
import db from "./db"
db.init()
const Koa = require("koa")
@ -76,9 +85,7 @@ server.on("close", async () => {
return
}
shuttingDown = true
if (!env.isTest()) {
console.log("Server Closed")
}
console.log("Server Closed")
await automations.shutdown()
await redis.shutdown()
await events.shutdown()
@ -170,3 +177,7 @@ process.on("uncaughtException", err => {
process.on("SIGTERM", () => {
shutdown()
})
process.on("SIGINT", () => {
shutdown()
})
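
The agent is only started when ELASTIC_APM_ENABLED is set; the server URL and secret token injected by the Helm charts above are picked up automatically from the standard ELASTIC_APM_SERVER_URL and ELASTIC_APM_SECRET_TOKEN environment variables. A minimal standalone sketch of the same pattern; the fallback service name is an illustrative assumption, and elastic-apm-node generally recommends starting the agent before other modules are required so they can be instrumented.

// Sketch: conditional APM startup mirroring the change above.
if (process.env.ELASTIC_APM_ENABLED) {
  require("elastic-apm-node").start({
    serviceName: process.env.SERVICE || "app-service", // assumed fallback
    environment: process.env.BUDIBASE_ENVIRONMENT || "production",
  })
}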

View file

@ -8,12 +8,14 @@ const Queue = env.isTest()
const { JobQueues } = require("../constants")
const { utils } = require("@budibase/backend-core/redis")
const { opts, redisProtocolUrl } = utils.getRedisOptions()
const listeners = require("./listeners")
const CLEANUP_PERIOD_MS = 60 * 1000
const queueConfig = redisProtocolUrl || { redis: opts }
let cleanupInternal = null
let automationQueue = new Queue(JobQueues.AUTOMATIONS, queueConfig)
listeners.addListeners(automationQueue)
async function cleanup() {
await automationQueue.clean(CLEANUP_PERIOD_MS, "completed")
@ -51,6 +53,7 @@ exports.shutdown = async () => {
await automationQueue.close()
automationQueue = null
}
console.log("Bull shutdown")
}
exports.queue = automationQueue

View file

@ -0,0 +1,78 @@
import { Queue, Job, JobId } from "bull"
import { AutomationEvent } from "../definitions/automations"
import * as automation from "../threads/automation"
export const addListeners = (queue: Queue) => {
logging(queue)
handleStalled(queue)
}
const handleStalled = (queue: Queue) => {
queue.on("stalled", async (job: Job) => {
await automation.removeStalled(job as AutomationEvent)
})
}
const logging = (queue: Queue) => {
if (process.env.NODE_DEBUG?.includes("bull")) {
queue
.on("error", (error: any) => {
// An error occurred.
console.error(`automation-event=error error=${JSON.stringify(error)}`)
})
.on("waiting", (jobId: JobId) => {
// A Job is waiting to be processed as soon as a worker is idling.
console.log(`automation-event=waiting jobId=${jobId}`)
})
.on("active", (job: Job, jobPromise: any) => {
// A job has started. You can use `jobPromise.cancel()` to abort it.
console.log(`automation-event=active jobId=${job.id}`)
})
.on("stalled", (job: Job) => {
// A job has been marked as stalled. This is useful for debugging job
// workers that crash or pause the event loop.
console.error(
`automation-event=stalled jobId=${job.id} job=${JSON.stringify(job)}`
)
})
.on("progress", (job: Job, progress: any) => {
// A job's progress was updated!
console.log(
`automation-event=progress jobId=${job.id} progress=${progress}`
)
})
.on("completed", (job: Job, result) => {
// A job successfully completed with a `result`.
console.log(
`automation-event=completed jobId=${job.id} result=${result}`
)
})
.on("failed", (job, err: any) => {
// A job failed with reason `err`!
console.log(`automation-event=failed jobId=${job.id} error=${err}`)
})
.on("paused", () => {
// The queue has been paused.
console.log(`automation-event=paused`)
})
.on("resumed", (job: Job) => {
// The queue has been resumed.
console.log(`automation-event=resumed jobId=${job.id}`)
})
.on("cleaned", (jobs: Job[], type: string) => {
// Old jobs have been cleaned from the queue. `jobs` is an array of cleaned
// jobs, and `type` is the type of jobs cleaned.
console.log(
`automation-event=cleaned length=${jobs.length} type=${type}`
)
})
.on("drained", () => {
// Emitted every time the queue has processed all the waiting jobs (even if some delayed jobs have not yet been processed)
console.log(`automation-event=drained`)
})
.on("removed", (job: Job) => {
// A job was successfully removed.
console.log(`automation-event=removed jobId=${job.id}`)
})
}
}
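
These listeners are no-ops unless NODE_DEBUG mentions bull, so the logging costs nothing in normal runs. A minimal way to see the output locally, assuming a Redis instance on the default port, is sketched below; the queue name and sample log lines are illustrative.

// Run with: NODE_DEBUG=bull node dist/example.js
import Bull from "bull"
import { addListeners } from "./listeners"

const queue = new Bull("automationQueue", "redis://localhost:6379")
addListeners(queue)
// While jobs flow through, lines such as these are printed:
//   automation-event=waiting jobId=1
//   automation-event=active jobId=1
//   automation-event=completed jobId=1 result=null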

View file

@ -125,6 +125,14 @@ const hasNullFilters = filters =>
exports.run = async function ({ inputs, appId }) {
const { tableId, filters, sortColumn, sortOrder, limit } = inputs
if (!tableId) {
return {
success: false,
response: {
message: "You must select a table to query.",
},
}
}
const table = await getTable(appId, tableId)
let sortType = FieldTypes.STRING
if (table && table.schema && table.schema[sortColumn] && sortColumn) {
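
With that guard in place, running the step without selecting a table fails cleanly instead of throwing. A hypothetical usage sketch follows; the require path and appId are illustrative, only the returned shape comes from the guard above.

const queryRows = require("./steps/queryRows") // path is illustrative

async function example() {
  const result = await queryRows.run({ inputs: {}, appId: "app_dev_123" })
  // result => {
  //   success: false,
  //   response: { message: "You must select a table to query." },
  // }
}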

View file

@ -21,11 +21,13 @@ const WH_STEP_ID = definitions.WEBHOOK.stepId
const CRON_STEP_ID = definitions.CRON.stepId
const Runner = new Thread(ThreadType.AUTOMATION)
const jobMessage = (job: any, message: string) => {
return `app=${job.data.event.appId} automation=${job.data.automation._id} jobId=${job.id} trigger=${job.data.automation.definition.trigger.event} : ${message}`
}
export async function processEvent(job: any) {
try {
console.log(
`${job.data.automation.appId} automation ${job.data.automation._id} running`
)
console.log(jobMessage(job, "running"))
// need to actually await these so that an error can be captured properly
const tenantId = tenancy.getTenantIDFromAppID(job.data.event.appId)
return await tenancy.doInTenant(tenantId, async () => {
@ -34,9 +36,7 @@ export async function processEvent(job: any) {
})
} catch (err) {
const errJson = JSON.stringify(err)
console.error(
`${job.data.automation.appId} automation ${job.data.automation._id} was unable to run - ${errJson}`
)
console.error(jobMessage(job, `was unable to run - ${errJson}`))
console.trace(err)
return { err }
}
@ -91,6 +91,7 @@ export async function disableAllCrons(appId: any) {
export async function disableCron(jobId: string, jobKey: string) {
await queue.removeRepeatableByKey(jobKey)
await queue.removeJobs(jobId)
console.log(`jobId=${jobId} disabled`)
}
export async function clearMetadata() {
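
For anyone grepping these logs, the jobMessage helper earlier in this file produces a single structured line per event. The snippet below (all ids made up) shows the format and is illustrative only.

const exampleJob: any = {
  id: 42,
  data: {
    event: { appId: "app_dev_6f7a" },
    automation: {
      _id: "au_2c9d",
      definition: { trigger: { event: "row:save" } },
    },
  },
}
console.log(jobMessage(exampleJob, "running"))
// => app=app_dev_6f7a automation=au_2c9d jobId=42 trigger=row:save : running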

View file

@ -103,11 +103,9 @@ class Table {
exports.init = endpoint => {
let AWS = require("aws-sdk")
AWS.config.update({
region: AWS_REGION,
})
let docClientParams = {
correctClockSkew: true,
region: AWS_REGION,
}
if (endpoint) {
docClientParams.endpoint = endpoint

View file

@ -13,10 +13,7 @@ function isJest() {
}
function isDev() {
return (
process.env.NODE_ENV !== "production" &&
process.env.BUDIBASE_ENVIRONMENT !== "production"
)
return process.env.NODE_ENV !== "production"
}
function isCypress() {

View file

@ -1,8 +1,10 @@
const { rowEmission, tableEmission } = require("./utils")
const mainEmitter = require("./index")
const env = require("../environment")
// max number of automations that can chain on top of each other
const MAX_AUTOMATION_CHAIN = 5
// TODO: in future make this configurable at the automation level
const MAX_AUTOMATION_CHAIN = env.SELF_HOSTED ? 5 : 0
/**
* Special emitter which takes the count of automation runs which have occurred and blocks an

View file

@ -12,7 +12,8 @@ interface DynamoDBConfig {
region: string
accessKeyId: string
secretAccessKey: string
endpoint: string
endpoint?: string
currentClockSkew?: boolean
}
const SCHEMA: Integration = {
@ -131,31 +132,20 @@ class DynamoDBIntegration implements IntegrationBase {
constructor(config: DynamoDBConfig) {
this.config = config
if (this.config.endpoint && !this.config.endpoint.includes("localhost")) {
this.connect()
// The user is using a local DynamoDB endpoint, so don't auth with the remote service
if (this.config?.endpoint?.includes("localhost")) {
// @ts-ignore
this.config = {}
}
let options = {
correctClockSkew: true,
region: this.config.region || AWS_REGION,
endpoint: config.endpoint ? config.endpoint : undefined,
this.config = {
...this.config,
currentClockSkew: true,
region: config.region || AWS_REGION,
endpoint: config.endpoint || undefined,
}
this.client = new AWS.DynamoDB.DocumentClient(options)
}
end() {
this.disconnect()
}
connect() {
AWS.config.update(this.config)
}
disconnect() {
AWS.config.update({
secretAccessKey: undefined,
accessKeyId: undefined,
region: AWS_REGION,
})
this.client = new AWS.DynamoDB.DocumentClient(this.config)
}
async create(query: { table: string; json: object }) {
@ -196,7 +186,7 @@ class DynamoDBIntegration implements IntegrationBase {
const params = {
TableName: query.table,
}
return new AWS.DynamoDB().describeTable(params).promise()
return new AWS.DynamoDB(this.config).describeTable(params).promise()
}
async get(query: { table: string; json: object }) {

View file

@ -20,6 +20,13 @@ interface MongoDBConfig {
db: string
}
interface MongoDBQuery {
json: object | string
extra: {
[key: string]: string
}
}
const SCHEMA: Integration = {
docs: "https://github.com/mongodb/node-mongodb-native",
friendlyName: "MongoDB",
@ -92,8 +99,8 @@ class MongoIntegration implements IntegrationBase {
json[field] = self.createObjectIds(json[field])
}
if (
(field === "_id" || field?.startsWith("$")) &&
typeof json[field] === "string"
typeof json[field] === "string" &&
json[field].toLowerCase().startsWith("objectid")
) {
const id = json[field].match(/(?<=objectid\(['"]).*(?=['"]\))/gi)?.[0]
if (id) {
@ -152,7 +159,7 @@ class MongoIntegration implements IntegrationBase {
}
}
async create(query: { json: object; extra: { [key: string]: string } }) {
async create(query: MongoDBQuery) {
try {
await this.connect()
const db = this.client.db(this.config.db)
@ -182,7 +189,7 @@ class MongoIntegration implements IntegrationBase {
}
}
async read(query: { json: object; extra: { [key: string]: string } }) {
async read(query: MongoDBQuery) {
try {
await this.connect()
const db = this.client.db(this.config.db)
@ -231,7 +238,7 @@ class MongoIntegration implements IntegrationBase {
}
}
async update(query: { json: object; extra: { [key: string]: string } }) {
async update(query: MongoDBQuery) {
try {
await this.connect()
const db = this.client.db(this.config.db)
@ -275,7 +282,7 @@ class MongoIntegration implements IntegrationBase {
}
}
async delete(query: { json: object; extra: { [key: string]: string } }) {
async delete(query: MongoDBQuery) {
try {
await this.connect()
const db = this.client.db(this.config.db)
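
The tightened check earlier in this file means only string values that actually carry an ObjectId('...') wrapper are converted, whatever the field is called. A rough illustration of a query the integration would now transform (the extra keys are illustrative):

const exampleQuery = {
  json: {
    filter: {
      _id: "ObjectId('ACBD12345678ABCD12345678')",   // converted to a real ObjectID
      owner: "ObjectId('BBBB12345678ABCD12345678')", // converted, any field name works
      name: "plain string",                          // left untouched
    },
  },
  extra: { collection: "people", actionType: "findOne" }, // keys illustrative
}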

View file

@ -100,4 +100,52 @@ describe("DynamoDB Integration", () => {
Name: "John"
})
})
it("configures the dynamoDB constructor based on an empty endpoint parameter", async () => {
const config = {
region: "us-east-1",
accessKeyId: "test",
secretAccessKeyId: "test"
}
const integration = new DynamoDBIntegration.integration(config)
expect(integration.config).toEqual({
currentClockSkew: true,
...config
})
})
it("configures the dynamoDB constructor based on a localhost endpoint parameter", async () => {
const config = {
region: "us-east-1",
accessKeyId: "test",
secretAccessKeyId: "test",
endpoint: "localhost:8080"
}
const integration = new DynamoDBIntegration.integration(config)
expect(integration.config).toEqual({
region: "us-east-1",
currentClockSkew: true,
endpoint: "localhost:8080"
})
})
it("configures the dynamoDB constructor based on a remote endpoint parameter", async () => {
const config = {
region: "us-east-1",
accessKeyId: "test",
secretAccessKeyId: "test",
endpoint: "dynamodb.aws.foo.net"
}
const integration = new DynamoDBIntegration.integration(config)
expect(integration.config).toEqual({
currentClockSkew: true,
...config
})
})
})

View file

@ -103,16 +103,16 @@ describe("MongoDB Integration", () => {
restore()
})
it("creates ObjectIds if the _id fields contains a match on ObjectId", async () => {
it("creates ObjectIds if the field contains a match on ObjectId", async () => {
const query = {
json: {
filter: {
_id: "ObjectId('ACBD12345678ABCD12345678')",
name: "ObjectId('name')"
name: "ObjectId('BBBB12345678ABCD12345678')"
},
update: {
_id: "ObjectId('FFFF12345678ABCD12345678')",
name: "ObjectId('updatedName')",
name: "ObjectId('CCCC12345678ABCD12345678')",
},
options: {
upsert: false,
@ -126,11 +126,11 @@ describe("MongoDB Integration", () => {
const args = config.integration.client.updateOne.mock.calls[0]
expect(args[0]).toEqual({
_id: mongo.ObjectID.createFromHexString("ACBD12345678ABCD12345678"),
name: "ObjectId('name')",
name: mongo.ObjectID.createFromHexString("BBBB12345678ABCD12345678"),
})
expect(args[1]).toEqual({
_id: mongo.ObjectID.createFromHexString("FFFF12345678ABCD12345678"),
name: "ObjectId('updatedName')",
name: mongo.ObjectID.createFromHexString("CCCC12345678ABCD12345678"),
})
expect(args[2]).toEqual({
upsert: false

View file

@ -13,10 +13,13 @@ function validate(schema, property) {
params = ctx.request[property]
}
schema = schema.append({
createdAt: Joi.any().optional(),
updatedAt: Joi.any().optional(),
})
// not all schemas have the append property e.g. array schemas
if (schema.append) {
schema = schema.append({
createdAt: Joi.any().optional(),
updatedAt: Joi.any().optional(),
})
}
const { error } = schema.validate(params)
if (error) {
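
The guard matters because, unlike object schemas, Joi array schemas (used for request bodies that are plain arrays) do not expose append(). A minimal sketch under that assumption:

const Joi = require("joi")

const objectSchema = Joi.object({ name: Joi.string().required() })
console.log(typeof objectSchema.append) // "function" - timestamps can be appended

const arraySchema = Joi.array().items(Joi.string())
console.log(typeof arraySchema.append)  // "undefined" - so the middleware skips appending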

View file

@ -133,27 +133,34 @@ class Orchestrator {
return metadata
}
async stopCron(reason: string) {
if (!this._repeat) {
return
}
logWarn(
`CRON disabled reason=${reason} - ${this._appId}/${this._automation._id}`
)
const automation = this._automation
const trigger = automation.definition.trigger
await disableCron(this._repeat?.jobId, this._repeat?.jobKey)
this.updateExecutionOutput(
trigger.id,
trigger.stepId,
{},
{
status: AutomationStatus.STOPPED_ERROR,
success: false,
}
)
await storeLog(automation, this.executionOutput)
}
async checkIfShouldStop(metadata: AutomationMetadata): Promise<boolean> {
if (!metadata.errorCount || !this._repeat) {
return false
}
const automation = this._automation
const trigger = automation.definition.trigger
if (metadata.errorCount >= MAX_AUTOMATION_RECURRING_ERRORS) {
logWarn(
`CRON disabled due to errors - ${this._appId}/${this._automation._id}`
)
await disableCron(this._repeat?.jobId, this._repeat?.jobKey)
this.updateExecutionOutput(
trigger.id,
trigger.stepId,
{},
{
status: AutomationStatus.STOPPED_ERROR,
success: false,
}
)
await storeLog(automation, this.executionOutput)
await this.stopCron("errors")
return true
}
return false
@ -465,3 +472,15 @@ export function execute(input: AutomationEvent, callback: WorkerCallback) {
}
})
}
export const removeStalled = async (input: AutomationEvent) => {
const appId = input.data.event.appId
await doInAppContext(appId, async () => {
const automationOrchestrator = new Orchestrator(
input.data.automation,
input.data.event,
input.opts
)
await automationOrchestrator.stopCron("stalled")
})
}

View file

@ -106,5 +106,6 @@ export class Thread {
static async shutdown() {
await Thread.stopThreads()
console.log("Threads shutdown")
}
}

View file

@ -291,13 +291,18 @@ exports.getComponentLibraryManifest = async library => {
}
let resp
let path
try {
// Try to load the manifest from the new file location
const path = join(appId, filename)
path = join(appId, filename)
resp = await retrieve(ObjectStoreBuckets.APPS, path)
} catch (error) {
console.error(
`component-manifest-objectstore=failed appId=${appId} path=${path}`,
error
)
// Fall back to loading it from the old location for old apps
const path = join(appId, "node_modules", library, "package", filename)
path = join(appId, "node_modules", library, "package", filename)
resp = await retrieve(ObjectStoreBuckets.APPS, path)
}
if (typeof resp !== "string") {

View file

@ -113,6 +113,10 @@ class InMemoryQueue {
async getJob() {
return {}
}
on() {
// do nothing
}
}
module.exports = InMemoryQueue

View file

@ -20,6 +20,7 @@ exports.shutdown = async () => {
if (devAppClient) await devAppClient.finish()
if (debounceClient) await debounceClient.finish()
if (flagClient) await flagClient.finish()
console.log("Redis shutdown")
}
exports.doesUserHaveLock = async (devAppId, user) => {

View file

@ -1094,12 +1094,12 @@
resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==
"@budibase/backend-core@1.2.44-alpha.1":
version "1.2.44-alpha.1"
resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.2.44-alpha.1.tgz#255c8550302ea0d4b10b231519e12021d008ad33"
integrity sha512-9ceN00ioBD5iUc2Wbcs9Pyvh5gaSfkg1v4Qn9QdRwAG36eo3XVK8kiYWF0q0hkzgcTViJw6xq/1jqMa2InKv6g==
"@budibase/backend-core@1.2.58-alpha.3":
version "1.2.58-alpha.3"
resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.2.58-alpha.3.tgz#705986aaf2d97d15fab64ded044d30530773b541"
integrity sha512-yliV7eLHgjprjYdLhB2Tk/0ibEk5O17Mez5Nn5AqySAwsz+LwfHquaJZ8gT6uoRmgdMVLiEipBmO7cd+XQpR5A==
dependencies:
"@budibase/types" "1.2.44-alpha.1"
"@budibase/types" "1.2.58-alpha.3"
"@techpass/passport-openidconnect" "0.3.2"
aws-sdk "2.1030.0"
bcrypt "5.0.1"
@ -1178,13 +1178,13 @@
svelte-flatpickr "^3.2.3"
svelte-portal "^1.0.0"
"@budibase/pro@1.2.44-alpha.1":
version "1.2.44-alpha.1"
resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-1.2.44-alpha.1.tgz#d9e919b1df96f3d3d5cf37753721d41348279f59"
integrity sha512-KaclS4qv4+hWenvuVwF2HxcPWkrDyR7IVyLKLMVUqCnaMoXMvEOe9KuQ+lXLbsLIBsCsCFkzE/WHsdmoaGqa0g==
"@budibase/pro@1.2.58-alpha.3":
version "1.2.58-alpha.3"
resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-1.2.58-alpha.3.tgz#ae905b19db0284265a6089c4a420f89e8e5e352e"
integrity sha512-jyhv5LdP28nnuKkjoHCAfowo/gYM2Bs9gRHXVpaH2+dfZP0+0jsSHekUoE/gBDpsslCcDoh/XuqriCyvIfG7Dw==
dependencies:
"@budibase/backend-core" "1.2.44-alpha.1"
"@budibase/types" "1.2.44-alpha.1"
"@budibase/backend-core" "1.2.58-alpha.3"
"@budibase/types" "1.2.58-alpha.3"
"@koa/router" "8.0.8"
joi "17.6.0"
node-fetch "^2.6.1"
@ -1207,10 +1207,10 @@
svelte-apexcharts "^1.0.2"
svelte-flatpickr "^3.1.0"
"@budibase/types@1.2.44-alpha.1":
version "1.2.44-alpha.1"
resolved "https://registry.yarnpkg.com/@budibase/types/-/types-1.2.44-alpha.1.tgz#6150d4494326438b46dd7354e5dd0fc7fdbfcc49"
integrity sha512-1eLGH5ym4oYbuRZRkD0slNqHlZ1EV0JHLkmoA47i7L/u8E8QRIyDIHyeZKC8ben33oQS4NX3IebD7zZ2JFhgcA==
"@budibase/types@1.2.58-alpha.3":
version "1.2.58-alpha.3"
resolved "https://registry.yarnpkg.com/@budibase/types/-/types-1.2.58-alpha.3.tgz#0a96a949b8b989723abf768e3494fe2040d3b543"
integrity sha512-jRC1zTxLi4ozEIlzRMyo1+L+WkKyU2r8m1W7FRhvUQn4+Alk4ynB6TUaSfWavDOvtg4hWg5TVzJfpVOfVjeg5w==
"@bull-board/api@3.7.0":
version "3.7.0"
@ -1304,6 +1304,20 @@
resolved "https://registry.yarnpkg.com/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz#1d572bfbbe14b7704e0ba0f39b74815b84870d70"
integrity sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==
"@elastic/ecs-helpers@^1.1.0":
version "1.1.0"
resolved "https://registry.yarnpkg.com/@elastic/ecs-helpers/-/ecs-helpers-1.1.0.tgz#ee7e6f870f75a2222c5d7179b36a628f1db4779e"
integrity sha512-MDLb2aFeGjg46O5mLpdCzT5yOUDnXToJSrco2ShqGIXxNJaM8uJjX+4nd+hRYV4Vex8YJyDtOFEVBldQct6ndg==
dependencies:
fast-json-stringify "^2.4.1"
"@elastic/ecs-pino-format@^1.2.0":
version "1.3.0"
resolved "https://registry.yarnpkg.com/@elastic/ecs-pino-format/-/ecs-pino-format-1.3.0.tgz#6e349a7da342b3c370d15361ba7f850bc2f783bc"
integrity sha512-U8D57gPECYoRCcwREsrXKBtqeyFFF/KAwHi4rG1u/oQhAg91Kzw8ZtUQJXD/DMDieLOqtbItFr2FRBWI3t3wog==
dependencies:
"@elastic/ecs-helpers" "^1.1.0"
"@elastic/elasticsearch@7.10.0":
version "7.10.0"
resolved "https://registry.yarnpkg.com/@elastic/elasticsearch/-/elasticsearch-7.10.0.tgz#da105a9c1f14146f9f2cab4e7026cb7949121b8d"
@ -2077,7 +2091,7 @@
"@nodelib/fs.scandir" "2.1.5"
fastq "^1.6.0"
"@opentelemetry/api@^1.0.1":
"@opentelemetry/api@^1.0.1", "@opentelemetry/api@^1.1.0":
version "1.1.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.1.0.tgz#563539048255bbe1a5f4f586a4a10a1bb737f44a"
integrity sha512-hf+3bwuBwtXsugA2ULBc95qxrOqP2pOekLz34BJhcAKawt94vfeNyUKpYc0lZQ/3sCP6LqRa7UAdHA7i5UODzQ==
@ -3357,6 +3371,11 @@ adal-node@^0.2.2:
uuid "^3.1.0"
xpath.js "~1.1.0"
after-all-results@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/after-all-results/-/after-all-results-2.0.0.tgz#6ac2fc202b500f88da8f4f5530cfa100f4c6a2d0"
integrity sha512-2zHEyuhSJOuCrmas9YV0YL/MFCWLxe1dS6k/ENhgYrb/JqyMnadLN4iIAc9kkZrbElMDyyAGH/0J18OPErOWLg==
agent-base@6, agent-base@^6.0.2:
version "6.0.2"
resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77"
@ -3364,6 +3383,15 @@ agent-base@6, agent-base@^6.0.2:
dependencies:
debug "4"
agentkeepalive@^4.2.1:
version "4.2.1"
resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.2.1.tgz#a7975cbb9f83b367f06c90cc51ff28fe7d499717"
integrity sha512-Zn4cw2NEqd+9fiSVWMscnjyQ1a8Yfoc5oBajLeo5w+YBHgDUcEBY2hS4YpTz6iN5f/2zQiktcuM6tS8x1p9dpA==
dependencies:
debug "^4.1.0"
depd "^1.1.2"
humanize-ms "^1.2.1"
airtable@0.10.1:
version "0.10.1"
resolved "https://registry.yarnpkg.com/airtable/-/airtable-0.10.1.tgz#0b311002bb44b39f19bf7c4bd2d47d75c733bf87"
@ -3387,7 +3415,7 @@ ajv-keywords@^3.5.2:
resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d"
integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==
ajv@^6.10.0, ajv@^6.10.2, ajv@^6.12.3, ajv@^6.12.5:
ajv@^6.10.0, ajv@^6.10.2, ajv@^6.11.0, ajv@^6.12.3, ajv@^6.12.5:
version "6.12.6"
resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4"
integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==
@ -3696,11 +3724,30 @@ astral-regex@^1.0.0:
resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-1.0.0.tgz#6c8c3fb827dd43ee3918f27b82782ab7658a6fd9"
integrity sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==
async-cache@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/async-cache/-/async-cache-1.1.0.tgz#4a9a5a89d065ec5d8e5254bd9ee96ba76c532b5a"
integrity sha512-YDQc4vBn5NFhY6g6HhVshyi3Fy9+SQ5ePnE7JLDJn1DoL+i7ER+vMwtTNOYk9leZkYMnOwpBCWqyLDPw8Aig8g==
dependencies:
lru-cache "^4.0.0"
async-limiter@~1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd"
integrity sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==
async-value-promise@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/async-value-promise/-/async-value-promise-1.1.1.tgz#68957819e3eace804f3b4b69477e2bd276c15378"
integrity sha512-c2RFDKjJle1rHa0YxN9Ysu97/QBu3Wa+NOejJxsX+1qVDJrkD3JL/GN1B3gaILAEXJXbu/4Z1lcoCHFESe/APA==
dependencies:
async-value "^1.2.2"
async-value@^1.2.2:
version "1.2.2"
resolved "https://registry.yarnpkg.com/async-value/-/async-value-1.2.2.tgz#84517a1e7cb6b1a5b5e181fa31be10437b7fb125"
integrity sha512-8rwtYe32OAS1W9CTwvknoyts+mc3ta8N7Pi0h7AjkMaKvsFbr39K+gEfZ7Z81aPXQ1sK5M23lgLy1QfZpcpadQ==
async@^2.6.3:
version "2.6.4"
resolved "https://registry.yarnpkg.com/async/-/async-2.6.4.tgz#706b7ff6084664cd7eae713f6f965433b5504221"
@ -3991,6 +4038,13 @@ base@^0.11.1:
mixin-deep "^1.2.0"
pascalcase "^0.1.1"
basic-auth@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/basic-auth/-/basic-auth-2.0.1.tgz#b998279bf47ce38344b4f3cf916d4679bbf51e3a"
integrity sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==
dependencies:
safe-buffer "5.1.2"
bcrypt-pbkdf@^1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e"
@ -4036,6 +4090,11 @@ binary-extensions@^2.0.0:
resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d"
integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==
binary-search@^1.3.3:
version "1.3.6"
resolved "https://registry.yarnpkg.com/binary-search/-/binary-search-1.3.6.tgz#e32426016a0c5092f0f3598836a1c7da3560565c"
integrity sha512-nbE1WxOTTrUWIfsfZ4aHGYu5DOuNkbxGokjV6Z2kxfJK3uaAb8zNK1muzOeipoLHZjInT4Br88BHpzevc681xA==
binascii@0.0.2:
version "0.0.2"
resolved "https://registry.yarnpkg.com/binascii/-/binascii-0.0.2.tgz#a7f8a8801dbccf8b1756b743daa0fee9e2d9e0ee"
@ -4157,6 +4216,13 @@ braces@^3.0.2, braces@~3.0.2:
dependencies:
fill-range "^7.0.1"
breadth-filter@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/breadth-filter/-/breadth-filter-2.0.0.tgz#7b3f8737f46ba1946aec19355ecf5df2bdb7e47c"
integrity sha512-thQShDXnFWSk2oVBixRCyrWsFoV5tfOpWKHmxwafHQDNxCfDBk539utpvytNjmlFrTMqz41poLwJvA1MW3z0MQ==
dependencies:
object.entries "^1.0.4"
browser-process-hrtime@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626"
@ -4755,6 +4821,11 @@ console-control-strings@^1.0.0, console-control-strings@^1.1.0:
resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e"
integrity sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==
console-log-level@^1.4.1:
version "1.4.1"
resolved "https://registry.yarnpkg.com/console-log-level/-/console-log-level-1.4.1.tgz#9c5a6bb9ef1ef65b05aba83028b0ff894cdf630a"
integrity sha512-VZzbIORbP+PPcN/gg3DXClTLPLg5Slwd5fL2MIc+o1qZ4BXBvWyc6QxPk6T/Mkr6IVjRpoAGf32XxP3ZWMVRcQ==
consolidate@^0.16.0:
version "0.16.0"
resolved "https://registry.yarnpkg.com/consolidate/-/consolidate-0.16.0.tgz#a11864768930f2f19431660a65906668f5fbdc16"
@ -4762,6 +4833,11 @@ consolidate@^0.16.0:
dependencies:
bluebird "^3.7.2"
container-info@^1.0.1:
version "1.1.0"
resolved "https://registry.yarnpkg.com/container-info/-/container-info-1.1.0.tgz#6fcb94e93eacd397c6316ca2834491ede44e55ee"
integrity sha512-eD2zLAmxGS2kmL4f1jY8BdOqnmpL6X70kvzTBW/9FIQnxoxiBJ4htMsTmtPLPWRs7NHYFvqKQ1VtppV08mdsQA==
content-disposition@^0.5.2, content-disposition@~0.5.2:
version "0.5.4"
resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe"
@ -4786,6 +4862,11 @@ cookie@^0.4.1:
resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.2.tgz#0e41f24de5ecf317947c82fc789e06a884824432"
integrity sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA==
cookie@^0.5.0:
version "0.5.0"
resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b"
integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==
cookiejar@^2.1.0:
version "2.1.3"
resolved "https://registry.yarnpkg.com/cookiejar/-/cookiejar-2.1.3.tgz#fc7a6216e408e74414b90230050842dacda75acc"
@ -5408,6 +5489,60 @@ ejs@^3.1.6:
dependencies:
jake "^10.8.5"
elastic-apm-http-client@11.0.1:
version "11.0.1"
resolved "https://registry.yarnpkg.com/elastic-apm-http-client/-/elastic-apm-http-client-11.0.1.tgz#15dbe99d56d62b3f732d1bd2b51bef6094b78801"
integrity sha512-5AOWlhs2WlZpI+DfgGqY/8Rk7KF8WeevaO8R961eBylavU6GWhLRNiJncohn5jsvrqhmeT19azBvy/oYRN7bJw==
dependencies:
agentkeepalive "^4.2.1"
breadth-filter "^2.0.0"
container-info "^1.0.1"
end-of-stream "^1.4.4"
fast-safe-stringify "^2.0.7"
fast-stream-to-buffer "^1.0.0"
object-filter-sequence "^1.0.0"
readable-stream "^3.4.0"
semver "^6.3.0"
stream-chopper "^3.0.1"
elastic-apm-node@3.38.0:
version "3.38.0"
resolved "https://registry.yarnpkg.com/elastic-apm-node/-/elastic-apm-node-3.38.0.tgz#4d0dc9279c0e23e09b3b30aa4a9f9aeccfa59cc0"
integrity sha512-/d6YuWFtsfkVRpFD0YJ2rYJVq0rI0PGqG/C+cW1JpMZ4IOU8dA9xzUkxbT0G3B8gpHNug07Xo6bJdQa2oUaFbQ==
dependencies:
"@elastic/ecs-pino-format" "^1.2.0"
"@opentelemetry/api" "^1.1.0"
after-all-results "^2.0.0"
async-cache "^1.1.0"
async-value-promise "^1.1.1"
basic-auth "^2.0.1"
cookie "^0.5.0"
core-util-is "^1.0.2"
elastic-apm-http-client "11.0.1"
end-of-stream "^1.4.4"
error-callsites "^2.0.4"
error-stack-parser "^2.0.6"
escape-string-regexp "^4.0.0"
fast-safe-stringify "^2.0.7"
http-headers "^3.0.2"
is-native "^1.0.1"
lru-cache "^6.0.0"
measured-reporting "^1.51.1"
monitor-event-loop-delay "^1.0.0"
object-filter-sequence "^1.0.0"
object-identity-map "^1.0.2"
original-url "^1.2.3"
pino "^6.11.2"
relative-microtime "^2.0.0"
require-in-the-middle "^5.0.3"
semver "^6.3.0"
set-cookie-serde "^1.0.0"
shallow-clone-shim "^2.0.0"
source-map "^0.8.0-beta.0"
sql-summary "^1.0.1"
traverse "^0.6.6"
unicode-byte-truncate "^1.0.0"
electron-to-chromium@^1.4.147:
version "1.4.150"
resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.150.tgz#89f0e12505462d5df7e56c5b91aff7e1dfdd33ec"
@ -5460,7 +5595,7 @@ encoding-down@^6.3.0:
level-codec "^9.0.0"
level-errors "^2.0.0"
end-of-stream@^1.0.0, end-of-stream@^1.1.0, end-of-stream@^1.4.1:
end-of-stream@^1.0.0, end-of-stream@^1.1.0, end-of-stream@^1.4.1, end-of-stream@^1.4.4:
version "1.4.4"
resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0"
integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==
@ -5499,6 +5634,11 @@ errno@~0.1.1, errno@~0.1.7:
dependencies:
prr "~1.0.1"
error-callsites@^2.0.4:
version "2.0.4"
resolved "https://registry.yarnpkg.com/error-callsites/-/error-callsites-2.0.4.tgz#44f09e6a201e9a1603ead81eacac5ba258fca76e"
integrity sha512-V877Ch4FC4FN178fDK1fsrHN4I1YQIBdtjKrHh3BUHMnh3SMvwUVrqkaOgDpUuevgSNna0RBq6Ox9SGlxYrigA==
error-ex@^1.3.1:
version "1.3.2"
resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf"
@ -5511,7 +5651,14 @@ error-inject@^1.0.0:
resolved "https://registry.yarnpkg.com/error-inject/-/error-inject-1.0.0.tgz#e2b3d91b54aed672f309d950d154850fa11d4f37"
integrity sha512-JM8N6PytDbmIYm1IhPWlo8vr3NtfjhDY/1MhD/a5b/aad/USE8a0+NsqE9d5n+GVGmuNkPQWm4bFQWv18d8tMg==
es-abstract@^1.17.5, es-abstract@^1.19.0, es-abstract@^1.19.2, es-abstract@^1.19.5, es-abstract@^1.20.0, es-abstract@^1.20.1:
error-stack-parser@^2.0.6:
version "2.1.4"
resolved "https://registry.yarnpkg.com/error-stack-parser/-/error-stack-parser-2.1.4.tgz#229cb01cdbfa84440bfa91876285b94680188286"
integrity sha512-Sk5V6wVazPhq5MhpO+AUxJn5x7XSXGl1R93Vn7i+zS15KDVxQijejNCrz8340/2bgLBjR9GtEG8ZVKONDjcqGQ==
dependencies:
stackframe "^1.3.4"
es-abstract@^1.17.5, es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19.5, es-abstract@^1.20.0, es-abstract@^1.20.1:
version "1.20.1"
resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.20.1.tgz#027292cd6ef44bd12b1913b828116f54787d1814"
integrity sha512-WEm2oBhfoI2sImeM4OF2zE2V3BYdSF+KnSi9Sidz51fQHd7+JuF8Xgcj9/0o+OWeIeIS/MiuNnlruQrJf16GQA==
@ -6137,6 +6284,16 @@ fast-json-stable-stringify@2.x, fast-json-stable-stringify@^2.0.0:
resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633"
integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==
fast-json-stringify@^2.4.1:
version "2.7.13"
resolved "https://registry.yarnpkg.com/fast-json-stringify/-/fast-json-stringify-2.7.13.tgz#277aa86c2acba4d9851bd6108ed657aa327ed8c0"
integrity sha512-ar+hQ4+OIurUGjSJD1anvYSDcUflywhKjfxnsW4TBTD7+u0tJufv6DKRWoQk3vI6YBOWMoz0TQtfbe7dxbQmvA==
dependencies:
ajv "^6.11.0"
deepmerge "^4.2.2"
rfdc "^1.2.0"
string-similarity "^4.0.1"
fast-levenshtein@~2.0.6:
version "2.0.6"
resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917"
@ -6152,6 +6309,13 @@ fast-safe-stringify@^2.0.7, fast-safe-stringify@^2.0.8:
resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz#c406a83b6e70d9e35ce3b30a81141df30aeba884"
integrity sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==
fast-stream-to-buffer@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/fast-stream-to-buffer/-/fast-stream-to-buffer-1.0.0.tgz#793340cc753e7ec9c7fb6d57a53a0b911cb0f588"
integrity sha512-bI/544WUQlD2iXBibQbOMSmG07Hay7YrpXlKaeGTPT7H7pC0eitt3usak5vUwEvCGK/O7rUAM3iyQValGU22TQ==
dependencies:
end-of-stream "^1.4.1"
fast-text-encoding@^1.0.0, fast-text-encoding@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/fast-text-encoding/-/fast-text-encoding-1.0.3.tgz#ec02ac8e01ab8a319af182dae2681213cfe9ce53"
@ -6439,6 +6603,11 @@ formidable@^1.1.1, formidable@^1.2.0:
resolved "https://registry.yarnpkg.com/formidable/-/formidable-1.2.6.tgz#d2a51d60162bbc9b4a055d8457a7c75315d1a168"
integrity sha512-KcpbcpuLNOwrEjnbpMC0gS+X8ciDoZE1kkqzat4a8vrprf+s9pKNQ/QIwWfbfs4ltgmFl3MD177SNTkve3BwGQ==
forwarded-parse@^2.1.0:
version "2.1.2"
resolved "https://registry.yarnpkg.com/forwarded-parse/-/forwarded-parse-2.1.2.tgz#08511eddaaa2ddfd56ba11138eee7df117a09325"
integrity sha512-alTFZZQDKMporBH77856pXgzhEzaUVmLCDk+egLgIgHst3Tpndzz8MnKe+GzRJRfvVdn69HhpW7cmXzvtLvJAw==
fragment-cache@^0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19"
@ -7184,6 +7353,13 @@ http-errors@~1.6.2:
setprototypeof "1.1.0"
statuses ">= 1.4.0 < 2"
http-headers@^3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/http-headers/-/http-headers-3.0.2.tgz#5147771292f0b39d6778d930a3a59a76fc7ef44d"
integrity sha512-87E1I+2Wg4dxxz4rcxElo3dxO/w1ZtgL1yA0Sb6vH3qU16vRKq1NjWQv9SCY3ly2OQROcoxHZOUpmelS+k6wOw==
dependencies:
next-line "^1.1.0"
http-proxy-agent@^4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a"
@ -7215,6 +7391,13 @@ human-signals@^2.1.0:
resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0"
integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==
humanize-ms@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/humanize-ms/-/humanize-ms-1.2.1.tgz#c46e3159a293f6b896da29316d8b6fe8bb79bbed"
integrity sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==
dependencies:
ms "^2.0.0"
iconv-lite@0.4.24, iconv-lite@^0.4.24, iconv-lite@^0.4.5:
version "0.4.24"
resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b"
@ -7530,6 +7713,13 @@ is-core-module@^2.8.1:
dependencies:
has "^1.0.3"
is-core-module@^2.9.0:
version "2.10.0"
resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.10.0.tgz#9012ede0a91c69587e647514e1d5277019e728ed"
integrity sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg==
dependencies:
has "^1.0.3"
is-data-descriptor@^0.1.4:
version "0.1.4"
resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56"
@ -7591,6 +7781,11 @@ is-extglob@^2.1.1:
resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2"
integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==
is-finite@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.1.0.tgz#904135c77fb42c0641d6aa1bcdbc4daa8da082f3"
integrity sha512-cdyMtqX/BOqqNBBiKlIVkytNHm49MtMlYyn1zxzvJKWmFMlGzm+ry5BBfYyeY9YmNKbRSo/o7OX9w9ale0wg3w==
is-fullwidth-code-point@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f"
@ -7633,6 +7828,21 @@ is-installed-globally@^0.4.0:
global-dirs "^3.0.0"
is-path-inside "^3.0.2"
is-integer@^1.0.6:
version "1.0.7"
resolved "https://registry.yarnpkg.com/is-integer/-/is-integer-1.0.7.tgz#6bde81aacddf78b659b6629d629cadc51a886d5c"
integrity sha512-RPQc/s9yBHSvpi+hs9dYiJ2cuFeU6x3TyyIp8O2H6SKEltIvJOzRj9ToyvcStDvPR/pS4rxgr1oBFajQjZ2Szg==
dependencies:
is-finite "^1.0.0"
is-native@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/is-native/-/is-native-1.0.1.tgz#cd18cc162e8450d683b5babe79ac99c145449675"
integrity sha512-I4z9hx+4u3/zyvpvGtAR+n7SodJugE+i2jiS8yfq1A9QAZY0KldLQz0SBptLC9ti7kBlpghWUwTKE2BA62eCcw==
dependencies:
is-nil "^1.0.0"
to-source-code "^1.0.0"
is-natural-number@^4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/is-natural-number/-/is-natural-number-4.0.1.tgz#ab9d76e1db4ced51e35de0c72ebecf09f734cde8"
@ -7643,6 +7853,11 @@ is-negative-zero@^2.0.2:
resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150"
integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==
is-nil@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/is-nil/-/is-nil-1.0.1.tgz#2daba29e0b585063875e7b539d071f5b15937969"
integrity sha512-m2Rm8PhUFDNNhgvwZJjJG74a9h5CHU0fkA8WT+WGlCjyEbZ2jPwgb+ZxHu4np284EqNVyOsgppReK4qy/TwEwg==
is-npm@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-5.0.0.tgz#43e8d65cc56e1b67f8d47262cf667099193f45a8"
@ -9648,7 +9863,7 @@ lowercase-keys@^2.0.0:
resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479"
integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==
lru-cache@^4.0.1, lru-cache@^4.1.3, lru-cache@^4.1.5:
lru-cache@^4.0.0, lru-cache@^4.0.1, lru-cache@^4.1.3, lru-cache@^4.1.5:
version "4.1.5"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd"
integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==
@ -9731,6 +9946,11 @@ map-visit@^1.0.0:
dependencies:
object-visit "^1.0.0"
mapcap@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/mapcap/-/mapcap-1.0.0.tgz#e8e29d04a160eaf8c92ec4bcbd2c5d07ed037e5a"
integrity sha512-KcNlZSlFPx+r1jYZmxEbTVymG+dIctf10WmWkuhrhrblM+KMoF77HelwihL5cxYlORye79KoR4IlOOk99lUJ0g==
markdown-it@^12.2.0:
version "12.3.2"
resolved "https://registry.yarnpkg.com/markdown-it/-/markdown-it-12.3.2.tgz#bf92ac92283fe983fe4de8ff8abfb5ad72cd0c90"
@ -9763,6 +9983,24 @@ mdurl@^1.0.1:
resolved "https://registry.yarnpkg.com/mdurl/-/mdurl-1.0.1.tgz#fe85b2ec75a59037f2adfec100fd6c601761152e"
integrity sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==
measured-core@^1.51.1:
version "1.51.1"
resolved "https://registry.yarnpkg.com/measured-core/-/measured-core-1.51.1.tgz#98989705c00bfb0d8a20e665a9f8d6e246a40518"
integrity sha512-DZQP9SEwdqqYRvT2slMK81D/7xwdxXosZZBtLVfPSo6y5P672FBTbzHVdN4IQyUkUpcVOR9pIvtUy5Ryl7NKyg==
dependencies:
binary-search "^1.3.3"
optional-js "^2.0.0"
measured-reporting@^1.51.1:
version "1.51.1"
resolved "https://registry.yarnpkg.com/measured-reporting/-/measured-reporting-1.51.1.tgz#6aeb209ad55edf3940e8afa75c8f97f541216b31"
integrity sha512-JCt+2u6XT1I5lG3SuYqywE0e62DJuAzBcfMzWGUhIYtPQV2Vm4HiYt/durqmzsAbZV181CEs+o/jMKWJKkYIWw==
dependencies:
console-log-level "^1.4.1"
mapcap "^1.0.0"
measured-core "^1.51.1"
optional-js "^2.0.0"
media-typer@0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748"
@ -9960,19 +10198,19 @@ mock-require@^3.0.3:
get-caller-file "^1.0.2"
normalize-path "^2.1.1"
module-details-from-path@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/module-details-from-path/-/module-details-from-path-1.0.3.tgz#114c949673e2a8a35e9d35788527aa37b679da2b"
integrity sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A==
moment-timezone@^0.5.15:
version "0.5.34"
resolved "https://registry.yarnpkg.com/moment-timezone/-/moment-timezone-0.5.34.tgz#a75938f7476b88f155d3504a9343f7519d9a405c"
integrity sha512-3zAEHh2hKUs3EXLESx/wsgw6IQdusOT8Bxm3D9UrHPQR7zlMmzwybC8zHEM1tQ4LJwP7fcxrWr8tuBg05fFCbg==
version "0.5.37"
resolved "https://registry.yarnpkg.com/moment-timezone/-/moment-timezone-0.5.37.tgz#adf97f719c4e458fdb12e2b4e87b8bec9f4eef1e"
integrity sha512-uEDzDNFhfaywRl+vwXxffjjq1q0Vzr+fcQpQ1bU0kbzorfS7zVtZnCnGc8mhWmF39d4g4YriF6kwA75mJKE/Zg==
dependencies:
moment ">= 2.9.0"
"moment@>= 2.9.0":
version "2.29.3"
resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.3.tgz#edd47411c322413999f7a5940d526de183c031f3"
integrity sha512-c6YRvhEo//6T2Jz/vVtYzqBzwvPT95JBQ+smCytzf7c50oMZRsR/a4w88aD34I+/QVSfnoAnSBFPJHItlOMJVw==
moment@^2.29.3:
"moment@>= 2.9.0", moment@^2.29.3:
version "2.29.4"
resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.4.tgz#3dbe052889fe7c1b2ed966fcb3a77328964ef108"
integrity sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==
@ -9990,6 +10228,11 @@ mongodb@3.6.3:
optionalDependencies:
saslprep "^1.0.0"
monitor-event-loop-delay@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/monitor-event-loop-delay/-/monitor-event-loop-delay-1.0.0.tgz#b5ab78165a3bb93f2b275c50d01430c7f155d1f7"
integrity sha512-YRIr1exCIfBDLZle8WHOfSo7Xg3M+phcZfq9Fx1L6Abo+atGp7cge5pM7PjyBn4s1oZI/BRD4EMrzQBbPpVb5Q==
mri@1.1.4:
version "1.1.4"
resolved "https://registry.yarnpkg.com/mri/-/mri-1.1.4.tgz#7cb1dd1b9b40905f1fac053abe25b6720f44744a"
@ -10005,7 +10248,7 @@ ms@2.1.2:
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
ms@^2.1.1, ms@^2.1.3:
ms@^2.0.0, ms@^2.1.1, ms@^2.1.3:
version "2.1.3"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
@ -10145,6 +10388,11 @@ neo-async@^2.6.0, neo-async@^2.6.2:
resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f"
integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==
next-line@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/next-line/-/next-line-1.1.0.tgz#fcae57853052b6a9bae8208e40dd7d3c2d304603"
integrity sha512-+I10J3wKNoKddNxn0CNpoZ3eTZuqxjNM3b1GImVx22+ePI+Y15P8g/j3WsbP0fhzzrFzrtjOAoq5NCCucswXOQ==
nice-try@^1.0.4:
version "1.0.5"
resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
@ -10354,11 +10602,23 @@ object-copy@^0.1.0:
define-property "^0.2.5"
kind-of "^3.0.3"
object-filter-sequence@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/object-filter-sequence/-/object-filter-sequence-1.0.0.tgz#10bb05402fff100082b80d7e83991b10db411692"
integrity sha512-CsubGNxhIEChNY4cXYuA6KXafztzHqzLLZ/y3Kasf3A+sa3lL9thq3z+7o0pZqzEinjXT6lXDPAfVWI59dUyzQ==
object-hash@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-3.0.0.tgz#73f97f753e7baffc0e2cc9d6e079079744ac82e9"
integrity sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==
object-identity-map@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/object-identity-map/-/object-identity-map-1.0.2.tgz#2b4213a4285ca3a8cd2e696782c9964f887524e7"
integrity sha512-a2XZDGyYTngvGS67kWnqVdpoaJWsY7C1GhPJvejWAFCsUioTAaiTu8oBad7c6cI4McZxr4CmvnZeycK05iav5A==
dependencies:
object.entries "^1.1.0"
object-inspect@^1.12.0, object-inspect@^1.9.0:
version "1.12.2"
resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea"
@ -10386,6 +10646,15 @@ object.assign@^4.1.0, object.assign@^4.1.2:
has-symbols "^1.0.1"
object-keys "^1.1.1"
object.entries@^1.0.4, object.entries@^1.1.0:
version "1.1.5"
resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.5.tgz#e1acdd17c4de2cd96d5a08487cfb9db84d881861"
integrity sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g==
dependencies:
call-bind "^1.0.2"
define-properties "^1.1.3"
es-abstract "^1.19.1"
object.getownpropertydescriptors@^2.1.1:
version "2.1.4"
resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.4.tgz#7965e6437a57278b587383831a9b829455a4bc37"
@ -10509,6 +10778,11 @@ openapi-validator@^0.14.2:
path-parser "^6.1.0"
typeof "^1.0.0"
optional-js@^2.0.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/optional-js/-/optional-js-2.3.0.tgz#81d54c4719afa8845b988143643a5148f9d89490"
integrity sha512-B0LLi+Vg+eko++0z/b8zIv57kp7HKEzaPJo7LowJXMUKYdf+3XJGu/cw03h/JhIOsLnP+cG5QnTHAuicjA5fMw==
optionator@^0.8.1, optionator@^0.8.3:
version "0.8.3"
resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495"
@ -10526,6 +10800,13 @@ oracledb@5.3.0:
resolved "https://registry.yarnpkg.com/oracledb/-/oracledb-5.3.0.tgz#a15e6cd16757d8711a2c006a28bd7ecd3b8466f7"
integrity sha512-HMJzQ6lCf287ztvvehTEmjCWA21FQ3RMvM+mgoqd4i8pkREuqFWO+y3ovsGR9moJUg4T0xjcwS8rl4mggWPxmg==
original-url@^1.2.3:
version "1.2.3"
resolved "https://registry.yarnpkg.com/original-url/-/original-url-1.2.3.tgz#133aff4b2d27e38a98d736f7629c56262b7153e1"
integrity sha512-BYm+pKYLtS4mVe/mgT3YKGtWV5HzN/XKiaIu1aK4rsxyjuHeTW9N+xVBEpJcY1onB3nccfH0RbzUEoimMqFUHQ==
dependencies:
forwarded-parse "^2.1.0"
os-locale@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a"
@ -11009,7 +11290,7 @@ pino-std-serializers@^4.0.0:
resolved "https://registry.yarnpkg.com/pino-std-serializers/-/pino-std-serializers-4.0.0.tgz#1791ccd2539c091ae49ce9993205e2cd5dbba1e2"
integrity sha512-cK0pekc1Kjy5w9V2/n+8MkZwusa6EyyxfeQCB799CQRhRt/CqYKiWs5adeu8Shve2ZNffvfC/7J64A2PJo1W/Q==
pino@^6.13.0:
pino@^6.11.2, pino@^6.13.0:
version "6.14.0"
resolved "https://registry.yarnpkg.com/pino/-/pino-6.14.0.tgz#b745ea87a99a6c4c9b374e4f29ca7910d4c69f78"
integrity sha512-iuhEDel3Z3hF9Jfe44DPXR8l07bhjuFY3GMHIXbjnY9XcafbyDDwl2sN2vw2GjMPf5Nkoe+OFao7ffn9SXaKDg==
@ -11621,7 +11902,7 @@ readable-stream@1.1.14, readable-stream@^1.0.27-1:
isarray "0.0.1"
string_decoder "~0.10.x"
"readable-stream@2 || 3", readable-stream@^3.0.0, readable-stream@^3.0.1, readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.6.0:
"readable-stream@2 || 3", readable-stream@^3.0.0, readable-stream@^3.0.1, readable-stream@^3.0.6, readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.6.0:
version "3.6.0"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198"
integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==
@ -11838,6 +12119,11 @@ regjsparser@^0.8.2:
dependencies:
jsesc "~0.5.0"
relative-microtime@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/relative-microtime/-/relative-microtime-2.0.0.tgz#cceed2af095ecd72ea32011279c79e5fcc7de29b"
integrity sha512-l18ha6HEZc+No/uK4GyAnNxgKW7nvEe35IaeN54sShMojtqik2a6GbTyuiezkjpPaqP874Z3lW5ysBo5irz4NA==
remove-trailing-separator@^1.0.1:
version "1.1.0"
resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef"
@ -11918,6 +12204,15 @@ require-from-string@^2.0.2:
resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909"
integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==
require-in-the-middle@^5.0.3:
version "5.2.0"
resolved "https://registry.yarnpkg.com/require-in-the-middle/-/require-in-the-middle-5.2.0.tgz#4b71e3cc7f59977100af9beb76bf2d056a5a6de2"
integrity sha512-efCx3b+0Z69/LGJmm9Yvi4cqEdxnoGnxYxGxBghkkTTFeXRtTCmmhO0AnAfHz59k957uTSuy8WaHqOs8wbYUWg==
dependencies:
debug "^4.1.1"
module-details-from-path "^1.0.3"
resolve "^1.22.1"
require-main-filename@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b"
@ -11990,6 +12285,15 @@ resolve@^1.10.0, resolve@^1.14.2, resolve@^1.20.0, resolve@^1.9.0:
path-parse "^1.0.7"
supports-preserve-symlinks-flag "^1.0.0"
resolve@^1.22.1:
version "1.22.1"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177"
integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==
dependencies:
is-core-module "^2.9.0"
path-parse "^1.0.7"
supports-preserve-symlinks-flag "^1.0.0"
responselike@1.0.2, responselike@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7"
@ -12023,6 +12327,11 @@ reusify@^1.0.4:
resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76"
integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==
rfdc@^1.2.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.3.0.tgz#d0b7c441ab2720d05dc4cf26e01c89631d9da08b"
integrity sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==
rimraf@2.6.3:
version "2.6.3"
resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab"
@ -12078,7 +12387,7 @@ safe-buffer@*, safe-buffer@5.2.1, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-b
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
safe-buffer@~5.1.0, safe-buffer@~5.1.1:
safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
version "5.1.2"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
@ -12233,6 +12542,11 @@ set-blocking@^2.0.0:
resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7"
integrity sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==
set-cookie-serde@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/set-cookie-serde/-/set-cookie-serde-1.0.0.tgz#bcf9c260ed2212ac4005a53eacbaaa37c07ac452"
integrity sha512-Vq8e5GsupfJ7okHIvEPcfs5neCo7MZ1ZuWrO3sllYi3DOWt6bSSCpADzqXjz3k0fXehnoFIrmmhty9IN6U6BXQ==
set-value@^2.0.0, set-value@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b"
@ -12253,6 +12567,11 @@ setprototypeof@1.2.0:
resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424"
integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==
shallow-clone-shim@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/shallow-clone-shim/-/shallow-clone-shim-2.0.0.tgz#b62bf55aed79f4c1430ea1dc4d293a193f52cf91"
integrity sha512-YRNymdiL3KGOoS67d73TEmk4tdPTO9GSMCoiphQsTcC9EtC+AOmMPjkyBkRoCJfW9ASsaZw1craaiw1dPN2D3Q==
shallow-clone@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/shallow-clone/-/shallow-clone-3.0.1.tgz#8f2981ad92531f55035b01fb230769a40e02efa3"
@ -12522,6 +12841,13 @@ source-map@^0.7.3:
resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.4.tgz#a9bbe705c9d8846f4e08ff6765acf0f1b0898656"
integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==
source-map@^0.8.0-beta.0:
version "0.8.0-beta.0"
resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.8.0-beta.0.tgz#d4c1bb42c3f7ee925f005927ba10709e0d1d1f11"
integrity sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==
dependencies:
whatwg-url "^7.0.0"
spark-md5@3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/spark-md5/-/spark-md5-3.0.1.tgz#83a0e255734f2ab4e5c466e5a2cfc9ba2aa2124d"
@ -12606,6 +12932,11 @@ sprintf-js@~1.0.2:
resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==
sql-summary@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/sql-summary/-/sql-summary-1.0.1.tgz#a2dddb5435bae294eb11424a7330dc5bafe09c2b"
integrity sha512-IpCr2tpnNkP3Jera4ncexsZUp0enJBLr+pHCyTweMUBrbJsTgQeLWx1FXLhoBj/MvcnUQpkgOn2EY8FKOkUzww==
sqlstring@^2.3.2:
version "2.3.3"
resolved "https://registry.yarnpkg.com/sqlstring/-/sqlstring-2.3.3.tgz#2ddc21f03bce2c387ed60680e739922c65751d0c"
@ -12645,6 +12976,11 @@ stack-utils@^2.0.3:
dependencies:
escape-string-regexp "^2.0.0"
stackframe@^1.3.4:
version "1.3.4"
resolved "https://registry.yarnpkg.com/stackframe/-/stackframe-1.3.4.tgz#b881a004c8c149a5e8efef37d51b16e412943310"
integrity sha512-oeVtt7eWQS+Na6F//S4kJ2K2VbRlS9D43mAlMyVpVWovy9o+jfgH8O9agzANzaiLjclA0oYzUXEM4PurhSUChw==
standard-as-callback@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/standard-as-callback/-/standard-as-callback-2.1.0.tgz#8953fc05359868a77b5b9739a665c5977bb7df45"
@ -12678,6 +13014,13 @@ step@0.0.x:
resolved "https://registry.yarnpkg.com/step/-/step-0.0.6.tgz#143e7849a5d7d3f4a088fe29af94915216eeede2"
integrity sha512-qSSeQinUJk2w38vUFobjFoE307GqsozMC8VisOCkJLpklvKPT0ptPHwWOrENoag8rgLudvTkfP3bancwP93/Jw==
stream-chopper@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/stream-chopper/-/stream-chopper-3.0.1.tgz#73791ae7bf954c297d6683aec178648efc61dd75"
integrity sha512-f7h+ly8baAE26iIjcp3VbnBkbIRGtrvV0X0xxFM/d7fwLTYnLzDPTXRKNxa2HZzohOrc96NTrR+FaV3mzOelNA==
dependencies:
readable-stream "^3.0.6"
stream-shift@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.1.tgz#d7088281559ab2778424279b0877da3c392d5a3d"
@ -12701,7 +13044,7 @@ string-length@^4.0.1:
char-regex "^1.0.2"
strip-ansi "^6.0.0"
string-similarity@^4.0.4:
string-similarity@^4.0.1, string-similarity@^4.0.4:
version "4.0.4"
resolved "https://registry.yarnpkg.com/string-similarity/-/string-similarity-4.0.4.tgz#42d01ab0b34660ea8a018da8f56a3309bb8b2a5b"
integrity sha512-/q/8Q4Bl4ZKAPjj8WerIBJWALKkaPRfrvhfF8k/B23i4nzrlRj2/go1m90In7nG/3XDSbOo0+pu6RvCTM9RGMQ==
@ -13356,6 +13699,13 @@ to-regex@^3.0.1, to-regex@^3.0.2:
regex-not "^1.0.2"
safe-regex "^1.1.0"
to-source-code@^1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/to-source-code/-/to-source-code-1.0.2.tgz#dd136bdb1e1dbd80bbeacf088992678e9070bfea"
integrity sha512-YzWtjmNIf3E75eZYa7m1SCyl0vgOGoTzdpH3svfa8SUm5rqTgl9hnDolrAGOghCF9P2gsITXQoMrlujOoz+RPw==
dependencies:
is-nil "^1.0.0"
toidentifier@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35"
@ -13413,6 +13763,11 @@ tr46@~0.0.3:
resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a"
integrity sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=
traverse@^0.6.6:
version "0.6.6"
resolved "https://registry.yarnpkg.com/traverse/-/traverse-0.6.6.tgz#cbdf560fd7b9af632502fed40f918c157ea97137"
integrity sha512-kdf4JKs8lbARxWdp7RKdNzoJBhGUcIalSYibuGyHJbmk40pOysQ0+QPvlkCOICOivDWU2IJo2rkrxyTK2AH4fw==
trim-repeated@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/trim-repeated/-/trim-repeated-1.0.0.tgz#e3646a2ea4e891312bf7eace6cfb05380bc01c21"
@ -13611,6 +13966,14 @@ undici@^4.14.1:
resolved "https://registry.yarnpkg.com/undici/-/undici-4.16.0.tgz#469bb87b3b918818d3d7843d91a1d08da357d5ff"
integrity sha512-tkZSECUYi+/T1i4u+4+lwZmQgLXd4BLGlrc7KZPcLIW7Jpq99+Xpc30ONv7nS6F5UNOxp/HBZSSL9MafUrvJbw==
unicode-byte-truncate@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/unicode-byte-truncate/-/unicode-byte-truncate-1.0.0.tgz#aa6f0f3475193fe20c320ac9213e36e62e8764a7"
integrity sha512-GQgHk6DodEoKddKQdjnv7xKS9G09XCfHWX0R4RKht+EbUMSiVEmtWHGFO8HUm+6NvWik3E2/DG4MxTitOLL64A==
dependencies:
is-integer "^1.0.6"
unicode-substring "^0.1.0"
unicode-canonical-property-names-ecmascript@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc"
@ -13634,6 +13997,11 @@ unicode-property-aliases-ecmascript@^2.0.0:
resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.0.0.tgz#0a36cb9a585c4f6abd51ad1deddb285c165297c8"
integrity sha512-5Zfuy9q/DFr4tfO7ZPeVXb1aPoeQSdeFMLpYuFebehDAhbuevLs5yxSZmIFN1tP5F9Wl4IpJrYojg85/zgyZHQ==
unicode-substring@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/unicode-substring/-/unicode-substring-0.1.0.tgz#6120ce3c390385dbcd0f60c32b9065c4181d4b36"
integrity sha512-36Xaw9wXi7MB/3/EQZZHkZyyiRNa9i3k9YtPAz2KfqMVH2xutdXyMHn4Igarmnvr+wOrfWa/6njhY+jPpXN2EQ==
union-value@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847"

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/string-templates",
"version": "1.2.44-alpha.1",
"version": "1.2.58-alpha.3",
"description": "Handlebars wrapper for Budibase templating.",
"main": "src/index.cjs",
"module": "dist/bundle.mjs",

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/types",
"version": "1.2.44-alpha.1",
"version": "1.2.58-alpha.3",
"description": "Budibase types",
"main": "dist/index.js",
"types": "dist/index.d.ts",

View file

@ -0,0 +1,5 @@
export interface APIError {
message: string
status: number
error?: any
}

View file

@ -1 +1,3 @@
export * from "./analytics"
export * from "./user"
export * from "./errors"

View file

@ -0,0 +1,31 @@
import { User } from "../../documents"
export interface CreateUserResponse {
_id: string
_rev: string
email: string
}
export interface BulkCreateUsersRequest {
users: User[]
groups: any[]
}
export interface UserDetails {
_id: string
email: string
}
export interface BulkCreateUsersResponse {
successful: UserDetails[]
unsuccessful: { email: string; reason: string }[]
}
export interface BulkDeleteUsersRequest {
userIds: string[]
}
export interface BulkDeleteUsersResponse {
successful: UserDetails[]
unsuccessful: { _id: string; email: string; reason: string }[]
}
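
A small sketch of how these request/response shapes fit together on the client side; the endpoint path and the assumption that @budibase/types re-exports them are illustrative, only the interfaces above come from this diff.

import {
  BulkCreateUsersRequest,
  BulkCreateUsersResponse,
} from "@budibase/types"

// Hypothetical client helper; the path is an assumption for illustration.
async function bulkCreate(
  users: BulkCreateUsersRequest["users"]
): Promise<BulkCreateUsersResponse> {
  const body: BulkCreateUsersRequest = { users, groups: [] }
  const res = await fetch("/api/global/users/bulkCreate", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  })
  return (await res.json()) as BulkCreateUsersResponse
}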

View file

@ -15,8 +15,26 @@ export interface User extends Document {
status?: string
createdAt?: number // override the default createdAt behaviour - users sdk historically set this to Date.now()
userGroups?: string[]
forceResetPassword?: boolean
}
export interface UserRoles {
[key: string]: string
}
// utility types
export interface BuilderUser extends User {
builder: {
global: boolean
}
}
export interface AdminUser extends User {
admin: {
global: boolean
}
builder: {
global: boolean
}
}
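A minimal sketch of how the BuilderUser and AdminUser utility types might back simple type guards; it assumes the builder/admin flags sit on the user document as the utility types imply, and is illustrative rather than part of this change.

import { User, AdminUser, BuilderUser } from "@budibase/types"

// Illustrative type guards built on the utility types above.
function isBuilder(user: User): user is BuilderUser {
  const candidate = user as Partial<BuilderUser>
  return !!candidate.builder?.global
}

function isAdmin(user: User): user is AdminUser {
  const candidate = user as Partial<AdminUser>
  return !!candidate.admin?.global && !!candidate.builder?.global
}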

View file

@ -1,19 +1,20 @@
import { Document } from "../document"
import { User } from "./user"
export interface UserGroup extends Document {
name: string
icon: string
color: string
users: groupUser[]
users: GroupUser[]
apps: string[]
roles: UserGroupRoles
createdAt?: number
}
export interface groupUser {
export interface GroupUser {
_id: string
email: string[]
email: string
}
export interface UserGroupRoles {
[key: string]: string
}

View file

@ -4,3 +4,4 @@ export * from "./global"
export * from "./plugin"
export * from "./platform"
export * from "./document"
export * from "./pouch"

View file

@ -0,0 +1,5 @@
import { Document } from "../document"
export interface AccountMetadata extends Document {
email: string
}

View file

@ -1 +1,3 @@
export * from "./info"
export * from "./users"
export * from "./accounts"

View file

@ -0,0 +1,9 @@
import { Document } from "../document"
/**
* doc id is user email
*/
export interface PlatformUserByEmail extends Document {
tenantId: string
userId: string
}

View file

@ -0,0 +1,26 @@
export interface RowValue {
rev: string
deleted: boolean
}
export interface RowResponse<T> {
id: string
key: string
error: string
value: RowValue
doc: T
}
export interface AllDocsResponse<T> {
offset: number
total_rows: number
rows: RowResponse<T>[]
}
export type BulkDocsResponse = BulkDocResponse[]
interface BulkDocResponse {
ok: boolean
id: string
rev: string
}
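For context, a small sketch of how an allDocs-style response typed with these interfaces might be unwrapped; it uses only the shapes defined above and is not part of the diff.

import { AllDocsResponse } from "@budibase/types"

// Illustrative only: extract the documents from an allDocs-style response,
// skipping rows that report an error instead of carrying a doc.
function docsFrom<T>(response: AllDocsResponse<T>): T[] {
  return response.rows.filter(row => !row.error && row.doc).map(row => row.doc)
}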

View file

@ -0,0 +1,5 @@
export interface AuthToken {
userId: string
tenantId: string
sessionId: string
}
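A hedged sketch of how an AuthToken payload could be signed and read back with the jsonwebtoken library; the library choice, secret handling, and expiry are assumptions made for illustration and are not defined by this diff.

import jwt from "jsonwebtoken"
import { AuthToken } from "@budibase/types"

// Illustrative only: sign and verify an AuthToken payload.
// The secret source and the one-day expiry are assumptions.
function signAuthToken(token: AuthToken, jwtSecret: string): string {
  return jwt.sign(token, jwtSecret, { expiresIn: "1d" })
}

function readAuthToken(raw: string, jwtSecret: string): AuthToken {
  return jwt.verify(raw, jwtSecret) as AuthToken
}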

View file

@ -5,3 +5,4 @@ export * from "./licensing"
export * from "./migrations"
export * from "./datasources"
export * from "./search"
export * from "./auth"

View file

@ -1,7 +1,7 @@
{
"name": "@budibase/worker",
"email": "hi@budibase.com",
"version": "1.2.44-alpha.1",
"version": "1.2.58-alpha.3",
"description": "Budibase background service",
"main": "src/index.ts",
"repository": {
@ -35,10 +35,10 @@
"author": "Budibase",
"license": "GPL-3.0",
"dependencies": {
"@budibase/backend-core": "1.2.44-alpha.1",
"@budibase/pro": "1.2.44-alpha.1",
"@budibase/string-templates": "1.2.44-alpha.1",
"@budibase/types": "1.2.44-alpha.1",
"@budibase/backend-core": "1.2.58-alpha.3",
"@budibase/pro": "1.2.58-alpha.3",
"@budibase/string-templates": "1.2.58-alpha.3",
"@budibase/types": "1.2.58-alpha.3",
"@koa/router": "8.0.8",
"@sentry/node": "6.17.7",
"@techpass/passport-openidconnect": "0.3.2",
@ -46,6 +46,7 @@
"aws-sdk": "2.1030.0",
"bcryptjs": "2.4.3",
"dotenv": "8.6.0",
"elastic-apm-node": "3.38.0",
"global-agent": "3.0.0",
"got": "11.8.3",
"joi": "17.6.0",
@ -73,6 +74,7 @@
"@types/koa-router": "7.4.4",
"@types/koa__router": "8.0.11",
"@types/node": "14.18.20",
"@types/uuid": "8.3.4",
"@typescript-eslint/parser": "5.12.0",
"copyfiles": "2.4.1",
"eslint": "6.8.0",

View file

@ -14,3 +14,9 @@ const tk = require("timekeeper")
tk.freeze(mocks.date.MOCK_DATE)
global.console.log = jest.fn() // console.log are ignored in tests
if (!process.env.CI) {
// set a longer timeout in dev for debugging
// 100 seconds
jest.setTimeout(100000)
}

View file

@ -1,97 +0,0 @@
// get the JWT secret etc
require("../../src/environment")
require("@budibase/backend-core").init()
const {
getProdAppID,
generateGlobalUserID,
} = require("@budibase/backend-core/db")
const { doInTenant, getGlobalDB } = require("@budibase/backend-core/tenancy")
const users = require("../../src/sdk/users")
const { publicApiUserFix } = require("../../src/utilities/users")
const { hash } = require("@budibase/backend-core/utils")
const USER_LOAD_NUMBER = 10000
const BATCH_SIZE = 200
const PASSWORD = "test"
const TENANT_ID = "default"
const APP_ID = process.argv[2]
const words = [
"test",
"testing",
"budi",
"mail",
"age",
"risk",
"load",
"uno",
"arm",
"leg",
"pen",
"glass",
"box",
"chicken",
"bottle",
]
if (!APP_ID) {
console.error("Must supply app ID as first CLI option!")
process.exit(-1)
}
const WORD_1 = words[Math.floor(Math.random() * words.length)]
const WORD_2 = words[Math.floor(Math.random() * words.length)]
let HASHED_PASSWORD
function generateUser(count) {
return {
_id: generateGlobalUserID(),
password: HASHED_PASSWORD,
email: `${WORD_1}${count}@${WORD_2}.com`,
roles: {
[getProdAppID(APP_ID)]: "BASIC",
},
status: "active",
forceResetPassword: false,
firstName: "John",
lastName: "Smith",
}
}
async function run() {
HASHED_PASSWORD = await hash(PASSWORD)
return doInTenant(TENANT_ID, async () => {
const db = getGlobalDB()
for (let i = 0; i < USER_LOAD_NUMBER; i += BATCH_SIZE) {
let userSavePromises = []
for (let j = 0; j < BATCH_SIZE; j++) {
// like the public API
const ctx = publicApiUserFix({
request: {
body: generateUser(i + j),
},
})
userSavePromises.push(
users.save(ctx.request.body, {
hashPassword: false,
requirePassword: true,
bulkCreate: true,
})
)
}
const allUsers = await Promise.all(userSavePromises)
await db.bulkDocs(allUsers)
console.log(`${i + BATCH_SIZE} users have been created.`)
}
})
}
run()
.then(() => {
console.log(`Generated ${USER_LOAD_NUMBER} users!`)
})
.catch(err => {
console.error("Failed for reason: ", err)
process.exit(-1)
})

View file

@ -3,7 +3,7 @@ import { checkInviteCode } from "../../../utilities/redis"
import { sendEmail } from "../../../utilities/email"
import { users } from "../../../sdk"
import env from "../../../environment"
import { CloudAccount, User } from "@budibase/types"
import { BulkDeleteUsersRequest, CloudAccount, User } from "@budibase/types"
import {
accounts,
cache,
@ -46,8 +46,8 @@ export const bulkCreate = async (ctx: any) => {
}
try {
let response = await users.bulkCreate(newUsersRequested, groups)
await groupUtils.bulkSaveGroupUsers(groupsToSave, response)
const response = await users.bulkCreate(newUsersRequested, groups)
await groupUtils.bulkSaveGroupUsers(groupsToSave, response.successful)
ctx.body = response
} catch (err: any) {
@ -138,17 +138,15 @@ export const destroy = async (ctx: any) => {
}
export const bulkDelete = async (ctx: any) => {
const { userIds } = ctx.request.body
const { userIds } = ctx.request.body as BulkDeleteUsersRequest
if (userIds?.indexOf(ctx.user._id) !== -1) {
ctx.throw(400, "Unable to delete self.")
}
try {
let usersResponse = await users.bulkDelete(userIds)
let response = await users.bulkDelete(userIds)
ctx.body = {
message: `${usersResponse.length} user(s) deleted`,
}
ctx.body = response
} catch (err) {
ctx.throw(err)
}

View file

@ -0,0 +1,21 @@
import { Account, AccountMetadata } from "@budibase/types"
import { accounts } from "../../../sdk"
export const save = async (ctx: any) => {
const account = ctx.request.body as Account
let metadata: AccountMetadata = {
_id: accounts.formatAccountMetadataId(account.accountId),
email: account.email,
}
metadata = await accounts.saveMetadata(metadata)
ctx.body = metadata
ctx.status = 200
}
export const destroy = async (ctx: any) => {
const accountId = accounts.formatAccountMetadataId(ctx.params.accountId)
await accounts.destroyMetadata(accountId)
ctx.status = 204
}

View file

@ -1,15 +1,10 @@
const Router = require("@koa/router")
import Router from "@koa/router"
const compress = require("koa-compress")
const zlib = require("zlib")
const { routes } = require("./routes")
const {
buildAuthMiddleware,
auditLog,
buildTenancyMiddleware,
buildCsrfMiddleware,
} = require("@budibase/backend-core/auth")
const { middleware: pro } = require("@budibase/pro")
const { errors } = require("@budibase/backend-core")
import { routes } from "./routes"
import { middleware as pro } from "@budibase/pro"
import { errors, auth, middleware } from "@budibase/backend-core"
import { APIError } from "@budibase/types"
const PUBLIC_ENDPOINTS = [
// old deprecated endpoints kept for backwards compat
@ -97,9 +92,9 @@ router
})
)
.use("/health", ctx => (ctx.status = 200))
.use(buildAuthMiddleware(PUBLIC_ENDPOINTS))
.use(buildTenancyMiddleware(PUBLIC_ENDPOINTS, NO_TENANCY_ENDPOINTS))
.use(buildCsrfMiddleware({ noCsrfPatterns: NO_CSRF_ENDPOINTS }))
.use(auth.buildAuthMiddleware(PUBLIC_ENDPOINTS))
.use(auth.buildTenancyMiddleware(PUBLIC_ENDPOINTS, NO_TENANCY_ENDPOINTS))
.use(auth.buildCsrfMiddleware({ noCsrfPatterns: NO_CSRF_ENDPOINTS }))
.use(pro.licensing())
// for now no public access is allowed to worker (bar health check)
.use((ctx, next) => {
@ -114,21 +109,22 @@ router
}
return next()
})
.use(auditLog)
.use(middleware.auditLog)
// error handling middleware - TODO: This could be moved to backend-core
router.use(async (ctx, next) => {
try {
await next()
} catch (err) {
} catch (err: any) {
ctx.log.error(err)
ctx.status = err.status || err.statusCode || 500
const error = errors.getPublicError(err)
ctx.body = {
const body: APIError = {
message: err.message,
status: ctx.status,
error,
}
ctx.body = body
}
})
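To illustrate what this middleware produces, a hypothetical error body typed as APIError; the message is borrowed from the users controller earlier in this diff, and the optional error field is omitted because its shape depends on errors.getPublicError.

import { APIError } from "@budibase/types"

// Hypothetical body a client would receive for a rejected self-delete request.
const exampleBody: APIError = {
  message: "Unable to delete self.",
  status: 400,
}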

View file

@ -1,11 +1,11 @@
jest.mock("nodemailer")
const { config, request, mocks, structures } = require("../../../tests")
import { TestConfiguration, mocks, API } from "../../../../tests"
const sendMailMock = mocks.email.mock()
const { events } = require("@budibase/backend-core")
const TENANT_ID = structures.TENANT_ID
import { events } from "@budibase/backend-core"
describe("/api/global/auth", () => {
const config = new TestConfiguration()
const api = new API(config)
beforeAll(async () => {
await config.beforeAll()
@ -19,56 +19,32 @@ describe("/api/global/auth", () => {
jest.clearAllMocks()
})
const requestPasswordReset = async () => {
await config.saveSmtpConfig()
await config.saveSettingsConfig()
await config.createUser()
const res = await request
.post(`/api/global/auth/${TENANT_ID}/reset`)
.send({
email: "test@test.com",
})
.expect("Content-Type", /json/)
.expect(200)
const emailCall = sendMailMock.mock.calls[0][0]
const parts = emailCall.html.split(`http://localhost:10000/builder/auth/reset?code=`)
const code = parts[1].split("\"")[0].split("&")[0]
return { code, res }
}
it("should logout", async () => {
await request
.post("/api/global/auth/logout")
.set(config.defaultHeaders())
.expect(200)
await api.auth.logout()
expect(events.auth.logout).toBeCalledTimes(1)
})
it("should be able to generate password reset email", async () => {
const { res, code } = await requestPasswordReset()
const { res, code } = await api.auth.requestPasswordReset(sendMailMock)
const user = await config.getUser("test@test.com")
expect(res.body).toEqual({ message: "Please check your email for a reset link." })
expect(res.body).toEqual({
message: "Please check your email for a reset link.",
})
expect(sendMailMock).toHaveBeenCalled()
expect(code).toBeDefined()
expect(events.user.passwordResetRequested).toBeCalledTimes(1)
expect(events.user.passwordResetRequested).toBeCalledWith(user)
})
it("should allow resetting user password with code", async () => {
const { code } = await requestPasswordReset()
const { code } = await api.auth.requestPasswordReset(sendMailMock)
const user = await config.getUser("test@test.com")
delete user.password
delete user.password
const res = await api.auth.updatePassword(code)
const res = await request
.post(`/api/global/auth/${TENANT_ID}/reset/update`)
.send({
password: "newpassword",
resetCode: code,
})
.expect("Content-Type", /json/)
.expect(200)
expect(res.body).toEqual({ message: "password reset successfully." })
expect(events.user.passwordReset).toBeCalledTimes(1)
expect(events.user.passwordReset).toBeCalledWith(user)
@ -79,15 +55,15 @@ describe("/api/global/auth", () => {
const passportSpy = jest.spyOn(auth.passport, "authenticate")
let oidcConf
let chosenConfig
let configId
let chosenConfig: any
let configId: string
// mock the oidc strategy implementation and return value
let strategyFactory = jest.fn()
let mockStrategyReturn = jest.fn()
let mockStrategyConfig = jest.fn()
auth.oidc.fetchStrategyConfig = mockStrategyConfig
strategyFactory.mockReturnValue(mockStrategyReturn)
auth.oidc.strategyFactory = strategyFactory
@ -99,34 +75,34 @@ describe("/api/global/auth", () => {
})
afterEach(() => {
expect(strategyFactory).toBeCalledWith(
chosenConfig,
expect.any(Function)
)
expect(strategyFactory).toBeCalledWith(chosenConfig, expect.any(Function))
})
describe("oidc configs", () => {
it("should load strategy and delegate to passport", async () => {
await request.get(`/api/global/auth/${TENANT_ID}/oidc/configs/${configId}`)
await api.configs.getOIDCConfig(configId)
expect(passportSpy).toBeCalledWith(mockStrategyReturn, {
scope: ["profile", "email", "offline_access"]
scope: ["profile", "email", "offline_access"],
})
expect(passportSpy.mock.calls.length).toBe(1);
expect(passportSpy.mock.calls.length).toBe(1)
})
})
describe("oidc callback", () => {
it("should load strategy and delegate to passport", async () => {
await request.get(`/api/global/auth/${TENANT_ID}/oidc/callback`)
.set(config.getOIDConfigCookie(configId))
expect(passportSpy).toBeCalledWith(mockStrategyReturn, {
successRedirect: "/", failureRedirect: "/error"
}, expect.anything())
expect(passportSpy.mock.calls.length).toBe(1);
await api.configs.OIDCCallback(configId)
expect(passportSpy).toBeCalledWith(
mockStrategyReturn,
{
successRedirect: "/",
failureRedirect: "/error",
},
expect.anything()
)
expect(passportSpy.mock.calls.length).toBe(1)
})
})
})
})

View file

@ -1,11 +1,12 @@
// mock the email system
jest.mock("nodemailer")
const { config, structures, mocks, request } = require("../../../tests")
import { TestConfiguration, structures, mocks, API } from "../../../../tests"
mocks.email.mock()
const { Configs } = require("@budibase/backend-core/constants")
const { events } = require("@budibase/backend-core")
import { Configs, events } from "@budibase/backend-core"
describe("configs", () => {
const config = new TestConfiguration()
const api = new API(config)
beforeAll(async () => {
await config.beforeAll()
@ -20,35 +21,33 @@ describe("configs", () => {
})
describe("post /api/global/configs", () => {
const saveConfig = async (conf, _id, _rev) => {
const saveConfig = async (conf: any, _id?: string, _rev?: string) => {
const data = {
...conf,
_id,
_rev
_rev,
}
const res = await request
.post(`/api/global/configs`)
.send(data)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
const res = await api.configs.saveConfig(data)
return {
...data,
...res.body
...res.body,
}
}
describe("google", () => {
const saveGoogleConfig = async (conf, _id, _rev) => {
const saveGoogleConfig = async (
conf?: any,
_id?: string,
_rev?: string
) => {
const googleConfig = structures.configs.google(conf)
return saveConfig(googleConfig, _id, _rev)
}
describe("create", () => {
it ("should create activated google config", async () => {
it("should create activated google config", async () => {
await saveGoogleConfig()
expect(events.auth.SSOCreated).toBeCalledTimes(1)
expect(events.auth.SSOCreated).toBeCalledWith(Configs.GOOGLE)
@ -58,7 +57,7 @@ describe("configs", () => {
await config.deleteConfig(Configs.GOOGLE)
})
it ("should create deactivated google config", async () => {
it("should create deactivated google config", async () => {
await saveGoogleConfig({ activated: false })
expect(events.auth.SSOCreated).toBeCalledTimes(1)
expect(events.auth.SSOCreated).toBeCalledWith(Configs.GOOGLE)
@ -69,10 +68,14 @@ describe("configs", () => {
})
describe("update", () => {
it ("should update google config to deactivated", async () => {
it("should update google config to deactivated", async () => {
const googleConf = await saveGoogleConfig()
jest.clearAllMocks()
await saveGoogleConfig({ ...googleConf.config, activated: false }, googleConf._id, googleConf._rev)
await saveGoogleConfig(
{ ...googleConf.config, activated: false },
googleConf._id,
googleConf._rev
)
expect(events.auth.SSOUpdated).toBeCalledTimes(1)
expect(events.auth.SSOUpdated).toBeCalledWith(Configs.GOOGLE)
expect(events.auth.SSOActivated).not.toBeCalled()
@ -81,10 +84,14 @@ describe("configs", () => {
await config.deleteConfig(Configs.GOOGLE)
})
it ("should update google config to activated", async () => {
it("should update google config to activated", async () => {
const googleConf = await saveGoogleConfig({ activated: false })
jest.clearAllMocks()
await saveGoogleConfig({ ...googleConf.config, activated: true}, googleConf._id, googleConf._rev)
await saveGoogleConfig(
{ ...googleConf.config, activated: true },
googleConf._id,
googleConf._rev
)
expect(events.auth.SSOUpdated).toBeCalledTimes(1)
expect(events.auth.SSOUpdated).toBeCalledWith(Configs.GOOGLE)
expect(events.auth.SSODeactivated).not.toBeCalled()
@ -92,17 +99,21 @@ describe("configs", () => {
expect(events.auth.SSOActivated).toBeCalledWith(Configs.GOOGLE)
await config.deleteConfig(Configs.GOOGLE)
})
})
})
})
describe("oidc", () => {
const saveOIDCConfig = async (conf, _id, _rev) => {
const saveOIDCConfig = async (
conf?: any,
_id?: string,
_rev?: string
) => {
const oidcConfig = structures.configs.oidc(conf)
return saveConfig(oidcConfig, _id, _rev)
}
describe("create", () => {
it ("should create activated OIDC config", async () => {
it("should create activated OIDC config", async () => {
await saveOIDCConfig()
expect(events.auth.SSOCreated).toBeCalledTimes(1)
expect(events.auth.SSOCreated).toBeCalledWith(Configs.OIDC)
@ -112,7 +123,7 @@ describe("configs", () => {
await config.deleteConfig(Configs.OIDC)
})
it ("should create deactivated OIDC config", async () => {
it("should create deactivated OIDC config", async () => {
await saveOIDCConfig({ activated: false })
expect(events.auth.SSOCreated).toBeCalledTimes(1)
expect(events.auth.SSOCreated).toBeCalledWith(Configs.OIDC)
@ -123,10 +134,14 @@ describe("configs", () => {
})
describe("update", () => {
it ("should update OIDC config to deactivated", async () => {
it("should update OIDC config to deactivated", async () => {
const oidcConf = await saveOIDCConfig()
jest.clearAllMocks()
await saveOIDCConfig({ ...oidcConf.config.configs[0], activated: false }, oidcConf._id, oidcConf._rev)
await saveOIDCConfig(
{ ...oidcConf.config.configs[0], activated: false },
oidcConf._id,
oidcConf._rev
)
expect(events.auth.SSOUpdated).toBeCalledTimes(1)
expect(events.auth.SSOUpdated).toBeCalledWith(Configs.OIDC)
expect(events.auth.SSOActivated).not.toBeCalled()
@ -135,10 +150,14 @@ describe("configs", () => {
await config.deleteConfig(Configs.OIDC)
})
it ("should update OIDC config to activated", async () => {
it("should update OIDC config to activated", async () => {
const oidcConf = await saveOIDCConfig({ activated: false })
jest.clearAllMocks()
await saveOIDCConfig({ ...oidcConf.config.configs[0], activated: true}, oidcConf._id, oidcConf._rev)
await saveOIDCConfig(
{ ...oidcConf.config.configs[0], activated: true },
oidcConf._id,
oidcConf._rev
)
expect(events.auth.SSOUpdated).toBeCalledTimes(1)
expect(events.auth.SSOUpdated).toBeCalledWith(Configs.OIDC)
expect(events.auth.SSODeactivated).not.toBeCalled()
@ -147,17 +166,20 @@ describe("configs", () => {
await config.deleteConfig(Configs.OIDC)
})
})
})
describe("smtp", () => {
const saveSMTPConfig = async (conf, _id, _rev) => {
const saveSMTPConfig = async (
conf?: any,
_id?: string,
_rev?: string
) => {
const smtpConfig = structures.configs.smtp(conf)
return saveConfig(smtpConfig, _id, _rev)
}
describe("create", () => {
it ("should create SMTP config", async () => {
it("should create SMTP config", async () => {
await config.deleteConfig(Configs.SMTP)
await saveSMTPConfig()
expect(events.email.SMTPUpdated).not.toBeCalled()
@ -167,7 +189,7 @@ describe("configs", () => {
})
describe("update", () => {
it ("should update SMTP config", async () => {
it("should update SMTP config", async () => {
const smtpConf = await saveSMTPConfig()
jest.clearAllMocks()
await saveSMTPConfig(smtpConf.config, smtpConf._id, smtpConf._rev)
@ -179,15 +201,19 @@ describe("configs", () => {
})
describe("settings", () => {
const saveSettingsConfig = async (conf, _id, _rev) => {
const saveSettingsConfig = async (
conf?: any,
_id?: string,
_rev?: string
) => {
const settingsConfig = structures.configs.settings(conf)
return saveConfig(settingsConfig, _id, _rev)
}
describe("create", () => {
it ("should create settings config with default settings", async () => {
it("should create settings config with default settings", async () => {
await config.deleteConfig(Configs.SETTINGS)
await saveSettingsConfig()
expect(events.org.nameUpdated).not.toBeCalled()
@ -195,35 +221,43 @@ describe("configs", () => {
expect(events.org.platformURLUpdated).not.toBeCalled()
})
it ("should create settings config with non-default settings", async () => {
it("should create settings config with non-default settings", async () => {
config.modeSelf()
await config.deleteConfig(Configs.SETTINGS)
const conf = {
company: "acme",
logoUrl: "http://example.com",
platformUrl: "http://example.com"
platformUrl: "http://example.com",
}
await saveSettingsConfig(conf)
expect(events.org.nameUpdated).toBeCalledTimes(1)
expect(events.org.logoUpdated).toBeCalledTimes(1)
expect(events.org.platformURLUpdated).toBeCalledTimes(1)
config.modeAccount()
})
})
describe("update", () => {
it ("should update settings config", async () => {
it("should update settings config", async () => {
config.modeSelf()
await config.deleteConfig(Configs.SETTINGS)
const settingsConfig = await saveSettingsConfig()
settingsConfig.config.company = "acme"
settingsConfig.config.logoUrl = "http://example.com"
settingsConfig.config.platformUrl = "http://example.com"
await saveSettingsConfig(settingsConfig.config, settingsConfig._id, settingsConfig._rev)
await saveSettingsConfig(
settingsConfig.config,
settingsConfig._id,
settingsConfig._rev
)
expect(events.org.nameUpdated).toBeCalledTimes(1)
expect(events.org.logoUpdated).toBeCalledTimes(1)
expect(events.org.platformURLUpdated).toBeCalledTimes(1)
config.modeAccount()
})
})
})
@ -232,12 +266,7 @@ describe("configs", () => {
it("should return the correct checklist status based on the state of the budibase installation", async () => {
await config.saveSmtpConfig()
const res = await request
.get(`/api/global/configs/checklist`)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
const res = await api.configs.getConfigChecklist()
const checklist = res.body
expect(checklist.apps.checked).toBeFalsy()

View file

@ -1,12 +1,11 @@
jest.mock("nodemailer")
const { config, mocks, structures, request } = require("../../../tests")
import { TestConfiguration, mocks, API } from "../../../../tests"
const sendMailMock = mocks.email.mock()
const { EmailTemplatePurpose } = require("../../../constants")
const TENANT_ID = structures.TENANT_ID
import { EmailTemplatePurpose } from "../../../../constants"
describe("/api/global/email", () => {
const config = new TestConfiguration()
const api = new API(config)
beforeAll(async () => {
await config.beforeAll()
@ -20,16 +19,9 @@ describe("/api/global/email", () => {
// initially configure settings
await config.saveSmtpConfig()
await config.saveSettingsConfig()
const res = await request
.post(`/api/global/email/send`)
.send({
email: "test@test.com",
purpose: EmailTemplatePurpose.INVITATION,
tenantId: TENANT_ID,
})
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
const res = await api.emails.sendEmail(EmailTemplatePurpose.INVITATION)
expect(res.body.message).toBeDefined()
expect(sendMailMock).toHaveBeenCalled()
const emailCall = sendMailMock.mock.calls[0][0]

Some files were not shown because too many files have changed in this diff.