
Merge branch 'plugins-dev-experience' of github.com:Budibase/budibase into plugins-dev-experience-websocket

Andrew Kingston 2022-09-01 14:39:30 +01:00
commit b9a4efc9c3
169 changed files with 4901 additions and 2133 deletions

@@ -68,16 +68,28 @@ jobs:
          ]
        env:
          KUBECONFIG_FILE: '${{ secrets.RELEASE_KUBECONFIG }}'
-      - name: Set the base64 kubeconfig
-        run: echo 'RELEASE_KUBECONFIG=${{ secrets.RELEASE_KUBECONFIG }}' | base64
-      - name: Re roll the services
+      - name: Re roll app-service
        uses: actions-hub/kubectl@master
        env:
-          KUBE_CONFIG: ${{ env.RELEASE_KUBECONFIG }}
+          KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
        with:
-          args: rollout restart deployment proxy-service -n budibase && kubectl rollout restart deployment app-service -n budibase && kubectl rollout restart deployment worker-service -n budibase
+          args: rollout restart deployment app-service -n budibase
+      - name: Re roll proxy-service
+        uses: actions-hub/kubectl@master
+        env:
+          KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
+        with:
+          args: rollout restart deployment proxy-service -n budibase
+      - name: Re roll worker-service
+        uses: actions-hub/kubectl@master
+        env:
+          KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
+        with:
+          args: rollout restart deployment worker-service -n budibase
      - name: Discord Webhook Action
        uses: tsickert/discord-webhook@v4.0.0

@@ -121,15 +121,26 @@ jobs:
        env:
          KUBECONFIG_FILE: '${{ secrets.RELEASE_KUBECONFIG }}'
-      - name: Set the base64 kubeconfig
-        run: echo 'RELEASE_KUBECONFIG=${{ secrets.RELEASE_KUBECONFIG }}' | base64
-      - name: Re roll the services
+      - name: Re roll app-service
        uses: actions-hub/kubectl@master
        env:
-          KUBE_CONFIG: ${{ env.RELEASE_KUBECONFIG }}
+          KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
        with:
-          args: rollout restart deployment proxy-service -n budibase && kubectl rollout restart deployment app-service -n budibase && kubectl rollout restart deployment worker-service -n budibase
+          args: rollout restart deployment app-service -n budibase
+      - name: Re roll proxy-service
+        uses: actions-hub/kubectl@master
+        env:
+          KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
+        with:
+          args: rollout restart deployment proxy-service -n budibase
+      - name: Re roll worker-service
+        uses: actions-hub/kubectl@master
+        env:
+          KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
+        with:
+          args: rollout restart deployment worker-service -n budibase
      - name: Discord Webhook Action
        uses: tsickert/discord-webhook@v4.0.0

@@ -4,7 +4,7 @@
  "singleQuote": false,
  "trailingComma": "es5",
  "arrowParens": "avoid",
-  "jsxBracketSameLine": false,
+  "bracketSameLine": false,
  "plugins": ["prettier-plugin-svelte"],
  "svelteSortOrder": "options-scripts-markup-styles"
}

@@ -132,7 +132,23 @@ spec:
        {{ end }}
        {{ if .Values.globals.pluginsDir }}
        - name: PLUGINS_DIR
-          value: { { .Values.globals.pluginsDir | quote }}
+          value: {{ .Values.globals.pluginsDir | quote }}
+        {{ end }}
+        {{ if .Values.services.apps.nodeDebug }}
+        - name: NODE_DEBUG
+          value: {{ .Values.services.apps.nodeDebug | quote }}
+        {{ end }}
+        {{ if .Values.globals.elasticApmEnabled }}
+        - name: ELASTIC_APM_ENABLED
+          value: {{ .Values.globals.elasticApmEnabled | quote }}
+        {{ end }}
+        {{ if .Values.globals.elasticApmSecretToken }}
+        - name: ELASTIC_APM_SECRET_TOKEN
+          value: {{ .Values.globals.elasticApmSecretToken | quote }}
+        {{ end }}
+        {{ if .Values.globals.elasticApmServerUrl }}
+        - name: ELASTIC_APM_SERVER_URL
+          value: {{ .Values.globals.elasticApmServerUrl | quote }}
        {{ end }}
        image: budibase/apps:{{ .Values.globals.appVersion }}

@@ -27,6 +27,8 @@ spec:
    spec:
      containers:
      - env:
+        - name: BUDIBASE_ENVIRONMENT
+          value: {{ .Values.globals.budibaseEnv }}
        - name: DEPLOYMENT_ENVIRONMENT
          value: "kubernetes"
        - name: CLUSTER_PORT
@@ -125,6 +127,19 @@ spec:
          value: {{ .Values.globals.google.secret | quote }}
        - name: TENANT_FEATURE_FLAGS
          value: {{ .Values.globals.tenantFeatureFlags | quote }}
+        {{ if .Values.globals.elasticApmEnabled }}
+        - name: ELASTIC_APM_ENABLED
+          value: {{ .Values.globals.elasticApmEnabled | quote }}
+        {{ end }}
+        {{ if .Values.globals.elasticApmSecretToken }}
+        - name: ELASTIC_APM_SECRET_TOKEN
+          value: {{ .Values.globals.elasticApmSecretToken | quote }}
+        {{ end }}
+        {{ if .Values.globals.elasticApmServerUrl }}
+        - name: ELASTIC_APM_SERVER_URL
+          value: {{ .Values.globals.elasticApmServerUrl | quote }}
+        {{ end }}
        image: budibase/worker:{{ .Values.globals.appVersion }}
        imagePullPolicy: Always
        livenessProbe:

@@ -114,6 +114,10 @@ globals:
  smtp:
    enabled: false

+  # elasticApmEnabled:
+  # elasticApmSecretToken:
+  # elasticApmServerUrl:
+
services:
  budibaseVersion: latest
  dns: cluster.local
@@ -126,6 +130,7 @@ services:
    port: 4002
    replicaCount: 1
    logLevel: info
+    # nodeDebug: "" # set the value of NODE_DEBUG
  worker:
    port: 4003

@@ -15,7 +15,10 @@ http {
    log_format main '$remote_addr - $remote_user [$time_local] "$request" '
    '$status $body_bytes_sent "$http_referer" '
-    '"$http_user_agent" "$http_x_forwarded_for"';
+    '"$http_user_agent" "$http_x_forwarded_for" '
+    'response_time=$upstream_response_time proxy_host=$proxy_host upstream_addr=$upstream_addr';
+
+    access_log /var/log/nginx/access.log main;

    map $http_upgrade $connection_upgrade {
      default "upgrade";

@@ -33,7 +33,10 @@ http {
    log_format main '$remote_addr - $remote_user [$time_local] "$request" '
    '$status $body_bytes_sent "$http_referer" '
-    '"$http_user_agent" "$http_x_forwarded_for"';
+    '"$http_user_agent" "$http_x_forwarded_for" '
+    'response_time=$upstream_response_time proxy_host=$proxy_host upstream_addr=$upstream_addr';
+
+    access_log /var/log/nginx/access.log main;

    map $http_upgrade $connection_upgrade {
      default "upgrade";
@@ -85,6 +88,10 @@ http {
      proxy_pass http://$apps:4002;
    }

+    location /preview {
+      proxy_pass http://$apps:4002;
+    }
+
    location = / {
      proxy_pass http://$apps:4002;
    }
@@ -94,6 +101,7 @@ http {
      proxy_pass http://$watchtower:8080;
    }
    {{/if}}
+
    location ~ ^/(builder|app_) {
      proxy_http_version 1.1;
      proxy_set_header Connection $connection_upgrade;

@@ -1,5 +1,5 @@
{
-  "version": "1.2.44-alpha.1",
+  "version": "1.2.58-alpha.6",
  "npmClient": "yarn",
  "packages": [
    "packages/*"

@@ -1,6 +1,6 @@
{
  "name": "@budibase/backend-core",
-  "version": "1.2.44-alpha.1",
+  "version": "1.2.58-alpha.6",
  "description": "Budibase backend core libraries used in server and worker",
  "main": "dist/src/index.js",
  "types": "dist/src/index.d.ts",
@@ -20,7 +20,7 @@
    "test:watch": "jest --watchAll"
  },
  "dependencies": {
-    "@budibase/types": "1.2.44-alpha.1",
+    "@budibase/types": "1.2.58-alpha.6",
    "@techpass/passport-openidconnect": "0.3.2",
    "aws-sdk": "2.1030.0",
    "bcrypt": "5.0.1",

@@ -1,11 +1,11 @@
const passport = require("koa-passport")
const LocalStrategy = require("passport-local").Strategy
const JwtStrategy = require("passport-jwt").Strategy
-const { getGlobalDB } = require("./tenancy")
+import { getGlobalDB } from "./tenancy"
const refresh = require("passport-oauth2-refresh")
-const { Configs } = require("./constants")
-const { getScopedConfig } = require("./db/utils")
-const {
+import { Configs } from "./constants"
+import { getScopedConfig } from "./db/utils"
+import {
  jwt,
  local,
  authenticated,
@@ -13,7 +13,6 @@ const {
  oidc,
  auditLog,
  tenancy,
-  appTenancy,
  authError,
  ssoCallbackUrl,
  csrf,
@@ -22,32 +21,36 @@ const {
  builderOnly,
  builderOrAdmin,
  joiValidator,
-} = require("./middleware")
-
-const { invalidateUser } = require("./cache/user")
+} from "./middleware"
+import { invalidateUser } from "./cache/user"
+import { User } from "@budibase/types"

// Strategies
passport.use(new LocalStrategy(local.options, local.authenticate))
passport.use(new JwtStrategy(jwt.options, jwt.authenticate))

-passport.serializeUser((user, done) => done(null, user))
+passport.serializeUser((user: User, done: any) => done(null, user))

-passport.deserializeUser(async (user, done) => {
+passport.deserializeUser(async (user: User, done: any) => {
  const db = getGlobalDB()

  try {
-    const user = await db.get(user._id)
-    return done(null, user)
+    const dbUser = await db.get(user._id)
+    return done(null, dbUser)
  } catch (err) {
    console.error(`User not found`, err)
    return done(null, false, { message: "User not found" })
  }
})

-async function refreshOIDCAccessToken(db, chosenConfig, refreshToken) {
+async function refreshOIDCAccessToken(
+  db: any,
+  chosenConfig: any,
+  refreshToken: string
+) {
  const callbackUrl = await oidc.getCallbackUrl(db, chosenConfig)
-  let enrichedConfig
-  let strategy
+  let enrichedConfig: any
+  let strategy: any

  try {
    enrichedConfig = await oidc.fetchStrategyConfig(chosenConfig, callbackUrl)
@@ -70,22 +73,28 @@ async function refreshOIDCAccessToken(db, chosenConfig, refreshToken) {
    refresh.requestNewAccessToken(
      Configs.OIDC,
      refreshToken,
-      (err, accessToken, refreshToken, params) => {
+      (err: any, accessToken: string, refreshToken: any, params: any) => {
        resolve({ err, accessToken, refreshToken, params })
      }
    )
  })
}

-async function refreshGoogleAccessToken(db, config, refreshToken) {
+async function refreshGoogleAccessToken(
+  db: any,
+  config: any,
+  refreshToken: any
+) {
  let callbackUrl = await google.getCallbackUrl(db, config)

  let strategy
  try {
    strategy = await google.strategyFactory(config, callbackUrl)
-  } catch (err) {
+  } catch (err: any) {
    console.error(err)
-    throw new Error("Error constructing OIDC refresh strategy", err)
+    throw new Error(
+      `Error constructing OIDC refresh strategy: message=${err.message}`
+    )
  }

  refresh.use(strategy)
@@ -94,14 +103,18 @@ async function refreshGoogleAccessToken(db, config, refreshToken) {
    refresh.requestNewAccessToken(
      Configs.GOOGLE,
      refreshToken,
-      (err, accessToken, refreshToken, params) => {
+      (err: any, accessToken: string, refreshToken: string, params: any) => {
        resolve({ err, accessToken, refreshToken, params })
      }
    )
  })
}

-async function refreshOAuthToken(refreshToken, configType, configId) {
+async function refreshOAuthToken(
+  refreshToken: string,
+  configType: string,
+  configId: string
+) {
  const db = getGlobalDB()

  const config = await getScopedConfig(db, {
@@ -113,7 +126,7 @@ async function refreshOAuthToken(refreshToken, configType, configId) {
  let refreshResponse
  if (configType === Configs.OIDC) {
    // configId - retrieved from cookie.
-    chosenConfig = config.configs.filter(c => c.uuid === configId)[0]
+    chosenConfig = config.configs.filter((c: any) => c.uuid === configId)[0]
    if (!chosenConfig) {
      throw new Error("Invalid OIDC configuration")
    }
@@ -134,7 +147,7 @@ async function refreshOAuthToken(refreshToken, configType, configId) {
  return refreshResponse
}

-async function updateUserOAuth(userId, oAuthConfig) {
+async function updateUserOAuth(userId: string, oAuthConfig: any) {
  const details = {
    accessToken: oAuthConfig.accessToken,
    refreshToken: oAuthConfig.refreshToken,
@@ -162,14 +175,13 @@ async function updateUserOAuth(userId, oAuthConfig) {
  }
}

-module.exports = {
+export = {
  buildAuthMiddleware: authenticated,
  passport,
  google,
  oidc,
  jwt: require("jsonwebtoken"),
  buildTenancyMiddleware: tenancy,
-  buildAppTenancyMiddleware: appTenancy,
  auditLog,
  authError,
  buildCsrfMiddleware: csrf,

@@ -18,6 +18,7 @@ export enum ViewName {
  LINK = "by_link",
  ROUTING = "screen_routes",
  AUTOMATION_LOGS = "automation_logs",
+  ACCOUNT_BY_EMAIL = "account_by_email",
}

export const DeprecatedViews = {
@@ -41,6 +42,7 @@ export enum DocumentType {
  MIGRATIONS = "migrations",
  DEV_INFO = "devinfo",
  AUTOMATION_LOG = "log_au",
+  ACCOUNT_METADATA = "acc_metadata",
}

export const StaticDatabases = {

@@ -5,6 +5,8 @@ const {
  SEPARATOR,
} = require("./utils")
const { getGlobalDB } = require("../tenancy")
+const { StaticDatabases } = require("./constants")
+const { doWithDB } = require("./")

const DESIGN_DB = "_design/database"
@@ -56,6 +58,31 @@ exports.createNewUserEmailView = async () => {
  await db.put(designDoc)
}

+exports.createAccountEmailView = async () => {
+  await doWithDB(StaticDatabases.PLATFORM_INFO.name, async db => {
+    let designDoc
+    try {
+      designDoc = await db.get(DESIGN_DB)
+    } catch (err) {
+      // no design doc, make one
+      designDoc = DesignDoc()
+    }
+    const view = {
+      // if using variables in a map function need to inject them before use
+      map: `function(doc) {
+        if (doc._id.startsWith("${DocumentType.ACCOUNT_METADATA}${SEPARATOR}")) {
+          emit(doc.email.toLowerCase(), doc._id)
+        }
+      }`,
+    }
+    designDoc.views = {
+      ...designDoc.views,
+      [ViewName.ACCOUNT_BY_EMAIL]: view,
+    }
+    await db.put(designDoc)
+  })
+}
+
exports.createUserAppView = async () => {
  const db = getGlobalDB()
  let designDoc
@@ -128,6 +155,39 @@ exports.createUserBuildersView = async () => {
  await db.put(designDoc)
}

+exports.queryView = async (viewName, params, db, CreateFuncByName) => {
+  try {
+    let response = (await db.query(`database/${viewName}`, params)).rows
+    response = response.map(resp =>
+      params.include_docs ? resp.doc : resp.value
+    )
+    if (params.arrayResponse) {
+      return response
+    } else {
+      return response.length <= 1 ? response[0] : response
+    }
+  } catch (err) {
+    if (err != null && err.name === "not_found") {
+      const createFunc = CreateFuncByName[viewName]
+      await removeDeprecated(db, viewName)
+      await createFunc()
+      return exports.queryView(viewName, params, db, CreateFuncByName)
+    } else {
+      throw err
+    }
+  }
+}
+
+exports.queryPlatformView = async (viewName, params) => {
+  const CreateFuncByName = {
+    [ViewName.ACCOUNT_BY_EMAIL]: exports.createAccountEmailView,
+  }
+  return doWithDB(StaticDatabases.PLATFORM_INFO.name, async db => {
+    return exports.queryView(viewName, params, db, CreateFuncByName)
+  })
+}
+
exports.queryGlobalView = async (viewName, params, db = null) => {
  const CreateFuncByName = {
    [ViewName.USER_BY_EMAIL]: exports.createNewUserEmailView,
@@ -139,20 +199,5 @@ exports.queryGlobalView = async (viewName, params, db = null) => {
  if (!db) {
    db = getGlobalDB()
  }
-  try {
-    let response = (await db.query(`database/${viewName}`, params)).rows
-    response = response.map(resp =>
-      params.include_docs ? resp.doc : resp.value
-    )
-    return response.length <= 1 ? response[0] : response
-  } catch (err) {
-    if (err != null && err.name === "not_found") {
-      const createFunc = CreateFuncByName[viewName]
-      await removeDeprecated(db, viewName)
-      await createFunc()
-      return exports.queryGlobalView(viewName, params)
-    } else {
-      throw err
-    }
-  }
+  return exports.queryView(viewName, params, db, CreateFuncByName)
}

@@ -8,4 +8,5 @@ import { processors } from "./processors"

export const shutdown = () => {
  processors.shutdown()
+  console.log("Events shutdown")
}

@@ -17,6 +17,7 @@ import constants from "./constants"
import * as dbConstants from "./db/constants"
import logging from "./logging"
import pino from "./pino"
+import * as middleware from "./middleware"

// mimic the outer package exports
import * as db from "./pkg/db"
@@ -57,6 +58,7 @@ const core = {
  roles,
  ...pino,
  ...errorClasses,
+  middleware,
}

export = core

@@ -65,7 +65,7 @@ async function checkApiKey(apiKey: string, populateUser?: Function) {
 * The tenancy modules should not be used here and it should be assumed that the tenancy context
 * has not yet been populated.
 */
-module.exports = (
+export = (
  noAuthPatterns = [],
  opts: { publicAllowed: boolean; populateUser?: Function } = {
    publicAllowed: false,

@@ -13,7 +13,8 @@ const adminOnly = require("./adminOnly")
const builderOrAdmin = require("./builderOrAdmin")
const builderOnly = require("./builderOnly")
const joiValidator = require("./joi-validator")
-module.exports = {
+
+const pkg = {
  google,
  oidc,
  jwt,
@@ -33,3 +34,5 @@ module.exports = {
  builderOrAdmin,
  joiValidator,
}
+
+export = pkg

@@ -13,10 +13,13 @@ function validate(schema, property) {
    params = ctx.request[property]
  }

-  schema = schema.append({
-    createdAt: Joi.any().optional(),
-    updatedAt: Joi.any().optional(),
-  })
+  // not all schemas have the append property e.g. array schemas
+  if (schema.append) {
+    schema = schema.append({
+      createdAt: Joi.any().optional(),
+      updatedAt: Joi.any().optional(),
+    })
+  }

  const { error } = schema.validate(params)
  if (error) {

@@ -70,15 +70,13 @@ const PUBLIC_BUCKETS = [
 * @constructor
 */
export const ObjectStore = (bucket: any) => {
-  AWS.config.update({
-    accessKeyId: env.MINIO_ACCESS_KEY,
-    secretAccessKey: env.MINIO_SECRET_KEY,
-    region: env.AWS_REGION,
-  })
  const config: any = {
    s3ForcePathStyle: true,
    signatureVersion: "v4",
    apiVersion: "2006-03-01",
+    accessKeyId: env.MINIO_ACCESS_KEY,
+    secretAccessKey: env.MINIO_SECRET_KEY,
+    region: env.AWS_REGION,
  }
  if (bucket) {
    config.params = {

@@ -3,17 +3,27 @@ const { v4: uuidv4 } = require("uuid")
const { logWarn } = require("../logging")
const env = require("../environment")

-interface Session {
-  key: string
-  userId: string
-  sessionId: string
-  lastAccessedAt: string
-  createdAt: string
-  csrfToken?: string
-  value: string
-}
-
-type SessionKey = { key: string }[]
+interface CreateSession {
+  sessionId: string
+  tenantId: string
+  csrfToken?: string
+}
+
+interface Session extends CreateSession {
+  userId: string
+  lastAccessedAt: string
+  createdAt: string
+  // make optional attributes required
+  csrfToken: string
+}
+
+interface SessionKey {
+  key: string
+}
+
+interface ScannedSession {
+  value: Session
+}

// a week in seconds
const EXPIRY_SECONDS = 86400 * 7
@@ -22,14 +32,14 @@ function makeSessionID(userId: string, sessionId: string) {
  return `${userId}/${sessionId}`
}

-export async function getSessionsForUser(userId: string) {
+export async function getSessionsForUser(userId: string): Promise<Session[]> {
  if (!userId) {
    console.trace("Cannot get sessions for undefined userId")
    return []
  }
  const client = await redis.getSessionClient()
-  const sessions = await client.scan(userId)
-  return sessions.map((session: Session) => session.value)
+  const sessions: ScannedSession[] = await client.scan(userId)
+  return sessions.map(session => session.value)
}

export async function invalidateSessions(
@@ -39,33 +49,32 @@
  try {
    const reason = opts?.reason || "unknown"
    let sessionIds: string[] = opts.sessionIds || []
-    let sessions: SessionKey
+    let sessionKeys: SessionKey[]

    // If no sessionIds, get all the sessions for the user
    if (sessionIds.length === 0) {
-      sessions = await getSessionsForUser(userId)
-      sessions.forEach(
-        (session: any) =>
-          (session.key = makeSessionID(session.userId, session.sessionId))
-      )
+      const sessions = await getSessionsForUser(userId)
+      sessionKeys = sessions.map(session => ({
+        key: makeSessionID(session.userId, session.sessionId),
+      }))
    } else {
      // use the passed array of sessionIds
      sessionIds = Array.isArray(sessionIds) ? sessionIds : [sessionIds]
-      sessions = sessionIds.map((sessionId: string) => ({
+      sessionKeys = sessionIds.map(sessionId => ({
        key: makeSessionID(userId, sessionId),
      }))
    }

-    if (sessions && sessions.length > 0) {
+    if (sessionKeys && sessionKeys.length > 0) {
      const client = await redis.getSessionClient()
      const promises = []
-      for (let session of sessions) {
-        promises.push(client.delete(session.key))
+      for (let sessionKey of sessionKeys) {
+        promises.push(client.delete(sessionKey.key))
      }
      if (!env.isTest()) {
        logWarn(
-          `Invalidating sessions for ${userId} (reason: ${reason}) - ${sessions
-            .map(session => session.key)
+          `Invalidating sessions for ${userId} (reason: ${reason}) - ${sessionKeys
+            .map(sessionKey => sessionKey.key)
            .join(", ")}`
        )
      }
@@ -76,22 +85,26 @@
  }
}

-export async function createASession(userId: string, session: Session) {
+export async function createASession(
+  userId: string,
+  createSession: CreateSession
+) {
  // invalidate all other sessions
  await invalidateSessions(userId, { reason: "creation" })
  const client = await redis.getSessionClient()
-  const sessionId = session.sessionId
-  if (!session.csrfToken) {
-    session.csrfToken = uuidv4()
-  }
-  session = {
-    ...session,
+  const sessionId = createSession.sessionId
+  const csrfToken = createSession.csrfToken ? createSession.csrfToken : uuidv4()
+  const key = makeSessionID(userId, sessionId)
+
+  const session: Session = {
+    ...createSession,
+    csrfToken,
    createdAt: new Date().toISOString(),
    lastAccessedAt: new Date().toISOString(),
    userId,
  }
-  await client.store(makeSessionID(userId, sessionId), session, EXPIRY_SECONDS)
+  await client.store(key, session, EXPIRY_SECONDS)
}

export async function updateSessionTTL(session: Session) {
@@ -106,7 +119,10 @@ export async function endSession(userId: string, sessionId: string) {
  await client.delete(makeSessionID(userId, sessionId))
}

-export async function getSession(userId: string, sessionId: string) {
+export async function getSession(
+  userId: string,
+  sessionId: string
+): Promise<Session> {
  if (!userId || !sessionId) {
    throw new Error(`Invalid session details - ${userId} - ${sessionId}`)
  }

@@ -11,7 +11,6 @@ const { UNICODE_MAX } = require("./db/constants")
 * Given an email address this will use a view to search through
 * all the users to find one with this email address.
 * @param {string} email the email to lookup the user by.
- * @return {Promise<object|null>}
 */
exports.getGlobalUserByEmail = async email => {
  if (email == null) {

@@ -0,0 +1,7 @@
export const getAccount = jest.fn()
export const getAccountByTenantId = jest.fn()
jest.mock("../../../src/cloud/accounts", () => ({
getAccount,
getAccountByTenantId,
}))

@@ -1,2 +0,0 @@
exports.MOCK_DATE = new Date("2020-01-01T00:00:00.000Z")
exports.MOCK_DATE_TIMESTAMP = 1577836800000

@@ -0,0 +1,2 @@
export const MOCK_DATE = new Date("2020-01-01T00:00:00.000Z")
export const MOCK_DATE_TIMESTAMP = 1577836800000

@@ -1,9 +0,0 @@
const posthog = require("./posthog")
const events = require("./events")
const date = require("./date")
module.exports = {
posthog,
date,
events,
}

@@ -0,0 +1,4 @@
import "./posthog"
import "./events"
export * as accounts from "./accounts"
export * as date from "./date"

@@ -1,7 +1,7 @@
{
  "name": "@budibase/bbui",
  "description": "A UI solution used in the different Budibase projects.",
-  "version": "1.2.44-alpha.1",
+  "version": "1.2.58-alpha.6",
  "license": "MPL-2.0",
  "svelte": "src/index.js",
  "module": "dist/bbui.es.js",
@@ -38,7 +38,7 @@
  ],
  "dependencies": {
    "@adobe/spectrum-css-workflow-icons": "^1.2.1",
-    "@budibase/string-templates": "1.2.44-alpha.1",
+    "@budibase/string-templates": "1.2.58-alpha.6",
    "@spectrum-css/actionbutton": "^1.0.1",
    "@spectrum-css/actiongroup": "^1.0.1",
    "@spectrum-css/avatar": "^3.0.2",

@@ -67,6 +67,13 @@
    // If time only set date component to 2000-01-01
    if (timeOnly) {
+      // Classic flackpickr causing issues.
+      // When selecting a value for the first time for a "time only" field,
+      // the time is always offset by 1 hour for some reason (regardless of time
+      // zone) so we need to correct it.
+      if (!value && newValue) {
+        newValue = new Date(dates[0].getTime() + 60 * 60 * 1000).toISOString()
+      }
      newValue = `2000-01-01T${newValue.split("T")[1]}`
    }

@@ -139,7 +139,13 @@
  <div class="title">
    <div class="filename">
      {#if selectedUrl}
-        <Link href={selectedUrl}>{selectedImage.name}</Link>
+        <Link
+          target="_blank"
+          download={selectedImage.name}
+          href={selectedUrl}
+        >
+          {selectedImage.name}
+        </Link>
      {:else}
        {selectedImage.name}
      {/if}

@@ -10,6 +10,7 @@
  export let disabled = false
  export let getOptionLabel = option => option
  export let getOptionValue = option => option
+  export let getOptionTitle = option => option

  const dispatch = createEventDispatcher()
  const onChange = e => dispatch("change", e.target.value)
@@ -19,7 +20,7 @@
{#if options && Array.isArray(options)}
  {#each options as option}
    <div
-      title={getOptionLabel(option)}
+      title={getOptionTitle(option)}
      class="spectrum-Radio spectrum-FieldGroup-item spectrum-Radio--emphasized"
      class:is-invalid={!!error}
    >

@@ -12,6 +12,7 @@
  export let direction = "vertical"
  export let getOptionLabel = option => extractProperty(option, "label")
  export let getOptionValue = option => extractProperty(option, "value")
+  export let getOptionTitle = option => extractProperty(option, "label")

  const dispatch = createEventDispatcher()
  const onChange = e => {
@@ -35,6 +36,7 @@
    {direction}
    {getOptionLabel}
    {getOptionValue}
+    {getOptionTitle}
    on:change={onChange}
  />
</Field>

@@ -8,12 +8,14 @@
  export let secondary = false
  export let overBackground = false
  export let target
+  export let download
</script>

<a
  on:click
  {href}
  {target}
+  {download}
  class:spectrum-Link--primary={primary}
  class:spectrum-Link--secondary={secondary}
  class:spectrum-Link--overBackground={overBackground}

@@ -15,14 +15,24 @@
{#each attachments as attachment}
  {#if isImage(attachment.extension)}
-    <Link quiet target="_blank" href={attachment.url}>
+    <Link
+      quiet
+      target="_blank"
+      download={attachment.name}
+      href={attachment.url}
+    >
      <div class="center" title={attachment.name}>
        <img src={attachment.url} alt={attachment.extension} />
      </div>
    </Link>
  {:else}
    <div class="file" title={attachment.name}>
-      <Link quiet target="_blank" href={attachment.url}>
+      <Link
+        quiet
+        target="_blank"
+        download={attachment.name}
+        href={attachment.url}
+      >
        {attachment.extension}
      </Link>
    </div>

@@ -102,7 +102,7 @@ filterTests(['all'], () => {
      cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 6000 })
      cy.wait(500)
-      cy.get(interact.APP_TABLE_STATUS, { timeout: 1000 }).eq(0).contains("Unpublished")
+      cy.get(interact.APP_TABLE_STATUS, { timeout: 10000 }).eq(0).contains("Unpublished")
    })
  })

@@ -175,7 +175,10 @@ filterTests(["all"], () => {
      cy.get("@query").its("response.statusCode").should("eq", 200)
      cy.get("@query").its("response.body").should("not.be.empty")
      // Save query
+      cy.intercept("POST", "**/queries").as("saveQuery")
      cy.get(".spectrum-Button").contains("Save Query").click({ force: true })
+      cy.wait("@saveQuery")
+      cy.get("@saveQuery").its("response.statusCode").should("eq", 200)
      cy.get(".nav-item").should("contain", queryName)
    })

@@ -252,7 +252,8 @@ filterTests(["all"], () => {
        .contains("Delete Query")
        .click({ force: true })
      // Confirm deletion
-      cy.reload({ timeout: 5000 })
+      cy.reload()
+      cy.get(".nav-item", { timeout: 30000 }).contains(datasource).click({ force: true })
      cy.get(".nav-item", { timeout: 1000 }).should("not.contain", queryRename)
    })

@@ -48,6 +48,7 @@ filterTests(['smoke', 'all'], () => {
        cy.get(interact.AREA_LABEL_REVERT).click({ force: true })
      })
      cy.get(interact.SPECTRUM_DIALOG_GRID).within(() => {
+        cy.get("input").type("Cypress Tests")
        // Click Revert
        cy.get(interact.SPECTRUM_BUTTON).contains("Revert").click({ force: true })
        cy.wait(2000) // Wait for app to finish reverting

@@ -448,10 +448,7 @@ Cypress.Commands.add("createTable", (tableName, initialTable) => {
      .contains("Continue")
      .click({ force: true })
  })
-  cy.get(".spectrum-Modal", { timeout: 10000 }).should(
-    "not.contain",
-    "Add data source"
-  )
+  cy.get(".spectrum-Modal").contains("Create Table", { timeout: 10000 })
  cy.get(".spectrum-Modal", { timeout: 2000 }).within(() => {
    cy.get("input", { timeout: 2000 }).first().type(tableName).blur()
    cy.get(".spectrum-ButtonGroup").contains("Create").click()
@@ -742,8 +739,15 @@ Cypress.Commands.add("deleteAllScreens", () => {
Cypress.Commands.add("navigateToFrontend", () => {
  // Clicks on Design tab and then the Home nav item
  cy.wait(500)
+  cy.intercept("**/preview").as("preview")
  cy.contains("Design").click()
-  cy.get(".spectrum-Search", { timeout: 2000 }).type("/")
+  cy.wait("@preview")
+  cy.get("@preview").then(res => {
+    if (res.statusCode != 200) {
+      cy.reload()
+    }
+  })
+  cy.get(".spectrum-Search", { timeout: 20000 }).type("/")
  cy.get(".nav-item", { timeout: 2000 }).contains("home").click({ force: true })
})

@@ -1,6 +1,6 @@
{
  "name": "@budibase/builder",
-  "version": "1.2.44-alpha.1",
+  "version": "1.2.58-alpha.6",
  "license": "GPL-3.0",
  "private": true,
  "scripts": {
@@ -69,10 +69,10 @@
    }
  },
  "dependencies": {
-    "@budibase/bbui": "1.2.44-alpha.1",
-    "@budibase/client": "1.2.44-alpha.1",
-    "@budibase/frontend-core": "1.2.44-alpha.1",
-    "@budibase/string-templates": "1.2.44-alpha.1",
+    "@budibase/bbui": "1.2.58-alpha.6",
+    "@budibase/client": "1.2.58-alpha.6",
+    "@budibase/frontend-core": "1.2.58-alpha.6",
+    "@budibase/string-templates": "1.2.58-alpha.6",
    "@sentry/browser": "5.19.1",
    "@spectrum-css/page": "^3.0.1",
    "@spectrum-css/vars": "^3.0.1",
@@ -121,4 +121,4 @@
    "vite": "^3.0.8"
  },
  "gitHead": "115189f72a850bfb52b65ec61d932531bf327072"
}

@@ -19,7 +19,6 @@ import {
  makeComponentUnique,
} from "../componentUtils"
import { Helpers } from "@budibase/bbui"
-import { DefaultAppTheme, LAYOUT_NAMES } from "../../constants"
import { Utils } from "@budibase/frontend-core"

const INITIAL_FRONTEND_STATE = {
@@ -134,35 +133,6 @@ export const getFrontendStore = () => {
      await integrations.init()
      await queries.init()
      await tables.init()
-
-      // Add navigation settings to old apps
-      if (!application.navigation) {
-        const layout = layouts.find(x => x._id === LAYOUT_NAMES.MASTER.PRIVATE)
-        const customTheme = application.customTheme
-        let navigationSettings = {
-          navigation: "Top",
-          title: application.name,
-          navWidth: "Large",
-          navBackground:
-            customTheme?.navBackground || DefaultAppTheme.navBackground,
-          navTextColor:
-            customTheme?.navTextColor || DefaultAppTheme.navTextColor,
-        }
-        if (layout) {
-          navigationSettings.hideLogo = layout.props.hideLogo
-          navigationSettings.hideTitle = layout.props.hideTitle
-          navigationSettings.title = layout.props.title || application.name
-          navigationSettings.logoUrl = layout.props.logoUrl
-          navigationSettings.links = layout.props.links
-          navigationSettings.navigation = layout.props.navigation || "Top"
-          navigationSettings.sticky = layout.props.sticky
-          navigationSettings.navWidth = layout.props.width || "Large"
-          if (navigationSettings.navigation === "None") {
-            navigationSettings.navigation = "Top"
-          }
-        }
-        await store.actions.navigation.save(navigationSettings)
-      }
    },
    theme: {
      save: async theme => {

@@ -23,7 +23,7 @@
</script>

<div class="automations-list">
-  {#each $automationStore.automations as automation, idx}
+  {#each $automationStore.automations.sort(aut => aut.name) as automation, idx}
    <NavItem
      border={idx > 0}
      icon="ShareAndroid"

@@ -14,7 +14,7 @@
  $: {
    let fields = {}
-    for (const [key, type] of Object.entries(block?.inputs?.fields)) {
+    for (const [key, type] of Object.entries(block?.inputs?.fields ?? {})) {
      fields = {
        ...fields,
        [key]: {

@@ -467,6 +467,7 @@
    options={relationshipOptions}
    getOptionLabel={option => option.name}
    getOptionValue={option => option.value}
+    getOptionTitle={option => option.alt}
  />
{/if}
<Input

@@ -185,7 +185,7 @@
      $goto("./navigation")
    }
  } else if (type === "request-add-component") {
-    $goto(`./components/${$selectedComponent?._id}/new`)
+    toggleAddComponent()
  } else if (type === "highlight-setting") {
    store.actions.settings.highlight(data.setting)
@@ -229,9 +229,8 @@
    if (isAddingComponent) {
      $goto(`../${$selectedScreen._id}/components/${$selectedComponent?._id}`)
    } else {
-      $goto(
-        `../${$selectedScreen._id}/components/${$selectedComponent?._id}/new`
-      )
+      const id = $selectedComponent?._id || $selectedScreen?.props?._id
+      $goto(`../${$selectedScreen._id}/components/${id}/new`)
    }
  }

@@ -2,9 +2,18 @@
  import { store } from "builderStore"
  import { ActionMenu, MenuItem, Icon } from "@budibase/bbui"

+  export let component
+
  $: noPaste = !$store.componentToPaste

  const keyboardEvent = (key, ctrlKey = false) => {
+    // Ensure this component is selected first
+    if (component._id !== $store.selectedComponentId) {
+      store.update(state => {
+        state.selectedComponentId = component._id
+        return state
+      })
+    }
    document.dispatchEvent(new KeyboardEvent("keydown", { key, ctrlKey }))
  }
</script>

@@ -44,7 +44,11 @@
    ]
  }

-  function validateInput(email, index) {
+  function validateInput(input, index) {
+    if (input.email) {
+      input.email = input.email.trim()
+    }
+    const email = input.email
    if (email) {
      const res = emailValidator(email)
      if (res === true) {
@@ -95,7 +99,7 @@
        bind:dropdownValue={input.role}
        options={Constants.BudibaseRoleOptions}
        error={input.error}
-        on:blur={() => validateInput(input.email, index)}
+        on:blur={() => validateInput(input, index)}
      />
    </div>
    <div class="icon">

@@ -0,0 +1,73 @@
<script>
import { Body, ModalContent, Table } from "@budibase/bbui"
import { onMount } from "svelte"
export let userData
export let deleteUsersResponse
let successCount
let failureCount
let title
let unsuccessfulUsers
let message
const setTitle = () => {
if (successCount) {
title = `${successCount} users deleted`
} else {
title = "Oops!"
}
}
const setMessage = () => {
if (successCount) {
message = "However there was a problem deleting some users."
} else {
message = "There was a problem deleting some users."
}
}
const setUsers = () => {
unsuccessfulUsers = deleteUsersResponse.unsuccessful.map(user => {
return {
email: user.email,
reason: user.reason,
}
})
}
onMount(() => {
successCount = deleteUsersResponse.successful.length
failureCount = deleteUsersResponse.unsuccessful.length
setTitle()
setMessage()
setUsers()
})
const schema = {
email: {},
reason: {},
}
</script>
<ModalContent
size="M"
{title}
confirmText="Close"
showCloseIcon={false}
showCancelButton={false}
>
<Body size="XS">
{message}
</Body>
<Table
{schema}
data={unsuccessfulUsers}
allowEditColumns={false}
allowEditRows={false}
allowSelectRows={false}
/>
</ModalContent>
<style>
</style>

@@ -62,7 +62,7 @@
      csvString = e.target.result
      files = fileArray

-      userEmails = csvString.split("\n")
+      userEmails = csvString.split(/\r?\n/)
    })
    reader.readAsText(fileArray[0])
  }

@@ -0,0 +1,75 @@
<script>
import { Body, ModalContent, Table } from "@budibase/bbui"
import { onMount } from "svelte"
export let inviteUsersResponse
let hasSuccess
let hasFailure
let title
let failureMessage
let unsuccessfulUsers
const setTitle = () => {
if (hasSuccess) {
title = "Users invited!"
} else if (hasFailure) {
title = "Oops!"
}
}
const setFailureMessage = () => {
if (hasSuccess) {
failureMessage = "However there was a problem inviting some users."
} else {
failureMessage = "There was a problem inviting users."
}
}
const setUsers = () => {
unsuccessfulUsers = inviteUsersResponse.unsuccessful.map(user => {
return {
email: user.email,
reason: user.reason,
}
})
}
onMount(() => {
hasSuccess = inviteUsersResponse.successful.length
hasFailure = inviteUsersResponse.unsuccessful.length
setTitle()
setFailureMessage()
setUsers()
})
const failedSchema = {
email: {},
reason: {},
}
</script>
<ModalContent showCancelButton={false} {title} confirmText="Done">
{#if hasSuccess}
<Body size="XS">
Your users should now receive an email invite to get access to their
Budibase account
</Body>
{/if}
{#if hasFailure}
<Body size="XS">
{failureMessage}
</Body>
<Table
schema={failedSchema}
data={unsuccessfulUsers}
allowEditColumns={false}
allowEditRows={false}
allowSelectRows={false}
/>
{/if}
</ModalContent>
<style>
</style>

@@ -2,24 +2,78 @@
  import { Body, ModalContent, Table, Icon } from "@budibase/bbui"
  import PasswordCopyRenderer from "./PasswordCopyRenderer.svelte"
  import { parseToCsv } from "helpers/data/utils"
+  import { onMount } from "svelte"

  export let userData
+  export let createUsersResponse

-  $: mappedData = userData.map(user => {
-    return {
-      email: user.email,
-      password: user.password,
-    }
-  })
+  let hasSuccess
+  let hasFailure
+  let title
+  let failureMessage
+  let userDataIndex
+  let successfulUsers
+  let unsuccessfulUsers
+
+  const setTitle = () => {
+    if (hasSuccess) {
+      title = "Users created!"
+    } else if (hasFailure) {
+      title = "Oops!"
+    }
+  }
+
+  const setFailureMessage = () => {
+    if (hasSuccess) {
+      failureMessage = "However there was a problem creating some users."
+    } else {
+      failureMessage = "There was a problem creating some users."
+    }
+  }
+
+  const setUsers = () => {
+    userDataIndex = userData.reduce((prev, current) => {
+      prev[current.email] = current
+      return prev
+    }, {})
+
+    successfulUsers = createUsersResponse.successful.map(user => {
+      return {
+        email: user.email,
+        password: userDataIndex[user.email].password,
+      }
+    })
+
+    unsuccessfulUsers = createUsersResponse.unsuccessful.map(user => {
+      return {
+        email: user.email,
+        reason: user.reason,
+      }
+    })
+  }
+
+  onMount(() => {
+    hasSuccess = createUsersResponse.successful.length
+    hasFailure = createUsersResponse.unsuccessful.length
+    setTitle()
+    setFailureMessage()
+    setUsers()
+  })

-  const schema = {
+  const successSchema = {
    email: {},
    password: {},
  }

+  const failedSchema = {
+    email: {},
+    reason: {},
+  }
+
  const downloadCsvFile = () => {
    const fileName = "passwords.csv"
-    const content = parseToCsv(["email", "password"], mappedData)
+    const content = parseToCsv(["email", "password"], successfulUsers)
    download(fileName, content)
  }
@@ -42,36 +96,52 @@
</script>

<ModalContent
-  size="S"
-  title="Accounts created!"
+  size="M"
+  {title}
  confirmText="Done"
  showCancelButton={false}
  cancelText="Cancel"
  showCloseIcon={false}
>
-  <Body size="XS">
-    All your new users can be accessed through the autogenerated passwords. Take
-    note of these passwords or download the CSV file.
-  </Body>
+  {#if hasFailure}
+    <Body size="XS">
+      {failureMessage}
+    </Body>
+    <Table
+      schema={failedSchema}
+      data={unsuccessfulUsers}
+      allowEditColumns={false}
+      allowEditRows={false}
+      allowSelectRows={false}
+    />
+  {/if}
+  {#if hasSuccess}
+    <Body size="XS">
+      All your new users can be accessed through the autogenerated passwords.
+      Take note of these passwords or download the CSV file.
+    </Body>
    <div class="container" on:click={downloadCsvFile}>
      <div class="inner">
        <Icon name="Download" />
        <div style="margin-left: var(--spacing-m)">
          <Body size="XS">Passwords CSV</Body>
        </div>
      </div>
    </div>
    <Table
-      {schema}
-      data={mappedData}
+      schema={successSchema}
+      data={successfulUsers}
      allowEditColumns={false}
      allowEditRows={false}
      allowSelectRows={false}
-      customRenderers={[{ column: "password", component: PasswordCopyRenderer }]}
+      customRenderers={[
+        { column: "password", component: PasswordCopyRenderer },
+      ]}
    />
+  {/if}
</ModalContent>

<style>

@@ -7,7 +7,6 @@
    Table,
    Layout,
    Modal,
-    ModalContent,
    Search,
    notifications,
    Pagination,
@@ -23,6 +22,8 @@
  import { goto } from "@roxi/routify"
  import OnboardingTypeModal from "./_components/OnboardingTypeModal.svelte"
  import PasswordModal from "./_components/PasswordModal.svelte"
+  import InvitedModal from "./_components/InvitedModal.svelte"
+  import DeletionFailureModal from "./_components/DeletionFailureModal.svelte"
  import ImportUsersModal from "./_components/ImportUsersModal.svelte"
  import { createPaginationStore } from "helpers/pagination"
  import { get } from "svelte/store"
@@ -33,7 +34,8 @@
    inviteConfirmationModal,
    onboardingTypeModal,
    passwordModal,
-    importUsersModal
+    importUsersModal,
+    deletionFailureModal
  let pageInfo = createPaginationStore()
  let prevEmail = undefined,
    searchEmail = undefined
@@ -55,6 +57,9 @@
    apps: {},
  }
  $: userData = []
+  $: createUsersResponse = { successful: [], unsuccessful: [] }
+  $: deleteUsersResponse = { successful: [], unsuccessful: [] }
+  $: inviteUsersResponse = { successful: [], unsuccessful: [] }
  $: page = $pageInfo.page
  $: fetchUsers(page, searchEmail)
  $: {
@@ -92,8 +97,7 @@
      admin: user.role === Constants.BudibaseRoles.Admin,
    }))
    try {
-      const res = await users.invite(payload)
-      notifications.success(res.message)
+      inviteUsersResponse = await users.invite(payload)
      inviteConfirmationModal.show()
    } catch (error) {
      notifications.error("Error inviting user")
@@ -116,8 +120,9 @@
      newUsers.push(user)
    }

-    if (!newUsers.length)
+    if (!newUsers.length) {
      notifications.info("Duplicated! There is no new users to add.")
+    }

    return { ...userData, users: newUsers }
  }
@@ -139,12 +144,14 @@
    userData = await removingDuplicities({ groups, users })
    if (!userData.users.length) return

-    return createUser()
+    return createUsers()
  }

-  async function createUser() {
+  async function createUsers() {
    try {
-      await users.create(await removingDuplicities(userData))
+      createUsersResponse = await users.create(
+        await removingDuplicities(userData)
+      )
      notifications.success("Successfully created user")
      await groups.actions.init()
      passwordModal.show()
@@ -157,7 +164,7 @@
    if (onboardingType === "emailOnboarding") {
      createUserFlow()
    } else {
-      await createUser()
+      await createUsers()
    }
  }
@@ -176,8 +183,15 @@
        notifications.error("You cannot delete yourself")
        return
      }
-      await users.bulkDelete(ids)
-      notifications.success(`Successfully deleted ${selectedRows.length} rows`)
+      deleteUsersResponse = await users.bulkDelete(ids)
+      if (deleteUsersResponse.unsuccessful?.length) {
+        deletionFailureModal.show()
+      } else {
+        notifications.success(
+          `Successfully deleted ${selectedRows.length} users`
+        )
+      }
      selectedRows = []
      await fetchUsers(page, searchEmail)
    } catch (error) {
@@ -267,16 +281,7 @@
</Modal>

<Modal bind:this={inviteConfirmationModal}>
-  <ModalContent
-    showCancelButton={false}
-    title="Invites sent!"
-    confirmText="Done"
-  >
-    <Body size="S"
-      >Your users should now recieve an email invite to get access to their
-      Budibase account</Body
-    ></ModalContent
-  >
+  <InvitedModal {inviteUsersResponse} />
</Modal>

<Modal bind:this={onboardingTypeModal}>
@@ -284,7 +289,11 @@
</Modal>

<Modal bind:this={passwordModal}>
-  <PasswordModal userData={userData.users} />
+  <PasswordModal {createUsersResponse} userData={userData.users} />
+</Modal>
+
+<Modal bind:this={deletionFailureModal}>
+  <DeletionFailureModal {deleteUsersResponse} />
</Modal>

<Modal bind:this={importUsersModal}>

@@ -63,10 +63,14 @@ export function createUsersStore() {
      return body
    })

-    await API.createUsers({ users: mappedUsers, groups: data.groups })
+    const response = await API.createUsers({
+      users: mappedUsers,
+      groups: data.groups,
+    })

    // re-search from first page
    await search()
+    return response
  }

  async function del(id) {
@@ -79,7 +83,7 @@
  }

  async function bulkDelete(userIds) {
-    await API.deleteUsers(userIds)
+    return API.deleteUsers(userIds)
  }

  async function save(user) {

@@ -1,6 +1,6 @@
{
  "name": "@budibase/cli",
-  "version": "1.2.44-alpha.1",
+  "version": "1.2.58-alpha.6",
  "description": "Budibase CLI, for developers, self hosting and migrations.",
  "main": "src/index.js",
  "bin": {
@@ -26,9 +26,9 @@
    "outputPath": "build"
  },
  "dependencies": {
-    "@budibase/backend-core": "1.2.44-alpha.1",
-    "@budibase/string-templates": "1.2.44-alpha.1",
-    "@budibase/types": "1.2.44-alpha.1",
+    "@budibase/backend-core": "1.2.58-alpha.6",
+    "@budibase/string-templates": "1.2.58-alpha.6",
+    "@budibase/types": "1.2.58-alpha.6",
    "axios": "0.21.2",
    "chalk": "4.1.0",
    "cli-progress": "3.11.2",

@@ -1,6 +1,6 @@
{
  "name": "@budibase/client",
-  "version": "1.2.44-alpha.1",
+  "version": "1.2.58-alpha.6",
  "license": "MPL-2.0",
  "module": "dist/budibase-client.js",
  "main": "dist/budibase-client.js",
@@ -19,9 +19,9 @@
    "dev:builder": "rollup -cw"
  },
  "dependencies": {
-    "@budibase/bbui": "1.2.44-alpha.1",
-    "@budibase/frontend-core": "1.2.44-alpha.1",
-    "@budibase/string-templates": "1.2.44-alpha.1",
+    "@budibase/bbui": "1.2.58-alpha.6",
+    "@budibase/frontend-core": "1.2.58-alpha.6",
+    "@budibase/string-templates": "1.2.58-alpha.6",
    "@spectrum-css/button": "^3.0.3",
    "@spectrum-css/card": "^3.0.3",
    "@spectrum-css/divider": "^1.0.3",

@@ -59,8 +59,8 @@
  }

  const handleChange = e => {
-    fieldApi.setValue(e.detail)
-    if (onChange) {
+    const changed = fieldApi.setValue(e.detail)
+    if (onChange && changed) {
      onChange({ value: e.detail })
    }
  }
View file
@ -28,8 +28,8 @@
} }
const handleChange = e => { const handleChange = e => {
fieldApi.setValue(e.detail) const changed = fieldApi.setValue(e.detail)
if (onChange) { if (onChange && changed) {
onChange({ value: e.detail }) onChange({ value: e.detail })
} }
} }
View file
@ -18,8 +18,8 @@
let fieldApi let fieldApi
const handleChange = e => { const handleChange = e => {
fieldApi.setValue(e.detail) const changed = fieldApi.setValue(e.detail)
if (onChange) { if (onChange && changed) {
onChange({ value: e.detail }) onChange({ value: e.detail })
} }
} }
View file
@ -268,7 +268,7 @@
// Skip if the value is the same // Skip if the value is the same
if (!skipCheck && fieldState.value === value) { if (!skipCheck && fieldState.value === value) {
return return false
} }
// Update field state // Update field state
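The repeated handleChange edits in the surrounding field components all rely on this contract. A minimal standalone sketch of it, with assumed names (only the boolean return of setValue is taken from the change itself):

type OnChange = (change: { value: unknown }) => void

interface FieldApi {
  setValue: (value: unknown) => boolean
}

// Form-side behaviour: setValue reports whether anything actually changed
function createFieldApi(initial: unknown): FieldApi {
  let current = initial
  return {
    setValue: value => {
      // Skip if the value is the same, mirroring the check above
      if (current === value) {
        return false
      }
      current = value
      return true
    },
  }
}

// Component-side handler: only fire onChange for real changes
function makeHandleChange(fieldApi: FieldApi, onChange?: OnChange) {
  return (value: unknown) => {
    const changed = fieldApi.setValue(value)
    if (onChange && changed) {
      onChange({ value })
    }
  }
}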
View file
@ -37,8 +37,8 @@
const handleChange = e => { const handleChange = e => {
const value = parseValue(e.detail) const value = parseValue(e.detail)
fieldApi.setValue(value) const changed = fieldApi.setValue(value)
if (onChange) { if (onChange && changed) {
onChange({ value }) onChange({ value })
} }
} }
View file
@ -47,8 +47,8 @@
} }
const handleChange = e => { const handleChange = e => {
fieldApi.setValue(e.detail) const changed = fieldApi.setValue(e.detail)
if (onChange) { if (onChange && changed) {
onChange({ value: e.detail }) onChange({ value: e.detail })
} }
} }
View file
@ -44,8 +44,8 @@
} }
const handleChange = e => { const handleChange = e => {
fieldApi.setValue(e.detail) const changed = fieldApi.setValue(e.detail)
if (onChange) { if (onChange && changed) {
onChange({ value: e.detail }) onChange({ value: e.detail })
} }
} }
View file
@ -34,8 +34,8 @@
) )
const handleChange = e => { const handleChange = e => {
fieldApi.setValue(e.detail) const changed = fieldApi.setValue(e.detail)
if (onChange) { if (onChange && changed) {
onChange({ value: e.detail }) onChange({ value: e.detail })
} }
} }
@ -77,6 +77,7 @@
{direction} {direction}
on:change={handleChange} on:change={handleChange}
getOptionLabel={flatOptions ? x => x : x => x.label} getOptionLabel={flatOptions ? x => x : x => x.label}
getOptionTitle={flatOptions ? x => x : x => x.label}
getOptionValue={flatOptions ? x => x : x => x.value} getOptionValue={flatOptions ? x => x : x => x.value}
/> />
{/if} {/if}
View file
@ -84,8 +84,8 @@
} }
const handleChange = value => { const handleChange = value => {
fieldApi.setValue(value) const changed = fieldApi.setValue(value)
if (onChange) { if (onChange && changed) {
onChange({ value }) onChange({ value })
} }
} }
View file
@ -90,8 +90,8 @@
} }
const handleChange = e => { const handleChange = e => {
fieldApi.setValue(e.detail) const changed = fieldApi.setValue(e.detail)
if (onChange) { if (onChange && changed) {
onChange({ value: e.detail }) onChange({ value: e.detail })
} }
} }
View file
@ -16,8 +16,8 @@
let fieldApi let fieldApi
const handleChange = e => { const handleChange = e => {
fieldApi.setValue(e.detail) const changed = fieldApi.setValue(e.detail)
if (onChange) { if (onChange && changed) {
onChange({ value: e.detail }) onChange({ value: e.detail })
} }
} }
@ -29,7 +29,6 @@
{disabled} {disabled}
{validation} {validation}
{defaultValue} {defaultValue}
{onChange}
type={type === "number" ? "number" : "string"} type={type === "number" ? "number" : "string"}
bind:fieldState bind:fieldState
bind:fieldApi bind:fieldApi
View file
@ -1,12 +1,12 @@
{ {
"name": "@budibase/frontend-core", "name": "@budibase/frontend-core",
"version": "1.2.44-alpha.1", "version": "1.2.58-alpha.6",
"description": "Budibase frontend core libraries used in builder and client", "description": "Budibase frontend core libraries used in builder and client",
"author": "Budibase", "author": "Budibase",
"license": "MPL-2.0", "license": "MPL-2.0",
"svelte": "src/index.js", "svelte": "src/index.js",
"dependencies": { "dependencies": {
"@budibase/bbui": "1.2.44-alpha.1", "@budibase/bbui": "1.2.58-alpha.6",
"lodash": "^4.17.21", "lodash": "^4.17.21",
"svelte": "^3.46.2" "svelte": "^3.46.2"
} }
View file
@ -72,7 +72,7 @@ const cleanupQuery = query => {
continue continue
} }
for (let [key, value] of Object.entries(query[filterField])) { for (let [key, value] of Object.entries(query[filterField])) {
if (!value || value === "") { if (value == null || value === "") {
delete query[filterField][key] delete query[filterField][key]
} }
} }
@ -186,7 +186,7 @@ export const runLuceneQuery = (docs, query) => {
return docs return docs
} }
// make query consistent first // Make query consistent first
query = cleanupQuery(query) query = cleanupQuery(query)
// Iterates over a set of filters and evaluates a fail function against a doc // Iterates over a set of filters and evaluates a fail function against a doc
@ -218,7 +218,12 @@ export const runLuceneQuery = (docs, query) => {
// Process a range match // Process a range match
const rangeMatch = match("range", (docValue, testValue) => { const rangeMatch = match("range", (docValue, testValue) => {
return !docValue || docValue < testValue.low || docValue > testValue.high return (
docValue == null ||
docValue === "" ||
docValue < testValue.low ||
docValue > testValue.high
)
}) })
// Process an equal match (fails if the value is different) // Process an equal match (fails if the value is different)
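A rough standalone illustration of the null and empty-string handling introduced above (function name is illustrative, not the exported API): a plain !docValue check would treat 0 or false as missing, so the comparison is against null and "" instead.

interface RangeFilter {
  low: number
  high: number
}

// Fail the range filter only when the value is genuinely missing or out of range
function failsRange(docValue: unknown, testValue: RangeFilter): boolean {
  return (
    docValue == null ||
    docValue === "" ||
    (docValue as number) < testValue.low ||
    (docValue as number) > testValue.high
  )
}

// e.g. failsRange(0, { low: 0, high: 10 }) === false, whereas a !docValue check
// would have treated 0 as a missing value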
View file
@ -33,7 +33,7 @@ module MongoMock {
}) })
} }
mongodb.ObjectID = require("mongodb").ObjectID mongodb.ObjectID = jest.requireActual("mongodb").ObjectID
module.exports = mongodb module.exports = mongodb
} }
View file
@ -1,7 +1,7 @@
{ {
"name": "@budibase/server", "name": "@budibase/server",
"email": "hi@budibase.com", "email": "hi@budibase.com",
"version": "1.2.44-alpha.1", "version": "1.2.58-alpha.6",
"description": "Budibase Web Server", "description": "Budibase Web Server",
"main": "src/index.ts", "main": "src/index.ts",
"repository": { "repository": {
@ -77,11 +77,11 @@
"license": "GPL-3.0", "license": "GPL-3.0",
"dependencies": { "dependencies": {
"@apidevtools/swagger-parser": "10.0.3", "@apidevtools/swagger-parser": "10.0.3",
"@budibase/backend-core": "1.2.44-alpha.1", "@budibase/backend-core": "1.2.58-alpha.6",
"@budibase/client": "1.2.44-alpha.1", "@budibase/client": "1.2.58-alpha.6",
"@budibase/pro": "1.2.44-alpha.1", "@budibase/pro": "1.2.58-alpha.6",
"@budibase/string-templates": "1.2.44-alpha.1", "@budibase/string-templates": "1.2.58-alpha.6",
"@budibase/types": "1.2.44-alpha.1", "@budibase/types": "1.2.58-alpha.6",
"@bull-board/api": "3.7.0", "@bull-board/api": "3.7.0",
"@bull-board/koa": "3.9.4", "@bull-board/koa": "3.9.4",
"@elastic/elasticsearch": "7.10.0", "@elastic/elasticsearch": "7.10.0",
@ -100,6 +100,7 @@
"curlconverter": "3.21.0", "curlconverter": "3.21.0",
"dotenv": "8.2.0", "dotenv": "8.2.0",
"download": "8.0.0", "download": "8.0.0",
"elastic-apm-node": "3.38.0",
"fix-path": "3.0.0", "fix-path": "3.0.0",
"form-data": "4.0.0", "form-data": "4.0.0",
"fs-extra": "8.1.0", "fs-extra": "8.1.0",
View file
@ -47,7 +47,14 @@ import { checkAppMetadata } from "../../automations/logging"
import { getUniqueRows } from "../../utilities/usageQuota/rows" import { getUniqueRows } from "../../utilities/usageQuota/rows"
import { quotas } from "@budibase/pro" import { quotas } from "@budibase/pro"
import { errors, events, migrations } from "@budibase/backend-core" import { errors, events, migrations } from "@budibase/backend-core"
import { App, MigrationType } from "@budibase/types" import {
App,
Layout,
Screen,
MigrationType,
AppNavigation,
} from "@budibase/types"
import { BASE_LAYOUT_PROP_IDS } from "../../constants/layouts"
const URL_REGEX_SLASH = /\/|\\/g const URL_REGEX_SLASH = /\/|\\/g
@ -243,27 +250,19 @@ const performAppCreate = async (ctx: any) => {
} }
const instance = await createInstance(instanceConfig) const instance = await createInstance(instanceConfig)
const appId = instance._id const appId = instance._id
const db = context.getAppDB() const db = context.getAppDB()
let _rev
try { let newApplication: App = {
// if template there will be an existing doc
const existing = await db.get(DocumentType.APP_METADATA)
_rev = existing._rev
} catch (err) {
// nothing to do
}
const newApplication: App = {
_id: DocumentType.APP_METADATA, _id: DocumentType.APP_METADATA,
_rev, _rev: undefined,
appId: instance._id, appId,
type: "app", type: "app",
version: packageJson.version, version: packageJson.version,
componentLibraries: ["@budibase/standard-components"], componentLibraries: ["@budibase/standard-components"],
name: name, name: name,
url: url, url: url,
template: ctx.request.body.template, template: templateKey,
instance: instance, instance,
tenantId: getTenantId(), tenantId: getTenantId(),
updatedAt: new Date().toISOString(), updatedAt: new Date().toISOString(),
createdAt: new Date().toISOString(), createdAt: new Date().toISOString(),
@ -285,6 +284,36 @@ const performAppCreate = async (ctx: any) => {
buttonBorderRadius: "16px", buttonBorderRadius: "16px",
}, },
} }
// If we used a template or imported an app there will be an existing doc.
// Fetch and migrate some metadata from the existing app.
try {
const existing: App = await db.get(DocumentType.APP_METADATA)
const keys: (keyof App)[] = [
"_rev",
"navigation",
"theme",
"customTheme",
"icon",
]
keys.forEach(key => {
if (existing[key]) {
// @ts-ignore
newApplication[key] = existing[key]
}
})
// Migrate navigation settings and screens if required
if (existing && !existing.navigation) {
const navigation = await migrateAppNavigation()
if (navigation) {
newApplication.navigation = navigation
}
}
} catch (err) {
// Nothing to do
}
const response = await db.put(newApplication, { force: true }) const response = await db.put(newApplication, { force: true })
newApplication._rev = response.rev newApplication._rev = response.rev
@ -567,3 +596,55 @@ export const updateAppPackage = async (appPackage: any, appId: any) => {
return newAppPackage return newAppPackage
}) })
} }
const migrateAppNavigation = async () => {
const db = context.getAppDB()
const existing: App = await db.get(DocumentType.APP_METADATA)
const layouts: Layout[] = await getLayouts()
const screens: Screen[] = await getScreens()
// Migrate all screens, removing custom layouts
for (let screen of screens) {
if (!screen.layoutId) {
return
}
const layout = layouts.find(layout => layout._id === screen.layoutId)
screen.layoutId = undefined
screen.showNavigation = layout?.props.navigation !== "None"
screen.width = layout?.props.width || "Large"
await db.put(screen)
}
// Migrate layout navigation settings
const { name, customTheme } = existing
const layout = layouts?.find(
(layout: Layout) => layout._id === BASE_LAYOUT_PROP_IDS.PRIVATE
)
if (layout) {
let navigationSettings: any = {
navigation: "Top",
title: name,
navWidth: "Large",
navBackground:
customTheme?.navBackground || "var(--spectrum-global-color-gray-50)",
navTextColor:
customTheme?.navTextColor || "var(--spectrum-global-color-gray-800)",
}
if (layout) {
navigationSettings.hideLogo = layout.props.hideLogo
navigationSettings.hideTitle = layout.props.hideTitle
navigationSettings.title = layout.props.title || name
navigationSettings.logoUrl = layout.props.logoUrl
navigationSettings.links = layout.props.links
navigationSettings.navigation = layout.props.navigation || "Top"
navigationSettings.sticky = layout.props.sticky
navigationSettings.navWidth = layout.props.width || "Large"
if (navigationSettings.navigation === "None") {
navigationSettings.navigation = "Top"
}
}
return navigationSettings
} else {
return null
}
}
View file
@ -4,50 +4,54 @@ const { getAppDB } = require("@budibase/backend-core/context")
const { getGlobalDB } = require("@budibase/backend-core/tenancy") const { getGlobalDB } = require("@budibase/backend-core/tenancy")
exports.fetchAppComponentDefinitions = async function (ctx) { exports.fetchAppComponentDefinitions = async function (ctx) {
const db = getAppDB() try {
const app = await db.get(DocumentType.APP_METADATA) const db = getAppDB()
const app = await db.get(DocumentType.APP_METADATA)
let componentManifests = await Promise.all( let componentManifests = await Promise.all(
app.componentLibraries.map(async library => { app.componentLibraries.map(async library => {
let manifest = await getComponentLibraryManifest(library) let manifest = await getComponentLibraryManifest(library)
return { return {
manifest, manifest,
library, library,
} }
}) })
) )
const definitions = {} const definitions = {}
for (let { manifest, library } of componentManifests) { for (let { manifest, library } of componentManifests) {
for (let key of Object.keys(manifest)) { for (let key of Object.keys(manifest)) {
if (key === "features") { if (key === "features") {
definitions[key] = manifest[key] definitions[key] = manifest[key]
} else { } else {
const fullComponentName = `${library}/${key}`.toLowerCase() const fullComponentName = `${library}/${key}`.toLowerCase()
definitions[fullComponentName] = { definitions[fullComponentName] = {
component: fullComponentName, component: fullComponentName,
...manifest[key], ...manifest[key],
}
} }
} }
} }
// Add custom components
const globalDB = getGlobalDB()
const response = await globalDB.allDocs(
getPluginParams(null, {
include_docs: true,
})
)
response.rows
.map(row => row.doc)
.filter(plugin => plugin.schema.type === "component")
.forEach(plugin => {
const fullComponentName = `plugin/${plugin.name}/${plugin.version}`
definitions[fullComponentName] = {
component: fullComponentName,
...plugin.schema.schema,
}
})
ctx.body = definitions
} catch (err) {
console.error(`component-definitions=failed`, err)
} }
// Add custom components
const globalDB = getGlobalDB()
const response = await globalDB.allDocs(
getPluginParams(null, {
include_docs: true,
})
)
response.rows
.map(row => row.doc)
.filter(plugin => plugin.schema.type === "component")
.forEach(plugin => {
const fullComponentName = `plugin/${plugin.name}/${plugin.version}`
definitions[fullComponentName] = {
component: fullComponentName,
...plugin.schema.schema,
}
})
ctx.body = definitions
} }
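For reference, a sketch of the shape this endpoint now returns once plugin components are merged in (the component and plugin names below are made up):

// Built-ins are keyed by `${library}/${name}` lowercased, plugins by
// `plugin/${name}/${version}`, each spreading its manifest/schema entry
const exampleDefinitions: Record<string, { component: string }> = {
  "@budibase/standard-components/button": {
    component: "@budibase/standard-components/button",
    // ...manifest entry fields
  },
  "plugin/my-custom-chart/1.0.0": {
    component: "plugin/my-custom-chart/1.0.0",
    // ...plugin.schema.schema fields
  },
}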
View file
@ -375,6 +375,7 @@ exports.exportRows = async ctx => {
const table = await db.get(ctx.params.tableId) const table = await db.get(ctx.params.tableId)
const rowIds = ctx.request.body.rows const rowIds = ctx.request.body.rows
let format = ctx.query.format let format = ctx.query.format
const { columns } = ctx.request.body
let response = ( let response = (
await db.allDocs({ await db.allDocs({
include_docs: true, include_docs: true,
@ -382,7 +383,20 @@ exports.exportRows = async ctx => {
}) })
).rows.map(row => row.doc) ).rows.map(row => row.doc)
let rows = await outputProcessing(table, response) let result = await outputProcessing(table, response)
let rows = []
// Filter data to only specified columns if required
if (columns && columns.length) {
for (let i = 0; i < result.length; i++) {
rows[i] = {}
for (let column of columns) {
rows[i][column] = result[i][column]
}
}
} else {
rows = result
}
let headers = Object.keys(rows[0]) let headers = Object.keys(rows[0])
const exporter = exporters[format] const exporter = exporters[format]
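A hedged sketch of calling this endpoint with the new columns filter (the route and body fields match the tests later in this diff; the auth headers and column names are placeholders):

// Export selected rows as JSON, limited to the _id and name columns
async function exportSelectedRows(tableId: string, rowIds: string[]) {
  const res = await fetch(`/api/${tableId}/rows/exportRows?format=json`, {
    method: "POST",
    headers: { "Content-Type": "application/json" /* plus auth headers */ },
    body: JSON.stringify({
      rows: rowIds,
      columns: ["_id", "name"], // omit to export every column
    }),
  })
  return res.json()
}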
View file
@ -8,7 +8,7 @@ const { AccessController } = require("@budibase/backend-core/roles")
const { getAppDB } = require("@budibase/backend-core/context") const { getAppDB } = require("@budibase/backend-core/context")
const { events } = require("@budibase/backend-core") const { events } = require("@budibase/backend-core")
const { getGlobalDB } = require("@budibase/backend-core/tenancy") const { getGlobalDB } = require("@budibase/backend-core/tenancy")
import { updateAppPackage } from "./application" const { updateAppPackage } = require("./application")
exports.fetch = async ctx => { exports.fetch = async ctx => {
const db = getAppDB() const db = getAppDB()
View file
@ -17,6 +17,7 @@ const {
checkBuilderEndpoint, checkBuilderEndpoint,
} = require("./utilities/TestFunctions") } = require("./utilities/TestFunctions")
const setup = require("./utilities") const setup = require("./utilities")
const { basicScreen, basicLayout } = setup.structures
const { AppStatus } = require("../../../db/utils") const { AppStatus } = require("../../../db/utils")
const { events } = require("@budibase/backend-core") const { events } = require("@budibase/backend-core")
@ -81,6 +82,31 @@ describe("/applications", () => {
body: { name: "My App" }, body: { name: "My App" },
}) })
}) })
it("migrates navigation settings from old apps", async () => {
const res = await request
.post("/api/applications")
.field("name", "Old App")
.field("useTemplate", "true")
.set(config.defaultHeaders())
.attach("templateFile", "src/api/routes/tests/data/old-app.txt")
.expect("Content-Type", /json/)
.expect(200)
expect(res.body._id).toBeDefined()
expect(res.body.navigation).toBeDefined()
expect(res.body.navigation.hideLogo).toBe(true)
expect(res.body.navigation.title).toBe("Custom Title")
expect(res.body.navigation.hideLogo).toBe(true)
expect(res.body.navigation.navigation).toBe("Left")
expect(res.body.navigation.navBackground).toBe(
"var(--spectrum-global-color-blue-600)"
)
expect(res.body.navigation.navTextColor).toBe(
"var(--spectrum-global-color-gray-50)"
)
expect(events.app.created).toBeCalledTimes(1)
expect(events.app.fileImported).toBeCalledTimes(1)
})
}) })
describe("fetch", () => { describe("fetch", () => {
File diff suppressed because one or more lines are too long
View file
@ -3,7 +3,12 @@ const setup = require("./utilities")
const { basicRow } = setup.structures const { basicRow } = setup.structures
const { doInAppContext } = require("@budibase/backend-core/context") const { doInAppContext } = require("@budibase/backend-core/context")
const { doInTenant } = require("@budibase/backend-core/tenancy") const { doInTenant } = require("@budibase/backend-core/tenancy")
const { quotas, QuotaUsageType, StaticQuotaName, MonthlyQuotaName } = require("@budibase/pro") const {
quotas,
QuotaUsageType,
StaticQuotaName,
MonthlyQuotaName,
} = require("@budibase/pro")
describe("/rows", () => { describe("/rows", () => {
let request = setup.getRequest() let request = setup.getRequest()
@ -23,23 +28,30 @@ describe("/rows", () => {
await request await request
.get(`/api/${table._id}/rows/${id}`) .get(`/api/${table._id}/rows/${id}`)
.set(config.defaultHeaders()) .set(config.defaultHeaders())
.expect('Content-Type', /json/) .expect("Content-Type", /json/)
.expect(status) .expect(status)
const getRowUsage = async () => { const getRowUsage = async () => {
return config.doInContext(null, () => quotas.getCurrentUsageValue(QuotaUsageType.STATIC, StaticQuotaName.ROWS)) return config.doInContext(null, () =>
quotas.getCurrentUsageValue(QuotaUsageType.STATIC, StaticQuotaName.ROWS)
)
} }
const getQueryUsage = async () => { const getQueryUsage = async () => {
return config.doInContext(null, () => quotas.getCurrentUsageValue(QuotaUsageType.MONTHLY, MonthlyQuotaName.QUERIES)) return config.doInContext(null, () =>
quotas.getCurrentUsageValue(
QuotaUsageType.MONTHLY,
MonthlyQuotaName.QUERIES
)
)
} }
const assertRowUsage = async (expected) => { const assertRowUsage = async expected => {
const usage = await getRowUsage() const usage = await getRowUsage()
expect(usage).toBe(expected) expect(usage).toBe(expected)
} }
const assertQueryUsage = async (expected) => { const assertQueryUsage = async expected => {
const usage = await getQueryUsage() const usage = await getQueryUsage()
expect(usage).toBe(expected) expect(usage).toBe(expected)
} }
@ -76,10 +88,12 @@ describe("/rows", () => {
name: "Updated Name", name: "Updated Name",
}) })
.set(config.defaultHeaders()) .set(config.defaultHeaders())
.expect('Content-Type', /json/) .expect("Content-Type", /json/)
.expect(200) .expect(200)
expect(res.res.statusMessage).toEqual(`${table.name} updated successfully.`) expect(res.res.statusMessage).toEqual(
`${table.name} updated successfully.`
)
expect(res.body.name).toEqual("Updated Name") expect(res.body.name).toEqual("Updated Name")
// await assertRowUsage(rowUsage) // await assertRowUsage(rowUsage)
// await assertQueryUsage(queryUsage + 1) // await assertQueryUsage(queryUsage + 1)
@ -92,7 +106,7 @@ describe("/rows", () => {
const res = await request const res = await request
.get(`/api/${table._id}/rows/${existing._id}`) .get(`/api/${table._id}/rows/${existing._id}`)
.set(config.defaultHeaders()) .set(config.defaultHeaders())
.expect('Content-Type', /json/) .expect("Content-Type", /json/)
.expect(200) .expect(200)
expect(res.body).toEqual({ expect(res.body).toEqual({
@ -110,7 +124,7 @@ describe("/rows", () => {
const newRow = { const newRow = {
tableId: table._id, tableId: table._id,
name: "Second Contact", name: "Second Contact",
status: "new" status: "new",
} }
await config.createRow() await config.createRow()
await config.createRow(newRow) await config.createRow(newRow)
@ -119,7 +133,7 @@ describe("/rows", () => {
const res = await request const res = await request
.get(`/api/${table._id}/rows`) .get(`/api/${table._id}/rows`)
.set(config.defaultHeaders()) .set(config.defaultHeaders())
.expect('Content-Type', /json/) .expect("Content-Type", /json/)
.expect(200) .expect(200)
expect(res.body.length).toBe(2) expect(res.body.length).toBe(2)
@ -135,17 +149,36 @@ describe("/rows", () => {
await request await request
.get(`/api/${table._id}/rows/not-a-valid-id`) .get(`/api/${table._id}/rows/not-a-valid-id`)
.set(config.defaultHeaders()) .set(config.defaultHeaders())
.expect('Content-Type', /json/) .expect("Content-Type", /json/)
.expect(404) .expect(404)
await assertQueryUsage(queryUsage) // no change await assertQueryUsage(queryUsage) // no change
}) })
it("row values are coerced", async () => { it("row values are coerced", async () => {
const str = {type:"string", constraints: { type: "string", presence: false }} const str = {
const attachment = {type:"attachment", constraints: { type: "array", presence: false }} type: "string",
const bool = {type:"boolean", constraints: { type: "boolean", presence: false }} constraints: { type: "string", presence: false },
const number = {type:"number", constraints: { type: "number", presence: false }} }
const datetime = {type:"datetime", constraints: { type: "string", presence: false, datetime: {earliest:"", latest: ""} }} const attachment = {
type: "attachment",
constraints: { type: "array", presence: false },
}
const bool = {
type: "boolean",
constraints: { type: "boolean", presence: false },
}
const number = {
type: "number",
constraints: { type: "number", presence: false },
}
const datetime = {
type: "datetime",
constraints: {
type: "string",
presence: false,
datetime: { earliest: "", latest: "" },
},
}
table = await config.createTable({ table = await config.createTable({
name: "TestTable2", name: "TestTable2",
@ -171,9 +204,9 @@ describe("/rows", () => {
boolUndefined: bool, boolUndefined: bool,
boolString: bool, boolString: bool,
boolBool: bool, boolBool: bool,
attachmentNull : attachment, attachmentNull: attachment,
attachmentUndefined : attachment, attachmentUndefined: attachment,
attachmentEmpty : attachment, attachmentEmpty: attachment,
}, },
}) })
@ -198,9 +231,9 @@ describe("/rows", () => {
boolString: "true", boolString: "true",
boolBool: true, boolBool: true,
tableId: table._id, tableId: table._id,
attachmentNull : null, attachmentNull: null,
attachmentUndefined : undefined, attachmentUndefined: undefined,
attachmentEmpty : "", attachmentEmpty: "",
} }
const id = (await config.createRow(row))._id const id = (await config.createRow(row))._id
@ -218,7 +251,9 @@ describe("/rows", () => {
expect(saved.datetimeEmptyString).toBe(null) expect(saved.datetimeEmptyString).toBe(null)
expect(saved.datetimeNull).toBe(null) expect(saved.datetimeNull).toBe(null)
expect(saved.datetimeUndefined).toBe(undefined) expect(saved.datetimeUndefined).toBe(undefined)
expect(saved.datetimeString).toBe(new Date(row.datetimeString).toISOString()) expect(saved.datetimeString).toBe(
new Date(row.datetimeString).toISOString()
)
expect(saved.datetimeDate).toBe(row.datetimeDate.toISOString()) expect(saved.datetimeDate).toBe(row.datetimeDate.toISOString())
expect(saved.boolNull).toBe(null) expect(saved.boolNull).toBe(null)
expect(saved.boolEmpty).toBe(null) expect(saved.boolEmpty).toBe(null)
@ -247,10 +282,12 @@ describe("/rows", () => {
name: "Updated Name", name: "Updated Name",
}) })
.set(config.defaultHeaders()) .set(config.defaultHeaders())
.expect('Content-Type', /json/) .expect("Content-Type", /json/)
.expect(200) .expect(200)
expect(res.res.statusMessage).toEqual(`${table.name} updated successfully.`) expect(res.res.statusMessage).toEqual(
`${table.name} updated successfully.`
)
expect(res.body.name).toEqual("Updated Name") expect(res.body.name).toEqual("Updated Name")
expect(res.body.description).toEqual(existing.description) expect(res.body.description).toEqual(existing.description)
@ -292,16 +329,14 @@ describe("/rows", () => {
const res = await request const res = await request
.delete(`/api/${table._id}/rows`) .delete(`/api/${table._id}/rows`)
.send({ .send({
rows: [ rows: [createdRow],
createdRow
]
}) })
.set(config.defaultHeaders()) .set(config.defaultHeaders())
.expect('Content-Type', /json/) .expect("Content-Type", /json/)
.expect(200) .expect(200)
expect(res.body[0]._id).toEqual(createdRow._id) expect(res.body[0]._id).toEqual(createdRow._id)
await assertRowUsage(rowUsage -1) await assertRowUsage(rowUsage - 1)
await assertQueryUsage(queryUsage +1) await assertQueryUsage(queryUsage + 1)
}) })
}) })
@ -314,9 +349,9 @@ describe("/rows", () => {
.post(`/api/${table._id}/rows/validate`) .post(`/api/${table._id}/rows/validate`)
.send({ name: "ivan" }) .send({ name: "ivan" })
.set(config.defaultHeaders()) .set(config.defaultHeaders())
.expect('Content-Type', /json/) .expect("Content-Type", /json/)
.expect(200) .expect(200)
expect(res.body.valid).toBe(true) expect(res.body.valid).toBe(true)
expect(Object.keys(res.body.errors)).toEqual([]) expect(Object.keys(res.body.errors)).toEqual([])
await assertRowUsage(rowUsage) await assertRowUsage(rowUsage)
@ -331,9 +366,9 @@ describe("/rows", () => {
.post(`/api/${table._id}/rows/validate`) .post(`/api/${table._id}/rows/validate`)
.send({ name: 1 }) .send({ name: 1 })
.set(config.defaultHeaders()) .set(config.defaultHeaders())
.expect('Content-Type', /json/) .expect("Content-Type", /json/)
.expect(200) .expect(200)
expect(res.body.valid).toBe(false) expect(res.body.valid).toBe(false)
expect(Object.keys(res.body.errors)).toEqual(["name"]) expect(Object.keys(res.body.errors)).toEqual(["name"])
await assertRowUsage(rowUsage) await assertRowUsage(rowUsage)
@ -351,19 +386,16 @@ describe("/rows", () => {
const res = await request const res = await request
.delete(`/api/${table._id}/rows`) .delete(`/api/${table._id}/rows`)
.send({ .send({
rows: [ rows: [row1, row2],
row1,
row2,
]
}) })
.set(config.defaultHeaders()) .set(config.defaultHeaders())
.expect('Content-Type', /json/) .expect("Content-Type", /json/)
.expect(200) .expect(200)
expect(res.body.length).toEqual(2) expect(res.body.length).toEqual(2)
await loadRow(row1._id, 404) await loadRow(row1._id, 404)
await assertRowUsage(rowUsage - 2) await assertRowUsage(rowUsage - 2)
await assertQueryUsage(queryUsage +1) await assertQueryUsage(queryUsage + 1)
}) })
}) })
@ -376,12 +408,12 @@ describe("/rows", () => {
const res = await request const res = await request
.get(`/api/views/${table._id}`) .get(`/api/views/${table._id}`)
.set(config.defaultHeaders()) .set(config.defaultHeaders())
.expect('Content-Type', /json/) .expect("Content-Type", /json/)
.expect(200) .expect(200)
expect(res.body.length).toEqual(1) expect(res.body.length).toEqual(1)
expect(res.body[0]._id).toEqual(row._id) expect(res.body[0]._id).toEqual(row._id)
await assertRowUsage(rowUsage) await assertRowUsage(rowUsage)
await assertQueryUsage(queryUsage +1) await assertQueryUsage(queryUsage + 1)
}) })
it("should throw an error if view doesn't exist", async () => { it("should throw an error if view doesn't exist", async () => {
@ -406,7 +438,7 @@ describe("/rows", () => {
const res = await request const res = await request
.get(`/api/views/${view.name}`) .get(`/api/views/${view.name}`)
.set(config.defaultHeaders()) .set(config.defaultHeaders())
.expect('Content-Type', /json/) .expect("Content-Type", /json/)
.expect(200) .expect(200)
expect(res.body.length).toEqual(1) expect(res.body.length).toEqual(1)
expect(res.body[0]._id).toEqual(row._id) expect(res.body[0]._id).toEqual(row._id)
@ -418,21 +450,24 @@ describe("/rows", () => {
describe("fetchEnrichedRows", () => { describe("fetchEnrichedRows", () => {
it("should allow enriching some linked rows", async () => { it("should allow enriching some linked rows", async () => {
const { table, firstRow, secondRow } = await doInTenant(setup.structures.TENANT_ID, async () => { const { table, firstRow, secondRow } = await doInTenant(
const table = await config.createLinkedTable() setup.structures.TENANT_ID,
const firstRow = await config.createRow({ async () => {
name: "Test Contact", const table = await config.createLinkedTable()
description: "original description", const firstRow = await config.createRow({
tableId: table._id name: "Test Contact",
}) description: "original description",
const secondRow = await config.createRow({ tableId: table._id,
name: "Test 2", })
description: "og desc", const secondRow = await config.createRow({
link: [{_id: firstRow._id}], name: "Test 2",
tableId: table._id, description: "og desc",
}) link: [{ _id: firstRow._id }],
return { table, firstRow, secondRow } tableId: table._id,
}) })
return { table, firstRow, secondRow }
}
)
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage() const queryUsage = await getQueryUsage()
@ -440,7 +475,7 @@ describe("/rows", () => {
const resBasic = await request const resBasic = await request
.get(`/api/${table._id}/rows/${secondRow._id}`) .get(`/api/${table._id}/rows/${secondRow._id}`)
.set(config.defaultHeaders()) .set(config.defaultHeaders())
.expect('Content-Type', /json/) .expect("Content-Type", /json/)
.expect(200) .expect(200)
expect(resBasic.body.link[0]._id).toBe(firstRow._id) expect(resBasic.body.link[0]._id).toBe(firstRow._id)
expect(resBasic.body.link[0].primaryDisplay).toBe("Test Contact") expect(resBasic.body.link[0].primaryDisplay).toBe("Test Contact")
@ -449,14 +484,14 @@ describe("/rows", () => {
const resEnriched = await request const resEnriched = await request
.get(`/api/${table._id}/${secondRow._id}/enrich`) .get(`/api/${table._id}/${secondRow._id}/enrich`)
.set(config.defaultHeaders()) .set(config.defaultHeaders())
.expect('Content-Type', /json/) .expect("Content-Type", /json/)
.expect(200) .expect(200)
expect(resEnriched.body.link.length).toBe(1) expect(resEnriched.body.link.length).toBe(1)
expect(resEnriched.body.link[0]._id).toBe(firstRow._id) expect(resEnriched.body.link[0]._id).toBe(firstRow._id)
expect(resEnriched.body.link[0].name).toBe("Test Contact") expect(resEnriched.body.link[0].name).toBe("Test Contact")
expect(resEnriched.body.link[0].description).toBe("original description") expect(resEnriched.body.link[0].description).toBe("original description")
await assertRowUsage(rowUsage) await assertRowUsage(rowUsage)
await assertQueryUsage(queryUsage +2) await assertQueryUsage(queryUsage + 2)
}) })
}) })
@ -466,9 +501,11 @@ describe("/rows", () => {
const row = await config.createRow({ const row = await config.createRow({
name: "test", name: "test",
description: "test", description: "test",
attachment: [{ attachment: [
key: `${config.getAppId()}/attachments/test/thing.csv`, {
}], key: `${config.getAppId()}/attachments/test/thing.csv`,
},
],
tableId: table._id, tableId: table._id,
}) })
// the environment needs configured for this // the environment needs configured for this
@ -482,4 +519,49 @@ describe("/rows", () => {
}) })
}) })
}) })
describe("exportData", () => {
it("should allow exporting all columns", async () => {
const existing = await config.createRow()
const res = await request
.post(`/api/${table._id}/rows/exportRows?format=json`)
.set(config.defaultHeaders())
.send({
rows: [existing._id],
})
.expect("Content-Type", /json/)
.expect(200)
const results = JSON.parse(res.text)
expect(results.length).toEqual(1)
const row = results[0]
// Ensure all original columns were exported
expect(Object.keys(row).length).toBeGreaterThanOrEqual(
Object.keys(existing).length
)
Object.keys(existing).forEach(key => {
expect(row[key]).toEqual(existing[key])
})
})
it("should allow exporting only certain columns", async () => {
const existing = await config.createRow()
const res = await request
.post(`/api/${table._id}/rows/exportRows?format=json`)
.set(config.defaultHeaders())
.send({
rows: [existing._id],
columns: ["_id"],
})
.expect("Content-Type", /json/)
.expect(200)
const results = JSON.parse(res.text)
expect(results.length).toEqual(1)
const row = results[0]
// Ensure only the _id column was exported
expect(Object.keys(row).length).toEqual(1)
expect(row._id).toEqual(existing._id)
})
})
}) })
View file
@ -1,6 +1,15 @@
// need to load environment first // need to load environment first
import { ExtendableContext } from "koa"
import * as env from "./environment" import * as env from "./environment"
// enable APM if configured
if (process.env.ELASTIC_APM_ENABLED) {
const apm = require("elastic-apm-node").start({
serviceName: process.env.SERVICE,
environment: process.env.BUDIBASE_ENVIRONMENT,
})
}
import { ExtendableContext } from "koa"
import db from "./db" import db from "./db"
db.init() db.init()
const Koa = require("koa") const Koa = require("koa")
@ -80,9 +89,7 @@ server.on("close", async () => {
return return
} }
shuttingDown = true shuttingDown = true
if (!env.isTest()) { console.log("Server Closed")
console.log("Server Closed")
}
await automations.shutdown() await automations.shutdown()
await redis.shutdown() await redis.shutdown()
await events.shutdown() await events.shutdown()
@ -174,3 +181,7 @@ process.on("uncaughtException", err => {
process.on("SIGTERM", () => { process.on("SIGTERM", () => {
shutdown() shutdown()
}) })
process.on("SIGINT", () => {
shutdown()
})
View file
@ -8,12 +8,14 @@ const Queue = env.isTest()
const { JobQueues } = require("../constants") const { JobQueues } = require("../constants")
const { utils } = require("@budibase/backend-core/redis") const { utils } = require("@budibase/backend-core/redis")
const { opts, redisProtocolUrl } = utils.getRedisOptions() const { opts, redisProtocolUrl } = utils.getRedisOptions()
const listeners = require("./listeners")
const CLEANUP_PERIOD_MS = 60 * 1000 const CLEANUP_PERIOD_MS = 60 * 1000
const queueConfig = redisProtocolUrl || { redis: opts } const queueConfig = redisProtocolUrl || { redis: opts }
let cleanupInternal = null let cleanupInternal = null
let automationQueue = new Queue(JobQueues.AUTOMATIONS, queueConfig) let automationQueue = new Queue(JobQueues.AUTOMATIONS, queueConfig)
listeners.addListeners(automationQueue)
async function cleanup() { async function cleanup() {
await automationQueue.clean(CLEANUP_PERIOD_MS, "completed") await automationQueue.clean(CLEANUP_PERIOD_MS, "completed")
@ -51,6 +53,7 @@ exports.shutdown = async () => {
await automationQueue.close() await automationQueue.close()
automationQueue = null automationQueue = null
} }
console.log("Bull shutdown")
} }
exports.queue = automationQueue exports.queue = automationQueue
View file
@ -0,0 +1,78 @@
import { Queue, Job, JobId } from "bull"
import { AutomationEvent } from "../definitions/automations"
import * as automation from "../threads/automation"
export const addListeners = (queue: Queue) => {
logging(queue)
handleStalled(queue)
}
const handleStalled = (queue: Queue) => {
queue.on("stalled", async (job: Job) => {
await automation.removeStalled(job as AutomationEvent)
})
}
const logging = (queue: Queue) => {
if (process.env.NODE_DEBUG?.includes("bull")) {
queue
.on("error", (error: any) => {
// An error occurred.
console.error(`automation-event=error error=${JSON.stringify(error)}`)
})
.on("waiting", (jobId: JobId) => {
// A Job is waiting to be processed as soon as a worker is idling.
console.log(`automation-event=waiting jobId=${jobId}`)
})
.on("active", (job: Job, jobPromise: any) => {
// A job has started. You can use `jobPromise.cancel()`` to abort it.
console.log(`automation-event=active jobId=${job.id}`)
})
.on("stalled", (job: Job) => {
// A job has been marked as stalled. This is useful for debugging job
// workers that crash or pause the event loop.
console.error(
`automation-event=stalled jobId=${job.id} job=${JSON.stringify(job)}`
)
})
.on("progress", (job: Job, progress: any) => {
// A job's progress was updated!
console.log(
`automation-event=progress jobId=${job.id} progress=${progress}`
)
})
.on("completed", (job: Job, result) => {
// A job successfully completed with a `result`.
console.log(
`automation-event=completed jobId=${job.id} result=${result}`
)
})
.on("failed", (job, err: any) => {
// A job failed with reason `err`!
console.log(`automation-event=failed jobId=${job.id} error=${err}`)
})
.on("paused", () => {
// The queue has been paused.
console.log(`automation-event=paused`)
})
.on("resumed", (job: Job) => {
// The queue has been resumed.
console.log(`automation-event=resumed jobId=${job.id}`)
})
.on("cleaned", (jobs: Job[], type: string) => {
// Old jobs have been cleaned from the queue. `jobs` is an array of cleaned
// jobs, and `type` is the type of jobs cleaned.
console.log(
`automation-event=cleaned length=${jobs.length} type=${type}`
)
})
.on("drained", () => {
// Emitted every time the queue has processed all the waiting jobs (even if there can be some delayed jobs not yet processed)
console.log(`automation-event=drained`)
})
.on("removed", (job: Job) => {
// A job successfully removed.
console.log(`automation-event=removed jobId=${job.id}`)
})
}
}
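A small sketch of wiring these listeners onto a queue (queue name and Redis URL are placeholders; the verbose event logging above only activates when NODE_DEBUG contains "bull"):

import Queue from "bull"
import { addListeners } from "./listeners"

// e.g. start the service with NODE_DEBUG=bull to see the per-event logs
const queue = new Queue("automations", "redis://localhost:6379")
addListeners(queue)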
View file
@ -125,6 +125,14 @@ const hasNullFilters = filters =>
exports.run = async function ({ inputs, appId }) { exports.run = async function ({ inputs, appId }) {
const { tableId, filters, sortColumn, sortOrder, limit } = inputs const { tableId, filters, sortColumn, sortOrder, limit } = inputs
if (!tableId) {
return {
success: false,
response: {
message: "You must select a table to query.",
},
}
}
const table = await getTable(appId, tableId) const table = await getTable(appId, tableId)
let sortType = FieldTypes.STRING let sortType = FieldTypes.STRING
if (table && table.schema && table.schema[sortColumn] && sortColumn) { if (table && table.schema && table.schema[sortColumn] && sortColumn) {
View file
@ -21,11 +21,13 @@ const WH_STEP_ID = definitions.WEBHOOK.stepId
const CRON_STEP_ID = definitions.CRON.stepId const CRON_STEP_ID = definitions.CRON.stepId
const Runner = new Thread(ThreadType.AUTOMATION) const Runner = new Thread(ThreadType.AUTOMATION)
const jobMessage = (job: any, message: string) => {
return `app=${job.data.event.appId} automation=${job.data.automation._id} jobId=${job.id} trigger=${job.data.automation.definition.trigger.event} : ${message}`
}
export async function processEvent(job: any) { export async function processEvent(job: any) {
try { try {
console.log( console.log(jobMessage(job, "running"))
`${job.data.automation.appId} automation ${job.data.automation._id} running`
)
// need to actually await these so that an error can be captured properly // need to actually await these so that an error can be captured properly
const tenantId = tenancy.getTenantIDFromAppID(job.data.event.appId) const tenantId = tenancy.getTenantIDFromAppID(job.data.event.appId)
return await tenancy.doInTenant(tenantId, async () => { return await tenancy.doInTenant(tenantId, async () => {
@ -34,9 +36,7 @@ export async function processEvent(job: any) {
}) })
} catch (err) { } catch (err) {
const errJson = JSON.stringify(err) const errJson = JSON.stringify(err)
console.error( console.error(jobMessage(job, `was unable to run - ${errJson}`))
`${job.data.automation.appId} automation ${job.data.automation._id} was unable to run - ${errJson}`
)
console.trace(err) console.trace(err)
return { err } return { err }
} }
@ -91,6 +91,7 @@ export async function disableAllCrons(appId: any) {
export async function disableCron(jobId: string, jobKey: string) { export async function disableCron(jobId: string, jobKey: string) {
await queue.removeRepeatableByKey(jobKey) await queue.removeRepeatableByKey(jobKey)
await queue.removeJobs(jobId) await queue.removeJobs(jobId)
console.log(`jobId=${jobId} disabled`)
} }
export async function clearMetadata() { export async function clearMetadata() {
View file
@ -103,11 +103,9 @@ class Table {
exports.init = endpoint => { exports.init = endpoint => {
let AWS = require("aws-sdk") let AWS = require("aws-sdk")
AWS.config.update({
region: AWS_REGION,
})
let docClientParams = { let docClientParams = {
correctClockSkew: true, correctClockSkew: true,
region: AWS_REGION,
} }
if (endpoint) { if (endpoint) {
docClientParams.endpoint = endpoint docClientParams.endpoint = endpoint
View file
@ -13,10 +13,7 @@ function isJest() {
} }
function isDev() { function isDev() {
return ( return process.env.NODE_ENV !== "production"
process.env.NODE_ENV !== "production" &&
process.env.BUDIBASE_ENVIRONMENT !== "production"
)
} }
function isCypress() { function isCypress() {
View file
@ -1,8 +1,10 @@
const { rowEmission, tableEmission } = require("./utils") const { rowEmission, tableEmission } = require("./utils")
const mainEmitter = require("./index") const mainEmitter = require("./index")
const env = require("../environment")
// max number of automations that can chain on top of each other // max number of automations that can chain on top of each other
const MAX_AUTOMATION_CHAIN = 5 // TODO: in future make this configurable at the automation level
const MAX_AUTOMATION_CHAIN = env.SELF_HOSTED ? 5 : 0
/** /**
* Special emitter which takes the count of automation runs which have occurred and blocks an * Special emitter which takes the count of automation runs which have occurred and blocks an
View file
@ -12,7 +12,8 @@ interface DynamoDBConfig {
region: string region: string
accessKeyId: string accessKeyId: string
secretAccessKey: string secretAccessKey: string
endpoint: string endpoint?: string
currentClockSkew?: boolean
} }
const SCHEMA: Integration = { const SCHEMA: Integration = {
@ -131,31 +132,20 @@ class DynamoDBIntegration implements IntegrationBase {
constructor(config: DynamoDBConfig) { constructor(config: DynamoDBConfig) {
this.config = config this.config = config
if (this.config.endpoint && !this.config.endpoint.includes("localhost")) {
this.connect() // User is using a local dynamoDB endpoint, don't auth with remote
if (this.config?.endpoint?.includes("localhost")) {
// @ts-ignore
this.config = {}
} }
let options = {
correctClockSkew: true, this.config = {
region: this.config.region || AWS_REGION, ...this.config,
endpoint: config.endpoint ? config.endpoint : undefined, currentClockSkew: true,
region: config.region || AWS_REGION,
endpoint: config.endpoint || undefined,
} }
this.client = new AWS.DynamoDB.DocumentClient(options) this.client = new AWS.DynamoDB.DocumentClient(this.config)
}
end() {
this.disconnect()
}
connect() {
AWS.config.update(this.config)
}
disconnect() {
AWS.config.update({
secretAccessKey: undefined,
accessKeyId: undefined,
region: AWS_REGION,
})
} }
async create(query: { table: string; json: object }) { async create(query: { table: string; json: object }) {
@ -196,7 +186,7 @@ class DynamoDBIntegration implements IntegrationBase {
const params = { const params = {
TableName: query.table, TableName: query.table,
} }
return new AWS.DynamoDB().describeTable(params).promise() return new AWS.DynamoDB(this.config).describeTable(params).promise()
} }
async get(query: { table: string; json: object }) { async get(query: { table: string; json: object }) {
View file
@ -20,6 +20,13 @@ interface MongoDBConfig {
db: string db: string
} }
interface MongoDBQuery {
json: object | string
extra: {
[key: string]: string
}
}
const SCHEMA: Integration = { const SCHEMA: Integration = {
docs: "https://github.com/mongodb/node-mongodb-native", docs: "https://github.com/mongodb/node-mongodb-native",
friendlyName: "MongoDB", friendlyName: "MongoDB",
@ -92,8 +99,8 @@ class MongoIntegration implements IntegrationBase {
json[field] = self.createObjectIds(json[field]) json[field] = self.createObjectIds(json[field])
} }
if ( if (
(field === "_id" || field?.startsWith("$")) && typeof json[field] === "string" &&
typeof json[field] === "string" json[field].toLowerCase().startsWith("objectid")
) { ) {
const id = json[field].match(/(?<=objectid\(['"]).*(?=['"]\))/gi)?.[0] const id = json[field].match(/(?<=objectid\(['"]).*(?=['"]\))/gi)?.[0]
if (id) { if (id) {
@ -152,7 +159,7 @@ class MongoIntegration implements IntegrationBase {
} }
} }
async create(query: { json: object; extra: { [key: string]: string } }) { async create(query: MongoDBQuery) {
try { try {
await this.connect() await this.connect()
const db = this.client.db(this.config.db) const db = this.client.db(this.config.db)
@ -182,7 +189,7 @@ class MongoIntegration implements IntegrationBase {
} }
} }
async read(query: { json: object; extra: { [key: string]: string } }) { async read(query: MongoDBQuery) {
try { try {
await this.connect() await this.connect()
const db = this.client.db(this.config.db) const db = this.client.db(this.config.db)
@ -231,7 +238,7 @@ class MongoIntegration implements IntegrationBase {
} }
} }
async update(query: { json: object; extra: { [key: string]: string } }) { async update(query: MongoDBQuery) {
try { try {
await this.connect() await this.connect()
const db = this.client.db(this.config.db) const db = this.client.db(this.config.db)
@ -275,7 +282,7 @@ class MongoIntegration implements IntegrationBase {
} }
} }
async delete(query: { json: object; extra: { [key: string]: string } }) { async delete(query: MongoDBQuery) {
try { try {
await this.connect() await this.connect()
const db = this.client.db(this.config.db) const db = this.client.db(this.config.db)
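A rough sketch of the ObjectID coercion the integration now applies to any string value shaped like ObjectId('...'), not just _id or $-prefixed keys (helper name is illustrative):

import { ObjectID } from "mongodb"

function coerceObjectIds(json: Record<string, any>): Record<string, any> {
  for (const field of Object.keys(json)) {
    const value = json[field]
    if (
      typeof value === "string" &&
      value.toLowerCase().startsWith("objectid")
    ) {
      // Pull the hex id out of a string like "ObjectId('614c7f8d9a1b2c3d4e5f6a7b')"
      const id = value.match(/(?<=objectid\(['"]).*(?=['"]\))/gi)?.[0]
      if (id) {
        json[field] = ObjectID.createFromHexString(id)
      }
    }
  }
  return json
}

// e.g. { createdBy: "ObjectId('614c7f8d9a1b2c3d4e5f6a7b')" } now becomes a real ObjectID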
View file
@ -1,10 +1,12 @@
const Airtable = require("airtable") import { default as AirtableIntegration } from "../airtable"
const AirtableIntegration = require("../airtable")
jest.mock("airtable") jest.mock("airtable")
class TestConfiguration { class TestConfiguration {
constructor(config = {}) { integration: any
this.integration = new AirtableIntegration.integration(config) client: any
constructor(config: any = {}) {
this.integration = new AirtableIntegration.integration(config)
this.client = { this.client = {
create: jest.fn(), create: jest.fn(),
select: jest.fn(() => ({ select: jest.fn(() => ({
@ -13,12 +15,12 @@ class TestConfiguration {
update: jest.fn(), update: jest.fn(),
destroy: jest.fn(), destroy: jest.fn(),
} }
this.integration.client = () => this.client this.integration.client = () => this.client
} }
} }
describe("Airtable Integration", () => { describe("Airtable Integration", () => {
let config let config: any
beforeEach(() => { beforeEach(() => {
config = new TestConfiguration() config = new TestConfiguration()
@ -27,22 +29,23 @@ describe("Airtable Integration", () => {
it("calls the create method with the correct params", async () => { it("calls the create method with the correct params", async () => {
const response = await config.integration.create({ const response = await config.integration.create({
table: "test", table: "test",
json: {} json: {},
}) })
expect(config.client.create).toHaveBeenCalledWith([ expect(config.client.create).toHaveBeenCalledWith([
{ {
fields: {} fields: {},
} },
]) ])
}) })
it("calls the read method with the correct params", async () => { it("calls the read method with the correct params", async () => {
const response = await config.integration.read({ const response = await config.integration.read({
table: "test", table: "test",
view: "Grid view" view: "Grid view",
}) })
expect(config.client.select).toHaveBeenCalledWith({ expect(config.client.select).toHaveBeenCalledWith({
maxRecords: 10, view: "Grid view" maxRecords: 10,
view: "Grid view",
}) })
}) })
@ -51,22 +54,22 @@ describe("Airtable Integration", () => {
table: "table", table: "table",
id: "123", id: "123",
json: { json: {
name: "test" name: "test",
} },
}) })
expect(config.client.update).toHaveBeenCalledWith([ expect(config.client.update).toHaveBeenCalledWith([
{ {
id: "123", id: "123",
fields: { name: "test" } fields: { name: "test" },
} },
]) ])
}) })
it("calls the delete method with the correct params", async () => { it("calls the delete method with the correct params", async () => {
const ids = [1,2,3,4] const ids = [1, 2, 3, 4]
const response = await config.integration.delete({ const response = await config.integration.delete({
ids ids,
}) })
expect(config.client.destroy).toHaveBeenCalledWith(ids) expect(config.client.destroy).toHaveBeenCalledWith(ids)
}) })
}) })
View file
@ -1,15 +1,16 @@
const arangodb = require("arangojs") import { default as ArangoDBIntegration } from "../arangodb"
const ArangoDBIntegration = require("../arangodb")
jest.mock("arangojs") jest.mock("arangojs")
class TestConfiguration { class TestConfiguration {
constructor(config = {}) { integration: any
this.integration = new ArangoDBIntegration.integration(config)
constructor(config: any = {}) {
this.integration = new ArangoDBIntegration.integration(config)
} }
} }
describe("ArangoDB Integration", () => { describe("ArangoDB Integration", () => {
let config let config: any
let indexName = "Users" let indexName = "Users"
beforeEach(() => { beforeEach(() => {
@ -18,18 +19,20 @@ describe("ArangoDB Integration", () => {
it("calls the create method with the correct params", async () => { it("calls the create method with the correct params", async () => {
const body = { const body = {
json: "Hello" json: "Hello",
} }
const response = await config.integration.create(body) const response = await config.integration.create(body)
expect(config.integration.client.query).toHaveBeenCalledWith(`INSERT Hello INTO collection RETURN NEW`) expect(config.integration.client.query).toHaveBeenCalledWith(
`INSERT Hello INTO collection RETURN NEW`
)
}) })
it("calls the read method with the correct params", async () => { it("calls the read method with the correct params", async () => {
const query = { const query = {
json: `test`, sql: `test`,
} }
const response = await config.integration.read(query) const response = await config.integration.read(query)
expect(config.integration.client.query).toHaveBeenCalledWith(query.sql) expect(config.integration.client.query).toHaveBeenCalledWith(query.sql)
}) })
}) })
View file
@ -1,23 +1,29 @@
jest.mock("pouchdb", () => function CouchDBMock() { jest.mock(
this.post = jest.fn() "pouchdb",
this.allDocs = jest.fn(() => ({ rows: [] })) () =>
this.put = jest.fn() function CouchDBMock(this: any) {
this.get = jest.fn() this.post = jest.fn()
this.remove = jest.fn() this.allDocs = jest.fn(() => ({ rows: [] }))
this.plugin = jest.fn() this.put = jest.fn()
this.close = jest.fn() this.get = jest.fn()
}) this.remove = jest.fn()
this.plugin = jest.fn()
this.close = jest.fn()
}
)
const CouchDBIntegration = require("../couchdb") import { default as CouchDBIntegration } from "../couchdb"
class TestConfiguration { class TestConfiguration {
constructor(config = {}) { integration: any
this.integration = new CouchDBIntegration.integration(config)
constructor(config: any = {}) {
this.integration = new CouchDBIntegration.integration(config)
} }
} }
describe("CouchDB Integration", () => { describe("CouchDB Integration", () => {
let config let config: any
beforeEach(() => { beforeEach(() => {
config = new TestConfiguration() config = new TestConfiguration()
@ -25,37 +31,37 @@ describe("CouchDB Integration", () => {
it("calls the create method with the correct params", async () => { it("calls the create method with the correct params", async () => {
const doc = { const doc = {
test: 1 test: 1,
} }
const response = await config.integration.create({ const response = await config.integration.create({
json: doc json: doc,
}) })
expect(config.integration.client.post).toHaveBeenCalledWith(doc) expect(config.integration.client.post).toHaveBeenCalledWith(doc)
}) })
it("calls the read method with the correct params", async () => { it("calls the read method with the correct params", async () => {
const doc = { const doc = {
name: "search" name: "search",
} }
const response = await config.integration.read({ const response = await config.integration.read({
json: doc json: doc,
}) })
expect(config.integration.client.allDocs).toHaveBeenCalledWith({ expect(config.integration.client.allDocs).toHaveBeenCalledWith({
include_docs: true, include_docs: true,
name: "search" name: "search",
}) })
}) })
it("calls the update method with the correct params", async () => { it("calls the update method with the correct params", async () => {
const doc = { const doc = {
_id: "1234", _id: "1234",
name: "search" name: "search",
} }
const response = await config.integration.update({ const response = await config.integration.update({
json: doc json: doc,
}) })
expect(config.integration.client.put).toHaveBeenCalledWith(doc) expect(config.integration.client.put).toHaveBeenCalledWith(doc)
@ -67,4 +73,4 @@ describe("CouchDB Integration", () => {
expect(config.integration.client.get).toHaveBeenCalledWith(id) expect(config.integration.client.get).toHaveBeenCalledWith(id)
expect(config.integration.client.remove).toHaveBeenCalled() expect(config.integration.client.remove).toHaveBeenCalled()
}) })
}) })
View file
@ -1,103 +0,0 @@
const AWS = require("aws-sdk")
const DynamoDBIntegration = require("../dynamodb")
jest.mock("aws-sdk")
class TestConfiguration {
constructor(config = {}) {
this.integration = new DynamoDBIntegration.integration(config)
}
}
describe("DynamoDB Integration", () => {
let config
let tableName = "Users"
beforeEach(() => {
config = new TestConfiguration()
})
it("calls the create method with the correct params", async () => {
const response = await config.integration.create({
table: tableName,
json: {
Name: "John"
}
})
expect(config.integration.client.put).toHaveBeenCalledWith({
TableName: tableName,
Name: "John"
})
})
it("calls the read method with the correct params", async () => {
const indexName = "Test"
const response = await config.integration.read({
table: tableName,
index: indexName,
json: {}
})
expect(config.integration.client.query).toHaveBeenCalledWith({
TableName: tableName,
IndexName: indexName,
})
expect(response).toEqual([])
})
it("calls the scan method with the correct params", async () => {
const indexName = "Test"
const response = await config.integration.scan({
table: tableName,
index: indexName,
json: {}
})
expect(config.integration.client.scan).toHaveBeenCalledWith({
TableName: tableName,
IndexName: indexName,
})
expect(response).toEqual([{
Name: "test"
}])
})
it("calls the get method with the correct params", async () => {
const response = await config.integration.get({
table: tableName,
json: {
Id: 123
}
})
expect(config.integration.client.get).toHaveBeenCalledWith({
TableName: tableName,
Id: 123
})
})
it("calls the update method with the correct params", async () => {
const response = await config.integration.update({
table: tableName,
json: {
Name: "John"
}
})
expect(config.integration.client.update).toHaveBeenCalledWith({
TableName: tableName,
Name: "John"
})
})
it("calls the delete method with the correct params", async () => {
const response = await config.integration.delete({
table: tableName,
json: {
Name: "John"
}
})
expect(config.integration.client.delete).toHaveBeenCalledWith({
TableName: tableName,
Name: "John"
})
})
})
View file
@ -0,0 +1,155 @@
import { default as DynamoDBIntegration } from "../dynamodb"
jest.mock("aws-sdk")
class TestConfiguration {
integration: any
constructor(config: any = {}) {
this.integration = new DynamoDBIntegration.integration(config)
}
}
describe("DynamoDB Integration", () => {
let config: any
let tableName = "Users"
beforeEach(() => {
config = new TestConfiguration()
})
it("calls the create method with the correct params", async () => {
const response = await config.integration.create({
table: tableName,
json: {
Name: "John",
},
})
expect(config.integration.client.put).toHaveBeenCalledWith({
TableName: tableName,
Name: "John",
})
})
it("calls the read method with the correct params", async () => {
const indexName = "Test"
const response = await config.integration.read({
table: tableName,
index: indexName,
json: {},
})
expect(config.integration.client.query).toHaveBeenCalledWith({
TableName: tableName,
IndexName: indexName,
})
expect(response).toEqual([])
})
it("calls the scan method with the correct params", async () => {
const indexName = "Test"
const response = await config.integration.scan({
table: tableName,
index: indexName,
json: {},
})
expect(config.integration.client.scan).toHaveBeenCalledWith({
TableName: tableName,
IndexName: indexName,
})
expect(response).toEqual([
{
Name: "test",
},
])
})
it("calls the get method with the correct params", async () => {
const response = await config.integration.get({
table: tableName,
json: {
Id: 123,
},
})
expect(config.integration.client.get).toHaveBeenCalledWith({
TableName: tableName,
Id: 123,
})
})
it("calls the update method with the correct params", async () => {
const response = await config.integration.update({
table: tableName,
json: {
Name: "John",
},
})
expect(config.integration.client.update).toHaveBeenCalledWith({
TableName: tableName,
Name: "John",
})
})
it("calls the delete method with the correct params", async () => {
const response = await config.integration.delete({
table: tableName,
json: {
Name: "John",
},
})
expect(config.integration.client.delete).toHaveBeenCalledWith({
TableName: tableName,
Name: "John",
})
})
it("configures the dynamoDB constructor based on an empty endpoint parameter", async () => {
const config = {
region: "us-east-1",
accessKeyId: "test",
secretAccessKey: "test",
}
const integration: any = new DynamoDBIntegration.integration(config)
expect(integration.config).toEqual({
currentClockSkew: true,
...config,
})
})
it("configures the dynamoDB constructor based on a localhost endpoint parameter", async () => {
const config = {
region: "us-east-1",
accessKeyId: "test",
secretAccessKey: "test",
endpoint: "localhost:8080",
}
const integration: any = new DynamoDBIntegration.integration(config)
expect(integration.config).toEqual({
region: "us-east-1",
currentClockSkew: true,
endpoint: "localhost:8080",
})
})
it("configures the dynamoDB constructor based on a remote endpoint parameter", async () => {
const config = {
region: "us-east-1",
accessKeyId: "test",
secretAccessKey: "test",
endpoint: "dynamodb.aws.foo.net",
}
const integration = new DynamoDBIntegration.integration(config)
// @ts-ignore
expect(integration.config).toEqual({
currentClockSkew: true,
...config,
})
})
})
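The three constructor tests above pin down how the DynamoDB integration appears to normalise its configuration: clock-skew correction is always enabled, and a localhost endpoint drops the static credentials while a remote endpoint keeps the full config. A minimal TypeScript sketch consistent with those assertions follows; buildDynamoConfig and the localhost check are illustrative assumptions, not Budibase's actual implementation.

// Illustrative sketch only -- mirrors what the endpoint tests assert,
// not necessarily how the real integration builds its config.
interface DynamoDBConfig {
  region: string
  accessKeyId: string
  secretAccessKey: string
  endpoint?: string
}

function buildDynamoConfig(config: DynamoDBConfig) {
  // A local endpoint (e.g. dynamodb-local) needs no real credentials,
  // so only the region and endpoint are forwarded.
  if (config.endpoint && config.endpoint.includes("localhost")) {
    return {
      region: config.region,
      currentClockSkew: true,
      endpoint: config.endpoint,
    }
  }
  // Otherwise the whole config is forwarded with clock-skew correction on.
  return {
    currentClockSkew: true,
    ...config,
  }
}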
View file
@ -1,15 +1,16 @@
const elasticsearch = require("@elastic/elasticsearch") import { default as ElasticSearchIntegration } from "../elasticsearch"
const ElasticSearchIntegration = require("../elasticsearch")
jest.mock("@elastic/elasticsearch") jest.mock("@elastic/elasticsearch")
class TestConfiguration { class TestConfiguration {
constructor(config = {}) { integration: any
this.integration = new ElasticSearchIntegration.integration(config)
constructor(config: any = {}) {
this.integration = new ElasticSearchIntegration.integration(config)
} }
} }
describe("Elasticsearch Integration", () => { describe("Elasticsearch Integration", () => {
let config let config: any
let indexName = "Users" let indexName = "Users"
beforeEach(() => { beforeEach(() => {
@ -18,15 +19,15 @@ describe("Elasticsearch Integration", () => {
it("calls the create method with the correct params", async () => { it("calls the create method with the correct params", async () => {
const body = { const body = {
name: "Hello" name: "Hello",
} }
const response = await config.integration.create({ const response = await config.integration.create({
index: indexName, index: indexName,
json: body json: body,
}) })
expect(config.integration.client.index).toHaveBeenCalledWith({ expect(config.integration.client.index).toHaveBeenCalledWith({
index: indexName, index: indexName,
body body,
}) })
}) })
@ -34,43 +35,43 @@ describe("Elasticsearch Integration", () => {
const body = { const body = {
query: { query: {
term: { term: {
name: "kimchy" name: "kimchy",
} },
} },
} }
const response = await config.integration.read({ const response = await config.integration.read({
index: indexName, index: indexName,
json: body json: body,
}) })
expect(config.integration.client.search).toHaveBeenCalledWith({ expect(config.integration.client.search).toHaveBeenCalledWith({
index: indexName, index: indexName,
body body,
}) })
expect(response).toEqual(expect.any(Array)) expect(response).toEqual(expect.any(Array))
}) })
it("calls the update method with the correct params", async () => { it("calls the update method with the correct params", async () => {
const body = { const body = {
name: "updated" name: "updated",
} }
const response = await config.integration.update({ const response = await config.integration.update({
id: "1234", id: "1234",
index: indexName, index: indexName,
json: body json: body,
}) })
expect(config.integration.client.update).toHaveBeenCalledWith({ expect(config.integration.client.update).toHaveBeenCalledWith({
id: "1234", id: "1234",
index: indexName, index: indexName,
body body,
}) })
expect(response).toEqual(expect.any(Array)) expect(response).toEqual(expect.any(Array))
}) })
it("calls the delete method with the correct params", async () => { it("calls the delete method with the correct params", async () => {
const body = { const body = {
id: "1234" id: "1234",
} }
const response = await config.integration.delete(body) const response = await config.integration.delete(body)
@ -78,4 +79,4 @@ describe("Elasticsearch Integration", () => {
expect(config.integration.client.delete).toHaveBeenCalledWith(body) expect(config.integration.client.delete).toHaveBeenCalledWith(body)
expect(response).toEqual(expect.any(Array)) expect(response).toEqual(expect.any(Array))
}) })
}) })
View file
@ -1,92 +1,97 @@
const firebase = require("@google-cloud/firestore") import { default as FirebaseIntegration } from "../firebase"
const FirebaseIntegration = require("../firebase")
jest.mock("@google-cloud/firestore") jest.mock("@google-cloud/firestore")
class TestConfiguration { class TestConfiguration {
constructor(config = {}) { integration: any
this.integration = new FirebaseIntegration.integration(config)
constructor(config: any = {}) {
this.integration = new FirebaseIntegration.integration(config)
} }
} }
describe("Firebase Integration", () => { describe("Firebase Integration", () => {
let config let config: any
let tableName = "Users" let tableName = "Users"
beforeEach(() => { beforeEach(() => {
config = new TestConfiguration({ config = new TestConfiguration({
serviceAccount: "{}" serviceAccount: "{}",
}) })
}) })
it("calls the create method with the correct params", async () => { it("calls the create method with the correct params", async () => {
await config.integration.create({ await config.integration.create({
table: tableName, table: tableName,
json: { json: {
Name: "Test Name" Name: "Test Name",
}, },
extra: { extra: {
collection: "test" collection: "test",
} },
}) })
expect(config.integration.client.collection).toHaveBeenCalledWith("test") expect(config.integration.client.collection).toHaveBeenCalledWith("test")
expect(config.integration.client.set).toHaveBeenCalledWith({ expect(config.integration.client.set).toHaveBeenCalledWith({
Name: "Test Name", Name: "Test Name",
id: "test_id" id: "test_id",
}) })
}) })
it("calls the read method with the correct params", async () => { it("calls the read method with the correct params", async () => {
const response = await config.integration.read({ const response = await config.integration.read({
table: tableName, table: tableName,
json: { json: {
Name: "Test" Name: "Test",
}, },
extra: { extra: {
collection: "test", collection: "test",
filterField: "field", filterField: "field",
filter: "==", filter: "==",
filterValue: "value", filterValue: "value",
} },
}) })
expect(config.integration.client.collection).toHaveBeenCalledWith("test") expect(config.integration.client.collection).toHaveBeenCalledWith("test")
expect(config.integration.client.where).toHaveBeenCalledWith("field", "==", "value") expect(config.integration.client.where).toHaveBeenCalledWith(
expect(response).toEqual([{ result: "test"}]) "field",
"==",
"value"
)
expect(response).toEqual([{ result: "test" }])
}) })
it("calls the update method with the correct params", async () => { it("calls the update method with the correct params", async () => {
const response = await config.integration.update({ const response = await config.integration.update({
table: tableName, table: tableName,
json: { json: {
id: "test", id: "test",
Name: "Test" Name: "Test",
}, },
extra: { extra: {
collection: "test" collection: "test",
} },
}) })
expect(config.integration.client.collection).toHaveBeenCalledWith("test") expect(config.integration.client.collection).toHaveBeenCalledWith("test")
expect(config.integration.client.update).toHaveBeenCalledWith({ expect(config.integration.client.update).toHaveBeenCalledWith({
Name: "Test", Name: "Test",
id: "test" id: "test",
}) })
expect(response).toEqual({ expect(response).toEqual({
result: "test" result: "test",
}) })
}) })
it("calls the delete method with the correct params", async () => { it("calls the delete method with the correct params", async () => {
const response = await config.integration.delete({ const response = await config.integration.delete({
table: tableName, table: tableName,
json: { json: {
id: "test", id: "test",
Name: "Test" Name: "Test",
}, },
extra: { extra: {
collection: "test" collection: "test",
} },
}) })
expect(config.integration.client.collection).toHaveBeenCalledWith("test") expect(config.integration.client.collection).toHaveBeenCalledWith("test")
expect(config.integration.client.doc).toHaveBeenCalledWith("test") expect(config.integration.client.doc).toHaveBeenCalledWith("test")
expect(config.integration.client.delete).toHaveBeenCalled() expect(config.integration.client.delete).toHaveBeenCalled()
}) })
}) })
View file
@ -1,15 +1,16 @@
const sqlServer = require("mssql") import { default as MSSQLIntegration } from "../microsoftSqlServer"
const MSSQLIntegration = require("../microsoftSqlServer")
jest.mock("mssql") jest.mock("mssql")
class TestConfiguration { class TestConfiguration {
constructor(config = {}) { integration: any
this.integration = new MSSQLIntegration.integration(config)
constructor(config: any = {}) {
this.integration = new MSSQLIntegration.integration(config)
} }
} }
describe("MS SQL Server Integration", () => { describe("MS SQL Server Integration", () => {
let config let config: any
beforeEach(async () => { beforeEach(async () => {
config = new TestConfiguration() config = new TestConfiguration()
@ -23,7 +24,7 @@ describe("MS SQL Server Integration", () => {
it("calls the create method with the correct params", async () => { it("calls the create method with the correct params", async () => {
const sql = "insert into users (name, age) values ('Joe', 123);" const sql = "insert into users (name, age) values ('Joe', 123);"
const response = await config.integration.create({ const response = await config.integration.create({
sql sql,
}) })
expect(config.integration.client.request).toHaveBeenCalledWith() expect(config.integration.client.request).toHaveBeenCalledWith()
expect(response[0]).toEqual(sql) expect(response[0]).toEqual(sql)
@ -32,7 +33,7 @@ describe("MS SQL Server Integration", () => {
it("calls the read method with the correct params", async () => { it("calls the read method with the correct params", async () => {
const sql = "select * from users;" const sql = "select * from users;"
const response = await config.integration.read({ const response = await config.integration.read({
sql sql,
}) })
expect(config.integration.client.request).toHaveBeenCalledWith() expect(config.integration.client.request).toHaveBeenCalledWith()
expect(response[0]).toEqual(sql) expect(response[0]).toEqual(sql)
@ -45,11 +46,11 @@ describe("MS SQL Server Integration", () => {
}) })
it("returns the correct response when the create response has no rows", async () => { it("returns the correct response when the create response has no rows", async () => {
const sql = "insert into users (name, age) values ('Joe', 123);" const sql = "insert into users (name, age) values ('Joe', 123);"
const response = await config.integration.create({ const response = await config.integration.create({
sql sql,
}) })
expect(response[0]).toEqual(sql) expect(response[0]).toEqual(sql)
}) })
}) })
}) })
View file
@ -1,22 +1,26 @@
const mongo = require("mongodb") const mongo = require("mongodb")
const MongoDBIntegration = require("../mongodb") import { default as MongoDBIntegration } from "../mongodb"
jest.mock("mongodb") jest.mock("mongodb")
class TestConfiguration { class TestConfiguration {
constructor(config = {}) { integration: any
constructor(config: any = {}) {
this.integration = new MongoDBIntegration.integration(config) this.integration = new MongoDBIntegration.integration(config)
} }
} }
function disableConsole() { function disableConsole() {
jest.spyOn(console, "error") jest.spyOn(console, "error")
// @ts-ignore
console.error.mockImplementation(() => {}) console.error.mockImplementation(() => {})
// @ts-ignore
return console.error.mockRestore return console.error.mockRestore
} }
describe("MongoDB Integration", () => { describe("MongoDB Integration", () => {
let config let config: any
let indexName = "Users" let indexName = "Users"
beforeEach(() => { beforeEach(() => {
@ -54,13 +58,16 @@ describe("MongoDB Integration", () => {
id: "test", id: "test",
}, },
options: { options: {
opt: "option" opt: "option",
} },
}, },
extra: { collection: "testCollection", actionTypes: "deleteOne" }, extra: { collection: "testCollection", actionTypes: "deleteOne" },
} }
await config.integration.delete(query) await config.integration.delete(query)
expect(config.integration.client.deleteOne).toHaveBeenCalledWith(query.json.filter, query.json.options) expect(config.integration.client.deleteOne).toHaveBeenCalledWith(
query.json.filter,
query.json.options
)
}) })
it("calls the update method with the correct params", async () => { it("calls the update method with the correct params", async () => {
@ -103,16 +110,16 @@ describe("MongoDB Integration", () => {
restore() restore()
}) })
it("creates ObjectIds if the _id fields contains a match on ObjectId", async () => { it("creates ObjectIds if the field contains a match on ObjectId", async () => {
const query = { const query = {
json: { json: {
filter: { filter: {
_id: "ObjectId('ACBD12345678ABCD12345678')", _id: "ObjectId('ACBD12345678ABCD12345678')",
name: "ObjectId('name')" name: "ObjectId('BBBB12345678ABCD12345678')",
}, },
update: { update: {
_id: "ObjectId('FFFF12345678ABCD12345678')", _id: "ObjectId('FFFF12345678ABCD12345678')",
name: "ObjectId('updatedName')", name: "ObjectId('CCCC12345678ABCD12345678')",
}, },
options: { options: {
upsert: false, upsert: false,
@ -122,18 +129,18 @@ describe("MongoDB Integration", () => {
} }
await config.integration.update(query) await config.integration.update(query)
expect(config.integration.client.updateOne).toHaveBeenCalled() expect(config.integration.client.updateOne).toHaveBeenCalled()
const args = config.integration.client.updateOne.mock.calls[0] const args = config.integration.client.updateOne.mock.calls[0]
expect(args[0]).toEqual({ expect(args[0]).toEqual({
_id: mongo.ObjectID.createFromHexString("ACBD12345678ABCD12345678"), _id: mongo.ObjectID.createFromHexString("ACBD12345678ABCD12345678"),
name: "ObjectId('name')", name: mongo.ObjectID.createFromHexString("BBBB12345678ABCD12345678"),
}) })
expect(args[1]).toEqual({ expect(args[1]).toEqual({
_id: mongo.ObjectID.createFromHexString("FFFF12345678ABCD12345678"), _id: mongo.ObjectID.createFromHexString("FFFF12345678ABCD12345678"),
name: "ObjectId('updatedName')", name: mongo.ObjectID.createFromHexString("CCCC12345678ABCD12345678"),
}) })
expect(args[2]).toEqual({ expect(args[2]).toEqual({
upsert: false upsert: false,
}) })
}) })
@ -143,7 +150,7 @@ describe("MongoDB Integration", () => {
filter: { filter: {
_id: { _id: {
$eq: "ObjectId('ACBD12345678ABCD12345678')", $eq: "ObjectId('ACBD12345678ABCD12345678')",
} },
}, },
update: { update: {
$set: { $set: {
@ -158,20 +165,20 @@ describe("MongoDB Integration", () => {
} }
await config.integration.update(query) await config.integration.update(query)
expect(config.integration.client.updateOne).toHaveBeenCalled() expect(config.integration.client.updateOne).toHaveBeenCalled()
const args = config.integration.client.updateOne.mock.calls[0] const args = config.integration.client.updateOne.mock.calls[0]
expect(args[0]).toEqual({ expect(args[0]).toEqual({
_id: { _id: {
$eq: mongo.ObjectID.createFromHexString("ACBD12345678ABCD12345678"), $eq: mongo.ObjectID.createFromHexString("ACBD12345678ABCD12345678"),
} },
}) })
expect(args[1]).toEqual({ expect(args[1]).toEqual({
$set: { $set: {
_id: mongo.ObjectID.createFromHexString("FFFF12345678ABCD12345678"), _id: mongo.ObjectID.createFromHexString("FFFF12345678ABCD12345678"),
} },
}) })
expect(args[2]).toEqual({ expect(args[2]).toEqual({
upsert: true upsert: true,
}) })
}) })
@ -181,12 +188,12 @@ describe("MongoDB Integration", () => {
filter: { filter: {
_id: { _id: {
$eq: "ObjectId('ACBD12345678ABCD12345678')", $eq: "ObjectId('ACBD12345678ABCD12345678')",
} },
}, },
update: { update: {
$set: { $set: {
name: "UPDATED", name: "UPDATED",
age: 99 age: 99,
}, },
}, },
options: { options: {
@ -197,21 +204,21 @@ describe("MongoDB Integration", () => {
} }
await config.integration.read(query) await config.integration.read(query)
expect(config.integration.client.findOneAndUpdate).toHaveBeenCalled() expect(config.integration.client.findOneAndUpdate).toHaveBeenCalled()
const args = config.integration.client.findOneAndUpdate.mock.calls[0] const args = config.integration.client.findOneAndUpdate.mock.calls[0]
expect(args[0]).toEqual({ expect(args[0]).toEqual({
_id: { _id: {
$eq: mongo.ObjectID.createFromHexString("ACBD12345678ABCD12345678"), $eq: mongo.ObjectID.createFromHexString("ACBD12345678ABCD12345678"),
} },
}) })
expect(args[1]).toEqual({ expect(args[1]).toEqual({
$set: { $set: {
name: "UPDATED", name: "UPDATED",
age: 99 age: 99,
} },
}) })
expect(args[2]).toEqual({ expect(args[2]).toEqual({
upsert: false upsert: false,
}) })
}) })
@ -242,12 +249,12 @@ describe("MongoDB Integration", () => {
} }
await config.integration.update(query) await config.integration.update(query)
expect(config.integration.client.updateOne).toHaveBeenCalled() expect(config.integration.client.updateOne).toHaveBeenCalled()
const args = config.integration.client.updateOne.mock.calls[0] const args = config.integration.client.updateOne.mock.calls[0]
expect(args[0]).toEqual({ expect(args[0]).toEqual({
_id: { _id: {
$eq: mongo.ObjectID.createFromHexString("ACBD12345678ABCD12345678"), $eq: mongo.ObjectID.createFromHexString("ACBD12345678ABCD12345678"),
} },
}) })
expect(args[1]).toEqual({ expect(args[1]).toEqual({
$set: { $set: {
@ -255,15 +262,17 @@ describe("MongoDB Integration", () => {
data: [ data: [
{ cid: 1 }, { cid: 1 },
{ cid: 2 }, { cid: 2 },
{ nested: { {
name: "test" nested: {
}} name: "test",
] },
},
],
}, },
}, },
}) })
expect(args[2]).toEqual({ expect(args[2]).toEqual({
upsert: true upsert: true,
}) })
}) })
@ -295,12 +304,12 @@ describe("MongoDB Integration", () => {
} }
await config.integration.update(query) await config.integration.update(query)
expect(config.integration.client.updateOne).toHaveBeenCalled() expect(config.integration.client.updateOne).toHaveBeenCalled()
const args = config.integration.client.updateOne.mock.calls[0] const args = config.integration.client.updateOne.mock.calls[0]
expect(args[0]).toEqual({ expect(args[0]).toEqual({
_id: { _id: {
$eq: mongo.ObjectID.createFromHexString("ACBD12345678ABCD12345678"), $eq: mongo.ObjectID.createFromHexString("ACBD12345678ABCD12345678"),
} },
}) })
expect(args[1]).toEqual({ expect(args[1]).toEqual({
$set: { $set: {
@ -308,16 +317,18 @@ describe("MongoDB Integration", () => {
data: [ data: [
{ cid: 1 }, { cid: 1 },
{ cid: 2 }, { cid: 2 },
{ nested: { {
name: "te}st" nested: {
}} name: "te}st",
] },
},
],
}, },
}, },
}) })
expect(args[2]).toEqual({ expect(args[2]).toEqual({
upsert: true, upsert: true,
extra: "ad\"{\"d" extra: 'ad"{"d',
}) })
}) })
}) })

View file
const MySQLIntegration = require("../mysql") import { default as MySQLIntegration } from "../mysql"
jest.mock("mysql2") jest.mock("mysql2")
class TestConfiguration { class TestConfiguration {
constructor(config = { ssl: {} }) { integration: any
constructor(config: any = { ssl: {} }) {
this.integration = new MySQLIntegration.integration(config) this.integration = new MySQLIntegration.integration(config)
} }
} }
describe("MySQL Integration", () => { describe("MySQL Integration", () => {
let config let config: any
beforeEach(() => { beforeEach(() => {
config = new TestConfiguration() config = new TestConfiguration()
@ -17,7 +19,7 @@ describe("MySQL Integration", () => {
it("calls the create method with the correct params", async () => { it("calls the create method with the correct params", async () => {
const sql = "insert into users (name, age) values ('Joe', 123);" const sql = "insert into users (name, age) values ('Joe', 123);"
await config.integration.create({ await config.integration.create({
sql sql,
}) })
expect(config.integration.client.query).toHaveBeenCalledWith(sql, []) expect(config.integration.client.query).toHaveBeenCalledWith(sql, [])
}) })
@ -25,7 +27,7 @@ describe("MySQL Integration", () => {
it("calls the read method with the correct params", async () => { it("calls the read method with the correct params", async () => {
const sql = "select * from users;" const sql = "select * from users;"
await config.integration.read({ await config.integration.read({
sql sql,
}) })
expect(config.integration.client.query).toHaveBeenCalledWith(sql, []) expect(config.integration.client.query).toHaveBeenCalledWith(sql, [])
}) })
@ -33,7 +35,7 @@ describe("MySQL Integration", () => {
it("calls the update method with the correct params", async () => { it("calls the update method with the correct params", async () => {
const sql = "update table users set name = 'test';" const sql = "update table users set name = 'test';"
await config.integration.update({ await config.integration.update({
sql sql,
}) })
expect(config.integration.client.query).toHaveBeenCalledWith(sql, []) expect(config.integration.client.query).toHaveBeenCalledWith(sql, [])
}) })
@ -41,34 +43,34 @@ describe("MySQL Integration", () => {
it("calls the delete method with the correct params", async () => { it("calls the delete method with the correct params", async () => {
const sql = "delete from users where name = 'todelete';" const sql = "delete from users where name = 'todelete';"
await config.integration.delete({ await config.integration.delete({
sql sql,
}) })
expect(config.integration.client.query).toHaveBeenCalledWith(sql, []) expect(config.integration.client.query).toHaveBeenCalledWith(sql, [])
}) })
describe("no rows returned", () => { describe("no rows returned", () => {
it("returns the correct response when the create response has no rows", async () => { it("returns the correct response when the create response has no rows", async () => {
const sql = "insert into users (name, age) values ('Joe', 123);" const sql = "insert into users (name, age) values ('Joe', 123);"
const response = await config.integration.create({ const response = await config.integration.create({
sql sql,
}) })
expect(response).toEqual([{ created: true }]) expect(response).toEqual([{ created: true }])
}) })
it("returns the correct response when the update response has no rows", async () => { it("returns the correct response when the update response has no rows", async () => {
const sql = "update table users set name = 'test';" const sql = "update table users set name = 'test';"
const response = await config.integration.update({ const response = await config.integration.update({
sql sql,
}) })
expect(response).toEqual([{ updated: true }]) expect(response).toEqual([{ updated: true }])
}) })
it("returns the correct response when the delete response has no rows", async () => { it("returns the correct response when the delete response has no rows", async () => {
const sql = "delete from users where name = 'todelete';" const sql = "delete from users where name = 'todelete';"
const response = await config.integration.delete({ const response = await config.integration.delete({
sql sql,
}) })
expect(response).toEqual([{ deleted: true }]) expect(response).toEqual([{ deleted: true }])
}) })
}) })
}) })
View file
@ -1,17 +1,19 @@
const oracledb = require("oracledb") const oracledb = require("oracledb")
const OracleIntegration = require("../oracle") import { default as OracleIntegration } from "../oracle"
jest.mock("oracledb") jest.mock("oracledb")
class TestConfiguration { class TestConfiguration {
constructor(config = {}) { integration: any
this.integration = new OracleIntegration.integration(config)
constructor(config: any = {}) {
this.integration = new OracleIntegration.integration(config)
} }
} }
const options = { autoCommit: true } const options = { autoCommit: true }
describe("Oracle Integration", () => { describe("Oracle Integration", () => {
let config let config: any
beforeEach(() => { beforeEach(() => {
jest.clearAllMocks() jest.clearAllMocks()
@ -26,7 +28,7 @@ describe("Oracle Integration", () => {
it("calls the create method with the correct params", async () => { it("calls the create method with the correct params", async () => {
const sql = "insert into users (name, age) values ('Joe', 123);" const sql = "insert into users (name, age) values ('Joe', 123);"
await config.integration.create({ await config.integration.create({
sql sql,
}) })
expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options) expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options)
expect(oracledb.executeMock).toHaveBeenCalledTimes(1) expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
@ -35,7 +37,7 @@ describe("Oracle Integration", () => {
it("calls the read method with the correct params", async () => { it("calls the read method with the correct params", async () => {
const sql = "select * from users;" const sql = "select * from users;"
await config.integration.read({ await config.integration.read({
sql sql,
}) })
expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options) expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options)
expect(oracledb.executeMock).toHaveBeenCalledTimes(1) expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
@ -43,8 +45,8 @@ describe("Oracle Integration", () => {
it("calls the update method with the correct params", async () => { it("calls the update method with the correct params", async () => {
const sql = "update table users set name = 'test';" const sql = "update table users set name = 'test';"
const response = await config.integration.update({ const response = await config.integration.update({
sql sql,
}) })
expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options) expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options)
expect(oracledb.executeMock).toHaveBeenCalledTimes(1) expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
@ -53,7 +55,7 @@ describe("Oracle Integration", () => {
it("calls the delete method with the correct params", async () => { it("calls the delete method with the correct params", async () => {
const sql = "delete from users where name = 'todelete';" const sql = "delete from users where name = 'todelete';"
await config.integration.delete({ await config.integration.delete({
sql sql,
}) })
expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options) expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options)
expect(oracledb.executeMock).toHaveBeenCalledTimes(1) expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
@ -65,9 +67,9 @@ describe("Oracle Integration", () => {
}) })
it("returns the correct response when the create response has no rows", async () => { it("returns the correct response when the create response has no rows", async () => {
const sql = "insert into users (name, age) values ('Joe', 123);" const sql = "insert into users (name, age) values ('Joe', 123);"
const response = await config.integration.create({ const response = await config.integration.create({
sql sql,
}) })
expect(response).toEqual([{ created: true }]) expect(response).toEqual([{ created: true }])
expect(oracledb.executeMock).toHaveBeenCalledTimes(1) expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
@ -75,8 +77,8 @@ describe("Oracle Integration", () => {
it("returns the correct response when the update response has no rows", async () => { it("returns the correct response when the update response has no rows", async () => {
const sql = "update table users set name = 'test';" const sql = "update table users set name = 'test';"
const response = await config.integration.update({ const response = await config.integration.update({
sql sql,
}) })
expect(response).toEqual([{ updated: true }]) expect(response).toEqual([{ updated: true }])
expect(oracledb.executeMock).toHaveBeenCalledTimes(1) expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
@ -84,11 +86,11 @@ describe("Oracle Integration", () => {
it("returns the correct response when the delete response has no rows", async () => { it("returns the correct response when the delete response has no rows", async () => {
const sql = "delete from users where name = 'todelete';" const sql = "delete from users where name = 'todelete';"
const response = await config.integration.delete({ const response = await config.integration.delete({
sql sql,
}) })
expect(response).toEqual([{ deleted: true }]) expect(response).toEqual([{ deleted: true }])
expect(oracledb.executeMock).toHaveBeenCalledTimes(1) expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
}) })
}) })
}) })
View file
@ -1,15 +1,17 @@
const pg = require("pg") const pg = require("pg")
const PostgresIntegration = require("../postgres") import { default as PostgresIntegration } from "../postgres"
jest.mock("pg") jest.mock("pg")
class TestConfiguration { class TestConfiguration {
constructor(config = {}) { integration: any
this.integration = new PostgresIntegration.integration(config)
constructor(config: any = {}) {
this.integration = new PostgresIntegration.integration(config)
} }
} }
describe("Postgres Integration", () => { describe("Postgres Integration", () => {
let config let config: any
beforeEach(() => { beforeEach(() => {
config = new TestConfiguration() config = new TestConfiguration()
@ -18,7 +20,7 @@ describe("Postgres Integration", () => {
it("calls the create method with the correct params", async () => { it("calls the create method with the correct params", async () => {
const sql = "insert into users (name, age) values ('Joe', 123);" const sql = "insert into users (name, age) values ('Joe', 123);"
await config.integration.create({ await config.integration.create({
sql sql,
}) })
expect(pg.queryMock).toHaveBeenCalledWith(sql, []) expect(pg.queryMock).toHaveBeenCalledWith(sql, [])
}) })
@ -26,15 +28,15 @@ describe("Postgres Integration", () => {
it("calls the read method with the correct params", async () => { it("calls the read method with the correct params", async () => {
const sql = "select * from users;" const sql = "select * from users;"
await config.integration.read({ await config.integration.read({
sql sql,
}) })
expect(pg.queryMock).toHaveBeenCalledWith(sql, []) expect(pg.queryMock).toHaveBeenCalledWith(sql, [])
}) })
it("calls the update method with the correct params", async () => { it("calls the update method with the correct params", async () => {
const sql = "update table users set name = 'test';" const sql = "update table users set name = 'test';"
const response = await config.integration.update({ const response = await config.integration.update({
sql sql,
}) })
expect(pg.queryMock).toHaveBeenCalledWith(sql, []) expect(pg.queryMock).toHaveBeenCalledWith(sql, [])
}) })
@ -42,7 +44,7 @@ describe("Postgres Integration", () => {
it("calls the delete method with the correct params", async () => { it("calls the delete method with the correct params", async () => {
const sql = "delete from users where name = 'todelete';" const sql = "delete from users where name = 'todelete';"
await config.integration.delete({ await config.integration.delete({
sql sql,
}) })
expect(pg.queryMock).toHaveBeenCalledWith(sql, []) expect(pg.queryMock).toHaveBeenCalledWith(sql, [])
}) })
@ -53,27 +55,27 @@ describe("Postgres Integration", () => {
}) })
it("returns the correct response when the create response has no rows", async () => { it("returns the correct response when the create response has no rows", async () => {
const sql = "insert into users (name, age) values ('Joe', 123);" const sql = "insert into users (name, age) values ('Joe', 123);"
const response = await config.integration.create({ const response = await config.integration.create({
sql sql,
}) })
expect(response).toEqual([{ created: true }]) expect(response).toEqual([{ created: true }])
}) })
it("returns the correct response when the update response has no rows", async () => { it("returns the correct response when the update response has no rows", async () => {
const sql = "update table users set name = 'test';" const sql = "update table users set name = 'test';"
const response = await config.integration.update({ const response = await config.integration.update({
sql sql,
}) })
expect(response).toEqual([{ updated: true }]) expect(response).toEqual([{ updated: true }])
}) })
it("returns the correct response when the delete response has no rows", async () => { it("returns the correct response when the delete response has no rows", async () => {
const sql = "delete from users where name = 'todelete';" const sql = "delete from users where name = 'todelete';"
const response = await config.integration.delete({ const response = await config.integration.delete({
sql sql,
}) })
expect(response).toEqual([{ deleted: true }]) expect(response).toEqual([{ deleted: true }])
}) })
}) })
}) })
View file
@ -1,13 +1,16 @@
const Redis = require("ioredis-mock") const Redis = require("ioredis-mock")
const RedisIntegration = require("../redis") import { default as RedisIntegration } from "../redis"
class TestConfiguration { class TestConfiguration {
constructor(config = {}) { integration: any
this.integration = new RedisIntegration.integration(config) redis: any
constructor(config: any = {}) {
this.integration = new RedisIntegration.integration(config)
this.redis = new Redis({ this.redis = new Redis({
data: { data: {
test: 'test', test: "test",
result: "1" result: "1",
}, },
}) })
this.integration.client = this.redis this.integration.client = this.redis
@ -15,7 +18,7 @@ class TestConfiguration {
} }
describe("Redis Integration", () => { describe("Redis Integration", () => {
let config let config: any
beforeEach(() => { beforeEach(() => {
config = new TestConfiguration() config = new TestConfiguration()
@ -24,7 +27,7 @@ describe("Redis Integration", () => {
it("calls the create method with the correct params", async () => { it("calls the create method with the correct params", async () => {
const body = { const body = {
key: "key", key: "key",
value: "value" value: "value",
} }
const response = await config.integration.create(body) const response = await config.integration.create(body)
expect(await config.redis.get("key")).toEqual("value") expect(await config.redis.get("key")).toEqual("value")
@ -32,7 +35,7 @@ describe("Redis Integration", () => {
it("calls the read method with the correct params", async () => { it("calls the read method with the correct params", async () => {
const body = { const body = {
key: "test" key: "test",
} }
const response = await config.integration.read(body) const response = await config.integration.read(body)
expect(response).toEqual("test") expect(response).toEqual("test")
@ -40,7 +43,7 @@ describe("Redis Integration", () => {
it("calls the delete method with the correct params", async () => { it("calls the delete method with the correct params", async () => {
const body = { const body = {
key: "test" key: "test",
} }
await config.integration.delete(body) await config.integration.delete(body)
expect(await config.redis.get(body.key)).toEqual(null) expect(await config.redis.get(body.key)).toEqual(null)
@ -48,13 +51,17 @@ describe("Redis Integration", () => {
it("calls the command method with the correct params", async () => { it("calls the command method with the correct params", async () => {
const body = { const body = {
json: "KEYS *" json: "KEYS *",
} }
// ioredis-mock doesn't support pipelines // ioredis-mock doesn't support pipelines
config.integration.client.pipeline = jest.fn(() => ({ exec: jest.fn(() => [[]]) })) config.integration.client.pipeline = jest.fn(() => ({
exec: jest.fn(() => [[]]),
}))
await config.integration.command(body) await config.integration.command(body)
expect(config.integration.client.pipeline).toHaveBeenCalledWith([["KEYS", "*"]]) expect(config.integration.client.pipeline).toHaveBeenCalledWith([
["KEYS", "*"],
])
}) })
}) })
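The command test at the end asserts that a raw command string such as "KEYS *" reaches pipeline() as [["KEYS", "*"]]. A tiny helper that would satisfy that expectation is sketched below; toPipelineArgs is a hypothetical name, and the real integration may tokenise commands differently (for example, handling quoted arguments).

// Illustrative sketch: split a raw command block into pipeline arguments.
function toPipelineArgs(raw: string): string[][] {
  return raw
    .split("\n")
    .map(line => line.trim())
    .filter(line => line.length > 0)
    .map(line => line.split(" "))
}

// toPipelineArgs("KEYS *") returns [["KEYS", "*"]]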
Some files were not shown because too many files have changed in this diff.