
Build and test fixes

Rory Powell 2022-05-03 22:58:19 +01:00
parent e1a656b76f
commit 606d21b313
31 changed files with 1314 additions and 1597 deletions

View file

@ -2,7 +2,7 @@
"name": "@budibase/backend-core", "name": "@budibase/backend-core",
"version": "1.0.126-alpha.0", "version": "1.0.126-alpha.0",
"description": "Budibase backend core libraries used in server and worker", "description": "Budibase backend core libraries used in server and worker",
"main": "src/index.js", "main": "src/index.ts",
"types": "dist/src/index.d.ts", "types": "dist/src/index.d.ts",
"author": "Budibase", "author": "Budibase",
"license": "GPL-3.0", "license": "GPL-3.0",
@ -36,22 +36,29 @@
"zlib": "^1.0.5" "zlib": "^1.0.5"
}, },
"jest": { "jest": {
"preset": "ts-jest",
"testEnvironment": "node",
"setupFiles": [ "setupFiles": [
"./scripts/jestSetup.js" "./scripts/jestSetup.ts"
] ]
}, },
"devDependencies": { "devDependencies": {
"@shopify/jest-koa-mocks": "^3.1.5",
"@budibase/types": "^1.0.126-alpha.0",
"@types/jest": "^27.4.1", "@types/jest": "^27.4.1",
"@types/koa": "^2.13.3",
"@types/node": "^15.12.4", "@types/node": "^15.12.4",
"@types/node-fetch": "^2.6.1", "@types/node-fetch": "^2.6.1",
"@budibase/types": "^1.0.126-alpha.0", "@types/tar-fs": "^2.0.1",
"typescript": "^4.5.5", "@types/uuid": "^8.3.4",
"@shopify/jest-koa-mocks": "^3.1.5",
"ioredis-mock": "^5.5.5", "ioredis-mock": "^5.5.5",
"jest": "^26.6.3", "jest": "^27.0.3",
"koa": "2.7.0",
"pouchdb-adapter-memory": "^7.2.2", "pouchdb-adapter-memory": "^7.2.2",
"pouchdb-all-dbs": "^1.0.2", "pouchdb-all-dbs": "^1.0.2",
"timekeeper": "^2.2.0" "timekeeper": "^2.2.0",
"ts-jest": "^27.0.3",
"typescript": "^4.5.5"
}, },
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc" "gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"
} }
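
The "jest" block above now compiles tests through ts-jest and points the setup file at the TypeScript version. As a standalone jest.config.ts this would look roughly like the sketch below (an equivalent only; the repository keeps the config inline in package.json):

import type { Config } from "@jest/types"

// Equivalent of the inline "jest" block: compile TS tests with ts-jest,
// run them in a Node environment, and load the TypeScript setup file first.
const config: Config.InitialOptions = {
  preset: "ts-jest",
  testEnvironment: "node",
  setupFiles: ["./scripts/jestSetup.ts"],
}

export default config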

View file

@ -1,9 +1,9 @@
const env = require("../src/environment") import env from "../src/environment"
const { mocks } = require("../testUtils") import { mocks } from "../testUtils"
// mock all dates to 2020-01-01T00:00:00.000Z // mock all dates to 2020-01-01T00:00:00.000Z
// use tk.reset() to use real dates in individual tests // use tk.reset() to use real dates in individual tests
const tk = require("timekeeper") import tk from "timekeeper"
tk.freeze(mocks.date.MOCK_DATE) tk.freeze(mocks.date.MOCK_DATE)
env._set("SELF_HOSTED", "1") env._set("SELF_HOSTED", "1")

View file

@ -1,6 +1,6 @@
import { newid } from "../hashing" import { newid } from "../hashing"
import { DEFAULT_TENANT_ID, Configs } from "../constants" import { DEFAULT_TENANT_ID, Configs } from "../constants"
import * as env from "../environment" import env from "../environment"
import { SEPARATOR, DocumentTypes } from "./constants" import { SEPARATOR, DocumentTypes } from "./constants"
import { getTenantId, getGlobalDBName } from "../tenancy" import { getTenantId, getGlobalDBName } from "../tenancy"
import fetch from "node-fetch" import fetch from "node-fetch"

View file

@ -6,7 +6,7 @@ function isTest() {
) )
} }
module.exports = { export = {
JWT_SECRET: process.env.JWT_SECRET, JWT_SECRET: process.env.JWT_SECRET,
COUCH_DB_URL: process.env.COUCH_DB_URL, COUCH_DB_URL: process.env.COUCH_DB_URL,
COUCH_DB_USERNAME: process.env.COUCH_DB_USER, COUCH_DB_USERNAME: process.env.COUCH_DB_USER,
@ -26,7 +26,7 @@ module.exports = {
process.env.ACCOUNT_PORTAL_URL || "https://account.budibase.app", process.env.ACCOUNT_PORTAL_URL || "https://account.budibase.app",
ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY, ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY,
DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL, DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL,
SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED), SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED || ""),
COOKIE_DOMAIN: process.env.COOKIE_DOMAIN, COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,
PLATFORM_URL: process.env.PLATFORM_URL, PLATFORM_URL: process.env.PLATFORM_URL,
POSTHOG_TOKEN: process.env.POSTHOG_TOKEN, POSTHOG_TOKEN: process.env.POSTHOG_TOKEN,
@ -34,7 +34,7 @@ module.exports = {
TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS, TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS,
USE_COUCH: process.env.USE_COUCH || true, USE_COUCH: process.env.USE_COUCH || true,
isTest, isTest,
_set(key, value) { _set(key: any, value: any) {
process.env[key] = value process.env[key] = value
module.exports[key] = value module.exports[key] = value
}, },
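
The environment module above moves from module.exports to TypeScript's export =, which keeps existing require("../environment") callers working, and _set gains explicit parameter types. A minimal self-contained sketch of the pattern, trimmed to two variables (the write-back through a local reference is a simplification; the diff keeps writing to module.exports):

// Sketch only: a cut-down environment module using the `export =` pattern.
const environment = {
  JWT_SECRET: process.env.JWT_SECRET,
  // under "strict", process.env values are possibly undefined, hence the "" fallback
  SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED || ""),
  _set(key: any, value: any) {
    process.env[key] = value
    ;(environment as any)[key] = value // the diff uses module.exports[key] = value
  },
}

export = environment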

View file

@ -1,10 +1,10 @@
import { AppServedEvent } from "./../../../../types/src/events/serve"
import { processEvent } from "../events" import { processEvent } from "../events"
import { import {
App, App,
BuilderServedEvent, BuilderServedEvent,
Events, Events,
AppPreviewServedEvent, AppPreviewServedEvent,
AppServedEvent,
} from "@budibase/types" } from "@budibase/types"
/* eslint-disable */ /* eslint-disable */

View file

@ -11,6 +11,9 @@ import {
ViewFilterDeletedEvent, ViewFilterDeletedEvent,
ViewFilterUpdatedEvent, ViewFilterUpdatedEvent,
ViewUpdatedEvent, ViewUpdatedEvent,
View,
Table,
TableExportFormat,
} from "@budibase/types" } from "@budibase/types"
/* eslint-disable */ /* eslint-disable */
@ -30,7 +33,7 @@ export function deleted() {
processEvent(Events.VIEW_DELETED, properties) processEvent(Events.VIEW_DELETED, properties)
} }
export function exported(table, format) { export function exported(table: Table, format: TableExportFormat) {
const properties: ViewExportedEvent = {} const properties: ViewExportedEvent = {}
processEvent(Events.VIEW_EXPORTED, properties) processEvent(Events.VIEW_EXPORTED, properties)
} }

View file

@ -1,9 +1,9 @@
const db = require("./db") import db from "./db"
const errors = require("./errors") import errors from "./errors"
import * as events from "./events" import * as events from "./events"
module.exports = { export = {
init(opts = {}) { init(opts: any = {}) {
db.init(opts.db) db.init(opts.db)
}, },
// some default exports from the library, however these ideally shouldn't // some default exports from the library, however these ideally shouldn't
@ -18,7 +18,7 @@ module.exports = {
auth: require("../auth"), auth: require("../auth"),
constants: require("../constants"), constants: require("../constants"),
migrations: require("../migrations"), migrations: require("../migrations"),
errors: require("./errors"), errors,
...errors.errors, ...errors.errors,
env: require("./environment"), env: require("./environment"),
accounts: require("./cloud/accounts"), accounts: require("./cloud/accounts"),

View file

@ -1,9 +1,9 @@
import sanitize from "sanitize-s3-objectkey" const sanitize = require("sanitize-s3-objectkey")
import AWS from "aws-sdk" import AWS from "aws-sdk"
import stream from "stream" import stream from "stream"
import fetch from "node-fetch" import fetch from "node-fetch"
import tar from "tar-fs" import tar from "tar-fs"
import zlib from "zlib" const zlib = require("zlib")
import { promisify } from "util" import { promisify } from "util"
import { join } from "path" import { join } from "path"
import fs from "fs" import fs from "fs"
@ -18,7 +18,7 @@ const STATE = {
bucketCreationPromises: {}, bucketCreationPromises: {},
} }
const CONTENT_TYPE_MAP = { const CONTENT_TYPE_MAP: any = {
html: "text/html", html: "text/html",
css: "text/css", css: "text/css",
js: "application/javascript", js: "application/javascript",
@ -32,20 +32,16 @@ const STRING_CONTENT_TYPES = [
] ]
// does normal sanitization and then swaps dev apps to apps // does normal sanitization and then swaps dev apps to apps
function sanitizeKey(input) { export function sanitizeKey(input: any) {
return sanitize(sanitizeBucket(input)).replace(/\\/g, "/") return sanitize(sanitizeBucket(input)).replace(/\\/g, "/")
} }
exports.sanitizeKey = sanitizeKey
// simply handles the dev app to app conversion // simply handles the dev app to app conversion
function sanitizeBucket(input) { export function sanitizeBucket(input: any) {
return input.replace(new RegExp(APP_DEV_PREFIX, "g"), APP_PREFIX) return input.replace(new RegExp(APP_DEV_PREFIX, "g"), APP_PREFIX)
} }
exports.sanitizeBucket = sanitizeBucket function publicPolicy(bucketName: any) {
function publicPolicy(bucketName) {
return { return {
Version: "2012-10-17", Version: "2012-10-17",
Statement: [ Statement: [
@ -69,13 +65,13 @@ const PUBLIC_BUCKETS = [ObjectStoreBuckets.APPS, ObjectStoreBuckets.GLOBAL]
* @return {Object} an S3 object store object, check S3 Nodejs SDK for usage. * @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
* @constructor * @constructor
*/ */
exports.ObjectStore = bucket => { export const ObjectStore = (bucket: any) => {
AWS.config.update({ AWS.config.update({
accessKeyId: env.MINIO_ACCESS_KEY, accessKeyId: env.MINIO_ACCESS_KEY,
secretAccessKey: env.MINIO_SECRET_KEY, secretAccessKey: env.MINIO_SECRET_KEY,
region: env.AWS_REGION, region: env.AWS_REGION,
}) })
const config = { const config: any = {
s3ForcePathStyle: true, s3ForcePathStyle: true,
signatureVersion: "v4", signatureVersion: "v4",
apiVersion: "2006-03-01", apiVersion: "2006-03-01",
@ -93,7 +89,7 @@ exports.ObjectStore = bucket => {
* Given an object store and a bucket name this will make sure the bucket exists, * Given an object store and a bucket name this will make sure the bucket exists,
* if it does not exist then it will create it. * if it does not exist then it will create it.
*/ */
exports.makeSureBucketExists = async (client, bucketName) => { export const makeSureBucketExists = async (client: any, bucketName: any) => {
bucketName = sanitizeBucket(bucketName) bucketName = sanitizeBucket(bucketName)
try { try {
await client await client
@ -101,8 +97,8 @@ exports.makeSureBucketExists = async (client, bucketName) => {
Bucket: bucketName, Bucket: bucketName,
}) })
.promise() .promise()
} catch (err) { } catch (err: any) {
const promises = STATE.bucketCreationPromises const promises: any = STATE.bucketCreationPromises
const doesntExist = err.statusCode === 404, const doesntExist = err.statusCode === 404,
noAccess = err.statusCode === 403 noAccess = err.statusCode === 403
if (promises[bucketName]) { if (promises[bucketName]) {
@ -138,20 +134,20 @@ exports.makeSureBucketExists = async (client, bucketName) => {
* Uploads the contents of a file given the required parameters, useful when * Uploads the contents of a file given the required parameters, useful when
* temp files in use (for example file uploaded as an attachment). * temp files in use (for example file uploaded as an attachment).
*/ */
exports.upload = async ({ export const upload = async ({
bucket: bucketName, bucket: bucketName,
filename, filename,
path, path,
type, type,
metadata, metadata,
}) => { }: any) => {
const extension = [...filename.split(".")].pop() const extension = [...filename.split(".")].pop()
const fileBytes = fs.readFileSync(path) const fileBytes = fs.readFileSync(path)
const objectStore = exports.ObjectStore(bucketName) const objectStore = ObjectStore(bucketName)
await exports.makeSureBucketExists(objectStore, bucketName) await makeSureBucketExists(objectStore, bucketName)
const config = { const config: any = {
// windows file paths need to be converted to forward slashes for s3 // windows file paths need to be converted to forward slashes for s3
Key: sanitizeKey(filename), Key: sanitizeKey(filename),
Body: fileBytes, Body: fileBytes,
@ -167,9 +163,14 @@ exports.upload = async ({
* Similar to the upload function but can be used to send a file stream * Similar to the upload function but can be used to send a file stream
* through to the object store. * through to the object store.
*/ */
exports.streamUpload = async (bucketName, filename, stream, extra = {}) => { export const streamUpload = async (
const objectStore = exports.ObjectStore(bucketName) bucketName: any,
await exports.makeSureBucketExists(objectStore, bucketName) filename: any,
stream: any,
extra = {}
) => {
const objectStore = ObjectStore(bucketName)
await makeSureBucketExists(objectStore, bucketName)
const params = { const params = {
Bucket: sanitizeBucket(bucketName), Bucket: sanitizeBucket(bucketName),
@ -184,13 +185,13 @@ exports.streamUpload = async (bucketName, filename, stream, extra = {}) => {
* retrieves the contents of a file from the object store, if it is a known content type it * retrieves the contents of a file from the object store, if it is a known content type it
* will be converted, otherwise it will be returned as a buffer stream. * will be converted, otherwise it will be returned as a buffer stream.
*/ */
exports.retrieve = async (bucketName, filepath) => { export const retrieve = async (bucketName: any, filepath: any) => {
const objectStore = exports.ObjectStore(bucketName) const objectStore = ObjectStore(bucketName)
const params = { const params = {
Bucket: sanitizeBucket(bucketName), Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(filepath), Key: sanitizeKey(filepath),
} }
const response = await objectStore.getObject(params).promise() const response: any = await objectStore.getObject(params).promise()
// currently these are all strings // currently these are all strings
if (STRING_CONTENT_TYPES.includes(response.ContentType)) { if (STRING_CONTENT_TYPES.includes(response.ContentType)) {
return response.Body.toString("utf8") return response.Body.toString("utf8")
@ -202,10 +203,10 @@ exports.retrieve = async (bucketName, filepath) => {
/** /**
* Same as retrieval function but puts to a temporary file. * Same as retrieval function but puts to a temporary file.
*/ */
exports.retrieveToTmp = async (bucketName, filepath) => { export const retrieveToTmp = async (bucketName: any, filepath: any) => {
bucketName = sanitizeBucket(bucketName) bucketName = sanitizeBucket(bucketName)
filepath = sanitizeKey(filepath) filepath = sanitizeKey(filepath)
const data = await exports.retrieve(bucketName, filepath) const data = await retrieve(bucketName, filepath)
const outputPath = join(budibaseTempDir(), v4()) const outputPath = join(budibaseTempDir(), v4())
fs.writeFileSync(outputPath, data) fs.writeFileSync(outputPath, data)
return outputPath return outputPath
@ -214,9 +215,9 @@ exports.retrieveToTmp = async (bucketName, filepath) => {
/** /**
* Delete a single file. * Delete a single file.
*/ */
exports.deleteFile = async (bucketName, filepath) => { export const deleteFile = async (bucketName: any, filepath: any) => {
const objectStore = exports.ObjectStore(bucketName) const objectStore = ObjectStore(bucketName)
await exports.makeSureBucketExists(objectStore, bucketName) await makeSureBucketExists(objectStore, bucketName)
const params = { const params = {
Bucket: bucketName, Bucket: bucketName,
Key: filepath, Key: filepath,
@ -224,13 +225,13 @@ exports.deleteFile = async (bucketName, filepath) => {
return objectStore.deleteObject(params) return objectStore.deleteObject(params)
} }
exports.deleteFiles = async (bucketName, filepaths) => { export const deleteFiles = async (bucketName: any, filepaths: any) => {
const objectStore = exports.ObjectStore(bucketName) const objectStore = ObjectStore(bucketName)
await exports.makeSureBucketExists(objectStore, bucketName) await makeSureBucketExists(objectStore, bucketName)
const params = { const params = {
Bucket: bucketName, Bucket: bucketName,
Delete: { Delete: {
Objects: filepaths.map(path => ({ Key: path })), Objects: filepaths.map((path: any) => ({ Key: path })),
}, },
} }
return objectStore.deleteObjects(params).promise() return objectStore.deleteObjects(params).promise()
@ -239,38 +240,45 @@ exports.deleteFiles = async (bucketName, filepaths) => {
/** /**
* Delete a path, including everything within. * Delete a path, including everything within.
*/ */
exports.deleteFolder = async (bucketName, folder) => { export const deleteFolder = async (
bucketName: any,
folder: any
): Promise<any> => {
bucketName = sanitizeBucket(bucketName) bucketName = sanitizeBucket(bucketName)
folder = sanitizeKey(folder) folder = sanitizeKey(folder)
const client = exports.ObjectStore(bucketName) const client = ObjectStore(bucketName)
const listParams = { const listParams = {
Bucket: bucketName, Bucket: bucketName,
Prefix: folder, Prefix: folder,
} }
let response = await client.listObjects(listParams).promise() let response: any = await client.listObjects(listParams).promise()
if (response.Contents.length === 0) { if (response.Contents.length === 0) {
return return
} }
const deleteParams = { const deleteParams: any = {
Bucket: bucketName, Bucket: bucketName,
Delete: { Delete: {
Objects: [], Objects: [],
}, },
} }
response.Contents.forEach(content => { response.Contents.forEach((content: any) => {
deleteParams.Delete.Objects.push({ Key: content.Key }) deleteParams.Delete.Objects.push({ Key: content.Key })
}) })
response = await client.deleteObjects(deleteParams).promise() response = await client.deleteObjects(deleteParams).promise()
// can only empty 1000 items at once // can only empty 1000 items at once
if (response.Deleted.length === 1000) { if (response.Deleted.length === 1000) {
return exports.deleteFolder(bucketName, folder) return deleteFolder(bucketName, folder)
} }
} }
exports.uploadDirectory = async (bucketName, localPath, bucketPath) => { export const uploadDirectory = async (
bucketName: any,
localPath: any,
bucketPath: any
) => {
bucketName = sanitizeBucket(bucketName) bucketName = sanitizeBucket(bucketName)
let uploads = [] let uploads = []
const files = fs.readdirSync(localPath, { withFileTypes: true }) const files = fs.readdirSync(localPath, { withFileTypes: true })
@ -278,17 +286,15 @@ exports.uploadDirectory = async (bucketName, localPath, bucketPath) => {
const path = sanitizeKey(join(bucketPath, file.name)) const path = sanitizeKey(join(bucketPath, file.name))
const local = join(localPath, file.name) const local = join(localPath, file.name)
if (file.isDirectory()) { if (file.isDirectory()) {
uploads.push(exports.uploadDirectory(bucketName, local, path)) uploads.push(uploadDirectory(bucketName, local, path))
} else { } else {
uploads.push( uploads.push(streamUpload(bucketName, path, fs.createReadStream(local)))
exports.streamUpload(bucketName, path, fs.createReadStream(local))
)
} }
} }
await Promise.all(uploads) await Promise.all(uploads)
} }
exports.downloadTarball = async (url, bucketName, path) => { export const downloadTarball = async (url: any, bucketName: any, path: any) => {
bucketName = sanitizeBucket(bucketName) bucketName = sanitizeBucket(bucketName)
path = sanitizeKey(path) path = sanitizeKey(path)
const response = await fetch(url) const response = await fetch(url)
@ -299,7 +305,7 @@ exports.downloadTarball = async (url, bucketName, path) => {
const tmpPath = join(budibaseTempDir(), path) const tmpPath = join(budibaseTempDir(), path)
await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath)) await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath))
if (!env.isTest() && env.SELF_HOSTED) { if (!env.isTest() && env.SELF_HOSTED) {
await exports.uploadDirectory(bucketName, tmpPath, path) await uploadDirectory(bucketName, tmpPath, path)
} }
// return the temporary path incase there is a use for it // return the temporary path incase there is a use for it
return tmpPath return tmpPath
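
A recurring change in this file is annotating catch clauses as err: any. With "strict": true on the TypeScript 4.5 toolchain used here, catch variables default to unknown, so property reads such as err.statusCode need a wider annotation or a narrowing check. A small illustrative sketch, not code from the repository:

// Illustrative only: mirrors the statusCode checks in makeSureBucketExists above.
try {
  throw Object.assign(new Error("no such bucket"), { statusCode: 404 })
} catch (err: any) {
  const doesntExist = err.statusCode === 404
  const noAccess = err.statusCode === 403
  console.log({ doesntExist, noAccess })
}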

View file

@ -1,7 +0,0 @@
const mocks = require("./mocks")
const structures = require("./structures")
module.exports = {
mocks,
structures,
}

View file

@ -0,0 +1,2 @@
export * as mocks from "./mocks"
export * as structures from "./structures"

View file

@ -59,7 +59,7 @@ jest.mock("../../../events", () => {
created: jest.fn(), created: jest.fn(),
updated: jest.fn(), updated: jest.fn(),
deleted: jest.fn(), deleted: jest.fn(),
import: jest.fn(), imported: jest.fn(),
previewed: jest.fn(), previewed: jest.fn(),
}, },
role: { role: {
@ -70,7 +70,7 @@ jest.mock("../../../events", () => {
unassigned: jest.fn(), unassigned: jest.fn(),
}, },
row: { row: {
import: jest.fn(), imported: jest.fn(),
}, },
screen: { screen: {
created: jest.fn(), created: jest.fn(),

View file

@ -1,8 +0,0 @@
require("../mocks")
const koa = require("./koa")
const structures = {
koa,
}
module.exports = structures

View file

@ -0,0 +1,3 @@
import "../mocks"
export * as koa from "./koa"

View file

@ -1,5 +0,0 @@
const { createMockContext } = require("@shopify/jest-koa-mocks")
exports.newContext = () => {
return createMockContext()
}

View file

@ -0,0 +1,5 @@
import { createMockContext } from "@shopify/jest-koa-mocks"
export const newContext = () => {
return createMockContext()
}

View file

@ -1 +0,0 @@
module.exports = require("./src/tests/utilities")

View file

@ -0,0 +1 @@
export * from "./src/tests/utilities"
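
With the barrel files above converted to ES module re-exports, test code can use ES imports in place of require, matching the jestSetup.ts change earlier in this commit. A usage sketch (the import path is illustrative):

import tk from "timekeeper"
import { mocks, structures } from "../testUtils"

// freeze time to the shared mock date, as scripts/jestSetup.ts does
tk.freeze(mocks.date.MOCK_DATE)

// build a mock Koa context (backed by @shopify/jest-koa-mocks) for a test
const ctx = structures.koa.newContext()
ctx.status = 200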

File diff suppressed because it is too large

View file

@ -143,6 +143,7 @@
"@babel/core": "^7.14.3", "@babel/core": "^7.14.3",
"@babel/preset-env": "^7.14.4", "@babel/preset-env": "^7.14.4",
"@budibase/standard-components": "^0.9.139", "@budibase/standard-components": "^0.9.139",
"@budibase/types": "^1.0.126-alpha.0",
"@jest/test-sequencer": "^24.8.0", "@jest/test-sequencer": "^24.8.0",
"@types/apidoc": "^0.50.0", "@types/apidoc": "^0.50.0",
"@types/bull": "^3.15.1", "@types/bull": "^3.15.1",

View file

@ -52,6 +52,7 @@ const {
import { getUniqueRows } from "../../utilities/usageQuota/rows" import { getUniqueRows } from "../../utilities/usageQuota/rows"
import { quotas } from "@budibase/pro" import { quotas } from "@budibase/pro"
import { errors, events } from "@budibase/backend-core" import { errors, events } from "@budibase/backend-core"
import { App } from "@budibase/types"
const URL_REGEX_SLASH = /\/|\\/g const URL_REGEX_SLASH = /\/|\\/g
@ -233,7 +234,7 @@ const performAppCreate = async (ctx: any) => {
const apps = await getAllApps({ dev: true }) const apps = await getAllApps({ dev: true })
const name = ctx.request.body.name const name = ctx.request.body.name
checkAppName(ctx, apps, name) checkAppName(ctx, apps, name)
const url = exports.getAppUrl(ctx) const url = getAppUrl(ctx)
checkAppUrl(ctx, apps, url) checkAppUrl(ctx, apps, url)
const { useTemplate, templateKey, templateString } = ctx.request.body const { useTemplate, templateKey, templateString } = ctx.request.body
@ -291,7 +292,7 @@ const performAppCreate = async (ctx: any) => {
return newApplication return newApplication
} }
const creationEvents = (request: any) => { const creationEvents = (request: any, app: App) => {
let creationFns = [] let creationFns = []
const body = request.body const body = request.body
@ -312,15 +313,15 @@ const creationEvents = (request: any) => {
creationFns.push(events.app.created) creationFns.push(events.app.created)
for (let fn of creationFns) { for (let fn of creationFns) {
fn() fn(app)
} }
} }
const appPostCreate = async (ctx: any, appId: string) => { const appPostCreate = async (ctx: any, app: App) => {
creationEvents(ctx.request) creationEvents(ctx.request, app)
// app import & template creation // app import & template creation
if (ctx.request.body.useTemplate === "true") { if (ctx.request.body.useTemplate === "true") {
const rows = await getUniqueRows([appId]) const rows = await getUniqueRows([app.appId])
const rowCount = rows ? rows.length : 0 const rowCount = rows ? rows.length : 0
if (rowCount) { if (rowCount) {
try { try {
@ -330,7 +331,7 @@ const appPostCreate = async (ctx: any, appId: string) => {
// this import resulted in row usage exceeding the quota // this import resulted in row usage exceeding the quota
// delete the app // delete the app
// skip pre and post steps as no rows have been added to quotas yet // skip pre and post steps as no rows have been added to quotas yet
ctx.params.appId = appId ctx.params.appId = app.appId
await destroyApp(ctx) await destroyApp(ctx)
} }
throw err throw err
@ -341,7 +342,7 @@ const appPostCreate = async (ctx: any, appId: string) => {
export const create = async (ctx: any) => { export const create = async (ctx: any) => {
const newApplication = await quotas.addApp(() => performAppCreate(ctx)) const newApplication = await quotas.addApp(() => performAppCreate(ctx))
await appPostCreate(ctx, newApplication.appId) await appPostCreate(ctx, newApplication)
ctx.body = newApplication ctx.body = newApplication
ctx.status = 200 ctx.status = 200
} }
@ -355,16 +356,16 @@ export const update = async (ctx: any) => {
if (name) { if (name) {
checkAppName(ctx, apps, name, ctx.params.appId) checkAppName(ctx, apps, name, ctx.params.appId)
} }
const url = await exports.getAppUrl(ctx) const url = getAppUrl(ctx)
if (url) { if (url) {
checkAppUrl(ctx, apps, url, ctx.params.appId) checkAppUrl(ctx, apps, url, ctx.params.appId)
ctx.request.body.url = url ctx.request.body.url = url
} }
const data = await updateAppPackage(ctx.request.body, ctx.params.appId) const app = await updateAppPackage(ctx.request.body, ctx.params.appId)
events.app.updated() events.app.updated(app)
ctx.status = 200 ctx.status = 200
ctx.body = data ctx.body = app
} }
export const updateClient = async (ctx: any) => { export const updateClient = async (ctx: any) => {
@ -384,10 +385,10 @@ export const updateClient = async (ctx: any) => {
version: packageJson.version, version: packageJson.version,
revertableVersion: currentVersion, revertableVersion: currentVersion,
} }
const data = await updateAppPackage(appPackageUpdates, ctx.params.appId) const app = await updateAppPackage(appPackageUpdates, ctx.params.appId)
events.app.versionUpdated() events.app.versionUpdated(app)
ctx.status = 200 ctx.status = 200
ctx.body = data ctx.body = app
} }
export const revertClient = async (ctx: any) => { export const revertClient = async (ctx: any) => {
@ -408,10 +409,10 @@ export const revertClient = async (ctx: any) => {
version: application.revertableVersion, version: application.revertableVersion,
revertableVersion: null, revertableVersion: null,
} }
const data = await updateAppPackage(appPackageUpdates, ctx.params.appId) const app = await updateAppPackage(appPackageUpdates, ctx.params.appId)
events.app.versionReverted() events.app.versionReverted(app)
ctx.status = 200 ctx.status = 200
ctx.body = data ctx.body = app
} }
const destroyApp = async (ctx: any) => { const destroyApp = async (ctx: any) => {
@ -423,14 +424,15 @@ const destroyApp = async (ctx: any) => {
} }
const db = isUnpublish ? getProdAppDB() : getAppDB() const db = isUnpublish ? getProdAppDB() : getAppDB()
const app = await db.get(DocumentTypes.APP_METADATA)
const result = await db.destroy() const result = await db.destroy()
if (isUnpublish) { if (isUnpublish) {
await quotas.removePublishedApp() await quotas.removePublishedApp()
events.app.unpublished() events.app.unpublished(app)
} else { } else {
await quotas.removeApp() await quotas.removeApp()
events.app.deleted() events.app.deleted(app)
} }
/* istanbul ignore next */ /* istanbul ignore next */
@ -531,10 +533,10 @@ const updateAppPackage = async (appPackage: any, appId: any) => {
// Redis, shouldn't ever store it // Redis, shouldn't ever store it
delete newAppPackage.lockedBy delete newAppPackage.lockedBy
const response = await db.put(newAppPackage) await db.put(newAppPackage)
// remove any cached metadata, so that it will be updated // remove any cached metadata, so that it will be updated
await appCache.invalidateAppMetadata(appId) await appCache.invalidateAppMetadata(appId)
return response return newAppPackage
} }
const createEmptyAppPackage = async (ctx: any, app: any) => { const createEmptyAppPackage = async (ctx: any, app: any) => {

View file

@ -123,6 +123,7 @@ async function deployApp(deployment: any) {
console.log("Deployed app initialised, setting deployment to successful") console.log("Deployed app initialised, setting deployment to successful")
deployment.setStatus(DeploymentStatus.SUCCESS) deployment.setStatus(DeploymentStatus.SUCCESS)
await storeDeploymentHistory(deployment) await storeDeploymentHistory(deployment)
return appDoc
} catch (err: any) { } catch (err: any) {
deployment.setStatus(DeploymentStatus.FAILURE, err.message) deployment.setStatus(DeploymentStatus.FAILURE, err.message)
await storeDeploymentHistory(deployment) await storeDeploymentHistory(deployment)
@ -187,13 +188,14 @@ const _deployApp = async function (ctx: any) {
console.log("Deploying app...") console.log("Deploying app...")
let app
if (await isFirstDeploy()) { if (await isFirstDeploy()) {
await quotas.addPublishedApp(() => deployApp(deployment)) app = await quotas.addPublishedApp(() => deployApp(deployment))
} else { } else {
await deployApp(deployment) app = await deployApp(deployment)
} }
events.app.published() events.app.published(app)
ctx.body = deployment ctx.body = deployment
} }

View file

@ -84,9 +84,9 @@ export class RestImporter {
const count = successQueries.length const count = successQueries.length
const importSource = this.source.getImportSource() const importSource = this.source.getImportSource()
const datasource = await db.get(datasourceId) const datasource = await db.get(datasourceId)
events.query.import(datasource, importSource, count) events.query.imported(datasource, importSource, count)
for (let query of successQueries) { for (let query of successQueries) {
events.query.created(query) events.query.created(datasource, query)
} }
return { return {

View file

@ -106,8 +106,8 @@ describe("Rest Importer", () => {
const importResult = await restImporter.importQueries(datasource._id) const importResult = await restImporter.importQueries(datasource._id)
expect(importResult.errorQueries.length).toBe(0) expect(importResult.errorQueries.length).toBe(0)
expect(importResult.queries.length).toBe(assertions[key].count) expect(importResult.queries.length).toBe(assertions[key].count)
expect(events.query.import).toBeCalledTimes(1) expect(events.query.imported).toBeCalledTimes(1)
expect(events.query.import).toBeCalledWith(datasource, assertions[key].source, assertions[key].count) expect(events.query.imported).toBeCalledWith(datasource, assertions[key].source, assertions[key].count)
jest.clearAllMocks() jest.clearAllMocks()
} }

View file

@ -149,7 +149,7 @@ export async function handleDataImport(user: any, table: any, dataImport: any) {
} }
await quotas.addRows(finalData.length, () => db.bulkDocs(finalData)) await quotas.addRows(finalData.length, () => db.bulkDocs(finalData))
events.row.import(table, "csv", finalData.length) events.row.imported(table, "csv", finalData.length)
return table return table
} }

View file

@ -134,7 +134,7 @@ describe("/applications", () => {
.set(config.defaultHeaders()) .set(config.defaultHeaders())
.expect("Content-Type", /json/) .expect("Content-Type", /json/)
.expect(200) .expect(200)
expect(res.body.rev).toBeDefined() expect(res.body._rev).toBeDefined()
expect(events.app.updated).toBeCalledTimes(1) expect(events.app.updated).toBeCalledTimes(1)
}) })
}) })
@ -201,7 +201,7 @@ describe("/applications", () => {
.set(headers) .set(headers)
.expect("Content-Type", /json/) .expect("Content-Type", /json/)
.expect(200) .expect(200)
expect(res.body.rev).toBeDefined() expect(res.body._rev).toBeDefined()
// retrieve the app to check it // retrieve the app to check it
const getRes = await request const getRes = await request
.get(`/api/applications/${config.getAppId()}/appPackage`) .get(`/api/applications/${config.getAppId()}/appPackage`)

View file

@ -55,8 +55,8 @@ describe("/tables", () => {
expect(events.table.created).toBeCalledWith(res.body) expect(events.table.created).toBeCalledWith(res.body)
expect(events.table.imported).toBeCalledTimes(1) expect(events.table.imported).toBeCalledTimes(1)
expect(events.table.imported).toBeCalledWith(res.body, "csv") expect(events.table.imported).toBeCalledWith(res.body, "csv")
expect(events.row.import).toBeCalledTimes(1) expect(events.row.imported).toBeCalledTimes(1)
expect(events.row.import).toBeCalledWith(res.body, "csv", 1) expect(events.row.imported).toBeCalledWith(res.body, "csv", 1)
}) })
it("should apply authorization to endpoint", async () => { it("should apply authorization to endpoint", async () => {
@ -163,8 +163,8 @@ describe("/tables", () => {
.expect(200) .expect(200)
expect(events.table.created).not.toHaveBeenCalled() expect(events.table.created).not.toHaveBeenCalled()
expect(events.row.import).toBeCalledTimes(1) expect(events.row.imported).toBeCalledTimes(1)
expect(events.row.import).toBeCalledWith(table, "csv", 1) expect(events.row.imported).toBeCalledWith(table, "csv", 1)
}) })
}) })

View file

@ -2,7 +2,7 @@
"compilerOptions": { "compilerOptions": {
"target": "es6", "target": "es6",
"module": "commonjs", "module": "commonjs",
"lib": ["es2019"], "lib": ["es2020"],
"allowJs": true, "allowJs": true,
"outDir": "dist", "outDir": "dist",
"strict": true, "strict": true,

View file

@ -7,7 +7,7 @@
"author": "Budibase", "author": "Budibase",
"license": "GPL-3.0", "license": "GPL-3.0",
"scripts": { "scripts": {
"build": "rimraf dist/ && tsc -p tsconfig.json" "build": "rimraf dist/ && tsc"
}, },
"jest": { "jest": {
}, },

View file

@ -1 +1,3 @@
export interface App {} export interface App {
appId: string
}

View file

@ -2,14 +2,15 @@
"compilerOptions": { "compilerOptions": {
"target": "es6", "target": "es6",
"module": "commonjs", "module": "commonjs",
"lib": ["es2019"], "lib": ["es2020"],
"allowJs": true, "allowJs": true,
"outDir": "dist", "outDir": "dist",
"strict": true, "strict": true,
"noImplicitAny": true, "noImplicitAny": true,
"esModuleInterop": true, "esModuleInterop": true,
"resolveJsonModule": true, "resolveJsonModule": true,
"incremental": true "incremental": true,
"types": [ "node", "jest"],
}, },
"include": [ "include": [
"./src/**/*" "./src/**/*"

View file

@ -1297,6 +1297,13 @@ async-hook-jl@^1.7.6:
dependencies: dependencies:
stack-chain "^1.3.7" stack-chain "^1.3.7"
async@~2.1.4:
version "2.1.5"
resolved "https://registry.yarnpkg.com/async/-/async-2.1.5.tgz#e587c68580994ac67fc56ff86d3ac56bdbe810bc"
integrity sha1-5YfGhYCZSsZ/xW/4bTrFa9voELw=
dependencies:
lodash "^4.14.0"
asynckit@^0.4.0: asynckit@^0.4.0:
version "0.4.0" version "0.4.0"
resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
@ -2879,6 +2886,23 @@ globby@^11.0.4:
merge2 "^1.4.1" merge2 "^1.4.1"
slash "^3.0.0" slash "^3.0.0"
google-auth-library@~0.10.0:
version "0.10.0"
resolved "https://registry.yarnpkg.com/google-auth-library/-/google-auth-library-0.10.0.tgz#6e15babee85fd1dd14d8d128a295b6838d52136e"
integrity sha1-bhW6vuhf0d0U2NEoopW2g41SE24=
dependencies:
gtoken "^1.2.1"
jws "^3.1.4"
lodash.noop "^3.0.1"
request "^2.74.0"
google-p12-pem@^0.1.0:
version "0.1.2"
resolved "https://registry.yarnpkg.com/google-p12-pem/-/google-p12-pem-0.1.2.tgz#33c46ab021aa734fa0332b3960a9a3ffcb2f3177"
integrity sha1-M8RqsCGqc0+gMys5YKmj/8svMXc=
dependencies:
node-forge "^0.7.1"
googleapis@^16.0.0: googleapis@^16.0.0:
version "16.1.0" version "16.1.0"
resolved "https://registry.yarnpkg.com/googleapis/-/googleapis-16.1.0.tgz#0f19f2d70572d918881a0f626e3b1a2fa8629576" resolved "https://registry.yarnpkg.com/googleapis/-/googleapis-16.1.0.tgz#0f19f2d70572d918881a0f626e3b1a2fa8629576"
@ -2927,6 +2951,16 @@ graceful-fs@^4.1.2, graceful-fs@^4.2.4:
resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.9.tgz#041b05df45755e587a24942279b9d113146e1c96" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.9.tgz#041b05df45755e587a24942279b9d113146e1c96"
integrity sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ== integrity sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==
gtoken@^1.2.1:
version "1.2.3"
resolved "https://registry.yarnpkg.com/gtoken/-/gtoken-1.2.3.tgz#5509571b8afd4322e124cf66cf68115284c476d8"
integrity sha512-wQAJflfoqSgMWrSBk9Fg86q+sd6s7y6uJhIvvIPz++RElGlMtEqsdAR2oWwZ/WTEtp7P9xFbJRrT976oRgzJ/w==
dependencies:
google-p12-pem "^0.1.0"
jws "^3.0.0"
mime "^1.4.1"
request "^2.72.0"
har-schema@^2.0.0: har-schema@^2.0.0:
version "2.0.0" version "2.0.0"
resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92"
@ -3986,7 +4020,7 @@ jwa@^1.4.1:
ecdsa-sig-formatter "1.0.11" ecdsa-sig-formatter "1.0.11"
safe-buffer "^5.0.1" safe-buffer "^5.0.1"
jws@^3.2.2: jws@^3.0.0, jws@^3.1.4, jws@^3.2.2:
version "3.2.2" version "3.2.2"
resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304" resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304"
integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA== integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==
@ -4306,6 +4340,11 @@ lodash.memoize@4.x:
resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe"
integrity sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4= integrity sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4=
lodash.noop@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/lodash.noop/-/lodash.noop-3.0.1.tgz#38188f4d650a3a474258439b96ec45b32617133c"
integrity sha1-OBiPTWUKOkdCWEObluxFsyYXEzw=
lodash.once@^4.0.0: lodash.once@^4.0.0:
version "4.1.1" version "4.1.1"
resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac"
@ -4316,7 +4355,7 @@ lodash.pick@^4.0.0:
resolved "https://registry.yarnpkg.com/lodash.pick/-/lodash.pick-4.4.0.tgz#52f05610fff9ded422611441ed1fc123a03001b3" resolved "https://registry.yarnpkg.com/lodash.pick/-/lodash.pick-4.4.0.tgz#52f05610fff9ded422611441ed1fc123a03001b3"
integrity sha1-UvBWEP/53tQiYRRB7R/BI6AwAbM= integrity sha1-UvBWEP/53tQiYRRB7R/BI6AwAbM=
lodash@^4.17.14, lodash@^4.17.19, lodash@^4.17.21, lodash@^4.7.0: lodash@^4.14.0, lodash@^4.17.14, lodash@^4.17.19, lodash@^4.17.21, lodash@^4.7.0:
version "4.17.21" version "4.17.21"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
@ -4435,6 +4474,11 @@ mime-types@^2.1.12, mime-types@^2.1.18, mime-types@~2.1.19, mime-types@~2.1.24:
dependencies: dependencies:
mime-db "1.51.0" mime-db "1.51.0"
mime@^1.4.1:
version "1.6.0"
resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1"
integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==
mime@^2.5.0: mime@^2.5.0:
version "2.6.0" version "2.6.0"
resolved "https://registry.yarnpkg.com/mime/-/mime-2.6.0.tgz#a2a682a95cd4d0cb1d6257e28f83da7e35800367" resolved "https://registry.yarnpkg.com/mime/-/mime-2.6.0.tgz#a2a682a95cd4d0cb1d6257e28f83da7e35800367"
@ -4541,6 +4585,11 @@ node-fetch@2.6.7, node-fetch@^2.6.1:
dependencies: dependencies:
whatwg-url "^5.0.0" whatwg-url "^5.0.0"
node-forge@^0.7.1:
version "0.7.6"
resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.7.6.tgz#fdf3b418aee1f94f0ef642cd63486c77ca9724ac"
integrity sha512-sol30LUpz1jQFBjOKwbjxijiE3b6pjd74YwfD0fJOKPjF+fONKb2Yg8rYgS6+bK6VDl+/wfr4IYpC7jDzLUIfw==
node-gyp-build@~4.1.0: node-gyp-build@~4.1.0:
version "4.1.1" version "4.1.1"
resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.1.1.tgz#d7270b5d86717068d114cc57fff352f96d745feb" resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.1.1.tgz#d7270b5d86717068d114cc57fff352f96d745feb"
@ -5502,7 +5551,7 @@ remove-trailing-slash@^0.1.1:
resolved "https://registry.yarnpkg.com/remove-trailing-slash/-/remove-trailing-slash-0.1.1.tgz#be2285a59f39c74d1bce4f825950061915e3780d" resolved "https://registry.yarnpkg.com/remove-trailing-slash/-/remove-trailing-slash-0.1.1.tgz#be2285a59f39c74d1bce4f825950061915e3780d"
integrity sha512-o4S4Qh6L2jpnCy83ysZDau+VORNvnFw07CKSAymkd6ICNVEPisMyzlc00KlvvicsxKck94SEwhDnMNdICzO+tA== integrity sha512-o4S4Qh6L2jpnCy83ysZDau+VORNvnFw07CKSAymkd6ICNVEPisMyzlc00KlvvicsxKck94SEwhDnMNdICzO+tA==
request@^2.88.0: request@^2.72.0, request@^2.74.0, request@^2.88.0:
version "2.88.2" version "2.88.2"
resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3" resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3"
integrity sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw== integrity sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==
@ -5931,6 +5980,11 @@ string-length@^4.0.1:
char-regex "^1.0.2" char-regex "^1.0.2"
strip-ansi "^6.0.0" strip-ansi "^6.0.0"
string-template@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/string-template/-/string-template-1.0.0.tgz#9e9f2233dc00f218718ec379a28a5673ecca8b96"
integrity sha1-np8iM9wA8hhxjsN5oopWc+zKi5Y=
string-width@^3.0.0: string-width@^3.0.0:
version "3.1.0" version "3.1.0"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961"