1
0
Fork 0
mirror of synced 2024-06-29 11:31:06 +12:00

Merge pull request #6074 from Budibase/feature/app-quotas

App/resource ID breakdown of quotas
This commit is contained in:
Rory Powell 2022-09-30 13:28:07 +01:00 committed by GitHub
commit a3cd3c8067
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
29 changed files with 226 additions and 124 deletions

View file

@ -6,6 +6,7 @@ const {
updateAppId,
doInAppContext,
doInTenant,
doInContext,
} = require("./src/context")
const identity = require("./src/context/identity")
@ -19,4 +20,5 @@ module.exports = {
doInAppContext,
doInTenant,
identity,
doInContext,
}

View file

@ -65,7 +65,16 @@ export const getTenantIDFromAppID = (appId: string) => {
}
}
// used for automations, API endpoints should always be in context already
// Runs `task` with both the tenant context and the app context established
// for the given app ID. `task` is typed `any` but is invoked as a
// zero-argument function; its return value is passed through.
export const doInContext = async (appId: string, task: any) => {
// gets the tenant ID from the app ID
const tenantId = getTenantIDFromAppID(appId)
return doInTenant(tenantId, async () => {
return doInAppContext(appId, async () => {
return task()
})
})
}
export const doInTenant = (tenantId: string | null, task: any) => {
// make sure default always selected in single tenancy
if (!env.MULTI_TENANCY) {

View file

@ -46,6 +46,9 @@ export enum DocumentType {
AUTOMATION_LOG = "log_au",
ACCOUNT_METADATA = "acc_metadata",
PLUGIN = "plg",
TABLE = "ta",
DATASOURCE = "datasource",
DATASOURCE_PLUS = "datasource_plus",
}
export const StaticDatabases = {

View file

@ -64,6 +64,28 @@ export function getQueryIndex(viewName: ViewName) {
return `database/${viewName}`
}
/**
 * Check if a given ID is that of a table.
 * Matches both internal tables (prefix from DocumentType.TABLE) and
 * external "datasource plus" tables (prefix from DocumentType.DATASOURCE_PLUS).
 * @param id the document ID to test.
 * @returns {boolean} truthy when the ID carries a table prefix. NOTE(review):
 * the raw `id && …` expression is returned directly, so an empty `id` yields
 * a falsy non-boolean ("" / undefined) rather than `false` — confirm callers
 * only use this in boolean position.
 */
export const isTableId = (id: string) => {
// this includes datasource plus tables
return (
id &&
(id.startsWith(`${DocumentType.TABLE}${SEPARATOR}`) ||
id.startsWith(`${DocumentType.DATASOURCE_PLUS}${SEPARATOR}`))
)
}
/**
 * Check if a given ID is that of a datasource or datasource plus.
 * A single prefix check suffices because the "datasource_plus" prefix
 * string begins with the "datasource" prefix string.
 * @param id the document ID to test.
 * @returns {boolean} truthy when the ID carries a datasource prefix
 * (falsy non-boolean, not `false`, when `id` is empty — same caveat as
 * isTableId).
 */
export const isDatasourceId = (id: string) => {
// this covers both datasources and datasource plus
return id && id.startsWith(`${DocumentType.DATASOURCE}${SEPARATOR}`)
}
/**
* Generates a new workspace ID.
* @returns {string} The new workspace ID which the workspace doc can be stored under.

View file

@ -11,7 +11,7 @@ export const DEFINITIONS: MigrationDefinition[] = [
},
{
type: MigrationType.GLOBAL,
name: MigrationName.QUOTAS_1,
name: MigrationName.SYNC_QUOTAS,
},
{
type: MigrationType.APP,
@ -33,8 +33,4 @@ export const DEFINITIONS: MigrationDefinition[] = [
type: MigrationType.GLOBAL,
name: MigrationName.GLOBAL_INFO_SYNC_USERS,
},
{
type: MigrationType.GLOBAL,
name: MigrationName.PLUGIN_COUNT,
},
]

View file

@ -8,6 +8,7 @@ import {
updateAppId,
doInAppContext,
doInTenant,
doInContext,
} from "../context"
import * as identity from "../context/identity"
@ -20,5 +21,6 @@ export = {
updateAppId,
doInAppContext,
doInTenant,
doInContext,
identity,
}

View file

@ -356,7 +356,7 @@ const appPostCreate = async (ctx: any, app: App) => {
await creationEvents(ctx.request, app)
// app import & template creation
if (ctx.request.body.useTemplate === "true") {
const rows = await getUniqueRows([app.appId])
const { rows } = await getUniqueRows([app.appId])
const rowCount = rows ? rows.length : 0
if (rowCount) {
try {
@ -490,7 +490,7 @@ const destroyApp = async (ctx: any) => {
}
const preDestroyApp = async (ctx: any) => {
const rows = await getUniqueRows([ctx.params.appId])
const { rows } = await getUniqueRows([ctx.params.appId])
ctx.rowCount = rows.length
}

View file

@ -153,7 +153,10 @@ export async function preview(ctx: any) {
auth: { ...authConfigCtx },
},
})
const { rows, keys, info, extra } = await quotas.addQuery(runFn)
const { rows, keys, info, extra } = await quotas.addQuery(runFn, {
datasourceId: datasource._id,
})
const schemaFields: any = {}
if (rows?.length > 0) {
for (let key of [...new Set(keys)] as string[]) {
@ -234,7 +237,9 @@ async function execute(
},
})
const { rows, pagination, extra } = await quotas.addQuery(runFn)
const { rows, pagination, extra } = await quotas.addQuery(runFn, {
datasourceId: datasource._id,
})
if (opts && opts.rowsOnly) {
ctx.body = rows
} else {

View file

@ -31,8 +31,11 @@ export async function patch(ctx: any): Promise<any> {
return save(ctx)
}
try {
const { row, table } = await quotas.addQuery(() =>
pickApi(tableId).patch(ctx)
const { row, table } = await quotas.addQuery(
() => pickApi(tableId).patch(ctx),
{
datasourceId: tableId,
}
)
ctx.status = 200
ctx.eventEmitter &&
@ -54,7 +57,9 @@ export const save = async (ctx: any) => {
}
try {
const { row, table } = await quotas.addRow(() =>
quotas.addQuery(() => pickApi(tableId).save(ctx))
quotas.addQuery(() => pickApi(tableId).save(ctx), {
datasourceId: tableId,
})
)
ctx.status = 200
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:save`, appId, row, table)
@ -68,7 +73,9 @@ export const save = async (ctx: any) => {
export async function fetchView(ctx: any) {
const tableId = getTableId(ctx)
try {
ctx.body = await quotas.addQuery(() => pickApi(tableId).fetchView(ctx))
ctx.body = await quotas.addQuery(() => pickApi(tableId).fetchView(ctx), {
datasourceId: tableId,
})
} catch (err) {
ctx.throw(400, err)
}
@ -77,7 +84,9 @@ export async function fetchView(ctx: any) {
export async function fetch(ctx: any) {
const tableId = getTableId(ctx)
try {
ctx.body = await quotas.addQuery(() => pickApi(tableId).fetch(ctx))
ctx.body = await quotas.addQuery(() => pickApi(tableId).fetch(ctx), {
datasourceId: tableId,
})
} catch (err) {
ctx.throw(400, err)
}
@ -86,7 +95,9 @@ export async function fetch(ctx: any) {
export async function find(ctx: any) {
const tableId = getTableId(ctx)
try {
ctx.body = await quotas.addQuery(() => pickApi(tableId).find(ctx))
ctx.body = await quotas.addQuery(() => pickApi(tableId).find(ctx), {
datasourceId: tableId,
})
} catch (err) {
ctx.throw(400, err)
}
@ -98,8 +109,11 @@ export async function destroy(ctx: any) {
const tableId = getTableId(ctx)
let response, row
if (inputs.rows) {
let { rows } = await quotas.addQuery(() =>
pickApi(tableId).bulkDestroy(ctx)
let { rows } = await quotas.addQuery(
() => pickApi(tableId).bulkDestroy(ctx),
{
datasourceId: tableId,
}
)
await quotas.removeRows(rows.length)
response = rows
@ -107,7 +121,9 @@ export async function destroy(ctx: any) {
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, row)
}
} else {
let resp = await quotas.addQuery(() => pickApi(tableId).destroy(ctx))
let resp = await quotas.addQuery(() => pickApi(tableId).destroy(ctx), {
datasourceId: tableId,
})
await quotas.removeRow()
response = resp.response
row = resp.row
@ -123,7 +139,9 @@ export async function search(ctx: any) {
const tableId = getTableId(ctx)
try {
ctx.status = 200
ctx.body = await quotas.addQuery(() => pickApi(tableId).search(ctx))
ctx.body = await quotas.addQuery(() => pickApi(tableId).search(ctx), {
datasourceId: tableId,
})
} catch (err) {
ctx.throw(400, err)
}
@ -141,8 +159,11 @@ export async function validate(ctx: any) {
export async function fetchEnrichedRow(ctx: any) {
const tableId = getTableId(ctx)
try {
ctx.body = await quotas.addQuery(() =>
pickApi(tableId).fetchEnrichedRow(ctx)
ctx.body = await quotas.addQuery(
() => pickApi(tableId).fetchEnrichedRow(ctx),
{
datasourceId: tableId,
}
)
} catch (err) {
ctx.throw(400, err)
@ -152,7 +173,9 @@ export async function fetchEnrichedRow(ctx: any) {
export const exportRows = async (ctx: any) => {
const tableId = getTableId(ctx)
try {
ctx.body = await quotas.addQuery(() => pickApi(tableId).exportRows(ctx))
ctx.body = await quotas.addQuery(() => pickApi(tableId).exportRows(ctx), {
datasourceId: tableId,
})
} catch (err) {
ctx.throw(400, err)
}

View file

@ -145,7 +145,9 @@ export async function destroy(ctx: any) {
await db.bulkDocs(
rows.rows.map((row: any) => ({ ...row.doc, _deleted: true }))
)
await quotas.removeRows(rows.rows.length)
await quotas.removeRows(rows.rows.length, {
tableId: ctx.params.tableId,
})
// update linked rows
await updateLinks({

View file

@ -148,7 +148,9 @@ export async function handleDataImport(user: any, table: any, dataImport: any) {
finalData.push(row)
}
await quotas.addRows(finalData.length, () => db.bulkDocs(finalData))
await quotas.addRows(finalData.length, () => db.bulkDocs(finalData), {
tableId: table._id,
})
await events.rows.imported(table, "csv", finalData.length)
return table
}

View file

@ -34,18 +34,13 @@ describe("/rows", () => {
.expect(status)
const getRowUsage = async () => {
return config.doInContext(null, () =>
quotas.getCurrentUsageValue(QuotaUsageType.STATIC, StaticQuotaName.ROWS)
)
const { total } = await config.doInContext(null, () => quotas.getCurrentUsageValues(QuotaUsageType.STATIC, StaticQuotaName.ROWS))
return total
}
const getQueryUsage = async () => {
return config.doInContext(null, () =>
quotas.getCurrentUsageValue(
QuotaUsageType.MONTHLY,
MonthlyQuotaName.QUERIES
)
)
const { total } = await config.doInContext(null, () => quotas.getCurrentUsageValues(QuotaUsageType.MONTHLY, MonthlyQuotaName.QUERIES))
return total
}
const assertRowUsage = async expected => {
@ -60,26 +55,26 @@ describe("/rows", () => {
describe("save, load, update", () => {
it("returns a success message when the row is created", async () => {
// const rowUsage = await getRowUsage()
// const queryUsage = await getQueryUsage()
//
// const res = await request
// .post(`/api/${row.tableId}/rows`)
// .send(row)
// .set(config.defaultHeaders())
// .expect('Content-Type', /json/)
// .expect(200)
// expect(res.res.statusMessage).toEqual(`${table.name} saved successfully`)
// expect(res.body.name).toEqual("Test Contact")
// expect(res.body._rev).toBeDefined()
// await assertRowUsage(rowUsage + 1)
// await assertQueryUsage(queryUsage + 1)
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
const res = await request
.post(`/api/${row.tableId}/rows`)
.send(row)
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect(200)
expect(res.res.statusMessage).toEqual(`${table.name} saved successfully`)
expect(res.body.name).toEqual("Test Contact")
expect(res.body._rev).toBeDefined()
await assertRowUsage(rowUsage + 1)
await assertQueryUsage(queryUsage + 1)
})
it("updates a row successfully", async () => {
const existing = await config.createRow()
// const rowUsage = await getRowUsage()
// const queryUsage = await getQueryUsage()
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
const res = await request
.post(`/api/${table._id}/rows`)
@ -97,8 +92,8 @@ describe("/rows", () => {
`${table.name} updated successfully.`
)
expect(res.body.name).toEqual("Updated Name")
// await assertRowUsage(rowUsage)
// await assertQueryUsage(queryUsage + 1)
await assertRowUsage(rowUsage)
await assertQueryUsage(queryUsage + 1)
})
it("should load a row", async () => {

View file

@ -29,16 +29,11 @@ describe("Run through some parts of the automations system", () => {
afterAll(setup.afterAll)
it("should be able to init in builder", async () => {
await triggers.externalTrigger(basicAutomation(), { a: 1 })
await triggers.externalTrigger(basicAutomation(), { a: 1, appId: "app_123" })
await wait(100)
expect(thread.execute).toHaveBeenCalled()
})
it("should be able to init in prod", async () => {
await triggers.externalTrigger(basicAutomation(), { a: 1 })
await wait(100)
})
it("should check coercion", async () => {
const table = await config.createTable()
const automation = basicAutomation()

View file

@ -13,7 +13,7 @@ import {
getAppId,
getProdAppDB,
} from "@budibase/backend-core/context"
import { tenancy } from "@budibase/backend-core"
import { context } from "@budibase/backend-core"
import { quotas } from "@budibase/pro"
import { Automation } from "@budibase/types"
@ -28,12 +28,14 @@ const jobMessage = (job: any, message: string) => {
export async function processEvent(job: any) {
try {
const automationId = job.data.automation._id
console.log(jobMessage(job, "running"))
// need to actually await these so that an error can be captured properly
const tenantId = tenancy.getTenantIDFromAppID(job.data.event.appId)
return await tenancy.doInTenant(tenantId, async () => {
return await context.doInContext(job.data.event.appId, async () => {
const runFn = () => Runner.run(job)
return quotas.addAutomation(runFn)
return quotas.addAutomation(runFn, {
automationId,
})
})
} catch (err) {
const errJson = JSON.stringify(err)

View file

@ -34,8 +34,6 @@ const DocumentType = {
INSTANCE: "inst",
LAYOUT: "layout",
SCREEN: "screen",
DATASOURCE: "datasource",
DATASOURCE_PLUS: "datasource_plus",
QUERY: "query",
DEPLOYMENTS: "deployments",
METADATA: "metadata",

View file

@ -8,7 +8,7 @@ import {
accounts,
db as dbUtils,
} from "@budibase/backend-core"
import { QuotaUsage } from "@budibase/pro"
import { QuotaUsage } from "@budibase/types"
import {
CloudAccount,
App,

View file

@ -1,12 +0,0 @@
import { tenancy, logging } from "@budibase/backend-core"
import { plugins } from "@budibase/pro"
export const run = async () => {
try {
await tenancy.doInTenant(tenancy.DEFAULT_TENANT_ID, async () => {
await plugins.checkPluginQuotas()
})
} catch (err) {
logging.logAlert("Failed to update plugin quotas", err)
}
}

View file

@ -1,20 +1,15 @@
import { runQuotaMigration } from "./usageQuotas"
import * as syncApps from "./usageQuotas/syncApps"
import * as syncRows from "./usageQuotas/syncRows"
import * as syncPlugins from "./usageQuotas/syncPlugins"
/**
* Date:
* January 2022
*
* Description:
* Synchronise the app and row quotas to the state of the db after it was
* discovered that the quota resets were still in place and the row quotas
* weren't being decremented correctly.
* Synchronise quotas to the state of the db.
*/
// Entry point for the sync-quotas migration: re-derives app, row and plugin
// quota usage from database state inside a single runQuotaMigration wrapper.
// The syncs run sequentially and any rejection propagates to the caller.
export const run = async () => {
await runQuotaMigration(async () => {
await syncApps.run()
await syncRows.run()
await syncPlugins.run()
})
}

View file

@ -2,11 +2,13 @@ const TestConfig = require("../../../tests/utilities/TestConfiguration")
const syncApps = jest.fn()
const syncRows = jest.fn()
const syncPlugins = jest.fn()
jest.mock("../usageQuotas/syncApps", () => ({ run: syncApps }) )
jest.mock("../usageQuotas/syncRows", () => ({ run: syncRows }) )
jest.mock("../usageQuotas/syncPlugins", () => ({ run: syncPlugins }) )
const migration = require("../quotas1")
const migration = require("../syncQuotas")
describe("run", () => {
let config = new TestConfig(false)
@ -17,9 +19,10 @@ describe("run", () => {
afterAll(config.end)
it("runs ", async () => {
it("run", async () => {
await migration.run()
expect(syncApps).toHaveBeenCalledTimes(1)
expect(syncRows).toHaveBeenCalledTimes(1)
expect(syncPlugins).toHaveBeenCalledTimes(1)
})
})

View file

@ -5,7 +5,6 @@ import { QuotaUsageType, StaticQuotaName } from "@budibase/types"
export const run = async () => {
// get app count
// @ts-ignore
const devApps = await getAllApps({ dev: true })
const appCount = devApps ? devApps.length : 0

View file

@ -0,0 +1,10 @@
import { logging } from "@budibase/backend-core"
import { plugins } from "@budibase/pro"
// Re-check plugin quota usage via the pro plugins module. Best-effort:
// failures are logged as alerts rather than rethrown, so the enclosing
// migration is not aborted by a plugin-quota error.
export const run = async () => {
try {
await plugins.checkPluginQuotas()
} catch (err) {
logging.logAlert("Failed to update plugin quotas", err)
}
}

View file

@ -2,19 +2,28 @@ import { getTenantId } from "@budibase/backend-core/tenancy"
import { getAllApps } from "@budibase/backend-core/db"
import { getUniqueRows } from "../../../utilities/usageQuota/rows"
import { quotas } from "@budibase/pro"
import { QuotaUsageType, StaticQuotaName } from "@budibase/types"
import { StaticQuotaName, QuotaUsageType } from "@budibase/types"
export const run = async () => {
// get all rows in all apps
// @ts-ignore
const allApps = await getAllApps({ all: true })
// @ts-ignore
const appIds = allApps ? allApps.map((app: { appId: any }) => app.appId) : []
const rows = await getUniqueRows(appIds)
const rowCount = rows ? rows.length : 0
const { appRows } = await getUniqueRows(appIds)
// get the counts per app
const counts: { [key: string]: number } = {}
let rowCount = 0
Object.entries(appRows).forEach(([appId, rows]) => {
counts[appId] = rows.length
rowCount += rows.length
})
// sync row count
const tenantId = getTenantId()
console.log(`[Tenant: ${tenantId}] Syncing row count: ${rowCount}`)
await quotas.setUsage(rowCount, StaticQuotaName.ROWS, QuotaUsageType.STATIC)
await quotas.setUsagePerApp(
counts,
StaticQuotaName.ROWS,
QuotaUsageType.STATIC
)
}

View file

@ -2,6 +2,7 @@ import TestConfig from "../../../../tests/utilities/TestConfiguration"
import * as syncRows from "../syncRows"
import { quotas } from "@budibase/pro"
import { QuotaUsageType, StaticQuotaName } from "@budibase/types"
const { getProdAppID } = require("@budibase/backend-core/db")
describe("syncRows", () => {
let config = new TestConfig(false)
@ -22,10 +23,11 @@ describe("syncRows", () => {
expect(usageDoc.usageQuota.rows).toEqual(300)
// app 1
const app1 = config.app
await config.createTable()
await config.createRow()
// app 2
await config.createApp("second-app")
const app2 = await config.createApp("second-app")
await config.createTable()
await config.createRow()
await config.createRow()
@ -36,6 +38,12 @@ describe("syncRows", () => {
// assert the migration worked
usageDoc = await quotas.getQuotaUsage()
expect(usageDoc.usageQuota.rows).toEqual(3)
expect(usageDoc.apps?.[getProdAppID(app1.appId)].usageQuota.rows).toEqual(
1
)
expect(usageDoc.apps?.[getProdAppID(app2.appId)].usageQuota.rows).toEqual(
2
)
})
})
})

View file

@ -4,11 +4,9 @@ import env from "../environment"
// migration functions
import * as userEmailViewCasing from "./functions/userEmailViewCasing"
import * as quota1 from "./functions/quotas1"
import * as syncQuotas from "./functions/syncQuotas"
import * as appUrls from "./functions/appUrls"
import * as backfill from "./functions/backfill"
import * as pluginCount from "./functions/pluginCount"
/**
* Populate the migration function and additional configuration from
* the static migration definitions.
@ -26,10 +24,10 @@ export const buildMigrations = () => {
})
break
}
case MigrationName.QUOTAS_1: {
case MigrationName.SYNC_QUOTAS: {
serverMigrations.push({
...definition,
fn: quota1.run,
fn: syncQuotas.run,
})
break
}
@ -69,16 +67,6 @@ export const buildMigrations = () => {
})
break
}
case MigrationName.PLUGIN_COUNT: {
if (env.SELF_HOSTED) {
serverMigrations.push({
...definition,
fn: pluginCount.run,
silent: !!env.SELF_HOSTED,
preventRetry: false,
})
}
}
}
}

View file

@ -4,7 +4,6 @@ import {
tenancy,
DocumentType,
context,
db,
} from "@budibase/backend-core"
import TestConfig from "../../tests/utilities/TestConfiguration"
import structures from "../../tests/utilities/structures"

View file

@ -2,6 +2,7 @@ const { getRowParams, USER_METDATA_PREFIX } = require("../../db/utils")
const {
isDevAppID,
getDevelopmentAppID,
getProdAppID,
doWithDB,
} = require("@budibase/backend-core/db")
@ -52,7 +53,8 @@ const getAppRows = async appId => {
* Rows duplicates may exist across apps due to data import so they are not filtered out.
*/
exports.getUniqueRows = async appIds => {
let uniqueRows = []
let uniqueRows = [],
rowsByApp = {}
const pairs = getAppPairs(appIds)
for (let pair of Object.values(pairs)) {
@ -73,8 +75,10 @@ exports.getUniqueRows = async appIds => {
// this can't be done on all rows because app import results in
// duplicate row ids across apps
// the array pre-concat is important to avoid stack overflow
uniqueRows = uniqueRows.concat([...new Set(appRows)])
const prodId = getProdAppID(pair.devId || pair.prodId)
rowsByApp[prodId] = [...new Set(appRows)]
uniqueRows = uniqueRows.concat(rowsByApp[prodId])
}
return uniqueRows
return { rows: uniqueRows, appRows: rowsByApp }
}

View file

@ -1,15 +1,58 @@
import { MonthlyQuotaName, StaticQuotaName } from "../../sdk"
export interface QuotaUsage {
_id: string
_rev?: string
quotaReset: string
// Categories under which a monthly quota can be broken down per resource
// (see UsageBreakdown). Presumably keyed by resource ID inside
// UsageBreakdown.values — TODO confirm against @budibase/pro usage.
export enum BreakdownQuotaName {
ROW_QUERIES = "rowQueries",
DATASOURCE_QUERIES = "datasourceQueries",
AUTOMATIONS = "automations",
}
// Quota names that are additionally tracked per-app (see QuotaUsage.apps).
export const APP_QUOTA_NAMES = [
StaticQuotaName.ROWS,
MonthlyQuotaName.QUERIES,
MonthlyQuotaName.AUTOMATIONS,
]
// Monthly quota names that also carry a per-resource breakdown
// (see MonthlyUsage.breakdown).
export const BREAKDOWN_QUOTA_NAMES = [
MonthlyQuotaName.QUERIES,
MonthlyQuotaName.AUTOMATIONS,
]
// A per-resource breakdown of one monthly quota: `parent` names the monthly
// quota being broken down, `values` maps a resource key (presumably a
// resource ID — confirm in pro) to that resource's usage count.
export interface UsageBreakdown {
parent: MonthlyQuotaName
values: {
[key: string]: number
}
}
// Usage totals for a single month. Each metered monthly quota gets a
// numeric total; `breakdown` optionally refines a subset of them
// (the BreakdownQuotaName categories) per resource.
export type MonthlyUsage = {
[MonthlyQuotaName.QUERIES]: number
[MonthlyQuotaName.AUTOMATIONS]: number
[MonthlyQuotaName.DAY_PASSES]: number
breakdown?: {
[key in BreakdownQuotaName]?: UsageBreakdown
}
}
export interface BaseQuotaUsage {
usageQuota: {
[key in StaticQuotaName]: number
}
monthly: {
[key: string]: {
[key in MonthlyQuotaName]: number
}
[key: string]: MonthlyUsage
}
}
// The stored quota-usage document: tenant-wide totals (BaseQuotaUsage)
// plus document metadata and an optional per-app breakdown, keyed by app ID.
export interface QuotaUsage extends BaseQuotaUsage {
_id: string
_rev?: string
quotaReset: string
apps?: {
[key: string]: BaseQuotaUsage
}
}
// A single usage reading: the tenant-wide total, with optional app-scoped
// and breakdown-scoped components of that total — presumably populated by
// quotas.getCurrentUsageValues (TODO confirm in pro).
export type UsageValues = {
total: number
app?: number
breakdown?: number
}

View file

@ -27,6 +27,7 @@ export enum ConstantQuotaName {
AUTOMATION_LOG_RETENTION_DAYS = "automationLogRetentionDays",
}
export type MeteredQuotaName = StaticQuotaName | MonthlyQuotaName
export type QuotaName = StaticQuotaName | MonthlyQuotaName | ConstantQuotaName
export const isStaticQuota = (

View file

@ -39,14 +39,13 @@ export interface MigrationOptions {
export enum MigrationName {
USER_EMAIL_VIEW_CASING = "user_email_view_casing",
QUOTAS_1 = "quotas_1",
APP_URLS = "app_urls",
EVENT_APP_BACKFILL = "event_app_backfill",
EVENT_GLOBAL_BACKFILL = "event_global_backfill",
EVENT_INSTALLATION_BACKFILL = "event_installation_backfill",
GLOBAL_INFO_SYNC_USERS = "global_info_sync_users",
PLATFORM_USERS_EMAIL_CASING = "platform_users_email_casing",
PLUGIN_COUNT = "plugin_count",
// increment this number to re-activate this migration
SYNC_QUOTAS = "sync_quotas_1",
}
export interface MigrationDefinition {