TypeScript conversion of the table controllers.

mike12345567 2022-11-22 13:56:01 +00:00
parent a74225cd1e
commit 7ab2029b5d
13 changed files with 304 additions and 254 deletions
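
The change applies one pattern across the converted controllers and utilities: CommonJS require calls and exports.* handlers become ES module imports and typed exported functions, ctx is typed as BBContext from @budibase/types, and the app database is obtained through context.getAppDB() from the @budibase/backend-core barrel export instead of the old @budibase/backend-core/context require. A minimal sketch of that before/after shape (the handler name "example" and the fetched document are illustrative, not taken from the commit):

// Before: CommonJS-style controller handler
// const { getAppDB } = require("@budibase/backend-core/context")
// exports.example = async function (ctx) {
//   const db = getAppDB()
//   ctx.body = await db.get(ctx.params.id)
// }

// After: typed ES module handler
import { context } from "@budibase/backend-core"
import { BBContext } from "@budibase/types"

export async function example(ctx: BBContext) {
  // same behaviour; the context helper now comes from the
  // backend-core barrel export and ctx is explicitly typed
  const db = context.getAppDB()
  ctx.body = await db.get(ctx.params.id)
}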


@ -1,10 +1,5 @@
import { BuiltinPermissionID, PermissionLevel } from "./permissions"
import {
generateRoleID,
getRoleParams,
DocumentType,
SEPARATOR,
} from "../db/utils"
import { generateRoleID, getRoleParams, DocumentType, SEPARATOR } from "../db"
import { getAppDB } from "../context"
import { doWithDB } from "../db"
import { Screen, Role as RoleDoc } from "@budibase/types"
@ -30,20 +25,17 @@ const EXTERNAL_BUILTIN_ROLE_IDS = [
BUILTIN_IDS.PUBLIC,
]
export class Role {
export class Role implements RoleDoc {
_id: string
name: string
permissionId?: string
permissionId: string
inherits?: string
permissions = {}
constructor(id: string, name: string) {
constructor(id: string, name: string, permissionId: string) {
this._id = id
this.name = name
}
addPermission(permissionId: string) {
this.permissionId = permissionId
return this
}
addInheritance(inherits: string) {
@ -53,24 +45,26 @@ export class Role {
}
const BUILTIN_ROLES = {
ADMIN: new Role(BUILTIN_IDS.ADMIN, "Admin")
.addPermission(BuiltinPermissionID.ADMIN)
.addInheritance(BUILTIN_IDS.POWER),
POWER: new Role(BUILTIN_IDS.POWER, "Power")
.addPermission(BuiltinPermissionID.POWER)
.addInheritance(BUILTIN_IDS.BASIC),
BASIC: new Role(BUILTIN_IDS.BASIC, "Basic")
.addPermission(BuiltinPermissionID.WRITE)
.addInheritance(BUILTIN_IDS.PUBLIC),
PUBLIC: new Role(BUILTIN_IDS.PUBLIC, "Public").addPermission(
BuiltinPermissionID.PUBLIC
),
BUILDER: new Role(BUILTIN_IDS.BUILDER, "Builder").addPermission(
ADMIN: new Role(
BUILTIN_IDS.ADMIN,
"Admin",
BuiltinPermissionID.ADMIN
),
).addInheritance(BUILTIN_IDS.POWER),
POWER: new Role(
BUILTIN_IDS.POWER,
"Power",
BuiltinPermissionID.POWER
).addInheritance(BUILTIN_IDS.BASIC),
BASIC: new Role(
BUILTIN_IDS.BASIC,
"Basic",
BuiltinPermissionID.WRITE
).addInheritance(BUILTIN_IDS.PUBLIC),
PUBLIC: new Role(BUILTIN_IDS.PUBLIC, "Public", BuiltinPermissionID.PUBLIC),
BUILDER: new Role(BUILTIN_IDS.BUILDER, "Builder", BuiltinPermissionID.ADMIN),
}
export function getBuiltinRoles() {
export function getBuiltinRoles(): { [key: string]: RoleDoc } {
return cloneDeep(BUILTIN_ROLES)
}
@ -104,7 +98,7 @@ export function builtinRoleToNumber(id?: string) {
if (!role) {
break
}
role = builtins[role.inherits]
role = builtins[role.inherits!]
count++
} while (role !== null)
return count
@ -129,12 +123,12 @@ export async function roleToNumber(id?: string) {
/**
* Returns whichever builtin roleID is lower.
*/
export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string) {
export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string {
if (!roleId1) {
return roleId2
return roleId2 as string
}
if (!roleId2) {
return roleId1
return roleId1 as string
}
return builtinRoleToNumber(roleId1) > builtinRoleToNumber(roleId2)
? roleId2


@ -1,13 +1,14 @@
const { FieldTypes, FormulaTypes } = require("../../../constants")
const { clearColumns } = require("./utils")
const { doesContainStrings } = require("@budibase/string-templates")
const { cloneDeep } = require("lodash/fp")
const { isEqual, uniq } = require("lodash")
const { updateAllFormulasInTable } = require("../row/staticFormula")
const { getAppDB } = require("@budibase/backend-core/context")
const sdk = require("../../../sdk")
import { FieldTypes, FormulaTypes } from "../../../constants"
import { clearColumns } from "./utils"
import { doesContainStrings } from "@budibase/string-templates"
import { cloneDeep } from "lodash/fp"
import { isEqual, uniq } from "lodash"
import { updateAllFormulasInTable } from "../row/staticFormula"
import { context } from "@budibase/backend-core"
import { FieldSchema, Table } from "@budibase/types"
import sdk from "../../../sdk"
function isStaticFormula(column) {
function isStaticFormula(column: FieldSchema) {
return (
column.type === FieldTypes.FORMULA &&
column.formulaType === FormulaTypes.STATIC
@ -18,8 +19,8 @@ function isStaticFormula(column) {
* This retrieves the formula columns from a table schema that use a specified column name
* in the formula.
*/
function getFormulaThatUseColumn(table, columnNames) {
let formula = []
function getFormulaThatUseColumn(table: Table, columnNames: string[] | string) {
let formula: string[] = []
columnNames = Array.isArray(columnNames) ? columnNames : [columnNames]
for (let column of Object.values(table.schema)) {
// not a static formula, or doesn't contain a relationship
@ -38,7 +39,10 @@ function getFormulaThatUseColumn(table, columnNames) {
* This functions checks for when a related table, column or related column is deleted, if any
* tables need to have the formula column removed.
*/
async function checkIfFormulaNeedsCleared(table, { oldTable, deletion }) {
async function checkIfFormulaNeedsCleared(
table: Table,
{ oldTable, deletion }: { oldTable?: Table; deletion?: boolean }
) {
// start by retrieving all tables, remove the current table from the list
const tables = (await sdk.tables.getAllInternalTables()).filter(
tbl => tbl._id !== table._id
@ -49,11 +53,14 @@ async function checkIfFormulaNeedsCleared(table, { oldTable, deletion }) {
)
// remove any formula columns that used related columns
for (let removed of removedColumns) {
let tableToUse = table
let tableToUse: Table | undefined = table
// if relationship, get the related table
if (removed.type === FieldTypes.LINK) {
tableToUse = tables.find(table => table._id === removed.tableId)
}
if (!tableToUse) {
continue
}
const columnsToDelete = getFormulaThatUseColumn(tableToUse, removed.name)
if (columnsToDelete.length > 0) {
await clearColumns(table, columnsToDelete)
@ -71,11 +78,11 @@ async function checkIfFormulaNeedsCleared(table, { oldTable, deletion }) {
// look to see if the column was used in a relationship formula,
// relationships won't be used for this
if (relatedTable && relatedColumns && removed.type !== FieldTypes.LINK) {
let relatedFormulaToRemove = []
let relatedFormulaToRemove: string[] = []
for (let column of relatedColumns) {
relatedFormulaToRemove = relatedFormulaToRemove.concat(
getFormulaThatUseColumn(relatedTable, [
column.fieldName,
column.fieldName!,
removed.name,
])
)
@ -95,13 +102,14 @@ async function checkIfFormulaNeedsCleared(table, { oldTable, deletion }) {
* specifically only for static formula.
*/
async function updateRelatedFormulaLinksOnTables(
table,
{ deletion } = { deletion: false }
table: Table,
{ deletion }: { deletion?: boolean } = {}
) {
const db = getAppDB()
const tableId: string = table._id!
const db = context.getAppDB()
// start by retrieving all tables, remove the current table from the list
const tables = (await sdk.tables.getAllInternalTables()).filter(
tbl => tbl._id !== table._id
tbl => tbl._id !== tableId
)
// clone the tables, so we can compare at end
const initialTables = cloneDeep(tables)
@ -114,7 +122,7 @@ async function updateRelatedFormulaLinksOnTables(
if (!otherTable.relatedFormula) {
continue
}
const index = otherTable.relatedFormula.indexOf(table._id)
const index = otherTable.relatedFormula.indexOf(tableId)
if (index !== -1) {
otherTable.relatedFormula.splice(index, 1)
}
@ -133,11 +141,11 @@ async function updateRelatedFormulaLinksOnTables(
if (
relatedTable &&
(!relatedTable.relatedFormula ||
!relatedTable.relatedFormula.includes(table._id))
!relatedTable.relatedFormula.includes(tableId))
) {
relatedTable.relatedFormula = relatedTable.relatedFormula
? [...relatedTable.relatedFormula, table._id]
: [table._id]
? [...relatedTable.relatedFormula, tableId]
: [tableId]
}
}
}
@ -150,7 +158,10 @@ async function updateRelatedFormulaLinksOnTables(
}
}
async function checkIfFormulaUpdated(table, { oldTable }) {
async function checkIfFormulaUpdated(
table: Table,
{ oldTable }: { oldTable?: Table }
) {
// look to see if any formula values have changed
const shouldUpdate = Object.values(table.schema).find(
column =>
@ -165,7 +176,10 @@ async function checkIfFormulaUpdated(table, { oldTable }) {
}
}
exports.runStaticFormulaChecks = async (table, { oldTable, deletion }) => {
export async function runStaticFormulaChecks(
table: Table,
{ oldTable, deletion }: { oldTable?: Table; deletion?: boolean }
) {
await updateRelatedFormulaLinksOnTables(table, { deletion })
await checkIfFormulaNeedsCleared(table, { oldTable, deletion })
if (!deletion) {


@ -1,38 +1,47 @@
const {
import {
buildExternalTableId,
breakExternalTableId,
} = require("../../../integrations/utils")
const {
} from "../../../integrations/utils"
import {
generateForeignKey,
generateJunctionTableName,
foreignKeyStructure,
hasTypeChanged,
} = require("./utils")
const {
} from "./utils"
import {
DataSourceOperation,
FieldTypes,
RelationshipTypes,
} = require("../../../constants")
const { makeExternalQuery } = require("../../../integrations/base/query")
} from "../../../constants"
import { makeExternalQuery } from "../../../integrations/base/query"
import csvParser from "../../../utilities/csvParser"
import { handleRequest } from "../row/external"
import { events, context } from "@budibase/backend-core"
import {
Datasource,
Table,
QueryJson,
Operation,
RenameColumn,
FieldSchema,
BBContext,
TableRequest,
} from "@budibase/types"
import sdk from "../../../sdk"
const { cloneDeep } = require("lodash/fp")
const csvParser = require("../../../utilities/csvParser")
const { handleRequest } = require("../row/external")
const { getAppDB } = require("@budibase/backend-core/context")
const { events } = require("@budibase/backend-core")
const sdk = require("../../../sdk")
async function makeTableRequest(
datasource,
operation,
table,
tables,
oldTable = null,
renamed = null
datasource: Datasource,
operation: Operation,
table: Table,
tables: Record<string, Table>,
oldTable?: Table,
renamed?: RenameColumn
) {
const json = {
const json: QueryJson = {
endpoint: {
datasourceId: datasource._id,
entityId: table._id,
datasourceId: datasource._id!,
entityId: table._id!,
operation,
},
meta: {
@ -41,15 +50,19 @@ async function makeTableRequest(
table,
}
if (oldTable) {
json.meta.table = oldTable
json.meta!.table = oldTable
}
if (renamed) {
json.meta.renamed = renamed
json.meta!.renamed = renamed
}
return makeExternalQuery(datasource, json)
}
function cleanupRelationships(table, tables, oldTable = null) {
function cleanupRelationships(
table: Table,
tables: Record<string, Table>,
oldTable?: Table
) {
const tableToIterate = oldTable ? oldTable : table
// clean up relationships in couch table schemas
for (let [key, schema] of Object.entries(tableToIterate.schema)) {
@ -78,7 +91,7 @@ function cleanupRelationships(table, tables, oldTable = null) {
}
}
function getDatasourceId(table) {
function getDatasourceId(table: Table) {
if (!table) {
throw "No table supplied"
}
@ -88,7 +101,7 @@ function getDatasourceId(table) {
return breakExternalTableId(table._id).datasourceId
}
function otherRelationshipType(type) {
function otherRelationshipType(type?: string) {
if (type === RelationshipTypes.MANY_TO_MANY) {
return RelationshipTypes.MANY_TO_MANY
}
@ -97,13 +110,21 @@ function otherRelationshipType(type) {
: RelationshipTypes.ONE_TO_MANY
}
function generateManyLinkSchema(datasource, column, table, relatedTable) {
function generateManyLinkSchema(
datasource: Datasource,
column: FieldSchema,
table: Table,
relatedTable: Table
): Table {
if (!table.primary || !relatedTable.primary) {
throw new Error("Unable to generate many link schema, no primary keys")
}
const primary = table.name + table.primary[0]
const relatedPrimary = relatedTable.name + relatedTable.primary[0]
const jcTblName = generateJunctionTableName(column, table, relatedTable)
// first create the new table
const junctionTable = {
_id: buildExternalTableId(datasource._id, jcTblName),
_id: buildExternalTableId(datasource._id!, jcTblName),
name: jcTblName,
primary: [primary, relatedPrimary],
constrained: [primary, relatedPrimary],
@ -125,7 +146,15 @@ function generateManyLinkSchema(datasource, column, table, relatedTable) {
return junctionTable
}
function generateLinkSchema(column, table, relatedTable, type) {
function generateLinkSchema(
column: FieldSchema,
table: Table,
relatedTable: Table,
type: string
) {
if (!table.primary || !relatedTable.primary) {
throw new Error("Unable to generate link schema, no primary keys")
}
const isOneSide = type === RelationshipTypes.ONE_TO_MANY
const primary = isOneSide ? relatedTable.primary[0] : table.primary[0]
// generate a foreign key
@ -136,7 +165,12 @@ function generateLinkSchema(column, table, relatedTable, type) {
return foreignKey
}
function generateRelatedSchema(linkColumn, table, relatedTable, columnName) {
function generateRelatedSchema(
linkColumn: FieldSchema,
table: Table,
relatedTable: Table,
columnName: string
) {
// generate column for other table
const relatedSchema = cloneDeep(linkColumn)
// swap them from the main link
@ -159,21 +193,21 @@ function generateRelatedSchema(linkColumn, table, relatedTable, columnName) {
table.schema[columnName] = relatedSchema
}
function isRelationshipSetup(column) {
function isRelationshipSetup(column: FieldSchema) {
return column.foreignKey || column.through
}
exports.save = async function (ctx) {
const table = ctx.request.body
const { _rename: renamed } = table
export async function save(ctx: BBContext) {
const table: TableRequest = ctx.request.body
const renamed = table?._rename
// can't do this right now
delete table.dataImport
const datasourceId = getDatasourceId(ctx.request.body)
const datasourceId = getDatasourceId(ctx.request.body)!
// table doesn't exist already, note that it is created
if (!table._id) {
table.created = true
}
let tableToSave = {
let tableToSave: TableRequest = {
type: "table",
_id: buildExternalTableId(datasourceId, table.name),
...table,
@ -188,10 +222,10 @@ exports.save = async function (ctx) {
ctx.throw(400, "A column type has changed.")
}
const db = getAppDB()
const db = context.getAppDB()
const datasource = await db.get(datasourceId)
const oldTables = cloneDeep(datasource.entities)
const tables = datasource.entities
const tables: Record<string, Table> = datasource.entities
const extraTablesToUpdate = []
@ -203,8 +237,11 @@ exports.save = async function (ctx) {
const relatedTable = Object.values(tables).find(
table => table._id === schema.tableId
)
const relatedColumnName = schema.fieldName
const relationType = schema.relationshipType
if (!relatedTable) {
continue
}
const relatedColumnName = schema.fieldName!
const relationType = schema.relationshipType!
if (relationType === RelationshipTypes.MANY_TO_MANY) {
const junctionTable = generateManyLinkSchema(
datasource,
@ -244,9 +281,7 @@ exports.save = async function (ctx) {
cleanupRelationships(tableToSave, tables, oldTable)
const operation = oldTable
? DataSourceOperation.UPDATE_TABLE
: DataSourceOperation.CREATE_TABLE
const operation = oldTable ? Operation.UPDATE_TABLE : Operation.CREATE_TABLE
await makeTableRequest(
datasource,
operation,
@ -258,9 +293,7 @@ exports.save = async function (ctx) {
// update any extra tables (like foreign keys in other tables)
for (let extraTable of extraTablesToUpdate) {
const oldExtraTable = oldTables[extraTable.name]
let op = oldExtraTable
? DataSourceOperation.UPDATE_TABLE
: DataSourceOperation.CREATE_TABLE
let op = oldExtraTable ? Operation.UPDATE_TABLE : Operation.CREATE_TABLE
await makeTableRequest(datasource, op, extraTable, tables, oldExtraTable)
}
@ -280,18 +313,20 @@ exports.save = async function (ctx) {
return tableToSave
}
exports.destroy = async function (ctx) {
const tableToDelete = await sdk.tables.getTable(ctx.params.tableId)
export async function destroy(ctx: BBContext) {
const tableToDelete: TableRequest = await sdk.tables.getTable(
ctx.params.tableId
)
if (!tableToDelete || !tableToDelete.created) {
ctx.throw(400, "Cannot delete tables which weren't created in Budibase.")
}
const datasourceId = getDatasourceId(tableToDelete)
const db = getAppDB()
const db = context.getAppDB()
const datasource = await db.get(datasourceId)
const tables = datasource.entities
const operation = DataSourceOperation.DELETE_TABLE
const operation = Operation.DELETE_TABLE
await makeTableRequest(datasource, operation, tableToDelete, tables)
cleanupRelationships(tableToDelete, tables)
@ -302,7 +337,7 @@ exports.destroy = async function (ctx) {
return tableToDelete
}
exports.bulkImport = async function (ctx) {
export async function bulkImport(ctx: BBContext) {
const table = await sdk.tables.getTable(ctx.params.tableId)
const { dataImport } = ctx.request.body
if (!dataImport || !dataImport.schema || !dataImport.csvString) {


@ -1,13 +1,13 @@
const internal = require("./internal")
const external = require("./external")
const csvParser = require("../../../utilities/csvParser")
const { isExternalTable, isSQL } = require("../../../integrations/utils")
const { getDatasourceParams } = require("../../../db/utils")
const { getAppDB } = require("@budibase/backend-core/context")
const { events } = require("@budibase/backend-core")
const sdk = require("../../../sdk")
import * as internal from "./internal"
import * as external from "./external"
import csvParser from "../../../utilities/csvParser"
import { isExternalTable, isSQL } from "../../../integrations/utils"
import { getDatasourceParams } from "../../../db/utils"
import { context, events } from "@budibase/backend-core"
import { Table, BBContext } from "@budibase/types"
import sdk from "../../../sdk"
function pickApi({ tableId, table }) {
function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
if (table && !tableId) {
tableId = table._id
}
@ -20,8 +20,8 @@ function pickApi({ tableId, table }) {
}
// covers both internal and external
exports.fetch = async function (ctx) {
const db = getAppDB()
export async function fetch(ctx: BBContext) {
const db = context.getAppDB()
const internal = await sdk.tables.getAllInternalTables()
@ -34,7 +34,7 @@ exports.fetch = async function (ctx) {
const external = externalTables.rows.flatMap(tableDoc => {
let entities = tableDoc.doc.entities
if (entities) {
return Object.values(entities).map(entity => ({
return Object.values(entities).map((entity: any) => ({
...entity,
type: "external",
sourceId: tableDoc.doc._id,
@ -48,12 +48,12 @@ exports.fetch = async function (ctx) {
ctx.body = [...internal, ...external]
}
exports.find = async function (ctx) {
export async function find(ctx: BBContext) {
const tableId = ctx.params.tableId
ctx.body = await sdk.tables.getTable(tableId)
}
exports.save = async function (ctx) {
export async function save(ctx: BBContext) {
const appId = ctx.appId
const table = ctx.request.body
const importFormat =
@ -74,7 +74,7 @@ exports.save = async function (ctx) {
ctx.body = savedTable
}
exports.destroy = async function (ctx) {
export async function destroy(ctx: BBContext) {
const appId = ctx.appId
const tableId = ctx.params.tableId
const deletedTable = await pickApi({ tableId }).destroy(ctx)
@ -86,7 +86,7 @@ exports.destroy = async function (ctx) {
ctx.body = { message: `Table ${tableId} deleted.` }
}
exports.bulkImport = async function (ctx) {
export async function bulkImport(ctx: BBContext) {
const tableId = ctx.params.tableId
await pickApi({ tableId }).bulkImport(ctx)
// right now we don't trigger anything for bulk import because it
@ -96,7 +96,7 @@ exports.bulkImport = async function (ctx) {
ctx.body = { message: `Bulk rows created.` }
}
exports.validateCSVSchema = async function (ctx) {
export async function validateCSVSchema(ctx: BBContext) {
// tableId being specified means its an import to an existing table
const { csvString, schema = {}, tableId } = ctx.request.body
let existingTable


@ -13,28 +13,28 @@ import {
} from "../../../constants"
import { getViews, saveView } from "../view/utils"
import viewTemplate from "../view/viewBuilder"
const { getAppDB } = require("@budibase/backend-core/context")
import { cloneDeep } from "lodash/fp"
import { quotas } from "@budibase/pro"
import { events } from "@budibase/backend-core"
import { events, context } from "@budibase/backend-core"
import { Database } from "@budibase/types"
export async function clearColumns(table: any, columnNames: any) {
const db = getAppDB()
const db: Database = context.getAppDB()
const rows = await db.allDocs(
getRowParams(table._id, null, {
include_docs: true,
})
)
return db.bulkDocs(
return (await db.bulkDocs(
rows.rows.map(({ doc }: any) => {
columnNames.forEach((colName: any) => delete doc[colName])
return doc
})
)
)) as { id: string; _rev?: string }[]
}
export async function checkForColumnUpdates(oldTable: any, updatedTable: any) {
const db = getAppDB()
const db = context.getAppDB()
let updatedRows = []
const rename = updatedTable._rename
let deletedColumns: any = []
@ -133,7 +133,7 @@ export async function handleDataImport(user: any, table: any, dataImport: any) {
return table
}
const db = getAppDB()
const db = context.getAppDB()
// Populate the table with rows imported from CSV in a bulk update
const data = await transform({
...dataImport,
@ -150,7 +150,7 @@ export async function handleDataImport(user: any, table: any, dataImport: any) {
}
export async function handleSearchIndexes(table: any) {
const db = getAppDB()
const db = context.getAppDB()
// create relevant search indexes
if (table.indexes && table.indexes.length > 0) {
const currentIndexes = await db.getIndexes()
@ -214,7 +214,7 @@ class TableSaveFunctions {
rows: any
constructor({ user, oldTable, dataImport }: any) {
this.db = getAppDB()
this.db = context.getAppDB()
this.user = user
this.oldTable = oldTable
this.dataImport = dataImport
@ -338,7 +338,7 @@ export function generateJunctionTableName(
return `jt_${table.name}_${relatedTable.name}_${column.name}_${column.fieldName}`
}
export function foreignKeyStructure(keyName: any, meta = null) {
export function foreignKeyStructure(keyName: any, meta?: any) {
const structure: any = {
type: FieldTypes.NUMBER,
constraints: {},


@ -1,17 +1,20 @@
const env = require("../environment")
const { OBJ_STORE_DIRECTORY } = require("../constants")
const { sanitizeKey } = require("@budibase/backend-core/objectStore")
const { generateMetadataID } = require("../db/utils")
const Readable = require("stream").Readable
const { getAppDB } = require("@budibase/backend-core/context")
import env from "../environment"
import { OBJ_STORE_DIRECTORY } from "../constants"
import { objectStore, context } from "@budibase/backend-core"
import { generateMetadataID } from "../db/utils"
import { Document } from "@budibase/types"
import stream from "stream"
const Readable = stream.Readable
exports.wait = ms => new Promise(resolve => setTimeout(resolve, ms))
export function wait(ms: number) {
return new Promise(resolve => setTimeout(resolve, ms))
}
exports.isDev = env.isDev
export const isDev = env.isDev
exports.NUMBER_REGEX = /^[+-]?([0-9]*[.])?[0-9]+$/g
export const NUMBER_REGEX = /^[+-]?([0-9]*[.])?[0-9]+$/g
exports.removeFromArray = (array, element) => {
export function removeFromArray(array: any[], element: any) {
const index = array.indexOf(element)
if (index !== -1) {
array.splice(index, 1)
@ -25,7 +28,7 @@ exports.removeFromArray = (array, element) => {
* @param {string} url The URL to test and remove any extra double slashes.
* @return {string} The updated url.
*/
exports.checkSlashesInUrl = url => {
export function checkSlashesInUrl(url: string) {
return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
}
@ -33,7 +36,7 @@ exports.checkSlashesInUrl = url => {
* Gets the address of the object store, depending on whether self hosted or in cloud.
* @return {string} The base URL of the object store (MinIO or S3).
*/
exports.objectStoreUrl = () => {
export function objectStoreUrl() {
if (env.SELF_HOSTED || env.MINIO_URL) {
// can use a relative url for this as all goes through the proxy (this is hosted in minio)
return OBJ_STORE_DIRECTORY
@ -52,9 +55,9 @@ exports.objectStoreUrl = () => {
* @return {string} The URL to be inserted into appPackage response or server rendered
* app index file.
*/
exports.clientLibraryPath = (appId, version) => {
export function clientLibraryPath(appId: string, version: string) {
if (env.isProd()) {
let url = `${exports.objectStoreUrl()}/${sanitizeKey(
let url = `${objectStoreUrl()}/${objectStore.sanitizeKey(
appId
)}/budibase-client.js`
@ -68,18 +71,19 @@ exports.clientLibraryPath = (appId, version) => {
}
}
exports.attachmentsRelativeURL = attachmentKey => {
return exports.checkSlashesInUrl(
`${exports.objectStoreUrl()}/${attachmentKey}`
)
export function attachmentsRelativeURL(attachmentKey: string) {
return checkSlashesInUrl(`${objectStoreUrl()}/${attachmentKey}`)
}
exports.updateEntityMetadata = async (type, entityId, updateFn) => {
const db = getAppDB()
export async function updateEntityMetadata(
type: string,
entityId: string,
updateFn: any
) {
const db = context.getAppDB()
const id = generateMetadataID(type, entityId)
// read it to see if it exists, we'll overwrite it no matter what
let rev,
metadata = {}
let rev, metadata: Document
try {
const oldMetadata = await db.get(id)
rev = oldMetadata._rev
@ -100,14 +104,18 @@ exports.updateEntityMetadata = async (type, entityId, updateFn) => {
}
}
exports.saveEntityMetadata = async (type, entityId, metadata) => {
return exports.updateEntityMetadata(type, entityId, () => {
export async function saveEntityMetadata(
type: string,
entityId: string,
metadata: Document
) {
return updateEntityMetadata(type, entityId, () => {
return metadata
})
}
exports.deleteEntityMetadata = async (type, entityId) => {
const db = getAppDB()
export async function deleteEntityMetadata(type: string, entityId: string) {
const db = context.getAppDB()
const id = generateMetadataID(type, entityId)
let rev
try {
@ -123,7 +131,7 @@ exports.deleteEntityMetadata = async (type, entityId) => {
}
}
exports.escapeDangerousCharacters = string => {
export function escapeDangerousCharacters(string: string) {
return string
.replace(/[\\]/g, "\\\\")
.replace(/[\b]/g, "\\b")
@ -133,7 +141,7 @@ exports.escapeDangerousCharacters = string => {
.replace(/[\t]/g, "\\t")
}
exports.stringToReadStream = string => {
export function stringToReadStream(string: string) {
return new Readable({
read() {
this.push(string)
@ -142,7 +150,7 @@ exports.stringToReadStream = string => {
})
}
exports.formatBytes = bytes => {
export function formatBytes(bytes: string) {
const units = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]
const byteIncrements = 1024
let unit = 0
@ -153,7 +161,7 @@ exports.formatBytes = bytes => {
return `${size.toFixed(size < 10 && unit > 0 ? 1 : 0)}${units[unit]}`
}
exports.convertBookmark = bookmark => {
export function convertBookmark(bookmark: string) {
const IS_NUMBER = /^\d+\.?\d*$/
if (typeof bookmark === "string" && bookmark.match(IS_NUMBER)) {
return parseFloat(bookmark)
@ -161,7 +169,7 @@ exports.convertBookmark = bookmark => {
return bookmark
}
exports.isQsTrue = param => {
export function isQsTrue(param: string) {
if (typeof param === "string") {
return param.toLowerCase() === "true"
} else {


@ -8,6 +8,7 @@ import { InternalTables } from "../../db/utils"
import { TYPE_TRANSFORM_MAP } from "./map"
import { Row, User, Table } from "@budibase/types"
const { cloneDeep } = require("lodash/fp")
export * from "./utils"
type AutoColumnProcessingOpts = {
reprocessing?: boolean
@ -229,7 +230,7 @@ export async function cleanupAttachments(
rows,
oldRow,
oldTable,
}: { row?: Row; rows?: Row[]; oldRow?: Row; oldTable: Table }
}: { row?: Row; rows?: Row[]; oldRow?: Row; oldTable?: Table }
): Promise<any> {
const appId = context.getAppId()
if (!dbCore.isProdAppID(appId)) {


@ -1,71 +0,0 @@
const {
PermissionLevel,
PermissionType,
getBuiltinPermissionByID,
isPermissionLevelHigherThanRead,
} = require("@budibase/backend-core/permissions")
const {
lowerBuiltinRoleID,
getBuiltinRoles,
} = require("@budibase/backend-core/roles")
const { DocumentType } = require("../db/utils")
const CURRENTLY_SUPPORTED_LEVELS = [
PermissionLevel.WRITE,
PermissionLevel.READ,
PermissionLevel.EXECUTE,
]
exports.getPermissionType = resourceId => {
const docType = Object.values(DocumentType).filter(docType =>
resourceId.startsWith(docType)
)[0]
switch (docType) {
case DocumentType.TABLE:
case DocumentType.ROW:
return PermissionType.TABLE
case DocumentType.AUTOMATION:
return PermissionType.AUTOMATION
case DocumentType.WEBHOOK:
return PermissionType.WEBHOOK
case DocumentType.QUERY:
case DocumentType.DATASOURCE:
return PermissionType.QUERY
default:
// views don't have an ID, will end up here
return PermissionType.VIEW
}
}
/**
* works out the basic permissions based on builtin roles for a resource, using its ID
* @param resourceId
* @returns {{}}
*/
exports.getBasePermissions = resourceId => {
const type = exports.getPermissionType(resourceId)
const permissions = {}
for (let [roleId, role] of Object.entries(getBuiltinRoles())) {
if (!role.permissionId) {
continue
}
const perms = getBuiltinPermissionByID(role.permissionId)
const typedPermission = perms.permissions.find(perm => perm.type === type)
if (
typedPermission &&
CURRENTLY_SUPPORTED_LEVELS.indexOf(typedPermission.level) !== -1
) {
const level = typedPermission.level
permissions[level] = lowerBuiltinRoleID(permissions[level], roleId)
if (isPermissionLevelHigherThanRead(level)) {
permissions[PermissionLevel.READ] = lowerBuiltinRoleID(
permissions[PermissionLevel.READ],
roleId
)
}
}
}
return permissions
}
exports.CURRENTLY_SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS


@ -0,0 +1,65 @@
import { permissions, roles } from "@budibase/backend-core"
import { DocumentType } from "../db/utils"
export const CURRENTLY_SUPPORTED_LEVELS: string[] = [
permissions.PermissionLevel.WRITE,
permissions.PermissionLevel.READ,
permissions.PermissionLevel.EXECUTE,
]
export function getPermissionType(resourceId: string) {
const docType = Object.values(DocumentType).filter(docType =>
resourceId.startsWith(docType)
)[0]
switch (docType) {
case DocumentType.TABLE:
case DocumentType.ROW:
return permissions.PermissionType.TABLE
case DocumentType.AUTOMATION:
return permissions.PermissionType.AUTOMATION
case DocumentType.WEBHOOK:
return permissions.PermissionType.WEBHOOK
case DocumentType.QUERY:
case DocumentType.DATASOURCE:
return permissions.PermissionType.QUERY
default:
// views don't have an ID, will end up here
return permissions.PermissionType.VIEW
}
}
/**
* works out the basic permissions based on builtin roles for a resource, using its ID
*/
export function getBasePermissions(resourceId: string) {
const type = getPermissionType(resourceId)
const basePermissions: { [key: string]: string } = {}
for (let [roleId, role] of Object.entries(roles.getBuiltinRoles())) {
if (!role.permissionId) {
continue
}
const perms = permissions.getBuiltinPermissionByID(role.permissionId)
if (!perms) {
continue
}
const typedPermission = perms.permissions.find(perm => perm.type === type)
if (
typedPermission &&
CURRENTLY_SUPPORTED_LEVELS.indexOf(typedPermission.level) !== -1
) {
const level = typedPermission.level
basePermissions[level] = roles.lowerBuiltinRoleID(
basePermissions[level],
roleId
)
if (permissions.isPermissionLevelHigherThanRead(level)) {
basePermissions[permissions.PermissionLevel.READ] =
roles.lowerBuiltinRoleID(
basePermissions[permissions.PermissionLevel.READ],
roleId
)
}
}
}
return basePermissions
}


@ -1,12 +1,11 @@
const { InternalTables } = require("../db/utils")
const { getGlobalUser } = require("../utilities/global")
const { getAppDB } = require("@budibase/backend-core/context")
const { getProdAppID } = require("@budibase/backend-core/db")
const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles")
import { InternalTables } from "../db/utils"
import { getGlobalUser } from "./global"
import { context, db as dbCore, roles } from "@budibase/backend-core"
import { BBContext } from "@budibase/types"
exports.getFullUser = async (ctx, userId) => {
export async function getFullUser(ctx: BBContext, userId: string) {
const global = await getGlobalUser(userId)
let metadata = {}
let metadata: any = {}
// always prefer the user metadata _id and _rev
delete global._id
@ -14,7 +13,7 @@ exports.getFullUser = async (ctx, userId) => {
try {
// this will throw an error if the db doesn't exist, or there is no appId
const db = getAppDB()
const db = context.getAppDB()
metadata = await db.get(userId)
} catch (err) {
// it is fine if there is no user metadata yet
@ -23,14 +22,14 @@ exports.getFullUser = async (ctx, userId) => {
return {
...metadata,
...global,
roleId: global.roleId || BUILTIN_ROLE_IDS.PUBLIC,
roleId: global.roleId || roles.BUILTIN_ROLE_IDS.PUBLIC,
tableId: InternalTables.USER_METADATA,
// make sure the ID is always a local ID, not a global one
_id: userId,
}
}
exports.publicApiUserFix = ctx => {
export function publicApiUserFix(ctx: BBContext) {
if (!ctx.request.body) {
return ctx
}
@ -40,10 +39,9 @@ exports.publicApiUserFix = ctx => {
if (!ctx.request.body.roles) {
ctx.request.body.roles = {}
} else {
const newRoles = {}
const newRoles: { [key: string]: any } = {}
for (let [appId, role] of Object.entries(ctx.request.body.roles)) {
// @ts-ignore
newRoles[getProdAppID(appId)] = role
newRoles[dbCore.getProdAppID(appId)] = role
}
ctx.request.body.roles = newRoles
}


@ -2,6 +2,6 @@ import { Document } from "../document"
export interface Role extends Document {
permissionId: string
inherits: string
inherits?: string
permissions: { [key: string]: string[] }
}


@ -1,5 +1,6 @@
import { Document } from "../document"
import { View } from "./view"
import { RenameColumn } from "../../sdk"
export interface FieldSchema {
// TODO: replace with field types enum when done
@ -55,3 +56,8 @@ export interface Table extends Document {
indexes?: { [key: string]: any }
dataImport?: { [key: string]: any }
}
export interface TableRequest extends Table {
_rename?: RenameColumn
created?: boolean
}


@ -72,7 +72,7 @@ export interface QueryJson {
operation: Operation
schema?: string
}
resource: {
resource?: {
fields: string[]
}
filters?: SearchFilters
@ -83,7 +83,7 @@ export interface QueryJson {
meta?: {
table?: Table
tables?: Record<string, Table>
renamed: RenameColumn
renamed?: RenameColumn
}
extra?: {
idFilter?: SearchFilters