
One file left in the server, some fixes after converting the internal row controller.

mike12345567 2022-11-26 15:42:53 +00:00
parent 658f12281d
commit 12d6e60525
9 changed files with 119 additions and 86 deletions

View file

@@ -1,6 +1,6 @@
 import { quotas } from "@budibase/pro"
-import internal from "./internal"
-import external from "./external"
+import * as internal from "./internal"
+import * as external from "./external"
 import { isExternalTable } from "../../../integrations/utils"
 
 function pickApi(tableId: any) {
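
A note on the import change above: `./internal` and `./external` expose named exports rather than a default export, so once the modules are ES modules a default-style import either fails to compile or binds `undefined`, depending on interop settings. The namespace form collects every named export under one object. A minimal two-file sketch of the pattern (hypothetical module, not the Budibase source):

// internal.ts: named exports only, no default export
export async function save(body: object) {
  return { ok: true, body }
}

// index.ts: the namespace import gathers all named exports under one binding
import * as internal from "./internal"

internal.save({}) // works; `import internal from "./internal"` would not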

View file

@@ -1,34 +1,41 @@
-const linkRows = require("../../../db/linkedRows")
-const {
+import * as linkRows from "../../../db/linkedRows"
+import {
   generateRowID,
   getRowParams,
   getTableIDFromRowID,
   DocumentType,
   InternalTables,
-} = require("../../../db/utils")
-const userController = require("../user")
-const {
+} from "../../../db/utils"
+import * as userController from "../user"
+import {
   inputProcessing,
   outputProcessing,
   cleanupAttachments,
-} = require("../../../utilities/rowProcessor")
-const { FieldTypes } = require("../../../constants")
-const { validate, findRow } = require("./utils")
-const { fullSearch, paginatedSearch } = require("./internalSearch")
-const { getGlobalUsersFromMetadata } = require("../../../utilities/global")
-const inMemoryViews = require("../../../db/inMemoryView")
-const env = require("../../../environment")
-const {
+} from "../../../utilities/rowProcessor"
+import { FieldTypes } from "../../../constants"
+import { validate as rowValidate, findRow } from "./utils"
+import { fullSearch, paginatedSearch } from "./internalSearch"
+import { getGlobalUsersFromMetadata } from "../../../utilities/global"
+import * as inMemoryViews from "../../../db/inMemoryView"
+import env from "../../../environment"
+import {
   migrateToInMemoryView,
   migrateToDesignView,
   getFromDesignDoc,
   getFromMemoryDoc,
-} = require("../view/utils")
-const { cloneDeep } = require("lodash/fp")
-const { context, db: dbCore } = require("@budibase/backend-core")
-const { finaliseRow, updateRelatedFormula } = require("./staticFormula")
-const exporters = require("../view/exporters")
-const { apiFileReturn } = require("../../../utilities/fileSystem")
+} from "../view/utils"
+import { cloneDeep } from "lodash/fp"
+import { context, db as dbCore } from "@budibase/backend-core"
+import { finaliseRow, updateRelatedFormula } from "./staticFormula"
+import * as exporters from "../view/exporters"
+import { apiFileReturn } from "../../../utilities/fileSystem"
+import {
+  BBContext,
+  Database,
+  LinkDocumentValue,
+  Row,
+  Table,
+} from "@budibase/types"
 
 const CALCULATION_TYPES = {
   SUM: "sum",
@@ -36,7 +43,7 @@ const CALCULATION_TYPES = {
   STATS: "stats",
 }
 
-async function getView(db, viewName) {
+async function getView(db: Database, viewName: string) {
   let mainGetter = env.SELF_HOSTED ? getFromDesignDoc : getFromMemoryDoc
   let secondaryGetter = env.SELF_HOSTED ? getFromMemoryDoc : getFromDesignDoc
   let migration = env.SELF_HOSTED ? migrateToDesignView : migrateToInMemoryView
@@ -44,7 +51,7 @@ async function getView(db, viewName) {
     migrate = false
   try {
     viewInfo = await mainGetter(db, viewName)
-  } catch (err) {
+  } catch (err: any) {
     // check if it can be retrieved from design doc (needs migrated)
     if (err.status !== 404) {
       viewInfo = null
@@ -62,7 +69,7 @@ async function getView(db, viewName) {
   return viewInfo
 }
 
-async function getRawTableData(ctx, db, tableId) {
+async function getRawTableData(ctx: BBContext, db: Database, tableId: string) {
   let rows
   if (tableId === InternalTables.USER_METADATA) {
     await userController.fetchMetadata(ctx)
@@ -75,10 +82,10 @@ async function getRawTableData(ctx, db, tableId) {
     )
     rows = response.rows.map(row => row.doc)
   }
-  return rows
+  return rows as Row[]
 }
 
-exports.patch = async ctx => {
+export async function patch(ctx: BBContext) {
   const db = context.getAppDB()
   const inputs = ctx.request.body
   const tableId = inputs.tableId
@@ -103,15 +110,15 @@ exports.patch = async ctx => {
   }
   let dbTable = await db.get(tableId)
   // need to build up full patch fields before coerce
-  let combinedRow = cloneDeep(oldRow)
+  let combinedRow: any = cloneDeep(oldRow)
   for (let key of Object.keys(inputs)) {
     if (!dbTable.schema[key]) continue
     combinedRow[key] = inputs[key]
   }
 
   // this returns the table and row incase they have been updated
-  let { table, row } = inputProcessing(ctx.user, dbTable, combinedRow)
-  const validateResult = await validate({
+  let { table, row } = inputProcessing(ctx.user!, dbTable, combinedRow)
+  const validateResult = await rowValidate({
     row,
     table,
   })
@@ -121,12 +128,12 @@ exports.patch = async ctx => {
   }
 
   // returned row is cleaned and prepared for writing to DB
-  row = await linkRows.updateLinks({
+  row = (await linkRows.updateLinks({
     eventType: linkRows.EventType.ROW_UPDATE,
     row,
     tableId: row.tableId,
     table,
-  })
+  })) as Row
   // check if any attachments removed
   await cleanupAttachments(table, { oldRow, row })
 
@@ -143,7 +150,7 @@ exports.patch = async ctx => {
   })
 }
 
-exports.save = async function (ctx) {
+export async function save(ctx: BBContext) {
   const db = context.getAppDB()
   let inputs = ctx.request.body
   inputs.tableId = ctx.params.tableId
@@ -154,8 +161,8 @@ exports.save = async function (ctx) {
 
   // this returns the table and row incase they have been updated
   const dbTable = await db.get(inputs.tableId)
-  let { table, row } = inputProcessing(ctx.user, dbTable, inputs)
-  const validateResult = await validate({
+  let { table, row } = inputProcessing(ctx.user!, dbTable, inputs)
+  const validateResult = await rowValidate({
     row,
     table,
   })
@@ -165,12 +172,12 @@ exports.save = async function (ctx) {
   }
 
   // make sure link rows are up to date
-  row = await linkRows.updateLinks({
+  row = (await linkRows.updateLinks({
     eventType: linkRows.EventType.ROW_SAVE,
     row,
     tableId: row.tableId,
     table,
-  })
+  })) as Row
 
   return finaliseRow(table, row, {
     oldTable: dbTable,
@@ -178,7 +185,7 @@ exports.save = async function (ctx) {
   })
 }
 
-exports.fetchView = async ctx => {
+export async function fetchView(ctx: BBContext) {
   const viewName = ctx.params.viewName
 
   // if this is a table view being looked for just transfer to that
@@ -199,7 +206,12 @@ exports.fetchView = async ctx => {
   } else {
     const tableId = viewInfo.meta.tableId
     const data = await getRawTableData(ctx, db, tableId)
-    response = await inMemoryViews.runView(viewInfo, calculation, group, data)
+    response = await inMemoryViews.runView(
+      viewInfo,
+      calculation as string,
+      !!group,
+      data
+    )
   }
 
   let rows
@@ -240,7 +252,7 @@ exports.fetchView = async ctx => {
   return rows
 }
 
-exports.fetch = async ctx => {
+export async function fetch(ctx: BBContext) {
   const db = context.getAppDB()
 
   const tableId = ctx.params.tableId
@@ -249,7 +261,7 @@ exports.fetch = async ctx => {
   return outputProcessing(table, rows)
 }
 
-exports.find = async ctx => {
+export async function find(ctx: BBContext) {
   const db = dbCore.getDB(ctx.appId)
   const table = await db.get(ctx.params.tableId)
   let row = await findRow(ctx, ctx.params.tableId, ctx.params.rowId)
@@ -257,7 +269,7 @@ exports.find = async ctx => {
   return row
 }
 
-exports.destroy = async function (ctx) {
+export async function destroy(ctx: BBContext) {
   const db = context.getAppDB()
   const { _id } = ctx.request.body
   let row = await db.get(_id)
@@ -293,7 +305,7 @@ exports.destroy = async function (ctx) {
   return { response, row }
 }
 
-exports.bulkDestroy = async ctx => {
+export async function bulkDestroy(ctx: BBContext) {
   const db = context.getAppDB()
   const tableId = ctx.params.tableId
   const table = await db.get(tableId)
@@ -301,10 +313,12 @@ exports.bulkDestroy = async ctx => {
 
   // before carrying out any updates, make sure the rows are ready to be returned
   // they need to be the full rows (including previous relationships) for automations
-  rows = await outputProcessing(table, rows, { squash: false })
+  const processedRows = (await outputProcessing(table, rows, {
+    squash: false,
+  })) as Row[]
 
   // remove the relationships first
-  let updates = rows.map(row =>
+  let updates: Promise<any>[] = processedRows.map(row =>
     linkRows.updateLinks({
       eventType: linkRows.EventType.ROW_DELETE,
       row,
@@ -313,7 +327,7 @@ exports.bulkDestroy = async ctx => {
   )
   if (tableId === InternalTables.USER_METADATA) {
     updates = updates.concat(
-      rows.map(row => {
+      processedRows.map(row => {
        ctx.params = {
          id: row._id,
        }
@@ -321,16 +335,16 @@ exports.bulkDestroy = async ctx => {
       })
     )
   } else {
-    await db.bulkDocs(rows.map(row => ({ ...row, _deleted: true })))
+    await db.bulkDocs(processedRows.map(row => ({ ...row, _deleted: true })))
   }
   // remove any attachments that were on the rows from object storage
-  await cleanupAttachments(table, { rows })
-  await updateRelatedFormula(table, rows)
+  await cleanupAttachments(table, { rows: processedRows })
+  await updateRelatedFormula(table, processedRows)
   await Promise.all(updates)
-  return { response: { ok: true }, rows }
+  return { response: { ok: true }, rows: processedRows }
 }
 
-exports.search = async ctx => {
+export async function search(ctx: BBContext) {
   // Fetch the whole table when running in cypress, as search doesn't work
   if (!env.COUCH_DB_URL && env.isCypress()) {
     return { rows: await exports.fetch(ctx) }
@@ -362,14 +376,14 @@ exports.search = async ctx => {
   return response
 }
 
-exports.validate = async ctx => {
-  return validate({
+export async function validate(ctx: BBContext) {
+  return rowValidate({
     tableId: ctx.params.tableId,
     row: ctx.request.body,
   })
 }
 
-exports.exportRows = async ctx => {
+export async function exportRows(ctx: BBContext) {
   const db = context.getAppDB()
   const table = await db.get(ctx.params.tableId)
   const rowIds = ctx.request.body.rows
@@ -382,8 +396,8 @@ exports.exportRows = async ctx => {
     })
   ).rows.map(row => row.doc)
 
-  let result = await outputProcessing(table, response)
-  let rows = []
+  let result = (await outputProcessing(table, response)) as Row[]
+  let rows: Row[] = []
 
   // Filter data to only specified columns if required
   if (columns && columns.length) {
@@ -398,6 +412,7 @@ exports.exportRows = async ctx => {
   }
 
   let headers = Object.keys(rows[0])
+  // @ts-ignore
   const exporter = exporters[format]
   const filename = `export.${format}`
@@ -406,7 +421,7 @@ exports.exportRows = async ctx => {
   return apiFileReturn(exporter(headers, rows))
 }
 
-exports.fetchEnrichedRow = async ctx => {
+export async function fetchEnrichedRow(ctx: BBContext) {
   const db = context.getAppDB()
   const tableId = ctx.params.tableId
   const rowId = ctx.params.rowId
@@ -416,10 +431,10 @@ exports.fetchEnrichedRow = async ctx => {
     findRow(ctx, tableId, rowId),
   ])
   // get the link docs
-  const linkVals = await linkRows.getLinkDocuments({
+  const linkVals = (await linkRows.getLinkDocuments({
     tableId,
     rowId,
-  })
+  })) as LinkDocumentValue[]
   // look up the actual rows based on the ids
   let response = (
     await db.allDocs({
@@ -428,8 +443,8 @@ exports.fetchEnrichedRow = async ctx => {
     })
   ).rows.map(row => row.doc)
   // group responses by table
-  let groups = {},
-    tables = {}
+  let groups: any = {},
+    tables: Record<string, Table> = {}
   for (let row of response) {
     if (!row.tableId) {
       row.tableId = getTableIDFromRowID(row._id)
@@ -442,11 +457,11 @@ exports.fetchEnrichedRow = async ctx => {
       groups[linkedTableId].push(row)
     }
   }
 
-  let linkedRows = []
+  let linkedRows: Row[] = []
   for (let [tableId, rows] of Object.entries(groups)) {
     // need to include the IDs in these rows for any links they may have
     linkedRows = linkedRows.concat(
-      await outputProcessing(tables[tableId], rows)
+      await outputProcessing(tables[tableId], rows as Row[])
     )
   }
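
One rename in this file is easy to miss: the controller itself exports a `validate` handler, so the helper of the same name from `./utils` is imported under the alias `rowValidate`. With `exports.validate = ...` the clash never surfaced, but named exports would make it a redeclaration error. A minimal sketch of the collision and the fix (hypothetical file names, not the Budibase source):

// utils.ts: a row-level validation helper
export function validate(row: object) {
  return { valid: true, errors: {} as Record<string, string> }
}

// controller.ts: aliasing frees the name for the exported handler
import { validate as rowValidate } from "./utils"

export async function validate(ctx: { request: { body: object } }) {
  return rowValidate(ctx.request.body)
}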

View file

@ -16,7 +16,7 @@ const { cloneDeep } = require("lodash/fp")
* updated. * updated.
* NOTE: this will only for affect static formulas. * NOTE: this will only for affect static formulas.
*/ */
exports.updateRelatedFormula = async (table: Table, enrichedRows: Row[]) => { export async function updateRelatedFormula(table: Table, enrichedRows: Row[]) {
const db = context.getAppDB() const db = context.getAppDB()
// no formula to update, we're done // no formula to update, we're done
if (!table.relatedFormula) { if (!table.relatedFormula) {

View file

@@ -51,16 +51,19 @@ export async function validate({
 }: {
   tableId?: string
   row: Row
-  table: Table
+  table?: Table
 }) {
+  let fetchedTable: Table
   if (!table) {
     const db = context.getAppDB()
-    table = await db.get(tableId)
+    fetchedTable = await db.get(tableId)
+  } else {
+    fetchedTable = table
   }
   const errors: any = {}
-  for (let fieldName of Object.keys(table.schema)) {
-    const constraints = cloneDeep(table.schema[fieldName].constraints)
-    const type = table.schema[fieldName].type
+  for (let fieldName of Object.keys(fetchedTable.schema)) {
+    const constraints = cloneDeep(fetchedTable.schema[fieldName].constraints)
+    const type = fetchedTable.schema[fieldName].type
     // formulas shouldn't validated, data will be deleted anyway
     if (type === FieldTypes.FORMULA) {
       continue
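
The `fetchedTable` local is there for strict null checking: with `table` now optional, every later `table.schema` access would need narrowing, so the commit funnels both branches into a local the compiler knows is always assigned. A standalone sketch of the pattern (hypothetical `loadTable` standing in for `db.get`):

interface Table {
  schema: Record<string, { type: string }>
}

// hypothetical loader standing in for db.get(tableId)
async function loadTable(_tableId: string): Promise<Table> {
  return { schema: { name: { type: "string" } } }
}

async function validate({ tableId, table }: { tableId?: string; table?: Table }) {
  // copy the optional parameter into a definitely-assigned local
  let fetchedTable: Table
  if (!table) {
    fetchedTable = await loadTable(tableId!)
  } else {
    fetchedTable = table
  }
  // safe: fetchedTable is a plain Table on every path
  return Object.keys(fetchedTable.schema)
}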

View file

@@ -1,4 +1,14 @@
-const TOKEN_MAP = {
+import { ViewFilter } from "@budibase/types"
+
+type ViewTemplateOpts = {
+  field: string
+  tableId: string
+  groupBy: string
+  filters: ViewFilter[]
+  calculation: string
+}
+
+const TOKEN_MAP: Record<string, string> = {
   EQUALS: "===",
   NOT_EQUALS: "!==",
   LT: "<",
@@ -10,13 +20,13 @@ const TOKEN_MAP = {
   OR: "||",
 }
 
-const CONDITIONS = {
+const CONDITIONS: Record<string, string> = {
   EMPTY: "EMPTY",
   NOT_EMPTY: "NOT_EMPTY",
   CONTAINS: "CONTAINS",
 }
 
-const isEmptyExpression = key => {
+function isEmptyExpression(key: string) {
   return `(
     doc["${key}"] === undefined ||
     doc["${key}"] === null ||
@@ -25,19 +35,19 @@ const isEmptyExpression = key => {
   )`
 }
 
-const GROUP_PROPERTY = {
+const GROUP_PROPERTY: Record<string, { type: string }> = {
   group: {
     type: "string",
   },
 }
 
-const FIELD_PROPERTY = {
+const FIELD_PROPERTY: Record<string, { type: string }> = {
   field: {
     type: "string",
   },
 }
 
-const SCHEMA_MAP = {
+const SCHEMA_MAP: Record<string, any> = {
   sum: {
     field: "string",
     value: "number",
@@ -74,7 +84,7 @@ const SCHEMA_MAP = {
  * @param {Array} filters - an array of filter objects
  * @returns {String} JS Expression
  */
-function parseFilterExpression(filters) {
+function parseFilterExpression(filters: ViewFilter[]) {
   const expression = []
 
   let first = true
@@ -111,7 +121,7 @@ function parseFilterExpression(filters) {
  * @param {String?} field - field to use for calculations, if any
 * @param {String?} groupBy - field to group calculation results on, if any
 */
-function parseEmitExpression(field, groupBy) {
+function parseEmitExpression(field: string, groupBy: string) {
   return `emit(doc["${groupBy || "_id"}"], doc["${field}"]);`
 }
 
@@ -126,7 +136,13 @@ function parseEmitExpression(field, groupBy) {
  * filters: Array of filter objects containing predicates that are parsed into a JS expression
  * calculation: an optional calculation to be performed over the view data.
  */
-function viewTemplate({ field, tableId, groupBy, filters = [], calculation }) {
+export = function ({
+  field,
+  tableId,
+  groupBy,
+  filters = [],
+  calculation,
+}: ViewTemplateOpts) {
   // first filter can't have a conjunction
   if (filters && filters.length > 0 && filters[0].conjunction) {
     delete filters[0].conjunction
@@ -179,5 +195,3 @@ function viewTemplate({ field, tableId, groupBy, filters = [], calculation }) {
     ...reduction,
   }
 }
-
-module.exports = viewTemplate
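
`export =` is TypeScript's spelling of a CommonJS single-value export: it replaces the trailing `module.exports = viewTemplate` while keeping existing `require("./viewBuilder")` call sites working unchanged. A minimal sketch of the shape (hypothetical files, simplified return value):

// viewBuilder.ts: `export =` preserves the CommonJS export shape
function viewTemplate(opts: { field: string }) {
  return { map: `function (doc) { emit(doc["${opts.field}"]) }` }
}
export = viewTemplate

// caller.ts: consumed with the matching import-assignment syntax
import viewTemplate = require("./viewBuilder")
const view = viewTemplate({ field: "amount" })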

View file

@@ -8,8 +8,8 @@ const Pouch = dbCore.getPouch({ inMemory: true })
 
 export async function runView(
   view: View,
-  calculation: boolean,
-  group: string,
+  calculation: string,
+  group: boolean,
   data: Row[]
 ) {
   // use a different ID each time for the DB, make sure they
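
The two middle parameters had their types transposed in the earlier signature (`calculation` as a boolean, `group` as a string); the fix lines up with the row controller, which now passes `calculation as string` and `!!group`. A small illustration of how the corrected types catch a swapped call (hypothetical simplified `runView`, not the real implementation):

type Row = Record<string, unknown>

// corrected order: the calculation name first, then the grouping flag
async function runView(calculation: string, group: boolean, data: Row[]) {
  return { calculation, grouped: group, rows: data }
}

// runView(true, "stats", [])           // no longer compiles: boolean is not a string
runView("stats", true, [{ amount: 1 }]) // OK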

View file

@ -6,7 +6,7 @@ import { ObjectStoreBuckets } from "../../constants"
import { context, db as dbCore, objectStore } from "@budibase/backend-core" import { context, db as dbCore, objectStore } from "@budibase/backend-core"
import { InternalTables } from "../../db/utils" import { InternalTables } from "../../db/utils"
import { TYPE_TRANSFORM_MAP } from "./map" import { TYPE_TRANSFORM_MAP } from "./map"
import { Row, User, Table } from "@budibase/types" import { Row, Table, ContextUser } from "@budibase/types"
const { cloneDeep } = require("lodash/fp") const { cloneDeep } = require("lodash/fp")
export * from "./utils" export * from "./utils"
@ -49,7 +49,7 @@ function getRemovedAttachmentKeys(
* for automatic ID purposes. * for automatic ID purposes.
*/ */
export function processAutoColumn( export function processAutoColumn(
user: User | null, user: ContextUser | null,
table: Table, table: Table,
row: Row, row: Row,
opts?: AutoColumnProcessingOpts opts?: AutoColumnProcessingOpts
@ -132,10 +132,10 @@ export function coerce(row: any, type: any) {
* @returns {object} the row which has been prepared to be written to the DB. * @returns {object} the row which has been prepared to be written to the DB.
*/ */
export function inputProcessing( export function inputProcessing(
user: User, user: ContextUser,
table: Table, table: Table,
row: Row, row: Row,
opts: AutoColumnProcessingOpts opts?: AutoColumnProcessingOpts
) { ) {
let clonedRow = cloneDeep(row) let clonedRow = cloneDeep(row)
// need to copy the table so it can be differenced on way out // need to copy the table so it can be differenced on way out
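
Making `opts` optional matches the controller call sites, which invoke `inputProcessing(ctx.user!, dbTable, row)` with three arguments; under the old required `opts` those calls would not type-check. A tiny sketch (hypothetical simplified types):

type AutoColumnOpts = { createAutoIds?: boolean }

// optional trailing options: three-argument calls are valid
function process(user: object, table: object, row: object, opts?: AutoColumnOpts) {
  return { user, table, row, opts: opts ?? {} }
}

process({}, {}, {}) // OK now that opts is optional
process({}, {}, {}, { createAutoIds: true })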

View file

@ -33,7 +33,7 @@ export interface ViewStatisticsSchema {
} }
export interface ViewFilter { export interface ViewFilter {
value: any value?: any
condition: string condition: string
key: string key: string
conjunction?: string conjunction?: string

View file

@@ -57,6 +57,7 @@ export type DatabaseQueryOpts = {
   descending?: boolean
   key?: string
   keys?: string[]
+  group?: boolean
 }
 
 export const isDocument = (doc: any): doc is Document => {