
Merge remote-tracking branch 'origin/master' into feature/form-screen-template

Dean 2024-03-06 10:13:58 +00:00
commit f494611bd8
82 changed files with 2053 additions and 1278 deletions

View file

@@ -1,5 +1,5 @@
 {
-  "version": "2.21.0",
+  "version": "2.21.3",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

@@ -1 +1 @@
-Subproject commit 19f7a5829f4d23cbc694136e45d94482a59a475a
+Subproject commit 0c050591c21d3b67dc0c9225d60cc9e2324c8dac

View file

@@ -1,5 +1,6 @@
 import { APIError } from "@budibase/types"
 import * as errors from "../errors"
+import environment from "../environment"

 export async function errorHandling(ctx: any, next: any) {
   try {
@@ -14,15 +15,19 @@ export async function errorHandling(ctx: any, next: any) {
       console.error(err)
     }

-    const error = errors.getPublicError(err)
-    const body: APIError = {
+    let error: APIError = {
       message: err.message,
       status: status,
       validationErrors: err.validation,
-      error,
+      error: errors.getPublicError(err),
     }

-    ctx.body = body
+    if (environment.isTest() && ctx.headers["x-budibase-include-stacktrace"]) {
+      // @ts-ignore
+      error.stack = err.stack
+    }
+
+    ctx.body = error
   }
 }

View file

@@ -84,16 +84,18 @@ export function getBuiltinRoles(): { [key: string]: RoleDoc } {
   return cloneDeep(BUILTIN_ROLES)
 }

-export const BUILTIN_ROLE_ID_ARRAY = Object.values(BUILTIN_ROLES).map(
-  role => role._id
-)
-
-export const BUILTIN_ROLE_NAME_ARRAY = Object.values(BUILTIN_ROLES).map(
-  role => role.name
-)
-
-export function isBuiltin(role?: string) {
-  return BUILTIN_ROLE_ID_ARRAY.some(builtin => role?.includes(builtin))
+export function isBuiltin(role: string) {
+  return getBuiltinRole(role) !== undefined
+}
+
+export function getBuiltinRole(roleId: string): Role | undefined {
+  const role = Object.values(BUILTIN_ROLES).find(role =>
+    roleId.includes(role._id)
+  )
+  if (!role) {
+    return undefined
+  }
+  return cloneDeep(role)
 }

 /**
@@ -123,7 +125,7 @@ export function builtinRoleToNumber(id?: string) {
 /**
  * Converts any role to a number, but has to be async to get the roles from db.
  */
-export async function roleToNumber(id?: string) {
+export async function roleToNumber(id: string) {
   if (isBuiltin(id)) {
     return builtinRoleToNumber(id)
   }
@@ -131,7 +133,7 @@ export async function roleToNumber(id?: string) {
     defaultPublic: true,
   })) as RoleDoc[]
   for (let role of hierarchy) {
-    if (isBuiltin(role?.inherits)) {
+    if (role?.inherits && isBuiltin(role.inherits)) {
       return builtinRoleToNumber(role.inherits) + 1
     }
   }
@@ -161,35 +163,28 @@ export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string {
  * @returns The role object, which may contain an "inherits" property.
  */
 export async function getRole(
-  roleId?: string,
+  roleId: string,
   opts?: { defaultPublic?: boolean }
-): Promise<RoleDoc | undefined> {
-  if (!roleId) {
-    return undefined
-  }
-  let role: any = {}
+): Promise<RoleDoc> {
   // built in roles mostly come from the in-code implementation,
   // but can be extended by a doc stored about them (e.g. permissions)
-  if (isBuiltin(roleId)) {
-    role = cloneDeep(
-      Object.values(BUILTIN_ROLES).find(role => role._id === roleId)
-    )
-  } else {
+  let role: RoleDoc | undefined = getBuiltinRole(roleId)
+  if (!role) {
     // make sure has the prefix (if it has it then it won't be added)
     roleId = prefixRoleID(roleId)
   }
   try {
     const db = getAppDB()
-    const dbRole = await db.get(getDBRoleID(roleId))
-    role = Object.assign(role, dbRole)
+    const dbRole = await db.get<RoleDoc>(getDBRoleID(roleId))
+    role = Object.assign(role || {}, dbRole)
     // finalise the ID
-    role._id = getExternalRoleID(role._id, role.version)
+    role._id = getExternalRoleID(role._id!, role.version)
   } catch (err) {
     if (!isBuiltin(roleId) && opts?.defaultPublic) {
       return cloneDeep(BUILTIN_ROLES.PUBLIC)
     }
     // only throw an error if there is no role at all
-    if (Object.keys(role).length === 0) {
+    if (!role || Object.keys(role).length === 0) {
       throw err
     }
   }
@@ -200,7 +195,7 @@ export async function getRole(
  * Simple function to get all the roles based on the top level user role ID.
  */
 async function getAllUserRoles(
-  userRoleId?: string,
+  userRoleId: string,
   opts?: { defaultPublic?: boolean }
 ): Promise<RoleDoc[]> {
   // admins have access to all roles
@@ -226,7 +221,7 @@ async function getAllUserRoles(
 }

 export async function getUserRoleIdHierarchy(
-  userRoleId?: string
+  userRoleId: string
 ): Promise<string[]> {
   const roles = await getUserRoleHierarchy(userRoleId)
   return roles.map(role => role._id!)
@@ -241,7 +236,7 @@ export async function getUserRoleIdHierarchy(
  * highest level of access and the last being the lowest level.
  */
 export async function getUserRoleHierarchy(
-  userRoleId?: string,
+  userRoleId: string,
   opts?: { defaultPublic?: boolean }
 ) {
   // special case, if they don't have a role then they are a public user
@@ -265,9 +260,9 @@ export function checkForRoleResourceArray(
   return rolePerms
 }

-export async function getAllRoleIds(appId?: string) {
+export async function getAllRoleIds(appId: string): Promise<string[]> {
   const roles = await getAllRoles(appId)
-  return roles.map(role => role._id)
+  return roles.map(role => role._id!)
 }

 /**

View file

@@ -147,6 +147,12 @@ export function createTablesStore() {
       if (indexes) {
         draft.indexes = indexes
       }
+      // Add object to indicate if column is being added
+      if (draft.schema[field.name] === undefined) {
+        draft._add = {
+          name: field.name,
+        }
+      }
       draft.schema = {
         ...draft.schema,
         [field.name]: cloneDeep(field),

View file

@@ -1,7 +1,7 @@
 <script>
   import { CoreSelect, CoreMultiselect } from "@budibase/bbui"
   import { fetchData, Utils } from "@budibase/frontend-core"
-  import { getContext } from "svelte"
+  import { getContext, onMount } from "svelte"
   import Field from "./Field.svelte"
   import { FieldTypes } from "../../../constants"
@@ -28,6 +28,7 @@
   let tableDefinition
   let searchTerm
   let open
+  let initialValue

   $: type =
     datasourceType === "table" ? FieldTypes.LINK : FieldTypes.BB_REFERENCE
@@ -109,7 +110,11 @@
   }

   $: forceFetchRows(filter)
-  $: debouncedFetchRows(searchTerm, primaryDisplay, defaultValue)
+  $: debouncedFetchRows(
+    searchTerm,
+    primaryDisplay,
+    initialValue || defaultValue
+  )

   const forceFetchRows = async () => {
     // if the filter has changed, then we need to reset the options, clear the selection, and re-fetch
@@ -127,9 +132,13 @@
     if (allRowsFetched || !primaryDisplay) {
       return
     }
-    if (defaultVal && !optionsObj[defaultVal]) {
+    // must be an array
+    if (defaultVal && !Array.isArray(defaultVal)) {
+      defaultVal = defaultVal.split(",")
+    }
+    if (defaultVal && defaultVal.some(val => !optionsObj[val])) {
       await fetch.update({
-        query: { equal: { _id: defaultVal } },
+        query: { oneOf: { _id: defaultVal } },
       })
     }
@@ -202,6 +211,16 @@
       fetch.nextPage()
     }
   }
+
+  onMount(() => {
+    // if the form is in 'Update' mode, then we need to fetch the matching row so that the value is correctly set
+    if (fieldState?.value) {
+      initialValue =
+        fieldSchema?.relationshipType !== "one-to-many"
+          ? flatten(fieldState?.value) ?? []
+          : flatten(fieldState?.value)?.[0]
+    }
+  })
 </script>

 <Field

View file

@@ -59,13 +59,13 @@
       isReadonly: () => readonly,
       getType: () => column.schema.type,
       getValue: () => row[column.name],
-      setValue: (value, options = { save: true }) => {
+      setValue: (value, options = { apply: true }) => {
         validation.actions.setError(cellId, null)
         updateValue({
           rowId: row._id,
           column: column.name,
           value,
-          save: options?.save,
+          apply: options?.apply,
         })
       },
     }

View file

@@ -217,14 +217,14 @@
     const type = $focusedCellAPI.getType()
     if (type === "number" && keyCodeIsNumber(keyCode)) {
       // Update the value locally but don't save it yet
-      $focusedCellAPI.setValue(parseInt(key), { save: false })
+      $focusedCellAPI.setValue(parseInt(key), { apply: false })
       $focusedCellAPI.focus()
     } else if (
       ["string", "barcodeqr", "longform"].includes(type) &&
       (keyCodeIsLetter(keyCode) || keyCodeIsNumber(keyCode))
     ) {
       // Update the value locally but don't save it yet
-      $focusedCellAPI.setValue(key, { save: false })
+      $focusedCellAPI.setValue(key, { apply: false })
       $focusedCellAPI.focus()
     }
   }

View file

@@ -327,29 +327,31 @@ export const createActions = context => {
     get(fetch)?.getInitialData()
   }

-  // Patches a row with some changes
-  const updateRow = async (rowId, changes, options = { save: true }) => {
+  // Checks if a changeset for a row actually mutates the row or not
+  const changesAreValid = (row, changes) => {
+    const columns = Object.keys(changes || {})
+    if (!row || !columns.length) {
+      return false
+    }
+    // Ensure there is at least 1 column that creates a difference
+    return columns.some(column => row[column] !== changes[column])
+  }
+
+  // Patches a row with some changes in local state, and returns whether a
+  // valid pending change was made or not
+  const stashRowChanges = (rowId, changes) => {
     const $rows = get(rows)
     const $rowLookupMap = get(rowLookupMap)
     const index = $rowLookupMap[rowId]
     const row = $rows[index]
-    if (index == null || !Object.keys(changes || {}).length) {
-      return
-    }
-
-    // Abandon if no changes
-    let same = true
-    for (let column of Object.keys(changes)) {
-      if (row[column] !== changes[column]) {
-        same = false
-        break
-      }
-    }
-    if (same) {
-      return
-    }
-
-    // Immediately update state so that the change is reflected
+
+    // Check this is a valid change
+    if (!row || !changesAreValid(row, changes)) {
+      return false
+    }
+
+    // Add change to cache
     rowChangeCache.update(state => ({
       ...state,
       [rowId]: {
@@ -357,26 +359,30 @@ export const createActions = context => {
         ...changes,
       },
     }))
+    return true
+  }

-    // Stop here if we don't want to persist the change
-    if (!options?.save) {
+  // Saves any pending changes to a row
+  const applyRowChanges = async rowId => {
+    const $rows = get(rows)
+    const $rowLookupMap = get(rowLookupMap)
+    const index = $rowLookupMap[rowId]
+    const row = $rows[index]
+    if (row == null) {
       return
     }

     // Save change
     try {
-      inProgressChanges.update(state => ({
-        ...state,
-        [rowId]: true,
-      }))
+      // Mark as in progress
+      inProgressChanges.update(state => ({ ...state, [rowId]: true }))

       // Update row
-      const saved = await datasource.actions.updateRow({
-        ...cleanRow(row),
-        ...get(rowChangeCache)[rowId],
-      })
+      const changes = get(rowChangeCache)[rowId]
+      const newRow = { ...cleanRow(row), ...changes }
+      const saved = await datasource.actions.updateRow(newRow)

-      // Update state after a successful change
+      // Update row state after a successful change
       if (saved?._id) {
         rows.update(state => {
           state[index] = saved
@@ -386,6 +392,8 @@ export const createActions = context => {
         // Handle users table edge case
         await refreshRow(saved.id)
       }
+
+      // Wipe row change cache now that we've saved the row
       rowChangeCache.update(state => {
         delete state[rowId]
         return state
@@ -393,15 +401,17 @@ export const createActions = context => {
     } catch (error) {
       handleValidationError(rowId, error)
     }
-    inProgressChanges.update(state => ({
-      ...state,
-      [rowId]: false,
-    }))
+
+    // Mark as completed
+    inProgressChanges.update(state => ({ ...state, [rowId]: false }))
   }

   // Updates a value of a row
-  const updateValue = async ({ rowId, column, value, save = true }) => {
-    return await updateRow(rowId, { [column]: value }, { save })
+  const updateValue = async ({ rowId, column, value, apply = true }) => {
+    const success = stashRowChanges(rowId, { [column]: value })
+    if (success && apply) {
+      await applyRowChanges(rowId)
+    }
   }

   // Deletes an array of rows
@@ -411,9 +421,7 @@
     }

     // Actually delete rows
-    rowsToDelete.forEach(row => {
-      delete row.__idx
-    })
+    rowsToDelete.forEach(row => delete row.__idx)
     await datasource.actions.deleteRows(rowsToDelete)

     // Update state
@@ -433,7 +441,7 @@
       newRow = newRows[i]

       // Ensure we have a unique _id.
-      // This means generating one for non DS+, overriting any that may already
+      // This means generating one for non DS+, overwriting any that may already
       // exist as we cannot allow duplicates.
       if (!$isDatasourcePlus) {
         newRow._id = Helpers.uuid()
@@ -494,7 +502,7 @@
       duplicateRow,
       getRow,
       updateValue,
-      updateRow,
+      applyRowChanges,
       deleteRows,
       hasRow,
       loadNextPage,
@@ -508,7 +516,14 @@
 }

 export const initialise = context => {
-  const { rowChangeCache, inProgressChanges, previousFocusedRowId } = context
+  const {
+    rowChangeCache,
+    inProgressChanges,
+    previousFocusedRowId,
+    previousFocusedCellId,
+    rows,
+    validation,
+  } = context

   // Wipe the row change cache when changing row
   previousFocusedRowId.subscribe(id => {
@@ -519,4 +534,15 @@
       })
     }
   })
+
+  // Ensure any unsaved changes are saved when changing cell
+  previousFocusedCellId.subscribe(async id => {
+    const rowId = id?.split("-")[0]
+    const hasErrors = validation.actions.rowHasErrors(rowId)
+    const hasChanges = Object.keys(get(rowChangeCache)[rowId] || {}).length > 0
+    const isSavingChanges = get(inProgressChanges)[rowId]
+    if (rowId && !hasErrors && hasChanges && !isSavingChanges) {
+      await rows.actions.applyRowChanges(rowId)
+    }
+  })
 }

View file

@@ -16,6 +16,7 @@ export const createStores = context => {
   const hoveredRowId = writable(null)
   const rowHeight = writable(get(props).fixedRowHeight || DefaultRowHeight)
   const previousFocusedRowId = writable(null)
+  const previousFocusedCellId = writable(null)
   const gridFocused = writable(false)
   const isDragging = writable(false)
   const buttonColumnWidth = writable(0)
@@ -48,6 +49,7 @@
     focusedCellAPI,
     focusedRowId,
     previousFocusedRowId,
+    previousFocusedCellId,
     hoveredRowId,
     rowHeight,
     gridFocused,
@@ -129,6 +131,7 @@ export const initialise = context => {
   const {
     focusedRowId,
     previousFocusedRowId,
+    previousFocusedCellId,
     rows,
     focusedCellId,
     selectedRows,
@@ -181,6 +184,13 @@
     lastFocusedRowId = id
   })

+  // Remember the last focused cell ID so that we can store the previous one
+  let lastFocusedCellId = null
+  focusedCellId.subscribe(id => {
+    previousFocusedCellId.set(lastFocusedCellId)
+    lastFocusedCellId = id
+  })
+
   // Remove hovered row when a cell is selected
   focusedCellId.subscribe(cell => {
     if (cell && get(hoveredRowId)) {

View file

@@ -1,8 +1,23 @@
-import { writable, get } from "svelte/store"
+import { writable, get, derived } from "svelte/store"

+// Normally we would break out actions into the explicit "createActions"
+// function, but for validation all these actions are pure so can go into
+// "createStores" instead to make dependency ordering simpler
 export const createStores = () => {
   const validation = writable({})

+  // Derive which rows have errors so that we can use that info later
+  const rowErrorMap = derived(validation, $validation => {
+    let map = {}
+    Object.entries($validation).forEach(([key, error]) => {
+      // Extract row ID from all errored cell IDs
+      if (error) {
+        map[key.split("-")[0]] = true
+      }
+    })
+    return map
+  })
+
   const setError = (cellId, error) => {
     if (!cellId) {
       return
@@ -13,11 +28,16 @@ export const createStores = () => {
     }))
   }

+  const rowHasErrors = rowId => {
+    return get(rowErrorMap)[rowId]
+  }
+
   return {
     validation: {
       ...validation,
       actions: {
         setError,
+        rowHasErrors,
       },
     },
   }

@@ -1 +1 @@
-Subproject commit 183b35d3acd42433dcb2d32bcd89a36abe13afec
+Subproject commit 22a278da720d92991dabdcd4cb6c96e7abe29781

View file

@@ -10,6 +10,11 @@ CREATE TABLE Persons (
   City varchar(255),
   PRIMARY KEY (PersonID)
 );
+CREATE TABLE Person (
+  PersonID int NOT NULL AUTO_INCREMENT,
+  Name varchar(255),
+  PRIMARY KEY (PersonID)
+);
 CREATE TABLE Tasks (
   TaskID int NOT NULL AUTO_INCREMENT,
   PersonID INT,
@@ -27,6 +32,7 @@ CREATE TABLE Products (
 );
 INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Mike', 'Hughes', 28.2, '123 Fake Street', 'Belfast', '2021-01-19 03:14:07');
 INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Dave', 'Johnson', 29, '124 Fake Street', 'Belfast', '2022-04-01 00:11:11');
+INSERT INTO Person (Name) VALUES ('Elf');
 INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (1, 'assembling', '2020-01-01');
 INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (2, 'processing', '2019-12-31');
 INSERT INTO Products (name, updated) VALUES ('Meat', '11:00:22'), ('Fruit', '10:00:00');

View file

@@ -7,6 +7,10 @@ import {
   GetResourcePermsResponse,
   ResourcePermissionInfo,
   GetDependantResourcesResponse,
+  AddPermissionResponse,
+  AddPermissionRequest,
+  RemovePermissionRequest,
+  RemovePermissionResponse,
 } from "@budibase/types"
 import { getRoleParams } from "../../db/utils"
 import {
@@ -16,9 +20,9 @@ import {
 import { removeFromArray } from "../../utilities"
 import sdk from "../../sdk"

-const PermissionUpdateType = {
-  REMOVE: "remove",
-  ADD: "add",
+const enum PermissionUpdateType {
+  REMOVE = "remove",
+  ADD = "add",
 }

 const SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS
@@ -39,7 +43,7 @@ async function updatePermissionOnRole(
     resourceId,
     level,
   }: { roleId: string; resourceId: string; level: PermissionLevel },
-  updateType: string
+  updateType: PermissionUpdateType
 ) {
   const allowedAction = await sdk.permissions.resourceActionAllowed({
     resourceId,
@@ -107,11 +111,15 @@ async function updatePermissionOnRole(
   }

   const response = await db.bulkDocs(docUpdates)
-  return response.map((resp: any) => {
+  return response.map(resp => {
     const version = docUpdates.find(role => role._id === resp.id)?.version
-    resp._id = roles.getExternalRoleID(resp.id, version)
-    delete resp.id
-    return resp
+    const _id = roles.getExternalRoleID(resp.id, version)
+    return {
+      _id,
+      rev: resp.rev,
+      error: resp.error,
+      reason: resp.reason,
+    }
   })
 }
@@ -189,13 +197,14 @@ export async function getDependantResources(
   }
 }

-export async function addPermission(ctx: UserCtx) {
-  ctx.body = await updatePermissionOnRole(ctx.params, PermissionUpdateType.ADD)
+export async function addPermission(ctx: UserCtx<void, AddPermissionResponse>) {
+  const params: AddPermissionRequest = ctx.params
+  ctx.body = await updatePermissionOnRole(params, PermissionUpdateType.ADD)
 }

-export async function removePermission(ctx: UserCtx) {
-  ctx.body = await updatePermissionOnRole(
-    ctx.params,
-    PermissionUpdateType.REMOVE
-  )
+export async function removePermission(
+  ctx: UserCtx<void, RemovePermissionResponse>
+) {
+  const params: RemovePermissionRequest = ctx.params
+  ctx.body = await updatePermissionOnRole(params, PermissionUpdateType.REMOVE)
 }

View file

@@ -17,10 +17,12 @@ import {
   QueryPreview,
   QuerySchema,
   FieldType,
-  type ExecuteQueryRequest,
-  type ExecuteQueryResponse,
-  type Row,
+  ExecuteQueryRequest,
+  ExecuteQueryResponse,
+  Row,
   QueryParameter,
+  PreviewQueryRequest,
+  PreviewQueryResponse,
 } from "@budibase/types"
 import { ValidQueryNameRegex, utils as JsonUtils } from "@budibase/shared-core"
@@ -134,14 +136,16 @@
   return requestParameters
 }

-export async function preview(ctx: UserCtx) {
+export async function preview(
+  ctx: UserCtx<PreviewQueryRequest, PreviewQueryResponse>
+) {
   const { datasource, envVars } = await sdk.datasources.getWithEnvVars(
     ctx.request.body.datasourceId
   )
-  const query: QueryPreview = ctx.request.body
   // preview may not have a queryId as it hasn't been saved, but if it does
   // this stops dynamic variables from calling the same query
-  const { fields, parameters, queryVerb, transformer, queryId, schema } = query
+  const { fields, parameters, queryVerb, transformer, queryId, schema } =
+    ctx.request.body

   let existingSchema = schema
   if (queryId && !existingSchema) {
@@ -266,9 +270,7 @@ export async function preview(ctx: UserCtx) {
       },
     }

-    const { rows, keys, info, extra } = (await Runner.run(
-      inputs
-    )) as QueryResponse
+    const { rows, keys, info, extra } = await Runner.run<QueryResponse>(inputs)
     const { previewSchema, nestedSchemaFields } = getSchemaFields(rows, keys)

     // if existing schema, update to include any previous schema keys
@@ -281,7 +283,7 @@
     }
     // remove configuration before sending event
     delete datasource.config
-    await events.query.previewed(datasource, query)
+    await events.query.previewed(datasource, ctx.request.body)
     ctx.body = {
       rows,
       nestedSchemaFields,
@@ -295,7 +297,10 @@
 }

 async function execute(
-  ctx: UserCtx<ExecuteQueryRequest, ExecuteQueryResponse | Row[]>,
+  ctx: UserCtx<
+    ExecuteQueryRequest,
+    ExecuteQueryResponse | Record<string, any>[]
+  >,
   opts: any = { rowsOnly: false, isAutomation: false }
 ) {
   const db = context.getAppDB()
@@ -350,18 +355,23 @@ async function execute(
   }
 }

-export async function executeV1(ctx: UserCtx) {
+export async function executeV1(
+  ctx: UserCtx<ExecuteQueryRequest, Record<string, any>[]>
+) {
   return execute(ctx, { rowsOnly: true, isAutomation: false })
 }

 export async function executeV2(
-  ctx: UserCtx,
+  ctx: UserCtx<
+    ExecuteQueryRequest,
+    ExecuteQueryResponse | Record<string, any>[]
+  >,
   { isAutomation }: { isAutomation?: boolean } = {}
 ) {
   return execute(ctx, { rowsOnly: false, isAutomation })
 }

-const removeDynamicVariables = async (queryId: any) => {
+const removeDynamicVariables = async (queryId: string) => {
   const db = context.getAppDB()
   const query = await db.get<Query>(queryId)
   const datasource = await sdk.datasources.get(query.datasourceId)
@@ -384,7 +394,7 @@ const removeDynamicVariables = async (queryId: any) => {

 export async function destroy(ctx: UserCtx) {
   const db = context.getAppDB()
-  const queryId = ctx.params.queryId
+  const queryId = ctx.params.queryId as string
   await removeDynamicVariables(queryId)
   const query = await db.get<Query>(queryId)
   const datasource = await sdk.datasources.get(query.datasourceId)

View file

@@ -7,8 +7,14 @@ import {
 } from "@budibase/backend-core"
 import { getUserMetadataParams, InternalTables } from "../../db/utils"
 import {
+  AccessibleRolesResponse,
   Database,
+  DestroyRoleResponse,
+  FetchRolesResponse,
+  FindRoleResponse,
   Role,
+  SaveRoleRequest,
+  SaveRoleResponse,
   UserCtx,
   UserMetadata,
   UserRoles,
@@ -25,43 +31,36 @@ async function updateRolesOnUserTable(
   db: Database,
   roleId: string,
   updateOption: string,
-  roleVersion: string | undefined
+  roleVersion?: string
 ) {
   const table = await sdk.tables.getTable(InternalTables.USER_METADATA)
-  const schema = table.schema
+  const constraints = table.schema.roleId?.constraints
+  if (!constraints) {
+    return
+  }
+  const updatedRoleId =
+    roleVersion === roles.RoleIDVersion.NAME
+      ? roles.getExternalRoleID(roleId, roleVersion)
+      : roleId
+  const indexOfRoleId = constraints.inclusion!.indexOf(updatedRoleId)
   const remove = updateOption === UpdateRolesOptions.REMOVED
-  let updated = false
-  for (let prop of Object.keys(schema)) {
-    if (prop === "roleId") {
-      updated = true
-      const constraints = schema[prop].constraints!
-      const updatedRoleId =
-        roleVersion === roles.RoleIDVersion.NAME
-          ? roles.getExternalRoleID(roleId, roleVersion)
-          : roleId
-      const indexOfRoleId = constraints.inclusion!.indexOf(updatedRoleId)
-      if (remove && indexOfRoleId !== -1) {
-        constraints.inclusion!.splice(indexOfRoleId, 1)
-      } else if (!remove && indexOfRoleId === -1) {
-        constraints.inclusion!.push(updatedRoleId)
-      }
-      break
-    }
-  }
-  if (updated) {
-    await db.put(table)
+  if (remove && indexOfRoleId !== -1) {
+    constraints.inclusion!.splice(indexOfRoleId, 1)
+  } else if (!remove && indexOfRoleId === -1) {
+    constraints.inclusion!.push(updatedRoleId)
   }
+  await db.put(table)
 }

-export async function fetch(ctx: UserCtx) {
+export async function fetch(ctx: UserCtx<void, FetchRolesResponse>) {
   ctx.body = await roles.getAllRoles()
 }

-export async function find(ctx: UserCtx) {
+export async function find(ctx: UserCtx<void, FindRoleResponse>) {
   ctx.body = await roles.getRole(ctx.params.roleId)
 }

-export async function save(ctx: UserCtx) {
+export async function save(ctx: UserCtx<SaveRoleRequest, SaveRoleResponse>) {
   const db = context.getAppDB()
   let { _id, name, inherits, permissionId, version } = ctx.request.body
   let isCreate = false
@@ -109,9 +108,9 @@ export async function save(ctx: UserCtx) {
   ctx.body = role
 }

-export async function destroy(ctx: UserCtx) {
+export async function destroy(ctx: UserCtx<void, DestroyRoleResponse>) {
   const db = context.getAppDB()
-  let roleId = ctx.params.roleId
+  let roleId = ctx.params.roleId as string
   if (roles.isBuiltin(roleId)) {
     ctx.throw(400, "Cannot delete builtin role.")
   } else {
@@ -144,14 +143,18 @@ export async function destroy(ctx: UserCtx) {
   ctx.status = 200
 }

-export async function accessible(ctx: UserCtx) {
+export async function accessible(ctx: UserCtx<void, AccessibleRolesResponse>) {
   let roleId = ctx.user?.roleId
   if (!roleId) {
     roleId = roles.BUILTIN_ROLE_IDS.PUBLIC
   }
   if (ctx.user && sharedSdk.users.isAdminOrBuilder(ctx.user)) {
     const appId = context.getAppId()
-    ctx.body = await roles.getAllRoleIds(appId)
+    if (!appId) {
+      ctx.body = []
+    } else {
+      ctx.body = await roles.getAllRoleIds(appId)
+    }
   } else {
     ctx.body = await roles.getUserRoleIdHierarchy(roleId!)
   }

View file

@@ -63,7 +63,7 @@ export async function fetch(ctx: UserCtx) {
 export async function clientFetch(ctx: UserCtx) {
   const routing = await getRoutingStructure()
   let roleId = ctx.user?.role?._id
-  const roleIds = await roles.getUserRoleIdHierarchy(roleId)
+  const roleIds = roleId ? await roles.getUserRoleIdHierarchy(roleId) : []
   for (let topLevel of Object.values(routing.routes) as any) {
     for (let subpathKey of Object.keys(topLevel.subpaths)) {
       let found = false

View file

@@ -1,12 +1,27 @@
 import {
-  QueryJson,
-  SearchFilters,
-  Table,
-  Row,
+  Datasource,
   DatasourcePlusQueryResponse,
+  Operation,
+  QueryJson,
+  Row,
+  SearchFilters,
 } from "@budibase/types"
-import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils"
+import { getSQLClient } from "../../../sdk/app/rows/utils"
 import { cloneDeep } from "lodash"
+import sdk from "../../../sdk"
+import { makeExternalQuery } from "../../../integrations/base/query"
+import { SqlClient } from "../../../integrations/utils"
+
+const WRITE_OPERATIONS: Operation[] = [
+  Operation.CREATE,
+  Operation.UPDATE,
+  Operation.DELETE,
+]
+const DISABLED_WRITE_CLIENTS: SqlClient[] = [
+  SqlClient.MY_SQL,
+  SqlClient.MS_SQL,
+  SqlClient.ORACLE,
+]

 class CharSequence {
   static alphabet = "abcdefghijklmnopqrstuvwxyz"
@@ -43,6 +58,25 @@ export default class AliasTables {
     this.charSeq = new CharSequence()
   }

+  isAliasingEnabled(json: QueryJson, datasource: Datasource) {
+    const fieldLength = json.resource?.fields?.length
+    if (!fieldLength || fieldLength <= 0) {
+      return false
+    }
+    try {
+      const sqlClient = getSQLClient(datasource)
+      const isWrite = WRITE_OPERATIONS.includes(json.endpoint.operation)
+      const isDisabledClient = DISABLED_WRITE_CLIENTS.includes(sqlClient)
+      if (isWrite && isDisabledClient) {
+        return false
+      }
+    } catch (err) {
+      // if we can't get an SQL client, we can't alias
+      return false
+    }
+    return true
+  }
+
   getAlias(tableName: string) {
     if (this.aliases[tableName]) {
       return this.aliases[tableName]
@@ -62,7 +96,11 @@
       if (idx === -1 || idx > 1) {
         return
       }
-      return Math.abs(tableName.length - name.length) <= 2
+      // this might look a bit mad, but the idea is if the field is wrapped, say in "", `` or []
+      // then the idx of the table name will be 1, and we should allow for it ending in a closing
+      // character - otherwise it should be the full length if the index is zero
+      const allowedCharacterDiff = idx * 2
+      return Math.abs(tableName.length - name.length) <= allowedCharacterDiff
     })
     if (foundTableName) {
       const aliasedTableName = tableName.replace(
@@ -107,57 +145,57 @@
   }

   async queryWithAliasing(json: QueryJson): DatasourcePlusQueryResponse {
-    json = cloneDeep(json)
-    const aliasTable = (table: Table) => ({
-      ...table,
-      name: this.getAlias(table.name),
-    })
-    // run through the query json to update anywhere a table may be used
-    if (json.resource?.fields) {
-      json.resource.fields = json.resource.fields.map(field =>
-        this.aliasField(field)
-      )
-    }
-    if (json.filters) {
-      for (let [filterKey, filter] of Object.entries(json.filters)) {
-        if (typeof filter !== "object") {
-          continue
-        }
-        const aliasedFilters: typeof filter = {}
-        for (let key of Object.keys(filter)) {
-          aliasedFilters[this.aliasField(key)] = filter[key]
-        }
-        json.filters[filterKey as keyof SearchFilters] = aliasedFilters
+    const datasourceId = json.endpoint.datasourceId
+    const datasource = await sdk.datasources.get(datasourceId)
+
+    const aliasingEnabled = this.isAliasingEnabled(json, datasource)
+    if (aliasingEnabled) {
+      json = cloneDeep(json)
+      // run through the query json to update anywhere a table may be used
+      if (json.resource?.fields) {
+        json.resource.fields = json.resource.fields.map(field =>
+          this.aliasField(field)
+        )
       }
-    }
-    if (json.relationships) {
-      json.relationships = json.relationships.map(relationship => ({
-        ...relationship,
-        aliases: this.aliasMap([
-          relationship.through,
-          relationship.tableName,
-          json.endpoint.entityId,
-        ]),
-      }))
-    }
-    if (json.meta?.table) {
-      json.meta.table = aliasTable(json.meta.table)
-    }
-    if (json.meta?.tables) {
-      const aliasedTables: Record<string, Table> = {}
-      for (let [tableName, table] of Object.entries(json.meta.tables)) {
-        aliasedTables[this.getAlias(tableName)] = aliasTable(table)
+      if (json.filters) {
+        for (let [filterKey, filter] of Object.entries(json.filters)) {
+          if (typeof filter !== "object") {
+            continue
+          }
+          const aliasedFilters: typeof filter = {}
+          for (let key of Object.keys(filter)) {
+            aliasedFilters[this.aliasField(key)] = filter[key]
+          }
+          json.filters[filterKey as keyof SearchFilters] = aliasedFilters
+        }
       }
-      json.meta.tables = aliasedTables
+      if (json.meta?.table) {
+        this.getAlias(json.meta.table.name)
+      }
+      if (json.meta?.tables) {
+        Object.keys(json.meta.tables).forEach(tableName =>
+          this.getAlias(tableName)
+        )
+      }
+      if (json.relationships) {
+        json.relationships = json.relationships.map(relationship => ({
+          ...relationship,
+          aliases: this.aliasMap([
+            relationship.through,
+            relationship.tableName,
+            json.endpoint.entityId,
+          ]),
+        }))
+      }
+      // invert and return
+      const invertedTableAliases: Record<string, string> = {}
+      for (let [key, value] of Object.entries(this.tableAliases)) {
+        invertedTableAliases[value] = key
+      }
+      json.tableAliases = invertedTableAliases
     }
-    // invert and return
-    const invertedTableAliases: Record<string, string> = {}
-    for (let [key, value] of Object.entries(this.tableAliases)) {
-      invertedTableAliases[value] = key
-    }
-    json.tableAliases = invertedTableAliases

-    const response = await getDatasourceAndQuery(json)
-    if (Array.isArray(response)) {
+    const response = await makeExternalQuery(datasource, json)
+    if (Array.isArray(response) && aliasingEnabled) {
       return this.reverse(response)
     } else {
       return response

View file

@@ -211,7 +211,7 @@ export async function validate(ctx: Ctx<Row, ValidateResponse>) {
   }
 }

-export async function fetchEnrichedRow(ctx: any) {
+export async function fetchEnrichedRow(ctx: UserCtx<void, Row>) {
   const tableId = utils.getTableId(ctx)
   ctx.body = await pickApi(tableId).fetchEnrichedRow(ctx)
 }

View file

@@ -6,6 +6,7 @@ import {
   BulkImportRequest,
   BulkImportResponse,
   Operation,
+  RenameColumn,
   SaveTableRequest,
   SaveTableResponse,
   Table,
@@ -25,9 +26,12 @@ function getDatasourceId(table: Table) {
   return breakExternalTableId(table._id).datasourceId
 }

-export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
+export async function save(
+  ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
+  renaming?: RenameColumn
+) {
   const inputs = ctx.request.body
-  const renaming = inputs?._rename
+  const adding = inputs?._add
   // can't do this right now
   delete inputs.rows
   const tableId = ctx.request.body._id
@@ -40,7 +44,7 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
   const { datasource, table } = await sdk.tables.external.save(
     datasourceId!,
     inputs,
-    { tableId, renaming }
+    { tableId, renaming, adding }
   )
   builderSocket?.emitDatasourceUpdate(ctx, datasource)
   return table

View file

@@ -74,8 +74,15 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
   const appId = ctx.appId
   const table = ctx.request.body
   const isImport = table.rows
+  const renaming = ctx.request.body._rename

-  let savedTable = await pickApi({ table }).save(ctx)
+  const api = pickApi({ table })
+  // do not pass _rename or _add if saving to CouchDB
+  if (api === internal) {
+    delete ctx.request.body._add
+    delete ctx.request.body._rename
+  }
+  let savedTable = await api.save(ctx, renaming)
   if (!table._id) {
     await events.table.created(savedTable)
     savedTable = sdk.tables.enrichViewSchemas(savedTable)

View file

@@ -12,11 +12,12 @@ import {
 } from "@budibase/types"
 import sdk from "../../../sdk"

-export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
+export async function save(
+  ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
+  renaming?: RenameColumn
+) {
   const { rows, ...rest } = ctx.request.body
-  let tableToSave: Table & {
-    _rename?: RenameColumn
-  } = {
+  let tableToSave: Table = {
     _id: generateTableID(),
     ...rest,
     // Ensure these fields are populated, even if not sent in the request
@@ -28,15 +29,12 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
     tableToSave.views = {}
   }

-  const renaming = tableToSave._rename
-  delete tableToSave._rename
-
   try {
     const { table } = await sdk.tables.internal.save(tableToSave, {
       user: ctx.user,
       rowsToImport: rows,
       tableId: ctx.request.body._id,
-      renaming: renaming,
+      renaming,
     })

     return table

View file

@@ -13,7 +13,7 @@ describe("/api/keys", () => {
   describe("fetch", () => {
     it("should allow fetching", async () => {
-      await setup.switchToSelfHosted(async () => {
+      await config.withEnv({ SELF_HOSTED: "true" }, async () => {
        const res = await request
          .get(`/api/keys`)
          .set(config.defaultHeaders())
@@ -34,7 +34,7 @@
   describe("update", () => {
     it("should allow updating a value", async () => {
-      await setup.switchToSelfHosted(async () => {
+      await config.withEnv({ SELF_HOSTED: "true" }, async () => {
        const res = await request
          .put(`/api/keys/TEST`)
          .send({

View file

@@ -184,7 +184,7 @@ describe("/applications", () => {
     it("app should not sync if production", async () => {
       const { message } = await config.api.application.sync(
         app.appId.replace("_dev", ""),
-        { statusCode: 400 }
+        { status: 400 }
       )

       expect(message).toEqual(
@@ -248,4 +248,18 @@
       expect(devLogs.data.length).toBe(0)
     })
   })
+
+  describe("permissions", () => {
+    it("should only return apps a user has access to", async () => {
+      const user = await config.createUser({
+        builder: { global: false },
+        admin: { global: false },
+      })
+
+      await config.withUser(user, async () => {
+        const apps = await config.api.application.fetch()
+        expect(apps).toHaveLength(0)
+      })
+    })
+  })
 })

View file

@@ -29,7 +29,7 @@ describe("/api/applications/:appId/sync", () => {
     let resp = (await config.api.attachment.process(
       "ohno.exe",
       Buffer.from([0]),
-      { expectStatus: 400 }
+      { status: 400 }
     )) as unknown as APIError
     expect(resp.message).toContain("invalid extension")
   })
@@ -40,7 +40,7 @@
     let resp = (await config.api.attachment.process(
       "OHNO.EXE",
       Buffer.from([0]),
-      { expectStatus: 400 }
+      { status: 400 }
     )) as unknown as APIError
     expect(resp.message).toContain("invalid extension")
   })
@@ -51,7 +51,7 @@
       undefined as any,
       undefined as any,
       {
-        expectStatus: 400,
+        status: 400,
       }
     )) as unknown as APIError
     expect(resp.message).toContain("No file provided")

View file

@@ -19,11 +19,8 @@ describe("/backups", () => {
   describe("/api/backups/export", () => {
     it("should be able to export app", async () => {
-      const { body, headers } = await config.api.backup.exportBasicBackup(
-        config.getAppId()!
-      )
+      const body = await config.api.backup.exportBasicBackup(config.getAppId()!)
       expect(body instanceof Buffer).toBe(true)
-      expect(headers["content-type"]).toEqual("application/gzip")
       expect(events.app.exported).toBeCalledTimes(1)
     })
@@ -38,15 +35,13 @@
     it("should infer the app name from the app", async () => {
       tk.freeze(mocks.date.MOCK_DATE)

-      const { headers } = await config.api.backup.exportBasicBackup(
-        config.getAppId()!
-      )
-
-      expect(headers["content-disposition"]).toEqual(
-        `attachment; filename="${
-          config.getApp().name
-        }-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`
-      )
+      await config.api.backup.exportBasicBackup(config.getAppId()!, {
+        headers: {
+          "content-disposition": `attachment; filename="${
+            config.getApp().name
+          }-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`,
+        },
+      })
     })
   })

View file

@@ -45,7 +45,7 @@ describe("/permission", () => {
     table = (await config.createTable()) as typeof table
     row = await config.createRow()
     view = await config.api.viewV2.create({ tableId: table._id })
-    perms = await config.api.permission.set({
+    perms = await config.api.permission.add({
       roleId: STD_ROLE_ID,
       resourceId: table._id,
       level: PermissionLevel.READ,
@@ -88,13 +88,13 @@
     })

     it("should get resource permissions with multiple roles", async () => {
-      perms = await config.api.permission.set({
+      perms = await config.api.permission.add({
        roleId: HIGHER_ROLE_ID,
        resourceId: table._id,
        level: PermissionLevel.WRITE,
      })
      const res = await config.api.permission.get(table._id)
-      expect(res.body).toEqual({
+      expect(res).toEqual({
        permissions: {
          read: { permissionType: "EXPLICIT", role: STD_ROLE_ID },
          write: { permissionType: "EXPLICIT", role: HIGHER_ROLE_ID },
@@ -117,16 +117,19 @@
        level: PermissionLevel.READ,
      })

-      const response = await config.api.permission.set(
+      await config.api.permission.add(
        {
          roleId: STD_ROLE_ID,
          resourceId: table._id,
          level: PermissionLevel.EXECUTE,
        },
-        { expectStatus: 403 }
-      )
-      expect(response.message).toEqual(
-        "You are not allowed to 'read' the resource type 'datasource'"
+        {
+          status: 403,
+          body: {
+            message:
+              "You are not allowed to 'read' the resource type 'datasource'",
+          },
+        }
      )
    })
  })
@@ -138,9 +141,9 @@
        resourceId: table._id,
        level: PermissionLevel.READ,
      })
-      expect(res.body[0]._id).toEqual(STD_ROLE_ID)
+      expect(res[0]._id).toEqual(STD_ROLE_ID)
      const permsRes = await config.api.permission.get(table._id)
-      expect(permsRes.body[STD_ROLE_ID]).toBeUndefined()
+      expect(permsRes.permissions[STD_ROLE_ID]).toBeUndefined()
    })

    it("throw forbidden if the action is not allowed for the resource", async () => {
@@ -156,10 +159,13 @@
          resourceId: table._id,
          level: PermissionLevel.EXECUTE,
        },
-        { expectStatus: 403 }
-      )
-      expect(response.body.message).toEqual(
-        "You are not allowed to 'read' the resource type 'datasource'"
+        {
+          status: 403,
+          body: {
+            message:
+              "You are not allowed to 'read' the resource type 'datasource'",
+          },
+        }
      )
    })
  })
@@ -181,10 +187,8 @@
      // replicate changes before checking permissions
      await config.publish()

-      const res = await config.api.viewV2.search(view.id, undefined, {
-        usePublicUser: true,
-      })
-      expect(res.body.rows[0]._id).toEqual(row._id)
+      const res = await config.api.viewV2.publicSearch(view.id)
+      expect(res.rows[0]._id).toEqual(row._id)
    })

    it("should not be able to access the view data when the table is not public and there are no view permissions overrides", async () => {
@@ -196,14 +200,11 @@
      // replicate changes before checking permissions
      await config.publish()

-      await config.api.viewV2.search(view.id, undefined, {
-        expectStatus: 403,
-        usePublicUser: true,
-      })
+      await config.api.viewV2.publicSearch(view.id, undefined, { status: 403 })
    })

    it("should ignore the view permissions if the flag is not on", async () => {
-      await config.api.permission.set({
+      await config.api.permission.add({
        roleId: STD_ROLE_ID,
        resourceId: view.id,
        level: PermissionLevel.READ,
@@ -216,15 +217,14 @@
      // replicate changes before checking permissions
      await config.publish()

-      await config.api.viewV2.search(view.id, undefined, {
-        expectStatus: 403,
-        usePublicUser: true,
+      await config.api.viewV2.publicSearch(view.id, undefined, {
+        status: 403,
      })
    })

    it("should use the view permissions if the flag is on", async () => {
      mocks.licenses.useViewPermissions()
-      await config.api.permission.set({
+      await config.api.permission.add({
        roleId: STD_ROLE_ID,
        resourceId: view.id,
        level: PermissionLevel.READ,
@@ -237,10 +237,8 @@
      // replicate changes before checking permissions
      await config.publish()

-      const res = await config.api.viewV2.search(view.id, undefined, {
-        usePublicUser: true,
-      })
-      expect(res.body.rows[0]._id).toEqual(row._id)
+      const res = await config.api.viewV2.publicSearch(view.id)
+      expect(res.rows[0]._id).toEqual(row._id)
    })

    it("shouldn't allow writing from a public user", async () => {
@@ -277,7 +275,7 @@
    const res = await config.api.permission.get(legacyView.name)

-    expect(res.body).toEqual({
+    expect(res).toEqual({
      permissions: {
        read: {
          permissionType: "BASE",

View file

@@ -157,7 +157,7 @@ describe("/queries", () => {
     })

     it("should find a query in cloud", async () => {
-      await setup.switchToSelfHosted(async () => {
+      await config.withEnv({ SELF_HOSTED: "true" }, async () => {
         const query = await config.createQuery()
         const res = await request
           .get(`/api/queries/${query._id}`)
@@ -397,15 +397,16 @@
     })

     it("should fail with invalid integration type", async () => {
-      const response = await config.api.datasource.create(
-        {
-          ...basicDatasource().datasource,
-          source: "INVALID_INTEGRATION" as SourceName,
+      const datasource: Datasource = {
+        ...basicDatasource().datasource,
+        source: "INVALID_INTEGRATION" as SourceName,
+      }
+      await config.api.datasource.create(datasource, {
+        status: 500,
+        body: {
+          message: "No datasource implementation found.",
         },
-        { expectStatus: 500, rawResponse: true }
-      )
-      expect(response.body.message).toBe("No datasource implementation found.")
+      })
     })
   })

View file

@@ -93,7 +93,7 @@ describe("/roles", () => {
     it("should be able to get the role with a permission added", async () => {
       const table = await config.createTable()
-      await config.api.permission.set({
+      await config.api.permission.add({
         roleId: BUILTIN_ROLE_IDS.POWER,
         resourceId: table._id,
         level: PermissionLevel.READ,

View file

@@ -7,6 +7,7 @@ import { context, InternalTable, roles, tenancy } from "@budibase/backend-core"
 import { quotas } from "@budibase/pro"
 import {
   AutoFieldSubType,
+  DeleteRow,
   FieldSchema,
   FieldType,
   FieldTypeSubtypes,
@@ -106,9 +107,6 @@ describe.each([
     mocks.licenses.useCloudFree()
   })

-  const loadRow = (id: string, tbl_Id: string, status = 200) =>
-    config.api.row.get(tbl_Id, id, { expectStatus: status })
-
   const getRowUsage = async () => {
     const { total } = await config.doInContext(undefined, () =>
       quotas.getCurrentUsageValues(QuotaUsageType.STATIC, StaticQuotaName.ROWS)
@@ -235,7 +233,7 @@
       const res = await config.api.row.get(tableId, existing._id!)

-      expect(res.body).toEqual({
+      expect(res).toEqual({
         ...existing,
         ...defaultRowFields,
       })
@@ -265,7 +263,7 @@
       await config.createRow()

       await config.api.row.get(tableId, "1234567", {
-        expectStatus: 404,
+        status: 404,
       })
     })
@@ -395,7 +393,7 @@
       const createdRow = await config.createRow(row)
       const id = createdRow._id!

-      const saved = (await loadRow(id, table._id!)).body
+      const saved = await config.api.row.get(table._id!, id)

       expect(saved.stringUndefined).toBe(undefined)
       expect(saved.stringNull).toBe(null)
@@ -476,8 +474,8 @@
       )

       const row = await config.api.row.get(table._id!, createRowResponse._id!)
-      expect(row.body.Story).toBeUndefined()
-      expect(row.body).toEqual({
+      expect(row.Story).toBeUndefined()
+      expect(row).toEqual({
         ...defaultRowFields,
         OrderID: 1111,
         Country: "Aussy",
@@ -524,10 +522,10 @@
       expect(row.name).toEqual("Updated Name")
       expect(row.description).toEqual(existing.description)

-      const savedRow = await loadRow(row._id!, table._id!)
+      const savedRow = await config.api.row.get(table._id!, row._id!)

-      expect(savedRow.body.description).toEqual(existing.description)
-      expect(savedRow.body.name).toEqual("Updated Name")
+      expect(savedRow.description).toEqual(existing.description)
+      expect(savedRow.name).toEqual("Updated Name")
       await assertRowUsage(rowUsage)
     })
@@ -543,7 +541,7 @@
           tableId: table._id!,
           name: 1,
         },
-        { expectStatus: 400 }
+        { status: 400 }
       )
       await assertRowUsage(rowUsage)
@@ -582,8 +580,8 @@
       })

       let getResp = await config.api.row.get(table._id!, row._id!)
-      expect(getResp.body.user1[0]._id).toEqual(user1._id)
-      expect(getResp.body.user2[0]._id).toEqual(user2._id)
+      expect(getResp.user1[0]._id).toEqual(user1._id)
+      expect(getResp.user2[0]._id).toEqual(user2._id)

       let patchResp = await config.api.row.patch(table._id!, {
         _id: row._id!,
@@ -595,8 +593,8 @@
       expect(patchResp.user2[0]._id).toEqual(user2._id)

       getResp = await config.api.row.get(table._id!, row._id!)
-      expect(getResp.body.user1[0]._id).toEqual(user2._id)
-      expect(getResp.body.user2[0]._id).toEqual(user2._id)
+      expect(getResp.user1[0]._id).toEqual(user2._id)
+      expect(getResp.user2[0]._id).toEqual(user2._id)
     })

     it("should be able to update relationships when both columns are same name", async () => {
@@ -609,7 +607,7 @@
         description: "test",
         relationship: [row._id],
       })
-      row = (await config.api.row.get(table._id!, row._id!)).body
+      row = await config.api.row.get(table._id!, row._id!)
       expect(row.relationship.length).toBe(1)
       const resp = await config.api.row.patch(table._id!, {
         _id: row._id!,
@@ -632,8 +630,10 @@
       const createdRow = await config.createRow()
       const rowUsage = await getRowUsage()

-      const res = await config.api.row.delete(table._id!, [createdRow])
-      expect(res.body[0]._id).toEqual(createdRow._id)
+      const res = await config.api.row.bulkDelete(table._id!, {
+        rows: [createdRow],
+      })
+      expect(res[0]._id).toEqual(createdRow._id)
await assertRowUsage(rowUsage - 1) await assertRowUsage(rowUsage - 1)
}) })
}) })
@ -682,10 +682,12 @@ describe.each([
const row2 = await config.createRow() const row2 = await config.createRow()
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
const res = await config.api.row.delete(table._id!, [row1, row2]) const res = await config.api.row.bulkDelete(table._id!, {
rows: [row1, row2],
})
expect(res.body.length).toEqual(2) expect(res.length).toEqual(2)
await loadRow(row1._id!, table._id!, 404) await config.api.row.get(table._id!, row1._id!, { status: 404 })
await assertRowUsage(rowUsage - 2) await assertRowUsage(rowUsage - 2)
}) })
@ -697,14 +699,12 @@ describe.each([
]) ])
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
const res = await config.api.row.delete(table._id!, [ const res = await config.api.row.bulkDelete(table._id!, {
row1, rows: [row1, row2._id!, { _id: row3._id }],
row2._id, })
{ _id: row3._id },
])
expect(res.body.length).toEqual(3) expect(res.length).toEqual(3)
await loadRow(row1._id!, table._id!, 404) await config.api.row.get(table._id!, row1._id!, { status: 404 })
await assertRowUsage(rowUsage - 3) await assertRowUsage(rowUsage - 3)
}) })
@ -712,34 +712,36 @@ describe.each([
const row1 = await config.createRow() const row1 = await config.createRow()
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
const res = await config.api.row.delete(table._id!, row1) const res = await config.api.row.delete(table._id!, row1 as DeleteRow)
expect(res.body.id).toEqual(row1._id) expect(res.id).toEqual(row1._id)
await loadRow(row1._id!, table._id!, 404) await config.api.row.get(table._id!, row1._id!, { status: 404 })
await assertRowUsage(rowUsage - 1) await assertRowUsage(rowUsage - 1)
}) })
it("Should ignore malformed/invalid delete requests", async () => { it("Should ignore malformed/invalid delete requests", async () => {
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
const res = await config.api.row.delete( await config.api.row.delete(table._id!, { not: "valid" } as any, {
table._id!, status: 400,
{ not: "valid" }, body: {
{ expectStatus: 400 } message: "Invalid delete rows request",
) },
expect(res.body.message).toEqual("Invalid delete rows request") })
const res2 = await config.api.row.delete( await config.api.row.delete(table._id!, { rows: 123 } as any, {
table._id!, status: 400,
{ rows: 123 }, body: {
{ expectStatus: 400 } message: "Invalid delete rows request",
) },
expect(res2.body.message).toEqual("Invalid delete rows request") })
const res3 = await config.api.row.delete(table._id!, "invalid", { await config.api.row.delete(table._id!, "invalid" as any, {
expectStatus: 400, status: 400,
body: {
message: "Invalid delete rows request",
},
}) })
expect(res3.body.message).toEqual("Invalid delete rows request")
await assertRowUsage(rowUsage) await assertRowUsage(rowUsage)
}) })
@ -757,16 +759,16 @@ describe.each([
const row = await config.createRow() const row = await config.createRow()
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
const res = await config.api.legacyView.get(table._id!) const rows = await config.api.legacyView.get(table._id!)
expect(res.body.length).toEqual(1) expect(rows.length).toEqual(1)
expect(res.body[0]._id).toEqual(row._id) expect(rows[0]._id).toEqual(row._id)
await assertRowUsage(rowUsage) await assertRowUsage(rowUsage)
}) })
it("should throw an error if view doesn't exist", async () => { it("should throw an error if view doesn't exist", async () => {
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
await config.api.legacyView.get("derp", { expectStatus: 404 }) await config.api.legacyView.get("derp", { status: 404 })
await assertRowUsage(rowUsage) await assertRowUsage(rowUsage)
}) })
@ -781,9 +783,9 @@ describe.each([
const row = await config.createRow() const row = await config.createRow()
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
const res = await config.api.legacyView.get(view.name) const rows = await config.api.legacyView.get(view.name)
expect(res.body.length).toEqual(1) expect(rows.length).toEqual(1)
expect(res.body[0]._id).toEqual(row._id) expect(rows[0]._id).toEqual(row._id)
await assertRowUsage(rowUsage) await assertRowUsage(rowUsage)
}) })
@ -841,8 +843,8 @@ describe.each([
linkedTable._id!, linkedTable._id!,
secondRow._id! secondRow._id!
) )
expect(resBasic.body.link.length).toBe(1) expect(resBasic.link.length).toBe(1)
expect(resBasic.body.link[0]).toEqual({ expect(resBasic.link[0]).toEqual({
_id: firstRow._id, _id: firstRow._id,
primaryDisplay: firstRow.name, primaryDisplay: firstRow.name,
}) })
@ -852,10 +854,10 @@ describe.each([
linkedTable._id!, linkedTable._id!,
secondRow._id! secondRow._id!
) )
expect(resEnriched.body.link.length).toBe(1) expect(resEnriched.link.length).toBe(1)
expect(resEnriched.body.link[0]._id).toBe(firstRow._id) expect(resEnriched.link[0]._id).toBe(firstRow._id)
expect(resEnriched.body.link[0].name).toBe("Test Contact") expect(resEnriched.link[0].name).toBe("Test Contact")
expect(resEnriched.body.link[0].description).toBe("original description") expect(resEnriched.link[0].description).toBe("original description")
await assertRowUsage(rowUsage) await assertRowUsage(rowUsage)
}) })
}) })
@ -880,8 +882,7 @@ describe.each([
], ],
tableId: table._id, tableId: table._id,
}) })
// the environment needs configured for this await config.withEnv({ SELF_HOSTED: "true" }, async () => {
await setup.switchToSelfHosted(async () => {
return context.doInAppContext(config.getAppId(), async () => { return context.doInAppContext(config.getAppId(), async () => {
const enriched = await outputProcessing(table, [row]) const enriched = await outputProcessing(table, [row])
expect((enriched as Row[])[0].attachment[0].url).toBe( expect((enriched as Row[])[0].attachment[0].url).toBe(
@ -903,7 +904,7 @@ describe.each([
const res = await config.api.row.exportRows(table._id!, { const res = await config.api.row.exportRows(table._id!, {
rows: [existing._id!], rows: [existing._id!],
}) })
const results = JSON.parse(res.text) const results = JSON.parse(res)
expect(results.length).toEqual(1) expect(results.length).toEqual(1)
const row = results[0] const row = results[0]
@ -922,7 +923,7 @@ describe.each([
rows: [existing._id!], rows: [existing._id!],
columns: ["_id"], columns: ["_id"],
}) })
const results = JSON.parse(res.text) const results = JSON.parse(res)
expect(results.length).toEqual(1) expect(results.length).toEqual(1)
const row = results[0] const row = results[0]
@ -1000,7 +1001,7 @@ describe.each([
}) })
const row = await config.api.row.get(table._id!, newRow._id!) const row = await config.api.row.get(table._id!, newRow._id!)
expect(row.body).toEqual({ expect(row).toEqual({
name: data.name, name: data.name,
surname: data.surname, surname: data.surname,
address: data.address, address: data.address,
@ -1010,9 +1011,9 @@ describe.each([
id: newRow.id, id: newRow.id,
...defaultRowFields, ...defaultRowFields,
}) })
expect(row.body._viewId).toBeUndefined() expect(row._viewId).toBeUndefined()
expect(row.body.age).toBeUndefined() expect(row.age).toBeUndefined()
expect(row.body.jobTitle).toBeUndefined() expect(row.jobTitle).toBeUndefined()
}) })
}) })
@ -1042,7 +1043,7 @@ describe.each([
}) })
const row = await config.api.row.get(tableId, newRow._id!) const row = await config.api.row.get(tableId, newRow._id!)
expect(row.body).toEqual({ expect(row).toEqual({
...newRow, ...newRow,
name: newData.name, name: newData.name,
address: newData.address, address: newData.address,
@ -1051,9 +1052,9 @@ describe.each([
id: newRow.id, id: newRow.id,
...defaultRowFields, ...defaultRowFields,
}) })
expect(row.body._viewId).toBeUndefined() expect(row._viewId).toBeUndefined()
expect(row.body.age).toBeUndefined() expect(row.age).toBeUndefined()
expect(row.body.jobTitle).toBeUndefined() expect(row.jobTitle).toBeUndefined()
}) })
}) })
@ -1071,12 +1072,12 @@ describe.each([
const createdRow = await config.createRow() const createdRow = await config.createRow()
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
await config.api.row.delete(view.id, [createdRow]) await config.api.row.bulkDelete(view.id, { rows: [createdRow] })
await assertRowUsage(rowUsage - 1) await assertRowUsage(rowUsage - 1)
await config.api.row.get(tableId, createdRow._id!, { await config.api.row.get(tableId, createdRow._id!, {
expectStatus: 404, status: 404,
}) })
}) })
@ -1097,17 +1098,17 @@ describe.each([
]) ])
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
await config.api.row.delete(view.id, [rows[0], rows[2]]) await config.api.row.bulkDelete(view.id, { rows: [rows[0], rows[2]] })
await assertRowUsage(rowUsage - 2) await assertRowUsage(rowUsage - 2)
await config.api.row.get(tableId, rows[0]._id!, { await config.api.row.get(tableId, rows[0]._id!, {
expectStatus: 404, status: 404,
}) })
await config.api.row.get(tableId, rows[2]._id!, { await config.api.row.get(tableId, rows[2]._id!, {
expectStatus: 404, status: 404,
}) })
await config.api.row.get(tableId, rows[1]._id!, { expectStatus: 200 }) await config.api.row.get(tableId, rows[1]._id!, { status: 200 })
}) })
}) })
@ -1154,8 +1155,8 @@ describe.each([
const createViewResponse = await config.createView() const createViewResponse = await config.createView()
const response = await config.api.viewV2.search(createViewResponse.id) const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(10) expect(response.rows).toHaveLength(10)
expect(response.body).toEqual({ expect(response).toEqual({
rows: expect.arrayContaining( rows: expect.arrayContaining(
rows.map(r => ({ rows.map(r => ({
_viewId: createViewResponse.id, _viewId: createViewResponse.id,
@ -1206,8 +1207,8 @@ describe.each([
const response = await config.api.viewV2.search(createViewResponse.id) const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(5) expect(response.rows).toHaveLength(5)
expect(response.body).toEqual({ expect(response).toEqual({
rows: expect.arrayContaining( rows: expect.arrayContaining(
expectedRows.map(r => ({ expectedRows.map(r => ({
_viewId: createViewResponse.id, _viewId: createViewResponse.id,
@ -1328,8 +1329,8 @@ describe.each([
createViewResponse.id createViewResponse.id
) )
expect(response.body.rows).toHaveLength(4) expect(response.rows).toHaveLength(4)
expect(response.body.rows).toEqual( expect(response.rows).toEqual(
expected.map(name => expect.objectContaining({ name })) expected.map(name => expect.objectContaining({ name }))
) )
} }
@ -1357,8 +1358,8 @@ describe.each([
} }
) )
expect(response.body.rows).toHaveLength(4) expect(response.rows).toHaveLength(4)
expect(response.body.rows).toEqual( expect(response.rows).toEqual(
expected.map(name => expect.objectContaining({ name })) expected.map(name => expect.objectContaining({ name }))
) )
} }
@ -1382,8 +1383,8 @@ describe.each([
}) })
const response = await config.api.viewV2.search(view.id) const response = await config.api.viewV2.search(view.id)
expect(response.body.rows).toHaveLength(10) expect(response.rows).toHaveLength(10)
expect(response.body.rows).toEqual( expect(response.rows).toEqual(
expect.arrayContaining( expect.arrayContaining(
rows.map(r => ({ rows.map(r => ({
...(isInternal ...(isInternal
@ -1402,7 +1403,7 @@ describe.each([
const createViewResponse = await config.createView() const createViewResponse = await config.createView()
const response = await config.api.viewV2.search(createViewResponse.id) const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(0) expect(response.rows).toHaveLength(0)
}) })
it("respects the limit parameter", async () => { it("respects the limit parameter", async () => {
@ -1417,7 +1418,7 @@ describe.each([
query: {}, query: {},
}) })
expect(response.body.rows).toHaveLength(limit) expect(response.rows).toHaveLength(limit)
}) })
it("can handle pagination", async () => { it("can handle pagination", async () => {
@ -1426,7 +1427,7 @@ describe.each([
const createViewResponse = await config.createView() const createViewResponse = await config.createView()
const allRows = (await config.api.viewV2.search(createViewResponse.id)) const allRows = (await config.api.viewV2.search(createViewResponse.id))
.body.rows .rows
const firstPageResponse = await config.api.viewV2.search( const firstPageResponse = await config.api.viewV2.search(
createViewResponse.id, createViewResponse.id,
@ -1436,7 +1437,7 @@ describe.each([
query: {}, query: {},
} }
) )
expect(firstPageResponse.body).toEqual({ expect(firstPageResponse).toEqual({
rows: expect.arrayContaining(allRows.slice(0, 4)), rows: expect.arrayContaining(allRows.slice(0, 4)),
totalRows: isInternal ? 10 : undefined, totalRows: isInternal ? 10 : undefined,
hasNextPage: true, hasNextPage: true,
@ -1448,12 +1449,12 @@ describe.each([
{ {
paginate: true, paginate: true,
limit: 4, limit: 4,
bookmark: firstPageResponse.body.bookmark, bookmark: firstPageResponse.bookmark,
query: {}, query: {},
} }
) )
expect(secondPageResponse.body).toEqual({ expect(secondPageResponse).toEqual({
rows: expect.arrayContaining(allRows.slice(4, 8)), rows: expect.arrayContaining(allRows.slice(4, 8)),
totalRows: isInternal ? 10 : undefined, totalRows: isInternal ? 10 : undefined,
hasNextPage: true, hasNextPage: true,
@ -1465,11 +1466,11 @@ describe.each([
{ {
paginate: true, paginate: true,
limit: 4, limit: 4,
bookmark: secondPageResponse.body.bookmark, bookmark: secondPageResponse.bookmark,
query: {}, query: {},
} }
) )
expect(lastPageResponse.body).toEqual({ expect(lastPageResponse).toEqual({
rows: expect.arrayContaining(allRows.slice(8)), rows: expect.arrayContaining(allRows.slice(8)),
totalRows: isInternal ? 10 : undefined, totalRows: isInternal ? 10 : undefined,
hasNextPage: false, hasNextPage: false,
@ -1489,7 +1490,7 @@ describe.each([
email: "joe@joe.com", email: "joe@joe.com",
roles: {}, roles: {},
}, },
{ expectStatus: 400 } { status: 400 }
) )
expect(response.message).toBe("Cannot create new user entry.") expect(response.message).toBe("Cannot create new user entry.")
}) })
@ -1516,58 +1517,52 @@ describe.each([
it("does not allow public users to fetch by default", async () => { it("does not allow public users to fetch by default", async () => {
await config.publish() await config.publish()
await config.api.viewV2.search(viewId, undefined, { await config.api.viewV2.publicSearch(viewId, undefined, {
expectStatus: 403, status: 403,
usePublicUser: true,
}) })
}) })
it("allow public users to fetch when permissions are explicit", async () => { it("allow public users to fetch when permissions are explicit", async () => {
await config.api.permission.set({ await config.api.permission.add({
roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, roleId: roles.BUILTIN_ROLE_IDS.PUBLIC,
level: PermissionLevel.READ, level: PermissionLevel.READ,
resourceId: viewId, resourceId: viewId,
}) })
await config.publish() await config.publish()
const response = await config.api.viewV2.search(viewId, undefined, { const response = await config.api.viewV2.publicSearch(viewId)
usePublicUser: true,
})
expect(response.body.rows).toHaveLength(10) expect(response.rows).toHaveLength(10)
}) })
it("allow public users to fetch when permissions are inherited", async () => { it("allow public users to fetch when permissions are inherited", async () => {
await config.api.permission.set({ await config.api.permission.add({
roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, roleId: roles.BUILTIN_ROLE_IDS.PUBLIC,
level: PermissionLevel.READ, level: PermissionLevel.READ,
resourceId: tableId, resourceId: tableId,
}) })
await config.publish() await config.publish()
const response = await config.api.viewV2.search(viewId, undefined, { const response = await config.api.viewV2.publicSearch(viewId)
usePublicUser: true,
})
expect(response.body.rows).toHaveLength(10) expect(response.rows).toHaveLength(10)
}) })
it("respects inherited permissions, not allowing not public views from public tables", async () => { it("respects inherited permissions, not allowing not public views from public tables", async () => {
await config.api.permission.set({ await config.api.permission.add({
roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, roleId: roles.BUILTIN_ROLE_IDS.PUBLIC,
level: PermissionLevel.READ, level: PermissionLevel.READ,
resourceId: tableId, resourceId: tableId,
}) })
await config.api.permission.set({ await config.api.permission.add({
roleId: roles.BUILTIN_ROLE_IDS.POWER, roleId: roles.BUILTIN_ROLE_IDS.POWER,
level: PermissionLevel.READ, level: PermissionLevel.READ,
resourceId: viewId, resourceId: viewId,
}) })
await config.publish() await config.publish()
await config.api.viewV2.search(viewId, undefined, { await config.api.viewV2.publicSearch(viewId, undefined, {
usePublicUser: true, status: 403,
expectStatus: 403,
}) })
}) })
}) })
@ -1754,7 +1749,7 @@ describe.each([
} }
const row = await config.api.row.save(tableId, rowData) const row = await config.api.row.save(tableId, rowData)
const { body: retrieved } = await config.api.row.get(tableId, row._id!) const retrieved = await config.api.row.get(tableId, row._id!)
expect(retrieved).toEqual({ expect(retrieved).toEqual({
name: rowData.name, name: rowData.name,
description: rowData.description, description: rowData.description,
@ -1781,7 +1776,7 @@ describe.each([
} }
const row = await config.api.row.save(tableId, rowData) const row = await config.api.row.save(tableId, rowData)
const { body: retrieved } = await config.api.row.get(tableId, row._id!) const retrieved = await config.api.row.get(tableId, row._id!)
expect(retrieved).toEqual({ expect(retrieved).toEqual({
name: rowData.name, name: rowData.name,
description: rowData.description, description: rowData.description,

View file

@ -26,6 +26,7 @@ import { TableToBuild } from "../../../tests/utilities/TestConfiguration"
tk.freeze(mocks.date.MOCK_DATE) tk.freeze(mocks.date.MOCK_DATE)
const { basicTable } = setup.structures const { basicTable } = setup.structures
const ISO_REGEX_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/
describe("/tables", () => { describe("/tables", () => {
let request = setup.getRequest() let request = setup.getRequest()
@ -285,6 +286,35 @@ describe("/tables", () => {
expect(res.body.schema.roleId).toBeDefined() expect(res.body.schema.roleId).toBeDefined()
}) })
}) })
it("should add a new column for an internal DB table", async () => {
const saveTableRequest: SaveTableRequest = {
_add: {
name: "NEW_COLUMN",
},
...basicTable(),
}
const response = await request
.post(`/api/tables`)
.send(saveTableRequest)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
const expectedResponse = {
...saveTableRequest,
_rev: expect.stringMatching(/^\d-.+/),
_id: expect.stringMatching(/^ta_.+/),
createdAt: expect.stringMatching(ISO_REGEX_PATTERN),
updatedAt: expect.stringMatching(ISO_REGEX_PATTERN),
views: {},
}
delete expectedResponse._add
expect(response.status).toBe(200)
expect(response.body).toEqual(expectedResponse)
})
}) })
describe("import", () => { describe("import", () => {
@ -663,8 +693,7 @@ describe("/tables", () => {
expect(migratedTable.schema["user column"]).toBeDefined() expect(migratedTable.schema["user column"]).toBeDefined()
expect(migratedTable.schema["user relationship"]).not.toBeDefined() expect(migratedTable.schema["user relationship"]).not.toBeDefined()
const resp = await config.api.row.get(table._id!, testRow._id!) const migratedRow = await config.api.row.get(table._id!, testRow._id!)
const migratedRow = resp.body as Row
expect(migratedRow["user column"]).toBeDefined() expect(migratedRow["user column"]).toBeDefined()
expect(migratedRow["user relationship"]).not.toBeDefined() expect(migratedRow["user relationship"]).not.toBeDefined()
@ -716,15 +745,13 @@ describe("/tables", () => {
expect(migratedTable.schema["user column"]).toBeDefined() expect(migratedTable.schema["user column"]).toBeDefined()
expect(migratedTable.schema["user relationship"]).not.toBeDefined() expect(migratedTable.schema["user relationship"]).not.toBeDefined()
const row1Migrated = (await config.api.row.get(table._id!, row1._id!)) const row1Migrated = await config.api.row.get(table._id!, row1._id!)
.body as Row
expect(row1Migrated["user relationship"]).not.toBeDefined() expect(row1Migrated["user relationship"]).not.toBeDefined()
expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual( expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual(
expect.arrayContaining([users[0]._id, users[1]._id]) expect.arrayContaining([users[0]._id, users[1]._id])
) )
const row2Migrated = (await config.api.row.get(table._id!, row2._id!)) const row2Migrated = await config.api.row.get(table._id!, row2._id!)
.body as Row
expect(row2Migrated["user relationship"]).not.toBeDefined() expect(row2Migrated["user relationship"]).not.toBeDefined()
expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual( expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual(
expect.arrayContaining([users[1]._id, users[2]._id]) expect.arrayContaining([users[1]._id, users[2]._id])
@ -773,15 +800,13 @@ describe("/tables", () => {
expect(migratedTable.schema["user column"]).toBeDefined() expect(migratedTable.schema["user column"]).toBeDefined()
expect(migratedTable.schema["user relationship"]).not.toBeDefined() expect(migratedTable.schema["user relationship"]).not.toBeDefined()
const row1Migrated = (await config.api.row.get(table._id!, row1._id!)) const row1Migrated = await config.api.row.get(table._id!, row1._id!)
.body as Row
expect(row1Migrated["user relationship"]).not.toBeDefined() expect(row1Migrated["user relationship"]).not.toBeDefined()
expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual( expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual(
expect.arrayContaining([users[0]._id, users[1]._id]) expect.arrayContaining([users[0]._id, users[1]._id])
) )
const row2Migrated = (await config.api.row.get(table._id!, row2._id!)) const row2Migrated = await config.api.row.get(table._id!, row2._id!)
.body as Row
expect(row2Migrated["user relationship"]).not.toBeDefined() expect(row2Migrated["user relationship"]).not.toBeDefined()
expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual([ expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual([
users[2]._id, users[2]._id,
@ -831,7 +856,7 @@ describe("/tables", () => {
subtype: FieldSubtype.USERS, subtype: FieldSubtype.USERS,
}, },
}, },
{ expectStatus: 400 } { status: 400 }
) )
}) })
@ -846,7 +871,7 @@ describe("/tables", () => {
subtype: FieldSubtype.USERS, subtype: FieldSubtype.USERS,
}, },
}, },
{ expectStatus: 400 } { status: 400 }
) )
}) })
@ -861,7 +886,7 @@ describe("/tables", () => {
subtype: FieldSubtype.USERS, subtype: FieldSubtype.USERS,
}, },
}, },
{ expectStatus: 400 } { status: 400 }
) )
}) })
@ -880,7 +905,7 @@ describe("/tables", () => {
subtype: FieldSubtype.USERS, subtype: FieldSubtype.USERS,
}, },
}, },
{ expectStatus: 400 } { status: 400 }
) )
}) })
}) })

View file

@ -90,7 +90,7 @@ describe("/users", () => {
}) })
await config.api.user.update( await config.api.user.update(
{ ...user, roleId: roles.BUILTIN_ROLE_IDS.POWER }, { ...user, roleId: roles.BUILTIN_ROLE_IDS.POWER },
{ expectStatus: 409 } { status: 409 }
) )
}) })
}) })

View file

@ -1,5 +1,4 @@
import TestConfig from "../../../../tests/utilities/TestConfiguration" import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
import env from "../../../../environment"
import supertest from "supertest" import supertest from "supertest"
export * as structures from "../../../../tests/utilities/structures" export * as structures from "../../../../tests/utilities/structures"
@ -47,10 +46,10 @@ export function delay(ms: number) {
} }
let request: supertest.SuperTest<supertest.Test> | undefined | null, let request: supertest.SuperTest<supertest.Test> | undefined | null,
config: TestConfig | null config: TestConfiguration | null
export function beforeAll() { export function beforeAll() {
config = new TestConfig() config = new TestConfiguration()
request = config.getRequest() request = config.getRequest()
} }
@ -77,21 +76,3 @@ export function getConfig() {
} }
return config! return config!
} }
export async function switchToSelfHosted(func: any) {
// self hosted stops any attempts to Dynamo
env._set("NODE_ENV", "production")
env._set("SELF_HOSTED", true)
let error
try {
await func()
} catch (err) {
error = err
}
env._set("NODE_ENV", "jest")
env._set("SELF_HOSTED", false)
// don't throw error until after reset
if (error) {
throw error
}
}
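The removed switchToSelfHosted helper above is replaced throughout this diff by config.withEnv({ SELF_HOSTED: "true" }, fn). The real TestConfiguration.withEnv is not shown in this commit; the sketch below only illustrates, under that assumption, how such a scoped override could reuse the set-then-restore idea from the deleted helper.

// Illustrative sketch only — not the actual TestConfiguration.withEnv implementation.
// It applies the overrides via env._set, runs the callback, and always restores the
// previous values, matching the "don't throw error until after reset" intent above.
async function withEnvSketch<T>(
  env: { _set: (key: string, value: any) => void } & Record<string, any>,
  overrides: Record<string, any>,
  fn: () => Promise<T>
): Promise<T> {
  const previous: Record<string, any> = {}
  for (const [key, value] of Object.entries(overrides)) {
    previous[key] = env[key]
    env._set(key, value)
  }
  try {
    return await fn()
  } finally {
    for (const [key, value] of Object.entries(previous)) {
      env._set(key, value)
    }
  }
}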

View file

@ -177,7 +177,7 @@ describe.each([
} }
await config.api.viewV2.create(newView, { await config.api.viewV2.create(newView, {
expectStatus: 201, status: 201,
}) })
}) })
}) })
@ -275,7 +275,7 @@ describe.each([
const tableId = table._id! const tableId = table._id!
await config.api.viewV2.update( await config.api.viewV2.update(
{ ...view, id: generator.guid() }, { ...view, id: generator.guid() },
{ expectStatus: 404 } { status: 404 }
) )
expect(await config.api.table.get(tableId)).toEqual( expect(await config.api.table.get(tableId)).toEqual(
@ -304,7 +304,7 @@ describe.each([
}, },
], ],
}, },
{ expectStatus: 404 } { status: 404 }
) )
expect(await config.api.table.get(tableId)).toEqual( expect(await config.api.table.get(tableId)).toEqual(
@ -326,12 +326,10 @@ describe.each([
...viewV1, ...viewV1,
}, },
{ {
expectStatus: 400, status: 400,
handleResponse: r => { body: {
expect(r.body).toEqual({ message: "Only views V2 can be updated",
message: "Only views V2 can be updated", status: 400,
status: 400,
})
}, },
} }
) )
@ -403,7 +401,7 @@ describe.each([
} as Record<string, FieldSchema>, } as Record<string, FieldSchema>,
}, },
{ {
expectStatus: 200, status: 200,
} }
) )
}) })

View file

@ -30,9 +30,9 @@ describe("migrations", () => {
const appId = config.getAppId() const appId = config.getAppId()
const response = await config.api.application.getRaw(appId) await config.api.application.get(appId, {
headersNotPresent: [Header.MIGRATING_APP],
expect(response.headers[Header.MIGRATING_APP]).toBeUndefined() })
}) })
it("accessing an app that has pending migrations will attach the migrating header", async () => { it("accessing an app that has pending migrations will attach the migrating header", async () => {
@ -46,8 +46,10 @@ describe("migrations", () => {
func: async () => {}, func: async () => {},
}) })
const response = await config.api.application.getRaw(appId) await config.api.application.get(appId, {
headers: {
expect(response.headers[Header.MIGRATING_APP]).toEqual(appId) [Header.MIGRATING_APP]: appId,
},
})
}) })
}) })
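The updated test helpers in this diff all accept an optional expectations argument; the call sites shown here pass combinations of status, body, headers, and headersNotPresent. The actual type lives in the shared test API and is not part of this commit — the interface below is an assumed reconstruction from those call sites.

// Assumed shape of the expectations object used by config.api.* helpers in these tests.
interface Expectations {
  status?: number                    // expected HTTP status code (call sites appear to default to 200)
  body?: Record<string, any>         // expected response body, or a subset to match
  headers?: Record<string, string>   // headers that must be present with these exact values
  headersNotPresent?: string[]       // headers that must be absent, e.g. [Header.MIGRATING_APP]
}

// Usage as in the migration tests above:
//   await config.api.application.get(appId, { headersNotPresent: [Header.MIGRATING_APP] })
//   await config.api.application.get(appId, { headers: { [Header.MIGRATING_APP]: appId } })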

View file

@ -24,7 +24,7 @@ describe("test the create row action", () => {
expect(res.id).toBeDefined() expect(res.id).toBeDefined()
expect(res.revision).toBeDefined() expect(res.revision).toBeDefined()
expect(res.success).toEqual(true) expect(res.success).toEqual(true)
const gottenRow = await config.getRow(table._id, res.id) const gottenRow = await config.api.row.get(table._id, res.id)
expect(gottenRow.name).toEqual("test") expect(gottenRow.name).toEqual("test")
expect(gottenRow.description).toEqual("test") expect(gottenRow.description).toEqual("test")
}) })

View file

@ -36,7 +36,7 @@ describe("test the update row action", () => {
it("should be able to run the action", async () => { it("should be able to run the action", async () => {
const res = await setup.runStep(setup.actions.UPDATE_ROW.stepId, inputs) const res = await setup.runStep(setup.actions.UPDATE_ROW.stepId, inputs)
expect(res.success).toEqual(true) expect(res.success).toEqual(true)
const updatedRow = await config.getRow(table._id!, res.id) const updatedRow = await config.api.row.get(table._id!, res.id)
expect(updatedRow.name).toEqual("Updated name") expect(updatedRow.name).toEqual("Updated name")
expect(updatedRow.description).not.toEqual("") expect(updatedRow.description).not.toEqual("")
}) })
@ -87,8 +87,8 @@ describe("test the update row action", () => {
}) })
let getResp = await config.api.row.get(table._id!, row._id!) let getResp = await config.api.row.get(table._id!, row._id!)
expect(getResp.body.user1[0]._id).toEqual(user1._id) expect(getResp.user1[0]._id).toEqual(user1._id)
expect(getResp.body.user2[0]._id).toEqual(user2._id) expect(getResp.user2[0]._id).toEqual(user2._id)
let stepResp = await setup.runStep(setup.actions.UPDATE_ROW.stepId, { let stepResp = await setup.runStep(setup.actions.UPDATE_ROW.stepId, {
rowId: row._id, rowId: row._id,
@ -103,8 +103,8 @@ describe("test the update row action", () => {
expect(stepResp.success).toEqual(true) expect(stepResp.success).toEqual(true)
getResp = await config.api.row.get(table._id!, row._id!) getResp = await config.api.row.get(table._id!, row._id!)
expect(getResp.body.user1[0]._id).toEqual(user2._id) expect(getResp.user1[0]._id).toEqual(user2._id)
expect(getResp.body.user2[0]._id).toEqual(user2._id) expect(getResp.user2[0]._id).toEqual(user2._id)
}) })
it("should overwrite links if those links are not set and we ask it do", async () => { it("should overwrite links if those links are not set and we ask it do", async () => {
@ -140,8 +140,8 @@ describe("test the update row action", () => {
}) })
let getResp = await config.api.row.get(table._id!, row._id!) let getResp = await config.api.row.get(table._id!, row._id!)
expect(getResp.body.user1[0]._id).toEqual(user1._id) expect(getResp.user1[0]._id).toEqual(user1._id)
expect(getResp.body.user2[0]._id).toEqual(user2._id) expect(getResp.user2[0]._id).toEqual(user2._id)
let stepResp = await setup.runStep(setup.actions.UPDATE_ROW.stepId, { let stepResp = await setup.runStep(setup.actions.UPDATE_ROW.stepId, {
rowId: row._id, rowId: row._id,
@ -163,7 +163,7 @@ describe("test the update row action", () => {
expect(stepResp.success).toEqual(true) expect(stepResp.success).toEqual(true)
getResp = await config.api.row.get(table._id!, row._id!) getResp = await config.api.row.get(table._id!, row._id!)
expect(getResp.body.user1[0]._id).toEqual(user2._id) expect(getResp.user1[0]._id).toEqual(user2._id)
expect(getResp.body.user2).toBeUndefined() expect(getResp.user2).toBeUndefined()
}) })
}) })

View file

@ -100,7 +100,7 @@ describe("test the link controller", () => {
const { _id } = await config.createRow( const { _id } = await config.createRow(
basicLinkedRow(t1._id!, row._id!, linkField) basicLinkedRow(t1._id!, row._id!, linkField)
) )
return config.getRow(t1._id!, _id!) return config.api.row.get(t1._id!, _id!)
} }
it("should be able to confirm if two table schemas are equal", async () => { it("should be able to confirm if two table schemas are equal", async () => {

View file

@ -0,0 +1,363 @@
import fetch from "node-fetch"
import {
generateMakeRequest,
MakeRequestResponse,
} from "../api/routes/public/tests/utils"
import { v4 as uuidv4 } from "uuid"
import * as setup from "../api/routes/tests/utilities"
import {
Datasource,
FieldType,
Table,
TableRequest,
TableSourceType,
} from "@budibase/types"
import _ from "lodash"
import { databaseTestProviders } from "../integrations/tests/utils"
import mysql from "mysql2/promise"
import { builderSocket } from "../websockets"
// @ts-ignore
fetch.mockSearch()
const config = setup.getConfig()!
jest.unmock("mysql2/promise")
jest.mock("../websockets", () => ({
clientAppSocket: jest.fn(),
gridAppSocket: jest.fn(),
initialise: jest.fn(),
builderSocket: {
emitTableUpdate: jest.fn(),
emitTableDeletion: jest.fn(),
emitDatasourceUpdate: jest.fn(),
emitDatasourceDeletion: jest.fn(),
emitScreenUpdate: jest.fn(),
emitAppMetadataUpdate: jest.fn(),
emitAppPublish: jest.fn(),
},
}))
describe("mysql integrations", () => {
let makeRequest: MakeRequestResponse,
mysqlDatasource: Datasource,
primaryMySqlTable: Table
beforeAll(async () => {
await config.init()
const apiKey = await config.generateApiKey()
makeRequest = generateMakeRequest(apiKey, true)
mysqlDatasource = await config.api.datasource.create(
await databaseTestProviders.mysql.datasource()
)
})
afterAll(async () => {
await databaseTestProviders.mysql.stop()
})
beforeEach(async () => {
primaryMySqlTable = await config.createTable({
name: uuidv4(),
type: "table",
primary: ["id"],
schema: {
id: {
name: "id",
type: FieldType.AUTO,
autocolumn: true,
},
name: {
name: "name",
type: FieldType.STRING,
},
description: {
name: "description",
type: FieldType.STRING,
},
value: {
name: "value",
type: FieldType.NUMBER,
},
},
sourceId: mysqlDatasource._id,
sourceType: TableSourceType.EXTERNAL,
})
})
afterAll(config.end)
it("validate table schema", async () => {
const res = await makeRequest(
"get",
`/api/datasources/${mysqlDatasource._id}`
)
expect(res.status).toBe(200)
expect(res.body).toEqual({
config: {
database: "mysql",
host: mysqlDatasource.config!.host,
password: "--secret-value--",
port: mysqlDatasource.config!.port,
user: "root",
},
plus: true,
source: "MYSQL",
type: "datasource_plus",
_id: expect.any(String),
_rev: expect.any(String),
createdAt: expect.any(String),
updatedAt: expect.any(String),
entities: expect.any(Object),
})
})
describe("POST /api/datasources/verify", () => {
it("should be able to verify the connection", async () => {
await config.api.datasource.verify(
{
datasource: await databaseTestProviders.mysql.datasource(),
},
{
body: {
connected: true,
},
}
)
})
it("should state an invalid datasource cannot connect", async () => {
const dbConfig = await databaseTestProviders.mysql.datasource()
await config.api.datasource.verify(
{
datasource: {
...dbConfig,
config: {
...dbConfig.config,
password: "wrongpassword",
},
},
},
{
body: {
connected: false,
error:
"Access denied for the specified user. User does not have the necessary privileges or the provided credentials are incorrect. Please verify the credentials, and ensure that the user has appropriate permissions.",
},
}
)
})
})
describe("POST /api/datasources/info", () => {
it("should fetch information about mysql datasource", async () => {
const primaryName = primaryMySqlTable.name
const response = await makeRequest("post", "/api/datasources/info", {
datasource: mysqlDatasource,
})
expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined()
expect(response.body.tableNames.indexOf(primaryName)).not.toBe(-1)
})
})
describe("Integration compatibility with mysql search_path", () => {
let client: mysql.Connection, pathDatasource: Datasource
const database = "test1"
const database2 = "test-2"
beforeAll(async () => {
const dsConfig = await databaseTestProviders.mysql.datasource()
const dbConfig = dsConfig.config!
client = await mysql.createConnection(dbConfig)
await client.query(`CREATE DATABASE \`${database}\`;`)
await client.query(`CREATE DATABASE \`${database2}\`;`)
const pathConfig: any = {
...dsConfig,
config: {
...dbConfig,
database,
},
}
pathDatasource = await config.api.datasource.create(pathConfig)
})
afterAll(async () => {
await client.query(`DROP DATABASE \`${database}\`;`)
await client.query(`DROP DATABASE \`${database2}\`;`)
await client.end()
})
it("discovers tables from any schema in search path", async () => {
await client.query(
`CREATE TABLE \`${database}\`.table1 (id1 SERIAL PRIMARY KEY);`
)
const response = await makeRequest("post", "/api/datasources/info", {
datasource: pathDatasource,
})
expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined()
expect(response.body.tableNames).toEqual(
expect.arrayContaining(["table1"])
)
})
it("does not mix columns from different tables", async () => {
const repeated_table_name = "table_same_name"
await client.query(
`CREATE TABLE \`${database}\`.${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
)
await client.query(
`CREATE TABLE \`${database2}\`.${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
)
const response = await makeRequest(
"post",
`/api/datasources/${pathDatasource._id}/schema`,
{
tablesFilter: [repeated_table_name],
}
)
expect(response.status).toBe(200)
expect(
response.body.datasource.entities[repeated_table_name].schema
).toBeDefined()
const schema =
response.body.datasource.entities[repeated_table_name].schema
expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
})
})
describe("POST /api/tables/", () => {
let client: mysql.Connection
const emitDatasourceUpdateMock = jest.fn()
beforeEach(async () => {
client = await mysql.createConnection(
(
await databaseTestProviders.mysql.datasource()
).config!
)
mysqlDatasource = await config.api.datasource.create(
await databaseTestProviders.mysql.datasource()
)
})
afterEach(async () => {
await client.end()
})
it("will emit the datasource entity schema with externalType to the front-end when adding a new column", async () => {
const addColumnToTable: TableRequest = {
type: "table",
sourceType: TableSourceType.EXTERNAL,
name: "table",
sourceId: mysqlDatasource._id!,
primary: ["id"],
schema: {
id: {
type: FieldType.AUTO,
name: "id",
autocolumn: true,
},
new_column: {
type: FieldType.NUMBER,
name: "new_column",
},
},
_add: {
name: "new_column",
},
}
jest
.spyOn(builderSocket!, "emitDatasourceUpdate")
.mockImplementation(emitDatasourceUpdateMock)
await makeRequest("post", "/api/tables/", addColumnToTable)
const expectedTable: TableRequest = {
...addColumnToTable,
schema: {
id: {
type: FieldType.NUMBER,
name: "id",
autocolumn: true,
constraints: {
presence: false,
},
externalType: "int unsigned",
},
new_column: {
type: FieldType.NUMBER,
name: "new_column",
autocolumn: false,
constraints: {
presence: false,
},
externalType: "float(8,2)",
},
},
created: true,
_id: `${mysqlDatasource._id}__table`,
}
delete expectedTable._add
expect(emitDatasourceUpdateMock).toBeCalledTimes(1)
const emittedDatasource: Datasource =
emitDatasourceUpdateMock.mock.calls[0][1]
expect(emittedDatasource.entities!["table"]).toEqual(expectedTable)
})
it("will rename a column", async () => {
await makeRequest("post", "/api/tables/", primaryMySqlTable)
let renameColumnOnTable: TableRequest = {
...primaryMySqlTable,
schema: {
id: {
name: "id",
type: FieldType.AUTO,
autocolumn: true,
externalType: "unsigned integer",
},
name: {
name: "name",
type: FieldType.STRING,
externalType: "text",
},
description: {
name: "description",
type: FieldType.STRING,
externalType: "text",
},
age: {
name: "age",
type: FieldType.NUMBER,
externalType: "float(8,2)",
},
},
}
const response = await makeRequest(
"post",
"/api/tables/",
renameColumnOnTable
)
mysqlDatasource = (
await makeRequest(
"post",
`/api/datasources/${mysqlDatasource._id}/schema`
)
).body.datasource
expect(response.status).toEqual(200)
expect(
Object.keys(mysqlDatasource.entities![primaryMySqlTable.name].schema)
).toEqual(["id", "name", "description", "age"])
})
})
})

View file

@ -398,7 +398,7 @@ describe("postgres integrations", () => {
expect(res.status).toBe(200) expect(res.status).toBe(200)
expect(res.body).toEqual(updatedRow) expect(res.body).toEqual(updatedRow)
const persistedRow = await config.getRow( const persistedRow = await config.api.row.get(
primaryPostgresTable._id!, primaryPostgresTable._id!,
row.id row.id
) )
@ -1040,28 +1040,37 @@ describe("postgres integrations", () => {
describe("POST /api/datasources/verify", () => { describe("POST /api/datasources/verify", () => {
it("should be able to verify the connection", async () => { it("should be able to verify the connection", async () => {
const response = await config.api.datasource.verify({ await config.api.datasource.verify(
datasource: await databaseTestProviders.postgres.datasource(), {
}) datasource: await databaseTestProviders.postgres.datasource(),
expect(response.status).toBe(200) },
expect(response.body.connected).toBe(true) {
body: {
connected: true,
},
}
)
}) })
it("should state an invalid datasource cannot connect", async () => { it("should state an invalid datasource cannot connect", async () => {
const dbConfig = await databaseTestProviders.postgres.datasource() const dbConfig = await databaseTestProviders.postgres.datasource()
const response = await config.api.datasource.verify({ await config.api.datasource.verify(
datasource: { {
...dbConfig, datasource: {
config: { ...dbConfig,
...dbConfig.config, config: {
password: "wrongpassword", ...dbConfig.config,
password: "wrongpassword",
},
}, },
}, },
}) {
body: {
expect(response.status).toBe(200) connected: false,
expect(response.body.connected).toBe(false) error: 'password authentication failed for user "postgres"',
expect(response.body.error).toBeDefined() },
}
)
}) })
}) })

View file

@ -12,6 +12,8 @@ import {
} from "@budibase/types" } from "@budibase/types"
import environment from "../../environment" import environment from "../../environment"
type QueryFunction = (query: Knex.SqlNative, operation: Operation) => any
const envLimit = environment.SQL_MAX_ROWS const envLimit = environment.SQL_MAX_ROWS
? parseInt(environment.SQL_MAX_ROWS) ? parseInt(environment.SQL_MAX_ROWS)
: null : null
@ -322,15 +324,18 @@ class InternalBuilder {
addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder { addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
let { sort, paginate } = json let { sort, paginate } = json
const table = json.meta?.table const table = json.meta?.table
const aliases = json.tableAliases
const aliased =
table?.name && aliases?.[table.name] ? aliases[table.name] : table?.name
if (sort && Object.keys(sort || {}).length > 0) { if (sort && Object.keys(sort || {}).length > 0) {
for (let [key, value] of Object.entries(sort)) { for (let [key, value] of Object.entries(sort)) {
const direction = const direction =
value.direction === SortDirection.ASCENDING ? "asc" : "desc" value.direction === SortDirection.ASCENDING ? "asc" : "desc"
query = query.orderBy(`${table?.name}.${key}`, direction) query = query.orderBy(`${aliased}.${key}`, direction)
} }
} else if (this.client === SqlClient.MS_SQL && paginate?.limit) { } else if (this.client === SqlClient.MS_SQL && paginate?.limit) {
// @ts-ignore // @ts-ignore
query = query.orderBy(`${table?.name}.${table?.primary[0]}`) query = query.orderBy(`${aliased}.${table?.primary[0]}`)
} }
return query return query
} }
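To make the aliasing change in addSorting concrete: when json.tableAliases maps the table name to a short alias, the sort column is now qualified with that alias instead of the real table name. A small illustration, using the people → "a" mapping from the createSimple.json fixture added later in this diff:

// With tableAliases = { people: "a" } and a sort on "name":
//   previously: query.orderBy("people.name", "asc")
//   now:        query.orderBy("a.name", "asc")
const aliases: Record<string, string> = { people: "a" }
const tableName = "people"
const aliased = aliases[tableName] ?? tableName // "a"
// query = query.orderBy(`${aliased}.name`, direction)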
@ -430,10 +435,12 @@ class InternalBuilder {
aliases?: QueryJson["tableAliases"] aliases?: QueryJson["tableAliases"]
): Knex.QueryBuilder { ): Knex.QueryBuilder {
const tableName = endpoint.entityId const tableName = endpoint.entityId
const tableAliased = aliases?.[tableName] const tableAlias = aliases?.[tableName]
? `${tableName} as ${aliases?.[tableName]}` let table: string | Record<string, string> = tableName
: tableName if (tableAlias) {
let query = knex(tableAliased) table = { [tableAlias]: tableName }
}
let query = knex(table)
if (endpoint.schema) { if (endpoint.schema) {
query = query.withSchema(endpoint.schema) query = query.withSchema(endpoint.schema)
} }
@ -605,7 +612,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
return query.toSQL().toNative() return query.toSQL().toNative()
} }
async getReturningRow(queryFn: Function, json: QueryJson) { async getReturningRow(queryFn: QueryFunction, json: QueryJson) {
if (!json.extra || !json.extra.idFilter) { if (!json.extra || !json.extra.idFilter) {
return {} return {}
} }
@ -617,7 +624,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
resource: { resource: {
fields: [], fields: [],
}, },
filters: json.extra.idFilter, filters: json.extra?.idFilter,
paginate: { paginate: {
limit: 1, limit: 1,
}, },
@ -646,7 +653,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
// this function recreates the returning functionality of postgres // this function recreates the returning functionality of postgres
async queryWithReturning( async queryWithReturning(
json: QueryJson, json: QueryJson,
queryFn: Function, queryFn: QueryFunction,
processFn: Function = (result: any) => result processFn: Function = (result: any) => result
) { ) {
const sqlClient = this.getSqlClient() const sqlClient = this.getSqlClient()

View file

@ -14,7 +14,12 @@ import firebase from "./firebase"
import redis from "./redis" import redis from "./redis"
import snowflake from "./snowflake" import snowflake from "./snowflake"
import oracle from "./oracle" import oracle from "./oracle"
import { SourceName, Integration, PluginType } from "@budibase/types" import {
SourceName,
Integration,
PluginType,
IntegrationBase,
} from "@budibase/types"
import { getDatasourcePlugin } from "../utilities/fileSystem" import { getDatasourcePlugin } from "../utilities/fileSystem"
import env from "../environment" import env from "../environment"
import cloneDeep from "lodash/cloneDeep" import cloneDeep from "lodash/cloneDeep"
@ -40,25 +45,28 @@ const DEFINITIONS: Record<SourceName, Integration | undefined> = {
[SourceName.BUDIBASE]: undefined, [SourceName.BUDIBASE]: undefined,
} }
const INTEGRATIONS: Record<SourceName, any> = { type IntegrationBaseConstructor = new (...args: any[]) => IntegrationBase
[SourceName.POSTGRES]: postgres.integration,
[SourceName.DYNAMODB]: dynamodb.integration, const INTEGRATIONS: Record<SourceName, IntegrationBaseConstructor | undefined> =
[SourceName.MONGODB]: mongodb.integration, {
[SourceName.ELASTICSEARCH]: elasticsearch.integration, [SourceName.POSTGRES]: postgres.integration,
[SourceName.COUCHDB]: couchdb.integration, [SourceName.DYNAMODB]: dynamodb.integration,
[SourceName.SQL_SERVER]: sqlServer.integration, [SourceName.MONGODB]: mongodb.integration,
[SourceName.S3]: s3.integration, [SourceName.ELASTICSEARCH]: elasticsearch.integration,
[SourceName.AIRTABLE]: airtable.integration, [SourceName.COUCHDB]: couchdb.integration,
[SourceName.MYSQL]: mysql.integration, [SourceName.SQL_SERVER]: sqlServer.integration,
[SourceName.ARANGODB]: arangodb.integration, [SourceName.S3]: s3.integration,
[SourceName.REST]: rest.integration, [SourceName.AIRTABLE]: airtable.integration,
[SourceName.FIRESTORE]: firebase.integration, [SourceName.MYSQL]: mysql.integration,
[SourceName.GOOGLE_SHEETS]: googlesheets.integration, [SourceName.ARANGODB]: arangodb.integration,
[SourceName.REDIS]: redis.integration, [SourceName.REST]: rest.integration,
[SourceName.SNOWFLAKE]: snowflake.integration, [SourceName.FIRESTORE]: firebase.integration,
[SourceName.ORACLE]: undefined, [SourceName.GOOGLE_SHEETS]: googlesheets.integration,
[SourceName.BUDIBASE]: undefined, [SourceName.REDIS]: redis.integration,
} [SourceName.SNOWFLAKE]: snowflake.integration,
[SourceName.ORACLE]: undefined,
[SourceName.BUDIBASE]: undefined,
}
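// (Illustrative only, not part of this commit.) With the record now typed as
// constructors, a lookup-and-instantiate consumer could look roughly like this —
// the variable names are assumptions; only the error message appears elsewhere in this diff:
// const Integration = INTEGRATIONS[datasource.source]
// if (!Integration) {
//   throw new Error("No datasource implementation found.")
// }
// const instance: IntegrationBase = new Integration(datasource.config)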
// optionally add oracle integration if the oracle binary can be installed // optionally add oracle integration if the oracle binary can be installed
if ( if (

View file

@ -1,9 +1,10 @@
import { QueryJson } from "@budibase/types" import { Datasource, Operation, QueryJson, SourceName } from "@budibase/types"
import { join } from "path" import { join } from "path"
import Sql from "../base/sql" import Sql from "../base/sql"
import { SqlClient } from "../utils" import { SqlClient } from "../utils"
import AliasTables from "../../api/controllers/row/alias" import AliasTables from "../../api/controllers/row/alias"
import { generator } from "@budibase/backend-core/tests" import { generator } from "@budibase/backend-core/tests"
import { Knex } from "knex"
function multiline(sql: string) { function multiline(sql: string) {
return sql.replace(/\n/g, "").replace(/ +/g, " ") return sql.replace(/\n/g, "").replace(/ +/g, " ")
@ -160,6 +161,28 @@ describe("Captures of real examples", () => {
}) })
}) })
describe("returning (everything bar Postgres)", () => {
it("should be able to handle row returning", () => {
const queryJson = getJson("createSimple.json")
const SQL = new Sql(SqlClient.MS_SQL, limit)
let query = SQL._query(queryJson, { disableReturning: true })
expect(query).toEqual({
sql: "insert into [people] ([age], [name]) values (@p0, @p1)",
bindings: [22, "Test"],
})
// now check returning
let returningQuery: Knex.SqlNative = { sql: "", bindings: [] }
SQL.getReturningRow((input: Knex.SqlNative) => {
returningQuery = input
}, queryJson)
expect(returningQuery).toEqual({
sql: "select * from (select top (@p0) * from [people] where [people].[name] = @p1 and [people].[age] = @p2 order by [people].[name] asc) as [people]",
bindings: [1, "Test", 22],
})
})
})
describe("check max character aliasing", () => { describe("check max character aliasing", () => {
it("should handle over 'z' max character alias", () => { it("should handle over 'z' max character alias", () => {
const tableNames = [] const tableNames = []
@ -175,6 +198,114 @@ describe("Captures of real examples", () => {
}) })
}) })
describe("check aliasing is disabled/enabled", () => {
const tables = ["tableA", "tableB"]
function getDatasource(source: SourceName): Datasource {
return {
source,
type: "datasource",
isSQL: true,
}
}
function getQuery(op: Operation, fields: string[] = ["a"]): QueryJson {
return {
endpoint: { datasourceId: "", entityId: "", operation: op },
resource: {
fields,
},
}
}
it("should check for Postgres aliased status", () => {
const aliasing = new AliasTables(tables)
const datasource = getDatasource(SourceName.POSTGRES)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.CREATE), datasource)
).toEqual(true)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.READ), datasource)
).toEqual(true)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.UPDATE), datasource)
).toEqual(true)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.DELETE), datasource)
).toEqual(true)
})
it("should check for MS-SQL aliased status", () => {
const aliasing = new AliasTables(tables)
const datasource = getDatasource(SourceName.SQL_SERVER)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.CREATE), datasource)
).toEqual(false)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.READ), datasource)
).toEqual(true)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.UPDATE), datasource)
).toEqual(false)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.DELETE), datasource)
).toEqual(false)
})
it("should check for MySQL aliased status", () => {
const aliasing = new AliasTables(tables)
const datasource = getDatasource(SourceName.MYSQL)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.CREATE), datasource)
).toEqual(false)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.READ), datasource)
).toEqual(true)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.UPDATE), datasource)
).toEqual(false)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.DELETE), datasource)
).toEqual(false)
})
it("should check for Oracle aliased status", () => {
const aliasing = new AliasTables(tables)
const datasource = getDatasource(SourceName.ORACLE)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.CREATE), datasource)
).toEqual(false)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.READ), datasource)
).toEqual(true)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.UPDATE), datasource)
).toEqual(false)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.DELETE), datasource)
).toEqual(false)
})
it("should disable aliasing for non-SQL datasources", () => {
const aliasing = new AliasTables(tables)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.READ), {
source: SourceName.GOOGLE_SHEETS,
type: "datasource",
isSQL: false,
})
).toEqual(false)
})
it("should disable when no fields", () => {
const aliasing = new AliasTables(tables)
const datasource = getDatasource(SourceName.POSTGRES)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.READ, []), datasource)
).toEqual(false)
})
})
describe("check some edge cases", () => { describe("check some edge cases", () => {
const tableNames = ["hello", "world"] const tableNames = ["hello", "world"]

View file

@ -68,7 +68,7 @@
"primary": [ "primary": [
"personid" "personid"
], ],
"name": "a", "name": "persons",
"schema": { "schema": {
"year": { "year": {
"type": "number", "type": "number",

View file

@ -0,0 +1,64 @@
{
"endpoint": {
"datasourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae",
"entityId": "people",
"operation": "CREATE"
},
"resource": {
"fields": [
"a.name",
"a.age"
]
},
"filters": {},
"relationships": [],
"body": {
"name": "Test",
"age": 22
},
"extra": {
"idFilter": {
"equal": {
"name": "Test",
"age": 22
}
}
},
"meta": {
"table": {
"_id": "datasource_plus_0ed5835e5552496285df546030f7c4ae__people",
"type": "table",
"sourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae",
"sourceType": "external",
"primary": [
"name",
"age"
],
"name": "people",
"schema": {
"name": {
"type": "string",
"externalType": "varchar",
"autocolumn": false,
"name": "name",
"constraints": {
"presence": true
}
},
"age": {
"type": "number",
"externalType": "int",
"autocolumn": false,
"name": "age",
"constraints": {
"presence": false
}
}
},
"primaryDisplay": "name"
}
},
"tableAliases": {
"people": "a"
}
}

View file

@ -58,7 +58,7 @@
"primary": [ "primary": [
"personid" "personid"
], ],
"name": "a", "name": "persons",
"schema": { "schema": {
"year": { "year": {
"type": "number", "type": "number",

View file

@ -34,7 +34,7 @@
"keypartone", "keypartone",
"keyparttwo" "keyparttwo"
], ],
"name": "a", "name": "compositetable",
"schema": { "schema": {
"keyparttwo": { "keyparttwo": {
"type": "string", "type": "string",

View file

@ -49,7 +49,7 @@
"primary": [ "primary": [
"taskid" "taskid"
], ],
"name": "a", "name": "tasks",
"schema": { "schema": {
"executorid": { "executorid": {
"type": "number", "type": "number",

View file

@ -63,7 +63,7 @@
"primary": [ "primary": [
"productid" "productid"
], ],
"name": "a", "name": "products",
"schema": { "schema": {
"productname": { "productname": {
"type": "string", "type": "string",

View file

@ -53,7 +53,7 @@
"primary": [ "primary": [
"productid" "productid"
], ],
"name": "a", "name": "products",
"schema": { "schema": {
"productname": { "productname": {
"type": "string", "type": "string",

View file

@ -109,7 +109,7 @@
"primary": [ "primary": [
"taskid" "taskid"
], ],
"name": "a", "name": "tasks",
"schema": { "schema": {
"executorid": { "executorid": {
"type": "number", "type": "number",

View file

@ -66,7 +66,7 @@
"primary": [ "primary": [
"personid" "personid"
], ],
"name": "a", "name": "persons",
"schema": { "schema": {
"year": { "year": {
"type": "number", "type": "number",

View file

@ -66,7 +66,7 @@
"primary": [ "primary": [
"personid" "personid"
], ],
"name": "a", "name": "persons",
"schema": { "schema": {
"year": { "year": {
"type": "number", "type": "number",

View file

@ -11,7 +11,10 @@ import {
  import * as exporters from "../../../../api/controllers/view/exporters"
  import sdk from "../../../../sdk"
  import { handleRequest } from "../../../../api/controllers/row/external"
- import { breakExternalTableId } from "../../../../integrations/utils"
+ import {
+   breakExternalTableId,
+   breakRowIdField,
+ } from "../../../../integrations/utils"
  import { cleanExportRows } from "../utils"
  import { utils } from "@budibase/shared-core"
  import { ExportRowsParams, ExportRowsResult } from "../search"
@ -52,6 +55,15 @@ export async function search(options: SearchParams) {
      }
    }
+   // Make sure oneOf _id queries decode the Row IDs
+   if (query?.oneOf?._id) {
+     const rowIds = query.oneOf._id
+     query.oneOf._id = rowIds.map((row: string) => {
+       const ids = breakRowIdField(row)
+       return ids[0]
+     })
+   }
    try {
      const table = await sdk.tables.getTable(tableId)
      options = searchInputMapping(table, options)
@ -119,9 +131,7 @@ export async function exportRows(
      requestQuery = {
        oneOf: {
          _id: rowIds.map((row: string) => {
-           const ids = JSON.parse(
-             decodeURI(row).replace(/'/g, `"`).replace(/%2C/g, ",")
-           )
+           const ids = breakRowIdField(row)
            if (ids.length > 1) {
              throw new HTTPError(
                "Export data does not support composite keys.",

View file

@ -21,10 +21,11 @@ jest.unmock("mysql2/promise")
jest.setTimeout(30000) jest.setTimeout(30000)
describe.skip("external", () => { describe("external search", () => {
const config = new TestConfiguration() const config = new TestConfiguration()
let externalDatasource: Datasource, tableData: Table let externalDatasource: Datasource, tableData: Table
const rows: Row[] = []
beforeAll(async () => { beforeAll(async () => {
const container = await new GenericContainer("mysql") const container = await new GenericContainer("mysql")
@ -89,67 +90,81 @@ describe.skip("external", () => {
}, },
}, },
} }
const table = await config.createExternalTable({
...tableData,
sourceId: externalDatasource._id,
})
for (let i = 0; i < 10; i++) {
rows.push(
await config.createRow({
tableId: table._id,
name: generator.first(),
surname: generator.last(),
age: generator.age(),
address: generator.address(),
})
)
}
}) })
describe("search", () => { it("default search returns all the data", async () => {
const rows: Row[] = [] await config.doInContext(config.appId, async () => {
beforeAll(async () => { const tableId = config.table!._id!
const table = await config.createExternalTable({
...tableData, const searchParams: SearchParams = {
sourceId: externalDatasource._id, tableId,
}) query: {},
for (let i = 0; i < 10; i++) {
rows.push(
await config.createRow({
tableId: table._id,
name: generator.first(),
surname: generator.last(),
age: generator.age(),
address: generator.address(),
})
)
} }
const result = await search(searchParams)
expect(result.rows).toHaveLength(10)
expect(result.rows).toEqual(
expect.arrayContaining(rows.map(r => expect.objectContaining(r)))
)
}) })
})
it("default search returns all the data", async () => { it("querying by fields will always return data attribute columns", async () => {
await config.doInContext(config.appId, async () => { await config.doInContext(config.appId, async () => {
const tableId = config.table!._id! const tableId = config.table!._id!
const searchParams: SearchParams = { const searchParams: SearchParams = {
tableId, tableId,
query: {}, query: {},
} fields: ["name", "age"],
const result = await search(searchParams) }
const result = await search(searchParams)
expect(result.rows).toHaveLength(10) expect(result.rows).toHaveLength(10)
expect(result.rows).toEqual( expect(result.rows).toEqual(
expect.arrayContaining(rows.map(r => expect.objectContaining(r))) expect.arrayContaining(
rows.map(r => ({
...expectAnyExternalColsAttributes,
name: r.name,
age: r.age,
}))
) )
}) )
}) })
})
it("querying by fields will always return data attribute columns", async () => { it("will decode _id in oneOf query", async () => {
await config.doInContext(config.appId, async () => { await config.doInContext(config.appId, async () => {
const tableId = config.table!._id! const tableId = config.table!._id!
const searchParams: SearchParams = { const searchParams: SearchParams = {
tableId, tableId,
query: {}, query: {
fields: ["name", "age"], oneOf: {
} _id: ["%5B1%5D", "%5B4%5D", "%5B8%5D"],
const result = await search(searchParams) },
},
}
const result = await search(searchParams)
expect(result.rows).toHaveLength(10) expect(result.rows).toHaveLength(3)
expect(result.rows).toEqual( expect(result.rows.map(row => row.id)).toEqual([1, 4, 8])
expect.arrayContaining(
rows.map(r => ({
...expectAnyExternalColsAttributes,
name: r.name,
age: r.age,
}))
)
)
})
}) })
}) })
}) })

View file

@ -1,6 +1,5 @@
  import {
    FieldType,
-   FieldTypeSubtypes,
    SearchParams,
    Table,
    DocumentType,

View file

@ -98,7 +98,10 @@ describe("sdk >> rows >> internal", () => {
      },
    })

-   const persistedRow = await config.getRow(table._id!, response.row._id!)
+   const persistedRow = await config.api.row.get(
+     table._id!,
+     response.row._id!
+   )
    expect(persistedRow).toEqual({
      ...row,
      type: "row",
@ -157,7 +160,10 @@ describe("sdk >> rows >> internal", () => {
      },
    })

-   const persistedRow = await config.getRow(table._id!, response.row._id!)
+   const persistedRow = await config.api.row.get(
+     table._id!,
+     response.row._id!
+   )
    expect(persistedRow).toEqual({
      ...row,
      type: "row",

View file

@ -1,17 +1,51 @@
  import cloneDeep from "lodash/cloneDeep"
  import validateJs from "validate.js"
  import {
+   Datasource,
+   DatasourcePlusQueryResponse,
    FieldType,
    QueryJson,
    Row,
+   SourceName,
    Table,
    TableSchema,
-   DatasourcePlusQueryResponse,
  } from "@budibase/types"
  import { makeExternalQuery } from "../../../integrations/base/query"
  import { Format } from "../../../api/controllers/view/exporters"
  import sdk from "../.."
  import { isRelationshipColumn } from "../../../db/utils"
+ import { SqlClient } from "../../../integrations/utils"
const SQL_CLIENT_SOURCE_MAP: Record<SourceName, SqlClient | undefined> = {
[SourceName.POSTGRES]: SqlClient.POSTGRES,
[SourceName.MYSQL]: SqlClient.MY_SQL,
[SourceName.SQL_SERVER]: SqlClient.MS_SQL,
[SourceName.ORACLE]: SqlClient.ORACLE,
[SourceName.DYNAMODB]: undefined,
[SourceName.MONGODB]: undefined,
[SourceName.ELASTICSEARCH]: undefined,
[SourceName.COUCHDB]: undefined,
[SourceName.S3]: undefined,
[SourceName.AIRTABLE]: undefined,
[SourceName.ARANGODB]: undefined,
[SourceName.REST]: undefined,
[SourceName.FIRESTORE]: undefined,
[SourceName.GOOGLE_SHEETS]: undefined,
[SourceName.REDIS]: undefined,
[SourceName.SNOWFLAKE]: undefined,
[SourceName.BUDIBASE]: undefined,
}
export function getSQLClient(datasource: Datasource): SqlClient {
if (!datasource.isSQL) {
throw new Error("Cannot get SQL Client for non-SQL datasource")
}
const lookup = SQL_CLIENT_SOURCE_MAP[datasource.source]
if (lookup) {
return lookup
}
throw new Error("Unable to determine client for SQL datasource")
}
export async function getDatasourceAndQuery(
  json: QueryJson
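
A short usage sketch for getSQLClient, assuming it runs in the same module so the imports above are in scope; the cast only keeps the example compact.

// non-SQL sources throw, so callers can rely on getting a concrete SqlClient back
const pg = { source: SourceName.POSTGRES, type: "datasource", isSQL: true }
const client = getSQLClient(pg as Datasource) // SqlClient.POSTGRES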

View file

@ -3,6 +3,7 @@ import {
    Operation,
    RelationshipType,
    RenameColumn,
+   AddColumn,
    Table,
    TableRequest,
    ViewV2,
@ -32,7 +33,7 @@ import * as viewSdk from "../../views"
  export async function save(
    datasourceId: string,
    update: Table,
-   opts?: { tableId?: string; renaming?: RenameColumn }
+   opts?: { tableId?: string; renaming?: RenameColumn; adding?: AddColumn }
  ) {
    let tableToSave: TableRequest = {
      ...update,
@ -165,8 +166,17 @@ export async function save(
    // remove the rename prop
    delete tableToSave._rename

+   // if adding a new column, we need to rebuild the schema for that table to get the 'externalType' of the column
+   if (opts?.adding) {
+     datasource.entities[tableToSave.name] = (
+       await datasourceSdk.buildFilteredSchema(datasource, [tableToSave.name])
+     ).tables[tableToSave.name]
+   } else {
+     datasource.entities[tableToSave.name] = tableToSave
+   }
+
    // store it into couch now for budibase reference
-   datasource.entities[tableToSave.name] = tableToSave
    await db.put(populateExternalTableSchemas(datasource))
// Since tables are stored inside datasources, we need to notify clients // Since tables are stored inside datasources, we need to notify clients

View file

@ -299,6 +299,16 @@ export default class TestConfiguration {
      }
    }

+   withUser(user: User, f: () => Promise<void>) {
+     const oldUser = this.user
+     this.user = user
+     try {
+       return f()
+     } finally {
+       this.user = oldUser
+     }
+   }
+
    // UTILS

    _req<Req extends Record<string, any> | void, Res>(
@ -712,11 +722,6 @@ export default class TestConfiguration {
      return this.api.row.save(tableId, config)
    }

-   async getRow(tableId: string, rowId: string): Promise<Row> {
-     const res = await this.api.row.get(tableId, rowId)
-     return res.body
-   }
-
    async getRows(tableId: string) {
      if (!tableId && this.table) {
        tableId = this.table._id!
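
A usage sketch for the new withUser helper, assuming a test that has already created otherUser and a table through the existing config helpers:

// requests inside the callback run as otherUser; the original user is restored
// afterwards even if the callback throws
await config.withUser(otherUser, async () => {
  await config.api.row.fetch(table._id!)
})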

View file

@ -1,193 +1,133 @@
import { Response } from "supertest"
import { import {
App, App,
PublishResponse,
type CreateAppRequest, type CreateAppRequest,
type FetchAppDefinitionResponse, type FetchAppDefinitionResponse,
type FetchAppPackageResponse, type FetchAppPackageResponse,
} from "@budibase/types" } from "@budibase/types"
import TestConfiguration from "../TestConfiguration" import { Expectations, TestAPI } from "./base"
import { TestAPI } from "./base"
import { AppStatus } from "../../../db/utils" import { AppStatus } from "../../../db/utils"
import { constants } from "@budibase/backend-core" import { constants } from "@budibase/backend-core"
export class ApplicationAPI extends TestAPI { export class ApplicationAPI extends TestAPI {
constructor(config: TestConfiguration) { create = async (
super(config) app: CreateAppRequest,
expectations?: Expectations
): Promise<App> => {
const files = app.templateFile ? { templateFile: app.templateFile } : {}
delete app.templateFile
return await this._post<App>("/api/applications", {
fields: app,
files,
expectations,
})
} }
create = async (app: CreateAppRequest): Promise<App> => { delete = async (
const request = this.request appId: string,
.post("/api/applications") expectations?: Expectations
.set(this.config.defaultHeaders()) ): Promise<void> => {
.expect("Content-Type", /json/) await this._delete(`/api/applications/${appId}`, { expectations })
for (const key of Object.keys(app)) {
request.field(key, (app as any)[key])
}
if (app.templateFile) {
request.attach("templateFile", app.templateFile)
}
const result = await request
if (result.statusCode !== 200) {
throw new Error(JSON.stringify(result.body))
}
return result.body as App
} }
delete = async (appId: string): Promise<void> => { publish = async (appId: string): Promise<PublishResponse> => {
await this.request return await this._post<PublishResponse>(
.delete(`/api/applications/${appId}`) `/api/applications/${appId}/publish`,
.set(this.config.defaultHeaders()) {
.expect(200) // While the publish endpoint does take an :appId parameter, it doesn't
} // use it. It uses the appId from the context.
headers: {
publish = async ( [constants.Header.APP_ID]: appId,
appId: string },
): Promise<{ _id: string; status: string; appUrl: string }> => { }
// While the publish endpoint does take an :appId parameter, it doesn't )
// use it. It uses the appId from the context.
let headers = {
...this.config.defaultHeaders(),
[constants.Header.APP_ID]: appId,
}
const result = await this.request
.post(`/api/applications/${appId}/publish`)
.set(headers)
.expect("Content-Type", /json/)
.expect(200)
return result.body as { _id: string; status: string; appUrl: string }
} }
unpublish = async (appId: string): Promise<void> => { unpublish = async (appId: string): Promise<void> => {
await this.request await this._post(`/api/applications/${appId}/unpublish`, {
.post(`/api/applications/${appId}/unpublish`) expectations: { status: 204 },
.set(this.config.defaultHeaders()) })
.expect(204)
} }
sync = async ( sync = async (
appId: string, appId: string,
{ statusCode }: { statusCode: number } = { statusCode: 200 } expectations?: Expectations
): Promise<{ message: string }> => { ): Promise<{ message: string }> => {
const result = await this.request return await this._post<{ message: string }>(
.post(`/api/applications/${appId}/sync`) `/api/applications/${appId}/sync`,
.set(this.config.defaultHeaders()) { expectations }
.expect("Content-Type", /json/) )
.expect(statusCode)
return result.body
} }
getRaw = async (appId: string): Promise<Response> => { get = async (appId: string, expectations?: Expectations): Promise<App> => {
// While the appPackage endpoint does take an :appId parameter, it doesn't return await this._get<App>(`/api/applications/${appId}`, {
// use it. It uses the appId from the context. // While the get endpoint does take an :appId parameter, it doesn't use
let headers = { // it. It uses the appId from the context.
...this.config.defaultHeaders(), headers: {
[constants.Header.APP_ID]: appId, [constants.Header.APP_ID]: appId,
} },
const result = await this.request expectations,
.get(`/api/applications/${appId}/appPackage`) })
.set(headers)
.expect("Content-Type", /json/)
.expect(200)
return result
}
get = async (appId: string): Promise<App> => {
const result = await this.getRaw(appId)
return result.body.application as App
} }
getDefinition = async ( getDefinition = async (
appId: string appId: string,
expectations?: Expectations
): Promise<FetchAppDefinitionResponse> => { ): Promise<FetchAppDefinitionResponse> => {
const result = await this.request return await this._get<FetchAppDefinitionResponse>(
.get(`/api/applications/${appId}/definition`) `/api/applications/${appId}/definition`,
.set(this.config.defaultHeaders()) { expectations }
.expect("Content-Type", /json/) )
.expect(200)
return result.body as FetchAppDefinitionResponse
} }
getAppPackage = async (appId: string): Promise<FetchAppPackageResponse> => { getAppPackage = async (
const result = await this.request appId: string,
.get(`/api/applications/${appId}/appPackage`) expectations?: Expectations
.set(this.config.defaultHeaders()) ): Promise<FetchAppPackageResponse> => {
.expect("Content-Type", /json/) return await this._get<FetchAppPackageResponse>(
.expect(200) `/api/applications/${appId}/appPackage`,
return result.body { expectations }
)
} }
update = async ( update = async (
appId: string, appId: string,
app: { name?: string; url?: string } app: { name?: string; url?: string },
expectations?: Expectations
): Promise<App> => { ): Promise<App> => {
const request = this.request return await this._put<App>(`/api/applications/${appId}`, {
.put(`/api/applications/${appId}`) fields: app,
.set(this.config.defaultHeaders()) expectations,
.expect("Content-Type", /json/) })
for (const key of Object.keys(app)) {
request.field(key, (app as any)[key])
}
const result = await request
if (result.statusCode !== 200) {
throw new Error(JSON.stringify(result.body))
}
return result.body as App
} }
updateClient = async (appId: string): Promise<void> => { updateClient = async (
// While the updateClient endpoint does take an :appId parameter, it doesn't appId: string,
// use it. It uses the appId from the context. expectations?: Expectations
let headers = { ): Promise<void> => {
...this.config.defaultHeaders(), await this._post(`/api/applications/${appId}/client/update`, {
[constants.Header.APP_ID]: appId, // While the updateClient endpoint does take an :appId parameter, it doesn't
} // use it. It uses the appId from the context.
const response = await this.request headers: {
.post(`/api/applications/${appId}/client/update`) [constants.Header.APP_ID]: appId,
.set(headers) },
.expect("Content-Type", /json/) expectations,
})
if (response.statusCode !== 200) {
throw new Error(JSON.stringify(response.body))
}
} }
revertClient = async (appId: string): Promise<void> => { revertClient = async (appId: string): Promise<void> => {
// While the revertClient endpoint does take an :appId parameter, it doesn't await this._post(`/api/applications/${appId}/client/revert`, {
// use it. It uses the appId from the context. // While the revertClient endpoint does take an :appId parameter, it doesn't
let headers = { // use it. It uses the appId from the context.
...this.config.defaultHeaders(), headers: {
[constants.Header.APP_ID]: appId, [constants.Header.APP_ID]: appId,
} },
const response = await this.request })
.post(`/api/applications/${appId}/client/revert`)
.set(headers)
.expect("Content-Type", /json/)
if (response.statusCode !== 200) {
throw new Error(JSON.stringify(response.body))
}
} }
fetch = async ({ status }: { status?: AppStatus } = {}): Promise<App[]> => { fetch = async ({ status }: { status?: AppStatus } = {}): Promise<App[]> => {
let query = [] return await this._get<App[]>("/api/applications", {
if (status) { query: { status },
query.push(`status=${status}`) })
}
const result = await this.request
.get(`/api/applications${query.length ? `?${query.join("&")}` : ""}`)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
return result.body as App[]
} }
} }

View file

@ -1,35 +1,16 @@
- import {
-   APIError,
-   Datasource,
-   ProcessAttachmentResponse,
- } from "@budibase/types"
- import TestConfiguration from "../TestConfiguration"
- import { TestAPI } from "./base"
+ import { ProcessAttachmentResponse } from "@budibase/types"
+ import { Expectations, TestAPI } from "./base"
  import fs from "fs"

  export class AttachmentAPI extends TestAPI {
-   constructor(config: TestConfiguration) {
-     super(config)
-   }
-
    process = async (
      name: string,
      file: Buffer | fs.ReadStream | string,
-     { expectStatus } = { expectStatus: 200 }
+     expectations?: Expectations
    ): Promise<ProcessAttachmentResponse> => {
-     const result = await this.request
-       .post(`/api/attachments/process`)
-       .attach("file", file, name)
-       .set(this.config.defaultHeaders())
-     if (result.statusCode !== expectStatus) {
-       throw new Error(
-         `Expected status ${expectStatus} but got ${
-           result.statusCode
-         }, body: ${JSON.stringify(result.body)}`
-       )
-     }
-     return result.body
+     return await this._post(`/api/attachments/process`, {
+       files: { file: { name, file } },
+       expectations,
+     })
    }
  }

View file

@ -2,42 +2,38 @@ import {
CreateAppBackupResponse, CreateAppBackupResponse,
ImportAppBackupResponse, ImportAppBackupResponse,
} from "@budibase/types" } from "@budibase/types"
import TestConfiguration from "../TestConfiguration" import { Expectations, TestAPI } from "./base"
import { TestAPI } from "./base"
export class BackupAPI extends TestAPI { export class BackupAPI extends TestAPI {
constructor(config: TestConfiguration) { exportBasicBackup = async (appId: string, expectations?: Expectations) => {
super(config) const exp = {
} ...expectations,
headers: {
exportBasicBackup = async (appId: string) => { ...expectations?.headers,
const result = await this.request "Content-Type": "application/gzip",
.post(`/api/backups/export?appId=${appId}`) },
.set(this.config.defaultHeaders())
.expect("Content-Type", /application\/gzip/)
.expect(200)
return {
body: result.body as Buffer,
headers: result.headers,
} }
return await this._post<Buffer>(`/api/backups/export`, {
query: { appId },
expectations: exp,
})
} }
createBackup = async (appId: string) => { createBackup = async (appId: string, expectations?: Expectations) => {
const result = await this.request return await this._post<CreateAppBackupResponse>(
.post(`/api/apps/${appId}/backups`) `/api/apps/${appId}/backups`,
.set(this.config.defaultHeaders()) { expectations }
.expect("Content-Type", /json/) )
.expect(200)
return result.body as CreateAppBackupResponse
} }
waitForBackupToComplete = async (appId: string, backupId: string) => { waitForBackupToComplete = async (appId: string, backupId: string) => {
for (let i = 0; i < 10; i++) { for (let i = 0; i < 10; i++) {
await new Promise(resolve => setTimeout(resolve, 1000)) await new Promise(resolve => setTimeout(resolve, 1000))
const result = await this.request const response = await this._requestRaw(
.get(`/api/apps/${appId}/backups/${backupId}/file`) "get",
.set(this.config.defaultHeaders()) `/api/apps/${appId}/backups/${backupId}/file`
if (result.status === 200) { )
if (response.status === 200) {
return return
} }
} }
@ -46,13 +42,12 @@ export class BackupAPI extends TestAPI {
importBackup = async ( importBackup = async (
appId: string, appId: string,
backupId: string backupId: string,
expectations?: Expectations
): Promise<ImportAppBackupResponse> => { ): Promise<ImportAppBackupResponse> => {
const result = await this.request return await this._post<ImportAppBackupResponse>(
.post(`/api/apps/${appId}/backups/${backupId}/import`) `/api/apps/${appId}/backups/${backupId}/import`,
.set(this.config.defaultHeaders()) { expectations }
.expect("Content-Type", /json/) )
.expect(200)
return result.body as ImportAppBackupResponse
} }
} }

View file

@ -1,17 +1,196 @@
  import TestConfiguration from "../TestConfiguration"
- import { SuperTest, Test } from "supertest"
+ import { SuperTest, Test, Response } from "supertest"
+ import { ReadStream } from "fs"

- export interface TestAPIOpts {
-   headers?: any
+ type Headers = Record<string, string | string[] | undefined>
+ type Method = "get" | "post" | "put" | "patch" | "delete"
+
+ export interface AttachedFile {
+   name: string
+   file: Buffer | ReadStream | string
+ }
+
+ function isAttachedFile(file: any): file is AttachedFile {
+   if (file === undefined) {
+     return false
+   }
+   const attachedFile = file as AttachedFile
+   return (
+     Object.hasOwnProperty.call(attachedFile, "file") &&
+     Object.hasOwnProperty.call(attachedFile, "name")
+   )
+ }
+
+ export interface Expectations {
    status?: number
+   headers?: Record<string, string | RegExp>
+   headersNotPresent?: string[]
+   body?: Record<string, any>
+ }
+
+ export interface RequestOpts {
+   headers?: Headers
+   query?: Record<string, string | undefined>
+   body?: Record<string, any>
+   fields?: Record<string, any>
+   files?: Record<
+     string,
+     Buffer | ReadStream | string | AttachedFile | undefined
+   >
+   expectations?: Expectations
+   publicUser?: boolean
  }

  export abstract class TestAPI {
    config: TestConfiguration
    request: SuperTest<Test>

-   protected constructor(config: TestConfiguration) {
+   constructor(config: TestConfiguration) {
      this.config = config
      this.request = config.request!
    }
protected _get = async <T>(url: string, opts?: RequestOpts): Promise<T> => {
return await this._request<T>("get", url, opts)
}
protected _post = async <T>(url: string, opts?: RequestOpts): Promise<T> => {
return await this._request<T>("post", url, opts)
}
protected _put = async <T>(url: string, opts?: RequestOpts): Promise<T> => {
return await this._request<T>("put", url, opts)
}
protected _patch = async <T>(url: string, opts?: RequestOpts): Promise<T> => {
return await this._request<T>("patch", url, opts)
}
protected _delete = async <T>(
url: string,
opts?: RequestOpts
): Promise<T> => {
return await this._request<T>("delete", url, opts)
}
protected _requestRaw = async (
method: "get" | "post" | "put" | "patch" | "delete",
url: string,
opts?: RequestOpts
): Promise<Response> => {
const {
headers = {},
query = {},
body,
fields = {},
files = {},
expectations,
publicUser = false,
} = opts || {}
const { status = 200 } = expectations || {}
const expectHeaders = expectations?.headers || {}
if (status !== 204 && !expectHeaders["Content-Type"]) {
expectHeaders["Content-Type"] = /^application\/json/
}
let queryParams = []
for (const [key, value] of Object.entries(query)) {
if (value) {
queryParams.push(`${key}=${value}`)
}
}
if (queryParams.length) {
url += `?${queryParams.join("&")}`
}
const headersFn = publicUser
? this.config.publicHeaders.bind(this.config)
: this.config.defaultHeaders.bind(this.config)
let request = this.request[method](url).set(
headersFn({
"x-budibase-include-stacktrace": "true",
})
)
if (headers) {
request = request.set(headers)
}
if (body) {
request = request.send(body)
}
for (const [key, value] of Object.entries(fields)) {
request = request.field(key, value)
}
for (const [key, value] of Object.entries(files)) {
if (isAttachedFile(value)) {
request = request.attach(key, value.file, value.name)
} else {
request = request.attach(key, value as any)
}
}
if (expectations?.headers) {
for (const [key, value] of Object.entries(expectations.headers)) {
if (value === undefined) {
throw new Error(
`Got an undefined expected value for header "${key}", if you want to check for the absence of a header, use headersNotPresent`
)
}
request = request.expect(key, value as any)
}
}
return await request
}
protected _request = async <T>(
method: Method,
url: string,
opts?: RequestOpts
): Promise<T> => {
const { expectations } = opts || {}
const { status = 200 } = expectations || {}
const response = await this._requestRaw(method, url, opts)
if (response.status !== status) {
let message = `Expected status ${status} but got ${response.status}`
const stack = response.body.stack
delete response.body.stack
if (response.body) {
message += `\n\nBody:`
const body = JSON.stringify(response.body, null, 2)
for (const line of body.split("\n")) {
message += `\n⏐ ${line}`
}
}
if (stack) {
message += `\n\nStack from request handler:`
for (const line of stack.split("\n")) {
message += `\n⏐ ${line}`
}
}
throw new Error(message)
}
if (expectations?.headersNotPresent) {
for (const header of expectations.headersNotPresent) {
if (response.headers[header]) {
throw new Error(
`Expected header ${header} not to be present, found value "${response.headers[header]}"`
)
}
}
}
if (expectations?.body) {
expect(response.body).toMatchObject(expectations.body)
}
return response.body
}
}
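
A sketch of how a concrete wrapper is now written on top of these helpers; the endpoint and the Example type here are placeholders, not real Budibase routes.

import { Expectations, TestAPI } from "./base"

interface Example {
  _id?: string
  name: string
}

export class ExampleAPI extends TestAPI {
  // GET returns the parsed JSON body, already checked against `expectations`
  fetch = async (expectations?: Expectations): Promise<Example[]> => {
    return await this._get<Example[]>("/api/examples", { expectations })
  }

  // POST sends `body` as JSON and defaults to expecting a 200 response
  save = async (body: Example, expectations?: Expectations): Promise<Example> => {
    return await this._post<Example>("/api/examples", { body, expectations })
  }
}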

View file

@ -1,63 +1,48 @@
import { import {
CreateDatasourceRequest,
Datasource, Datasource,
VerifyDatasourceRequest, VerifyDatasourceRequest,
CreateDatasourceResponse,
UpdateDatasourceResponse,
UpdateDatasourceRequest,
} from "@budibase/types" } from "@budibase/types"
import TestConfiguration from "../TestConfiguration" import { Expectations, TestAPI } from "./base"
import { TestAPI } from "./base"
import supertest from "supertest"
export class DatasourceAPI extends TestAPI { export class DatasourceAPI extends TestAPI {
constructor(config: TestConfiguration) { create = async (
super(config)
}
create = async <B extends boolean = false>(
config: Datasource, config: Datasource,
{ expectations?: Expectations
expectStatus, ): Promise<Datasource> => {
rawResponse, const response = await this._post<CreateDatasourceResponse>(
}: { expectStatus?: number; rawResponse?: B } = {} `/api/datasources`,
): Promise<B extends false ? Datasource : supertest.Response> => { {
const body: CreateDatasourceRequest = { body: {
datasource: config, datasource: config,
tablesFilter: [], tablesFilter: [],
} },
const result = await this.request expectations,
.post(`/api/datasources`) }
.send(body) )
.set(this.config.defaultHeaders()) return response.datasource
.expect("Content-Type", /json/)
.expect(expectStatus || 200)
if (rawResponse) {
return result as any
}
return result.body.datasource
} }
update = async ( update = async (
datasource: Datasource, datasource: UpdateDatasourceRequest,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
): Promise<Datasource> => { ): Promise<Datasource> => {
const result = await this.request const response = await this._put<UpdateDatasourceResponse>(
.put(`/api/datasources/${datasource._id}`) `/api/datasources/${datasource._id}`,
.send(datasource) { body: datasource, expectations }
.set(this.config.defaultHeaders()) )
.expect("Content-Type", /json/) return response.datasource
.expect(expectStatus)
return result.body.datasource as Datasource
} }
verify = async ( verify = async (
data: VerifyDatasourceRequest, data: VerifyDatasourceRequest,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
) => { ) => {
const result = await this.request return await this._post(`/api/datasources/verify`, {
.post(`/api/datasources/verify`) body: data,
.send(data) expectations,
.set(this.config.defaultHeaders()) })
.expect("Content-Type", /json/)
.expect(expectStatus)
return result
} }
} }

View file

@ -1,16 +1,8 @@
- import TestConfiguration from "../TestConfiguration"
- import { TestAPI } from "./base"
+ import { Expectations, TestAPI } from "./base"
+ import { Row } from "@budibase/types"

  export class LegacyViewAPI extends TestAPI {
-   constructor(config: TestConfiguration) {
-     super(config)
-   }
-
-   get = async (id: string, { expectStatus } = { expectStatus: 200 }) => {
-     return await this.request
-       .get(`/api/views/${id}`)
-       .set(this.config.defaultHeaders())
-       .expect("Content-Type", /json/)
-       .expect(expectStatus)
+   get = async (id: string, expectations?: Expectations) => {
+     return await this._get<Row[]>(`/api/views/${id}`, { expectations })
    }
  }

View file

@ -1,52 +1,39 @@
import { AnyDocument, PermissionLevel } from "@budibase/types" import {
import TestConfiguration from "../TestConfiguration" AddPermissionRequest,
import { TestAPI } from "./base" AddPermissionResponse,
GetResourcePermsResponse,
RemovePermissionRequest,
RemovePermissionResponse,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"
export class PermissionAPI extends TestAPI { export class PermissionAPI extends TestAPI {
constructor(config: TestConfiguration) { get = async (resourceId: string, expectations?: Expectations) => {
super(config) return await this._get<GetResourcePermsResponse>(
`/api/permission/${resourceId}`,
{ expectations }
)
} }
get = async ( add = async (
resourceId: string, request: AddPermissionRequest,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
) => { ): Promise<AddPermissionResponse> => {
return this.request const { roleId, resourceId, level } = request
.get(`/api/permission/${resourceId}`) return await this._post<AddPermissionResponse>(
.set(this.config.defaultHeaders()) `/api/permission/${roleId}/${resourceId}/${level}`,
.expect("Content-Type", /json/) { expectations }
.expect(expectStatus) )
}
set = async (
{
roleId,
resourceId,
level,
}: { roleId: string; resourceId: string; level: PermissionLevel },
{ expectStatus } = { expectStatus: 200 }
): Promise<any> => {
const res = await this.request
.post(`/api/permission/${roleId}/${resourceId}/${level}`)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
return res.body
} }
revoke = async ( revoke = async (
{ request: RemovePermissionRequest,
roleId, expectations?: Expectations
resourceId,
level,
}: { roleId: string; resourceId: string; level: PermissionLevel },
{ expectStatus } = { expectStatus: 200 }
) => { ) => {
const res = await this.request const { roleId, resourceId, level } = request
.delete(`/api/permission/${roleId}/${resourceId}/${level}`) return await this._delete<RemovePermissionResponse>(
.set(this.config.defaultHeaders()) `/api/permission/${roleId}/${resourceId}/${level}`,
.expect("Content-Type", /json/) { expectations }
.expect(expectStatus) )
return res
} }
} }

View file

@ -1,60 +1,32 @@
import TestConfiguration from "../TestConfiguration"
import { import {
Query, Query,
QueryPreview, ExecuteQueryRequest,
type ExecuteQueryRequest, ExecuteQueryResponse,
type ExecuteQueryResponse, PreviewQueryRequest,
PreviewQueryResponse,
} from "@budibase/types" } from "@budibase/types"
import { TestAPI } from "./base" import { TestAPI } from "./base"
export class QueryAPI extends TestAPI { export class QueryAPI extends TestAPI {
constructor(config: TestConfiguration) {
super(config)
}
create = async (body: Query): Promise<Query> => { create = async (body: Query): Promise<Query> => {
const res = await this.request return await this._post<Query>(`/api/queries`, { body })
.post(`/api/queries`)
.set(this.config.defaultHeaders())
.send(body)
.expect("Content-Type", /json/)
if (res.status !== 200) {
throw new Error(JSON.stringify(res.body))
}
return res.body as Query
} }
execute = async ( execute = async (
queryId: string, queryId: string,
body?: ExecuteQueryRequest body?: ExecuteQueryRequest
): Promise<ExecuteQueryResponse> => { ): Promise<ExecuteQueryResponse> => {
const res = await this.request return await this._post<ExecuteQueryResponse>(
.post(`/api/v2/queries/${queryId}`) `/api/v2/queries/${queryId}`,
.set(this.config.defaultHeaders()) {
.send(body) body,
.expect("Content-Type", /json/) }
)
if (res.status !== 200) {
throw new Error(JSON.stringify(res.body))
}
return res.body
} }
previewQuery = async (queryPreview: QueryPreview) => { previewQuery = async (queryPreview: PreviewQueryRequest) => {
const res = await this.request return await this._post<PreviewQueryResponse>(`/api/queries/preview`, {
.post(`/api/queries/preview`) body: queryPreview,
.send(queryPreview) })
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
if (res.status !== 200) {
throw new Error(JSON.stringify(res.body))
}
return res.body
} }
} }

View file

@ -8,162 +8,140 @@ import {
BulkImportResponse, BulkImportResponse,
SearchRowResponse, SearchRowResponse,
SearchParams, SearchParams,
DeleteRowRequest,
DeleteRows,
DeleteRow,
ExportRowsResponse,
} from "@budibase/types" } from "@budibase/types"
import TestConfiguration from "../TestConfiguration" import { Expectations, TestAPI } from "./base"
import { TestAPI } from "./base"
export class RowAPI extends TestAPI { export class RowAPI extends TestAPI {
constructor(config: TestConfiguration) {
super(config)
}
get = async ( get = async (
sourceId: string, sourceId: string,
rowId: string, rowId: string,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
) => { ) => {
const request = this.request return await this._get<Row>(`/api/${sourceId}/rows/${rowId}`, {
.get(`/api/${sourceId}/rows/${rowId}`) expectations,
.set(this.config.defaultHeaders()) })
.expect(expectStatus)
if (expectStatus !== 404) {
request.expect("Content-Type", /json/)
}
return request
} }
getEnriched = async ( getEnriched = async (
sourceId: string, sourceId: string,
rowId: string, rowId: string,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
) => { ) => {
const request = this.request return await this._get<Row>(`/api/${sourceId}/${rowId}/enrich`, {
.get(`/api/${sourceId}/${rowId}/enrich`) expectations,
.set(this.config.defaultHeaders()) })
.expect(expectStatus)
if (expectStatus !== 404) {
request.expect("Content-Type", /json/)
}
return request
} }
save = async ( save = async (
tableId: string, tableId: string,
row: SaveRowRequest, row: SaveRowRequest,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
): Promise<Row> => { ): Promise<Row> => {
const resp = await this.request return await this._post<Row>(`/api/${tableId}/rows`, {
.post(`/api/${tableId}/rows`) body: row,
.send(row) expectations,
.set(this.config.defaultHeaders()) })
.expect("Content-Type", /json/)
if (resp.status !== expectStatus) {
throw new Error(
`Expected status ${expectStatus} but got ${
resp.status
}, body: ${JSON.stringify(resp.body)}`
)
}
return resp.body as Row
} }
validate = async ( validate = async (
sourceId: string, sourceId: string,
row: SaveRowRequest, row: SaveRowRequest,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
): Promise<ValidateResponse> => { ): Promise<ValidateResponse> => {
const resp = await this.request return await this._post<ValidateResponse>(
.post(`/api/${sourceId}/rows/validate`) `/api/${sourceId}/rows/validate`,
.send(row) {
.set(this.config.defaultHeaders()) body: row,
.expect("Content-Type", /json/) expectations,
.expect(expectStatus) }
return resp.body as ValidateResponse )
} }
patch = async ( patch = async (
sourceId: string, sourceId: string,
row: PatchRowRequest, row: PatchRowRequest,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
): Promise<Row> => { ): Promise<Row> => {
let resp = await this.request return await this._patch<Row>(`/api/${sourceId}/rows`, {
.patch(`/api/${sourceId}/rows`) body: row,
.send(row) expectations,
.set(this.config.defaultHeaders()) })
.expect("Content-Type", /json/)
if (resp.status !== expectStatus) {
throw new Error(
`Expected status ${expectStatus} but got ${
resp.status
}, body: ${JSON.stringify(resp.body)}`
)
}
return resp.body as Row
} }
delete = async ( delete = async (
sourceId: string, sourceId: string,
rows: Row | string | (Row | string)[], row: DeleteRow,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
) => { ) => {
return this.request return await this._delete<Row>(`/api/${sourceId}/rows`, {
.delete(`/api/${sourceId}/rows`) body: row,
.send(Array.isArray(rows) ? { rows } : rows) expectations,
.set(this.config.defaultHeaders()) })
.expect("Content-Type", /json/) }
.expect(expectStatus)
bulkDelete = async (
sourceId: string,
body: DeleteRows,
expectations?: Expectations
) => {
return await this._delete<Row[]>(`/api/${sourceId}/rows`, {
body,
expectations,
})
} }
fetch = async ( fetch = async (
sourceId: string, sourceId: string,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
): Promise<Row[]> => { ): Promise<Row[]> => {
const request = this.request return await this._get<Row[]>(`/api/${sourceId}/rows`, {
.get(`/api/${sourceId}/rows`) expectations,
.set(this.config.defaultHeaders()) })
.expect(expectStatus)
return (await request).body
} }
exportRows = async ( exportRows = async (
tableId: string, tableId: string,
body: ExportRowsRequest, body: ExportRowsRequest,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
) => { ) => {
const request = this.request const response = await this._requestRaw(
.post(`/api/${tableId}/rows/exportRows?format=json`) "post",
.set(this.config.defaultHeaders()) `/api/${tableId}/rows/exportRows`,
.send(body) {
.expect("Content-Type", /json/) body,
.expect(expectStatus) query: { format: "json" },
return request expectations,
}
)
return response.text
} }
bulkImport = async ( bulkImport = async (
tableId: string, tableId: string,
body: BulkImportRequest, body: BulkImportRequest,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
): Promise<BulkImportResponse> => { ): Promise<BulkImportResponse> => {
let request = this.request return await this._post<BulkImportResponse>(
.post(`/api/tables/${tableId}/import`) `/api/tables/${tableId}/import`,
.send(body) {
.set(this.config.defaultHeaders()) body,
.expect(expectStatus) expectations,
return (await request).body }
)
} }
search = async ( search = async (
sourceId: string, sourceId: string,
params?: SearchParams, params?: SearchParams,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
): Promise<SearchRowResponse> => { ): Promise<SearchRowResponse> => {
const request = this.request return await this._post<SearchRowResponse>(`/api/${sourceId}/search`, {
.post(`/api/${sourceId}/search`) body: params,
.send(params) expectations,
.set(this.config.defaultHeaders()) })
.expect(expectStatus)
return (await request).body
} }
} }

View file

@ -1,18 +1,8 @@
- import TestConfiguration from "../TestConfiguration"
  import { Screen } from "@budibase/types"
- import { TestAPI } from "./base"
+ import { Expectations, TestAPI } from "./base"

  export class ScreenAPI extends TestAPI {
-   constructor(config: TestConfiguration) {
-     super(config)
-   }
-
-   list = async (): Promise<Screen[]> => {
-     const res = await this.request
-       .get(`/api/screens`)
-       .set(this.config.defaultHeaders())
-       .expect("Content-Type", /json/)
-       .expect(200)
-     return res.body as Screen[]
+   list = async (expectations?: Expectations): Promise<Screen[]> => {
+     return await this._get<Screen[]>(`/api/screens`, { expectations })
    }
  }

View file

@ -5,74 +5,38 @@ import {
SaveTableResponse, SaveTableResponse,
Table, Table,
} from "@budibase/types" } from "@budibase/types"
import TestConfiguration from "../TestConfiguration" import { Expectations, TestAPI } from "./base"
import { TestAPI } from "./base"
export class TableAPI extends TestAPI { export class TableAPI extends TestAPI {
constructor(config: TestConfiguration) {
super(config)
}
save = async ( save = async (
data: SaveTableRequest, data: SaveTableRequest,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
): Promise<SaveTableResponse> => { ): Promise<SaveTableResponse> => {
const res = await this.request return await this._post<SaveTableResponse>("/api/tables", {
.post(`/api/tables`) body: data,
.send(data) expectations,
.set(this.config.defaultHeaders()) })
.expect("Content-Type", /json/)
if (res.status !== expectStatus) {
throw new Error(
`Expected status ${expectStatus} but got ${
res.status
} with body ${JSON.stringify(res.body)}`
)
}
return res.body
} }
fetch = async ( fetch = async (expectations?: Expectations): Promise<Table[]> => {
{ expectStatus } = { expectStatus: 200 } return await this._get<Table[]>("/api/tables", { expectations })
): Promise<Table[]> => {
const res = await this.request
.get(`/api/tables`)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
return res.body
} }
get = async ( get = async (
tableId: string, tableId: string,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
): Promise<Table> => { ): Promise<Table> => {
const res = await this.request return await this._get<Table>(`/api/tables/${tableId}`, { expectations })
.get(`/api/tables/${tableId}`)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
return res.body
} }
migrate = async ( migrate = async (
tableId: string, tableId: string,
data: MigrateRequest, data: MigrateRequest,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
): Promise<MigrateResponse> => { ): Promise<MigrateResponse> => {
const res = await this.request return await this._post<MigrateResponse>(`/api/tables/${tableId}/migrate`, {
.post(`/api/tables/${tableId}/migrate`) body: data,
.send(data) expectations,
.set(this.config.defaultHeaders()) })
if (res.status !== expectStatus) {
throw new Error(
`Expected status ${expectStatus} but got ${
res.status
} with body ${JSON.stringify(res.body)}`
)
}
return res.body
} }
} }

View file

@ -4,154 +4,79 @@ import {
Flags, Flags,
UserMetadata, UserMetadata,
} from "@budibase/types" } from "@budibase/types"
import TestConfiguration from "../TestConfiguration" import { Expectations, TestAPI } from "./base"
import { TestAPI } from "./base"
import { DocumentInsertResponse } from "@budibase/nano" import { DocumentInsertResponse } from "@budibase/nano"
export class UserAPI extends TestAPI { export class UserAPI extends TestAPI {
constructor(config: TestConfiguration) {
super(config)
}
fetch = async ( fetch = async (
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
): Promise<FetchUserMetadataResponse> => { ): Promise<FetchUserMetadataResponse> => {
const res = await this.request return await this._get<FetchUserMetadataResponse>("/api/users/metadata", {
.get(`/api/users/metadata`) expectations,
.set(this.config.defaultHeaders()) })
.expect("Content-Type", /json/)
if (res.status !== expectStatus) {
throw new Error(
`Expected status ${expectStatus} but got ${
res.status
} with body ${JSON.stringify(res.body)}`
)
}
return res.body
} }
find = async ( find = async (
id: string, id: string,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
): Promise<FindUserMetadataResponse> => { ): Promise<FindUserMetadataResponse> => {
const res = await this.request return await this._get<FindUserMetadataResponse>(
.get(`/api/users/metadata/${id}`) `/api/users/metadata/${id}`,
.set(this.config.defaultHeaders()) {
.expect("Content-Type", /json/) expectations,
}
if (res.status !== expectStatus) { )
throw new Error(
`Expected status ${expectStatus} but got ${
res.status
} with body ${JSON.stringify(res.body)}`
)
}
return res.body
} }
update = async ( update = async (
user: UserMetadata, user: UserMetadata,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
): Promise<DocumentInsertResponse> => { ): Promise<DocumentInsertResponse> => {
const res = await this.request return await this._put<DocumentInsertResponse>("/api/users/metadata", {
.put(`/api/users/metadata`) body: user,
.set(this.config.defaultHeaders()) expectations,
.send(user) })
.expect("Content-Type", /json/)
if (res.status !== expectStatus) {
throw new Error(
`Expected status ${expectStatus} but got ${
res.status
} with body ${JSON.stringify(res.body)}`
)
}
return res.body as DocumentInsertResponse
} }
updateSelf = async ( updateSelf = async (
user: UserMetadata, user: UserMetadata,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
): Promise<DocumentInsertResponse> => { ): Promise<DocumentInsertResponse> => {
const res = await this.request return await this._post<DocumentInsertResponse>(
.post(`/api/users/metadata/self`) "/api/users/metadata/self",
.set(this.config.defaultHeaders()) {
.send(user) body: user,
.expect("Content-Type", /json/) expectations,
}
if (res.status !== expectStatus) { )
throw new Error(
`Expected status ${expectStatus} but got ${
res.status
} with body ${JSON.stringify(res.body)}`
)
}
return res.body as DocumentInsertResponse
} }
destroy = async ( destroy = async (
id: string, id: string,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
): Promise<{ message: string }> => { ): Promise<{ message: string }> => {
const res = await this.request return await this._delete<{ message: string }>(
.delete(`/api/users/metadata/${id}`) `/api/users/metadata/${id}`,
.set(this.config.defaultHeaders()) {
.expect("Content-Type", /json/) expectations,
}
if (res.status !== expectStatus) { )
throw new Error(
`Expected status ${expectStatus} but got ${
res.status
} with body ${JSON.stringify(res.body)}`
)
}
return res.body as { message: string }
} }
setFlag = async ( setFlag = async (
flag: string, flag: string,
value: any, value: any,
{ expectStatus } = { expectStatus: 200 } expectations?: Expectations
): Promise<{ message: string }> => { ): Promise<{ message: string }> => {
const res = await this.request return await this._post<{ message: string }>(`/api/users/flags`, {
.post(`/api/users/flags`) body: { flag, value },
.set(this.config.defaultHeaders()) expectations,
.send({ flag, value }) })
.expect("Content-Type", /json/)
if (res.status !== expectStatus) {
throw new Error(
`Expected status ${expectStatus} but got ${
res.status
} with body ${JSON.stringify(res.body)}`
)
}
return res.body as { message: string }
} }
getFlags = async ( getFlags = async (expectations?: Expectations): Promise<Flags> => {
{ expectStatus } = { expectStatus: 200 } return await this._get<Flags>(`/api/users/flags`, {
): Promise<Flags> => { expectations,
const res = await this.request })
.get(`/api/users/flags`)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
if (res.status !== expectStatus) {
throw new Error(
`Expected status ${expectStatus} but got ${
res.status
} with body ${JSON.stringify(res.body)}`
)
}
return res.body as Flags
} }
} }

View file

@ -3,21 +3,16 @@ import {
UpdateViewRequest, UpdateViewRequest,
ViewV2, ViewV2,
SearchViewRowRequest, SearchViewRowRequest,
PaginatedSearchRowResponse,
} from "@budibase/types" } from "@budibase/types"
import TestConfiguration from "../TestConfiguration" import { Expectations, TestAPI } from "./base"
import { TestAPI } from "./base"
import { generator } from "@budibase/backend-core/tests" import { generator } from "@budibase/backend-core/tests"
import { Response } from "superagent"
import sdk from "../../../sdk" import sdk from "../../../sdk"
export class ViewV2API extends TestAPI { export class ViewV2API extends TestAPI {
constructor(config: TestConfiguration) {
super(config)
}
create = async ( create = async (
viewData?: Partial<CreateViewRequest>, viewData?: Partial<CreateViewRequest>,
{ expectStatus } = { expectStatus: 201 } expectations?: Expectations
): Promise<ViewV2> => { ): Promise<ViewV2> => {
let tableId = viewData?.tableId let tableId = viewData?.tableId
if (!tableId && !this.config.table) { if (!tableId && !this.config.table) {
@ -30,43 +25,36 @@ export class ViewV2API extends TestAPI {
name: generator.guid(), name: generator.guid(),
...viewData, ...viewData,
} }
const result = await this.request
.post(`/api/v2/views`) const exp: Expectations = {
.send(view) status: 201,
.set(this.config.defaultHeaders()) ...expectations,
.expect("Content-Type", /json/) }
.expect(expectStatus)
return result.body.data as ViewV2 const resp = await this._post<{ data: ViewV2 }>("/api/v2/views", {
body: view,
expectations: exp,
})
return resp.data
} }
update = async ( update = async (
view: UpdateViewRequest, view: UpdateViewRequest,
{ expectations?: Expectations
expectStatus,
handleResponse,
}: {
expectStatus: number
handleResponse?: (response: Response) => void
} = { expectStatus: 200 }
): Promise<ViewV2> => { ): Promise<ViewV2> => {
const result = await this.request const resp = await this._put<{ data: ViewV2 }>(`/api/v2/views/${view.id}`, {
.put(`/api/v2/views/${view.id}`) body: view,
.send(view) expectations,
.set(this.config.defaultHeaders()) })
.expect("Content-Type", /json/) return resp.data
.expect(expectStatus)
if (handleResponse) {
handleResponse(result)
}
return result.body.data as ViewV2
} }
delete = async (viewId: string, { expectStatus } = { expectStatus: 204 }) => { delete = async (viewId: string, expectations?: Expectations) => {
return this.request const exp = {
.delete(`/api/v2/views/${viewId}`) status: 204,
.set(this.config.defaultHeaders()) ...expectations,
.expect(expectStatus) }
return await this._delete(`/api/v2/views/${viewId}`, { expectations: exp })
} }
get = async (viewId: string) => { get = async (viewId: string) => {
@ -78,17 +66,29 @@ export class ViewV2API extends TestAPI {
search = async ( search = async (
viewId: string, viewId: string,
params?: SearchViewRowRequest, params?: SearchViewRowRequest,
{ expectStatus = 200, usePublicUser = false } = {} expectations?: Expectations
) => { ) => {
return this.request return await this._post<PaginatedSearchRowResponse>(
.post(`/api/v2/views/${viewId}/search`) `/api/v2/views/${viewId}/search`,
.send(params) {
.set( body: params,
usePublicUser expectations,
? this.config.publicHeaders() }
: this.config.defaultHeaders() )
) }
.expect("Content-Type", /json/)
.expect(expectStatus) publicSearch = async (
viewId: string,
params?: SearchViewRowRequest,
expectations?: Expectations
) => {
return await this._post<PaginatedSearchRowResponse>(
`/api/v2/views/${viewId}/search`,
{
body: params,
expectations,
publicUser: true,
}
)
} }
} }

View file

@ -1,4 +1,4 @@
- import { PlanType } from "../../../sdk"
+ import { PermissionLevel, PlanType } from "../../../sdk"

  export interface ResourcePermissionInfo {
    role: string
@ -14,3 +14,21 @@ export interface GetResourcePermsResponse {
  export interface GetDependantResourcesResponse {
    resourceByType?: Record<string, number>
  }
export interface AddedPermission {
_id?: string
rev?: string
error?: string
reason?: string
}
export type AddPermissionResponse = AddedPermission[]
export interface AddPermissionRequest {
roleId: string
resourceId: string
level: PermissionLevel
}
export interface RemovePermissionRequest extends AddPermissionRequest {}
export interface RemovePermissionResponse extends AddPermissionResponse {}

View file

@ -1,6 +1,6 @@
  import { SearchFilters, SearchParams } from "../../../sdk"
  import { Row } from "../../../documents"
- import { SortOrder } from "../../../api"
+ import { PaginationResponse, SortOrder } from "../../../api"
  import { ReadStream } from "fs"

  export interface SaveRowRequest extends Row {}
@ -31,6 +31,10 @@ export interface SearchRowResponse {
    rows: any[]
  }
export interface PaginatedSearchRowResponse
extends SearchRowResponse,
PaginationResponse {}
export interface ExportRowsRequest {
  rows: string[]
  columns?: string[]

View file

@ -27,3 +27,9 @@ export interface FetchAppPackageResponse {
  clientLibPath: string
  hasLock: boolean
}
export interface PublishResponse {
_id: string
status: string
appUrl: string
}

View file

@ -13,3 +13,5 @@ export * from "./searchFilter"
export * from "./cookies" export * from "./cookies"
export * from "./automation" export * from "./automation"
export * from "./layout" export * from "./layout"
export * from "./query"
export * from "./role"

View file

@ -0,0 +1,20 @@
import { QueryPreview, QuerySchema } from "../../documents"
export interface PreviewQueryRequest extends QueryPreview {}
export interface PreviewQueryResponse {
rows: any[]
nestedSchemaFields: { [key: string]: { [key: string]: string | QuerySchema } }
schema: { [key: string]: string | QuerySchema }
info: any
extra: any
}
export interface ExecuteQueryRequest {
parameters?: { [key: string]: string }
pagination?: any
}
export interface ExecuteQueryResponse {
data: Record<string, any>[]
}
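
A shape check for the request/response pair above, as used by the v2 query execute endpoint elsewhere in this change; the query id and parameter values are placeholders.

const request: ExecuteQueryRequest = {
  parameters: { city: "Dublin" },
}
// const response: ExecuteQueryResponse = await config.api.query.execute("query_123", request)
// response.data would then be an array of plain records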

View file

@ -0,0 +1,22 @@
import { Role } from "../../documents"
export interface SaveRoleRequest {
_id?: string
_rev?: string
name: string
inherits: string
permissionId: string
version: string
}
export interface SaveRoleResponse extends Role {}
export interface FindRoleResponse extends Role {}
export type FetchRolesResponse = Role[]
export interface DestroyRoleResponse {
message: string
}
export type AccessibleRolesResponse = string[]
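
An example payload for SaveRoleRequest as defined above; every value here is a placeholder, not a known built-in role or permission id.

const newRole: SaveRoleRequest = {
  name: "Manager",       // display name for the new role
  inherits: "BASIC",     // placeholder parent role id
  permissionId: "write", // placeholder permission id
  version: "name",       // placeholder version flag
}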

View file

@ -62,22 +62,6 @@ export interface PaginationValues {
    limit: number | null
  }

- export interface PreviewQueryRequest extends Omit<Query, "parameters"> {
-   parameters: {}
-   flags?: {
-     urlName?: boolean
-   }
- }
-
- export interface ExecuteQueryRequest {
-   parameters?: { [key: string]: string }
-   pagination?: any
- }
-
- export interface ExecuteQueryResponse {
-   data: Row[]
- }
-
  export enum HttpMethod {
    GET = "GET",
    POST = "POST",

View file

@ -1,6 +1,6 @@
  import { Document } from "../../document"
  import { View, ViewV2 } from "../view"
- import { RenameColumn } from "../../../sdk"
+ import { AddColumn, RenameColumn } from "../../../sdk"
  import { TableSchema } from "./schema"

  export const INTERNAL_TABLE_SOURCE_ID = "bb_internal"
@ -29,5 +29,6 @@ export interface Table extends Document {
  export interface TableRequest extends Table {
    _rename?: RenameColumn
+   _add?: AddColumn
    created?: boolean
  }

View file

@ -60,6 +60,10 @@ export interface RenameColumn {
  updated: string
}
export interface AddColumn {
name: string
}
export interface RelationshipsJson {
  through?: string
  from?: string