Merge branch 'master' of github.com:budibase/budibase into budi-7710-user-groups-do-not-fully-support-custom-roles-4

Sam Rose 2024-03-04 16:43:37 +00:00
commit ca4884e9cc
15 changed files with 540 additions and 61 deletions

@@ -1 +1 @@
Subproject commit 19f7a5829f4d23cbc694136e45d94482a59a475a
Subproject commit 0c050591c21d3b67dc0c9225d60cc9e2324c8dac

@@ -147,6 +147,12 @@ export function createTablesStore() {
if (indexes) {
draft.indexes = indexes
}
// Add object to indicate if column is being added
if (draft.schema[field.name] === undefined) {
draft._add = {
name: field.name,
}
}
draft.schema = {
...draft.schema,
[field.name]: cloneDeep(field),

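To make the new `_add` marker concrete, the snippet below is a standalone sketch (hypothetical helper name, simplified types, not the actual Svelte store) of the rule applied above: the draft only carries `_add` when the saved field does not already exist in the table schema, i.e. when a column is genuinely being added rather than edited.

```typescript
// Sketch only: simplified types, hypothetical markNewColumn helper
type FieldSchema = { name: string; type: string }
type TableDraft = {
  schema: Record<string, FieldSchema>
  _add?: { name: string }
}

function markNewColumn(draft: TableDraft, field: FieldSchema): TableDraft {
  // A column is "new" when the schema has no entry for it yet
  const isNewColumn = draft.schema[field.name] === undefined
  return {
    ...draft,
    // Signal to the server that this save adds a column
    ...(isNewColumn ? { _add: { name: field.name } } : {}),
    schema: { ...draft.schema, [field.name]: { ...field } },
  }
}

// Adding "age" to a table that currently only has "name"
const draft: TableDraft = {
  schema: { name: { name: "name", type: "string" } },
}
console.log(markNewColumn(draft, { name: "age", type: "number" })._add)
// -> { name: "age" }
```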

@@ -59,13 +59,13 @@
isReadonly: () => readonly,
getType: () => column.schema.type,
getValue: () => row[column.name],
setValue: (value, options = { save: true }) => {
setValue: (value, options = { apply: true }) => {
validation.actions.setError(cellId, null)
updateValue({
rowId: row._id,
column: column.name,
value,
save: options?.save,
apply: options?.apply,
})
},
}

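The cell API's `save` option is renamed to `apply` in this hunk. The sketch below (assumed types and hypothetical call sites, not the real grid components) spells out the contract relied on by the keyboard handling in the next hunk: `apply: false` only stashes the value locally, while the default also persists the pending change.

```typescript
// Sketch only: assumed types, hypothetical handlers
type SetValueOptions = { apply?: boolean }
interface FocusedCellAPI {
  setValue: (value: unknown, options?: SetValueOptions) => void
  focus: () => void
}

// Type-ahead: stash the keystroke without saving, then focus the editor
export function startEditingWithKey(cell: FocusedCellAPI, key: string) {
  cell.setValue(key, { apply: false })
  cell.focus()
}

// Explicit commit: stash and persist in one call
export function commitValue(cell: FocusedCellAPI, value: unknown) {
  cell.setValue(value, { apply: true })
}
```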

@@ -217,14 +217,14 @@
const type = $focusedCellAPI.getType()
if (type === "number" && keyCodeIsNumber(keyCode)) {
// Update the value locally but don't save it yet
$focusedCellAPI.setValue(parseInt(key), { save: false })
$focusedCellAPI.setValue(parseInt(key), { apply: false })
$focusedCellAPI.focus()
} else if (
["string", "barcodeqr", "longform"].includes(type) &&
(keyCodeIsLetter(keyCode) || keyCodeIsNumber(keyCode))
) {
// Update the value locally but don't save it yet
$focusedCellAPI.setValue(key, { save: false })
$focusedCellAPI.setValue(key, { apply: false })
$focusedCellAPI.focus()
}
}

@@ -327,29 +327,31 @@ export const createActions = context => {
get(fetch)?.getInitialData()
}
// Patches a row with some changes
const updateRow = async (rowId, changes, options = { save: true }) => {
// Checks if a changeset for a row actually mutates the row or not
const changesAreValid = (row, changes) => {
const columns = Object.keys(changes || {})
if (!row || !columns.length) {
return false
}
// Ensure there is at least 1 column that creates a difference
return columns.some(column => row[column] !== changes[column])
}
// Patches a row with some changes in local state, and returns whether a
// valid pending change was made or not
const stashRowChanges = (rowId, changes) => {
const $rows = get(rows)
const $rowLookupMap = get(rowLookupMap)
const index = $rowLookupMap[rowId]
const row = $rows[index]
if (index == null || !Object.keys(changes || {}).length) {
return
// Check this is a valid change
if (!row || !changesAreValid(row, changes)) {
return false
}
// Abandon if no changes
let same = true
for (let column of Object.keys(changes)) {
if (row[column] !== changes[column]) {
same = false
break
}
}
if (same) {
return
}
// Immediately update state so that the change is reflected
// Add change to cache
rowChangeCache.update(state => ({
...state,
[rowId]: {
@@ -357,26 +359,30 @@ export const createActions = context => {
...changes,
},
}))
return true
}
// Stop here if we don't want to persist the change
if (!options?.save) {
// Saves any pending changes to a row
const applyRowChanges = async rowId => {
const $rows = get(rows)
const $rowLookupMap = get(rowLookupMap)
const index = $rowLookupMap[rowId]
const row = $rows[index]
if (row == null) {
return
}
// Save change
try {
inProgressChanges.update(state => ({
...state,
[rowId]: true,
}))
// Mark as in progress
inProgressChanges.update(state => ({ ...state, [rowId]: true }))
// Update row
const saved = await datasource.actions.updateRow({
...cleanRow(row),
...get(rowChangeCache)[rowId],
})
const changes = get(rowChangeCache)[rowId]
const newRow = { ...cleanRow(row), ...changes }
const saved = await datasource.actions.updateRow(newRow)
// Update state after a successful change
// Update row state after a successful change
if (saved?._id) {
rows.update(state => {
state[index] = saved
@@ -386,6 +392,8 @@ export const createActions = context => {
// Handle users table edge case
await refreshRow(saved.id)
}
// Wipe row change cache now that we've saved the row
rowChangeCache.update(state => {
delete state[rowId]
return state
@@ -393,15 +401,17 @@ export const createActions = context => {
} catch (error) {
handleValidationError(rowId, error)
}
inProgressChanges.update(state => ({
...state,
[rowId]: false,
}))
// Mark as completed
inProgressChanges.update(state => ({ ...state, [rowId]: false }))
}
// Updates a value of a row
const updateValue = async ({ rowId, column, value, save = true }) => {
return await updateRow(rowId, { [column]: value }, { save })
const updateValue = async ({ rowId, column, value, apply = true }) => {
const success = stashRowChanges(rowId, { [column]: value })
if (success && apply) {
await applyRowChanges(rowId)
}
}
// Deletes an array of rows
@@ -411,9 +421,7 @@ export const createActions = context => {
}
// Actually delete rows
rowsToDelete.forEach(row => {
delete row.__idx
})
rowsToDelete.forEach(row => delete row.__idx)
await datasource.actions.deleteRows(rowsToDelete)
// Update state
@@ -433,7 +441,7 @@ export const createActions = context => {
newRow = newRows[i]
// Ensure we have a unique _id.
// This means generating one for non DS+, overriting any that may already
// This means generating one for non DS+, overwriting any that may already
// exist as we cannot allow duplicates.
if (!$isDatasourcePlus) {
newRow._id = Helpers.uuid()
@@ -494,7 +502,7 @@ export const createActions = context => {
duplicateRow,
getRow,
updateValue,
updateRow,
applyRowChanges,
deleteRows,
hasRow,
loadNextPage,
@@ -508,7 +516,14 @@ export const createActions = context => {
}
export const initialise = context => {
const { rowChangeCache, inProgressChanges, previousFocusedRowId } = context
const {
rowChangeCache,
inProgressChanges,
previousFocusedRowId,
previousFocusedCellId,
rows,
validation,
} = context
// Wipe the row change cache when changing row
previousFocusedRowId.subscribe(id => {
@@ -519,4 +534,15 @@ export const initialise = context => {
})
}
})
// Ensure any unsaved changes are saved when changing cell
previousFocusedCellId.subscribe(async id => {
const rowId = id?.split("-")[0]
const hasErrors = validation.actions.rowHasErrors(rowId)
const hasChanges = Object.keys(get(rowChangeCache)[rowId] || {}).length > 0
const isSavingChanges = get(inProgressChanges)[rowId]
if (rowId && !hasErrors && hasChanges && !isSavingChanges) {
await rows.actions.applyRowChanges(rowId)
}
})
}

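Taken together, the changes in this file split row updates into a stash step and an apply step. The sketch below is a framework-free approximation of that shape (plain Map/Set instead of Svelte stores, and `persistRow` standing in for `datasource.actions.updateRow`):

```typescript
// Sketch only: in-memory stand-ins for the grid stores
type Row = { _id: string; [column: string]: unknown }
type Changes = Record<string, unknown>

const rowChangeCache = new Map<string, Changes>()
const inProgress = new Set<string>()

// A changeset is only valid if it mutates at least one column
function changesAreValid(row: Row, changes: Changes): boolean {
  const columns = Object.keys(changes ?? {})
  return columns.length > 0 && columns.some(c => row[c] !== changes[c])
}

// Stash changes locally; report whether a valid pending change was made
function stashRowChanges(row: Row, changes: Changes): boolean {
  if (!changesAreValid(row, changes)) return false
  rowChangeCache.set(row._id, { ...rowChangeCache.get(row._id), ...changes })
  return true
}

// Persist whatever is stashed for the row, then clear its cache entry
async function applyRowChanges(
  row: Row,
  persistRow: (row: Row) => Promise<Row>
): Promise<void> {
  const changes = rowChangeCache.get(row._id)
  if (!changes || inProgress.has(row._id)) return
  inProgress.add(row._id)
  try {
    await persistRow({ ...row, ...changes })
    rowChangeCache.delete(row._id)
  } finally {
    inProgress.delete(row._id)
  }
}
```

In the real store the apply step is also triggered by the new `previousFocusedCellId` subscription at the end of this file, but only when the row has no validation errors and no save is already in flight.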

@@ -16,6 +16,7 @@ export const createStores = context => {
const hoveredRowId = writable(null)
const rowHeight = writable(get(props).fixedRowHeight || DefaultRowHeight)
const previousFocusedRowId = writable(null)
const previousFocusedCellId = writable(null)
const gridFocused = writable(false)
const isDragging = writable(false)
const buttonColumnWidth = writable(0)
@@ -48,6 +49,7 @@ export const createStores = context => {
focusedCellAPI,
focusedRowId,
previousFocusedRowId,
previousFocusedCellId,
hoveredRowId,
rowHeight,
gridFocused,
@@ -129,6 +131,7 @@ export const initialise = context => {
const {
focusedRowId,
previousFocusedRowId,
previousFocusedCellId,
rows,
focusedCellId,
selectedRows,
@@ -181,6 +184,13 @@
lastFocusedRowId = id
})
// Remember the last focused cell ID so that we can store the previous one
let lastFocusedCellId = null
focusedCellId.subscribe(id => {
previousFocusedCellId.set(lastFocusedCellId)
lastFocusedCellId = id
})
// Remove hovered row when a cell is selected
focusedCellId.subscribe(cell => {
if (cell && get(hoveredRowId)) {

@@ -1,8 +1,23 @@
import { writable, get } from "svelte/store"
import { writable, get, derived } from "svelte/store"
// Normally we would break out actions into the explicit "createActions"
// function, but for validation all these actions are pure so can go into
// "createStores" instead to make dependency ordering simpler
export const createStores = () => {
const validation = writable({})
// Derive which rows have errors so that we can use that info later
const rowErrorMap = derived(validation, $validation => {
let map = {}
Object.entries($validation).forEach(([key, error]) => {
// Extract row ID from all errored cell IDs
if (error) {
map[key.split("-")[0]] = true
}
})
return map
})
const setError = (cellId, error) => {
if (!cellId) {
return
@@ -13,11 +28,16 @@ export const createStores = () => {
}))
}
const rowHasErrors = rowId => {
return get(rowErrorMap)[rowId]
}
return {
validation: {
...validation,
actions: {
setError,
rowHasErrors,
},
},
}

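The derived `rowErrorMap` relies on cell error keys following a `<rowId>-<columnName>` convention, which is what the `split("-")[0]` above extracts. The sketch below walks through that derivation with a made-up row ID:

```typescript
// Sketch only: the row ID and column names are illustrative
const $validation: Record<string, string | null> = {
  "ro_1a2b3c-name": "Required field",
  "ro_1a2b3c-age": null,
}

const rowErrorMap: Record<string, boolean> = {}
for (const [cellId, error] of Object.entries($validation)) {
  if (error) {
    // Everything before the first "-" is treated as the row ID
    rowErrorMap[cellId.split("-")[0]] = true
  }
}

console.log(rowErrorMap) // -> { ro_1a2b3c: true }, so rowHasErrors("ro_1a2b3c") is truthy
```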

@@ -6,6 +6,7 @@ import {
BulkImportRequest,
BulkImportResponse,
Operation,
RenameColumn,
SaveTableRequest,
SaveTableResponse,
Table,
@@ -25,9 +26,12 @@ function getDatasourceId(table: Table) {
return breakExternalTableId(table._id).datasourceId
}
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
export async function save(
ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
renaming?: RenameColumn
) {
const inputs = ctx.request.body
const renaming = inputs?._rename
const adding = inputs?._add
// can't do this right now
delete inputs.rows
const tableId = ctx.request.body._id
@@ -40,7 +44,7 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
const { datasource, table } = await sdk.tables.external.save(
datasourceId!,
inputs,
{ tableId, renaming }
{ tableId, renaming, adding }
)
builderSocket?.emitDatasourceUpdate(ctx, datasource)
return table

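For illustration, a hypothetical `POST /api/tables` body for an external table that has just gained a column might look like the sketch below; only `_add` is new in this change, and the ID format and concrete values are assumptions loosely based on the tests later in this diff.

```typescript
// Sketch only: hypothetical payload, assumed ID format and field values
const saveTableRequest = {
  _id: "<datasourceId>__mytable", // external table IDs combine datasource ID and table name
  name: "mytable",
  type: "table",
  sourceType: "external",
  primary: ["id"],
  schema: {
    id: { name: "id", type: "number", autocolumn: true },
    new_column: { name: "new_column", type: "number" },
  },
  // Names the newly added column so the external SDK can rebuild this
  // table's schema and pick up the column's externalType
  _add: { name: "new_column" },
}
```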

@@ -74,8 +74,15 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
const appId = ctx.appId
const table = ctx.request.body
const isImport = table.rows
const renaming = ctx.request.body._rename
let savedTable = await pickApi({ table }).save(ctx)
const api = pickApi({ table })
// do not pass _rename or _add if saving to CouchDB
if (api === internal) {
delete ctx.request.body._add
delete ctx.request.body._rename
}
let savedTable = await api.save(ctx, renaming)
if (!table._id) {
await events.table.created(savedTable)
savedTable = sdk.tables.enrichViewSchemas(savedTable)

@@ -12,11 +12,12 @@ import {
} from "@budibase/types"
import sdk from "../../../sdk"
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
export async function save(
ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
renaming?: RenameColumn
) {
const { rows, ...rest } = ctx.request.body
let tableToSave: Table & {
_rename?: RenameColumn
} = {
let tableToSave: Table = {
_id: generateTableID(),
...rest,
// Ensure these fields are populated, even if not sent in the request
@@ -28,15 +29,12 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
tableToSave.views = {}
}
const renaming = tableToSave._rename
delete tableToSave._rename
try {
const { table } = await sdk.tables.internal.save(tableToSave, {
user: ctx.user,
rowsToImport: rows,
tableId: ctx.request.body._id,
renaming: renaming,
renaming,
})
return table

@@ -26,6 +26,7 @@ import { TableToBuild } from "../../../tests/utilities/TestConfiguration"
tk.freeze(mocks.date.MOCK_DATE)
const { basicTable } = setup.structures
const ISO_REGEX_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/
describe("/tables", () => {
let request = setup.getRequest()
@@ -285,6 +286,35 @@ describe("/tables", () => {
expect(res.body.schema.roleId).toBeDefined()
})
})
it("should add a new column for an internal DB table", async () => {
const saveTableRequest: SaveTableRequest = {
_add: {
name: "NEW_COLUMN",
},
...basicTable(),
}
const response = await request
.post(`/api/tables`)
.send(saveTableRequest)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
const expectedResponse = {
...saveTableRequest,
_rev: expect.stringMatching(/^\d-.+/),
_id: expect.stringMatching(/^ta_.+/),
createdAt: expect.stringMatching(ISO_REGEX_PATTERN),
updatedAt: expect.stringMatching(ISO_REGEX_PATTERN),
views: {},
}
delete expectedResponse._add
expect(response.status).toBe(200)
expect(response.body).toEqual(expectedResponse)
})
})
describe("import", () => {

@@ -0,0 +1,363 @@
import fetch from "node-fetch"
import {
generateMakeRequest,
MakeRequestResponse,
} from "../api/routes/public/tests/utils"
import { v4 as uuidv4 } from "uuid"
import * as setup from "../api/routes/tests/utilities"
import {
Datasource,
FieldType,
Table,
TableRequest,
TableSourceType,
} from "@budibase/types"
import _ from "lodash"
import { databaseTestProviders } from "../integrations/tests/utils"
import mysql from "mysql2/promise"
import { builderSocket } from "../websockets"
// @ts-ignore
fetch.mockSearch()
const config = setup.getConfig()!
jest.unmock("mysql2/promise")
jest.mock("../websockets", () => ({
clientAppSocket: jest.fn(),
gridAppSocket: jest.fn(),
initialise: jest.fn(),
builderSocket: {
emitTableUpdate: jest.fn(),
emitTableDeletion: jest.fn(),
emitDatasourceUpdate: jest.fn(),
emitDatasourceDeletion: jest.fn(),
emitScreenUpdate: jest.fn(),
emitAppMetadataUpdate: jest.fn(),
emitAppPublish: jest.fn(),
},
}))
describe("mysql integrations", () => {
let makeRequest: MakeRequestResponse,
mysqlDatasource: Datasource,
primaryMySqlTable: Table
beforeAll(async () => {
await config.init()
const apiKey = await config.generateApiKey()
makeRequest = generateMakeRequest(apiKey, true)
mysqlDatasource = await config.api.datasource.create(
await databaseTestProviders.mysql.datasource()
)
})
afterAll(async () => {
await databaseTestProviders.mysql.stop()
})
beforeEach(async () => {
primaryMySqlTable = await config.createTable({
name: uuidv4(),
type: "table",
primary: ["id"],
schema: {
id: {
name: "id",
type: FieldType.AUTO,
autocolumn: true,
},
name: {
name: "name",
type: FieldType.STRING,
},
description: {
name: "description",
type: FieldType.STRING,
},
value: {
name: "value",
type: FieldType.NUMBER,
},
},
sourceId: mysqlDatasource._id,
sourceType: TableSourceType.EXTERNAL,
})
})
afterAll(config.end)
it("validate table schema", async () => {
const res = await makeRequest(
"get",
`/api/datasources/${mysqlDatasource._id}`
)
expect(res.status).toBe(200)
expect(res.body).toEqual({
config: {
database: "mysql",
host: mysqlDatasource.config!.host,
password: "--secret-value--",
port: mysqlDatasource.config!.port,
user: "root",
},
plus: true,
source: "MYSQL",
type: "datasource_plus",
_id: expect.any(String),
_rev: expect.any(String),
createdAt: expect.any(String),
updatedAt: expect.any(String),
entities: expect.any(Object),
})
})
describe("POST /api/datasources/verify", () => {
it("should be able to verify the connection", async () => {
await config.api.datasource.verify(
{
datasource: await databaseTestProviders.mysql.datasource(),
},
{
body: {
connected: true,
},
}
)
})
it("should state an invalid datasource cannot connect", async () => {
const dbConfig = await databaseTestProviders.mysql.datasource()
await config.api.datasource.verify(
{
datasource: {
...dbConfig,
config: {
...dbConfig.config,
password: "wrongpassword",
},
},
},
{
body: {
connected: false,
error:
"Access denied for the specified user. User does not have the necessary privileges or the provided credentials are incorrect. Please verify the credentials, and ensure that the user has appropriate permissions.",
},
}
)
})
})
describe("POST /api/datasources/info", () => {
it("should fetch information about mysql datasource", async () => {
const primaryName = primaryMySqlTable.name
const response = await makeRequest("post", "/api/datasources/info", {
datasource: mysqlDatasource,
})
expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined()
expect(response.body.tableNames.indexOf(primaryName)).not.toBe(-1)
})
})
describe("Integration compatibility with mysql search_path", () => {
let client: mysql.Connection, pathDatasource: Datasource
const database = "test1"
const database2 = "test-2"
beforeAll(async () => {
const dsConfig = await databaseTestProviders.mysql.datasource()
const dbConfig = dsConfig.config!
client = await mysql.createConnection(dbConfig)
await client.query(`CREATE DATABASE \`${database}\`;`)
await client.query(`CREATE DATABASE \`${database2}\`;`)
const pathConfig: any = {
...dsConfig,
config: {
...dbConfig,
database,
},
}
pathDatasource = await config.api.datasource.create(pathConfig)
})
afterAll(async () => {
await client.query(`DROP DATABASE \`${database}\`;`)
await client.query(`DROP DATABASE \`${database2}\`;`)
await client.end()
})
it("discovers tables from any schema in search path", async () => {
await client.query(
`CREATE TABLE \`${database}\`.table1 (id1 SERIAL PRIMARY KEY);`
)
const response = await makeRequest("post", "/api/datasources/info", {
datasource: pathDatasource,
})
expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined()
expect(response.body.tableNames).toEqual(
expect.arrayContaining(["table1"])
)
})
it("does not mix columns from different tables", async () => {
const repeated_table_name = "table_same_name"
await client.query(
`CREATE TABLE \`${database}\`.${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
)
await client.query(
`CREATE TABLE \`${database2}\`.${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
)
const response = await makeRequest(
"post",
`/api/datasources/${pathDatasource._id}/schema`,
{
tablesFilter: [repeated_table_name],
}
)
expect(response.status).toBe(200)
expect(
response.body.datasource.entities[repeated_table_name].schema
).toBeDefined()
const schema =
response.body.datasource.entities[repeated_table_name].schema
expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
})
})
describe("POST /api/tables/", () => {
let client: mysql.Connection
const emitDatasourceUpdateMock = jest.fn()
beforeEach(async () => {
client = await mysql.createConnection(
(
await databaseTestProviders.mysql.datasource()
).config!
)
mysqlDatasource = await config.api.datasource.create(
await databaseTestProviders.mysql.datasource()
)
})
afterEach(async () => {
await client.end()
})
it("will emit the datasource entity schema with externalType to the front-end when adding a new column", async () => {
const addColumnToTable: TableRequest = {
type: "table",
sourceType: TableSourceType.EXTERNAL,
name: "table",
sourceId: mysqlDatasource._id!,
primary: ["id"],
schema: {
id: {
type: FieldType.AUTO,
name: "id",
autocolumn: true,
},
new_column: {
type: FieldType.NUMBER,
name: "new_column",
},
},
_add: {
name: "new_column",
},
}
jest
.spyOn(builderSocket!, "emitDatasourceUpdate")
.mockImplementation(emitDatasourceUpdateMock)
await makeRequest("post", "/api/tables/", addColumnToTable)
const expectedTable: TableRequest = {
...addColumnToTable,
schema: {
id: {
type: FieldType.NUMBER,
name: "id",
autocolumn: true,
constraints: {
presence: false,
},
externalType: "int unsigned",
},
new_column: {
type: FieldType.NUMBER,
name: "new_column",
autocolumn: false,
constraints: {
presence: false,
},
externalType: "float(8,2)",
},
},
created: true,
_id: `${mysqlDatasource._id}__table`,
}
delete expectedTable._add
expect(emitDatasourceUpdateMock).toBeCalledTimes(1)
const emittedDatasource: Datasource =
emitDatasourceUpdateMock.mock.calls[0][1]
expect(emittedDatasource.entities!["table"]).toEqual(expectedTable)
})
it("will rename a column", async () => {
await makeRequest("post", "/api/tables/", primaryMySqlTable)
let renameColumnOnTable: TableRequest = {
...primaryMySqlTable,
schema: {
id: {
name: "id",
type: FieldType.AUTO,
autocolumn: true,
externalType: "unsigned integer",
},
name: {
name: "name",
type: FieldType.STRING,
externalType: "text",
},
description: {
name: "description",
type: FieldType.STRING,
externalType: "text",
},
age: {
name: "age",
type: FieldType.NUMBER,
externalType: "float(8,2)",
},
},
}
const response = await makeRequest(
"post",
"/api/tables/",
renameColumnOnTable
)
mysqlDatasource = (
await makeRequest(
"post",
`/api/datasources/${mysqlDatasource._id}/schema`
)
).body.datasource
expect(response.status).toEqual(200)
expect(
Object.keys(mysqlDatasource.entities![primaryMySqlTable.name].schema)
).toEqual(["id", "name", "description", "age"])
})
})
})

@@ -3,6 +3,7 @@ import {
Operation,
RelationshipType,
RenameColumn,
AddColumn,
Table,
TableRequest,
ViewV2,
@@ -32,7 +33,7 @@ import * as viewSdk from "../../views"
export async function save(
datasourceId: string,
update: Table,
opts?: { tableId?: string; renaming?: RenameColumn }
opts?: { tableId?: string; renaming?: RenameColumn; adding?: AddColumn }
) {
let tableToSave: TableRequest = {
...update,
@@ -165,8 +166,17 @@ export async function save(
// remove the rename prop
delete tableToSave._rename
// store it into couch now for budibase reference
// if adding a new column, we need to rebuild the schema for that table to get the 'externalType' of the column
if (opts?.adding) {
datasource.entities[tableToSave.name] = (
await datasourceSdk.buildFilteredSchema(datasource, [tableToSave.name])
).tables[tableToSave.name]
} else {
datasource.entities[tableToSave.name] = tableToSave
}
// store it into couch now for budibase reference
await db.put(populateExternalTableSchemas(datasource))
// Since tables are stored inside datasources, we need to notify clients

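The branch added above can be summarised by the hedged sketch below (assumed minimal types, with `buildFilteredSchema` as a stand-in for the SDK call in the diff): when a column was just added, the table's schema is re-introspected from the live datasource so the new column picks up its database-native `externalType`; otherwise the client-supplied table is stored as-is.

```typescript
// Sketch only: assumed minimal types, injected schema builder
interface TableLike {
  name: string
  schema: Record<string, unknown>
}
interface DatasourceLike {
  entities: Record<string, TableLike>
}
type SchemaBuilder = (
  ds: DatasourceLike,
  tableNames: string[]
) => Promise<{ tables: Record<string, TableLike> }>

async function resolveEntity(
  datasource: DatasourceLike,
  tableToSave: TableLike,
  adding: boolean,
  buildFilteredSchema: SchemaBuilder
): Promise<void> {
  if (adding) {
    // Re-introspect just this table so new columns carry externalType
    const rebuilt = await buildFilteredSchema(datasource, [tableToSave.name])
    datasource.entities[tableToSave.name] = rebuilt.tables[tableToSave.name]
  } else {
    datasource.entities[tableToSave.name] = tableToSave
  }
}
```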

@@ -1,6 +1,6 @@
import { Document } from "../../document"
import { View, ViewV2 } from "../view"
import { RenameColumn } from "../../../sdk"
import { AddColumn, RenameColumn } from "../../../sdk"
import { TableSchema } from "./schema"
export const INTERNAL_TABLE_SOURCE_ID = "bb_internal"
@@ -29,5 +29,6 @@ export interface Table extends Document {
export interface TableRequest extends Table {
_rename?: RenameColumn
_add?: AddColumn
created?: boolean
}

@@ -60,6 +60,10 @@ export interface RenameColumn {
updated: string
}
export interface AddColumn {
name: string
}
export interface RelationshipsJson {
through?: string
from?: string