
Merge pull request #12008 from Budibase/feature/import_user_column_type

Import user column type
Adria Navarro 2023-10-10 16:09:50 +02:00 committed by GitHub
commit 7ba8001ae4
17 changed files with 202 additions and 45 deletions

View file

@@ -45,6 +45,11 @@ export function generateGlobalUserID(id?: any) {
   return `${DocumentType.USER}${SEPARATOR}${id || newid()}`
 }
 
+const isGlobalUserIDRegex = new RegExp(`^${DocumentType.USER}${SEPARATOR}.+`)
+export function isGlobalUserID(id: string) {
+  return isGlobalUserIDRegex.test(id)
+}
+
 /**
  * Generates a new user ID based on the passed in global ID.
  * @param {string} globalId The ID of the global user.
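For reference, a minimal standalone sketch of what this check accepts. The `DocumentType.USER` and `SEPARATOR` values below are assumptions for illustration, matching the `us_<id>` shape of Budibase global user IDs:

// Standalone sketch — assumes DocumentType.USER === "us" and SEPARATOR === "_".
const DocumentType = { USER: "us" }
const SEPARATOR = "_"
const isGlobalUserIDRegex = new RegExp(`^${DocumentType.USER}${SEPARATOR}.+`)

function isGlobalUserID(id: string): boolean {
  return isGlobalUserIDRegex.test(id)
}

console.log(isGlobalUserID("us_abc123")) // true — prefix plus at least one char
console.log(isGlobalUserID("us_"))       // false — ".+" demands a non-empty suffix
console.log(isGlobalUserID("ro_us_abc")) // false — "^" anchors the prefix to the start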

View file

@@ -49,6 +49,15 @@
     label: "Long Form Text",
     value: FIELDS.LONGFORM.type,
   },
+  {
+    label: "User",
+    value: `${FIELDS.USER.type}${FIELDS.USER.subtype}`,
+  },
+  {
+    label: "Users",
+    value: `${FIELDS.USERS.type}${FIELDS.USERS.subtype}`,
+  },
 ]
 
 $: {
@@ -143,7 +152,7 @@
   <div class="field">
     <span>{name}</span>
     <Select
-      value={schema[name]?.type}
+      value={`${schema[name]?.type}${schema[name]?.subtype || ""}`}
       options={typeOptions}
       placeholder={null}
       getOptionLabel={option => option.label}
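Since the User and Users columns share one underlying field type and differ only by subtype, the Select value concatenates both so the two options stay distinct. A quick illustration — the literal type/subtype strings here are assumptions:

// Hypothetical FIELDS values for illustration only.
const FIELDS = {
  USER: { type: "bb_reference", subtype: "user" },
  USERS: { type: "bb_reference", subtype: "users" },
}

// type alone cannot distinguish the two options...
console.log(FIELDS.USER.type === FIELDS.USERS.type) // true

// ...but type + subtype can, which is what the Select binds to
const userKey = `${FIELDS.USER.type}${FIELDS.USER.subtype}`    // "bb_referenceuser"
const usersKey = `${FIELDS.USERS.type}${FIELDS.USERS.subtype}` // "bb_referenceusers"
console.log(userKey === usersKey) // false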

View file

@@ -10,36 +10,82 @@
   export let displayColumn = null
   export let promptUpload = false
 
-  const typeOptions = [
-    {
+  const typeOptions = {
+    [FIELDS.STRING.type]: {
       label: "Text",
       value: FIELDS.STRING.type,
+      config: {
+        type: FIELDS.STRING.type,
+        constraints: FIELDS.STRING.constraints,
+      },
     },
-    {
+    [FIELDS.NUMBER.type]: {
       label: "Number",
       value: FIELDS.NUMBER.type,
+      config: {
+        type: FIELDS.NUMBER.type,
+        constraints: FIELDS.NUMBER.constraints,
+      },
     },
-    {
+    [FIELDS.DATETIME.type]: {
       label: "Date",
       value: FIELDS.DATETIME.type,
+      config: {
+        type: FIELDS.DATETIME.type,
+        constraints: FIELDS.DATETIME.constraints,
+      },
     },
-    {
+    [FIELDS.OPTIONS.type]: {
       label: "Options",
       value: FIELDS.OPTIONS.type,
+      config: {
+        type: FIELDS.OPTIONS.type,
+        constraints: FIELDS.OPTIONS.constraints,
+      },
    },
-    {
+    [FIELDS.ARRAY.type]: {
       label: "Multi-select",
       value: FIELDS.ARRAY.type,
+      config: {
+        type: FIELDS.ARRAY.type,
+        constraints: FIELDS.ARRAY.constraints,
+      },
     },
-    {
+    [FIELDS.BARCODEQR.type]: {
       label: "Barcode/QR",
       value: FIELDS.BARCODEQR.type,
+      config: {
+        type: FIELDS.BARCODEQR.type,
+        constraints: FIELDS.BARCODEQR.constraints,
+      },
     },
-    {
+    [FIELDS.LONGFORM.type]: {
       label: "Long Form Text",
       value: FIELDS.LONGFORM.type,
+      config: {
+        type: FIELDS.LONGFORM.type,
+        constraints: FIELDS.LONGFORM.constraints,
+      },
     },
-  ]
+    user: {
+      label: "User",
+      value: "user",
+      config: {
+        type: FIELDS.USER.type,
+        subtype: FIELDS.USER.subtype,
+        constraints: FIELDS.USER.constraints,
+      },
+    },
+    users: {
+      label: "Users",
+      value: "users",
+      config: {
+        type: FIELDS.USERS.type,
+        subtype: FIELDS.USERS.subtype,
+        constraints: FIELDS.USERS.constraints,
+      },
+    },
+  }
 
   let fileInput
   let error = null
@@ -48,10 +94,12 @@
   let validation = {}
   let validateHash = ""
   let errors = {}
+  let selectedColumnTypes = {}
 
   $: displayColumnOptions = Object.keys(schema || {}).filter(column => {
     return validation[column]
   })
 
   $: {
     // binding in consumer is causing double renders here
     const newValidateHash = JSON.stringify(rows) + JSON.stringify(schema)
@@ -72,6 +120,13 @@
       rows = response.rows
       schema = response.schema
       fileName = response.fileName
+      selectedColumnTypes = Object.entries(response.schema).reduce(
+        (acc, [colName, fieldConfig]) => ({
+          ...acc,
+          [colName]: fieldConfig.type,
+        }),
+        {}
+      )
     } catch (e) {
       loading = false
       error = e
@@ -98,8 +153,10 @@
   }
 
   const handleChange = (name, e) => {
-    schema[name].type = e.detail
-    schema[name].constraints = FIELDS[e.detail.toUpperCase()].constraints
+    const { config } = typeOptions[e.detail]
+    schema[name].type = config.type
+    schema[name].subtype = config.subtype
+    schema[name].constraints = config.constraints
   }
 
   const openFileUpload = (promptUpload, fileInput) => {
const openFileUpload = (promptUpload, fileInput) => {
@@ -142,9 +199,9 @@
       <div class="field">
         <span>{column.name}</span>
         <Select
-          bind:value={column.type}
+          bind:value={selectedColumnTypes[column.name]}
           on:change={e => handleChange(name, e)}
-          options={typeOptions}
+          options={Object.values(typeOptions)}
           placeholder={null}
           getOptionLabel={option => option.label}
           getOptionValue={option => option.value}
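The options moved from an array to a map keyed by option value, so `handleChange` can look up the full column config (type, subtype, constraints) in one step, while `Object.values(typeOptions)` still feeds the Select. A condensed sketch of that flow — shapes simplified, and the "users" entry's literal values are assumptions:

// Condensed model of the new structure; constraint values are placeholders.
interface ColumnConfig { type: string; subtype?: string; constraints?: object }
interface TypeOption { label: string; value: string; config: ColumnConfig }

const typeOptions: Record<string, TypeOption> = {
  users: {
    label: "Users",
    value: "users",
    config: { type: "bb_reference", subtype: "users", constraints: { type: "array" } },
  },
}

const schema: Record<string, ColumnConfig> = { assignees: { type: "string" } }

// The Select emits the option's value, which keys straight into the map.
function handleChange(name: string, selectedValue: string) {
  const { config } = typeOptions[selectedValue]
  schema[name].type = config.type
  schema[name].subtype = config.subtype
  schema[name].constraints = config.constraints
}

handleChange("assignees", "users")
console.log(schema.assignees.subtype) // "users"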

View file

@@ -156,7 +156,10 @@ export async function destroy(ctx: UserCtx) {
   }
   const table = await sdk.tables.getTable(row.tableId)
   // update the row to include full relationships before deleting them
-  row = await outputProcessing(table, row, { squash: false })
+  row = await outputProcessing(table, row, {
+    squash: false,
+    skipBBReferences: true,
+  })
   // now remove the relationships
   await linkRows.updateLinks({
     eventType: linkRows.EventType.ROW_DELETE,
@@ -190,6 +193,7 @@ export async function bulkDestroy(ctx: UserCtx) {
   // they need to be the full rows (including previous relationships) for automations
   const processedRows = (await outputProcessing(table, rows, {
     squash: false,
+    skipBBReferences: true,
   })) as Row[]
 
   // remove the relationships first

View file

@@ -15,7 +15,7 @@ import { handleRequest } from "../row/external"
 import { context, events } from "@budibase/backend-core"
 import { isRows, isSchema, parse } from "../../../utilities/schema"
 import {
-  AutoReason,
+  BulkImportRequest,
   Datasource,
   FieldSchema,
   Operation,
@@ -374,10 +374,10 @@ export async function destroy(ctx: UserCtx) {
   return tableToDelete
 }
 
-export async function bulkImport(ctx: UserCtx) {
+export async function bulkImport(ctx: UserCtx<BulkImportRequest>) {
   const table = await sdk.tables.getTable(ctx.params.tableId)
-  const { rows }: { rows: unknown } = ctx.request.body
-  const schema: unknown = table.schema
+  const { rows } = ctx.request.body
+  const schema = table.schema
 
   if (!rows || !isRows(rows) || !isSchema(schema)) {
     ctx.throw(400, "Provided data import information is invalid.")
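Typing the context as `UserCtx<BulkImportRequest>` lets the body be destructured without the previous `unknown` casts. A self-contained sketch of the idea, with `UserCtx` reduced to a stand-in for Budibase's Koa context wrapper:

// Stand-in types — the real UserCtx and BulkImportRequest live in @budibase/types.
type Row = Record<string, any>

interface BulkImportRequest {
  rows: Row[]
  identifierFields?: string[]
}

interface UserCtx<B = unknown> {
  params: Record<string, string>
  request: { body: B }
  throw(status: number, message: string): never
}

async function bulkImport(ctx: UserCtx<BulkImportRequest>) {
  // rows is Row[] by construction — no `const { rows }: { rows: unknown }` dance
  const { rows, identifierFields } = ctx.request.body
  if (!rows || !Array.isArray(rows)) {
    ctx.throw(400, "Provided data import information is invalid.")
  }
  return { imported: rows.length, matchedOn: identifierFields ?? [] }
}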

View file

@@ -8,6 +8,7 @@ import {
 import { isExternalTable, isSQL } from "../../../integrations/utils"
 import { events } from "@budibase/backend-core"
 import {
+  BulkImportRequest,
   FetchTablesResponse,
   SaveTableRequest,
   SaveTableResponse,
@@ -97,7 +98,7 @@ export async function destroy(ctx: UserCtx) {
   builderSocket?.emitTableDeletion(ctx, deletedTable)
 }
 
-export async function bulkImport(ctx: UserCtx) {
+export async function bulkImport(ctx: UserCtx<BulkImportRequest>) {
   const tableId = ctx.params.tableId
   await pickApi({ tableId }).bulkImport(ctx)
   // right now we don't trigger anything for bulk import because it

View file

@@ -10,6 +10,7 @@ import {
 } from "../../../utilities/rowProcessor"
 import { runStaticFormulaChecks } from "./bulkFormula"
 import {
+  BulkImportRequest,
   RenameColumn,
   SaveTableRequest,
   SaveTableResponse,
@@ -206,7 +207,7 @@ export async function destroy(ctx: any) {
   return tableToDelete
 }
 
-export async function bulkImport(ctx: any) {
+export async function bulkImport(ctx: UserCtx<BulkImportRequest>) {
   const table = await sdk.tables.getTable(ctx.params.tableId)
   const { rows, identifierFields } = ctx.request.body
   await handleDataImport(ctx.user, table, rows, identifierFields)

View file

@@ -20,7 +20,13 @@ import viewTemplate from "../view/viewBuilder"
 import { cloneDeep } from "lodash/fp"
 import { quotas } from "@budibase/pro"
 import { events, context } from "@budibase/backend-core"
-import { ContextUser, Datasource, SourceName, Table } from "@budibase/types"
+import {
+  ContextUser,
+  Datasource,
+  Row,
+  SourceName,
+  Table,
+} from "@budibase/types"
 
 export async function clearColumns(table: any, columnNames: any) {
   const db = context.getAppDB()
@@ -144,12 +150,12 @@ export async function importToRows(
 }
 
 export async function handleDataImport(
-  user: any,
-  table: any,
-  rows: any,
+  user: ContextUser,
+  table: Table,
+  rows: Row[],
   identifierFields: Array<string> = []
 ) {
-  const schema: unknown = table.schema
+  const schema = table.schema
   if (!rows || !isRows(rows) || !isSchema(schema)) {
     return table

View file

@@ -43,3 +43,7 @@ export enum Format {
 export function isFormat(format: any): format is Format {
   return Object.values(Format).includes(format as Format)
 }
+
+export function parseCsvExport<T>(value: string) {
+  return JSON.parse(value?.replace(/'/g, '"')) as T
+}
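`parseCsvExport` undoes the quoting used when complex cells are exported to CSV: values arrive as JSON-like strings with single quotes, which get swapped for double quotes before parsing. A quick usage sketch — the sample cell value is illustrative:

function parseCsvExport<T>(value: string) {
  return JSON.parse(value?.replace(/'/g, '"')) as T
}

// Assumed shape of a "Users" cell as it appears in an exported CSV:
const cell = "[{'_id': 'us_1'}, {'_id': 'us_2'}]"
const refs = parseCsvExport<{ _id: string }[]>(cell)
console.log(refs.map(r => r._id)) // ["us_1", "us_2"]

// Caveat: the blanket quote swap means a value containing a literal
// apostrophe (e.g. "O'Brien") would not survive this round trip.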

View file

@@ -1578,9 +1578,6 @@ describe.each([
       () => config.createUser(),
       (row: Row) => ({
         _id: row._id,
-        email: row.email,
-        firstName: row.firstName,
-        lastName: row.lastName,
         primaryDisplay: row.email,
       }),
     ],

View file

@@ -1,6 +1,11 @@
 import { generator } from "@budibase/backend-core/tests"
 import { events, context } from "@budibase/backend-core"
-import { FieldType, Table, ViewCalculation } from "@budibase/types"
+import {
+  FieldType,
+  SaveTableRequest,
+  Table,
+  ViewCalculation,
+} from "@budibase/types"
 import { checkBuilderEndpoint } from "./utilities/TestFunctions"
 import * as setup from "./utilities"
 
 const { basicTable } = setup.structures
@@ -47,7 +52,7 @@ describe("/tables", () => {
   })
 
   it("creates a table via data import", async () => {
-    const table = basicTable()
+    const table: SaveTableRequest = basicTable()
     table.rows = [{ name: "test-name", description: "test-desc" }]
 
     const res = await createTable(table)

View file

@@ -82,9 +82,6 @@ export async function processOutputBBReferences(
       return users.map(u => ({
         _id: u._id,
         primaryDisplay: u.email,
-        email: u.email,
-        firstName: u.firstName,
-        lastName: u.lastName,
       }))
 
     default:
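With the extra fields dropped, an enriched user reference now carries only the ID and a display value. A small sketch of the resulting output shape (user fields simplified):

interface UserDoc { _id: string; email: string; firstName?: string; lastName?: string }

const users: UserDoc[] = [
  { _id: "us_1", email: "ada@example.com", firstName: "Ada", lastName: "L" },
]

// New, minimal shape — enough for display and lookups, nothing more:
const enriched = users.map(u => ({
  _id: u._id,
  primaryDisplay: u.email,
}))
console.log(enriched) // [{ _id: "us_1", primaryDisplay: "ada@example.com" }]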

View file

@@ -201,9 +201,14 @@ export async function inputProcessing(
 export async function outputProcessing<T extends Row[] | Row>(
   table: Table,
   rows: T,
-  opts: { squash?: boolean; preserveLinks?: boolean } = {
+  opts: {
+    squash?: boolean
+    preserveLinks?: boolean
+    skipBBReferences?: boolean
+  } = {
     squash: true,
     preserveLinks: false,
+    skipBBReferences: false,
   }
 ): Promise<T> {
   let safeRows: Row[]
@@ -230,7 +235,10 @@ export async function outputProcessing<T extends Row[] | Row>(
         attachment.url = objectStore.getAppFileUrl(attachment.key)
       })
     }
-  } else if (column.type == FieldTypes.BB_REFERENCE) {
+  } else if (
+    !opts.skipBBReferences &&
+    column.type == FieldTypes.BB_REFERENCE
+  ) {
     for (let row of enriched) {
       row[property] = await processOutputBBReferences(
         row[property],
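`skipBBReferences` gives callers, like the delete handlers earlier in this commit, a way to bypass user-reference enrichment entirely and leave the stored IDs untouched. A simplified model of the branch — names mirror the real module, but the logic is condensed and the stand-in resolver is an assumption:

type Row = Record<string, any>

// Stand-in for the real processOutputBBReferences, which resolves stored
// user IDs into { _id, primaryDisplay } objects via the user cache.
async function processOutputBBReferences(value: unknown): Promise<unknown> {
  return [{ _id: value, primaryDisplay: "user@example.com" }]
}

async function enrichBBReferenceColumn(
  rows: Row[],
  property: string,
  opts: { skipBBReferences?: boolean } = {}
): Promise<Row[]> {
  if (opts.skipBBReferences) {
    // deletion paths opt out: the rows are about to be removed, so there is
    // no point paying for user lookups — raw IDs pass through unchanged
    return rows
  }
  for (const row of rows) {
    row[property] = await processOutputBBReferences(row[property])
  }
  return rows
}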

View file

@@ -180,9 +180,6 @@ describe("bbReferenceProcessor", () => {
       {
         _id: user._id,
         primaryDisplay: user.email,
-        email: user.email,
-        firstName: user.firstName,
-        lastName: user.lastName,
       },
     ])
     expect(cacheGetUsersSpy).toBeCalledTimes(1)
@@ -207,9 +204,6 @@
       [user1, user2].map(u => ({
         _id: u._id,
         primaryDisplay: u.email,
-        email: u.email,
-        firstName: u.firstName,
-        lastName: u.lastName,
       }))
     )
   )

View file

@@ -1,9 +1,13 @@
+import { FieldSubtype } from "@budibase/types"
 import { FieldTypes } from "../constants"
-import { ValidColumnNameRegex } from "@budibase/shared-core"
+import { ValidColumnNameRegex, utils } from "@budibase/shared-core"
+import { db } from "@budibase/backend-core"
+import { parseCsvExport } from "../api/controllers/view/exporters"
 
 interface SchemaColumn {
   readonly name: string
   readonly type: FieldTypes
+  readonly subtype: FieldSubtype
   readonly autocolumn?: boolean
   readonly constraints?: {
     presence: boolean
@@ -77,8 +81,14 @@ export function validate(rows: Rows, schema: Schema): ValidationResults {
   rows.forEach(row => {
     Object.entries(row).forEach(([columnName, columnData]) => {
       const columnType = schema[columnName]?.type
+      const columnSubtype = schema[columnName]?.subtype
       const isAutoColumn = schema[columnName]?.autocolumn
 
+      // If the column had an invalid value we don't want to override it
+      if (results.schemaValidation[columnName] === false) {
+        return
+      }
+
       // If the columnType is not a string, then it's not present in the schema, and should be added to the invalid columns array
       if (typeof columnType !== "string") {
         results.invalidColumns.push(columnName)
@@ -112,6 +122,11 @@ export function validate(rows: Rows, schema: Schema): ValidationResults {
         isNaN(new Date(columnData).getTime())
       ) {
         results.schemaValidation[columnName] = false
+      } else if (
+        columnType === FieldTypes.BB_REFERENCE &&
+        !isValidBBReference(columnData, columnSubtype)
+      ) {
+        results.schemaValidation[columnName] = false
       } else {
         results.schemaValidation[columnName] = true
       }
@@ -138,6 +153,7 @@ export function parse(rows: Rows, schema: Schema): Rows {
       }
 
       const columnType = schema[columnName].type
+      const columnSubtype = schema[columnName].subtype
 
       if (columnType === FieldTypes.NUMBER) {
         // If provided must be a valid number
@@ -147,6 +163,23 @@
         parsedRow[columnName] = columnData
           ? new Date(columnData).toISOString()
           : columnData
+      } else if (columnType === FieldTypes.BB_REFERENCE) {
+        const parsedValues =
+          !!columnData && parseCsvExport<{ _id: string }[]>(columnData)
+        if (!parsedValues) {
+          parsedRow[columnName] = undefined
+        } else {
+          switch (columnSubtype) {
+            case FieldSubtype.USER:
+              parsedRow[columnName] = parsedValues[0]?._id
+              break
+            case FieldSubtype.USERS:
+              parsedRow[columnName] = parsedValues.map(u => u._id)
+              break
+            default:
+              utils.unreachable(columnSubtype)
+          }
+        }
       } else {
         parsedRow[columnName] = columnData
       }
@@ -155,3 +188,32 @@ export function parse(rows: Rows, schema: Schema): Rows {
     return parsedRow
   })
 }
+
+function isValidBBReference(
+  columnData: any,
+  columnSubtype: FieldSubtype
+): boolean {
+  switch (columnSubtype) {
+    case FieldSubtype.USER:
+    case FieldSubtype.USERS:
+      if (typeof columnData !== "string") {
+        return false
+      }
+      const userArray = parseCsvExport<{ _id: string }[]>(columnData)
+      if (!Array.isArray(userArray)) {
+        return false
+      }
+
+      if (columnSubtype === FieldSubtype.USER && userArray.length > 1) {
+        return false
+      }
+
+      const containsWrongId = userArray.find(
+        user => !db.isGlobalUserID(user._id)
+      )
+      return !containsWrongId
+
+    default:
+      throw utils.unreachable(columnSubtype)
+  }
+}
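Putting the pieces together, a rough end-to-end sketch of how a user-reference CSV cell is validated and parsed. The enum values and the simplified `isGlobalUserID` are assumptions; the real ones live in `@budibase/types` and `@budibase/backend-core`:

enum FieldSubtype { USER = "user", USERS = "users" } // assumed literals

const parseCsvExport = <T>(value: string) => JSON.parse(value?.replace(/'/g, '"')) as T
const isGlobalUserID = (id: string) => /^us_.+/.test(id) // simplified stand-in

const cell = "[{'_id': 'us_1'}, {'_id': 'us_2'}]"
const parsed = parseCsvExport<{ _id: string }[]>(cell)

// Validation: every ID must be a global user ID, and a singular USER column
// may reference at most one user.
function isValid(subtype: FieldSubtype): boolean {
  if (!Array.isArray(parsed)) return false
  if (subtype === FieldSubtype.USER && parsed.length > 1) return false
  return parsed.every(u => isGlobalUserID(u._id))
}
console.log(isValid(FieldSubtype.USERS)) // true
console.log(isValid(FieldSubtype.USER))  // false — two refs in a single-user column

// Parsing: USER keeps the first ID, USERS keeps them all.
const asUser = parsed[0]?._id          // "us_1"
const asUsers = parsed.map(u => u._id) // ["us_1", "us_2"]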

View file

@@ -1,4 +1,5 @@
 import {
+  Row,
   Table,
   TableRequest,
   TableSchema,
@@ -18,6 +19,13 @@ export interface TableResponse extends Table {
 export type FetchTablesResponse = TableResponse[]
 
-export interface SaveTableRequest extends TableRequest {}
+export interface SaveTableRequest extends TableRequest {
+  rows?: Row[]
+}
 
 export type SaveTableResponse = Table
+
+export interface BulkImportRequest {
+  rows: Row[]
+  identifierFields?: Array<string>
+}
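For reference, a sample request body matching `BulkImportRequest` — the field values are illustrative, with `owner` standing in for a user column:

type Row = Record<string, any>

interface BulkImportRequest {
  rows: Row[]
  identifierFields?: Array<string>
}

const body: BulkImportRequest = {
  rows: [{ name: "test-name", owner: "us_1" }],
  // optional: match and update existing rows by these fields instead of inserting
  identifierFields: ["name"],
}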

View file

@@ -15,7 +15,6 @@ export interface Table extends Document {
   constrained?: string[]
   sql?: boolean
   indexes?: { [key: string]: any }
-  rows?: { [key: string]: any }
   created?: boolean
   rowHeight?: number
 }