
Merge master.

Sam Rose 2024-08-02 15:29:48 +01:00
commit 7ab442eae7
18 changed files with 963 additions and 240 deletions

View file

@@ -39,10 +39,12 @@ import { dataFilters, helpers } from "@budibase/shared-core"
type QueryFunction = (query: SqlQuery | SqlQuery[], operation: Operation) => any
const envLimit = environment.SQL_MAX_ROWS
? parseInt(environment.SQL_MAX_ROWS)
: null
const BASE_LIMIT = envLimit || 5000
function getBaseLimit() {
const envLimit = environment.SQL_MAX_ROWS
? parseInt(environment.SQL_MAX_ROWS)
: null
return envLimit || 5000
}
function getTableName(table?: Table): string | undefined {
// SQS uses the table ID rather than the table name
@@ -970,7 +972,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
private readonly limit: number
// pass through client to get flavour of SQL
constructor(client: SqlClient, limit: number = BASE_LIMIT) {
constructor(client: SqlClient, limit: number = getBaseLimit()) {
super(client)
this.limit = limit
}
@@ -1014,7 +1016,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
query = builder.read({
limits: {
query: this.limit,
base: BASE_LIMIT,
base: getBaseLimit(),
},
})
break
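Why the limit moved into a function: BASE_LIMIT was computed once at module load, so a SQL_MAX_ROWS value set afterwards (for example by a test wrapping a block in config.withCoreEnv, as the pagination test later in this diff does) was never picked up. getBaseLimit() defers the read to query-build time. A minimal sketch of the difference, with an illustrative `environment` proxy standing in for Budibase's env module:

```ts
// Illustrative only: this `environment` re-reads process.env on each access.
const environment = {
  get SQL_MAX_ROWS() {
    return process.env.SQL_MAX_ROWS
  },
}

// Before: frozen at import time; later changes to SQL_MAX_ROWS are invisible.
const BASE_LIMIT = environment.SQL_MAX_ROWS
  ? parseInt(environment.SQL_MAX_ROWS)
  : 5000

// After: read on every call, so an override applied before the query is
// built takes effect.
function getBaseLimit() {
  const envLimit = environment.SQL_MAX_ROWS
    ? parseInt(environment.SQL_MAX_ROWS)
    : null
  return envLimit || 5000
}
```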

View file

@@ -66,9 +66,14 @@ export interface RunConfig {
includeSqlRelationships?: IncludeRelationship
}
export type ExternalReadRequestReturnType = {
rows: Row[]
rawResponseSize: number
}
export type ExternalRequestReturnType<T extends Operation> =
T extends Operation.READ
? Row[]
? ExternalReadRequestReturnType
: T extends Operation.COUNT
? number
: { row: Row; table: Table }
@@ -741,9 +746,11 @@ export class ExternalRequest<T extends Operation> {
)
// if reading, the output will just be an array of rows, so return the whole thing
if (operation === Operation.READ) {
return (
Array.isArray(output) ? output : [output]
) as ExternalRequestReturnType<T>
const rows = Array.isArray(output) ? output : [output]
return {
rows,
rawResponseSize: responseRows.length,
} as ExternalRequestReturnType<T>
} else {
return { row: output[0], table } as ExternalRequestReturnType<T>
}
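This return-shape change is the crux of the pagination fix in this commit: a READ now reports how many rows the SQL driver actually produced alongside the processed rows. When a parent row joins to N related rows, the driver returns N raw rows that squash into a single processed row, so the processed count alone cannot tell whether the query limit was hit. Restated as a sketch:

```ts
import { Row } from "@budibase/types"

type ExternalReadRequestReturnType = {
  // rows after joined relationship rows are collapsed into their parents
  rows: Row[]
  // rows exactly as the SQL driver returned them, one per joined row; the
  // search implementations later in this diff base hasNextPage on this count
  rawResponseSize: number
}
```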

View file

@@ -136,7 +136,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
includeSqlRelationships: IncludeRelationship.INCLUDE,
})
const table: Table = tables[tableName]
const row = response[0]
const row = response.rows[0]
// this seems like a lot of work, but we need to dig deeper to enrich a single row
// there is probably a better way to do this with some smart multi-layer joins
for (let [fieldName, field] of Object.entries(table.schema)) {
@@ -163,10 +163,14 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
},
includeSqlRelationships: IncludeRelationship.INCLUDE,
})
row[fieldName] = await outputProcessing(linkedTable, relatedRows, {
squash: true,
preserveLinks: true,
})
row[fieldName] = await outputProcessing<Row[]>(
linkedTable,
relatedRows.rows,
{
squash: true,
preserveLinks: true,
}
)
}
return row
}

View file

@@ -51,7 +51,3 @@ export function jsonWithSchema(schema: TableSchema, rows: Row[]) {
export function isFormat(format: any): format is RowExportFormat {
return Object.values(RowExportFormat).includes(format as RowExportFormat)
}
export function parseCsvExport<T>(value: string) {
return JSON.parse(value) as T
}

View file

@@ -33,6 +33,7 @@ import {
UpdatedRowEventEmitter,
TableSchema,
JsonFieldSubType,
RowExportFormat,
} from "@budibase/types"
import { generator, mocks } from "@budibase/backend-core/tests"
import _, { merge } from "lodash"
@@ -1816,6 +1817,7 @@ describe.each([
await config.api.row.exportRows(
"1234567",
{ rows: [existing._id!] },
RowExportFormat.JSON,
{ status: 404 }
)
})
@@ -1854,6 +1856,202 @@ describe.each([
const results = JSON.parse(res)
expect(results.length).toEqual(3)
})
describe("should allow exporting all column types", () => {
let tableId: string
let expectedRowData: Row
beforeAll(async () => {
const fullSchema = setup.structures.fullSchemaWithoutLinks({
allRequired: true,
})
const table = await config.api.table.save(
saveTableRequest({
...setup.structures.basicTable(),
schema: fullSchema,
primary: ["string"],
})
)
tableId = table._id!
const rowValues: Record<keyof typeof fullSchema, any> = {
[FieldType.STRING]: generator.guid(),
[FieldType.LONGFORM]: generator.paragraph(),
[FieldType.OPTIONS]: "option 2",
[FieldType.ARRAY]: ["options 2", "options 4"],
[FieldType.NUMBER]: generator.natural(),
[FieldType.BOOLEAN]: generator.bool(),
[FieldType.DATETIME]: generator.date().toISOString(),
[FieldType.ATTACHMENTS]: [setup.structures.basicAttachment()],
[FieldType.ATTACHMENT_SINGLE]: setup.structures.basicAttachment(),
[FieldType.FORMULA]: undefined, // generated field
[FieldType.AUTO]: undefined, // generated field
[FieldType.JSON]: { name: generator.guid() },
[FieldType.INTERNAL]: generator.guid(),
[FieldType.BARCODEQR]: generator.guid(),
[FieldType.SIGNATURE_SINGLE]: setup.structures.basicAttachment(),
[FieldType.BIGINT]: generator.integer().toString(),
[FieldType.BB_REFERENCE]: [{ _id: config.getUser()._id }],
[FieldType.BB_REFERENCE_SINGLE]: { _id: config.getUser()._id },
}
const row = await config.api.row.save(table._id!, rowValues)
expectedRowData = {
_id: row._id,
[FieldType.STRING]: rowValues[FieldType.STRING],
[FieldType.LONGFORM]: rowValues[FieldType.LONGFORM],
[FieldType.OPTIONS]: rowValues[FieldType.OPTIONS],
[FieldType.ARRAY]: rowValues[FieldType.ARRAY],
[FieldType.NUMBER]: rowValues[FieldType.NUMBER],
[FieldType.BOOLEAN]: rowValues[FieldType.BOOLEAN],
[FieldType.DATETIME]: rowValues[FieldType.DATETIME],
[FieldType.ATTACHMENTS]: rowValues[FieldType.ATTACHMENTS].map(
(a: any) =>
expect.objectContaining({
...a,
url: expect.any(String),
})
),
[FieldType.ATTACHMENT_SINGLE]: expect.objectContaining({
...rowValues[FieldType.ATTACHMENT_SINGLE],
url: expect.any(String),
}),
[FieldType.FORMULA]: fullSchema[FieldType.FORMULA].formula,
[FieldType.AUTO]: expect.any(Number),
[FieldType.JSON]: rowValues[FieldType.JSON],
[FieldType.INTERNAL]: rowValues[FieldType.INTERNAL],
[FieldType.BARCODEQR]: rowValues[FieldType.BARCODEQR],
[FieldType.SIGNATURE_SINGLE]: expect.objectContaining({
...rowValues[FieldType.SIGNATURE_SINGLE],
url: expect.any(String),
}),
[FieldType.BIGINT]: rowValues[FieldType.BIGINT],
[FieldType.BB_REFERENCE]: rowValues[FieldType.BB_REFERENCE].map(
expect.objectContaining
),
[FieldType.BB_REFERENCE_SINGLE]: expect.objectContaining(
rowValues[FieldType.BB_REFERENCE_SINGLE]
),
}
})
it("as csv", async () => {
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.CSV
)
const jsonResult = await config.api.table.csvToJson({
csvString: exportedValue,
})
const stringified = (value: string) =>
JSON.stringify(value).replace(/"/g, "'")
const matchingObject = (key: string, value: any, isArray: boolean) => {
const objectMatcher = `{'${key}':'${value[key]}'.*?}`
if (isArray) {
return expect.stringMatching(new RegExp(`^\\[${objectMatcher}\\]$`))
}
return expect.stringMatching(new RegExp(`^${objectMatcher}$`))
}
expect(jsonResult).toEqual([
{
...expectedRowData,
auto: expect.any(String),
array: stringified(expectedRowData["array"]),
attachment: matchingObject(
"key",
expectedRowData["attachment"][0].sample,
true
),
attachment_single: matchingObject(
"key",
expectedRowData["attachment_single"].sample,
false
),
boolean: stringified(expectedRowData["boolean"]),
json: stringified(expectedRowData["json"]),
number: stringified(expectedRowData["number"]),
signature_single: matchingObject(
"key",
expectedRowData["signature_single"].sample,
false
),
bb_reference: matchingObject(
"_id",
expectedRowData["bb_reference"][0].sample,
true
),
bb_reference_single: matchingObject(
"_id",
expectedRowData["bb_reference_single"].sample,
false
),
},
])
})
it("as json", async () => {
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.JSON
)
const json = JSON.parse(exportedValue)
expect(json).toEqual([expectedRowData])
})
it("as json with schema", async () => {
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.JSON_WITH_SCHEMA
)
const json = JSON.parse(exportedValue)
expect(json).toEqual({
schema: expect.any(Object),
rows: [expectedRowData],
})
})
it("exported data can be re-imported", async () => {
// export all
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.CSV
)
// import all twice
const rows = await config.api.table.csvToJson({
csvString: exportedValue,
})
await config.api.row.bulkImport(tableId, {
rows,
})
await config.api.row.bulkImport(tableId, {
rows,
})
const { rows: allRows } = await config.api.row.search(tableId)
const expectedRow = {
...expectedRowData,
_id: expect.any(String),
_rev: expect.any(String),
type: "row",
tableId: tableId,
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
}
expect(allRows).toEqual([expectedRow, expectedRow, expectedRow])
})
})
})
let o2mTable: Table
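The stringified and matchingObject helpers in the CSV test above rely on a convention worth spelling out: complex values (arrays, JSON, attachments, user references) are serialized into CSV cells as JSON with double quotes swapped for single quotes, and the importer (parseJsonExport, at the end of this diff) reverses the swap. A round-trip sketch:

```ts
// Export side, as in the `stringified` helper above:
const encode = (value: unknown) => JSON.stringify(value).replace(/"/g, "'")

// Import side, mirroring the fallback in parseJsonExport:
const decode = (value: string) => JSON.parse(value.replace(/'/g, '"'))

encode({ name: "tag 1" }) // => "{'name':'tag 1'}"
decode("{'name':'tag 1'}") // => { name: "tag 1" }
```

Note that the blanket quote swap assumes serialized values contain no literal quote characters of either kind; a value that does would not survive the round trip.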

View file

@@ -54,6 +54,7 @@ describe.each([
const isLucene = name === "lucene"
const isInMemory = name === "in-memory"
const isInternal = isSqs || isLucene || isInMemory
const isSql = !isInMemory && !isLucene
const config = setup.getConfig()
let envCleanup: (() => void) | undefined
@@ -193,7 +194,8 @@ describe.each([
// different to the one passed in will cause the assertion to fail. Extra
// rows returned by the query will also cause the assertion to fail.
async toMatchExactly(expectedRows: any[]) {
const { rows: foundRows } = await this.performSearch()
const response = await this.performSearch()
const foundRows = response.rows
// eslint-disable-next-line jest/no-standalone-expect
expect(foundRows).toHaveLength(expectedRows.length)
@@ -203,13 +205,15 @@ describe.each([
expect.objectContaining(this.popRow(expectedRow, foundRows))
)
)
return response
}
// Asserts that the query returns rows matching exactly the set of rows
// passed in. The order of the rows is not important, but extra rows will
// cause the assertion to fail.
async toContainExactly(expectedRows: any[]) {
const { rows: foundRows } = await this.performSearch()
const response = await this.performSearch()
const foundRows = response.rows
// eslint-disable-next-line jest/no-standalone-expect
expect(foundRows).toHaveLength(expectedRows.length)
@@ -221,6 +225,7 @@ describe.each([
)
)
)
return response
}
// Asserts that the query returns some property values - this cannot be used
@@ -237,6 +242,7 @@ describe.each([
expect(response[key]).toEqual(properties[key])
}
}
return response
}
// Asserts that the query doesn't return a property, e.g. pagination parameters.
@@ -246,13 +252,15 @@ describe.each([
// eslint-disable-next-line jest/no-standalone-expect
expect(response[property]).toBeUndefined()
}
return response
}
// Asserts that the query returns rows matching the set of rows passed in.
// The order of the rows is not important. Extra rows will not cause the
// assertion to fail.
async toContain(expectedRows: any[]) {
const { rows: foundRows } = await this.performSearch()
const response = await this.performSearch()
const foundRows = response.rows
// eslint-disable-next-line jest/no-standalone-expect
expect([...foundRows]).toEqual(
@@ -262,6 +270,7 @@ describe.each([
)
)
)
return response
}
async toFindNothing() {
@@ -2612,4 +2621,79 @@ describe.each([
}).toContainExactly([row])
})
})
isSql &&
describe("pagination edge case with relationships", () => {
let mainRows: Row[] = []
beforeAll(async () => {
const toRelateTable = await createTable({
name: {
name: "name",
type: FieldType.STRING,
},
})
table = await createTable({
name: {
name: "name",
type: FieldType.STRING,
},
rel: {
name: "rel",
type: FieldType.LINK,
relationshipType: RelationshipType.MANY_TO_ONE,
tableId: toRelateTable._id!,
fieldName: "rel",
},
})
const relatedRows = await Promise.all([
config.api.row.save(toRelateTable._id!, { name: "tag 1" }),
config.api.row.save(toRelateTable._id!, { name: "tag 2" }),
config.api.row.save(toRelateTable._id!, { name: "tag 3" }),
config.api.row.save(toRelateTable._id!, { name: "tag 4" }),
config.api.row.save(toRelateTable._id!, { name: "tag 5" }),
config.api.row.save(toRelateTable._id!, { name: "tag 6" }),
])
mainRows = await Promise.all([
config.api.row.save(table._id!, {
name: "product 1",
rel: relatedRows.map(row => row._id),
}),
config.api.row.save(table._id!, {
name: "product 2",
rel: [],
}),
config.api.row.save(table._id!, {
name: "product 3",
rel: [],
}),
])
})
it("can still page when the hard limit is hit", async () => {
await config.withCoreEnv(
{
SQL_MAX_ROWS: "6",
},
async () => {
const params: Omit<RowSearchParams, "tableId"> = {
query: {},
paginate: true,
limit: 3,
sort: "name",
sortType: SortType.STRING,
sortOrder: SortOrder.ASCENDING,
}
const page1 = await expectSearch(params).toContain([mainRows[0]])
expect(page1.hasNextPage).toBe(true)
expect(page1.bookmark).toBeDefined()
const page2 = await expectSearch({
...params,
bookmark: page1.bookmark,
}).toContain([mainRows[1], mainRows[2]])
expect(page2.hasNextPage).toBe(false)
}
)
})
})
})
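This test pins down the bug the rest of the diff fixes. With SQL_MAX_ROWS at 6, "product 1" and its six related rows fill the entire raw response for page one, which then collapses into a single processed row. The old implementations derived hasNextPage from the processed row count, so the first page looked final and products 2 and 3 were unreachable. A condensed sketch of the new decision (the full versions live in the search implementations later in this diff):

```ts
import { Row } from "@budibase/types"

// `rawResponseSize` is the pre-squash driver row count from the READ response.
function resolvePage(processed: Row[], rawResponseSize: number, limit: number) {
  let hasNextPage = false
  if (rawResponseSize > limit) {
    hasNextPage = true
    // pop the look-ahead row only if it survived relationship squashing
    if (processed.length > limit) {
      processed.pop()
    }
  }
  return { rows: processed, hasNextPage }
}
```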

View file

@@ -18,8 +18,10 @@ import {
TableSchema,
TableSourceType,
User,
ValidateTableImportResponse,
ViewCalculation,
ViewV2Enriched,
RowExportFormat,
} from "@budibase/types"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"
@@ -1093,7 +1095,10 @@ describe.each([
})
})
describe("import validation", () => {
describe.each([
[RowExportFormat.CSV, (val: any) => JSON.stringify(val).replace(/"/g, "'")],
[RowExportFormat.JSON, (val: any) => val],
])("import validation (%s)", (_, userParser) => {
const basicSchema: TableSchema = {
id: {
type: FieldType.NUMBER,
@@ -1105,9 +1110,41 @@ describe.each([
},
}
describe("validateNewTableImport", () => {
it("can validate basic imports", async () => {
const result = await config.api.table.validateNewTableImport(
const importCases: [
string,
(rows: Row[], schema: TableSchema) => Promise<ValidateTableImportResponse>
][] = [
[
"validateNewTableImport",
async (rows: Row[], schema: TableSchema) => {
const result = await config.api.table.validateNewTableImport({
rows,
schema,
})
return result
},
],
[
"validateExistingTableImport",
async (rows: Row[], schema: TableSchema) => {
const table = await config.api.table.save(
tableForDatasource(datasource, {
primary: ["id"],
schema,
})
)
const result = await config.api.table.validateExistingTableImport({
tableId: table._id,
rows,
})
return result
},
],
]
describe.each(importCases)("%s", (_, testDelegate) => {
it("validates basic imports", async () => {
const result = await testDelegate(
[{ id: generator.natural(), name: generator.first() }],
basicSchema
)
@@ -1126,18 +1163,18 @@ describe.each([
it.each(
isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
)("don't allow protected names in schema (%s)", async columnName => {
const result = await config.api.table.validateNewTableImport(
[
const result = await config.api.table.validateNewTableImport({
rows: [
{
id: generator.natural(),
name: generator.first(),
[columnName]: generator.word(),
},
],
{
schema: {
...basicSchema,
}
)
},
})
expect(result).toEqual({
allValid: false,
@@ -1153,25 +1190,53 @@ describe.each([
})
})
it("does not allow imports without rows", async () => {
const result = await testDelegate([], basicSchema)
expect(result).toEqual({
allValid: false,
errors: {},
invalidColumns: [],
schemaValidation: {},
})
})
it("validates imports with some empty rows", async () => {
const result = await testDelegate(
[{}, { id: generator.natural(), name: generator.first() }, {}],
basicSchema
)
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
},
})
})
isInternal &&
it.each(
isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
)("don't allow protected names in the rows (%s)", async columnName => {
const result = await config.api.table.validateNewTableImport(
[
const result = await config.api.table.validateNewTableImport({
rows: [
{
id: generator.natural(),
name: generator.first(),
},
],
{
schema: {
...basicSchema,
[columnName]: {
name: columnName,
type: FieldType.STRING,
},
}
)
},
})
expect(result).toEqual({
allValid: false,
@@ -1186,20 +1251,24 @@ describe.each([
},
})
})
})
describe("validateExistingTableImport", () => {
it("can validate basic imports", async () => {
const table = await config.api.table.save(
tableForDatasource(datasource, {
primary: ["id"],
schema: basicSchema,
})
it("validates required fields and valid rows", async () => {
const schema: TableSchema = {
...basicSchema,
name: {
type: FieldType.STRING,
name: "name",
constraints: { presence: true },
},
}
const result = await testDelegate(
[
{ id: generator.natural(), name: generator.first() },
{ id: generator.natural(), name: generator.first() },
],
schema
)
const result = await config.api.table.validateExistingTableImport({
tableId: table._id,
rows: [{ id: generator.natural(), name: generator.first() }],
})
expect(result).toEqual({
allValid: true,
@@ -1212,6 +1281,154 @@ describe.each([
})
})
it("validates required fields and non-valid rows", async () => {
const schema: TableSchema = {
...basicSchema,
name: {
type: FieldType.STRING,
name: "name",
constraints: { presence: true },
},
}
const result = await testDelegate(
[
{ id: generator.natural(), name: generator.first() },
{ id: generator.natural(), name: "" },
],
schema
)
expect(result).toEqual({
allValid: false,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: false,
},
})
})
describe("bb references", () => {
const getUserValues = () => ({
_id: docIds.generateGlobalUserID(),
primaryDisplay: generator.first(),
email: generator.email({}),
})
it("can validate user column imports", async () => {
const schema: TableSchema = {
...basicSchema,
user: {
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
name: "user",
},
}
const result = await testDelegate(
[
{
id: generator.natural(),
name: generator.first(),
user: userParser(getUserValues()),
},
],
schema
)
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
user: true,
},
})
})
it("can validate user column imports with invalid data", async () => {
const schema: TableSchema = {
...basicSchema,
user: {
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
name: "user",
},
}
const result = await testDelegate(
[
{
id: generator.natural(),
name: generator.first(),
user: userParser(getUserValues()),
},
{
id: generator.natural(),
name: generator.first(),
user: "no valid user data",
},
],
schema
)
expect(result).toEqual({
allValid: false,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
user: false,
},
})
})
it("can validate users column imports", async () => {
const schema: TableSchema = {
...basicSchema,
user: {
type: FieldType.BB_REFERENCE,
subtype: BBReferenceFieldSubType.USER,
name: "user",
externalType: "array",
},
}
const result = await testDelegate(
[
{
id: generator.natural(),
name: generator.first(),
user: userParser([
getUserValues(),
getUserValues(),
getUserValues(),
]),
},
],
schema
)
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
user: true,
},
})
})
})
})
describe("validateExistingTableImport", () => {
isInternal &&
it("can reimport _id fields for internal tables", async () => {
const table = await config.api.table.save(

View file

@@ -21,7 +21,8 @@ export async function getRow(
? IncludeRelationship.INCLUDE
: IncludeRelationship.EXCLUDE,
})
return response ? response[0] : response
const rows = response?.rows || []
return rows[0]
}
export async function save(

View file

@@ -8,7 +8,6 @@ import {
import { isExternalTableID } from "../../../integrations/utils"
import * as internal from "./search/internal"
import * as external from "./search/external"
import * as sqs from "./search/sqs"
import { ExportRowsParams, ExportRowsResult } from "./search/types"
import { dataFilters } from "@budibase/shared-core"
import sdk from "../../index"
@@ -55,9 +54,9 @@ export async function search(
if (isExternalTable) {
return external.search(options, table)
} else if (dbCore.isSqsEnabledForTenant()) {
return sqs.search(options, table)
return internal.sqs.search(options, table)
} else {
return internal.search(options, table)
return internal.lucene.search(options, table)
}
}

View file

@@ -47,7 +47,7 @@ function getPaginationAndLimitParameters(
limit: limit + 1,
}
if (bookmark) {
paginateObj.offset = limit * bookmark
paginateObj.offset = bookmark
}
} else if (limit) {
paginateObj = {
@@ -105,37 +105,37 @@ export async function search(
paginate: paginateObj as PaginationJson,
includeSqlRelationships: IncludeRelationship.INCLUDE,
}
const queries: Promise<Row[] | number>[] = []
queries.push(handleRequest(Operation.READ, tableId, parameters))
if (countRows) {
queries.push(handleRequest(Operation.COUNT, tableId, parameters))
}
const responses = await Promise.all(queries)
let rows = responses[0] as Row[]
const totalRows =
responses.length > 1 ? (responses[1] as number) : undefined
const [{ rows, rawResponseSize }, totalRows] = await Promise.all([
handleRequest(Operation.READ, tableId, parameters),
countRows
? handleRequest(Operation.COUNT, tableId, parameters)
: Promise.resolve(undefined),
])
let hasNextPage = false
// remove the extra row if it's there
if (paginate && limit && rows.length > limit) {
rows.pop()
hasNextPage = true
}
if (options.fields) {
const fields = [...options.fields, ...PROTECTED_EXTERNAL_COLUMNS]
rows = rows.map((r: any) => pick(r, fields))
}
rows = await outputProcessing<Row[]>(table, rows, {
let processed = await outputProcessing<Row[]>(table, rows, {
preserveLinks: true,
squash: true,
})
let hasNextPage = false
// if the raw row count is greater than the limit then we likely need to paginate
if (paginate && limit && rawResponseSize > limit) {
hasNextPage = true
// processed rows have relationships merged down, so the count might not exceed the limit
if (processed.length > limit) {
processed.pop()
}
}
if (options.fields) {
const fields = [...options.fields, ...PROTECTED_EXTERNAL_COLUMNS]
processed = processed.map((r: any) => pick(r, fields))
}
// need wrapper object for bookmarks etc when paginating
const response: SearchResponse<Row> = { rows, hasNextPage }
const response: SearchResponse<Row> = { rows: processed, hasNextPage }
if (hasNextPage && bookmark != null) {
response.bookmark = bookmark + 1
response.bookmark = bookmark + processed.length
}
if (totalRows != null) {
response.totalRows = totalRows
@@ -256,24 +256,21 @@ export async function exportRows(
}
export async function fetch(tableId: string): Promise<Row[]> {
const response = await handleRequest<Operation.READ>(
Operation.READ,
tableId,
{
includeSqlRelationships: IncludeRelationship.INCLUDE,
}
)
const response = await handleRequest(Operation.READ, tableId, {
includeSqlRelationships: IncludeRelationship.INCLUDE,
})
const table = await sdk.tables.getTable(tableId)
return await outputProcessing<Row[]>(table, response, {
return await outputProcessing<Row[]>(table, response.rows, {
preserveLinks: true,
squash: true,
})
}
export async function fetchRaw(tableId: string): Promise<Row[]> {
return await handleRequest<Operation.READ>(Operation.READ, tableId, {
const response = await handleRequest(Operation.READ, tableId, {
includeSqlRelationships: IncludeRelationship.INCLUDE,
})
return response.rows
}
export async function fetchView(viewName: string) {
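Two coordinated edits in this file redefine what a bookmark means for external SQL tables. Previously it was a page index: the offset was limit * bookmark, and each page incremented it by one. Now it is a row offset: the offset is the bookmark itself, advanced by the number of processed rows returned. Since limit and offset apply to the primary table before relationships are joined, advancing by processed parent rows appears to land on the next unseen parent row. In the edge-case test earlier, page one yields one processed row from six raw rows, so the next bookmark is 1; page two then starts at parent-row offset 1 and finds products 2 and 3. Sketch of the two regimes:

```ts
// Sketch only, not the repo's API: bookmark semantics before vs after.
function pageParams(limit: number, bookmark: number, processedCount: number) {
  const before = {
    offset: limit * bookmark, // bookmark was a page index
    nextBookmark: bookmark + 1,
  }
  const after = {
    offset: bookmark, // bookmark is now a row offset
    nextBookmark: bookmark + processedCount,
  }
  return { before, after }
}
```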

View file

@@ -0,0 +1,3 @@
export * as sqs from "./sqs"
export * as lucene from "./lucene"
export * from "./internal"

View file

@@ -1,90 +1,30 @@
import { context, HTTPError } from "@budibase/backend-core"
import { PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
import env from "../../../../environment"
import { fullSearch, paginatedSearch } from "./utils"
import { getRowParams, InternalTables } from "../../../../db/utils"
import env from "../../../../../environment"
import { getRowParams, InternalTables } from "../../../../../db/utils"
import {
Database,
DocumentType,
Row,
RowSearchParams,
SearchResponse,
SortType,
Table,
TableSchema,
User,
} from "@budibase/types"
import { getGlobalUsersFromMetadata } from "../../../../utilities/global"
import { outputProcessing } from "../../../../utilities/rowProcessor"
import { outputProcessing } from "../../../../../utilities/rowProcessor"
import {
csv,
Format,
json,
jsonWithSchema,
} from "../../../../api/controllers/view/exporters"
import * as inMemoryViews from "../../../../db/inMemoryView"
} from "../../../../../api/controllers/view/exporters"
import * as inMemoryViews from "../../../../../db/inMemoryView"
import {
getFromDesignDoc,
getFromMemoryDoc,
migrateToDesignView,
migrateToInMemoryView,
} from "../../../../api/controllers/view/utils"
import sdk from "../../../../sdk"
import { ExportRowsParams, ExportRowsResult } from "./types"
import pick from "lodash/pick"
import { breakRowIdField } from "../../../../integrations/utils"
export async function search(
options: RowSearchParams,
table: Table
): Promise<SearchResponse<Row>> {
const { tableId } = options
const { paginate, query } = options
const params: RowSearchParams = {
tableId: options.tableId,
sort: options.sort,
sortOrder: options.sortOrder,
sortType: options.sortType,
limit: options.limit,
bookmark: options.bookmark,
version: options.version,
disableEscaping: options.disableEscaping,
query: {},
}
if (params.sort && !params.sortType) {
const schema = table.schema
const sortField = schema[params.sort]
params.sortType =
sortField.type === "number" ? SortType.NUMBER : SortType.STRING
}
let response
if (paginate) {
response = await paginatedSearch(query, params)
} else {
response = await fullSearch(query, params)
}
// Enrich search results with relationships
if (response.rows && response.rows.length) {
// enrich with global users if from users table
if (tableId === InternalTables.USER_METADATA) {
response.rows = await getGlobalUsersFromMetadata(response.rows as User[])
}
if (options.fields) {
const fields = [...options.fields, ...PROTECTED_INTERNAL_COLUMNS]
response.rows = response.rows.map((r: any) => pick(r, fields))
}
response.rows = await outputProcessing(table, response.rows)
}
return response
}
} from "../../../../../api/controllers/view/utils"
import sdk from "../../../../../sdk"
import { ExportRowsParams, ExportRowsResult } from "../types"
import { breakRowIdField } from "../../../../../integrations/utils"
export async function exportRows(
options: ExportRowsParams
@@ -123,15 +63,12 @@ export async function exportRows(
result = await outputProcessing<Row[]>(table, response)
} else if (query) {
let searchResponse = await search(
{
tableId,
query,
sort,
sortOrder,
},
table
)
let searchResponse = await sdk.rows.search({
tableId,
query,
sort,
sortOrder,
})
result = searchResponse.rows
}
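Before this refactor, exportRows called the lucene search function defined in the same file, so exports always took the lucene path even for tenants with SQS enabled. Routing through sdk.rows.search means exports now follow the same backend dispatch as any other search, quoted from earlier in this diff:

```ts
// The dispatcher that exports now flow through:
if (isExternalTable) {
  return external.search(options, table)
} else if (dbCore.isSqsEnabledForTenant()) {
  return internal.sqs.search(options, table)
} else {
  return internal.lucene.search(options, table)
}
```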

View file

@@ -0,0 +1,66 @@
import { PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
import { fullSearch, paginatedSearch } from "../utils"
import { InternalTables } from "../../../../../db/utils"
import {
Row,
RowSearchParams,
SearchResponse,
SortType,
Table,
User,
} from "@budibase/types"
import { getGlobalUsersFromMetadata } from "../../../../../utilities/global"
import { outputProcessing } from "../../../../../utilities/rowProcessor"
import pick from "lodash/pick"
export async function search(
options: RowSearchParams,
table: Table
): Promise<SearchResponse<Row>> {
const { tableId } = options
const { paginate, query } = options
const params: RowSearchParams = {
tableId: options.tableId,
sort: options.sort,
sortOrder: options.sortOrder,
sortType: options.sortType,
limit: options.limit,
bookmark: options.bookmark,
version: options.version,
disableEscaping: options.disableEscaping,
query: {},
}
if (params.sort && !params.sortType) {
const schema = table.schema
const sortField = schema[params.sort]
params.sortType =
sortField.type === "number" ? SortType.NUMBER : SortType.STRING
}
let response
if (paginate) {
response = await paginatedSearch(query, params)
} else {
response = await fullSearch(query, params)
}
// Enrich search results with relationships
if (response.rows && response.rows.length) {
// enrich with global users if from users table
if (tableId === InternalTables.USER_METADATA) {
response.rows = await getGlobalUsersFromMetadata(response.rows as User[])
}
if (options.fields) {
const fields = [...options.fields, ...PROTECTED_INTERNAL_COLUMNS]
response.rows = response.rows.map((r: any) => pick(r, fields))
}
response.rows = await outputProcessing(table, response.rows)
}
return response
}

View file

@@ -18,34 +18,38 @@ import {
import {
buildInternalRelationships,
sqlOutputProcessing,
} from "../../../../api/controllers/row/utils"
import { mapToUserColumn, USER_COLUMN_PREFIX } from "../../tables/internal/sqs"
import sdk from "../../../index"
} from "../../../../../api/controllers/row/utils"
import sdk from "../../../../index"
import {
mapToUserColumn,
USER_COLUMN_PREFIX,
} from "../../../tables/internal/sqs"
import {
context,
sql,
SQLITE_DESIGN_DOC_ID,
SQS_DATASOURCE_INTERNAL,
} from "@budibase/backend-core"
import { generateJunctionTableID } from "../../../../db/utils"
import AliasTables from "../sqlAlias"
import { outputProcessing } from "../../../../utilities/rowProcessor"
import { generateJunctionTableID } from "../../../../../db/utils"
import AliasTables from "../../sqlAlias"
import { outputProcessing } from "../../../../../utilities/rowProcessor"
import pick from "lodash/pick"
import { processRowCountResponse } from "../utils"
import { processRowCountResponse } from "../../utils"
import {
updateFilterKeys,
getRelationshipColumns,
getTableIDList,
} from "./filters"
} from "../filters"
import {
dataFilters,
helpers,
PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core"
import { isSearchingByRowID } from "./utils"
import { isSearchingByRowID } from "../utils"
import tracer from "dd-trace"
const builder = new sql.Sql(SqlClient.SQL_LITE)
const SQLITE_COLUMN_LIMIT = 2000
const MISSING_COLUMN_REGEX = new RegExp(`no such column: .+`)
const MISSING_TABLE_REGX = new RegExp(`no such table: .+`)
const DUPLICATE_COLUMN_REGEX = new RegExp(`duplicate column name: .+`)
@@ -56,12 +60,14 @@ function buildInternalFieldList(
opts?: { relationships?: RelationshipsJson[] }
) {
let fieldList: string[] = []
const addJunctionFields = (relatedTable: Table, fields: string[]) => {
const getJunctionFields = (relatedTable: Table, fields: string[]) => {
const junctionFields: string[] = []
fields.forEach(field => {
fieldList.push(
junctionFields.push(
`${generateJunctionTableID(table._id!, relatedTable._id!)}.${field}`
)
})
return junctionFields
}
fieldList = fieldList.concat(
PROTECTED_INTERNAL_COLUMNS.map(col => `${table._id}.${col}`)
@@ -71,18 +77,22 @@ function buildInternalFieldList(
if (!opts?.relationships && isRelationship) {
continue
}
if (isRelationship) {
if (!isRelationship) {
fieldList.push(`${table._id}.${mapToUserColumn(col.name)}`)
} else {
const linkCol = col as RelationshipFieldMetadata
const relatedTable = tables.find(table => table._id === linkCol.tableId)
// no relationships provided, don't go more than a layer deep
if (relatedTable) {
fieldList = fieldList.concat(
buildInternalFieldList(relatedTable, tables)
)
addJunctionFields(relatedTable, ["doc1.fieldName", "doc2.fieldName"])
if (!relatedTable) {
continue
}
} else {
fieldList.push(`${table._id}.${mapToUserColumn(col.name)}`)
const relatedFields = buildInternalFieldList(relatedTable, tables).concat(
getJunctionFields(relatedTable, ["doc1.fieldName", "doc2.fieldName"])
)
// break out of the loop if we have reached the max number of columns
if (relatedFields.length + fieldList.length > SQLITE_COLUMN_LIMIT) {
break
}
fieldList = fieldList.concat(relatedFields)
}
}
return [...new Set(fieldList)]
@@ -320,25 +330,19 @@ export async function search(
paginate = true
request.paginate = {
limit: params.limit + 1,
offset: bookmark * params.limit,
offset: bookmark,
}
}
try {
const queries: Promise<Row[] | number>[] = []
queries.push(runSqlQuery(request, allTables, relationships))
if (options.countRows) {
// get the total count of rows
queries.push(
runSqlQuery(request, allTables, relationships, {
countTotalRows: true,
})
)
}
const responses = await Promise.all(queries)
let rows = responses[0] as Row[]
const totalRows =
responses.length > 1 ? (responses[1] as number) : undefined
const [rows, totalRows] = await Promise.all([
runSqlQuery(request, allTables, relationships),
options.countRows
? runSqlQuery(request, allTables, relationships, {
countTotalRows: true,
})
: Promise.resolve(undefined),
])
// process from the format of tableId.column to expected format also
// make sure JSON columns corrected
@@ -350,10 +354,13 @@ export async function search(
)
// check for pagination final row
let nextRow: Row | undefined
let nextRow: boolean = false
if (paginate && params.limit && rows.length > params.limit) {
// remove the extra row that confirmed if there is another row to move to
nextRow = processed.pop()
nextRow = true
if (processed.length > params.limit) {
processed.pop()
}
}
// get the rows
@@ -377,7 +384,7 @@ export async function search(
// check for pagination
if (paginate && nextRow) {
response.hasNextPage = true
response.bookmark = bookmark + 1
response.bookmark = bookmark + processed.length
}
if (paginate && !nextRow) {
response.hasNextPage = false
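Two things change in buildInternalFieldList above: the branch order flips so plain columns are handled first, and a related table's columns plus its junction doc1/doc2 fields are now gathered into one batch whose size is checked against SQLITE_COLUMN_LIMIT before being committed. The constant matches SQLite's default SQLITE_MAX_COLUMN of 2000; without the guard, a table with enough wide relationships could build a SELECT that SQLite rejects outright. A sketch of the all-or-nothing commit (note the real loop breaks entirely, so no further columns are added once the limit would be exceeded):

```ts
const SQLITE_COLUMN_LIMIT = 2000

// Illustrative helper: commit a related table's columns all or nothing.
function addRelatedFields(fieldList: string[], related: string[]): string[] {
  if (related.length + fieldList.length > SQLITE_COLUMN_LIMIT) {
    return fieldList // skip the batch rather than add a partial column set
  }
  return fieldList.concat(related)
}
```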

View file

@@ -11,6 +11,7 @@ import {
DeleteRows,
DeleteRow,
PaginatedSearchRowResponse,
RowExportFormat,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"
@@ -105,6 +106,7 @@ export class RowAPI extends TestAPI {
exportRows = async (
tableId: string,
body: ExportRowsRequest,
format: RowExportFormat = RowExportFormat.JSON,
expectations?: Expectations
) => {
const response = await this._requestRaw(
@@ -112,7 +114,7 @@ export class RowAPI extends TestAPI {
`/api/${tableId}/rows/exportRows`,
{
body,
query: { format: "json" },
query: { format },
expectations,
}
)

View file

@@ -1,13 +1,14 @@
import {
BulkImportRequest,
BulkImportResponse,
CsvToJsonRequest,
CsvToJsonResponse,
MigrateRequest,
MigrateResponse,
Row,
SaveTableRequest,
SaveTableResponse,
Table,
TableSchema,
ValidateNewTableImportRequest,
ValidateTableImportRequest,
ValidateTableImportResponse,
} from "@budibase/types"
@@ -71,17 +72,13 @@ export class TableAPI extends TestAPI {
}
validateNewTableImport = async (
rows: Row[],
schema: TableSchema,
body: ValidateNewTableImportRequest,
expectations?: Expectations
): Promise<ValidateTableImportResponse> => {
return await this._post<ValidateTableImportResponse>(
`/api/tables/validateNewTableImport`,
{
body: {
rows,
schema,
},
body,
expectations,
}
)
@@ -99,4 +96,14 @@
}
)
}
csvToJson = async (
body: CsvToJsonRequest,
expectations?: Expectations
): Promise<CsvToJsonResponse> => {
return await this._post<CsvToJsonResponse>(`/api/convert/csvToJson`, {
body,
expectations,
})
}
}
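The new csvToJson helper wraps the /api/convert/csvToJson endpoint and is what lets the export tests round-trip CSV back into rows. Typical usage, matching the re-import test earlier in this diff:

```ts
const csvString = await config.api.row.exportRows(
  tableId,
  { query: {} },
  RowExportFormat.CSV
)
const rows = await config.api.table.csvToJson({ csvString })
await config.api.row.bulkImport(tableId, { rows })
```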

View file

@@ -26,6 +26,10 @@ import {
WebhookActionType,
AutomationEventType,
LoopStepType,
FieldSchema,
BBReferenceFieldSubType,
JsonFieldSubType,
AutoFieldSubType,
} from "@budibase/types"
import { LoopInput } from "../../definitions/automations"
import { merge } from "lodash"
@@ -573,3 +577,161 @@ export function basicEnvironmentVariable(
development: dev || prod,
}
}
export function fullSchemaWithoutLinks({
allRequired,
}: {
allRequired?: boolean
}) {
const schema: {
[type in Exclude<FieldType, FieldType.LINK>]: FieldSchema & { type: type }
} = {
[FieldType.STRING]: {
name: "string",
type: FieldType.STRING,
constraints: {
presence: allRequired,
},
},
[FieldType.LONGFORM]: {
name: "longform",
type: FieldType.LONGFORM,
constraints: {
presence: allRequired,
},
},
[FieldType.OPTIONS]: {
name: "options",
type: FieldType.OPTIONS,
constraints: {
presence: allRequired,
inclusion: ["option 1", "option 2", "option 3", "option 4"],
},
},
[FieldType.ARRAY]: {
name: "array",
type: FieldType.ARRAY,
constraints: {
presence: allRequired,
type: JsonFieldSubType.ARRAY,
inclusion: ["options 1", "options 2", "options 3", "options 4"],
},
},
[FieldType.NUMBER]: {
name: "number",
type: FieldType.NUMBER,
constraints: {
presence: allRequired,
},
},
[FieldType.BOOLEAN]: {
name: "boolean",
type: FieldType.BOOLEAN,
constraints: {
presence: allRequired,
},
},
[FieldType.DATETIME]: {
name: "datetime",
type: FieldType.DATETIME,
dateOnly: true,
timeOnly: false,
constraints: {
presence: allRequired,
},
},
[FieldType.FORMULA]: {
name: "formula",
type: FieldType.FORMULA,
formula: "any formula",
constraints: {
presence: allRequired,
},
},
[FieldType.BARCODEQR]: {
name: "barcodeqr",
type: FieldType.BARCODEQR,
constraints: {
presence: allRequired,
},
},
[FieldType.BIGINT]: {
name: "bigint",
type: FieldType.BIGINT,
constraints: {
presence: allRequired,
},
},
[FieldType.BB_REFERENCE]: {
name: "user",
type: FieldType.BB_REFERENCE,
subtype: BBReferenceFieldSubType.USER,
constraints: {
presence: allRequired,
},
},
[FieldType.BB_REFERENCE_SINGLE]: {
name: "users",
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
constraints: {
presence: allRequired,
},
},
[FieldType.ATTACHMENTS]: {
name: "attachments",
type: FieldType.ATTACHMENTS,
constraints: {
presence: allRequired,
},
},
[FieldType.ATTACHMENT_SINGLE]: {
name: "attachment_single",
type: FieldType.ATTACHMENT_SINGLE,
constraints: {
presence: allRequired,
},
},
[FieldType.AUTO]: {
name: "auto",
type: FieldType.AUTO,
subtype: AutoFieldSubType.AUTO_ID,
autocolumn: true,
constraints: {
presence: allRequired,
},
},
[FieldType.JSON]: {
name: "json",
type: FieldType.JSON,
constraints: {
presence: allRequired,
},
},
[FieldType.INTERNAL]: {
name: "internal",
type: FieldType.INTERNAL,
constraints: {
presence: allRequired,
},
},
[FieldType.SIGNATURE_SINGLE]: {
name: "signature_single",
type: FieldType.SIGNATURE_SINGLE,
constraints: {
presence: allRequired,
},
},
}
return schema
}
export function basicAttachment() {
return {
key: generator.guid(),
name: generator.word(),
extension: generator.word(),
size: generator.natural(),
url: `/${generator.guid()}`,
}
}
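The mapped type on fullSchemaWithoutLinks, { [type in Exclude<FieldType, FieldType.LINK>]: FieldSchema & { type: type } }, is doing real work: the compiler rejects the fixture if any non-LINK field type is missing an entry, or if an entry's type does not match its key, so adding a new FieldType forces this fixture to be extended. A generic sketch of the same trick:

```ts
enum Kind {
  A = "a",
  B = "b",
  C = "c",
}

type Spec = { kind: Kind; label: string }

// Every Kind except C must appear, and each entry's `kind` must equal its key.
const specs: { [K in Exclude<Kind, Kind.C>]: Spec & { kind: K } } = {
  [Kind.A]: { kind: Kind.A, label: "first" },
  [Kind.B]: { kind: Kind.B, label: "second" }, // omitting B fails to compile
}
```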

View file

@@ -8,7 +8,6 @@ import {
} from "@budibase/types"
import { ValidColumnNameRegex, helpers, utils } from "@budibase/shared-core"
import { db } from "@budibase/backend-core"
import { parseCsvExport } from "../api/controllers/view/exporters"
type Rows = Array<Row>
@@ -85,7 +84,7 @@ export function validate(
"Column names can't contain special characters"
} else if (
columnData == null &&
!schema[columnName].constraints?.presence
!helpers.schema.isRequired(constraints)
) {
results.schemaValidation[columnName] = true
} else if (
@@ -95,6 +94,12 @@
isAutoColumn
) {
return
} else if (
[FieldType.STRING].includes(columnType) &&
!columnData &&
helpers.schema.isRequired(constraints)
) {
results.schemaValidation[columnName] = false
} else if (columnType === FieldType.NUMBER && isNaN(Number(columnData))) {
// If provided must be a valid number
results.schemaValidation[columnName] = false
@@ -159,7 +164,7 @@ export function parse(rows: Rows, table: Table): Rows {
const columnSchema = schema[columnName]
const { type: columnType } = columnSchema
if (columnType === FieldType.NUMBER) {
if ([FieldType.NUMBER].includes(columnType)) {
// If provided must be a valid number
parsedRow[columnName] = columnData ? Number(columnData) : columnData
} else if (
@@ -171,16 +176,23 @@
parsedRow[columnName] = columnData
? new Date(columnData).toISOString()
: columnData
} else if (
columnType === FieldType.JSON &&
typeof columnData === "string"
) {
parsedRow[columnName] = parseJsonExport(columnData)
} else if (columnType === FieldType.BB_REFERENCE) {
let parsedValues: { _id: string }[] = columnData || []
if (columnData) {
parsedValues = parseCsvExport<{ _id: string }[]>(columnData)
if (columnData && typeof columnData === "string") {
parsedValues = parseJsonExport<{ _id: string }[]>(columnData)
}
parsedRow[columnName] = parsedValues?.map(u => u._id)
} else if (columnType === FieldType.BB_REFERENCE_SINGLE) {
const parsedValue =
columnData && parseCsvExport<{ _id: string }>(columnData)
let parsedValue = columnData
if (columnData && typeof columnData === "string") {
parsedValue = parseJsonExport<{ _id: string }>(columnData)
}
parsedRow[columnName] = parsedValue?._id
} else if (
(columnType === FieldType.ATTACHMENTS ||
@ -188,7 +200,7 @@ export function parse(rows: Rows, table: Table): Rows {
columnType === FieldType.SIGNATURE_SINGLE) &&
typeof columnData === "string"
) {
parsedRow[columnName] = parseCsvExport(columnData)
parsedRow[columnName] = parseJsonExport(columnData)
} else {
parsedRow[columnName] = columnData
}
@@ -204,32 +216,54 @@ function isValidBBReference(
subtype: BBReferenceFieldSubType,
isRequired: boolean
): boolean {
if (typeof data !== "string") {
try {
if (type === FieldType.BB_REFERENCE_SINGLE) {
if (!data) {
return !isRequired
}
const user = parseJsonExport<{ _id: string }>(data)
return db.isGlobalUserID(user._id)
}
switch (subtype) {
case BBReferenceFieldSubType.USER:
case BBReferenceFieldSubType.USERS: {
const userArray = parseJsonExport<{ _id: string }[]>(data)
if (!Array.isArray(userArray)) {
return false
}
const containsWrongId = userArray.find(
user => !db.isGlobalUserID(user._id)
)
return !containsWrongId
}
default:
throw utils.unreachable(subtype)
}
} catch {
return false
}
}
if (type === FieldType.BB_REFERENCE_SINGLE) {
if (!data) {
return !isRequired
}
const user = parseCsvExport<{ _id: string }>(data)
return db.isGlobalUserID(user._id)
function parseJsonExport<T>(value: any) {
if (typeof value !== "string") {
return value
}
try {
const parsed = JSON.parse(value)
switch (subtype) {
case BBReferenceFieldSubType.USER:
case BBReferenceFieldSubType.USERS: {
const userArray = parseCsvExport<{ _id: string }[]>(data)
if (!Array.isArray(userArray)) {
return false
}
const containsWrongId = userArray.find(
user => !db.isGlobalUserID(user._id)
)
return !containsWrongId
return parsed as T
} catch (e: any) {
if (
e.message.startsWith("Expected property name or '}' in JSON at position ")
) {
// This was probably exported as CSV, which uses single quotes instead of double ones
const parsed = JSON.parse(value.replace(/'/g, '"'))
return parsed as T
}
default:
throw utils.unreachable(subtype)
// It is not valid JSON
throw e
}
}
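A usage sketch of parseJsonExport, showing the single-quote fallback that makes CSV-exported complex values importable again (the id is illustrative):

```ts
parseJsonExport<{ _id: string }>("{'_id': 'us_1234'}")
// JSON.parse rejects the single quotes, the error-message prefix matches,
// and the retry on value.replace(/'/g, '"') yields { _id: "us_1234" }

parseJsonExport<{ _id: string }[]>([{ _id: "us_1234" }])
// non-string input passes through unchanged
```

One caveat: the fallback triggers on V8's JSON.parse error wording, so it is engine-specific; a runtime that phrases the error differently would rethrow instead of retrying.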