
Merge branch 'master' into node-fetch-mockectomy

Sam Rose, 2024-08-01 15:49:56 +01:00 (committed via GitHub)
commit b2f70f5051
11 changed files with 323 additions and 32 deletions

File 1 of 11: Dockerfile (MSSQL server test image)

@@ -1,4 +1,4 @@
-FROM mcr.microsoft.com/mssql/server:2022-latest
+FROM mcr.microsoft.com/mssql/server@sha256:c4369c38385eba011c10906dc8892425831275bb035d5ce69656da8e29de50d8
 ENV ACCEPT_EULA=Y
 ENV SA_PASSWORD=Passw0rd

File 2 of 11: table controller (validateExistingTableImport)

@@ -17,6 +17,7 @@ import {
   CsvToJsonRequest,
   CsvToJsonResponse,
   FetchTablesResponse,
+  FieldType,
   MigrateRequest,
   MigrateResponse,
   SaveTableRequest,
@@ -178,9 +179,17 @@ export async function validateExistingTableImport(
   const { rows, tableId } = ctx.request.body
   let schema = null
   if (tableId) {
     const table = await sdk.tables.getTable(tableId)
     schema = table.schema
+
+    if (!isExternalTable(table)) {
+      schema._id = {
+        name: "_id",
+        type: FieldType.STRING,
+      }
+    }
   } else {
     ctx.status = 422
     return
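
For internal (CouchDB-backed) tables, the synthetic _id column above lets an import carry existing Couch row ids through validation; external tables are left untouched. A minimal sketch of the resulting behaviour, mirroring the table spec further down, assuming an internal table with a single name column (row values are hypothetical):

    const result = await config.api.table.validateExistingTableImport({
      tableId: internalTable._id,
      rows: [{ _id: docIds.generateRowID(internalTable._id!), name: "a" }],
    })
    // result.schemaValidation now reports the synthetic column as valid:
    // { allValid: true, errors: {}, invalidColumns: [],
    //   schemaValidation: { _id: true, name: true } }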

File 3 of 11: table controller (bulkImport)

@@ -3,6 +3,7 @@ import { handleDataImport } from "./utils"
 import {
   BulkImportRequest,
   BulkImportResponse,
+  FieldType,
   RenameColumn,
   SaveTableRequest,
   SaveTableResponse,
@@ -69,10 +70,22 @@
 ) {
   const table = await sdk.tables.getTable(ctx.params.tableId)
   const { rows, identifierFields } = ctx.request.body
-  await handleDataImport(table, {
-    importRows: rows,
-    identifierFields,
-    user: ctx.user,
-  })
+  await handleDataImport(
+    {
+      ...table,
+      schema: {
+        _id: {
+          name: "_id",
+          type: FieldType.STRING,
+        },
+        ...table.schema,
+      },
+    },
+    {
+      importRows: rows,
+      identifierFields,
+      user: ctx.user,
+    }
+  )
   return table
 }
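
bulkImport widens the schema only on the copy of the table it hands to handleDataImport; the stored table is never saved with an _id column, and the original table object is what gets returned. The spread order also matters: the synthetic entry comes first, so a real schema column named _id would win the merge. A sketch of the merged shape, with hypothetical user columns:

    const importTable: Table = {
      ...table,
      schema: {
        _id: { name: "_id", type: FieldType.STRING }, // synthetic, import-only
        ...table.schema, // e.g. { name: { ... }, description: { ... } }
      },
    }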

File 4 of 11: table import utilities (importToRows / handleDataImport)

@@ -122,13 +122,15 @@ export function makeSureTableUpToDate(table: Table, tableToSave: Table) {
 export async function importToRows(
   data: Row[],
   table: Table,
-  user?: ContextUser
+  user?: ContextUser,
+  opts?: { keepCouchId: boolean }
 ) {
-  let originalTable = table
-  let finalData: any = []
+  const originalTable = table
+  const finalData: Row[] = []
+  const keepCouchId = !!opts?.keepCouchId
   for (let i = 0; i < data.length; i++) {
     let row = data[i]
-    row._id = generateRowID(table._id!)
+    row._id = (keepCouchId && row._id) || generateRowID(table._id!)
     row.type = "row"
     row.tableId = table._id
@@ -180,7 +182,11 @@ export async function handleDataImport(
   const db = context.getAppDB()
   const data = parse(importRows, table)
-  let finalData: any = await importToRows(data, table, user)
+  const finalData = await importToRows(data, table, user, {
+    keepCouchId: identifierFields.includes("_id"),
+  })
+
+  let newRowCount = finalData.length
 
   //Set IDs of finalData to match existing row if an update is expected
   if (identifierFields.length > 0) {
@@ -203,12 +209,14 @@
         if (match) {
           finalItem._id = doc._id
           finalItem._rev = doc._rev
+          newRowCount--
         }
       })
     })
   }
 
-  await quotas.addRows(finalData.length, () => db.bulkDocs(finalData), {
+  await quotas.addRows(newRowCount, () => db.bulkDocs(finalData), {
     tableId: table._id,
   })
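
Taken together, handleDataImport now behaves as an upsert when identifierFields includes "_id": incoming rows keep their Couch ids, a row whose id matches an existing doc becomes an update carrying that doc's _rev, and newRowCount is decremented per match so only genuinely new rows are charged against the row quota. A hypothetical two-row import, for illustration:

    // existing = a row previously saved to the table
    const importRows = [
      { ...existing, name: "Updated name" }, // _id matches: in-place update, not counted
      { name: "Brand new row" },             // no _id: generated id, counted
    ]
    // handleDataImport(table, { importRows, identifierFields: ["_id"], user })
    // ends up calling quotas.addRows(1, ...): one new row, one update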

File 5 of 11: row API spec

@@ -1298,6 +1298,113 @@ describe.each([
       await assertRowUsage(isInternal ? rowUsage + 2 : rowUsage)
     })
 
+    isInternal &&
+      it("should be able to update existing rows on bulkImport", async () => {
+        const table = await config.api.table.save(
+          saveTableRequest({
+            schema: {
+              name: {
+                type: FieldType.STRING,
+                name: "name",
+              },
+              description: {
+                type: FieldType.STRING,
+                name: "description",
+              },
+            },
+          })
+        )
+
+        const existingRow = await config.api.row.save(table._id!, {
+          name: "Existing row",
+          description: "Existing description",
+        })
+
+        const rowUsage = await getRowUsage()
+
+        await config.api.row.bulkImport(table._id!, {
+          rows: [
+            {
+              name: "Row 1",
+              description: "Row 1 description",
+            },
+            { ...existingRow, name: "Updated existing row" },
+            {
+              name: "Row 2",
+              description: "Row 2 description",
+            },
+          ],
+          identifierFields: ["_id"],
+        })
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(3)
+
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Row 1")
+        expect(rows[0].description).toEqual("Row 1 description")
+        expect(rows[1].name).toEqual("Row 2")
+        expect(rows[1].description).toEqual("Row 2 description")
+        expect(rows[2].name).toEqual("Updated existing row")
+        expect(rows[2].description).toEqual("Existing description")
+
+        await assertRowUsage(rowUsage + 2)
+      })
+
+    isInternal &&
+      it("should create new rows if no identifierFields are provided", async () => {
+        const table = await config.api.table.save(
+          saveTableRequest({
+            schema: {
+              name: {
+                type: FieldType.STRING,
+                name: "name",
+              },
+              description: {
+                type: FieldType.STRING,
+                name: "description",
+              },
+            },
+          })
+        )
+
+        const existingRow = await config.api.row.save(table._id!, {
+          name: "Existing row",
+          description: "Existing description",
+        })
+
+        const rowUsage = await getRowUsage()
+
+        await config.api.row.bulkImport(table._id!, {
+          rows: [
+            {
+              name: "Row 1",
+              description: "Row 1 description",
+            },
+            { ...existingRow, name: "Updated existing row" },
+            {
+              name: "Row 2",
+              description: "Row 2 description",
+            },
+          ],
+        })
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(4)
+
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Existing row")
+        expect(rows[0].description).toEqual("Existing description")
+        expect(rows[1].name).toEqual("Row 1")
+        expect(rows[1].description).toEqual("Row 1 description")
+        expect(rows[2].name).toEqual("Row 2")
+        expect(rows[2].description).toEqual("Row 2 description")
+        expect(rows[3].name).toEqual("Updated existing row")
+        expect(rows[3].description).toEqual("Existing description")
+
+        await assertRowUsage(rowUsage + 3)
+      })
+
     // Upserting isn't yet supported in MSSQL, see:
     // https://github.com/knex/knex/pull/6050
     !isMSSQL &&
@@ -1640,23 +1747,38 @@ describe.each([
       table = await config.api.table.save(defaultTable())
     })
 
-    it("should allow exporting all columns", async () => {
-      const existing = await config.api.row.save(table._id!, {})
-      const res = await config.api.row.exportRows(table._id!, {
-        rows: [existing._id!],
-      })
-      const results = JSON.parse(res)
-      expect(results.length).toEqual(1)
-      const row = results[0]
-
-      // Ensure all original columns were exported
-      expect(Object.keys(row).length).toBeGreaterThanOrEqual(
-        Object.keys(existing).length
-      )
-      Object.keys(existing).forEach(key => {
-        expect(row[key]).toEqual(existing[key])
-      })
-    })
+    isInternal &&
+      it("should not export internal couchdb fields", async () => {
+        const existing = await config.api.row.save(table._id!, {
+          name: generator.guid(),
+          description: generator.paragraph(),
+        })
+        const res = await config.api.row.exportRows(table._id!, {
+          rows: [existing._id!],
+        })
+        const results = JSON.parse(res)
+        expect(results.length).toEqual(1)
+        const row = results[0]
+
+        expect(Object.keys(row)).toEqual(["_id", "name", "description"])
+      })
+
+    !isInternal &&
+      it("should allow exporting all columns", async () => {
+        const existing = await config.api.row.save(table._id!, {})
+        const res = await config.api.row.exportRows(table._id!, {
+          rows: [existing._id!],
+        })
+        const results = JSON.parse(res)
+        expect(results.length).toEqual(1)
+        const row = results[0]
+
+        // Ensure all original columns were exported
+        expect(Object.keys(row).length).toBe(Object.keys(existing).length)
+        Object.keys(existing).forEach(key => {
+          expect(row[key]).toEqual(existing[key])
+        })
+      })
 
     it("should allow exporting only certain columns", async () => {
       const existing = await config.api.row.save(table._id!, {})

File 6 of 11: table API spec

@@ -1,4 +1,4 @@
-import { context, events } from "@budibase/backend-core"
+import { context, docIds, events } from "@budibase/backend-core"
 import {
   AutoFieldSubType,
   BBReferenceFieldSubType,
@@ -10,6 +10,7 @@ import {
   Row,
   SaveTableRequest,
   Table,
+  TableSchema,
   TableSourceType,
   User,
   ViewCalculation,
@@ -1022,4 +1023,92 @@
       })
     })
   })
+
+  describe("import validation", () => {
+    const basicSchema: TableSchema = {
+      id: {
+        type: FieldType.NUMBER,
+        name: "id",
+      },
+      name: {
+        type: FieldType.STRING,
+        name: "name",
+      },
+    }
+
+    describe("validateNewTableImport", () => {
+      it("can validate basic imports", async () => {
+        const result = await config.api.table.validateNewTableImport(
+          [{ id: generator.natural(), name: generator.first() }],
+          basicSchema
+        )
+
+        expect(result).toEqual({
+          allValid: true,
+          errors: {},
+          invalidColumns: [],
+          schemaValidation: {
+            id: true,
+            name: true,
+          },
+        })
+      })
+    })
+
+    describe("validateExistingTableImport", () => {
+      it("can validate basic imports", async () => {
+        const table = await config.api.table.save(
+          tableForDatasource(datasource, {
+            primary: ["id"],
+            schema: basicSchema,
+          })
+        )
+        const result = await config.api.table.validateExistingTableImport({
+          tableId: table._id,
+          rows: [{ id: generator.natural(), name: generator.first() }],
+        })
+
+        expect(result).toEqual({
+          allValid: true,
+          errors: {},
+          invalidColumns: [],
+          schemaValidation: {
+            id: true,
+            name: true,
+          },
+        })
+      })
+
+      isInternal &&
+        it("can reimport _id fields for internal tables", async () => {
+          const table = await config.api.table.save(
+            tableForDatasource(datasource, {
+              primary: ["id"],
+              schema: basicSchema,
+            })
+          )
+          const result = await config.api.table.validateExistingTableImport({
+            tableId: table._id,
+            rows: [
+              {
+                _id: docIds.generateRowID(table._id!),
+                id: generator.natural(),
+                name: generator.first(),
+              },
+            ],
+          })
+
+          expect(result).toEqual({
+            allValid: true,
+            errors: {},
+            invalidColumns: [],
+            schemaValidation: {
+              _id: true,
+              id: true,
+              name: true,
+            },
+          })
+        })
+    })
+  })
 })

File 7 of 11: default data builder (buildDefaultDocs)

@@ -651,10 +651,10 @@ export async function buildDefaultDocs() {
       return new LinkDocument(
         employeeData.table._id!,
         "Jobs",
-        employeeData.rows[index]._id,
+        employeeData.rows[index]._id!,
         jobData.table._id!,
         "Assigned",
-        jobData.rows[index]._id
+        jobData.rows[index]._id!
       )
     }
   )

File 8 of 11: SQL Server integration

@@ -29,6 +29,7 @@ import { getReadableErrorMessage } from "./base/errorMapping"
 import sqlServer from "mssql"
 import { sql } from "@budibase/backend-core"
 import { ConfidentialClientApplication } from "@azure/msal-node"
+import env from "../environment"
 import { utils } from "@budibase/shared-core"
@@ -246,6 +247,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
       options: {
         encrypt,
         enableArithAbort: true,
+        requestTimeout: env.QUERY_THREAD_TIMEOUT,
       },
     }
     if (encrypt) {

File 9 of 11: internal row export (exportRows)

@@ -11,6 +11,7 @@ import {
   SearchResponse,
   SortType,
   Table,
+  TableSchema,
   User,
 } from "@budibase/types"
 import { getGlobalUsersFromMetadata } from "../../../../utilities/global"
@@ -137,6 +138,9 @@ export async function exportRows(
   let rows: Row[] = []
   let schema = table.schema
   let headers
+
+  result = trimFields(result, schema)
+
   // Filter data to only specified columns if required
   if (columns && columns.length) {
     for (let i = 0; i < result.length; i++) {
@@ -299,3 +303,13 @@ async function getView(db: Database, viewName: string) {
   }
   return viewInfo
 }
+
+function trimFields(rows: Row[], schema: TableSchema) {
+  const allowedFields = ["_id", ...Object.keys(schema)]
+  const result = rows.map(row =>
+    Object.keys(row)
+      .filter(key => allowedFields.includes(key))
+      .reduce((acc, key) => ({ ...acc, [key]: row[key] }), {} as Row)
+  )
+  return result
+}
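
trimFields keeps _id plus whatever columns the table schema declares, so CouchDB bookkeeping fields set during import (_rev, type, tableId) no longer leak into exports. A hypothetical row, for illustration:

    const schema: TableSchema = { name: { name: "name", type: FieldType.STRING } }
    trimFields(
      [{ _id: "ro_ta_abc", _rev: "1-def", type: "row", tableId: "ta_abc", name: "a" }],
      schema
    )
    // => [{ _id: "ro_ta_abc", name: "a" }]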

File 10 of 11: row export utilities (cleanExportRows)

@@ -76,7 +76,7 @@ export async function getDatasourceAndQuery(
 }
 
 export function cleanExportRows(
-  rows: any[],
+  rows: Row[],
   schema: TableSchema,
   format: string,
   columns?: string[],

File 11 of 11: TableAPI test helper

@@ -3,9 +3,13 @@ import {
   BulkImportResponse,
   MigrateRequest,
   MigrateResponse,
+  Row,
   SaveTableRequest,
   SaveTableResponse,
   Table,
+  TableSchema,
+  ValidateTableImportRequest,
+  ValidateTableImportResponse,
 } from "@budibase/types"
 import { Expectations, TestAPI } from "./base"
@@ -61,8 +65,38 @@
     revId: string,
     expectations?: Expectations
   ): Promise<void> => {
-    return await this._delete<void>(`/api/tables/${tableId}/${revId}`, {
+    return await this._delete(`/api/tables/${tableId}/${revId}`, {
       expectations,
     })
   }
+
+  validateNewTableImport = async (
+    rows: Row[],
+    schema: TableSchema,
+    expectations?: Expectations
+  ): Promise<ValidateTableImportResponse> => {
+    return await this._post<ValidateTableImportResponse>(
+      `/api/tables/validateNewTableImport`,
+      {
+        body: {
+          rows,
+          schema,
+        },
+        expectations,
+      }
+    )
+  }
+
+  validateExistingTableImport = async (
+    body: ValidateTableImportRequest,
+    expectations?: Expectations
+  ): Promise<ValidateTableImportResponse> => {
+    return await this._post<ValidateTableImportResponse>(
+      `/api/tables/validateExistingTableImport`,
+      {
+        body,
+        expectations,
+      }
+    )
+  }
 }
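
Typical usage of the new helpers, as exercised in the table spec above (row values are hypothetical):

    const response = await config.api.table.validateNewTableImport(
      [{ id: 1, name: "Alice" }],
      basicSchema
    )
    expect(response.allValid).toBe(true)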