
Fix re-importing

This commit is contained in:
Adria Navarro 2024-07-31 14:55:25 +02:00
parent b28aaa3a93
commit 24cdfb3443
3 changed files with 73 additions and 9 deletions
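The change makes bulk-importing previously exported rows idempotent: when the caller passes identifierFields: ["_id"], incoming _id values are now kept and matched against existing documents instead of always being replaced with freshly generated ids. Below is a minimal sketch of the request shape this enables; the interface and the example _id value are illustrative stand-ins, not the real @budibase/types definitions.

// Illustrative sketch only: simplified request shape for a re-import that
// updates existing rows. The real BulkImportRequest lives in @budibase/types.
interface BulkImportRequestSketch {
  rows: Record<string, any>[]
  identifierFields?: string[]
}

const reImport: BulkImportRequestSketch = {
  rows: [
    { _id: "ro_ta_users_abc123", name: "Updated existing row" }, // hypothetical id; updates the matching row
    { name: "Brand new row" }, // no _id, so a fresh one is generated
  ],
  identifierFields: ["_id"], // treat _id as the upsert key
}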

View file

@@ -3,6 +3,7 @@ import { handleDataImport } from "./utils"
import {
BulkImportRequest,
BulkImportResponse,
+  FieldType,
RenameColumn,
SaveTableRequest,
SaveTableResponse,
@@ -69,10 +70,22 @@ export async function bulkImport(
) {
const table = await sdk.tables.getTable(ctx.params.tableId)
const { rows, identifierFields } = ctx.request.body
-  await handleDataImport(table, {
-    importRows: rows,
-    identifierFields,
-    user: ctx.user,
-  })
+  await handleDataImport(
+    {
+      ...table,
+      schema: {
+        _id: {
+          name: "_id",
+          type: FieldType.STRING,
+        },
+        ...table.schema,
+      },
+    },
+    {
+      importRows: rows,
+      identifierFields,
+      user: ctx.user,
+    }
+  )
return table
}
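Why the table is wrapped before being handed to handleDataImport: the downstream parse step appears to keep only columns present in table.schema, so without a synthetic string "_id" column the incoming ids would be dropped before importToRows ever sees them, and the `"_id" in table.schema` check added below is the opt-in signal. The helper below is a sketch of that pattern, assuming simplified Table and FieldType stand-ins rather than the real @budibase/types definitions.

// Sketch of the schema-augmentation pattern used above. Table and FieldType
// are simplified stand-ins for the real @budibase/types definitions.
enum FieldType {
  STRING = "string",
}

interface Table {
  _id?: string
  schema: Record<string, { name: string; type: FieldType }>
}

function withCouchIdColumn(table: Table): Table {
  return {
    ...table,
    schema: {
      // Synthetic column so the incoming _id survives import parsing and
      // signals importToRows (via `"_id" in table.schema`) to keep it.
      _id: { name: "_id", type: FieldType.STRING },
      // Spread last so an explicit user-defined _id column, if any, wins.
      ...table.schema,
    },
  }
}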

View file

@@ -124,11 +124,12 @@ export async function importToRows(
table: Table,
user?: ContextUser
) {
-  let originalTable = table
-  let finalData: any = []
+  const originalTable = table
+  const finalData: Row[] = []
+  const keepCouchId = "_id" in table.schema
for (let i = 0; i < data.length; i++) {
let row = data[i]
-    row._id = generateRowID(table._id!)
+    row._id = (keepCouchId && row._id) || generateRowID(table._id!)
row.type = "row"
row.tableId = table._id
@@ -180,7 +181,7 @@ export async function handleDataImport(
const db = context.getAppDB()
const data = parse(importRows, table)
-  let finalData: any = await importToRows(data, table, user)
+  const finalData = await importToRows(data, table, user)
//Set IDs of finalData to match existing row if an update is expected
if (identifierFields.length > 0) {
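The id handling in importToRows therefore becomes: keep the incoming _id only when the schema was augmented with the "_id" column, otherwise fall back to generating a new one, and handleDataImport's existing identifierFields branch can then line the preserved ids up with existing documents. A condensed sketch of that decision, with generateRowID stubbed out (the real helper lives elsewhere in the server package):

// Sketch of the id decision introduced above; generateRowID is a stub here.
const generateRowID = (tableId: string) =>
  `${tableId}_${Math.random().toString(36).slice(2)}` // placeholder format only

function assignRowId(
  row: { _id?: string },
  tableId: string,
  keepCouchId: boolean
): string {
  // Preserve the caller-supplied _id only when the table schema opted in via
  // the synthetic "_id" column; otherwise generate a fresh row id as before.
  return (keepCouchId && row._id) || generateRowID(tableId)
}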

View file

@@ -1298,6 +1298,56 @@ describe.each([
await assertRowUsage(isInternal ? rowUsage + 2 : rowUsage)
})
+    isInternal &&
+      it("should be able to update existing rows on bulkImport", async () => {
+        const table = await config.api.table.save(
+          saveTableRequest({
+            schema: {
+              name: {
+                type: FieldType.STRING,
+                name: "name",
+              },
+              description: {
+                type: FieldType.STRING,
+                name: "description",
+              },
+            },
+          })
+        )
+        const existingRow = await config.api.row.save(table._id!, {
+          name: "Existing row",
+          description: "Existing description",
+        })
+        await config.api.row.bulkImport(table._id!, {
+          rows: [
+            {
+              name: "Row 1",
+              description: "Row 1 description",
+            },
+            { ...existingRow, name: "Updated existing row" },
+            {
+              name: "Row 2",
+              description: "Row 2 description",
+            },
+          ],
+          identifierFields: ["_id"],
+        })
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(3)
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Row 1")
+        expect(rows[0].description).toEqual("Row 1 description")
+        expect(rows[1].name).toEqual("Row 2")
+        expect(rows[1].description).toEqual("Row 2 description")
+        expect(rows[2].name).toEqual("Updated existing row")
+        expect(rows[2].description).toEqual("Existing description")
+      })
// Upserting isn't yet supported in MSSQL, see:
// https://github.com/knex/knex/pull/6050
!isMSSQL &&