1
0
Fork 0
Mirror of upstream repository, synced 2024-09-25 13:51:40 +12:00

Fix row.spec.ts.

This commit is contained in:
Sam Rose 2024-09-24 17:46:38 +01:00
parent 0eb90cfbea
commit e3256cb005
No known key found for this signature in database
10 changed files with 36 additions and 33 deletions

View file

@@ -612,7 +612,6 @@ async function runQuery<T>(
* limit {number} The number of results to fetch
* bookmark {string|null} Current bookmark in the recursive search
* rows {array|null} Current results in the recursive search
* @returns {Promise<*[]|*>}
*/
async function recursiveSearch<T>(
dbName: string,

View file

@@ -76,11 +76,11 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
})
const [enrichedRow, oldRow] = await Promise.all([
outputProcessing(table, row, {
outputProcessing(source, row, {
squash: true,
preserveLinks: true,
}),
outputProcessing(table, beforeRow, {
outputProcessing(source, beforeRow, {
squash: true,
preserveLinks: true,
}),

View file

@@ -207,7 +207,7 @@ export async function destroy(ctx: UserCtx<DeleteRowRequest>) {
}
export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
const { tableId } = utils.getSourceId(ctx)
const { tableId, viewId } = utils.getSourceId(ctx)
await context.ensureSnippetContext(true)
@@ -221,7 +221,7 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
const searchParams: RowSearchParams = {
...ctx.request.body,
query: enrichedQuery,
sourceId: tableId,
sourceId: viewId || tableId,
}
ctx.status = 200

View file

@@ -112,7 +112,7 @@ function fixBooleanFields({ row, table }: { row: Row; table: Table }) {
export async function sqlOutputProcessing(
rows: DatasourcePlusQueryResponse,
table: Table,
source: Table | ViewV2,
tables: Record<string, Table>,
relationships: RelationshipsJson[],
opts?: { sqs?: boolean; aggregations?: Aggregation[] }
@@ -120,6 +120,12 @@ export async function sqlOutputProcessing(
if (isKnexEmptyReadResponse(rows)) {
return []
}
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
let finalRows: { [key: string]: Row } = {}
for (let row of rows as Row[]) {
let rowId = row._id

View file

@@ -33,7 +33,7 @@ import {
import sdk from "../../../sdk"
import { jsonFromCsvString } from "../../../utilities/csv"
import { builderSocket } from "../../../websockets"
import { cloneDeep, isEqual } from "lodash"
import { cloneDeep } from "lodash"
import {
helpers,
PROTECTED_EXTERNAL_COLUMNS,
@@ -149,12 +149,7 @@ export async function bulkImport(
ctx: UserCtx<BulkImportRequest, BulkImportResponse>
) {
const tableId = ctx.params.tableId
let tableBefore = await sdk.tables.getTable(tableId)
let tableAfter = await pickApi({ tableId }).bulkImport(ctx)
if (!isEqual(tableBefore, tableAfter)) {
await sdk.tables.saveTable(tableAfter)
}
await pickApi({ tableId }).bulkImport(ctx)
// right now we don't trigger anything for bulk import because it
// can only be done in the builder, but in the future we may need to

View file

@@ -76,7 +76,7 @@ async function waitForEvent(
}
describe.each([
["internal", undefined],
["lucene", undefined],
["sqs", undefined],
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
@@ -2453,9 +2453,15 @@ describe.each([
let flagCleanup: (() => void) | undefined
beforeAll(async () => {
flagCleanup = setCoreEnv({
const env = {
TENANT_FEATURE_FLAGS: `*:${FeatureFlag.ENRICHED_RELATIONSHIPS}`,
})
}
if (isSqs) {
env.TENANT_FEATURE_FLAGS = `${env.TENANT_FEATURE_FLAGS},*:SQS`
} else {
env.TENANT_FEATURE_FLAGS = `${env.TENANT_FEATURE_FLAGS},*:!SQS`
}
flagCleanup = setCoreEnv(env)
const aux2Table = await config.api.table.save(saveTableRequest())
const aux2Data = await config.api.row.save(aux2Table._id!, {})

View file

@@ -248,10 +248,11 @@ function getPrimaryDisplayValue(row: Row, table?: Table) {
export type SquashTableFields = Record<string, { visibleFieldNames: string[] }>
/**
* This function will take the given enriched rows and squash the links to only contain the primary display field.
* @param table The table from which the rows originated.
* This function will take the given enriched rows and squash the links to only
* contain the primary display field.
*
* @param source The table or view from which the rows originated.
* @param enriched The pre-enriched rows (full docs) which are to be squashed.
* @param squashFields Per link column (key) define which columns are allowed while squashing.
* @returns The rows after having their links squashed to only contain the ID and primary display.
*/
export async function squashLinks<T = Row[] | Row>(

View file

@@ -56,7 +56,7 @@ export async function save(
table,
})) as Row
return finaliseRow(table, row, { updateFormula: true })
return finaliseRow(source, row, { updateFormula: true })
}
export async function find(sourceId: string, rowId: string): Promise<Row> {

View file

@@ -31,7 +31,7 @@ export async function search(
const { paginate, query } = options
const params: RowSearchParams = {
sourceId: options.sourceId,
sourceId: table._id!,
sort: options.sort,
sortOrder: options.sortOrder,
sortType: options.sortType,

View file

@@ -308,8 +308,8 @@ export async function search(
const allTables = await sdk.tables.getAllInternalTables()
const allTablesMap = buildTableMap(allTables)
// make sure we have the mapped/latest table
if (table?._id) {
table = allTablesMap[table?._id]
if (table._id) {
table = allTablesMap[table._id]
}
if (!table) {
throw new Error("Unable to find table")
@@ -322,13 +322,6 @@ export async function search(
documentType: DocumentType.ROW,
}
let fields = options.fields
if (fields === undefined) {
fields = buildInternalFieldList(table, allTables, { relationships })
} else {
fields = fields.map(f => mapToUserColumn(f))
}
if (options.aggregations) {
options.aggregations = options.aggregations.map(a => {
a.field = mapToUserColumn(a.field)
@@ -350,7 +343,10 @@ export async function search(
tables: allTablesMap,
columnPrefix: USER_COLUMN_PREFIX,
},
resource: { fields, aggregations: options.aggregations },
resource: {
fields: buildInternalFieldList(table, allTables, { relationships }),
aggregations: options.aggregations,
},
relationships,
}
@@ -394,7 +390,7 @@ export async function search(
// make sure JSON columns corrected
const processed = builder.convertJsonStringColumns<Row>(
table,
await sqlOutputProcessing(rows, table, allTablesMap, relationships, {
await sqlOutputProcessing(rows, source, allTablesMap, relationships, {
sqs: true,
aggregations: options.aggregations,
})
@@ -411,7 +407,7 @@ export async function search(
}
// get the rows
let finalRows = await outputProcessing(table, processed, {
let finalRows = await outputProcessing(source, processed, {
preserveLinks: true,
squash: true,
aggregations: options.aggregations,