1
0
Fork 0
mirror of synced 2024-09-24 21:31:17 +12:00

Add Google Sheets search test.

This commit is contained in:
Sam Rose 2024-09-20 10:17:45 +01:00
parent bd618f2b00
commit af26b915b1
No known key found for this signature in database
3 changed files with 197 additions and 6 deletions

View file

@@ -581,16 +581,15 @@ export class GoogleSheetsIntegration implements DatasourcePlus {
       rows = await sheet.getRows()
     }
-    if (hasFilters && query.paginate) {
-      rows = rows.slice(offset, offset + limit)
-    }
-    let response = rows.map(row =>
-      this.buildRowObject(sheet.headerValues, row.toObject(), row.rowNumber)
-    )
-    response = dataFilters.runQuery(response, query.filters || {})
+    const headerValues = sheet.headerValues
+    let response = rows.map(row =>
+      this.buildRowObject(headerValues, row.toObject(), row.rowNumber)
+    )
+    response = dataFilters.runQuery(response, query.filters || {})
+    if (hasFilters && query.paginate) {
+      response = response.slice(offset, offset + limit)
+    }
     if (query.sort) {
       if (Object.keys(query.sort).length !== 1) {
         console.warn("Googlesheets does not support multiple sorting", {

View file

@@ -5,6 +5,7 @@ import TestConfiguration from "../../tests/utilities/TestConfiguration"
 import {
   Datasource,
   FieldType,
+  Row,
   SourceName,
   Table,
   TableSourceType,
@@ -598,4 +599,193 @@ describe("Google Sheets Integration", () => {
       )
     })
   })
describe("search", () => {
  let table: Table

  // Fresh external table with a single string column, seeded with three rows,
  // before every test so filters always run against known data.
  beforeEach(async () => {
    table = await config.api.table.save({
      name: "Test Table",
      type: "table",
      sourceId: datasource._id!,
      sourceType: TableSourceType.EXTERNAL,
      schema: {
        name: {
          name: "name",
          type: FieldType.STRING,
          constraints: {
            type: "string",
          },
        },
      },
    })
    await config.api.row.bulkImport(table._id!, {
      rows: [{ name: "Foo" }, { name: "Bar" }, { name: "Baz" }],
    })
  })

  it("should be able to find rows with equals filter", async () => {
    const response = await config.api.row.search(table._id!, {
      tableId: table._id!,
      query: {
        equal: {
          name: "Foo",
        },
      },
    })

    expect(response.rows).toHaveLength(1)
    expect(response.rows[0].name).toEqual("Foo")
  })

  it("should be able to find rows with not equals filter", async () => {
    const response = await config.api.row.search(table._id!, {
      tableId: table._id!,
      query: {
        notEqual: {
          name: "Foo",
        },
      },
    })

    expect(response.rows).toHaveLength(2)
    expect(response.rows[0].name).toEqual("Bar")
    expect(response.rows[1].name).toEqual("Baz")
  })

  it("should be able to find rows with empty filter", async () => {
    const response = await config.api.row.search(table._id!, {
      tableId: table._id!,
      query: {
        empty: {
          name: null,
        },
      },
    })

    // Every seeded row has a name, so nothing matches.
    expect(response.rows).toHaveLength(0)
  })

  it("should be able to find rows with not empty filter", async () => {
    const response = await config.api.row.search(table._id!, {
      tableId: table._id!,
      query: {
        notEmpty: {
          name: null,
        },
      },
    })

    expect(response.rows).toHaveLength(3)
  })

  it("should be able to find rows with one of filter", async () => {
    const response = await config.api.row.search(table._id!, {
      tableId: table._id!,
      query: {
        oneOf: {
          name: ["Foo", "Bar"],
        },
      },
    })

    expect(response.rows).toHaveLength(2)
    expect(response.rows[0].name).toEqual("Foo")
    expect(response.rows[1].name).toEqual("Bar")
  })

  it("should be able to find rows with fuzzy filter", async () => {
    const response = await config.api.row.search(table._id!, {
      tableId: table._id!,
      query: {
        fuzzy: {
          name: "oo",
        },
      },
    })

    expect(response.rows).toHaveLength(1)
    expect(response.rows[0].name).toEqual("Foo")
  })

  it("should be able to find rows with range filter", async () => {
    const response = await config.api.row.search(table._id!, {
      tableId: table._id!,
      query: {
        range: {
          name: {
            low: "A",
            high: "C",
          },
        },
      },
    })

    expect(response.rows).toHaveLength(2)
    expect(response.rows[0].name).toEqual("Bar")
    expect(response.rows[1].name).toEqual("Baz")
  })

  it("should paginate correctly", async () => {
    // 50 matching rows plus 50 decoys, so pagination must also filter.
    await config.api.row.bulkImport(table._id!, {
      rows: Array.from({ length: 50 }, () => ({ name: "Unique value!" })),
    })
    await config.api.row.bulkImport(table._id!, {
      rows: Array.from({ length: 50 }, () => ({ name: "Non-unique value!" })),
    })

    let response = await config.api.row.search(table._id!, {
      tableId: table._id!,
      query: { equal: { name: "Unique value!" } },
      paginate: true,
      limit: 10,
    })
    // The limit must be respected on the first page as well as later ones.
    expect(response.rows.length).toBeLessThanOrEqual(10)

    let rows: Row[] = response.rows
    while (response.hasNextPage) {
      response = await config.api.row.search(table._id!, {
        tableId: table._id!,
        query: { equal: { name: "Unique value!" } },
        paginate: true,
        limit: 10,
        bookmark: response.bookmark,
      })
      expect(response.rows.length).toBeLessThanOrEqual(10)
      rows = rows.concat(response.rows)
    }

    // Make sure we only get rows matching the query.
    expect(rows).toHaveLength(50)
    expect(rows.every(row => row.name === "Unique value!")).toBe(true)

    // Make sure all of the rows have a unique ID, i.e. no row was
    // returned twice across pages and no page overlapped another.
    const ids = new Set(rows.map(row => row._id!))
    expect(ids.size).toEqual(50)
  })
})
  })
})

View file

@@ -440,6 +440,8 @@ export class GoogleSheetsMock {
       endColumnIndex: 0,
     })
+    sheet.properties.gridProperties.rowCount = sheet.data[0].rowData.length
     return {
       spreadsheetId: this.spreadsheet.spreadsheetId,
       tableRange: range,