
Merge branch 'master' into delete-sql-spec-ts-2

Sam Rose 2024-05-22 10:35:16 +01:00 committed by GitHub
commit e53dddfdf0
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
17 changed files with 477 additions and 143 deletions

View file

@@ -61,7 +61,7 @@ http {
     set $csp_img "img-src http: https: data: blob:";
     set $csp_manifest "manifest-src 'self'";
     set $csp_media "media-src 'self' https://js.intercomcdn.com https://cdn.budi.live";
-    set $csp_worker "worker-src 'none'";
+    set $csp_worker "worker-src blob:";

     error_page 502 503 504 /error.html;
     location = /error.html {

View file

@@ -1,5 +1,5 @@
 {
-  "version": "2.26.4",
+  "version": "2.27.1",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

View file

@@ -4,13 +4,14 @@
   export let max
   export let hideArrows = false
   export let width
+  export let type = "number"

   $: style = width ? `width:${width}px;` : ""
 </script>

 <input
   class:hide-arrows={hideArrows}
-  type="number"
+  {type}
   {style}
   {value}
   {min}
@@ -51,4 +52,7 @@
   input.hide-arrows {
     -moz-appearance: textfield;
   }
+  input[type="time"]::-webkit-calendar-picker-indicator {
+    display: none;
+  }
 </style>

View file

@@ -1,5 +1,4 @@
 <script>
-  import { cleanInput } from "./utils"
   import dayjs from "dayjs"
   import NumberInput from "./NumberInput.svelte"
   import { createEventDispatcher } from "svelte"
@@ -8,39 +7,26 @@
   const dispatch = createEventDispatcher()

-  $: displayValue = value || dayjs()
+  $: displayValue = value?.format("HH:mm")

-  const handleHourChange = e => {
-    dispatch("change", displayValue.hour(parseInt(e.target.value)))
-  }
-
-  const handleMinuteChange = e => {
-    dispatch("change", displayValue.minute(parseInt(e.target.value)))
+  const handleChange = e => {
+    if (!e.target.value) {
+      dispatch("change", undefined)
+      return
+    }
+    const [hour, minute] = e.target.value.split(":").map(x => parseInt(x))
+    dispatch("change", (value || dayjs()).hour(hour).minute(minute))
   }
-
-  const cleanHour = cleanInput({ max: 23, pad: 2, fallback: "00" })
-  const cleanMinute = cleanInput({ max: 59, pad: 2, fallback: "00" })
 </script>

 <div class="time-picker">
   <NumberInput
     hideArrows
-    value={displayValue.hour().toString().padStart(2, "0")}
-    min={0}
-    max={23}
-    width={20}
-    on:input={cleanHour}
-    on:change={handleHourChange}
-  />
-  <span>:</span>
-  <NumberInput
-    hideArrows
-    value={displayValue.minute().toString().padStart(2, "0")}
-    min={0}
-    max={59}
-    width={20}
-    on:input={cleanMinute}
-    on:change={handleMinuteChange}
+    type={"time"}
+    value={displayValue}
+    on:input={handleChange}
+    on:change={handleChange}
   />
 </div>
@@ -50,10 +36,4 @@
     flex-direction: row;
     align-items: center;
   }
-
-  .time-picker span {
-    font-weight: bold;
-    font-size: 18px;
-    z-index: 0;
-    margin-bottom: 1px;
-  }
 </style>
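For reference, a minimal standalone sketch of how the new single handler turns the input's "HH:mm" string back into a dayjs value (illustration only, not the component itself):

    import dayjs, { Dayjs } from "dayjs"

    // Mirrors handleChange above: an empty input clears the value, otherwise
    // the "HH:mm" string is applied to the existing dayjs value (or to "now"
    // when there is none yet).
    const toDayjsValue = (value: Dayjs | undefined, inputValue: string) => {
      if (!inputValue) {
        return undefined
      }
      const [hour, minute] = inputValue.split(":").map(x => parseInt(x))
      return (value || dayjs()).hour(hour).minute(minute)
    }

    toDayjsValue(undefined, "09:30")?.format("HH:mm") // "09:30"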

View file

@@ -166,7 +166,7 @@ export const stringifyDate = (
   const offsetForTimezone = (enableTime && ignoreTimezones) || timeOnly
   if (offsetForTimezone) {
     // Ensure we use the correct offset for the date
-    const referenceDate = timeOnly ? new Date() : value.toDate()
+    const referenceDate = value.toDate()
     const offset = referenceDate.getTimezoneOffset() * 60000
     return new Date(value.valueOf() - offset).toISOString().slice(0, -1)
   }
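A worked example of the offset arithmetic, assuming the code runs in a UTC+1 zone (the value is illustrative):

    import dayjs from "dayjs"

    const value = dayjs("2024-05-22T10:00:00+01:00")
    const referenceDate = value.toDate()
    // getTimezoneOffset() returns -60 in a UTC+1 zone, so offset = -3,600,000 ms
    const offset = referenceDate.getTimezoneOffset() * 60000
    // Shifting by the offset makes toISOString() print the local wall-clock time;
    // slice(0, -1) drops the trailing "Z" so no timezone is implied.
    new Date(value.valueOf() - offset).toISOString().slice(0, -1)
    // -> "2024-05-22T10:00:00.000"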

View file

@@ -20,7 +20,7 @@ import {
   previewStore,
   tables,
   componentTreeNodesStore,
-} from "stores/builder/index"
+} from "stores/builder"
 import { buildFormSchema, getSchemaForDatasource } from "dataBinding"
 import {
   BUDIBASE_INTERNAL_DB_ID,
@@ -30,6 +30,7 @@ import {
 } from "constants/backend"
 import BudiStore from "../BudiStore"
 import { Utils } from "@budibase/frontend-core"
+import { FieldType } from "@budibase/types"

 export const INITIAL_COMPONENTS_STATE = {
   components: {},
@@ -296,6 +297,80 @@ export class ComponentStore extends BudiStore {
         }
       }
     })
// Add default bindings to card blocks
if (component._component.endsWith("/cardsblock")) {
// Only proceed if the card is empty, i.e. we just changed datasource or
// just created the card
const cardKeys = ["cardTitle", "cardSubtitle", "cardDescription"]
if (cardKeys.every(key => !component[key]) && !component.cardImageURL) {
const { _id, dataSource } = component
if (dataSource) {
const { schema, table } = getSchemaForDatasource(screen, dataSource)
// Finds fields by types from the schema of the configured datasource
const findFieldTypes = fieldTypes => {
if (!Array.isArray(fieldTypes)) {
fieldTypes = [fieldTypes]
}
return Object.entries(schema || {})
.filter(([name, fieldSchema]) => {
return (
fieldTypes.includes(fieldSchema.type) &&
!fieldSchema.autoColumn &&
name !== table?.primaryDisplay &&
!name.startsWith("_")
)
})
.map(([name]) => name)
}
// Inserts a card binding for a certain setting
const addBinding = (key, fallback, ...parts) => {
if (parts.some(x => x == null)) {
component[key] = fallback
} else {
parts.unshift(`${_id}-repeater`)
component[key] = `{{ ${parts.map(safe).join(".")} }}`
}
}
// Extract good field candidates to prefill our cards with.
// Use the primary display as the best field, if it exists.
const shortFields = [
...findFieldTypes(FieldType.STRING),
...findFieldTypes(FieldType.OPTIONS),
...findFieldTypes(FieldType.ARRAY),
...findFieldTypes(FieldType.NUMBER),
]
const longFields = findFieldTypes(FieldType.LONGFORM)
if (schema?.[table?.primaryDisplay]) {
shortFields.unshift(table.primaryDisplay)
}
// Fill title and subtitle with short fields
addBinding("cardTitle", "Title", shortFields[0])
addBinding("cardSubtitle", "Subtitle", shortFields[1])
// Fill description with a long field if possible
const longField = longFields[0] ?? shortFields[2]
addBinding("cardDescription", "Description", longField)
// Attempt to fill the image setting.
// Check single attachment fields first.
let imgField = findFieldTypes(FieldType.ATTACHMENT_SINGLE)[0]
if (imgField) {
addBinding("cardImageURL", null, imgField, "url")
} else {
// Then try multi-attachment fields if no single ones exist
imgField = findFieldTypes(FieldType.ATTACHMENTS)[0]
if (imgField) {
addBinding("cardImageURL", null, imgField, 0, "url")
}
}
}
}
}
   }
 }

 /**
@@ -324,21 +399,21 @@ export class ComponentStore extends BudiStore {
       ...presetProps,
     }

-    // Enrich empty settings
+    // Standard post processing
     this.enrichEmptySettings(instance, {
       parent,
       screen: get(selectedScreen),
       useDefaultValues: true,
     })
-
-    // Migrate nested component settings
     this.migrateSettings(instance)

-    // Add any extra properties the component needs
+    // Custom post processing for creation only
     let extras = {}
     if (definition.hasChildren) {
       extras._children = []
     }
+
+    // Add step name to form steps
     if (componentName.endsWith("/formstep")) {
       const parentForm = findClosestMatchingComponent(
         get(selectedScreen).props,
@@ -351,6 +426,7 @@ export class ComponentStore extends BudiStore {
       extras.step = formSteps.length + 1
       extras._instanceName = `Step ${formSteps.length + 1}`
     }
+
     return {
       ...cloneDeep(instance),
       ...extras,
@@ -463,7 +539,6 @@ export class ComponentStore extends BudiStore {
     if (!componentId || !screenId) {
       const state = get(this.store)
       componentId = componentId || state.selectedComponentId
-
       const screenState = get(screenStore)
       screenId = screenId || screenState.selectedScreenId
     }
@@ -471,7 +546,6 @@ export class ComponentStore extends BudiStore {
       return
     }
     const patchScreen = screen => {
-      // findComponent looks in the tree not comp.settings[0]
       let component = findComponent(screen.props, componentId)
       if (!component) {
         return false
@@ -480,7 +554,7 @@ export class ComponentStore extends BudiStore {
       // Mutates the fetched component with updates
       const patchResult = patchFn(component, screen)

-      // Mutates the component with any required settings updates
+      // Post processing
       const migrated = this.migrateSettings(component)

       // Returning an explicit false signifies that we should skip this
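For context on the cards-block change above, a rough sketch of the bindings enrichEmptySettings now generates (the _id and field names are hypothetical; safe is makePropSafe):

    import { makePropSafe as safe } from "@budibase/string-templates"

    const _id = "abc123" // hypothetical component _id
    const binding = (...parts: string[]) =>
      `{{ ${[safe(`${_id}-repeater`), ...parts.map(safe)].join(".")} }}`

    binding("MediaTitle")        // roughly "{{ [abc123-repeater].[MediaTitle] }}", used for cardTitle
    binding("MediaImage", "url") // roughly "{{ [abc123-repeater].[MediaImage].[url] }}", used for cardImageURL
    // Fields that cannot be resolved fall back to the static strings
    // "Title", "Subtitle" and "Description".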

View file

@@ -23,6 +23,7 @@ import {
   DB_TYPE_EXTERNAL,
   DEFAULT_BB_DATASOURCE_ID,
 } from "constants/backend"
+import { makePropSafe as safe } from "@budibase/string-templates"

 // Could move to fixtures
 const COMP_PREFIX = "@budibase/standard-components"
@@ -360,8 +361,30 @@ describe("Component store", () => {
       resourceId: internalTableDoc._id,
       type: "table",
     })
+    return comp
   }
it("enrichEmptySettings - initialise cards blocks with correct fields", async ctx => {
const comp = enrichSettingsDS("cardsblock", ctx)
const expectBinding = (setting, ...parts) => {
expect(comp[setting]).toStrictEqual(
`{{ ${safe(`${comp._id}-repeater`)}.${parts.map(safe).join(".")} }}`
)
}
expectBinding("cardTitle", internalTableDoc.schema.MediaTitle.name)
expectBinding("cardSubtitle", internalTableDoc.schema.MediaVersion.name)
expectBinding(
"cardDescription",
internalTableDoc.schema.MediaDescription.name
)
expectBinding(
"cardImageURL",
internalTableDoc.schema.MediaImage.name,
"url"
)
})
it("enrichEmptySettings - set default datasource for 'table' setting type", async ctx => { it("enrichEmptySettings - set default datasource for 'table' setting type", async ctx => {
enrichSettingsDS("formblock", ctx) enrichSettingsDS("formblock", ctx)
}) })

View file

@@ -8,6 +8,7 @@ import {
   DB_TYPE_EXTERNAL,
   DEFAULT_BB_DATASOURCE_ID,
 } from "constants/backend"
+import { FieldType } from "@budibase/types"

 const getDocId = () => {
   return v4().replace(/-/g, "")
@@ -45,6 +46,52 @@ export const COMPONENT_DEFINITIONS = {
       },
     ],
   },
cardsblock: {
block: true,
name: "Cards Block",
settings: [
{
type: "dataSource",
label: "Data",
key: "dataSource",
required: true,
},
{
section: true,
name: "Cards",
settings: [
{
type: "text",
key: "cardTitle",
label: "Title",
nested: true,
resetOn: "dataSource",
},
{
type: "text",
key: "cardSubtitle",
label: "Subtitle",
nested: true,
resetOn: "dataSource",
},
{
type: "text",
key: "cardDescription",
label: "Description",
nested: true,
resetOn: "dataSource",
},
{
type: "text",
key: "cardImageURL",
label: "Image URL",
nested: true,
resetOn: "dataSource",
},
],
},
],
},
   container: {
     name: "Container",
   },
@@ -262,14 +309,23 @@ export const internalTableDoc = {
   name: "Media",
   sourceId: BUDIBASE_INTERNAL_DB_ID,
   sourceType: DB_TYPE_INTERNAL,
+  primaryDisplay: "MediaTitle",
   schema: {
     MediaTitle: {
       name: "MediaTitle",
-      type: "string",
+      type: FieldType.STRING,
     },
     MediaVersion: {
       name: "MediaVersion",
-      type: "string",
+      type: FieldType.STRING,
     },
+    MediaDescription: {
+      name: "MediaDescription",
+      type: FieldType.LONGFORM,
+    },
+    MediaImage: {
+      name: "MediaImage",
+      type: FieldType.ATTACHMENT_SINGLE,
+    },
   },
 }

View file

@@ -6243,27 +6243,28 @@
         "key": "cardTitle",
         "label": "Title",
         "nested": true,
-        "defaultValue": "Title"
+        "resetOn": "dataSource"
       },
       {
         "type": "text",
         "key": "cardSubtitle",
         "label": "Subtitle",
         "nested": true,
-        "defaultValue": "Subtitle"
+        "resetOn": "dataSource"
       },
       {
         "type": "text",
         "key": "cardDescription",
         "label": "Description",
         "nested": true,
-        "defaultValue": "Description"
+        "resetOn": "dataSource"
       },
       {
         "type": "text",
         "key": "cardImageURL",
         "label": "Image URL",
-        "nested": true
+        "nested": true,
+        "resetOn": "dataSource"
       },
       {
         "type": "boolean",

View file

@@ -1,3 +1,4 @@
+import dayjs from "dayjs"
 import {
   AutoFieldSubType,
   AutoReason,
@@ -285,65 +286,73 @@ export class ExternalRequest<T extends Operation> {
       // parse floats/numbers
       if (field.type === FieldType.NUMBER && !isNaN(parseFloat(row[key]))) {
         newRow[key] = parseFloat(row[key])
-      }
-      // if its not a link then just copy it over
-      if (field.type !== FieldType.LINK) {
-        newRow[key] = row[key]
-        continue
-      }
-      const { tableName: linkTableName } = breakExternalTableId(field?.tableId)
-      // table has to exist for many to many
-      if (!linkTableName || !this.tables[linkTableName]) {
-        continue
-      }
-      const linkTable = this.tables[linkTableName]
-      // @ts-ignore
-      const linkTablePrimary = linkTable.primary[0]
-      // one to many
-      if (isOneSide(field)) {
-        let id = row[key][0]
-        if (id) {
-          if (typeof row[key] === "string") {
-            id = decodeURIComponent(row[key]).match(/\[(.*?)\]/)?.[1]
-          }
-          newRow[field.foreignKey || linkTablePrimary] = breakRowIdField(id)[0]
-        } else {
-          // Removing from both new and row, as we don't know if it has already been processed
-          row[field.foreignKey || linkTablePrimary] = null
-          newRow[field.foreignKey || linkTablePrimary] = null
-        }
-      }
-      // many to many
-      else if (isManyToMany(field)) {
-        // we're not inserting a doc, will be a bunch of update calls
-        const otherKey: string = field.throughFrom || linkTablePrimary
-        const thisKey: string = field.throughTo || tablePrimary
-        for (const relationship of row[key]) {
-          manyRelationships.push({
-            tableId: field.through || field.tableId,
-            isUpdate: false,
-            key: otherKey,
-            [otherKey]: breakRowIdField(relationship)[0],
-            // leave the ID for enrichment later
-            [thisKey]: `{{ literal ${tablePrimary} }}`,
-          })
-        }
-      }
-      // many to one
-      else {
-        const thisKey: string = "id"
-        // @ts-ignore
-        const otherKey: string = field.fieldName
-        for (const relationship of row[key]) {
-          manyRelationships.push({
-            tableId: field.tableId,
-            isUpdate: true,
-            key: otherKey,
-            [thisKey]: breakRowIdField(relationship)[0],
-            // leave the ID for enrichment later
-            [otherKey]: `{{ literal ${tablePrimary} }}`,
-          })
-        }
+      } else if (field.type === FieldType.LINK) {
+        const { tableName: linkTableName } = breakExternalTableId(
+          field?.tableId
+        )
+        // table has to exist for many to many
+        if (!linkTableName || !this.tables[linkTableName]) {
+          continue
+        }
+        const linkTable = this.tables[linkTableName]
+        // @ts-ignore
+        const linkTablePrimary = linkTable.primary[0]
+        // one to many
+        if (isOneSide(field)) {
+          let id = row[key][0]
+          if (id) {
+            if (typeof row[key] === "string") {
+              id = decodeURIComponent(row[key]).match(/\[(.*?)\]/)?.[1]
+            }
+            newRow[field.foreignKey || linkTablePrimary] =
+              breakRowIdField(id)[0]
+          } else {
+            // Removing from both new and row, as we don't know if it has already been processed
+            row[field.foreignKey || linkTablePrimary] = null
+            newRow[field.foreignKey || linkTablePrimary] = null
+          }
+        }
+        // many to many
+        else if (isManyToMany(field)) {
+          // we're not inserting a doc, will be a bunch of update calls
+          const otherKey: string = field.throughFrom || linkTablePrimary
+          const thisKey: string = field.throughTo || tablePrimary
+          for (const relationship of row[key]) {
+            manyRelationships.push({
+              tableId: field.through || field.tableId,
+              isUpdate: false,
+              key: otherKey,
+              [otherKey]: breakRowIdField(relationship)[0],
+              // leave the ID for enrichment later
+              [thisKey]: `{{ literal ${tablePrimary} }}`,
+            })
+          }
+        }
+        // many to one
+        else {
+          const thisKey: string = "id"
+          // @ts-ignore
+          const otherKey: string = field.fieldName
+          for (const relationship of row[key]) {
+            manyRelationships.push({
+              tableId: field.tableId,
+              isUpdate: true,
+              key: otherKey,
+              [thisKey]: breakRowIdField(relationship)[0],
+              // leave the ID for enrichment later
+              [otherKey]: `{{ literal ${tablePrimary} }}`,
+            })
+          }
+        }
+      } else if (
+        field.type === FieldType.DATETIME &&
+        field.timeOnly &&
+        row[key] &&
+        dayjs(row[key]).isValid()
+      ) {
+        newRow[key] = dayjs(row[key]).format("HH:mm")
+      } else {
+        newRow[key] = row[key]
       }
     }
     // we return the relationships that may need to be created in the through table
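A small sketch of the new time-only normalisation in isolation (assumes dayjs; the input value is illustrative):

    import dayjs from "dayjs"

    // Mirrors the new DATETIME/timeOnly branch: valid values are reduced to
    // "HH:mm", anything else falls through to a plain copy.
    const normaliseTimeOnly = (value: unknown) =>
      value && dayjs(value as string).isValid()
        ? dayjs(value as string).format("HH:mm")
        : value

    normaliseTimeOnly("1970-01-01T10:30:00") // "10:30"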

View file

@@ -80,13 +80,14 @@ describe.each([
   }

   async function createRows(rows: Record<string, any>[]) {
-    await config.api.row.bulkImport(table._id!, { rows })
+    // Shuffling to avoid false positives given a fixed order
+    await config.api.row.bulkImport(table._id!, { rows: _.shuffle(rows) })
   }

   class SearchAssertion {
     constructor(private readonly query: RowSearchParams) {}

-    private findRow(expectedRow: any, foundRows: any[]) {
+    private popRow(expectedRow: any, foundRows: any[]) {
       const row = foundRows.find(foundRow => _.isMatch(foundRow, expectedRow))
       if (!row) {
         const fields = Object.keys(expectedRow)
@@ -99,6 +100,9 @@ describe.each([
           )} in ${JSON.stringify(searchedObjects)}`
         )
       }
+
+      // Ensuring the same row is not matched twice
+      foundRows.splice(foundRows.indexOf(row), 1)
       return row
     }
@@ -115,9 +119,9 @@ describe.each([
     // eslint-disable-next-line jest/no-standalone-expect
     expect(foundRows).toHaveLength(expectedRows.length)
     // eslint-disable-next-line jest/no-standalone-expect
-    expect(foundRows).toEqual(
+    expect([...foundRows]).toEqual(
       expectedRows.map((expectedRow: any) =>
-        expect.objectContaining(this.findRow(expectedRow, foundRows))
+        expect.objectContaining(this.popRow(expectedRow, foundRows))
       )
     )
   }
@@ -134,10 +138,10 @@ describe.each([
     // eslint-disable-next-line jest/no-standalone-expect
     expect(foundRows).toHaveLength(expectedRows.length)
     // eslint-disable-next-line jest/no-standalone-expect
-    expect(foundRows).toEqual(
+    expect([...foundRows]).toEqual(
       expect.arrayContaining(
         expectedRows.map((expectedRow: any) =>
-          expect.objectContaining(this.findRow(expectedRow, foundRows))
+          expect.objectContaining(this.popRow(expectedRow, foundRows))
         )
       )
     )
@@ -153,10 +157,10 @@ describe.each([
     })
     // eslint-disable-next-line jest/no-standalone-expect
-    expect(foundRows).toEqual(
+    expect([...foundRows]).toEqual(
       expect.arrayContaining(
         expectedRows.map((expectedRow: any) =>
-          expect.objectContaining(this.findRow(expectedRow, foundRows))
+          expect.objectContaining(this.popRow(expectedRow, foundRows))
         )
       )
     )
@@ -1010,6 +1014,159 @@ describe.each([
     })
   })

!isInternal &&
describe("datetime - time only", () => {
const T_1000 = "10:00"
const T_1045 = "10:45"
const T_1200 = "12:00"
const T_1530 = "15:30"
const T_0000 = "00:00"
const UNEXISTING_TIME = "10:01"
const NULL_TIME__ID = `null_time__id`
beforeAll(async () => {
await createTable({
timeid: { name: "timeid", type: FieldType.STRING },
time: { name: "time", type: FieldType.DATETIME, timeOnly: true },
})
await createRows([
{ timeid: NULL_TIME__ID, time: null },
{ time: T_1000 },
{ time: T_1045 },
{ time: T_1200 },
{ time: T_1530 },
{ time: T_0000 },
])
})
describe("equal", () => {
it("successfully finds a row", () =>
expectQuery({ equal: { time: T_1000 } }).toContainExactly([
{ time: "10:00:00" },
]))
it("fails to find nonexistent row", () =>
expectQuery({ equal: { time: UNEXISTING_TIME } }).toFindNothing())
})
describe("notEqual", () => {
it("successfully finds a row", () =>
expectQuery({ notEqual: { time: T_1000 } }).toContainExactly([
{ time: "10:45:00" },
{ time: "12:00:00" },
{ time: "15:30:00" },
{ time: "00:00:00" },
]))
it("return all when requesting non-existing", () =>
expectQuery({ notEqual: { time: UNEXISTING_TIME } }).toContainExactly(
[
{ time: "10:00:00" },
{ time: "10:45:00" },
{ time: "12:00:00" },
{ time: "15:30:00" },
{ time: "00:00:00" },
]
))
})
describe("oneOf", () => {
it("successfully finds a row", () =>
expectQuery({ oneOf: { time: [T_1000] } }).toContainExactly([
{ time: "10:00:00" },
]))
it("fails to find nonexistent row", () =>
expectQuery({ oneOf: { time: [UNEXISTING_TIME] } }).toFindNothing())
})
describe("range", () => {
it("successfully finds a row", () =>
expectQuery({
range: { time: { low: T_1045, high: T_1045 } },
}).toContainExactly([{ time: "10:45:00" }]))
it("successfully finds multiple rows", () =>
expectQuery({
range: { time: { low: T_1045, high: T_1530 } },
}).toContainExactly([
{ time: "10:45:00" },
{ time: "12:00:00" },
{ time: "15:30:00" },
]))
it("successfully finds no rows", () =>
expectQuery({
range: { time: { low: UNEXISTING_TIME, high: UNEXISTING_TIME } },
}).toFindNothing())
})
describe("sort", () => {
it("sorts ascending", () =>
expectSearch({
query: {},
sort: "time",
sortOrder: SortOrder.ASCENDING,
}).toMatchExactly([
{ timeid: NULL_TIME__ID },
{ time: "00:00:00" },
{ time: "10:00:00" },
{ time: "10:45:00" },
{ time: "12:00:00" },
{ time: "15:30:00" },
]))
it("sorts descending", () =>
expectSearch({
query: {},
sort: "time",
sortOrder: SortOrder.DESCENDING,
}).toMatchExactly([
{ time: "15:30:00" },
{ time: "12:00:00" },
{ time: "10:45:00" },
{ time: "10:00:00" },
{ time: "00:00:00" },
{ timeid: NULL_TIME__ID },
]))
describe("sortType STRING", () => {
it("sorts ascending", () =>
expectSearch({
query: {},
sort: "time",
sortType: SortType.STRING,
sortOrder: SortOrder.ASCENDING,
}).toMatchExactly([
{ timeid: NULL_TIME__ID },
{ time: "00:00:00" },
{ time: "10:00:00" },
{ time: "10:45:00" },
{ time: "12:00:00" },
{ time: "15:30:00" },
]))
it("sorts descending", () =>
expectSearch({
query: {},
sort: "time",
sortType: SortType.STRING,
sortOrder: SortOrder.DESCENDING,
}).toMatchExactly([
{ time: "15:30:00" },
{ time: "12:00:00" },
{ time: "10:45:00" },
{ time: "10:00:00" },
{ time: "00:00:00" },
{ timeid: NULL_TIME__ID },
]))
})
})
})
   describe.each([FieldType.ARRAY, FieldType.OPTIONS])("%s", () => {
     beforeAll(async () => {
       table = await createTable({
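The findRow-to-popRow rename above matters because each matched row is now consumed, so two identical expected rows can no longer both match the same result. A reduced sketch of the idea (not the test helper itself):

    import _ from "lodash"

    function popRow(expected: object, found: object[]): object {
      const row = found.find(f => _.isMatch(f, expected))
      if (!row) {
        throw new Error(`row not found: ${JSON.stringify(expected)}`)
      }
      // Ensuring the same row is not matched twice
      found.splice(found.indexOf(row), 1)
      return row
    }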

View file

@@ -122,11 +122,8 @@ function generateSelectStatement(
       const fieldNames = field.split(/\./g)
       const tableName = fieldNames[0]
       const columnName = fieldNames[1]
-      if (
-        columnName &&
-        schema?.[columnName] &&
-        knex.client.config.client === SqlClient.POSTGRES
-      ) {
+      const columnSchema = schema?.[columnName]
+      if (columnSchema && knex.client.config.client === SqlClient.POSTGRES) {
         const externalType = schema[columnName].externalType
         if (externalType?.includes("money")) {
           return knex.raw(
@@ -134,6 +131,14 @@ function generateSelectStatement(
           )
         }
       }
+      if (
+        knex.client.config.client === SqlClient.MS_SQL &&
+        columnSchema?.type === FieldType.DATETIME &&
+        columnSchema.timeOnly
+      ) {
+        // Time gets returned as timestamp from mssql, not matching the expected HH:mm format
+        return knex.raw(`CONVERT(varchar, ${field}, 108) as "${field}"`)
+      }
       return `${field} as ${field}`
     })
 }
@@ -383,7 +388,13 @@ class InternalBuilder {
       for (let [key, value] of Object.entries(sort)) {
         const direction =
           value.direction === SortDirection.ASCENDING ? "asc" : "desc"
-        query = query.orderBy(`${aliased}.${key}`, direction)
+        let nulls
+        if (this.client === SqlClient.POSTGRES) {
+          // All other clients already sort this as expected by default, and adding this to the rest of the clients is causing issues
+          nulls = value.direction === SortDirection.ASCENDING ? "first" : "last"
+        }
+
+        query = query.orderBy(`${aliased}.${key}`, direction, nulls)
       }
     } else if (this.client === SqlClient.MS_SQL && paginate?.limit) {
       // @ts-ignore
@@ -634,12 +645,13 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
    */
   _query(json: QueryJson, opts: QueryOptions = {}): SqlQuery | SqlQuery[] {
     const sqlClient = this.getSqlClient()
-    const config: { client: string; useNullAsDefault?: boolean } = {
+    const config: Knex.Config = {
       client: sqlClient,
     }
     if (sqlClient === SqlClient.SQL_LITE) {
       config.useNullAsDefault = true
     }
+
     const client = knex(config)
     let query: Knex.QueryBuilder
     const builder = new InternalBuilder(sqlClient)
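Knex takes the nulls placement as a third argument to orderBy, which is what produces the "nulls first"/"nulls last" clauses asserted in the SQL capture tests below; a minimal query-building sketch (table and column names are illustrative):

    import knex from "knex"

    const pg = knex({ client: "pg" }) // query building only, no connection needed
    pg("persons as a").select("*").orderBy("a.firstname", "asc", "first").toString()
    // -> roughly: select * from "persons" as "a" order by "a"."firstname" asc nulls first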

View file

@@ -79,9 +79,13 @@ function generateSchema(
       schema.boolean(key)
       break
     case FieldType.DATETIME:
-      schema.datetime(key, {
-        useTz: !column.ignoreTimezones,
-      })
+      if (!column.timeOnly) {
+        schema.datetime(key, {
+          useTz: !column.ignoreTimezones,
+        })
+      } else {
+        schema.time(key)
+      }
       break
     case FieldType.ARRAY:
     case FieldType.BB_REFERENCE:
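For reference, a rough sketch of the two Knex schema-builder calls side by side (Postgres client, illustrative table and column names):

    import knex from "knex"

    const pg = knex({ client: "pg" })
    pg.schema
      .createTable("example", table => {
        table.datetime("created_at", { useTz: true }) // timestamp with time zone
        table.time("opens_at") // plain time column, used for timeOnly fields
      })
      .toString()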

View file

@@ -61,9 +61,9 @@ describe("Captures of real examples", () => {
         "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid",
         "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid",
         "b"."completed" as "b.completed", "b"."qaid" as "b.qaid"
-        from (select * from "persons" as "a" order by "a"."firstname" asc limit $1) as "a"
+        from (select * from "persons" as "a" order by "a"."firstname" asc nulls first limit $1) as "a"
         left join "tasks" as "b" on "a"."personid" = "b"."qaid" or "a"."personid" = "b"."executorid"
-        order by "a"."firstname" asc limit $2`),
+        order by "a"."firstname" asc nulls first limit $2`),
       })
     })
@@ -75,10 +75,10 @@ describe("Captures of real examples", () => {
         sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid",
         "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid",
         "b"."completed" as "b.completed", "b"."qaid" as "b.qaid"
-        from (select * from "products" as "a" order by "a"."productname" asc limit $1) as "a"
+        from (select * from "products" as "a" order by "a"."productname" asc nulls first limit $1) as "a"
         left join "products_tasks" as "c" on "a"."productid" = "c"."productid"
         left join "tasks" as "b" on "b"."taskid" = "c"."taskid" where "b"."taskname" = $2
-        order by "a"."productname" asc limit $3`),
+        order by "a"."productname" asc nulls first limit $3`),
       })
     })
@@ -90,10 +90,10 @@ describe("Captures of real examples", () => {
         sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid",
         "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid",
         "b"."completed" as "b.completed", "b"."qaid" as "b.qaid"
-        from (select * from "products" as "a" order by "a"."productname" asc limit $1) as "a"
+        from (select * from "products" as "a" order by "a"."productname" asc nulls first limit $1) as "a"
         left join "products_tasks" as "c" on "a"."productid" = "c"."productid"
         left join "tasks" as "b" on "b"."taskid" = "c"."taskid"
-        order by "a"."productname" asc limit $2`),
+        order by "a"."productname" asc nulls first limit $2`),
       })
     })
@@ -138,11 +138,11 @@ describe("Captures of real examples", () => {
         "c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type",
         "c"."city" as "c.city", "c"."lastname" as "c.lastname"
         from (select * from "tasks" as "a" where not "a"."completed" = $1
-        order by "a"."taskname" asc limit $2) as "a"
+        order by "a"."taskname" asc nulls first limit $2) as "a"
         left join "products_tasks" as "d" on "a"."taskid" = "d"."taskid"
         left join "products" as "b" on "b"."productid" = "d"."productid"
         left join "persons" as "c" on "a"."executorid" = "c"."personid" or "a"."qaid" = "c"."personid"
-        where "c"."year" between $3 and $4 and "b"."productname" = $5 order by "a"."taskname" asc limit $6`),
+        where "c"."year" between $3 and $4 and "b"."productname" = $5 order by "a"."taskname" asc nulls first limit $6`),
       })
     })
   })

View file

@@ -71,7 +71,11 @@ const SQL_DATE_TYPE_MAP: Record<string, PrimitiveTypes> = {
 }

 const SQL_DATE_ONLY_TYPES = ["date"]
-const SQL_TIME_ONLY_TYPES = ["time"]
+const SQL_TIME_ONLY_TYPES = [
+  "time",
+  "time without time zone",
+  "time with time zone",
+]

 const SQL_STRING_TYPE_MAP: Record<string, PrimitiveTypes> = {
   varchar: FieldType.STRING,

View file

@@ -126,16 +126,25 @@ export default class AliasTables {
   }

   reverse<T extends Row | Row[]>(rows: T): T {
+    const mapping = new Map()
     const process = (row: Row) => {
       const final: Row = {}
-      for (let [key, value] of Object.entries(row)) {
-        if (!key.includes(".")) {
-          final[key] = value
-        } else {
-          const [alias, column] = key.split(".")
-          const tableName = this.tableAliases[alias] || alias
-          final[`${tableName}.${column}`] = value
+      for (const key of Object.keys(row)) {
+        let mappedKey = mapping.get(key)
+        if (!mappedKey) {
+          const dotLocation = key.indexOf(".")
+          if (dotLocation === -1) {
+            mappedKey = key
+          } else {
+            const alias = key.slice(0, dotLocation)
+            const column = key.slice(dotLocation + 1)
+            const tableName = this.tableAliases[alias] || alias
+            mappedKey = `${tableName}.${column}`
+          }
+          mapping.set(key, mappedKey)
         }
+        final[mappedKey] = row[key]
       }
       return final
     }
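The cached mapping only changes how result keys are rewritten; a hypothetical illustration of the rewrite itself:

    const tableAliases: Record<string, string> = { a: "persons" } // hypothetical alias map
    const mapKey = (key: string) => {
      const dot = key.indexOf(".")
      if (dot === -1) return key // no alias prefix, copied as-is
      const alias = key.slice(0, dot)
      return `${tableAliases[alias] || alias}.${key.slice(dot + 1)}`
    }

    mapKey("a.firstname") // "persons.firstname"
    mapKey("personid")    // "personid"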

View file

@@ -129,11 +129,12 @@ export function parse(rows: Rows, schema: TableSchema): Rows {
       return
     }

-    const { type: columnType } = schema[columnName]
+    const columnSchema = schema[columnName]
+    const { type: columnType } = columnSchema
     if (columnType === FieldType.NUMBER) {
       // If provided must be a valid number
       parsedRow[columnName] = columnData ? Number(columnData) : columnData
-    } else if (columnType === FieldType.DATETIME) {
+    } else if (columnType === FieldType.DATETIME && !columnSchema.timeOnly) {
       // If provided must be a valid date
       parsedRow[columnName] = columnData
         ? new Date(columnData).toISOString()