mike12345567 2023-10-06 15:55:10 +01:00
parent 7d772e312c
commit d8b7f930e9
18 changed files with 334 additions and 79 deletions

View file

@ -4,6 +4,7 @@ on:
pull_request:
branches:
- develop
- master
jobs:
release:

View file

@ -1,5 +1,5 @@
{
"version": "2.11.5-alpha.3",
"version": "2.11.14",
"npmClient": "yarn",
"packages": [
"packages/*"
@ -19,4 +19,4 @@
"loadEnvFiles": false
}
}
}
}

View file

@ -21,14 +21,6 @@
"hsla(240, 90%, 75%, 0.3)",
"hsla(320, 90%, 75%, 0.3)",
]
$: {
if (constraints.inclusion.length) {
options = constraints.inclusion.map(value => ({
name: value,
id: Math.random(),
}))
}
}
const removeInput = idx => {
delete optionColors[options[idx].name]
constraints.inclusion = constraints.inclusion.filter((e, i) => i !== idx)
@ -80,6 +72,11 @@
// Initialize anchor arrays on mount, assuming 'options' is already populated
colorPopovers = constraints.inclusion.map(() => undefined)
anchors = constraints.inclusion.map(() => undefined)
options = constraints.inclusion.map(value => ({
name: value,
id: Math.random(),
}))
})
</script>

View file

@ -110,20 +110,7 @@
<div class="schema-fields">
{#each schemaFields as [field, schema]}
{#if !schema.autocolumn && schema.type !== "attachment"}
<DrawerBindableSlot
fillWidth
title={value.title}
label={field}
panel={AutomationBindingPanel}
type={schema.type}
{schema}
value={value[field]}
on:change={e => onChange(e, field)}
{bindings}
allowJS={true}
updateOnChange={false}
drawerLeft="260px"
>
{#if isTestModal}
<RowSelectorTypes
{isTestModal}
{field}
@ -132,7 +119,31 @@
{value}
{onChange}
/>
</DrawerBindableSlot>
{:else}
<DrawerBindableSlot
fillWidth
title={value.title}
label={field}
panel={AutomationBindingPanel}
type={schema.type}
{schema}
value={value[field]}
on:change={e => onChange(e, field)}
{bindings}
allowJS={true}
updateOnChange={false}
drawerLeft="260px"
>
<RowSelectorTypes
{isTestModal}
{field}
{schema}
bindings={parsedBindings}
{value}
{onChange}
/>
</DrawerBindableSlot>
{/if}
{/if}
{#if isUpdateRow && schema.type === "link"}
<div class="checkbox-field">

View file

@ -13,7 +13,13 @@
let modal
$: tempValue = filters || []
$: schemaFields = Object.values(schema || {})
$: schemaFields = Object.entries(schema || {}).map(
([fieldName, fieldSchema]) => ({
name: fieldName, // Use the key as the name if one is not defined in the schema, e.g. for some autogenerated columns
...fieldSchema,
})
)
$: text = getText(filters)
$: selected = tempValue.filter(x => !x.onEmptyFilter)?.length > 0

View file

@ -660,7 +660,8 @@
>Open schema editor</Button
>
{:else if editableColumn.type === USER_REFRENCE_TYPE}
<Toggle
<!-- Disabled temporarily -->
<!-- <Toggle
value={editableColumn.relationshipType === RelationshipType.MANY_TO_MANY}
on:change={e =>
(editableColumn.relationshipType = e.detail
@ -669,7 +670,7 @@
disabled={!isCreating}
thin
text="Allow multiple users"
/>
/> -->
{/if}
{#if editableColumn.type === AUTO_TYPE || editableColumn.autocolumn}
<Select

View file

@ -3,21 +3,24 @@
Body,
Button,
Combobox,
Multiselect,
DatePicker,
DrawerContent,
Icon,
Input,
Layout,
Select,
Label,
Layout,
Multiselect,
Select,
} from "@budibase/bbui"
import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte"
import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte"
import { generate } from "shortid"
import { LuceneUtils, Constants } from "@budibase/frontend-core"
import { Constants, LuceneUtils } from "@budibase/frontend-core"
import { getFields } from "helpers/searchFields"
import { FieldType } from "@budibase/types"
import { createEventDispatcher, onMount } from "svelte"
import FilterUsers from "./FilterUsers.svelte"
import { RelationshipType } from "constants/backend"
export let schemaFields
export let filters = []
@ -29,7 +32,6 @@
const dispatch = createEventDispatcher()
const { OperatorOptions } = Constants
const { getValidOperatorsForType } = LuceneUtils
const KeyedFieldRegex = /\d[0-9]*:/g
const behaviourOptions = [
{ value: "and", label: "Match all filters" },
@ -120,7 +122,7 @@
return enrichedSchemaFields.find(field => field.name === filter.field)
}
const santizeTypes = filter => {
const sanitizeTypes = filter => {
// Update type based on field
const fieldSchema = enrichedSchemaFields.find(x => x.name === filter.field)
filter.type = fieldSchema?.type
@ -129,13 +131,9 @@
filter.externalType = getSchema(filter)?.externalType
}
const santizeOperator = filter => {
const sanitizeOperator = filter => {
// Ensure a valid operator is selected
const operators = getValidOperatorsForType(
filter.type,
filter.field,
datasource
).map(x => x.value)
const operators = getValidOperatorsForType(filter).map(x => x.value)
if (!operators.includes(filter.operator)) {
filter.operator = operators[0] ?? OperatorOptions.Equals.value
}
@ -148,7 +146,7 @@
filter.noValue = noValueOptions.includes(filter.operator)
}
const santizeValue = filter => {
const sanitizeValue = (filter, previousType) => {
// Check if the operator allows a value at all
if (filter.noValue) {
filter.value = null
@ -162,28 +160,47 @@
}
} else if (filter.type === "array" && filter.valueType === "Value") {
filter.value = []
} else if (
previousType !== filter.type &&
(previousType === FieldType.BB_REFERENCE ||
filter.type === FieldType.BB_REFERENCE)
) {
filter.value = filter.type === "array" ? [] : null
}
}
const onFieldChange = filter => {
santizeTypes(filter)
santizeOperator(filter)
santizeValue(filter)
const previousType = filter.type
sanitizeTypes(filter)
sanitizeOperator(filter)
sanitizeValue(filter, previousType)
}
const onOperatorChange = filter => {
santizeOperator(filter)
santizeValue(filter)
sanitizeOperator(filter)
sanitizeValue(filter, filter.type)
}
const onValueTypeChange = filter => {
santizeValue(filter)
sanitizeValue(filter)
}
const getFieldOptions = field => {
const schema = enrichedSchemaFields.find(x => x.name === field)
return schema?.constraints?.inclusion || []
}
const getValidOperatorsForType = filter => {
if (!filter?.field) {
return []
}
return LuceneUtils.getValidOperatorsForType(
filter.type,
filter.field,
datasource
)
}
</script>
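One subtlety in the script above: sanitizeValue now receives the previous field type so that switching a filter to or from a user-reference column clears a value that no longer applies. An isolated sketch of that rule follows; the filter shape and the "bb_reference" string are simplified assumptions for illustration, not the component's actual types.

// Simplified sketch of the reset rule added in onFieldChange/sanitizeValue.
type SimpleFilter = { type?: string; value: unknown }
const BB_REFERENCE = "bb_reference" // assumed enum value, for illustration

function resetValueOnTypeSwitch(filter: SimpleFilter, previousType?: string) {
  const crossesUserBoundary =
    previousType !== filter.type &&
    (previousType === BB_REFERENCE || filter.type === BB_REFERENCE)
  if (crossesUserBoundary) {
    // a user-reference value cannot carry over to another type, or vice versa
    filter.value = filter.type === "array" ? [] : null
  }
}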
<DrawerContent>
@ -228,11 +245,7 @@
/>
<Select
disabled={!filter.field}
options={getValidOperatorsForType(
filter.type,
filter.field,
datasource
)}
options={getValidOperatorsForType(filter)}
bind:value={filter.operator}
on:change={() => onOperatorChange(filter)}
placeholder={null}
@ -285,6 +298,14 @@
timeOnly={getSchema(filter)?.timeOnly}
bind:value={filter.value}
/>
{:else if filter.type === FieldType.BB_REFERENCE}
<FilterUsers
bind:value={filter.value}
multiselect={getSchema(filter).relationshipType ===
RelationshipType.MANY_TO_MANY ||
filter.operator === OperatorOptions.In.value}
disabled={filter.noValue}
/>
{:else}
<DrawerBindableInput disabled />
{/if}

View file

@ -0,0 +1,34 @@
<script>
import { Select, Multiselect } from "@budibase/bbui"
import { fetchData } from "@budibase/frontend-core"
import { API } from "api"
export let value = null
export let disabled
export let multiselect = false
$: fetch = fetchData({
API,
datasource: {
type: "user",
},
options: {
limit: 100,
},
})
$: options = $fetch.rows
$: component = multiselect ? Multiselect : Select
</script>
<svelte:component
this={component}
bind:value
autocomplete
{options}
getOptionLabel={option => option.email}
getOptionValue={option => option._id}
{disabled}
/>

View file

@ -57,7 +57,8 @@ export async function checkDockerConfigured() {
"docker/docker-compose has not been installed, please follow instructions at: https://docs.budibase.com/docs/docker-compose"
const docker = await lookpath("docker")
const compose = await lookpath("docker-compose")
if (!docker || !compose) {
const composeV2 = await lookpath("docker compose")
if (!docker || (!compose && !composeV2)) {
throw error
}
}
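Compose v2 ships as a Docker CLI plugin invoked as a subcommand, not as a standalone binary, so a PATH lookup for the literal name "docker compose" may not find it. A minimal sketch of an alternative detection, assuming Node's child_process is acceptable in this CLI (hasComposeV2 is a hypothetical helper, not part of this commit):

import { exec } from "child_process"
import { promisify } from "util"

const execAsync = promisify(exec)

// Hypothetical check: treat Compose v2 as present if the "docker compose"
// subcommand runs and exits cleanly.
async function hasComposeV2(): Promise<boolean> {
  try {
    await execAsync("docker compose version")
    return true
  } catch {
    return false
  }
}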

View file

@ -12,6 +12,10 @@ if (!process.argv[0].includes("node")) {
checkForBinaries()
}
function localPrebuildPath() {
return join(process.execPath, "..", PREBUILDS)
}
function checkForBinaries() {
const readDir = join(__filename, "..", "..", "..", "cli", PREBUILDS, ARCH)
if (fs.existsSync(PREBUILD_DIR) || !fs.existsSync(readDir)) {
@ -19,17 +23,21 @@ function checkForBinaries() {
}
const natives = fs.readdirSync(readDir)
if (fs.existsSync(readDir)) {
const writePath = join(process.execPath, PREBUILDS, ARCH)
const writePath = join(localPrebuildPath(), ARCH)
fs.mkdirSync(writePath, { recursive: true })
for (let native of natives) {
const filename = `${native.split(".fake")[0]}.node`
fs.cpSync(join(readDir, native), join(writePath, filename))
}
console.log("copied something")
}
}
function cleanup(evt?: number) {
// cleanup prebuilds first
const path = localPrebuildPath()
if (fs.existsSync(path)) {
fs.rmSync(path, { recursive: true })
}
if (evt && !isNaN(evt)) {
return
}
@ -41,10 +49,6 @@ function cleanup(evt?: number) {
)
console.error(error(evt))
}
const path = join(process.execPath, PREBUILDS)
if (fs.existsSync(path)) {
fs.rmSync(path, { recursive: true })
}
}
const events = ["exit", "SIGINT", "SIGUSR1", "SIGUSR2", "uncaughtException"]
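The cleanup handler above only helps if it runs on every exit path. A minimal sketch of the assumed wiring to the listed lifecycle events (the actual registration sits outside this hunk):

// Assumed wiring, for illustration: invoke cleanup() for each lifecycle event
// above so the copied prebuilds are removed before the CLI process ends.
for (const event of events) {
  process.on(event, (evtOrError?: number | Error) => {
    cleanup(typeof evtOrError === "number" ? evtOrError : undefined)
  })
}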

View file

@ -2,7 +2,7 @@ version: "3.8"
services:
db:
container_name: postgres
image: postgres
image: postgres:15
restart: unless-stopped
environment:
POSTGRES_USER: root
@ -25,4 +25,4 @@ services:
- "5050:80"
volumes:
pg_data:
pg_data:

View file

@ -308,12 +308,19 @@ class LinkController {
}
})
)
// remove schema from other table
let linkedTable = await this._db.get<Table>(field.tableId)
if (field.fieldName) {
delete linkedTable.schema[field.fieldName]
try {
// remove schema from other table, if it exists
let linkedTable = await this._db.get<Table>(field.tableId)
if (field.fieldName) {
delete linkedTable.schema[field.fieldName]
}
await this._db.put(linkedTable)
} catch (error: any) {
// ignore missing to ensure broken relationship columns can be deleted
if (error.statusCode !== 404) {
throw error
}
}
await this._db.put(linkedTable)
}
/**

View file

@ -233,4 +233,19 @@ describe("test the link controller", () => {
}
await config.updateTable(table)
})
it("should be able to remove a linked field from a table, even if the linked table does not exist", async () => {
await createLinkedRow()
await createLinkedRow("link2")
table1.schema["link"].tableId = "not_found"
const controller = await createLinkController(table1, null, table1)
await context.doInAppContext(appId, async () => {
let before = await controller.getTableLinkDocs()
await controller.removeFieldFromTable("link")
let after = await controller.getTableLinkDocs()
expect(before.length).toEqual(2)
// shouldn't delete the other field
expect(after.length).toEqual(1)
})
})
})
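The behaviour exercised by this test, tolerating a missing linked table so a broken relationship column can still be removed, could also be expressed as a small reusable helper. A hedged sketch (ignore404 is a hypothetical utility, not something this commit introduces):

// Hypothetical helper: run an async action and swallow only "not found"
// errors, re-throwing everything else.
async function ignore404<T>(action: () => Promise<T>): Promise<T | undefined> {
  try {
    return await action()
  } catch (error: any) {
    if (error?.statusCode !== 404) {
      throw error
    }
    return undefined
  }
}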

View file

@ -16,6 +16,7 @@ import { cleanExportRows } from "../utils"
import { utils } from "@budibase/shared-core"
import { ExportRowsParams, ExportRowsResult } from "../search"
import { HTTPError, db } from "@budibase/backend-core"
import { searchInputMapping } from "./utils"
import pick from "lodash/pick"
import { outputProcessing } from "../../../../utilities/rowProcessor"
@ -50,7 +51,10 @@ export async function search(options: SearchParams) {
[params.sort]: { direction },
}
}
try {
const table = await sdk.tables.getTable(tableId)
options = searchInputMapping(table, options)
let rows = (await handleRequest(Operation.READ, tableId, {
filters: query,
sort,
@ -76,7 +80,6 @@ export async function search(options: SearchParams) {
rows = rows.map((r: any) => pick(r, fields))
}
const table = await sdk.tables.getTable(tableId)
rows = await outputProcessing(table, rows, { preserveLinks: true })
// need wrapper object for bookmarks etc when paginating

View file

@ -29,6 +29,7 @@ import {
} from "../../../../api/controllers/view/utils"
import sdk from "../../../../sdk"
import { ExportRowsParams, ExportRowsResult } from "../search"
import { searchInputMapping } from "./utils"
import pick from "lodash/pick"
export async function search(options: SearchParams) {
@ -47,9 +48,9 @@ export async function search(options: SearchParams) {
disableEscaping: options.disableEscaping,
}
let table
let table = await sdk.tables.getTable(tableId)
options = searchInputMapping(table, options)
if (params.sort && !params.sortType) {
table = await sdk.tables.getTable(tableId)
const schema = table.schema
const sortField = schema[params.sort]
params.sortType = sortField.type === "number" ? "number" : "string"
@ -68,7 +69,6 @@ export async function search(options: SearchParams) {
if (tableId === InternalTables.USER_METADATA) {
response.rows = await getGlobalUsersFromMetadata(response.rows)
}
table = table || (await sdk.tables.getTable(tableId))
if (options.fields) {
const fields = [...options.fields, ...db.CONSTANT_INTERNAL_ROW_COLS]

View file

@ -0,0 +1,77 @@
import { searchInputMapping } from "../utils"
import { db as dbCore } from "@budibase/backend-core"
import {
FieldType,
FieldTypeSubtypes,
Table,
SearchParams,
} from "@budibase/types"
const tableId = "ta_a"
const tableWithUserCol: Table = {
_id: tableId,
name: "table",
schema: {
user: {
name: "user",
type: FieldType.BB_REFERENCE,
subtype: FieldTypeSubtypes.BB_REFERENCE.USER,
},
},
}
describe("searchInputMapping", () => {
const globalUserId = dbCore.generateGlobalUserID()
const userMedataId = dbCore.generateUserMetadataID(globalUserId)
it("should be able to map ro_ to global user IDs", () => {
const params: SearchParams = {
tableId,
query: {
equal: {
"1:user": userMedataId,
},
},
}
const output = searchInputMapping(tableWithUserCol, params)
expect(output.query.equal!["1:user"]).toBe(globalUserId)
})
it("should handle array of user IDs", () => {
const params: SearchParams = {
tableId,
query: {
oneOf: {
"1:user": [userMedataId, globalUserId],
},
},
}
const output = searchInputMapping(tableWithUserCol, params)
expect(output.query.oneOf!["1:user"]).toStrictEqual([
globalUserId,
globalUserId,
])
})
it("shouldn't change any other input", () => {
const email = "test@test.com"
const params: SearchParams = {
tableId,
query: {
equal: {
"1:user": email,
},
},
}
const output = searchInputMapping(tableWithUserCol, params)
expect(output.query.equal!["1:user"]).toBe(email)
})
it("shouldn't error if no query supplied", () => {
const params: any = {
tableId,
}
const output = searchInputMapping(tableWithUserCol, params)
expect(output.query).toBeUndefined()
})
})

View file

@ -0,0 +1,76 @@
import {
FieldType,
FieldTypeSubtypes,
SearchParams,
Table,
DocumentType,
SEPARATOR,
} from "@budibase/types"
import { db as dbCore } from "@budibase/backend-core"
function findColumnInQueries(
column: string,
options: SearchParams,
callback: (filter: any) => any
) {
if (!options.query) {
return
}
for (let filterBlock of Object.values(options.query)) {
if (typeof filterBlock !== "object") {
continue
}
for (let [key, filter] of Object.entries(filterBlock)) {
if (key.endsWith(column)) {
filterBlock[key] = callback(filter)
}
}
}
}
function userColumnMapping(column: string, options: SearchParams) {
findColumnInQueries(column, options, (filterValue: any): any => {
const isArray = Array.isArray(filterValue),
isString = typeof filterValue === "string"
if (!isString && !isArray) {
return filterValue
}
const processString = (input: string) => {
const rowPrefix = DocumentType.ROW + SEPARATOR
if (input.startsWith(rowPrefix)) {
return dbCore.getGlobalIDFromUserMetadataID(input)
} else {
return input
}
}
if (isArray) {
return filterValue.map(el => {
if (typeof el === "string") {
return processString(el)
} else {
return el
}
})
} else {
return processString(filterValue)
}
})
}
// Maps through the search parameters to check if any of the inputs are invalid
// for the table schema, and converts them to something that is valid.
export function searchInputMapping(table: Table, options: SearchParams) {
if (!table?.schema) {
return options
}
for (let [key, column] of Object.entries(table.schema)) {
switch (column.type) {
case FieldType.BB_REFERENCE:
if (column.subtype === FieldTypeSubtypes.BB_REFERENCE.USER) {
userColumnMapping(key, options)
}
break
}
}
return options
}
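To make the rewrite userColumnMapping performs more concrete, here is a standalone sketch of the prefix handling. The exact ID shapes are assumptions for illustration; the real conversion is delegated to dbCore.getGlobalIDFromUserMetadataID.

// Illustration only: user metadata rows are assumed to be keyed as
// "ro_<tableId>_<globalUserId>", so stripping the row prefix recovers the
// global user ID that search queries should actually compare against.
const toGlobalUserId = (input: string): string => {
  if (!input.startsWith("ro_")) {
    return input // not a row-scoped ID, leave it alone (e.g. an email)
  }
  const match = input.match(/(us_.+)$/) // assumed global user ID shape
  return match ? match[1] : input
}

// toGlobalUserId("ro_ta_users_us_abc123") -> "us_abc123" (assumed shape)
// toGlobalUserId("test@test.com")         -> "test@test.com" (unchanged)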

View file

@ -14,7 +14,6 @@ const HBS_REGEX = /{{([^{].*?)}}/g
/**
* Returns the valid operator options for a certain data type
* @param type the data type
*/
export const getValidOperatorsForType = (
type: FieldType,
@ -44,22 +43,24 @@ export const getValidOperatorsForType = (
value: string
label: string
}[] = []
if (type === "string") {
if (type === FieldType.STRING) {
ops = stringOps
} else if (type === "number" || type === "bigint") {
} else if (type === FieldType.NUMBER || type === FieldType.BIGINT) {
ops = numOps
} else if (type === "options") {
} else if (type === FieldType.OPTIONS) {
ops = [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty, Op.In]
} else if (type === "array") {
} else if (type === FieldType.ARRAY) {
ops = [Op.Contains, Op.NotContains, Op.Empty, Op.NotEmpty, Op.ContainsAny]
} else if (type === "boolean") {
} else if (type === FieldType.BOOLEAN) {
ops = [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty]
} else if (type === "longform") {
} else if (type === FieldType.LONGFORM) {
ops = stringOps
} else if (type === "datetime") {
} else if (type === FieldType.DATETIME) {
ops = numOps
} else if (type === "formula") {
} else if (type === FieldType.FORMULA) {
ops = stringOps.concat([Op.MoreThan, Op.LessThan])
} else if (type === FieldType.BB_REFERENCE) {
ops = [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty, Op.In]
}
// Only allow equal/not equal for _id in SQL tables