
Merge branch 'master' of github.com:Budibase/budibase into endpoint-renaming

mike12345567 committed 2020-10-12 17:45:11 +01:00
commit 408eda81ff
12 changed files with 130 additions and 39 deletions

View file

@@ -90,6 +90,8 @@ const contextToBindables = (tables, walkResult) => context => {
       runtimeBinding: `${contextParentPath}data.${key}`,
       // how the binding exressions looks to the user of the builder
       readableBinding: `${context.instance._instanceName}.${table.name}.${key}`,
+      // table / view info
+      table: context.table,
     })
   // see TableViewSelect.svelte for the format of context.table

View file

@@ -79,7 +79,7 @@
   }
   function fieldOptions(field) {
-    return viewTable.schema[field].type === "string"
+    return viewTable.schema[field].type === "options"
       ? viewTable.schema[field].constraints.inclusion
       : [true, false]
   }

View file

@@ -3,6 +3,7 @@
   import { Input, Button, Spacer, Select, ModalContent } from "@budibase/bbui"
   import getTemplates from "builderStore/store/screenTemplates"
   import { some } from "lodash/fp"
+  import analytics from "analytics"

   const CONTAINER = "@budibase/standard-components/container"
@@ -29,7 +30,7 @@
   const templateChanged = newTemplateIndex => {
     if (newTemplateIndex === undefined) return
-    const template = templates[newTemplateIndex]
     draftScreen = templates[newTemplateIndex].create()
     if (draftScreen.props._instanceName) {
       name = draftScreen.props._instanceName
@@ -63,6 +64,13 @@
     store.createScreen(draftScreen)
+    if (templateIndex !== undefined) {
+      const template = templates[templateIndex]
+      analytics.captureEvent("Screen Created", {
+        template: template.id || template.name,
+      })
+    }
     finished()
   }

View file

@@ -1,18 +1,75 @@
 <script>
   import { DataList } from "@budibase/bbui"
   import { createEventDispatcher } from "svelte"
-  import { store } from "builderStore"
+  import { store, backendUiStore } from "builderStore"
+  import fetchBindableProperties from "builderStore/fetchBindableProperties"

   const dispatch = createEventDispatcher()

   export let value = ""

+  $: urls = getUrls()
+
   const handleBlur = () => dispatch("change", value)

+  // this will get urls of all screens, but only
+  // choose detail screens that are usable in the current context
+  // and substitute the :id param for the actual {{ ._id }} binding
+  const getUrls = () => {
+    const urls = [
+      ...$store.screens
+        .filter(screen => !screen.props._component.endsWith("/rowdetail"))
+        .map(screen => ({
+          name: screen.props._instanceName,
+          url: screen.route,
+          sort: screen.props._component,
+        })),
+    ]
+
+    const bindableProperties = fetchBindableProperties({
+      componentInstanceId: $store.currentComponentInfo._id,
+      components: $store.components,
+      screen: $store.currentPreviewItem,
+      tables: $backendUiStore.tables,
+    })
+
+    const detailScreens = $store.screens.filter(screen =>
+      screen.props._component.endsWith("/rowdetail")
+    )
+
+    for (let detailScreen of detailScreens) {
+      const idBinding = bindableProperties.find(p => {
+        if (
+          p.type === "context" &&
+          p.runtimeBinding.endsWith("._id") &&
+          p.table
+        ) {
+          const tableId =
+            typeof p.table === "string" ? p.table : p.table.tableId
+          return tableId === detailScreen.props.table
+        }
+        return false
+      })
+
+      if (idBinding) {
+        urls.push({
+          name: detailScreen.props._instanceName,
+          url: detailScreen.route.replace(
+            ":id",
+            `{{ ${idBinding.runtimeBinding} }}`
+          ),
+          sort: detailScreen.props._component,
+        })
+      }
+    }
+
+    return urls
+  }
 </script>

 <DataList editable secondary on:blur={handleBlur} on:change bind:value>
   <option value="" />
-  {#each $store.allScreens as screen}
-    <option value={screen.route}>{screen.props._instanceName}</option>
+  {#each urls as url}
+    <option value={url.url}>{url.name}</option>
   {/each}
 </DataList>
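The ":id" substitution in getUrls above is easiest to see with concrete values. A minimal standalone sketch, assuming a hypothetical detail screen routed at "/people/:id" and a binding whose runtimeBinding is "data._id" (neither value is from the commit):

// hypothetical values for illustration only
const route = "/people/:id"
const runtimeBinding = "data._id"
const url = route.replace(":id", `{{ ${runtimeBinding} }}`)
// url === "/people/{{ data._id }}"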

View file

@@ -1,4 +1,4 @@
-import { isString, isUndefined } from "lodash/fp"
+import { isString, isUndefined, cloneDeep } from "lodash/fp"
 import { TYPE_MAP } from "./types"
 import { assign } from "lodash"
 import { uuid } from "builderStore/uuid"
@@ -83,13 +83,13 @@ const parsePropDef = propDef => {
   if (isString(propDef)) {
     if (!TYPE_MAP[propDef]) return error(`Type ${propDef} is not recognised.`)
-    return TYPE_MAP[propDef].default
+    return cloneDeep(TYPE_MAP[propDef].default)
   }

   const type = TYPE_MAP[propDef.type]
   if (!type) return error(`Type ${propDef.type} is not recognised.`)
-  return propDef.default
+  return cloneDeep(propDef.default)
 }

 export const arrayElementComponentName = (parentComponentName, arrayPropName) =>
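The switch to cloneDeep above presumably exists because returning the raw default hands every caller the same mutable object (the empty arrays in TYPE_MAP), so mutating one component's prop leaks into every other component and into the template itself. A minimal sketch of the failure mode and the fix, not taken from the codebase:

import { cloneDeep } from "lodash/fp"

const TYPE_MAP = { options: { default: [] } }

// without cloning, every caller gets the same array object back
const a = TYPE_MAP.options.default
a.push("red")
console.log(TYPE_MAP.options.default) // ["red"], the shared default was mutated

// cloning hands out an independent copy each time
const b = cloneDeep(TYPE_MAP.options.default)
b.push("blue")
console.log(TYPE_MAP.options.default) // still ["red"]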

View file

@@ -10,7 +10,6 @@ export const TYPE_MAP = {
   },
   options: {
     default: [],
-    options: [],
   },
   event: {
     default: [],

View file

@@ -356,7 +356,7 @@ export default {
       {
         label: "destinationUrl",
         key: "destinationUrl",
-        control: Input,
+        control: ScreenSelect,
         placeholder: "/table/_id",
       },
     ],
@@ -405,7 +405,7 @@
       {
         label: "Link Url",
         key: "linkUrl",
-        control: Input,
+        control: ScreenSelect,
         placeholder: "Link URL",
       },
       {
@@ -480,7 +480,7 @@
       {
         label: "Link Url",
         key: "linkUrl",
-        control: Input,
+        control: ScreenSelect,
         placeholder: "Link URL",
       },
       {

View file

@@ -15,7 +15,7 @@ export const FIELDS = {
     type: "options",
     constraints: {
       type: "string",
-      presence: { allowEmpty: true },
+      presence: false,
       inclusion: [],
     },
   },
@@ -67,7 +67,7 @@ export const FIELDS = {
     type: "link",
     constraints: {
       type: "array",
-      presence: { allowEmpty: true },
+      presence: false,
     },
   },
 }
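The presence change above reads like a behavioural fix rather than a style one. Assuming these constraints are evaluated with validate.js, which is what their shape suggests, `presence: { allowEmpty: true }` still requires the attribute to exist on the row (allowEmpty only tolerates empty values), while `presence: false` disables the check so options and link fields can be omitted entirely. A hedged sketch of that difference:

import validate from "validate.js"

// hypothetical row with the options field left off entirely
const row = {}

validate(row, { colour: { presence: { allowEmpty: true } } })
// => an error for "colour": the field must at least be present

validate(row, { colour: { presence: false } })
// => undefined: the presence validator is skipped, a missing field is fine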

View file

@@ -33,6 +33,7 @@ exports.save = async function(ctx) {
     views: {},
     ...rest,
   }
+  let renameDocs = []

   // if the table obj had an _id then it will have been retrieved
   const oldTable = ctx.preExisting
@@ -49,14 +50,11 @@
         include_docs: true,
       })
     )
-    const docs = rows.rows.map(({ doc }) => {
+    renameDocs = rows.rows.map(({ doc }) => {
       doc[_rename.updated] = doc[_rename.old]
       delete doc[_rename.old]
       return doc
     })
-    await db.bulkDocs(docs)
     delete tableToSave._rename
   }
@@ -69,9 +67,6 @@ exports.save = async function(ctx) {
       tableView.schema = tableToSave.schema
     }
-  const result = await db.post(tableToSave)
-  tableToSave._rev = result.rev

   // update linked rows
   await linkRows.updateLinks({
     instanceId,
@@ -82,6 +77,14 @@ exports.save = async function(ctx) {
     oldTable: oldTable,
   })

+  // don't perform any updates until relationships have been
+  // checked by the updateLinks function
+  if (renameDocs.length !== 0) {
+    await db.bulkDocs(renameDocs)
+  }
+  const result = await db.post(tableToSave)
+  tableToSave._rev = result.rev
+
   ctx.eventEmitter &&
     ctx.eventEmitter.emitTable(`table:save`, instanceId, tableToSave)
@@ -105,18 +108,17 @@ exports.save = async function(ctx) {
 exports.destroy = async function(ctx) {
   const instanceId = ctx.user.instanceId
   const db = new CouchDB(instanceId)
   const tableToDelete = await db.get(ctx.params.tableId)
-  await db.remove(tableToDelete)

   // Delete all rows for that table
   const rows = await db.allDocs(
     getRowParams(ctx.params.tableId, null, {
       include_docs: true,
     })
   )
-  await db.bulkDocs(rows.rows.map(row => ({ _id: row.id, _deleted: true })))
+  await db.bulkDocs(
+    rows.rows.map(row => ({ ...row.doc, _deleted: true }))
+  )

   // update linked rows
   await linkRows.updateLinks({
@@ -125,6 +127,9 @@ exports.destroy = async function(ctx) {
     table: tableToDelete,
   })

+  // don't remove the table itself until very end
+  await db.remove(tableToDelete)
+
   ctx.eventEmitter &&
     ctx.eventEmitter.emitTable(`table:delete`, instanceId, tableToDelete)
   ctx.status = 200
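Two of the changes above are easy to miss: the rename docs and the table post now happen only after updateLinks has had its say, and the row-deletion stubs now spread the full document so each one carries its current _rev. A minimal sketch (assuming a PouchDB/CouchDB-style db handle and the repo's getRowParams helper) of why that _rev matters for the bulk delete:

// A deletion stub must include the current _rev; a bare { _id, _deleted } is
// treated as a conflicting write by CouchDB's bulk docs endpoint, so spreading
// the fetched doc (or copying _id and _rev) is what makes the delete stick.
async function deleteAllRows(db, tableId) {
  const rows = await db.allDocs(getRowParams(tableId, null, { include_docs: true }))
  return db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true })))
}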

View file

@@ -161,7 +161,7 @@ class LinkController {
         })
         // now add the docs to be deleted to the bulk operation
         operations.push(...toDeleteDocs)
-        // replace this field with a simple entry to denote there are links
+        // remove the field from this row, link doc will be added to row on way out
         delete row[fieldName]
       }
     }
@@ -234,8 +234,16 @@
     for (let fieldName of Object.keys(schema)) {
       const field = schema[fieldName]
       if (field.type === "link") {
+        // handle this in a separate try catch, want
+        // the put to bubble up as an error, if can't update
+        // table for some reason
+        let linkedTable
+        try {
+          linkedTable = await this._db.get(field.tableId)
+        } catch (err) {
+          continue
+        }
         // create the link field in the other table
-        const linkedTable = await this._db.get(field.tableId)
         linkedTable.schema[field.fieldName] = {
           name: field.fieldName,
           type: "link",

View file

@@ -42,6 +42,7 @@ exports.updateLinks = async function({
   table,
   oldTable,
 }) {
+  const baseReturnObj = row == null ? table : row
   if (instanceId == null) {
     throw "Cannot operate without an instance ID."
   }
@@ -50,12 +51,16 @@
     arguments[0].tableId = table._id
   }
   let linkController = new LinkController(arguments[0])
-  if (
-    !(await linkController.doesTableHaveLinkedFields()) &&
-    (oldTable == null ||
-      !(await linkController.doesTableHaveLinkedFields(oldTable)))
-  ) {
-    return row
+  try {
+    if (
+      !(await linkController.doesTableHaveLinkedFields()) &&
+      (oldTable == null ||
+        !(await linkController.doesTableHaveLinkedFields(oldTable)))
+    ) {
+      return baseReturnObj
+    }
+  } catch (err) {
+    return baseReturnObj
   }
   switch (eventType) {
     case EventType.ROW_SAVE:

View file

@@ -57,19 +57,26 @@ exports.generateTableID = () => {
 /**
  * Gets the DB allDocs/query params for retrieving a row.
- * @param {string} tableId The table in which the rows have been stored.
+ * @param {string|null} tableId The table in which the rows have been stored.
  * @param {string|null} rowId The ID of the row which is being specifically queried for. This can be
  * left null to get all the rows in the table.
  * @param {object} otherProps Any other properties to add to the request.
  * @returns {object} Parameters which can then be used with an allDocs request.
  */
-exports.getRowParams = (tableId, rowId = null, otherProps = {}) => {
+exports.getRowParams = (
+  tableId = null,
+  rowId = null,
+  otherProps = {}
+) => {
   if (tableId == null) {
-    throw "Cannot build params for rows without a table ID"
+    return getDocParams(DocumentTypes.ROW, null, otherProps)
+  } else {
+    const endOfKey =
+      rowId == null
+        ? `${tableId}${SEPARATOR}`
+        : `${tableId}${SEPARATOR}${rowId}`
+    return getDocParams(DocumentTypes.ROW, endOfKey, otherProps)
   }
-  const endOfKey =
-    rowId == null ? `${tableId}${SEPARATOR}` : `${tableId}${SEPARATOR}${rowId}`
-  return getDocParams(DocumentTypes.ROW, endOfKey, otherProps)
 }

 /**
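A short usage sketch of the relaxed signature (the "ta_3f9c" table ID is hypothetical): omitting the table ID now builds params that match every row document in the instance database, while passing one still scopes the key range to that table.

const everyRow = getRowParams() // params covering all rows, regardless of table
const oneTablesRows = getRowParams("ta_3f9c", null, { include_docs: true }) // rows for one table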