
Merge pull request #7333 from Budibase/feature/select-tables-to-fetch

Allow a list of named tables to be fetched
melohagan 2022-09-13 11:19:49 +01:00 committed by GitHub
commit abdb156559
4 changed files with 45 additions and 8 deletions

View file

@@ -8,6 +8,7 @@
   notifications,
   Modal,
   Table,
+  Toggle,
 } from "@budibase/bbui"
 import { datasources, integrations, tables } from "stores/backend"
 import CreateEditRelationship from "components/backend/Datasources/CreateEditRelationship.svelte"
@@ -15,6 +16,7 @@
 import ArrayRenderer from "components/common/renderers/ArrayRenderer.svelte"
 import ConfirmDialog from "components/common/ConfirmDialog.svelte"
 import { goto } from "@roxi/routify"
+import ValuesList from "components/common/ValuesList.svelte"

 export let datasource
 export let save
@@ -31,6 +33,8 @@
 let createExternalTableModal
 let selectedFromRelationship, selectedToRelationship
 let confirmDialog
+let specificTables = null
+let requireSpecificTables = false

 $: integration = datasource && $integrations[datasource.source]
 $: plusTables = datasource?.plus
@@ -87,7 +91,7 @@
 async function updateDatasourceSchema() {
   try {
-    await datasources.updateSchema(datasource)
+    await datasources.updateSchema(datasource, specificTables)
     notifications.success(`Datasource ${name} tables updated successfully.`)
     await tables.fetch()
   } catch (error) {
@@ -150,6 +154,19 @@
   warning={false}
   title="Confirm table fetch"
 >
+  <Toggle
+    bind:value={requireSpecificTables}
+    on:change={e => {
+      requireSpecificTables = e.detail
+      specificTables = null
+    }}
+    thin
+    text="Fetch listed tables only (one per line)"
+  />
+  {#if requireSpecificTables}
+    <ValuesList label="" bind:values={specificTables} />
+  {/if}
+  <br />
   <Body>
     If you have fetched tables from this database before, this action may
     overwrite any changes you made after your initial fetch.
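
A minimal sketch of the state this UI drives (the table names below are assumptions for illustration, not from the commit): the toggle's change handler clears the list whenever it is flipped, so turning it off always falls back to fetching every table.

// Toggle off (default): no filter, fetch everything as before this change
specificTables = null
// Toggle on, with names typed into ValuesList (illustrative values):
specificTables = ["customers", "orders"]
// Either value is forwarded unchanged by updateDatasourceSchema() above.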

View file

@@ -62,8 +62,11 @@ export function createDatasourcesStore() {
     unselect: () => {
       update(state => ({ ...state, selected: null }))
     },
-    updateSchema: async datasource => {
-      const response = await API.buildDatasourceSchema(datasource?._id)
+    updateSchema: async (datasource, tablesFilter) => {
+      const response = await API.buildDatasourceSchema({
+        datasourceId: datasource?._id,
+        tablesFilter,
+      })
       return await updateDatasource(response)
     },
     save: async (body, fetchSchema = false) => {
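
A hedged usage sketch of the reworked store method: the new second parameter is optional, so existing call sites that only pass the datasource keep their previous behaviour, while the filter travels to the API client inside a single options object.

// Unfiltered, as before (tablesFilter is simply undefined in the request):
await datasources.updateSchema(datasource)
// Filtered to specific tables (names are illustrative):
await datasources.updateSchema(datasource, ["users", "invoices"])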

View file

@@ -11,10 +11,14 @@ export const buildDatasourceEndpoints = API => ({
   /**
    * Prompts the server to build the schema for a datasource.
    * @param datasourceId the datasource ID to build the schema for
+   * @param tablesFilter list of specific table names to build the schema for
    */
-  buildDatasourceSchema: async datasourceId => {
+  buildDatasourceSchema: async ({ datasourceId, tablesFilter }) => {
     return await API.post({
       url: `/api/datasources/${datasourceId}/schema`,
+      body: {
+        tablesFilter,
+      },
     })
   },

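
At the HTTP level this is roughly equivalent to the request below (the datasource ID variable and table names are illustrative, and any auth headers the Budibase API client normally attaches are omitted); leaving tablesFilter out of the body makes the server rebuild every table.

// Rough fetch() equivalent of the API.post() call above, for illustration only:
await fetch(`/api/datasources/${datasourceId}/schema`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ tablesFilter: ["users", "invoices"] }),
})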

View file

@@ -50,9 +50,23 @@ exports.fetch = async function (ctx) {
 exports.buildSchemaFromDb = async function (ctx) {
   const db = getAppDB()
   const datasource = await db.get(ctx.params.datasourceId)
+  const tablesFilter = ctx.request.body.tablesFilter

-  const { tables, error } = await buildSchemaHelper(datasource)
-  datasource.entities = tables
+  let { tables, error } = await buildSchemaHelper(datasource)
+  if (tablesFilter) {
+    if (!datasource.entities) {
+      datasource.entities = {}
+    }
+    for (let key in tables) {
+      if (
+        tablesFilter.some(filter => filter.toLowerCase() === key.toLowerCase())
+      ) {
+        datasource.entities[key] = tables[key]
+      }
+    }
+  } else {
+    datasource.entities = tables
+  }

   const dbResp = await db.put(datasource)
   datasource._rev = dbResp.rev
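
The same filtering behaviour, pulled out into a standalone sketch to make it explicit (the helper name is hypothetical, not part of Budibase): matching is case-insensitive, and when a filter is supplied, tables already present in datasource.entities but not named in it are left untouched rather than removed; only an unfiltered fetch replaces the whole set.

// Hypothetical helper mirroring the controller logic above, for illustration only.
function mergeFetchedTables(datasource, tables, tablesFilter) {
  if (!tablesFilter) {
    // No filter: replace everything, i.e. the pre-existing behaviour.
    datasource.entities = tables
    return datasource
  }
  datasource.entities = datasource.entities || {}
  for (let key in tables) {
    const wanted = tablesFilter.some(
      filter => filter.toLowerCase() === key.toLowerCase()
    )
    if (wanted) {
      // Add or overwrite only the tables that were asked for.
      datasource.entities[key] = tables[key]
    }
  }
  return datasource
}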
@@ -223,10 +237,9 @@ const buildSchemaHelper = async datasource => {
   // Connect to the DB and build the schema
   const connector = new Connector(datasource.config)
   await connector.buildSchema(datasource._id, datasource.entities)
-  datasource.entities = connector.tables

   // make sure they all have a display name selected
-  for (let entity of Object.values(datasource.entities)) {
+  for (let entity of Object.values(datasource.entities ?? {})) {
     if (entity.primaryDisplay) {
       continue
     }