
Adding full flow for creating custom datasource.

mike12345567 2022-08-17 10:05:13 +01:00
parent 39188c6af4
commit 9a49f58a88
4 changed files with 43 additions and 3 deletions

View file

@@ -49,6 +49,9 @@
schema: selected.datasource,
auth: selected.auth,
}
if (selected.friendlyName) {
integration.name = selected.friendlyName
}
checkShowImport()
}
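
For context, a sketch of the shape this block assumes for the selected entry (only datasource, auth and friendlyName appear in the diff; the values are purely illustrative):

// illustrative only: a custom datasource entry as the selection handler sees it
const selected = {
  datasource: { /* config schema rendered in the connection form */ },
  auth: { /* supported auth options */ },
  friendlyName: "My Plugin", // optional display name; skipped when absent
}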

View file

@@ -14,6 +14,9 @@
let datasource = cloneDeep(integration)
let skipFetch = false
$: name =
IntegrationNames[datasource.type] || datasource.name || datasource.type
async function saveDatasource() {
try {
const resp = await save(datasource, skipFetch)
@@ -32,7 +35,7 @@
</script>
<ModalContent
title={`Connect to ${IntegrationNames[datasource.type]}`}
title={`Connect to ${name}`}
onConfirm={() => saveDatasource()}
onCancel={() => modal.show()}
confirmText={datasource.plus
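
The reactive name statement above gives plugin datasources a usable modal title. Written out as a plain function, the fallback order is (a sketch, not part of the commit):

// title resolution: built-in friendly name, then the plugin's own name,
// then the raw type string as a last resort
function displayName(datasource, IntegrationNames) {
  return (
    IntegrationNames[datasource.type] || datasource.name || datasource.type
  )
}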

View file

@@ -15,6 +15,7 @@ import redis from "./redis"
import snowflake from "./snowflake"
import { getPlugins } from "../api/controllers/plugin"
import { SourceName, Integration, PluginType } from "@budibase/types"
import { getDatasourcePlugin } from "../utilities/fileSystem"
const environment = require("../environment")
const { cloneDeep } = require("lodash")
@@ -65,6 +66,8 @@ if (environment.SELF_HOSTED) {
DEFINITIONS[SourceName.GOOGLE_SHEETS] = googlesheets.schema
}
function isIntegrationAvailable(integration: string) {}
module.exports = {
getDefinitions: async () => {
const plugins = await getPlugins(PluginType.DATASOURCE)
@@ -82,7 +85,16 @@ module.exports = {
...pluginSchemas,
}
},
getIntegration: async () => {
return INTEGRATIONS
getIntegration: async (integration: string) => {
if (INTEGRATIONS[integration]) {
return INTEGRATIONS[integration]
}
const plugins = await getPlugins(PluginType.DATASOURCE)
for (let plugin of plugins) {
if (plugin.name === integration) {
// need to use commonJS require due to its dynamic runtime nature
return getDatasourcePlugin(plugin.name, plugin.jsUrl)
}
}
},
}
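
A hedged usage sketch of the new getIntegration signature, as a datasource controller might call it (the require path and the construction step are assumptions, not shown in this diff): built-in sources resolve straight from INTEGRATIONS, while plugin sources fall back to a one-off download via getDatasourcePlugin.

const integrations = require("../integrations") // path is an assumption

async function resolveIntegration(sourceName, config) {
  // returns a built-in integration, or fetches the plugin bundle on demand
  const Integration = await integrations.getIntegration(sourceName)
  if (!Integration) {
    throw new Error(`No integration found for ${sourceName}`)
  }
  // built-in integrations are classes constructed with their config;
  // assuming downloaded plugins follow the same contract
  return new Integration(config)
}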

View file

@@ -26,9 +26,11 @@ const {
const MemoryStream = require("memorystream")
const { getAppId } = require("@budibase/backend-core/context")
const tar = require("tar")
const fetch = require("node-fetch")
const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules")
const DATASOURCE_PATH = join(budibaseTempDir(), "datasource")
/**
* The single stack system (Cloud and Builder) should not make use of the file system where possible,
@@ -348,6 +350,26 @@ exports.extractPluginTarball = async file => {
return { metadata, directory: path }
}
exports.getDatasourcePlugin = async (name, url) => {
if (!fs.existsSync(DATASOURCE_PATH)) {
fs.mkdirSync(DATASOURCE_PATH)
}
const filename = join(DATASOURCE_PATH, name)
if (fs.existsSync(filename)) {
return require(filename)
}
const response = await fetch(url)
if (response.status === 200) {
const content = await response.text()
fs.writeFileSync(filename, content)
return require(filename)
} else {
throw new Error(
`Unable to retrieve plugin - reason: ${await response.text()}`
)
}
}
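
Because getDatasourcePlugin writes the downloaded bundle into DATASOURCE_PATH before requiring it, repeated lookups for the same plugin are served from the temp directory (and from Node's require cache) without another network round trip. A minimal sketch of that behaviour, with an illustrative plugin name and URL:

const { getDatasourcePlugin } = require("./fileSystem") // path is an assumption

async function loadPluginTwice() {
  // first call downloads the bundle, writes it under DATASOURCE_PATH and requires it
  const first = await getDatasourcePlugin("my-plugin", "https://example.com/my-plugin.js")
  // second call finds the file on disk and requires it directly, no fetch
  const second = await getDatasourcePlugin("my-plugin", "https://example.com/my-plugin.js")
  return first === second // same module instance via the require cache
}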
/**
* Full function definition for below can be found in the utilities.
*/