1
0
Fork 0
mirror of synced 2024-07-06 15:00:49 +12:00

Updating the datasource information endpoint to POST, which allows sending an unfinished/unsaved datasource to fetch information with. Also changing how the verification and information endpoints work so that enrichment is applied, meaning environment variables can be used as well.

This commit is contained in:
mike12345567 2023-05-31 17:04:29 +01:00
parent bfb3ae66a9
commit 651d50a064
8 changed files with 41 additions and 31 deletions

View file

@ -21,6 +21,7 @@ import {
CreateDatasourceRequest,
VerifyDatasourceRequest,
VerifyDatasourceResponse,
FetchDatasourceInfoRequest,
FetchDatasourceInfoResponse,
IntegrationBase,
DatasourcePlus,
@ -57,6 +58,21 @@ async function getConnector(
return new Connector(datasource.config)
}
// Resolves the effective datasource for a request: if the incoming datasource
// references a persisted document (has an _id), the stored config is fetched
// and the incoming config is merged over it; otherwise the incoming datasource
// is used as-is. The result is then enriched (e.g. env-var substitution is
// applied by sdk.datasources.enrich) before being returned.
async function getAndMergeDatasource(datasource: Datasource) {
  const existingDatasource = datasource._id
    ? await sdk.datasources.get(datasource._id)
    : undefined
  const merged = existingDatasource
    ? sdk.datasources.mergeConfigs(datasource, existingDatasource)
    : datasource
  return await sdk.datasources.enrich(merged)
}
async function buildSchemaHelper(datasource: Datasource) {
const connector = (await getConnector(datasource)) as DatasourcePlus
await connector.buildSchema(datasource._id!, datasource.entities!)
@ -132,17 +148,7 @@ export async function verify(
ctx: UserCtx<VerifyDatasourceRequest, VerifyDatasourceResponse>
) {
const { datasource } = ctx.request.body
let existingDatasource: undefined | Datasource
if (datasource._id) {
existingDatasource = await sdk.datasources.get(datasource._id)
}
let enrichedDatasource = datasource
if (existingDatasource) {
enrichedDatasource = sdk.datasources.mergeConfigs(
datasource,
existingDatasource
)
}
const enrichedDatasource = await getAndMergeDatasource(datasource)
const connector = await getConnector(enrichedDatasource)
if (!connector.testConnection) {
ctx.throw(400, "Connection information verification not supported")
@ -156,11 +162,11 @@ export async function verify(
}
export async function information(
ctx: UserCtx<void, FetchDatasourceInfoResponse>
ctx: UserCtx<FetchDatasourceInfoRequest, FetchDatasourceInfoResponse>
) {
const datasourceId = ctx.params.datasourceId
const datasource = await sdk.datasources.get(datasourceId, { enriched: true })
const connector = (await getConnector(datasource)) as DatasourcePlus
const { datasource } = ctx.request.body
const enrichedDatasource = await getAndMergeDatasource(datasource)
const connector = (await getConnector(enrichedDatasource)) as DatasourcePlus
if (!connector.getTableNames) {
ctx.throw(400, "Table name fetching not supported by datasource")
}
@ -297,7 +303,7 @@ export async function update(ctx: UserCtx<any, UpdateDatasourceResponse>) {
ctx.body = {
datasource: await sdk.datasources.removeSecretSingle(datasource),
}
builderSocket.emitDatasourceUpdate(ctx, datasource)
builderSocket?.emitDatasourceUpdate(ctx, datasource)
}
export async function save(
@ -340,7 +346,7 @@ export async function save(
response.error = schemaError
}
ctx.body = response
builderSocket.emitDatasourceUpdate(ctx, datasource)
builderSocket?.emitDatasourceUpdate(ctx, datasource)
}
async function destroyInternalTablesBySourceId(datasourceId: string) {
@ -400,7 +406,7 @@ export async function destroy(ctx: UserCtx) {
ctx.message = `Datasource deleted.`
ctx.status = 200
builderSocket.emitDatasourceDeletion(ctx, datasourceId)
builderSocket?.emitDatasourceDeletion(ctx, datasourceId)
}
export async function find(ctx: UserCtx) {

View file

@ -71,7 +71,7 @@ export async function create(ctx: any) {
const doc = await pro.plugins.storePlugin(metadata, directory, source)
clientAppSocket.emit("plugins-update", { name, hash: doc.hash })
clientAppSocket?.emit("plugins-update", { name, hash: doc.hash })
ctx.body = {
message: "Plugin uploaded successfully",
plugins: [doc],

View file

@ -78,7 +78,7 @@ export async function save(ctx: UserCtx) {
ctx.eventEmitter &&
ctx.eventEmitter.emitTable(`table:save`, appId, savedTable)
ctx.body = savedTable
builderSocket.emitTableUpdate(ctx, savedTable)
builderSocket?.emitTableUpdate(ctx, savedTable)
}
export async function destroy(ctx: UserCtx) {
@ -91,7 +91,7 @@ export async function destroy(ctx: UserCtx) {
ctx.status = 200
ctx.table = deletedTable
ctx.body = { message: `Table ${tableId} deleted.` }
builderSocket.emitTableDeletion(ctx, tableId)
builderSocket?.emitTableDeletion(ctx, tableId)
}
export async function bulkImport(ctx: UserCtx) {

View file

@ -58,7 +58,7 @@ export async function save(ctx: Ctx) {
await handleViewEvents(existingTable.views[viewName], table.views[viewName])
ctx.body = table.views[viewName]
builderSocket.emitTableUpdate(ctx, table)
builderSocket?.emitTableUpdate(ctx, table)
}
export async function calculationEvents(existingView: View, newView: View) {
@ -127,7 +127,7 @@ export async function destroy(ctx: Ctx) {
await events.view.deleted(view)
ctx.body = view
builderSocket.emitTableUpdate(ctx, table)
builderSocket?.emitTableUpdate(ctx, table)
}
export async function exportView(ctx: Ctx) {

View file

@ -20,8 +20,8 @@ router
authorized(permissions.BUILDER),
datasourceController.verify
)
.get(
"/api/datasources/:datasourceId/info",
.post(
"/api/datasources/info",
authorized(permissions.BUILDER),
datasourceController.information
)

View file

@ -25,6 +25,7 @@ const config = setup.getConfig()!
jest.setTimeout(30000)
jest.unmock("pg")
jest.mock("../websockets")
describe("postgres integrations", () => {
let makeRequest: MakeRequestResponse,
@ -1055,13 +1056,12 @@ describe("postgres integrations", () => {
})
})
describe("GET /api/datasources/:datasourceId/info", () => {
describe("POST /api/datasources/info", () => {
it("should fetch information about postgres datasource", async () => {
const primaryName = primaryPostgresTable.name
const response = await makeRequest(
"get",
`/api/datasources/${postgresDatasource._id}/info`
)
const response = await makeRequest("post", "/api/datasources/info", {
datasource: postgresDatasource,
})
expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined()
expect(response.body.tableNames.indexOf(primaryName)).not.toBe(-1)

View file

@ -36,6 +36,6 @@ export async function processUploaded(plugin: FileType, source?: PluginSource) {
}
const doc = await pro.plugins.storePlugin(metadata, directory, source)
clientAppSocket.emit("plugin-update", { name: doc.name, hash: doc.hash })
clientAppSocket?.emit("plugin-update", { name: doc.name, hash: doc.hash })
return doc
}

View file

@ -23,6 +23,10 @@ export interface VerifyDatasourceResponse {
error?: string
}
/**
 * Request body for the datasource information endpoint (POST
 * /api/datasources/info). The datasource may be unfinished/unsaved —
 * it does not need to exist in the database yet.
 */
export interface FetchDatasourceInfoRequest {
datasource: Datasource
}
/**
 * Response body for the datasource information endpoint: the table names
 * reported by the connector for the given datasource.
 */
export interface FetchDatasourceInfoResponse {
tableNames: string[]
}