
Merge pull request #3291 from Budibase/feature/mssql-plus

MS SQL Server data source plus
Michael Drury 2021-11-09 17:17:05 +00:00 committed by GitHub
commit 94e952984d
29 changed files with 772 additions and 397 deletions

View file

@ -8,7 +8,11 @@
import CreateEditRow from "./modals/CreateEditRow.svelte"
import CreateEditUser from "./modals/CreateEditUser.svelte"
import CreateEditColumn from "./modals/CreateEditColumn.svelte"
import { TableNames, UNEDITABLE_USER_FIELDS } from "constants"
import {
TableNames,
UNEDITABLE_USER_FIELDS,
UNSORTABLE_TYPES,
} from "constants"
import RoleCell from "./cells/RoleCell.svelte"
export let schema = {}
@ -33,6 +37,15 @@
$: isUsersTable = tableId === TableNames.USERS
$: data && resetSelectedRows()
$: editRowComponent = isUsersTable ? CreateEditUser : CreateEditRow
$: {
UNSORTABLE_TYPES.forEach(type => {
Object.values(schema).forEach(col => {
if (col.type === type) {
col.sortable = false
}
})
})
}
$: {
if (isUsersTable) {
customRenderers = [

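The reactive block above mutates each matching column in place whenever the schema changes. As a standalone sketch (names invented for illustration; only UNSORTABLE_TYPES comes from the PR), the same marking logic as a pure function:

// Sketch: flag columns whose type cannot be sorted server-side.
const UNSORTABLE_TYPES = ["formula", "attachment", "array", "link"]

type Column = { type: string; sortable?: boolean }

function markUnsortable(schema: Record<string, Column>): Record<string, Column> {
  for (const col of Object.values(schema)) {
    if (UNSORTABLE_TYPES.includes(col.type)) {
      col.sortable = false
    }
  }
  return schema
}

// markUnsortable({ tasks: { type: "link" } }).tasks.sortable === false
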
View file

@ -59,9 +59,6 @@
let deletion
$: checkConstraints(field)
$: tableOptions = $tables.list.filter(
opt => opt._id !== $tables.draft._id && opt.type === table.type
)
$: required = !!field?.constraints?.presence || primaryDisplay
$: uneditable =
$tables.selected?._id === TableNames.USERS &&
@ -88,6 +85,13 @@
field.type !== LINK_TYPE && !uneditable && field.type !== AUTO_TYPE
$: relationshipOptions = getRelationshipOptions(field)
$: external = table.type === "external"
// in the case of internal tables the sourceId will just be undefined
$: tableOptions = $tables.list.filter(
opt =>
opt._id !== $tables.draft._id &&
opt.type === table.type &&
table.sourceId === opt.sourceId
)
async function saveColumn() {
if (field.type === AUTO_TYPE) {
@ -174,7 +178,7 @@
if (!field || !field.tableId) {
return null
}
const linkTable = tableOptions.find(table => table._id === field.tableId)
const linkTable = tableOptions?.find(table => table._id === field.tableId)
if (!linkTable) {
return null
}

View file

@ -39,6 +39,8 @@ export const UNEDITABLE_USER_FIELDS = [
"lastName",
]
export const UNSORTABLE_TYPES = ["formula", "attachment", "array", "link"]
export const LAYOUT_NAMES = {
MASTER: {
PRIVATE: "layout_private_master",

View file

@ -79,6 +79,10 @@
try {
// Create datasource
await datasources.save(datasource)
if (datasource?.plus) {
await tables.fetch()
}
await datasources.fetch()
notifications.success(`Datasource ${name} updated successfully.`)
} catch (err) {
notifications.error(`Error saving datasource: ${err}`)

View file

@ -2,6 +2,7 @@ import { cloneDeep } from "lodash/fp"
import { fetchTableData, fetchTableDefinition } from "./tables"
import { fetchViewData } from "./views"
import { fetchRelationshipData } from "./relationships"
import { FieldTypes } from "../constants"
import { executeQuery, fetchQueryDefinition } from "./queries"
/**
@ -28,7 +29,7 @@ export const fetchDatasource = async dataSource => {
}
}
rows = await executeQuery({ queryId: dataSource._id, parameters })
} else if (type === "link") {
} else if (type === FieldTypes.LINK) {
rows = await fetchRelationshipData({
rowId: dataSource.rowId,
tableId: dataSource.rowTableId,

View file

@ -1,6 +1,7 @@
import { notificationStore, dataSourceStore } from "stores"
import API from "./api"
import { fetchTableDefinition } from "./tables"
import { FieldTypes } from "../constants"
/**
* Fetches data about a certain row in a table.
@ -129,7 +130,7 @@ export const enrichRows = async (rows, tableId) => {
const keys = Object.keys(schema)
for (let key of keys) {
const type = schema[key].type
if (type === "link" && Array.isArray(row[key])) {
if (type === FieldTypes.LINK && Array.isArray(row[key])) {
// Enrich row with a string join of relationship fields
row[`${key}_text`] =
row[key]

View file

@ -1,6 +1,7 @@
<script>
import { getContext } from "svelte"
import { Heading, Icon } from "@budibase/bbui"
import { FieldTypes } from "../../constants"
import active from "svelte-spa-router/active"
const { routeStore, styleable, linkable, builderStore } = getContext("sdk")
@ -108,7 +109,7 @@
{#each validLinks as { text, url }}
{#if isInternal(url)}
<a
class="link"
class={FieldTypes.LINK}
href={url}
use:linkable
on:click={close}
@ -117,7 +118,11 @@
{text}
</a>
{:else}
<a class="link" href={ensureExternal(url)} on:click={close}>
<a
class={FieldTypes.LINK}
href={ensureExternal(url)}
on:click={close}
>
{text}
</a>
{/if}

View file

@ -2,6 +2,7 @@
import { CoreSelect, CoreMultiselect } from "@budibase/bbui"
import { getContext } from "svelte"
import Field from "./Field.svelte"
import { FieldTypes } from "../../../constants"
const { API } = getContext("sdk")
@ -68,7 +69,7 @@
{field}
{disabled}
{validation}
type="link"
type={FieldTypes.LINK}
bind:fieldState
bind:fieldApi
bind:fieldSchema

View file

@ -1,4 +1,5 @@
import flatpickr from "flatpickr"
import { FieldTypes } from "../../../constants"
/**
* Creates a validation function from a combination of schema-level constraints
@ -154,7 +155,7 @@ const parseType = (value, type) => {
}
// Parse as string
if (type === "string") {
if (type === FieldTypes.STRING) {
if (typeof value === "string" || Array.isArray(value)) {
return value
}
@ -165,7 +166,7 @@ const parseType = (value, type) => {
}
// Parse as number
if (type === "number") {
if (type === FieldTypes.NUMBER) {
if (isNaN(value)) {
return null
}
@ -173,7 +174,7 @@ const parseType = (value, type) => {
}
// Parse as date
if (type === "datetime") {
if (type === FieldTypes.DATETIME) {
if (value instanceof Date) {
return value.getTime()
}
@ -182,7 +183,7 @@ const parseType = (value, type) => {
}
// Parse as boolean
if (type === "boolean") {
if (type === FieldTypes.BOOLEAN) {
if (typeof value === "string") {
return value.toLowerCase() === "true"
}
@ -190,7 +191,7 @@ const parseType = (value, type) => {
}
// Parse attachments, treating no elements as null
if (type === "attachment") {
if (type === FieldTypes.ATTACHMENT) {
if (!Array.isArray(value) || !value.length) {
return null
}
@ -198,14 +199,14 @@ const parseType = (value, type) => {
}
// Parse links, treating no elements as null
if (type === "link") {
if (type === FieldTypes.LINK) {
if (!Array.isArray(value) || !value.length) {
return null
}
return value
}
if (type === "array") {
if (type === FieldTypes.ARRAY) {
if (!Array.isArray(value) || !value.length) {
return null
}

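For reference, the boolean branch above is self-contained enough to restate as a runnable sketch (the null fallback is an assumption; the full function has branches this hunk truncates):

// Hypothetical standalone restatement of the boolean parsing branch.
function parseBoolean(value: unknown): boolean | null {
  if (typeof value === "string") {
    return value.toLowerCase() === "true"
  }
  return null // assumed fallback - not shown in this hunk
}

parseBoolean("TRUE") // true
parseBoolean("no")   // false
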
View file

@ -2,6 +2,7 @@
import { getContext } from "svelte"
import { Table } from "@budibase/bbui"
import SlotRenderer from "./SlotRenderer.svelte"
import { UnsortableTypes } from "../../../constants"
export let dataProvider
export let columns
@ -73,8 +74,12 @@
divider: true,
}
}
fields.forEach(field => {
newSchema[field] = schema[field]
if (schema[field] && UnsortableTypes.indexOf(schema[field].type) !== -1) {
newSchema[field].sortable = false
}
})
return newSchema
}

View file

@ -2,6 +2,26 @@ export const TableNames = {
USERS: "ta_users",
}
export const FieldTypes = {
STRING: "string",
LONGFORM: "longform",
OPTIONS: "options",
NUMBER: "number",
BOOLEAN: "boolean",
ARRAY: "array",
DATETIME: "datetime",
ATTACHMENT: "attachment",
LINK: "link",
FORMULA: "formula",
}
export const UnsortableTypes = [
FieldTypes.FORMULA,
FieldTypes.ATTACHMENT,
FieldTypes.ARRAY,
FieldTypes.LINK,
]
export const ActionTypes = {
ValidateForm: "ValidateForm",
RefreshDatasource: "RefreshDatasource",

View file

@ -1,5 +1,6 @@
import { writable, get } from "svelte/store"
import { fetchTableDefinition } from "../api"
import { FieldTypes } from "../constants"
export const createDataSourceStore = () => {
const store = writable([])
@ -20,7 +21,7 @@ export const createDataSourceStore = () => {
// Only one side of the relationship is required as a trigger, as it will
// automatically invalidate related table IDs
else if (dataSource.type === "link") {
else if (dataSource.type === FieldTypes.LINK) {
dataSourceId = dataSource.tableId || dataSource.rowTableId
}
@ -72,7 +73,7 @@ export const createDataSourceStore = () => {
if (schema) {
Object.values(schema).forEach(fieldSchema => {
if (
fieldSchema.type === "link" &&
fieldSchema.type === FieldTypes.LINK &&
fieldSchema.tableId &&
!fieldSchema.autocolumn
) {

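The loop above treats every non-auto link column as a pointer to a related table whose data must also be invalidated. A small sketch of gathering those related IDs, with the field shape assumed from the hunk:

type FieldSchema = { type: string; tableId?: string; autocolumn?: boolean }

// Assumed helper: collect the table IDs a schema links out to.
function relatedTableIds(schema: Record<string, FieldSchema>): string[] {
  return Object.values(schema)
    .filter(f => f.type === "link" && !!f.tableId && !f.autocolumn)
    .map(f => f.tableId as string)
}
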
View file

@ -15,7 +15,7 @@ module MsSqlMock {
mssql.ConnectionPool = jest.fn(() => ({
connect: jest.fn(() => ({
request: jest.fn(() => ({
query: jest.fn(() => ({})),
query: jest.fn(sql => ({ recordset: [ sql ] })),
})),
})),
}))

View file

@ -0,0 +1,9 @@
FROM mcr.microsoft.com/mssql/server
ENV ACCEPT_EULA=Y
ENV SA_PASSWORD=Passw0rd
COPY ./data /
ENTRYPOINT [ "/bin/bash", "entrypoint.sh" ]
CMD [ "/opt/mssql/bin/sqlservr" ]

View file

@ -0,0 +1,24 @@
#!/bin/bash
set -e
if [ "$1" = '/opt/mssql/bin/sqlservr' ]; then
# If this is the container's first run, initialize the application database
if [ ! -f /tmp/app-initialized ]; then
# Initialize the application database asynchronously in a background process. This allows a) the SQL Server process to be the main process in the container, which allows graceful shutdown and other goodies, and b) us to only start the SQL Server process once, as opposed to starting, stopping, then starting it again.
function initialize_app_database() {
# Wait a bit for SQL Server to start. SQL Server's process doesn't provide a clever way to check if it's up or not, and it needs to be up before we can import the application database
sleep 30s
echo "RUNNING BUDIBASE SETUP"
#run the setup script to create the DB and the schema in the DB
/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Passw0rd -i setup.sql
# Note that the container has been initialized so future starts won't wipe changes to the data
touch /tmp/app-initialized
}
initialize_app_database &
fi
fi
exec "$@"

View file

@ -0,0 +1,54 @@
USE master;
IF OBJECT_ID ('dbo.products', 'U') IS NOT NULL
DROP TABLE products;
GO
CREATE TABLE products
(
id int IDENTITY(1,1),
name varchar (20),
description varchar(30),
CONSTRAINT pk_products PRIMARY KEY NONCLUSTERED (id)
);
IF OBJECT_ID ('dbo.tasks', 'U') IS NOT NULL
DROP TABLE tasks;
GO
CREATE TABLE tasks
(
taskid int IDENTITY(1,1),
taskname varchar (20),
productid int,
CONSTRAINT pk_tasks PRIMARY KEY NONCLUSTERED (taskid),
CONSTRAINT fk_products FOREIGN KEY (productid) REFERENCES products (id)
);
IF OBJECT_ID ('dbo.people', 'U') IS NOT NULL
DROP TABLE people;
GO
CREATE TABLE people
(
name varchar(30),
age varchar(20),
CONSTRAINT pk_people PRIMARY KEY NONCLUSTERED (name, age)
);
INSERT products
(name, description)
VALUES
('Bananas', 'Fruit thing');
INSERT products
(name, description)
VALUES
('Meat', 'Animal thing');
INSERT tasks
(taskname, productid)
VALUES
('Processing', 1);
INSERT people
(name, age)
VALUES
('Bob', '30');

View file

@ -0,0 +1,12 @@
version: "3.8"
services:
# password: Passw0rd
# user: sa
# database: master
mssql:
image: bb/mssql
build:
context: .
dockerfile: data/Dockerfile
ports:
- "1433:1433"

View file

@ -0,0 +1,3 @@
#!/bin/bash
docker-compose down
docker volume prune -f

View file

@ -163,8 +163,8 @@ module External {
}
}
function basicProcessing(row: Row, table: Table) {
const thisRow: { [key: string]: any } = {}
function basicProcessing(row: Row, table: Table): Row {
const thisRow: Row = {}
// filter the row down to what is actually the row (not joined)
for (let fieldName of Object.keys(table.schema)) {
const value = row[`${table.name}.${fieldName}`] || row[fieldName]
@ -179,6 +179,23 @@ module External {
return thisRow
}
function fixArrayTypes(row: Row, table: Table) {
for (let [fieldName, schema] of Object.entries(table.schema)) {
if (
schema.type === FieldTypes.ARRAY &&
typeof row[fieldName] === "string"
) {
try {
row[fieldName] = JSON.parse(row[fieldName])
} catch (err) {
// couldn't convert back to array, ignore
delete row[fieldName]
}
}
}
return row
}
function isMany(field: FieldSchema) {
return (
field.relationshipType && field.relationshipType.split("-")[0] === "many"
@ -226,7 +243,12 @@ module External {
manyRelationships: ManyRelationship[] = []
for (let [key, field] of Object.entries(table.schema)) {
// if set already, or not set just skip it
if (row[key] == null || newRow[key] || field.autocolumn || field.type === FieldTypes.FORMULA) {
if (
row[key] == null ||
newRow[key] ||
field.autocolumn ||
field.type === FieldTypes.FORMULA
) {
continue
}
// if it's an empty string then return the column to null (if possible)
@ -337,7 +359,7 @@ module External {
table: Table,
relationships: RelationshipsJson[]
) {
if (rows[0].read === true) {
if (!rows || rows.length === 0 || rows[0].read === true) {
return []
}
let finalRows: { [key: string]: Row } = {}
@ -353,7 +375,10 @@ module External {
)
continue
}
const thisRow = basicProcessing(row, table)
const thisRow = fixArrayTypes(basicProcessing(row, table), table)
if (thisRow._id == null) {
throw "Unable to generate row ID for SQL rows"
}
finalRows[thisRow._id] = thisRow
// do this at the end, once it's been added to the final rows
finalRows = this.updateRelationshipColumns(

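fixArrayTypes exists because some SQL drivers return array-typed columns as JSON strings rather than arrays. A self-contained sketch of the round trip, with the Row type simplified from the real definitions:

type Row = { [key: string]: any }

// Parse stringified arrays back into real arrays, dropping anything
// that fails to parse (mirrors the try/catch in the diff above).
function fixArrayTypesSketch(row: Row, arrayFields: string[]): Row {
  for (const field of arrayFields) {
    if (typeof row[field] === "string") {
      try {
        row[field] = JSON.parse(row[field])
      } catch (err) {
        delete row[field]
      }
    }
  }
  return row
}

fixArrayTypesSketch({ tags: '["a","b"]' }, ["tags"]) // { tags: ["a", "b"] }
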
View file

@ -119,7 +119,7 @@ export interface SortJson {
export interface PaginationJson {
limit: number
page: string | number
page?: string | number
}
export interface RelationshipsJson {

View file

@ -7,7 +7,7 @@ import {
SearchFilters,
SortDirection,
} from "../../definitions/datasource"
import { isIsoDateString } from "../utils"
import { isIsoDateString, SqlClients } from "../utils"
import SqlTableQueryBuilder from "./sqlTable"
const BASE_LIMIT = 5000
@ -29,222 +29,236 @@ function parseBody(body: any) {
return body
}
// right now we only do filters on the specific table being queried
function addFilters(
tableName: string,
query: KnexQuery,
filters: SearchFilters | undefined
): KnexQuery {
function iterate(
structure: { [key: string]: any },
fn: (key: string, value: any) => void
) {
for (let [key, value] of Object.entries(structure)) {
fn(`${tableName}.${key}`, value)
}
class InternalBuilder {
private readonly client: string
constructor(client: string) {
this.client = client
}
if (!filters) {
return query
}
// if all or specified in filters, then everything is an or
const allOr = filters.allOr
if (filters.oneOf) {
iterate(filters.oneOf, (key, array) => {
const fnc = allOr ? "orWhereIn" : "whereIn"
query = query[fnc](key, array)
})
}
if (filters.string) {
iterate(filters.string, (key, value) => {
const fnc = allOr ? "orWhere" : "where"
query = query[fnc](key, "ilike", `${value}%`)
})
}
if (filters.fuzzy) {
iterate(filters.fuzzy, (key, value) => {
const fnc = allOr ? "orWhere" : "where"
query = query[fnc](key, "ilike", `%${value}%`)
})
}
if (filters.range) {
iterate(filters.range, (key, value) => {
if (!value.high || !value.low) {
return
// right now we only do filters on the specific table being queried
addFilters(
tableName: string,
query: KnexQuery,
filters: SearchFilters | undefined
): KnexQuery {
function iterate(
structure: { [key: string]: any },
fn: (key: string, value: any) => void
) {
for (let [key, value] of Object.entries(structure)) {
fn(`${tableName}.${key}`, value)
}
const fnc = allOr ? "orWhereBetween" : "whereBetween"
query = query[fnc](key, [value.low, value.high])
})
}
if (filters.equal) {
iterate(filters.equal, (key, value) => {
const fnc = allOr ? "orWhere" : "where"
query = query[fnc]({ [key]: value })
})
}
if (filters.notEqual) {
iterate(filters.notEqual, (key, value) => {
const fnc = allOr ? "orWhereNot" : "whereNot"
query = query[fnc]({ [key]: value })
})
}
if (filters.empty) {
iterate(filters.empty, key => {
const fnc = allOr ? "orWhereNull" : "whereNull"
query = query[fnc](key)
})
}
if (filters.notEmpty) {
iterate(filters.notEmpty, key => {
const fnc = allOr ? "orWhereNotNull" : "whereNotNull"
query = query[fnc](key)
})
}
return query
}
function addRelationships(
knex: Knex,
query: KnexQuery,
fields: string | string[],
fromTable: string,
relationships: RelationshipsJson[] | undefined
): KnexQuery {
if (!relationships) {
}
if (!filters) {
return query
}
// if all or specified in filters, then everything is an or
const allOr = filters.allOr
if (filters.oneOf) {
iterate(filters.oneOf, (key, array) => {
const fnc = allOr ? "orWhereIn" : "whereIn"
query = query[fnc](key, array)
})
}
if (filters.string) {
iterate(filters.string, (key, value) => {
const fnc = allOr ? "orWhere" : "where"
// postgres supports ilike, nothing else does
if (this.client === SqlClients.POSTGRES) {
query = query[fnc](key, "ilike", `${value}%`)
} else {
const rawFnc = `${fnc}Raw`
// @ts-ignore
query = query[rawFnc](`LOWER(${key}) LIKE ?`, [`${value}%`])
}
})
}
if (filters.fuzzy) {
iterate(filters.fuzzy, (key, value) => {
const fnc = allOr ? "orWhere" : "where"
// postgres supports ilike, nothing else does
if (this.client === SqlClients.POSTGRES) {
query = query[fnc](key, "ilike", `%${value}%`)
} else {
const rawFnc = `${fnc}Raw`
// @ts-ignore
query = query[rawFnc](`LOWER(${key}) LIKE ?`, [`%${value}%`])
}
})
}
if (filters.range) {
iterate(filters.range, (key, value) => {
if (!value.high || !value.low) {
return
}
const fnc = allOr ? "orWhereBetween" : "whereBetween"
query = query[fnc](key, [value.low, value.high])
})
}
if (filters.equal) {
iterate(filters.equal, (key, value) => {
const fnc = allOr ? "orWhere" : "where"
query = query[fnc]({ [key]: value })
})
}
if (filters.notEqual) {
iterate(filters.notEqual, (key, value) => {
const fnc = allOr ? "orWhereNot" : "whereNot"
query = query[fnc]({ [key]: value })
})
}
if (filters.empty) {
iterate(filters.empty, key => {
const fnc = allOr ? "orWhereNull" : "whereNull"
query = query[fnc](key)
})
}
if (filters.notEmpty) {
iterate(filters.notEmpty, key => {
const fnc = allOr ? "orWhereNotNull" : "whereNotNull"
query = query[fnc](key)
})
}
return query
}
for (let relationship of relationships) {
const from = relationship.from,
to = relationship.to,
toTable = relationship.tableName
if (!relationship.through) {
// @ts-ignore
query = query.leftJoin(
toTable,
`${fromTable}.${from}`,
`${toTable}.${to}`
)
} else {
const throughTable = relationship.through
const fromPrimary = relationship.fromPrimary
const toPrimary = relationship.toPrimary
query = query
addRelationships(
knex: Knex,
query: KnexQuery,
fields: string | string[],
fromTable: string,
relationships: RelationshipsJson[] | undefined
): KnexQuery {
if (!relationships) {
return query
}
for (let relationship of relationships) {
const from = relationship.from,
to = relationship.to,
toTable = relationship.tableName
if (!relationship.through) {
// @ts-ignore
.leftJoin(
throughTable,
`${fromTable}.${fromPrimary}`,
`${throughTable}.${from}`
query = query.leftJoin(
toTable,
`${fromTable}.${from}`,
`${toTable}.${to}`
)
.leftJoin(toTable, `${toTable}.${toPrimary}`, `${throughTable}.${to}`)
} else {
const throughTable = relationship.through
const fromPrimary = relationship.fromPrimary
const toPrimary = relationship.toPrimary
query = query
// @ts-ignore
.leftJoin(
throughTable,
`${fromTable}.${fromPrimary}`,
`${throughTable}.${from}`
)
.leftJoin(toTable, `${toTable}.${toPrimary}`, `${throughTable}.${to}`)
}
}
return query.limit(BASE_LIMIT)
}
create(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
const { endpoint, body } = json
let query: KnexQuery = knex(endpoint.entityId)
const parsedBody = parseBody(body)
// make sure no null values in body for creation
for (let [key, value] of Object.entries(parsedBody)) {
if (value == null) {
delete parsedBody[key]
}
}
// mysql can't use returning
if (opts.disableReturning) {
return query.insert(parsedBody)
} else {
return query.insert(parsedBody).returning("*")
}
}
return query.limit(BASE_LIMIT)
}
function buildCreate(
knex: Knex,
json: QueryJson,
opts: QueryOptions
): KnexQuery {
const { endpoint, body } = json
let query: KnexQuery = knex(endpoint.entityId)
const parsedBody = parseBody(body)
// make sure no null values in body for creation
for (let [key, value] of Object.entries(parsedBody)) {
if (value == null) {
delete parsedBody[key]
read(knex: Knex, json: QueryJson, limit: number): KnexQuery {
let { endpoint, resource, filters, sort, paginate, relationships } = json
const tableName = endpoint.entityId
// select all if not specified
if (!resource) {
resource = { fields: [] }
}
}
// mysql can't use returning
if (opts.disableReturning) {
return query.insert(parsedBody)
} else {
return query.insert(parsedBody).returning("*")
}
}
function buildRead(knex: Knex, json: QueryJson, limit: number): KnexQuery {
let { endpoint, resource, filters, sort, paginate, relationships } = json
const tableName = endpoint.entityId
// select all if not specified
if (!resource) {
resource = { fields: [] }
}
let selectStatement: string | string[] = "*"
// handle select
if (resource.fields && resource.fields.length > 0) {
// select the resources as the format "table.columnName" - this is what is provided
// by the resource builder further up
selectStatement = resource.fields.map(field => `${field} as ${field}`)
}
let foundLimit = limit || BASE_LIMIT
// handle pagination
let foundOffset: number | null = null
if (paginate && paginate.page && paginate.limit) {
let selectStatement: string | string[] = "*"
// handle select
if (resource.fields && resource.fields.length > 0) {
// select the resources as the format "table.columnName" - this is what is provided
// by the resource builder further up
selectStatement = resource.fields.map(field => `${field} as ${field}`)
}
let foundLimit = limit || BASE_LIMIT
// handle pagination
let foundOffset: number | null = null
if (paginate && paginate.page && paginate.limit) {
// @ts-ignore
const page = paginate.page <= 1 ? 0 : paginate.page - 1
const offset = page * paginate.limit
foundLimit = paginate.limit
foundOffset = offset
} else if (paginate && paginate.limit) {
foundLimit = paginate.limit
}
// start building the query
let query: KnexQuery = knex(tableName).limit(foundLimit)
if (foundOffset) {
query = query.offset(foundOffset)
}
if (sort) {
for (let [key, value] of Object.entries(sort)) {
const direction = value === SortDirection.ASCENDING ? "asc" : "desc"
query = query.orderBy(key, direction)
}
}
if (this.client === SqlClients.MS_SQL && !sort && paginate?.limit) {
// @ts-ignore
query = query.orderBy(json.meta?.table?.primary[0])
}
query = this.addFilters(tableName, query, filters)
// @ts-ignore
const page = paginate.page <= 1 ? 0 : paginate.page - 1
const offset = page * paginate.limit
foundLimit = paginate.limit
foundOffset = offset
} else if (paginate && paginate.limit) {
foundLimit = paginate.limit
let preQuery: KnexQuery = knex({
// @ts-ignore
[tableName]: query,
}).select(selectStatement)
// handle joins
return this.addRelationships(
knex,
preQuery,
selectStatement,
tableName,
relationships
)
}
// start building the query
let query: KnexQuery = knex(tableName).limit(foundLimit)
if (foundOffset) {
query = query.offset(foundOffset)
}
if (sort) {
for (let [key, value] of Object.entries(sort)) {
const direction = value === SortDirection.ASCENDING ? "asc" : "desc"
query = query.orderBy(key, direction)
update(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
const { endpoint, body, filters } = json
let query: KnexQuery = knex(endpoint.entityId)
const parsedBody = parseBody(body)
query = this.addFilters(endpoint.entityId, query, filters)
// mysql can't use returning
if (opts.disableReturning) {
return query.update(parsedBody)
} else {
return query.update(parsedBody).returning("*")
}
}
query = addFilters(tableName, query, filters)
// @ts-ignore
let preQuery: KnexQuery = knex({
// @ts-ignore
[tableName]: query,
}).select(selectStatement)
// handle joins
return addRelationships(
knex,
preQuery,
selectStatement,
tableName,
relationships
)
}
function buildUpdate(
knex: Knex,
json: QueryJson,
opts: QueryOptions
): KnexQuery {
const { endpoint, body, filters } = json
let query: KnexQuery = knex(endpoint.entityId)
const parsedBody = parseBody(body)
query = addFilters(endpoint.entityId, query, filters)
// mysql can't use returning
if (opts.disableReturning) {
return query.update(parsedBody)
} else {
return query.update(parsedBody).returning("*")
}
}
function buildDelete(
knex: Knex,
json: QueryJson,
opts: QueryOptions
): KnexQuery {
const { endpoint, filters } = json
let query: KnexQuery = knex(endpoint.entityId)
query = addFilters(endpoint.entityId, query, filters)
// mysql can't use returning
if (opts.disableReturning) {
return query.delete()
} else {
return query.delete().returning("*")
delete(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
const { endpoint, filters } = json
let query: KnexQuery = knex(endpoint.entityId)
query = this.addFilters(endpoint.entityId, query, filters)
// mysql can't use returning
if (opts.disableReturning) {
return query.delete()
} else {
return query.delete().returning("*")
}
}
}
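
The string/fuzzy filter branches are the heart of this refactor: Postgres keeps its native case-insensitive ilike, while every other client falls back to a raw LOWER(...) LIKE ?. A rough sketch of the two SQL shapes this produces (identifier quoting varies by dialect, so the output comments are approximate):

import knex from "knex"

const pg = knex({ client: "pg" })
const mssql = knex({ client: "mssql" })

// Postgres path: native case-insensitive match
pg("products").where("products.name", "ilike", "ban%").toString()
// => select * from "products" where "products"."name" ilike 'ban%'

// Non-Postgres path: lower-case the column manually
mssql("products").whereRaw("LOWER(products.name) LIKE ?", ["ban%"]).toString()
// => select * from [products] where LOWER(products.name) LIKE 'ban%'
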
@ -266,20 +280,23 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
const sqlClient = this.getSqlClient()
const client = knex({ client: sqlClient })
let query
const builder = new InternalBuilder(sqlClient)
switch (this._operation(json)) {
case Operation.CREATE:
query = buildCreate(client, json, opts)
query = builder.create(client, json, opts)
break
case Operation.READ:
query = buildRead(client, json, this.limit)
query = builder.read(client, json, this.limit)
break
case Operation.UPDATE:
query = buildUpdate(client, json, opts)
query = builder.update(client, json, opts)
break
case Operation.DELETE:
query = buildDelete(client, json, opts)
query = builder.delete(client, json, opts)
break
case Operation.CREATE_TABLE: case Operation.UPDATE_TABLE: case Operation.DELETE_TABLE:
case Operation.CREATE_TABLE:
case Operation.UPDATE_TABLE:
case Operation.DELETE_TABLE:
return this._tableQuery(json)
default:
throw `Operation type is not supported by SQL query builder`
@ -288,6 +305,85 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
// @ts-ignore
return query.toSQL().toNative()
}
async getReturningRow(queryFn: Function, json: QueryJson) {
if (!json.extra || !json.extra.idFilter) {
return {}
}
const input = this._query({
endpoint: {
...json.endpoint,
operation: Operation.READ,
},
resource: {
fields: [],
},
filters: json.extra.idFilter,
paginate: {
limit: 1,
},
meta: json.meta,
})
return queryFn(input, Operation.READ)
}
// when creating, if an ID has been inserted we need to make sure
// the ID filter is enriched with it before trying to retrieve the row
checkLookupKeys(id: any, json: QueryJson) {
if (!id || !json.meta?.table || !json.meta.table.primary) {
return json
}
const primaryKey = json.meta.table.primary?.[0]
json.extra = {
idFilter: {
equal: {
[primaryKey]: id,
},
},
}
return json
}
// this function recreates the returning functionality of postgres
async queryWithReturning(
json: QueryJson,
queryFn: Function,
processFn: Function = (result: any) => result
) {
const sqlClient = this.getSqlClient()
const operation = this._operation(json)
const input = this._query(json, { disableReturning: true })
if (Array.isArray(input)) {
const responses = []
for (let query of input) {
responses.push(await queryFn(query, operation))
}
return responses
}
let row
// need to manage returning, a feature mySQL can't do
if (operation === Operation.DELETE) {
row = processFn(await this.getReturningRow(queryFn, json))
}
const response = await queryFn(input, operation)
const results = processFn(response)
// same as delete, manage returning
if (operation === Operation.CREATE || operation === Operation.UPDATE) {
let id
if (sqlClient === SqlClients.MS_SQL) {
id = results?.[0].id
} else if (sqlClient === SqlClients.MY_SQL) {
id = results?.insertId
}
row = processFn(
await this.getReturningRow(queryFn, this.checkLookupKeys(id, json))
)
}
if (operation !== Operation.READ) {
return row
}
return results.length ? results : [{ [operation.toLowerCase()]: true }]
}
}
module.exports = SqlQueryBuilder

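queryWithReturning compensates for MySQL and MS SQL lacking Postgres-style RETURNING: run the write, pull the new ID out of the client-specific result shape (results?.[0].id for MS SQL via the SCOPE_IDENTITY hack, results?.insertId for MySQL), then re-read the row with getReturningRow. A condensed, hypothetical restatement of the CREATE path:

type QueryFn = (input: any, operation: string) => Promise<any>

async function createWithReturning(
  sqlClient: "mssql" | "mysql",
  input: any,
  queryFn: QueryFn,
  readBack: (id: any) => Promise<any>
) {
  const results = await queryFn(input, "CREATE")
  // client-specific extraction of the inserted ID
  const id = sqlClient === "mssql" ? results?.[0]?.id : results?.insertId
  // the second round trip stands in for RETURNING *
  return readBack(id)
}
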
View file

@ -6,7 +6,12 @@ import SchemaBuilder = Knex.SchemaBuilder
import CreateTableBuilder = Knex.CreateTableBuilder
const { FieldTypes, RelationshipTypes } = require("../../constants")
function generateSchema(schema: CreateTableBuilder, table: Table, tables: Record<string, Table>, oldTable: null | Table = null) {
function generateSchema(
schema: CreateTableBuilder,
table: Table,
tables: Record<string, Table>,
oldTable: null | Table = null
) {
let primaryKey = table && table.primary ? table.primary[0] : null
const columns = Object.values(table.schema)
// all columns in a junction table will be meta
@ -19,17 +24,21 @@ function generateSchema(schema: CreateTableBuilder, table: Table, tables: Record
schema.primary(metaCols.map(col => col.name))
}
// check if any columns need to be added
const foreignKeys = Object.values(table.schema).map(col => col.foreignKey)
for (let [key, column] of Object.entries(table.schema)) {
// skip things that are already correct
const oldColumn = oldTable ? oldTable.schema[key] : null
if ((oldColumn && oldColumn.type === column.type) || (primaryKey === key && !isJunction)) {
if (
(oldColumn && oldColumn.type === column.type) ||
(primaryKey === key && !isJunction)
) {
continue
}
switch (column.type) {
case FieldTypes.STRING: case FieldTypes.OPTIONS: case FieldTypes.LONGFORM:
case FieldTypes.STRING:
case FieldTypes.OPTIONS:
case FieldTypes.LONGFORM:
schema.string(key)
break
case FieldTypes.NUMBER:
@ -67,7 +76,9 @@ function generateSchema(schema: CreateTableBuilder, table: Table, tables: Record
throw "Referenced table doesn't exist"
}
schema.integer(column.foreignKey).unsigned()
schema.foreign(column.foreignKey).references(`${tableName}.${relatedTable.primary[0]}`)
schema
.foreign(column.foreignKey)
.references(`${tableName}.${relatedTable.primary[0]}`)
}
break
}
@ -76,7 +87,10 @@ function generateSchema(schema: CreateTableBuilder, table: Table, tables: Record
// need to check if any columns have been deleted
if (oldTable) {
const deletedColumns = Object.entries(oldTable.schema)
.filter(([key, schema]) => schema.type !== FieldTypes.LINK && table.schema[key] == null)
.filter(
([key, schema]) =>
schema.type !== FieldTypes.LINK && table.schema[key] == null
)
.map(([key]) => key)
deletedColumns.forEach(key => {
if (oldTable.constrained && oldTable.constrained.indexOf(key) !== -1) {
@ -92,7 +106,7 @@ function generateSchema(schema: CreateTableBuilder, table: Table, tables: Record
function buildCreateTable(
knex: Knex,
table: Table,
tables: Record<string, Table>,
tables: Record<string, Table>
): SchemaBuilder {
return knex.schema.createTable(table.name, schema => {
generateSchema(schema, table, tables)
@ -103,17 +117,14 @@ function buildUpdateTable(
knex: Knex,
table: Table,
tables: Record<string, Table>,
oldTable: Table,
oldTable: Table
): SchemaBuilder {
return knex.schema.alterTable(table.name, schema => {
generateSchema(schema, table, tables, oldTable)
})
}
function buildDeleteTable(
knex: Knex,
table: Table,
): SchemaBuilder {
function buildDeleteTable(knex: Knex, table: Table): SchemaBuilder {
return knex.schema.dropTable(table.name)
}
@ -151,7 +162,12 @@ class SqlTableQueryBuilder {
if (!json.meta || !json.meta.table) {
throw "Must specify old table for update"
}
query = buildUpdateTable(client, json.table, json.meta.tables, json.meta.table)
query = buildUpdateTable(
client,
json.table,
json.meta.tables,
json.meta.table
)
break
case Operation.DELETE_TABLE:
query = buildDeleteTable(client, json.table)
@ -164,4 +180,4 @@ class SqlTableQueryBuilder {
}
export default SqlTableQueryBuilder
module.exports = SqlTableQueryBuilder
module.exports = SqlTableQueryBuilder

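generateSchema drives Knex's schema builder, so the table operations above compile down to dialect-specific DDL. A small illustration of the createTable path (column names invented; output comment approximate):

import knex from "knex"

const client = knex({ client: "mssql" })

// Mirrors buildCreateTable: hand Knex a callback that declares columns.
const ddl = client.schema
  .createTable("products", table => {
    table.increments("id") // identity primary key
    table.string("name")   // FieldTypes.STRING -> schema.string(key)
  })
  .toString()

// e.g. CREATE TABLE [products] ([id] int identity(1,1) not null primary key, [name] nvarchar(255))
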
View file

@ -4,7 +4,10 @@ import { Datasource } from "../../definitions/common"
module DatasourceUtils {
const { integrations } = require("../index")
export async function makeExternalQuery(datasource: Datasource, json: QueryJson) {
export async function makeExternalQuery(
datasource: Datasource,
json: QueryJson
) {
const Integration = integrations[datasource.source]
// query is the opinionated function
if (Integration.prototype.query) {

View file

@ -1,11 +1,20 @@
import {
Integration,
DatasourceFieldTypes,
QueryTypes,
Integration,
Operation,
QueryJson,
QueryTypes,
SqlQuery,
} from "../definitions/datasource"
import { getSqlQuery } from "./utils"
import {
getSqlQuery,
buildExternalTableId,
convertSqlType,
finaliseExternalTables,
SqlClients,
} from "./utils"
import { DatasourcePlus } from "./base/datasourcePlus"
import { Table, TableSchema } from "../definitions/common"
module MSSQLModule {
const sqlServer = require("mssql")
@ -22,6 +31,7 @@ module MSSQLModule {
const SCHEMA: Integration = {
docs: "https://github.com/tediousjs/node-mssql",
plus: true,
description:
"Microsoft SQL Server is a relational database management system developed by Microsoft. ",
friendlyName: "MS SQL Server",
@ -69,26 +79,84 @@ module MSSQLModule {
},
}
async function internalQuery(client: any, query: SqlQuery) {
async function internalQuery(
client: any,
query: SqlQuery,
operation: string | undefined = undefined
) {
const request = client.request()
try {
return await client.query(query.sql, query.bindings || {})
if (Array.isArray(query.bindings)) {
let count = 0
for (let binding of query.bindings) {
request.input(`p${count++}`, binding)
}
}
// this is a hack to get the inserted ID back,
// no way to do this with Knex nicely
const sql =
operation === Operation.CREATE
? `${query.sql}; SELECT SCOPE_IDENTITY() AS id;`
: query.sql
return await request.query(sql)
} catch (err) {
// @ts-ignore
throw new Error(err)
}
}
class SqlServerIntegration extends Sql {
class SqlServerIntegration extends Sql implements DatasourcePlus {
private readonly config: MSSQLConfig
static pool: any
public tables: Record<string, Table> = {}
public schemaErrors: Record<string, string> = {}
MASTER_TABLES = [
"spt_fallback_db",
"spt_fallback_dev",
"spt_fallback_usg",
"spt_monitor",
"MSreplication_options",
]
TABLES_SQL =
"SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE='BASE TABLE'"
getDefinitionSQL(tableName: string) {
return `select *
from INFORMATION_SCHEMA.COLUMNS
where TABLE_NAME='${tableName}'`
}
getConstraintsSQL(tableName: string) {
return `SELECT * FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC
INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS KU
ON TC.CONSTRAINT_TYPE = 'PRIMARY KEY'
AND TC.CONSTRAINT_NAME = KU.CONSTRAINT_NAME
AND KU.table_name='${tableName}'
ORDER BY
KU.TABLE_NAME,
KU.ORDINAL_POSITION;`
}
getAutoColumnsSQL(tableName: string) {
return `SELECT
COLUMNPROPERTY(OBJECT_ID(TABLE_SCHEMA+'.'+TABLE_NAME),COLUMN_NAME,'IsComputed')
AS IS_COMPUTED,
COLUMNPROPERTY(object_id(TABLE_SCHEMA+'.'+TABLE_NAME), COLUMN_NAME, 'IsIdentity')
AS IS_IDENTITY,
*
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME='${tableName}'`
}
constructor(config: MSSQLConfig) {
super("mssql")
super(SqlClients.MS_SQL)
this.config = config
const clientCfg = {
...this.config,
options: {
encrypt: this.config.encrypt,
enableArithAbort: true,
},
}
delete clientCfg.encrypt
@ -99,14 +167,75 @@ module MSSQLModule {
async connect() {
try {
const client = await this.pool.connect()
this.client = client.request()
this.client = await this.pool.connect()
} catch (err) {
// @ts-ignore
throw new Error(err)
}
}
async runSQL(sql: string) {
return (await internalQuery(this.client, getSqlQuery(sql))).recordset
}
/**
* Fetches the tables from the SQL Server database and assigns them to the datasource.
* @param {string} datasourceId - the ID of the datasource to fetch tables for
* @param entities - the tables that are to be built
*/
async buildSchema(datasourceId: string, entities: Record<string, Table>) {
await this.connect()
let tableNames = await this.runSQL(this.TABLES_SQL)
if (tableNames == null || !Array.isArray(tableNames)) {
throw "Unable to get list of tables in database"
}
tableNames = tableNames
.map((record: any) => record.TABLE_NAME)
.filter((name: string) => this.MASTER_TABLES.indexOf(name) === -1)
const tables: Record<string, Table> = {}
for (let tableName of tableNames) {
// get the column definition (type)
const definition = await this.runSQL(this.getDefinitionSQL(tableName))
// find primary key constraints
const constraints = await this.runSQL(this.getConstraintsSQL(tableName))
// find the computed and identity columns (auto columns)
const columns = await this.runSQL(this.getAutoColumnsSQL(tableName))
const primaryKeys = constraints
.filter(
(constraint: any) => constraint.CONSTRAINT_TYPE === "PRIMARY KEY"
)
.map((constraint: any) => constraint.COLUMN_NAME)
const autoColumns = columns
.filter((col: any) => col.IS_COMPUTED || col.IS_IDENTITY)
.map((col: any) => col.COLUMN_NAME)
let schema: TableSchema = {}
for (let def of definition) {
const name = def.COLUMN_NAME
if (typeof name !== "string") {
continue
}
const type: string = convertSqlType(def.DATA_TYPE)
schema[name] = {
autocolumn: !!autoColumns.find((col: string) => col === name),
name: name,
type,
}
}
tables[tableName] = {
_id: buildExternalTableId(datasourceId, tableName),
primary: primaryKeys,
name: tableName,
schema,
}
}
const final = finaliseExternalTables(tables, entities)
this.tables = final.tables
this.schemaErrors = final.errors
}
async read(query: SqlQuery | string) {
await this.connect()
const response = await internalQuery(this.client, getSqlQuery(query))
@ -132,10 +261,13 @@ module MSSQLModule {
}
async query(json: QueryJson) {
const operation = this._operation(json).toLowerCase()
const input = this._query(json)
const response = await internalQuery(this.client, input)
return response.recordset ? response.recordset : [{ [operation]: true }]
await this.connect()
const operation = this._operation(json)
const queryFn = (query: any, op: string) =>
internalQuery(this.client, query, op)
const processFn = (result: any) =>
result.recordset ? result.recordset : [{ [operation]: true }]
return this.queryWithReturning(json, queryFn, processFn)
}
}

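Two details in this integration are easy to miss: Knex emits positional bindings that the mssql driver wants registered as named inputs (hence the p0, p1, ... loop), and the inserted ID is recovered by appending SELECT SCOPE_IDENTITY() AS id to the insert. A sketch of what internalQuery ends up sending for a create (table and values invented):

// Illustration only - mirrors the binding loop and the CREATE-time suffix.
const query = {
  sql: "insert into [people] ([name], [age]) values (@p0, @p1)",
  bindings: ["Bob", "30"],
}

// request.input(`p${count++}`, binding) registers each binding:
//   request.input("p0", "Bob")
//   request.input("p1", "30")

// for Operation.CREATE the statement gains the identity lookup:
const sql = `${query.sql}; SELECT SCOPE_IDENTITY() AS id;`
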
View file

@ -2,23 +2,22 @@ import {
Integration,
DatasourceFieldTypes,
QueryTypes,
Operation,
QueryJson,
SqlQuery,
} from "../definitions/datasource"
import { Table, TableSchema } from "../definitions/common"
import { getSqlQuery } from "./utils"
import {
getSqlQuery,
SqlClients,
buildExternalTableId,
convertSqlType,
finaliseExternalTables,
} from "./utils"
import { DatasourcePlus } from "./base/datasourcePlus"
module MySQLModule {
const mysql = require("mysql2")
const Sql = require("./base/sql")
const {
buildExternalTableId,
convertType,
finaliseExternalTables,
} = require("./utils")
const { FieldTypes } = require("../constants")
interface MySQLConfig {
host: string
@ -29,30 +28,6 @@ module MySQLModule {
ssl?: object
}
const TYPE_MAP = {
text: FieldTypes.LONGFORM,
blob: FieldTypes.LONGFORM,
enum: FieldTypes.STRING,
varchar: FieldTypes.STRING,
float: FieldTypes.NUMBER,
int: FieldTypes.NUMBER,
numeric: FieldTypes.NUMBER,
bigint: FieldTypes.NUMBER,
mediumint: FieldTypes.NUMBER,
decimal: FieldTypes.NUMBER,
dec: FieldTypes.NUMBER,
double: FieldTypes.NUMBER,
real: FieldTypes.NUMBER,
fixed: FieldTypes.NUMBER,
smallint: FieldTypes.NUMBER,
timestamp: FieldTypes.DATETIME,
date: FieldTypes.DATETIME,
datetime: FieldTypes.DATETIME,
time: FieldTypes.DATETIME,
tinyint: FieldTypes.BOOLEAN,
json: DatasourceFieldTypes.JSON,
}
const SCHEMA: Integration = {
docs: "https://github.com/mysqljs/mysql",
plus: true,
@ -139,7 +114,7 @@ module MySQLModule {
public schemaErrors: Record<string, string> = {}
constructor(config: MySQLConfig) {
super("mysql")
super(SqlClients.MY_SQL)
this.config = config
if (config.ssl && Object.keys(config.ssl).length === 0) {
delete config.ssl
@ -184,7 +159,7 @@ module MySQLModule {
schema[columnName] = {
name: columnName,
autocolumn: isAuto,
type: convertType(column.Type, TYPE_MAP),
type: convertSqlType(column.Type),
constraints,
}
}
@ -223,67 +198,12 @@ module MySQLModule {
return results.length ? results : [{ deleted: true }]
}
async getReturningRow(json: QueryJson) {
if (!json.extra || !json.extra.idFilter) {
return {}
}
const input = this._query({
endpoint: {
...json.endpoint,
operation: Operation.READ,
},
fields: [],
filters: json.extra.idFilter,
paginate: {
limit: 1,
},
})
return internalQuery(this.client, input, false)
}
// when creating if an ID has been inserted need to make sure
// the id filter is enriched with it before trying to retrieve the row
checkLookupKeys(results: any, json: QueryJson) {
if (!results?.insertId || !json.meta?.table || !json.meta.table.primary) {
return json
}
const primaryKey = json.meta.table.primary?.[0]
json.extra = {
idFilter: {
equal: {
[primaryKey]: results.insertId,
},
},
}
return json
}
async query(json: QueryJson) {
const operation = this._operation(json)
this.client.connect()
const input = this._query(json, { disableReturning: true })
if (Array.isArray(input)) {
const responses = []
for (let query of input) {
responses.push(await internalQuery(this.client, query, false))
}
return responses
}
let row
// need to manage returning, a feature mySQL can't do
if (operation === operation.DELETE) {
row = this.getReturningRow(json)
}
const results = await internalQuery(this.client, input, false)
// same as delete, manage returning
if (operation === Operation.CREATE || operation === Operation.UPDATE) {
row = this.getReturningRow(this.checkLookupKeys(results, json))
}
const queryFn = (query: any) => internalQuery(this.client, query, false)
const output = await this.queryWithReturning(json, queryFn)
this.client.end()
if (operation !== Operation.READ) {
return row
}
return results.length ? results : [{ [operation.toLowerCase()]: true }]
return output
}
}

View file

@ -6,18 +6,18 @@ import {
SqlQuery,
} from "../definitions/datasource"
import { Table } from "../definitions/common"
import { getSqlQuery } from "./utils"
import {
getSqlQuery,
buildExternalTableId,
convertSqlType,
finaliseExternalTables,
SqlClients,
} from "./utils"
import { DatasourcePlus } from "./base/datasourcePlus"
module PostgresModule {
const { Pool } = require("pg")
const Sql = require("./base/sql")
const { FieldTypes } = require("../constants")
const {
buildExternalTableId,
convertType,
finaliseExternalTables,
} = require("./utils")
const { escapeDangerousCharacters } = require("../utilities")
const JSON_REGEX = /'{.*}'::json/s
@ -97,22 +97,6 @@ module PostgresModule {
},
}
const TYPE_MAP = {
text: FieldTypes.LONGFORM,
varchar: FieldTypes.STRING,
integer: FieldTypes.NUMBER,
bigint: FieldTypes.NUMBER,
decimal: FieldTypes.NUMBER,
smallint: FieldTypes.NUMBER,
real: FieldTypes.NUMBER,
"double precision": FieldTypes.NUMBER,
timestamp: FieldTypes.DATETIME,
time: FieldTypes.DATETIME,
boolean: FieldTypes.BOOLEAN,
json: FieldTypes.JSON,
date: FieldTypes.DATETIME,
}
async function internalQuery(client: any, query: SqlQuery) {
// need to handle a specific issue with json data types in postgres,
// new lines inside the JSON data will break it
@ -154,7 +138,7 @@ module PostgresModule {
`
constructor(config: PostgresConfig) {
super("pg")
super(SqlClients.POSTGRES)
this.config = config
let newConfig = {
@ -216,7 +200,7 @@ module PostgresModule {
}
}
const type: string = convertType(column.data_type, TYPE_MAP)
const type: string = convertSqlType(column.data_type)
const identity = !!(
column.identity_generation ||
column.identity_start ||

View file

@ -9,32 +9,39 @@ class TestConfiguration {
}
describe("MS SQL Server Integration", () => {
let config
let config
beforeEach(() => {
beforeEach(async () => {
config = new TestConfiguration()
})
it("calls the create method with the correct params", async () => {
const sql = "insert into users (name, age) values ('Joe', 123);"
const response = await config.integration.create({
sql
describe("check sql used", () => {
beforeEach(async () => {
await config.integration.connect()
})
expect(config.integration.client.query).toHaveBeenCalledWith(sql, {})
})
it("calls the read method with the correct params", async () => {
const sql = "select * from users;"
const response = await config.integration.read({
sql
it("calls the create method with the correct params", async () => {
const sql = "insert into users (name, age) values ('Joe', 123);"
const response = await config.integration.create({
sql
})
expect(config.integration.client.request).toHaveBeenCalledWith()
expect(response[0]).toEqual(sql)
})
it("calls the read method with the correct params", async () => {
const sql = "select * from users;"
const response = await config.integration.read({
sql
})
expect(config.integration.client.request).toHaveBeenCalledWith()
expect(response[0]).toEqual(sql)
})
expect(config.integration.client.query).toHaveBeenCalledWith(sql, {})
})
describe("no rows returned", () => {
beforeEach(async () => {
await config.integration.connect()
config.integration.client.query.mockImplementation(() => ({ rows: [] }))
})
it("returns the correct response when the create response has no rows", async () => {
@ -42,7 +49,7 @@ describe("MS SQL Server Integration", () => {
const response = await config.integration.create({
sql
})
expect(response).toEqual([{ created: true }])
expect(response[0]).toEqual(sql)
})
})
})

View file

@ -7,6 +7,39 @@ const { FieldTypes, BuildSchemaErrors } = require("../constants")
const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
const ROW_ID_REGEX = /^\[.*]$/g
const SQL_TYPE_MAP = {
text: FieldTypes.LONGFORM,
varchar: FieldTypes.STRING,
integer: FieldTypes.NUMBER,
bigint: FieldTypes.NUMBER,
decimal: FieldTypes.NUMBER,
smallint: FieldTypes.NUMBER,
real: FieldTypes.NUMBER,
"double precision": FieldTypes.NUMBER,
timestamp: FieldTypes.DATETIME,
time: FieldTypes.DATETIME,
boolean: FieldTypes.BOOLEAN,
json: FieldTypes.JSON,
date: FieldTypes.DATETIME,
blob: FieldTypes.LONGFORM,
enum: FieldTypes.STRING,
float: FieldTypes.NUMBER,
int: FieldTypes.NUMBER,
numeric: FieldTypes.NUMBER,
mediumint: FieldTypes.NUMBER,
dec: FieldTypes.NUMBER,
double: FieldTypes.NUMBER,
fixed: FieldTypes.NUMBER,
datetime: FieldTypes.DATETIME,
tinyint: FieldTypes.BOOLEAN,
}
export enum SqlClients {
MS_SQL = "mssql",
POSTGRES = "pg",
MY_SQL = "mysql",
}
export function isExternalTable(tableId: string) {
return tableId.includes(DocumentTypes.DATASOURCE)
}
@ -68,8 +101,8 @@ export function breakRowIdField(_id: string | { _id: string }): any[] {
}
}
export function convertType(type: string, map: { [key: string]: any }) {
for (let [external, internal] of Object.entries(map)) {
export function convertSqlType(type: string) {
for (let [external, internal] of Object.entries(SQL_TYPE_MAP)) {
if (type.toLowerCase().includes(external)) {
return internal
}

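convertSqlType replaces the per-integration TYPE_MAP objects with the shared SQL_TYPE_MAP above, and it matches by substring (type.toLowerCase().includes(external)), so e.g. an nvarchar(255) column hits the varchar entry. A trimmed, runnable restatement:

// Substring matching as in convertSqlType; map trimmed for brevity.
const SQL_TYPE_MAP: Record<string, string> = {
  varchar: "string",
  int: "number",
  datetime: "datetime",
}

function convertSqlTypeSketch(type: string): string {
  for (const [external, internal] of Object.entries(SQL_TYPE_MAP)) {
    if (type.toLowerCase().includes(external)) {
      return internal
    }
  }
  return "string" // assumed fallback - truncated in this hunk
}

convertSqlTypeSketch("NVARCHAR(255)") // "string"
convertSqlTypeSketch("bigint")        // "number"
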
View file

@ -84,8 +84,7 @@ exports.setInitInfo = ctx => {
}
exports.getInitInfo = ctx => {
const initInfo = getCookie(ctx, Cookies.Init)
ctx.body = initInfo
ctx.body = getCookie(ctx, Cookies.Init)
}
/**