diff --git a/packages/builder/src/components/nav/ModelNavigator/CreateTable.svelte b/packages/builder/src/components/nav/ModelNavigator/CreateTable.svelte
index 705da1365c..3ab5a10fe3 100644
--- a/packages/builder/src/components/nav/ModelNavigator/CreateTable.svelte
+++ b/packages/builder/src/components/nav/ModelNavigator/CreateTable.svelte
@@ -3,6 +3,7 @@
   import { backendUiStore } from "builderStore"
   import { notifier } from "builderStore/store/notifications"
   import {
+    Body,
     DropdownMenu,
     Button,
     Heading,
@@ -11,19 +12,20 @@
     Select,
     Dropzone,
   } from "@budibase/bbui"
+  import TableDataImport from "./TableDataImport.svelte"
   import api from "builderStore/api"
   import analytics from "analytics"

   let anchor
   let dropdown
   let name
-  let recordImport = {}
+  let dataImport

   async function saveTable() {
     const model = await backendUiStore.actions.models.save({
       name,
-      schema: recordImport.schema || {},
-      recordImport,
+      schema: (dataImport && dataImport.schema) || {},
+      dataImport,
     })
     notifier.success(`Table ${name} created successfully.`)
     $goto(`./model/${model._id}`)
@@ -36,38 +38,6 @@
     name = ""
     dropdown.hide()
   }
-
-  function handleFileTooLarge(fileSizeLimit) {
-    notifier.danger(
-      `Files cannot exceed ${fileSizeLimit /
-        BYTES_IN_MB}MB. Please try again with smaller files.`
-    )
-  }
-
-  async function processFiles(fileList) {
-    const fileArray = Array.from(fileList)
-    let data = new FormData()
-    for (var i = 0; i < fileList.length; i++) {
-      data.append("file", fileList[i])
-    }
-
-    const response = await fetch("/api/csv/validate", {
-      method: "POST",
-      body: data,
-      headers: {
-        Accept: "application/json",
-      },
-    })
-
-    recordImport = await response.json()
-
-    if (response.status !== 200) {
-      notifier.danger("CSV Invalid, please try another CSV file")
-      return []
-    }
-
-    return fileArray.map(file => ({ ...file, extension: "csv" }))
-  }
@@ -76,21 +46,27 @@
       Create Table
+      Table Name
-      Create Table from CSV (Optional)
-      <Dropzone {handleFileTooLarge} {processFiles} />
+      Create Table from CSV (Optional)
+      <TableDataImport bind:dataImport />
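The FormData round-trip that used to live in processFiles moves into the new TableDataImport component and now targets the models router instead of the static one. A rough sketch of the call the component presumably makes, modeled on the removed helper (the file variable and handler wiring are illustrative):

    const data = new FormData()
    data.append("file", file)

    const response = await fetch("/api/models/csv/validate", {
      method: "POST",
      body: data,
      headers: { Accept: "application/json" },
    })

    // the route replies with { schema, path }; the component binds this
    // out as dataImport, which saveTable forwards to the models API
    dataImport = await response.json()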
diff --git a/packages/builder/src/components/nav/ModelNavigator/TableDataImport.svelte b/packages/builder/src/components/nav/ModelNavigator/TableDataImport.svelte
new file mode 100644
index 0000000000..9bdaf7e77d
--- /dev/null
+++ b/packages/builder/src/components/nav/ModelNavigator/TableDataImport.svelte
@@ -0,0 +1,189 @@
+<script>
+  // script body: handleFile posts the selected CSV to
+  // /api/models/csv/validate, keeps the returned per-column schema for
+  // display below, and exposes { schema, path } to the parent through
+  // the bound dataImport value
+</script>
+
+<input id="file-upload" accept=".csv" type="file" on:change={handleFile} />
+
+{#if schema}
+  {#each Object.keys(schema) as columnName}
+    <div class="field">
+      <span>{columnName}</span>
+      <Select secondary thin
+        bind:value={schema[columnName].type} />
+      <span class="field-status">{schema[columnName].success ? 'Success' : 'Failure'}</span>
+      <i class="omit-button"
+        on:click={() => omitColumn(columnName)} />
+    </div>
+  {/each}
+{/if}
+
+<style>
+  /* styles for the dropzone and schema field rows */
+</style>
diff --git a/packages/server/build/Hosting Portal-2.png b/packages/server/build/Hosting Portal-2.png
new file mode 100644
index 0000000000..c42db9bd8f
Binary files /dev/null and b/packages/server/build/Hosting Portal-2.png differ
diff --git a/packages/server/src/api/controllers/model.js b/packages/server/src/api/controllers/model.js
index 21cf837855..3c3be374c3 100644
--- a/packages/server/src/api/controllers/model.js
+++ b/packages/server/src/api/controllers/model.js
@@ -1,5 +1,6 @@
 const CouchDB = require("../../db")
 const newid = require("../../db/newid")
+const csvParser = require("../../utilities/csvParser")

 exports.fetch = async function(ctx) {
   const db = new CouchDB(ctx.user.instanceId)
@@ -19,7 +20,7 @@ exports.find = async function(ctx) {

 exports.save = async function(ctx) {
   const db = new CouchDB(ctx.user.instanceId)
-  const { recordImport, ...rest } = ctx.request.body
+  const { dataImport, ...rest } = ctx.request.body
   const modelToSave = {
     type: "model",
     _id: newid(),
@@ -86,15 +87,13 @@ exports.save = async function(ctx) {
   }
   await db.put(designDoc)

-  if (recordImport && recordImport.path) {
+  if (dataImport && dataImport.path) {
     // Populate the table with records imported from CSV in a bulk update
-    const csv = require("csvtojson")
-    const json = await csv().fromFile(recordImport.path)
-    const records = json.map(record => ({
-      ...record,
-      modelId: modelToSave._id,
-    }))
-    await db.bulkDocs(records)
+    const data = await csvParser.transform(dataImport)
+
+    for (let row of data) row.modelId = modelToSave._id
+
+    await db.bulkDocs(data)
   }

   ctx.status = 200
@@ -135,3 +134,12 @@ exports.destroy = async function(ctx) {
   ctx.status = 200
   ctx.message = `Model ${ctx.params.modelId} deleted.`
 }
+
+exports.validateCSVSchema = async function(ctx) {
+  const { file, schema = {} } = ctx.request.body
+  const result = await csvParser.parse(file.path, schema)
+  ctx.body = {
+    schema: result,
+    path: file.path,
+  }
+}
diff --git a/packages/server/src/api/controllers/static.js b/packages/server/src/api/controllers/static.js
index f29b515aaa..663c4c7257 100644
--- a/packages/server/src/api/controllers/static.js
+++ b/packages/server/src/api/controllers/static.js
@@ -5,7 +5,6 @@
 const fetch = require("node-fetch")
 const fs = require("fs")
 const uuid = require("uuid")
 const AWS = require("aws-sdk")
-const csv = require("csvtojson")
 const { prepareUploadForS3 } = require("./deploy/aws")
 const {
@@ -247,37 +246,3 @@ exports.serveComponentLibrary = async function(ctx) {

   await send(ctx, "/index.js", { root: componentLibraryPath })
 }
-
-function schemaFromCSV(path) {
-  const result = csv().fromFile(path)
-  return new Promise((resolve, reject) => {
-    result.on("header", headers => {
-      const schema = {}
-      for (let header of headers) {
-        schema[header] = {
-          type: "string",
-          constraints: {
-            type: "string",
-            length: {},
-            presence: {
-              allowEmpty: true,
-            },
-          },
-          name: header,
-        }
-      }
-      resolve(schema)
-    })
-    result.on("error", reject)
-  })
-}
-
-exports.validateCSV = async function(ctx) {
-  const file = ctx.request.files.file
-  const schema = await schemaFromCSV(file.path)
-  // if (result.length === 0) ctx.throw(400, "CSV Invalid")
-  ctx.body = {
-    schema,
-    path: file.path,
-  }
-}
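Taken together, validateCSVSchema and csvParser.parse give the builder a dry run: the client sends the uploaded file plus the per-column types it wants, and gets the same schema back with a success flag per column. A minimal sketch of that contract (the file path and column names are hypothetical):

    const csvParser = require("./utilities/csvParser")

    async function example() {
      const schema = await csvParser.parse("/tmp/uploads/import.csv", {
        Quantity: { type: "number" },
        LastUpdated: { type: "datetime" },
      })
      // => { Quantity: { type: "number", success: true },
      //      LastUpdated: { type: "datetime", success: true },
      //      ...every other header defaults to { type: "string", success: true } }
      console.log(schema)
    }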
diff --git a/packages/server/src/api/routes/model.js b/packages/server/src/api/routes/model.js
index 00eb46d515..fe782d4cf5 100644
--- a/packages/server/src/api/routes/model.js
+++ b/packages/server/src/api/routes/model.js
@@ -13,6 +13,11 @@ router
     modelController.find
   )
   .post("/api/models", authorized(BUILDER), modelController.save)
+  .post(
+    "/api/models/csv/validate",
+    authorized(BUILDER),
+    modelController.validateCSVSchema
+  )
   .delete(
     "/api/models/:modelId/:revId",
     authorized(BUILDER),
diff --git a/packages/server/src/api/routes/static.js b/packages/server/src/api/routes/static.js
index 4026e0205c..aa136a3d15 100644
--- a/packages/server/src/api/routes/static.js
+++ b/packages/server/src/api/routes/static.js
@@ -28,7 +28,6 @@ router
     authorized(BUILDER),
     controller.performLocalFileProcessing
   )
-  .post("/api/csv/validate", authorized(BUILDER), controller.validateCSV)
   .post("/api/attachments/upload", controller.uploadFile)
   .get("/componentlibrary", controller.serveComponentLibrary)
   .get("/assets/:file*", controller.serveAppAsset)
diff --git a/packages/server/src/utilities/csvParser.js b/packages/server/src/utilities/csvParser.js
new file mode 100644
index 0000000000..1174c2e86f
--- /dev/null
+++ b/packages/server/src/utilities/csvParser.js
@@ -0,0 +1,77 @@
+const csv = require("csvtojson")
+
+const VALIDATORS = {
+  string: () => true,
+  number: attribute => !isNaN(Number(attribute)),
+  datetime: attribute => !isNaN(new Date(attribute).getTime()),
+}
+
+const PARSERS = {
+  string: attribute => attribute.toString(),
+  number: attribute => Number(attribute),
+  datetime: attribute => new Date(attribute).toISOString(),
+}
+
+function parse(path, parsers) {
+  const result = csv().fromFile(path)
+
+  const schema = {}
+
+  return new Promise((resolve, reject) => {
+    result.on("header", headers => {
+      for (let header of headers) {
+        schema[header] = {
+          type: parsers[header] ? parsers[header].type : "string",
+          success: true,
+        }
+      }
+    })
+    result.subscribe(row => {
+      // For each CSV row
+      // validate all the columns that need parsing
+      for (let key in parsers) {
+        // if this column has already failed validation, skip it
+        if (!schema[key] || !schema[key].success) continue
+
+        // get the validator for the column's requested type
+        const validator = VALIDATORS[parsers[key].type]
+
+        try {
+          schema[key].success = !!validator(row[key])
+        } catch (err) {
+          schema[key].success = false
+        }
+      }
+    })
+    result.on("done", error => {
+      if (error) {
+        console.error(error)
+        return reject(error)
+      }
+
+      resolve(schema)
+    })
+  })
+}
+
+// TODO: significant refactor
+async function transform({ schema, path }) {
+  const colParser = {}
+
+  for (let key in schema) {
+    colParser[key] = PARSERS[schema[key].type]
+  }
+
+  try {
+    const json = await csv({ colParser }).fromFile(path)
+    return json
+  } catch (err) {
+    console.error(`Error transforming CSV to JSON for data import`, err)
+    throw err
+  }
+}
+
+module.exports = {
+  parse,
+  transform,
+}
diff --git a/packages/server/src/utilities/tests/csvParser.spec.js b/packages/server/src/utilities/tests/csvParser.spec.js
new file mode 100644
index 0000000000..e69de29bb2
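The new spec file is added empty. A starting point for what it could cover, assuming Jest and a small fixture CSV (the fixture path and its columns are hypothetical):

    const csvParser = require("../csvParser")

    // hypothetical fixture: a CSV with Name (text) and Age (numeric) columns
    const CSV_PATH = __dirname + "/fixtures/test.csv"

    describe("csvParser", () => {
      it("flags each column with a validation result", async () => {
        const schema = await csvParser.parse(CSV_PATH, {
          Age: { type: "number" },
        })
        expect(schema.Age.success).toBe(true)
        // headers without a requested type default to string
        expect(schema.Name.type).toBe("string")
      })

      it("casts columns while transforming rows for import", async () => {
        const rows = await csvParser.transform({
          path: CSV_PATH,
          schema: { Age: { type: "number" } },
        })
        expect(typeof rows[0].Age).toBe("number")
      })
    })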