
added core into mono repo

michael shanks 2019-07-15 07:12:52 +01:00
parent b5f58bac4c
commit d7eca3b2f4
148 changed files with 31993 additions and 0 deletions

packages/core/.babelrc Normal file

@@ -0,0 +1,12 @@
{
"presets": ["@babel/preset-env"],
"sourceMaps": "inline",
"retainLines": true,
"plugins": [
["@babel/plugin-transform-runtime",
{
"regenerator": true
}
]
]
}

@@ -0,0 +1,11 @@
{
"env": {
"browser": true,
"es6": true
},
"extends": "eslint:recommended",
"parser": "babel-eslint",
"parserOptions": {
"sourceType": "module"
}
}

packages/core/.gitignore vendored Normal file

@@ -0,0 +1,55 @@
# Logs
logs
*.log
# Runtime data
pids
*.pid
*.seed
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# node-waf configuration
.lock-wscript
# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release
.eslintcache
# Dependency directory
# https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git
node_modules
node_modules_ubuntu
node_modules_windows
app/node_modules
# OSX
.DS_Store
# flow-typed
flow-typed/npm/*
!flow-typed/npm/module_vx.x.x.js
# App packaged
release
app/main.prod.js
app/main.prod.js.map
app/renderer.prod.js
app/renderer.prod.js.map
app/style.css
app/style.css.map
dist
dist-test
dll
main.js
main.js.map
.idea
npm-debug.log.*

packages/core/.travis.yml Normal file

@@ -0,0 +1,11 @@
sudo: required
notifications:
slack: budibase:Nx2QNi9CP87Nn7ah2A4Qdzyy
script:
- npm install
- npm install -g jest
- node node_modules/eslint/bin/eslint src/**/*.js
- jest

packages/core/.vscode/launch.json vendored Normal file

@@ -0,0 +1,14 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "launch",
"name": "Launch Program",
"program": "${workspaceFolder}\\index.js"
}
]
}

packages/core/.vscode/settings.json vendored Normal file (empty)

packages/core/AUTHORS.md Normal file

@@ -0,0 +1,5 @@
Contributors
===
* Michael Shanks - [@mikebudi](https://github.com/mjashanks)
* Daniel Loudon - [@danbudi](https://github.com/danbudi)

@@ -0,0 +1,22 @@
### Contributing to budibase-core
* The contributors are listed in [AUTHORS.md](https://github.com/budibase/budibase-core/blob/master/AUTHORS.md) (add yourself).
* This project uses a modified version of the MPLv2 license, see [LICENSE](https://github.com/budibase/budibase-core/blob/master/LICENSE).
* We use the [C4 (Collective Code Construction Contract)](https://rfc.zeromq.org/spec:42/C4/) process for contributions.
Please read this if you are unfamiliar with it.
* Please maintain the existing code style.
* Please try to keep your commits small and focussed.
* If the project diverges from your branch, please rebase instead of merging. This makes the commit graph easier to read.
#### P.S.
I am using contribution guidelines from the fantastic [ZeroMQ](https://github.com/zeromq) community. If you are wondering why, it's because I believe in the ethos laid out by that community, written about in depth in the book ["Social Architecture"](https://www.amazon.com/Social-Architecture-Building-line-Communities/dp/1533112452) by Pieter Hintjens.
I am very much open to evolving this to suit our needs.
Love from [Mike](https://github.com/mikebudi).

packages/core/LICENSE Normal file

@@ -0,0 +1,373 @@
Mozilla Public License Version 2.0
==================================
1. Definitions
--------------
1.1. "Contributor"
means each individual or legal entity that creates, contributes to
the creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used
by a Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached
the notice in Exhibit A, the Executable Form of such Source Code
Form, and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. "Incompatible With Secondary Licenses"
means
(a) that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
(b) that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the
terms of a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in
a separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently, any and
all of the rights conveyed by this License.
1.10. "Modifications"
means any of the following:
(a) any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered
Software; or
(b) any new file in Source Code Form that contains any Covered
Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the
License, by the making, using, selling, offering for sale, having
made, import, or transfer of either its Contributions or its
Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU
Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those
licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that
controls, is controlled by, or is under common control with You. For
purposes of this definition, "control" means (a) the power, direct
or indirect, to cause the direction or management of such entity,
whether by contract or otherwise, or (b) ownership of more than
fifty percent (50%) of the outstanding shares or beneficial
ownership of such entity.
2. License Grants and Conditions
--------------------------------
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
(a) under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
(b) under Patent Claims of such Contributor to make, use, sell, offer
for sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
(a) for any code that a Contributor has removed from Covered Software;
or
(b) for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
(c) under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.
3. Responsibilities
-------------------
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more
than the cost of distribution to the recipient; and
(b) You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------
If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.
5. Termination
--------------
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
************************************************************************
* *
* 6. Disclaimer of Warranty *
* ------------------------- *
* *
* Covered Software is provided under this License on an "as is" *
* basis, without warranty of any kind, either expressed, implied, or *
* statutory, including, without limitation, warranties that the *
* Covered Software is free of defects, merchantable, fit for a *
* particular purpose or non-infringing. The entire risk as to the *
* quality and performance of the Covered Software is with You. *
* Should any Covered Software prove defective in any respect, You *
* (not any Contributor) assume the cost of any necessary servicing, *
* repair, or correction. This disclaimer of warranty constitutes an *
* essential part of this License. No use of any Covered Software is *
* authorized under this License except under this disclaimer. *
* *
************************************************************************
************************************************************************
* *
* 7. Limitation of Liability *
* -------------------------- *
* *
* Under no circumstances and under no legal theory, whether tort *
* (including negligence), contract, or otherwise, shall any *
* Contributor, or anyone who distributes Covered Software as *
* permitted above, be liable to You for any direct, indirect, *
* special, incidental, or consequential damages of any character *
* including, without limitation, damages for lost profits, loss of *
* goodwill, work stoppage, computer failure or malfunction, or any *
* and all other commercial damages or losses, even if such party *
* shall have been informed of the possibility of such damages. This *
* limitation of liability shall not apply to liability for death or *
* personal injury resulting from such party's negligence to the *
* extent applicable law prohibits such limitation. Some *
* jurisdictions do not allow the exclusion or limitation of *
* incidental or consequential damages, so this exclusion and *
* limitation may not apply to You. *
* *
************************************************************************
8. Litigation
-------------
Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.
9. Miscellaneous
----------------
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.
10. Versions of the License
---------------------------
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
-------------------------------------------
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------
This Source Code Form is "Incompatible With Secondary Licenses", as
defined by the Mozilla Public License, v. 2.0.

packages/core/package-lock.json generated Normal file

File diff suppressed because it is too large (9,928 lines)

packages/core/package.json Normal file

@@ -0,0 +1,83 @@
{
"name": "budibase-core",
"version": "0.0.1",
"description": "core javascript library for budibase",
"main": "/dist/budibase-core.umd.js",
"module": "dist/budibase-core.esm.js",
"directories": {
"test": "test"
},
"scripts": {
"test": "jest",
"build": "rollup -c rollup.config.js"
},
"keywords": [
"budibase"
],
"author": "Michael Shanks",
"license": "MPL-2.0",
"jest": {
"globals": {
"GLOBALS": {
"client": "web"
}
},
"testURL": "http://jest-breaks-if-this-does-not-exist",
"moduleNameMapper": {
"\\.(jpg|jpeg|png|gif|eot|otf|webp|svg|ttf|woff|woff2|mp4|webm|wav|mp3|m4a|aac|oga)$": "<rootDir>/internals/mocks/fileMock.js",
"\\.(css|less|sass|scss)$": "identity-obj-proxy"
},
"moduleFileExtensions": [
"js"
],
"moduleDirectories": [
"node_modules",
"app/node_modules"
],
"transform": {
"^.+\\.js$": "babel-jest"
},
"transformIgnorePatterns": [
"/node_modules/(?!svelte).+\\.js$"
]
},
"devDependencies": {
"@babel/cli": "^7.4.4",
"@babel/core": "^7.4.5",
"@babel/plugin-transform-runtime": "^7.4.4",
"@babel/preset-env": "^7.4.5",
"@babel/runtime": "^7.4.5",
"argon2": "^0.20.1",
"babel-eslint": "^10.0.2",
"babel-jest": "^23.6.0",
"babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
"cross-env": "^5.1.4",
"eslint": "^5.3.0",
"eslint-config-airbnb": "^17.1.0",
"eslint-plugin-import": "^2.14.0",
"eslint-plugin-jsx-a11y": "^6.1.1",
"eslint-plugin-react": "^7.11.0",
"jest": "^24.8.0",
"readable-stream": "^3.1.1",
"regenerator-runtime": "^0.11.1",
"rimraf": "^2.6.2",
"rollup": "^1.12.0",
"rollup-plugin-commonjs": "^10.0.0",
"rollup-plugin-node-builtins": "^2.1.2",
"rollup-plugin-node-globals": "^1.4.0",
"rollup-plugin-node-resolve": "^5.0.0"
},
"dependencies": {
"@nx-js/compiler-util": "^2.0.0",
"date-fns": "^1.29.0",
"lodash": "^4.17.11",
"lunr": "^2.3.5",
"safe-buffer": "^5.1.2",
"shortid": "^2.2.8"
},
"devEngines": {
"node": ">=7.x",
"npm": ">=4.x",
"yarn": ">=0.21.3"
}
}

packages/core/q Normal file

File diff suppressed because it is too large (1,131 lines)

packages/core/readme.md Normal file

@@ -0,0 +1,32 @@
## Getting Started
Installation requires [node-gyp](https://github.com/nodejs/node-gyp), due to a dependency on [argon2](https://github.com/ranisalt/node-argon2)
### For node-gyp on Windows
`npm install --global --production windows-build-tools`
and this might help: https://github.com/nodejs/node-gyp/issues/1278
### For node-gyp on Ubuntu
`sudo apt-get install build-essential`
Once you have this, try...
`npm install`
Next, run the tests. Install jest globally:
`npm install -g jest`
And finally, run
`jest`
## Documentation
A work in progress, lives here: https://github.com/Budibase/docs/blob/master/budibase-core.md

packages/core/rollup.config.js Normal file

@@ -0,0 +1,76 @@
import builtins from 'rollup-plugin-node-builtins';
import resolve from 'rollup-plugin-node-resolve';
import commonjs from 'rollup-plugin-commonjs';
import nodeglobals from 'rollup-plugin-node-globals';
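// lodash ships as CommonJS, so rollup-plugin-commonjs cannot detect its named
// exports statically; every name imported from 'lodash' and 'lodash/fp' must be
// listed here and passed to the commonjs plugin's namedExports option below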
const lodash_fp_exports = ["union", "reduce", "isUndefined", "cloneDeep", "split", "some", "map", "filter", "isEmpty", "countBy", "includes", "last", "find", "constant",
"take", "first", "intersection", "mapValues", "isNull", "has", "isNumber", "isString", "isBoolean", "isDate", "isArray", "isObject", "clone", "values", "keyBy",
"keys", "orderBy", "concat", "reverse", "difference", "merge", "flatten", "each", "pull", "join", "defaultCase", "uniqBy", "every", "uniqWith", "isFunction", "groupBy",
"differenceBy", "intersectionBy", "isEqual", "max"];
const lodash_exports = ["toNumber", "flow", "isArray", "join", "replace", "trim", "dropRight", "takeRight", "head", "isUndefined", "isNull", "isNaN", "reduce", "isEmpty",
"constant", "tail", "includes", "startsWith", "findIndex", "isInteger", "isDate", "isString", "split", "clone", "keys", "isFunction", "merge", "has", "isBoolean", "isNumber",
"isObjectLike", "assign", "some", "each", "find", "orderBy", "union", "cloneDeep"];
const globals = {
"lodash/fp": "fp",
lodash: "_",
lunr: "lunr",
"safe-buffer": "safe_buffer",
shortid: "shortid",
"@nx-js/compiler-util": "compiler_util",
};
module.exports = {
input: 'src/index.js',
output: [
{
file: 'dist/budibase-core.cjs.js',
format: 'cjs',
sourcemap: 'inline',
globals
},
/*{
file: 'dist/budibase-core.iife.js',
format: 'iife',
sourcemap: 'inline',
globals: [
]
}*/,
{
file: 'dist/budibase-core.esm.mjs',
format: 'esm',
sourcemap: 'inline',
globals
},
{
file: 'dist/budibase-core.umd.js',
format: 'umd',
name: "budibase-core",
sourcemap: 'inline',
globals
}
],
plugins: [
nodeglobals(),
builtins(),
resolve({
preferBuiltins:true
}),
commonjs({
namedExports: {
"lodash/fp": lodash_fp_exports,
"lodash":lodash_exports,
"shortid": ["generate"]
}
})
],
external: [
"lodash", "lodash/fp", "date-fns",
"lunr", "safe-buffer", "shortid",
"@nx-js/compiler-util"
]
};

@@ -0,0 +1,15 @@
import { has } from 'lodash';
import { ConflictError } from '../common/errors';
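// a behaviour source is a named collection of functions; an action references
// one as { behaviourSource, behaviourName } - see _executeAction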
export const createBehaviourSources = () => {
const sources = {};
const register = (name, funcsObj) => {
if (has(sources, name)) {
throw new ConflictError(`Source '${name}' already exists`);
}
sources[name] = funcsObj;
};
sources.register = register;
return sources;
};

@@ -0,0 +1,15 @@
import { permission } from '../authApi/permissions';
import { apiWrapperSync } from '../common/apiWrapper';
import { events } from '../common/events';
export const executeAction = app => (actionName, options) => {
apiWrapperSync(
app,
events.actionsApi.execute,
permission.executeAction.isAuthorized(actionName),
{ actionName, options },
app.actions[actionName], options,
);
};
export const _executeAction = (behaviourSources, action, options) => behaviourSources[action.behaviourSource][action.behaviourName](options);

@@ -0,0 +1,7 @@
import { executeAction } from './execute';
export const getActionsApi = app => ({
execute: executeAction(app),
});
export default getActionsApi;

@@ -0,0 +1,74 @@
import {
isFunction, filter, map,
uniqBy, keys, difference,
join, reduce, find,
} from 'lodash/fp';
import { compileExpression, compileCode } from '../common/compileCode';
import { $ } from '../common';
import { _executeAction } from './execute';
import { BadRequestError, NotFoundError } from '../common/errors';
export const initialiseActions = (subscribe, behaviourSources, actions, triggers, apis) => {
validateSources(behaviourSources, actions);
subscribeTriggers(subscribe, behaviourSources, actions, triggers, apis);
return createActionsCollection(behaviourSources, actions);
};
const createActionsCollection = (behaviourSources, actions) => $(actions, [
reduce((all, a) => {
all[a.name] = opts => _executeAction(behaviourSources, a, opts);
return all;
}, {}),
]);
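// a trigger has the form { eventName, actionName, condition, optionsCreator };
// condition and optionsCreator are javascript strings, compiled below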
const subscribeTriggers = (subscribe, behaviourSources, actions, triggers, apis) => {
const createOptions = (optionsCreator, eventContext) => {
if (!optionsCreator) return {};
const create = compileCode(optionsCreator);
return create({ context: eventContext, apis });
};
const shouldRunTrigger = (trigger, eventContext) => {
if (!trigger.condition) return true;
const shouldRun = compileExpression(trigger.condition);
return shouldRun({ context: eventContext });
};
for (let trig of triggers) {
subscribe(trig.eventName, async (ev, ctx) => {
if (shouldRunTrigger(trig, ctx)) {
await _executeAction(
behaviourSources,
find(a => a.name === trig.actionName)(actions),
createOptions(trig.optionsCreator, ctx),
);
}
});
}
};
const validateSources = (behaviourSources, actions) => {
const declaredSources = $(actions, [
uniqBy(a => a.behaviourSource),
map(a => a.behaviourSource),
]);
const suppliedSources = keys(behaviourSources);
const missingSources = difference(
declaredSources, suppliedSources,
);
if (missingSources.length > 0) {
throw new BadRequestError(`Declared behaviour sources are not supplied: ${join(', ', missingSources)}`);
}
const missingBehaviours = $(actions, [
filter(a => !isFunction(behaviourSources[a.behaviourSource][a.behaviourName])),
map(a => `Action: ${a.name} : ${a.behaviourSource}.${a.behaviourName}`),
]);
if (missingBehaviours.length > 0) {
throw new NotFoundError(`Missing behaviours: could not find behaviour functions: ${join(', ', missingBehaviours)}`);
}
};

@@ -0,0 +1,27 @@
import { isNothing } from '../common';
export const getDatabaseManager = databaseManager => ({
createEmptyMasterDb: createEmptyMasterDb(databaseManager),
createEmptyInstanceDb: createEmptyInstanceDb(databaseManager),
getInstanceDbRootConfig: databaseManager.getInstanceDbRootConfig,
masterDatastoreConfig: getMasterDatastoreConfig(databaseManager),
getInstanceDatastoreConfig: getInstanceDatastoreConfig(databaseManager),
});
const getMasterDatastoreConfig = databaseManager => databaseManager.getDatastoreConfig('master');
const getInstanceDatastoreConfig = databaseManager => (applicationId, instanceId) => databaseManager.getDatastoreConfig(
applicationId, instanceId,
);
const createEmptyMasterDb = databaseManager => async () => await databaseManager.createEmptyDb('master');
const createEmptyInstanceDb = databaseManager => async (applicationId, instanceId) => {
if (isNothing(applicationId)) { throw new Error('CreateDb: application id not supplied'); }
if (isNothing(instanceId)) { throw new Error('CreateDb: instance id not supplied'); }
return await databaseManager.createEmptyDb(
applicationId,
instanceId,
);
};

@@ -0,0 +1,27 @@
import { has } from 'lodash';
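// handlers for an event are awaited one at a time, so publish() resolves only
// after every subscriber has completed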
const publish = handlers => async (eventName, context = {}) => {
if (!has(handlers, eventName)) return;
for (const handler of handlers[eventName]) {
await handler(eventName, context);
}
};
const subscribe = handlers => (eventName, handler) => {
if (!has(handlers, eventName)) {
handlers[eventName] = [];
}
handlers[eventName].push(handler);
};
export const createEventAggregator = () => {
const handlers = {};
const eventAggregator = ({
publish: publish(handlers),
subscribe: subscribe(handlers),
});
return eventAggregator;
};
export default createEventAggregator;

@@ -0,0 +1,36 @@
import { retry } from '../common/index';
import { NotFoundError } from '../common/errors';
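// wraps the raw datastore file operations with retry logic and JSON
// (de)serialisation - retry(fn, retries, delay, ...args) re-attempts fn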
const createJson = originalCreateFile => async (key, obj, retries = 5, delay = 500) => await retry(originalCreateFile, retries, delay, key, JSON.stringify(obj));
const createNewFile = originalCreateFile => async (path, content, retries = 5, delay = 500) => await retry(originalCreateFile, retries, delay, path, content);
const loadJson = datastore => async (key, retries = 5, delay = 500) => {
try {
return await retry(JSON.parse, retries, delay, await datastore.loadFile(key));
} catch (err) {
throw new NotFoundError(err.message);
}
}
const updateJson = datastore => async (key, obj, retries = 5, delay = 500) => {
try {
return await retry(datastore.updateFile, retries, delay, key, JSON.stringify(obj));
} catch (err) {
throw new NotFoundError(err.message);
}
}
export const setupDatastore = (datastore) => {
const originalCreateFile = datastore.createFile;
datastore.loadJson = loadJson(datastore);
datastore.createJson = createJson(originalCreateFile);
datastore.updateJson = updateJson(datastore);
datastore.createFile = createNewFile(originalCreateFile);
if (datastore.createEmptyDb) { delete datastore.createEmptyDb; }
return datastore;
};
export { createEventAggregator } from './eventAggregator';
export default setupDatastore;

@@ -0,0 +1,58 @@
import { filter } from 'lodash/fp';
import { configFolder, appDefinitionFile, $ } from '../common';
import { TRANSACTIONS_FOLDER } from '../transactions/transactionsCommon';
import { AUTH_FOLDER, USERS_LIST_FILE, ACCESS_LEVELS_FILE } from '../authApi/authCommon';
import { initialiseRootCollections } from '../collectionApi/initialise';
import { initialiseIndex } from '../indexing/initialiseIndex';
import { getFlattenedHierarchy, isGlobalIndex, isSingleRecord } from '../templateApi/hierarchy';
import { _save } from '../recordApi/save';
import { getNew } from '../recordApi/getNew';
export const initialiseData = async (datastore, applicationDefinition, accessLevels) => {
await datastore.createFolder(configFolder);
await datastore.createJson(appDefinitionFile, applicationDefinition);
await initialiseRootCollections(datastore, applicationDefinition.hierarchy);
await initialiseRootIndexes(datastore, applicationDefinition.hierarchy);
await initialiseRootSingleRecords(datastore, applicationDefinition.hierarchy);
await datastore.createFolder(TRANSACTIONS_FOLDER);
await datastore.createFolder(AUTH_FOLDER);
await datastore.createJson(USERS_LIST_FILE, []);
await datastore.createJson(
ACCESS_LEVELS_FILE,
accessLevels ? accessLevels : { version: 0, levels: [] });
};
const initialiseRootIndexes = async (datastore, hierarchy) => {
const flathierarchy = getFlattenedHierarchy(hierarchy);
const globalIndexes = $(flathierarchy, [
filter(isGlobalIndex),
]);
for (const index of globalIndexes) {
if (!await datastore.exists(index.nodeKey())) { await initialiseIndex(datastore, '', index); }
}
};
const initialiseRootSingleRecords = async (datastore, hierarchy) => {
const flathierarchy = getFlattenedHierarchy(hierarchy);
const singleRecords = $(flathierarchy, [
filter(isSingleRecord),
]);
/* for (let record of singleRecords) {
const result = getNew({ datastore: datastore, hierarchy: appDefinition.hierarchy })
(record.nodeKey(),
record.name
);
_save({ datastore: datastore, hierarchy: appDefinition.hierarchy },
result
);
} */
};

@@ -0,0 +1,49 @@
import { clone, find, split } from 'lodash/fp';
import { joinKey, $ } from '../common';
// 5 minutes
export const tempCodeExpiryLength = 5 * 60 * 1000;
export const AUTH_FOLDER = '/.auth';
export const USERS_LIST_FILE = joinKey(AUTH_FOLDER, 'users.json');
export const userAuthFile = username => joinKey(AUTH_FOLDER, `auth_${username}.json`);
export const USERS_LOCK_FILE = joinKey(AUTH_FOLDER, 'users_lock');
export const ACCESS_LEVELS_FILE = joinKey(AUTH_FOLDER, 'access_levels.json');
export const ACCESS_LEVELS_LOCK_FILE = joinKey(AUTH_FOLDER, 'access_levels_lock');
export const permissionTypes = {
CREATE_RECORD: 'create record',
UPDATE_RECORD: 'update record',
READ_RECORD: 'read record',
DELETE_RECORD: 'delete record',
READ_INDEX: 'read index',
MANAGE_INDEX: 'manage index',
MANAGE_COLLECTION: 'manage collection',
WRITE_TEMPLATES: 'write templates',
CREATE_USER: 'create user',
SET_PASSWORD: 'set password',
CREATE_TEMPORARY_ACCESS: 'create temporary access',
ENABLE_DISABLE_USER: 'enable or disable user',
WRITE_ACCESS_LEVELS: 'write access levels',
LIST_USERS: 'list users',
LIST_ACCESS_LEVELS: 'list access levels',
EXECUTE_ACTION: 'execute action',
SET_USER_ACCESS_LEVELS: 'set user access levels',
};
export const getUserByName = (users, name) => $(users, [
find(u => u.name.toLowerCase() === name.toLowerCase()),
]);
export const stripUserOfSensitiveStuff = (user) => {
const stripped = clone(user);
delete stripped.tempCode;
return stripped;
};
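// temporary access codes have the form 'tmp:<temporaryAccessId>:<code>'
// (see getTemporaryCode in createTemporaryAccess)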
export const parseTemporaryCode = fullCode => $(fullCode, [
split(':'),
parts => ({
id: parts[1],
code: parts[2],
}),
]);

@@ -0,0 +1,117 @@
import {
find, filter, some,
map, flatten,
} from 'lodash/fp';
import { generate } from 'shortid';
import { _getUsers } from './getUsers';
import {
getUserByName, userAuthFile,
parseTemporaryCode,
} from './authCommon';
import { _loadAccessLevels } from './loadAccessLevels';
import {
isNothingOrEmpty, $, apiWrapper, events,
} from '../common';
import { alwaysAuthorized } from './permissions';
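// a valid argon2 hash of a throwaway value; verified against when the user does
// not exist, so that response time does not reveal whether a username is valid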
const dummyHash = '$argon2i$v=19$m=4096,t=3,p=1$UZRo409UYBGjHJS3CV6Uxw$rU84qUqPeORFzKYmYY0ceBLDaPO+JWSH4PfNiKXfIKk';
export const authenticate = app => async (username, password) => apiWrapper(
app,
events.authApi.authenticate,
alwaysAuthorized,
{ username, password },
_authenticate, app, username, password,
);
export const _authenticate = async (app, username, password) => {
if (isNothingOrEmpty(username) || isNothingOrEmpty(password)) { return null; }
const allUsers = await _getUsers(app);
let user = getUserByName(
allUsers,
username,
);
const notAUser = 'not-a-user';
// continue with non-user - so time to verify remains consistent
// with verification of a valid user
if (!user || !user.enabled) { user = notAUser; }
let userAuth;
try {
userAuth = await app.datastore.loadJson(
userAuthFile(username),
);
} catch (_) {
userAuth = { accessLevels: [], passwordHash: dummyHash };
}
const permissions = await buildUserPermissions(app, user.accessLevels);
const verified = await app.crypto.verify(
userAuth.passwordHash,
password,
);
if (user === notAUser) { return null; }
return verified
? {
...user, permissions, temp: false, isUser: true,
}
: null;
};
export const authenticateTemporaryAccess = app => async (tempAccessCode) => {
if (isNothingOrEmpty(tempAccessCode)) { return null; }
const temp = parseTemporaryCode(tempAccessCode);
let user = $(await _getUsers(app), [
find(u => u.temporaryAccessId === temp.id),
]);
const notAUser = 'not-a-user';
if (!user || !user.enabled) { user = notAUser; }
let userAuth;
try {
userAuth = await app.datastore.loadJson(
userAuthFile(user.name),
);
} catch (e) {
userAuth = {
temporaryAccessHash: dummyHash,
temporaryAccessExpiryEpoch: (await app.getEpochTime() + 10000),
};
}
if (userAuth.temporaryAccessExpiryEpoch < await app.getEpochTime()) { user = notAUser; }
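// if no code part was supplied, verify a freshly generated one anyway, so the
// call takes about the same time as a genuine attempt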
const tempCode = !temp.code ? generate() : temp.code;
const verified = await app.crypto.verify(
userAuth.temporaryAccessHash,
tempCode,
);
if (user === notAUser) { return null; }
return verified
? {
...user,
permissions: [],
temp: true,
isUser: true,
}
: null;
};
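// a user's effective permissions are the flattened union of the permissions
// of every access level assigned to them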
export const buildUserPermissions = async (app, userAccessLevels) => {
const allAccessLevels = await _loadAccessLevels(app);
return $(allAccessLevels.levels, [
filter(l => some(ua => l.name === ua)(userAccessLevels)),
map(l => l.permissions),
flatten,
]);
};

@@ -0,0 +1,79 @@
import { generate } from 'shortid';
import {
tempCodeExpiryLength, USERS_LOCK_FILE,
USERS_LIST_FILE, userAuthFile,
getUserByName,
} from './authCommon';
import {
getLock, isNolock,
releaseLock,
} from '../common/lock';
import { apiWrapper, events } from '../common';
import { alwaysAuthorized } from './permissions';
export const createTemporaryAccess = app => async userName => apiWrapper(
app,
events.authApi.createTemporaryAccess,
alwaysAuthorized,
{ userName },
_createTemporaryAccess, app, userName,
);
export const _createTemporaryAccess = async (app, userName) => {
const tempCode = await getTemporaryCode(app);
const lock = await getLock(
app, USERS_LOCK_FILE, 1000, 2,
);
if (isNolock(lock)) { throw new Error('Unable to create temporary access, could not get lock - try again'); }
try {
const users = await app.datastore.loadJson(USERS_LIST_FILE);
const user = getUserByName(users, userName);
user.temporaryAccessId = tempCode.temporaryAccessId;
await app.datastore.updateJson(
USERS_LIST_FILE,
users,
);
} finally {
await releaseLock(app, lock);
}
const userAuth = await app.datastore.loadJson(
userAuthFile(userName),
);
userAuth.temporaryAccessHash = tempCode.temporaryAccessHash;
userAuth.temporaryAccessExpiryEpoch = tempCode.temporaryAccessExpiryEpoch;
await app.datastore.updateJson(
userAuthFile(userName),
userAuth,
);
return tempCode.tempCode;
};
export const getTemporaryCode = async (app) => {
const tempCode = generate()
+ generate()
+ generate()
+ generate();
const tempId = generate();
return {
temporaryAccessHash: await app.crypto.hash(
tempCode,
),
temporaryAccessExpiryEpoch:
(await app.getEpochTime()) + tempCodeExpiryLength,
tempCode: `tmp:${tempId}:${tempCode}`,
temporaryAccessId: tempId,
};
};
export const looksLikeTemporaryCode = code => code.startsWith('tmp:');

@@ -0,0 +1,96 @@
import { join, some } from 'lodash/fp';
import { validateUser } from './validateUser';
import { getNewUserAuth } from './getNewUser';
import {
getLock, isNolock, releaseLock, apiWrapper, events,
insensitiveEquals, isNonEmptyString,
} from '../common';
import {
USERS_LOCK_FILE, stripUserOfSensitiveStuff,
USERS_LIST_FILE, userAuthFile,
} from './authCommon';
import { getTemporaryCode } from './createTemporaryAccess';
import { isValidPassword } from './setPassword';
import { permission } from './permissions';
import { BadRequestError } from '../common/errors';
export const createUser = app => async (user, password = null) => apiWrapper(
app,
events.authApi.createUser,
permission.createUser.isAuthorized,
{ user, password },
_createUser, app, user, password,
);
export const _createUser = async (app, user, password = null) => {
const lock = await getLock(
app, USERS_LOCK_FILE, 1000, 2,
);
if (isNolock(lock)) { throw new Error('Unable to create user, could not get lock - try again'); }
try {
const users = await app.datastore.loadJson(USERS_LIST_FILE);
const userErrors = validateUser(app)([...users, user], user);
if (userErrors.length > 0) { throw new BadRequestError(`User is invalid. ${join('; ')(userErrors)}`); }
const { auth, tempCode, temporaryAccessId } = await getAccess(
app, password,
);
user.tempCode = tempCode;
user.temporaryAccessId = temporaryAccessId;
if (some(u => insensitiveEquals(u.name, user.name))(users)) {
throw new BadRequestError('User already exists');
}
users.push(
stripUserOfSensitiveStuff(user),
);
await app.datastore.updateJson(
USERS_LIST_FILE,
users,
);
// if the auth file already exists, fall back to updating it
try {
await app.datastore.createJson(
userAuthFile(user.name),
auth,
);
} catch (_) {
await app.datastore.updateJson(
userAuthFile(user.name),
auth,
);
}
} finally {
// the lock is released even when validation or a write fails
await releaseLock(app, lock);
}
return user;
};
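// if a password is supplied (and valid) it is hashed and stored; otherwise a
// temporary access code is issued, redeemable via setPasswordFromTemporaryCode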
const getAccess = async (app, password) => {
const auth = getNewUserAuth(app)();
if (isNonEmptyString(password)) {
if (isValidPassword(password)) {
auth.passwordHash = await app.crypto.hash(password);
auth.temporaryAccessHash = '';
auth.temporaryAccessId = '';
auth.temporaryAccessExpiryEpoch = 0;
return { auth };
}
throw new BadRequestError('Password does not meet requirements');
} else {
const tempAccess = await getTemporaryCode(app);
auth.temporaryAccessHash = tempAccess.temporaryAccessHash;
auth.temporaryAccessExpiryEpoch = tempAccess.temporaryAccessExpiryEpoch;
auth.passwordHash = '';
return ({
auth,
tempCode: tempAccess.tempCode,
temporaryAccessId: tempAccess.temporaryAccessId,
});
}
};

@@ -0,0 +1,49 @@
import {
getLock,
isNolock, releaseLock,
} from '../common/lock';
import { USERS_LOCK_FILE, USERS_LIST_FILE, getUserByName } from './authCommon';
import { apiWrapper, events } from '../common';
import { permission } from './permissions';
import { NotFoundError } from '../common/errors';
export const enableUser = app => async username => apiWrapper(
app,
events.authApi.enableUser,
permission.enableDisableUser.isAuthorized,
{ username },
_enableUser, app, username,
);
export const disableUser = app => async username => apiWrapper(
app,
events.authApi.disableUser,
permission.enableDisableUser.isAuthorized,
{ username },
_disableUser, app, username,
);
export const _enableUser = async (app, username) => await toggleUser(app, username, true);
export const _disableUser = async (app, username) => await toggleUser(app, username, false);
const toggleUser = async (app, username, enabled) => {
const lock = await getLock(app, USERS_LOCK_FILE, 1000, 1, 0);
const actionName = enabled ? 'enable' : 'disable';
if (isNolock(lock)) { throw new Error(`Could not ${actionName} user - cannot get lock`); }
try {
const users = await app.datastore.loadJson(USERS_LIST_FILE);
const user = getUserByName(users, username);
if (!user) { throw new NotFoundError(`Could not find user to ${actionName}`); }
if (user.enabled === !enabled) {
user.enabled = enabled;
await app.datastore.updateJson(USERS_LIST_FILE, users);
}
} finally {
await releaseLock(app, lock);
}
};

@@ -0,0 +1,45 @@
import {
filter, values, each, keys,
} from 'lodash/fp';
import { permission } from './permissions';
import {
getFlattenedHierarchy,
isIndex, isRecord,
} from '../templateApi/hierarchy';
import { $ } from '../common';
export const generateFullPermissions = (app) => {
const allNodes = getFlattenedHierarchy(app.hierarchy);
const accessLevel = { permissions: [] };
const recordNodes = $(allNodes, [
filter(isRecord),
]);
for (const n of recordNodes) {
permission.createRecord.add(n.nodeKey(), accessLevel);
permission.updateRecord.add(n.nodeKey(), accessLevel);
permission.deleteRecord.add(n.nodeKey(), accessLevel);
permission.readRecord.add(n.nodeKey(), accessLevel);
}
const indexNodes = $(allNodes, [
filter(isIndex),
]);
for (const n of indexNodes) {
permission.readIndex.add(n.nodeKey(), accessLevel);
}
for (const a of keys(app.actions)) {
permission.executeAction.add(a, accessLevel);
}
$(permission, [
values,
filter(p => !p.isNode),
each(p => p.add(accessLevel)),
]);
return accessLevel.permissions;
};

@@ -0,0 +1,5 @@
export const getNewAccessLevel = () => () => ({
name: '',
permissions: [],
default: false,
});

@@ -0,0 +1,31 @@
import { apiWrapperSync, events } from '../common';
import { permission } from './permissions';
export const getNewUser = app => () => apiWrapperSync(
app,
events.authApi.getNewUser,
permission.createUser.isAuthorized,
{},
_getNewUser, app,
);
export const _getNewUser = () => ({
name: '',
accessLevels: [],
enabled: true,
temporaryAccessId: '',
});
export const getNewUserAuth = app => () => apiWrapperSync(
app,
events.authApi.getNewUserAuth,
permission.createUser.isAuthorized,
{},
_getNewUserAuth, app,
);
export const _getNewUserAuth = () => ({
passwordHash: '',
temporaryAccessHash: '',
temporaryAccessExpiryEpoch: 0,
});

@@ -0,0 +1,19 @@
import { map } from 'lodash/fp';
import {
USERS_LIST_FILE,
stripUserOfSensitiveStuff,
} from './authCommon';
import { $, apiWrapper, events } from '../common';
import { permission } from './permissions';
export const getUsers = app => async () => apiWrapper(
app,
events.authApi.getUsers,
permission.listUsers.isAuthorized,
{},
_getUsers, app,
);
export const _getUsers = async app => $(await app.datastore.loadJson(USERS_LIST_FILE), [
map(stripUserOfSensitiveStuff),
]);

@@ -0,0 +1,48 @@
import {
authenticate,
authenticateTemporaryAccess,
} from './authenticate';
import { createTemporaryAccess } from './createTemporaryAccess';
import { createUser } from './createUser';
import { enableUser, disableUser } from './enableUser';
import { loadAccessLevels } from './loadAccessLevels';
import { getNewAccessLevel } from './getNewAccessLevel';
import { getNewUser, getNewUserAuth } from './getNewUser';
import { getUsers } from './getUsers';
import { isAuthorized } from './isAuthorized';
import { saveAccessLevels } from './saveAccessLevels';
import {
changeMyPassword,
scorePassword, setPasswordFromTemporaryCode,
isValidPassword,
} from './setPassword';
import { validateUser } from './validateUser';
import { validateAccessLevels } from './validateAccessLevels';
import { generateFullPermissions } from './generateFullPermissions';
import { setUserAccessLevels } from './setUserAccessLevels';
export const getAuthApi = app => ({
authenticate: authenticate(app),
authenticateTemporaryAccess: authenticateTemporaryAccess(app),
createTemporaryAccess: createTemporaryAccess(app),
createUser: createUser(app),
loadAccessLevels: loadAccessLevels(app),
enableUser: enableUser(app),
disableUser: disableUser(app),
getNewAccessLevel: getNewAccessLevel(app),
getNewUser: getNewUser(app),
getNewUserAuth: getNewUserAuth(app),
getUsers: getUsers(app),
saveAccessLevels: saveAccessLevels(app),
isAuthorized: isAuthorized(app),
changeMyPassword: changeMyPassword(app),
setPasswordFromTemporaryCode: setPasswordFromTemporaryCode(app),
scorePassword,
isValidPassword: isValidPassword(app),
validateUser: validateUser(app),
validateAccessLevels: validateAccessLevels(app),
generateFullPermissions: () => generateFullPermissions(app),
setUserAccessLevels: setUserAccessLevels(app),
});
export default getAuthApi;

@@ -0,0 +1,50 @@
import { values, includes, some } from 'lodash/fp';
import { permissionTypes } from './authCommon';
import {
$, isNothing, apiWrapperSync, events,
} from '../common';
import { getNodeByKeyOrNodeKey, isNode } from '../templateApi/hierarchy';
import { alwaysAuthorized } from './permissions';
export const isAuthorized = app => (permissionType, resourceKey) => apiWrapperSync(
app,
events.authApi.isAuthorized,
alwaysAuthorized,
{ resourceKey, permissionType },
_isAuthorized, app, permissionType, resourceKey,
);
export const _isAuthorized = (app, permissionType, resourceKey) => {
if (!app.user) {
return false;
}
const validType = $(permissionTypes, [
values,
includes(permissionType),
]);
if (!validType) {
return false;
}
const permMatchesResource = (userperm) => {
const nodeKey = isNothing(resourceKey)
? null
: isNode(app.hierarchy, resourceKey)
? getNodeByKeyOrNodeKey(
app.hierarchy, resourceKey,
).nodeKey()
: resourceKey;
return (userperm.type === permissionType)
&& (
isNothing(resourceKey)
|| nodeKey === userperm.nodeKey
);
};
return $(app.user.permissions, [
some(permMatchesResource),
]);
};

@@ -0,0 +1,13 @@
import { ACCESS_LEVELS_FILE } from './authCommon';
import { apiWrapper, events } from '../common';
import { permission } from './permissions';
export const loadAccessLevels = app => async () => apiWrapper(
app,
events.authApi.loadAccessLevels,
permission.listAccessLevels.isAuthorized,
{},
_loadAccessLevels, app,
);
export const _loadAccessLevels = async app => await app.datastore.loadJson(ACCESS_LEVELS_FILE);

@@ -0,0 +1,74 @@
import { permissionTypes } from './authCommon';
import { isAuthorized } from './isAuthorized';
export const temporaryAccessPermissions = () => ([{ type: permissionTypes.SET_PASSWORD }]);
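// node permissions are scoped to a key (a hierarchy node, or an action name in
// the case of executeAction); static permissions apply to the whole application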
const nodePermission = type => ({
add: (nodeKey, accessLevel) => accessLevel.permissions.push({ type, nodeKey }),
isAuthorized: resourceKey => app => isAuthorized(app)(type, resourceKey),
isNode: true,
get: nodeKey => ({ type, nodeKey }),
});
const staticPermission = type => ({
add: accessLevel => accessLevel.permissions.push({ type }),
isAuthorized: app => isAuthorized(app)(type),
isNode: false,
get: () => ({ type }),
});
const createRecord = nodePermission(permissionTypes.CREATE_RECORD);
const updateRecord = nodePermission(permissionTypes.UPDATE_RECORD);
const deleteRecord = nodePermission(permissionTypes.DELETE_RECORD);
const readRecord = nodePermission(permissionTypes.READ_RECORD);
const writeTemplates = staticPermission(permissionTypes.WRITE_TEMPLATES);
const createUser = staticPermission(permissionTypes.CREATE_USER);
const setPassword = staticPermission(permissionTypes.SET_PASSWORD);
const readIndex = nodePermission(permissionTypes.READ_INDEX);
const manageIndex = staticPermission(permissionTypes.MANAGE_INDEX);
const manageCollection = staticPermission(permissionTypes.MANAGE_COLLECTION);
const createTemporaryAccess = staticPermission(permissionTypes.CREATE_TEMPORARY_ACCESS);
const enableDisableUser = staticPermission(permissionTypes.ENABLE_DISABLE_USER);
const writeAccessLevels = staticPermission(permissionTypes.WRITE_ACCESS_LEVELS);
const listUsers = staticPermission(permissionTypes.LIST_USERS);
const listAccessLevels = staticPermission(permissionTypes.LIST_ACCESS_LEVELS);
const setUserAccessLevels = staticPermission(permissionTypes.SET_USER_ACCESS_LEVELS);
const executeAction = nodePermission(permissionTypes.EXECUTE_ACTION);
export const alwaysAuthorized = () => true;
export const permission = {
createRecord,
updateRecord,
deleteRecord,
readRecord,
writeTemplates,
createUser,
setPassword,
readIndex,
createTemporaryAccess,
enableDisableUser,
writeAccessLevels,
listUsers,
listAccessLevels,
manageIndex,
manageCollection,
executeAction,
setUserAccessLevels,
};

@@ -0,0 +1,49 @@
import { join, map } from 'lodash/fp';
import {
getLock, releaseLock, $,
isNolock, apiWrapper, events,
} from '../common';
import {
ACCESS_LEVELS_LOCK_FILE,
ACCESS_LEVELS_FILE,
} from './authCommon';
import { validateAccessLevels } from './validateAccessLevels';
import { permission } from './permissions';
export const saveAccessLevels = app => async accessLevels => apiWrapper(
app,
events.authApi.saveAccessLevels,
permission.writeAccessLevels.isAuthorized,
{ accessLevels },
_saveAccessLevels, app, accessLevels,
);
export const _saveAccessLevels = async (app, accessLevels) => {
const validationErrors = validateAccessLevels(app)(accessLevels.levels);
if (validationErrors.length > 0) {
const errs = $(validationErrors, [
map(e => e.error),
join(', '),
]);
throw new Error(
`Access Levels Invalid: ${errs}`,
);
}
const lock = await getLock(
app, ACCESS_LEVELS_LOCK_FILE, 2000, 2,
);
if (isNolock(lock)) { throw new Error('Could not get lock to save access levels'); }
try {
const existing = await app.datastore.loadJson(ACCESS_LEVELS_FILE);
if (existing.version !== accessLevels.version) { throw new Error('Access levels have already been updated since you loaded them'); }
accessLevels.version++;
await app.datastore.updateJson(ACCESS_LEVELS_FILE, accessLevels);
} finally {
await releaseLock(app, lock);
}
};

@@ -0,0 +1,153 @@
import { find } from 'lodash/fp';
import { userAuthFile, parseTemporaryCode } from './authCommon';
import {
isSomething, $, apiWrapper, apiWrapperSync, events,
} from '../common';
import { _getUsers } from './getUsers';
import { alwaysAuthorized } from './permissions';
export const isValidPassword = app => password => apiWrapperSync(
app,
events.authApi.isValidPassword,
alwaysAuthorized,
{ password },
_isValidPassword, app, password,
);
export const _isValidPassword = (app, password) => _scorePassword(password).score > 30;
export const changeMyPassword = app => async (currentPw, newpassword) => apiWrapper(
app,
events.authApi.changeMyPassword,
alwaysAuthorized,
{ currentPw, newpassword },
_changeMyPassword, app, currentPw, newpassword,
);
export const _changeMyPassword = async (app, currentPw, newpassword) => {
const existingAuth = await app.datastore.loadJson(
userAuthFile(app.user.name),
);
if (isSomething(existingAuth.passwordHash)) {
const verified = await app.crypto.verify(
existingAuth.passwordHash,
currentPw,
);
if (verified) {
await doSet(
app, existingAuth,
app.user.name, newpassword,
);
return true;
}
}
return false;
};
export const setPasswordFromTemporaryCode = app => async (tempCode, newpassword) => apiWrapper(
app,
events.authApi.setPasswordFromTemporaryCode,
alwaysAuthorized,
{ tempCode, newpassword },
_setPasswordFromTemporaryCode, app, tempCode, newpassword,
);
export const _setPasswordFromTemporaryCode = async (app, tempCode, newpassword) => {
const currentTime = await app.getEpochTime();
const temp = parseTemporaryCode(tempCode);
const user = $(await _getUsers(app), [
find(u => u.temporaryAccessId === temp.id),
]);
if (!user) { return false; }
const existingAuth = await app.datastore.loadJson(
userAuthFile(user.name),
);
if (isSomething(existingAuth.temporaryAccessHash)
&& existingAuth.temporaryAccessExpiryEpoch > currentTime) {
const verified = await app.crypto.verify(
existingAuth.temporaryAccessHash,
temp.code,
);
if (verified) {
await doSet(
app, existingAuth,
user.name, newpassword,
);
return true;
}
}
return false;
};
const doSet = async (app, auth, username, newpassword) => {
auth.temporaryAccessHash = '';
auth.temporaryAccessExpiryEpoch = 0;
auth.passwordHash = await app.crypto.hash(
newpassword,
);
await app.datastore.updateJson(
userAuthFile(username),
auth,
);
};
export const scorePassword = app => password => apiWrapperSync(
app,
events.authApi.scorePassword,
alwaysAuthorized,
{ password },
_scorePassword, password,
);
export const _scorePassword = (password) => {
// from https://stackoverflow.com/questions/948172/password-strength-meter
// thank you https://stackoverflow.com/users/46617/tm-lv
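// note: _isValidPassword (above) requires a score greater than 30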
let score = 0;
if (!password) { return score; }
// award every unique letter until 5 repetitions
const letters = {};
for (let i = 0; i < password.length; i++) {
letters[password[i]] = (letters[password[i]] || 0) + 1;
score += 5.0 / letters[password[i]];
}
// bonus points for mixing it up
const variations = {
digits: /\d/.test(password),
lower: /[a-z]/.test(password),
upper: /[A-Z]/.test(password),
nonWords: /\W/.test(password),
};
let variationCount = 0;
for (const check in variations) {
variationCount += variations[check] ? 1 : 0;
}
score += (variationCount - 1) * 10;
const strengthText = score > 80
? 'strong'
: score > 60
? 'good'
: score >= 30
? 'weak'
: 'very weak';
return {
score: parseInt(score),
strengthText,
};
};

@@ -0,0 +1,49 @@
import { difference, map, join } from 'lodash/fp';
import {
getLock, isNolock, releaseLock, $,
apiWrapper, events,
} from '../common';
import {
USERS_LOCK_FILE, ACCESS_LEVELS_FILE,
getUserByName, USERS_LIST_FILE,
} from './authCommon';
import { permission } from './permissions';
import { NotFoundError } from '../common/errors';
export const setUserAccessLevels = app => async (userName, accessLevels) => apiWrapper(
app,
events.authApi.setUserAccessLevels,
permission.setUserAccessLevels.isAuthorized,
{ userName, accessLevels },
_setUserAccessLevels, app, userName, accessLevels,
);
export const _setUserAccessLevels = async (app, username, accessLevels) => {
const lock = await getLock(app, USERS_LOCK_FILE, 1000, 1, 0);
const actualAccessLevels = $(
await app.datastore.loadJson(ACCESS_LEVELS_FILE),
[
l => l.levels,
map(l => l.name),
],
);
const missing = difference(accessLevels)(actualAccessLevels);
if (missing.length > 0) {
throw new Error(`Invalid access levels supplied: ${join(', ', missing)}`);
}
if (isNolock(lock)) { throw new Error('Could not set user access levels - cannot get lock'); }
try {
const users = await app.datastore.loadJson(USERS_LIST_FILE);
const user = getUserByName(users, username);
    if (!user) { throw new NotFoundError(`Could not find user with name: ${username}`); }
user.accessLevels = accessLevels;
await app.datastore.updateJson(USERS_LIST_FILE, users);
} finally {
releaseLock(app, lock);
}
};

View file

@ -0,0 +1,75 @@
import {
values, includes, map, concat, isEmpty, uniqWith, some,
flatten, filter,
} from 'lodash/fp';
import { applyRuleSet, makerule } from '../common/validationCommon';
import { permissionTypes } from './authCommon';
import {
$, isSomething, insensitiveEquals,
isNonEmptyString, apiWrapperSync, events,
} from '../common';
import { getNode } from '../templateApi/hierarchy';
import { alwaysAuthorized } from './permissions';
const isAllowedType = t => $(permissionTypes, [
values,
includes(t),
]);
const isRecordOrIndexType = t => some(p => p === t)([
permissionTypes.CREATE_RECORD,
permissionTypes.UPDATE_RECORD,
permissionTypes.DELETE_RECORD,
permissionTypes.READ_RECORD,
permissionTypes.READ_INDEX,
permissionTypes.EXECUTE_ACTION,
]);
const permissionRules = app => ([
makerule('type', 'type must be one of allowed types',
p => isAllowedType(p.type)),
makerule('nodeKey', 'record and index permissions must include a valid nodeKey',
p => (!isRecordOrIndexType(p.type))
|| isSomething(getNode(app.hierarchy, p.nodeKey))),
]);
const applyPermissionRules = app => applyRuleSet(permissionRules(app));
const accessLevelRules = allLevels => ([
makerule('name', 'name must be set',
l => isNonEmptyString(l.name)),
makerule('name', 'access level names must be unique',
l => isEmpty(l.name)
|| filter(a => insensitiveEquals(l.name, a.name))(allLevels).length === 1),
]);
const applyLevelRules = allLevels => applyRuleSet(accessLevelRules(allLevels));
export const validateAccessLevel = app => (allLevels, level) => {
const errs = $(level.permissions, [
map(applyPermissionRules(app)),
flatten,
concat(
applyLevelRules(allLevels)(level),
),
]);
return errs;
};
export const validateAccessLevels = app => allLevels => apiWrapperSync(
app,
events.authApi.validateAccessLevels,
alwaysAuthorized,
{ allLevels },
_validateAccessLevels, app, allLevels,
);
export const _validateAccessLevels = (app, allLevels) => $(allLevels, [
map(l => validateAccessLevel(app)(allLevels, l)),
flatten,
uniqWith((x, y) => x.field === y.field
&& x.item === y.item
&& x.error === y.error),
]);

View file

@ -0,0 +1,39 @@
import {
map, uniqWith,
flatten, filter,
} from 'lodash/fp';
import { applyRuleSet, makerule } from '../common/validationCommon';
import {
$, insensitiveEquals, apiWrapper, events,
isNonEmptyString, all,
} from '../common';
import { alwaysAuthorized } from './permissions';
const userRules = allUsers => [
makerule('name', 'username must be set',
u => isNonEmptyString(u.name)),
makerule('accessLevels', 'user must have at least one access level',
u => u.accessLevels.length > 0),
makerule('name', 'username must be unique',
u => filter(u2 => insensitiveEquals(u2.name, u.name))(allUsers).length === 1),
  makerule('accessLevels', 'access levels must only contain strings',
u => all(isNonEmptyString)(u.accessLevels)),
];
export const validateUser = () => (allUsers, user) => applyRuleSet(userRules(allUsers))(user);
export const validateUsers = app => allUsers => apiWrapper(
app,
events.authApi.validateUsers,
alwaysAuthorized,
{ allUsers },
_validateUsers, app, allUsers,
);
export const _validateUsers = (app, allUsers) => $(allUsers, [
map(l => validateUser(app)(allUsers, l)),
flatten,
uniqWith((x, y) => x.field === y.field
&& x.item === y.item
&& x.error === y.error),
]);

View file

@ -0,0 +1,79 @@
import { includes } from 'lodash/fp';
import { getNodeForCollectionPath } from '../templateApi/hierarchy';
import {
safeKey, apiWrapper,
events, joinKey,
} from '../common';
import { _deleteRecord } from '../recordApi/delete';
import { getAllIdsIterator, getAllIdsShardKey } from '../indexing/allIds';
import { permission } from '../authApi/permissions';
export const deleteCollection = (app, disableCleanup = false) => async key => apiWrapper(
app,
events.collectionApi.delete,
permission.manageCollection.isAuthorized,
{ key },
_deleteCollection, app, key, disableCleanup,
);
export const _deleteCollection = async (app, key, disableCleanup) => {
key = safeKey(key);
const node = getNodeForCollectionPath(app.hierarchy)(key);
await deleteRecords(app, key);
await deleteAllIdsFolders(app, node, key);
await deleteCollectionFolder(app, key);
if (!disableCleanup) { await app.cleanupTransactions(); }
};
const deleteCollectionFolder = async (app, key) => await app.datastore.deleteFolder(key);
const deleteAllIdsFolders = async (app, node, key) => {
await app.datastore.deleteFolder(
joinKey(
key, 'allids',
node.nodeId,
),
);
await app.datastore.deleteFolder(
joinKey(key, 'allids'),
);
};
const deleteRecords = async (app, key) => {
const deletedAllIdsShards = [];
const deleteAllIdsShard = async (recordId) => {
const shardKey = getAllIdsShardKey(
app.hierarchy, key, recordId,
);
if (includes(shardKey)(deletedAllIdsShards)) {
return;
}
deletedAllIdsShards.push(shardKey);
await app.datastore.deleteFile(shardKey);
};
const iterate = await getAllIdsIterator(app)(key);
let ids = await iterate();
while (!ids.done) {
if (ids.result.collectionKey === key) {
for (const id of ids.result.ids) {
await _deleteRecord(
app,
joinKey(key, id),
true,
);
await deleteAllIdsShard(id);
}
}
ids = await iterate();
}
};

View file

@ -0,0 +1,19 @@
import { getNodeForCollectionPath } from '../templateApi/hierarchy';
import {
isNothing, safeKey, apiWrapperSync, events,
} from '../common';
import { alwaysAuthorized } from '../authApi/permissions';
export const getAllowedRecordTypes = app => key => apiWrapperSync(
app,
events.collectionApi.getAllowedRecordTypes,
alwaysAuthorized,
{ key },
_getAllowedRecordTypes, app, key,
);
const _getAllowedRecordTypes = (app, key) => {
key = safeKey(key);
const node = getNodeForCollectionPath(app.hierarchy)(key);
return isNothing(node) ? [] : [node.name];
};

View file

@ -0,0 +1,11 @@
import { getAllIdsIterator } from '../indexing/allIds';
import { getAllowedRecordTypes } from './getAllowedRecordTypes';
import { deleteCollection } from './delete';
export const getCollectionApi = app => ({
getAllowedRecordTypes: getAllowedRecordTypes(app),
getAllIdsIterator: getAllIdsIterator(app),
delete: deleteCollection(app),
});
export default getCollectionApi;

View file

@ -0,0 +1,61 @@
import { filter } from 'lodash/fp';
import {
getFlattenedHierarchy,
isCollectionRecord,
isRoot,
getExactNodeForPath,
} from '../templateApi/hierarchy';
import { $, allTrue, joinKey } from '../common';
const ensureCollectionIsInitialised = async (datastore, node, parentKey) => {
if (!await datastore.exists(parentKey)) {
await datastore.createFolder(parentKey);
await datastore.createFolder(
joinKey(parentKey, 'allids'),
);
await datastore.createFolder(
joinKey(
parentKey,
'allids',
node.nodeId.toString(),
),
);
}
};
export const initialiseRootCollections = async (datastore, hierarchy) => {
const rootCollectionRecord = allTrue(
n => isRoot(n.parent()),
isCollectionRecord,
);
const flathierarchy = getFlattenedHierarchy(hierarchy);
const collectionRecords = $(flathierarchy, [
filter(rootCollectionRecord),
]);
for (const col of collectionRecords) {
await ensureCollectionIsInitialised(
datastore,
col,
col.collectionPathRegx(),
);
}
};
export const initialiseChildCollections = async (app, recordKey) => {
const childCollectionRecords = $(recordKey, [
getExactNodeForPath(app.hierarchy),
n => n.children,
filter(isCollectionRecord),
]);
for (const child of childCollectionRecords) {
await ensureCollectionIsInitialised(
app.datastore,
child,
joinKey(recordKey, child.collectionName),
);
}
};

View file

@ -0,0 +1,116 @@
import { cloneDeep, isUndefined } from 'lodash/fp';
import { generate } from 'shortid';
import { UnauthorisedError } from './errors';
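// wraps every api call: checks authorization, then publishes
// onBegin / onComplete / onError events around the wrapped function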
export const apiWrapper = async (app, eventNamespace, isAuthorized, eventContext, func, ...params) => {
pushCallStack(app, eventNamespace);
if (!isAuthorized(app)) {
handleNotAuthorized(app, eventContext, eventNamespace);
return;
}
const startDate = Date.now();
const elapsed = () => (Date.now() - startDate);
try {
await app.publish(
eventNamespace.onBegin,
eventContext,
);
const result = await func(...params);
await publishComplete(app, eventContext, eventNamespace, elapsed, result);
return result;
} catch (error) {
await publishError(app, eventContext, eventNamespace, elapsed, error);
throw error;
}
};
export const apiWrapperSync = (app, eventNamespace, isAuthorized, eventContext, func, ...params) => {
pushCallStack(app, eventNamespace);
if (!isAuthorized(app)) {
handleNotAuthorized(app, eventContext, eventNamespace);
return;
}
const startDate = Date.now();
const elapsed = () => (Date.now() - startDate);
try {
app.publish(
eventNamespace.onBegin,
eventContext,
);
const result = func(...params);
publishComplete(app, eventContext, eventNamespace, elapsed, result);
return result;
} catch (error) {
publishError(app, eventContext, eventNamespace, elapsed, error);
throw error;
}
};
const handleNotAuthorized = (app, eventContext, eventNamespace) => {
const err = new UnauthorisedError(`Unauthorized: ${eventNamespace}`);
publishError(app, eventContext, eventNamespace, () => 0, err);
throw err;
};
const pushCallStack = (app, eventNamespace, seedCallId) => {
const callId = generate();
const createCallStack = () => ({
seedCallId: !isUndefined(seedCallId)
? seedCallId
: callId,
threadCallId: callId,
stack: [],
});
if (isUndefined(app.calls)) {
app.calls = createCallStack();
}
app.calls.stack.push({
namespace: eventNamespace,
callId,
});
};
const popCallStack = (app) => {
app.calls.stack.pop();
if (app.calls.stack.length === 0) {
delete app.calls;
}
};
const publishError = async (app, eventContext, eventNamespace, elapsed, err) => {
const ctx = cloneDeep(eventContext);
ctx.error = err;
ctx.elapsed = elapsed();
await app.publish(
eventNamespace.onError,
ctx,
);
popCallStack(app);
};
const publishComplete = async (app, eventContext, eventNamespace, elapsed, result) => {
const endcontext = cloneDeep(eventContext);
endcontext.result = result;
endcontext.elapsed = elapsed();
await app.publish(
eventNamespace.onComplete,
endcontext,
);
popCallStack(app);
return result;
};
export default apiWrapper;

View file

@ -0,0 +1,30 @@
import {
compileExpression as cExp,
compileCode as cCode
} from '@nx-js/compiler-util';
export const compileCode = code => {
let func;
try {
func = cCode(code);
} catch(e) {
e.message = `Error compiling code : ${code} : ${e.message}`;
throw e;
}
return func;
}
export const compileExpression = code => {
let func;
try {
func = cExp(code);
} catch(e) {
e.message = `Error compiling expression : ${code} : ${e.message}`;
throw e;
}
return func;
}

View file

@ -0,0 +1,34 @@
export class BadRequestError extends Error {
constructor(message) {
super(message);
this.httpStatusCode = 400;
}
}
export class UnauthorisedError extends Error {
constructor(message) {
super(message);
this.httpStatusCode = 401;
}
}
export class ForbiddenError extends Error {
constructor(message) {
super(message);
this.httpStatusCode = 403;
}
}
export class NotFoundError extends Error {
constructor(message) {
super(message);
this.httpStatusCode = 404;
}
}
export class ConflictError extends Error {
constructor(message) {
super(message);
this.httpStatusCode = 409;
}
}

View file

@ -0,0 +1,93 @@
import { union, reduce } from 'lodash/fp';
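// every api method publishes at least onBegin, onComplete and onError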
const commonPlus = extra => union(['onBegin', 'onComplete', 'onError'])(extra);
const common = () => commonPlus([]);
const _events = {
recordApi: {
save: commonPlus([
'onInvalid',
'onRecordUpdated',
'onRecordCreated']),
delete: common(),
getContext: common(),
getNew: common(),
load: common(),
validate: common(),
uploadFile: common(),
downloadFile: common(),
},
indexApi: {
buildIndex: common(),
listItems: common(),
delete: common(),
aggregates: common(),
},
collectionApi: {
getAllowedRecordTypes: common(),
initialise: common(),
delete: common(),
},
authApi: {
authenticate: common(),
authenticateTemporaryAccess: common(),
createTemporaryAccess: common(),
createUser: common(),
enableUser: common(),
disableUser: common(),
loadAccessLevels: common(),
getNewAccessLevel: common(),
getNewUser: common(),
getNewUserAuth: common(),
getUsers: common(),
saveAccessLevels: common(),
isAuthorized: common(),
changeMyPassword: common(),
setPasswordFromTemporaryCode: common(),
scorePassword: common(),
isValidPassword: common(),
validateUser: common(),
validateAccessLevels: common(),
setUserAccessLevels: common(),
},
templateApi: {
saveApplicationHierarchy: common(),
saveActionsAndTriggers: common(),
},
actionsApi: {
execute: common(),
},
};
const _eventsList = [];
const makeEvent = (area, method, name) => `${area}:${method}:${name}`;
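// e.g. events.recordApi.save.onComplete === 'recordApi:save:onComplete'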
for (const areaKey in _events) {
for (const methodKey in _events[areaKey]) {
_events[areaKey][methodKey] = reduce((obj, s) => {
obj[s] = makeEvent(areaKey, methodKey, s);
return obj;
},
{})(_events[areaKey][methodKey]);
}
}
for (const areaKey in _events) {
for (const methodKey in _events[areaKey]) {
for (const name in _events[areaKey][methodKey]) {
_eventsList.push(
_events[areaKey][methodKey][name],
);
}
}
}
export const events = _events;
export const eventsList = _eventsList;
export default { events: _events, eventsList: _eventsList };

View file

@ -0,0 +1,265 @@
import {
isUndefined, isNaN, isNull,
reduce, constant, head, isEmpty,
tail, findIndex, startsWith, join,
dropRight, flow, takeRight, trim,
split, includes, replace, isArray,
isString, isInteger, isDate, toNumber,
} from 'lodash';
import { some } from 'lodash/fp';
import { events, eventsList } from './events';
import { apiWrapper } from './apiWrapper';
import {
getLock, NO_LOCK,
isNolock
} from './lock';
// this is the combinator function
export const $$ = (...funcs) => arg => flow(funcs)(arg);
// this is the pipe function
export const $ = (arg, funcs) => $$(...funcs)(arg);
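// e.g. $('/customers/1234', [splitKey, head]) === 'customers'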
export const keySep = '/';
const trimKeySep = str => trim(str, keySep);
const splitByKeySep = str => split(str, keySep);
export const safeKey = key => replace(`${keySep}${trimKeySep(key)}`, `${keySep}${keySep}`, keySep);
export const joinKey = (...strs) => {
  const paramsOrArray = strs.length === 1 && isArray(strs[0])
? strs[0] : strs;
return safeKey(join(paramsOrArray, keySep));
};
export const splitKey = $$(trimKeySep, splitByKeySep);
export const getDirFomKey = $$(splitKey, dropRight, p => joinKey(...p));
export const getFileFromKey = $$(splitKey, takeRight, head);
export const configFolder = `${keySep}.config`;
export const fieldDefinitions = joinKey(configFolder, 'fields.json');
export const templateDefinitions = joinKey(configFolder, 'templates.json');
export const appDefinitionFile = joinKey(configFolder, 'appDefinition.json');
export const dirIndex = folderPath => joinKey(configFolder, 'dir', ...splitKey(folderPath), 'dir.idx');
export const getIndexKeyFromFileKey = $$(getDirFomKey, dirIndex);
export const ifExists = (val, exists, notExists) => (isUndefined(val)
? isUndefined(notExists) ? (() => { })() : notExists()
: exists());
export const getOrDefault = (val, defaultVal) => ifExists(val, () => val, () => defaultVal);
export const not = func => val => !func(val);
export const isDefined = not(isUndefined);
export const isNonNull = not(isNull);
export const isNotNaN = not(isNaN);
export const allTrue = (...funcArgs) => val => reduce(funcArgs,
(result, conditionFunc) => (isNull(result) || result == true) && conditionFunc(val),
null);
export const anyTrue = (...funcArgs) => val => reduce(funcArgs,
(result, conditionFunc) => result == true || conditionFunc(val),
null);
export const insensitiveEquals = (str1, str2) => str1.trim().toLowerCase() === str2.trim().toLowerCase();
export const isSomething = allTrue(isDefined, isNonNull, isNotNaN);
export const isNothing = not(isSomething);
export const isNothingOrEmpty = v => isNothing(v) || isEmpty(v);
export const somethingOrGetDefault = getDefaultFunc => val => (isSomething(val) ? val : getDefaultFunc());
export const somethingOrDefault = (val, defaultVal) => somethingOrGetDefault(constant(defaultVal))(val);
export const mapIfSomethingOrDefault = (mapFunc, defaultVal) => val => (isSomething(val) ? mapFunc(val) : defaultVal);
export const mapIfSomethingOrBlank = mapFunc => mapIfSomethingOrDefault(mapFunc, '');
export const none = predicate => collection => !some(predicate)(collection);
export const all = predicate => collection => none(v => !predicate(v))(collection);
export const isNotEmpty = ob => !isEmpty(ob);
export const isAsync = fn => fn.constructor.name === 'AsyncFunction';
export const isNonEmptyArray = allTrue(isArray, isNotEmpty);
export const isNonEmptyString = allTrue(isString, isNotEmpty);
export const tryOr = failFunc => (func, ...args) => {
try {
    return func(...args);
} catch (_) {
return failFunc();
}
};
export const tryAwaitOr = failFunc => async (func, ...args) => {
try {
    return await func(...args);
} catch (_) {
return await failFunc();
}
};
export const defineError = (func, errorPrefix) => {
try {
return func();
} catch (err) {
err.message = `${errorPrefix} : ${err.message}`;
throw err;
}
};
export const tryOrIgnore = tryOr(() => { });
export const tryAwaitOrIgnore = tryAwaitOr(async () => { });
export const causesException = (func) => {
try {
func();
return false;
} catch (e) {
return true;
}
};
export const executesWithoutException = func => !causesException(func);
export const handleErrorWith = returnValInError => tryOr(constant(returnValInError));
export const handleErrorWithUndefined = handleErrorWith(undefined);
export const switchCase = (...cases) => (value) => {
const nextCase = () => head(cases)[0](value);
const nextResult = () => head(cases)[1](value);
if (isEmpty(cases)) return; // undefined
if (nextCase() === true) return nextResult();
return switchCase(...tail(cases))(value);
};
export const isValue = val1 => val2 => (val1 === val2);
export const isOneOf = (...vals) => val => includes(vals, val);
export const defaultCase = constant(true);
export const memberMatches = (member, match) => obj => match(obj[member]);
export const StartsWith = searchFor => searchIn => startsWith(searchIn, searchFor);
export const contains = val => array => (findIndex(array, v => v === val) > -1);
export const getHashCode = (s) => {
let hash = 0; let i; let char; let
l;
if (s.length == 0) return hash;
for (i = 0, l = s.length; i < l; i++) {
char = s.charCodeAt(i);
hash = ((hash << 5) - hash) + char;
hash |= 0; // Convert to 32bit integer
}
  // converting to string, but don't want a "-" prefix
if (hash < 0) { return `n${(hash * -1).toString()}`; }
return hash.toString();
};
// thanks to https://blog.grossman.io/how-to-write-async-await-without-try-catch-blocks-in-javascript/
export const awEx = async (promise) => {
try {
const result = await promise;
return [undefined, result];
} catch (error) {
return [error, undefined];
}
};
export const isSafeInteger = n => isInteger(n)
&& n <= Number.MAX_SAFE_INTEGER
&& n >= 0 - Number.MAX_SAFE_INTEGER;
export const toDateOrNull = s => (isNull(s) ? null
: isDate(s) ? s : new Date(s));
export const toBoolOrNull = s => (isNull(s) ? null
: s === 'true' || s === true);
export const toNumberOrNull = s => (isNull(s) ? null
: toNumber(s));
export const isArrayOfString = opts => isArray(opts) && all(isString)(opts);
export const pause = async duration => new Promise(res => setTimeout(res, duration));
export const retry = async (fn, retries, delay, ...args) => {
try {
return await fn(...args);
} catch (err) {
if (retries > 1) {
return await pause(delay).then(async () => await retry(fn, (retries - 1), delay, ...args));
}
throw err;
}
};
export { events } from './events';
export { apiWrapper, apiWrapperSync } from './apiWrapper';
export {
getLock, NO_LOCK, releaseLock,
extendLock, isNolock,
} from './lock';
export default {
ifExists,
getOrDefault,
isDefined,
isNonNull,
isNotNaN,
allTrue,
isSomething,
mapIfSomethingOrDefault,
mapIfSomethingOrBlank,
configFolder,
fieldDefinitions,
isNothing,
not,
switchCase,
defaultCase,
StartsWith,
contains,
templateDefinitions,
handleErrorWith,
handleErrorWithUndefined,
tryOr,
tryOrIgnore,
tryAwaitOr,
tryAwaitOrIgnore,
dirIndex,
keySep,
$,
$$,
getDirFomKey,
getFileFromKey,
splitKey,
somethingOrDefault,
getIndexKeyFromFileKey,
joinKey,
somethingOrGetDefault,
appDefinitionFile,
isValue,
all,
isOneOf,
memberMatches,
defineError,
anyTrue,
isNonEmptyArray,
causesException,
executesWithoutException,
none,
getHashCode,
awEx,
apiWrapper,
events,
eventsList,
isNothingOrEmpty,
isSafeInteger,
toNumber,
toDate: toDateOrNull,
toBool: toBoolOrNull,
isArrayOfString,
getLock,
NO_LOCK,
isNolock,
insensitiveEquals,
pause,
retry,
};

View file

@ -0,0 +1,99 @@
import { split } from 'lodash/fp';
import { $ } from './index';
const lockOverlapMilliseconds = 10;
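// a lock is taken by creating the lock file (datastore.createFile is expected
// to throw if it already exists); on failure, the holder's timeout is checked
// and a stale lock file is deleted before retrying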
export const getLock = async (app, lockFile, timeoutMilliseconds, maxLockRetries, retryCount = 0) => {
try {
const timeout = (await app.getEpochTime())
+ timeoutMilliseconds;
const lock = {
timeout,
key: lockFile,
totalTimeout: timeoutMilliseconds,
};
await app.datastore.createFile(
lockFile,
getLockFileContent(
lock.totalTimeout,
lock.timeout,
),
);
return lock;
} catch (e) {
if (retryCount == maxLockRetries) { return NO_LOCK; }
const lock = parseLockFileContent(
lockFile,
await app.datastore.loadFile(lockFile),
);
const currentEpochTime = await app.getEpochTime();
if (currentEpochTime < lock.timeout) {
return NO_LOCK;
}
try {
await app.datastore.deleteFile(lockFile);
} catch (_) {
//empty
}
await sleepForRetry();
return await getLock(
app, lockFile, timeoutMilliseconds,
maxLockRetries, retryCount + 1,
);
}
};
export const getLockFileContent = (totalTimeout, epochTime) => `${totalTimeout}:${epochTime.toString()}`;
const parseLockFileContent = (key, content) => $(content, [
split(':'),
parts => ({
    totalTimeout: Number(parts[0]),
    timeout: Number(parts[1]),
key,
}),
]);
export const releaseLock = async (app, lock) => {
const currentEpochTime = await app.getEpochTime();
  // only release if not timed out
if (currentEpochTime < (lock.timeout - lockOverlapMilliseconds)) {
try {
await app.datastore.deleteFile(lock.key);
} catch (_) {
//empty
}
}
};
export const extendLock = async (app, lock) => {
const currentEpochTime = await app.getEpochTime();
  // only extend if not timed out
if (currentEpochTime < (lock.timeout - lockOverlapMilliseconds)) {
try {
      lock.timeout = currentEpochTime + lock.totalTimeout;
await app.datastore.updateFile(
lock.key,
getLockFileContent(lock.totalTimeout, lock.timeout),
);
return lock;
} catch (_) {
//empty
}
}
return NO_LOCK;
};
export const NO_LOCK = 'no lock';
export const isNolock = id => id === NO_LOCK;
const sleepForRetry = () => new Promise(resolve => setTimeout(resolve, lockOverlapMilliseconds));

View file

@ -0,0 +1,17 @@
import { filter, map } from 'lodash/fp';
import { $, isSomething } from './index';
export const stringNotEmpty = s => isSomething(s) && s.trim().length > 0;
export const makerule = (field, error, isValid) => ({ field, error, isValid });
export const validationError = (rule, item) => ({ ...rule, item });
export const applyRuleSet = ruleSet => itemToValidate => $(ruleSet, [
map(applyRule(itemToValidate)),
filter(isSomething),
]);
export const applyRule = itemTovalidate => rule => (rule.isValid(itemTovalidate)
? null
: validationError(rule, itemTovalidate));

113
packages/core/src/index.js Normal file
View file

@ -0,0 +1,113 @@
import getRecordApi from "./recordApi";
import getCollectionApi from "./collectionApi";
import getIndexApi from "./indexApi";
import getTemplateApi from "./templateApi";
import getAuthApi from "./authApi";
import getActionsApi from "./actionsApi";
import {setupDatastore, createEventAggregator} from "./appInitialise";
import {initialiseActions} from "./actionsApi/initialise"
import {isSomething} from "./common";
import {cleanup} from "./transactions/cleanup";
import {generateFullPermissions} from "./authApi/generateFullPermissions";
import {getApplicationDefinition} from "./templateApi/getApplicationDefinition";
import common from "./common";
import {getBehaviourSources} from "./templateApi/getBehaviourSources";
import hierarchy from "./templateApi/hierarchy";
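// typical usage (sketch): create the apis over a datastore, then set the user context
//   const apis = await getAppApis(datastore);
//   apis.withFullAccess(); // or: await apis.authenticateAs(username, password)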
export const getAppApis = async (store, behaviourSources = null,
cleanupTransactions = null,
getEpochTime = null,
crypto = null,
appDefinition = null) => {
store = setupDatastore(store);
if(!appDefinition)
appDefinition = await getApplicationDefinition(store)();
if(!behaviourSources)
behaviourSources = await getBehaviourSources(store);
const eventAggregator = createEventAggregator();
const app = {
datastore:store,
crypto,
publish:eventAggregator.publish,
hierarchy:appDefinition.hierarchy,
actions:appDefinition.actions
};
const templateApi = getTemplateApi(app);
app.cleanupTransactions = isSomething(cleanupTransactions)
? cleanupTransactions
: async () => await cleanup(app);
app.getEpochTime = isSomething(getEpochTime)
? getEpochTime
: async () => (new Date()).getTime();
const recordApi = getRecordApi(app);
const collectionApi = getCollectionApi(app);
const indexApi = getIndexApi(app);
const authApi = getAuthApi(app);
const actionsApi = getActionsApi(app);
const authenticateAs = async (username, password) => {
app.user = await authApi.authenticate(username, password);
};
const withFullAccess = () => {
app.user = {
name: "app",
permissions : generateFullPermissions(app),
isUser:false,
temp:false
}
};
const asUser = (user) => {
app.user = user
};
let apis = {
recordApi,
templateApi,
collectionApi,
indexApi,
authApi,
actionsApi,
subscribe: eventAggregator.subscribe,
authenticateAs,
withFullAccess,
asUser
};
apis.actions = initialiseActions(
eventAggregator.subscribe,
behaviourSources,
appDefinition.actions,
appDefinition.triggers,
apis);
return apis;
};
export {events, eventsList} from "./common/events";
export {getTemplateApi} from "./templateApi";
export {getRecordApi} from "./recordApi";
export {getCollectionApi} from "./collectionApi";
export {getAuthApi} from "./authApi";
export {getIndexApi} from "./indexApi";
export {setupDatastore} from "./appInitialise";
export {getActionsApi} from "./actionsApi";
export {initialiseData} from "./appInitialise/initialiseData";
export {getDatabaseManager} from "./appInitialise/databaseManager";
export {hierarchy};
export {common};
export default getAppApis;

View file

@ -0,0 +1,168 @@
import { has, isNumber, isUndefined } from 'lodash/fp';
import { compileExpression, compileCode } from '@nx-js/compiler-util';
import {
safeKey, apiWrapper,
events, isNonEmptyString,
} from '../common';
import { iterateIndex } from '../indexing/read';
import {
getUnshardedIndexDataKey,
getShardKeysInRange,
} from '../indexing/sharding';
import {
getExactNodeForPath, isIndex,
isShardedIndex,
} from '../templateApi/hierarchy';
import { CONTINUE_READING_RECORDS } from '../indexing/serializer';
import { permission } from '../authApi/permissions';
import { BadRequestError } from '../common/errors';
export const aggregates = app => async (indexKey, rangeStartParams = null, rangeEndParams = null) => apiWrapper(
app,
events.indexApi.aggregates,
permission.readIndex.isAuthorized(indexKey),
{ indexKey, rangeStartParams, rangeEndParams },
_aggregates, app, indexKey, rangeStartParams, rangeEndParams,
);
const _aggregates = async (app, indexKey, rangeStartParams, rangeEndParams) => {
indexKey = safeKey(indexKey);
const indexNode = getExactNodeForPath(app.hierarchy)(indexKey);
if (!isIndex(indexNode)) { throw new BadRequestError('supplied key is not an index'); }
if (isShardedIndex(indexNode)) {
const shardKeys = await getShardKeysInRange(
app, indexKey, rangeStartParams, rangeEndParams,
);
let aggregateResult = null;
for (const k of shardKeys) {
const shardResult = await getAggregates(app.hierarchy, app.datastore, indexNode, k);
if (aggregateResult === null) {
aggregateResult = shardResult;
} else {
aggregateResult = mergeShardAggregate(
aggregateResult,
shardResult,
);
}
}
return aggregateResult;
}
return await getAggregates(
app.hierarchy,
app.datastore,
indexNode,
getUnshardedIndexDataKey(indexKey),
);
};
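// counts and sums add across shards; max/min compare; mean is recomputed from the merged sum and count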
const mergeShardAggregate = (totals, shard) => {
const mergeGrouping = (tot, shr) => {
tot.count += shr.count;
for (const aggName in tot) {
if (aggName === 'count') continue;
const totagg = tot[aggName];
const shragg = shr[aggName];
totagg.sum += shragg.sum;
totagg.max = totagg.max > shragg.max
? totagg.max
: shragg.max;
totagg.min = totagg.min < shragg.min
? totagg.min
: shragg.min;
totagg.mean = totagg.sum / tot.count;
}
return tot;
};
for (const aggGroupDef in totals) {
for (const grouping in shard[aggGroupDef]) {
const groupingTotal = totals[aggGroupDef][grouping];
totals[aggGroupDef][grouping] = isUndefined(groupingTotal)
? shard[aggGroupDef][grouping]
: mergeGrouping(
totals[aggGroupDef][grouping],
shard[aggGroupDef][grouping],
);
}
}
return totals;
};
const getAggregates = async (hierarchy, datastore, index, indexedDataKey) => {
const aggregateResult = {};
const doRead = iterateIndex(
async item => {
applyItemToAggregateResult(
index, aggregateResult, item,
);
return CONTINUE_READING_RECORDS;
},
async () => aggregateResult
);
return await doRead(hierarchy, datastore, index, indexedDataKey);
};
const applyItemToAggregateResult = (indexNode, result, item) => {
const getInitialAggregateResult = () => ({
sum: 0, mean: null, max: null, min: null,
});
const applyAggregateResult = (agg, existing, count) => {
const value = compileCode(agg.aggregatedValue)({ record: item });
if (!isNumber(value)) return existing;
existing.sum += value;
existing.max = value > existing.max || existing.max === null
? value
: existing.max;
existing.min = value < existing.min || existing.min === null
? value
: existing.min;
existing.mean = existing.sum / count;
return existing;
};
for (const aggGroup of indexNode.aggregateGroups) {
if (!has(aggGroup.name)(result)) {
result[aggGroup.name] = {};
}
const thisGroupResult = result[aggGroup.name];
if (isNonEmptyString(aggGroup.condition)) {
if (!compileExpression(aggGroup.condition)({ record: item })) {
continue;
}
}
let group = isNonEmptyString(aggGroup.groupBy)
? compileCode(aggGroup.groupBy)({ record: item })
: 'all';
if (!isNonEmptyString(group)) {
group = '(none)';
}
if (!has(group)(thisGroupResult)) {
thisGroupResult[group] = { count: 0 };
for (const agg of aggGroup.aggregates) {
thisGroupResult[group][agg.name] = getInitialAggregateResult();
}
}
thisGroupResult[group].count++;
for (const agg of aggGroup.aggregates) {
const existingValues = thisGroupResult[group][agg.name];
thisGroupResult[group][agg.name] = applyAggregateResult(
agg, existingValues,
thisGroupResult[group].count,
);
}
}
};

View file

@ -0,0 +1,198 @@
import {
  find, filter, map,
  includes, some,
} from 'lodash/fp';
import { getAllIdsIterator } from '../indexing/allIds';
import {
getFlattenedHierarchy, getRecordNodeById,
getCollectionNodeByKeyOrNodeKey, getNode, isIndex,
isRecord, isDecendant, getAllowedRecordNodesForIndex,
fieldReversesReferenceToIndex,
} from '../templateApi/hierarchy';
import {
joinKey, apiWrapper, events, $, allTrue,
} from '../common';
import {
createBuildIndexFolder,
transactionForBuildIndex,
} from '../transactions/create';
import { permission } from '../authApi/permissions';
import { BadRequestError } from '../common/errors';
/** rebuilds an index
* @param {object} app - the application container
 * @param {string} indexNodeKey - node key of the index to build
*/
export const buildIndex = app => async indexNodeKey => apiWrapper(
app,
events.indexApi.buildIndex,
permission.manageIndex.isAuthorized,
{ indexNodeKey },
_buildIndex, app, indexNodeKey,
);
const _buildIndex = async (app, indexNodeKey) => {
const indexNode = getNode(app.hierarchy, indexNodeKey);
await createBuildIndexFolder(app.datastore, indexNodeKey);
  if (!isIndex(indexNode)) { throw new BadRequestError('BuildIndex: must supply an index node'); }
if (indexNode.indexType === 'reference') {
await buildReverseReferenceIndex(
app, indexNode,
);
} else {
    await buildHierarchicalIndex(
app, indexNode,
);
}
await app.cleanupTransactions();
};
const buildReverseReferenceIndex = async (app, indexNode) => {
// Iterate through all referencING records,
// and update referenced index for each record
let recordCount = 0;
const referencingNodes = $(app.hierarchy, [
getFlattenedHierarchy,
filter(n => isRecord(n)
&& some(fieldReversesReferenceToIndex(indexNode))(n.fields)),
]);
const createTransactionsForReferencingNode = async (referencingNode) => {
const iterateReferencingNodes = await getAllIdsIterator(app)(referencingNode.collectionNodeKey());
let referencingIdIterator = await iterateReferencingNodes();
while (!referencingIdIterator.done) {
const { result } = referencingIdIterator;
for (const id of result.ids) {
const recordKey = joinKey(result.collectionKey, id);
await transactionForBuildIndex(app, indexNode.nodeKey(), recordKey, recordCount);
recordCount++;
}
referencingIdIterator = await iterateReferencingNodes();
}
};
for (const referencingNode of referencingNodes) {
await createTransactionsForReferencingNode(referencingNode);
}
};
const getAllowedParentCollectionNodes = (hierarchy, indexNode) => $(getAllowedRecordNodesForIndex(hierarchy, indexNode), [
map(n => n.parent()),
]);
const buildHierarchicalIndex = async (app, indexNode) => {
let recordCount = 0;
const createTransactionsForIds = async (collectionKey, ids) => {
for (const recordId of ids) {
const recordKey = joinKey(collectionKey, recordId);
const recordNode = getRecordNodeById(
app.hierarchy,
recordId,
);
if (recordNodeApplies(indexNode)(recordNode)) {
await transactionForBuildIndex(
app, indexNode.nodeKey(),
recordKey, recordCount,
);
recordCount++;
}
}
};
const collectionRecords = getAllowedRecordNodesForIndex(app.hierarchy, indexNode);
for (const targetCollectionRecordNode of collectionRecords) {
const allIdsIterator = await getAllIdsIterator(app)(targetCollectionRecordNode.collectionNodeKey());
let allIds = await allIdsIterator();
while (allIds.done === false) {
await createTransactionsForIds(
allIds.result.collectionKey,
allIds.result.ids,
);
allIds = await allIdsIterator();
}
}
return recordCount;
};
const chooseChildRecordNodeByKey = (collectionNode, recordId) => find(c => recordId.startsWith(c.nodeId))(collectionNode.children);
const recordNodeApplies = indexNode => recordNode => includes(recordNode.nodeId)(indexNode.allowedRecordNodeIds);
const hasApplicableDecendant = (hierarchy, ancestorNode, indexNode) => $(hierarchy, [
getFlattenedHierarchy,
filter(
allTrue(
isRecord,
isDecendant(ancestorNode),
recordNodeApplies(indexNode),
),
),
]);
const applyAllDecendantRecords = async (app, collection_Key_or_NodeKey,
indexNode, indexKey, currentIndexedData,
currentIndexedDataKey, recordCount = 0) => {
const collectionNode = getCollectionNodeByKeyOrNodeKey(
app.hierarchy,
collection_Key_or_NodeKey,
);
const allIdsIterator = await getAllIdsIterator(app)(collection_Key_or_NodeKey);
const createTransactionsForIds = async (collectionKey, allIds) => {
for (const recordId of allIds) {
const recordKey = joinKey(collectionKey, recordId);
const recordNode = chooseChildRecordNodeByKey(
collectionNode,
recordId,
);
if (recordNodeApplies(indexNode)(recordNode)) {
await transactionForBuildIndex(
app, indexNode.nodeKey(),
recordKey, recordCount,
);
recordCount++;
}
if (hasApplicableDecendant(app.hierarchy, recordNode, indexNode)) {
for (const childCollectionNode of recordNode.children) {
recordCount = await applyAllDecendantRecords(
app,
joinKey(recordKey, childCollectionNode.collectionName),
indexNode, indexKey, currentIndexedData,
currentIndexedDataKey, recordCount,
);
}
}
}
};
let allIds = await allIdsIterator();
while (allIds.done === false) {
await createTransactionsForIds(
allIds.result.collectionKey,
allIds.result.ids,
);
allIds = await allIdsIterator();
}
return recordCount;
};
export default buildIndex;

View file

@ -0,0 +1,43 @@
import {
tryAwaitOrIgnore,
} from '../common';
import {
isIndex, isShardedIndex,
getExactNodeForPath,
} from '../templateApi/hierarchy';
import {
getAllShardKeys, getShardMapKey,
getUnshardedIndexDataKey,
} from '../indexing/sharding';
export const _deleteIndex = async (app, indexKey, includeFolder) => {
const indexNode = getExactNodeForPath(app.hierarchy)(indexKey);
if (!isIndex(indexNode)) { throw new Error('Supplied key is not an index'); }
if (isShardedIndex(indexNode)) {
const shardKeys = await getAllShardKeys(app, indexKey);
for (const k of shardKeys) {
      await tryAwaitOrIgnore(
        () => app.datastore.deleteFile(k),
      );
}
    await tryAwaitOrIgnore(
      () => app.datastore.deleteFile(
        getShardMapKey(indexKey),
      ),
    );
} else {
    await tryAwaitOrIgnore(
      () => app.datastore.deleteFile(
        getUnshardedIndexDataKey(indexKey),
      ),
    );
}
if (includeFolder) {
    await tryAwaitOrIgnore(
      () => app.datastore.deleteFolder(indexKey),
    );
}
};

View file

@ -0,0 +1,11 @@
import { buildIndex } from './buildIndex';
import { listItems } from './listItems';
import { aggregates } from './aggregates';
export const getIndexApi = app => ({
listItems: listItems(app),
buildIndex: buildIndex(app),
aggregates: aggregates(app),
});
export default getIndexApi;

View file

@ -0,0 +1,66 @@
import { flatten, merge } from 'lodash/fp';
import {
safeKey, apiWrapper, $,
events, isNonEmptyString,
} from '../common';
import { readIndex, searchIndex } from '../indexing/read';
import {
getUnshardedIndexDataKey,
getShardKeysInRange,
} from '../indexing/sharding';
import {
getExactNodeForPath, isIndex,
isShardedIndex,
} from '../templateApi/hierarchy';
import { permission } from '../authApi/permissions';
export const listItems = app => async (indexKey, options) => apiWrapper(
app,
events.indexApi.listItems,
permission.readIndex.isAuthorized(indexKey),
{ indexKey, options },
_listItems, app, indexKey, options,
);
const defaultOptions = { rangeStartParams: null, rangeEndParams: null, searchPhrase: null };
const _listItems = async (app, indexKey, options = defaultOptions) => {
const { searchPhrase, rangeStartParams, rangeEndParams } = $({}, [
merge(options),
merge(defaultOptions),
]);
const getItems = async key => (isNonEmptyString(searchPhrase)
? await searchIndex(
app.hierarchy,
app.datastore,
indexNode,
key,
searchPhrase,
)
: await readIndex(
app.hierarchy,
app.datastore,
indexNode,
key,
));
indexKey = safeKey(indexKey);
const indexNode = getExactNodeForPath(app.hierarchy)(indexKey);
if (!isIndex(indexNode)) { throw new Error('supplied key is not an index'); }
if (isShardedIndex(indexNode)) {
const shardKeys = await getShardKeysInRange(
app, indexKey, rangeStartParams, rangeEndParams,
);
const items = [];
for (const k of shardKeys) {
items.push(await getItems(k));
}
return flatten(items);
}
return await getItems(
getUnshardedIndexDataKey(indexKey),
);
};

View file

@ -0,0 +1,229 @@
import {
join, pull,
map, flatten, orderBy,
filter, find,
} from 'lodash/fp';
import {
getParentKey,
getFlattenedHierarchy,
getCollectionNodeByKeyOrNodeKey, getNodeForCollectionPath,
isCollectionRecord, isAncestor,
} from '../templateApi/hierarchy';
import { joinKey, safeKey, $ } from '../common';
const allIdChars = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_-';
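// ids are distributed across 'allids' shard files by first character;
// e.g. a shard factor of 4 splits the 64 id characters into 4 shards of 16 each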
const allIdsStringsForFactor = (collectionNode) => {
const factor = collectionNode.allidsShardFactor;
const charRangePerShard = 64 / factor;
const allIdStrings = [];
let index = 0;
let currentIdsShard = '';
while (index < 64) {
currentIdsShard += allIdChars[index];
if ((index + 1) % charRangePerShard === 0) {
allIdStrings.push(currentIdsShard);
currentIdsShard = '';
}
index++;
}
return allIdStrings;
};
export const getAllIdsShardNames = (appHierarchy, collectionKey) => {
const collectionRecordNode = getNodeForCollectionPath(appHierarchy)(collectionKey);
return $(collectionRecordNode, [
c => [c.nodeId],
map(i => map(c => _allIdsShardKey(collectionKey, i, c))(allIdsStringsForFactor(collectionRecordNode))),
flatten,
]);
};
const _allIdsShardKey = (collectionKey, childNo, shardKey) => joinKey(
collectionKey,
'allids',
childNo,
shardKey,
);
export const getAllIdsShardKey = (appHierarchy, collectionKey, recordId) => {
const indexOfFirstDash = recordId.indexOf('-');
const collectionNode = getNodeForCollectionPath(appHierarchy)(collectionKey);
const idFirstChar = recordId[indexOfFirstDash + 1];
const allIdsShardId = $(collectionNode, [
allIdsStringsForFactor,
find(i => i.includes(idFirstChar)),
]);
return _allIdsShardKey(
collectionKey,
recordId.slice(0, indexOfFirstDash),
allIdsShardId,
);
};
const getOrCreateShardFile = async (datastore, allIdsKey) => {
try {
return await datastore.loadFile(allIdsKey);
} catch (eLoad) {
try {
await datastore.createFile(allIdsKey, '');
return '';
} catch (eCreate) {
throw new Error(
`Error loading, then creating allIds ${allIdsKey
} : LOAD : ${eLoad.message
} : CREATE : ${eCreate}`,
);
}
}
};
const getShardFile = async (datastore, allIdsKey) => {
try {
return await datastore.loadFile(allIdsKey);
} catch (eLoad) {
return '';
}
};
export const addToAllIds = (appHierarchy, datastore) => async (record) => {
const allIdsKey = getAllIdsShardKey(
appHierarchy,
getParentKey(record.key),
record.id,
);
let allIds = await getOrCreateShardFile(datastore, allIdsKey);
allIds += `${allIds.length > 0 ? ',' : ''}${record.id}`;
await datastore.updateFile(allIdsKey, allIds);
};
export const getAllIdsIterator = app => async (collection_Key_or_NodeKey) => {
collection_Key_or_NodeKey = safeKey(collection_Key_or_NodeKey);
const targetNode = getCollectionNodeByKeyOrNodeKey(
app.hierarchy,
collection_Key_or_NodeKey,
);
const getAllIdsIteratorForCollectionKey = async (collectionKey) => {
const all_allIdsKeys = getAllIdsShardNames(app.hierarchy, collectionKey);
let shardIndex = 0;
const allIdsFromShardIterator = async () => {
if (shardIndex === all_allIdsKeys.length) { return ({ done: true, result: { ids: [], collectionKey } }); }
const shardKey = all_allIdsKeys[shardIndex];
const allIds = await getAllIdsFromShard(app.datastore, shardKey);
shardIndex++;
return ({
result: {
ids: allIds,
collectionKey,
},
done: false,
});
};
return allIdsFromShardIterator;
};
const ancestors = $(getFlattenedHierarchy(app.hierarchy), [
filter(isCollectionRecord),
filter(n => isAncestor(targetNode)(n)
|| n.nodeKey() === targetNode.nodeKey()),
orderBy([n => n.nodeKey().length], ['asc']),
]); // parents first
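  // recursively builds one iterator per concrete collection instance,
  // walking from the root ancestor down to the target collection node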
const traverseForIteraterators = async (parentRecordKey = '', currentNodeIndex = 0) => {
const currentNode = ancestors[currentNodeIndex];
const currentCollectionKey = joinKey(
parentRecordKey,
currentNode.collectionName,
);
if (currentNode.nodeKey() === targetNode.nodeKey()) {
return [
await getAllIdsIteratorForCollectionKey(
currentCollectionKey,
)];
}
const allIterators = [];
const currentIterator = await getAllIdsIteratorForCollectionKey(
currentCollectionKey,
);
let ids = await currentIterator();
while (ids.done === false) {
for (const id of ids.result.ids) {
allIterators.push(
await traverseForIteraterators(
joinKey(currentCollectionKey, id),
currentNodeIndex + 1,
),
);
}
ids = await currentIterator();
}
return flatten(allIterators);
};
const iteratorsArray = await traverseForIteraterators();
let currentIteratorIndex = 0;
return async () => {
if (iteratorsArray.length === 0) { return { done: true, result: [] }; }
const innerResult = await iteratorsArray[currentIteratorIndex]();
if (!innerResult.done) { return innerResult; }
if (currentIteratorIndex == iteratorsArray.length - 1) {
return { done: true, result: innerResult.result };
}
currentIteratorIndex++;
return { done: false, result: innerResult.result };
};
};
const getAllIdsFromShard = async (datastore, shardKey) => {
const allIdsStr = await getShardFile(datastore, shardKey);
const allIds = [];
let currentId = '';
for (let i = 0; i < allIdsStr.length; i++) {
const currentChar = allIdsStr.charAt(i);
const isLast = (i === allIdsStr.length - 1);
if (currentChar === ',' || isLast) {
if (isLast) currentId += currentChar;
allIds.push(currentId);
currentId = '';
} else {
currentId += currentChar;
}
}
return allIds;
};
export const removeFromAllIds = (appHierarchy, datastore) => async (record) => {
const shardKey = getAllIdsShardKey(
appHierarchy,
getParentKey(record.key),
record.id,
);
const allIds = await getAllIdsFromShard(datastore, shardKey);
const newIds = $(allIds, [
pull(record.id),
join(','),
]);
await datastore.updateFile(shardKey, newIds);
};
export default getAllIdsIterator;

View file

@ -0,0 +1,79 @@
import { ensureShardNameIsInShardMap } from './sharding';
import { getIndexWriter } from './serializer';
import { isShardedIndex } from '../templateApi/hierarchy';
import {promiseWriteableStream} from "./promiseWritableStream";
import {promiseReadableStream} from "./promiseReadableStream";
export const applyToShard = async (hierarchy, store, indexKey,
indexNode, indexShardKey, recordsToWrite, keysToRemove) => {
const createIfNotExists = recordsToWrite.length > 0;
const writer = await getWriter(hierarchy, store, indexKey, indexShardKey, indexNode, createIfNotExists);
if (writer === SHARD_DELETED) return;
await writer.updateIndex(recordsToWrite, keysToRemove);
await swapTempFileIn(store, indexShardKey);
};
const SHARD_DELETED = 'SHARD_DELETED';
const getWriter = async (hierarchy, store, indexKey, indexedDataKey, indexNode, createIfNotExists) => {
let readableStream = null;
if (isShardedIndex(indexNode)) {
await ensureShardNameIsInShardMap(store, indexKey, indexedDataKey);
if(!await store.exists(indexedDataKey)) {
await store.createFile(indexedDataKey, "");
}
}
try {
readableStream = promiseReadableStream(
await store.readableFileStream(indexedDataKey)
);
} catch (e) {
if (await store.exists(indexedDataKey)) {
throw e;
} else {
if (createIfNotExists) {
await store.createFile(indexedDataKey, '');
} else {
return SHARD_DELETED;
}
readableStream = promiseReadableStream(
await store.readableFileStream(indexedDataKey)
);
}
}
const writableStream = promiseWriteableStream(
await store.writableFileStream(indexedDataKey + ".temp")
);
return getIndexWriter(
hierarchy, indexNode,
readableStream, writableStream
);
};
const swapTempFileIn = async (store, indexedDataKey, isRetry = false) => {
const tempFile = `${indexedDataKey}.temp`;
try {
await store.deleteFile(indexedDataKey);
} catch (e) {
    // ignore failure, in case it has not been created yet
}
try {
await store.renameFile(tempFile, indexedDataKey);
} catch (e) {
// retrying in case delete failure was for some other reason
if (!isRetry) {
await swapTempFileIn(store, indexedDataKey, true);
} else {
throw new Error("Failed to swap in index filed: " + e.message);
}
}
};

View file

@ -0,0 +1,98 @@
import { compileExpression, compileCode } from '@nx-js/compiler-util';
import {
isUndefined, keys,
cloneDeep, isFunction,
} from 'lodash';
import { defineError } from '../common';
export const filterEval = 'FILTER_EVALUATE';
export const filterCompile = 'FILTER_COMPILE';
export const mapEval = 'MAP_EVALUATE';
export const mapCompile = 'MAP_COMPILE';
export const removeUndeclaredFields = 'REMOVE_UNDECLARED_FIELDS';
export const addUnMappedFields = 'ADD_UNMAPPED_FIELDS';
export const addTheKey = 'ADD_KEY';
const getEvaluateResult = () => ({
isError: false,
passedFilter: true,
result: null,
});
export const compileFilter = index => compileExpression(index.filter);
export const compileMap = index => compileCode(index.map);
export const passesFilter = (record, index) => {
const context = { record };
if (!index.filter) return true;
const compiledFilter = defineError(
() => compileFilter(index),
filterCompile,
);
return defineError(
() => compiledFilter(context),
filterEval,
);
};
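// runs the index's map code against a clone of the record; undefined values
// become null, function-valued fields are dropped, and key/sortKey are always set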
export const mapRecord = (record, index) => {
const recordClone = cloneDeep(record);
const context = { record: recordClone };
const map = index.map ? index.map : 'return {...record};';
const compiledMap = defineError(
() => compileCode(map),
mapCompile,
);
const mapped = defineError(
() => compiledMap(context),
mapEval,
);
const mappedKeys = keys(mapped);
for (let i = 0; i < mappedKeys.length; i++) {
const key = mappedKeys[i];
mapped[key] = isUndefined(mapped[key]) ? null : mapped[key];
if (isFunction(mapped[key])) {
delete mapped[key];
}
}
mapped.key = record.key;
mapped.sortKey = index.getSortKey
? compileCode(index.getSortKey)(context)
: record.id;
return mapped;
};
export const evaluate = record => (index) => {
const result = getEvaluateResult();
try {
result.passedFilter = passesFilter(record, index);
} catch (err) {
result.isError = true;
result.passedFilter = false;
result.result = err.message;
}
if (!result.passedFilter) return result;
try {
result.result = mapRecord(record, index);
} catch (err) {
result.isError = true;
result.result = err.message;
}
return result;
};
export default evaluate;

View file

@ -0,0 +1,58 @@
import {
has, keys, map, orderBy,
filter, concat, reverse,
} from 'lodash/fp';
import { getAllowedRecordNodesForIndex } from '../templateApi/hierarchy';
import { mapRecord } from './evaluate';
import { constructRecord } from '../recordApi/getNew';
import { getSampleFieldValue, detectType, all } from '../types';
import { $ } from '../common';
export const generateSchema = (hierarchy, indexNode) => {
const recordNodes = getAllowedRecordNodesForIndex(hierarchy, indexNode);
const mappedRecords = $(recordNodes, [
map(n => mapRecord(createSampleRecord(n), indexNode)),
]);
// always has record key and sort key
const schema = {
sortKey: all.string,
key: all.string,
};
const fieldsHas = has(schema);
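  // if the same field yields different types across record nodes, fall back to string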
const setField = (fieldName, value) => {
if (value === null || value === undefined) { return; }
const thisType = detectType(value);
if (fieldsHas(fieldName)) {
if (schema[fieldName] !== thisType) {
schema[fieldName] = all.string;
}
} else {
schema[fieldName] = thisType;
}
};
for (const mappedRec of mappedRecords) {
for (const f in mappedRec) {
setField(f, mappedRec[f]);
}
}
  // returning an array of {name, type}
return $(schema, [
keys,
map(k => ({ name: k, type: schema[k].name })),
filter(s => s.name !== 'sortKey'),
    orderBy('name', ['desc']), // reverse alpha
concat([{ name: 'sortKey', type: all.string.name }]), // sortKey on end
reverse, // sortKey first, then rest are alphabetical
]);
};
const createSampleRecord = recordNode => constructRecord(
recordNode,
getSampleFieldValue,
recordNode.parent().nodeKey(),
);

View file

@ -0,0 +1,22 @@
import { isShardedIndex } from '../templateApi/hierarchy';
import { joinKey } from '../common';
import { getShardMapKey, getUnshardedIndexDataKey, createIndexFile } from './sharding';
export const initialiseIndex = async (datastore, parentKey, index) => {
const indexKey = joinKey(parentKey, index.name);
await datastore.createFolder(indexKey);
if (isShardedIndex(index)) {
await datastore.createFile(
getShardMapKey(indexKey),
'[]',
);
} else {
await createIndexFile(
datastore,
getUnshardedIndexDataKey(indexKey),
index,
);
}
};

View file

@ -0,0 +1,84 @@
// adapted from https://github.com/dex4er/js-promise-readable
// thanks :)
export const promiseReadableStream = stream => {
let _errored;
const _errorHandler = err => {
_errored = err;
};
stream.on("error", _errorHandler);
const read = (size) => {
return new Promise((resolve, reject) => {
if (_errored) {
const err = _errored;
_errored = undefined;
return reject(err)
}
if (!stream.readable || stream.closed || stream.destroyed) {
return resolve();
}
const readableHandler = () => {
const chunk = stream.read(size);
if (chunk) {
removeListeners();
resolve(chunk);
}
}
const closeHandler = () => {
removeListeners();
resolve();
}
const endHandler = () => {
removeListeners();
resolve();
}
const errorHandler = (err) => {
_errored = undefined;
removeListeners();
reject(err);
}
const removeListeners = () => {
stream.removeListener("close", closeHandler);
stream.removeListener("error", errorHandler);
stream.removeListener("end", endHandler);
stream.removeListener("readable", readableHandler);
}
stream.on("close", closeHandler);
stream.on("end", endHandler);
stream.on("error", errorHandler);
stream.on("readable", readableHandler);
readableHandler();
});
}
const destroy = () => {
if (stream) {
if (_errorHandler) {
stream.removeListener("error", _errorHandler);
}
if (typeof stream.destroy === "function") {
stream.destroy();
}
}
};
return {read, destroy, stream};
}
export default promiseReadableStream

View file

@ -0,0 +1,120 @@
// adapted from https://github.com/dex4er/js-promise-writable
// Thank you :)
export const promiseWriteableStream = stream => {
let _errored;
const _errorHandler = err => {
_errored = err;
};
stream.on("error", _errorHandler);
const write = chunk => {
let rejected = false;
return new Promise((resolve, reject) => {
if (_errored) {
const err = _errored;
_errored = undefined;
return reject(err);
}
if (!stream.writable || stream.closed || stream.destroyed) {
return reject(new Error("write after end"));
}
const writeErrorHandler = err => {
_errored = undefined;
rejected = true;
reject(err);
}
stream.once("error", writeErrorHandler);
const canWrite = stream.write(chunk);
stream.removeListener("error", writeErrorHandler);
if (canWrite) {
if (!rejected) {
resolve(chunk.length);
}
} else {
const errorHandler = err => {
_errored = undefined;
removeListeners();
reject(err);
}
const drainHandler = () => {
removeListeners();
resolve(chunk.length);
}
const closeHandler = () => {
removeListeners();
resolve(chunk.length);
}
const finishHandler = () => {
removeListeners();
resolve(chunk.length);
}
const removeListeners = () => {
stream.removeListener("close", closeHandler);
stream.removeListener("drain", drainHandler);
stream.removeListener("error", errorHandler);
stream.removeListener("finish", finishHandler);
}
stream.on("close", closeHandler);
stream.on("drain", drainHandler);
stream.on("error", errorHandler);
stream.on("finish", finishHandler);
}
})
}
const end = () => {
return new Promise((resolve, reject) => {
if (_errored) {
const err = _errored;
_errored = undefined;
return reject(err);
}
if (!stream.writable || stream.closed || stream.destroyed) {
return resolve();
}
const finishHandler = () => {
removeListeners();
resolve();
}
const errorHandler = (err) => {
_errored = undefined;
removeListeners();
reject(err);
}
const removeListeners = () => {
stream.removeListener("error", errorHandler);
stream.removeListener("finish", finishHandler);
}
stream.on("finish", finishHandler);
stream.on("error", errorHandler);
stream.end();
})
}
return {write, end};
}
export default promiseWriteableStream

View file

@ -0,0 +1,99 @@
import lunr from 'lunr';
import {
getHashCode,
joinKey
} from '../common';
import {
getActualKeyOfParent,
isGlobalIndex,
} from '../templateApi/hierarchy';
import {promiseReadableStream} from "./promiseReadableStream";
import { createIndexFile } from './sharding';
import { generateSchema } from './indexSchemaCreator';
import { getIndexReader, CONTINUE_READING_RECORDS } from './serializer';
export const readIndex = async (hierarchy, datastore, index, indexedDataKey) => {
const records = [];
const doRead = iterateIndex(
async item => {
records.push(item);
return CONTINUE_READING_RECORDS;
},
async () => records
);
return await doRead(hierarchy, datastore, index, indexedDataKey);
};
export const searchIndex = async (hierarchy, datastore, index, indexedDataKey, searchPhrase) => {
const records = [];
const schema = generateSchema(hierarchy, index);
const doRead = iterateIndex(
async item => {
const idx = lunr(function () {
this.ref('key');
for (const field of schema) {
this.field(field.name);
}
this.add(item);
});
const searchResults = idx.search(searchPhrase);
if (searchResults.length === 1) {
item._searchResult = searchResults[0];
records.push(item);
}
return CONTINUE_READING_RECORDS;
},
async () => records
);
return await doRead(hierarchy, datastore, index, indexedDataKey);
};
export const getIndexedDataKey_fromIndexKey = (indexKey) =>
`${indexKey}${indexKey.endsWith('.csv') ? '' : '.csv'}`;
export const uniqueIndexName = index => `idx_${
getHashCode(`${index.filter}${index.map}`)
}.csv`;
export const getIndexedDataKey = (decendantKey, indexNode) => {
if (isGlobalIndex(indexNode)) { return `${indexNode.nodeKey()}.csv`; }
const indexedDataParentKey = getActualKeyOfParent(
indexNode.parent().nodeKey(),
decendantKey,
);
const indexName = indexNode.name
? `${indexNode.name}.csv`
: uniqueIndexName(indexNode);
return joinKey(
indexedDataParentKey,
indexName,
);
};
export const iterateIndex = (onGetItem, getFinalResult) => async (hierarchy, datastore, index, indexedDataKey) => {
try {
const readableStream = promiseReadableStream(
await datastore.readableFileStream(indexedDataKey)
);
const read = getIndexReader(hierarchy, index, readableStream);
await read(onGetItem);
return getFinalResult();
} catch (e) {
if (await datastore.exists(indexedDataKey)) {
throw e;
} else {
await createIndexFile(
datastore,
indexedDataKey,
index,
);
}
return [];
}
};

View file

@ -0,0 +1,81 @@
import { orderBy } from 'lodash';
import {
reduce, find, includes, flatten, union,
filter, each, map,
} from 'lodash/fp';
import {
joinKey, splitKey, isNonEmptyString,
isNothing, $, isSomething,
} from '../common';
import {
getFlattenedHierarchy, getNode, getRecordNodeId,
getExactNodeForPath, recordNodeIdIsAllowed,
isRecord, isGlobalIndex,
} from '../templateApi/hierarchy';
import { indexTypes } from '../templateApi/indexes';
export const getRelevantAncestorIndexes = (appHierarchy, record) => {
const key = record.key;
const keyParts = splitKey(key);
const nodeId = getRecordNodeId(key);
const flatHierarchy = orderBy(getFlattenedHierarchy(appHierarchy),
[node => node.pathRegx().length],
['desc']);
const makeindexNodeAndKey_ForAncestorIndex = (indexNode, indexKey) => makeIndexNodeAndKey(indexNode, joinKey(indexKey, indexNode.name));
const traverseAncestorIndexesInPath = () => reduce((acc, part) => {
const currentIndexKey = joinKey(acc.lastIndexKey, part);
acc.lastIndexKey = currentIndexKey;
const testPathRegx = p => new RegExp(`${p.pathRegx()}$`).test(currentIndexKey);
const nodeMatch = find(testPathRegx)(flatHierarchy);
if (isNothing(nodeMatch)) { return acc; }
if (!isRecord(nodeMatch)
|| nodeMatch.indexes.length === 0) { return acc; }
const indexes = $(nodeMatch.indexes, [
filter(i => i.indexType === indexTypes.ancestor
&& (i.allowedRecordNodeIds.length === 0
|| includes(nodeId)(i.allowedRecordNodeIds))),
]);
each(v => acc.nodesAndKeys.push(
makeindexNodeAndKey_ForAncestorIndex(v, currentIndexKey),
))(indexes);
return acc;
}, { lastIndexKey: '', nodesAndKeys: [] })(keyParts).nodesAndKeys;
const rootIndexes = $(flatHierarchy, [
filter(n => isGlobalIndex(n) && recordNodeIdIsAllowed(n)(nodeId)),
map(i => makeIndexNodeAndKey(i, i.nodeKey())),
]);
return union(traverseAncestorIndexesInPath())(rootIndexes);
};
export const getRelevantReverseReferenceIndexes = (appHierarchy, record) => $(record.key, [
getExactNodeForPath(appHierarchy),
n => n.fields,
filter(f => f.type === 'reference'
&& isSomething(record[f.name])
&& isNonEmptyString(record[f.name].key)),
map(f => $(f.typeOptions.reverseIndexNodeKeys, [
map(n => ({
recordNode: getNode(appHierarchy, n),
field: f,
})),
])),
flatten,
map(n => makeIndexNodeAndKey(
n.recordNode,
joinKey(record[n.field.name].key, n.recordNode.name),
)),
]);
const makeIndexNodeAndKey = (indexNode, indexKey) => ({ indexNode, indexKey });
export default getRelevantAncestorIndexes;

View file

@ -0,0 +1,252 @@
import {generateSchema} from "./indexSchemaCreator";
import { has, isString, difference, find } from "lodash/fp";
import { Buffer } from "safe-buffer";
import {StringDecoder} from "string_decoder";
import {getType} from "../types";
import { isSomething } from "../common";
export const BUFFER_MAX_BYTES = 524288; // 0.5MB
export const CONTINUE_READING_RECORDS = "CONTINUE_READING";
export const READ_REMAINING_TEXT = "READ_REMAINING";
export const CANCEL_READ = "CANCEL";
export const getIndexWriter = (hierarchy, indexNode, readableStream, writableStream) => {
  const schema = generateSchema(hierarchy, indexNode);
  return ({
    read: read(readableStream, schema),
    updateIndex: updateIndex(readableStream, writableStream, schema)
  });
};
export const getIndexReader = (hierarchy, indexNode, readableStream) =>
read(
readableStream,
generateSchema(hierarchy, indexNode)
);
const updateIndex = (readableStream, writableStream, schema) => async (itemsToWrite, keysToRemove) => {
const write = newOutputWriter(BUFFER_MAX_BYTES, writableStream);
const writtenItems = [];
await read(readableStream, schema)(
async indexedItem => {
const updated = find(i => indexedItem.key === i.key)(itemsToWrite);
const removed = find(k => indexedItem.key === k)(keysToRemove);
if(isSomething(removed))
return CONTINUE_READING_RECORDS;
if(isSomething(updated)) {
const serializedItem = serializeItem(schema, updated);
await write(serializedItem);
writtenItems.push(updated);
} else {
await write(
serializeItem(schema, indexedItem)
);
}
return CONTINUE_READING_RECORDS;
},
async text => await write(text)
);
if(writtenItems.length !== itemsToWrite.length) {
const toAdd = difference(itemsToWrite, writtenItems);
for(let added of toAdd) {
await write(
serializeItem(schema, added)
);
}
  } else if(writtenItems.length === 0) {
    // there may be no records at all - still write an empty index file
    await write("");
}
await write();
await writableStream.end();
};
const read = (readableStream, schema) => async (onGetItem, onGetText) => {
  const readInput = newInputReader(readableStream);
  let text = await readInput();
  let status = CONTINUE_READING_RECORDS;
  // rowText lives outside the loop, so a row split across two
  // buffered chunks is carried over rather than discarded
  let rowText = "";
  while(text.length > 0) {
    if(status === READ_REMAINING_TEXT) {
      await onGetText(text);
      // advance to the next chunk - without this, the same text loops forever
      text = await readInput();
      continue;
    }
    if(status === CANCEL_READ) {
      return;
    }
    let currentCharIndex = 0;
for(let currentChar of text) {
rowText += currentChar;
if(currentChar === "\r") {
status = await onGetItem(
deserializeRow(schema, rowText)
);
rowText = "";
if(status === READ_REMAINING_TEXT) {
break;
}
}
currentCharIndex++;
}
if(currentCharIndex < text.length -1) {
await onGetText(text.substring(currentCharIndex + 1));
}
text = await readInput();
}
await readableStream.destroy();
};
const newOutputWriter = (flushBoundary, writableStream) => {
let currentBuffer = null;
return async (text) => {
if(isString(text) && currentBuffer === null)
currentBuffer = Buffer.from(text, "utf8");
else if(isString(text))
currentBuffer = Buffer.concat([
currentBuffer,
Buffer.from(text, "utf8")
]);
if(currentBuffer !== null &&
(currentBuffer.length > flushBoundary
|| !isString(text))) {
await writableStream.write(currentBuffer);
currentBuffer = null;
}
}
};
const newInputReader = (readableStream) => {
  // the StringDecoder keeps any incomplete multi-byte sequence in its own
  // internal buffer between writes, so utf8 characters that span two
  // chunks decode correctly without tracking leftover bytes here
  const decoder = new StringDecoder('utf8');
  return async () => {
    const nextBytesBuffer = await readableStream.read(BUFFER_MAX_BYTES);
    if(!nextBytesBuffer) return "";
    return decoder.write(nextBytesBuffer);
  };
};
const deserializeRow = (schema, rowText) => {
let currentPropIndex = 0;
let currentCharIndex = 0;
let currentValueText = "";
let isEscaped = false;
const item = {};
const setCurrentProp = () => {
const currentProp = schema[currentPropIndex];
const type = getType(currentProp.type);
const value = currentValueText === ""
? type.getDefaultValue()
: type.safeParseValue(
currentValueText);
item[currentProp.name] = value;
};
while(currentPropIndex < schema.length) {
if(currentCharIndex < rowText.length) {
const currentChar = rowText[currentCharIndex];
if(isEscaped) {
if(currentChar === "r") {
currentValueText += "\r";
} else {
currentValueText += currentChar;
}
isEscaped = false;
} else {
if(currentChar === ",") {
setCurrentProp();
currentValueText = "";
currentPropIndex++;
} else if(currentChar === "\\") {
isEscaped = true;
} else {
currentValueText += currentChar;
}
}
currentCharIndex++;
} else {
currentValueText = "";
setCurrentProp();
currentPropIndex++;
}
}
return item;
};
export const serializeItem = (schema, item) => {
let rowText = ""
for(let prop of schema) {
const type = getType(prop.type);
const value = has(prop.name)(item)
? item[prop.name]
: type.getDefaultValue()
const valStr = type.stringify(value);
for(let i = 0; i < valStr.length; i++) {
const currentChar = valStr[i];
if(currentChar === ","
|| currentChar === "\r"
|| currentChar === "\\") {
rowText += "\\";
}
if(currentChar === "\r") {
rowText += "r";
} else {
rowText += currentChar;
}
}
rowText += ",";
}
rowText += "\r";
return rowText;
};
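Since the escaping scheme is easy to misread, a round-trip sketch (schema entries assumed to follow generateSchema's { name, type } shape, and the string type assumed to stringify plain values as-is):

// sketch only - ',' '\r' and '\' are prefixed with '\' on write,
// and deserializeRow reverses the escaping
const schema = [
  { name: 'key', type: 'string' },
  { name: 'surname', type: 'string' },
];
const rowText = serializeItem(schema, {
  key: '/customers/1-abc',
  surname: 'smith, john',
});
// rowText === '/customers/1-abc,smith\, john,\r'
const roundTripped = deserializeRow(schema, rowText);
// roundTripped.surname === 'smith, john'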

View file

@ -0,0 +1,124 @@
import { compileCode } from '@nx-js/compiler-util';
import {
filter, includes, map, last,
} from 'lodash/fp';
import {
getActualKeyOfParent, isGlobalIndex,
getParentKey, isShardedIndex,
getExactNodeForPath,
} from '../templateApi/hierarchy';
import {
joinKey, isNonEmptyString, splitKey, $,
} from '../common';
export const getIndexedDataKey = (indexNode, indexKey, record) => {
const getShardName = (indexNode, record) => {
const shardNameFunc = compileCode(indexNode.getShardName);
try {
return shardNameFunc({ record });
} catch(e) {
const errorDetails = `shardCode: ${indexNode.getShardName} :: record: ${JSON.stringify(record)} :: `
e.message = "Error running index shardname func: " + errorDetails + e.message;
throw e;
}
};
const shardName = isNonEmptyString(indexNode.getShardName)
? `${getShardName(indexNode, record)}.csv`
: 'index.csv';
return joinKey(indexKey, shardName);
};
export const getShardKeysInRange = async (app, indexKey, startRecord = null, endRecord = null) => {
const indexNode = getExactNodeForPath(app.hierarchy)(indexKey);
const startShardName = !startRecord
? null
: shardNameFromKey(
getIndexedDataKey(
indexNode,
indexKey,
startRecord,
),
);
const endShardName = !endRecord
? null
: shardNameFromKey(
getIndexedDataKey(
indexNode,
indexKey,
endRecord,
),
);
return $(await getShardMap(app.datastore, indexKey), [
filter(k => (startRecord === null || k >= startShardName)
&& (endRecord === null || k <= endShardName)),
map(k => joinKey(indexKey, `${k}.csv`)),
]);
};
export const ensureShardNameIsInShardMap = async (store, indexKey, indexedDataKey) => {
const map = await getShardMap(store, indexKey);
const shardName = shardNameFromKey(indexedDataKey);
if (!includes(shardName)(map)) {
map.push(shardName);
await writeShardMap(store, indexKey, map);
}
};
export const getShardMap = async (datastore, indexKey) => {
const shardMapKey = getShardMapKey(indexKey);
try {
return await datastore.loadJson(shardMapKey);
} catch (_) {
await datastore.createJson(shardMapKey, []);
return [];
}
};
export const writeShardMap = async (datastore, indexKey, shardMap) => await datastore.updateJson(
getShardMapKey(indexKey),
shardMap,
);
export const getAllShardKeys = async (app, indexKey) => await getShardKeysInRange(app, indexKey);
export const getShardMapKey = indexKey => joinKey(indexKey, 'shardMap.json');
export const getUnshardedIndexDataKey = indexKey => joinKey(indexKey, 'index.csv');
export const getIndexFolderKey = indexKey => indexKey;
export const createIndexFile = async (datastore, indexedDataKey, index) => {
if (isShardedIndex(index)) {
const indexKey = getParentKey(indexedDataKey);
const shardMap = await getShardMap(datastore, indexKey);
shardMap.push(
shardNameFromKey(indexedDataKey),
);
await writeShardMap(datastore, indexKey, shardMap);
}
await datastore.createFile(indexedDataKey, '');
};
export const shardNameFromKey = key => $(key, [
splitKey,
last,
]).replace('.csv', '');
export const getIndexKey_BasedOnDecendant = (decendantKey, indexNode) => {
if (isGlobalIndex(indexNode)) { return `${indexNode.nodeKey()}`; }
const indexedDataParentKey = getActualKeyOfParent(
indexNode.parent().nodeKey(),
decendantKey,
);
return joinKey(
indexedDataParentKey,
indexNode.name,
);
};
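To make shard naming concrete, a sketch with an index node whose getShardName code buckets records by the first letter of surname (the node is trimmed to the members getIndexedDataKey reads):

// sketch only - the getShardName code string is compiled and run
// against { record } to choose a shard file inside the index folder
const indexNode = {
  getShardName: 'return record.surname.substring(0, 1).toUpperCase();',
};
const dataKey = getIndexedDataKey(
  indexNode, '/customers/customer_index', { surname: 'smith' },
);
// dataKey === '/customers/customer_index/S.csv'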

View file

View file

@ -0,0 +1,29 @@
import { find, take, union } from 'lodash/fp';
import { getFlattenedHierarchy } from '../templateApi/hierarchy';
import { $, splitKey, joinKey } from '../common';
import { NotFoundError } from '../common/errors';
export const customId = app => (nodeName, id) => {
const node = $(app.hierarchy, [
getFlattenedHierarchy,
find(n => n.name === nodeName),
]);
if (!node) throw new NotFoundError(`Cannot find node ${nodeName}`);
return `${node.nodeId}-${id}`;
};
export const setCustomId = app => (record, id) => {
record.id = customId(app)(record.type, id);
const keyParts = splitKey(record.key);
record.key = $(keyParts, [
take(keyParts.length - 1),
union([record.id]),
joinKey,
]);
return record;
};
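A sketch of the helpers above, assuming a hierarchy containing a 'customer' record node whose nodeId is 1, and an app with the recordApi wired up:

// sketch only - swaps the generated shortid for a deterministic id
const customer = app.recordApi.getNew('/customers', 'customer');
app.recordApi.setCustomId(customer, 'ABC123');
// customer.id === '1-ABC123'
// customer.key === '/customers/1-ABC123'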

View file

@ -0,0 +1,96 @@
import {
safeKey, apiWrapper,
events, joinKey,
} from '../common';
import { _load, getRecordFileName } from './load';
import { _deleteCollection } from '../collectionApi/delete';
import {
getExactNodeForPath,
getFlattenedHierarchy, getNode,
fieldReversesReferenceToNode,
} from '../templateApi/hierarchy';
import { _deleteIndex } from '../indexApi/delete';
import { transactionForDeleteRecord } from '../transactions/create';
import { removeFromAllIds } from '../indexing/allIds';
import { permission } from '../authApi/permissions';
export const deleteRecord = (app, disableCleanup = false) => async key => apiWrapper(
app,
events.recordApi.delete,
permission.deleteRecord.isAuthorized(key),
{ key },
_deleteRecord, app, key, disableCleanup,
);
// called deleteRecord because delete is a keyword
export const _deleteRecord = async (app, key, disableCleanup) => {
key = safeKey(key);
const node = getExactNodeForPath(app.hierarchy)(key);
const record = await _load(app, key);
await transactionForDeleteRecord(app, record);
for (const collectionRecord of node.children) {
const collectionKey = joinKey(
key, collectionRecord.collectionName,
);
await _deleteCollection(app, collectionKey, true);
}
await app.datastore.deleteFile(
getRecordFileName(key),
);
await deleteFiles(app, key);
await removeFromAllIds(app.hierarchy, app.datastore)(record);
if (!disableCleanup) { await app.cleanupTransactions(); }
await app.datastore.deleteFolder(key);
await deleteIndexes(app, key);
};
const deleteIndexes = async (app, key) => {
const node = getExactNodeForPath(app.hierarchy)(key);
/* const reverseIndexKeys = $(app.hierarchy, [
getFlattenedHierarchy,
map(n => n.fields),
flatten,
filter(isSomething),
filter(fieldReversesReferenceToNode(node)),
map(f => $(f.typeOptions.reverseIndexNodeKeys, [
map(n => getNode(
app.hierarchy,
n))
])
),
flatten,
map(n => joinKey(key, n.name))
]);
for(let i of reverseIndexKeys) {
await _deleteIndex(app, i, true);
} */
for (const index of node.indexes) {
const indexKey = joinKey(key, index.name);
await _deleteIndex(app, indexKey, true);
}
};
const deleteFiles = async (app, key) => {
const filesFolder = joinKey(key, 'files');
const allFiles = await app.datastore.getFolderContents(
filesFolder,
);
for (const file of allFiles) {
await app.datastore.deleteFile(file);
}
await app.datastore.deleteFolder(
joinKey(key, 'files'),
);
};

View file

@ -0,0 +1,24 @@
import { apiWrapper, events, isNothing } from '../common';
import { permission } from '../authApi/permissions';
import { safeGetFullFilePath } from './uploadFile';
import { BadRequestError } from '../common/errors';
export const downloadFile = app => async (recordKey, relativePath) => apiWrapper(
app,
events.recordApi.uploadFile,
permission.readRecord.isAuthorized(recordKey),
  // TODO: remove dupe key 'recordKey' from object
  { recordKey, relativePath },
_downloadFile, app, recordKey, relativePath,
);
const _downloadFile = async (app, recordKey, relativePath) => {
if (isNothing(recordKey)) { throw new BadRequestError('Record Key not supplied'); }
if (isNothing(relativePath)) { throw new BadRequestError('file path not supplied'); }
return await app.datastore.readableFileStream(
safeGetFullFilePath(
recordKey, relativePath,
),
);
};
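A usage sketch, assuming a server-side datastore whose readableFileStream returns a Node stream:

// sketch only - streams a record's stored file out to disk
import fs from 'fs';

const copyPhotoToDisk = async (app) => {
  const readable = await app.recordApi.downloadFile(
    '/customers/1-abc', 'photo.jpg',
  );
  readable.pipe(fs.createWriteStream('./photo-copy.jpg'));
};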

View file

@ -0,0 +1,75 @@
import { has, some } from 'lodash';
import { map, isString } from 'lodash/fp';
import {
getExactNodeForPath,
findField, getNode, isGlobalIndex,
} from '../templateApi/hierarchy';
import { listItems } from '../indexApi/listItems';
import {
$, apiWrapperSync, events,
} from '../common';
import { getIndexKey_BasedOnDecendant } from '../indexing/sharding';
import { permission } from '../authApi/permissions';
export const getContext = app => recordKey => apiWrapperSync(
app,
events.recordApi.getContext,
permission.readRecord.isAuthorized(recordKey),
{ recordKey },
_getContext, app, recordKey,
);
export const _getContext = (app, recordKey) => {
const recordNode = getExactNodeForPath(app.hierarchy)(recordKey);
const cachedReferenceIndexes = {};
const lazyLoadReferenceIndex = async (typeOptions) => {
if (!has(cachedReferenceIndexes, typeOptions.indexNodeKey)) {
cachedReferenceIndexes[typeOptions.indexNodeKey] = {
typeOptions,
data: await readReferenceIndex(
app, recordKey, typeOptions,
),
};
}
return cachedReferenceIndexes[typeOptions.indexNodeKey];
};
const getTypeOptions = typeOptions_or_fieldName => (isString(typeOptions_or_fieldName)
? findField(recordNode, typeOptions_or_fieldName)
.typeOptions
: typeOptions_or_fieldName);
return {
referenceExists: async (typeOptions_or_fieldName, key) => {
const typeOptions = getTypeOptions(typeOptions_or_fieldName);
const { data } = await lazyLoadReferenceIndex(typeOptions);
return some(data, i => i.key === key);
},
referenceOptions: async (typeOptions_or_fieldName) => {
const typeOptions = getTypeOptions(typeOptions_or_fieldName);
const { data } = await lazyLoadReferenceIndex(typeOptions);
return data;
},
recordNode,
};
};
const readReferenceIndex = async (app, recordKey, typeOptions) => {
const indexNode = getNode(app.hierarchy, typeOptions.indexNodeKey);
const indexKey = isGlobalIndex(indexNode)
? indexNode.nodeKey()
: getIndexKey_BasedOnDecendant(
recordKey, indexNode,
);
const items = await listItems(app)(indexKey);
return $(items, [
map(i => ({
key: i.key,
value: i[typeOptions.displayValue],
})),
]);
};
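A hedged sketch of the context in use; 'partner' is an illustrative reference field name:

// sketch only - each reference field's options index is loaded lazily
const checkPartner = async (app) => {
  const context = app.recordApi.getContext('/customers/1-abc');
  const exists = await context.referenceExists('partner', '/partners/2-def');
  const options = await context.referenceOptions('partner');
  // options is [{ key, value }] - value comes from typeOptions.displayValue
  return { exists, options };
};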

View file

@ -0,0 +1,44 @@
import {
keyBy, mapValues,
} from 'lodash/fp';
import { generate } from 'shortid';
import { getNodeForCollectionPath } from '../templateApi/hierarchy';
import { getNewFieldValue } from '../types';
import {
$, joinKey, safeKey, apiWrapperSync, events,
} from '../common';
import { permission } from '../authApi/permissions';
export const getNew = app => (collectionKey, recordTypeName) => {
const recordNode = getRecordNode(app, collectionKey, recordTypeName);
return apiWrapperSync(
app,
events.recordApi.getNew,
permission.createRecord.isAuthorized(recordNode.nodeKey()),
{ collectionKey, recordTypeName },
_getNew, recordNode, collectionKey,
);
};
const _getNew = (recordNode, collectionKey) => constructRecord(recordNode, getNewFieldValue, collectionKey);
const getRecordNode = (app, collectionKey) => {
collectionKey = safeKey(collectionKey);
return getNodeForCollectionPath(app.hierarchy)(collectionKey);
};
export const getNewChild = app => (recordKey, collectionName, recordTypeName) =>
getNew(app)(joinKey(recordKey, collectionName), recordTypeName);
export const constructRecord = (recordNode, getFieldValue, collectionKey) => {
const record = $(recordNode.fields, [
keyBy('name'),
mapValues(getFieldValue),
]);
record.id = `${recordNode.nodeId}-${generate()}`;
record.key = joinKey(collectionKey, record.id);
record.isNew = true;
record.type = recordNode.name;
return record;
};
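A creation sketch, assuming a '/customers' collection whose record type is named 'customer':

// sketch only - getNew defaults every field and marks the record as new
const createCustomer = async (app) => {
  const customer = app.recordApi.getNew('/customers', 'customer');
  customer.surname = 'smith';
  // customer.id is '<nodeId>-<shortid>', and customer.key ends with that id
  return await app.recordApi.save(customer);
};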

View file

@ -0,0 +1,28 @@
import { getNew, getNewChild } from './getNew';
import { load } from './load';
import { validate } from './validate';
import { getContext } from './getContext';
import { save } from './save';
import { deleteRecord } from './delete';
import { uploadFile } from './uploadFile';
import { downloadFile } from './downloadFile';
import { customId, setCustomId } from './customId';
const api = app => ({
getNew: getNew(app),
getNewChild: getNewChild(app),
save: save(app),
load: load(app),
delete: deleteRecord(app, false),
validate: validate(app),
getContext: getContext(app),
uploadFile: uploadFile(app),
downloadFile: downloadFile(app),
customId: customId(app),
setCustomId: setCustomId(app),
});
export const getRecordApi = app => api(app);
export default getRecordApi;

View file

@ -0,0 +1,70 @@
import {
keyBy, mapValues, filter,
map, includes, last,
} from 'lodash/fp';
import { getExactNodeForPath, getNode } from '../templateApi/hierarchy';
import { safeParseField } from '../types';
import {
$, splitKey, safeKey, isNonEmptyString,
apiWrapper, events, joinKey,
} from '../common';
import { mapRecord } from '../indexing/evaluate';
import { permission } from '../authApi/permissions';
export const getRecordFileName = key => joinKey(key, 'record.json');
export const load = app => async key => apiWrapper(
app,
events.recordApi.load,
permission.readRecord.isAuthorized(key),
{ key },
_load, app, key,
);
export const _load = async (app, key, keyStack = []) => {
key = safeKey(key);
const recordNode = getExactNodeForPath(app.hierarchy)(key);
const storedData = await app.datastore.loadJson(
getRecordFileName(key),
);
const loadedRecord = $(recordNode.fields, [
keyBy('name'),
mapValues(f => safeParseField(f, storedData)),
]);
const newKeyStack = [...keyStack, key];
const references = $(recordNode.fields, [
filter(f => f.type === 'reference'
&& isNonEmptyString(loadedRecord[f.name].key)
&& !includes(loadedRecord[f.name].key)(newKeyStack)),
map(f => ({
promise: _load(app, loadedRecord[f.name].key, newKeyStack),
index: getNode(app.hierarchy, f.typeOptions.indexNodeKey),
field: f,
})),
]);
if (references.length > 0) {
const refRecords = await Promise.all(
map(p => p.promise)(references),
);
for (const ref of references) {
loadedRecord[ref.field.name] = mapRecord(
refRecords[references.indexOf(ref)],
ref.index,
);
}
}
loadedRecord.transactionId = storedData.transactionId;
loadedRecord.isNew = false;
loadedRecord.key = key;
loadedRecord.id = $(key, [splitKey, last]);
loadedRecord.type = recordNode.name;
return loadedRecord;
};
export default load;

View file

@ -0,0 +1,184 @@
import {
  cloneDeep,
  flatten,
  map,
  filter,
  isEqual,
} from 'lodash/fp';
import { initialiseChildCollections } from '../collectionApi/initialise';
import { validate } from './validate';
import { _load, getRecordFileName } from './load';
import {
apiWrapper, events, $, joinKey,
} from '../common';
import {
  getFlattenedHierarchy,
  getExactNodeForPath,
  isRecord,
  getNode,
  fieldReversesReferenceToNode,
  getLastPartInKey,
} from '../templateApi/hierarchy';
import { mapRecord } from '../indexing/evaluate';
import { listItems } from '../indexApi/listItems';
import { addToAllIds } from '../indexing/allIds';
import {
transactionForCreateRecord,
transactionForUpdateRecord,
} from '../transactions/create';
import { permission } from '../authApi/permissions';
import { initialiseIndex } from '../indexing/initialiseIndex';
import { BadRequestError } from '../common/errors';
export const save = app => async (record, context) => apiWrapper(
app,
events.recordApi.save,
record.isNew
? permission.createRecord.isAuthorized(record.key)
: permission.updateRecord.isAuthorized(record.key), { record },
_save, app, record, context, false,
);
export const _save = async (app, record, context, skipValidation = false) => {
const recordClone = cloneDeep(record);
if (!skipValidation) {
const validationResult = await validate(app)(recordClone, context);
if (!validationResult.isValid) {
await app.publish(events.recordApi.save.onInvalid, { record, validationResult });
throw new BadRequestError(`Save : Record Invalid : ${
JSON.stringify(validationResult.errors)}`);
}
}
if (recordClone.isNew) {
await addToAllIds(app.hierarchy, app.datastore)(recordClone);
const transaction = await transactionForCreateRecord(
app, recordClone,
);
recordClone.transactionId = transaction.id;
await app.datastore.createFolder(recordClone.key);
await app.datastore.createFolder(
joinKey(recordClone.key, 'files'),
);
await app.datastore.createJson(
getRecordFileName(recordClone.key),
recordClone,
);
await initialiseReverseReferenceIndexes(app, record);
await initialiseAncestorIndexes(app, record);
await initialiseChildCollections(app, recordClone.key);
await app.publish(events.recordApi.save.onRecordCreated, {
record: recordClone,
});
} else {
const oldRecord = await _load(app, recordClone.key);
const transaction = await transactionForUpdateRecord(
app, oldRecord, recordClone,
);
recordClone.transactionId = transaction.id;
await app.datastore.updateJson(
getRecordFileName(recordClone.key),
recordClone,
);
await app.publish(events.recordApi.save.onRecordUpdated, {
old: oldRecord,
new: recordClone,
});
}
await app.cleanupTransactions();
const returnedClone = cloneDeep(recordClone);
returnedClone.isNew = false;
return returnedClone;
};
const initialiseAncestorIndexes = async (app, record) => {
const recordNode = getExactNodeForPath(app.hierarchy)(record.key);
for (const index of recordNode.indexes) {
const indexKey = joinKey(record.key, index.name);
if (!await app.datastore.exists(indexKey)) { await initialiseIndex(app.datastore, record.key, index); }
}
};
const initialiseReverseReferenceIndexes = async (app, record) => {
const recordNode = getExactNodeForPath(app.hierarchy)(record.key);
const indexNodes = $(fieldsThatReferenceThisRecord(app, recordNode), [
map(f => $(f.typeOptions.reverseIndexNodeKeys, [
map(n => getNode(
app.hierarchy,
n,
)),
])),
flatten,
]);
for (const indexNode of indexNodes) {
await initialiseIndex(
app.datastore, record.key, indexNode,
);
}
};
const maintainReferentialIntegrity = async (app, oldRecord, newRecord) => {
/*
FOREACH Field that reference this object
- options Index node that for field
- has options index changed for referenced record?
- FOREACH reverse index of field
- FOREACH referencingRecord in reverse index
- Is field value still pointing to referencedRecord
- Update referencingRecord.fieldName to new value
- Save
*/
const recordNode = getExactNodeForPath(app.hierarchy)(newRecord.key);
const referenceFields = fieldsThatReferenceThisRecord(
app, recordNode,
);
const updates = $(referenceFields, [
map(f => ({
node: getNode(
app.hierarchy, f.typeOptions.indexNodeKey,
),
field: f,
})),
map(n => ({
old: mapRecord(oldRecord, n.node),
new: mapRecord(newRecord, n.node),
indexNode: n.node,
field: n.field,
reverseIndexKeys: $(n.field.typeOptions.reverseIndexNodeKeys, [
map(k => joinKey(
newRecord.key,
getLastPartInKey(k),
)),
]),
})),
filter(diff => !isEqual(diff.old)(diff.new)),
]);
for (const update of updates) {
for (const reverseIndexKey of update.reverseIndexKeys) {
const rows = await listItems(app)(reverseIndexKey);
for (const key of map(r => r.key)(rows)) {
const record = await _load(app, key);
if (record[update.field.name].key === newRecord.key) {
record[update.field.name] = update.new;
await _save(app, record, undefined, true);
}
}
}
}
};
const fieldsThatReferenceThisRecord = (app, recordNode) => $(app.hierarchy, [
getFlattenedHierarchy,
filter(isRecord),
map(n => n.fields),
flatten,
filter(fieldReversesReferenceToNode(recordNode)),
]);
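A hedged sketch of the update path through save; the key is illustrative, and app is assumed fully wired:

// sketch only - updates reuse save(), with isNew false on loaded records
const renameCustomer = async (app) => {
  const existing = await app.recordApi.load('/customers/1-abc');
  existing.surname = 'jones';
  const saved = await app.recordApi.save(existing);
  // saved.isNew === false; indexes catch up via cleanupTransactions
  return saved;
};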

View file

@ -0,0 +1,128 @@
import {
includes, filter,
map, some,
} from 'lodash/fp';
import { generate } from 'shortid';
import { _load } from './load';
import {
apiWrapper, events, splitKey,
$, joinKey, isNothing, tryAwaitOrIgnore,
} from '../common';
import { getExactNodeForPath } from '../templateApi/hierarchy';
import { permission } from '../authApi/permissions';
import { isLegalFilename } from '../types/file';
import { BadRequestError, ForbiddenError } from '../common/errors';
export const uploadFile = app => async (recordKey, readableStream, relativeFilePath) => apiWrapper(
app,
events.recordApi.uploadFile,
permission.updateRecord.isAuthorized(recordKey),
{ recordKey, readableStream, relativeFilePath },
_uploadFile, app, recordKey, readableStream, relativeFilePath,
);
const _uploadFile = async (app, recordKey, readableStream, relativeFilePath) => {
if (isNothing(recordKey)) { throw new BadRequestError('Record Key not supplied'); }
if (isNothing(relativeFilePath)) { throw new BadRequestError('file path not supplied'); }
if (!isLegalFilename(relativeFilePath)) { throw new BadRequestError('Illegal filename'); }
const record = await _load(app, recordKey);
const fullFilePath = safeGetFullFilePath(
recordKey, relativeFilePath,
);
const tempFilePath = `${fullFilePath}_${generate()}.temp`;
const outputStream = await app.datastore.writableFileStream(
tempFilePath,
);
return new Promise((resolve,reject) => {
readableStream.pipe(outputStream);
outputStream.on('error', reject);
outputStream.on('finish', resolve);
})
.then(() => app.datastore.getFileSize(tempFilePath))
.then(size => {
const isExpectedFileSize = checkFileSizeAgainstFields(
app, record, relativeFilePath, size
);
      if (!isExpectedFileSize) { throw new BadRequestError(`Fields for ${relativeFilePath} do not have expected size`); }
})
.then(() => tryAwaitOrIgnore(app.datastore.deleteFile, fullFilePath))
.then(() => app.datastore.renameFile(tempFilePath, fullFilePath));
/*
readableStream.pipe(outputStream);
await new Promise(fulfill => outputStream.on('finish', fulfill));
const isExpectedFileSize = checkFileSizeAgainstFields(
app,
record, relativeFilePath,
await app.datastore.getFileSize(tempFilePath),
);
if (!isExpectedFileSize) {
throw new Error(
`Fields for ${relativeFilePath} do not have expected size`);
}
await tryAwaitOrIgnore(app.datastore.deleteFile, fullFilePath);
await app.datastore.renameFile(tempFilePath, fullFilePath);
*/
};
const checkFileSizeAgainstFields = (app, record, relativeFilePath, expectedSize) => {
const recordNode = getExactNodeForPath(app.hierarchy)(record.key);
const incorrectFileFields = $(recordNode.fields, [
filter(f => f.type === 'file'
&& record[f.name].relativePath === relativeFilePath
&& record[f.name].size !== expectedSize),
map(f => f.name),
]);
const incorrectFileArrayFields = $(recordNode.fields, [
filter(a => a.type === 'array<file>'
&& $(record[a.name], [
                  some(f => f.relativePath === relativeFilePath
                               && f.size !== expectedSize),
])),
map(f => f.name),
]);
const incorrectFields = [
...incorrectFileFields,
...incorrectFileArrayFields,
];
  return incorrectFields.length === 0;
};
export const safeGetFullFilePath = (recordKey, relativeFilePath) => {
const naughtyUser = () => { throw new ForbiddenError('naughty naughty'); };
if (relativeFilePath.startsWith('..')) naughtyUser();
const pathParts = splitKey(relativeFilePath);
if (includes('..')(pathParts)) naughtyUser();
const recordKeyParts = splitKey(recordKey);
const fullPathParts = [
...recordKeyParts,
'files',
...filter(p => p !== '.')(pathParts),
];
return joinKey(fullPathParts);
};
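A usage sketch (server side, Node fs assumed for the source stream):

// sketch only - the upload streams to a temp file, then renames into place
import fs from 'fs';

const uploadPhoto = async (app) => {
  await app.recordApi.uploadFile(
    '/customers/1-abc',
    fs.createReadStream('./photo.jpg'),
    'photo.jpg',
  );
  // relative paths containing '..' are rejected by safeGetFullFilePath
};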

View file

@ -0,0 +1,85 @@
import {
map, reduce, filter,
isEmpty, flatten, each,
} from 'lodash/fp';
import { compileExpression } from '@nx-js/compiler-util';
import _ from 'lodash';
import { getExactNodeForPath } from '../templateApi/hierarchy';
import { validateFieldParse, validateTypeConstraints } from '../types';
import { $, isNothing, isNonEmptyString } from '../common';
import { _getContext } from './getContext';
const fieldParseError = (fieldName, value) => ({
fields: [fieldName],
message: `Could not parse field ${fieldName}:${value}`,
});
const validateAllFieldParse = (record, recordNode) => $(recordNode.fields, [
map(f => ({ name: f.name, parseResult: validateFieldParse(f, record) })),
reduce((errors, f) => {
if (f.parseResult.success) return errors;
errors.push(
fieldParseError(f.name, f.parseResult.value),
);
return errors;
}, []),
]);
const validateAllTypeConstraints = async (record, recordNode, context) => {
const errors = [];
for (const field of recordNode.fields) {
$(await validateTypeConstraints(field, record, context), [
filter(isNonEmptyString),
map(m => ({ message: m, fields: [field.name] })),
each(e => errors.push(e)),
]);
}
return errors;
};
const runRecordValidationRules = (record, recordNode) => {
const runValidationRule = (rule) => {
const isValid = compileExpression(rule.expressionWhenValid);
const expressionContext = { record, _ };
return (isValid(expressionContext)
? { valid: true }
: ({
valid: false,
fields: rule.invalidFields,
message: rule.messageWhenInvalid,
}));
};
return $(recordNode.validationRules, [
map(runValidationRule),
flatten,
filter(r => r.valid === false),
map(r => ({ fields: r.fields, message: r.message })),
]);
};
export const validate = app => async (record, context) => {
context = isNothing(context)
? _getContext(app, record.key)
: context;
const recordNode = getExactNodeForPath(app.hierarchy)(record.key);
const fieldParseFails = validateAllFieldParse(record, recordNode);
// non parsing would cause further issues - exit here
if (!isEmpty(fieldParseFails)) { return ({ isValid: false, errors: fieldParseFails }); }
const recordValidationRuleFails = runRecordValidationRules(record, recordNode);
const typeContraintFails = await validateAllTypeConstraints(record, recordNode, context);
if (isEmpty(fieldParseFails)
&& isEmpty(recordValidationRuleFails)
&& isEmpty(typeContraintFails)) {
return ({ isValid: true, errors: [] });
}
return ({
isValid: false,
errors: _.union(fieldParseFails, typeContraintFails, recordValidationRuleFails),
});
};
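A sketch of a record-level rule and the result shape; recordNode and record are assumed to exist already:

// sketch only - rules are compiled expressions evaluated over { record, _ }
const checkRecord = async (app, recordNode, record) => {
  recordNode.validationRules.push({
    invalidFields: ['surname'],
    messageWhenInvalid: 'surname must not be empty',
    expressionWhenValid: "!_.isEmpty(record['surname'])",
  });
  return await app.recordApi.validate(record);
  // on failure: { isValid: false,
  //   errors: [{ fields: ['surname'], message: 'surname must not be empty' }] }
};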

View file

@ -0,0 +1,23 @@
export const createTrigger = () => ({
actionName: '',
eventName: '',
// function, has access to event context,
// returns object that is used as parameter to action
// only used if triggered by event
optionsCreator: '',
// action runs if true,
// has access to event context
condition: '',
});
export const createAction = () => ({
name: '',
behaviourSource: '',
// name of function in actionSource
behaviourName: '',
// parameter passed into behaviour.
// any other parms passed at runtime e.g.
// by trigger, or manually, will be merged into this
initialOptions: {},
});
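A filled-in sketch of an action and trigger pair; the behaviour source, behaviour name and event name string are assumptions for illustration, not taken from this commit:

// sketch only - the trigger runs the named action when its event fires
const action = createAction();
action.name = 'sendWelcomeEmail';
action.behaviourSource = 'myBehaviours';
action.behaviourName = 'sendEmail';

const trigger = createTrigger();
trigger.actionName = 'sendWelcomeEmail';
trigger.eventName = 'recordApi:save:onRecordCreated'; // name format assumed
trigger.optionsCreator = 'return { to: context.record.email };';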

View file

@ -0,0 +1,223 @@
import { each, constant, find } from 'lodash';
import { map, max } from 'lodash/fp';
import {
switchCase, defaultCase, joinKey,
$, isNothing, isSomething,
} from '../common';
import {
isIndex, isRoot, isSingleRecord, isCollectionRecord,
isRecord, isaggregateGroup,
getFlattenedHierarchy,
} from './hierarchy';
import { all } from '../types';
import { BadRequestError } from '../common/errors';
export const createNodeErrors = {
indexCannotBeParent: 'Index template cannot be a parent',
allNonRootNodesMustHaveParent: 'Only the root node may have no parent',
indexParentMustBeRecordOrRoot: 'An index may only have a record or root as a parent',
aggregateParentMustBeAnIndex: 'aggregateGroup parent must be an index',
};
const pathRegxMaker = node => () => node.nodeKey().replace(/{id}/g, '[a-zA-Z0-9_-]+');
const nodeKeyMaker = node => () => switchCase(
[n => isRecord(n) && !isSingleRecord(n),
n => joinKey(
node.parent().nodeKey(),
node.collectionName,
`${n.nodeId}-{id}`,
)],
[isRoot,
constant('/')],
[defaultCase,
n => joinKey(node.parent().nodeKey(), n.name)],
)(node);
const validate = parent => (node) => {
if (isIndex(node)
&& isSomething(parent)
&& !isRoot(parent)
&& !isRecord(parent)) {
throw new BadRequestError(createNodeErrors.indexParentMustBeRecordOrRoot);
}
if (isaggregateGroup(node)
&& isSomething(parent)
&& !isIndex(parent)) {
throw new BadRequestError(createNodeErrors.aggregateParentMustBeAnIndex);
}
if (isNothing(parent) && !isRoot(node)) { throw new BadRequestError(createNodeErrors.allNonRootNodesMustHaveParent); }
return node;
};
const construct = parent => (node) => {
node.nodeKey = nodeKeyMaker(node);
node.pathRegx = pathRegxMaker(node);
node.parent = constant(parent);
node.isRoot = () => isNothing(parent)
&& node.name === 'root'
&& node.type === 'root';
if (isCollectionRecord(node)) {
node.collectionNodeKey = () => joinKey(
parent.nodeKey(), node.collectionName,
);
node.collectionPathRegx = () => joinKey(
parent.pathRegx(), node.collectionName,
);
}
return node;
};
const addToParent = (obj) => {
const parent = obj.parent();
  if (isSomething(parent)) {
    if (isIndex(obj)) {
      // Q: why are indexes not children ?
      // A: because they cannot have children of their own.
      parent.indexes.push(obj);
    } else if (isaggregateGroup(obj)) {
      parent.aggregateGroups.push(obj);
    } else {
      parent.children.push(obj);
    }
if (isRecord(obj)) {
const defaultIndex = find(
parent.indexes,
i => i.name === `${parent.name}_index`,
);
if (defaultIndex) {
defaultIndex.allowedRecordNodeIds.push(obj.nodeId);
}
}
}
return obj;
};
export const constructNode = (parent, obj) => $(obj, [
construct(parent),
validate(parent),
addToParent,
]);
const getNodeId = (parentNode) => {
// this case is handled better elsewhere
if (!parentNode) return null;
const findRoot = n => (isRoot(n) ? n : findRoot(n.parent()));
const root = findRoot(parentNode);
return ($(root, [
getFlattenedHierarchy,
map(n => n.nodeId),
max]) + 1);
};
export const constructHierarchy = (node, parent) => {
construct(parent)(node);
if (node.indexes) {
each(node.indexes,
child => constructHierarchy(child, node));
}
if (node.aggregateGroups) {
each(node.aggregateGroups,
child => constructHierarchy(child, node));
}
if (node.children && node.children.length > 0) {
each(node.children,
child => constructHierarchy(child, node));
}
if (node.fields) {
each(node.fields,
f => each(f.typeOptions, (val, key) => {
const def = all[f.type].optionDefinitions[key];
if (!def) {
// unknown typeOption
delete f.typeOptions[key];
} else {
f.typeOptions[key] = def.parse(val);
}
}));
}
return node;
};
export const getNewRootLevel = () => construct()({
name: 'root',
type: 'root',
children: [],
pathMaps: [],
indexes: [],
nodeId: 0,
});
const _getNewRecordTemplate = (parent, name, createDefaultIndex, isSingle) => {
const node = constructNode(parent, {
name,
type: 'record',
fields: [],
children: [],
validationRules: [],
nodeId: getNodeId(parent),
indexes: [],
allidsShardFactor: isRecord(parent) ? 1 : 64,
collectionName: '',
isSingle,
});
if (createDefaultIndex) {
const defaultIndex = getNewIndexTemplate(parent);
defaultIndex.name = `${name}_index`;
defaultIndex.allowedRecordNodeIds.push(node.nodeId);
}
return node;
};
export const getNewRecordTemplate = (parent, name = '', createDefaultIndex = true) => _getNewRecordTemplate(parent, name, createDefaultIndex, false);
export const getNewSingleRecordTemplate = parent => _getNewRecordTemplate(parent, '', false, true);
export const getNewIndexTemplate = (parent, type = 'ancestor') => constructNode(parent, {
name: '',
type: 'index',
map: 'return {...record};',
filter: '',
indexType: type,
getShardName: '',
getSortKey: 'record.id',
aggregateGroups: [],
allowedRecordNodeIds: [],
nodeId: getNodeId(parent),
});
export const getNewAggregateGroupTemplate = index => constructNode(index, {
name: '',
type: 'aggregateGroup',
groupBy: '',
aggregates: [],
condition: '',
nodeId: getNodeId(index),
});
export const getNewAggregateTemplate = (set) => {
const aggregatedValue = {
name: '',
aggregatedValue: '',
};
set.aggregates.push(aggregatedValue);
return aggregatedValue;
};
export default {
getNewRootLevel,
getNewRecordTemplate,
getNewIndexTemplate,
createNodeErrors,
constructHierarchy,
getNewAggregateGroupTemplate,
getNewAggregateTemplate,
};
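A construction sketch tying the templates together - building a root, a 'customer' collection record, and a global index:

// sketch only - nodeIds are assigned incrementally from the root
const root = getNewRootLevel();
const customer = getNewRecordTemplate(root, 'customer', true);
customer.collectionName = 'customers';
// customer.nodeKey() === '/customers/1-{id}' once collectionName is set
const allCustomers = getNewIndexTemplate(root);
allCustomers.name = 'all_customers';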

View file

@ -0,0 +1,12 @@
export const canDelete = (app, node) => {
/*
it must not exist on any index.allowedRecordNodeIds
it must not exist on and reference type fields
these rules should apply to any child nodes , which will also be deleted
*/
};

View file

@ -0,0 +1,87 @@
import {
some, map, filter, keys, includes,
countBy, flatten,
} from 'lodash/fp';
import {
isSomething, $,
isNonEmptyString,
isNothingOrEmpty,
isNothing,
} from '../common';
import { all, getDefaultOptions } from '../types';
import { applyRuleSet, makerule } from '../common/validationCommon';
import { BadRequestError } from '../common/errors';
export const fieldErrors = {
AddFieldValidationFailed: 'Add field validation: ',
};
export const allowedTypes = () => keys(all);
export const getNewField = type => ({
name: '', // how field is referenced internally
type,
typeOptions: getDefaultOptions(type),
label: '', // how field is displayed
getInitialValue: 'default', // function that gets value when initially created
getUndefinedValue: 'default', // function that gets value when field undefined on record
});
const fieldRules = allFields => [
makerule('name', 'field name is not set',
f => isNonEmptyString(f.name)),
makerule('type', 'field type is not set',
f => isNonEmptyString(f.type)),
makerule('label', 'field label is not set',
f => isNonEmptyString(f.label)),
makerule('getInitialValue', 'getInitialValue function is not set',
f => isNonEmptyString(f.getInitialValue)),
makerule('getUndefinedValue', 'getUndefinedValue function is not set',
f => isNonEmptyString(f.getUndefinedValue)),
makerule('name', 'field name is duplicated',
f => isNothingOrEmpty(f.name)
|| countBy('name')(allFields)[f.name] === 1),
makerule('type', 'type is unknown',
f => isNothingOrEmpty(f.type)
|| some(t => f.type === t)(allowedTypes())),
];
const typeOptionsRules = (field) => {
const type = all[field.type];
if (isNothing(type)) return [];
const def = optName => type.optionDefinitions[optName];
return $(field.typeOptions, [
keys,
filter(o => isSomething(def(o))
&& isSomething(def(o).isValid)),
map(o => makerule(
`typeOptions.${o}`,
`${def(o).requirementDescription}`,
field => def(o).isValid(field.typeOptions[o]),
)),
]);
};
export const validateField = allFields => (field) => {
const everySingleField = includes(field)(allFields) ? allFields : [...allFields, field];
return applyRuleSet([...fieldRules(everySingleField), ...typeOptionsRules(field)])(field);
};
export const validateAllFields = recordNode => $(recordNode.fields, [
map(validateField(recordNode.fields)),
flatten,
]);
export const addField = (recordTemplate, field) => {
if (isNothingOrEmpty(field.label)) {
field.label = field.name;
}
const validationMessages = validateField([...recordTemplate.fields, field])(field);
if (validationMessages.length > 0) {
const errors = map(m => m.error)(validationMessages);
throw new BadRequestError(`${fieldErrors.AddFieldValidationFailed} ${errors.join(', ')}`);
}
recordTemplate.fields.push(field);
};
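A field-addition sketch; customerTemplate stands for a record template created elsewhere:

// sketch only - addField validates the field before pushing it onto the template
const surname = getNewField('string');
surname.name = 'surname';
surname.label = 'Surname';
addField(customerTemplate, surname);
// adding a second field named 'surname' would throw BadRequestError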

View file

@ -0,0 +1,14 @@
import { appDefinitionFile } from '../common';
import { constructHierarchy } from './createNodes';
export const getApplicationDefinition = datastore => async () => {
const exists = await datastore.exists(appDefinitionFile);
if (!exists) throw new Error('Application definition does not exist');
const appDefinition = await datastore.loadJson(appDefinitionFile);
appDefinition.hierarchy = constructHierarchy(
appDefinition.hierarchy,
);
return appDefinition;
};

View file

@ -0,0 +1,4 @@
export const getBehaviourSources = async (datastore) => {
  // without the return, callers would always receive undefined
  return await datastore.loadFile('/.config/behaviourSources.js');
};

View file

@ -0,0 +1,234 @@
import {
find, constant, map, last,
first, split, intersection, take,
union, includes, filter, some,
} from 'lodash/fp';
import {
$, switchCase, isNothing, isSomething,
defaultCase, splitKey, isNonEmptyString,
joinKey, getHashCode,
} from '../common';
import { indexTypes } from './indexes';
export const getFlattenedHierarchy = (appHierarchy, useCached = true) => {
if (isSomething(appHierarchy.getFlattenedHierarchy) && useCached) { return appHierarchy.getFlattenedHierarchy(); }
const flattenHierarchy = (currentNode, flattened) => {
flattened.push(currentNode);
if ((!currentNode.children
|| currentNode.children.length === 0)
&& (!currentNode.indexes
|| currentNode.indexes.length === 0)
&& (!currentNode.aggregateGroups
|| currentNode.aggregateGroups.length === 0)) {
return flattened;
}
const unionIfAny = l2 => l1 => union(l1)(!l2 ? [] : l2);
const children = $([], [
unionIfAny(currentNode.children),
unionIfAny(currentNode.indexes),
unionIfAny(currentNode.aggregateGroups),
]);
for (const child of children) {
flattenHierarchy(child, flattened);
}
return flattened;
};
appHierarchy.getFlattenedHierarchy = () => flattenHierarchy(appHierarchy, []);
return appHierarchy.getFlattenedHierarchy();
};
export const getLastPartInKey = key => last(splitKey(key));
export const getNodesInPath = appHierarchy => key => $(appHierarchy, [
getFlattenedHierarchy,
filter(n => new RegExp(`${n.pathRegx()}`).test(key)),
]);
export const getExactNodeForPath = appHierarchy => key => $(appHierarchy, [
getFlattenedHierarchy,
find(n => new RegExp(`${n.pathRegx()}$`).test(key)),
]);
export const getNodeForCollectionPath = appHierarchy => collectionKey => $(appHierarchy, [
getFlattenedHierarchy,
find(n => (isCollectionRecord(n)
&& new RegExp(`${n.collectionPathRegx()}$`).test(collectionKey))),
]);
export const hasMatchingAncestor = ancestorPredicate => decendantNode => switchCase(
[node => isNothing(node.parent()),
constant(false)],
[node => ancestorPredicate(node.parent()),
constant(true)],
[defaultCase,
node => hasMatchingAncestor(ancestorPredicate)(node.parent())],
)(decendantNode);
export const getNode = (appHierarchy, nodeKey) => $(appHierarchy, [
getFlattenedHierarchy,
find(n => n.nodeKey() === nodeKey
|| (isCollectionRecord(n)
&& n.collectionNodeKey() === nodeKey)),
]);
export const getCollectionNode = (appHierarchy, nodeKey) => $(appHierarchy, [
getFlattenedHierarchy,
find(n => (isCollectionRecord(n)
&& n.collectionNodeKey() === nodeKey)),
]);
export const getNodeByKeyOrNodeKey = (appHierarchy, keyOrNodeKey) => {
const nodeByKey = getExactNodeForPath(appHierarchy)(keyOrNodeKey);
return isNothing(nodeByKey)
? getNode(appHierarchy, keyOrNodeKey)
: nodeByKey;
};
export const getCollectionNodeByKeyOrNodeKey = (appHierarchy, keyOrNodeKey) => {
const nodeByKey = getNodeForCollectionPath(appHierarchy)(keyOrNodeKey);
return isNothing(nodeByKey)
? getCollectionNode(appHierarchy, keyOrNodeKey)
: nodeByKey;
};
export const isNode = (appHierarchy, key) => isSomething(getExactNodeForPath(appHierarchy)(key));
export const getActualKeyOfParent = (parentNodeKey, actualChildKey) => $(actualChildKey, [
splitKey,
take(splitKey(parentNodeKey).length),
ks => joinKey(...ks),
]);
export const getParentKey = (key) => {
return $(key, [
splitKey,
take(splitKey(key).length - 1),
joinKey,
]);
};
export const isKeyAncestorOf = ancestorKey => decendantNode => hasMatchingAncestor(p => p.nodeKey() === ancestorKey)(decendantNode);
export const hasNoMatchingAncestors = parentPredicate => node => !hasMatchingAncestor(parentPredicate)(node);
export const findField = (recordNode, fieldName) => find(f => f.name === fieldName)(recordNode.fields);
export const isAncestor = decendant => ancestor => isKeyAncestorOf(ancestor.nodeKey())(decendant);
export const isDecendant = ancestor => decendant => isAncestor(decendant)(ancestor);
export const getRecordNodeId = recordKey => $(recordKey, [
splitKey,
last,
getRecordNodeIdFromId,
]);
export const getRecordNodeIdFromId = recordId => $(recordId, [split('-'), first, parseInt]);
export const getRecordNodeById = (hierarchy, recordId) => $(hierarchy, [
getFlattenedHierarchy,
find(n => isRecord(n)
&& n.nodeId === getRecordNodeIdFromId(recordId)),
]);
export const recordNodeIdIsAllowed = indexNode => nodeId => indexNode.allowedRecordNodeIds.length === 0
|| includes(nodeId)(indexNode.allowedRecordNodeIds);
export const recordNodeIsAllowed = indexNode => recordNode => recordNodeIdIsAllowed(indexNode)(recordNode.nodeId);
export const getAllowedRecordNodesForIndex = (appHierarchy, indexNode) => {
const recordNodes = $(appHierarchy, [
getFlattenedHierarchy,
filter(isRecord),
]);
if (isGlobalIndex(indexNode)) {
return $(recordNodes, [
filter(recordNodeIsAllowed(indexNode)),
]);
}
if (isAncestorIndex(indexNode)) {
return $(recordNodes, [
filter(isDecendant(indexNode.parent())),
filter(recordNodeIsAllowed(indexNode)),
]);
}
if (isReferenceIndex(indexNode)) {
return $(recordNodes, [
filter(n => some(fieldReversesReferenceToIndex(indexNode))(n.fields)),
]);
}
};
export const getNodeFromNodeKeyHash = hierarchy => hash => $(hierarchy, [
getFlattenedHierarchy,
find(n => getHashCode(n.nodeKey()) === hash),
]);
export const isRecord = node => isSomething(node) && node.type === 'record';
export const isSingleRecord = node => isRecord(node) && node.isSingle;
export const isCollectionRecord = node => isRecord(node) && !node.isSingle;
export const isIndex = node => isSomething(node) && node.type === 'index';
export const isaggregateGroup = node => isSomething(node) && node.type === 'aggregateGroup';
export const isShardedIndex = node => isIndex(node) && isNonEmptyString(node.getShardName);
export const isRoot = node => isSomething(node) && node.isRoot();
export const isDecendantOfARecord = hasMatchingAncestor(isRecord);
export const isGlobalIndex = node => isIndex(node) && isRoot(node.parent());
export const isReferenceIndex = node => isIndex(node) && node.indexType === indexTypes.reference;
export const isAncestorIndex = node => isIndex(node) && node.indexType === indexTypes.ancestor;
export const fieldReversesReferenceToNode = node => field => field.type === 'reference'
&& intersection(field.typeOptions.reverseIndexNodeKeys)(map(i => i.nodeKey())(node.indexes))
.length > 0;
export const fieldReversesReferenceToIndex = indexNode => field => field.type === 'reference'
&& intersection(field.typeOptions.reverseIndexNodeKeys)([indexNode.nodeKey()])
.length > 0;
export default {
getLastPartInKey,
getNodesInPath,
getExactNodeForPath,
hasMatchingAncestor,
getNode,
getNodeByKeyOrNodeKey,
isNode,
getActualKeyOfParent,
getParentKey,
isKeyAncestorOf,
hasNoMatchingAncestors,
findField,
isAncestor,
isDecendant,
getRecordNodeId,
getRecordNodeIdFromId,
getRecordNodeById,
recordNodeIdIsAllowed,
recordNodeIsAllowed,
getAllowedRecordNodesForIndex,
getNodeFromNodeKeyHash,
isRecord,
isCollectionRecord,
isIndex,
isaggregateGroup,
isShardedIndex,
isRoot,
isDecendantOfARecord,
isGlobalIndex,
isReferenceIndex,
isAncestorIndex,
fieldReversesReferenceToNode,
fieldReversesReferenceToIndex,
getFlattenedHierarchy,
};
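A matching sketch of key-to-node resolution; keys are matched against each node's pathRegx:

// sketch only - '{id}' in a nodeKey matches any record id segment
const node = getExactNodeForPath(app.hierarchy)('/customers/1-abc123');
// node.nodeKey() === '/customers/1-{id}'
// node.pathRegx() === '/customers/1-[a-zA-Z0-9_-]+'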

View file

@ -0,0 +1,64 @@
import {
getNewRootLevel,
getNewRecordTemplate, getNewIndexTemplate,
createNodeErrors, constructHierarchy,
getNewAggregateGroupTemplate, getNewSingleRecordTemplate,
getNewAggregateTemplate, constructNode,
}
from './createNodes';
import {
getNewField, validateField,
addField, fieldErrors,
} from './fields';
import {
getNewRecordValidationRule, commonRecordValidationRules,
addRecordValidationRule,
} from './recordValidationRules';
import { createAction, createTrigger } from './createActions';
import {
validateTriggers, validateTrigger, validateNode,
validateActions, validateAll,
} from './validate';
import { getApplicationDefinition } from './getApplicationDefinition';
import { saveApplicationHierarchy } from './saveApplicationHierarchy';
import { saveActionsAndTriggers } from './saveActionsAndTriggers';
import { all } from '../types';
import { getBehaviourSources } from "./getBehaviourSources";
const api = app => ({
getApplicationDefinition: getApplicationDefinition(app.datastore),
saveApplicationHierarchy: saveApplicationHierarchy(app),
saveActionsAndTriggers: saveActionsAndTriggers(app),
getBehaviourSources: () => getBehaviourSources(app.datastore),
getNewRootLevel,
constructNode,
getNewIndexTemplate,
getNewRecordTemplate,
getNewField,
validateField,
addField,
fieldErrors,
getNewRecordValidationRule,
commonRecordValidationRules,
addRecordValidationRule,
createAction,
createTrigger,
validateActions,
validateTrigger,
getNewAggregateGroupTemplate,
getNewAggregateTemplate,
constructHierarchy,
getNewSingleRecordTemplate,
allTypes: all,
validateNode,
validateAll,
validateTriggers,
});
export const getTemplateApi = app => api(app);
export const errors = createNodeErrors;
export default getTemplateApi;

View file

@ -0,0 +1,38 @@
import {
map, isEmpty, countBy, flatten, includes,
} from 'lodash/fp';
import { join, keys } from 'lodash';
import { applyRuleSet, makerule } from '../common/validationCommon';
import { compileFilter, compileMap } from '../indexing/evaluate';
import { isNonEmptyString, executesWithoutException, $ } from '../common';
import { isRecord } from './hierarchy';
export const indexTypes = { reference: 'reference', ancestor: 'ancestor' };
export const indexRuleSet = [
makerule('map', 'index has no map function',
index => isNonEmptyString(index.map)),
makerule('map', "index's map function does not compile",
index => !isNonEmptyString(index.map)
|| executesWithoutException(() => compileMap(index))),
makerule('filter', "index's filter function does not compile",
index => !isNonEmptyString(index.filter)
|| executesWithoutException(() => compileFilter(index))),
makerule('name', 'must declare a name for index',
index => isNonEmptyString(index.name)),
makerule('name', 'there is a duplicate named index on this node',
index => isEmpty(index.name)
|| countBy('name')(index.parent().indexes)[index.name] === 1),
makerule('indexType', 'reference index may only exist on a record node',
index => isRecord(index.parent())
|| index.indexType !== indexTypes.reference),
makerule('indexType', `index type must be one of: ${join(', ', keys(indexTypes))}`,
index => includes(index.indexType)(keys(indexTypes))),
];
export const validateIndex = (index, allReferenceIndexesOnNode) => applyRuleSet(indexRuleSet)(index);
export const validateAllIndexes = node => $(node.indexes, [
map(i => validateIndex(i, node.indexes)),
flatten,
]);

View file

@ -0,0 +1,37 @@
import { isNumber, isBoolean, defaultCase } from 'lodash/fp';
import { switchCase } from '../common';
export const getNewRecordValidationRule = (invalidFields,
messageWhenInvalid,
expressionWhenValid) => ({
invalidFields, messageWhenInvalid, expressionWhenValid,
});
const getStaticValue = switchCase(
[isNumber, v => v.toString()],
[isBoolean, v => v.toString()],
[defaultCase, v => `'${v}'`],
);
export const commonRecordValidationRules = ({
fieldNotEmpty: fieldName => getNewRecordValidationRule(
[fieldName],
`${fieldName} is empty`,
`!_.isEmpty(record['${fieldName}'])`,
),
fieldBetween: (fieldName, min, max) => getNewRecordValidationRule(
[fieldName],
`${fieldName} must be between ${min.toString()} and ${max.toString()}`,
`record['${fieldName}'] >= ${getStaticValue(min)} && record['${fieldName}'] <= ${getStaticValue(max)} `,
),
  fieldGreaterThan: (fieldName, min) => getNewRecordValidationRule(
    [fieldName],
    `${fieldName} must be greater than or equal to ${min.toString()}`,
    `record['${fieldName}'] >= ${getStaticValue(min)} `,
  ),
});
export const addRecordValidationRule = recordNode => rule => recordNode.validationRules.push(rule);
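A short usage sketch of the common rules; customerNode is illustrative:

// sketch only - attach ready-made rules to a record node
addRecordValidationRule(customerNode)(
  commonRecordValidationRules.fieldNotEmpty('surname'),
);
addRecordValidationRule(customerNode)(
  commonRecordValidationRules.fieldBetween('age', 0, 120),
);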

View file

@ -0,0 +1,40 @@
import { join } from 'lodash';
import { map } from 'lodash/fp';
import { appDefinitionFile } from '../common';
import { validateTriggers, validateActions } from './validate';
import { apiWrapper } from '../common/apiWrapper';
import { events } from '../common/events';
import { permission } from '../authApi/permissions';
import { BadRequestError } from '../common/errors';
export const saveActionsAndTriggers = app => async (actions, triggers) => apiWrapper(
app,
events.templateApi.saveActionsAndTriggers,
permission.writeTemplates.isAuthorized,
{ actions, triggers },
_saveActionsAndTriggers, app.datastore, actions, triggers,
);
export const _saveActionsAndTriggers = async (datastore, actions, triggers) => {
if (await datastore.exists(appDefinitionFile)) {
const appDefinition = await datastore.loadJson(appDefinitionFile);
appDefinition.actions = actions;
appDefinition.triggers = triggers;
const actionValidErrs = map(e => e.error)(validateActions(actions));
if (actionValidErrs.length > 0) {
throw new BadRequestError(`Actions are invalid: ${join(actionValidErrs, ', ')}`);
}
const triggerValidErrs = map(e => e.error)(validateTriggers(triggers, actions));
if (triggerValidErrs.length > 0) {
throw new BadRequestError(`Triggers are invalid: ${join(triggerValidErrs, ', ')}`);
}
await datastore.updateJson(appDefinitionFile, appDefinition);
} else {
throw new BadRequestError('Cannot save actions: Application definition does not exist');
}
};

View file

@ -0,0 +1,35 @@
import { join } from 'lodash';
import { permission } from '../authApi/permissions';
import { appDefinitionFile } from '../common';
import { validateAll } from './validate';
import { apiWrapper } from '../common/apiWrapper';
import { events } from '../common/events';
export const saveApplicationHierarchy = app => async hierarchy => apiWrapper(
app,
events.templateApi.saveApplicationHierarchy,
permission.writeTemplates.isAuthorized,
{ hierarchy },
_saveApplicationHierarchy, app.datastore, hierarchy,
);
export const _saveApplicationHierarchy = async (datastore, hierarchy) => {
const validationErrors = await validateAll(hierarchy);
if (validationErrors.length > 0) {
throw new Error(`Hierarchy is invalid: ${join(
validationErrors.map(e => `${e.item.nodeKey ? e.item.nodeKey() : ''} : ${e.error}`),
',',
)}`);
}
if (await datastore.exists(appDefinitionFile)) {
const appDefinition = await datastore.loadJson(appDefinitionFile);
appDefinition.hierarchy = hierarchy;
await datastore.updateJson(appDefinitionFile, appDefinition);
} else {
await datastore.createFolder('/.config');
const appDefinition = { actions: [], triggers: [], hierarchy };
await datastore.createJson(appDefinitionFile, appDefinition);
}
};

View file

@ -0,0 +1,183 @@
import {
filter, union, constant,
map, flatten, every, uniqBy,
some, includes, isEmpty,
} from 'lodash/fp';
import { has } from 'lodash';
import { compileExpression, compileCode } from '@nx-js/compiler-util';
import {
$, isSomething, switchCase,
anyTrue, isNonEmptyArray, executesWithoutException,
isNonEmptyString, defaultCase,
} from '../common';
import {
isRecord, isRoot, isaggregateGroup,
isIndex, getFlattenedHierarchy,
} from './hierarchy';
import { eventsList } from '../common/events';
import { validateAllFields } from './fields';
import {
applyRuleSet, makerule, stringNotEmpty,
validationError,
} from '../common/validationCommon';
import { indexRuleSet } from './indexes';
import { validateAllAggregates } from './validateAggregate';
export const ruleSet = (...sets) => constant(flatten([...sets]));
const commonRules = [
makerule('name', 'node name is not set',
node => stringNotEmpty(node.name)),
makerule('type', 'node type not recognised',
anyTrue(isRecord, isRoot, isIndex, isaggregateGroup)),
];
const recordRules = [
makerule('fields', 'no fields have been added to the record',
node => isNonEmptyArray(node.fields)),
makerule('validationRules', "validation rule is missing a 'messageWhenInvalid' member",
node => every(r => has(r, 'messageWhenInvalid'))(node.validationRules)),
makerule('validationRules', "validation rule is missing an 'expressionWhenValid' member",
node => every(r => has(r, 'expressionWhenValid'))(node.validationRules)),
];
const aggregateGroupRules = [
makerule('condition', 'condition does not compile',
a => isEmpty(a.condition)
|| executesWithoutException(
() => compileExpression(a.condition),
)),
];
const getRuleSet = node => switchCase(
[isRecord, ruleSet(
commonRules,
recordRules,
)],
[isIndex, ruleSet(
commonRules,
indexRuleSet,
)],
[isaggregateGroup, ruleSet(
commonRules,
aggregateGroupRules,
)],
[defaultCase, ruleSet(commonRules, [])],
)(node);
export const validateNode = node => applyRuleSet(getRuleSet(node))(node);
export const validateAll = (appHierarchy) => {
const flattened = getFlattenedHierarchy(
appHierarchy,
);
const duplicateNameRule = makerule(
'name', 'node names must be unique under shared parent',
n => filter(f => f.parent() === n.parent()
&& f.name === n.name)(flattened).length === 1,
);
const duplicateNodeKeyErrors = $(flattened, [
map(n => applyRuleSet([duplicateNameRule])(n)),
filter(isSomething),
flatten,
]);
const fieldErrors = $(flattened, [
filter(isRecord),
map(validateAllFields),
flatten,
]);
const aggregateErrors = $(flattened, [
filter(isaggregateGroup),
map(s => validateAllAggregates(
s.aggregates,
)),
flatten,
]);
return $(flattened, [
map(validateNode),
flatten,
union(duplicateNodeKeyErrors),
union(fieldErrors),
union(aggregateErrors),
]);
};
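A sketch of consuming the result - the error shape (`item` plus `error`) is inferred from how saveApplicationHierarchy formats these errors above:

const errors = validateAll(appHierarchy);
for (const e of errors) {
  const nodeKey = e.item.nodeKey ? e.item.nodeKey() : '';
  console.log(`${nodeKey}: ${e.error}`);
}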
const actionRules = [
makerule('name', 'action must have a name',
a => isNonEmptyString(a.name)),
makerule('behaviourName', 'must supply a behaviour name to the action',
a => isNonEmptyString(a.behaviourName)),
makerule('behaviourSource', 'must supply a behaviour source for the action',
a => isNonEmptyString(a.behaviourSource)),
];
// the predicate never runs - this rule only labels the duplicate-name errors built below
const duplicateActionRule = makerule('', 'action name must be unique', () => {});
const validateAction = action => applyRuleSet(actionRules)(action);
export const validateActions = (allActions) => {
const duplicateActions = $(allActions, [
filter(a => filter(a2 => a2.name === a.name)(allActions).length > 1),
map(a => validationError(duplicateActionRule, a)),
]);
const errors = $(allActions, [
map(validateAction),
flatten,
union(duplicateActions),
uniqBy('name'),
]);
return errors;
};
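For example (a minimal sketch; the exact error objects come from `applyRuleSet` and `validationError`):

const errors = validateActions([
  { name: 'log', behaviourSource: 'common', behaviourName: 'log' },
  { name: 'log', behaviourSource: 'common', behaviourName: 'logAgain' },
]);
// the shared name trips duplicateActionRule:
// expect an 'action name must be unique' error in the result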
const triggerRules = actions => ([
makerule('actionName', 'must specify an action',
t => isNonEmptyString(t.actionName)),
makerule('eventName', 'must specify an event',
t => isNonEmptyString(t.eventName)),
makerule('actionName', 'specified action not supplied',
t => !t.actionName
|| some(a => a.name === t.actionName)(actions)),
makerule('eventName', 'invalid event name',
t => !t.eventName
|| includes(t.eventName)(eventsList)),
makerule('optionsCreator', 'Options Creator does not compile - check your expression',
(t) => {
if (!t.optionsCreator) return true;
try {
compileCode(t.optionsCreator);
return true;
} catch (_) { return false; }
}),
makerule('condition', 'Trigger condition does not compile - check your expression',
(t) => {
if (!t.condition) return true;
try {
compileExpression(t.condition);
return true;
} catch (_) { return false; }
}),
]);
export const validateTrigger = (trigger, allActions) => {
const errors = applyRuleSet(triggerRules(allActions))(trigger);
return errors;
};
export const validateTriggers = (triggers, allActions) => $(triggers, [
map(t => validateTrigger(t, allActions)),
flatten,
]);
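A hedged sketch - the trigger below names an action that is not in `allActions`, so the 'specified action not supplied' rule fails (the event name is assumed valid):

const allActions = [
  { name: 'log', behaviourSource: 'common', behaviourName: 'log' },
];
const errors = validateTriggers(
  [{ actionName: 'archive', eventName: 'recordApi:save:onComplete' }],
  allActions,
);
// errors will include the 'specified action not supplied' failure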

View file

@ -0,0 +1,25 @@
import { flatten, map } from 'lodash/fp';
import { isEmpty } from 'lodash';
import { compileCode } from '@nx-js/compiler-util';
import {
isNonEmptyString,
executesWithoutException, $,
} from '../common';
import { applyRuleSet, makerule } from '../common/validationCommon';
const aggregateRules = [
makerule('name', 'choose a name for the aggregate',
a => isNonEmptyString(a.name)),
makerule('aggregatedValue', 'aggregatedValue does not compile',
a => isEmpty(a.aggregatedValue)
|| executesWithoutException(
() => compileCode(a.aggregatedValue),
)),
];
export const validateAggregate = aggregate => applyRuleSet(aggregateRules)(aggregate);
export const validateAllAggregates = all => $(all, [
map(validateAggregate),
flatten,
]);

View file

@ -0,0 +1,47 @@
import { map } from 'lodash/fp';
import { retrieve } from './retrieve';
import { executeTransactions } from './execute';
import {
$, joinKey, getLock, isNolock, releaseLock,
} from '../common';
import {
LOCK_FILE_KEY, TRANSACTIONS_FOLDER,
timeoutMilliseconds, getTransactionId,
maxLockRetries,
} from './transactionsCommon';
export const cleanup = async (app) => {
const lock = await getTransactionLock(app);
if (isNolock(lock)) return;
try {
const transactions = await retrieve(app);
if (transactions.length > 0) {
await executeTransactions(app)(transactions);
const folder = transactions.folderKey
? transactions.folderKey
: TRANSACTIONS_FOLDER;
const deleteFiles = $(transactions, [
map(t => joinKey(
folder,
getTransactionId(
t.recordId, t.transactionType,
t.uniqueId,
),
)),
map(app.datastore.deleteFile),
]);
await Promise.all(deleteFiles);
}
} finally {
await releaseLock(app, lock);
}
};
const getTransactionLock = async app => await getLock(
app, LOCK_FILE_KEY,
timeoutMilliseconds, maxLockRetries,
);
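cleanup is safe to call repeatedly - it exits quietly when another process holds the lock - so a simple scheduler is enough (a sketch, assuming a long-running `app` instance):

// flush pending transactions roughly every 30 seconds; failures are
// logged and retried on the next tick rather than crashing the host
setInterval(() => {
  cleanup(app).catch(err => console.error('transaction cleanup failed', err));
}, 30 * 1000);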

View file

@ -0,0 +1,69 @@
import { generate } from 'shortid';
import { joinKey } from '../common';
import { getLastPartInKey } from '../templateApi/hierarchy';
import {
IndexNodeKeyFolder, BUILDINDEX_BATCH_COUNT,
IndexNodeKeyBatchFolder, TRANSACTIONS_FOLDER, getTransactionId, CREATE_RECORD_TRANSACTION, UPDATE_RECORD_TRANSACTION,
DELETE_RECORD_TRANSACTION, BUILD_INDEX_TRANSACTION,
} from './transactionsCommon';
export const transactionForCreateRecord = async (app, record) => await transaction(
app.datastore, CREATE_RECORD_TRANSACTION,
record.key, { record },
getTransactionKey_Records,
);
export const transactionForUpdateRecord = async (app, oldRecord, newRecord) => await transaction(
app.datastore, UPDATE_RECORD_TRANSACTION,
newRecord.key, { oldRecord, record: newRecord },
getTransactionKey_Records,
);
export const transactionForDeleteRecord = async (app, record) => await transaction(
app.datastore, DELETE_RECORD_TRANSACTION,
record.key, { record },
getTransactionKey_Records,
);
export const transactionForBuildIndex = async (app, indexNodeKey, recordKey, count) => {
const transactionFolder = IndexNodeKeyBatchFolder(indexNodeKey, count);
if (count % BUILDINDEX_BATCH_COUNT === 0) {
await app.datastore.createFolder(transactionFolder);
}
return await transaction(
app.datastore, BUILD_INDEX_TRANSACTION,
recordKey, { recordKey },
id => joinKey(transactionFolder, id),
);
};
export const createBuildIndexFolder = async (datastore, indexNodeKey) => await datastore.createFolder(
IndexNodeKeyFolder(indexNodeKey),
);
const getTransactionKey_Records = id => joinKey(TRANSACTIONS_FOLDER, id);
const transaction = async (datastore, transactionType, recordKey, data, getTransactionKey) => {
const recordId = getLastPartInKey(recordKey);
const uniqueId = generate();
const id = getTransactionId(
recordId, transactionType, uniqueId,
);
const key = getTransactionKey(id);
const trans = {
transactionType,
recordKey,
...data,
id,
};
await datastore.createJson(
key, trans,
);
return trans;
};
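Putting the pieces together, a hedged sketch of what a create transaction looks like on disk (`keySep` is assumed to be '/'; `idSep` and the folder name come from transactionsCommon below):

const trans = await transactionForCreateRecord(app, record);
// trans.id has the form '<recordId>$create$<shortid>'
// and is persisted as JSON at '/.transactions/<trans.id>'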

View file

@ -0,0 +1,349 @@
import {
filter, map, isUndefined, includes,
flatten, intersectionBy,
isEqual, pull, keys,
differenceBy, difference,
} from 'lodash/fp';
import { union } from 'lodash';
import {
getRelevantAncestorIndexes,
getRelevantReverseReferenceIndexes,
} from '../indexing/relevant';
import { evaluate } from '../indexing/evaluate';
import {
$, isSomething,
isNonEmptyArray, joinKey,
isNonEmptyString,
} from '../common';
import { getIndexedDataKey } from '../indexing/sharding';
import {
isUpdate, isCreate,
isDelete, isBuildIndex,
} from './transactionsCommon';
import { applyToShard } from '../indexing/apply';
import {
getActualKeyOfParent,
isGlobalIndex, fieldReversesReferenceToIndex, isReferenceIndex,
getExactNodeForPath,
} from '../templateApi/hierarchy';
export const executeTransactions = app => async (transactions) => {
const recordsByShard = mappedRecordsByIndexShard(app.hierarchy, transactions);
for (const shard of keys(recordsByShard)) {
await applyToShard(
app.hierarchy, app.datastore,
recordsByShard[shard].indexKey,
recordsByShard[shard].indexNode,
shard,
recordsByShard[shard].writes,
recordsByShard[shard].removes,
);
}
};
const mappedRecordsByIndexShard = (hierarchy, transactions) => {
const updates = getUpdateTransactionsByShard(
hierarchy, transactions,
);
const created = getCreateTransactionsByShard(
hierarchy, transactions,
);
const deletes = getDeleteTransactionsByShard(
hierarchy, transactions,
);
const indexBuild = getBuildIndexTransactionsByShard(
hierarchy,
transactions,
);
const toRemove = [
...deletes,
...updates.toRemove,
];
const toWrite = [
...created,
...updates.toWrite,
...indexBuild,
];
const transByShard = {};
const initialiseShard = (t) => {
if (isUndefined(transByShard[t.indexShardKey])) {
transByShard[t.indexShardKey] = {
writes: [],
removes: [],
indexKey: t.indexKey,
indexNodeKey: t.indexNodeKey,
indexNode: t.indexNode,
};
}
};
for (const trans of toWrite) {
initialiseShard(trans);
transByShard[trans.indexShardKey].writes.push(
trans.mappedRecord.result,
);
}
for (const trans of toRemove) {
initialiseShard(trans);
transByShard[trans.indexShardKey].removes.push(
trans.mappedRecord.result.key,
);
}
return transByShard;
};
const getUpdateTransactionsByShard = (hierarchy, transactions) => {
const updateTransactions = $(transactions, [filter(isUpdate)]);
const evaluateIndex = (record, indexNodeAndPath) => {
const mappedRecord = evaluate(record)(indexNodeAndPath.indexNode);
return ({
mappedRecord,
indexNode: indexNodeAndPath.indexNode,
indexKey: indexNodeAndPath.indexKey,
indexShardKey: getIndexedDataKey(
indexNodeAndPath.indexNode,
indexNodeAndPath.indexKey,
mappedRecord.result,
),
});
};
const getIndexNodesToApply = indexFilter => (t, indexes) => $(indexes, [
map(n => ({
old: evaluateIndex(t.oldRecord, n),
new: evaluateIndex(t.record, n),
})),
filter(indexFilter),
]);
const toRemoveFilter = (n, isUnreferenced) => n.old.mappedRecord.passedFilter === true
&& (n.new.mappedRecord.passedFilter === false
|| isUnreferenced);
const toAddFilter = (n, isNewlyReferenced) => (n.old.mappedRecord.passedFilter === false
|| isNewlyReferenced)
&& n.new.mappedRecord.passedFilter === true;
const toUpdateFilter = n => n.new.mappedRecord.passedFilter === true
&& n.old.mappedRecord.passedFilter === true
&& !isEqual(n.old.mappedRecord.result,
n.new.mappedRecord.result);
const toRemove = [];
const toWrite = [];
for (const t of updateTransactions) {
const ancestorIdxs = getRelevantAncestorIndexes(
hierarchy, t.record,
);
const referenceChanges = diffReverseRefForUpdate(
hierarchy, t.oldRecord, t.record,
);
// old records to remove (filtered out)
const filteredOut_toRemove = union(
getIndexNodesToApply(toRemoveFilter)(t, ancestorIdxs),
// still referenced - check filter
getIndexNodesToApply(toRemoveFilter)(t, referenceChanges.notChanged),
// unreferenced - remove from the index if already present
getIndexNodesToApply(n => toRemoveFilter(n, true))(t, referenceChanges.unReferenced),
);
// new records to add (filtered in)
const filteredIn_toAdd = union(
getIndexNodesToApply(toAddFilter)(t, ancestorIdxs),
// newly referenced - check filter
getIndexNodesToApply(n => toAddFilter(n, true))(t, referenceChanges.newlyReferenced),
// reference unchanged - rerun filter in case something else changed
getIndexNodesToApply(toAddFilter)(t, referenceChanges.notChanged),
);
const changed = union(
getIndexNodesToApply(toUpdateFilter)(t, ancestorIdxs),
// still referenced - recheck filter
getIndexNodesToApply(toUpdateFilter)(t, referenceChanges.notChanged),
);
const shardKeyChanged = $(changed, [
filter(c => c.old.indexShardKey !== c.new.indexShardKey),
]);
const changedInSameShard = $(shardKeyChanged, [
difference(changed),
]);
for (const res of shardKeyChanged) {
pull(res)(changed);
filteredOut_toRemove.push(res);
filteredIn_toAdd.push(res);
}
toRemove.push(
$(filteredOut_toRemove, [
map(i => i.old),
]),
);
toWrite.push(
$(filteredIn_toAdd, [
map(i => i.new),
]),
);
toWrite.push(
$(changedInSameShard, [
map(i => i.new),
]),
);
}
return ({
toRemove: flatten(toRemove),
toWrite: flatten(toWrite),
});
};
const getBuildIndexTransactionsByShard = (hierarchy, transactions) => {
const buildTransactions = $(transactions, [filter(isBuildIndex)]);
if (!isNonEmptyArray(buildTransactions)) return [];
// retrieve() attaches the index node to a build-transaction batch (see retrieve.js)
const indexNode = transactions.indexNode;
const getIndexKeys = (t) => {
if (isGlobalIndex(indexNode)) {
return [indexNode.nodeKey()];
}
if (isReferenceIndex(indexNode)) {
const recordNode = getExactNodeForPath(hierarchy)(t.record.key);
const refFields = $(recordNode.fields, [
filter(fieldReversesReferenceToIndex(indexNode)),
]);
const indexKeys = [];
for (const refField of refFields) {
const refValue = t.record[refField.name];
if (isSomething(refValue)
&& isNonEmptyString(refValue.key)) {
const indexKey = joinKey(
refValue.key,
indexNode.name,
);
if (!includes(indexKey)(indexKeys)) { indexKeys.push(indexKey); }
}
}
return indexKeys;
}
return [joinKey(
getActualKeyOfParent(
indexNode.parent().nodeKey(),
t.record.key,
),
indexNode.name,
)];
};
return $(buildTransactions, [
map((t) => {
const mappedRecord = evaluate(t.record)(indexNode);
if (!mappedRecord.passedFilter) return null;
const indexKeys = getIndexKeys(t);
return $(indexKeys, [
map(indexKey => ({
mappedRecord,
indexNode,
indexKey,
indexShardKey: getIndexedDataKey(
indexNode,
indexKey,
mappedRecord.result,
),
})),
]);
}),
flatten,
filter(isSomething),
]);
};
const get_Create_Delete_TransactionsByShard = pred => (hierarchy, transactions) => {
const createTransactions = $(transactions, [filter(pred)]);
const getIndexNodesToApply = (t, indexes) => $(indexes, [
map((n) => {
const mappedRecord = evaluate(t.record)(n.indexNode);
return ({
mappedRecord,
indexNode: n.indexNode,
indexKey: n.indexKey,
indexShardKey: getIndexedDataKey(
n.indexNode,
n.indexKey,
mappedRecord.result,
),
});
}),
filter(n => n.mappedRecord.passedFilter),
]);
const allToApply = [];
for (const t of createTransactions) {
const ancestorIdxs = getRelevantAncestorIndexes(hierarchy, t.record);
const reverseRef = getRelevantReverseReferenceIndexes(hierarchy, t.record);
allToApply.push(
getIndexNodesToApply(t, ancestorIdxs),
);
allToApply.push(
getIndexNodesToApply(t, reverseRef),
);
}
return flatten(allToApply);
};
const getDeleteTransactionsByShard = get_Create_Delete_TransactionsByShard(isDelete);
const getCreateTransactionsByShard = get_Create_Delete_TransactionsByShard(isCreate);
const diffReverseRefForUpdate = (appHierarchy, oldRecord, newRecord) => {
const oldIndexes = getRelevantReverseReferenceIndexes(
appHierarchy, oldRecord,
);
const newIndexes = getRelevantReverseReferenceIndexes(
appHierarchy, newRecord,
);
const unReferenced = differenceBy(
i => i.indexKey,
oldIndexes, newIndexes,
);
const newlyReferenced = differenceBy(
i => i.indexKey,
newIndexes, oldIndexes,
);
const notChanged = intersectionBy(
i => i.indexKey,
newIndexes, oldIndexes,
);
return {
unReferenced,
newlyReferenced,
notChanged,
};
};
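To make the grouping concrete, the intermediate `transByShard` map built by `mappedRecordsByIndexShard` looks roughly like this (shape only - keys and values here are hypothetical):

// {
//   '/customers/default/0123.csv': {           // one entry per index shard
//     writes:  [mappedRecordA, mappedRecordB], // rows to upsert
//     removes: ['/customers/1-abcd'],          // row keys to delete
//     indexKey: '/customers/default',
//     indexNode: { /* index node from the hierarchy */ },
//   },
// }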

View file

@ -0,0 +1,206 @@
import {
map, filter, groupBy, split,
some, find,
} from 'lodash/fp';
import {
LOCK_FILENAME, TRANSACTIONS_FOLDER, idSep, isUpdate,
nodeKeyHashFromBuildFolder, isBuildIndexFolder, getTransactionId,
isDelete, isCreate,
} from './transactionsCommon';
import {
joinKey, $, none, isSomething,
} from '../common';
import { getLastPartInKey, getNodeFromNodeKeyHash } from '../templateApi/hierarchy';
import { _load } from '../recordApi/load';
export const retrieve = async (app) => {
const transactionFiles = await app.datastore.getFolderContents(
TRANSACTIONS_FOLDER,
);
let transactions = [];
if (some(isBuildIndexFolder)(transactionFiles)) {
const buildIndexFolder = find(isBuildIndexFolder)(transactionFiles);
transactions = await retrieveBuildIndexTransactions(
app,
joinKey(TRANSACTIONS_FOLDER, buildIndexFolder),
);
}
if (transactions.length > 0) return transactions;
return await retrieveStandardTransactions(
app, transactionFiles,
);
};
const retrieveBuildIndexTransactions = async (app, buildIndexFolder) => {
const childFolders = await app.datastore.getFolderContents(buildIndexFolder);
if (childFolders.length === 0) {
// cleanup
await app.datastore.deleteFolder(buildIndexFolder);
return [];
}
const getTransactionFiles = async (childFolderIndex = 0) => {
if (childFolderIndex >= childFolders.length) return [];
const childFolderKey = joinKey(buildIndexFolder, childFolders[childFolderIndex]);
const files = await app.datastore.getFolderContents(
childFolderKey,
);
if (files.length === 0) {
await app.datastore.deleteFolder(childFolderKey);
return await getTransactionFiles(childFolderIndex + 1);
}
return { childFolderKey, files };
};
const transactionFiles = await getTransactionFiles();
if (!transactionFiles.files || transactionFiles.files.length === 0) return [];
const transactions = $(transactionFiles.files, [
map(parseTransactionId),
]);
for (const t of transactions) {
const transactionContent = await app.datastore.loadJson(
joinKey(
transactionFiles.childFolderKey,
t.fullId,
),
);
t.record = await _load(app, transactionContent.recordKey);
}
// piggyback the index node onto the array so execute/cleanup can use it
transactions.indexNode = $(buildIndexFolder, [
getLastPartInKey,
nodeKeyHashFromBuildFolder,
getNodeFromNodeKeyHash(app.hierarchy),
]);
transactions.folderKey = transactionFiles.childFolderKey;
return transactions;
};
const retrieveStandardTransactions = async (app, transactionFiles) => {
const transactionIds = $(transactionFiles, [
filter(f => f !== LOCK_FILENAME
&& !isBuildIndexFolder(f)),
map(parseTransactionId),
]);
const transactionIdsByRecord = $(transactionIds, [
groupBy('recordId'),
]);
const dedupedTransactions = [];
const verify = async (t) => {
if (t.verified === true) return t;
const id = getTransactionId(
t.recordId,
t.transactionType,
t.uniqueId,
);
const transaction = await app.datastore.loadJson(
joinKey(TRANSACTIONS_FOLDER, id),
);
if (isDelete(t)) {
t.record = transaction.record;
t.verified = true;
return t;
}
const rec = await _load(
app,
transaction.recordKey,
);
if (rec.transactionId === id) {
t.record = rec;
if (transaction.oldRecord) { t.oldRecord = transaction.oldRecord; }
t.verified = true;
} else {
t.verified = false;
}
return t;
};
const pickOne = async (trans, forType) => {
const transForType = filter(forType)(trans);
if (transForType.length === 1) {
const t = await verify(transForType[0]);
return (t.verified === true ? t : null);
}
for (let t of transForType) {
t = await verify(t);
if (t.verified === true) { return t; }
}
return null;
};
for (const recordId in transactionIdsByRecord) {
const transIdsForRecord = transactionIdsByRecord[recordId];
if (transIdsForRecord.length === 1) {
const t = await verify(transIdsForRecord[0]);
if (t.verified) { dedupedTransactions.push(t); }
continue;
}
if (some(isDelete)(transIdsForRecord)) {
const t = await verify(find(isDelete)(transIdsForRecord));
if (t.verified) { dedupedTransactions.push(t); }
continue;
}
if (some(isUpdate)(transIdsForRecord)) {
const upd = await pickOne(transIdsForRecord, isUpdate);
if (isSomething(upd) && upd.verified) { dedupedTransactions.push(upd); }
continue;
}
if (some(isCreate)(transIdsForRecord)) {
const cre = await pickOne(transIdsForRecord, isCreate);
if (isSomething(cre)) { dedupedTransactions.push(cre); }
continue;
}
}
const duplicates = $(transactionIds, [
filter(t => none(ddt => ddt.uniqueId === t.uniqueId)(dedupedTransactions)),
]);
const deletePromises = map(t => app.datastore.deleteFile(
joinKey(
TRANSACTIONS_FOLDER,
getTransactionId(
t.recordId,
t.transactionType,
t.uniqueId,
),
),
))(duplicates);
await Promise.all(deletePromises);
return dedupedTransactions;
};
const parseTransactionId = (id) => {
const splitId = split(idSep)(id);
return ({
recordId: splitId[0],
transactionType: splitId[1],
uniqueId: splitId[2],
fullId: id,
});
};
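For reference, the module-private `parseTransactionId` simply splits on `idSep` ('$'), so a transaction file name decomposes like this (values hypothetical):

const t = parseTransactionId('1-abcd$update$x9Kq2w');
// { recordId: '1-abcd', transactionType: 'update',
//   uniqueId: 'x9Kq2w', fullId: '1-abcd$update$x9Kq2w' }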

View file

@ -0,0 +1,48 @@
import {
joinKey, keySep, getHashCode,
} from '../common';
import { getLastPartInKey } from '../templateApi/hierarchy';
export const TRANSACTIONS_FOLDER = `${keySep}.transactions`;
export const LOCK_FILENAME = 'lock';
export const LOCK_FILE_KEY = joinKey(
TRANSACTIONS_FOLDER, LOCK_FILENAME,
);
export const idSep = '$';
const isOfType = typ => trans => trans.transactionType === typ;
export const CREATE_RECORD_TRANSACTION = 'create';
export const UPDATE_RECORD_TRANSACTION = 'update';
export const DELETE_RECORD_TRANSACTION = 'delete';
export const BUILD_INDEX_TRANSACTION = 'build';
export const isUpdate = isOfType(UPDATE_RECORD_TRANSACTION);
export const isDelete = isOfType(DELETE_RECORD_TRANSACTION);
export const isCreate = isOfType(CREATE_RECORD_TRANSACTION);
export const isBuildIndex = isOfType(BUILD_INDEX_TRANSACTION);
export const keyToFolderName = nodeKey => getHashCode(nodeKey);
export const getTransactionId = (recordId, transactionType, uniqueId) =>
`${recordId}${idSep}${transactionType}${idSep}${uniqueId}`;
export const buildIndexFolder = '.BUILD-';
export const nodeKeyHashFromBuildFolder = folder => folder.replace(buildIndexFolder, '');
export const isBuildIndexFolder = key => getLastPartInKey(key).startsWith(buildIndexFolder);
export const IndexNodeKeyFolder = indexNodeKey => joinKey(
TRANSACTIONS_FOLDER,
buildIndexFolder + keyToFolderName(indexNodeKey),
);
export const IndexNodeKeyBatchFolder = (indexNodeKey, count) =>
joinKey(IndexNodeKeyFolder(indexNodeKey), Math.floor(count / BUILDINDEX_BATCH_COUNT).toString());
export const IndexShardKeyFolder = (indexNodeKey, indexShardKey) =>
joinKey(IndexNodeKeyFolder(indexNodeKey), indexShardKey);
export const BUILDINDEX_BATCH_COUNT = 1000;
export const timeoutMilliseconds = 30 * 1000; // 30 secs
export const maxLockRetries = 1;
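A quick sketch of the batch-folder arithmetic (the hash segment is whatever `getHashCode` returns for the node key - shown here as `<hash>`):

// with BUILDINDEX_BATCH_COUNT = 1000:
IndexNodeKeyBatchFolder('/customers/default', 0);    // /.transactions/.BUILD-<hash>/0
IndexNodeKeyBatchFolder('/customers/default', 999);  // /.transactions/.BUILD-<hash>/0
IndexNodeKeyBatchFolder('/customers/default', 1000); // /.transactions/.BUILD-<hash>/1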

View file

@ -0,0 +1,59 @@
import { constant, isArray } from 'lodash';
import { map } from 'lodash/fp';
import {
typeFunctions, makerule,
parsedFailed, getDefaultExport, parsedSuccess,
} from './typeHelpers';
import {
switchCase, defaultCase, toNumberOrNull,
$$, isSafeInteger,
} from '../common';
const arrayFunctions = () => typeFunctions({
default: constant([]),
});
const mapToParsedArray = type => $$(
map(i => type.safeParseValue(i)),
parsedSuccess,
);
const arrayTryParse = type => switchCase(
[isArray, mapToParsedArray(type)],
[defaultCase, parsedFailed],
);
const typeName = type => `array<${type}>`;
const options = {
maxLength: {
defaultValue: 10000,
isValid: isSafeInteger,
requirementDescription: 'must be a positive integer',
parse: toNumberOrNull,
},
minLength: {
defaultValue: 0,
isValid: n => isSafeInteger(n) && n >= 0,
requirementDescription: 'must be zero or a positive integer',
parse: toNumberOrNull,
},
};
const typeConstraints = [
makerule(async (val, opts) => val === null || val.length >= opts.minLength,
(val, opts) => `must choose ${opts.minLength} or more options`),
makerule(async (val, opts) => val === null || val.length <= opts.maxLength,
(val, opts) => `cannot choose more than ${opts.maxLength} options`),
];
export default type => getDefaultExport(
typeName(type.name),
arrayTryParse(type),
arrayFunctions(type),
options,
typeConstraints,
[type.sampleValue],
JSON.stringify,
);
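A hedged usage sketch - the default export is called with an inner type; `stringType` here is a hypothetical sibling type module exposing `name`, `safeParseValue` and `sampleValue`:

import arrayType from './array';
import stringType from './string'; // assumed sibling module

const arrayOfString = arrayType(stringType);
// arrayOfString.name === 'array<string>'
// arrays parse member-by-member through the inner type;
// anything that is not an array fails to parse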

Some files were not shown because too many files have changed in this diff.