removed core library
This commit is contained in:
parent
910030e929
commit
0af5f12405
|
@ -1,5 +1,3 @@
|
||||||
import { common } from "../../../../core/src"
|
import { flow } from "lodash/fp";
|
||||||
|
|
||||||
export const pipe = common.$
|
export const pipe = (arg, funcs) => flow(funcs)(arg)
|
||||||
|
|
||||||
export const events = common.eventsList
|
|
||||||
|
|
|
@ -1,12 +0,0 @@
|
||||||
{
|
|
||||||
"presets": ["@babel/preset-env"],
|
|
||||||
"sourceMaps": "inline",
|
|
||||||
"retainLines": true,
|
|
||||||
"plugins": [
|
|
||||||
["@babel/plugin-transform-runtime",
|
|
||||||
{
|
|
||||||
"regenerator": true
|
|
||||||
}
|
|
||||||
]
|
|
||||||
]
|
|
||||||
}
|
|
|
@ -1,43 +0,0 @@
|
||||||
|
|
||||||
# Logs
|
|
||||||
logs
|
|
||||||
*.log
|
|
||||||
|
|
||||||
# Runtime data
|
|
||||||
pids
|
|
||||||
*.pid
|
|
||||||
*.seed
|
|
||||||
|
|
||||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
|
||||||
lib-cov
|
|
||||||
|
|
||||||
# Coverage directory used by tools like istanbul
|
|
||||||
coverage
|
|
||||||
|
|
||||||
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
|
|
||||||
.grunt
|
|
||||||
|
|
||||||
# node-waf configuration
|
|
||||||
.lock-wscript
|
|
||||||
|
|
||||||
# Compiled binary addons (http://nodejs.org/api/addons.html)
|
|
||||||
build/Release
|
|
||||||
.eslintcache
|
|
||||||
|
|
||||||
# Dependency directory
|
|
||||||
# https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git
|
|
||||||
node_modules
|
|
||||||
node_modules_ubuntu
|
|
||||||
node_modules_windows
|
|
||||||
|
|
||||||
# OSX
|
|
||||||
.DS_Store
|
|
||||||
|
|
||||||
# flow-typed
|
|
||||||
flow-typed/npm/*
|
|
||||||
!flow-typed/npm/module_vx.x.x.js
|
|
||||||
|
|
||||||
|
|
||||||
.idea
|
|
||||||
npm-debug.log.*
|
|
||||||
dist
|
|
|
@ -1,2 +0,0 @@
|
||||||
*
|
|
||||||
!dist/*
|
|
|
@ -1,11 +0,0 @@
|
||||||
sudo: required
|
|
||||||
|
|
||||||
notifications:
|
|
||||||
slack: budibase:Nx2QNi9CP87Nn7ah2A4Qdzyy
|
|
||||||
|
|
||||||
script:
|
|
||||||
- npm install
|
|
||||||
- npm install -g jest
|
|
||||||
- node node_modules/eslint/bin/eslint src/**/*.js
|
|
||||||
- jest
|
|
||||||
|
|
|
@ -1,14 +0,0 @@
|
||||||
{
|
|
||||||
// Use IntelliSense to learn about possible attributes.
|
|
||||||
// Hover to view descriptions of existing attributes.
|
|
||||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
|
||||||
"version": "0.2.0",
|
|
||||||
"configurations": [
|
|
||||||
{
|
|
||||||
"type": "node",
|
|
||||||
"request": "launch",
|
|
||||||
"name": "Launch Program",
|
|
||||||
"program": "${workspaceFolder}\\index.js"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
|
@ -1,22 +0,0 @@
|
||||||
### Contributing to budibase-core
|
|
||||||
|
|
||||||
* The contributors are listed in [AUTHORS.md](https://github.com/budibase/budibase-core/blob/master/AUTHORS.md) (add yourself).
|
|
||||||
|
|
||||||
* This project uses a modified version of the MPLv2 license, see [LICENSE](https://github.com/budibase/budibase-core/blob/master/LICENSE).
|
|
||||||
|
|
||||||
* We use the [C4 (Collective Code Construction Contract)](https://rfc.zeromq.org/spec:42/C4/) process for contributions.
|
|
||||||
Please read this if you are unfamiliar with it.
|
|
||||||
|
|
||||||
* Please maintain the existing code style.
|
|
||||||
|
|
||||||
* Please try to keep your commits small and focussed.
|
|
||||||
|
|
||||||
* If the project diverges from your branch, please rebase instead of merging. This makes the commit graph easier to read.
|
|
||||||
|
|
||||||
#### p.S...
|
|
||||||
|
|
||||||
I am using contribution guidelines from the fantastic [ZeroMQ](https://github.com/zeromq) community. If you are interested why, it's because I believe in the ethos laid out by this community, and written about in depth in the book ["Social Architecture"](https://www.amazon.com/Social-Architecture-Building-line-Communities/dp/1533112452) by Pieter Hintjens.
|
|
||||||
|
|
||||||
I am very much open to evolving this to suit our needs.
|
|
||||||
|
|
||||||
Love from [Mike](https://github.com/mikebudi).
|
|
|
@ -1,373 +0,0 @@
|
||||||
Mozilla Public License Version 2.0
|
|
||||||
==================================
|
|
||||||
|
|
||||||
1. Definitions
|
|
||||||
--------------
|
|
||||||
|
|
||||||
1.1. "Contributor"
|
|
||||||
means each individual or legal entity that creates, contributes to
|
|
||||||
the creation of, or owns Covered Software.
|
|
||||||
|
|
||||||
1.2. "Contributor Version"
|
|
||||||
means the combination of the Contributions of others (if any) used
|
|
||||||
by a Contributor and that particular Contributor's Contribution.
|
|
||||||
|
|
||||||
1.3. "Contribution"
|
|
||||||
means Covered Software of a particular Contributor.
|
|
||||||
|
|
||||||
1.4. "Covered Software"
|
|
||||||
means Source Code Form to which the initial Contributor has attached
|
|
||||||
the notice in Exhibit A, the Executable Form of such Source Code
|
|
||||||
Form, and Modifications of such Source Code Form, in each case
|
|
||||||
including portions thereof.
|
|
||||||
|
|
||||||
1.5. "Incompatible With Secondary Licenses"
|
|
||||||
means
|
|
||||||
|
|
||||||
(a) that the initial Contributor has attached the notice described
|
|
||||||
in Exhibit B to the Covered Software; or
|
|
||||||
|
|
||||||
(b) that the Covered Software was made available under the terms of
|
|
||||||
version 1.1 or earlier of the License, but not also under the
|
|
||||||
terms of a Secondary License.
|
|
||||||
|
|
||||||
1.6. "Executable Form"
|
|
||||||
means any form of the work other than Source Code Form.
|
|
||||||
|
|
||||||
1.7. "Larger Work"
|
|
||||||
means a work that combines Covered Software with other material, in
|
|
||||||
a separate file or files, that is not Covered Software.
|
|
||||||
|
|
||||||
1.8. "License"
|
|
||||||
means this document.
|
|
||||||
|
|
||||||
1.9. "Licensable"
|
|
||||||
means having the right to grant, to the maximum extent possible,
|
|
||||||
whether at the time of the initial grant or subsequently, any and
|
|
||||||
all of the rights conveyed by this License.
|
|
||||||
|
|
||||||
1.10. "Modifications"
|
|
||||||
means any of the following:
|
|
||||||
|
|
||||||
(a) any file in Source Code Form that results from an addition to,
|
|
||||||
deletion from, or modification of the contents of Covered
|
|
||||||
Software; or
|
|
||||||
|
|
||||||
(b) any new file in Source Code Form that contains any Covered
|
|
||||||
Software.
|
|
||||||
|
|
||||||
1.11. "Patent Claims" of a Contributor
|
|
||||||
means any patent claim(s), including without limitation, method,
|
|
||||||
process, and apparatus claims, in any patent Licensable by such
|
|
||||||
Contributor that would be infringed, but for the grant of the
|
|
||||||
License, by the making, using, selling, offering for sale, having
|
|
||||||
made, import, or transfer of either its Contributions or its
|
|
||||||
Contributor Version.
|
|
||||||
|
|
||||||
1.12. "Secondary License"
|
|
||||||
means either the GNU General Public License, Version 2.0, the GNU
|
|
||||||
Lesser General Public License, Version 2.1, the GNU Affero General
|
|
||||||
Public License, Version 3.0, or any later versions of those
|
|
||||||
licenses.
|
|
||||||
|
|
||||||
1.13. "Source Code Form"
|
|
||||||
means the form of the work preferred for making modifications.
|
|
||||||
|
|
||||||
1.14. "You" (or "Your")
|
|
||||||
means an individual or a legal entity exercising rights under this
|
|
||||||
License. For legal entities, "You" includes any entity that
|
|
||||||
controls, is controlled by, or is under common control with You. For
|
|
||||||
purposes of this definition, "control" means (a) the power, direct
|
|
||||||
or indirect, to cause the direction or management of such entity,
|
|
||||||
whether by contract or otherwise, or (b) ownership of more than
|
|
||||||
fifty percent (50%) of the outstanding shares or beneficial
|
|
||||||
ownership of such entity.
|
|
||||||
|
|
||||||
2. License Grants and Conditions
|
|
||||||
--------------------------------
|
|
||||||
|
|
||||||
2.1. Grants
|
|
||||||
|
|
||||||
Each Contributor hereby grants You a world-wide, royalty-free,
|
|
||||||
non-exclusive license:
|
|
||||||
|
|
||||||
(a) under intellectual property rights (other than patent or trademark)
|
|
||||||
Licensable by such Contributor to use, reproduce, make available,
|
|
||||||
modify, display, perform, distribute, and otherwise exploit its
|
|
||||||
Contributions, either on an unmodified basis, with Modifications, or
|
|
||||||
as part of a Larger Work; and
|
|
||||||
|
|
||||||
(b) under Patent Claims of such Contributor to make, use, sell, offer
|
|
||||||
for sale, have made, import, and otherwise transfer either its
|
|
||||||
Contributions or its Contributor Version.
|
|
||||||
|
|
||||||
2.2. Effective Date
|
|
||||||
|
|
||||||
The licenses granted in Section 2.1 with respect to any Contribution
|
|
||||||
become effective for each Contribution on the date the Contributor first
|
|
||||||
distributes such Contribution.
|
|
||||||
|
|
||||||
2.3. Limitations on Grant Scope
|
|
||||||
|
|
||||||
The licenses granted in this Section 2 are the only rights granted under
|
|
||||||
this License. No additional rights or licenses will be implied from the
|
|
||||||
distribution or licensing of Covered Software under this License.
|
|
||||||
Notwithstanding Section 2.1(b) above, no patent license is granted by a
|
|
||||||
Contributor:
|
|
||||||
|
|
||||||
(a) for any code that a Contributor has removed from Covered Software;
|
|
||||||
or
|
|
||||||
|
|
||||||
(b) for infringements caused by: (i) Your and any other third party's
|
|
||||||
modifications of Covered Software, or (ii) the combination of its
|
|
||||||
Contributions with other software (except as part of its Contributor
|
|
||||||
Version); or
|
|
||||||
|
|
||||||
(c) under Patent Claims infringed by Covered Software in the absence of
|
|
||||||
its Contributions.
|
|
||||||
|
|
||||||
This License does not grant any rights in the trademarks, service marks,
|
|
||||||
or logos of any Contributor (except as may be necessary to comply with
|
|
||||||
the notice requirements in Section 3.4).
|
|
||||||
|
|
||||||
2.4. Subsequent Licenses
|
|
||||||
|
|
||||||
No Contributor makes additional grants as a result of Your choice to
|
|
||||||
distribute the Covered Software under a subsequent version of this
|
|
||||||
License (see Section 10.2) or under the terms of a Secondary License (if
|
|
||||||
permitted under the terms of Section 3.3).
|
|
||||||
|
|
||||||
2.5. Representation
|
|
||||||
|
|
||||||
Each Contributor represents that the Contributor believes its
|
|
||||||
Contributions are its original creation(s) or it has sufficient rights
|
|
||||||
to grant the rights to its Contributions conveyed by this License.
|
|
||||||
|
|
||||||
2.6. Fair Use
|
|
||||||
|
|
||||||
This License is not intended to limit any rights You have under
|
|
||||||
applicable copyright doctrines of fair use, fair dealing, or other
|
|
||||||
equivalents.
|
|
||||||
|
|
||||||
2.7. Conditions
|
|
||||||
|
|
||||||
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
|
|
||||||
in Section 2.1.
|
|
||||||
|
|
||||||
3. Responsibilities
|
|
||||||
-------------------
|
|
||||||
|
|
||||||
3.1. Distribution of Source Form
|
|
||||||
|
|
||||||
All distribution of Covered Software in Source Code Form, including any
|
|
||||||
Modifications that You create or to which You contribute, must be under
|
|
||||||
the terms of this License. You must inform recipients that the Source
|
|
||||||
Code Form of the Covered Software is governed by the terms of this
|
|
||||||
License, and how they can obtain a copy of this License. You may not
|
|
||||||
attempt to alter or restrict the recipients' rights in the Source Code
|
|
||||||
Form.
|
|
||||||
|
|
||||||
3.2. Distribution of Executable Form
|
|
||||||
|
|
||||||
If You distribute Covered Software in Executable Form then:
|
|
||||||
|
|
||||||
(a) such Covered Software must also be made available in Source Code
|
|
||||||
Form, as described in Section 3.1, and You must inform recipients of
|
|
||||||
the Executable Form how they can obtain a copy of such Source Code
|
|
||||||
Form by reasonable means in a timely manner, at a charge no more
|
|
||||||
than the cost of distribution to the recipient; and
|
|
||||||
|
|
||||||
(b) You may distribute such Executable Form under the terms of this
|
|
||||||
License, or sublicense it under different terms, provided that the
|
|
||||||
license for the Executable Form does not attempt to limit or alter
|
|
||||||
the recipients' rights in the Source Code Form under this License.
|
|
||||||
|
|
||||||
3.3. Distribution of a Larger Work
|
|
||||||
|
|
||||||
You may create and distribute a Larger Work under terms of Your choice,
|
|
||||||
provided that You also comply with the requirements of this License for
|
|
||||||
the Covered Software. If the Larger Work is a combination of Covered
|
|
||||||
Software with a work governed by one or more Secondary Licenses, and the
|
|
||||||
Covered Software is not Incompatible With Secondary Licenses, this
|
|
||||||
License permits You to additionally distribute such Covered Software
|
|
||||||
under the terms of such Secondary License(s), so that the recipient of
|
|
||||||
the Larger Work may, at their option, further distribute the Covered
|
|
||||||
Software under the terms of either this License or such Secondary
|
|
||||||
License(s).
|
|
||||||
|
|
||||||
3.4. Notices
|
|
||||||
|
|
||||||
You may not remove or alter the substance of any license notices
|
|
||||||
(including copyright notices, patent notices, disclaimers of warranty,
|
|
||||||
or limitations of liability) contained within the Source Code Form of
|
|
||||||
the Covered Software, except that You may alter any license notices to
|
|
||||||
the extent required to remedy known factual inaccuracies.
|
|
||||||
|
|
||||||
3.5. Application of Additional Terms
|
|
||||||
|
|
||||||
You may choose to offer, and to charge a fee for, warranty, support,
|
|
||||||
indemnity or liability obligations to one or more recipients of Covered
|
|
||||||
Software. However, You may do so only on Your own behalf, and not on
|
|
||||||
behalf of any Contributor. You must make it absolutely clear that any
|
|
||||||
such warranty, support, indemnity, or liability obligation is offered by
|
|
||||||
You alone, and You hereby agree to indemnify every Contributor for any
|
|
||||||
liability incurred by such Contributor as a result of warranty, support,
|
|
||||||
indemnity or liability terms You offer. You may include additional
|
|
||||||
disclaimers of warranty and limitations of liability specific to any
|
|
||||||
jurisdiction.
|
|
||||||
|
|
||||||
4. Inability to Comply Due to Statute or Regulation
|
|
||||||
---------------------------------------------------
|
|
||||||
|
|
||||||
If it is impossible for You to comply with any of the terms of this
|
|
||||||
License with respect to some or all of the Covered Software due to
|
|
||||||
statute, judicial order, or regulation then You must: (a) comply with
|
|
||||||
the terms of this License to the maximum extent possible; and (b)
|
|
||||||
describe the limitations and the code they affect. Such description must
|
|
||||||
be placed in a text file included with all distributions of the Covered
|
|
||||||
Software under this License. Except to the extent prohibited by statute
|
|
||||||
or regulation, such description must be sufficiently detailed for a
|
|
||||||
recipient of ordinary skill to be able to understand it.
|
|
||||||
|
|
||||||
5. Termination
|
|
||||||
--------------
|
|
||||||
|
|
||||||
5.1. The rights granted under this License will terminate automatically
|
|
||||||
if You fail to comply with any of its terms. However, if You become
|
|
||||||
compliant, then the rights granted under this License from a particular
|
|
||||||
Contributor are reinstated (a) provisionally, unless and until such
|
|
||||||
Contributor explicitly and finally terminates Your grants, and (b) on an
|
|
||||||
ongoing basis, if such Contributor fails to notify You of the
|
|
||||||
non-compliance by some reasonable means prior to 60 days after You have
|
|
||||||
come back into compliance. Moreover, Your grants from a particular
|
|
||||||
Contributor are reinstated on an ongoing basis if such Contributor
|
|
||||||
notifies You of the non-compliance by some reasonable means, this is the
|
|
||||||
first time You have received notice of non-compliance with this License
|
|
||||||
from such Contributor, and You become compliant prior to 30 days after
|
|
||||||
Your receipt of the notice.
|
|
||||||
|
|
||||||
5.2. If You initiate litigation against any entity by asserting a patent
|
|
||||||
infringement claim (excluding declaratory judgment actions,
|
|
||||||
counter-claims, and cross-claims) alleging that a Contributor Version
|
|
||||||
directly or indirectly infringes any patent, then the rights granted to
|
|
||||||
You by any and all Contributors for the Covered Software under Section
|
|
||||||
2.1 of this License shall terminate.
|
|
||||||
|
|
||||||
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
|
|
||||||
end user license agreements (excluding distributors and resellers) which
|
|
||||||
have been validly granted by You or Your distributors under this License
|
|
||||||
prior to termination shall survive termination.
|
|
||||||
|
|
||||||
************************************************************************
|
|
||||||
* *
|
|
||||||
* 6. Disclaimer of Warranty *
|
|
||||||
* ------------------------- *
|
|
||||||
* *
|
|
||||||
* Covered Software is provided under this License on an "as is" *
|
|
||||||
* basis, without warranty of any kind, either expressed, implied, or *
|
|
||||||
* statutory, including, without limitation, warranties that the *
|
|
||||||
* Covered Software is free of defects, merchantable, fit for a *
|
|
||||||
* particular purpose or non-infringing. The entire risk as to the *
|
|
||||||
* quality and performance of the Covered Software is with You. *
|
|
||||||
* Should any Covered Software prove defective in any respect, You *
|
|
||||||
* (not any Contributor) assume the cost of any necessary servicing, *
|
|
||||||
* repair, or correction. This disclaimer of warranty constitutes an *
|
|
||||||
* essential part of this License. No use of any Covered Software is *
|
|
||||||
* authorized under this License except under this disclaimer. *
|
|
||||||
* *
|
|
||||||
************************************************************************
|
|
||||||
|
|
||||||
************************************************************************
|
|
||||||
* *
|
|
||||||
* 7. Limitation of Liability *
|
|
||||||
* -------------------------- *
|
|
||||||
* *
|
|
||||||
* Under no circumstances and under no legal theory, whether tort *
|
|
||||||
* (including negligence), contract, or otherwise, shall any *
|
|
||||||
* Contributor, or anyone who distributes Covered Software as *
|
|
||||||
* permitted above, be liable to You for any direct, indirect, *
|
|
||||||
* special, incidental, or consequential damages of any character *
|
|
||||||
* including, without limitation, damages for lost profits, loss of *
|
|
||||||
* goodwill, work stoppage, computer failure or malfunction, or any *
|
|
||||||
* and all other commercial damages or losses, even if such party *
|
|
||||||
* shall have been informed of the possibility of such damages. This *
|
|
||||||
* limitation of liability shall not apply to liability for death or *
|
|
||||||
* personal injury resulting from such party's negligence to the *
|
|
||||||
* extent applicable law prohibits such limitation. Some *
|
|
||||||
* jurisdictions do not allow the exclusion or limitation of *
|
|
||||||
* incidental or consequential damages, so this exclusion and *
|
|
||||||
* limitation may not apply to You. *
|
|
||||||
* *
|
|
||||||
************************************************************************
|
|
||||||
|
|
||||||
8. Litigation
|
|
||||||
-------------
|
|
||||||
|
|
||||||
Any litigation relating to this License may be brought only in the
|
|
||||||
courts of a jurisdiction where the defendant maintains its principal
|
|
||||||
place of business and such litigation shall be governed by laws of that
|
|
||||||
jurisdiction, without reference to its conflict-of-law provisions.
|
|
||||||
Nothing in this Section shall prevent a party's ability to bring
|
|
||||||
cross-claims or counter-claims.
|
|
||||||
|
|
||||||
9. Miscellaneous
|
|
||||||
----------------
|
|
||||||
|
|
||||||
This License represents the complete agreement concerning the subject
|
|
||||||
matter hereof. If any provision of this License is held to be
|
|
||||||
unenforceable, such provision shall be reformed only to the extent
|
|
||||||
necessary to make it enforceable. Any law or regulation which provides
|
|
||||||
that the language of a contract shall be construed against the drafter
|
|
||||||
shall not be used to construe this License against a Contributor.
|
|
||||||
|
|
||||||
10. Versions of the License
|
|
||||||
---------------------------
|
|
||||||
|
|
||||||
10.1. New Versions
|
|
||||||
|
|
||||||
Mozilla Foundation is the license steward. Except as provided in Section
|
|
||||||
10.3, no one other than the license steward has the right to modify or
|
|
||||||
publish new versions of this License. Each version will be given a
|
|
||||||
distinguishing version number.
|
|
||||||
|
|
||||||
10.2. Effect of New Versions
|
|
||||||
|
|
||||||
You may distribute the Covered Software under the terms of the version
|
|
||||||
of the License under which You originally received the Covered Software,
|
|
||||||
or under the terms of any subsequent version published by the license
|
|
||||||
steward.
|
|
||||||
|
|
||||||
10.3. Modified Versions
|
|
||||||
|
|
||||||
If you create software not governed by this License, and you want to
|
|
||||||
create a new license for such software, you may create and use a
|
|
||||||
modified version of this License if you rename the license and remove
|
|
||||||
any references to the name of the license steward (except to note that
|
|
||||||
such modified license differs from this License).
|
|
||||||
|
|
||||||
10.4. Distributing Source Code Form that is Incompatible With Secondary
|
|
||||||
Licenses
|
|
||||||
|
|
||||||
If You choose to distribute Source Code Form that is Incompatible With
|
|
||||||
Secondary Licenses under the terms of this version of the License, the
|
|
||||||
notice described in Exhibit B of this License must be attached.
|
|
||||||
|
|
||||||
Exhibit A - Source Code Form License Notice
|
|
||||||
-------------------------------------------
|
|
||||||
|
|
||||||
This Source Code Form is subject to the terms of the Mozilla Public
|
|
||||||
License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
||||||
file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
|
||||||
|
|
||||||
If it is not possible or desirable to put the notice in a particular
|
|
||||||
file, then You may include the notice in a location (such as a LICENSE
|
|
||||||
file in a relevant directory) where a recipient would be likely to look
|
|
||||||
for such a notice.
|
|
||||||
|
|
||||||
You may add additional accurate notices of copyright ownership.
|
|
||||||
|
|
||||||
Exhibit B - "Incompatible With Secondary Licenses" Notice
|
|
||||||
---------------------------------------------------------
|
|
||||||
|
|
||||||
This Source Code Form is "Incompatible With Secondary Licenses", as
|
|
||||||
defined by the Mozilla Public License, v. 2.0.
|
|
|
@ -1,83 +0,0 @@
|
||||||
{
|
|
||||||
"name": "@budibase/core",
|
|
||||||
"version": "0.0.32",
|
|
||||||
"description": "core javascript library for budibase",
|
|
||||||
"main": "dist/budibase-core.umd.js",
|
|
||||||
"module": "dist/budibase-core.esm.js",
|
|
||||||
"files": [
|
|
||||||
"dist/**",
|
|
||||||
"!dist/node_modules"
|
|
||||||
],
|
|
||||||
"directories": {
|
|
||||||
"test": "test"
|
|
||||||
},
|
|
||||||
"scripts": {
|
|
||||||
"test": "jest",
|
|
||||||
"test:watch": "jest --watch",
|
|
||||||
"build": "rollup -c rollup.config.js"
|
|
||||||
},
|
|
||||||
"keywords": [
|
|
||||||
"budibase"
|
|
||||||
],
|
|
||||||
"author": "Michael Shanks",
|
|
||||||
"license": "MPL-2.0",
|
|
||||||
"jest": {
|
|
||||||
"globals": {
|
|
||||||
"GLOBALS": {
|
|
||||||
"client": "web"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"testURL": "http://jest-breaks-if-this-does-not-exist",
|
|
||||||
"moduleNameMapper": {
|
|
||||||
"\\.(jpg|jpeg|png|gif|eot|otf|webp|svg|ttf|woff|woff2|mp4|webm|wav|mp3|m4a|aac|oga)$": "<rootDir>/internals/mocks/fileMock.js",
|
|
||||||
"\\.(css|less|sass|scss)$": "identity-obj-proxy"
|
|
||||||
},
|
|
||||||
"moduleFileExtensions": [
|
|
||||||
"js"
|
|
||||||
],
|
|
||||||
"moduleDirectories": [
|
|
||||||
"node_modules"
|
|
||||||
],
|
|
||||||
"transform": {
|
|
||||||
"^.+\\.js$": "babel-jest"
|
|
||||||
},
|
|
||||||
"transformIgnorePatterns": [
|
|
||||||
"/node_modules/(?!svelte).+\\.js$"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@babel/cli": "^7.4.4",
|
|
||||||
"@babel/core": "^7.4.5",
|
|
||||||
"@babel/plugin-transform-runtime": "^7.4.4",
|
|
||||||
"@babel/preset-env": "^7.4.5",
|
|
||||||
"@babel/runtime": "^7.4.5",
|
|
||||||
"babel-jest": "^23.6.0",
|
|
||||||
"babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
|
|
||||||
"cross-env": "^5.1.4",
|
|
||||||
"jest": "^24.8.0",
|
|
||||||
"readable-stream": "^3.1.1",
|
|
||||||
"regenerator-runtime": "^0.11.1",
|
|
||||||
"rimraf": "^2.6.2",
|
|
||||||
"rollup": "^1.12.0",
|
|
||||||
"rollup-plugin-commonjs": "^10.0.0",
|
|
||||||
"rollup-plugin-node-builtins": "^2.1.2",
|
|
||||||
"rollup-plugin-node-globals": "^1.4.0",
|
|
||||||
"rollup-plugin-node-resolve": "^5.0.0"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"@nx-js/compiler-util": "^2.0.0",
|
|
||||||
"bcryptjs": "^2.4.3",
|
|
||||||
"date-fns": "^1.29.0",
|
|
||||||
"lodash": "^4.17.13",
|
|
||||||
"lunr": "^2.3.5",
|
|
||||||
"nano": "^8.2.2",
|
|
||||||
"safe-buffer": "^5.1.2",
|
|
||||||
"shortid": "^2.2.8"
|
|
||||||
},
|
|
||||||
"devEngines": {
|
|
||||||
"node": ">=7.x",
|
|
||||||
"npm": ">=4.x",
|
|
||||||
"yarn": ">=0.21.3"
|
|
||||||
},
|
|
||||||
"gitHead": "b1f4f90927d9e494e513220ef060af28d2d42455"
|
|
||||||
}
|
|
|
@ -1,21 +0,0 @@
|
||||||
## Getting Started
|
|
||||||
|
|
||||||
Install packages:
|
|
||||||
|
|
||||||
`npm install`
|
|
||||||
|
|
||||||
Next, run the tests. Install jest, globally:
|
|
||||||
|
|
||||||
`npm install -g jest`
|
|
||||||
|
|
||||||
And finally, run
|
|
||||||
|
|
||||||
`jest`
|
|
||||||
|
|
||||||
## Documentation
|
|
||||||
|
|
||||||
A work in progress, lives here: https://github.com/Budibase/docs/blob/master/budibase-core.md
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,153 +0,0 @@
|
||||||
import builtins from "rollup-plugin-node-builtins"
|
|
||||||
import resolve from "rollup-plugin-node-resolve"
|
|
||||||
import commonjs from "rollup-plugin-commonjs"
|
|
||||||
import nodeglobals from "rollup-plugin-node-globals"
|
|
||||||
|
|
||||||
const lodash_fp_exports = [
|
|
||||||
"union",
|
|
||||||
"reduce",
|
|
||||||
"isUndefined",
|
|
||||||
"cloneDeep",
|
|
||||||
"split",
|
|
||||||
"some",
|
|
||||||
"map",
|
|
||||||
"filter",
|
|
||||||
"isEmpty",
|
|
||||||
"countBy",
|
|
||||||
"includes",
|
|
||||||
"last",
|
|
||||||
"find",
|
|
||||||
"constant",
|
|
||||||
"take",
|
|
||||||
"first",
|
|
||||||
"intersection",
|
|
||||||
"mapValues",
|
|
||||||
"isNull",
|
|
||||||
"has",
|
|
||||||
"isNumber",
|
|
||||||
"isString",
|
|
||||||
"isBoolean",
|
|
||||||
"isDate",
|
|
||||||
"isArray",
|
|
||||||
"isObject",
|
|
||||||
"clone",
|
|
||||||
"values",
|
|
||||||
"keyBy",
|
|
||||||
"keys",
|
|
||||||
"orderBy",
|
|
||||||
"concat",
|
|
||||||
"reverse",
|
|
||||||
"difference",
|
|
||||||
"merge",
|
|
||||||
"flatten",
|
|
||||||
"each",
|
|
||||||
"pull",
|
|
||||||
"join",
|
|
||||||
"defaultCase",
|
|
||||||
"uniqBy",
|
|
||||||
"every",
|
|
||||||
"uniqWith",
|
|
||||||
"isFunction",
|
|
||||||
"groupBy",
|
|
||||||
"differenceBy",
|
|
||||||
"intersectionBy",
|
|
||||||
"isEqual",
|
|
||||||
"max",
|
|
||||||
]
|
|
||||||
|
|
||||||
const lodash_exports = [
|
|
||||||
"toNumber",
|
|
||||||
"flow",
|
|
||||||
"isArray",
|
|
||||||
"join",
|
|
||||||
"replace",
|
|
||||||
"trim",
|
|
||||||
"dropRight",
|
|
||||||
"takeRight",
|
|
||||||
"head",
|
|
||||||
"isUndefined",
|
|
||||||
"isNull",
|
|
||||||
"isNaN",
|
|
||||||
"reduce",
|
|
||||||
"isEmpty",
|
|
||||||
"constant",
|
|
||||||
"tail",
|
|
||||||
"includes",
|
|
||||||
"startsWith",
|
|
||||||
"findIndex",
|
|
||||||
"isInteger",
|
|
||||||
"isDate",
|
|
||||||
"isString",
|
|
||||||
"split",
|
|
||||||
"clone",
|
|
||||||
"keys",
|
|
||||||
"isFunction",
|
|
||||||
"merge",
|
|
||||||
"has",
|
|
||||||
"isBoolean",
|
|
||||||
"isNumber",
|
|
||||||
"isObjectLike",
|
|
||||||
"assign",
|
|
||||||
"some",
|
|
||||||
"each",
|
|
||||||
"find",
|
|
||||||
"orderBy",
|
|
||||||
"union",
|
|
||||||
"cloneDeep",
|
|
||||||
]
|
|
||||||
|
|
||||||
const globals = {
|
|
||||||
"lodash/fp": "fp",
|
|
||||||
lodash: "_",
|
|
||||||
lunr: "lunr",
|
|
||||||
"safe-buffer": "safe_buffer",
|
|
||||||
shortid: "shortid",
|
|
||||||
"@nx-js/compiler-util": "compiler_util",
|
|
||||||
}
|
|
||||||
|
|
||||||
export default {
|
|
||||||
input: "src/index.js",
|
|
||||||
output: [
|
|
||||||
{
|
|
||||||
file: "dist/budibase-core.cjs.js",
|
|
||||||
format: "cjs",
|
|
||||||
sourcemap: "inline",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
file: "dist/budibase-core.esm.mjs",
|
|
||||||
format: "esm",
|
|
||||||
sourcemap: "inline",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
file: "dist/budibase-core.umd.js",
|
|
||||||
format: "umd",
|
|
||||||
name: "@budibase/core",
|
|
||||||
sourcemap: "inline",
|
|
||||||
globals,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
plugins: [
|
|
||||||
nodeglobals(),
|
|
||||||
builtins(),
|
|
||||||
resolve({
|
|
||||||
preferBuiltins: true,
|
|
||||||
}),
|
|
||||||
commonjs({
|
|
||||||
namedExports: {
|
|
||||||
"lodash/fp": lodash_fp_exports,
|
|
||||||
lodash: lodash_exports,
|
|
||||||
shortid: ["generate"],
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
],
|
|
||||||
external: [
|
|
||||||
"lodash",
|
|
||||||
"lodash/fp",
|
|
||||||
"date-fns",
|
|
||||||
"lunr",
|
|
||||||
"safe-buffer",
|
|
||||||
"shortid",
|
|
||||||
"@nx-js/compiler-util",
|
|
||||||
"bcryptjs",
|
|
||||||
],
|
|
||||||
}
|
|
|
@ -1,15 +0,0 @@
|
||||||
import { has } from "lodash"
|
|
||||||
import { ConflictError } from "../common/errors"
|
|
||||||
|
|
||||||
export const createBehaviourSources = () => {
|
|
||||||
const sources = {}
|
|
||||||
const register = (name, funcsObj) => {
|
|
||||||
if (has(sources, name)) {
|
|
||||||
throw new ConflictError(`Source '${name}' already exists`)
|
|
||||||
}
|
|
||||||
|
|
||||||
sources[name] = funcsObj
|
|
||||||
}
|
|
||||||
sources.register = register
|
|
||||||
return sources
|
|
||||||
}
|
|
|
@ -1,17 +0,0 @@
|
||||||
import { permission } from "../authApi/permissions"
|
|
||||||
import { apiWrapperSync } from "../common/apiWrapper"
|
|
||||||
import { events } from "../common/events"
|
|
||||||
|
|
||||||
export const executeAction = app => (actionName, options) => {
|
|
||||||
apiWrapperSync(
|
|
||||||
app,
|
|
||||||
events.actionsApi.execute,
|
|
||||||
permission.executeAction.isAuthorized(actionName),
|
|
||||||
{ actionName, options },
|
|
||||||
app.actions[actionName],
|
|
||||||
options
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
export const _executeAction = (behaviourSources, action, options) =>
|
|
||||||
behaviourSources[action.behaviourSource][action.behaviourName](options)
|
|
|
@ -1,7 +0,0 @@
|
||||||
import { executeAction } from "./execute"
|
|
||||||
|
|
||||||
export const getActionsApi = app => ({
|
|
||||||
execute: executeAction(app),
|
|
||||||
})
|
|
||||||
|
|
||||||
export default getActionsApi
|
|
|
@ -1,103 +0,0 @@
|
||||||
import {
|
|
||||||
isFunction,
|
|
||||||
filter,
|
|
||||||
map,
|
|
||||||
uniqBy,
|
|
||||||
keys,
|
|
||||||
difference,
|
|
||||||
join,
|
|
||||||
reduce,
|
|
||||||
find,
|
|
||||||
} from "lodash/fp"
|
|
||||||
import { compileCode } from "../common/compileCode"
|
|
||||||
import { $ } from "../common"
|
|
||||||
import { _executeAction } from "./execute"
|
|
||||||
import { BadRequestError, NotFoundError } from "../common/errors"
|
|
||||||
|
|
||||||
export const initialiseActions = (
|
|
||||||
subscribe,
|
|
||||||
behaviourSources,
|
|
||||||
actions,
|
|
||||||
triggers,
|
|
||||||
apis
|
|
||||||
) => {
|
|
||||||
validateSources(behaviourSources, actions)
|
|
||||||
subscribeTriggers(subscribe, behaviourSources, actions, triggers, apis)
|
|
||||||
return createActionsCollection(behaviourSources, actions)
|
|
||||||
}
|
|
||||||
|
|
||||||
const createActionsCollection = (behaviourSources, actions) =>
|
|
||||||
$(actions, [
|
|
||||||
reduce((all, a) => {
|
|
||||||
all[a.name] = opts => _executeAction(behaviourSources, a, opts)
|
|
||||||
return all
|
|
||||||
}, {}),
|
|
||||||
])
|
|
||||||
|
|
||||||
const subscribeTriggers = (
|
|
||||||
subscribe,
|
|
||||||
behaviourSources,
|
|
||||||
actions,
|
|
||||||
triggers,
|
|
||||||
apis
|
|
||||||
) => {
|
|
||||||
const createOptions = (optionsCreator, eventContext) => {
|
|
||||||
if (!optionsCreator) return {}
|
|
||||||
const create = compileCode(optionsCreator)
|
|
||||||
return create({ context: eventContext, apis })
|
|
||||||
}
|
|
||||||
|
|
||||||
const shouldRunTrigger = (trigger, eventContext) => {
|
|
||||||
if (!trigger.condition) return true
|
|
||||||
const shouldRun = compileCode(trigger.condition)
|
|
||||||
return shouldRun({ context: eventContext })
|
|
||||||
}
|
|
||||||
|
|
||||||
for (let trig of triggers) {
|
|
||||||
subscribe(trig.eventName, async (ev, ctx) => {
|
|
||||||
if (shouldRunTrigger(trig, ctx)) {
|
|
||||||
await _executeAction(
|
|
||||||
behaviourSources,
|
|
||||||
find(a => a.name === trig.actionName)(actions),
|
|
||||||
createOptions(trig.optionsCreator, ctx)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const validateSources = (behaviourSources, actions) => {
|
|
||||||
const declaredSources = $(actions, [
|
|
||||||
uniqBy(a => a.behaviourSource),
|
|
||||||
map(a => a.behaviourSource),
|
|
||||||
])
|
|
||||||
|
|
||||||
const suppliedSources = keys(behaviourSources)
|
|
||||||
|
|
||||||
const missingSources = difference(declaredSources, suppliedSources)
|
|
||||||
|
|
||||||
if (missingSources.length > 0) {
|
|
||||||
throw new BadRequestError(
|
|
||||||
`Declared behaviour sources are not supplied: ${join(
|
|
||||||
", ",
|
|
||||||
missingSources
|
|
||||||
)}`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const missingBehaviours = $(actions, [
|
|
||||||
filter(
|
|
||||||
a => !isFunction(behaviourSources[a.behaviourSource][a.behaviourName])
|
|
||||||
),
|
|
||||||
map(a => `Action: ${a.name} : ${a.behaviourSource}.${a.behaviourName}`),
|
|
||||||
])
|
|
||||||
|
|
||||||
if (missingBehaviours.length > 0) {
|
|
||||||
throw new NotFoundError(
|
|
||||||
`Missing behaviours: could not find behaviour functions: ${join(
|
|
||||||
", ",
|
|
||||||
missingBehaviours
|
|
||||||
)}`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,8 +0,0 @@
|
||||||
import { setCleanupFunc } from "../transactions/setCleanupFunc"
|
|
||||||
|
|
||||||
export const cloneApp = (app, mergeWith) => {
|
|
||||||
const newApp = { ...app }
|
|
||||||
Object.assign(newApp, mergeWith)
|
|
||||||
setCleanupFunc(newApp)
|
|
||||||
return newApp
|
|
||||||
}
|
|
|
@ -1,27 +0,0 @@
|
||||||
import { has } from "lodash/fp"
|
|
||||||
|
|
||||||
const publish = handlers => async (eventName, context = {}) => {
|
|
||||||
if (!has(eventName)(handlers)) return
|
|
||||||
|
|
||||||
for (const handler of handlers[eventName]) {
|
|
||||||
await handler(eventName, context)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const subscribe = handlers => (eventName, handler) => {
|
|
||||||
if (!has(eventName)(handlers)) {
|
|
||||||
handlers[eventName] = []
|
|
||||||
}
|
|
||||||
handlers[eventName].push(handler)
|
|
||||||
}
|
|
||||||
|
|
||||||
export const createEventAggregator = () => {
|
|
||||||
const handlers = {}
|
|
||||||
const eventAggregator = {
|
|
||||||
publish: publish(handlers),
|
|
||||||
subscribe: subscribe(handlers),
|
|
||||||
}
|
|
||||||
return eventAggregator
|
|
||||||
}
|
|
||||||
|
|
||||||
export default createEventAggregator
|
|
|
@ -1,13 +0,0 @@
|
||||||
export const setupDatastore = datastore => {
|
|
||||||
datastore.loadJson = datastore.loadFile
|
|
||||||
datastore.createJson = datastore.createFile
|
|
||||||
datastore.updateJson = datastore.updateFile
|
|
||||||
if (datastore.createEmptyDb) {
|
|
||||||
delete datastore.createEmptyDb
|
|
||||||
}
|
|
||||||
return datastore
|
|
||||||
}
|
|
||||||
|
|
||||||
export { createEventAggregator } from "./eventAggregator"
|
|
||||||
|
|
||||||
export default setupDatastore
|
|
|
@ -1,55 +0,0 @@
|
||||||
import { filter } from "lodash/fp"
|
|
||||||
import { configFolder, appDefinitionFile, $ } from "../common"
|
|
||||||
import { TRANSACTIONS_FOLDER } from "../transactions/transactionsCommon"
|
|
||||||
import {
|
|
||||||
AUTH_FOLDER,
|
|
||||||
USERS_LIST_FILE,
|
|
||||||
ACCESS_LEVELS_FILE,
|
|
||||||
} from "../authApi/authCommon"
|
|
||||||
import { initialiseRootCollections } from "../collectionApi/initialise"
|
|
||||||
import { initialiseIndex } from "../indexing/initialiseIndex"
|
|
||||||
import {
|
|
||||||
getFlattenedHierarchy,
|
|
||||||
isGlobalIndex,
|
|
||||||
isSingleRecord,
|
|
||||||
} from "../templateApi/hierarchy"
|
|
||||||
import { _getNew } from "../recordApi/getNew"
|
|
||||||
import { _save } from "../recordApi/save"
|
|
||||||
|
|
||||||
export const initialiseData = async (
|
|
||||||
datastore,
|
|
||||||
applicationDefinition,
|
|
||||||
accessLevels
|
|
||||||
) => {
|
|
||||||
if (!(await datastore.exists(appDefinitionFile)))
|
|
||||||
await datastore.createJson(appDefinitionFile, applicationDefinition)
|
|
||||||
|
|
||||||
if (!(await datastore.exists(USERS_LIST_FILE)))
|
|
||||||
await datastore.createJson(USERS_LIST_FILE, [])
|
|
||||||
|
|
||||||
if (!(await datastore.exists(ACCESS_LEVELS_FILE)))
|
|
||||||
await datastore.createJson(
|
|
||||||
ACCESS_LEVELS_FILE,
|
|
||||||
accessLevels ? accessLevels : { version: 0, levels: [] }
|
|
||||||
)
|
|
||||||
|
|
||||||
await initialiseRootSingleRecords(datastore, applicationDefinition.hierarchy)
|
|
||||||
}
|
|
||||||
|
|
||||||
const initialiseRootSingleRecords = async (datastore, hierarchy) => {
|
|
||||||
const app = {
|
|
||||||
publish: () => {},
|
|
||||||
cleanupTransactions: () => {},
|
|
||||||
datastore,
|
|
||||||
hierarchy,
|
|
||||||
}
|
|
||||||
|
|
||||||
const flathierarchy = getFlattenedHierarchy(hierarchy)
|
|
||||||
const singleRecords = $(flathierarchy, [filter(isSingleRecord)])
|
|
||||||
|
|
||||||
for (let record of singleRecords) {
|
|
||||||
const result = _getNew(record, "")
|
|
||||||
result.key = record.nodeKey()
|
|
||||||
await _save(app, result)
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,53 +0,0 @@
|
||||||
import { clone, find, split } from "lodash/fp"
|
|
||||||
import { joinKey, $ } from "../common"
|
|
||||||
// 5 minutes
|
|
||||||
export const tempCodeExpiryLength = 5 * 60 * 1000
|
|
||||||
|
|
||||||
export const AUTH_FOLDER = "/.auth"
|
|
||||||
export const USERS_LIST_FILE = joinKey(AUTH_FOLDER, "users.json")
|
|
||||||
export const userAuthFile = username =>
|
|
||||||
joinKey(AUTH_FOLDER, `auth_${username}.json`)
|
|
||||||
export const USERS_LOCK_FILE = joinKey(AUTH_FOLDER, "users_lock")
|
|
||||||
export const ACCESS_LEVELS_FILE = joinKey(AUTH_FOLDER, "access_levels.json")
|
|
||||||
export const ACCESS_LEVELS_LOCK_FILE = joinKey(
|
|
||||||
AUTH_FOLDER,
|
|
||||||
"access_levels_lock"
|
|
||||||
)
|
|
||||||
|
|
||||||
export const permissionTypes = {
|
|
||||||
CREATE_RECORD: "create record",
|
|
||||||
UPDATE_RECORD: "update record",
|
|
||||||
READ_RECORD: "read record",
|
|
||||||
DELETE_RECORD: "delete record",
|
|
||||||
READ_INDEX: "read index",
|
|
||||||
MANAGE_INDEX: "manage index",
|
|
||||||
MANAGE_COLLECTION: "manage collection",
|
|
||||||
WRITE_TEMPLATES: "write templates",
|
|
||||||
CREATE_USER: "create user",
|
|
||||||
SET_PASSWORD: "set password",
|
|
||||||
CREATE_TEMPORARY_ACCESS: "create temporary access",
|
|
||||||
ENABLE_DISABLE_USER: "enable or disable user",
|
|
||||||
WRITE_ACCESS_LEVELS: "write access levels",
|
|
||||||
LIST_USERS: "list users",
|
|
||||||
LIST_ACCESS_LEVELS: "list access levels",
|
|
||||||
EXECUTE_ACTION: "execute action",
|
|
||||||
SET_USER_ACCESS_LEVELS: "set user access levels",
|
|
||||||
}
|
|
||||||
|
|
||||||
export const getUserByName = (users, name) =>
|
|
||||||
$(users, [find(u => u.name.toLowerCase() === name.toLowerCase())])
|
|
||||||
|
|
||||||
export const stripUserOfSensitiveStuff = user => {
|
|
||||||
const stripped = clone(user)
|
|
||||||
delete stripped.tempCode
|
|
||||||
return stripped
|
|
||||||
}
|
|
||||||
|
|
||||||
export const parseTemporaryCode = fullCode =>
|
|
||||||
$(fullCode, [
|
|
||||||
split(":"),
|
|
||||||
parts => ({
|
|
||||||
id: parts[1],
|
|
||||||
code: parts[2],
|
|
||||||
}),
|
|
||||||
])
|
|
|
@ -1,118 +0,0 @@
|
||||||
import { find, filter, some, map, flatten } from "lodash/fp"
|
|
||||||
import { generate } from "shortid"
|
|
||||||
import { _getUsers } from "./getUsers"
|
|
||||||
import { getUserByName, userAuthFile, parseTemporaryCode } from "./authCommon"
|
|
||||||
import { _loadAccessLevels } from "./loadAccessLevels"
|
|
||||||
import { isNothingOrEmpty, $, apiWrapper, events } from "../common"
|
|
||||||
import { alwaysAuthorized } from "./permissions"
|
|
||||||
|
|
||||||
const dummyHash =
|
|
||||||
"$argon2i$v=19$m=4096,t=3,p=1$UZRo409UYBGjHJS3CV6Uxw$rU84qUqPeORFzKYmYY0ceBLDaPO+JWSH4PfNiKXfIKk"
|
|
||||||
|
|
||||||
export const authenticate = app => async (username, password) =>
|
|
||||||
apiWrapper(
|
|
||||||
app,
|
|
||||||
events.authApi.authenticate,
|
|
||||||
alwaysAuthorized,
|
|
||||||
{ username, password },
|
|
||||||
_authenticate,
|
|
||||||
app,
|
|
||||||
username,
|
|
||||||
password
|
|
||||||
)
|
|
||||||
|
|
||||||
export const _authenticate = async (app, username, password) => {
|
|
||||||
if (isNothingOrEmpty(username) || isNothingOrEmpty(password)) {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
|
|
||||||
const allUsers = await _getUsers(app)
|
|
||||||
let user = getUserByName(allUsers, username)
|
|
||||||
|
|
||||||
const notAUser = "not-a-user"
|
|
||||||
// continue with non-user - so time to verify remains consistent
|
|
||||||
// with verification of a valid user
|
|
||||||
if (!user || !user.enabled) {
|
|
||||||
user = notAUser
|
|
||||||
}
|
|
||||||
|
|
||||||
let userAuth
|
|
||||||
try {
|
|
||||||
userAuth = await app.datastore.loadJson(userAuthFile(username))
|
|
||||||
} catch (_) {
|
|
||||||
userAuth = { accessLevels: [], passwordHash: dummyHash }
|
|
||||||
}
|
|
||||||
|
|
||||||
const permissions = await buildUserPermissions(app, user.accessLevels)
|
|
||||||
|
|
||||||
const verified = app.crypto.verify(userAuth.passwordHash, password)
|
|
||||||
|
|
||||||
if (user === notAUser) {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
|
|
||||||
return verified
|
|
||||||
? {
|
|
||||||
...user,
|
|
||||||
permissions,
|
|
||||||
temp: false,
|
|
||||||
isUser: true,
|
|
||||||
}
|
|
||||||
: null
|
|
||||||
}
|
|
||||||
|
|
||||||
export const authenticateTemporaryAccess = app => async tempAccessCode => {
|
|
||||||
if (isNothingOrEmpty(tempAccessCode)) {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
|
|
||||||
const temp = parseTemporaryCode(tempAccessCode)
|
|
||||||
let user = $(await _getUsers(app), [
|
|
||||||
find(u => u.temporaryAccessId === temp.id),
|
|
||||||
])
|
|
||||||
|
|
||||||
const notAUser = "not-a-user"
|
|
||||||
if (!user || !user.enabled) {
|
|
||||||
user = notAUser
|
|
||||||
}
|
|
||||||
|
|
||||||
let userAuth
|
|
||||||
try {
|
|
||||||
userAuth = await app.datastore.loadJson(userAuthFile(user.name))
|
|
||||||
} catch (e) {
|
|
||||||
userAuth = {
|
|
||||||
temporaryAccessHash: dummyHash,
|
|
||||||
temporaryAccessExpiryEpoch: (await app.getEpochTime()) + 10000,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (userAuth.temporaryAccessExpiryEpoch < (await app.getEpochTime())) {
|
|
||||||
user = notAUser
|
|
||||||
}
|
|
||||||
|
|
||||||
const tempCode = !temp.code ? generate() : temp.code
|
|
||||||
const verified = app.crypto.verify(userAuth.temporaryAccessHash, tempCode)
|
|
||||||
|
|
||||||
if (user === notAUser) {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
|
|
||||||
return verified
|
|
||||||
? {
|
|
||||||
...user,
|
|
||||||
permissions: [],
|
|
||||||
temp: true,
|
|
||||||
isUser: true,
|
|
||||||
}
|
|
||||||
: null
|
|
||||||
}
|
|
||||||
|
|
||||||
export const buildUserPermissions = async (app, userAccessLevels) => {
|
|
||||||
const allAccessLevels = await _loadAccessLevels(app)
|
|
||||||
|
|
||||||
return $(allAccessLevels.levels, [
|
|
||||||
filter(l => some(ua => l.name === ua)(userAccessLevels)),
|
|
||||||
map(l => l.permissions),
|
|
||||||
flatten,
|
|
||||||
])
|
|
||||||
}
|
|
|
@ -1,70 +0,0 @@
|
||||||
import { generate } from "shortid"
|
|
||||||
import {
|
|
||||||
tempCodeExpiryLength,
|
|
||||||
USERS_LOCK_FILE,
|
|
||||||
USERS_LIST_FILE,
|
|
||||||
userAuthFile,
|
|
||||||
getUserByName,
|
|
||||||
} from "./authCommon"
|
|
||||||
import { getLock, isNolock, releaseLock } from "../common/lock"
|
|
||||||
import { apiWrapper, events } from "../common"
|
|
||||||
import { alwaysAuthorized } from "./permissions"
|
|
||||||
|
|
||||||
export const createTemporaryAccess = app => async userName =>
|
|
||||||
apiWrapper(
|
|
||||||
app,
|
|
||||||
events.authApi.createTemporaryAccess,
|
|
||||||
alwaysAuthorized,
|
|
||||||
{ userName },
|
|
||||||
_createTemporaryAccess,
|
|
||||||
app,
|
|
||||||
userName
|
|
||||||
)
|
|
||||||
|
|
||||||
export const _createTemporaryAccess = async (app, userName) => {
|
|
||||||
const tempCode = await getTemporaryCode(app)
|
|
||||||
|
|
||||||
const lock = await getLock(app, USERS_LOCK_FILE, 1000, 2)
|
|
||||||
|
|
||||||
if (isNolock(lock)) {
|
|
||||||
throw new Error(
|
|
||||||
"Unable to create temporary access, could not get lock - try again"
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const users = await app.datastore.loadJson(USERS_LIST_FILE)
|
|
||||||
|
|
||||||
const user = getUserByName(users, userName)
|
|
||||||
user.temporaryAccessId = tempCode.temporaryAccessId
|
|
||||||
|
|
||||||
await app.datastore.updateJson(USERS_LIST_FILE, users)
|
|
||||||
} finally {
|
|
||||||
await releaseLock(app, lock)
|
|
||||||
}
|
|
||||||
|
|
||||||
const userAuth = await app.datastore.loadJson(userAuthFile(userName))
|
|
||||||
userAuth.temporaryAccessHash = tempCode.temporaryAccessHash
|
|
||||||
|
|
||||||
userAuth.temporaryAccessExpiryEpoch = tempCode.temporaryAccessExpiryEpoch
|
|
||||||
|
|
||||||
await app.datastore.updateJson(userAuthFile(userName), userAuth)
|
|
||||||
|
|
||||||
return tempCode.tempCode
|
|
||||||
}
|
|
||||||
|
|
||||||
export const getTemporaryCode = async app => {
|
|
||||||
const tempCode = generate() + generate() + generate() + generate()
|
|
||||||
|
|
||||||
const tempId = generate()
|
|
||||||
|
|
||||||
return {
|
|
||||||
temporaryAccessHash: app.crypto.hash(tempCode),
|
|
||||||
temporaryAccessExpiryEpoch:
|
|
||||||
(await app.getEpochTime()) + tempCodeExpiryLength,
|
|
||||||
tempCode: `tmp:${tempId}:${tempCode}`,
|
|
||||||
temporaryAccessId: tempId,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export const looksLikeTemporaryCode = code => code.startsWith("tmp:")
|
|
|
@ -1,96 +0,0 @@
|
||||||
import { join, some } from "lodash/fp"
|
|
||||||
import { validateUser } from "./validateUser"
|
|
||||||
import { getNewUserAuth } from "./getNewUser"
|
|
||||||
import {
|
|
||||||
getLock,
|
|
||||||
isNolock,
|
|
||||||
releaseLock,
|
|
||||||
apiWrapper,
|
|
||||||
events,
|
|
||||||
insensitiveEquals,
|
|
||||||
isNonEmptyString,
|
|
||||||
} from "../common"
|
|
||||||
import {
|
|
||||||
USERS_LOCK_FILE,
|
|
||||||
stripUserOfSensitiveStuff,
|
|
||||||
USERS_LIST_FILE,
|
|
||||||
userAuthFile,
|
|
||||||
} from "./authCommon"
|
|
||||||
import { getTemporaryCode } from "./createTemporaryAccess"
|
|
||||||
import { isValidPassword } from "./setPassword"
|
|
||||||
import { permission } from "./permissions"
|
|
||||||
import { BadRequestError } from "../common/errors"
|
|
||||||
|
|
||||||
export const createUser = app => async (user, password = null) =>
|
|
||||||
apiWrapper(
|
|
||||||
app,
|
|
||||||
events.authApi.createUser,
|
|
||||||
permission.createUser.isAuthorized,
|
|
||||||
{ user, password },
|
|
||||||
_createUser,
|
|
||||||
app,
|
|
||||||
user,
|
|
||||||
password
|
|
||||||
)
|
|
||||||
|
|
||||||
export const _createUser = async (app, user, password = null) => {
|
|
||||||
const lock = await getLock(app, USERS_LOCK_FILE, 1000, 2)
|
|
||||||
|
|
||||||
if (isNolock(lock)) {
|
|
||||||
throw new Error("Unable to create user, could not get lock - try again")
|
|
||||||
}
|
|
||||||
|
|
||||||
const users = await app.datastore.loadJson(USERS_LIST_FILE)
|
|
||||||
|
|
||||||
const userErrors = validateUser(app)([...users, user], user)
|
|
||||||
if (userErrors.length > 0) {
|
|
||||||
throw new BadRequestError(`User is invalid. ${join("; ")(userErrors.map(e => e.error))}`)
|
|
||||||
}
|
|
||||||
|
|
||||||
const { auth, tempCode, temporaryAccessId } = await getAccess(app, password)
|
|
||||||
user.tempCode = tempCode
|
|
||||||
user.temporaryAccessId = temporaryAccessId
|
|
||||||
|
|
||||||
if (some(u => insensitiveEquals(u.name, user.name))(users)) {
|
|
||||||
throw new BadRequestError("User already exists")
|
|
||||||
}
|
|
||||||
|
|
||||||
users.push(stripUserOfSensitiveStuff(user))
|
|
||||||
|
|
||||||
await app.datastore.updateJson(USERS_LIST_FILE, users)
|
|
||||||
|
|
||||||
try {
|
|
||||||
await app.datastore.createJson(userAuthFile(user.name), auth)
|
|
||||||
} catch (_) {
|
|
||||||
await app.datastore.updateJson(userAuthFile(user.name), auth)
|
|
||||||
}
|
|
||||||
|
|
||||||
await releaseLock(app, lock)
|
|
||||||
|
|
||||||
return user
|
|
||||||
}
|
|
||||||
|
|
||||||
const getAccess = async (app, password) => {
|
|
||||||
const auth = getNewUserAuth(app)()
|
|
||||||
|
|
||||||
if (isNonEmptyString(password)) {
|
|
||||||
if (isValidPassword(password)) {
|
|
||||||
auth.passwordHash = app.crypto.hash(password)
|
|
||||||
auth.temporaryAccessHash = ""
|
|
||||||
auth.temporaryAccessId = ""
|
|
||||||
auth.temporaryAccessExpiryEpoch = 0
|
|
||||||
return { auth }
|
|
||||||
}
|
|
||||||
throw new BadRequestError("Password does not meet requirements")
|
|
||||||
} else {
|
|
||||||
const tempAccess = await getTemporaryCode(app)
|
|
||||||
auth.temporaryAccessHash = tempAccess.temporaryAccessHash
|
|
||||||
auth.temporaryAccessExpiryEpoch = tempAccess.temporaryAccessExpiryEpoch
|
|
||||||
auth.passwordHash = ""
|
|
||||||
return {
|
|
||||||
auth,
|
|
||||||
tempCode: tempAccess.tempCode,
|
|
||||||
temporaryAccessId: tempAccess.temporaryAccessId,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,58 +0,0 @@
|
||||||
import { getLock, isNolock, releaseLock } from "../common/lock"
|
|
||||||
import { USERS_LOCK_FILE, USERS_LIST_FILE, getUserByName } from "./authCommon"
|
|
||||||
import { apiWrapper, events } from "../common"
|
|
||||||
import { permission } from "./permissions"
|
|
||||||
import { NotFoundError } from "../common/errors"
|
|
||||||
|
|
||||||
export const enableUser = app => async username =>
|
|
||||||
apiWrapper(
|
|
||||||
app,
|
|
||||||
events.authApi.enableUser,
|
|
||||||
permission.enableDisableUser.isAuthorized,
|
|
||||||
{ username },
|
|
||||||
_enableUser,
|
|
||||||
app,
|
|
||||||
username
|
|
||||||
)
|
|
||||||
|
|
||||||
export const disableUser = app => async username =>
|
|
||||||
apiWrapper(
|
|
||||||
app,
|
|
||||||
events.authApi.disableUser,
|
|
||||||
permission.enableDisableUser.isAuthorized,
|
|
||||||
{ username },
|
|
||||||
_disableUser,
|
|
||||||
app,
|
|
||||||
username
|
|
||||||
)
|
|
||||||
|
|
||||||
export const _enableUser = async (app, username) =>
|
|
||||||
await toggleUser(app, username, true)
|
|
||||||
|
|
||||||
export const _disableUser = async (app, username) =>
|
|
||||||
await toggleUser(app, username, false)
|
|
||||||
|
|
||||||
const toggleUser = async (app, username, enabled) => {
|
|
||||||
const lock = await getLock(app, USERS_LOCK_FILE, 1000, 1, 0)
|
|
||||||
|
|
||||||
const actionName = enabled ? "enable" : "disable"
|
|
||||||
|
|
||||||
if (isNolock(lock)) {
|
|
||||||
throw new Error(`Could not ${actionName} user - cannot get lock`)
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const users = await app.datastore.loadJson(USERS_LIST_FILE)
|
|
||||||
const user = getUserByName(users, username)
|
|
||||||
if (!user) {
|
|
||||||
throw new NotFoundError(`Could not find user to ${actionName}`)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (user.enabled === !enabled) {
|
|
||||||
user.enabled = enabled
|
|
||||||
await app.datastore.updateJson(USERS_LIST_FILE, users)
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
releaseLock(app, lock)
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,36 +0,0 @@
|
||||||
import { filter, values, each, keys } from "lodash/fp"
|
|
||||||
import { permission } from "./permissions"
|
|
||||||
import {
|
|
||||||
getFlattenedHierarchy,
|
|
||||||
isIndex,
|
|
||||||
isModel,
|
|
||||||
} from "../templateApi/hierarchy"
|
|
||||||
import { $ } from "../common"
|
|
||||||
|
|
||||||
export const generateFullPermissions = app => {
|
|
||||||
const allNodes = getFlattenedHierarchy(app.hierarchy)
|
|
||||||
const accessLevel = { permissions: [] }
|
|
||||||
|
|
||||||
const recordNodes = $(allNodes, [filter(isModel)])
|
|
||||||
|
|
||||||
for (const n of recordNodes) {
|
|
||||||
permission.createRecord.add(n.nodeKey(), accessLevel)
|
|
||||||
permission.updateRecord.add(n.nodeKey(), accessLevel)
|
|
||||||
permission.deleteRecord.add(n.nodeKey(), accessLevel)
|
|
||||||
permission.readRecord.add(n.nodeKey(), accessLevel)
|
|
||||||
}
|
|
||||||
|
|
||||||
const indexNodes = $(allNodes, [filter(isIndex)])
|
|
||||||
|
|
||||||
for (const n of indexNodes) {
|
|
||||||
permission.readIndex.add(n.nodeKey(), accessLevel)
|
|
||||||
}
|
|
||||||
|
|
||||||
for (const a of keys(app.actions)) {
|
|
||||||
permission.executeAction.add(a, accessLevel)
|
|
||||||
}
|
|
||||||
|
|
||||||
$(permission, [values, filter(p => !p.isNode), each(p => p.add(accessLevel))])
|
|
||||||
|
|
||||||
return accessLevel.permissions
|
|
||||||
}
|
|
|
@ -1,5 +0,0 @@
|
||||||
export const getNewAccessLevel = () => () => ({
|
|
||||||
name: "",
|
|
||||||
permissions: [],
|
|
||||||
default: false,
|
|
||||||
})
|
|
|
@ -1,35 +0,0 @@
|
||||||
import { apiWrapperSync, events } from "../common"
|
|
||||||
import { permission } from "./permissions"
|
|
||||||
|
|
||||||
export const getNewUser = app => () =>
|
|
||||||
apiWrapperSync(
|
|
||||||
app,
|
|
||||||
events.authApi.getNewUser,
|
|
||||||
permission.createUser.isAuthorized,
|
|
||||||
{},
|
|
||||||
_getNewUser,
|
|
||||||
app
|
|
||||||
)
|
|
||||||
|
|
||||||
export const _getNewUser = () => ({
|
|
||||||
name: "",
|
|
||||||
accessLevels: [],
|
|
||||||
enabled: true,
|
|
||||||
temporaryAccessId: "",
|
|
||||||
})
|
|
||||||
|
|
||||||
export const getNewUserAuth = app => () =>
|
|
||||||
apiWrapperSync(
|
|
||||||
app,
|
|
||||||
events.authApi.getNewUserAuth,
|
|
||||||
permission.createUser.isAuthorized,
|
|
||||||
{},
|
|
||||||
_getNewUserAuth,
|
|
||||||
app
|
|
||||||
)
|
|
||||||
|
|
||||||
export const _getNewUserAuth = () => ({
|
|
||||||
passwordHash: "",
|
|
||||||
temporaryAccessHash: "",
|
|
||||||
temporaryAccessExpiryEpoch: 0,
|
|
||||||
})
|
|
|
@ -1,19 +0,0 @@
|
||||||
import { map } from "lodash/fp"
|
|
||||||
import { USERS_LIST_FILE, stripUserOfSensitiveStuff } from "./authCommon"
|
|
||||||
import { $, apiWrapper, events } from "../common"
|
|
||||||
import { permission } from "./permissions"
|
|
||||||
|
|
||||||
export const getUsers = app => async () =>
|
|
||||||
apiWrapper(
|
|
||||||
app,
|
|
||||||
events.authApi.getUsers,
|
|
||||||
permission.listUsers.isAuthorized,
|
|
||||||
{},
|
|
||||||
_getUsers,
|
|
||||||
app
|
|
||||||
)
|
|
||||||
|
|
||||||
export const _getUsers = async app =>
|
|
||||||
$(await app.datastore.loadJson(USERS_LIST_FILE), [
|
|
||||||
map(stripUserOfSensitiveStuff),
|
|
||||||
])
|
|
|
@ -1,46 +0,0 @@
|
||||||
import { authenticate, authenticateTemporaryAccess } from "./authenticate"
|
|
||||||
import { createTemporaryAccess } from "./createTemporaryAccess"
|
|
||||||
import { createUser } from "./createUser"
|
|
||||||
import { enableUser, disableUser } from "./enableUser"
|
|
||||||
import { loadAccessLevels } from "./loadAccessLevels"
|
|
||||||
import { getNewAccessLevel } from "./getNewAccessLevel"
|
|
||||||
import { getNewUser, getNewUserAuth } from "./getNewUser"
|
|
||||||
import { getUsers } from "./getUsers"
|
|
||||||
import { isAuthorized } from "./isAuthorized"
|
|
||||||
import { saveAccessLevels } from "./saveAccessLevels"
|
|
||||||
import {
|
|
||||||
changeMyPassword,
|
|
||||||
scorePassword,
|
|
||||||
setPasswordFromTemporaryCode,
|
|
||||||
isValidPassword,
|
|
||||||
} from "./setPassword"
|
|
||||||
import { validateUser } from "./validateUser"
|
|
||||||
import { validateAccessLevels } from "./validateAccessLevels"
|
|
||||||
import { generateFullPermissions } from "./generateFullPermissions"
|
|
||||||
import { setUserAccessLevels } from "./setUserAccessLevels"
|
|
||||||
|
|
||||||
export const getAuthApi = app => ({
|
|
||||||
authenticate: authenticate(app),
|
|
||||||
authenticateTemporaryAccess: authenticateTemporaryAccess(app),
|
|
||||||
createTemporaryAccess: createTemporaryAccess(app),
|
|
||||||
createUser: createUser(app),
|
|
||||||
loadAccessLevels: loadAccessLevels(app),
|
|
||||||
enableUser: enableUser(app),
|
|
||||||
disableUser: disableUser(app),
|
|
||||||
getNewAccessLevel: getNewAccessLevel(app),
|
|
||||||
getNewUser: getNewUser(app),
|
|
||||||
getNewUserAuth: getNewUserAuth(app),
|
|
||||||
getUsers: getUsers(app),
|
|
||||||
saveAccessLevels: saveAccessLevels(app),
|
|
||||||
isAuthorized: isAuthorized(app),
|
|
||||||
changeMyPassword: changeMyPassword(app),
|
|
||||||
setPasswordFromTemporaryCode: setPasswordFromTemporaryCode(app),
|
|
||||||
scorePassword,
|
|
||||||
isValidPassword: isValidPassword(app),
|
|
||||||
validateUser: validateUser(app),
|
|
||||||
validateAccessLevels: validateAccessLevels(app),
|
|
||||||
generateFullPermissions: () => generateFullPermissions(app),
|
|
||||||
setUserAccessLevels: setUserAccessLevels(app),
|
|
||||||
})
|
|
||||||
|
|
||||||
export default getAuthApi
|
|
|
@ -1,44 +0,0 @@
|
||||||
import { values, includes, some } from "lodash/fp"
|
|
||||||
import { permissionTypes } from "./authCommon"
|
|
||||||
import { $, isNothing, apiWrapperSync, events } from "../common"
|
|
||||||
import { getNodeByKeyOrNodeKey, isNode } from "../templateApi/hierarchy"
|
|
||||||
import { alwaysAuthorized } from "./permissions"
|
|
||||||
|
|
||||||
export const isAuthorized = app => (permissionType, resourceKey) =>
|
|
||||||
apiWrapperSync(
|
|
||||||
app,
|
|
||||||
events.authApi.isAuthorized,
|
|
||||||
alwaysAuthorized,
|
|
||||||
{ resourceKey, permissionType },
|
|
||||||
_isAuthorized,
|
|
||||||
app,
|
|
||||||
permissionType,
|
|
||||||
resourceKey
|
|
||||||
)
|
|
||||||
|
|
||||||
export const _isAuthorized = (app, permissionType, resourceKey) => {
|
|
||||||
if (!app.user) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
const validType = $(permissionTypes, [values, includes(permissionType)])
|
|
||||||
|
|
||||||
if (!validType) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
const permMatchesResource = userperm => {
|
|
||||||
const nodeKey = isNothing(resourceKey)
|
|
||||||
? null
|
|
||||||
: isNode(app.hierarchy, resourceKey)
|
|
||||||
? getNodeByKeyOrNodeKey(app.hierarchy, resourceKey).nodeKey()
|
|
||||||
: resourceKey
|
|
||||||
|
|
||||||
return (
|
|
||||||
userperm.type === permissionType &&
|
|
||||||
(isNothing(resourceKey) || nodeKey === userperm.nodeKey)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
return $(app.user.permissions, [some(permMatchesResource)])
|
|
||||||
}
|
|
|
@ -1,16 +0,0 @@
|
||||||
import { ACCESS_LEVELS_FILE } from "./authCommon"
|
|
||||||
import { apiWrapper, events } from "../common"
|
|
||||||
import { permission } from "./permissions"
|
|
||||||
|
|
||||||
export const loadAccessLevels = app => async () =>
|
|
||||||
apiWrapper(
|
|
||||||
app,
|
|
||||||
events.authApi.loadAccessLevels,
|
|
||||||
permission.listAccessLevels.isAuthorized,
|
|
||||||
{},
|
|
||||||
_loadAccessLevels,
|
|
||||||
app
|
|
||||||
)
|
|
||||||
|
|
||||||
export const _loadAccessLevels = async app =>
|
|
||||||
await app.datastore.loadJson(ACCESS_LEVELS_FILE)
|
|
|
@ -1,81 +0,0 @@
|
||||||
import { permissionTypes } from "./authCommon"
|
|
||||||
import { isAuthorized } from "./isAuthorized"
|
|
||||||
|
|
||||||
export const temporaryAccessPermissions = () => [
|
|
||||||
{ type: permissionTypes.SET_PASSWORD },
|
|
||||||
]
|
|
||||||
|
|
||||||
const nodePermission = type => ({
|
|
||||||
add: (nodeKey, accessLevel) =>
|
|
||||||
accessLevel.permissions.push({ type, nodeKey }),
|
|
||||||
isAuthorized: resourceKey => app => isAuthorized(app)(type, resourceKey),
|
|
||||||
isNode: true,
|
|
||||||
get: nodeKey => ({ type, nodeKey }),
|
|
||||||
})
|
|
||||||
|
|
||||||
const staticPermission = type => ({
|
|
||||||
add: accessLevel => accessLevel.permissions.push({ type }),
|
|
||||||
isAuthorized: app => isAuthorized(app)(type),
|
|
||||||
isNode: false,
|
|
||||||
get: () => ({ type }),
|
|
||||||
})
|
|
||||||
|
|
||||||
// One permission builder per permission type.
// nodePermission: scoped to a hierarchy node (record/index) via a nodeKey.
// staticPermission: applies application-wide.
const createRecord = nodePermission(permissionTypes.CREATE_RECORD)

const updateRecord = nodePermission(permissionTypes.UPDATE_RECORD)

const deleteRecord = nodePermission(permissionTypes.DELETE_RECORD)

const readRecord = nodePermission(permissionTypes.READ_RECORD)

const writeTemplates = staticPermission(permissionTypes.WRITE_TEMPLATES)

const createUser = staticPermission(permissionTypes.CREATE_USER)

const setPassword = staticPermission(permissionTypes.SET_PASSWORD)

const readIndex = nodePermission(permissionTypes.READ_INDEX)

const manageIndex = staticPermission(permissionTypes.MANAGE_INDEX)

const manageCollection = staticPermission(permissionTypes.MANAGE_COLLECTION)

const createTemporaryAccess = staticPermission(
  permissionTypes.CREATE_TEMPORARY_ACCESS
)

const enableDisableUser = staticPermission(permissionTypes.ENABLE_DISABLE_USER)

const writeAccessLevels = staticPermission(permissionTypes.WRITE_ACCESS_LEVELS)

const listUsers = staticPermission(permissionTypes.LIST_USERS)

const listAccessLevels = staticPermission(permissionTypes.LIST_ACCESS_LEVELS)

const setUserAccessLevels = staticPermission(
  permissionTypes.SET_USER_ACCESS_LEVELS
)

const executeAction = nodePermission(permissionTypes.EXECUTE_ACTION)
|
|
||||||
|
|
||||||
// Authorization predicate that grants access unconditionally — used for
// API methods any caller may invoke.
export const alwaysAuthorized = () => {
  return true
}
|
|
||||||
|
|
||||||
// Aggregated permission API: one builder per permission type, used both to
// add permissions to access levels and to authorize API calls.
export const permission = {
  createRecord,
  updateRecord,
  deleteRecord,
  readRecord,
  writeTemplates,
  createUser,
  setPassword,
  readIndex,
  createTemporaryAccess,
  enableDisableUser,
  writeAccessLevels,
  listUsers,
  listAccessLevels,
  manageIndex,
  manageCollection,
  executeAction,
  setUserAccessLevels,
}
|
|
|
@ -1,52 +0,0 @@
|
||||||
import { join, map } from "lodash/fp"
|
|
||||||
import {
|
|
||||||
getLock,
|
|
||||||
releaseLock,
|
|
||||||
$,
|
|
||||||
isNolock,
|
|
||||||
apiWrapper,
|
|
||||||
events,
|
|
||||||
} from "../common"
|
|
||||||
import { ACCESS_LEVELS_LOCK_FILE, ACCESS_LEVELS_FILE } from "./authCommon"
|
|
||||||
import { validateAccessLevels } from "./validateAccessLevels"
|
|
||||||
import { permission } from "./permissions"
|
|
||||||
|
|
||||||
// Public API: persist the access-levels document.
// Wrapped in apiWrapper for lifecycle events and authorization
// (requires the writeAccessLevels permission).
export const saveAccessLevels = app => async accessLevels =>
  apiWrapper(
    app,
    events.authApi.saveAccessLevels,
    permission.writeAccessLevels.isAuthorized,
    { accessLevels },
    _saveAccessLevels,
    app,
    accessLevels
  )
|
|
||||||
|
|
||||||
// Validates and persists the full access-levels document, guarded by a file
// lock and an optimistic-concurrency version check.
// Throws when validation fails, the lock cannot be acquired, or the stored
// version no longer matches the supplied one.
export const _saveAccessLevels = async (app, accessLevels) => {
  const validationErrors = validateAccessLevels(app)(accessLevels.levels)
  if (validationErrors.length > 0) {
    const errs = $(validationErrors, [map(e => e.error), join(", ")])
    throw new Error(`Access Levels Invalid: ${errs}`)
  }

  const lock = await getLock(app, ACCESS_LEVELS_LOCK_FILE, 2000, 2)

  if (isNolock(lock)) {
    throw new Error("Could not get lock to save access levels")
  }

  try {
    const existing = await app.datastore.loadJson(ACCESS_LEVELS_FILE)
    if (existing.version !== accessLevels.version) {
      throw new Error(
        "Access levels have already been updated, since you loaded"
      )
    }

    accessLevels.version++

    // FIX: await the write — previously the promise was dropped, so write
    // failures were silently lost and the lock could be released before the
    // file was actually written.
    await app.datastore.updateJson(ACCESS_LEVELS_FILE, accessLevels)
  } finally {
    await releaseLock(app, lock)
  }
}
|
|
|
@ -1,160 +0,0 @@
|
||||||
import { find } from "lodash/fp"
|
|
||||||
import { userAuthFile, parseTemporaryCode } from "./authCommon"
|
|
||||||
import { isSomething, $, apiWrapper, apiWrapperSync, events } from "../common"
|
|
||||||
import { _getUsers } from "./getUsers"
|
|
||||||
import { alwaysAuthorized } from "./permissions"
|
|
||||||
|
|
||||||
// Public API: check whether a candidate password is strong enough.
// Any caller is authorized; events are published via apiWrapperSync.
export const isValidPassword = app => password =>
  apiWrapperSync(
    app,
    events.authApi.isValidPassword,
    alwaysAuthorized,
    { password },
    _isValidPassword,
    app,
    password
  )
|
|
||||||
|
|
||||||
// A password is valid when its strength score exceeds 30.
// FIX: previously called the exported scorePassword, which is curried over
// app (app => password => ...); scorePassword(password) returned a function,
// so `.score` was always undefined and the check was always false.
export const _isValidPassword = (app, password) =>
  _scorePassword(password).score > 30
|
|
||||||
|
|
||||||
// Public API: change the current user's password, verifying the old one.
// Any caller is authorized (they must know their current password).
export const changeMyPassword = app => async (currentPw, newpassword) =>
  apiWrapper(
    app,
    events.authApi.changeMyPassword,
    alwaysAuthorized,
    { currentPw, newpassword },
    _changeMyPassword,
    app,
    currentPw,
    newpassword
  )
|
|
||||||
|
|
||||||
// Changes the current user's password after verifying the existing one.
// Returns true on success; false when no password hash is stored or the
// current password fails verification.
export const _changeMyPassword = async (app, currentPw, newpassword) => {
  const existingAuth = await app.datastore.loadJson(userAuthFile(app.user.name))

  if (isSomething(existingAuth.passwordHash)) {
    const verified = app.crypto.verify(existingAuth.passwordHash, currentPw)

    if (verified) {
      // FIX: was `await await doSet(...)` — the duplicate await was redundant
      await doSet(app, existingAuth, app.user.name, newpassword)
      return true
    }
  }

  return false
}
|
|
||||||
|
|
||||||
// Public API: set a user's password using a temporary access code
// (e.g. from a create-user or reset flow). Any caller is authorized; the
// temporary code itself is the credential.
export const setPasswordFromTemporaryCode = app => async (
  tempCode,
  newpassword
) =>
  apiWrapper(
    app,
    events.authApi.setPasswordFromTemporaryCode,
    alwaysAuthorized,
    { tempCode, newpassword },
    _setPasswordFromTemporaryCode,
    app,
    tempCode,
    newpassword
  )
|
|
||||||
|
|
||||||
// Sets a user's password from a temporary access code.
// Returns true on success; false when the code matches no user, has expired,
// or fails hash verification.
export const _setPasswordFromTemporaryCode = async (
  app,
  tempCode,
  newpassword
) => {
  const currentTime = await app.getEpochTime()

  // the temporary code encodes { id, code }: id locates the user, code is
  // verified against the stored temporaryAccessHash
  const temp = parseTemporaryCode(tempCode)

  const user = $(await _getUsers(app), [
    find(u => u.temporaryAccessId === temp.id),
  ])

  if (!user) {
    return false
  }

  const existingAuth = await app.datastore.loadJson(userAuthFile(user.name))

  // only accept the code while an unexpired temporary-access hash exists
  if (
    isSomething(existingAuth.temporaryAccessHash) &&
    existingAuth.temporaryAccessExpiryEpoch > currentTime
  ) {
    const verified = app.crypto.verify(
      existingAuth.temporaryAccessHash,
      temp.code
    )

    if (verified) {
      // doSet also clears the temporary-access credentials (single use)
      await doSet(app, existingAuth, user.name, newpassword)
      return true
    }
  }

  return false
}
|
|
||||||
|
|
||||||
// Persist a new password hash for the given user, clearing any outstanding
// temporary-access credentials at the same time (codes are single use).
const doSet = async (app, auth, username, newpassword) => {
  Object.assign(auth, {
    temporaryAccessHash: "",
    temporaryAccessExpiryEpoch: 0,
    passwordHash: app.crypto.hash(newpassword),
  })
  await app.datastore.updateJson(userAuthFile(username), auth)
}
|
|
||||||
|
|
||||||
// Public API: score a candidate password's strength.
// Note _scorePassword takes only the password — app is not forwarded.
export const scorePassword = app => password =>
  apiWrapperSync(
    app,
    events.authApi.scorePassword,
    alwaysAuthorized,
    { password },
    _scorePassword,
    password
  )
|
|
||||||
|
|
||||||
// Scores password strength, adapted from
// https://stackoverflow.com/questions/948172/password-strength-meter
// (thank you https://stackoverflow.com/users/46617/tm-lv).
// Returns 0 for a falsy password, otherwise { score, strengthText }.
export const _scorePassword = password => {
  let score = 0
  if (!password) {
    // NOTE: empty/falsy passwords return the bare number 0, not an object —
    // preserved for backward compatibility with existing callers.
    return score
  }

  // Award points per character, discounted for repetition: the k-th
  // occurrence of a character is worth 5/k points.
  const letters = {} // idiomatic literal; was `new Object()`
  for (let i = 0; i < password.length; i++) {
    letters[password[i]] = (letters[password[i]] || 0) + 1
    score += 5.0 / letters[password[i]]
  }

  // Bonus points for mixing character classes.
  const variations = {
    digits: /\d/.test(password),
    lower: /[a-z]/.test(password),
    upper: /[A-Z]/.test(password),
    nonWords: /\W/.test(password),
  }

  let variationCount = 0
  for (const present of Object.values(variations)) {
    if (present) variationCount += 1
  }
  // a single class scores no bonus; each extra class adds 10
  score += (variationCount - 1) * 10

  const strengthText =
    score > 80
      ? "strong"
      : score > 60
      ? "good"
      : score >= 30
      ? "weak"
      : "very weak"

  return {
    // Math.trunc replaces parseInt(score): parseInt coerces the number to a
    // string first; truncation toward zero gives the same result for all
    // reachable scores without the string round-trip.
    score: Math.trunc(score),
    strengthText,
  }
}
|
|
|
@ -1,60 +0,0 @@
|
||||||
import { difference, map, join } from "lodash/fp"
|
|
||||||
import {
|
|
||||||
getLock,
|
|
||||||
isNolock,
|
|
||||||
releaseLock,
|
|
||||||
$,
|
|
||||||
apiWrapper,
|
|
||||||
events,
|
|
||||||
} from "../common"
|
|
||||||
import {
|
|
||||||
USERS_LOCK_FILE,
|
|
||||||
ACCESS_LEVELS_FILE,
|
|
||||||
getUserByName,
|
|
||||||
USERS_LIST_FILE,
|
|
||||||
} from "./authCommon"
|
|
||||||
import { permission } from "./permissions"
|
|
||||||
import { NotFoundError } from "../common/errors"
|
|
||||||
|
|
||||||
// Public API: replace a user's access levels.
// Requires the setUserAccessLevels permission.
export const setUserAccessLevels = app => async (userName, accessLevels) =>
  apiWrapper(
    app,
    events.authApi.setUserAccessLevels,
    permission.setUserAccessLevels.isAuthorized,
    { userName, accessLevels },
    _setUserAccessLevels,
    app,
    userName,
    accessLevels
  )
|
|
||||||
|
|
||||||
// Replaces a user's access levels, guarded by the users file lock.
// Throws when the lock cannot be acquired, when any supplied level is not a
// defined access level, or when the user does not exist.
export const _setUserAccessLevels = async (app, username, accessLevels) => {
  const lock = await getLock(app, USERS_LOCK_FILE, 1000, 1, 0)

  // Fail fast before doing any work — this also means we never hold work
  // behind a lock we did not actually acquire.
  if (isNolock(lock)) {
    throw new Error("Could set user access levels cannot get lock")
  }

  try {
    // FIX: validation previously ran outside the try/finally, so a
    // validation failure left the users lock held forever.
    const actualAccessLevels = $(
      await app.datastore.loadJson(ACCESS_LEVELS_FILE),
      [l => l.levels, map(l => l.name)]
    )

    const missing = difference(accessLevels)(actualAccessLevels)
    if (missing.length > 0) {
      throw new Error(`Invalid access levels supplied: ${join(", ", missing)}`)
    }

    const users = await app.datastore.loadJson(USERS_LIST_FILE)
    const user = getUserByName(users, username)
    if (!user) {
      throw new NotFoundError(`Could not find user with ${username}`)
    }

    user.accessLevels = accessLevels
    await app.datastore.updateJson(USERS_LIST_FILE, users)
  } finally {
    // FIX: await the release so callers cannot observe the lock still held
    // after this function resolves.
    await releaseLock(app, lock)
  }
}
|
|
|
@ -1,93 +0,0 @@
|
||||||
import {
|
|
||||||
values,
|
|
||||||
includes,
|
|
||||||
map,
|
|
||||||
concat,
|
|
||||||
isEmpty,
|
|
||||||
uniqWith,
|
|
||||||
some,
|
|
||||||
flatten,
|
|
||||||
filter,
|
|
||||||
} from "lodash/fp"
|
|
||||||
import { applyRuleSet, makerule } from "../common/validationCommon"
|
|
||||||
import { permissionTypes } from "./authCommon"
|
|
||||||
import {
|
|
||||||
$,
|
|
||||||
isSomething,
|
|
||||||
insensitiveEquals,
|
|
||||||
isNonEmptyString,
|
|
||||||
apiWrapperSync,
|
|
||||||
events,
|
|
||||||
} from "../common"
|
|
||||||
import { getNode } from "../templateApi/hierarchy"
|
|
||||||
import { alwaysAuthorized } from "./permissions"
|
|
||||||
|
|
||||||
// True when t is one of the declared permission types.
const isAllowedType = t => $(permissionTypes, [values, includes(t)])

// True when t is a permission scoped to a record or index hierarchy node
// (and therefore requires a valid nodeKey).
const isModelOrIndexType = t =>
  some(p => p === t)([
    permissionTypes.CREATE_RECORD,
    permissionTypes.UPDATE_RECORD,
    permissionTypes.DELETE_RECORD,
    permissionTypes.READ_RECORD,
    permissionTypes.READ_INDEX,
    permissionTypes.EXECUTE_ACTION,
  ])
|
|
||||||
|
|
||||||
// Validation rules applied to each individual permission object.
const permissionRules = app => [
  makerule("type", "type must be one of allowed types", p =>
    isAllowedType(p.type)
  ),
  makerule(
    "nodeKey",
    "record and index permissions must include a valid nodeKey",
    p =>
      // static permissions need no nodeKey; node permissions must resolve
      // to an existing hierarchy node
      !isModelOrIndexType(p.type) ||
      isSomething(getNode(app.hierarchy, p.nodeKey))
  ),
]

// Validator for a single permission, bound to the app's hierarchy.
const applyPermissionRules = app => applyRuleSet(permissionRules(app))
|
|
||||||
|
|
||||||
// Validation rules applied to each access level against the full set.
const accessLevelRules = allLevels => [
  makerule("name", "name must be set", l => isNonEmptyString(l.name)),
  makerule(
    "name",
    "access level names must be unique",
    l =>
      // case-insensitive uniqueness: exactly one level (itself) may match;
      // empty names are skipped (caught by the rule above)
      isEmpty(l.name) ||
      filter(a => insensitiveEquals(l.name, a.name))(allLevels).length === 1
  ),
]

// Validator for a single access level against all levels.
const applyLevelRules = allLevels => applyRuleSet(accessLevelRules(allLevels))
|
|
||||||
|
|
||||||
// Collects validation errors for one access level: errors from each of its
// permissions, concatenated with errors on the level itself.
export const validateAccessLevel = app => (allLevels, level) => {
  const errs = $(level.permissions, [
    map(applyPermissionRules(app)),
    flatten,
    concat(applyLevelRules(allLevels)(level)),
  ])

  return errs
}
|
|
||||||
|
|
||||||
// Public API: validate a full set of access levels.
// Returns the (de-duplicated) list of validation errors.
export const validateAccessLevels = app => allLevels =>
  apiWrapperSync(
    app,
    events.authApi.validateAccessLevels,
    alwaysAuthorized,
    { allLevels },
    _validateAccessLevels,
    app,
    allLevels
  )
|
|
||||||
|
|
||||||
// Validates every level and de-duplicates identical errors
// (same field, item and message).
export const _validateAccessLevels = (app, allLevels) =>
  $(allLevels, [
    map(l => validateAccessLevel(app)(allLevels, l)),
    flatten,
    uniqWith(
      (x, y) => x.field === y.field && x.item === y.item && x.error === y.error
    ),
  ])
|
|
|
@ -1,51 +0,0 @@
|
||||||
import { map, uniqWith, flatten, filter } from "lodash/fp"
|
|
||||||
import { applyRuleSet, makerule } from "../common/validationCommon"
|
|
||||||
import {
|
|
||||||
$,
|
|
||||||
insensitiveEquals,
|
|
||||||
apiWrapper,
|
|
||||||
events,
|
|
||||||
isNonEmptyString,
|
|
||||||
all,
|
|
||||||
} from "../common"
|
|
||||||
import { alwaysAuthorized } from "./permissions"
|
|
||||||
|
|
||||||
// Validation rules applied to each user against the full user list.
const userRules = allUsers => [
  makerule("name", "username must be set", u => isNonEmptyString(u.name)),
  makerule(
    "accessLevels",
    "user must have at least one access level",
    u => u.accessLevels.length > 0
  ),
  makerule(
    "name",
    "username must be unique",
    // case-insensitive: exactly one user (itself) may match
    u => filter(u2 => insensitiveEquals(u2.name, u.name))(allUsers).length === 1
  ),
  // FIX: corrected user-facing message typo ("stings" -> "strings")
  makerule("accessLevels", "access levels must only contain strings", u =>
    all(isNonEmptyString)(u.accessLevels)
  ),
]
|
|
||||||
|
|
||||||
// Validate a single user against the full user list.
// The leading () keeps the curried (app => ...) shape used by callers;
// the app argument is unused.
export const validateUser = () => (allusers, user) =>
  applyRuleSet(userRules(allusers))(user)
|
|
||||||
|
|
||||||
// Public API: validate all users.
// NOTE(review): wrapped in the async apiWrapper, unlike validateAccessLevels
// which uses apiWrapperSync — callers receive a promise; confirm intended.
export const validateUsers = app => allUsers =>
  apiWrapper(
    app,
    events.authApi.validateUsers,
    alwaysAuthorized,
    { allUsers },
    _validateUsers,
    app,
    allUsers
  )
|
|
||||||
|
|
||||||
// Validates every user and de-duplicates identical errors
// (same field, item and message).
export const _validateUsers = (app, allUsers) =>
  $(allUsers, [
    map(l => validateUser(app)(allUsers, l)),
    flatten,
    uniqWith(
      (x, y) => x.field === y.field && x.item === y.item && x.error === y.error
    ),
  ])
|
|
|
@ -1,57 +0,0 @@
|
||||||
import { safeKey, apiWrapper, events, joinKey } from "../common"
|
|
||||||
import { _deleteRecord } from "../recordApi/delete"
|
|
||||||
import { getAllIdsIterator } from "../indexing/allIds"
|
|
||||||
import { permission } from "../authApi/permissions"
|
|
||||||
import { getCollectionDir } from "../recordApi/recordInfo"
|
|
||||||
import { ensureCollectionIsInitialised } from "./initialise"
|
|
||||||
import { getNodeForCollectionPath } from "../templateApi/hierarchy"
|
|
||||||
|
|
||||||
// Public API: delete a collection and all of its records.
// disableCleanup skips the transaction-cleanup pass (for callers batching
// several destructive operations). Requires the manageCollection permission.
export const deleteCollection = (app, disableCleanup = false) => async key =>
  apiWrapper(
    app,
    events.collectionApi.delete,
    permission.manageCollection.isAuthorized,
    { key },
    _deleteCollection,
    app,
    key,
    disableCleanup
  )
|
|
||||||
|
|
||||||
/*
|
|
||||||
const recordNode = getCollectionNode(app.hierarchy, key);
|
|
||||||
|
|
||||||
*/
|
|
||||||
|
|
||||||
// Deletes every record in the collection, removes its folder, then
// (unless disabled) runs the transaction-cleanup pass.
export const _deleteCollection = async (app, key, disableCleanup) => {
  const collectionKey = safeKey(key)
  const collectionDir = getCollectionDir(app.hierarchy, collectionKey)

  await deleteRecords(app, collectionKey)
  await deleteCollectionFolder(app, collectionKey, collectionDir)

  if (disableCleanup) return
  await app.cleanupTransactions()
}
|
|
||||||
|
|
||||||
// Deletes the collection's folder, then re-creates an empty, initialised
// folder structure in its place.
const deleteCollectionFolder = async (app, key, dir) => {
  await app.datastore.deleteFolder(dir)
  const collectionNode = getNodeForCollectionPath(app.hierarchy)(key)
  await ensureCollectionIsInitialised(app.datastore, collectionNode, dir)
}
|
|
||||||
|
|
||||||
// Iterates every id batch in the collection and deletes each record.
// Batches belonging to other collection keys (nested collections) are
// skipped — their records are removed with their parents.
const deleteRecords = async (app, key) => {
  const iterate = await getAllIdsIterator(app)(key)

  for (let batch = await iterate(); !batch.done; batch = await iterate()) {
    if (batch.result.collectionKey !== key) continue
    for (const id of batch.result.ids) {
      await _deleteRecord(app, joinKey(key, id), true)
    }
  }
}
|
|
|
@ -1,20 +0,0 @@
|
||||||
import { getNodeForCollectionPath } from "../templateApi/hierarchy"
|
|
||||||
import { isNothing, safeKey, apiWrapperSync, events } from "../common"
|
|
||||||
import { alwaysAuthorized } from "../authApi/permissions"
|
|
||||||
|
|
||||||
// Public API: list the record type names allowed in a collection.
// Any caller is authorized.
export const getAllowedRecordTypes = app => key =>
  apiWrapperSync(
    app,
    events.collectionApi.getAllowedRecordTypes,
    alwaysAuthorized,
    { key },
    _getAllowedRecordTypes,
    app,
    key
  )
|
|
||||||
|
|
||||||
// Returns the permitted record type names for the collection key —
// a single-element array, or [] when the key matches no collection node.
const _getAllowedRecordTypes = (app, key) => {
  const node = getNodeForCollectionPath(app.hierarchy)(safeKey(key))
  if (isNothing(node)) return []
  return [node.name]
}
|
|
|
@ -1,11 +0,0 @@
|
||||||
import { getAllIdsIterator } from "../indexing/allIds"
|
|
||||||
import { getAllowedRecordTypes } from "./getAllowedRecordTypes"
|
|
||||||
import { deleteCollection } from "./delete"
|
|
||||||
|
|
||||||
// Collection API surface, with each method bound to the app context.
export const getCollectionApi = app => ({
  getAllowedRecordTypes: getAllowedRecordTypes(app),
  getAllIdsIterator: getAllIdsIterator(app),
  delete: deleteCollection(app),
})

export default getCollectionApi
|
|
|
@ -1,48 +0,0 @@
|
||||||
import { filter } from "lodash/fp"
|
|
||||||
import {
|
|
||||||
getFlattenedHierarchy,
|
|
||||||
isCollectionRecord,
|
|
||||||
isRoot,
|
|
||||||
} from "../templateApi/hierarchy"
|
|
||||||
import { $, allTrue, joinKey } from "../common"
|
|
||||||
|
|
||||||
// Creates the collection directory (and its node-id subfolder) when absent;
// a no-op when the folder already exists.
export const ensureCollectionIsInitialised = async (datastore, node, dir) => {
  const alreadyExists = await datastore.exists(dir)
  if (alreadyExists) return

  await datastore.createFolder(dir)
  await datastore.createFolder(joinKey(dir, node.nodeId))
}
|
|
||||||
|
|
||||||
// Creates folders for every collection record attached directly to the
// hierarchy root.
export const initialiseRootCollections = async (datastore, hierarchy) => {
  // a node qualifies when it is a collection record whose parent is root
  const rootCollectionRecord = allTrue(
    n => isRoot(n.parent()),
    isCollectionRecord
  )

  const flathierarchy = getFlattenedHierarchy(hierarchy)

  const collectionRecords = $(flathierarchy, [filter(rootCollectionRecord)])

  for (const col of collectionRecords) {
    await ensureCollectionIsInitialised(
      datastore,
      col,
      col.collectionPathRegx()
    )
  }
}
|
|
||||||
|
|
||||||
// Ensures a folder exists for every child collection of the given record.
export const initialiseChildCollections = async (app, recordInfo) => {
  const childCollectionRecords = filter(isCollectionRecord)(
    recordInfo.recordNode.children
  )

  for (const child of childCollectionRecords) {
    await ensureCollectionIsInitialised(
      app.datastore,
      child,
      recordInfo.child(child.collectionName)
    )
  }
}
|
|
|
@ -1,128 +0,0 @@
|
||||||
import { cloneDeep, isUndefined } from "lodash/fp"
|
|
||||||
import { generate } from "shortid"
|
|
||||||
import { UnauthorisedError } from "./errors"
|
|
||||||
|
|
||||||
// Wraps an async API function with an authorization check and lifecycle
// event publication (onBegin / onComplete / onError), tracking the call on
// the app-wide call stack. Extra params after func are forwarded to func.
// Returns func's result; rethrows func's errors after publishing onError.
export const apiWrapper = async (
  app,
  eventNamespace,
  isAuthorized,
  eventContext,
  func,
  ...params
) => {
  pushCallStack(app, eventNamespace)

  if (!isAuthorized(app)) {
    // handleNotAuthorized throws, so this return is never reached
    handleNotAuthorized(app, eventContext, eventNamespace)
    return
  }

  const startDate = Date.now()
  const elapsed = () => Date.now() - startDate

  try {
    await app.publish(eventNamespace.onBegin, eventContext)

    const result = await func(...params)

    await publishComplete(app, eventContext, eventNamespace, elapsed, result)
    return result
  } catch (error) {
    await publishError(app, eventContext, eventNamespace, elapsed, error)
    throw error
  }
}
|
|
||||||
|
|
||||||
// Synchronous counterpart of apiWrapper, for non-async API functions.
// NOTE(review): publishComplete/publishError are async; their promises are
// deliberately(?) not awaited here (fire-and-forget), so failures in event
// handlers surface as unhandled rejections — confirm this is intentional.
export const apiWrapperSync = (
  app,
  eventNamespace,
  isAuthorized,
  eventContext,
  func,
  ...params
) => {
  pushCallStack(app, eventNamespace)

  if (!isAuthorized(app)) {
    // handleNotAuthorized throws, so this return is never reached
    handleNotAuthorized(app, eventContext, eventNamespace)
    return
  }

  const startDate = Date.now()
  const elapsed = () => Date.now() - startDate

  try {
    app.publish(eventNamespace.onBegin, eventContext)

    const result = func(...params)

    publishComplete(app, eventContext, eventNamespace, elapsed, result)
    return result
  } catch (error) {
    publishError(app, eventContext, eventNamespace, elapsed, error)
    throw error
  }
}
|
|
||||||
|
|
||||||
// Publishes an onError event for the failed authorization check, then
// throws an UnauthorisedError.
// NOTE(review): publishError is async and is not awaited — the throw must
// not wait on event handlers.
const handleNotAuthorized = (app, eventContext, eventNamespace) => {
  const unauthorised = new UnauthorisedError(`Unauthorized: ${eventNamespace}`)
  publishError(app, eventContext, eventNamespace, () => 0, unauthorised)
  throw unauthorised
}
|
|
||||||
|
|
||||||
// Pushes a call frame onto the app-wide call stack, creating the stack on
// first use. seedCallId, when supplied, links this thread of calls back to
// the call that spawned it; otherwise the new callId seeds the stack.
const pushCallStack = (app, eventNamespace, seedCallId) => {
  const callId = generate()

  if (isUndefined(app.calls)) {
    app.calls = {
      seedCallId: isUndefined(seedCallId) ? callId : seedCallId,
      threadCallId: callId,
      stack: [],
    }
  }

  app.calls.stack.push({ namespace: eventNamespace, callId })
}
|
|
||||||
|
|
||||||
// Pops the top call frame; once the stack empties, removes the call-stack
// bookkeeping from the app entirely.
const popCallStack = app => {
  const { stack } = app.calls
  stack.pop()
  if (stack.length === 0) {
    delete app.calls
  }
}
|
|
||||||
|
|
||||||
// Publishes an onError event carrying the error and elapsed milliseconds
// (on a deep copy of the context), then pops this call off the call stack.
const publishError = async (
  app,
  eventContext,
  eventNamespace,
  elapsed,
  err
) => {
  const errorContext = cloneDeep(eventContext)
  errorContext.error = err
  errorContext.elapsed = elapsed()

  await app.publish(eventNamespace.onError, errorContext)
  popCallStack(app)
}
|
|
||||||
|
|
||||||
// Publishes an onComplete event carrying the result and elapsed
// milliseconds (on a deep copy of the context), pops the call stack, and
// passes the result through.
const publishComplete = async (
  app,
  eventContext,
  eventNamespace,
  elapsed,
  result
) => {
  const completeContext = cloneDeep(eventContext)
  completeContext.result = result
  completeContext.elapsed = elapsed()

  await app.publish(eventNamespace.onComplete, completeContext)
  popCallStack(app)
  return result
}
|
|
||||||
|
|
||||||
export default apiWrapper
|
|
|
@ -1,26 +0,0 @@
|
||||||
import { compileCode as cCode } from "@nx-js/compiler-util"
|
|
||||||
import { includes } from "lodash/fp"
|
|
||||||
|
|
||||||
// Compiles an expression or statement string into a function via
// @nx-js/compiler-util. Code without an explicit `return` is wrapped in
// `return (...)` — after stripping a single trailing semicolon — so bare
// expressions evaluate to their value.
// Throws the compiler's error with the offending code in the message.
export const compileCode = code => {
  let safeCode

  if (includes("return ")(code)) {
    safeCode = code
  } else {
    const trimmed = code.trim().replace(/;$/, "")
    safeCode = `return (${trimmed})`
  }

  try {
    return cCode(safeCode)
  } catch (e) {
    e.message = `Error compiling code : ${code} : ${e.message}`
    throw e
  }
}
|
|
|
@ -1,34 +0,0 @@
|
||||||
// HTTP 400 — the request was malformed or failed validation.
export class BadRequestError extends Error {
  constructor(message) {
    super(message)
    // set name so logs/toString identify the error class
    this.name = "BadRequestError"
    this.httpStatusCode = 400
  }
}
|
|
||||||
|
|
||||||
// HTTP 401 — the caller is not authenticated / not authorized.
export class UnauthorisedError extends Error {
  constructor(message) {
    super(message)
    // set name so logs/toString identify the error class
    this.name = "UnauthorisedError"
    this.httpStatusCode = 401
  }
}
|
|
||||||
|
|
||||||
// HTTP 403 — the caller is authenticated but not permitted.
export class ForbiddenError extends Error {
  constructor(message) {
    super(message)
    // set name so logs/toString identify the error class
    this.name = "ForbiddenError"
    this.httpStatusCode = 403
  }
}
|
|
||||||
|
|
||||||
// HTTP 404 — the requested resource does not exist.
export class NotFoundError extends Error {
  constructor(message) {
    super(message)
    // set name so logs/toString identify the error class
    this.name = "NotFoundError"
    this.httpStatusCode = 404
  }
}
|
|
||||||
|
|
||||||
// HTTP 409 — the request conflicts with current resource state.
export class ConflictError extends Error {
  constructor(message) {
    super(message)
    // set name so logs/toString identify the error class
    this.name = "ConflictError"
    this.httpStatusCode = 409
  }
}
|
|
|
@ -1,74 +0,0 @@
|
||||||
import { union, reduce } from "lodash/fp"
|
|
||||||
|
|
||||||
// Every API method publishes these three lifecycle events, optionally
// extended with method-specific ones.
const commonPlus = extra => union(["onBegin", "onComplete", "onError"])(extra)

// Methods with no extra events.
const common = () => commonPlus([])
|
|
||||||
|
|
||||||
// Declares every event, grouped by API area and method.
// Each method maps to the list of its (short) event names; the loops below
// rewrite these arrays into { shortName: "area:method:shortName" } objects.
const _events = {
  recordApi: {
    save: commonPlus(["onInvalid", "onRecordUpdated", "onRecordCreated"]),
    delete: common(),
    getContext: common(),
    getNew: common(),
    load: common(),
    validate: common(),
    uploadFile: common(),
    downloadFile: common(),
  },
  authApi: {
    authenticate: common(),
    authenticateTemporaryAccess: common(),
    createTemporaryAccess: common(),
    createUser: common(),
    enableUser: common(),
    disableUser: common(),
    loadAccessLevels: common(),
    getNewAccessLevel: common(),
    getNewUser: common(),
    getNewUserAuth: common(),
    getUsers: common(),
    saveAccessLevels: common(),
    isAuthorized: common(),
    changeMyPassword: common(),
    setPasswordFromTemporaryCode: common(),
    scorePassword: common(),
    isValidPassword: common(),
    validateUser: common(),
    validateAccessLevels: common(),
    setUserAccessLevels: common(),
  },
  templateApi: {
    saveApplicationHierarchy: common(),
    saveActionsAndTriggers: common(),
  },
  actionsApi: {
    execute: common(),
  },
}
|
|
||||||
|
|
||||||
// Flat list of every fully-qualified event name.
const _eventsList = []

// Fully-qualified event name: "area:method:eventName".
const makeEvent = (area, method, name) => `${area}:${method}:${name}`

// Expand each short event name ("onBegin", ...) into an object mapping
// short name -> fully-qualified name, e.g.
// events.recordApi.save.onBegin === "recordApi:save:onBegin".
for (const areaKey in _events) {
  for (const methodKey in _events[areaKey]) {
    _events[areaKey][methodKey] = reduce((obj, s) => {
      obj[s] = makeEvent(areaKey, methodKey, s)
      return obj
    }, {})(_events[areaKey][methodKey])
  }
}

// Collect every fully-qualified name into the flat list.
for (const areaKey in _events) {
  for (const methodKey in _events[areaKey]) {
    for (const name in _events[areaKey][methodKey]) {
      _eventsList.push(_events[areaKey][methodKey][name])
    }
  }
}

export const events = _events

export const eventsList = _eventsList

export default { events: _events, eventsList: _eventsList }
|
|
|
@ -1,317 +0,0 @@
|
||||||
import {
|
|
||||||
head,
|
|
||||||
tail,
|
|
||||||
findIndex,
|
|
||||||
startsWith,
|
|
||||||
dropRight,
|
|
||||||
flow,
|
|
||||||
takeRight,
|
|
||||||
trim,
|
|
||||||
replace,
|
|
||||||
} from "lodash"
|
|
||||||
import {
|
|
||||||
some,
|
|
||||||
reduce,
|
|
||||||
isEmpty,
|
|
||||||
isArray,
|
|
||||||
join,
|
|
||||||
isString,
|
|
||||||
isInteger,
|
|
||||||
isDate,
|
|
||||||
toNumber,
|
|
||||||
isUndefined,
|
|
||||||
isNaN,
|
|
||||||
isNull,
|
|
||||||
constant,
|
|
||||||
split,
|
|
||||||
includes,
|
|
||||||
filter,
|
|
||||||
} from "lodash/fp"
|
|
||||||
import { events, eventsList } from "./events"
|
|
||||||
import { apiWrapper } from "./apiWrapper"
|
|
||||||
import { getLock, NO_LOCK, isNolock } from "./lock"
|
|
||||||
import crypto from "./nodeCrypto"
|
|
||||||
|
|
||||||
// this is the combinator function
|
|
||||||
// Combinator: composes funcs left-to-right into a single unary function
// (an empty list yields the identity function).
export const $$ = (...funcs) => arg =>
  funcs.reduce((acc, fn) => fn(acc), arg)

// Pipe: threads arg through the array of funcs.
export const $ = (arg, funcs) => $$(...funcs)(arg)
|
|
||||||
|
|
||||||
// Keys are "/"-separated paths.
export const keySep = "/"

// Strip leading/trailing key separators.
const trimKeySep = str => trim(str, keySep)
const splitByKeySep = str => split(keySep)(str)

// Normalise a key: ensure exactly one leading separator.
// NOTE: lodash `replace` with a string pattern replaces only the first
// occurrence, which is the doubled leading separator here.
export const safeKey = key =>
  replace(`${keySep}${trimKeySep(key)}`, `${keySep}${keySep}`, keySep)

// Join key parts into a safe key. Accepts either varargs or a single array
// argument; null/undefined/empty parts are dropped.
export const joinKey = (...strs) => {
  // FIX: was `(strs.length === 1) & isArray(strs[0])` — bitwise AND on a
  // boolean; logical && expresses the intent and short-circuits.
  const paramsOrArray = strs.length === 1 && isArray(strs[0]) ? strs[0] : strs
  return $(paramsOrArray, [
    filter(s => !isUndefined(s) && !isNull(s) && s.toString().length > 0),
    join(keySep),
    safeKey,
  ])
}

// Key decomposition helpers.
export const splitKey = $$(trimKeySep, splitByKeySep)
export const getDirFomKey = $$(splitKey, dropRight, p => joinKey(...p))
export const getFileFromKey = $$(splitKey, takeRight, head)
|
|
||||||
|
|
||||||
// Well-known configuration file locations (all "/"-separated keys).
export const configFolder = `${keySep}.config`
export const fieldDefinitions = joinKey(configFolder, "fields.json")
export const templateDefinitions = joinKey(configFolder, "templates.json")
export const appDefinitionFile = joinKey(configFolder, "appDefinition.json")
// Key of the directory-index file for a given folder path.
export const dirIndex = folderPath =>
  joinKey(configFolder, "dir", ...splitKey(folderPath), "dir.idx")
// Directory-index key for the folder containing a given file key.
export const getIndexKeyFromFileKey = $$(getDirFomKey, dirIndex)
|
|
||||||
|
|
||||||
// Calls exists() when val is defined; otherwise calls notExists(), or
// returns undefined when no notExists handler is given.
export const ifExists = (val, exists, notExists) => {
  if (!isUndefined(val)) return exists()
  if (isUndefined(notExists)) return undefined
  return notExists()
}

// val when defined, otherwise defaultVal.
export const getOrDefault = (val, defaultVal) =>
  ifExists(
    val,
    () => val,
    () => defaultVal
  )
|
|
||||||
|
|
||||||
// Negates a unary predicate.
export const not = func => val => !func(val)
export const isDefined = not(isUndefined)
export const isNonNull = not(isNull)
export const isNotNaN = not(isNaN)
|
|
||||||
|
|
||||||
// True when every predicate passes for val.
// The accumulator starts at null so the first predicate always runs;
// note there is no short-circuit — every predicate is evaluated.
export const allTrue = (...funcArgs) => val =>
  reduce(
    (result, conditionFunc) =>
      (isNull(result) || result == true) && conditionFunc(val),
    null
  )(funcArgs)

// True when at least one predicate passes for val (no short-circuit).
export const anyTrue = (...funcArgs) => val =>
  reduce(
    (result, conditionFunc) => result == true || conditionFunc(val),
    null
  )(funcArgs)
|
|
||||||
|
|
||||||
// Case-insensitive, whitespace-trimmed string equality.
export const insensitiveEquals = (str1, str2) => {
  const normalise = s => s.trim().toLowerCase()
  return normalise(str1) === normalise(str2)
}
|
|
||||||
|
|
||||||
// "Something" = defined, non-null, and not NaN.
export const isSomething = allTrue(isDefined, isNonNull, isNotNaN)
export const isNothing = not(isSomething)
export const isNothingOrEmpty = v => isNothing(v) || isEmpty(v)
// val when it is something, otherwise the lazily-computed default.
export const somethingOrGetDefault = getDefaultFunc => val =>
  isSomething(val) ? val : getDefaultFunc()
export const somethingOrDefault = (val, defaultVal) =>
  somethingOrGetDefault(constant(defaultVal))(val)

// Map val when it is something, otherwise return defaultVal.
export const mapIfSomethingOrDefault = (mapFunc, defaultVal) => val =>
  isSomething(val) ? mapFunc(val) : defaultVal

export const mapIfSomethingOrBlank = mapFunc =>
  mapIfSomethingOrDefault(mapFunc, "")
|
|
||||||
|
|
||||||
// True when no element of collection satisfies predicate.
export const none = predicate => collection => !some(predicate)(collection)

// True when every element satisfies predicate (no counter-example).
export const all = predicate => collection =>
  none(v => !predicate(v))(collection)

// Negation of lodash isEmpty.
export const isNotEmpty = ob => !isEmpty(ob)

// True for `async function` values (constructor-name check; this does
// not detect plain functions that merely return promises).
export const isAsync = fn => fn.constructor.name === "AsyncFunction"

// Non-empty array / non-empty string predicates.
export const isNonEmptyArray = allTrue(isArray, isNotEmpty)
export const isNonEmptyString = allTrue(isString, isNotEmpty)
|
|
||||||
/**
 * Wrap a call so that any thrown exception becomes the result of
 * failFunc.
 * @param failFunc fallback invoked (with no arguments) when func throws
 * @returns (func, ...args) => func(...args), or failFunc() on throw
 */
export const tryOr = failFunc => (func, ...args) => {
  try {
    // BUG FIX: the original called func.apply(null, ...args), which
    // spreads the args array into apply's parameter list instead of
    // passing it as the argument array — apply then throws for any
    // non-array first argument, so every call with arguments silently
    // fell through to failFunc.
    return func(...args)
  } catch (_) {
    return failFunc()
  }
}
|
|
||||||
|
|
||||||
/**
 * Async variant of tryOr: awaits func(...args); on rejection or throw,
 * awaits and returns failFunc() instead.
 */
export const tryAwaitOr = failFunc => async (func, ...args) => {
  try {
    // BUG FIX: the original used func.apply(null, ...args), spreading
    // the args array into apply's parameter list rather than passing
    // the array — any call with arguments threw and fell through to
    // failFunc.
    return await func(...args)
  } catch (_) {
    return await failFunc()
  }
}
|
|
||||||
|
|
||||||
/**
 * Run func(); when it throws, prefix the error's message with
 * errorPrefix and rethrow the same error (stack preserved).
 */
export const defineError = (func, errorPrefix) => {
  try {
    return func()
  } catch (error) {
    error.message = `${errorPrefix} : ${error.message}`
    throw error
  }
}
|
|
||||||
|
|
||||||
// tryOr/tryAwaitOr variants whose fallback does nothing and yields
// undefined — i.e. "attempt this, ignore any failure".
export const tryOrIgnore = tryOr(() => {})
export const tryAwaitOrIgnore = tryAwaitOr(async () => {})
|
|
||||||
/** True when invoking func throws. */
export const causesException = func => {
  let threw = false
  try {
    func()
  } catch (e) {
    threw = true
  }
  return threw
}

/** True when invoking func completes without throwing. */
export const executesWithoutException = func => !causesException(func)
|
|
||||||
|
|
||||||
// tryOr with a constant fallback *value* rather than a fallback
// function.
export const handleErrorWith = returnValInError =>
  tryOr(constant(returnValInError))

// Swallow errors entirely, yielding undefined on failure.
export const handleErrorWithUndefined = handleErrorWith(undefined)
|
|
||||||
|
|
||||||
/**
 * switchCase([pred, handler], ...)(value): returns handler(value) for
 * the first pair whose pred(value) === true (strict); undefined when
 * no pair matches. Pairs are evaluated lazily, in order.
 */
export const switchCase = (...cases) => value => {
  if (cases.length === 0) return undefined
  const [[predicate, handler], ...remaining] = cases
  if (predicate(value) === true) return handler(value)
  return switchCase(...remaining)(value)
}
|
|
||||||
|
|
||||||
// Strict-equality predicate factory.
export const isValue = val1 => val2 => val1 === val2

// Membership predicate over the supplied values (lodash includes).
export const isOneOf = (...vals) => val => includes(val)(vals)

// Always-true predicate, for use as the final switchCase clause.
export const defaultCase = constant(true)

// Applies the `match` predicate to obj[member].
export const memberMatches = (member, match) => obj => match(obj[member])
|
|
||||||
|
|
||||||
// Curried starts-with test. NOTE(review): with lodash/fp's data-last
// startsWith(target)(string) signature, startsWith(searchIn, searchFor)
// would test whether searchFor starts with searchIn — confirm the
// intended argument order against the import at the head of this file.
export const StartsWith = searchFor => searchIn =>
  startsWith(searchIn, searchFor)

// True when array contains val (strict equality).
export const contains = val => array => findIndex(array, v => v === val) > -1
|
|
||||||
|
|
||||||
/**
 * 32-bit string hash (the Java String.hashCode algorithm: h = h*31 + c
 * per char, truncated to 32 bits).
 * Returns the hash as a string; negative hashes are rendered as
 * "n<abs(hash)>" so the result never starts with "-".
 * Note: for the empty string the *number* 0 is returned (not a
 * string), preserving the original behavior.
 */
export const getHashCode = s => {
  let hash = 0
  if (s.length == 0) return hash

  for (let i = 0; i < s.length; i++) {
    const charCode = s.charCodeAt(i)
    hash = (hash << 5) - hash + charCode
    hash |= 0 // truncate to a 32-bit integer
  }

  // converting to string, but don't want a "-" prefix
  if (hash < 0) {
    return `n${(hash * -1).toString()}`
  }
  return hash.toString()
}
|
|
||||||
|
|
||||||
// thanks to https://blog.grossman.io/how-to-write-async-await-without-try-catch-blocks-in-javascript/
|
|
||||||
/**
 * Await a promise, returning an [error, result] tuple instead of
 * throwing — exactly one element is defined.
 * (per https://blog.grossman.io/how-to-write-async-await-without-try-catch-blocks-in-javascript/)
 */
export const awEx = async promise => {
  let result
  try {
    result = await promise
  } catch (error) {
    return [error, undefined]
  }
  return [undefined, result]
}
|
|
||||||
|
|
||||||
// True for integers within +/- Number.MAX_SAFE_INTEGER (isInteger is
// the lodash predicate imported at the head of this file).
export const isSafeInteger = n =>
  isInteger(n) &&
  n <= Number.MAX_SAFE_INTEGER &&
  n >= 0 - Number.MAX_SAFE_INTEGER
|
|
||||||
|
|
||||||
// null stays null; Date instances pass through; anything else goes
// through the Date constructor (which may yield an Invalid Date).
export const toDateOrNull = s =>
  isNull(s) ? null : isDate(s) ? s : new Date(s)

// null stays null; otherwise true only for "true" or true.
export const toBoolOrNull = s => (isNull(s) ? null : s === "true" || s === true)

// null stays null; otherwise lodash toNumber coercion (may yield NaN).
export const toNumberOrNull = s => (isNull(s) ? null : toNumber(s))

// True for an array whose every element is a string.
export const isArrayOfString = opts => isArray(opts) && all(isString)(opts)
|
|
||||||
|
|
||||||
// Append every element of `items` to `target`, mutating target.
export const pushAll = (target, items) => {
  for (const item of items) {
    target.push(item)
  }
}
|
|
||||||
|
|
||||||
// Resolves after `duration` milliseconds.
export const pause = async duration =>
  new Promise(resolve => setTimeout(resolve, duration))

/**
 * Call fn(...args); on failure wait `delay` ms and try again, up to
 * `retries` attempts in total. The final attempt's error is rethrown.
 */
export const retry = async (fn, retries, delay, ...args) => {
  try {
    return await fn(...args)
  } catch (err) {
    if (retries <= 1) throw err
    await pause(delay)
    return await retry(fn, retries - 1, delay, ...args)
  }
}
|
|
||||||
|
|
||||||
export { events } from "./events"
|
|
||||||
export { apiWrapper, apiWrapperSync } from "./apiWrapper"
|
|
||||||
export { getLock, NO_LOCK, releaseLock, extendLock, isNolock } from "./lock"
|
|
||||||
export { crypto }
|
|
||||||
|
|
||||||
// Default export: the full "common" utility surface as one object
// (mirrors the named exports above plus re-exported members).
export default {
  // presence / predicate helpers
  ifExists,
  getOrDefault,
  isDefined,
  isNonNull,
  isNotNaN,
  allTrue,
  isSomething,
  mapIfSomethingOrDefault,
  mapIfSomethingOrBlank,
  // well-known config paths
  configFolder,
  fieldDefinitions,
  isNothing,
  not,
  switchCase,
  defaultCase,
  StartsWith,
  contains,
  templateDefinitions,
  // error handling
  handleErrorWith,
  handleErrorWithUndefined,
  tryOr,
  tryOrIgnore,
  tryAwaitOr,
  tryAwaitOrIgnore,
  // key / path manipulation
  dirIndex,
  keySep,
  $,
  $$,
  getDirFomKey,
  getFileFromKey,
  splitKey,
  somethingOrDefault,
  getIndexKeyFromFileKey,
  joinKey,
  somethingOrGetDefault,
  appDefinitionFile,
  isValue,
  all,
  isOneOf,
  memberMatches,
  defineError,
  anyTrue,
  isNonEmptyArray,
  causesException,
  executesWithoutException,
  none,
  getHashCode,
  awEx,
  // events / locking re-exports and coercions
  apiWrapper,
  events,
  eventsList,
  isNothingOrEmpty,
  isSafeInteger,
  toNumber,
  toDate: toDateOrNull,
  toBool: toBoolOrNull,
  isArrayOfString,
  getLock,
  NO_LOCK,
  isNolock,
  insensitiveEquals,
  pause,
  retry,
  pushAll,
}
|
|
|
@ -1,109 +0,0 @@
|
||||||
import { split } from "lodash/fp"
|
|
||||||
import { $ } from "./index"
|
|
||||||
|
|
||||||
const lockOverlapMilliseconds = 10
|
|
||||||
|
|
||||||
/**
 * Acquire a file-based lock by creating lockFile — creation fails when
 * the file already exists, which makes it the atomic "acquire".
 *
 * @param app application container (datastore + getEpochTime)
 * @param lockFile key of the lock file
 * @param timeoutMilliseconds how long the lock remains valid
 * @param maxLockRetries attempts before giving up
 * @param retryCount internal recursion counter
 * @returns a lock object { timeout, key, totalTimeout }, or NO_LOCK
 */
export const getLock = async (
  app,
  lockFile,
  timeoutMilliseconds,
  maxLockRetries,
  retryCount = 0
) => {
  try {
    const expires = (await app.getEpochTime()) + timeoutMilliseconds

    const lock = {
      timeout: expires,
      key: lockFile,
      totalTimeout: timeoutMilliseconds,
    }

    // atomic acquire: throws when the lock file already exists
    await app.datastore.createFile(
      lockFile,
      getLockFileContent(lock.totalTimeout, lock.timeout)
    )

    return lock
  } catch (e) {
    if (retryCount == maxLockRetries) {
      return NO_LOCK
    }

    // someone else holds the lock — inspect the existing lock file
    const existingLock = parseLockFileContent(
      lockFile,
      await app.datastore.loadFile(lockFile)
    )

    const currentEpochTime = await app.getEpochTime()

    // still valid: do not steal it
    if (currentEpochTime < existingLock.timeout) {
      return NO_LOCK
    }

    // expired: best-effort removal, then retry
    try {
      await app.datastore.deleteFile(lockFile)
    } catch (_) {
      // another process may have deleted it first - ignore
    }

    await sleepForRetry()

    return await getLock(
      app,
      lockFile,
      timeoutMilliseconds,
      maxLockRetries,
      retryCount + 1
    )
  }
}
|
|
||||||
|
|
||||||
// Serialize a lock as "<totalTimeout>:<epochTimeout>".
export const getLockFileContent = (totalTimeout, epochTime) => {
  return `${totalTimeout}:${epochTime.toString()}`
}
|
|
||||||
|
|
||||||
/**
 * Parse "<totalTimeout>:<epochTimeout>" lock-file content into a lock
 * object ({ totalTimeout, timeout, key }).
 *
 * Fix: use the Number(...) conversion function instead of the original
 * `new Number(...)`, which produced boxed Number *objects* (typeof
 * "object") rather than primitives — these fail strict comparisons and
 * serialize inconsistently. Also uses native String.prototype.split,
 * equivalent to the lodash/fp split(":") the original piped through.
 */
const parseLockFileContent = (key, content) => {
  const [totalTimeout, timeout] = content.split(":")
  return {
    totalTimeout: Number(totalTimeout),
    timeout: Number(timeout),
    key,
  }
}
|
|
||||||
|
|
||||||
/**
 * Delete the lock file, but only while the lock is still held (with a
 * small overlap margin) — an expired lock may already belong to
 * another process.
 */
export const releaseLock = async (app, lock) => {
  const now = await app.getEpochTime()
  const stillHeld = now < lock.timeout - lockOverlapMilliseconds
  if (!stillHeld) return

  try {
    await app.datastore.deleteFile(lock.key)
  } catch (_) {
    // best effort - ignore delete failures
  }
}
|
|
||||||
|
|
||||||
/**
 * Extend a held lock's expiry by its original total timeout, updating
 * the lock file. Returns the updated lock, or NO_LOCK when the lock
 * has already timed out (or the file update failed).
 *
 * Fix: the original read `lock.timeoutMilliseconds`, a property that
 * is never set on lock objects (getLock stores `totalTimeout`), so the
 * new timeout was always NaN and extended locks immediately appeared
 * expired.
 */
export const extendLock = async (app, lock) => {
  const currentEpochTime = await app.getEpochTime()
  // only extend if not already timed out (with overlap margin)
  if (currentEpochTime < lock.timeout - lockOverlapMilliseconds) {
    try {
      lock.timeout = currentEpochTime + lock.totalTimeout
      await app.datastore.updateFile(
        lock.key,
        getLockFileContent(lock.totalTimeout, lock.timeout)
      )
      return lock
    } catch (_) {
      // fall through to NO_LOCK
    }
  }
  return NO_LOCK
}
|
|
||||||
|
|
||||||
// Sentinel returned when a lock could not be acquired or extended.
export const NO_LOCK = "no lock"

// True when `id` is the NO_LOCK sentinel.
export const isNolock = id => id === NO_LOCK
|
|
||||||
|
|
||||||
// Wait one lock-overlap period before retrying acquisition.
const sleepForRetry = () =>
  new Promise(resolve => setTimeout(resolve, lockOverlapMilliseconds))
|
|
|
@ -1,14 +0,0 @@
|
||||||
import bcrypt from "bcryptjs"
|
|
||||||
|
|
||||||
/** Hash a plaintext password with bcrypt (cost factor 10). */
const hash = password => bcrypt.hashSync(password, 10)

/** Check a plaintext password against a bcrypt hash. */
const verify = (hash, password) => bcrypt.compareSync(password, hash)

export default {
  hash,
  verify,
}
|
|
|
@ -1,14 +0,0 @@
|
||||||
import { filter, map } from "lodash/fp"
|
|
||||||
import { $, isSomething } from "./index"
|
|
||||||
|
|
||||||
// True when s is present and contains non-whitespace characters.
export const stringNotEmpty = s => isSomething(s) && s.trim().length > 0

// Build a validation rule; isValid is a predicate over the item.
export const makerule = (field, error, isValid) => {
  return { field, error, isValid }
}

// A failed rule together with the offending item.
export const validationError = (rule, item) => {
  return { ...rule, item }
}
|
|
||||||
|
|
||||||
// Apply every rule in ruleSet to itemToValidate, keeping only the
// failures (validation errors).
export const applyRuleSet = ruleSet => itemToValidate =>
  $(ruleSet, [map(applyRule(itemToValidate)), filter(isSomething)])

// null when the rule passes; otherwise a validationError for the item.
export const applyRule = itemTovalidate => rule => {
  const passed = rule.isValid(itemTovalidate)
  return passed ? null : validationError(rule, itemTovalidate)
}
|
|
|
@ -1,112 +0,0 @@
|
||||||
import getRecordApi from "./recordApi"
|
|
||||||
import getCollectionApi from "./collectionApi"
|
|
||||||
import getIndexApi from "./indexApi"
|
|
||||||
import getTemplateApi from "./templateApi"
|
|
||||||
import getAuthApi from "./authApi"
|
|
||||||
import getActionsApi from "./actionsApi"
|
|
||||||
import { setupDatastore, createEventAggregator } from "./appInitialise"
|
|
||||||
import { initialiseActions } from "./actionsApi/initialise"
|
|
||||||
import { isSomething, crypto } from "./common"
|
|
||||||
import { setCleanupFunc } from "./transactions/setCleanupFunc"
|
|
||||||
import { generateFullPermissions } from "./authApi/generateFullPermissions"
|
|
||||||
import { getApplicationDefinition } from "./templateApi/getApplicationDefinition"
|
|
||||||
import common from "./common"
|
|
||||||
import { getBehaviourSources } from "./templateApi/getBehaviourSources"
|
|
||||||
import hierarchy from "./templateApi/hierarchy"
|
|
||||||
|
|
||||||
/**
 * Build the full set of budibase APIs over the supplied datastore.
 *
 * @param store raw datastore; wrapped via setupDatastore
 * @param behaviourSources action behaviour implementations; loaded
 *   from the datastore when not supplied
 * @param cleanupTransactions transaction cleanup hook passed to
 *   setCleanupFunc
 * @param getEpochTime async clock override; defaults to wall-clock
 *   epoch milliseconds
 * @param crypto crypto implementation placed on the app container
 * @param appDefinition application definition; loaded from the
 *   datastore when not supplied
 * @returns the apis object (recordApi, templateApi, collectionApi,
 *   indexApi, authApi, actionsApi, subscribe, authenticateAs,
 *   withFullAccess, asUser, actions)
 */
export const getAppApis = async (
  store,
  behaviourSources = null,
  cleanupTransactions = null,
  getEpochTime = null,
  crypto = null,
  appDefinition = null
) => {
  store = setupDatastore(store)

  // anything not supplied by the caller is loaded from the datastore
  if (!appDefinition) appDefinition = await getApplicationDefinition(store)()

  if (!behaviourSources) behaviourSources = await getBehaviourSources(store)

  const eventAggregator = createEventAggregator()

  // shared application container threaded through every sub-api
  const app = {
    datastore: store,
    crypto,
    publish: eventAggregator.publish,
    hierarchy: appDefinition.hierarchy,
    actions: appDefinition.actions,
  }

  const templateApi = getTemplateApi(app)

  setCleanupFunc(app, cleanupTransactions)

  // default clock: wall-clock epoch milliseconds
  app.getEpochTime = isSomething(getEpochTime)
    ? getEpochTime
    : async () => new Date().getTime()

  const recordApi = getRecordApi(app)
  const collectionApi = getCollectionApi(app)
  const indexApi = getIndexApi(app)
  const authApi = getAuthApi(app)
  const actionsApi = getActionsApi(app)

  // authenticate and install the resulting user on the app container
  const authenticateAs = async (username, password) => {
    app.user = await authApi.authenticate(username, password)
  }

  // run as a pseudo-user holding every permission
  const withFullAccess = () => userWithFullAccess(app)

  const asUser = user => {
    app.user = user
  }

  let apis = {
    recordApi,
    templateApi,
    collectionApi,
    indexApi,
    authApi,
    actionsApi,
    subscribe: eventAggregator.subscribe,
    authenticateAs,
    withFullAccess,
    asUser,
  }

  // actions are initialised last: they need the assembled apis object
  apis.actions = initialiseActions(
    eventAggregator.subscribe,
    behaviourSources,
    appDefinition.actions,
    appDefinition.triggers,
    apis
  )

  return apis
}
|
|
||||||
|
|
||||||
/**
 * Install (and return) a non-persisted "app" user holding every
 * permission for this hierarchy on app.user.
 */
export const userWithFullAccess = app => {
  const fullAccessUser = {
    name: "app",
    permissions: generateFullPermissions(app),
    isUser: false,
    temp: false,
  }
  app.user = fullAccessUser
  return fullAccessUser
}
|
|
||||||
|
|
||||||
export { events, eventsList } from "./common/events"
|
|
||||||
export { getTemplateApi } from "./templateApi"
|
|
||||||
export { getRecordApi } from "./recordApi"
|
|
||||||
export { getCollectionApi } from "./collectionApi"
|
|
||||||
export { getAuthApi } from "./authApi"
|
|
||||||
export { getIndexApi } from "./indexApi"
|
|
||||||
export { setupDatastore } from "./appInitialise"
|
|
||||||
export { getActionsApi } from "./actionsApi"
|
|
||||||
export { initialiseData } from "./appInitialise/initialiseData"
|
|
||||||
export { hierarchy }
|
|
||||||
export { common }
|
|
||||||
export { crypto }
|
|
||||||
|
|
||||||
export default getAppApis
|
|
|
@ -1,180 +0,0 @@
|
||||||
import { has, isNumber, isUndefined } from "lodash/fp"
|
|
||||||
import { compileCode } from "../common/compileCode"
|
|
||||||
import { safeKey, apiWrapper, events, isNonEmptyString } from "../common"
|
|
||||||
import { iterateIndex } from "../indexing/read"
|
|
||||||
import {
|
|
||||||
getUnshardedIndexDataKey,
|
|
||||||
getShardKeysInRange,
|
|
||||||
} from "../indexing/sharding"
|
|
||||||
import {
|
|
||||||
getExactNodeForKey,
|
|
||||||
isIndex,
|
|
||||||
isShardedIndex,
|
|
||||||
} from "../templateApi/hierarchy"
|
|
||||||
import { CONTINUE_READING_RECORDS } from "../indexing/serializer"
|
|
||||||
import { permission } from "../authApi/permissions"
|
|
||||||
import { BadRequestError } from "../common/errors"
|
|
||||||
import { getIndexDir } from "./getIndexDir"
|
|
||||||
|
|
||||||
/**
 * indexApi.aggregates: read aggregate totals (count/sum/min/max/mean)
 * for an index, optionally range-bounded for sharded indexes.
 * Wrapped with apiWrapper for permission checks and event publishing.
 */
export const aggregates = app => async (
  indexKey,
  rangeStartParams = null,
  rangeEndParams = null
) =>
  apiWrapper(
    app,
    events.indexApi.aggregates,
    permission.readIndex.isAuthorized(indexKey),
    { indexKey, rangeStartParams, rangeEndParams },
    _aggregates,
    app,
    indexKey,
    rangeStartParams,
    rangeEndParams
  )
|
|
||||||
|
|
||||||
// Implementation behind aggregates(): resolve the index node and its
// directory, then read aggregates from every relevant shard (merged
// together) or from the single unsharded index file.
const _aggregates = async (app, indexKey, rangeStartParams, rangeEndParams) => {
  indexKey = safeKey(indexKey)
  const indexNode = getExactNodeForKey(app.hierarchy)(indexKey)
  const indexDir = getIndexDir(app.hierarchy, indexKey)

  if (!isIndex(indexNode)) {
    throw new BadRequestError("supplied key is not an index")
  }

  if (isShardedIndex(indexNode)) {
    // only shards that could hold entries within the requested range
    const shardKeys = await getShardKeysInRange(
      app,
      indexNode,
      indexDir,
      rangeStartParams,
      rangeEndParams
    )
    let aggregateResult = null
    for (const k of shardKeys) {
      const shardResult = await getAggregates(
        app.hierarchy,
        app.datastore,
        indexNode,
        k
      )
      // first shard seeds the result; later shards are merged in
      if (aggregateResult === null) {
        aggregateResult = shardResult
      } else {
        aggregateResult = mergeShardAggregate(aggregateResult, shardResult)
      }
    }
    return aggregateResult
  }
  return await getAggregates(
    app.hierarchy,
    app.datastore,
    indexNode,
    getUnshardedIndexDataKey(indexDir)
  )
}
|
|
||||||
|
|
||||||
/**
 * Merge one shard's aggregate result into the running totals, in
 * place. Shapes: totals[groupDefName][groupingValue] =
 *   { count, <aggName>: { sum, max, min, mean } }
 * Groupings not yet seen are copied straight from the shard; existing
 * ones have counts added, sums/extremes combined, and the mean
 * recomputed from the merged sum and count.
 */
const mergeShardAggregate = (totals, shard) => {
  const combine = (runningTotal, shardTotal) => {
    runningTotal.count += shardTotal.count
    for (const aggName in runningTotal) {
      if (aggName === "count") continue
      const agg = runningTotal[aggName]
      const shardAgg = shardTotal[aggName]
      agg.sum += shardAgg.sum
      agg.max = agg.max > shardAgg.max ? agg.max : shardAgg.max
      agg.min = agg.min < shardAgg.min ? agg.min : shardAgg.min
      agg.mean = agg.sum / runningTotal.count
    }
    return runningTotal
  }

  for (const groupDefName in totals) {
    for (const grouping in shard[groupDefName]) {
      const existing = totals[groupDefName][grouping]
      totals[groupDefName][grouping] =
        existing === undefined
          ? shard[groupDefName][grouping]
          : combine(existing, shard[groupDefName][grouping])
    }
  }

  return totals
}
|
|
||||||
|
|
||||||
// Stream an index file via iterateIndex, folding every item into an
// aggregate result object; resolves to that object when the read
// completes.
const getAggregates = async (hierarchy, datastore, index, indexedDataKey) => {
  const aggregateResult = {}
  const doRead = iterateIndex(
    async item => {
      applyItemToAggregateResult(index, aggregateResult, item)
      return CONTINUE_READING_RECORDS
    },
    async () => aggregateResult
  )

  return await doRead(hierarchy, datastore, index, indexedDataKey)
}
|
|
||||||
|
|
||||||
// Fold one indexed item into the aggregate result, in place.
// result shape: result[groupDefName][groupingValue] =
//   { count, <aggName>: { sum, mean, max, min } }
const applyItemToAggregateResult = (indexNode, result, item) => {
  const getInitialAggregateResult = () => ({
    sum: 0,
    mean: null,
    max: null,
    min: null,
  })

  // fold one aggregate's value for this item into its running totals
  const applyAggregateResult = (agg, existing, count) => {
    const value = compileCode(agg.aggregatedValue)({ record: item })

    // non-numeric values are ignored entirely
    if (!isNumber(value)) return existing

    existing.sum += value
    existing.max =
      value > existing.max || existing.max === null ? value : existing.max
    existing.min =
      value < existing.min || existing.min === null ? value : existing.min
    existing.mean = existing.sum / count
    return existing
  }

  for (const aggGroup of indexNode.aggregateGroups) {
    if (!has(aggGroup.name)(result)) {
      result[aggGroup.name] = {}
    }

    const thisGroupResult = result[aggGroup.name]

    // skip items failing the group's (optional) condition expression
    if (isNonEmptyString(aggGroup.condition)) {
      if (!compileCode(aggGroup.condition)({ record: item })) {
        continue
      }
    }

    // grouping value for this item ("all" when the group is ungrouped,
    // "(none)" when the groupBy expression yields nothing useful)
    let group = isNonEmptyString(aggGroup.groupBy)
      ? compileCode(aggGroup.groupBy)({ record: item })
      : "all"
    if (!isNonEmptyString(group)) {
      group = "(none)"
    }

    if (!has(group)(thisGroupResult)) {
      // first item seen for this grouping - initialise its totals
      thisGroupResult[group] = { count: 0 }
      for (const agg of aggGroup.aggregates) {
        thisGroupResult[group][agg.name] = getInitialAggregateResult()
      }
    }

    thisGroupResult[group].count++

    for (const agg of aggGroup.aggregates) {
      const existingValues = thisGroupResult[group][agg.name]
      thisGroupResult[group][agg.name] = applyAggregateResult(
        agg,
        existingValues,
        thisGroupResult[group].count
      )
    }
  }
}
|
|
|
@ -1,143 +0,0 @@
|
||||||
import { filter, includes, some } from "lodash/fp"
|
|
||||||
import { getAllIdsIterator } from "../indexing/allIds"
|
|
||||||
import {
|
|
||||||
getFlattenedHierarchy,
|
|
||||||
getRecordNodeById,
|
|
||||||
getNode,
|
|
||||||
isIndex,
|
|
||||||
isModel,
|
|
||||||
getActualKeyOfParent,
|
|
||||||
getAllowedRecordNodesForIndex,
|
|
||||||
fieldReversesReferenceToIndex,
|
|
||||||
isTopLevelIndex,
|
|
||||||
} from "../templateApi/hierarchy"
|
|
||||||
import { joinKey, apiWrapper, events, $ } from "../common"
|
|
||||||
import {
|
|
||||||
createBuildIndexFolder,
|
|
||||||
transactionForBuildIndex,
|
|
||||||
} from "../transactions/create"
|
|
||||||
import { permission } from "../authApi/permissions"
|
|
||||||
import { BadRequestError } from "../common/errors"
|
|
||||||
import { initialiseIndex } from "../indexing/initialiseIndex"
|
|
||||||
import { getRecordInfo } from "../recordApi/recordInfo"
|
|
||||||
|
|
||||||
/** rebuilds an index
|
|
||||||
* @param {object} app - the application container
|
|
||||||
* @param {string} indexNodeKey - node key of the index, which the index belongs to
|
|
||||||
*/
|
|
||||||
// Public entry point: _buildIndex guarded by the manageIndex
// permission and evented via apiWrapper.
export const buildIndex = app => async indexNodeKey =>
  apiWrapper(
    app,
    events.indexApi.buildIndex,
    permission.manageIndex.isAuthorized,
    { indexNodeKey },
    _buildIndex,
    app,
    indexNodeKey
  )
|
|
||||||
|
|
||||||
// Rebuild an index from scratch: create its build folder, enqueue a
// build-index transaction per applicable record (reverse-reference or
// hierarchal strategy depending on index type), then flush via the
// app's cleanupTransactions hook.
export const _buildIndex = async (app, indexNodeKey) => {
  const indexNode = getNode(app.hierarchy, indexNodeKey)

  await createBuildIndexFolder(app.datastore, indexNodeKey)

  if (!isIndex(indexNode)) {
    throw new BadRequestError("BuildIndex: must supply an indexnode")
  }

  if (indexNode.indexType === "reference") {
    await buildReverseReferenceIndex(app, indexNode)
  } else {
    await buildHeirarchalIndex(app, indexNode)
  }

  // process the queued transactions immediately
  await app.cleanupTransactions()
}
|
|
||||||
|
|
||||||
// Build a reverse-reference index: find every model node whose fields
// reference this index, then enqueue one build-index transaction per
// record of each of those nodes.
const buildReverseReferenceIndex = async (app, indexNode) => {
  // Iterate through all referencING records,
  // and update referenced index for each record
  let recordCount = 0
  const referencingNodes = $(app.hierarchy, [
    getFlattenedHierarchy,
    filter(
      n =>
        isModel(n) && some(fieldReversesReferenceToIndex(indexNode))(n.fields)
    ),
  ])

  // enqueue transactions for every record of one referencing node
  const createTransactionsForReferencingNode = async referencingNode => {
    const iterateReferencingNodes = await getAllIdsIterator(app)(
      referencingNode.collectionNodeKey()
    )

    let referencingIdIterator = await iterateReferencingNodes()
    while (!referencingIdIterator.done) {
      const { result } = referencingIdIterator
      for (const id of result.ids) {
        const recordKey = joinKey(result.collectionKey, id)
        await transactionForBuildIndex(
          app,
          indexNode.nodeKey(),
          recordKey,
          recordCount
        )
        recordCount++
      }
      referencingIdIterator = await iterateReferencingNodes()
    }
  }

  for (const referencingNode of referencingNodes) {
    await createTransactionsForReferencingNode(referencingNode)
  }
}
|
|
||||||
|
|
||||||
// Build a hierarchal index: for each collection the index is allowed
// to cover, iterate all record ids and enqueue a build-index
// transaction for every record the index applies to. Returns the
// number of transactions enqueued.
const buildHeirarchalIndex = async (app, indexNode) => {
  let recordCount = 0

  const createTransactionsForIds = async (collectionKey, ids) => {
    for (const recordId of ids) {
      const recordKey = joinKey(collectionKey, recordId)

      const recordNode = getRecordNodeById(app.hierarchy, recordId)

      // skip records whose node the index does not cover
      if (recordNodeApplies(indexNode)(recordNode)) {
        await transactionForBuildIndex(
          app,
          indexNode.nodeKey(),
          recordKey,
          recordCount
        )
        recordCount++
      }
    }
  }

  const collectionRecords = getAllowedRecordNodesForIndex(
    app.hierarchy,
    indexNode
  )

  for (const targetCollectionRecordNode of collectionRecords) {
    const allIdsIterator = await getAllIdsIterator(app)(
      targetCollectionRecordNode.collectionNodeKey()
    )

    // pull pages of ids until the iterator reports done
    let allIds = await allIdsIterator()
    while (allIds.done === false) {
      await createTransactionsForIds(
        allIds.result.collectionKey,
        allIds.result.ids
      )
      allIds = await allIdsIterator()
    }
  }

  return recordCount
}
|
|
||||||
|
|
||||||
// True when the record node's id is one the index definition allows.
const recordNodeApplies = indexNode => recordNode =>
  includes(recordNode.nodeId)(indexNode.allowedModelNodeIds)
|
|
||||||
|
|
||||||
export default buildIndex
|
|
|
@ -1,38 +0,0 @@
|
||||||
import { tryAwaitOrIgnore, safeKey } from "../common"
|
|
||||||
import {
|
|
||||||
isIndex,
|
|
||||||
isShardedIndex,
|
|
||||||
getExactNodeForKey,
|
|
||||||
} from "../templateApi/hierarchy"
|
|
||||||
import {
|
|
||||||
getAllShardKeys,
|
|
||||||
getShardMapKey,
|
|
||||||
getUnshardedIndexDataKey,
|
|
||||||
} from "../indexing/sharding"
|
|
||||||
import { getIndexDir } from "./getIndexDir"
|
|
||||||
|
|
||||||
/**
 * Delete an index's data files — every shard plus the shard map for
 * sharded indexes, the single data file otherwise — optionally
 * deleting the index folder itself.
 *
 * Fix: tryAwaitOrIgnore expects a *function* to invoke; the original
 * passed it an already-started promise (and in two places awaited the
 * argument rather than the wrapper), so delete failures surfaced as
 * unhandled rejections instead of being ignored. All deletes are now
 * passed as thunks and consistently awaited.
 */
export const _deleteIndex = async (app, indexKey, includeFolder) => {
  indexKey = safeKey(indexKey)
  const indexNode = getExactNodeForKey(app.hierarchy)(indexKey)
  const indexDir = getIndexDir(app.hierarchy, indexKey)

  if (!isIndex(indexNode)) {
    throw new Error("Supplied key is not an index")
  }

  if (isShardedIndex(indexNode)) {
    // best-effort delete of every shard, then the shard map
    const shardKeys = await getAllShardKeys(app, indexNode, indexDir)
    for (const k of shardKeys) {
      await tryAwaitOrIgnore(() => app.datastore.deleteFile(k))
    }
    await tryAwaitOrIgnore(() =>
      app.datastore.deleteFile(getShardMapKey(indexDir))
    )
  } else {
    await tryAwaitOrIgnore(() =>
      app.datastore.deleteFile(getUnshardedIndexDataKey(indexDir))
    )
  }

  if (includeFolder) {
    await tryAwaitOrIgnore(() => app.datastore.deleteFolder(indexDir))
  }
}
|
|
|
@ -1,14 +0,0 @@
|
||||||
import { getRecordInfo } from "../recordApi/recordInfo"
|
|
||||||
import { getParentKey, getLastPartInKey } from "../templateApi/hierarchy"
|
|
||||||
import { keySep } from "../common"
|
|
||||||
|
|
||||||
/**
 * Resolve the physical directory for an index key: top-level indexes
 * live at their own key; record-level indexes live under the parent
 * record's directory.
 */
export const getIndexDir = (hierarchy, indexKey) => {
  const parentKey = getParentKey(indexKey)

  const isTopLevel = parentKey === "" || parentKey === keySep
  if (isTopLevel) return indexKey

  const recordInfo = getRecordInfo(hierarchy, parentKey)
  return recordInfo.child(getLastPartInKey(indexKey))
}
|
|
|
@ -1,11 +0,0 @@
|
||||||
import { buildIndex } from "./buildIndex"
|
|
||||||
import { listItems } from "./listItems"
|
|
||||||
import { aggregates } from "./aggregates"
|
|
||||||
|
|
||||||
// Assemble the index API surface, binding each operation to the app
// container.
export const getIndexApi = app => ({
  listItems: listItems(app),
  buildIndex: buildIndex(app),
  aggregates: aggregates(app),
})

export default getIndexApi
|
|
|
@ -1,76 +0,0 @@
|
||||||
import { flatten, merge } from "lodash/fp"
|
|
||||||
import { safeKey, apiWrapper, $, events, isNonEmptyString } from "../common"
|
|
||||||
import { readIndex, searchIndex } from "../indexing/read"
|
|
||||||
import {
|
|
||||||
getUnshardedIndexDataKey,
|
|
||||||
getShardKeysInRange,
|
|
||||||
} from "../indexing/sharding"
|
|
||||||
import {
|
|
||||||
getExactNodeForKey,
|
|
||||||
isIndex,
|
|
||||||
isShardedIndex,
|
|
||||||
} from "../templateApi/hierarchy"
|
|
||||||
import { permission } from "../authApi/permissions"
|
|
||||||
import { getIndexDir } from "./getIndexDir"
|
|
||||||
|
|
||||||
// indexApi.listItems: read (or search) an index's items, guarded by
// the readIndex permission via apiWrapper.
// options: { rangeStartParams, rangeEndParams, searchPhrase }
export const listItems = app => async (indexKey, options) => {
  indexKey = safeKey(indexKey)
  return apiWrapper(
    app,
    events.indexApi.listItems,
    permission.readIndex.isAuthorized(indexKey),
    { indexKey, options },
    _listItems,
    app,
    indexKey,
    options
  )
}
|
|
||||||
|
|
||||||
// Options applied when the caller omits them.
const defaultOptions = {
  rangeStartParams: null,
  rangeEndParams: null,
  searchPhrase: null,
}

// Implementation behind listItems(): reads every relevant shard (or
// the single unsharded file), searching instead of reading when a
// searchPhrase is given.
export const _listItems = async (app, indexKey, options = defaultOptions) => {
  // combine caller options with the defaults (caller values win)
  const { searchPhrase, rangeStartParams, rangeEndParams } = $({}, [
    merge(options),
    merge(defaultOptions),
  ])

  // closes over indexNode, which is assigned below before any call
  const getItems = async indexedDataKey =>
    isNonEmptyString(searchPhrase)
      ? await searchIndex(
          app.hierarchy,
          app.datastore,
          indexNode,
          indexedDataKey,
          searchPhrase
        )
      : await readIndex(app.hierarchy, app.datastore, indexNode, indexedDataKey)

  indexKey = safeKey(indexKey)
  const indexNode = getExactNodeForKey(app.hierarchy)(indexKey)
  const indexDir = getIndexDir(app.hierarchy, indexKey)

  if (!isIndex(indexNode)) {
    throw new Error("supplied key is not an index")
  }

  if (isShardedIndex(indexNode)) {
    // read each in-range shard and flatten the per-shard item arrays
    const shardKeys = await getShardKeysInRange(
      app,
      indexNode,
      indexDir,
      rangeStartParams,
      rangeEndParams
    )
    const items = []
    for (const k of shardKeys) {
      items.push(await getItems(k))
    }
    return flatten(items)
  }
  return await getItems(getUnshardedIndexDataKey(indexDir))
}
|
|
|
@ -1,279 +0,0 @@
|
||||||
import { flatten, orderBy, filter, isUndefined } from "lodash/fp"
|
|
||||||
import {
|
|
||||||
getFlattenedHierarchy,
|
|
||||||
getCollectionNodeByKeyOrNodeKey,
|
|
||||||
getNodeByKeyOrNodeKey,
|
|
||||||
isCollectionRecord,
|
|
||||||
isAncestor,
|
|
||||||
} from "../templateApi/hierarchy"
|
|
||||||
import { joinKey, safeKey, $ } from "../common"
|
|
||||||
import { getCollectionDir } from "../recordApi/recordInfo"
|
|
||||||
|
|
||||||
export const RECORDS_PER_FOLDER = 1000
|
|
||||||
export const allIdChars =
|
|
||||||
"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_-"
|
|
||||||
|
|
||||||
// this should never be changed - ever
|
|
||||||
// - existing databases depend on the order of chars this string
|
|
||||||
|
|
||||||
/**
|
|
||||||
* folderStructureArray should return an array like
|
|
||||||
* - [1] = all records fit into one folder
|
|
||||||
* - [2] = all records fite into 2 folders
|
|
||||||
* - [64, 3] = all records fit into 64 * 3 folders
|
|
||||||
* - [64, 64, 10] = all records fit into 64 * 64 * 10 folder
|
|
||||||
* (there are 64 possible chars in allIsChars)
|
|
||||||
*/
|
|
||||||
export const folderStructureArray = recordNode => {
|
|
||||||
const totalFolders = Math.ceil(recordNode.estimatedRecordCount / 1000)
|
|
||||||
const folderArray = []
|
|
||||||
let levelCount = 1
|
|
||||||
while (64 ** levelCount < totalFolders) {
|
|
||||||
levelCount += 1
|
|
||||||
folderArray.push(64)
|
|
||||||
}
|
|
||||||
|
|
||||||
const parentFactor = 64 ** folderArray.length
|
|
||||||
if (parentFactor < totalFolders) {
|
|
||||||
folderArray.push(Math.ceil(totalFolders / parentFactor))
|
|
||||||
}
|
|
||||||
|
|
||||||
return folderArray
|
|
||||||
|
|
||||||
/*
|
|
||||||
const maxRecords = currentFolderPosition === 0
|
|
||||||
? RECORDS_PER_FOLDER
|
|
||||||
: currentFolderPosition * 64 * RECORDS_PER_FOLDER;
|
|
||||||
|
|
||||||
if(maxRecords < recordNode.estimatedRecordCount) {
|
|
||||||
return folderStructureArray(
|
|
||||||
recordNode,
|
|
||||||
[...currentArray, 64],
|
|
||||||
currentFolderPosition + 1);
|
|
||||||
} else {
|
|
||||||
const childFolderCount = Math.ceil(recordNode.estimatedRecordCount / maxRecords );
|
|
||||||
return [...currentArray, childFolderCount]
|
|
||||||
}*/
|
|
||||||
}
|
|
||||||
|
|
||||||
export const getAllIdsIterator = app => async collection_Key_or_NodeKey => {
|
|
||||||
collection_Key_or_NodeKey = safeKey(collection_Key_or_NodeKey)
|
|
||||||
const recordNode =
|
|
||||||
getCollectionNodeByKeyOrNodeKey(app.hierarchy, collection_Key_or_NodeKey) ||
|
|
||||||
getNodeByKeyOrNodeKey(app.hierarchy, collection_Key_or_NodeKey)
|
|
||||||
|
|
||||||
const getAllIdsIteratorForCollectionKey = async (
|
|
||||||
recordNode,
|
|
||||||
collectionKey
|
|
||||||
) => {
|
|
||||||
const folderStructure = folderStructureArray(recordNode)
|
|
||||||
|
|
||||||
let currentFolderContents = []
|
|
||||||
let currentPosition = []
|
|
||||||
|
|
||||||
const collectionDir = getCollectionDir(app.hierarchy, collectionKey)
|
|
||||||
const basePath = joinKey(collectionDir, recordNode.nodeId.toString())
|
|
||||||
|
|
||||||
// "folderStructure" determines the top, sharding folders
|
|
||||||
// we need to add one, for the collection root folder, which
|
|
||||||
// always exists
|
|
||||||
const levels = folderStructure.length + 1
|
|
||||||
const topLevel = levels - 1
|
|
||||||
|
|
||||||
/* populate initial directory structure in form:
|
|
||||||
[
|
|
||||||
{path: "/a", contents: ["b", "c", "d"]},
|
|
||||||
{path: "/a/b", contents: ["e","f","g"]},
|
|
||||||
{path: "/a/b/e", contents: ["1-abcd","2-cdef","3-efgh"]},
|
|
||||||
]
|
|
||||||
// stores contents on each parent level
|
|
||||||
// top level has ID folders
|
|
||||||
*/
|
|
||||||
const firstFolder = async () => {
|
|
||||||
let folderLevel = 0
|
|
||||||
|
|
||||||
const lastPathHasContent = () =>
|
|
||||||
folderLevel === 0 ||
|
|
||||||
currentFolderContents[folderLevel - 1].contents.length > 0
|
|
||||||
|
|
||||||
while (folderLevel <= topLevel && lastPathHasContent()) {
|
|
||||||
let thisPath = basePath
|
|
||||||
for (let lev = 0; lev < currentPosition.length; lev++) {
|
|
||||||
thisPath = joinKey(thisPath, currentFolderContents[lev].contents[0])
|
|
||||||
}
|
|
||||||
|
|
||||||
const contentsThisLevel = await app.datastore.getFolderContents(
|
|
||||||
thisPath
|
|
||||||
)
|
|
||||||
currentFolderContents.push({
|
|
||||||
contents: contentsThisLevel,
|
|
||||||
path: thisPath,
|
|
||||||
})
|
|
||||||
|
|
||||||
// should start as something like [0,0]
|
|
||||||
if (folderLevel < topLevel) currentPosition.push(0)
|
|
||||||
|
|
||||||
folderLevel += 1
|
|
||||||
}
|
|
||||||
|
|
||||||
return currentPosition.length === levels - 1
|
|
||||||
}
|
|
||||||
|
|
||||||
const isOnLastFolder = level => {
|
|
||||||
const result =
|
|
||||||
currentPosition[level] ===
|
|
||||||
currentFolderContents[level].contents.length - 1
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
const getNextFolder = async (lev = undefined) => {
|
|
||||||
lev = isUndefined(lev) ? topLevel : lev
|
|
||||||
const parentLev = lev - 1
|
|
||||||
|
|
||||||
if (parentLev < 0) return false
|
|
||||||
|
|
||||||
if (isOnLastFolder(parentLev)) {
|
|
||||||
return await getNextFolder(parentLev)
|
|
||||||
}
|
|
||||||
|
|
||||||
const newPosition = currentPosition[parentLev] + 1
|
|
||||||
currentPosition[parentLev] = newPosition
|
|
||||||
|
|
||||||
const nextFolder = joinKey(
|
|
||||||
currentFolderContents[parentLev].path,
|
|
||||||
currentFolderContents[parentLev].contents[newPosition]
|
|
||||||
)
|
|
||||||
currentFolderContents[
|
|
||||||
lev
|
|
||||||
].contents = await app.datastore.getFolderContents(nextFolder)
|
|
||||||
currentFolderContents[lev].path = nextFolder
|
|
||||||
|
|
||||||
if (lev !== topLevel) {
|
|
||||||
// we just advanced a parent folder, so now need to
|
|
||||||
// do the same to the next levels
|
|
||||||
let loopLevel = lev + 1
|
|
||||||
while (loopLevel <= topLevel) {
|
|
||||||
const loopParentLevel = loopLevel - 1
|
|
||||||
|
|
||||||
currentPosition[loopParentLevel] = 0
|
|
||||||
const nextLoopFolder = joinKey(
|
|
||||||
currentFolderContents[loopParentLevel].path,
|
|
||||||
currentFolderContents[loopParentLevel].contents[0]
|
|
||||||
)
|
|
||||||
currentFolderContents[
|
|
||||||
loopLevel
|
|
||||||
].contents = await app.datastore.getFolderContents(nextLoopFolder)
|
|
||||||
currentFolderContents[loopLevel].path = nextLoopFolder
|
|
||||||
loopLevel += 1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// true ==has more ids... (just loaded more)
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
const idsCurrentFolder = () =>
|
|
||||||
currentFolderContents[currentFolderContents.length - 1].contents
|
|
||||||
|
|
||||||
const fininshedResult = { done: true, result: { ids: [], collectionKey } }
|
|
||||||
|
|
||||||
let hasStarted = false
|
|
||||||
let hasMore = true
|
|
||||||
const getIdsFromCurrentfolder = async () => {
|
|
||||||
if (!hasMore) {
|
|
||||||
return fininshedResult
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!hasStarted) {
|
|
||||||
hasMore = await firstFolder()
|
|
||||||
hasStarted = true
|
|
||||||
return {
|
|
||||||
result: {
|
|
||||||
ids: idsCurrentFolder(),
|
|
||||||
collectionKey,
|
|
||||||
},
|
|
||||||
done: false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
hasMore = await getNextFolder()
|
|
||||||
|
|
||||||
return {
|
|
||||||
result: {
|
|
||||||
ids: hasMore ? idsCurrentFolder() : [],
|
|
||||||
collectionKey,
|
|
||||||
},
|
|
||||||
done: !hasMore,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return getIdsFromCurrentfolder
|
|
||||||
}
|
|
||||||
|
|
||||||
const ancestors = $(getFlattenedHierarchy(app.hierarchy), [
|
|
||||||
filter(isCollectionRecord),
|
|
||||||
filter(
|
|
||||||
n => isAncestor(recordNode)(n) || n.nodeKey() === recordNode.nodeKey()
|
|
||||||
),
|
|
||||||
orderBy([n => n.nodeKey().length], ["asc"]),
|
|
||||||
]) // parents first
|
|
||||||
|
|
||||||
const traverseForIteraterators = async (
|
|
||||||
parentRecordKey = "",
|
|
||||||
currentNodeIndex = 0
|
|
||||||
) => {
|
|
||||||
const currentNode = ancestors[currentNodeIndex]
|
|
||||||
const currentCollectionKey = joinKey(
|
|
||||||
parentRecordKey,
|
|
||||||
currentNode.collectionName
|
|
||||||
)
|
|
||||||
if (currentNode.nodeKey() === recordNode.nodeKey()) {
|
|
||||||
return [
|
|
||||||
await getAllIdsIteratorForCollectionKey(
|
|
||||||
currentNode,
|
|
||||||
currentCollectionKey
|
|
||||||
),
|
|
||||||
]
|
|
||||||
}
|
|
||||||
const allIterators = []
|
|
||||||
const currentIterator = await getAllIdsIteratorForCollectionKey(
|
|
||||||
currentNode,
|
|
||||||
currentCollectionKey
|
|
||||||
)
|
|
||||||
|
|
||||||
let ids = await currentIterator()
|
|
||||||
while (ids.done === false) {
|
|
||||||
for (const id of ids.result.ids) {
|
|
||||||
allIterators.push(
|
|
||||||
await traverseForIteraterators(
|
|
||||||
joinKey(currentCollectionKey, id),
|
|
||||||
currentNodeIndex + 1
|
|
||||||
)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
ids = await currentIterator()
|
|
||||||
}
|
|
||||||
|
|
||||||
return flatten(allIterators)
|
|
||||||
}
|
|
||||||
|
|
||||||
const iteratorsArray = await traverseForIteraterators()
|
|
||||||
let currentIteratorIndex = 0
|
|
||||||
return async () => {
|
|
||||||
if (iteratorsArray.length === 0) {
|
|
||||||
return { done: true, result: [] }
|
|
||||||
}
|
|
||||||
const innerResult = await iteratorsArray[currentIteratorIndex]()
|
|
||||||
if (!innerResult.done) {
|
|
||||||
return innerResult
|
|
||||||
}
|
|
||||||
if (currentIteratorIndex == iteratorsArray.length - 1) {
|
|
||||||
return { done: true, result: innerResult.result }
|
|
||||||
}
|
|
||||||
currentIteratorIndex++
|
|
||||||
return { done: false, result: innerResult.result }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export default getAllIdsIterator
|
|
|
@ -1,107 +0,0 @@
|
||||||
import { ensureShardNameIsInShardMap } from "./sharding"
|
|
||||||
import { getIndexWriter } from "./serializer"
|
|
||||||
import { isShardedIndex, getParentKey } from "../templateApi/hierarchy"
|
|
||||||
import { promiseWriteableStream } from "./promiseWritableStream"
|
|
||||||
import { promiseReadableStream } from "./promiseReadableStream"
|
|
||||||
|
|
||||||
export const applyToShard = async (
|
|
||||||
hierarchy,
|
|
||||||
store,
|
|
||||||
indexDir,
|
|
||||||
indexNode,
|
|
||||||
indexShardKey,
|
|
||||||
recordsToWrite,
|
|
||||||
keysToRemove
|
|
||||||
) => {
|
|
||||||
const createIfNotExists = recordsToWrite.length > 0
|
|
||||||
const writer = await getWriter(
|
|
||||||
hierarchy,
|
|
||||||
store,
|
|
||||||
indexDir,
|
|
||||||
indexShardKey,
|
|
||||||
indexNode,
|
|
||||||
createIfNotExists
|
|
||||||
)
|
|
||||||
if (writer === SHARD_DELETED) return
|
|
||||||
|
|
||||||
await writer.updateIndex(recordsToWrite, keysToRemove)
|
|
||||||
await swapTempFileIn(store, indexShardKey)
|
|
||||||
}
|
|
||||||
|
|
||||||
const SHARD_DELETED = "SHARD_DELETED"
|
|
||||||
const getWriter = async (
|
|
||||||
hierarchy,
|
|
||||||
store,
|
|
||||||
indexDir,
|
|
||||||
indexedDataKey,
|
|
||||||
indexNode,
|
|
||||||
createIfNotExists
|
|
||||||
) => {
|
|
||||||
let readableStream = null
|
|
||||||
|
|
||||||
if (isShardedIndex(indexNode)) {
|
|
||||||
await ensureShardNameIsInShardMap(store, indexDir, indexedDataKey)
|
|
||||||
if (!(await store.exists(indexedDataKey))) {
|
|
||||||
if (await store.exists(getParentKey(indexedDataKey))) {
|
|
||||||
await store.createFile(indexedDataKey, "")
|
|
||||||
} else {
|
|
||||||
return SHARD_DELETED
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
readableStream = promiseReadableStream(
|
|
||||||
await store.readableFileStream(indexedDataKey)
|
|
||||||
)
|
|
||||||
} catch (e) {
|
|
||||||
if (await store.exists(indexedDataKey)) {
|
|
||||||
throw e
|
|
||||||
} else {
|
|
||||||
if (createIfNotExists) {
|
|
||||||
if (await store.exists(getParentKey(indexedDataKey))) {
|
|
||||||
await store.createFile(indexedDataKey, "")
|
|
||||||
} else {
|
|
||||||
return SHARD_DELETED
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return SHARD_DELETED
|
|
||||||
}
|
|
||||||
|
|
||||||
readableStream = promiseReadableStream(
|
|
||||||
await store.readableFileStream(indexedDataKey)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const writableStream = promiseWriteableStream(
|
|
||||||
await store.writableFileStream(indexedDataKey + ".temp")
|
|
||||||
)
|
|
||||||
|
|
||||||
return getIndexWriter(hierarchy, indexNode, readableStream, writableStream)
|
|
||||||
}
|
|
||||||
|
|
||||||
const swapTempFileIn = async (store, indexedDataKey, isRetry = false) => {
|
|
||||||
const tempFile = `${indexedDataKey}.temp`
|
|
||||||
try {
|
|
||||||
await store.deleteFile(indexedDataKey)
|
|
||||||
} catch (e) {
|
|
||||||
// ignore failure, incase it has not been created yet
|
|
||||||
|
|
||||||
// if parent folder does not exist, assume that this index
|
|
||||||
// should not be there
|
|
||||||
if (!(await store.exists(getParentKey(indexedDataKey)))) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
await store.renameFile(tempFile, indexedDataKey)
|
|
||||||
} catch (e) {
|
|
||||||
// retrying in case delete failure was for some other reason
|
|
||||||
if (!isRetry) {
|
|
||||||
await swapTempFileIn(store, indexedDataKey, true)
|
|
||||||
} else {
|
|
||||||
throw new Error("Failed to swap in index filed: " + e.message)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,85 +0,0 @@
|
||||||
import { compileCode } from "../common/compileCode"
|
|
||||||
import { isUndefined, keys, cloneDeep, isFunction, includes } from "lodash/fp"
|
|
||||||
import { defineError } from "../common"
|
|
||||||
|
|
||||||
export const filterEval = "FILTER_EVALUATE"
|
|
||||||
export const filterCompile = "FILTER_COMPILE"
|
|
||||||
export const mapEval = "MAP_EVALUATE"
|
|
||||||
export const mapCompile = "MAP_COMPILE"
|
|
||||||
export const removeUndeclaredFields = "REMOVE_UNDECLARED_FIELDS"
|
|
||||||
export const addUnMappedFields = "ADD_UNMAPPED_FIELDS"
|
|
||||||
export const addTheKey = "ADD_KEY"
|
|
||||||
|
|
||||||
const getEvaluateResult = () => ({
|
|
||||||
isError: false,
|
|
||||||
passedFilter: true,
|
|
||||||
result: null,
|
|
||||||
})
|
|
||||||
|
|
||||||
export const compileFilter = index => compileCode(index.filter)
|
|
||||||
|
|
||||||
export const compileMap = index => compileCode(index.map)
|
|
||||||
|
|
||||||
export const passesFilter = (record, index) => {
|
|
||||||
const context = { record }
|
|
||||||
if (!index.filter) return true
|
|
||||||
|
|
||||||
const compiledFilter = defineError(() => compileFilter(index), filterCompile)
|
|
||||||
|
|
||||||
return defineError(() => compiledFilter(context), filterEval)
|
|
||||||
}
|
|
||||||
|
|
||||||
export const mapRecord = (record, index) => {
|
|
||||||
const recordClone = cloneDeep(record)
|
|
||||||
const context = { record: recordClone }
|
|
||||||
|
|
||||||
const map = index.map ? index.map : "return {...record};"
|
|
||||||
|
|
||||||
const compiledMap = defineError(() => compileCode(map), mapCompile)
|
|
||||||
|
|
||||||
const mapped = defineError(() => compiledMap(context), mapEval)
|
|
||||||
|
|
||||||
const mappedKeys = keys(mapped)
|
|
||||||
for (let i = 0; i < mappedKeys.length; i++) {
|
|
||||||
const key = mappedKeys[i]
|
|
||||||
mapped[key] = isUndefined(mapped[key]) ? null : mapped[key]
|
|
||||||
if (isFunction(mapped[key])) {
|
|
||||||
delete mapped[key]
|
|
||||||
}
|
|
||||||
if (key === "IsNew") {
|
|
||||||
delete mapped.IsNew
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
mapped.key = record.key
|
|
||||||
mapped.sortKey = index.getSortKey
|
|
||||||
? compileCode(index.getSortKey)(context)
|
|
||||||
: record.id
|
|
||||||
|
|
||||||
return mapped
|
|
||||||
}
|
|
||||||
|
|
||||||
export const evaluate = record => index => {
|
|
||||||
const result = getEvaluateResult()
|
|
||||||
|
|
||||||
try {
|
|
||||||
result.passedFilter = passesFilter(record, index)
|
|
||||||
} catch (err) {
|
|
||||||
result.isError = true
|
|
||||||
result.passedFilter = false
|
|
||||||
result.result = err.message
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!result.passedFilter) return result
|
|
||||||
|
|
||||||
try {
|
|
||||||
result.result = mapRecord(record, index)
|
|
||||||
} catch (err) {
|
|
||||||
result.isError = true
|
|
||||||
result.result = err.message
|
|
||||||
}
|
|
||||||
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
export default evaluate
|
|
|
@ -1,58 +0,0 @@
|
||||||
import { has, keys, map, orderBy, filter, concat, reverse } from "lodash/fp"
|
|
||||||
import { getAllowedRecordNodesForIndex } from "../templateApi/hierarchy"
|
|
||||||
import { mapRecord } from "./evaluate"
|
|
||||||
import { constructRecord } from "../recordApi/getNew"
|
|
||||||
import { getSampleFieldValue, detectType, all } from "../types"
|
|
||||||
import { $ } from "../common"
|
|
||||||
|
|
||||||
export const generateSchema = (hierarchy, indexNode) => {
|
|
||||||
const recordNodes = getAllowedRecordNodesForIndex(hierarchy, indexNode)
|
|
||||||
const mappedRecords = $(recordNodes, [
|
|
||||||
map(n => mapRecord(createSampleRecord(n), indexNode)),
|
|
||||||
])
|
|
||||||
|
|
||||||
// always has record key and sort key
|
|
||||||
const schema = {
|
|
||||||
sortKey: all.string,
|
|
||||||
key: all.string,
|
|
||||||
}
|
|
||||||
|
|
||||||
const fieldsHas = has(schema)
|
|
||||||
const setField = (fieldName, value) => {
|
|
||||||
if (value === null || value === undefined) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
const thisType = detectType(value)
|
|
||||||
if (fieldsHas(fieldName)) {
|
|
||||||
if (schema[fieldName] !== thisType) {
|
|
||||||
schema[fieldName] = all.string
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
schema[fieldName] = thisType
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for (const mappedRec of mappedRecords) {
|
|
||||||
for (const f in mappedRec) {
|
|
||||||
setField(f, mappedRec[f])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// returing an array of {name, type}
|
|
||||||
return $(schema, [
|
|
||||||
keys,
|
|
||||||
map(k => ({ name: k, type: schema[k].name })),
|
|
||||||
filter(s => s.name !== "sortKey"),
|
|
||||||
orderBy("name", ["desc"]), // reverse alpha
|
|
||||||
concat([{ name: "sortKey", type: all.string.name }]), // sortKey on end
|
|
||||||
reverse, // sortKey first, then rest are alphabetical
|
|
||||||
])
|
|
||||||
}
|
|
||||||
|
|
||||||
const createSampleRecord = recordNode =>
|
|
||||||
constructRecord(
|
|
||||||
recordNode,
|
|
||||||
getSampleFieldValue,
|
|
||||||
recordNode.parent().nodeKey()
|
|
||||||
)
|
|
|
@ -1,27 +0,0 @@
|
||||||
import { isShardedIndex } from "../templateApi/hierarchy"
|
|
||||||
import { joinKey } from "../common"
|
|
||||||
import {
|
|
||||||
getShardMapKey,
|
|
||||||
getUnshardedIndexDataKey,
|
|
||||||
createIndexFile,
|
|
||||||
} from "./sharding"
|
|
||||||
|
|
||||||
export const initialiseIndex = async (datastore, dir, index) => {
|
|
||||||
const indexDir = joinKey(dir, index.name)
|
|
||||||
|
|
||||||
let newDir = false
|
|
||||||
if (!(await datastore.exists(indexDir))) {
|
|
||||||
await datastore.createFolder(indexDir)
|
|
||||||
newDir = true
|
|
||||||
}
|
|
||||||
|
|
||||||
if (isShardedIndex(index)) {
|
|
||||||
const shardFile = getShardMapKey(indexDir)
|
|
||||||
if (newDir || !(await datastore.exists(shardFile)))
|
|
||||||
await datastore.createFile(shardFile, "[]")
|
|
||||||
} else {
|
|
||||||
const indexFile = getUnshardedIndexDataKey(indexDir)
|
|
||||||
if (newDir || !(await datastore.exists(indexFile)))
|
|
||||||
await createIndexFile(datastore, indexFile, index)
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,80 +0,0 @@
|
||||||
// adapted from https://github.com/dex4er/js-promise-readable
|
|
||||||
// thanks :)
|
|
||||||
|
|
||||||
export const promiseReadableStream = stream => {
|
|
||||||
let _errored
|
|
||||||
|
|
||||||
const _errorHandler = err => {
|
|
||||||
_errored = err
|
|
||||||
}
|
|
||||||
|
|
||||||
stream.on("error", _errorHandler)
|
|
||||||
|
|
||||||
const read = size => {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
if (_errored) {
|
|
||||||
const err = _errored
|
|
||||||
_errored = undefined
|
|
||||||
return reject(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!stream.readable || stream.closed || stream.destroyed) {
|
|
||||||
return resolve()
|
|
||||||
}
|
|
||||||
|
|
||||||
const readableHandler = () => {
|
|
||||||
const chunk = stream.read(size)
|
|
||||||
|
|
||||||
if (chunk) {
|
|
||||||
removeListeners()
|
|
||||||
resolve(chunk)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const closeHandler = () => {
|
|
||||||
removeListeners()
|
|
||||||
resolve()
|
|
||||||
}
|
|
||||||
|
|
||||||
const endHandler = () => {
|
|
||||||
removeListeners()
|
|
||||||
resolve()
|
|
||||||
}
|
|
||||||
|
|
||||||
const errorHandler = err => {
|
|
||||||
_errored = undefined
|
|
||||||
removeListeners()
|
|
||||||
reject(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
const removeListeners = () => {
|
|
||||||
stream.removeListener("close", closeHandler)
|
|
||||||
stream.removeListener("error", errorHandler)
|
|
||||||
stream.removeListener("end", endHandler)
|
|
||||||
stream.removeListener("readable", readableHandler)
|
|
||||||
}
|
|
||||||
|
|
||||||
stream.on("close", closeHandler)
|
|
||||||
stream.on("end", endHandler)
|
|
||||||
stream.on("error", errorHandler)
|
|
||||||
stream.on("readable", readableHandler)
|
|
||||||
|
|
||||||
readableHandler()
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const destroy = () => {
|
|
||||||
if (stream) {
|
|
||||||
if (_errorHandler) {
|
|
||||||
stream.removeListener("error", _errorHandler)
|
|
||||||
}
|
|
||||||
if (typeof stream.destroy === "function") {
|
|
||||||
stream.destroy()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return { read, destroy, stream }
|
|
||||||
}
|
|
||||||
|
|
||||||
export default promiseReadableStream
|
|
|
@ -1,117 +0,0 @@
|
||||||
// adapted from https://github.com/dex4er/js-promise-writable
|
|
||||||
// Thank you :)
|
|
||||||
export const promiseWriteableStream = stream => {
|
|
||||||
let _errored
|
|
||||||
|
|
||||||
const _errorHandler = err => {
|
|
||||||
_errored = err
|
|
||||||
}
|
|
||||||
|
|
||||||
stream.on("error", _errorHandler)
|
|
||||||
|
|
||||||
const write = chunk => {
|
|
||||||
let rejected = false
|
|
||||||
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
if (_errored) {
|
|
||||||
const err = _errored
|
|
||||||
_errored = undefined
|
|
||||||
return reject(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!stream.writable || stream.closed || stream.destroyed) {
|
|
||||||
return reject(new Error("write after end"))
|
|
||||||
}
|
|
||||||
|
|
||||||
const writeErrorHandler = err => {
|
|
||||||
_errored = undefined
|
|
||||||
rejected = true
|
|
||||||
reject(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
stream.once("error", writeErrorHandler)
|
|
||||||
|
|
||||||
const canWrite = stream.write(chunk)
|
|
||||||
|
|
||||||
stream.removeListener("error", writeErrorHandler)
|
|
||||||
|
|
||||||
if (canWrite) {
|
|
||||||
if (!rejected) {
|
|
||||||
resolve(chunk.length)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
const errorHandler = err => {
|
|
||||||
_errored = undefined
|
|
||||||
removeListeners()
|
|
||||||
reject(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
const drainHandler = () => {
|
|
||||||
removeListeners()
|
|
||||||
resolve(chunk.length)
|
|
||||||
}
|
|
||||||
|
|
||||||
const closeHandler = () => {
|
|
||||||
removeListeners()
|
|
||||||
resolve(chunk.length)
|
|
||||||
}
|
|
||||||
|
|
||||||
const finishHandler = () => {
|
|
||||||
removeListeners()
|
|
||||||
resolve(chunk.length)
|
|
||||||
}
|
|
||||||
|
|
||||||
const removeListeners = () => {
|
|
||||||
stream.removeListener("close", closeHandler)
|
|
||||||
stream.removeListener("drain", drainHandler)
|
|
||||||
stream.removeListener("error", errorHandler)
|
|
||||||
stream.removeListener("finish", finishHandler)
|
|
||||||
}
|
|
||||||
|
|
||||||
stream.on("close", closeHandler)
|
|
||||||
stream.on("drain", drainHandler)
|
|
||||||
stream.on("error", errorHandler)
|
|
||||||
stream.on("finish", finishHandler)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const end = () => {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
if (_errored) {
|
|
||||||
const err = _errored
|
|
||||||
_errored = undefined
|
|
||||||
return reject(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!stream.writable || stream.closed || stream.destroyed) {
|
|
||||||
return resolve()
|
|
||||||
}
|
|
||||||
|
|
||||||
const finishHandler = () => {
|
|
||||||
removeListeners()
|
|
||||||
resolve()
|
|
||||||
}
|
|
||||||
|
|
||||||
const errorHandler = err => {
|
|
||||||
_errored = undefined
|
|
||||||
removeListeners()
|
|
||||||
reject(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
const removeListeners = () => {
|
|
||||||
stream.removeListener("error", errorHandler)
|
|
||||||
stream.removeListener("finish", finishHandler)
|
|
||||||
}
|
|
||||||
|
|
||||||
stream.on("finish", finishHandler)
|
|
||||||
stream.on("error", errorHandler)
|
|
||||||
|
|
||||||
stream.end()
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return { write, end }
|
|
||||||
}
|
|
||||||
|
|
||||||
export default promiseWriteableStream
|
|
|
@ -1,93 +0,0 @@
|
||||||
import lunr from "lunr"
|
|
||||||
import { promiseReadableStream } from "./promiseReadableStream"
|
|
||||||
import { createIndexFile } from "./sharding"
|
|
||||||
import { generateSchema } from "./indexSchemaCreator"
|
|
||||||
import { getIndexReader, CONTINUE_READING_RECORDS } from "./serializer"
|
|
||||||
import {
|
|
||||||
getAllowedRecordNodesForIndex,
|
|
||||||
getRecordNodeId,
|
|
||||||
} from "../templateApi/hierarchy"
|
|
||||||
import { $ } from "../common"
|
|
||||||
import { filter, includes, find } from "lodash/fp"
|
|
||||||
|
|
||||||
export const readIndex = async (
|
|
||||||
hierarchy,
|
|
||||||
datastore,
|
|
||||||
index,
|
|
||||||
indexedDataKey
|
|
||||||
) => {
|
|
||||||
const records = []
|
|
||||||
const getType = typeLoader(index, hierarchy)
|
|
||||||
const doRead = iterateIndex(
|
|
||||||
async item => {
|
|
||||||
item.type = getType(item.key)
|
|
||||||
records.push(item)
|
|
||||||
return CONTINUE_READING_RECORDS
|
|
||||||
},
|
|
||||||
async () => records
|
|
||||||
)
|
|
||||||
|
|
||||||
return await doRead(hierarchy, datastore, index, indexedDataKey)
|
|
||||||
}
|
|
||||||
|
|
||||||
export const searchIndex = async (
|
|
||||||
hierarchy,
|
|
||||||
datastore,
|
|
||||||
index,
|
|
||||||
indexedDataKey,
|
|
||||||
searchPhrase
|
|
||||||
) => {
|
|
||||||
const records = []
|
|
||||||
const schema = generateSchema(hierarchy, index)
|
|
||||||
const getType = typeLoader(index, hierarchy)
|
|
||||||
const doRead = iterateIndex(
|
|
||||||
async item => {
|
|
||||||
item.type = getType(item.key)
|
|
||||||
const idx = lunr(function() {
|
|
||||||
this.ref("key")
|
|
||||||
for (const field of schema) {
|
|
||||||
this.field(field.name)
|
|
||||||
}
|
|
||||||
this.add(item)
|
|
||||||
})
|
|
||||||
const searchResults = idx.search(searchPhrase)
|
|
||||||
if (searchResults.length === 1) {
|
|
||||||
item._searchResult = searchResults[0]
|
|
||||||
records.push(item)
|
|
||||||
}
|
|
||||||
return CONTINUE_READING_RECORDS
|
|
||||||
},
|
|
||||||
async () => records
|
|
||||||
)
|
|
||||||
|
|
||||||
return await doRead(hierarchy, datastore, index, indexedDataKey)
|
|
||||||
}
|
|
||||||
|
|
||||||
export const iterateIndex = (onGetItem, getFinalResult) => async (
|
|
||||||
hierarchy,
|
|
||||||
datastore,
|
|
||||||
index,
|
|
||||||
indexedDataKey
|
|
||||||
) => {
|
|
||||||
try {
|
|
||||||
const readableStream = promiseReadableStream(
|
|
||||||
await datastore.readableFileStream(indexedDataKey)
|
|
||||||
)
|
|
||||||
|
|
||||||
const read = getIndexReader(hierarchy, index, readableStream)
|
|
||||||
await read(onGetItem)
|
|
||||||
return getFinalResult()
|
|
||||||
} catch (e) {
|
|
||||||
if (await datastore.exists(indexedDataKey)) {
|
|
||||||
throw e
|
|
||||||
} else {
|
|
||||||
await createIndexFile(datastore, indexedDataKey, index)
|
|
||||||
}
|
|
||||||
return []
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const typeLoader = (index, hierarchy) => {
|
|
||||||
const allowedNodes = getAllowedRecordNodesForIndex(hierarchy, index)
|
|
||||||
return key => find(n => getRecordNodeId(key) === n.nodeId)(allowedNodes).name
|
|
||||||
}
|
|
|
@ -1,126 +0,0 @@
|
||||||
import { orderBy } from "lodash"
|
|
||||||
import {
|
|
||||||
reduce,
|
|
||||||
find,
|
|
||||||
includes,
|
|
||||||
flatten,
|
|
||||||
union,
|
|
||||||
filter,
|
|
||||||
each,
|
|
||||||
map,
|
|
||||||
} from "lodash/fp"
|
|
||||||
import {
|
|
||||||
joinKey,
|
|
||||||
splitKey,
|
|
||||||
isNonEmptyString,
|
|
||||||
isNothing,
|
|
||||||
$,
|
|
||||||
isSomething,
|
|
||||||
} from "../common"
|
|
||||||
import {
|
|
||||||
getFlattenedHierarchy,
|
|
||||||
getNode,
|
|
||||||
getRecordNodeId,
|
|
||||||
getExactNodeForKey,
|
|
||||||
recordNodeIdIsAllowed,
|
|
||||||
isModel,
|
|
||||||
isGlobalIndex,
|
|
||||||
} from "../templateApi/hierarchy"
|
|
||||||
import { indexTypes } from "../templateApi/indexes"
|
|
||||||
import { getIndexDir } from "../indexApi/getIndexDir"
|
|
||||||
import { getRecordInfo } from "../recordApi/recordInfo"
|
|
||||||
|
|
||||||
export const getRelevantAncestorIndexes = (hierarchy, record) => {
|
|
||||||
const key = record.key
|
|
||||||
const keyParts = splitKey(key)
|
|
||||||
const nodeId = getRecordNodeId(key)
|
|
||||||
|
|
||||||
const flatHierarchy = orderBy(
|
|
||||||
getFlattenedHierarchy(hierarchy),
|
|
||||||
[node => node.pathRegx().length],
|
|
||||||
["desc"]
|
|
||||||
)
|
|
||||||
|
|
||||||
const makeindexNodeAndDir_ForAncestorIndex = (indexNode, parentRecordDir) =>
|
|
||||||
makeIndexNodeAndDir(indexNode, joinKey(parentRecordDir, indexNode.name))
|
|
||||||
|
|
||||||
const traverseAncestorIndexesInPath = () =>
|
|
||||||
reduce(
|
|
||||||
(acc, part) => {
|
|
||||||
const currentIndexKey = joinKey(acc.lastIndexKey, part)
|
|
||||||
acc.lastIndexKey = currentIndexKey
|
|
||||||
const testPathRegx = p =>
|
|
||||||
new RegExp(`${p.pathRegx()}$`).test(currentIndexKey)
|
|
||||||
const nodeMatch = find(testPathRegx)(flatHierarchy)
|
|
||||||
|
|
||||||
if (isNothing(nodeMatch)) {
|
|
||||||
return acc
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!isModel(nodeMatch) || nodeMatch.indexes.length === 0) {
|
|
||||||
return acc
|
|
||||||
}
|
|
||||||
|
|
||||||
const indexes = $(nodeMatch.indexes, [
|
|
||||||
filter(
|
|
||||||
i =>
|
|
||||||
i.indexType === indexTypes.ancestor &&
|
|
||||||
(i.allowedModelNodeIds.length === 0 ||
|
|
||||||
includes(nodeId)(i.allowedModelNodeIds))
|
|
||||||
),
|
|
||||||
])
|
|
||||||
|
|
||||||
const currentRecordDir = getRecordInfo(hierarchy, currentIndexKey).dir
|
|
||||||
|
|
||||||
each(v =>
|
|
||||||
acc.nodesAndKeys.push(
|
|
||||||
makeindexNodeAndDir_ForAncestorIndex(v, currentRecordDir)
|
|
||||||
)
|
|
||||||
)(indexes)
|
|
||||||
|
|
||||||
return acc
|
|
||||||
},
|
|
||||||
{ lastIndexKey: "", nodesAndKeys: [] }
|
|
||||||
)(keyParts).nodesAndKeys
|
|
||||||
|
|
||||||
const rootIndexes = $(flatHierarchy, [
|
|
||||||
filter(n => isGlobalIndex(n) && recordNodeIdIsAllowed(n)(nodeId)),
|
|
||||||
map(i => makeIndexNodeAndDir(i, getIndexDir(hierarchy, i.nodeKey()))),
|
|
||||||
])
|
|
||||||
|
|
||||||
return union(traverseAncestorIndexesInPath())(rootIndexes)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Finds reverse-reference index nodes relevant to `record`: for every
 * populated reference field on the record, the indexes registered on the
 * referenced record node. Returns { indexNode, indexDir } pairs.
 */
export const getRelevantReverseReferenceIndexes = (hierarchy, record) =>
  $(record.key, [
    getExactNodeForKey(hierarchy),
    node => node.fields,
    filter(
      field =>
        field.type === "reference" &&
        isSomething(record[field.name]) &&
        isNonEmptyString(record[field.name].key)
    ),
    map(field =>
      map(nodeKey => ({
        recordNode: getNode(hierarchy, nodeKey),
        field,
      }))(field.typeOptions.reverseIndexNodeKeys)
    ),
    flatten,
    map(({ recordNode, field }) => {
      const referencedDir = getRecordInfo(hierarchy, record[field.name].key)
        .dir
      return makeIndexNodeAndDir(
        recordNode,
        joinKey(referencedDir, recordNode.name)
      )
    }),
  ])
|
|
||||||
|
|
||||||
// Pairs an index node with the directory its data lives in.
const makeIndexNodeAndDir = (indexNode, indexDir) => {
  return { indexNode, indexDir }
}
|
|
||||||
|
|
||||||
export default getRelevantAncestorIndexes
|
|
|
@ -1,238 +0,0 @@
|
||||||
import { generateSchema } from "./indexSchemaCreator"
|
|
||||||
import { has, isString, difference, find } from "lodash/fp"
|
|
||||||
import { Buffer } from "safe-buffer"
|
|
||||||
import { StringDecoder } from "string_decoder"
|
|
||||||
import { getType } from "../types"
|
|
||||||
import { isSomething } from "../common"
|
|
||||||
|
|
||||||
// Maximum number of bytes buffered before flushing to the output stream.
export const BUFFER_MAX_BYTES = 524288 // 0.5Mb

// Control codes returned by an onGetItem callback to steer `read`:
export const CONTINUE_READING_RECORDS = "CONTINUE_READING" // keep parsing rows
export const READ_REMAINING_TEXT = "READ_REMAINING" // stop parsing rows; stream remaining raw text
export const CANCEL_READ = "CANCEL" // abort the read entirely
|
|
||||||
|
|
||||||
/**
 * Builds the reader/updater pair for one index, bound to the supplied
 * streams. The schema is derived once from hierarchy + indexNode and
 * shared by both operations.
 * NOTE(review): `end` is forwarded to updateIndex, but updateIndex as
 * defined below declares only three parameters — `end` is currently
 * ignored; confirm whether it is still needed.
 */
export const getIndexWriter = (
  hierarchy,
  indexNode,
  readableStream,
  writableStream,
  end
) => {
  const schema = generateSchema(hierarchy, indexNode)

  return {
    read: read(readableStream, schema),
    updateIndex: updateIndex(readableStream, writableStream, schema, end),
  }
}
|
|
||||||
|
|
||||||
/** Returns a read function for the given index, bound to readableStream. */
export const getIndexReader = (hierarchy, indexNode, readableStream) => {
  const schema = generateSchema(hierarchy, indexNode)
  return read(readableStream, schema)
}
|
|
||||||
|
|
||||||
// Merges `itemsToWrite` (upserts) and `keysToRemove` (deletes) into the
// existing index by streaming it row-by-row and re-emitting each row to
// writableStream. Items never matched against an existing row are
// appended at the end.
const updateIndex = (readableStream, writableStream, schema) => async (
  itemsToWrite,
  keysToRemove
) => {
  const write = newOutputWriter(BUFFER_MAX_BYTES, writableStream)
  const writtenItems = [] // items from itemsToWrite already emitted
  await read(readableStream, schema)(
    async indexedItem => {
      const updated = find(i => indexedItem.key === i.key)(itemsToWrite)
      const removed = find(k => indexedItem.key === k)(keysToRemove)

      // removed: skip the row entirely (do not re-emit)
      if (isSomething(removed)) return CONTINUE_READING_RECORDS

      if (isSomething(updated)) {
        // replace the stored row with the updated item
        const serializedItem = serializeItem(schema, updated)
        await write(serializedItem)
        writtenItems.push(updated)
      } else {
        // unchanged: pass the existing row straight through
        await write(serializeItem(schema, indexedItem))
      }

      return CONTINUE_READING_RECORDS
    },
    // raw (non-row) text is forwarded untouched
    async text => await write(text)
  )

  // any item not found in the existing index is new: append it
  if (writtenItems.length !== itemsToWrite.length) {
    const toAdd = difference(itemsToWrite, writtenItems)
    for (let added of toAdd) {
      await write(serializeItem(schema, added))
    }
  } else if (writtenItems.length === 0) {
    // potentially are no records
    await write("")
  }

  // write() with no argument forces a final flush of the buffer
  await write()
  await writableStream.end()
}
|
|
||||||
|
|
||||||
/**
 * Streams the index, splitting it into "\r"-terminated rows. Each row is
 * deserialized and passed to `onGetItem`, whose return value steers the
 * read (CONTINUE_READING_RECORDS / READ_REMAINING_TEXT / CANCEL_READ).
 * Once READ_REMAINING_TEXT is returned, all remaining text is passed raw
 * to `onGetText`.
 */
const read = (readableStream, schema) => async (onGetItem, onGetText) => {
  const readInput = newInputReader(readableStream)
  let text = await readInput()
  let status = CONTINUE_READING_RECORDS
  while (text.length > 0) {
    if (status === READ_REMAINING_TEXT) {
      await onGetText(text)
      // BUG FIX: previously this branch `continue`d without reading the
      // next chunk, re-processing the same `text` forever (infinite loop).
      // Advance the stream before looping.
      text = await readInput()
      continue
    }

    if (status === CANCEL_READ) {
      return
    }

    let rowText = ""
    let currentCharIndex = 0
    for (let currentChar of text) {
      rowText += currentChar
      if (currentChar === "\r") {
        // end of row: hand the parsed item to the caller
        status = await onGetItem(deserializeRow(schema, rowText))
        rowText = ""
        if (status === READ_REMAINING_TEXT) {
          break
        }
      }
      currentCharIndex++
    }

    // after a break, forward the unconsumed tail of this chunk as raw text
    if (currentCharIndex < text.length - 1) {
      await onGetText(text.substring(currentCharIndex + 1))
    }

    text = await readInput()
  }

  await readableStream.destroy()
}
|
|
||||||
|
|
||||||
/**
 * Creates a buffered writer over `writableStream`. Call with a string to
 * append it; the buffer is flushed once it exceeds `flushBoundary` bytes.
 * Call with no argument (or a non-string) to force a final flush.
 */
const newOutputWriter = (flushBoundary, writableStream) => {
  let pending = null

  const append = text => {
    const chunk = Buffer.from(text, "utf8")
    pending = pending === null ? chunk : Buffer.concat([pending, chunk])
  }

  const flush = async () => {
    await writableStream.write(pending)
    pending = null
  }

  return async text => {
    if (isString(text)) append(text)

    const isFinalFlush = !isString(text)
    if (pending !== null && (pending.length > flushBoundary || isFinalFlush)) {
      await flush()
    }
  }
}
|
|
||||||
|
|
||||||
// Creates an async chunk reader over readableStream. Each call returns
// the next BUFFER_MAX_BYTES-sized chunk decoded as utf8 text; an empty
// string signals end of stream.
const newInputReader = readableStream => {
  const decoder = new StringDecoder("utf8")
  // bytes carried over between calls (e.g. a split multi-byte character)
  let remainingBytes = []

  return async () => {
    let nextBytesBuffer = await readableStream.read(BUFFER_MAX_BYTES)
    const remainingBuffer = Buffer.from(remainingBytes)

    if (!nextBytesBuffer) nextBytesBuffer = Buffer.from([])

    // a full-sized read implies there may be more data to come
    const moreToRead = nextBytesBuffer.length === BUFFER_MAX_BYTES

    const buffer = Buffer.concat(
      [remainingBuffer, nextBytesBuffer],
      remainingBuffer.length + nextBytesBuffer.length
    )

    const text = decoder.write(buffer)
    // NOTE(review): StringDecoder#end(buffer) decodes `buffer` a second
    // time and returns a string (not bytes) — confirm this is the
    // intended way to capture a trailing partial character here.
    remainingBytes = decoder.end(buffer)

    if (!moreToRead && remainingBytes.length > 0) {
      // if for any reason, we have remaining bytes at the end
      // of the stream, just discard - dont see why this should
      // ever happen, but if it does, it could cause a stack overflow
      remainingBytes = []
    }

    return text
  }
}
|
|
||||||
|
|
||||||
// Parses one "\r"-terminated index row back into an item object using
// `schema` (an ordered list of { name, type }). Values are comma
// separated; "\" escapes the next character, and the pair "\" + "r"
// denotes a literal "\r". Mirrors serializeItem.
const deserializeRow = (schema, rowText) => {
  let currentPropIndex = 0
  let currentCharIndex = 0
  let currentValueText = ""
  let isEscaped = false
  const item = {}

  // Parse the accumulated text with the current prop's type; empty text
  // yields the type's default value.
  const setCurrentProp = () => {
    const currentProp = schema[currentPropIndex]
    const type = getType(currentProp.type)
    const value =
      currentValueText === ""
        ? type.getDefaultValue()
        : type.safeParseValue(currentValueText)
    item[currentProp.name] = value
  }

  // One schema property is consumed per "," (or per missing trailing value).
  while (currentPropIndex < schema.length) {
    if (currentCharIndex < rowText.length) {
      const currentChar = rowText[currentCharIndex]
      if (isEscaped) {
        if (currentChar === "r") {
          currentValueText += "\r"
        } else {
          currentValueText += currentChar
        }
        isEscaped = false
      } else {
        if (currentChar === ",") {
          setCurrentProp()
          currentValueText = ""
          currentPropIndex++
        } else if (currentChar === "\\") {
          isEscaped = true
        } else {
          currentValueText += currentChar
        }
      }
      currentCharIndex++
    } else {
      // row ended before all props were seen: fill the rest with defaults
      currentValueText = ""
      setCurrentProp()
      currentPropIndex++
    }
  }

  return item
}
|
|
||||||
|
|
||||||
/**
 * Serialises `item` to one index row using `schema`. Values are comma
 * separated; "," "\r" and "\\" are backslash-escaped, with "\r" written
 * as the two characters "\" + "r". The row is terminated by "\r".
 */
export const serializeItem = (schema, item) => {
  const escapeChar = ch => {
    const needsEscape = ch === "," || ch === "\r" || ch === "\\"
    const prefix = needsEscape ? "\\" : ""
    return prefix + (ch === "\r" ? "r" : ch)
  }

  let rowText = ""
  for (let prop of schema) {
    const type = getType(prop.type)
    // missing properties are serialised as the type's default value
    const value = has(prop.name)(item)
      ? item[prop.name]
      : type.getDefaultValue()

    const valStr = type.stringify(value)
    for (let i = 0; i < valStr.length; i++) {
      rowText += escapeChar(valStr[i])
    }
    rowText += ","
  }

  return rowText + "\r"
}
|
|
|
@ -1,116 +0,0 @@
|
||||||
import { compileCode } from "../common/compileCode"
|
|
||||||
import { filter, includes, map, last } from "lodash/fp"
|
|
||||||
import {
|
|
||||||
getActualKeyOfParent,
|
|
||||||
isGlobalIndex,
|
|
||||||
getParentKey,
|
|
||||||
isShardedIndex,
|
|
||||||
} from "../templateApi/hierarchy"
|
|
||||||
import { joinKey, isNonEmptyString, splitKey, $ } from "../common"
|
|
||||||
|
|
||||||
/**
 * Returns the full key of the data file for `indexNode` under `indexDir`.
 * Sharded indexes (those with a getShardName expression) resolve to
 * "<shard>.csv"; unsharded indexes always use "index.csv".
 */
export const getIndexedDataKey = (indexNode, indexDir, record) => {
  const evaluateShardName = () => {
    const shardNameFunc = compileCode(indexNode.getShardName)
    try {
      return shardNameFunc({ record })
    } catch (e) {
      // enrich the error with the failing expression and record
      const errorDetails = `shardCode: ${
        indexNode.getShardName
      } :: record: ${JSON.stringify(record)} :: `
      e.message =
        "Error running index shardname func: " + errorDetails + e.message
      throw e
    }
  }

  const fileName = isNonEmptyString(indexNode.getShardName)
    ? `${evaluateShardName()}.csv`
    : "index.csv"

  return joinKey(indexDir, fileName)
}
|
|
||||||
|
|
||||||
/**
 * Lists the keys of all shard files in `indexDir` whose shard name falls
 * between the shards of startRecord and endRecord (inclusive). A null
 * bound is open-ended on that side.
 */
export const getShardKeysInRange = async (
  app,
  indexNode,
  indexDir,
  startRecord = null,
  endRecord = null
) => {
  const boundaryShard = record =>
    record
      ? shardNameFromKey(getIndexedDataKey(indexNode, indexDir, record))
      : null

  const startShardName = boundaryShard(startRecord)
  const endShardName = boundaryShard(endRecord)

  const shardMap = await getShardMap(app.datastore, indexDir)
  const inRange = k =>
    (startRecord === null || k >= startShardName) &&
    (endRecord === null || k <= endShardName)

  return $(shardMap, [
    filter(inRange),
    map(k => joinKey(indexDir, `${k}.csv`)),
  ])
}
|
|
||||||
|
|
||||||
/** Registers the shard of `indexedDataKey` in the shard map if absent. */
export const ensureShardNameIsInShardMap = async (
  store,
  indexDir,
  indexedDataKey
) => {
  // local renamed from `map` to avoid shadowing lodash's map
  const shardMap = await getShardMap(store, indexDir)
  const shardName = shardNameFromKey(indexedDataKey)
  if (includes(shardName)(shardMap)) return
  shardMap.push(shardName)
  await writeShardMap(store, indexDir, shardMap)
}
|
|
||||||
|
|
||||||
/**
 * Loads the shard map for an index directory. If no map exists yet, an
 * empty one is created and returned.
 */
export const getShardMap = async (datastore, indexDir) => {
  const shardMapKey = getShardMapKey(indexDir)
  try {
    return await datastore.loadJson(shardMapKey)
  } catch (_) {
    // missing map: initialise to empty then return it
    await datastore.createJson(shardMapKey, [])
    return []
  }
}
|
|
||||||
|
|
||||||
/** Persists `shardMap` as the shard map for `indexDir`. */
export const writeShardMap = async (datastore, indexDir, shardMap) => {
  const shardMapKey = getShardMapKey(indexDir)
  return await datastore.updateJson(shardMapKey, shardMap)
}
|
|
||||||
|
|
||||||
/** Lists every shard file key for the index (no range bounds). */
export const getAllShardKeys = async (app, indexNode, indexDir) => {
  return await getShardKeysInRange(app, indexNode, indexDir)
}
|
|
||||||
|
|
||||||
/** Key of the shard map file within an index directory. */
export const getShardMapKey = indexDir => {
  return joinKey(indexDir, "shardMap.json")
}
|
|
||||||
|
|
||||||
/** Key of the single data file used by an unsharded index. */
export const getUnshardedIndexDataKey = indexDir => {
  return joinKey(indexDir, "index.csv")
}
|
|
||||||
|
|
||||||
/**
 * Creates an empty index data file. For sharded indexes the new shard is
 * first registered in the shard map.
 */
export const createIndexFile = async (datastore, indexedDataKey, index) => {
  if (isShardedIndex(index)) {
    const parentDir = getParentKey(indexedDataKey)
    const currentMap = await getShardMap(datastore, parentDir)
    currentMap.push(shardNameFromKey(indexedDataKey))
    await writeShardMap(datastore, parentDir, currentMap)
  }
  await datastore.createFile(indexedDataKey, "")
}
|
|
||||||
|
|
||||||
/** Extracts the shard name (file name minus ".csv") from a full key. */
export const shardNameFromKey = key => {
  const fileName = last(splitKey(key))
  return fileName.replace(".csv", "")
}
|
|
||||||
|
|
||||||
/**
 * Resolves the key of `indexNode` given the key of any descendant
 * record. Global indexes are keyed by their own node key.
 */
export const getIndexKey_BasedOnDecendant = (decendantKey, indexNode) => {
  if (isGlobalIndex(indexNode)) {
    return `${indexNode.nodeKey()}`
  }

  const parentRecordKey = getActualKeyOfParent(
    indexNode.parent().nodeKey(),
    decendantKey
  )
  return joinKey(parentRecordKey, indexNode.name)
}
|
|
|
@ -1,29 +0,0 @@
|
||||||
import { find, take, union } from "lodash/fp"
|
|
||||||
import { getFlattenedHierarchy } from "../templateApi/hierarchy"
|
|
||||||
import { $, splitKey, joinKey } from "../common"
|
|
||||||
import { NotFoundError } from "../common/errors"
|
|
||||||
|
|
||||||
/**
 * Builds a record id of the form "<nodeId>-<id>" for the named node.
 * @throws NotFoundError when no node called `nodeName` exists.
 */
export const customId = app => (nodeName, id) => {
  const matchesName = n => n.name === nodeName
  const node = find(matchesName)(getFlattenedHierarchy(app.hierarchy))

  if (!node) throw new NotFoundError(`Cannot find node ${nodeName}`)

  return `${node.nodeId}-${id}`
}
|
|
||||||
|
|
||||||
// Overwrites a record's id with a custom one (customId) and rebuilds
// record.key from it. Mutates and returns `record`.
export const setCustomId = app => (record, id) => {
  record.id = customId(app)(record.type, id)

  const keyParts = splitKey(record.key)

  // NOTE(review): lodash/fp union(a)(b) yields [...a, ...new-of-b], so
  // this pipeline appears to place record.id BEFORE the parent key parts
  // in the rebuilt key — verify the resulting ordering is intended.
  record.key = $(keyParts, [
    take(keyParts.length - 1),
    union([record.id]),
    joinKey,
  ])

  return record
}
|
|
|
@ -1,35 +0,0 @@
|
||||||
import { safeKey, apiWrapper, events, joinKey } from "../common"
|
|
||||||
import { _load } from "./load"
|
|
||||||
import { _deleteCollection } from "../collectionApi/delete"
|
|
||||||
import { getExactNodeForKey } from "../templateApi/hierarchy"
|
|
||||||
import { transactionForDeleteRecord } from "../transactions/create"
|
|
||||||
import { permission } from "../authApi/permissions"
|
|
||||||
import { getRecordInfo } from "./recordInfo"
|
|
||||||
|
|
||||||
/** Permission-checked delete of the record at `key`. */
export const deleteRecord = (app, disableCleanup = false) => async key => {
  const recordKey = safeKey(key)
  return apiWrapper(
    app,
    events.recordApi.delete,
    permission.deleteRecord.isAuthorized(recordKey),
    { key: recordKey },
    _deleteRecord,
    app,
    recordKey,
    disableCleanup
  )
}
|
|
||||||
|
|
||||||
// called deleteRecord because delete is a keyword
|
|
||||||
export const _deleteRecord = async (app, key) => {
  const recordInfo = getRecordInfo(app.hierarchy, key)
  const recordKey = recordInfo.key
  const node = getExactNodeForKey(app.hierarchy)(recordKey)

  // remove child collections before the record file itself
  for (const childNode of node.children) {
    const collectionKey = joinKey(recordKey, childNode.collectionName)
    await _deleteCollection(app, collectionKey, true)
  }

  await app.datastore.deleteFile(recordKey)
}
|
|
|
@ -1,31 +0,0 @@
|
||||||
import { apiWrapper, events, isNothing } from "../common"
|
|
||||||
import { permission } from "../authApi/permissions"
|
|
||||||
import { safeGetFullFilePath } from "./uploadFile"
|
|
||||||
import { BadRequestError } from "../common/errors"
|
|
||||||
import { getRecordInfo } from "./recordInfo"
|
|
||||||
|
|
||||||
/** Permission-checked wrapper around _downloadFile. */
export const downloadFile = app => async (recordKey, relativePath) =>
  apiWrapper(
    app,
    // NOTE(review): fires the uploadFile event rather than a dedicated
    // download event — confirm this is intentional.
    events.recordApi.uploadFile,
    permission.readRecord.isAuthorized(recordKey),
    { recordKey, relativePath }, //remove dupe key 'recordKey' from object
    _downloadFile,
    app,
    recordKey,
    relativePath
  )
|
|
||||||
|
|
||||||
// Validates arguments then opens a readable stream on the file stored
// under the record's files directory.
const _downloadFile = async (app, recordKey, relativePath) => {
  if (isNothing(recordKey)) {
    throw new BadRequestError("Record Key not supplied")
  }
  if (isNothing(relativePath)) {
    throw new BadRequestError("file path not supplied")
  }

  const recordDir = getRecordInfo(app.hierarchy, recordKey).dir
  const fullPath = safeGetFullFilePath(recordDir, relativePath)
  return await app.datastore.readableFileStream(fullPath)
}
|
|
|
@ -1,76 +0,0 @@
|
||||||
import { map, isString, has, some } from "lodash/fp"
|
|
||||||
import {
|
|
||||||
getExactNodeForKey,
|
|
||||||
findField,
|
|
||||||
getNode,
|
|
||||||
isGlobalIndex,
|
|
||||||
} from "../templateApi/hierarchy"
|
|
||||||
import { listItems } from "../indexApi/listItems"
|
|
||||||
import { $, apiWrapperSync, events, safeKey } from "../common"
|
|
||||||
import { getIndexKey_BasedOnDecendant } from "../indexing/sharding"
|
|
||||||
import { permission } from "../authApi/permissions"
|
|
||||||
|
|
||||||
/** Permission-checked wrapper around _getContext. */
export const getContext = app => recordKey => {
  const key = safeKey(recordKey)
  return apiWrapperSync(
    app,
    events.recordApi.getContext,
    permission.readRecord.isAuthorized(key),
    { recordKey: key },
    _getContext,
    app,
    key
  )
}
|
|
||||||
|
|
||||||
// Builds the "context" used around a record: helpers to check and list
// reference-field options, with per-index lazy caching so each reference
// index is read at most once.
export const _getContext = (app, recordKey) => {
  recordKey = safeKey(recordKey)
  const recordNode = getExactNodeForKey(app.hierarchy)(recordKey)

  // indexNodeKey -> { typeOptions, data }
  const cachedReferenceIndexes = {}

  // Loads (once) and caches the reference index for the given typeOptions.
  const lazyLoadReferenceIndex = async typeOptions => {
    if (!has(typeOptions.indexNodeKey)(cachedReferenceIndexes)) {
      cachedReferenceIndexes[typeOptions.indexNodeKey] = {
        typeOptions,
        data: await readReferenceIndex(app, recordKey, typeOptions),
      }
    }

    return cachedReferenceIndexes[typeOptions.indexNodeKey]
  }

  // Accepts either a field name or a typeOptions object.
  const getTypeOptions = typeOptions_or_fieldName =>
    isString(typeOptions_or_fieldName)
      ? findField(recordNode, typeOptions_or_fieldName).typeOptions
      : typeOptions_or_fieldName

  return {
    // true when `key` appears in the field's reference index
    referenceExists: async (typeOptions_or_fieldName, key) => {
      const typeOptions = getTypeOptions(typeOptions_or_fieldName)
      const { data } = await lazyLoadReferenceIndex(typeOptions)
      return some(i => i.key === key)(data)
    },
    // all selectable options from the field's reference index
    referenceOptions: async typeOptions_or_fieldName => {
      const typeOptions = getTypeOptions(typeOptions_or_fieldName)
      const { data } = await lazyLoadReferenceIndex(typeOptions)
      return data
    },
    recordNode,
  }
}
|
|
||||||
|
|
||||||
// Loads a reference index and projects each item to { key, value }.
const readReferenceIndex = async (app, recordKey, typeOptions) => {
  const indexNode = getNode(app.hierarchy, typeOptions.indexNodeKey)
  const indexKey = isGlobalIndex(indexNode)
    ? indexNode.nodeKey()
    : getIndexKey_BasedOnDecendant(recordKey, indexNode)

  const items = await listItems(app)(indexKey)
  const toOption = i => ({
    key: i.key,
    value: i[typeOptions.displayValue],
  })
  return map(toOption)(items)
}
|
|
|
@ -1,51 +0,0 @@
|
||||||
import { keyBy, mapValues } from "lodash/fp"
|
|
||||||
import { generate } from "shortid"
|
|
||||||
import {
|
|
||||||
getNodeForCollectionPath,
|
|
||||||
isSingleRecord,
|
|
||||||
} from "../templateApi/hierarchy"
|
|
||||||
import { getNewFieldValue } from "../types"
|
|
||||||
import { $, joinKey, safeKey, apiWrapperSync, events } from "../common"
|
|
||||||
import { permission } from "../authApi/permissions"
|
|
||||||
|
|
||||||
/**
 * Returns a new (unsaved) record for the given collection, populated
 * with default field values. Permission is checked against the node.
 */
export const getNew = app => (collectionKey, recordTypeName) => {
  // NOTE(review): getRecordNode only declares (app, collectionKey);
  // recordTypeName is passed here but ignored — confirm.
  const recordNode = getRecordNode(app, collectionKey, recordTypeName)
  collectionKey = safeKey(collectionKey)
  return apiWrapperSync(
    app,
    events.recordApi.getNew,
    permission.createRecord.isAuthorized(recordNode.nodeKey()),
    { collectionKey, recordTypeName },
    _getNew,
    recordNode,
    collectionKey
  )
}
|
|
||||||
|
|
||||||
/**
 * Constructs a record object that can be saved to the backend.
 * @param {*} recordNode - record node describing the record's fields
 * @param {*} collectionKey - nested collection key that the record will be saved to.
 */
export const _getNew = (recordNode, collectionKey) => {
  return constructRecord(recordNode, getNewFieldValue, collectionKey)
}
|
|
||||||
|
|
||||||
// Resolves the record node that owns the given collection path.
// NOTE(review): callers pass a third `recordTypeName` argument which is
// ignored here — confirm that is intentional.
const getRecordNode = (app, collectionKey) => {
  const safeCollectionKey = safeKey(collectionKey)
  return getNodeForCollectionPath(app.hierarchy)(safeCollectionKey)
}
|
|
||||||
|
|
||||||
/** Convenience: getNew for a child collection of an existing record. */
export const getNewChild = app => (recordKey, collectionName, recordTypeName) => {
  const collectionKey = joinKey(recordKey, collectionName)
  return getNew(app)(collectionKey, recordTypeName)
}
|
|
||||||
|
|
||||||
/**
 * Builds a fresh record: one value per field (via getFieldValue) plus
 * the standard bookkeeping properties id, key, isNew and type.
 */
export const constructRecord = (recordNode, getFieldValue, collectionKey) => {
  const fieldsByName = keyBy("name")(recordNode.fields)
  const record = mapValues(getFieldValue)(fieldsByName)

  record.id = `${recordNode.nodeId}-${generate()}`
  // single records live under their name; collection records under their id
  record.key = isSingleRecord(recordNode)
    ? joinKey(collectionKey, recordNode.name)
    : joinKey(collectionKey, record.id)
  record.isNew = true
  record.type = recordNode.name
  return record
}
|
|
|
@ -1,27 +0,0 @@
|
||||||
import { getNew, getNewChild } from "./getNew"
|
|
||||||
import { load } from "./load"
|
|
||||||
import { validate } from "./validate"
|
|
||||||
import { getContext } from "./getContext"
|
|
||||||
import { save } from "./save"
|
|
||||||
import { deleteRecord } from "./delete"
|
|
||||||
import { uploadFile } from "./uploadFile"
|
|
||||||
import { downloadFile } from "./downloadFile"
|
|
||||||
import { customId, setCustomId } from "./customId"
|
|
||||||
|
|
||||||
// Assembles the public record API, binding each operation to `app`.
const api = app => {
  return {
    getNew: getNew(app),
    getNewChild: getNewChild(app),
    save: save(app),
    load: load(app),
    delete: deleteRecord(app, false),
    validate: validate(app),
    getContext: getContext(app),
    uploadFile: uploadFile(app),
    downloadFile: downloadFile(app),
    customId: customId(app),
    setCustomId: setCustomId(app),
  }
}
|
|
||||||
|
|
||||||
/** Entry point: builds the record API for an app instance. */
export const getRecordApi = app => {
  return api(app)
}
|
|
||||||
|
|
||||||
export default getRecordApi
|
|
|
@ -1,79 +0,0 @@
|
||||||
import { isString, flatten, map, filter } from "lodash/fp"
|
|
||||||
import { initialiseChildCollections } from "../collectionApi/initialise"
|
|
||||||
import { _loadFromInfo } from "./load"
|
|
||||||
import { $, joinKey } from "../common"
|
|
||||||
import {
|
|
||||||
getFlattenedHierarchy,
|
|
||||||
isModel,
|
|
||||||
getNode,
|
|
||||||
isTopLevelRecord,
|
|
||||||
fieldReversesReferenceToNode,
|
|
||||||
} from "../templateApi/hierarchy"
|
|
||||||
import { initialiseIndex } from "../indexing/initialiseIndex"
|
|
||||||
import { getRecordInfo } from "./recordInfo"
|
|
||||||
import { getAllIdsIterator } from "../indexing/allIds"
|
|
||||||
|
|
||||||
/**
 * Initialises everything beneath a record: reverse-reference indexes,
 * ancestor indexes, then child collections. Accepts either a record key
 * or a pre-computed recordInfo.
 */
export const initialiseChildren = async (app, recordInfoOrKey) => {
  const info = isString(recordInfoOrKey)
    ? getRecordInfo(app.hierarchy, recordInfoOrKey)
    : recordInfoOrKey

  await initialiseReverseReferenceIndexes(app, info)
  await initialiseAncestorIndexes(app, info)
  await initialiseChildCollections(app, info)
}
|
|
||||||
|
|
||||||
/**
 * Runs initialiseChildren for every instance of `recordNode`. Top-level
 * records have exactly one instance; collection records are enumerated
 * via the all-ids iterator.
 */
export const initialiseChildrenForNode = async (app, recordNode) => {
  if (isTopLevelRecord(recordNode)) {
    await initialiseChildren(app, recordNode.nodeKey())
    return
  }

  const collectionNodeKey = recordNode.parent().collectionNodeKey()
  const iterate = await getAllIdsIterator(app)(collectionNodeKey)

  let current = await iterate()
  while (!current.done) {
    for (const id of current.result.ids) {
      const recordKey = joinKey(current.result.collectionKey, id)
      await initialiseChildren(app, recordKey)
    }
    current = await iterate()
  }
}
|
|
||||||
|
|
||||||
// Creates any of the record's own index folders that don't yet exist.
const initialiseAncestorIndexes = async (app, recordInfo) => {
  for (const index of recordInfo.recordNode.indexes) {
    const indexKey = recordInfo.child(index.name)
    const alreadyExists = await app.datastore.exists(indexKey)
    if (!alreadyExists) {
      await initialiseIndex(app.datastore, recordInfo.dir, index)
    }
  }
}
|
|
||||||
|
|
||||||
// Creates the reverse-reference index folders for every field elsewhere
// in the hierarchy whose reference type points back at this record node.
const initialiseReverseReferenceIndexes = async (app, recordInfo) => {
  const referencingFields = fieldsThatReferenceThisModel(
    app,
    recordInfo.recordNode
  )

  const indexNodes = $(referencingFields, [
    map(f =>
      map(nodeKey => getNode(app.hierarchy, nodeKey))(
        f.typeOptions.reverseIndexNodeKeys
      )
    ),
    flatten,
  ])

  for (const indexNode of indexNodes) {
    await initialiseIndex(app.datastore, recordInfo.dir, indexNode)
  }
}
|
|
||||||
|
|
||||||
// All fields, anywhere in the hierarchy, whose reference type points
// back to `recordNode`.
const fieldsThatReferenceThisModel = (app, recordNode) => {
  const models = filter(isModel)(getFlattenedHierarchy(app.hierarchy))
  const allFields = flatten(map(n => n.fields)(models))
  return filter(fieldReversesReferenceToNode(recordNode))(allFields)
}
|
|
|
@ -1,77 +0,0 @@
|
||||||
import { keyBy, mapValues, filter, map, includes, last } from "lodash/fp"
|
|
||||||
import { getNode, getExactNodeForKey } from "../templateApi/hierarchy"
|
|
||||||
import { safeParseField } from "../types"
|
|
||||||
import {
|
|
||||||
$,
|
|
||||||
splitKey,
|
|
||||||
safeKey,
|
|
||||||
isNonEmptyString,
|
|
||||||
apiWrapper,
|
|
||||||
events,
|
|
||||||
joinKey,
|
|
||||||
} from "../common"
|
|
||||||
import { mapRecord } from "../indexing/evaluate"
|
|
||||||
import { permission } from "../authApi/permissions"
|
|
||||||
|
|
||||||
// NOTE(review): no-op stub — other modules import _loadFromInfo, so this
// keeps the export resolvable; confirm whether a real implementation is
// still required.
export const _loadFromInfo = () => {};
|
|
||||||
|
|
||||||
/** Key of the record.json document stored under a record key. */
export const getRecordFileName = key => {
  return joinKey(key, "record.json")
}
|
|
||||||
|
|
||||||
/** Permission-checked wrapper around _load. */
export const load = app => async key => {
  const recordKey = safeKey(key)
  return apiWrapper(
    app,
    events.recordApi.load,
    permission.readRecord.isAuthorized(recordKey),
    { key: recordKey },
    _load,
    app,
    recordKey
  )
}
|
|
||||||
|
|
||||||
// Loads a record by key, parsing each field with its declared type and
// recursively resolving reference fields. `keyStack` tracks visited keys
// to break reference cycles.
export const _load = async (app, key, keyStack = []) => {
  const recordNode = getExactNodeForKey(app.hierarchy)(key)
  const storedData = await app.datastore.loadJson(key)

  // field name -> parsed value
  const loadedRecord = $(recordNode.fields, [
    keyBy("name"),
    mapValues(f => safeParseField(f, storedData)),
  ])

  const newKeyStack = [...keyStack, key]

  // reference fields with a key we haven't already visited
  const references = $(recordNode.fields, [
    filter(
      f =>
        f.type === "reference" &&
        isNonEmptyString(loadedRecord[f.name].key) &&
        !includes(loadedRecord[f.name].key)(newKeyStack)
    ),
    map(f => ({
      promise: _load(app, loadedRecord[f.name].key, newKeyStack),
      index: getNode(app.hierarchy, f.typeOptions.indexNodeKey),
      field: f,
    })),
  ])

  if (references.length > 0) {
    // load all referenced records in parallel
    const refRecords = await Promise.all(map(p => p.promise)(references))

    // project each referenced record through its index's mapping and
    // store it in place of the raw reference value
    for (const ref of references) {
      loadedRecord[ref.field.name] = mapRecord(
        refRecords[references.indexOf(ref)],
        ref.index
      )
    }
  }

  // bookkeeping properties carried on every loaded record
  loadedRecord._rev = storedData._rev
  loadedRecord._id = storedData._id
  loadedRecord.key = key
  loadedRecord.id = $(key, [splitKey, last])
  loadedRecord.type = recordNode.name
  return loadedRecord
}
|
|
||||||
|
|
||||||
export default load
|
|
|
@ -1,112 +0,0 @@
|
||||||
import {
|
|
||||||
getExactNodeForKey,
|
|
||||||
getActualKeyOfParent,
|
|
||||||
isRoot,
|
|
||||||
isSingleRecord,
|
|
||||||
getNodeForCollectionPath,
|
|
||||||
} from "../templateApi/hierarchy"
|
|
||||||
import { reduce, find, filter } from "lodash/fp"
|
|
||||||
import { $, getFileFromKey, joinKey, safeKey, keySep } from "../common"
|
|
||||||
import { folderStructureArray, allIdChars } from "../indexing/allIds"
|
|
||||||
|
|
||||||
/**
 * Computes all path information for a record key: its node, its on-disk
 * directory and helpers for well-known children within it.
 */
export const getRecordInfo = (hierarchy, key) => {
  const recordNode = getExactNodeForKey(hierarchy)(key)
  const pathInfo = getRecordDirectory(recordNode, key)
  const dir = joinKey(pathInfo.base, ...pathInfo.subdirs)

  const info = {
    recordJson: recordJson(dir),
    files: files(dir),
    child: name => joinKey(dir, name),
    key: safeKey(key),
    recordNode,
    pathInfo,
    dir,
  }
  return info
}
|
|
||||||
|
|
||||||
/** Directory that holds all records of the given collection. */
export const getCollectionDir = (hierarchy, collectionKey) => {
  const recordNode = getNodeForCollectionPath(hierarchy)(collectionKey)
  // any syntactically valid child key works; only `base` is used
  const dummyRecordKey = joinKey(collectionKey, "1-abcd")
  return getRecordDirectory(recordNode, dummyRecordKey).base
}
|
|
||||||
|
|
||||||
// Path of the record's JSON document within its directory.
const recordJson = dir => joinKey(dir, "record.json")

// Path of the record's uploaded-files directory.
const files = dir => joinKey(dir, "files")
|
|
||||||
|
|
||||||
// Computes where a record lives on disk as { base, subdirs }. `base` is
// the record's collection directory, built from every ancestor
// collection's relative directory; `subdirs` is the shard path + id
// within it (empty for single records).
const getRecordDirectory = (recordNode, key) => {
  const id = getFileFromKey(key)

  // Walks up the hierarchy collecting each ancestor's relative directory
  // (root-most ancestor ends up first in the list).
  const traverseParentKeys = (n, parents = []) => {
    if (isRoot(n)) return parents
    const k = getActualKeyOfParent(n.nodeKey(), key)
    const thisNodeDir = {
      node: n,
      relativeDir: joinKey(recordRelativeDirectory(n, getFileFromKey(k))),
    }
    return traverseParentKeys(n.parent(), [thisNodeDir, ...parents])
  }

  // Fold ancestor dirs into a single path, starting from the key root.
  const parentDirs = $(recordNode.parent(), [
    traverseParentKeys,
    reduce((key, item) => {
      return joinKey(key, item.node.collectionName, item.relativeDir)
    }, keySep),
  ])

  const subdirs = isSingleRecord(recordNode)
    ? []
    : recordRelativeDirectory(recordNode, id)
  // single records are stored under their name; others under the
  // collection name
  const base = isSingleRecord(recordNode)
    ? joinKey(parentDirs, recordNode.name)
    : joinKey(parentDirs, recordNode.collectionName)

  return {
    subdirs,
    base,
  }
}
|
|
||||||
|
|
||||||
/**
 * Relative directory of a record within its collection folder:
 * [nodeId, shardFolder..., id]. Shard folders spread records across
 * subdirectories based on successive characters of the id.
 */
const recordRelativeDirectory = (recordNode, id) => {
  const folderStructure = folderStructureArray(recordNode)
  // drop the "<nodeId>-" prefix from the id
  const strippedId = id.substring(recordNode.nodeId.toString().length + 1)

  const subfolders = []
  let level = 0
  for (const folderCount of folderStructure) {
    const folder = folderForChar(strippedId[level], folderCount)
    if (folder) subfolders.push(folder)
    level++
  }

  return [recordNode.nodeId.toString(), ...subfolders, id]
}
|
|
||||||
|
|
||||||
/**
 * Picks the shard folder name containing the given id character.
 * Returns "" when only one shard exists (no extra nesting level).
 */
const folderForChar = (char, folderCount) => {
  if (folderCount === 1) return ""
  return $(folderCount, [
    idFoldersForFolderCount,
    find(f => f.includes(char)),
  ])
}
|
|
||||||
|
|
||||||
/**
 * Splits the 64-character id alphabet (allIdChars) into equally sized
 * shard names, e.g. folderCount=2 yields two 32-character strings.
 * NOTE: assumes folderCount divides 64 evenly; otherwise the modulo
 * boundary is never hit and trailing characters are dropped.
 */
const idFoldersForFolderCount = folderCount => {
  const charsPerShard = 64 / folderCount
  const shards = []
  let currentShard = ""
  for (let i = 0; i < 64; i++) {
    currentShard += allIdChars[i]
    // shard boundary reached - emit and start the next shard
    if ((i + 1) % charsPerShard === 0) {
      shards.push(currentShard)
      currentShard = ""
    }
  }
  return shards
}
|
|
|
@ -1,65 +0,0 @@
|
||||||
import { cloneDeep } from "lodash/fp"
|
|
||||||
import { validate } from "./validate"
|
|
||||||
import { _load } from "./load"
|
|
||||||
import { apiWrapper, events } from "../common"
|
|
||||||
import { permission } from "../authApi/permissions"
|
|
||||||
import { BadRequestError } from "../common/errors"
|
|
||||||
import { getExactNodeForKey } from "../templateApi/hierarchy"
|
|
||||||
|
|
||||||
/**
 * Public entry point for saving a record (create or update).
 * Defers to _save through apiWrapper, supplying the save event and the
 * permission check appropriate to the operation: a record carrying a
 * _rev has been persisted before, so it is treated as an update.
 * @param app - application context (hierarchy, datastore, publish, ...)
 * @returns the saved record clone produced by _save
 */
export const save = app => async (record, context) =>
  apiWrapper(
    app,
    events.recordApi.save,
    record._rev
      ? permission.updateRecord.isAuthorized(record.key)
      : permission.createRecord.isAuthorized(record.key),
    { record },
    _save,
    app,
    record,
    context,
    false // skipValidation - public api always validates
  )
|
|
||||||
|
|
||||||
/**
 * Persists a record: validates (unless skipValidation), creates or
 * updates the stored JSON depending on whether record._rev is set,
 * publishes the corresponding save event, and returns a clone of the
 * record with nodeKey and a fresh _rev.
 * @throws BadRequestError when validation fails
 * @throws Error when no hierarchy node matches record.key
 */
export const _save = async (app, record, context, skipValidation = false) => {
  // work on a clone so the caller's record object is never mutated
  const recordClone = cloneDeep(record)
  if (!skipValidation) {
    const validationResult = await validate(app)(recordClone, context)
    if (!validationResult.isValid) {
      await app.publish(events.recordApi.save.onInvalid, {
        record,
        validationResult,
      })
      throw new BadRequestError(
        `Save : Record Invalid : ${JSON.stringify(validationResult.errors)}`
      )
    }
  }

  const recordNode = getExactNodeForKey(app.hierarchy)(record.key)

  // BUGFIX: this guard previously sat inside the `!record._rev` branch,
  // after recordNode.nodeKey() had already been dereferenced - a missing
  // node therefore always surfaced as a TypeError rather than this
  // meaningful error. Check before first use, for updates too.
  if (!recordNode) throw new Error("Cannot find node for " + record.key)

  recordClone.nodeKey = recordNode.nodeKey()

  if (!record._rev) {
    // FILES
    // await app.datastore.createFolder(files)
    await app.datastore.createJson(record.key, recordClone)
    await app.publish(events.recordApi.save.onRecordCreated, {
      record: recordClone,
    })
  } else {
    // load the previous version so subscribers see old vs new
    const oldRecord = await _load(app, record.key)
    await app.datastore.updateJson(record.key, recordClone)
    await app.publish(events.recordApi.save.onRecordUpdated, {
      old: oldRecord,
      new: recordClone,
    })
  }

  // TODO: use nano.head to get _rev (saves loading whole doc)
  const savedResult = await app.datastore.loadFile(record.key)
  recordClone._rev = savedResult._rev
  return recordClone
}
|
|
|
@ -1,146 +0,0 @@
|
||||||
import { includes, filter, map, some } from "lodash/fp"
|
|
||||||
import { generate } from "shortid"
|
|
||||||
import { _loadFromInfo } from "./load"
|
|
||||||
import {
|
|
||||||
apiWrapper,
|
|
||||||
events,
|
|
||||||
splitKey,
|
|
||||||
$,
|
|
||||||
joinKey,
|
|
||||||
isNothing,
|
|
||||||
tryAwaitOrIgnore,
|
|
||||||
} from "../common"
|
|
||||||
import { getExactNodeForKey } from "../templateApi/hierarchy"
|
|
||||||
import { permission } from "../authApi/permissions"
|
|
||||||
import { isLegalFilename } from "../types/file"
|
|
||||||
import { BadRequestError, ForbiddenError } from "../common/errors"
|
|
||||||
import { getRecordInfo } from "./recordInfo"
|
|
||||||
|
|
||||||
/**
 * Public entry point for uploading a file against a record.
 * Defers to _uploadFile through apiWrapper, with the uploadFile event
 * and an updateRecord permission check on the target record.
 */
export const uploadFile = app => async (
  recordKey,
  readableStream,
  relativeFilePath
) =>
  apiWrapper(
    app,
    events.recordApi.uploadFile,
    permission.updateRecord.isAuthorized(recordKey),
    { recordKey, readableStream, relativeFilePath },
    _uploadFile,
    app,
    recordKey,
    readableStream,
    relativeFilePath
  )

/**
 * Streams an upload into the record's "files" directory.
 * Writes to a uniquely named temp file first, verifies the written size
 * against the record's file field metadata, then swaps the temp file
 * into its final place.
 * @throws BadRequestError for missing key/path, illegal filename, or a
 *         size mismatch against the record's file fields
 */
const _uploadFile = async (
  app,
  recordKey,
  readableStream,
  relativeFilePath
) => {
  if (isNothing(recordKey)) {
    throw new BadRequestError("Record Key not supplied")
  }
  if (isNothing(relativeFilePath)) {
    throw new BadRequestError("file path not supplied")
  }
  if (!isLegalFilename(relativeFilePath)) {
    throw new BadRequestError("Illegal filename")
  }

  const recordInfo = getRecordInfo(app.hierarchy, recordKey)
  const record = await _loadFromInfo(app, recordInfo)

  // safeGetFullFilePath rejects ".." traversal attempts
  const fullFilePath = safeGetFullFilePath(recordInfo.dir, relativeFilePath)

  // unique temp name so concurrent uploads to the same path cannot clash
  const tempFilePath = `${fullFilePath}_${generate()}.temp`

  const outputStream = await app.datastore.writableFileStream(tempFilePath)

  return new Promise((resolve, reject) => {
    readableStream.pipe(outputStream)
    outputStream.on("error", reject)
    outputStream.on("finish", resolve)
  })
    .then(() => app.datastore.getFileSize(tempFilePath))
    .then(size => {
      // the record's file fields carry an expected size - reject the
      // upload when the bytes actually written do not match
      const isExpectedFileSize = checkFileSizeAgainstFields(
        app,
        record,
        relativeFilePath,
        size
      )
      if (!isExpectedFileSize) {
        throw new BadRequestError(
          `Fields for ${relativeFilePath} do not have expected size.`
        )
      }
    })
    // remove any previous version before renaming the temp file in
    // NOTE(review): on failure above, the temp file is left behind - confirm intended
    .then(() => tryAwaitOrIgnore(app.datastore.deleteFile, fullFilePath))
    .then(() => app.datastore.renameFile(tempFilePath, fullFilePath))
}
|
|
||||||
|
|
||||||
/**
 * Verifies that the uploaded byte count matches the size recorded on the
 * record's file fields for the given relative path.
 * Checks both single "file" fields ({relativePath, size} values) and
 * "array<file>" fields (arrays of such descriptors).
 * @returns {boolean} true when no field disagrees with expectedSize
 */
const checkFileSizeAgainstFields = (
  app,
  record,
  relativeFilePath,
  expectedSize
) => {
  const recordNode = getExactNodeForKey(app.hierarchy)(record.key)

  // single-file fields pointing at this path with a mismatched size
  const incorrectFileFields = $(recordNode.fields, [
    filter(
      f =>
        f.type === "file" &&
        record[f.name].relativePath === relativeFilePath &&
        record[f.name].size !== expectedSize
    ),
    map(f => f.name),
  ])

  // array<file> fields: each element of record[a.name] is itself a file
  // descriptor ({relativePath, size}).
  // BUGFIX: the inner predicate previously indexed record[f.name], but f
  // is already the descriptor element (f.name is undefined) - this threw
  // a TypeError instead of checking the element's own path and size.
  const incorrectFileArrayFields = $(recordNode.fields, [
    filter(
      a =>
        a.type === "array<file>" &&
        $(record[a.name], [
          some(
            f =>
              f.relativePath === relativeFilePath &&
              f.size !== expectedSize
          ),
        ])
    ),
    map(f => f.name),
  ])

  const incorrectFields = [...incorrectFileFields, ...incorrectFileArrayFields]

  return incorrectFields.length === 0
}
|
|
||||||
|
|
||||||
/**
 * Builds the absolute datastore path for a file attached to a record,
 * rejecting any attempt to escape the record's "files" folder.
 * @param {string} recordDir - the record's directory
 * @param {string} relativeFilePath - caller-supplied path within "files"
 * @throws ForbiddenError when the path starts with or contains ".."
 */
export const safeGetFullFilePath = (recordDir, relativeFilePath) => {
  const rejectTraversal = () => {
    throw new ForbiddenError("naughty naughty")
  }

  // guard a leading ".." before even splitting the path
  if (relativeFilePath.startsWith("..")) rejectTraversal()

  const relativeParts = splitKey(relativeFilePath)

  // ...and ".." appearing as any individual segment
  if (includes("..")(relativeParts)) rejectTraversal()

  // "." segments are harmless no-ops - drop them rather than reject
  const meaningfulParts = filter(p => p !== ".")(relativeParts)

  return joinKey([...splitKey(recordDir), "files", ...meaningfulParts])
}
|
|
|
@ -1,91 +0,0 @@
|
||||||
import { map, reduce, filter, isEmpty, flatten, each } from "lodash/fp"
|
|
||||||
import { compileCode } from "../common/compileCode"
|
|
||||||
import _ from "lodash"
|
|
||||||
import { getExactNodeForKey } from "../templateApi/hierarchy"
|
|
||||||
import { validateFieldParse, validateTypeConstraints } from "../types"
|
|
||||||
import { $, isNothing, isNonEmptyString } from "../common"
|
|
||||||
import { _getContext } from "./getContext"
|
|
||||||
|
|
||||||
// Shapes one field-parse failure into the standard validation-error
// structure ({fields, message}) used throughout validate().
const fieldParseError = (fieldName, value) => {
  return {
    fields: [fieldName],
    message: `Could not parse field ${fieldName}:${value}`,
  }
}
|
|
||||||
|
|
||||||
/**
 * Runs the parse step for every field on the record's node.
 * @returns array of {fields, message} errors; empty when all parsed
 */
const validateAllFieldParse = (record, recordNode) =>
  $(recordNode.fields, [
    map(f => ({ name: f.name, parseResult: validateFieldParse(f, record) })),
    reduce((errors, f) => {
      if (f.parseResult.success) return errors
      errors.push(fieldParseError(f.name, f.parseResult.value))
      return errors
    }, []),
  ])

/**
 * Runs the (async) type-constraint checks for every field on the node.
 * @returns array of {message, fields} errors; empty when all valid
 */
const validateAllTypeConstraints = async (record, recordNode, context) => {
  const errors = []
  // sequential for-of because each field's constraints are awaited
  for (const field of recordNode.fields) {
    $(await validateTypeConstraints(field, record, context), [
      // constraint checks yield message strings; blanks mean "passed"
      filter(isNonEmptyString),
      map(m => ({ message: m, fields: [field.name] })),
      each(e => errors.push(e)),
    ])
  }
  return errors
}
|
|
||||||
|
|
||||||
/**
 * Evaluates the node's record-level validation rules.
 * Each rule's expressionWhenValid is compiled and executed with
 * {record, _} in scope; rules evaluating falsy produce an error entry.
 * @returns array of {fields, message} for every failed rule
 */
const runRecordValidationRules = (record, recordNode) => {
  const runValidationRule = rule => {
    const isValid = compileCode(rule.expressionWhenValid)
    // lodash is exposed to rule expressions as "_"
    const expressionContext = { record, _ }
    return isValid(expressionContext)
      ? { valid: true }
      : {
          valid: false,
          fields: rule.invalidFields,
          message: rule.messageWhenInvalid,
        }
  }

  return $(recordNode.validationRules, [
    map(runValidationRule),
    flatten,
    filter(r => r.valid === false),
    map(r => ({ fields: r.fields, message: r.message })),
  ])
}

/**
 * Full record validation: field parsing, record-level rules and type
 * constraints, in that order.
 * @returns {{isValid: boolean, errors: Array}}
 */
export const validate = app => async (record, context) => {
  // context may be supplied by the caller; otherwise derive from the key
  context = isNothing(context) ? _getContext(app, record.key) : context

  const recordNode = getExactNodeForKey(app.hierarchy)(record.key)
  const fieldParseFails = validateAllFieldParse(record, recordNode)

  // non parsing would cause further issues - exit here
  if (!isEmpty(fieldParseFails)) {
    return { isValid: false, errors: fieldParseFails }
  }

  const recordValidationRuleFails = runRecordValidationRules(record, recordNode)
  const typeContraintFails = await validateAllTypeConstraints(
    record,
    recordNode,
    context
  )

  if (
    isEmpty(fieldParseFails) &&
    isEmpty(recordValidationRuleFails) &&
    isEmpty(typeContraintFails)
  ) {
    return { isValid: true, errors: [] }
  }

  return {
    isValid: false,
    errors: _.union(
      fieldParseFails,
      typeContraintFails,
      recordValidationRuleFails
    ),
  }
}
|
|
|
@ -1,54 +0,0 @@
|
||||||
import {
|
|
||||||
findRoot,
|
|
||||||
getFlattenedHierarchy,
|
|
||||||
fieldReversesReferenceToIndex,
|
|
||||||
isModel,
|
|
||||||
} from "./hierarchy"
|
|
||||||
import { $ } from "../common"
|
|
||||||
import { map, filter, reduce } from "lodash/fp"
|
|
||||||
|
|
||||||
/**
 * Determines whether an index node can safely be deleted.
 * Deletion is blocked while any record field still uses the index,
 * either as a reversed-reference target or as a reference lookup.
 * @returns {{canDelete: boolean, errors: string[]}}
 */
export const canDeleteIndex = indexNode => {
  const flatHierarchy = $(indexNode, [findRoot, getFlattenedHierarchy])

  // error message per field that reverse-references into this index
  const reverseIndexes = $(flatHierarchy, [
    filter(isModel),
    reduce((obj, r) => {
      for (let field of r.fields) {
        if (fieldReversesReferenceToIndex(indexNode)(field)) {
          obj.push({ ...field, record: r })
        }
      }
      return obj
    }, []),
    map(
      f =>
        `field "${f.name}" on record "${f.record.name}" uses this index as a reference`
    ),
  ])

  // error message per reference-type field using this index as lookup
  const lookupIndexes = $(flatHierarchy, [
    filter(isModel),
    reduce((obj, r) => {
      for (let field of r.fields) {
        if (
          field.type === "reference" &&
          field.typeOptions.indexNodeKey === indexNode.nodeKey()
        ) {
          obj.push({ ...field, record: r })
        }
      }
      return obj
    }, []),
    map(
      f =>
        `field "${f.name}" on record "${f.record.name}" uses this index as a lookup`
    ),
  ])

  const errors = [...reverseIndexes, ...lookupIndexes]

  return {
    canDelete: errors.length === 0,
    errors,
  }
}
|
|
|
@ -1,45 +0,0 @@
|
||||||
import {
|
|
||||||
findRoot,
|
|
||||||
getFlattenedHierarchy,
|
|
||||||
fieldReversesReferenceToIndex,
|
|
||||||
isModel,
|
|
||||||
isAncestorIndex,
|
|
||||||
isAncestor,
|
|
||||||
} from "./hierarchy"
|
|
||||||
import { $ } from "../common"
|
|
||||||
import { map, filter, includes } from "lodash/fp"
|
|
||||||
|
|
||||||
/**
 * Determines whether a model (record) node can safely be deleted.
 * Deletion is blocked while any ancestor index still lists the node (or
 * any of its descendants, which would be deleted with it) in its
 * allowedModelNodeIds.
 * @returns {{errors: string[], canDelete: boolean}}
 */
export const canDeleteModel = modelNode => {
  const flatHierarchy = $(modelNode, [findRoot, getFlattenedHierarchy])

  const ancestors = $(flatHierarchy, [filter(isAncestor(modelNode))])

  // is this index attached to one of the model's ancestors?
  const belongsToAncestor = i => ancestors.includes(i.parent())

  // recursively collects blocking-index messages for node and children
  const errorsForNode = node => {
    const errorsThisNode = $(flatHierarchy, [
      filter(
        i =>
          isAncestorIndex(i) &&
          belongsToAncestor(i) &&
          includes(node.nodeId)(i.allowedModelNodeIds)
      ),
      map(
        i =>
          `index "${i.name}" indexes this model. Please remove the model from the index, or delete the index`
      ),
    ])

    for (let child of node.children) {
      for (let err of errorsForNode(child)) {
        errorsThisNode.push(err)
      }
    }

    return errorsThisNode
  }

  const errors = errorsForNode(modelNode)

  return { errors, canDelete: errors.length === 0 }
}
|
|
|
@ -1,22 +0,0 @@
|
||||||
/**
 * Blank trigger template: wires an event to an action.
 * optionsCreator - function source; has access to event context and
 * returns the object passed as the action's parameter (only used when
 * fired by an event). condition - the action runs only when this
 * evaluates truthy (also has access to event context).
 */
export const createTrigger = () => {
  return {
    actionName: "",
    eventName: "",
    optionsCreator: "",
    condition: "",
  }
}

/**
 * Blank action template.
 * behaviourName - name of a function inside behaviourSource.
 * initialOptions - base parameter for the behaviour; anything supplied
 * at runtime (by a trigger, or manually) is merged over it.
 */
export const createAction = () => {
  return {
    name: "",
    behaviourSource: "",
    behaviourName: "",
    initialOptions: {},
  }
}
|
|
|
@ -1,242 +0,0 @@
|
||||||
import { each, find } from "lodash"
|
|
||||||
import { map, max, constant } from "lodash/fp"
|
|
||||||
import {
|
|
||||||
switchCase,
|
|
||||||
defaultCase,
|
|
||||||
joinKey,
|
|
||||||
$,
|
|
||||||
isNothing,
|
|
||||||
isSomething,
|
|
||||||
} from "../common"
|
|
||||||
import {
|
|
||||||
isIndex,
|
|
||||||
isRoot,
|
|
||||||
isSingleRecord,
|
|
||||||
isCollectionRecord,
|
|
||||||
isModel,
|
|
||||||
isaggregateGroup,
|
|
||||||
getFlattenedHierarchy,
|
|
||||||
} from "./hierarchy"
|
|
||||||
import { all } from "../types"
|
|
||||||
import { BadRequestError } from "../common/errors"
|
|
||||||
|
|
||||||
// Error messages raised by node construction/validation in this module.
export const createNodeErrors = {
  indexCannotBeParent: "Index template cannot be a parent",
  allNonRootNodesMustHaveParent: "Only the root node may have no parent",
  indexParentMustBeRecordOrRoot:
    "An index may only have a record or root as a parent",
  aggregateParentMustBeAnIndex: "aggregateGroup parent must be an index",
}
|
|
||||||
|
|
||||||
// Builds a regex string matching any concrete key under this node,
// widening every "{id}" placeholder in the node key to an id pattern.
const pathRegxMaker = node => () => {
  const idPattern = "[a-zA-Z0-9_-]+"
  return node.nodeKey().replace(/{id}/g, idPattern)
}
|
|
||||||
|
|
||||||
/**
 * Builds the node's template key.
 * Collection records: "<parentKey>/<collectionName>/<nodeId>-{id}";
 * root: "/"; everything else (single records, indexes, aggregate
 * groups): "<parentKey>/<name>".
 */
const nodeKeyMaker = node => () =>
  switchCase(
    [
      // collection record - keyed by collection folder + {id} placeholder
      n => isModel(n) && !isSingleRecord(n),
      n =>
        joinKey(
          node.parent().nodeKey(),
          node.collectionName,
          `${n.nodeId}-{id}`
        ),
    ],

    [isRoot, constant("/")],

    [defaultCase, n => joinKey(node.parent().nodeKey(), n.name)]
  )(node)

// Human-readable node path built from node names (no ids or collections).
const nodeNameMaker = node => () =>
  isRoot(node)
    ? "/"
    : joinKey(node.parent().nodeName(), node.name)
|
|
||||||
|
|
||||||
/**
 * Checks parent/child pairing rules for a freshly constructed node:
 * - an index may only be parented by the root or a record node
 * - an aggregateGroup may only be parented by an index
 * - every non-root node must have a parent
 * @throws BadRequestError with the matching createNodeErrors message
 * @returns the node unchanged, for pipelining
 */
const validate = parent => node => {
  const indexBadlyParented =
    isIndex(node) &&
    isSomething(parent) &&
    !isRoot(parent) &&
    !isModel(parent)
  if (indexBadlyParented) {
    throw new BadRequestError(createNodeErrors.indexParentMustBeRecordOrRoot)
  }

  const aggregateBadlyParented =
    isaggregateGroup(node) && isSomething(parent) && !isIndex(parent)
  if (aggregateBadlyParented) {
    throw new BadRequestError(createNodeErrors.aggregateParentMustBeAnIndex)
  }

  const orphanedNonRoot = isNothing(parent) && !isRoot(node)
  if (orphanedNonRoot) {
    throw new BadRequestError(createNodeErrors.allNonRootNodesMustHaveParent)
  }

  return node
}
|
|
||||||
|
|
||||||
/**
 * Attaches behaviour to a plain node object: key/name/path makers, a
 * parent accessor and an isRoot check. Mutates and returns the node.
 */
const construct = parent => node => {
  node.nodeKey = nodeKeyMaker(node)
  node.nodeName = nodeNameMaker(node)
  node.pathRegx = pathRegxMaker(node)
  node.parent = constant(parent)
  node.isRoot = () =>
    isNothing(parent) && node.name === "root" && node.type === "root"
  if (isCollectionRecord(node)) {
    // key/regx of the collection folder that contains this record type
    node.collectionNodeKey = () =>
      joinKey(parent.nodeKey(), node.collectionName)
    node.collectionPathRegx = () =>
      joinKey(parent.pathRegx(), node.collectionName)
  }
  return node
}

/**
 * Registers a constructed node on its parent: indexes and aggregate
 * groups go to dedicated collections, everything else to children.
 * A newly added model is also appended to the parent's default index
 * ("<parent.name>_index") when one exists.
 */
const addToParent = obj => {
  const parent = obj.parent()
  if (isSomething(parent)) {
    if (isIndex(obj)) {
      // Q: why are indexes not children ?
      // A: because they cannot have children of their own.
      parent.indexes.push(obj)
    } else if (isaggregateGroup(obj)) {
      parent.aggregateGroups.push(obj)
    } else {
      parent.children.push(obj)
    }

    if (isModel(obj)) {
      const defaultIndex = find(
        parent.indexes,
        i => i.name === `${parent.name}_index`
      )
      if (defaultIndex) {
        defaultIndex.allowedModelNodeIds.push(obj.nodeId)
      }
    }
  }
  return obj
}

// Full node setup pipeline: attach behaviour, validate parentage,
// then register on the parent.
export const constructNode = (parent, obj) =>
  $(obj, [construct(parent), validate(parent), addToParent])
|
|
||||||
|
|
||||||
/**
 * Next free nodeId: one greater than the maximum nodeId anywhere in the
 * hierarchy that parentNode belongs to.
 * @returns {number|null} null when no parent is supplied
 */
const getNodeId = parentNode => {
  // this case is handled better elsewhere
  if (!parentNode) return null
  const findRoot = n => (isRoot(n) ? n : findRoot(n.parent()))
  const root = findRoot(parentNode)

  return $(root, [getFlattenedHierarchy, map(n => n.nodeId), max]) + 1
}

/**
 * Rebuilds behaviour on a deserialized hierarchy: re-attaches the maker
 * functions to every node (records, indexes, aggregate groups,
 * recursively) and re-parses each field's typeOptions via its type
 * definition, dropping unrecognised options.
 * Mutates and returns the node.
 */
export const constructHierarchy = (node, parent) => {
  construct(parent)(node)
  if (node.indexes) {
    each(node.indexes, child => constructHierarchy(child, node))
  }
  if (node.aggregateGroups) {
    each(node.aggregateGroups, child => constructHierarchy(child, node))
  }
  if (node.children && node.children.length > 0) {
    each(node.children, child => constructHierarchy(child, node))
  }
  if (node.fields) {
    each(node.fields, f =>
      each(f.typeOptions, (val, key) => {
        const def = all[f.type].optionDefinitions[key]
        if (!def) {
          // unknown typeOption
          delete f.typeOptions[key]
        } else {
          f.typeOptions[key] = def.parse(val)
        }
      })
    )
  }
  return node
}
|
|
||||||
|
|
||||||
// Factory for a fresh root node (nodeId 0, no parent).
export const getNewRootLevel = () =>
  construct()({
    name: "root",
    type: "root",
    children: [],
    pathMaps: [],
    indexes: [],
    nodeId: 0,
  })

/**
 * Shared factory behind getNewModelTemplate and
 * getNewSingleRecordTemplate. Optionally creates a default
 * "<name>_index" index on the parent and registers the new node on it.
 */
const _getNewModelTemplate = (parent, name, createDefaultIndex, isSingle) => {
  const nodeId = getNodeId(parent)
  const node = constructNode(parent, {
    name,
    type: "record",
    fields: [],
    children: [],
    validationRules: [],
    nodeId: nodeId,
    indexes: [],
    // nested records assumed much smaller than top-level collections
    estimatedRecordCount: isModel(parent) ? 500 : 1000000,
    collectionName: (nodeId || "").toString(),
    isSingle,
  })

  if (createDefaultIndex) {
    const defaultIndex = getNewIndexTemplate(parent)
    defaultIndex.name = `${name}_index`
    defaultIndex.allowedModelNodeIds.push(node.nodeId)
  }

  return node
}
|
|
||||||
|
|
||||||
/**
 * Factory for a collection record node. By default also creates a
 * "<name>_index" default index on the parent.
 */
export const getNewModelTemplate = (
  parent,
  name = "",
  createDefaultIndex = true
) => _getNewModelTemplate(parent, name, createDefaultIndex, false)

// Factory for a single (non-collection) record node - no default index.
export const getNewSingleRecordTemplate = parent =>
  _getNewModelTemplate(parent, "", false, true)
|
|
||||||
|
|
||||||
/**
 * Factory for an index node attached to parent.
 * map / filter / getShardName / getSortKey hold javascript source
 * strings evaluated when the index runs.
 * @param {string} type - index type, defaults to "ancestor"
 */
export const getNewIndexTemplate = (parent, type = "ancestor") =>
  constructNode(parent, {
    name: "",
    type: "index",
    map: "return {...record};",
    filter: "",
    indexType: type,
    getShardName: "",
    getSortKey: "record.id",
    aggregateGroups: [],
    allowedModelNodeIds: [],
    nodeId: getNodeId(parent),
  })

// Factory for an aggregate-group node attached to an index.
export const getNewAggregateGroupTemplate = index =>
  constructNode(index, {
    name: "",
    type: "aggregateGroup",
    groupBy: "",
    aggregates: [],
    condition: "",
    nodeId: getNodeId(index),
  })
|
|
||||||
|
|
||||||
/**
 * Adds a blank aggregate definition to an aggregate-group and returns
 * it. Note: mutates set.aggregates in place.
 * @param set - an aggregate-group node (must have an aggregates array)
 * @returns the newly created aggregate object
 */
export const getNewAggregateTemplate = set => {
  const newAggregate = {
    name: "",
    aggregatedValue: "",
  }
  set.aggregates.push(newAggregate)
  return newAggregate
}
|
|
||||||
|
|
||||||
// Aggregated default export of the node-template api.
export default {
  getNewRootLevel,
  getNewModelTemplate,
  getNewIndexTemplate,
  createNodeErrors,
  constructHierarchy,
  getNewAggregateGroupTemplate,
  getNewAggregateTemplate,
}
|
|
|
@ -1,30 +0,0 @@
|
||||||
import { getAllIdsIterator } from "../indexing/allIds"
|
|
||||||
import { getRecordInfo } from "../recordApi/recordInfo"
|
|
||||||
import { isTopLevelIndex, getParentKey, getLastPartInKey } from "./hierarchy"
|
|
||||||
import { safeKey, joinKey } from "../common"
|
|
||||||
|
|
||||||
/**
 * Removes every stored index folder for an index node.
 * A top-level index lives in a single folder at its node key; otherwise
 * iterate every id of the parent record collection and delete each
 * per-record index folder.
 */
export const deleteAllIndexFilesForNode = async (app, indexNode) => {
  if (isTopLevelIndex(indexNode)) {
    await app.datastore.deleteFolder(indexNode.nodeKey())
    return
  }

  const iterate = await getAllIdsIterator(app)(indexNode.parent().nodeKey())
  let iterateResult = await iterate()
  while (!iterateResult.done) {
    const { result } = iterateResult
    for (const id of result.ids) {
      const deletingIndexKey = joinKey(result.collectionKey, id, indexNode.name)
      await deleteIndexFolder(app, deletingIndexKey)
    }
    iterateResult = await iterate()
  }
}

// Deletes one index folder, resolving it through the parent record's
// physical directory.
const deleteIndexFolder = async (app, indexKey) => {
  indexKey = safeKey(indexKey)
  const indexName = getLastPartInKey(indexKey)
  const parentRecordKey = getParentKey(indexKey)
  const recordInfo = getRecordInfo(app.hierarchy, parentRecordKey)
  await app.datastore.deleteFolder(joinKey(recordInfo.dir, indexName))
}
|
|
|
@ -1,33 +0,0 @@
|
||||||
import { getAllIdsIterator } from "../indexing/allIds"
|
|
||||||
import { getCollectionDir } from "../recordApi/recordInfo"
|
|
||||||
import { isTopLevelRecord, getCollectionKey } from "./hierarchy"
|
|
||||||
import { safeKey, joinKey } from "../common"
|
|
||||||
|
|
||||||
/**
 * Removes every stored record for a record node.
 * A top-level record collection is deleted directly; otherwise iterate
 * every id of the parent collection and delete this node's collection
 * folder under each parent record.
 */
export const deleteAllRecordsForNode = async (app, recordNode) => {
  if (isTopLevelRecord(recordNode)) {
    await deleteRecordCollection(app, recordNode.collectionName)
    return
  }

  const iterate = await getAllIdsIterator(app)(recordNode.parent().nodeKey())
  let iterateResult = await iterate()
  while (!iterateResult.done) {
    const { result } = iterateResult
    for (const id of result.ids) {
      const deletingCollectionKey = joinKey(
        result.collectionKey,
        id,
        recordNode.collectionName
      )
      await deleteRecordCollection(app, deletingCollectionKey)
    }
    iterateResult = await iterate()
  }
}

// Deletes one collection folder, resolved via getCollectionDir.
const deleteRecordCollection = async (app, collectionKey) => {
  collectionKey = safeKey(collectionKey)
  await app.datastore.deleteFolder(
    getCollectionDir(app.hierarchy, collectionKey)
  )
}
|
|
|
@ -1,9 +0,0 @@
|
||||||
import { } from "../templateApi/heirarchy"
|
|
||||||
|
|
||||||
// TODO: unimplemented placeholder - intended rules for node deletion
// are sketched in the comment below.
export const canDelete = () => {
  /*
  it must not exist on any index.allowedModelNodeIds
  it must not exist on and reference type fields
  these rules should apply to any child nodes , which will also be deleted
  */
}
|
|
|
@ -1,203 +0,0 @@
|
||||||
import {
|
|
||||||
getFlattenedHierarchy,
|
|
||||||
isModel,
|
|
||||||
isIndex,
|
|
||||||
isAncestor,
|
|
||||||
} from "./hierarchy"
|
|
||||||
import { $, none } from "../common"
|
|
||||||
import { map, filter, some, find, difference } from "lodash/fp"
|
|
||||||
|
|
||||||
// Labels for each kind of hierarchy change detected by diffHierarchy.
export const HierarchyChangeTypes = {
  recordCreated: "Record Created",
  recordDeleted: "Record Deleted",
  recordRenamed: "Record Renamed",
  recordFieldsChanged: "Record Fields Changed",
  recordEstimatedRecordTypeChanged: "Record's Estimated Record Count Changed",
  indexCreated: "Index Created",
  indexDeleted: "Index Deleted",
  indexChanged: "Index Changed",
}

/**
 * Computes the list of changes between two hierarchies.
 * @returns array of {type, oldNode, newNode} change items covering
 * record create/delete/rename/field/estimate changes plus index
 * create/delete/update changes.
 */
export const diffHierarchy = (oldHierarchy, newHierarchy) => {
  const oldHierarchyFlat = getFlattenedHierarchy(oldHierarchy)
  const newHierarchyFlat = getFlattenedHierarchy(newHierarchy)

  // computed up-front because the index diffs need them to exclude
  // indexes implied by whole-record creation/deletion
  const createdRecords = findCreatedRecords(oldHierarchyFlat, newHierarchyFlat)
  const deletedRecords = findDeletedRecords(oldHierarchyFlat, newHierarchyFlat)

  return [
    ...createdRecords,
    ...deletedRecords,
    ...findRenamedRecords(oldHierarchyFlat, newHierarchyFlat),
    ...findRecordsWithFieldsChanged(oldHierarchyFlat, newHierarchyFlat),
    ...findRecordsWithEstimatedRecordTypeChanged(
      oldHierarchyFlat,
      newHierarchyFlat
    ),
    ...findCreatedIndexes(oldHierarchyFlat, newHierarchyFlat, createdRecords),
    ...findDeletedIndexes(oldHierarchyFlat, newHierarchyFlat, deletedRecords),
    ...findUpdatedIndexes(oldHierarchyFlat, newHierarchyFlat),
  ]
}
|
|
||||||
|
|
||||||
// Uniform shape for one hierarchy-diff entry; oldNode/newNode may be
// null for pure creations/deletions.
const changeItem = (type, oldNode, newNode) => {
  return { type, oldNode, newNode }
}
|
|
||||||
|
|
||||||
/**
 * Record nodes present in new but not old. Entries nested inside
 * another created entry are filtered out - creating the ancestor
 * presumably implies its descendants.
 */
const findCreatedRecords = (oldHierarchyFlat, newHierarchyFlat) => {
  const allCreated = $(newHierarchyFlat, [
    filter(isModel),
    filter(nodeDoesNotExistIn(oldHierarchyFlat)),
    map(n => changeItem(HierarchyChangeTypes.recordCreated, null, n)),
  ])

  return $(allCreated, [
    filter(r => none(r2 => isAncestor(r.newNode)(r2.newNode))(allCreated)),
  ])
}

/**
 * Record nodes present in old but not new; entries nested inside
 * another deleted entry are filtered out, mirroring findCreatedRecords.
 */
const findDeletedRecords = (oldHierarchyFlat, newHierarchyFlat) => {
  const allDeleted = $(oldHierarchyFlat, [
    filter(isModel),
    filter(nodeDoesNotExistIn(newHierarchyFlat)),
    map(n => changeItem(HierarchyChangeTypes.recordDeleted, n, null)),
  ])

  return $(allDeleted, [
    filter(r => none(r2 => isAncestor(r.oldNode)(r2.oldNode))(allDeleted)),
  ])
}
|
|
||||||
|
|
||||||
/**
 * Records existing in both hierarchies whose collectionKey changed
 * (i.e. they were renamed/moved).
 */
const findRenamedRecords = (oldHierarchyFlat, newHierarchyFlat) =>
  $(oldHierarchyFlat, [
    filter(isModel),
    filter(nodeExistsIn(newHierarchyFlat)),
    filter(
      nodeChanged(
        newHierarchyFlat,
        (_new, old) => _new.collectionKey !== old.collectionKey
      )
    ),
    map(n =>
      changeItem(
        HierarchyChangeTypes.recordRenamed,
        n,
        findNodeIn(n, newHierarchyFlat)
      )
    ),
  ])

/**
 * Records existing in both hierarchies whose field set differs
 * (see hasDifferentFields).
 */
const findRecordsWithFieldsChanged = (oldHierarchyFlat, newHierarchyFlat) =>
  $(oldHierarchyFlat, [
    filter(isModel),
    filter(nodeExistsIn(newHierarchyFlat)),
    filter(hasDifferentFields(newHierarchyFlat)),
    map(n =>
      changeItem(
        HierarchyChangeTypes.recordFieldsChanged,
        n,
        findNodeIn(n, newHierarchyFlat)
      )
    ),
  ])

/**
 * Records existing in both hierarchies whose estimatedRecordCount
 * changed (this affects the sharded folder layout).
 */
const findRecordsWithEstimatedRecordTypeChanged = (
  oldHierarchyFlat,
  newHierarchyFlat
) =>
  $(oldHierarchyFlat, [
    filter(isModel),
    filter(nodeExistsIn(newHierarchyFlat)),
    filter(
      nodeChanged(
        newHierarchyFlat,
        (_new, old) => _new.estimatedRecordCount !== old.estimatedRecordCount
      )
    ),
    map(n =>
      changeItem(
        HierarchyChangeTypes.recordEstimatedRecordTypeChanged,
        n,
        findNodeIn(n, newHierarchyFlat)
      )
    ),
  ])
|
|
||||||
|
|
||||||
// Indexes that exist only in the new hierarchy. Indexes living underneath a
// freshly created record are excluded — creating the record already implies
// creating its indexes.
const findCreatedIndexes = (
  oldHierarchyFlat,
  newHierarchyFlat,
  createdRecords
) => {
  const brandNewIndexes = $(newHierarchyFlat, [
    filter(isIndex),
    filter(nodeDoesNotExistIn(oldHierarchyFlat)),
    map(newNode => changeItem(HierarchyChangeTypes.indexCreated, null, newNode)),
  ])

  return brandNewIndexes.filter(change =>
    none(created => isAncestor(change.newNode)(created.newNode))(createdRecords)
  )
}
|
|
||||||
|
|
||||||
// Indexes that exist only in the old hierarchy. Indexes under a deleted
// record are excluded — they are already covered by the record deletion.
const findDeletedIndexes = (
  oldHierarchyFlat,
  newHierarchyFlat,
  deletedRecords
) => {
  const removedIndexes = $(oldHierarchyFlat, [
    filter(isIndex),
    filter(nodeDoesNotExistIn(newHierarchyFlat)),
    map(oldNode => changeItem(HierarchyChangeTypes.indexDeleted, oldNode, null)),
  ])

  return removedIndexes.filter(change =>
    none(del => isAncestor(change.oldNode)(del.oldNode))(deletedRecords)
  )
}
|
|
||||||
|
|
||||||
// Indexes present in both hierarchies whose definition changed
// (per indexHasChanged). Emits one indexChanged item per affected index.
const findUpdatedIndexes = (oldHierarchyFlat, newHierarchyFlat) => {
  const toIndexChangedItem = oldIndex =>
    changeItem(
      HierarchyChangeTypes.indexChanged,
      oldIndex,
      findNodeIn(oldIndex, newHierarchyFlat)
    )
  return $(oldHierarchyFlat, [
    filter(isIndex),
    filter(nodeExistsIn(newHierarchyFlat)),
    filter(nodeChanged(newHierarchyFlat, indexHasChanged)),
    map(toIndexChangedItem),
  ])
}
|
|
||||||
|
|
||||||
// True when record1's field set differs from its counterpart (matched by
// nodeId) in the other flat hierarchy — either a different field count, or
// some field with no same-named, same-typed match.
const hasDifferentFields = otherFlatHierarchy => record1 => {
  const counterpart = findNodeIn(record1, otherFlatHierarchy)

  if (record1.fields.length !== counterpart.fields.length) return true

  return record1.fields.some(f => none(isFieldSame(f))(counterpart.fields))
}
|
|
||||||
|
|
||||||
// True when any part of an index definition differs between versions.
// Bug fix: the original only checked `difference(_new.ids)(old.ids)` —
// ids *removed* from allowedModelNodeIds produced an empty difference and
// the change went undetected. Comparing lengths first catches removals.
const indexHasChanged = (_new, old) =>
  _new.map !== old.map ||
  _new.filter !== old.filter ||
  _new.getShardName !== old.getShardName ||
  _new.allowedModelNodeIds.length !== old.allowedModelNodeIds.length ||
  difference(_new.allowedModelNodeIds)(old.allowedModelNodeIds).length > 0
|
|
||||||
|
|
||||||
// Two field definitions are considered equal when both name and type match.
const isFieldSame = f1 => f2 => !(f1.name !== f2.name || f1.type !== f2.type)
|
|
||||||
|
|
||||||
const nodeDoesNotExistIn = inThis => node =>
|
|
||||||
none(n => n.nodeId === node.nodeId)(inThis)
|
|
||||||
|
|
||||||
const nodeExistsIn = inThis => node =>
|
|
||||||
some(n => n.nodeId === node.nodeId)(inThis)
|
|
||||||
|
|
||||||
const nodeChanged = (inThis, isChanged) => node =>
|
|
||||||
some(n => n.nodeId === node.nodeId && isChanged(n, node))(inThis)
|
|
||||||
|
|
||||||
const findNodeIn = (node, inThis) => find(n => n.nodeId === node.nodeId)(inThis)
|
|
|
@ -1,96 +0,0 @@
|
||||||
import { some, map, filter, keys, includes, countBy, flatten } from "lodash/fp"
|
|
||||||
import {
|
|
||||||
isSomething,
|
|
||||||
$,
|
|
||||||
isNonEmptyString,
|
|
||||||
isNothingOrEmpty,
|
|
||||||
isNothing,
|
|
||||||
} from "../common"
|
|
||||||
import { all, getDefaultOptions } from "../types"
|
|
||||||
import { applyRuleSet, makerule } from "../common/validationCommon"
|
|
||||||
import { BadRequestError } from "../common/errors"
|
|
||||||
import { generate } from "shortid"
|
|
||||||
|
|
||||||
export const fieldErrors = {
|
|
||||||
AddFieldValidationFailed: "Add field validation: ",
|
|
||||||
}
|
|
||||||
|
|
||||||
export const allowedTypes = () => keys(all)
|
|
||||||
|
|
||||||
export const getNewField = type => ({
|
|
||||||
id: generate(),
|
|
||||||
name: "", // how field is referenced internally
|
|
||||||
type,
|
|
||||||
typeOptions: getDefaultOptions(type),
|
|
||||||
label: "", // how field is displayed
|
|
||||||
getInitialValue: "default", // function that gets value when initially created
|
|
||||||
getUndefinedValue: "default", // function that gets value when field undefined on record
|
|
||||||
})
|
|
||||||
|
|
||||||
const fieldRules = allFields => [
|
|
||||||
makerule("name", "field name is not set", f => isNonEmptyString(f.name)),
|
|
||||||
makerule("type", "field type is not set", f => isNonEmptyString(f.type)),
|
|
||||||
makerule("label", "field label is not set", f => isNonEmptyString(f.label)),
|
|
||||||
makerule("getInitialValue", "getInitialValue function is not set", f =>
|
|
||||||
isNonEmptyString(f.getInitialValue)
|
|
||||||
),
|
|
||||||
makerule("getUndefinedValue", "getUndefinedValue function is not set", f =>
|
|
||||||
isNonEmptyString(f.getUndefinedValue)
|
|
||||||
),
|
|
||||||
makerule(
|
|
||||||
"name",
|
|
||||||
"field name is duplicated",
|
|
||||||
f => isNothingOrEmpty(f.name) || countBy("name")(allFields)[f.name] === 1
|
|
||||||
),
|
|
||||||
makerule(
|
|
||||||
"type",
|
|
||||||
"type is unknown",
|
|
||||||
f => isNothingOrEmpty(f.type) || some(t => f.type === t)(allowedTypes())
|
|
||||||
),
|
|
||||||
]
|
|
||||||
|
|
||||||
const typeOptionsRules = field => {
|
|
||||||
const type = all[field.type]
|
|
||||||
if (isNothing(type)) return []
|
|
||||||
|
|
||||||
const def = optName => type.optionDefinitions[optName]
|
|
||||||
|
|
||||||
return $(field.typeOptions, [
|
|
||||||
keys,
|
|
||||||
filter(o => isSomething(def(o)) && isSomething(def(o).isValid)),
|
|
||||||
map(o =>
|
|
||||||
makerule(`typeOptions.${o}`, `${def(o).requirementDescription}`, field =>
|
|
||||||
def(o).isValid(field.typeOptions[o])
|
|
||||||
)
|
|
||||||
),
|
|
||||||
])
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validates a single field against the field rules (duplicate names checked
// across allFields) plus its type-option rules. Returns rule-violation items.
export const validateField = allFields => field => {
  // ensure `field` itself participates in the duplicate-name count
  const fieldsIncludingThis = includes(field)(allFields)
    ? allFields
    : [...allFields, field]
  const rules = [
    ...fieldRules(fieldsIncludingThis),
    ...typeOptionsRules(field),
  ]
  return applyRuleSet(rules)(field)
}
|
|
||||||
|
|
||||||
export const validateAllFields = recordNode =>
|
|
||||||
$(recordNode.fields, [map(validateField(recordNode.fields)), flatten])
|
|
||||||
|
|
||||||
// Validates `field` against the template's existing fields and, when valid,
// appends it to recordTemplate.fields (mutates recordTemplate in place).
// Throws BadRequestError listing every validation message otherwise.
// NOTE: `field` itself is mutated when it has no label.
export const addField = (recordTemplate, field) => {
  // default the display label to the internal name
  if (isNothingOrEmpty(field.label)) {
    field.label = field.name
  }
  // validate as if the field were already part of the template, so the
  // duplicate-name rule sees it
  const validationMessages = validateField([...recordTemplate.fields, field])(
    field
  )
  if (validationMessages.length > 0) {
    const errors = map(m => m.error)(validationMessages)
    throw new BadRequestError(
      `${fieldErrors.AddFieldValidationFailed} ${errors.join(", ")}`
    )
  }
  recordTemplate.fields.push(field)
}
|
|
|
@ -1,12 +0,0 @@
|
||||||
import { appDefinitionFile } from "../common"
|
|
||||||
import { constructHierarchy } from "./createNodes"
|
|
||||||
|
|
||||||
// Loads the application definition JSON from the datastore and rehydrates
// its hierarchy (constructHierarchy re-attaches runtime node functions).
// Throws when the definition file does not exist.
export const getApplicationDefinition = datastore => async () => {
  const defExists = await datastore.exists(appDefinitionFile)
  if (!defExists) throw new Error("Application definition does not exist")

  const definition = await datastore.loadJson(appDefinitionFile)
  definition.hierarchy = constructHierarchy(definition.hierarchy)
  return definition
}
|
|
|
@ -1,3 +0,0 @@
|
||||||
// Loads the app's behaviour-source code from its well-known config path.
// Bug fix: the original awaited the load and then discarded the result, so
// callers always received undefined; return the loaded file instead.
export const getBehaviourSources = async datastore => {
  return await datastore.loadFile("/.config/behaviourSources.js")
}
|
|
|
@ -1,32 +0,0 @@
|
||||||
import { includes } from "lodash/fp"
|
|
||||||
|
|
||||||
// Translates a budibase index definition into CouchDB view source.
// NOTE(review): this function is an unfinished stub — it computes the pieces
// but never assembles or returns a view. Left functionally as-is, apart from
// the fix below.
export const getCouchDbView = (hierarchy, indexNode) => {
  const filter = codeAsFunction("filter", indexNode.filter)
  const map = codeAsFunction("map", indexNode.map)

  // Bug fix: was `const allowedIdsFilter` with no initialiser, which is a
  // SyntaxError — `const` declarations require a value. `let` parses.
  let allowedIdsFilter

  // with no map function the view would need to include the raw documents
  const includeDocs = !map

  const couchDbMap = ``
}
|
|
||||||
|
|
||||||
// Wraps a snippet of user-supplied index code in a named function body.
// Returns undefined for blank code. If the snippet has no explicit `return`,
// it is treated as a single expression: a trailing semicolon is stripped and
// it is wrapped in `return (...)`.
const codeAsFunction = (name, code) => {
  if ((code || "").trim().length === 0) return

  let safeCode

  if (includes("return ")(code)) {
    // snippet already returns — use verbatim
    safeCode = code
  } else {
    let trimmed = code.trim()
    // drop a trailing semicolon so the expression can be parenthesised
    trimmed = trimmed.endsWith(";")
      ? trimmed.substring(0, trimmed.length - 1)
      : trimmed
    safeCode = `return (${trimmed})`
  }

  // NOTE(review): the template's internal whitespace only affects the
  // readability of the generated source, not its behaviour
  return `function ${name}() {
  ${safeCode}
}`
}
|
|
|
@ -1,290 +0,0 @@
|
||||||
import {
|
|
||||||
find,
|
|
||||||
constant,
|
|
||||||
map,
|
|
||||||
last,
|
|
||||||
first,
|
|
||||||
split,
|
|
||||||
intersection,
|
|
||||||
take,
|
|
||||||
union,
|
|
||||||
includes,
|
|
||||||
filter,
|
|
||||||
some,
|
|
||||||
} from "lodash/fp"
|
|
||||||
import {
|
|
||||||
$,
|
|
||||||
switchCase,
|
|
||||||
isNothing,
|
|
||||||
isSomething,
|
|
||||||
defaultCase,
|
|
||||||
splitKey,
|
|
||||||
isNonEmptyString,
|
|
||||||
joinKey,
|
|
||||||
getHashCode,
|
|
||||||
} from "../common"
|
|
||||||
import { indexTypes } from "./indexes"
|
|
||||||
|
|
||||||
// Returns every node in the hierarchy as a flat array, depth-first from the
// root, visiting children, then indexes, then aggregateGroups of each node.
// The result is memoised ON the hierarchy object itself (appHierarchy is
// mutated with a getFlattenedHierarchy function); pass useCached = false to
// force a recomputation.
export const getFlattenedHierarchy = (appHierarchy, useCached = true) => {
  if (isSomething(appHierarchy.getFlattenedHierarchy) && useCached) {
    return appHierarchy.getFlattenedHierarchy()
  }

  // recursive worker: pushes currentNode, then recurses into all child-like
  // collections, accumulating into `flattened`
  const flattenHierarchy = (currentNode, flattened) => {
    flattened.push(currentNode)
    // leaf node: no children, indexes or aggregateGroups
    if (
      (!currentNode.children || currentNode.children.length === 0) &&
      (!currentNode.indexes || currentNode.indexes.length === 0) &&
      (!currentNode.aggregateGroups || currentNode.aggregateGroups.length === 0)
    ) {
      return flattened
    }

    // union that tolerates a missing (undefined) second list
    const unionIfAny = l2 => l1 => union(l1)(!l2 ? [] : l2)

    // gather children + indexes + aggregateGroups into one list
    const children = $(
      [],
      [
        unionIfAny(currentNode.children),
        unionIfAny(currentNode.indexes),
        unionIfAny(currentNode.aggregateGroups),
      ]
    )

    for (const child of children) {
      flattenHierarchy(child, flattened)
    }
    return flattened
  }

  // install the memo (note: mutates the caller's hierarchy object)
  appHierarchy.getFlattenedHierarchy = () => flattenHierarchy(appHierarchy, [])
  return appHierarchy.getFlattenedHierarchy()
}
|
|
||||||
|
|
||||||
export const getLastPartInKey = key => last(splitKey(key))
|
|
||||||
|
|
||||||
export const getNodesInPath = appHierarchy => key =>
|
|
||||||
$(appHierarchy, [
|
|
||||||
getFlattenedHierarchy,
|
|
||||||
filter(n => new RegExp(`${n.pathRegx()}`).test(key)),
|
|
||||||
])
|
|
||||||
|
|
||||||
export const getExactNodeForKey = appHierarchy => key =>
|
|
||||||
$(appHierarchy, [
|
|
||||||
getFlattenedHierarchy,
|
|
||||||
find(n => new RegExp(`${n.pathRegx()}$`).test(key)),
|
|
||||||
])
|
|
||||||
|
|
||||||
export const getNodeForCollectionPath = appHierarchy => collectionKey =>
|
|
||||||
$(appHierarchy, [
|
|
||||||
getFlattenedHierarchy,
|
|
||||||
find(
|
|
||||||
n =>
|
|
||||||
isCollectionRecord(n) &&
|
|
||||||
new RegExp(`${n.collectionPathRegx()}$`).test(collectionKey)
|
|
||||||
),
|
|
||||||
])
|
|
||||||
|
|
||||||
export const hasMatchingAncestor = ancestorPredicate => decendantNode =>
|
|
||||||
switchCase(
|
|
||||||
[node => isNothing(node.parent()), constant(false)],
|
|
||||||
|
|
||||||
[node => ancestorPredicate(node.parent()), constant(true)],
|
|
||||||
|
|
||||||
[defaultCase, node => hasMatchingAncestor(ancestorPredicate)(node.parent())]
|
|
||||||
)(decendantNode)
|
|
||||||
|
|
||||||
export const getNode = (appHierarchy, nodeKey) =>
|
|
||||||
$(appHierarchy, [
|
|
||||||
getFlattenedHierarchy,
|
|
||||||
find(
|
|
||||||
n =>
|
|
||||||
n.nodeKey() === nodeKey ||
|
|
||||||
(isCollectionRecord(n) && n.collectionNodeKey() === nodeKey)
|
|
||||||
),
|
|
||||||
])
|
|
||||||
|
|
||||||
export const getCollectionNode = (appHierarchy, nodeKey) =>
|
|
||||||
$(appHierarchy, [
|
|
||||||
getFlattenedHierarchy,
|
|
||||||
find(n => isCollectionRecord(n) && n.collectionNodeKey() === nodeKey),
|
|
||||||
])
|
|
||||||
|
|
||||||
export const getNodeByKeyOrNodeKey = (appHierarchy, keyOrNodeKey) => {
|
|
||||||
const nodeByKey = getExactNodeForKey(appHierarchy)(keyOrNodeKey)
|
|
||||||
return isNothing(nodeByKey) ? getNode(appHierarchy, keyOrNodeKey) : nodeByKey
|
|
||||||
}
|
|
||||||
|
|
||||||
export const getCollectionNodeByKeyOrNodeKey = (appHierarchy, keyOrNodeKey) => {
|
|
||||||
const nodeByKey = getNodeForCollectionPath(appHierarchy)(keyOrNodeKey)
|
|
||||||
return isNothing(nodeByKey)
|
|
||||||
? getCollectionNode(appHierarchy, keyOrNodeKey)
|
|
||||||
: nodeByKey
|
|
||||||
}
|
|
||||||
|
|
||||||
export const isNode = (appHierarchy, key) =>
|
|
||||||
isSomething(getExactNodeForKey(appHierarchy)(key))
|
|
||||||
|
|
||||||
export const getActualKeyOfParent = (parentNodeKey, actualChildKey) =>
|
|
||||||
$(actualChildKey, [
|
|
||||||
splitKey,
|
|
||||||
take(splitKey(parentNodeKey).length),
|
|
||||||
ks => joinKey(...ks),
|
|
||||||
])
|
|
||||||
|
|
||||||
export const getParentKey = key => {
|
|
||||||
return $(key, [splitKey, take(splitKey(key).length - 1), joinKey])
|
|
||||||
}
|
|
||||||
|
|
||||||
export const isKeyAncestorOf = ancestorKey => decendantNode =>
|
|
||||||
hasMatchingAncestor(p => p.nodeKey() === ancestorKey)(decendantNode)
|
|
||||||
|
|
||||||
export const hasNoMatchingAncestors = parentPredicate => node =>
|
|
||||||
!hasMatchingAncestor(parentPredicate)(node)
|
|
||||||
|
|
||||||
// Finds a field definition on a record node by name (undefined if absent).
// Fix: use strict equality — the loose `==` invited accidental coercion.
export const findField = (recordNode, fieldName) =>
  find(f => f.name === fieldName)(recordNode.fields)
|
|
||||||
|
|
||||||
export const isAncestor = decendant => ancestor =>
|
|
||||||
isKeyAncestorOf(ancestor.nodeKey())(decendant)
|
|
||||||
|
|
||||||
export const isDecendant = ancestor => decendant =>
|
|
||||||
isAncestor(decendant)(ancestor)
|
|
||||||
|
|
||||||
export const getRecordNodeId = recordKey =>
|
|
||||||
$(recordKey, [splitKey, last, getRecordNodeIdFromId])
|
|
||||||
|
|
||||||
// Extracts the numeric node id from a record id of the form "<nodeId>-...".
// Fix: always pass a radix to parseInt — without it, inputs with unusual
// prefixes can be parsed in an unintended base.
export const getRecordNodeIdFromId = recordId =>
  $(recordId, [split("-"), first, s => parseInt(s, 10)])
|
|
||||||
|
|
||||||
export const getRecordNodeById = (hierarchy, recordId) =>
|
|
||||||
$(hierarchy, [
|
|
||||||
getFlattenedHierarchy,
|
|
||||||
find(n => isModel(n) && n.nodeId === getRecordNodeIdFromId(recordId)),
|
|
||||||
])
|
|
||||||
|
|
||||||
export const recordNodeIdIsAllowed = indexNode => nodeId =>
|
|
||||||
indexNode.allowedModelNodeIds.length === 0 ||
|
|
||||||
includes(nodeId)(indexNode.allowedModelNodeIds)
|
|
||||||
|
|
||||||
export const recordNodeIsAllowed = indexNode => recordNode =>
|
|
||||||
recordNodeIdIsAllowed(indexNode)(recordNode.nodeId)
|
|
||||||
|
|
||||||
export const getAllowedRecordNodesForIndex = (appHierarchy, indexNode) => {
|
|
||||||
const recordNodes = $(appHierarchy, [getFlattenedHierarchy, filter(isModel)])
|
|
||||||
|
|
||||||
if (isGlobalIndex(indexNode)) {
|
|
||||||
return $(recordNodes, [filter(recordNodeIsAllowed(indexNode))])
|
|
||||||
}
|
|
||||||
|
|
||||||
if (isAncestorIndex(indexNode)) {
|
|
||||||
return $(recordNodes, [
|
|
||||||
filter(isDecendant(indexNode.parent())),
|
|
||||||
filter(recordNodeIsAllowed(indexNode)),
|
|
||||||
])
|
|
||||||
}
|
|
||||||
|
|
||||||
if (isReferenceIndex(indexNode)) {
|
|
||||||
return $(recordNodes, [
|
|
||||||
filter(n => some(fieldReversesReferenceToIndex(indexNode))(n.fields)),
|
|
||||||
])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// All indexes that must be updated when a record of `recordNode` changes:
// ancestor indexes that allow this node, plus reference indexes reversed
// into by one of the record's reference fields.
export const getDependantIndexes = (hierarchy, recordNode) => {
  const indexes = $(hierarchy, [getFlattenedHierarchy, filter(isIndex)])

  const ancestorIndexes = indexes.filter(
    i => isAncestorIndex(i) && recordNodeIsAllowed(i)(recordNode)
  )
  const referenceIndexes = indexes.filter(
    i =>
      isReferenceIndex(i) &&
      some(fieldReversesReferenceToIndex(i))(recordNode.fields)
  )
  return [...ancestorIndexes, ...referenceIndexes]
}
|
|
||||||
|
|
||||||
export const getNodeFromNodeKeyHash = hierarchy => hash =>
|
|
||||||
$(hierarchy, [
|
|
||||||
getFlattenedHierarchy,
|
|
||||||
find(n => getHashCode(n.nodeKey()) === hash),
|
|
||||||
])
|
|
||||||
|
|
||||||
export const isModel = node => isSomething(node) && node.type === "record"
|
|
||||||
export const isSingleRecord = node => isModel(node) && node.isSingle
|
|
||||||
export const isCollectionRecord = node => isModel(node) && !node.isSingle
|
|
||||||
export const isIndex = node => isSomething(node) && node.type === "index"
|
|
||||||
export const isaggregateGroup = node =>
|
|
||||||
isSomething(node) && node.type === "aggregateGroup"
|
|
||||||
export const isShardedIndex = node =>
|
|
||||||
isIndex(node) && isNonEmptyString(node.getShardName)
|
|
||||||
export const isRoot = node => isSomething(node) && node.isRoot()
|
|
||||||
export const findRoot = node => (isRoot(node) ? node : findRoot(node.parent()))
|
|
||||||
export const isDecendantOfARecord = hasMatchingAncestor(isModel)
|
|
||||||
export const isGlobalIndex = node => isIndex(node) && isRoot(node.parent())
|
|
||||||
export const isReferenceIndex = node =>
|
|
||||||
isIndex(node) && node.indexType === indexTypes.reference
|
|
||||||
export const isAncestorIndex = node =>
|
|
||||||
isIndex(node) && node.indexType === indexTypes.ancestor
|
|
||||||
export const isTopLevelRecord = node => isRoot(node.parent()) && isModel(node)
|
|
||||||
export const isTopLevelIndex = node => isRoot(node.parent()) && isIndex(node)
|
|
||||||
export const getCollectionKey = recordKey =>
|
|
||||||
$(recordKey, [splitKey, parts => joinKey(parts.slice(0, parts.length - 1))])
|
|
||||||
export const fieldReversesReferenceToNode = node => field =>
|
|
||||||
field.type === "reference" &&
|
|
||||||
intersection(field.typeOptions.reverseIndexNodeKeys)(
|
|
||||||
map(i => i.nodeKey())(node.indexes)
|
|
||||||
).length > 0
|
|
||||||
|
|
||||||
export const fieldReversesReferenceToIndex = indexNode => field =>
|
|
||||||
field.type === "reference" &&
|
|
||||||
intersection(field.typeOptions.reverseIndexNodeKeys)([indexNode.nodeKey()])
|
|
||||||
.length > 0
|
|
||||||
|
|
||||||
export const nodeNameFromNodeKey = (hierarchy, nodeKey) => {
|
|
||||||
const node = getNode(hierarchy, nodeKey)
|
|
||||||
return node ? node.nodeName() : ""
|
|
||||||
}
|
|
||||||
|
|
||||||
export default {
|
|
||||||
getLastPartInKey,
|
|
||||||
getNodesInPath,
|
|
||||||
getExactNodeForKey,
|
|
||||||
hasMatchingAncestor,
|
|
||||||
getNode,
|
|
||||||
getNodeByKeyOrNodeKey,
|
|
||||||
isNode,
|
|
||||||
getActualKeyOfParent,
|
|
||||||
getParentKey,
|
|
||||||
isKeyAncestorOf,
|
|
||||||
hasNoMatchingAncestors,
|
|
||||||
findField,
|
|
||||||
isAncestor,
|
|
||||||
isDecendant,
|
|
||||||
getRecordNodeId,
|
|
||||||
getRecordNodeIdFromId,
|
|
||||||
getRecordNodeById,
|
|
||||||
recordNodeIdIsAllowed,
|
|
||||||
recordNodeIsAllowed,
|
|
||||||
getAllowedRecordNodesForIndex,
|
|
||||||
getNodeFromNodeKeyHash,
|
|
||||||
isModel,
|
|
||||||
isCollectionRecord,
|
|
||||||
isIndex,
|
|
||||||
isaggregateGroup,
|
|
||||||
isShardedIndex,
|
|
||||||
isRoot,
|
|
||||||
isDecendantOfARecord,
|
|
||||||
isGlobalIndex,
|
|
||||||
isReferenceIndex,
|
|
||||||
isAncestorIndex,
|
|
||||||
fieldReversesReferenceToNode,
|
|
||||||
fieldReversesReferenceToIndex,
|
|
||||||
getFlattenedHierarchy,
|
|
||||||
isTopLevelIndex,
|
|
||||||
isTopLevelRecord,
|
|
||||||
nodeNameFromNodeKey,
|
|
||||||
}
|
|
|
@ -1,68 +0,0 @@
|
||||||
import {
|
|
||||||
getNewRootLevel,
|
|
||||||
getNewModelTemplate,
|
|
||||||
getNewIndexTemplate,
|
|
||||||
createNodeErrors,
|
|
||||||
constructHierarchy,
|
|
||||||
getNewAggregateGroupTemplate,
|
|
||||||
getNewSingleRecordTemplate,
|
|
||||||
getNewAggregateTemplate,
|
|
||||||
constructNode,
|
|
||||||
} from "./createNodes"
|
|
||||||
import { getNewField, validateField, addField, fieldErrors } from "./fields"
|
|
||||||
import {
|
|
||||||
getNewRecordValidationRule,
|
|
||||||
commonRecordValidationRules,
|
|
||||||
addRecordValidationRule,
|
|
||||||
} from "./recordValidationRules"
|
|
||||||
import { createAction, createTrigger } from "./createActions"
|
|
||||||
import {
|
|
||||||
validateTriggers,
|
|
||||||
validateTrigger,
|
|
||||||
validateNode,
|
|
||||||
validateActions,
|
|
||||||
validateAll,
|
|
||||||
} from "./validate"
|
|
||||||
import { getApplicationDefinition } from "./getApplicationDefinition"
|
|
||||||
import { saveApplicationHierarchy } from "./saveApplicationHierarchy"
|
|
||||||
import { saveActionsAndTriggers } from "./saveActionsAndTriggers"
|
|
||||||
import { all } from "../types"
|
|
||||||
import { getBehaviourSources } from "./getBehaviourSources"
|
|
||||||
import { upgradeData } from "./upgradeData"
|
|
||||||
|
|
||||||
const api = app => ({
|
|
||||||
getApplicationDefinition: getApplicationDefinition(app.datastore),
|
|
||||||
saveApplicationHierarchy: saveApplicationHierarchy(app),
|
|
||||||
saveActionsAndTriggers: saveActionsAndTriggers(app),
|
|
||||||
getBehaviourSources: () => getBehaviourSources(app.datastore),
|
|
||||||
getNewRootLevel,
|
|
||||||
constructNode,
|
|
||||||
getNewIndexTemplate,
|
|
||||||
getNewModelTemplate,
|
|
||||||
getNewField,
|
|
||||||
validateField,
|
|
||||||
addField,
|
|
||||||
fieldErrors,
|
|
||||||
getNewRecordValidationRule,
|
|
||||||
commonRecordValidationRules,
|
|
||||||
addRecordValidationRule,
|
|
||||||
createAction,
|
|
||||||
createTrigger,
|
|
||||||
validateActions,
|
|
||||||
validateTrigger,
|
|
||||||
getNewAggregateGroupTemplate,
|
|
||||||
getNewAggregateTemplate,
|
|
||||||
constructHierarchy,
|
|
||||||
getNewSingleRecordTemplate,
|
|
||||||
allTypes: all,
|
|
||||||
validateNode,
|
|
||||||
validateAll,
|
|
||||||
validateTriggers,
|
|
||||||
upgradeData: upgradeData(app),
|
|
||||||
})
|
|
||||||
|
|
||||||
export const getTemplateApi = app => api(app)
|
|
||||||
|
|
||||||
export const errors = createNodeErrors
|
|
||||||
|
|
||||||
export default getTemplateApi
|
|
|
@ -1,55 +0,0 @@
|
||||||
import { map, isEmpty, countBy, flatten, includes, join, keys } from "lodash/fp"
|
|
||||||
import { } from "lodash"
|
|
||||||
import { applyRuleSet, makerule } from "../common/validationCommon"
|
|
||||||
import { compileFilter, compileMap } from "../indexing/evaluate"
|
|
||||||
import { isNonEmptyString, executesWithoutException, $ } from "../common"
|
|
||||||
import { isModel } from "./hierarchy"
|
|
||||||
|
|
||||||
export const indexTypes = { reference: "reference", ancestor: "ancestor" }
|
|
||||||
|
|
||||||
export const indexRuleSet = [
|
|
||||||
makerule("map", "index has no map function", index =>
|
|
||||||
isNonEmptyString(index.map)
|
|
||||||
),
|
|
||||||
makerule(
|
|
||||||
"map",
|
|
||||||
"index's map function does not compile",
|
|
||||||
index =>
|
|
||||||
!isNonEmptyString(index.map) ||
|
|
||||||
executesWithoutException(() => compileMap(index))
|
|
||||||
),
|
|
||||||
makerule(
|
|
||||||
"filter",
|
|
||||||
"index's filter function does not compile",
|
|
||||||
index =>
|
|
||||||
!isNonEmptyString(index.filter) ||
|
|
||||||
executesWithoutException(() => compileFilter(index))
|
|
||||||
),
|
|
||||||
makerule("name", "must declare a name for index", index =>
|
|
||||||
isNonEmptyString(index.name)
|
|
||||||
),
|
|
||||||
makerule(
|
|
||||||
"name",
|
|
||||||
"there is a duplicate named index on this node",
|
|
||||||
index =>
|
|
||||||
isEmpty(index.name) ||
|
|
||||||
countBy("name")(index.parent().indexes)[index.name] === 1
|
|
||||||
),
|
|
||||||
makerule(
|
|
||||||
"indexType",
|
|
||||||
"reference index may only exist on a record node",
|
|
||||||
index =>
|
|
||||||
isModel(index.parent()) || index.indexType !== indexTypes.reference
|
|
||||||
),
|
|
||||||
makerule(
|
|
||||||
"indexType",
|
|
||||||
`index type must be one of: ${join(", ")(keys(indexTypes))}`,
|
|
||||||
index => includes(index.indexType)(keys(indexTypes))
|
|
||||||
),
|
|
||||||
]
|
|
||||||
|
|
||||||
// Validates an index node against the index rule set, returning violations.
// Bug fix: `indexRuleSet` is an array, but the original invoked it as a
// function (`indexRuleSet(allReferenceIndexesOnNode)`), which throws a
// TypeError at runtime. The parameter is kept for caller compatibility,
// though the rule set does not currently use it.
export const validateIndex = (index, allReferenceIndexesOnNode) =>
  applyRuleSet(indexRuleSet)(index)
|
|
||||||
|
|
||||||
export const validateAllIndexes = node =>
|
|
||||||
$(node.indexes, [map(i => validateIndex(i, node.indexes)), flatten])
|
|
|
@ -1,27 +0,0 @@
|
||||||
import { getAllIdsIterator } from "../indexing/allIds"
|
|
||||||
import { getRecordInfo } from "../recordApi/recordInfo"
|
|
||||||
import { isTopLevelIndex } from "./hierarchy"
|
|
||||||
import { joinKey } from "../common"
|
|
||||||
import { initialiseIndex } from "../indexing/initialiseIndex"
|
|
||||||
|
|
||||||
export const initialiseNewIndex = async (app, indexNode) => {
|
|
||||||
if (isTopLevelIndex(indexNode)) {
|
|
||||||
await initialiseIndex(app.datastore, "/", indexNode)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
const iterate = await getAllIdsIterator(app)(indexNode.parent().nodeKey())
|
|
||||||
let iterateResult = await iterate()
|
|
||||||
while (!iterateResult.done) {
|
|
||||||
const { result } = iterateResult
|
|
||||||
for (const id of result.ids) {
|
|
||||||
const recordKey = joinKey(result.collectionKey, id)
|
|
||||||
await initialiseIndex(
|
|
||||||
app.datastore,
|
|
||||||
getRecordInfo(app.hierarchy, recordKey).dir,
|
|
||||||
indexNode
|
|
||||||
)
|
|
||||||
}
|
|
||||||
iterateResult = await iterate()
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,46 +0,0 @@
|
||||||
import { isNumber, isBoolean, defaultCase } from "lodash/fp"
|
|
||||||
import { switchCase } from "../common"
|
|
||||||
|
|
||||||
// Builds a plain record-validation-rule object:
//  - invalidFields: field names the rule reports against
//  - messageWhenInvalid: user-facing message on failure
//  - expressionWhenValid: expression (as source text) that must evaluate true
export const getNewRecordValidationRule = (
  invalidFields,
  messageWhenInvalid,
  expressionWhenValid
) => {
  const rule = {
    invalidFields,
    messageWhenInvalid,
    expressionWhenValid,
  }
  return rule
}
|
|
||||||
|
|
||||||
const getStaticValue = switchCase(
|
|
||||||
[isNumber, v => v.toString()],
|
|
||||||
[isBoolean, v => v.toString()],
|
|
||||||
[defaultCase, v => `'${v}'`]
|
|
||||||
)
|
|
||||||
|
|
||||||
// Prebuilt factories for common record validation rules. Each returns a rule
// object whose expression is evaluated against `record` at validation time.
export const commonRecordValidationRules = {
  // record must have a non-empty value for the given field
  fieldNotEmpty: fieldName =>
    getNewRecordValidationRule(
      [fieldName],
      `${fieldName} is empty`,
      `!_.isEmpty(record['${fieldName}'])`
    ),

  // field value must fall within [min, max] (inclusive)
  fieldBetween: (fieldName, min, max) =>
    getNewRecordValidationRule(
      [fieldName],
      `${fieldName} must be between ${min.toString()} and ${max.toString()}`,
      `record['${fieldName}'] >= ${getStaticValue(
        min
      )} && record['${fieldName}'] <= ${getStaticValue(max)} `
    ),

  // field value must be >= min. Bug fix: the message previously claimed an
  // upper bound ("and ${max}") that the expression never checked — the
  // message now matches the actual rule. `max` is still accepted (unused)
  // so existing callers keep working.
  fieldGreaterThan: (fieldName, min, max) =>
    getNewRecordValidationRule(
      [fieldName],
      `${fieldName} must be greater than ${min.toString()}`,
      `record['${fieldName}'] >= ${getStaticValue(min)} `
    ),
}
|
|
||||||
|
|
||||||
// Appends a validation rule to a record node, mutating it in place.
// Returns the new rule count (the return value of Array.prototype.push).
export const addRecordValidationRule = recordNode => rule => {
  return recordNode.validationRules.push(rule)
}
|
|
|
@ -1,52 +0,0 @@
|
||||||
import { join } from "lodash"
|
|
||||||
import { map } from "lodash/fp"
|
|
||||||
import { appDefinitionFile } from "../common"
|
|
||||||
import { validateTriggers, validateActions } from "./validate"
|
|
||||||
import { apiWrapper } from "../common/apiWrapper"
|
|
||||||
import { events } from "../common/events"
|
|
||||||
import { permission } from "../authApi/permissions"
|
|
||||||
import { BadRequestError } from "../common/errors"
|
|
||||||
|
|
||||||
export const saveActionsAndTriggers = app => async (actions, triggers) =>
|
|
||||||
apiWrapper(
|
|
||||||
app,
|
|
||||||
events.templateApi.saveActionsAndTriggers,
|
|
||||||
permission.writeTemplates.isAuthorized,
|
|
||||||
{ actions, triggers },
|
|
||||||
_saveActionsAndTriggers,
|
|
||||||
app.datastore,
|
|
||||||
actions,
|
|
||||||
triggers
|
|
||||||
)
|
|
||||||
|
|
||||||
// Persists actions and triggers into the application definition file, after
// validating both. Throws BadRequestError when validation fails or when the
// app definition file does not exist yet.
export const _saveActionsAndTriggers = async (datastore, actions, triggers) => {
  if (await datastore.exists(appDefinitionFile)) {
    const appDefinition = await datastore.loadJson(appDefinitionFile)
    appDefinition.actions = actions
    appDefinition.triggers = triggers

    // collect just the error strings from the validation results
    const actionValidErrs = map(e => e.error)(validateActions(actions))

    if (actionValidErrs.length > 0) {
      throw new BadRequestError(
        `Actions are invalid: ${join(actionValidErrs, ", ")}`
      )
    }

    // triggers are validated against the actions they reference
    const triggerValidErrs = map(e => e.error)(
      validateTriggers(triggers, actions)
    )

    if (triggerValidErrs.length > 0) {
      throw new BadRequestError(
        `Triggers are invalid: ${join(triggerValidErrs, ", ")}`
      )
    }

    // actions/triggers were assigned onto the in-memory definition above,
    // but the file is only written here, after both validations pass
    await datastore.updateJson(appDefinitionFile, appDefinition)
  } else {
    throw new BadRequestError(
      "Cannot save actions: Application definition does not exist"
    )
  }
}
|
|
|
@ -1,45 +0,0 @@
|
||||||
import { join } from "lodash"
|
|
||||||
import { permission } from "../authApi/permissions"
|
|
||||||
import { appDefinitionFile } from "../common"
|
|
||||||
import { validateAll } from "./validate"
|
|
||||||
import { apiWrapper } from "../common/apiWrapper"
|
|
||||||
import { events } from "../common/events"
|
|
||||||
|
|
||||||
export const saveApplicationHierarchy = app => async hierarchy =>
|
|
||||||
apiWrapper(
|
|
||||||
app,
|
|
||||||
events.templateApi.saveApplicationHierarchy,
|
|
||||||
permission.writeTemplates.isAuthorized,
|
|
||||||
{ hierarchy },
|
|
||||||
_saveApplicationHierarchy,
|
|
||||||
app.datastore,
|
|
||||||
hierarchy
|
|
||||||
)
|
|
||||||
|
|
||||||
// Validates and persists a hierarchy into the application definition file.
// Creates the definition (and the /.config folder) on first save; updates
// the existing definition otherwise. Throws when validation fails.
export const _saveApplicationHierarchy = async (datastore, hierarchy) => {
  const validationErrors = await validateAll(hierarchy)
  if (validationErrors.length > 0) {
    throw new Error(
      `Hierarchy is invalid: ${join(
        validationErrors.map(
          e => `${e.item.nodeKey ? e.item.nodeKey() : ""} : ${e.error}`
        ),
        ","
      )}`
    )
  }

  // strip the memoised flattened-hierarchy function — it is a runtime cache
  // installed by getFlattenedHierarchy and must not be serialised to JSON
  if (hierarchy.getFlattenedHierarchy) {
    delete hierarchy.getFlattenedHierarchy
  }

  if (await datastore.exists(appDefinitionFile)) {
    // update hierarchy on the existing definition, preserving actions/triggers
    const appDefinition = await datastore.loadJson(appDefinitionFile)
    appDefinition.hierarchy = hierarchy
    await datastore.updateJson(appDefinitionFile, appDefinition)
  } else {
    // first save: bootstrap the config folder and an empty definition
    await datastore.createFolder("/.config")
    const appDefinition = { actions: [], triggers: [], hierarchy }
    await datastore.createJson(appDefinitionFile, appDefinition)
  }
}
|
|
|
@ -1,208 +0,0 @@
|
||||||
import { diffHierarchy, HierarchyChangeTypes } from "./diffHierarchy"
|
|
||||||
import { $, switchCase } from "../common"
|
|
||||||
import {
|
|
||||||
differenceBy,
|
|
||||||
isEqual,
|
|
||||||
some,
|
|
||||||
map,
|
|
||||||
filter,
|
|
||||||
uniqBy,
|
|
||||||
flatten,
|
|
||||||
} from "lodash/fp"
|
|
||||||
import {
|
|
||||||
findRoot,
|
|
||||||
getDependantIndexes,
|
|
||||||
isTopLevelRecord,
|
|
||||||
isAncestorIndex,
|
|
||||||
} from "./hierarchy"
|
|
||||||
import { generateSchema } from "../indexing/indexSchemaCreator"
|
|
||||||
import { _buildIndex } from "../indexApi/buildIndex"
|
|
||||||
import { constructHierarchy } from "./createNodes"
|
|
||||||
import { deleteAllRecordsForNode } from "./deleteAllRecordsForNode"
|
|
||||||
import { deleteAllIndexFilesForNode } from "./deleteAllIndexFilesForNode"
|
|
||||||
import { cloneApp } from "../appInitialise/cloneApp"
|
|
||||||
import { initialiseData } from "../appInitialise/initialiseData"
|
|
||||||
import { initialiseChildrenForNode } from "../recordApi/initialiseChildren"
|
|
||||||
import { initialiseNewIndex } from "./initialiseNewIndex"
|
|
||||||
import { _saveApplicationHierarchy } from "../templateApi/saveApplicationHierarchy"
|
|
||||||
import { getApplicationDefinition } from "../templateApi/getApplicationDefinition"
|
|
||||||
|
|
||||||
/**
 * Upgrades an application's stored data to match a new hierarchy.
 * Loads the currently-persisted hierarchy, diffs it against `newHierarchy`,
 * derives the set of change actions, runs them, then persists the new
 * hierarchy. Does nothing when the diff yields no actions.
 *
 * @param {object} app - application object (datastore, hierarchy, ...)
 * @returns {Function} async newHierarchy => undefined
 */
export const upgradeData = app => async newHierarchy => {
  // diff against the hierarchy that is actually persisted, not app's copy
  const currentAppDef = await getApplicationDefinition(app.datastore)()
  app.hierarchy = currentAppDef.hierarchy

  newHierarchy = constructHierarchy(newHierarchy)

  const changeActions = gatherChangeActions(
    diffHierarchy(app.hierarchy, newHierarchy)
  )
  if (changeActions.length === 0) return

  const newApp =
    newHierarchy &&
    cloneApp(app, { hierarchy: newHierarchy })

  await doUpgrade(app, newApp, changeActions)
  await _saveApplicationHierarchy(newApp.datastore, newHierarchy)
}
|
|
||||||
|
|
||||||
// Maps every diff entry to its action(s), flattens the result and
// de-duplicates on compareKey so one action runs per logical change.
const gatherChangeActions = diff =>
  uniqBy(a => a.compareKey)(flatten(map(actionForChange)(diff)))
|
|
||||||
|
|
||||||
// Runs each change action strictly in order, awaiting one before the next —
// later actions may depend on files written by earlier ones.
const doUpgrade = async (oldApp, newApp, changeActions) => {
  for (let i = 0; i < changeActions.length; i++) {
    const current = changeActions[i]
    await current.run(oldApp, newApp, current.diff)
  }
}
|
|
||||||
|
|
||||||
// Maps a single hierarchy diff to the action(s) that will apply it.
// Each predicate matches exactly one HierarchyChangeTypes value; the
// predicate/handler pairs are evaluated in order by switchCase.
const actionForChange = diff => {
  const cases = [
    [isChangeType(HierarchyChangeTypes.recordCreated), recordCreatedAction],
    [isChangeType(HierarchyChangeTypes.recordDeleted), deleteRecordsAction],
    [
      isChangeType(HierarchyChangeTypes.recordFieldsChanged),
      rebuildAffectedIndexesAction,
    ],
    [isChangeType(HierarchyChangeTypes.recordRenamed), renameRecordAction],
    [
      isChangeType(HierarchyChangeTypes.recordEstimatedRecordTypeChanged),
      reshardRecordsAction,
    ],
    [isChangeType(HierarchyChangeTypes.indexCreated), newIndexAction],
    [isChangeType(HierarchyChangeTypes.indexDeleted), deleteIndexAction],
    [isChangeType(HierarchyChangeTypes.indexChanged), rebuildIndexAction],
  ]
  return switchCase(...cases)(diff)
}
|
|
||||||
|
|
||||||
// Predicate factory: matches a change whose type equals the given change type.
const isChangeType = changeType => ({ type }) => type === changeType
|
|
||||||
|
|
||||||
// Builds one upgrade action record. `compareKey` is what gatherChangeActions
// de-duplicates on; `run` is the async worker invoked by doUpgrade.
const action = (diff, compareKey, run) => {
  return { diff, compareKey, run }
}
|
|
||||||
|
|
||||||
// Reshard actions are keyed on the old node so repeat diffs collapse to one.
const reshardRecordsAction = diff => {
  const key = `reshardRecords-${diff.oldNode.nodeKey()}`
  return [action(diff, key, runReshardRecords)]
}
|
|
||||||
|
|
||||||
// A changed index is simply rebuilt, keyed on the new node.
const rebuildIndexAction = diff => {
  const key = `rebuildIndex-${diff.newNode.nodeKey()}`
  return [action(diff, key, runRebuildIndex)]
}
|
|
||||||
|
|
||||||
// A brand new index: ancestor indexes must be rebuilt from existing records,
// while other index kinds only need their files initialised.
const newIndexAction = diff => {
  const key = diff.newNode.nodeKey()
  return isAncestorIndex(diff.newNode)
    ? [action(diff, `rebuildIndex-${key}`, runRebuildIndex)]
    : [action(diff, `newIndex-${key}`, runNewIndex)]
}
|
|
||||||
|
|
||||||
// Deleting an index removes its stored files, keyed on the old node.
const deleteIndexAction = diff => {
  const key = `deleteIndex-${diff.oldNode.nodeKey()}`
  return [action(diff, key, runDeleteIndex)]
}
|
|
||||||
|
|
||||||
// Deleting a record node deletes all of its records, keyed on the old node.
const deleteRecordsAction = diff => {
  const key = `deleteRecords-${diff.oldNode.nodeKey()}`
  return [action(diff, key, runDeleteRecords)]
}
|
|
||||||
|
|
||||||
// Rename action — note runRenameRecord is currently a no-op (see below).
const renameRecordAction = diff => {
  const key = `renameRecords-${diff.oldNode.nodeKey()}`
  return [action(diff, key, runRenameRecord)]
}
|
|
||||||
|
|
||||||
// A new record node: the top-level record initialises the whole data area,
// while child records only need their collection folders created.
const recordCreatedAction = diff => {
  if (isTopLevelRecord(diff.newNode)) {
    return [action(diff, `initialiseRoot`, runInitialiseRoot)]
  }
  const key = `initialiseChildRecord-${diff.newNode.nodeKey()}`
  return [action(diff, key, runInitialiseChildRecord)]
}
|
|
||||||
|
|
||||||
/**
 * When a record node's fields change, determines which dependant indexes are
 * affected and emits a rebuild action for each.
 *
 * An index counts as affected when:
 *  - its generated schema differs between the old and new hierarchies, or
 *  - its `filter` or `getShardName` expression references an added/removed
 *    field (expressions reference record fields as `record.<name>`).
 *
 * BUG FIX: the expression checks previously read `indexes.filter` and
 * `indexes.getShardName` — properties of the indexes ARRAY, so
 * `indexes.filter` was Array.prototype.filter (a function with no `indexOf`),
 * throwing a TypeError at runtime. They must read the expressions of the
 * individual index node `i` under test.
 *
 * @param {object} diff - { oldNode, newNode } record nodes
 * @returns {Array} one rebuildIndex action per affected index
 */
const rebuildAffectedIndexesAction = diff => {
  const newHierarchy = findRoot(diff.newNode)
  const oldHierarchy = findRoot(diff.oldNode)
  const indexes = getDependantIndexes(newHierarchy, diff.newNode)

  // names of every field added or removed by this change
  const changedFields = (() => {
    const addedFields = differenceBy(f => f.name)(diff.oldNode.fields)(
      diff.newNode.fields
    )

    const removedFields = differenceBy(f => f.name)(diff.newNode.fields)(
      diff.oldNode.fields
    )

    return map(f => f.name)([...addedFields, ...removedFields])
  })()

  const isIndexAffected = i => {
    if (
      !isEqual(generateSchema(oldHierarchy, i), generateSchema(newHierarchy, i))
    )
      return true

    // was: indexes.filter — see BUG FIX note in the doc comment above
    if (some(f => i.filter.indexOf(`record.${f}`) > -1)(changedFields))
      return true

    // was: indexes.getShardName — see BUG FIX note above
    if (
      some(f => i.getShardName.indexOf(`record.${f}`) > -1)(changedFields)
    )
      return true

    return false
  }

  return $(indexes, [
    filter(isIndexAffected),
    map(i =>
      action({ newNode: i }, `rebuildIndex-${i.nodeKey()}`, runRebuildIndex)
    ),
  ])
}
|
|
||||||
|
|
||||||
// Placeholder worker: resharding existing records is not implemented yet,
// so this always rejects.
const runReshardRecords = async change => {
  const message = "Resharding of records is not supported yet"
  throw new Error(message)
}
|
|
||||||
|
|
||||||
// Rebuilds an index from scratch in the upgraded app.
const runRebuildIndex = async (_, newApp, diff) => {
  const indexKey = diff.newNode.nodeKey()
  await _buildIndex(newApp, indexKey)
}
|
|
||||||
|
|
||||||
// Removes every stored index file for the deleted index node (old app).
const runDeleteIndex = async (oldApp, _, diff) => {
  const node = diff.oldNode
  await deleteAllIndexFilesForNode(oldApp, node)
}
|
|
||||||
|
|
||||||
// Deletes every record belonging to the removed record node (old app).
const runDeleteRecords = async (oldApp, _, diff) => {
  const node = diff.oldNode
  await deleteAllRecordsForNode(oldApp, node)
}
|
|
||||||
|
|
||||||
// Initialises the files for a newly added (non-ancestor) index.
const runNewIndex = async (_, newApp, diff) => {
  const node = diff.newNode
  await initialiseNewIndex(newApp, node)
}
|
|
||||||
|
|
||||||
// Intentionally a no-op: renaming is going to be disallowed in the builder —
// once a collection key is set, it is permanent.
const runRenameRecord = change => {}
|
|
||||||
|
|
||||||
// First (top-level) record created: initialise the app's base data area.
const runInitialiseRoot = async (_, newApp) => {
  await initialiseData(newApp.datastore, newApp)
}
|
|
||||||
|
|
||||||
// Creates the collection folders for a newly added child record node.
const runInitialiseChildRecord = async (_, newApp, diff) => {
  await initialiseChildrenForNode(newApp, diff.newNode)
}
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue