|
|
|
import { union, reduce, isUndefined, cloneDeep, split, toNumber, isArray as isArray$1, filter, isNull, join as join$1, isNaN as isNaN$1, isEmpty, constant, some, includes, isInteger, isDate, isString, map, keys, isFunction, countBy, last, find, take, first, intersection, mapValues, has, isBoolean, isNumber, isObjectLike, isObject, clone, values, keyBy, orderBy, flatten, concat, reverse, difference, merge as merge$1, each, takeRight as takeRight$1, max, defaultCase as defaultCase$1, uniqBy, every, uniqWith, groupBy, pull, differenceBy, intersectionBy, isEqual } from 'lodash/fp';
|
|
|
|
import { generate } from 'shortid';
|
|
|
|
import _, { flow, dropRight, head, takeRight, tail, startsWith, findIndex, replace, trim, merge, assign, each as each$1, find as find$1, join as join$2, orderBy as orderBy$1, union as union$1 } from 'lodash';
|
|
|
|
import { compileCode as compileCode$1, compileExpression as compileExpression$1 } from '@nx-js/compiler-util';
|
|
|
|
import lunr from 'lunr';
|
|
|
|
import { Buffer as Buffer$1 } from 'safe-buffer';
|
|
|
|
|
|
|
|
const commonPlus = extra => union(['onBegin', 'onComplete', 'onError'])(extra);
|
|
|
|
|
|
|
|
const common = () => commonPlus([]);
|
|
|
|
|
|
|
|
const _events = {
|
|
|
|
recordApi: {
|
|
|
|
save: commonPlus([
|
|
|
|
'onInvalid',
|
|
|
|
'onRecordUpdated',
|
|
|
|
'onRecordCreated']),
|
|
|
|
delete: common(),
|
|
|
|
getContext: common(),
|
|
|
|
getNew: common(),
|
|
|
|
load: common(),
|
|
|
|
validate: common(),
|
|
|
|
uploadFile: common(),
|
|
|
|
downloadFile: common(),
|
|
|
|
},
|
|
|
|
indexApi: {
|
|
|
|
buildIndex: common(),
|
|
|
|
listItems: common(),
|
|
|
|
delete: common(),
|
|
|
|
aggregates: common(),
|
|
|
|
},
|
|
|
|
collectionApi: {
|
|
|
|
getAllowedRecordTypes: common(),
|
|
|
|
initialise: common(),
|
|
|
|
delete: common(),
|
|
|
|
},
|
|
|
|
authApi: {
|
|
|
|
authenticate: common(),
|
|
|
|
authenticateTemporaryAccess: common(),
|
|
|
|
createTemporaryAccess: common(),
|
|
|
|
createUser: common(),
|
|
|
|
enableUser: common(),
|
|
|
|
disableUser: common(),
|
|
|
|
loadAccessLevels: common(),
|
|
|
|
getNewAccessLevel: common(),
|
|
|
|
getNewUser: common(),
|
|
|
|
getNewUserAuth: common(),
|
|
|
|
getUsers: common(),
|
|
|
|
saveAccessLevels: common(),
|
|
|
|
isAuthorized: common(),
|
|
|
|
changeMyPassword: common(),
|
|
|
|
setPasswordFromTemporaryCode: common(),
|
|
|
|
scorePassword: common(),
|
|
|
|
isValidPassword: common(),
|
|
|
|
validateUser: common(),
|
|
|
|
validateAccessLevels: common(),
|
|
|
|
setUserAccessLevels: common(),
|
|
|
|
},
|
|
|
|
templateApi: {
|
|
|
|
saveApplicationHierarchy: common(),
|
|
|
|
saveActionsAndTriggers: common(),
|
|
|
|
},
|
|
|
|
actionsApi: {
|
|
|
|
execute: common(),
|
|
|
|
},
|
|
|
|
};
|
|
|
|
|
|
|
|
const _eventsList = [];
|
|
|
|
|
|
|
|
const makeEvent = (area, method, name) => `${area}:${method}:${name}`;
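// e.g. (illustrative) makeEvent('recordApi', 'save', 'onBegin') === 'recordApi:save:onBegin'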
|
|
|
|
|
|
|
|
for (const areaKey in _events) {
|
|
|
|
for (const methodKey in _events[areaKey]) {
|
|
|
|
_events[areaKey][methodKey] = reduce((obj, s) => {
|
|
|
|
obj[s] = makeEvent(areaKey, methodKey, s);
|
|
|
|
return obj;
|
|
|
|
},
|
|
|
|
{})(_events[areaKey][methodKey]);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
for (const areaKey in _events) {
|
|
|
|
for (const methodKey in _events[areaKey]) {
|
|
|
|
for (const name in _events[areaKey][methodKey]) {
|
|
|
|
_eventsList.push(
|
|
|
|
_events[areaKey][methodKey][name],
|
|
|
|
);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
const events = _events;
|
|
|
|
|
|
|
|
const eventsList = _eventsList;
|
|
|
|
|
|
|
|
class BadRequestError extends Error {
|
|
|
|
constructor(message) {
|
|
|
|
super(message);
|
|
|
|
this.httpStatusCode = 400;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
class UnauthorisedError extends Error {
|
|
|
|
constructor(message) {
|
|
|
|
super(message);
|
|
|
|
this.httpStatusCode = 401;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
class ForbiddenError extends Error {
|
|
|
|
constructor(message) {
|
|
|
|
super(message);
|
|
|
|
this.httpStatusCode = 403;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
class NotFoundError extends Error {
|
|
|
|
constructor(message) {
|
|
|
|
super(message);
|
|
|
|
this.httpStatusCode = 404;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
const apiWrapper = async (app, eventNamespace, isAuthorized, eventContext, func, ...params) => {
|
|
|
|
pushCallStack(app, eventNamespace);
|
|
|
|
|
|
|
|
if (!isAuthorized(app)) {
|
|
|
|
handleNotAuthorized(app, eventContext, eventNamespace);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
const startDate = Date.now();
|
|
|
|
const elapsed = () => (Date.now() - startDate);
|
|
|
|
|
|
|
|
try {
|
|
|
|
await app.publish(
|
|
|
|
eventNamespace.onBegin,
|
|
|
|
eventContext,
|
|
|
|
);
|
|
|
|
|
|
|
|
const result = await func(...params);
|
|
|
|
|
|
|
|
await publishComplete(app, eventContext, eventNamespace, elapsed, result);
|
|
|
|
return result;
|
|
|
|
} catch (error) {
|
|
|
|
await publishError(app, eventContext, eventNamespace, elapsed, error);
|
|
|
|
throw error;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const apiWrapperSync = (app, eventNamespace, isAuthorized, eventContext, func, ...params) => {
|
|
|
|
pushCallStack(app, eventNamespace);
|
|
|
|
|
|
|
|
if (!isAuthorized(app)) {
|
|
|
|
handleNotAuthorized(app, eventContext, eventNamespace);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
const startDate = Date.now();
|
|
|
|
const elapsed = () => (Date.now() - startDate);
|
|
|
|
|
|
|
|
try {
|
|
|
|
app.publish(
|
|
|
|
eventNamespace.onBegin,
|
|
|
|
eventContext,
|
|
|
|
);
|
|
|
|
|
|
|
|
const result = func(...params);
|
|
|
|
|
|
|
|
publishComplete(app, eventContext, eventNamespace, elapsed, result);
|
|
|
|
return result;
|
|
|
|
} catch (error) {
|
|
|
|
publishError(app, eventContext, eventNamespace, elapsed, error);
|
|
|
|
throw error;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const handleNotAuthorized = (app, eventContext, eventNamespace) => {
|
|
|
|
const err = new UnauthorisedError(`Unauthorized: ${eventNamespace}`);
|
|
|
|
publishError(app, eventContext, eventNamespace, () => 0, err);
|
|
|
|
throw err;
|
|
|
|
};
|
|
|
|
|
|
|
|
const pushCallStack = (app, eventNamespace, seedCallId) => {
|
|
|
|
const callId = generate();
|
|
|
|
|
|
|
|
const createCallStack = () => ({
|
|
|
|
seedCallId: !isUndefined(seedCallId)
|
|
|
|
? seedCallId
|
|
|
|
: callId,
|
|
|
|
threadCallId: callId,
|
|
|
|
stack: [],
|
|
|
|
});
|
|
|
|
|
|
|
|
if (isUndefined(app.calls)) {
|
|
|
|
app.calls = createCallStack();
|
|
|
|
}
|
|
|
|
|
|
|
|
app.calls.stack.push({
|
|
|
|
namespace: eventNamespace,
|
|
|
|
callId,
|
|
|
|
});
|
|
|
|
};
|
|
|
|
|
|
|
|
const popCallStack = (app) => {
|
|
|
|
app.calls.stack.pop();
|
|
|
|
if (app.calls.stack.length === 0) {
|
|
|
|
delete app.calls;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const publishError = async (app, eventContext, eventNamespace, elapsed, err) => {
|
|
|
|
const ctx = cloneDeep(eventContext);
|
|
|
|
ctx.error = err;
|
|
|
|
ctx.elapsed = elapsed();
|
|
|
|
await app.publish(
|
|
|
|
eventNamespace.onError,
|
|
|
|
ctx,
|
|
|
|
);
|
|
|
|
popCallStack(app);
|
|
|
|
};
|
|
|
|
|
|
|
|
const publishComplete = async (app, eventContext, eventNamespace, elapsed, result) => {
|
|
|
|
const endcontext = cloneDeep(eventContext);
|
|
|
|
endcontext.result = result;
|
|
|
|
endcontext.elapsed = elapsed();
|
|
|
|
await app.publish(
|
|
|
|
eventNamespace.onComplete,
|
|
|
|
endcontext,
|
|
|
|
);
|
|
|
|
popCallStack(app);
|
|
|
|
return result;
|
|
|
|
};
|
|
|
|
|
|
|
|
const lockOverlapMilliseconds = 10;
|
|
|
|
|
|
|
|
const getLock = async (app, lockFile, timeoutMilliseconds, maxLockRetries, retryCount = 0) => {
|
|
|
|
try {
|
|
|
|
const timeout = (await app.getEpochTime())
|
|
|
|
+ timeoutMilliseconds;
|
|
|
|
|
|
|
|
const lock = {
|
|
|
|
timeout,
|
|
|
|
key: lockFile,
|
|
|
|
totalTimeout: timeoutMilliseconds,
|
|
|
|
};
|
|
|
|
|
|
|
|
await app.datastore.createFile(
|
|
|
|
lockFile,
|
|
|
|
getLockFileContent(
|
|
|
|
lock.totalTimeout,
|
|
|
|
lock.timeout,
|
|
|
|
),
|
|
|
|
);
|
|
|
|
|
|
|
|
return lock;
|
|
|
|
} catch (e) {
|
|
|
|
if (retryCount == maxLockRetries) { return NO_LOCK; }
|
|
|
|
|
|
|
|
const lock = parseLockFileContent(
|
|
|
|
lockFile,
|
|
|
|
await app.datastore.loadFile(lockFile),
|
|
|
|
);
|
|
|
|
|
|
|
|
const currentEpochTime = await app.getEpochTime();
|
|
|
|
|
|
|
|
if (currentEpochTime < lock.timeout) {
|
|
|
|
return NO_LOCK;
|
|
|
|
}
|
|
|
|
|
|
|
|
try {
|
|
|
|
await app.datastore.deleteFile(lockFile);
|
|
|
|
} catch (_) {
|
|
|
|
//empty
|
|
|
|
}
|
|
|
|
|
|
|
|
await sleepForRetry();
|
|
|
|
|
|
|
|
return await getLock(
|
|
|
|
app, lockFile, timeoutMilliseconds,
|
|
|
|
maxLockRetries, retryCount + 1,
|
|
|
|
);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const getLockFileContent = (totalTimeout, epochTime) => `${totalTimeout}:${epochTime.toString()}`;
|
|
|
|
|
|
|
|
const parseLockFileContent = (key, content) => $(content, [
|
|
|
|
split(':'),
|
|
|
|
parts => ({
|
|
|
|
    totalTimeout: Number(parts[0]),
|
|
|
|
    timeout: Number(parts[1]),
|
|
|
|
key,
|
|
|
|
}),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const releaseLock = async (app, lock) => {
|
|
|
|
const currentEpochTime = await app.getEpochTime();
|
|
|
|
  // only release if not timed out
|
|
|
|
if (currentEpochTime < (lock.timeout - lockOverlapMilliseconds)) {
|
|
|
|
try {
|
|
|
|
await app.datastore.deleteFile(lock.key);
|
|
|
|
} catch (_) {
|
|
|
|
//empty
|
|
|
|
}
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const NO_LOCK = 'no lock';
|
|
|
|
const isNolock = id => id === NO_LOCK;
|
|
|
|
|
|
|
|
const sleepForRetry = () => new Promise(resolve => setTimeout(resolve, lockOverlapMilliseconds));
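// Usage sketch for the lock helpers above (hypothetical lock file path and timings):
//   const lock = await getLock(app, '/some/lock_file', 30000, 5);
//   if (!isNolock(lock)) {
//     try { /* critical section */ } finally { await releaseLock(app, lock); }
//   }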
|
|
|
|
|
|
|
|
// this is the combinator function
|
|
|
|
const $$ = (...funcs) => arg => flow(funcs)(arg);
|
|
|
|
|
|
|
|
// this is the pipe function
|
|
|
|
const $ = (arg, funcs) => $$(...funcs)(arg);
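// Usage sketch (illustrative): flow applies the functions left to right, so
//   $$(x => x + 1, x => x * 3)(2) === 9
//   $(2, [x => x + 1, x => x * 3]) === 9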
|
|
|
|
|
|
|
|
const keySep = '/';
|
|
|
|
const trimKeySep = str => trim(str, keySep);
|
|
|
|
const splitByKeySep = str => split(keySep)(str);
|
|
|
|
const safeKey = key => replace(`${keySep}${trimKeySep(key)}`, `${keySep}${keySep}`, keySep);
|
|
|
|
const joinKey = (...strs) => {
|
|
|
|
  const paramsOrArray = strs.length === 1 && isArray$1(strs[0])
|
|
|
|
? strs[0] : strs;
|
|
|
|
return $(paramsOrArray, [
|
|
|
|
filter(s => !isUndefined(s)
|
|
|
|
&& !isNull(s)
|
|
|
|
&& s.toString().length > 0),
|
|
|
|
join$1(keySep),
|
|
|
|
safeKey
|
|
|
|
]);
|
|
|
|
};
|
|
|
|
const splitKey = $$(trimKeySep, splitByKeySep);
|
|
|
|
const getDirFomKey = $$(splitKey, dropRight, p => joinKey(...p));
|
|
|
|
const getFileFromKey = $$(splitKey, takeRight, head);
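// Illustrative examples of the key helpers above (hypothetical key '/customers/1234'):
//   splitKey('/customers/1234')       => ['customers', '1234']
//   getDirFomKey('/customers/1234')   => '/customers'
//   getFileFromKey('/customers/1234') => '1234'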
|
|
|
|
|
|
|
|
const configFolder = `${keySep}.config`;
|
|
|
|
const fieldDefinitions = joinKey(configFolder, 'fields.json');
|
|
|
|
const templateDefinitions = joinKey(configFolder, 'templates.json');
|
|
|
|
const appDefinitionFile = joinKey(configFolder, 'appDefinition.json');
|
|
|
|
const dirIndex = folderPath => joinKey(configFolder, 'dir', ...splitKey(folderPath), 'dir.idx');
|
|
|
|
const getIndexKeyFromFileKey = $$(getDirFomKey, dirIndex);
|
|
|
|
|
|
|
|
const ifExists = (val, exists, notExists) => (isUndefined(val)
|
|
|
|
? isUndefined(notExists) ? (() => { })() : notExists()
|
|
|
|
: exists());
|
|
|
|
|
|
|
|
const getOrDefault = (val, defaultVal) => ifExists(val, () => val, () => defaultVal);
|
|
|
|
|
|
|
|
const not = func => val => !func(val);
|
|
|
|
const isDefined = not(isUndefined);
|
|
|
|
const isNonNull = not(isNull);
|
|
|
|
const isNotNaN = not(isNaN$1);
|
|
|
|
|
|
|
|
const allTrue = (...funcArgs) => val => reduce(
|
|
|
|
(result, conditionFunc) => (isNull(result) || result == true) && conditionFunc(val),
|
|
|
|
null)(funcArgs);
|
|
|
|
|
|
|
|
const anyTrue = (...funcArgs) => val => reduce(
|
|
|
|
(result, conditionFunc) => result == true || conditionFunc(val),
|
|
|
|
null)(funcArgs);
|
|
|
|
|
|
|
|
const insensitiveEquals = (str1, str2) => str1.trim().toLowerCase() === str2.trim().toLowerCase();
|
|
|
|
|
|
|
|
const isSomething = allTrue(isDefined, isNonNull, isNotNaN);
|
|
|
|
const isNothing = not(isSomething);
|
|
|
|
const isNothingOrEmpty = v => isNothing(v) || isEmpty(v);
|
|
|
|
const somethingOrGetDefault = getDefaultFunc => val => (isSomething(val) ? val : getDefaultFunc());
|
|
|
|
const somethingOrDefault = (val, defaultVal) => somethingOrGetDefault(constant(defaultVal))(val);
|
|
|
|
|
|
|
|
const mapIfSomethingOrDefault = (mapFunc, defaultVal) => val => (isSomething(val) ? mapFunc(val) : defaultVal);
|
|
|
|
|
|
|
|
const mapIfSomethingOrBlank = mapFunc => mapIfSomethingOrDefault(mapFunc, '');
|
|
|
|
|
|
|
|
const none = predicate => collection => !some(predicate)(collection);
|
|
|
|
|
|
|
|
const all = predicate => collection => none(v => !predicate(v))(collection);
|
|
|
|
|
|
|
|
const isNotEmpty = ob => !isEmpty(ob);
|
|
|
|
const isNonEmptyArray = allTrue(isArray$1, isNotEmpty);
|
|
|
|
const isNonEmptyString = allTrue(isString, isNotEmpty);
|
|
|
|
const tryOr = failFunc => (func, ...args) => {
|
|
|
|
try {
|
|
|
|
return func.apply(null, ...args);
|
|
|
|
} catch (_) {
|
|
|
|
return failFunc();
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const tryAwaitOr = failFunc => async (func, ...args) => {
|
|
|
|
try {
|
|
|
|
return await func.apply(null, ...args);
|
|
|
|
} catch (_) {
|
|
|
|
return await failFunc();
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const defineError = (func, errorPrefix) => {
|
|
|
|
try {
|
|
|
|
return func();
|
|
|
|
} catch (err) {
|
|
|
|
err.message = `${errorPrefix} : ${err.message}`;
|
|
|
|
throw err;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const tryOrIgnore = tryOr(() => { });
|
|
|
|
const tryAwaitOrIgnore = tryAwaitOr(async () => { });
|
|
|
|
const causesException = (func) => {
|
|
|
|
try {
|
|
|
|
func();
|
|
|
|
return false;
|
|
|
|
} catch (e) {
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const executesWithoutException = func => !causesException(func);
|
|
|
|
|
|
|
|
const handleErrorWith = returnValInError => tryOr(constant(returnValInError));
|
|
|
|
|
|
|
|
const handleErrorWithUndefined = handleErrorWith(undefined);
|
|
|
|
|
|
|
|
const switchCase = (...cases) => (value) => {
|
|
|
|
const nextCase = () => head(cases)[0](value);
|
|
|
|
const nextResult = () => head(cases)[1](value);
|
|
|
|
|
|
|
|
if (isEmpty(cases)) return; // undefined
|
|
|
|
if (nextCase() === true) return nextResult();
|
|
|
|
return switchCase(...tail(cases))(value);
|
|
|
|
};
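// Usage sketch (illustrative): cases are [predicate, handler] pairs, tried in order until one matches:
//   switchCase([v => v === 1, () => 'one'], [v => true, () => 'other'])(2) === 'other'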
|
|
|
|
|
|
|
|
const isValue = val1 => val2 => (val1 === val2);
|
|
|
|
const isOneOf = (...vals) => val => includes(val)(vals);
|
|
|
|
const defaultCase = constant(true);
|
|
|
|
const memberMatches = (member, match) => obj => match(obj[member]);
|
|
|
|
|
|
|
|
|
|
|
|
const StartsWith = searchFor => searchIn => startsWith(searchIn, searchFor);
|
|
|
|
|
|
|
|
const contains = val => array => (findIndex(array, v => v === val) > -1);
|
|
|
|
|
|
|
|
const getHashCode = (s) => {
|
|
|
|
  let hash = 0; let i; let char; let l;
|
|
|
|
if (s.length == 0) return hash;
|
|
|
|
for (i = 0, l = s.length; i < l; i++) {
|
|
|
|
char = s.charCodeAt(i);
|
|
|
|
hash = ((hash << 5) - hash) + char;
|
|
|
|
hash |= 0; // Convert to 32bit integer
|
|
|
|
}
|
|
|
|
|
|
|
|
  // converting to string, but don't want a "-" prefix
|
|
|
|
if (hash < 0) { return `n${(hash * -1).toString()}`; }
|
|
|
|
return hash.toString();
|
|
|
|
};
|
|
|
|
|
|
|
|
// thanks to https://blog.grossman.io/how-to-write-async-await-without-try-catch-blocks-in-javascript/
|
|
|
|
const awEx = async (promise) => {
|
|
|
|
try {
|
|
|
|
const result = await promise;
|
|
|
|
return [undefined, result];
|
|
|
|
} catch (error) {
|
|
|
|
return [error, undefined];
|
|
|
|
}
|
|
|
|
};
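// Usage sketch (hypothetical async call): unwrap a promise without try/catch at the call site:
//   const [err, result] = await awEx(someAsyncOperation());
//   if (err) { /* handle the error */ }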
|
|
|
|
|
|
|
|
const isSafeInteger = n => isInteger(n)
|
|
|
|
&& n <= Number.MAX_SAFE_INTEGER
|
|
|
|
&& n >= 0 - Number.MAX_SAFE_INTEGER;
|
|
|
|
|
|
|
|
const toDateOrNull = s => (isNull(s) ? null
|
|
|
|
: isDate(s) ? s : new Date(s));
|
|
|
|
const toBoolOrNull = s => (isNull(s) ? null
|
|
|
|
: s === 'true' || s === true);
|
|
|
|
const toNumberOrNull = s => (isNull(s) ? null
|
|
|
|
: toNumber(s));
|
|
|
|
|
|
|
|
const isArrayOfString = opts => isArray$1(opts) && all(isString)(opts);
|
|
|
|
|
|
|
|
const pushAll = (target, items) => {
|
|
|
|
for(let i of items) target.push(i);
|
|
|
|
};
|
|
|
|
|
|
|
|
const pause = async duration => new Promise(res => setTimeout(res, duration));
|
|
|
|
|
|
|
|
const retry = async (fn, retries, delay, ...args) => {
|
|
|
|
try {
|
|
|
|
return await fn(...args);
|
|
|
|
} catch (err) {
|
|
|
|
if (retries > 1) {
|
|
|
|
return await pause(delay).then(async () => await retry(fn, (retries - 1), delay, ...args));
|
|
|
|
}
|
|
|
|
throw err;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
var index = {
|
|
|
|
ifExists,
|
|
|
|
getOrDefault,
|
|
|
|
isDefined,
|
|
|
|
isNonNull,
|
|
|
|
isNotNaN,
|
|
|
|
allTrue,
|
|
|
|
isSomething,
|
|
|
|
mapIfSomethingOrDefault,
|
|
|
|
mapIfSomethingOrBlank,
|
|
|
|
configFolder,
|
|
|
|
fieldDefinitions,
|
|
|
|
isNothing,
|
|
|
|
not,
|
|
|
|
switchCase,
|
|
|
|
defaultCase,
|
|
|
|
StartsWith,
|
|
|
|
contains,
|
|
|
|
templateDefinitions,
|
|
|
|
handleErrorWith,
|
|
|
|
handleErrorWithUndefined,
|
|
|
|
tryOr,
|
|
|
|
tryOrIgnore,
|
|
|
|
tryAwaitOr,
|
|
|
|
tryAwaitOrIgnore,
|
|
|
|
dirIndex,
|
|
|
|
keySep,
|
|
|
|
$,
|
|
|
|
$$,
|
|
|
|
getDirFomKey,
|
|
|
|
getFileFromKey,
|
|
|
|
splitKey,
|
|
|
|
somethingOrDefault,
|
|
|
|
getIndexKeyFromFileKey,
|
|
|
|
joinKey,
|
|
|
|
somethingOrGetDefault,
|
|
|
|
appDefinitionFile,
|
|
|
|
isValue,
|
|
|
|
all,
|
|
|
|
isOneOf,
|
|
|
|
memberMatches,
|
|
|
|
defineError,
|
|
|
|
anyTrue,
|
|
|
|
isNonEmptyArray,
|
|
|
|
causesException,
|
|
|
|
executesWithoutException,
|
|
|
|
none,
|
|
|
|
getHashCode,
|
|
|
|
awEx,
|
|
|
|
apiWrapper,
|
|
|
|
events,
|
|
|
|
eventsList,
|
|
|
|
isNothingOrEmpty,
|
|
|
|
isSafeInteger,
|
|
|
|
toNumber,
|
|
|
|
toDate: toDateOrNull,
|
|
|
|
toBool: toBoolOrNull,
|
|
|
|
isArrayOfString,
|
|
|
|
getLock,
|
|
|
|
NO_LOCK,
|
|
|
|
isNolock,
|
|
|
|
insensitiveEquals,
|
|
|
|
pause,
|
|
|
|
retry,
|
|
|
|
pushAll
|
|
|
|
};
|
|
|
|
|
|
|
|
const stringNotEmpty = s => isSomething(s) && s.trim().length > 0;
|
|
|
|
|
|
|
|
const makerule = (field, error, isValid) => ({ field, error, isValid });
|
|
|
|
|
|
|
|
const validationError = (rule, item) => ({ ...rule, item });
|
|
|
|
|
|
|
|
const applyRuleSet = ruleSet => itemToValidate => $(ruleSet, [
|
|
|
|
map(applyRule(itemToValidate)),
|
|
|
|
filter(isSomething),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const applyRule = itemTovalidate => rule => (rule.isValid(itemTovalidate)
|
|
|
|
? null
|
|
|
|
: validationError(rule, itemTovalidate));
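// Usage sketch (hypothetical rule set): a rule set maps an item to a list of validation errors:
//   const rules = [makerule('name', 'name is required', r => stringNotEmpty(r.name))];
//   applyRuleSet(rules)({ name: '' })
//   // => [{ field: 'name', error: 'name is required', isValid: [Function], item: { name: '' } }]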
|
|
|
|
|
|
|
|
const filterEval = 'FILTER_EVALUATE';
|
|
|
|
const filterCompile = 'FILTER_COMPILE';
|
|
|
|
const mapEval = 'MAP_EVALUATE';
|
|
|
|
const mapCompile = 'MAP_COMPILE';
|
|
|
|
|
|
|
|
|
|
|
|
const getEvaluateResult = () => ({
|
|
|
|
isError: false,
|
|
|
|
passedFilter: true,
|
|
|
|
result: null,
|
|
|
|
});
|
|
|
|
|
|
|
|
const compileFilter = index => compileExpression$1(index.filter);
|
|
|
|
|
|
|
|
const compileMap = index => compileCode$1(index.map);
|
|
|
|
|
|
|
|
const passesFilter = (record, index) => {
|
|
|
|
const context = { record };
|
|
|
|
if (!index.filter) return true;
|
|
|
|
|
|
|
|
const compiledFilter = defineError(
|
|
|
|
() => compileFilter(index),
|
|
|
|
filterCompile,
|
|
|
|
);
|
|
|
|
|
|
|
|
return defineError(
|
|
|
|
() => compiledFilter(context),
|
|
|
|
filterEval,
|
|
|
|
);
|
|
|
|
};
|
|
|
|
|
|
|
|
const mapRecord = (record, index) => {
|
|
|
|
const recordClone = cloneDeep(record);
|
|
|
|
const context = { record: recordClone };
|
|
|
|
|
|
|
|
const map = index.map ? index.map : 'return {...record};';
|
|
|
|
|
|
|
|
const compiledMap = defineError(
|
|
|
|
() => compileCode$1(map),
|
|
|
|
mapCompile,
|
|
|
|
);
|
|
|
|
|
|
|
|
const mapped = defineError(
|
|
|
|
() => compiledMap(context),
|
|
|
|
mapEval,
|
|
|
|
);
|
|
|
|
|
|
|
|
const mappedKeys = keys(mapped);
|
|
|
|
for (let i = 0; i < mappedKeys.length; i++) {
|
|
|
|
const key = mappedKeys[i];
|
|
|
|
mapped[key] = isUndefined(mapped[key]) ? null : mapped[key];
|
|
|
|
if (isFunction(mapped[key])) {
|
|
|
|
delete mapped[key];
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
mapped.key = record.key;
|
|
|
|
mapped.sortKey = index.getSortKey
|
|
|
|
? compileCode$1(index.getSortKey)(context)
|
|
|
|
: record.id;
|
|
|
|
|
|
|
|
return mapped;
|
|
|
|
};
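// Sketch (illustrative values): with index.map = 'return { name: record.name };' and
// record = { id: '1-abc', key: '/customers/1-abc', name: 'Bob' }, mapRecord returns
// { name: 'Bob', key: '/customers/1-abc', sortKey: '1-abc' }
// (sortKey falls back to record.id because this index has no getSortKey).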
|
|
|
|
|
|
|
|
const evaluate = record => (index) => {
|
|
|
|
const result = getEvaluateResult();
|
|
|
|
|
|
|
|
try {
|
|
|
|
result.passedFilter = passesFilter(record, index);
|
|
|
|
} catch (err) {
|
|
|
|
result.isError = true;
|
|
|
|
result.passedFilter = false;
|
|
|
|
result.result = err.message;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!result.passedFilter) return result;
|
|
|
|
|
|
|
|
try {
|
|
|
|
result.result = mapRecord(record, index);
|
|
|
|
} catch (err) {
|
|
|
|
result.isError = true;
|
|
|
|
result.result = err.message;
|
|
|
|
}
|
|
|
|
|
|
|
|
return result;
|
|
|
|
};
|
|
|
|
|
|
|
|
const indexTypes = { reference: 'reference', ancestor: 'ancestor' };
|
|
|
|
|
|
|
|
const indexRuleSet = [
|
|
|
|
makerule('map', 'index has no map function',
|
|
|
|
index => isNonEmptyString(index.map)),
|
|
|
|
makerule('map', "index's map function does not compile",
|
|
|
|
index => !isNonEmptyString(index.map)
|
|
|
|
|| executesWithoutException(() => compileMap(index))),
|
|
|
|
makerule('filter', "index's filter function does not compile",
|
|
|
|
index => !isNonEmptyString(index.filter)
|
|
|
|
|| executesWithoutException(() => compileFilter(index))),
|
|
|
|
makerule('name', 'must declare a name for index',
|
|
|
|
index => isNonEmptyString(index.name)),
|
|
|
|
makerule('name', 'there is a duplicate named index on this node',
|
|
|
|
index => isEmpty(index.name)
|
|
|
|
|| countBy('name')(index.parent().indexes)[index.name] === 1),
|
|
|
|
makerule('indexType', 'reference index may only exist on a record node',
|
|
|
|
index => isRecord(index.parent())
|
|
|
|
|| index.indexType !== indexTypes.reference),
|
|
|
|
makerule('indexType', `index type must be one of: ${join$1(', ')(keys(indexTypes))}`,
|
|
|
|
index => includes(index.indexType)(keys(indexTypes))),
|
|
|
|
];
|
|
|
|
|
|
|
|
const getFlattenedHierarchy = (appHierarchy, useCached = true) => {
|
|
|
|
if (isSomething(appHierarchy.getFlattenedHierarchy) && useCached) { return appHierarchy.getFlattenedHierarchy(); }
|
|
|
|
|
|
|
|
const flattenHierarchy = (currentNode, flattened) => {
|
|
|
|
flattened.push(currentNode);
|
|
|
|
if ((!currentNode.children
|
|
|
|
|| currentNode.children.length === 0)
|
|
|
|
&& (!currentNode.indexes
|
|
|
|
|| currentNode.indexes.length === 0)
|
|
|
|
&& (!currentNode.aggregateGroups
|
|
|
|
|| currentNode.aggregateGroups.length === 0)) {
|
|
|
|
return flattened;
|
|
|
|
}
|
|
|
|
|
|
|
|
const unionIfAny = l2 => l1 => union(l1)(!l2 ? [] : l2);
|
|
|
|
|
|
|
|
const children = $([], [
|
|
|
|
unionIfAny(currentNode.children),
|
|
|
|
unionIfAny(currentNode.indexes),
|
|
|
|
unionIfAny(currentNode.aggregateGroups),
|
|
|
|
]);
|
|
|
|
|
|
|
|
for (const child of children) {
|
|
|
|
flattenHierarchy(child, flattened);
|
|
|
|
}
|
|
|
|
return flattened;
|
|
|
|
};
|
|
|
|
|
|
|
|
appHierarchy.getFlattenedHierarchy = () => flattenHierarchy(appHierarchy, []);
|
|
|
|
return appHierarchy.getFlattenedHierarchy();
|
|
|
|
};
|
|
|
|
|
|
|
|
const getLastPartInKey = key => last(splitKey(key));
|
|
|
|
|
|
|
|
const getNodesInPath = appHierarchy => key => $(appHierarchy, [
|
|
|
|
getFlattenedHierarchy,
|
|
|
|
filter(n => new RegExp(`${n.pathRegx()}`).test(key)),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const getExactNodeForKey = appHierarchy => key => $(appHierarchy, [
|
|
|
|
getFlattenedHierarchy,
|
|
|
|
find(n => new RegExp(`${n.pathRegx()}$`).test(key)),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const getNodeForCollectionPath = appHierarchy => collectionKey => $(appHierarchy, [
|
|
|
|
getFlattenedHierarchy,
|
|
|
|
find(n => (isCollectionRecord(n)
|
|
|
|
&& new RegExp(`${n.collectionPathRegx()}$`).test(collectionKey))),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const hasMatchingAncestor = ancestorPredicate => decendantNode => switchCase(
|
|
|
|
|
|
|
|
[node => isNothing(node.parent()),
|
|
|
|
constant(false)],
|
|
|
|
|
|
|
|
[node => ancestorPredicate(node.parent()),
|
|
|
|
constant(true)],
|
|
|
|
|
|
|
|
[defaultCase,
|
|
|
|
node => hasMatchingAncestor(ancestorPredicate)(node.parent())],
|
|
|
|
|
|
|
|
)(decendantNode);
|
|
|
|
|
|
|
|
const getNode = (appHierarchy, nodeKey) => $(appHierarchy, [
|
|
|
|
getFlattenedHierarchy,
|
|
|
|
find(n => n.nodeKey() === nodeKey
|
|
|
|
|| (isCollectionRecord(n)
|
|
|
|
&& n.collectionNodeKey() === nodeKey)),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const getCollectionNode = (appHierarchy, nodeKey) => $(appHierarchy, [
|
|
|
|
getFlattenedHierarchy,
|
|
|
|
find(n => (isCollectionRecord(n)
|
|
|
|
&& n.collectionNodeKey() === nodeKey)),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const getNodeByKeyOrNodeKey = (appHierarchy, keyOrNodeKey) => {
|
|
|
|
const nodeByKey = getExactNodeForKey(appHierarchy)(keyOrNodeKey);
|
|
|
|
return isNothing(nodeByKey)
|
|
|
|
? getNode(appHierarchy, keyOrNodeKey)
|
|
|
|
: nodeByKey;
|
|
|
|
};
|
|
|
|
|
|
|
|
const getCollectionNodeByKeyOrNodeKey = (appHierarchy, keyOrNodeKey) => {
|
|
|
|
const nodeByKey = getNodeForCollectionPath(appHierarchy)(keyOrNodeKey);
|
|
|
|
return isNothing(nodeByKey)
|
|
|
|
? getCollectionNode(appHierarchy, keyOrNodeKey)
|
|
|
|
: nodeByKey;
|
|
|
|
};
|
|
|
|
|
|
|
|
const isNode = (appHierarchy, key) => isSomething(getExactNodeForKey(appHierarchy)(key));
|
|
|
|
|
|
|
|
const getActualKeyOfParent = (parentNodeKey, actualChildKey) =>
|
|
|
|
$(actualChildKey, [
|
|
|
|
splitKey,
|
|
|
|
take(splitKey(parentNodeKey).length),
|
|
|
|
ks => joinKey(...ks),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const getParentKey = (key) => {
|
|
|
|
return $(key, [
|
|
|
|
splitKey,
|
|
|
|
take(splitKey(key).length - 1),
|
|
|
|
joinKey,
|
|
|
|
]);
|
|
|
|
};
|
|
|
|
|
|
|
|
const isKeyAncestorOf = ancestorKey => decendantNode => hasMatchingAncestor(p => p.nodeKey() === ancestorKey)(decendantNode);
|
|
|
|
|
|
|
|
const hasNoMatchingAncestors = parentPredicate => node => !hasMatchingAncestor(parentPredicate)(node);
|
|
|
|
|
|
|
|
const findField = (recordNode, fieldName) => find(f => f.name == fieldName)(recordNode.fields);
|
|
|
|
|
|
|
|
const isAncestor = decendant => ancestor => isKeyAncestorOf(ancestor.nodeKey())(decendant);
|
|
|
|
|
|
|
|
const isDecendant = ancestor => decendant => isAncestor(decendant)(ancestor);
|
|
|
|
|
|
|
|
const getRecordNodeId = recordKey => $(recordKey, [
|
|
|
|
splitKey,
|
|
|
|
last,
|
|
|
|
getRecordNodeIdFromId,
|
|
|
|
]);
|
|
|
|
|
|
|
|
const getRecordNodeIdFromId = recordId => $(recordId, [split('-'), first, parseInt]);
|
|
|
|
|
|
|
|
const getRecordNodeById = (hierarchy, recordId) => $(hierarchy, [
|
|
|
|
getFlattenedHierarchy,
|
|
|
|
find(n => isRecord(n)
|
|
|
|
&& n.nodeId === getRecordNodeIdFromId(recordId)),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const recordNodeIdIsAllowed = indexNode => nodeId => indexNode.allowedRecordNodeIds.length === 0
|
|
|
|
|| includes(nodeId)(indexNode.allowedRecordNodeIds);
|
|
|
|
|
|
|
|
const recordNodeIsAllowed = indexNode => recordNode => recordNodeIdIsAllowed(indexNode)(recordNode.nodeId);
|
|
|
|
|
|
|
|
const getAllowedRecordNodesForIndex = (appHierarchy, indexNode) => {
|
|
|
|
const recordNodes = $(appHierarchy, [
|
|
|
|
getFlattenedHierarchy,
|
|
|
|
filter(isRecord),
|
|
|
|
]);
|
|
|
|
|
|
|
|
if (isGlobalIndex(indexNode)) {
|
|
|
|
return $(recordNodes, [
|
|
|
|
filter(recordNodeIsAllowed(indexNode)),
|
|
|
|
]);
|
|
|
|
}
|
|
|
|
|
|
|
|
if (isAncestorIndex(indexNode)) {
|
|
|
|
return $(recordNodes, [
|
|
|
|
filter(isDecendant(indexNode.parent())),
|
|
|
|
filter(recordNodeIsAllowed(indexNode)),
|
|
|
|
]);
|
|
|
|
}
|
|
|
|
|
|
|
|
if (isReferenceIndex(indexNode)) {
|
|
|
|
return $(recordNodes, [
|
|
|
|
filter(n => some(fieldReversesReferenceToIndex(indexNode))(n.fields)),
|
|
|
|
]);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const getNodeFromNodeKeyHash = hierarchy => hash => $(hierarchy, [
|
|
|
|
getFlattenedHierarchy,
|
|
|
|
find(n => getHashCode(n.nodeKey()) === hash),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const isRecord = node => isSomething(node) && node.type === 'record';
|
|
|
|
const isSingleRecord = node => isRecord(node) && node.isSingle;
|
|
|
|
const isCollectionRecord = node => isRecord(node) && !node.isSingle;
|
|
|
|
const isIndex = node => isSomething(node) && node.type === 'index';
|
|
|
|
const isaggregateGroup = node => isSomething(node) && node.type === 'aggregateGroup';
|
|
|
|
const isShardedIndex = node => isIndex(node) && isNonEmptyString(node.getShardName);
|
|
|
|
const isRoot = node => isSomething(node) && node.isRoot();
|
|
|
|
const isDecendantOfARecord = hasMatchingAncestor(isRecord);
|
|
|
|
const isGlobalIndex = node => isIndex(node) && isRoot(node.parent());
|
|
|
|
const isReferenceIndex = node => isIndex(node) && node.indexType === indexTypes.reference;
|
|
|
|
const isAncestorIndex = node => isIndex(node) && node.indexType === indexTypes.ancestor;
|
|
|
|
|
|
|
|
const fieldReversesReferenceToNode = node => field => field.type === 'reference'
|
|
|
|
&& intersection(field.typeOptions.reverseIndexNodeKeys)(map(i => i.nodeKey())(node.indexes))
|
|
|
|
.length > 0;
|
|
|
|
|
|
|
|
const fieldReversesReferenceToIndex = indexNode => field => field.type === 'reference'
|
|
|
|
&& intersection(field.typeOptions.reverseIndexNodeKeys)([indexNode.nodeKey()])
|
|
|
|
.length > 0;
|
|
|
|
|
|
|
|
var hierarchy = {
|
|
|
|
getLastPartInKey,
|
|
|
|
getNodesInPath,
|
|
|
|
getExactNodeForKey,
|
|
|
|
hasMatchingAncestor,
|
|
|
|
getNode,
|
|
|
|
getNodeByKeyOrNodeKey,
|
|
|
|
isNode,
|
|
|
|
getActualKeyOfParent,
|
|
|
|
getParentKey,
|
|
|
|
isKeyAncestorOf,
|
|
|
|
hasNoMatchingAncestors,
|
|
|
|
findField,
|
|
|
|
isAncestor,
|
|
|
|
isDecendant,
|
|
|
|
getRecordNodeId,
|
|
|
|
getRecordNodeIdFromId,
|
|
|
|
getRecordNodeById,
|
|
|
|
recordNodeIdIsAllowed,
|
|
|
|
recordNodeIsAllowed,
|
|
|
|
getAllowedRecordNodesForIndex,
|
|
|
|
getNodeFromNodeKeyHash,
|
|
|
|
isRecord,
|
|
|
|
isCollectionRecord,
|
|
|
|
isIndex,
|
|
|
|
isaggregateGroup,
|
|
|
|
isShardedIndex,
|
|
|
|
isRoot,
|
|
|
|
isDecendantOfARecord,
|
|
|
|
isGlobalIndex,
|
|
|
|
isReferenceIndex,
|
|
|
|
isAncestorIndex,
|
|
|
|
fieldReversesReferenceToNode,
|
|
|
|
fieldReversesReferenceToIndex,
|
|
|
|
getFlattenedHierarchy,
|
|
|
|
};
|
|
|
|
|
|
|
|
const getSafeFieldParser = (tryParse, defaultValueFunctions) => (field, record) => {
|
|
|
|
if (has(field.name)(record)) {
|
|
|
|
return getSafeValueParser(tryParse, defaultValueFunctions)(record[field.name]);
|
|
|
|
}
|
|
|
|
return defaultValueFunctions[field.getUndefinedValue]();
|
|
|
|
};
|
|
|
|
|
|
|
|
const getSafeValueParser = (tryParse, defaultValueFunctions) => (value) => {
|
|
|
|
const parsed = tryParse(value);
|
|
|
|
if (parsed.success) {
|
|
|
|
return parsed.value;
|
|
|
|
}
|
|
|
|
return defaultValueFunctions.default();
|
|
|
|
};
|
|
|
|
|
|
|
|
const getNewValue = (tryParse, defaultValueFunctions) => (field) => {
|
|
|
|
const getInitialValue = isUndefined(field) || isUndefined(field.getInitialValue)
|
|
|
|
? 'default'
|
|
|
|
: field.getInitialValue;
|
|
|
|
|
|
|
|
return has(getInitialValue)(defaultValueFunctions)
|
|
|
|
? defaultValueFunctions[getInitialValue]()
|
|
|
|
: getSafeValueParser(tryParse, defaultValueFunctions)(getInitialValue);
|
|
|
|
};
|
|
|
|
|
|
|
|
const typeFunctions = specificFunctions => merge({
|
|
|
|
value: constant,
|
|
|
|
null: constant(null),
|
|
|
|
}, specificFunctions);
|
|
|
|
|
|
|
|
const validateTypeConstraints = validationRules => async (field, record, context) => {
|
|
|
|
const fieldValue = record[field.name];
|
|
|
|
const validateRule = async r => (!await r.isValid(fieldValue, field.typeOptions, context)
|
|
|
|
? r.getMessage(fieldValue, field.typeOptions)
|
|
|
|
: '');
|
|
|
|
|
|
|
|
const errors = [];
|
|
|
|
for (const r of validationRules) {
|
|
|
|
const err = await validateRule(r);
|
|
|
|
if (isNotEmpty(err)) errors.push(err);
|
|
|
|
}
|
|
|
|
|
|
|
|
return errors;
|
|
|
|
};
|
|
|
|
|
|
|
|
const getDefaultOptions = mapValues(v => v.defaultValue);
|
|
|
|
|
|
|
|
const makerule$1 = (isValid, getMessage) => ({ isValid, getMessage });
|
|
|
|
const parsedFailed = val => ({ success: false, value: val });
|
|
|
|
const parsedSuccess = val => ({ success: true, value: val });
|
|
|
|
const getDefaultExport = (name, tryParse, functions, options, validationRules, sampleValue, stringify) => ({
|
|
|
|
getNew: getNewValue(tryParse, functions),
|
|
|
|
safeParseField: getSafeFieldParser(tryParse, functions),
|
|
|
|
safeParseValue: getSafeValueParser(tryParse, functions),
|
|
|
|
tryParse,
|
|
|
|
name,
|
|
|
|
getDefaultOptions: () => getDefaultOptions(cloneDeep(options)),
|
|
|
|
optionDefinitions: options,
|
|
|
|
validateTypeConstraints: validateTypeConstraints(validationRules),
|
|
|
|
sampleValue,
|
|
|
|
stringify: val => (val === null || val === undefined
|
|
|
|
? '' : stringify(val)),
|
|
|
|
getDefaultValue: functions.default,
|
|
|
|
});
|
|
|
|
|
|
|
|
const stringFunctions = typeFunctions({
|
|
|
|
default: constant(null),
|
|
|
|
});
|
|
|
|
|
|
|
|
const stringTryParse = switchCase(
|
|
|
|
[isString, parsedSuccess],
|
|
|
|
[isNull, parsedSuccess],
|
|
|
|
[defaultCase, v => parsedSuccess(v.toString())],
|
|
|
|
);
|
|
|
|
|
|
|
|
const options = {
|
|
|
|
maxLength: {
|
|
|
|
defaultValue: null,
|
|
|
|
    isValid: n => n === null || (isSafeInteger(n) && n > 0),
|
|
|
|
    requirementDescription: 'max length must be null (no limit) or an integer greater than zero',
|
|
|
|
parse: toNumberOrNull,
|
|
|
|
},
|
|
|
|
values: {
|
|
|
|
defaultValue: null,
|
|
|
|
isValid: v => v === null || (isArrayOfString(v) && v.length > 0 && v.length < 10000),
|
|
|
|
requirementDescription: "'values' must be null (no values) or an arry of at least one string",
|
|
|
|
parse: s => s,
|
|
|
|
},
|
|
|
|
allowDeclaredValuesOnly: {
|
|
|
|
defaultValue: false,
|
|
|
|
isValid: isBoolean,
|
|
|
|
requirementDescription: 'allowDeclaredValuesOnly must be true or false',
|
|
|
|
parse: toBoolOrNull,
|
|
|
|
},
|
|
|
|
};
|
|
|
|
|
|
|
|
const typeConstraints = [
|
|
|
|
makerule$1(async (val, opts) => val === null || opts.maxLength === null || val.length <= opts.maxLength,
|
|
|
|
(val, opts) => `value exceeds maximum length of ${opts.maxLength}`),
|
|
|
|
makerule$1(async (val, opts) => val === null
|
|
|
|
|| opts.allowDeclaredValuesOnly === false
|
|
|
|
|| includes(val)(opts.values),
|
|
|
|
(val) => `"${val}" does not exist in the list of allowed values`),
|
|
|
|
];
|
|
|
|
|
|
|
|
var string = getDefaultExport(
|
|
|
|
'string',
|
|
|
|
stringTryParse,
|
|
|
|
stringFunctions,
|
|
|
|
options,
|
|
|
|
typeConstraints,
|
|
|
|
'abcde',
|
|
|
|
str => str,
|
|
|
|
);
|
|
|
|
|
|
|
|
const boolFunctions = typeFunctions({
|
|
|
|
default: constant(null),
|
|
|
|
});
|
|
|
|
|
|
|
|
const boolTryParse = switchCase(
|
|
|
|
[isBoolean, parsedSuccess],
|
|
|
|
[isNull, parsedSuccess],
|
|
|
|
[isOneOf('true', '1', 'yes', 'on'), () => parsedSuccess(true)],
|
|
|
|
[isOneOf('false', '0', 'no', 'off'), () => parsedSuccess(false)],
|
|
|
|
[defaultCase, parsedFailed],
|
|
|
|
);
|
|
|
|
|
|
|
|
const options$1 = {
|
|
|
|
allowNulls: {
|
|
|
|
defaultValue: true,
|
|
|
|
isValid: isBoolean,
|
|
|
|
    requirementDescription: 'must be true or false',
|
|
|
|
parse: toBoolOrNull,
|
|
|
|
},
|
|
|
|
};
|
|
|
|
|
|
|
|
const typeConstraints$1 = [
|
|
|
|
makerule$1(async (val, opts) => opts.allowNulls === true || val !== null,
|
|
|
|
() => 'field cannot be null'),
|
|
|
|
];
|
|
|
|
|
|
|
|
var bool = getDefaultExport(
|
|
|
|
'bool', boolTryParse, boolFunctions,
|
|
|
|
options$1, typeConstraints$1, true, JSON.stringify,
|
|
|
|
);
|
|
|
|
|
|
|
|
const numberFunctions = typeFunctions({
|
|
|
|
default: constant(null),
|
|
|
|
});
|
|
|
|
|
|
|
|
const parseStringtoNumberOrNull = (s) => {
|
|
|
|
const num = Number(s);
|
|
|
|
return isNaN(num) ? parsedFailed(s) : parsedSuccess(num);
|
|
|
|
};
|
|
|
|
|
|
|
|
const numberTryParse = switchCase(
|
|
|
|
[isNumber, parsedSuccess],
|
|
|
|
[isString, parseStringtoNumberOrNull],
|
|
|
|
[isNull, parsedSuccess],
|
|
|
|
[defaultCase, parsedFailed],
|
|
|
|
);
|
|
|
|
|
|
|
|
const options$2 = {
|
|
|
|
maxValue: {
|
|
|
|
defaultValue: Number.MAX_SAFE_INTEGER,
|
|
|
|
isValid: isSafeInteger,
|
|
|
|
requirementDescription: 'must be a valid integer',
|
|
|
|
parse: toNumberOrNull,
|
|
|
|
},
|
|
|
|
minValue: {
|
|
|
|
defaultValue: 0 - Number.MAX_SAFE_INTEGER,
|
|
|
|
isValid: isSafeInteger,
|
|
|
|
requirementDescription: 'must be a valid integer',
|
|
|
|
parse: toNumberOrNull,
|
|
|
|
},
|
|
|
|
decimalPlaces: {
|
|
|
|
defaultValue: 0,
|
|
|
|
isValid: n => isSafeInteger(n) && n >= 0,
|
|
|
|
requirementDescription: 'must be a positive integer',
|
|
|
|
parse: toNumberOrNull,
|
|
|
|
},
|
|
|
|
};
|
|
|
|
|
|
|
|
const getDecimalPlaces = (val) => {
|
|
|
|
const splitDecimal = val.toString().split('.');
|
|
|
|
if (splitDecimal.length === 1) return 0;
|
|
|
|
return splitDecimal[1].length;
|
|
|
|
};
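// e.g. (illustrative) getDecimalPlaces(1.25) === 2 and getDecimalPlaces(3) === 0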
|
|
|
|
|
|
|
|
const typeConstraints$2 = [
|
|
|
|
makerule$1(async (val, opts) => val === null || opts.minValue === null || val >= opts.minValue,
|
|
|
|
(val, opts) => `value (${val.toString()}) must be greater than or equal to ${opts.minValue}`),
|
|
|
|
makerule$1(async (val, opts) => val === null || opts.maxValue === null || val <= opts.maxValue,
|
|
|
|
    (val, opts) => `value (${val.toString()}) must be less than or equal to ${opts.maxValue}`),
|
|
|
|
makerule$1(async (val, opts) => val === null || opts.decimalPlaces >= getDecimalPlaces(val),
|
|
|
|
(val, opts) => `value (${val.toString()}) must have ${opts.decimalPlaces} decimal places or less`),
|
|
|
|
];
|
|
|
|
|
|
|
|
var number = getDefaultExport(
|
|
|
|
'number',
|
|
|
|
numberTryParse,
|
|
|
|
numberFunctions,
|
|
|
|
options$2,
|
|
|
|
typeConstraints$2,
|
|
|
|
1,
|
|
|
|
num => num.toString(),
|
|
|
|
);
|
|
|
|
|
|
|
|
const dateFunctions = typeFunctions({
|
|
|
|
default: constant(null),
|
|
|
|
now: () => new Date(),
|
|
|
|
});
|
|
|
|
|
|
|
|
const isValidDate = d => d instanceof Date && !isNaN(d);
|
|
|
|
|
|
|
|
const parseStringToDate = s => switchCase(
|
|
|
|
[isValidDate, parsedSuccess],
|
|
|
|
[defaultCase, parsedFailed],
|
|
|
|
)(new Date(s));
|
|
|
|
|
|
|
|
|
|
|
|
const dateTryParse = switchCase(
|
|
|
|
[isDate, parsedSuccess],
|
|
|
|
[isString, parseStringToDate],
|
|
|
|
[isNull, parsedSuccess],
|
|
|
|
[defaultCase, parsedFailed],
|
|
|
|
);
|
|
|
|
|
|
|
|
const options$3 = {
|
|
|
|
maxValue: {
|
|
|
|
defaultValue: new Date(32503680000000),
|
|
|
|
isValid: isDate,
|
|
|
|
requirementDescription: 'must be a valid date',
|
|
|
|
parse: toDateOrNull,
|
|
|
|
},
|
|
|
|
minValue: {
|
|
|
|
defaultValue: new Date(-8520336000000),
|
|
|
|
isValid: isDate,
|
|
|
|
requirementDescription: 'must be a valid date',
|
|
|
|
parse: toDateOrNull,
|
|
|
|
},
|
|
|
|
};
|
|
|
|
|
|
|
|
const typeConstraints$3 = [
|
|
|
|
makerule$1(async (val, opts) => val === null || opts.minValue === null || val >= opts.minValue,
|
|
|
|
(val, opts) => `value (${val.toString()}) must be greater than or equal to ${opts.minValue}`),
|
|
|
|
makerule$1(async (val, opts) => val === null || opts.maxValue === null || val <= opts.maxValue,
|
|
|
|
    (val, opts) => `value (${val.toString()}) must be less than or equal to ${opts.maxValue}`),
|
|
|
|
];
|
|
|
|
|
|
|
|
var datetime = getDefaultExport(
|
|
|
|
'datetime',
|
|
|
|
dateTryParse,
|
|
|
|
dateFunctions,
|
|
|
|
options$3,
|
|
|
|
typeConstraints$3,
|
|
|
|
new Date(1984, 4, 1),
|
|
|
|
date => JSON.stringify(date).replace(new RegExp('"', 'g'), ''),
|
|
|
|
);
|
|
|
|
|
|
|
|
const arrayFunctions = () => typeFunctions({
|
|
|
|
default: constant([]),
|
|
|
|
});
|
|
|
|
|
|
|
|
const mapToParsedArrary = type => $$(
|
|
|
|
map(i => type.safeParseValue(i)),
|
|
|
|
parsedSuccess,
|
|
|
|
);
|
|
|
|
|
|
|
|
const arrayTryParse = type => switchCase(
|
|
|
|
[isArray$1, mapToParsedArrary(type)],
|
|
|
|
[defaultCase, parsedFailed],
|
|
|
|
);
|
|
|
|
|
|
|
|
const typeName = type => `array<${type}>`;
|
|
|
|
|
|
|
|
|
|
|
|
const options$4 = {
|
|
|
|
maxLength: {
|
|
|
|
defaultValue: 10000,
|
|
|
|
isValid: isSafeInteger,
|
|
|
|
requirementDescription: 'must be a positive integer',
|
|
|
|
parse: toNumberOrNull,
|
|
|
|
},
|
|
|
|
minLength: {
|
|
|
|
defaultValue: 0,
|
|
|
|
isValid: n => isSafeInteger(n) && n >= 0,
|
|
|
|
requirementDescription: 'must be a positive integer',
|
|
|
|
parse: toNumberOrNull,
|
|
|
|
},
|
|
|
|
};
|
|
|
|
|
|
|
|
const typeConstraints$4 = [
|
|
|
|
makerule$1(async (val, opts) => val === null || val.length >= opts.minLength,
|
|
|
|
(val, opts) => `must choose ${opts.minLength} or more options`),
|
|
|
|
makerule$1(async (val, opts) => val === null || val.length <= opts.maxLength,
|
|
|
|
(val, opts) => `cannot choose more than ${opts.maxLength} options`),
|
|
|
|
];
|
|
|
|
|
|
|
|
var array = type => getDefaultExport(
|
|
|
|
typeName(type.name),
|
|
|
|
arrayTryParse(type),
|
|
|
|
arrayFunctions(),
|
|
|
|
options$4,
|
|
|
|
typeConstraints$4,
|
|
|
|
[type.sampleValue],
|
|
|
|
JSON.stringify,
|
|
|
|
);
|
|
|
|
|
|
|
|
const referenceNothing = () => ({ key: '' });
|
|
|
|
|
|
|
|
const referenceFunctions = typeFunctions({
|
|
|
|
default: referenceNothing,
|
|
|
|
});
|
|
|
|
|
|
|
|
const hasStringValue = (ob, path) => has(path)(ob)
|
|
|
|
&& isString(ob[path]);
|
|
|
|
|
|
|
|
const isObjectWithKey = v => isObjectLike(v)
|
|
|
|
&& hasStringValue(v, 'key');
|
|
|
|
|
|
|
|
const tryParseFromString = s => {
|
|
|
|
|
|
|
|
try {
|
|
|
|
const asObj = JSON.parse(s);
|
|
|
|
    if (isObjectWithKey(asObj)) {
|
|
|
|
return parsedSuccess(asObj);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
catch(_) {
|
|
|
|
// EMPTY
|
|
|
|
}
|
|
|
|
|
|
|
|
return parsedFailed(s);
|
|
|
|
};
|
|
|
|
|
|
|
|
const referenceTryParse = v => switchCase(
|
|
|
|
[isObjectWithKey, parsedSuccess],
|
|
|
|
[isString, tryParseFromString],
|
|
|
|
[isNull, () => parsedSuccess(referenceNothing())],
|
|
|
|
[defaultCase, parsedFailed],
|
|
|
|
)(v);
|
|
|
|
|
|
|
|
const options$5 = {
|
|
|
|
indexNodeKey: {
|
|
|
|
defaultValue: null,
|
|
|
|
isValid: isNonEmptyString,
|
|
|
|
requirementDescription: 'must be a non-empty string',
|
|
|
|
parse: s => s,
|
|
|
|
},
|
|
|
|
displayValue: {
|
|
|
|
defaultValue: '',
|
|
|
|
isValid: isNonEmptyString,
|
|
|
|
requirementDescription: 'must be a non-empty string',
|
|
|
|
parse: s => s,
|
|
|
|
},
|
|
|
|
reverseIndexNodeKeys: {
|
|
|
|
defaultValue: null,
|
|
|
|
isValid: v => isArrayOfString(v) && v.length > 0,
|
|
|
|
requirementDescription: 'must be a non-empty array of strings',
|
|
|
|
parse: s => s,
|
|
|
|
},
|
|
|
|
};
|
|
|
|
|
|
|
|
const isEmptyString = s => isString(s) && isEmpty(s);
|
|
|
|
|
|
|
|
const ensureReferenceExists = async (val, opts, context) => isEmptyString(val.key)
|
|
|
|
|| await context.referenceExists(opts, val.key);
|
|
|
|
|
|
|
|
const typeConstraints$5 = [
|
|
|
|
makerule$1(
|
|
|
|
ensureReferenceExists,
|
|
|
|
(val, opts) => `"${val[opts.displayValue]}" does not exist in options list (key: ${val.key})`,
|
|
|
|
),
|
|
|
|
];
|
|
|
|
|
|
|
|
var reference = getDefaultExport(
|
|
|
|
'reference',
|
|
|
|
referenceTryParse,
|
|
|
|
referenceFunctions,
|
|
|
|
options$5,
|
|
|
|
typeConstraints$5,
|
|
|
|
{ key: 'key', value: 'value' },
|
|
|
|
JSON.stringify,
|
|
|
|
);
|
|
|
|
|
|
|
|
const illegalCharacters = '*?\\/:<>|\0\b\f\v';
|
|
|
|
|
|
|
|
const isLegalFilename = (filePath) => {
|
|
|
|
const fn = fileName(filePath);
|
|
|
|
return fn.length <= 255
|
|
|
|
&& intersection(fn.split(''))(illegalCharacters.split('')).length === 0
|
|
|
|
&& none(f => f === '..')(splitKey(filePath));
|
|
|
|
};
|
|
|
|
|
|
|
|
const fileNothing = () => ({ relativePath: '', size: 0 });
|
|
|
|
|
|
|
|
const fileFunctions = typeFunctions({
|
|
|
|
default: fileNothing,
|
|
|
|
});
|
|
|
|
|
|
|
|
const fileTryParse = v => switchCase(
|
|
|
|
[isValidFile, parsedSuccess],
|
|
|
|
[isNull, () => parsedSuccess(fileNothing())],
|
|
|
|
[defaultCase, parsedFailed],
|
|
|
|
)(v);
|
|
|
|
|
|
|
|
const fileName = filePath => $(filePath, [
|
|
|
|
splitKey,
|
|
|
|
last,
|
|
|
|
]);
|
|
|
|
|
|
|
|
const isValidFile = f => !isNull(f)
|
|
|
|
&& has('relativePath')(f) && has('size')(f)
|
|
|
|
&& isNumber(f.size)
|
|
|
|
&& isString(f.relativePath)
|
|
|
|
&& isLegalFilename(f.relativePath);
|
|
|
|
|
|
|
|
const options$6 = {};
|
|
|
|
|
|
|
|
const typeConstraints$6 = [];
|
|
|
|
|
|
|
|
var file = getDefaultExport(
|
|
|
|
'file',
|
|
|
|
fileTryParse,
|
|
|
|
fileFunctions,
|
|
|
|
options$6,
|
|
|
|
typeConstraints$6,
|
|
|
|
{ relativePath: 'some_file.jpg', size: 1000 },
|
|
|
|
JSON.stringify,
|
|
|
|
);
|
|
|
|
|
|
|
|
const allTypes = () => {
|
|
|
|
const basicTypes = {
|
|
|
|
string, number, datetime, bool, reference, file,
|
|
|
|
};
|
|
|
|
|
|
|
|
const arrays = $(basicTypes, [
|
|
|
|
keys,
|
|
|
|
map((k) => {
|
|
|
|
const kvType = {};
|
|
|
|
const concreteArray = array(basicTypes[k]);
|
|
|
|
kvType[concreteArray.name] = concreteArray;
|
|
|
|
return kvType;
|
|
|
|
}),
|
|
|
|
types => assign({}, ...types),
|
|
|
|
]);
|
|
|
|
|
|
|
|
return merge({}, basicTypes, arrays);
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
const all$1 = allTypes();
|
|
|
|
|
|
|
|
const getType = (typeName) => {
|
|
|
|
if (!has(typeName)(all$1)) throw new BadRequestError(`Do not recognise type ${typeName}`);
|
|
|
|
return all$1[typeName];
|
|
|
|
};
|
|
|
|
|
|
|
|
const getSampleFieldValue = field => getType(field.type).sampleValue;
|
|
|
|
|
|
|
|
const getNewFieldValue = field => getType(field.type).getNew(field);
|
|
|
|
|
|
|
|
const safeParseField = (field, record) => getType(field.type).safeParseField(field, record);
|
|
|
|
|
|
|
|
const validateFieldParse = (field, record) => (has(field.name)(record)
|
|
|
|
? getType(field.type).tryParse(record[field.name])
|
|
|
|
: parsedSuccess(undefined)); // fields may be undefined by default
|
|
|
|
|
|
|
|
const getDefaultOptions$1 = type => getType(type).getDefaultOptions();
|
|
|
|
|
|
|
|
const validateTypeConstraints$1 = async (field, record, context) => await getType(field.type).validateTypeConstraints(field, record, context);
|
|
|
|
|
|
|
|
const detectType = (value) => {
|
|
|
|
if (isString(value)) return string;
|
|
|
|
if (isBoolean(value)) return bool;
|
|
|
|
if (isNumber(value)) return number;
|
|
|
|
if (isDate(value)) return datetime;
|
|
|
|
if (isArray$1(value)) return array(detectType(value[0]));
|
|
|
|
if (isObject(value)
|
|
|
|
&& has('key')(value)
|
|
|
|
&& has('value')(value)) return reference;
|
|
|
|
if (isObject(value)
|
|
|
|
&& has('relativePath')(value)
|
|
|
|
&& has('size')(value)) return file;
|
|
|
|
|
|
|
|
throw new BadRequestError(`cannot determine type: ${JSON.stringify(value)}`);
|
|
|
|
};
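// e.g. (illustrative) detectType('hello') returns the string type, detectType(true) the bool type,
// and detectType([1, 2]) returns an array<number> type built from the first element.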
|
|
|
|
|
|
|
|
// 5 minutes
|
|
|
|
const tempCodeExpiryLength = 5 * 60 * 1000;
|
|
|
|
|
|
|
|
const AUTH_FOLDER = '/.auth';
|
|
|
|
const USERS_LIST_FILE = joinKey(AUTH_FOLDER, 'users.json');
|
|
|
|
const userAuthFile = username => joinKey(AUTH_FOLDER, `auth_${username}.json`);
|
|
|
|
const USERS_LOCK_FILE = joinKey(AUTH_FOLDER, 'users_lock');
|
|
|
|
const ACCESS_LEVELS_FILE = joinKey(AUTH_FOLDER, 'access_levels.json');
|
|
|
|
const ACCESS_LEVELS_LOCK_FILE = joinKey(AUTH_FOLDER, 'access_levels_lock');
|
|
|
|
|
|
|
|
const permissionTypes = {
|
|
|
|
CREATE_RECORD: 'create record',
|
|
|
|
UPDATE_RECORD: 'update record',
|
|
|
|
READ_RECORD: 'read record',
|
|
|
|
DELETE_RECORD: 'delete record',
|
|
|
|
READ_INDEX: 'read index',
|
|
|
|
MANAGE_INDEX: 'manage index',
|
|
|
|
MANAGE_COLLECTION: 'manage collection',
|
|
|
|
WRITE_TEMPLATES: 'write templates',
|
|
|
|
CREATE_USER: 'create user',
|
|
|
|
SET_PASSWORD: 'set password',
|
|
|
|
CREATE_TEMPORARY_ACCESS: 'create temporary access',
|
|
|
|
ENABLE_DISABLE_USER: 'enable or disable user',
|
|
|
|
WRITE_ACCESS_LEVELS: 'write access levels',
|
|
|
|
LIST_USERS: 'list users',
|
|
|
|
LIST_ACCESS_LEVELS: 'list access levels',
|
|
|
|
EXECUTE_ACTION: 'execute action',
|
|
|
|
SET_USER_ACCESS_LEVELS: 'set user access levels',
|
|
|
|
};
|
|
|
|
|
|
|
|
const getUserByName = (users, name) => $(users, [
|
|
|
|
find(u => u.name.toLowerCase() === name.toLowerCase()),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const stripUserOfSensitiveStuff = (user) => {
|
|
|
|
const stripped = clone(user);
|
|
|
|
delete stripped.tempCode;
|
|
|
|
return stripped;
|
|
|
|
};
|
|
|
|
|
|
|
|
const parseTemporaryCode = fullCode => $(fullCode, [
|
|
|
|
split(':'),
|
|
|
|
parts => ({
|
|
|
|
id: parts[1],
|
|
|
|
code: parts[2],
|
|
|
|
}),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const isAuthorized = app => (permissionType, resourceKey) => apiWrapperSync(
|
|
|
|
app,
|
|
|
|
events.authApi.isAuthorized,
|
|
|
|
alwaysAuthorized,
|
|
|
|
{ resourceKey, permissionType },
|
|
|
|
_isAuthorized, app, permissionType, resourceKey,
|
|
|
|
);
|
|
|
|
|
|
|
|
const _isAuthorized = (app, permissionType, resourceKey) => {
|
|
|
|
if (!app.user) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
const validType = $(permissionTypes, [
|
|
|
|
values,
|
|
|
|
includes(permissionType),
|
|
|
|
]);
|
|
|
|
|
|
|
|
if (!validType) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
const permMatchesResource = (userperm) => {
|
|
|
|
const nodeKey = isNothing(resourceKey)
|
|
|
|
? null
|
|
|
|
: isNode(app.hierarchy, resourceKey)
|
|
|
|
? getNodeByKeyOrNodeKey(
|
|
|
|
app.hierarchy, resourceKey,
|
|
|
|
).nodeKey()
|
|
|
|
: resourceKey;
|
|
|
|
|
|
|
|
return (userperm.type === permissionType)
|
|
|
|
&& (
|
|
|
|
isNothing(resourceKey)
|
|
|
|
|| nodeKey === userperm.nodeKey
|
|
|
|
);
|
|
|
|
};
|
|
|
|
|
|
|
|
return $(app.user.permissions, [
|
|
|
|
some(permMatchesResource),
|
|
|
|
]);
|
|
|
|
};
|
|
|
|
|
|
|
|
const nodePermission = type => ({
|
|
|
|
add: (nodeKey, accessLevel) => accessLevel.permissions.push({ type, nodeKey }),
|
|
|
|
isAuthorized: resourceKey => app => isAuthorized(app)(type, resourceKey),
|
|
|
|
isNode: true,
|
|
|
|
get: nodeKey => ({ type, nodeKey }),
|
|
|
|
});
|
|
|
|
|
|
|
|
const staticPermission = type => ({
|
|
|
|
add: accessLevel => accessLevel.permissions.push({ type }),
|
|
|
|
isAuthorized: app => isAuthorized(app)(type),
|
|
|
|
isNode: false,
|
|
|
|
get: () => ({ type }),
|
|
|
|
});
|
|
|
|
|
|
|
|
const createRecord = nodePermission(permissionTypes.CREATE_RECORD);
|
|
|
|
|
|
|
|
const updateRecord = nodePermission(permissionTypes.UPDATE_RECORD);
|
|
|
|
|
|
|
|
const deleteRecord = nodePermission(permissionTypes.DELETE_RECORD);
|
|
|
|
|
|
|
|
const readRecord = nodePermission(permissionTypes.READ_RECORD);
|
|
|
|
|
|
|
|
const writeTemplates = staticPermission(permissionTypes.WRITE_TEMPLATES);
|
|
|
|
|
|
|
|
const createUser = staticPermission(permissionTypes.CREATE_USER);
|
|
|
|
|
|
|
|
const setPassword = staticPermission(permissionTypes.SET_PASSWORD);
|
|
|
|
|
|
|
|
const readIndex = nodePermission(permissionTypes.READ_INDEX);
|
|
|
|
|
|
|
|
const manageIndex = staticPermission(permissionTypes.MANAGE_INDEX);
|
|
|
|
|
|
|
|
const manageCollection = staticPermission(permissionTypes.MANAGE_COLLECTION);
|
|
|
|
|
|
|
|
const createTemporaryAccess = staticPermission(permissionTypes.CREATE_TEMPORARY_ACCESS);
|
|
|
|
|
|
|
|
const enableDisableUser = staticPermission(permissionTypes.ENABLE_DISABLE_USER);
|
|
|
|
|
|
|
|
const writeAccessLevels = staticPermission(permissionTypes.WRITE_ACCESS_LEVELS);
|
|
|
|
|
|
|
|
const listUsers = staticPermission(permissionTypes.LIST_USERS);
|
|
|
|
|
|
|
|
const listAccessLevels = staticPermission(permissionTypes.LIST_ACCESS_LEVELS);
|
|
|
|
|
|
|
|
const setUserAccessLevels = staticPermission(permissionTypes.SET_USER_ACCESS_LEVELS);
|
|
|
|
|
|
|
|
const executeAction = nodePermission(permissionTypes.EXECUTE_ACTION);
|
|
|
|
|
|
|
|
const alwaysAuthorized = () => true;
|
|
|
|
|
|
|
|
const permission = {
|
|
|
|
createRecord,
|
|
|
|
updateRecord,
|
|
|
|
deleteRecord,
|
|
|
|
readRecord,
|
|
|
|
writeTemplates,
|
|
|
|
createUser,
|
|
|
|
setPassword,
|
|
|
|
readIndex,
|
|
|
|
createTemporaryAccess,
|
|
|
|
enableDisableUser,
|
|
|
|
writeAccessLevels,
|
|
|
|
listUsers,
|
|
|
|
listAccessLevels,
|
|
|
|
manageIndex,
|
|
|
|
manageCollection,
|
|
|
|
executeAction,
|
|
|
|
setUserAccessLevels,
|
|
|
|
};
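// Usage sketch (hypothetical node key and access level object):
//   permission.createRecord.add('/customers', accessLevel);           // pushes { type, nodeKey } onto accessLevel.permissions
//   permission.createRecord.isAuthorized('/customers/1234-abc')(app); // true if app.user holds that permission
//   permission.writeTemplates.isAuthorized(app);                      // static (non-node) permission check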
|
|
|
|
|
|
|
|
const getNew = app => (collectionKey, recordTypeName) => {
|
|
|
|
const recordNode = getRecordNode(app, collectionKey);
|
|
|
|
  collectionKey = safeKey(collectionKey);
|
|
|
|
return apiWrapperSync(
|
|
|
|
app,
|
|
|
|
events.recordApi.getNew,
|
|
|
|
permission.createRecord.isAuthorized(recordNode.nodeKey()),
|
|
|
|
{ collectionKey, recordTypeName },
|
|
|
|
_getNew, recordNode, collectionKey,
|
|
|
|
);
|
|
|
|
};
|
|
|
|
|
|
|
|
const _getNew = (recordNode, collectionKey) => constructRecord(recordNode, getNewFieldValue, collectionKey);
|
|
|
|
|
|
|
|
const getRecordNode = (app, collectionKey) => {
|
|
|
|
collectionKey = safeKey(collectionKey);
|
|
|
|
return getNodeForCollectionPath(app.hierarchy)(collectionKey);
|
|
|
|
};
|
|
|
|
|
|
|
|
const getNewChild = app => (recordKey, collectionName, recordTypeName) =>
|
|
|
|
getNew(app)(joinKey(recordKey, collectionName), recordTypeName);
|
|
|
|
|
|
|
|
const constructRecord = (recordNode, getFieldValue, collectionKey) => {
|
|
|
|
const record = $(recordNode.fields, [
|
|
|
|
keyBy('name'),
|
|
|
|
mapValues(getFieldValue),
|
|
|
|
]);
|
|
|
|
|
|
|
|
record.id = `${recordNode.nodeId}-${generate()}`;
|
|
|
|
record.key = isSingleRecord(recordNode)
|
|
|
|
? joinKey(collectionKey, recordNode.name)
|
|
|
|
: joinKey(collectionKey, record.id);
|
|
|
|
record.isNew = true;
|
|
|
|
record.type = recordNode.name;
|
|
|
|
return record;
|
|
|
|
};
|
|
|
|
|
|
|
|
const allIdChars = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_-';
|
|
|
|
|
|
|
|
// this should never be changed - ever
|
|
|
|
// - existing databases depend on the order of chars in this string
|
|
|
|
|
|
|
|
/**
|
|
|
|
* folderStructureArray should return an array like
|
|
|
|
* - [1] = all records fit into one folder
|
|
|
|
 * - [2] = all records fit into 2 folders
|
|
|
|
* - [64, 3] = all records fit into 64 * 3 folders
|
|
|
|
 * - [64, 64, 10] = all records fit into 64 * 64 * 10 folders
|
|
|
|
 * (there are 64 possible chars in allIdChars)
|
|
|
|
*/
|
|
|
|
const folderStructureArray = (recordNode) => {
|
|
|
|
|
|
|
|
const totalFolders = Math.ceil(recordNode.estimatedRecordCount / 1000);
|
|
|
|
const folderArray = [];
|
|
|
|
let levelCount = 1;
|
|
|
|
while(64**levelCount < totalFolders) {
|
|
|
|
levelCount += 1;
|
|
|
|
folderArray.push(64);
|
|
|
|
}
|
|
|
|
|
|
|
|
const parentFactor = (64**folderArray.length);
|
|
|
|
if(parentFactor < totalFolders) {
|
|
|
|
folderArray.push(
|
|
|
|
Math.ceil(totalFolders / parentFactor)
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
return folderArray;
|
|
|
|
|
|
|
|
/*
|
|
|
|
const maxRecords = currentFolderPosition === 0
|
|
|
|
? RECORDS_PER_FOLDER
|
|
|
|
: currentFolderPosition * 64 * RECORDS_PER_FOLDER;
|
|
|
|
|
|
|
|
if(maxRecords < recordNode.estimatedRecordCount) {
|
|
|
|
return folderStructureArray(
|
|
|
|
recordNode,
|
|
|
|
[...currentArray, 64],
|
|
|
|
currentFolderPosition + 1);
|
|
|
|
} else {
|
|
|
|
const childFolderCount = Math.ceil(recordNode.estimatedRecordCount / maxRecords );
|
|
|
|
return [...currentArray, childFolderCount]
|
|
|
|
}*/
|
|
|
|
};
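// Illustrative sketch (not part of the original bundle): worked examples of
// folderStructureArray. The bare objects stand in for full record nodes, which
// carry more properties than estimatedRecordCount. With 200000 records,
// totalFolders = ceil(200000 / 1000) = 200; one level of 64 is pushed
// (64 < 200), and the remainder ceil(200 / 64) = 4 becomes the last level.
// Defined but never invoked.
const exampleFolderStructures = () => ({
  large: folderStructureArray({ estimatedRecordCount: 200000 }), // [64, 4]
  medium: folderStructureArray({ estimatedRecordCount: 5000 }),  // [5]
  small: folderStructureArray({ estimatedRecordCount: 500 }),    // []
});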
|
|
|
|
|
|
|
|
|
|
|
|
const getAllIdsIterator = app => async (collection_Key_or_NodeKey) => {
|
|
|
|
collection_Key_or_NodeKey = safeKey(collection_Key_or_NodeKey);
|
|
|
|
const recordNode = getCollectionNodeByKeyOrNodeKey(
|
|
|
|
app.hierarchy,
|
|
|
|
collection_Key_or_NodeKey,
|
|
|
|
);
|
|
|
|
|
|
|
|
const getAllIdsIteratorForCollectionKey = async (recordNode, collectionKey) => {
|
|
|
|
|
|
|
|
const folderStructure = folderStructureArray(recordNode);
|
|
|
|
|
|
|
|
let currentFolderContents = [];
|
|
|
|
let currentPosition = [];
|
|
|
|
|
|
|
|
const collectionDir = getCollectionDir(app.hierarchy, collectionKey);
|
|
|
|
const basePath = joinKey(
|
|
|
|
collectionDir, recordNode.nodeId.toString());
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// "folderStructure" determines the top, sharding folders
|
|
|
|
// we need to add one, for the collection root folder, which
|
|
|
|
// always exists
|
|
|
|
const levels = folderStructure.length + 1;
|
|
|
|
const topLevel = levels -1;
|
|
|
|
|
|
|
|
|
|
|
|
/* populate initial directory structure in form:
|
|
|
|
[
|
|
|
|
{path: "/a", contents: ["b", "c", "d"]},
|
|
|
|
{path: "/a/b", contents: ["e","f","g"]},
|
|
|
|
{path: "/a/b/e", contents: ["1-abcd","2-cdef","3-efgh"]},
|
|
|
|
]
|
|
|
|
// stores contents on each parent level
|
|
|
|
// top level has ID folders
|
|
|
|
*/
|
|
|
|
const firstFolder = async () => {
|
|
|
|
|
|
|
|
let folderLevel = 0;
|
|
|
|
|
|
|
|
const lastPathHasContent = () =>
|
|
|
|
folderLevel === 0
|
|
|
|
|| currentFolderContents[folderLevel - 1].contents.length > 0;
|
|
|
|
|
|
|
|
|
|
|
|
while (folderLevel <= topLevel && lastPathHasContent()) {
|
|
|
|
|
|
|
|
let thisPath = basePath;
|
|
|
|
for(let lev = 0; lev < currentPosition.length; lev++) {
|
|
|
|
thisPath = joinKey(
|
|
|
|
thisPath, currentFolderContents[lev].contents[0]);
|
|
|
|
}
|
|
|
|
|
|
|
|
const contentsThisLevel =
|
|
|
|
await app.datastore.getFolderContents(thisPath);
|
|
|
|
currentFolderContents.push({
|
|
|
|
contents:contentsThisLevel,
|
|
|
|
path: thisPath
|
|
|
|
});
|
|
|
|
|
|
|
|
// should start as something like [0,0]
|
|
|
|
if(folderLevel < topLevel)
|
|
|
|
currentPosition.push(0);
|
|
|
|
|
|
|
|
folderLevel+=1;
|
|
|
|
}
|
|
|
|
|
|
|
|
return (currentPosition.length === levels - 1);
|
|
|
|
};
|
|
|
|
|
|
|
|
const isOnLastFolder = level => {
|
|
|
|
|
|
|
|
const result = currentPosition[level] === currentFolderContents[level].contents.length - 1;
|
|
|
|
return result;
|
|
|
|
};
|
|
|
|
|
|
|
|
const getNextFolder = async (lev=undefined) => {
|
|
|
|
lev = isUndefined(lev) ? topLevel : lev;
|
|
|
|
const parentLev = lev - 1;
|
|
|
|
|
|
|
|
if(parentLev < 0) return false;
|
|
|
|
|
|
|
|
if(isOnLastFolder(parentLev)) {
|
|
|
|
return await getNextFolder(parentLev);
|
|
|
|
}
|
|
|
|
|
|
|
|
const newPosition = currentPosition[parentLev] + 1;
|
|
|
|
currentPosition[parentLev] = newPosition;
|
|
|
|
|
|
|
|
const nextFolder = joinKey(
|
|
|
|
currentFolderContents[parentLev].path,
|
|
|
|
currentFolderContents[parentLev].contents[newPosition]);
|
|
|
|
currentFolderContents[lev].contents = await app.datastore.getFolderContents(
|
|
|
|
nextFolder
|
|
|
|
);
|
|
|
|
currentFolderContents[lev].path = nextFolder;
|
|
|
|
|
|
|
|
if(lev !== topLevel) {
|
|
|
|
|
|
|
|
// we just advanced a parent folder, so now need to
|
|
|
|
// do the same to the next levels
|
|
|
|
let loopLevel = lev + 1;
|
|
|
|
while(loopLevel <= topLevel) {
|
|
|
|
const loopParentLevel = loopLevel-1;
|
|
|
|
|
|
|
|
currentPosition[loopParentLevel] = 0;
|
|
|
|
const nextLoopFolder = joinKey(
|
|
|
|
currentFolderContents[loopParentLevel].path,
|
|
|
|
currentFolderContents[loopParentLevel].contents[0]);
|
|
|
|
currentFolderContents[loopLevel].contents = await app.datastore.getFolderContents(
|
|
|
|
nextLoopFolder
|
|
|
|
);
|
|
|
|
currentFolderContents[loopLevel].path = nextLoopFolder;
|
|
|
|
loopLevel+=1;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// true == has more ids (just loaded more)
|
|
|
|
return true;
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
const idsCurrentFolder = () =>
|
|
|
|
currentFolderContents[currentFolderContents.length - 1].contents;
|
|
|
|
|
|
|
|
const finishedResult = ({ done: true, result: { ids: [], collectionKey } });
|
|
|
|
|
|
|
|
let hasStarted = false;
|
|
|
|
let hasMore = true;
|
|
|
|
const getIdsFromCurrentFolder = async () => {
|
|
|
|
|
|
|
|
if(!hasMore) {
|
|
|
|
return finishedResult;
|
|
|
|
}
|
|
|
|
|
|
|
|
if(!hasStarted) {
|
|
|
|
hasMore = await firstFolder();
|
|
|
|
hasStarted = true;
|
|
|
|
return ({
|
|
|
|
result: {
|
|
|
|
ids: idsCurrentFolder(),
|
|
|
|
collectionKey
|
|
|
|
},
|
|
|
|
done: false
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
hasMore = await getNextFolder();
|
|
|
|
|
|
|
|
return ({
|
|
|
|
result: {
|
|
|
|
ids: hasMore ? idsCurrentFolder() : [],
|
|
|
|
collectionKey
|
|
|
|
},
|
|
|
|
done: !hasMore
|
|
|
|
});
|
|
|
|
};
|
|
|
|
|
|
|
|
return getIdsFromCurrentFolder;
|
|
|
|
|
|
|
|
};
|
|
|
|
|
|
|
|
const ancestors = $(getFlattenedHierarchy(app.hierarchy), [
|
|
|
|
filter(isCollectionRecord),
|
|
|
|
filter(n => isAncestor(recordNode)(n)
|
|
|
|
|| n.nodeKey() === recordNode.nodeKey()),
|
|
|
|
orderBy([n => n.nodeKey().length], ['asc']),
|
|
|
|
]); // parents first
|
|
|
|
|
|
|
|
const traverseForIterators = async (parentRecordKey = '', currentNodeIndex = 0) => {
|
|
|
|
const currentNode = ancestors[currentNodeIndex];
|
|
|
|
const currentCollectionKey = joinKey(
|
|
|
|
parentRecordKey,
|
|
|
|
currentNode.collectionName,
|
|
|
|
);
|
|
|
|
if (currentNode.nodeKey() === recordNode.nodeKey()) {
|
|
|
|
return [
|
|
|
|
await getAllIdsIteratorForCollectionKey(
|
|
|
|
currentNode,
|
|
|
|
currentCollectionKey,
|
|
|
|
)];
|
|
|
|
}
|
|
|
|
const allIterators = [];
|
|
|
|
const currentIterator = await getAllIdsIteratorForCollectionKey(
|
|
|
|
currentNode,
|
|
|
|
currentCollectionKey,
|
|
|
|
);
|
|
|
|
|
|
|
|
let ids = await currentIterator();
|
|
|
|
while (ids.done === false) {
|
|
|
|
for (const id of ids.result.ids) {
|
|
|
|
allIterators.push(
|
|
|
|
await traverseForIterators(
|
|
|
|
joinKey(currentCollectionKey, id),
|
|
|
|
currentNodeIndex + 1,
|
|
|
|
),
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
ids = await currentIterator();
|
|
|
|
}
|
|
|
|
|
|
|
|
return flatten(allIterators);
|
|
|
|
};
|
|
|
|
|
|
|
|
const iteratorsArray = await traverseForIterators();
|
|
|
|
let currentIteratorIndex = 0;
|
|
|
|
return async () => {
|
|
|
|
if (iteratorsArray.length === 0) { return { done: true, result: [] }; }
|
|
|
|
const innerResult = await iteratorsArray[currentIteratorIndex]();
|
|
|
|
if (!innerResult.done) { return innerResult; }
|
|
|
|
if (currentIteratorIndex === iteratorsArray.length - 1) {
|
|
|
|
return { done: true, result: innerResult.result };
|
|
|
|
}
|
|
|
|
currentIteratorIndex++;
|
|
|
|
return { done: false, result: innerResult.result };
|
|
|
|
};
|
|
|
|
};
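// Illustrative sketch (not part of the original bundle): consuming the iterator
// returned by getAllIdsIterator. Each call yields one folder's worth of ids as
// { result: { ids, collectionKey }, done } until done is true. Defined but
// never invoked.
const exampleIterateAllIds = async (app, collectionKey) => {
  const nextIds = await getAllIdsIterator(app)(collectionKey);
  const allIds = [];
  let page = await nextIds();
  while (!page.done) {
    allIds.push(...page.result.ids);
    page = await nextIds();
  }
  return allIds;
};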
|
|
|
|
|
|
|
|
const getRecordInfo = (hierarchy, key) => {
|
|
|
|
const recordNode = getExactNodeForKey(hierarchy)(key);
|
|
|
|
const pathInfo = getRecordDirectory(recordNode, key);
|
|
|
|
const dir = joinKey(pathInfo.base, ...pathInfo.subdirs);
|
|
|
|
|
|
|
|
return {
|
|
|
|
recordJson: recordJson(dir),
|
|
|
|
files: files(dir),
|
|
|
|
child:(name) => joinKey(dir, name),
|
|
|
|
key: safeKey(key),
|
|
|
|
recordNode, pathInfo, dir
|
|
|
|
};
|
|
|
|
};
|
|
|
|
|
|
|
|
const getCollectionDir = (hierarchy, collectionKey) => {
|
|
|
|
const recordNode = getNodeForCollectionPath(hierarchy)(collectionKey);
|
|
|
|
const dummyRecordKey = joinKey(collectionKey, "1-abcd");
|
|
|
|
const pathInfo = getRecordDirectory(recordNode, dummyRecordKey);
|
|
|
|
return pathInfo.base;
|
|
|
|
};
|
|
|
|
|
|
|
|
const recordJson = (dir) =>
|
|
|
|
joinKey(dir, "record.json");
|
|
|
|
|
|
|
|
const files = (dir) =>
|
|
|
|
joinKey(dir, "files");
|
|
|
|
|
|
|
|
const getRecordDirectory = (recordNode, key) => {
|
|
|
|
const id = getFileFromKey(key);
|
|
|
|
|
|
|
|
const traverseParentKeys = (n, parents=[]) => {
|
|
|
|
if(isRoot(n)) return parents;
|
|
|
|
const k = getActualKeyOfParent(n.nodeKey(), key);
|
|
|
|
const thisNodeDir = {
|
|
|
|
node:n,
|
|
|
|
relativeDir: joinKey(
|
|
|
|
recordRelativeDirectory(n, getFileFromKey(k)))
|
|
|
|
};
|
|
|
|
return traverseParentKeys(
|
|
|
|
n.parent(),
|
|
|
|
[thisNodeDir, ...parents]);
|
|
|
|
};
|
|
|
|
|
|
|
|
const parentDirs = $(recordNode.parent(), [
|
|
|
|
traverseParentKeys,
|
|
|
|
reduce((key, item) => {
|
|
|
|
return joinKey(key, item.node.collectionName, item.relativeDir)
|
|
|
|
}, keySep)
|
|
|
|
]);
|
|
|
|
|
|
|
|
const subdirs = isSingleRecord(recordNode)
|
|
|
|
? []
|
|
|
|
: recordRelativeDirectory(recordNode, id);
|
|
|
|
const base = isSingleRecord(recordNode)
|
|
|
|
? joinKey(parentDirs, recordNode.name)
|
|
|
|
: joinKey(parentDirs, recordNode.collectionName);
|
|
|
|
|
|
|
|
return ({
|
|
|
|
subdirs, base
|
|
|
|
});
|
|
|
|
};
|
|
|
|
|
|
|
|
const recordRelativeDirectory = (recordNode, id) => {
|
|
|
|
const folderStructure = folderStructureArray(recordNode);
|
|
|
|
const strippedId = id.substring(recordNode.nodeId.toString().length + 1);
|
|
|
|
const subfolders = $(folderStructure, [
|
|
|
|
reduce((result, currentCount) => {
|
|
|
|
result.folders.push(
|
|
|
|
folderForChar(strippedId[result.level], currentCount)
|
|
|
|
);
|
|
|
|
return {level:result.level+1, folders:result.folders};
|
|
|
|
}, {level:0, folders:[]}),
|
|
|
|
f => f.folders,
|
|
|
|
filter(f => !!f)
|
|
|
|
]);
|
|
|
|
|
|
|
|
return [recordNode.nodeId.toString(), ...subfolders, id]
|
|
|
|
};
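// Illustrative sketch (not part of the original bundle): for a hypothetical
// record node with nodeId 3 and folder structure [64, 4], an id such as
// '3-ab1Xz' strips to 'ab1Xz' and shards on its first two characters:
// level 0 (64 folders, one char each)  -> 'a'
// level 1 (4 folders of 16 chars each) -> '0123456789abcdef' (contains 'b')
// giving ['3', 'a', '0123456789abcdef', '3-ab1Xz']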
|
|
|
|
|
|
|
|
const folderForChar = (char, folderCount) =>
|
|
|
|
folderCount === 1 ? ""
|
|
|
|
: $(folderCount, [
|
|
|
|
idFoldersForFolderCount,
|
|
|
|
find(f => f.includes(char))
|
|
|
|
]);
|
|
|
|
|
|
|
|
const idFoldersForFolderCount = (folderCount) => {
|
|
|
|
const charRangePerShard = 64 / folderCount;
|
|
|
|
const idFolders = [];
|
|
|
|
let index = 0;
|
|
|
|
let currentIdsShard = '';
|
|
|
|
while (index < 64) {
|
|
|
|
currentIdsShard += allIdChars[index];
|
|
|
|
if ((index + 1) % charRangePerShard === 0) {
|
|
|
|
idFolders.push(currentIdsShard);
|
|
|
|
currentIdsShard = '';
|
|
|
|
}
|
|
|
|
index++;
|
|
|
|
}
|
|
|
|
|
|
|
|
return idFolders;
|
|
|
|
};
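// Illustrative sketch (not part of the original bundle): the shards produced for
// a few folder counts. Each shard is the contiguous run of allIdChars it covers,
// so folderForChar picks a record's folder by finding the shard that contains
// the relevant id character.
// idFoldersForFolderCount(4)  -> ['0123456789abcdef', 'ghijklmnopqrstuv',
//                                 'wxyzABCDEFGHIJKL', 'MNOPQRSTUVWXYZ_-']
// idFoldersForFolderCount(64) -> one single-character shard per id char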
|
|
|
|
|
|
|
|
const load = app => async key => {
|
|
|
|
key = safeKey(key);
|
|
|
|
return apiWrapper(
|
|
|
|
app,
|
|
|
|
events.recordApi.load,
|
|
|
|
permission.readRecord.isAuthorized(key),
|
|
|
|
{ key },
|
|
|
|
_load, app, key,
|
|
|
|
);
|
|
|
|
};
|
|
|
|
|
|
|
|
const _loadFromInfo = async (app, recordInfo, keyStack = []) => {
|
|
|
|
const key = recordInfo.key;
|
|
|
|
const {recordNode, recordJson} = recordInfo;
|
|
|
|
const storedData = await app.datastore.loadJson(recordJson);
|
|
|
|
|
|
|
|
const loadedRecord = $(recordNode.fields, [
|
|
|
|
keyBy('name'),
|
|
|
|
mapValues(f => safeParseField(f, storedData)),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const newKeyStack = [...keyStack, key];
|
|
|
|
|
|
|
|
const references = $(recordNode.fields, [
|
|
|
|
filter(f => f.type === 'reference'
|
|
|
|
&& isNonEmptyString(loadedRecord[f.name].key)
|
|
|
|
&& !includes(loadedRecord[f.name].key)(newKeyStack)),
|
|
|
|
map(f => ({
|
|
|
|
promise: _load(app, loadedRecord[f.name].key, newKeyStack),
|
|
|
|
index: getNode(app.hierarchy, f.typeOptions.indexNodeKey),
|
|
|
|
field: f,
|
|
|
|
})),
|
|
|
|
]);
|
|
|
|
|
|
|
|
if (references.length > 0) {
|
|
|
|
const refRecords = await Promise.all(
|
|
|
|
map(p => p.promise)(references),
|
|
|
|
);
|
|
|
|
|
|
|
|
for (const ref of references) {
|
|
|
|
loadedRecord[ref.field.name] = mapRecord(
|
|
|
|
refRecords[references.indexOf(ref)],
|
|
|
|
ref.index,
|
|
|
|
);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
loadedRecord.transactionId = storedData.transactionId;
|
|
|
|
loadedRecord.isNew = false;
|
|
|
|
loadedRecord.key = key;
|
|
|
|
loadedRecord.id = $(key, [splitKey, last]);
|
|
|
|
loadedRecord.type = recordNode.name;
|
|
|
|
return loadedRecord;
|
|
|
|
};
|
|
|
|
|
|
|
|
const _load = async (app, key, keyStack = []) =>
|
|
|
|
_loadFromInfo(
|
|
|
|
app,
|
|
|
|
getRecordInfo(app.hierarchy, key),
|
|
|
|
keyStack);
|
|
|
|
|
|
|
|
// adapted from https://github.com/dex4er/js-promise-readable
|
|
|
|
// thanks :)
|
|
|
|
|
|
|
|
const promiseReadableStream = stream => {
|
|
|
|
|
|
|
|
let _errored;
|
|
|
|
|
|
|
|
const _errorHandler = err => {
|
|
|
|
_errored = err;
|
|
|
|
};
|
|
|
|
|
|
|
|
stream.on("error", _errorHandler);
|
|
|
|
|
|
|
|
const read = (size) => {
|
|
|
|
|
|
|
|
return new Promise((resolve, reject) => {
|
|
|
|
if (_errored) {
|
|
|
|
const err = _errored;
|
|
|
|
_errored = undefined;
|
|
|
|
return reject(err)
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!stream.readable || stream.closed || stream.destroyed) {
|
|
|
|
return resolve();
|
|
|
|
}
|
|
|
|
|
|
|
|
const readableHandler = () => {
|
|
|
|
const chunk = stream.read(size);
|
|
|
|
|
|
|
|
if (chunk) {
|
|
|
|
removeListeners();
|
|
|
|
resolve(chunk);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const closeHandler = () => {
|
|
|
|
removeListeners();
|
|
|
|
resolve();
|
|
|
|
};
|
|
|
|
|
|
|
|
const endHandler = () => {
|
|
|
|
removeListeners();
|
|
|
|
resolve();
|
|
|
|
};
|
|
|
|
|
|
|
|
const errorHandler = (err) => {
|
|
|
|
_errored = undefined;
|
|
|
|
removeListeners();
|
|
|
|
reject(err);
|
|
|
|
};
|
|
|
|
|
|
|
|
const removeListeners = () => {
|
|
|
|
stream.removeListener("close", closeHandler);
|
|
|
|
stream.removeListener("error", errorHandler);
|
|
|
|
stream.removeListener("end", endHandler);
|
|
|
|
stream.removeListener("readable", readableHandler);
|
|
|
|
};
|
|
|
|
|
|
|
|
stream.on("close", closeHandler);
|
|
|
|
stream.on("end", endHandler);
|
|
|
|
stream.on("error", errorHandler);
|
|
|
|
stream.on("readable", readableHandler);
|
|
|
|
|
|
|
|
readableHandler();
|
|
|
|
});
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
const destroy = () => {
|
|
|
|
if (stream) {
|
|
|
|
if (_errorHandler) {
|
|
|
|
stream.removeListener("error", _errorHandler);
|
|
|
|
}
|
|
|
|
if (typeof stream.destroy === "function") {
|
|
|
|
stream.destroy();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
return {read, destroy, stream};
|
|
|
|
};
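// Illustrative sketch (not part of the original bundle): draining a node
// readable stream with the promise wrapper above. readableStream is any
// stream.Readable; read() resolves with the next chunk, or undefined once the
// stream ends or closes. Defined but never invoked.
const exampleReadAll = async readableStream => {
  const { read, destroy } = promiseReadableStream(readableStream);
  const chunks = [];
  let chunk = await read();
  while (chunk) {
    chunks.push(chunk);
    chunk = await read();
  }
  destroy();
  return chunks;
};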
|
|
|
|
|
|
|
|
const getIndexedDataKey = (indexNode, indexDir, record) => {
|
|
|
|
|
|
|
|
const getShardName = (indexNode, record) => {
|
|
|
|
const shardNameFunc = compileCode$1(indexNode.getShardName);
|
|
|
|
try {
|
|
|
|
return shardNameFunc({ record });
|
|
|
|
} catch(e) {
|
|
|
|
const errorDetails = `shardCode: ${indexNode.getShardName} :: record: ${JSON.stringify(record)} :: `;
|
|
|
|
e.message = "Error running index shardname func: " + errorDetails + e.message;
|
|
|
|
throw e;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const shardName = isNonEmptyString(indexNode.getShardName)
|
|
|
|
? `${getShardName(indexNode, record)}.csv`
|
|
|
|
: 'index.csv';
|
|
|
|
|
|
|
|
return joinKey(indexDir, shardName);
|
|
|
|
};
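// Illustrative sketch (not part of the original bundle): a hypothetical sharded
// index whose getShardName code is "return record.surname.substring(0, 1);"
// would resolve a record with surname 'Smith' to joinKey(indexDir, 'S.csv'),
// while an index with no getShardName always resolves to
// joinKey(indexDir, 'index.csv').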
|
|
|
|
|
|
|
|
const getShardKeysInRange = async (app, indexNode, indexDir, startRecord = null, endRecord = null) => {
|
|
|
|
const startShardName = !startRecord
|
|
|
|
? null
|
|
|
|
: shardNameFromKey(
|
|
|
|
getIndexedDataKey(
|
|
|
|
indexNode,
|
|
|
|
indexDir,
|
|
|
|
startRecord,
|
|
|
|
),
|
|
|
|
);
|
|
|
|
|
|
|
|
const endShardName = !endRecord
|
|
|
|
? null
|
|
|
|
: shardNameFromKey(
|
|
|
|
getIndexedDataKey(
|
|
|
|
indexNode,
|
|
|
|
indexDir,
|
|
|
|
endRecord,
|
|
|
|
),
|
|
|
|
);
|
|
|
|
|
|
|
|
return $(await getShardMap(app.datastore, indexDir), [
|
|
|
|
filter(k => (startRecord === null || k >= startShardName)
|
|
|
|
&& (endRecord === null || k <= endShardName)),
|
|
|
|
map(k => joinKey(indexDir, `${k}.csv`)),
|
|
|
|
]);
|
|
|
|
};
|
|
|
|
|
|
|
|
const ensureShardNameIsInShardMap = async (store, indexDir, indexedDataKey) => {
|
|
|
|
const map = await getShardMap(store, indexDir);
|
|
|
|
const shardName = shardNameFromKey(indexedDataKey);
|
|
|
|
if (!includes(shardName)(map)) {
|
|
|
|
map.push(shardName);
|
|
|
|
await writeShardMap(store, indexDir, map);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const getShardMap = async (datastore, indexDir) => {
|
|
|
|
const shardMapKey = getShardMapKey(indexDir);
|
|
|
|
try {
|
|
|
|
return await datastore.loadJson(shardMapKey);
|
|
|
|
} catch (_) {
|
|
|
|
await datastore.createJson(shardMapKey, []);
|
|
|
|
return [];
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const writeShardMap = async (datastore, indexDir, shardMap) => await datastore.updateJson(
|
|
|
|
getShardMapKey(indexDir),
|
|
|
|
shardMap,
|
|
|
|
);
|
|
|
|
|
|
|
|
const getShardMapKey = indexDir => joinKey(indexDir, 'shardMap.json');
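// Illustrative sketch (not part of the original bundle): shardMap.json simply
// lists the shard names (without the .csv extension) that exist for an index,
// e.g. ["A", "B", "S"] for the hypothetical surname-sharded index above.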
|
|
|
|
|
|
|
|
const getUnshardedIndexDataKey = indexDir => joinKey(indexDir, 'index.csv');
|
|
|
|
|
|
|
|
const createIndexFile = async (datastore, indexedDataKey, index) => {
|
|
|
|
if (isShardedIndex(index)) {
|
|
|
|
const indexDir = getParentKey(indexedDataKey);
|
|
|
|
const shardMap = await getShardMap(datastore, indexDir);
|
|
|
|
shardMap.push(
|
|
|
|
shardNameFromKey(indexedDataKey),
|
|
|
|
);
|
|
|
|
await writeShardMap(datastore, indexDir, shardMap);
|
|
|
|
}
|
|
|
|
await datastore.createFile(indexedDataKey, '');
|
|
|
|
};
|
|
|
|
|
|
|
|
const shardNameFromKey = key => $(key, [
|
|
|
|
splitKey,
|
|
|
|
last,
|
|
|
|
]).replace('.csv', '');
|
|
|
|
|
|
|
|
const getIndexKey_BasedOnDecendant = (decendantKey, indexNode) => {
|
|
|
|
if (isGlobalIndex(indexNode)) { return `${indexNode.nodeKey()}`; }
|
|
|
|
|
|
|
|
const indexedDataParentKey = getActualKeyOfParent(
|
|
|
|
indexNode.parent().nodeKey(),
|
|
|
|
decendantKey,
|
|
|
|
);
|
|
|
|
|
|
|
|
return joinKey(
|
|
|
|
indexedDataParentKey,
|
|
|
|
indexNode.name,
|
|
|
|
);
|
|
|
|
};
|
|
|
|
|
|
|
|
const generateSchema = (hierarchy, indexNode) => {
|
|
|
|
const recordNodes = getAllowedRecordNodesForIndex(hierarchy, indexNode);
|
|
|
|
const mappedRecords = $(recordNodes, [
|
|
|
|
map(n => mapRecord(createSampleRecord(n), indexNode)),
|
|
|
|
]);
|
|
|
|
|
|
|
|
// always has record key and sort key
|
|
|
|
const schema = {
|
|
|
|
sortKey: all$1.string,
|
|
|
|
key: all$1.string,
|
|
|
|
};
|
|
|
|
|
|
|
|
const fieldsHas = has(schema);
|
|
|
|
const setField = (fieldName, value) => {
|
|
|
|
if (value === null || value === undefined) { return; }
|
|
|
|
|
|
|
|
const thisType = detectType(value);
|
|
|
|
if (fieldsHas(fieldName)) {
|
|
|
|
if (schema[fieldName] !== thisType) {
|
|
|
|
schema[fieldName] = all$1.string;
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
schema[fieldName] = thisType;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
for (const mappedRec of mappedRecords) {
|
|
|
|
for (const f in mappedRec) {
|
|
|
|
setField(f, mappedRec[f]);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// returning an array of {name, type}
|
|
|
|
return $(schema, [
|
|
|
|
keys,
|
|
|
|
map(k => ({ name: k, type: schema[k].name })),
|
|
|
|
filter(s => s.name !== 'sortKey'),
|
|
|
|
orderBy('name', ['desc']), // reverse alpha
|
|
|
|
concat([{ name: 'sortKey', type: all$1.string.name }]), // sortKey on end
|
|
|
|
reverse, // sortKey first, then rest are alphabetical
|
|
|
|
]);
|
|
|
|
};
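// Illustrative sketch (not part of the original bundle): for a hypothetical
// index whose mapped records expose 'name' and 'age' fields, and assuming the
// type name constants resolve to 'string' and 'number', generateSchema returns
// sortKey first and the remaining columns alphabetically, e.g.
// [ { name: 'sortKey', type: 'string' },
//   { name: 'age',     type: 'number' },
//   { name: 'key',     type: 'string' },
//   { name: 'name',    type: 'string' } ]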
|
|
|
|
|
|
|
|
const createSampleRecord = recordNode => constructRecord(
|
|
|
|
recordNode,
|
|
|
|
getSampleFieldValue,
|
|
|
|
recordNode.parent().nodeKey(),
|
|
|
|
);
|
|
|
|
|
|
|
|
var global$1 = (typeof global !== "undefined" ? global :
|
|
|
|
typeof self !== "undefined" ? self :
|
|
|
|
typeof window !== "undefined" ? window : {});
|
|
|
|
|
|
|
|
var lookup = [];
|
|
|
|
var revLookup = [];
|
|
|
|
var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array;
|
|
|
|
var inited = false;
|
|
|
|
function init () {
|
|
|
|
inited = true;
|
|
|
|
var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
|
|
|
|
for (var i = 0, len = code.length; i < len; ++i) {
|
|
|
|
lookup[i] = code[i];
|
|
|
|
revLookup[code.charCodeAt(i)] = i;
|
|
|
|
}
|
|
|
|
|
|
|
|
revLookup['-'.charCodeAt(0)] = 62;
|
|
|
|
revLookup['_'.charCodeAt(0)] = 63;
|
|
|
|
}
|
|
|
|
|
|
|
|
function toByteArray (b64) {
|
|
|
|
if (!inited) {
|
|
|
|
init();
|
|
|
|
}
|
|
|
|
var i, j, l, tmp, placeHolders, arr;
|
|
|
|
var len = b64.length;
|
|
|
|
|
|
|
|
if (len % 4 > 0) {
|
|
|
|
throw new Error('Invalid string. Length must be a multiple of 4')
|
|
|
|
}
|
|
|
|
|
|
|
|
// the number of equal signs (place holders)
|
|
|
|
// if there are two placeholders, then the two characters before it
|
|
|
|
// represent one byte
|
|
|
|
// if there is only one, then the three characters before it represent 2 bytes
|
|
|
|
// this is just a cheap hack to not do indexOf twice
|
|
|
|
placeHolders = b64[len - 2] === '=' ? 2 : b64[len - 1] === '=' ? 1 : 0;
|
|
|
|
|
|
|
|
// base64 is 4/3 + up to two characters of the original data
|
|
|
|
arr = new Arr(len * 3 / 4 - placeHolders);
|
|
|
|
|
|
|
|
// if there are placeholders, only get up to the last complete 4 chars
|
|
|
|
l = placeHolders > 0 ? len - 4 : len;
|
|
|
|
|
|
|
|
var L = 0;
|
|
|
|
|
|
|
|
for (i = 0, j = 0; i < l; i += 4, j += 3) {
|
|
|
|
tmp = (revLookup[b64.charCodeAt(i)] << 18) | (revLookup[b64.charCodeAt(i + 1)] << 12) | (revLookup[b64.charCodeAt(i + 2)] << 6) | revLookup[b64.charCodeAt(i + 3)];
|
|
|
|
arr[L++] = (tmp >> 16) & 0xFF;
|
|
|
|
arr[L++] = (tmp >> 8) & 0xFF;
|
|
|
|
arr[L++] = tmp & 0xFF;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (placeHolders === 2) {
|
|
|
|
tmp = (revLookup[b64.charCodeAt(i)] << 2) | (revLookup[b64.charCodeAt(i + 1)] >> 4);
|
|
|
|
arr[L++] = tmp & 0xFF;
|
|
|
|
} else if (placeHolders === 1) {
|
|
|
|
tmp = (revLookup[b64.charCodeAt(i)] << 10) | (revLookup[b64.charCodeAt(i + 1)] << 4) | (revLookup[b64.charCodeAt(i + 2)] >> 2);
|
|
|
|
arr[L++] = (tmp >> 8) & 0xFF;
|
|
|
|
arr[L++] = tmp & 0xFF;
|
|
|
|
}
|
|
|
|
|
|
|
|
return arr
|
|
|
|
}
|
|
|
|
|
|
|
|
function tripletToBase64 (num) {
|
|
|
|
return lookup[num >> 18 & 0x3F] + lookup[num >> 12 & 0x3F] + lookup[num >> 6 & 0x3F] + lookup[num & 0x3F]
|
|
|
|
}
|
|
|
|
|
|
|
|
function encodeChunk (uint8, start, end) {
|
|
|
|
var tmp;
|
|
|
|
var output = [];
|
|
|
|
for (var i = start; i < end; i += 3) {
|
|
|
|
tmp = (uint8[i] << 16) + (uint8[i + 1] << 8) + (uint8[i + 2]);
|
|
|
|
output.push(tripletToBase64(tmp));
|
|
|
|
}
|
|
|
|
return output.join('')
|
|
|
|
}
|
|
|
|
|
|
|
|
function fromByteArray (uint8) {
|
|
|
|
if (!inited) {
|
|
|
|
init();
|
|
|
|
}
|
|
|
|
var tmp;
|
|
|
|
var len = uint8.length;
|
|
|
|
var extraBytes = len % 3; // if we have 1 byte left, pad 2 bytes
|
|
|
|
var output = '';
|
|
|
|
var parts = [];
|
|
|
|
var maxChunkLength = 16383; // must be multiple of 3
|
|
|
|
|
|
|
|
// go through the array every three bytes, we'll deal with trailing stuff later
|
|
|
|
for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
|
|
|
|
parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength)));
|
|
|
|
}
|
|
|
|
|
|
|
|
// pad the end with zeros, but make sure to not forget the extra bytes
|
|
|
|
if (extraBytes === 1) {
|
|
|
|
tmp = uint8[len - 1];
|
|
|
|
output += lookup[tmp >> 2];
|
|
|
|
output += lookup[(tmp << 4) & 0x3F];
|
|
|
|
output += '==';
|
|
|
|
} else if (extraBytes === 2) {
|
|
|
|
tmp = (uint8[len - 2] << 8) + (uint8[len - 1]);
|
|
|
|
output += lookup[tmp >> 10];
|
|
|
|
output += lookup[(tmp >> 4) & 0x3F];
|
|
|
|
output += lookup[(tmp << 2) & 0x3F];
|
|
|
|
output += '=';
|
|
|
|
}
|
|
|
|
|
|
|
|
parts.push(output);
|
|
|
|
|
|
|
|
return parts.join('')
|
|
|
|
}
|
|
|
|
|
|
|
|
function read (buffer, offset, isLE, mLen, nBytes) {
|
|
|
|
var e, m;
|
|
|
|
var eLen = nBytes * 8 - mLen - 1;
|
|
|
|
var eMax = (1 << eLen) - 1;
|
|
|
|
var eBias = eMax >> 1;
|
|
|
|
var nBits = -7;
|
|
|
|
var i = isLE ? (nBytes - 1) : 0;
|
|
|
|
var d = isLE ? -1 : 1;
|
|
|
|
var s = buffer[offset + i];
|
|
|
|
|
|
|
|
i += d;
|
|
|
|
|
|
|
|
e = s & ((1 << (-nBits)) - 1);
|
|
|
|
s >>= (-nBits);
|
|
|
|
nBits += eLen;
|
|
|
|
for (; nBits > 0; e = e * 256 + buffer[offset + i], i += d, nBits -= 8) {}
|
|
|
|
|
|
|
|
m = e & ((1 << (-nBits)) - 1);
|
|
|
|
e >>= (-nBits);
|
|
|
|
nBits += mLen;
|
|
|
|
for (; nBits > 0; m = m * 256 + buffer[offset + i], i += d, nBits -= 8) {}
|
|
|
|
|
|
|
|
if (e === 0) {
|
|
|
|
e = 1 - eBias;
|
|
|
|
} else if (e === eMax) {
|
|
|
|
return m ? NaN : ((s ? -1 : 1) * Infinity)
|
|
|
|
} else {
|
|
|
|
m = m + Math.pow(2, mLen);
|
|
|
|
e = e - eBias;
|
|
|
|
}
|
|
|
|
return (s ? -1 : 1) * m * Math.pow(2, e - mLen)
|
|
|
|
}
|
|
|
|
|
|
|
|
function write (buffer, value, offset, isLE, mLen, nBytes) {
|
|
|
|
var e, m, c;
|
|
|
|
var eLen = nBytes * 8 - mLen - 1;
|
|
|
|
var eMax = (1 << eLen) - 1;
|
|
|
|
var eBias = eMax >> 1;
|
|
|
|
var rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0);
|
|
|
|
var i = isLE ? 0 : (nBytes - 1);
|
|
|
|
var d = isLE ? 1 : -1;
|
|
|
|
var s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0;
|
|
|
|
|
|
|
|
value = Math.abs(value);
|
|
|
|
|
|
|
|
if (isNaN(value) || value === Infinity) {
|
|
|
|
m = isNaN(value) ? 1 : 0;
|
|
|
|
e = eMax;
|
|
|
|
} else {
|
|
|
|
e = Math.floor(Math.log(value) / Math.LN2);
|
|
|
|
if (value * (c = Math.pow(2, -e)) < 1) {
|
|
|
|
e--;
|
|
|
|
c *= 2;
|
|
|
|
}
|
|
|
|
if (e + eBias >= 1) {
|
|
|
|
value += rt / c;
|
|
|
|
} else {
|
|
|
|
value += rt * Math.pow(2, 1 - eBias);
|
|
|
|
}
|
|
|
|
if (value * c >= 2) {
|
|
|
|
e++;
|
|
|
|
c /= 2;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (e + eBias >= eMax) {
|
|
|
|
m = 0;
|
|
|
|
e = eMax;
|
|
|
|
} else if (e + eBias >= 1) {
|
|
|
|
m = (value * c - 1) * Math.pow(2, mLen);
|
|
|
|
e = e + eBias;
|
|
|
|
} else {
|
|
|
|
m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen);
|
|
|
|
e = 0;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8) {}
|
|
|
|
|
|
|
|
e = (e << mLen) | m;
|
|
|
|
eLen += mLen;
|
|
|
|
for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8) {}
|
|
|
|
|
|
|
|
buffer[offset + i - d] |= s * 128;
|
|
|
|
}
|
|
|
|
|
|
|
|
var toString = {}.toString;
|
|
|
|
|
|
|
|
var isArray = Array.isArray || function (arr) {
|
|
|
|
return toString.call(arr) == '[object Array]';
|
|
|
|
};
|
|
|
|
|
|
|
|
var INSPECT_MAX_BYTES = 50;
|
|
|
|
|
|
|
|
/**
|
|
|
|
* If `Buffer.TYPED_ARRAY_SUPPORT`:
|
|
|
|
* === true Use Uint8Array implementation (fastest)
|
|
|
|
* === false Use Object implementation (most compatible, even IE6)
|
|
|
|
*
|
|
|
|
* Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+,
|
|
|
|
* Opera 11.6+, iOS 4.2+.
|
|
|
|
*
|
|
|
|
* Due to various browser bugs, sometimes the Object implementation will be used even
|
|
|
|
* when the browser supports typed arrays.
|
|
|
|
*
|
|
|
|
* Note:
|
|
|
|
*
|
|
|
|
* - Firefox 4-29 lacks support for adding new properties to `Uint8Array` instances,
|
|
|
|
* See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438.
|
|
|
|
*
|
|
|
|
* - Chrome 9-10 is missing the `TypedArray.prototype.subarray` function.
|
|
|
|
*
|
|
|
|
* - IE10 has a broken `TypedArray.prototype.subarray` function which returns arrays of
|
|
|
|
* incorrect length in some situations.
|
|
|
|
|
|
|
|
* We detect these buggy browsers and set `Buffer.TYPED_ARRAY_SUPPORT` to `false` so they
|
|
|
|
* get the Object implementation, which is slower but behaves correctly.
|
|
|
|
*/
|
|
|
|
Buffer.TYPED_ARRAY_SUPPORT = global$1.TYPED_ARRAY_SUPPORT !== undefined
|
|
|
|
? global$1.TYPED_ARRAY_SUPPORT
|
|
|
|
: true;
|
|
|
|
|
|
|
|
function kMaxLength () {
|
|
|
|
return Buffer.TYPED_ARRAY_SUPPORT
|
|
|
|
? 0x7fffffff
|
|
|
|
: 0x3fffffff
|
|
|
|
}
|
|
|
|
|
|
|
|
function createBuffer (that, length) {
|
|
|
|
if (kMaxLength() < length) {
|
|
|
|
throw new RangeError('Invalid typed array length')
|
|
|
|
}
|
|
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
|
|
// Return an augmented `Uint8Array` instance, for best performance
|
|
|
|
that = new Uint8Array(length);
|
|
|
|
that.__proto__ = Buffer.prototype;
|
|
|
|
} else {
|
|
|
|
// Fallback: Return an object instance of the Buffer class
|
|
|
|
if (that === null) {
|
|
|
|
that = new Buffer(length);
|
|
|
|
}
|
|
|
|
that.length = length;
|
|
|
|
}
|
|
|
|
|
|
|
|
return that
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* The Buffer constructor returns instances of `Uint8Array` that have their
|
|
|
|
* prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of
|
|
|
|
* `Uint8Array`, so the returned instances will have all the node `Buffer` methods
|
|
|
|
* and the `Uint8Array` methods. Square bracket notation works as expected -- it
|
|
|
|
* returns a single octet.
|
|
|
|
*
|
|
|
|
* The `Uint8Array` prototype remains unmodified.
|
|
|
|
*/
|
|
|
|
|
|
|
|
function Buffer (arg, encodingOrOffset, length) {
|
|
|
|
if (!Buffer.TYPED_ARRAY_SUPPORT && !(this instanceof Buffer)) {
|
|
|
|
return new Buffer(arg, encodingOrOffset, length)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Common case.
|
|
|
|
if (typeof arg === 'number') {
|
|
|
|
if (typeof encodingOrOffset === 'string') {
|
|
|
|
throw new Error(
|
|
|
|
'If encoding is specified then the first argument must be a string'
|
|
|
|
)
|
|
|
|
}
|
|
|
|
return allocUnsafe(this, arg)
|
|
|
|
}
|
|
|
|
return from(this, arg, encodingOrOffset, length)
|
|
|
|
}
|
|
|
|
|
|
|
|
Buffer.poolSize = 8192; // not used by this implementation
|
|
|
|
|
|
|
|
// TODO: Legacy, not needed anymore. Remove in next major version.
|
|
|
|
Buffer._augment = function (arr) {
|
|
|
|
arr.__proto__ = Buffer.prototype;
|
|
|
|
return arr
|
|
|
|
};
|
|
|
|
|
|
|
|
function from (that, value, encodingOrOffset, length) {
|
|
|
|
if (typeof value === 'number') {
|
|
|
|
throw new TypeError('"value" argument must not be a number')
|
|
|
|
}
|
|
|
|
|
|
|
|
if (typeof ArrayBuffer !== 'undefined' && value instanceof ArrayBuffer) {
|
|
|
|
return fromArrayBuffer(that, value, encodingOrOffset, length)
|
|
|
|
}
|
|
|
|
|
|
|
|
if (typeof value === 'string') {
|
|
|
|
return fromString(that, value, encodingOrOffset)
|
|
|
|
}
|
|
|
|
|
|
|
|
return fromObject(that, value)
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Functionally equivalent to Buffer(arg, encoding) but throws a TypeError
|
|
|
|
* if value is a number.
|
|
|
|
* Buffer.from(str[, encoding])
|
|
|
|
* Buffer.from(array)
|
|
|
|
* Buffer.from(buffer)
|
|
|
|
* Buffer.from(arrayBuffer[, byteOffset[, length]])
|
|
|
|
**/
|
|
|
|
Buffer.from = function (value, encodingOrOffset, length) {
|
|
|
|
return from(null, value, encodingOrOffset, length)
|
|
|
|
};
|
|
|
|
|
|
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
|
|
Buffer.prototype.__proto__ = Uint8Array.prototype;
|
|
|
|
Buffer.__proto__ = Uint8Array;
|
|
|
|
}
|
|
|
|
|
|
|
|
function assertSize (size) {
|
|
|
|
if (typeof size !== 'number') {
|
|
|
|
throw new TypeError('"size" argument must be a number')
|
|
|
|
} else if (size < 0) {
|
|
|
|
throw new RangeError('"size" argument must not be negative')
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
function alloc (that, size, fill, encoding) {
|
|
|
|
assertSize(size);
|
|
|
|
if (size <= 0) {
|
|
|
|
return createBuffer(that, size)
|
|
|
|
}
|
|
|
|
if (fill !== undefined) {
|
|
|
|
// Only pay attention to encoding if it's a string. This
|
|
|
|
// prevents accidentally sending in a number that would
|
|
|
|
// be interpreted as a start offset.
|
|
|
|
return typeof encoding === 'string'
|
|
|
|
? createBuffer(that, size).fill(fill, encoding)
|
|
|
|
: createBuffer(that, size).fill(fill)
|
|
|
|
}
|
|
|
|
return createBuffer(that, size)
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Creates a new filled Buffer instance.
|
|
|
|
* alloc(size[, fill[, encoding]])
|
|
|
|
**/
|
|
|
|
Buffer.alloc = function (size, fill, encoding) {
|
|
|
|
return alloc(null, size, fill, encoding)
|
|
|
|
};
|
|
|
|
|
|
|
|
function allocUnsafe (that, size) {
|
|
|
|
assertSize(size);
|
|
|
|
that = createBuffer(that, size < 0 ? 0 : checked(size) | 0);
|
|
|
|
if (!Buffer.TYPED_ARRAY_SUPPORT) {
|
|
|
|
for (var i = 0; i < size; ++i) {
|
|
|
|
that[i] = 0;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return that
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance.
|
|
|
|
* */
|
|
|
|
Buffer.allocUnsafe = function (size) {
|
|
|
|
return allocUnsafe(null, size)
|
|
|
|
};
|
|
|
|
/**
|
|
|
|
* Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
|
|
|
*/
|
|
|
|
Buffer.allocUnsafeSlow = function (size) {
|
|
|
|
return allocUnsafe(null, size)
|
|
|
|
};
|
|
|
|
|
|
|
|
function fromString (that, string, encoding) {
|
|
|
|
if (typeof encoding !== 'string' || encoding === '') {
|
|
|
|
encoding = 'utf8';
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!Buffer.isEncoding(encoding)) {
|
|
|
|
throw new TypeError('"encoding" must be a valid string encoding')
|
|
|
|
}
|
|
|
|
|
|
|
|
var length = byteLength(string, encoding) | 0;
|
|
|
|
that = createBuffer(that, length);
|
|
|
|
|
|
|
|
var actual = that.write(string, encoding);
|
|
|
|
|
|
|
|
if (actual !== length) {
|
|
|
|
// Writing a hex string, for example, that contains invalid characters will
|
|
|
|
// cause everything after the first invalid character to be ignored. (e.g.
|
|
|
|
// 'abxxcd' will be treated as 'ab')
|
|
|
|
that = that.slice(0, actual);
|
|
|
|
}
|
|
|
|
|
|
|
|
return that
|
|
|
|
}
|
|
|
|
|
|
|
|
function fromArrayLike (that, array) {
|
|
|
|
var length = array.length < 0 ? 0 : checked(array.length) | 0;
|
|
|
|
that = createBuffer(that, length);
|
|
|
|
for (var i = 0; i < length; i += 1) {
|
|
|
|
that[i] = array[i] & 255;
|
|
|
|
}
|
|
|
|
return that
|
|
|
|
}
|
|
|
|
|
|
|
|
function fromArrayBuffer (that, array, byteOffset, length) {
|
|
|
|
array.byteLength; // this throws if `array` is not a valid ArrayBuffer
|
|
|
|
|
|
|
|
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
|
|
|
throw new RangeError('\'offset\' is out of bounds')
|
|
|
|
}
|
|
|
|
|
|
|
|
if (array.byteLength < byteOffset + (length || 0)) {
|
|
|
|
throw new RangeError('\'length\' is out of bounds')
|
|
|
|
}
|
|
|
|
|
|
|
|
if (byteOffset === undefined && length === undefined) {
|
|
|
|
array = new Uint8Array(array);
|
|
|
|
} else if (length === undefined) {
|
|
|
|
array = new Uint8Array(array, byteOffset);
|
|
|
|
} else {
|
|
|
|
array = new Uint8Array(array, byteOffset, length);
|
|
|
|
}
|
|
|
|
|
|
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
|
|
// Return an augmented `Uint8Array` instance, for best performance
|
|
|
|
that = array;
|
|
|
|
that.__proto__ = Buffer.prototype;
|
|
|
|
} else {
|
|
|
|
// Fallback: Return an object instance of the Buffer class
|
|
|
|
that = fromArrayLike(that, array);
|
|
|
|
}
|
|
|
|
return that
|
|
|
|
}
|
|
|
|
|
|
|
|
function fromObject (that, obj) {
|
|
|
|
if (internalIsBuffer(obj)) {
|
|
|
|
var len = checked(obj.length) | 0;
|
|
|
|
that = createBuffer(that, len);
|
|
|
|
|
|
|
|
if (that.length === 0) {
|
|
|
|
return that
|
|
|
|
}
|
|
|
|
|
|
|
|
obj.copy(that, 0, 0, len);
|
|
|
|
return that
|
|
|
|
}
|
|
|
|
|
|
|
|
if (obj) {
|
|
|
|
if ((typeof ArrayBuffer !== 'undefined' &&
|
|
|
|
obj.buffer instanceof ArrayBuffer) || 'length' in obj) {
|
|
|
|
if (typeof obj.length !== 'number' || isnan(obj.length)) {
|
|
|
|
return createBuffer(that, 0)
|
|
|
|
}
|
|
|
|
return fromArrayLike(that, obj)
|
|
|
|
}
|
|
|
|
|
|
|
|
if (obj.type === 'Buffer' && isArray(obj.data)) {
|
|
|
|
return fromArrayLike(that, obj.data)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
throw new TypeError('First argument must be a string, Buffer, ArrayBuffer, Array, or array-like object.')
|
|
|
|
}
|
|
|
|
|
|
|
|
function checked (length) {
|
|
|
|
// Note: cannot use `length < kMaxLength()` here because that fails when
|
|
|
|
// length is NaN (which is otherwise coerced to zero.)
|
|
|
|
if (length >= kMaxLength()) {
|
|
|
|
throw new RangeError('Attempt to allocate Buffer larger than maximum ' +
|
|
|
|
'size: 0x' + kMaxLength().toString(16) + ' bytes')
|
|
|
|
}
|
|
|
|
return length | 0
|
|
|
|
}
|
|
|
|
Buffer.isBuffer = isBuffer;
|
|
|
|
function internalIsBuffer (b) {
|
|
|
|
return !!(b != null && b._isBuffer)
|
|
|
|
}
|
|
|
|
|
|
|
|
Buffer.compare = function compare (a, b) {
|
|
|
|
if (!internalIsBuffer(a) || !internalIsBuffer(b)) {
|
|
|
|
throw new TypeError('Arguments must be Buffers')
|
|
|
|
}
|
|
|
|
|
|
|
|
if (a === b) return 0
|
|
|
|
|
|
|
|
var x = a.length;
|
|
|
|
var y = b.length;
|
|
|
|
|
|
|
|
for (var i = 0, len = Math.min(x, y); i < len; ++i) {
|
|
|
|
if (a[i] !== b[i]) {
|
|
|
|
x = a[i];
|
|
|
|
y = b[i];
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if (x < y) return -1
|
|
|
|
if (y < x) return 1
|
|
|
|
return 0
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.isEncoding = function isEncoding (encoding) {
|
|
|
|
switch (String(encoding).toLowerCase()) {
|
|
|
|
case 'hex':
|
|
|
|
case 'utf8':
|
|
|
|
case 'utf-8':
|
|
|
|
case 'ascii':
|
|
|
|
case 'latin1':
|
|
|
|
case 'binary':
|
|
|
|
case 'base64':
|
|
|
|
case 'ucs2':
|
|
|
|
case 'ucs-2':
|
|
|
|
case 'utf16le':
|
|
|
|
case 'utf-16le':
|
|
|
|
return true
|
|
|
|
default:
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.concat = function concat (list, length) {
|
|
|
|
if (!isArray(list)) {
|
|
|
|
throw new TypeError('"list" argument must be an Array of Buffers')
|
|
|
|
}
|
|
|
|
|
|
|
|
if (list.length === 0) {
|
|
|
|
return Buffer.alloc(0)
|
|
|
|
}
|
|
|
|
|
|
|
|
var i;
|
|
|
|
if (length === undefined) {
|
|
|
|
length = 0;
|
|
|
|
for (i = 0; i < list.length; ++i) {
|
|
|
|
length += list[i].length;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
var buffer = Buffer.allocUnsafe(length);
|
|
|
|
var pos = 0;
|
|
|
|
for (i = 0; i < list.length; ++i) {
|
|
|
|
var buf = list[i];
|
|
|
|
if (!internalIsBuffer(buf)) {
|
|
|
|
throw new TypeError('"list" argument must be an Array of Buffers')
|
|
|
|
}
|
|
|
|
buf.copy(buffer, pos);
|
|
|
|
pos += buf.length;
|
|
|
|
}
|
|
|
|
return buffer
|
|
|
|
};
|
|
|
|
|
|
|
|
function byteLength (string, encoding) {
|
|
|
|
if (internalIsBuffer(string)) {
|
|
|
|
return string.length
|
|
|
|
}
|
|
|
|
if (typeof ArrayBuffer !== 'undefined' && typeof ArrayBuffer.isView === 'function' &&
|
|
|
|
(ArrayBuffer.isView(string) || string instanceof ArrayBuffer)) {
|
|
|
|
return string.byteLength
|
|
|
|
}
|
|
|
|
if (typeof string !== 'string') {
|
|
|
|
string = '' + string;
|
|
|
|
}
|
|
|
|
|
|
|
|
var len = string.length;
|
|
|
|
if (len === 0) return 0
|
|
|
|
|
|
|
|
// Use a for loop to avoid recursion
|
|
|
|
var loweredCase = false;
|
|
|
|
for (;;) {
|
|
|
|
switch (encoding) {
|
|
|
|
case 'ascii':
|
|
|
|
case 'latin1':
|
|
|
|
case 'binary':
|
|
|
|
return len
|
|
|
|
case 'utf8':
|
|
|
|
case 'utf-8':
|
|
|
|
case undefined:
|
|
|
|
return utf8ToBytes(string).length
|
|
|
|
case 'ucs2':
|
|
|
|
case 'ucs-2':
|
|
|
|
case 'utf16le':
|
|
|
|
case 'utf-16le':
|
|
|
|
return len * 2
|
|
|
|
case 'hex':
|
|
|
|
return len >>> 1
|
|
|
|
case 'base64':
|
|
|
|
return base64ToBytes(string).length
|
|
|
|
default:
|
|
|
|
if (loweredCase) return utf8ToBytes(string).length // assume utf8
|
|
|
|
encoding = ('' + encoding).toLowerCase();
|
|
|
|
loweredCase = true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Buffer.byteLength = byteLength;
|
|
|
|
|
|
|
|
function slowToString (encoding, start, end) {
|
|
|
|
var loweredCase = false;
|
|
|
|
|
|
|
|
// No need to verify that "this.length <= MAX_UINT32" since it's a read-only
|
|
|
|
// property of a typed array.
|
|
|
|
|
|
|
|
// This behaves neither like String nor Uint8Array in that we set start/end
|
|
|
|
// to their upper/lower bounds if the value passed is out of range.
|
|
|
|
// undefined is handled specially as per ECMA-262 6th Edition,
|
|
|
|
// Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization.
|
|
|
|
if (start === undefined || start < 0) {
|
|
|
|
start = 0;
|
|
|
|
}
|
|
|
|
// Return early if start > this.length. Done here to prevent potential uint32
|
|
|
|
// coercion fail below.
|
|
|
|
if (start > this.length) {
|
|
|
|
return ''
|
|
|
|
}
|
|
|
|
|
|
|
|
if (end === undefined || end > this.length) {
|
|
|
|
end = this.length;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (end <= 0) {
|
|
|
|
return ''
|
|
|
|
}
|
|
|
|
|
|
|
|
// Force coercion to uint32. This will also coerce falsey/NaN values to 0.
|
|
|
|
end >>>= 0;
|
|
|
|
start >>>= 0;
|
|
|
|
|
|
|
|
if (end <= start) {
|
|
|
|
return ''
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!encoding) encoding = 'utf8';
|
|
|
|
|
|
|
|
while (true) {
|
|
|
|
switch (encoding) {
|
|
|
|
case 'hex':
|
|
|
|
return hexSlice(this, start, end)
|
|
|
|
|
|
|
|
case 'utf8':
|
|
|
|
case 'utf-8':
|
|
|
|
return utf8Slice(this, start, end)
|
|
|
|
|
|
|
|
case 'ascii':
|
|
|
|
return asciiSlice(this, start, end)
|
|
|
|
|
|
|
|
case 'latin1':
|
|
|
|
case 'binary':
|
|
|
|
return latin1Slice(this, start, end)
|
|
|
|
|
|
|
|
case 'base64':
|
|
|
|
return base64Slice(this, start, end)
|
|
|
|
|
|
|
|
case 'ucs2':
|
|
|
|
case 'ucs-2':
|
|
|
|
case 'utf16le':
|
|
|
|
case 'utf-16le':
|
|
|
|
return utf16leSlice(this, start, end)
|
|
|
|
|
|
|
|
default:
|
|
|
|
if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
|
|
|
|
encoding = (encoding + '').toLowerCase();
|
|
|
|
loweredCase = true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// The property is used by `Buffer.isBuffer` and `is-buffer` (in Safari 5-7) to detect
|
|
|
|
// Buffer instances.
|
|
|
|
Buffer.prototype._isBuffer = true;
|
|
|
|
|
|
|
|
function swap (b, n, m) {
|
|
|
|
var i = b[n];
|
|
|
|
b[n] = b[m];
|
|
|
|
b[m] = i;
|
|
|
|
}
|
|
|
|
|
|
|
|
Buffer.prototype.swap16 = function swap16 () {
|
|
|
|
var len = this.length;
|
|
|
|
if (len % 2 !== 0) {
|
|
|
|
throw new RangeError('Buffer size must be a multiple of 16-bits')
|
|
|
|
}
|
|
|
|
for (var i = 0; i < len; i += 2) {
|
|
|
|
swap(this, i, i + 1);
|
|
|
|
}
|
|
|
|
return this
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.swap32 = function swap32 () {
|
|
|
|
var len = this.length;
|
|
|
|
if (len % 4 !== 0) {
|
|
|
|
throw new RangeError('Buffer size must be a multiple of 32-bits')
|
|
|
|
}
|
|
|
|
for (var i = 0; i < len; i += 4) {
|
|
|
|
swap(this, i, i + 3);
|
|
|
|
swap(this, i + 1, i + 2);
|
|
|
|
}
|
|
|
|
return this
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.swap64 = function swap64 () {
|
|
|
|
var len = this.length;
|
|
|
|
if (len % 8 !== 0) {
|
|
|
|
throw new RangeError('Buffer size must be a multiple of 64-bits')
|
|
|
|
}
|
|
|
|
for (var i = 0; i < len; i += 8) {
|
|
|
|
swap(this, i, i + 7);
|
|
|
|
swap(this, i + 1, i + 6);
|
|
|
|
swap(this, i + 2, i + 5);
|
|
|
|
swap(this, i + 3, i + 4);
|
|
|
|
}
|
|
|
|
return this
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.toString = function toString () {
|
|
|
|
var length = this.length | 0;
|
|
|
|
if (length === 0) return ''
|
|
|
|
if (arguments.length === 0) return utf8Slice(this, 0, length)
|
|
|
|
return slowToString.apply(this, arguments)
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.equals = function equals (b) {
|
|
|
|
if (!internalIsBuffer(b)) throw new TypeError('Argument must be a Buffer')
|
|
|
|
if (this === b) return true
|
|
|
|
return Buffer.compare(this, b) === 0
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.inspect = function inspect () {
|
|
|
|
var str = '';
|
|
|
|
var max = INSPECT_MAX_BYTES;
|
|
|
|
if (this.length > 0) {
|
|
|
|
str = this.toString('hex', 0, max).match(/.{2}/g).join(' ');
|
|
|
|
if (this.length > max) str += ' ... ';
|
|
|
|
}
|
|
|
|
return '<Buffer ' + str + '>'
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) {
|
|
|
|
if (!internalIsBuffer(target)) {
|
|
|
|
throw new TypeError('Argument must be a Buffer')
|
|
|
|
}
|
|
|
|
|
|
|
|
if (start === undefined) {
|
|
|
|
start = 0;
|
|
|
|
}
|
|
|
|
if (end === undefined) {
|
|
|
|
end = target ? target.length : 0;
|
|
|
|
}
|
|
|
|
if (thisStart === undefined) {
|
|
|
|
thisStart = 0;
|
|
|
|
}
|
|
|
|
if (thisEnd === undefined) {
|
|
|
|
thisEnd = this.length;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) {
|
|
|
|
throw new RangeError('out of range index')
|
|
|
|
}
|
|
|
|
|
|
|
|
if (thisStart >= thisEnd && start >= end) {
|
|
|
|
return 0
|
|
|
|
}
|
|
|
|
if (thisStart >= thisEnd) {
|
|
|
|
return -1
|
|
|
|
}
|
|
|
|
if (start >= end) {
|
|
|
|
return 1
|
|
|
|
}
|
|
|
|
|
|
|
|
start >>>= 0;
|
|
|
|
end >>>= 0;
|
|
|
|
thisStart >>>= 0;
|
|
|
|
thisEnd >>>= 0;
|
|
|
|
|
|
|
|
if (this === target) return 0
|
|
|
|
|
|
|
|
var x = thisEnd - thisStart;
|
|
|
|
var y = end - start;
|
|
|
|
var len = Math.min(x, y);
|
|
|
|
|
|
|
|
var thisCopy = this.slice(thisStart, thisEnd);
|
|
|
|
var targetCopy = target.slice(start, end);
|
|
|
|
|
|
|
|
for (var i = 0; i < len; ++i) {
|
|
|
|
if (thisCopy[i] !== targetCopy[i]) {
|
|
|
|
x = thisCopy[i];
|
|
|
|
y = targetCopy[i];
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if (x < y) return -1
|
|
|
|
if (y < x) return 1
|
|
|
|
return 0
|
|
|
|
};
|
|
|
|
|
|
|
|
// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`,
|
|
|
|
// OR the last index of `val` in `buffer` at offset <= `byteOffset`.
|
|
|
|
//
|
|
|
|
// Arguments:
|
|
|
|
// - buffer - a Buffer to search
|
|
|
|
// - val - a string, Buffer, or number
|
|
|
|
// - byteOffset - an index into `buffer`; will be clamped to an int32
|
|
|
|
// - encoding - an optional encoding, relevant if val is a string
|
|
|
|
// - dir - true for indexOf, false for lastIndexOf
|
|
|
|
function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) {
|
|
|
|
// Empty buffer means no match
|
|
|
|
if (buffer.length === 0) return -1
|
|
|
|
|
|
|
|
// Normalize byteOffset
|
|
|
|
if (typeof byteOffset === 'string') {
|
|
|
|
encoding = byteOffset;
|
|
|
|
byteOffset = 0;
|
|
|
|
} else if (byteOffset > 0x7fffffff) {
|
|
|
|
byteOffset = 0x7fffffff;
|
|
|
|
} else if (byteOffset < -0x80000000) {
|
|
|
|
byteOffset = -0x80000000;
|
|
|
|
}
|
|
|
|
byteOffset = +byteOffset; // Coerce to Number.
|
|
|
|
if (isNaN(byteOffset)) {
|
|
|
|
// byteOffset: if it's undefined, null, NaN, "foo", etc, search whole buffer
|
|
|
|
byteOffset = dir ? 0 : (buffer.length - 1);
|
|
|
|
}
|
|
|
|
|
|
|
|
// Normalize byteOffset: negative offsets start from the end of the buffer
|
|
|
|
if (byteOffset < 0) byteOffset = buffer.length + byteOffset;
|
|
|
|
if (byteOffset >= buffer.length) {
|
|
|
|
if (dir) return -1
|
|
|
|
else byteOffset = buffer.length - 1;
|
|
|
|
} else if (byteOffset < 0) {
|
|
|
|
if (dir) byteOffset = 0;
|
|
|
|
else return -1
|
|
|
|
}
|
|
|
|
|
|
|
|
// Normalize val
|
|
|
|
if (typeof val === 'string') {
|
|
|
|
val = Buffer.from(val, encoding);
|
|
|
|
}
|
|
|
|
|
|
|
|
// Finally, search either indexOf (if dir is true) or lastIndexOf
|
|
|
|
if (internalIsBuffer(val)) {
|
|
|
|
// Special case: looking for empty string/buffer always fails
|
|
|
|
if (val.length === 0) {
|
|
|
|
return -1
|
|
|
|
}
|
|
|
|
return arrayIndexOf(buffer, val, byteOffset, encoding, dir)
|
|
|
|
} else if (typeof val === 'number') {
|
|
|
|
val = val & 0xFF; // Search for a byte value [0-255]
|
|
|
|
if (Buffer.TYPED_ARRAY_SUPPORT &&
|
|
|
|
typeof Uint8Array.prototype.indexOf === 'function') {
|
|
|
|
if (dir) {
|
|
|
|
return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset)
|
|
|
|
} else {
|
|
|
|
return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return arrayIndexOf(buffer, [ val ], byteOffset, encoding, dir)
|
|
|
|
}
|
|
|
|
|
|
|
|
throw new TypeError('val must be string, number or Buffer')
|
|
|
|
}
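// Illustrative usage sketch (not part of the original bundle): the public
// wrappers below delegate to bidirectionalIndexOf and match node's Buffer API:
// Buffer.from('budibase').indexOf('base')   -> 4
// Buffer.from('budibase').lastIndexOf(0x62) -> 4   (0x62 === 'b')
// Buffer.from('budibase').includes('xyz')   -> false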
|
|
|
|
|
|
|
|
function arrayIndexOf (arr, val, byteOffset, encoding, dir) {
|
|
|
|
var indexSize = 1;
|
|
|
|
var arrLength = arr.length;
|
|
|
|
var valLength = val.length;
|
|
|
|
|
|
|
|
if (encoding !== undefined) {
|
|
|
|
encoding = String(encoding).toLowerCase();
|
|
|
|
if (encoding === 'ucs2' || encoding === 'ucs-2' ||
|
|
|
|
encoding === 'utf16le' || encoding === 'utf-16le') {
|
|
|
|
if (arr.length < 2 || val.length < 2) {
|
|
|
|
return -1
|
|
|
|
}
|
|
|
|
indexSize = 2;
|
|
|
|
arrLength /= 2;
|
|
|
|
valLength /= 2;
|
|
|
|
byteOffset /= 2;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
function read (buf, i) {
|
|
|
|
if (indexSize === 1) {
|
|
|
|
return buf[i]
|
|
|
|
} else {
|
|
|
|
return buf.readUInt16BE(i * indexSize)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
var i;
|
|
|
|
if (dir) {
|
|
|
|
var foundIndex = -1;
|
|
|
|
for (i = byteOffset; i < arrLength; i++) {
|
|
|
|
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
|
|
|
if (foundIndex === -1) foundIndex = i;
|
|
|
|
if (i - foundIndex + 1 === valLength) return foundIndex * indexSize
|
|
|
|
} else {
|
|
|
|
if (foundIndex !== -1) i -= i - foundIndex;
|
|
|
|
foundIndex = -1;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength;
|
|
|
|
for (i = byteOffset; i >= 0; i--) {
|
|
|
|
var found = true;
|
|
|
|
for (var j = 0; j < valLength; j++) {
|
|
|
|
if (read(arr, i + j) !== read(val, j)) {
|
|
|
|
found = false;
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if (found) return i
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return -1
|
|
|
|
}
|
|
|
|
|
|
|
|
Buffer.prototype.includes = function includes (val, byteOffset, encoding) {
|
|
|
|
return this.indexOf(val, byteOffset, encoding) !== -1
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) {
|
|
|
|
return bidirectionalIndexOf(this, val, byteOffset, encoding, true)
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) {
|
|
|
|
return bidirectionalIndexOf(this, val, byteOffset, encoding, false)
|
|
|
|
};
|
|
|
|
|
|
|
|
function hexWrite (buf, string, offset, length) {
|
|
|
|
offset = Number(offset) || 0;
|
|
|
|
var remaining = buf.length - offset;
|
|
|
|
if (!length) {
|
|
|
|
length = remaining;
|
|
|
|
} else {
|
|
|
|
length = Number(length);
|
|
|
|
if (length > remaining) {
|
|
|
|
length = remaining;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// must be an even number of digits
|
|
|
|
var strLen = string.length;
|
|
|
|
if (strLen % 2 !== 0) throw new TypeError('Invalid hex string')
|
|
|
|
|
|
|
|
if (length > strLen / 2) {
|
|
|
|
length = strLen / 2;
|
|
|
|
}
|
|
|
|
for (var i = 0; i < length; ++i) {
|
|
|
|
var parsed = parseInt(string.substr(i * 2, 2), 16);
|
|
|
|
if (isNaN(parsed)) return i
|
|
|
|
buf[offset + i] = parsed;
|
|
|
|
}
|
|
|
|
return i
|
|
|
|
}
|
|
|
|
|
|
|
|
function utf8Write (buf, string, offset, length) {
|
|
|
|
return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length)
|
|
|
|
}
|
|
|
|
|
|
|
|
function asciiWrite (buf, string, offset, length) {
|
|
|
|
return blitBuffer(asciiToBytes(string), buf, offset, length)
|
|
|
|
}
|
|
|
|
|
|
|
|
function latin1Write (buf, string, offset, length) {
|
|
|
|
return asciiWrite(buf, string, offset, length)
|
|
|
|
}
|
|
|
|
|
|
|
|
function base64Write (buf, string, offset, length) {
|
|
|
|
return blitBuffer(base64ToBytes(string), buf, offset, length)
|
|
|
|
}
|
|
|
|
|
|
|
|
function ucs2Write (buf, string, offset, length) {
|
|
|
|
return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length)
|
|
|
|
}
|
|
|
|
|
|
|
|
Buffer.prototype.write = function write (string, offset, length, encoding) {
|
|
|
|
// Buffer#write(string)
|
|
|
|
if (offset === undefined) {
|
|
|
|
encoding = 'utf8';
|
|
|
|
length = this.length;
|
|
|
|
offset = 0;
|
|
|
|
// Buffer#write(string, encoding)
|
|
|
|
} else if (length === undefined && typeof offset === 'string') {
|
|
|
|
encoding = offset;
|
|
|
|
length = this.length;
|
|
|
|
offset = 0;
|
|
|
|
// Buffer#write(string, offset[, length][, encoding])
|
|
|
|
} else if (isFinite(offset)) {
|
|
|
|
offset = offset | 0;
|
|
|
|
if (isFinite(length)) {
|
|
|
|
length = length | 0;
|
|
|
|
if (encoding === undefined) encoding = 'utf8';
|
|
|
|
} else {
|
|
|
|
encoding = length;
|
|
|
|
length = undefined;
|
|
|
|
}
|
|
|
|
// legacy write(string, encoding, offset, length) - remove in v0.13
|
|
|
|
} else {
|
|
|
|
throw new Error(
|
|
|
|
'Buffer.write(string, encoding, offset[, length]) is no longer supported'
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
|
|
|
var remaining = this.length - offset;
|
|
|
|
if (length === undefined || length > remaining) length = remaining;
|
|
|
|
|
|
|
|
if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) {
|
|
|
|
throw new RangeError('Attempt to write outside buffer bounds')
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!encoding) encoding = 'utf8';
|
|
|
|
|
|
|
|
var loweredCase = false;
|
|
|
|
for (;;) {
|
|
|
|
switch (encoding) {
|
|
|
|
case 'hex':
|
|
|
|
return hexWrite(this, string, offset, length)
|
|
|
|
|
|
|
|
case 'utf8':
|
|
|
|
case 'utf-8':
|
|
|
|
return utf8Write(this, string, offset, length)
|
|
|
|
|
|
|
|
case 'ascii':
|
|
|
|
return asciiWrite(this, string, offset, length)
|
|
|
|
|
|
|
|
case 'latin1':
|
|
|
|
case 'binary':
|
|
|
|
return latin1Write(this, string, offset, length)
|
|
|
|
|
|
|
|
case 'base64':
|
|
|
|
// Warning: maxLength not taken into account in base64Write
|
|
|
|
return base64Write(this, string, offset, length)
|
|
|
|
|
|
|
|
case 'ucs2':
|
|
|
|
case 'ucs-2':
|
|
|
|
case 'utf16le':
|
|
|
|
case 'utf-16le':
|
|
|
|
return ucs2Write(this, string, offset, length)
|
|
|
|
|
|
|
|
default:
|
|
|
|
if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
|
|
|
|
encoding = ('' + encoding).toLowerCase();
|
|
|
|
loweredCase = true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.toJSON = function toJSON () {
|
|
|
|
return {
|
|
|
|
type: 'Buffer',
|
|
|
|
data: Array.prototype.slice.call(this._arr || this, 0)
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
function base64Slice (buf, start, end) {
|
|
|
|
if (start === 0 && end === buf.length) {
|
|
|
|
return fromByteArray(buf)
|
|
|
|
} else {
|
|
|
|
return fromByteArray(buf.slice(start, end))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
function utf8Slice (buf, start, end) {
|
|
|
|
end = Math.min(buf.length, end);
|
|
|
|
var res = [];
|
|
|
|
|
|
|
|
var i = start;
|
|
|
|
while (i < end) {
|
|
|
|
var firstByte = buf[i];
|
|
|
|
var codePoint = null;
|
|
|
|
var bytesPerSequence = (firstByte > 0xEF) ? 4
|
|
|
|
: (firstByte > 0xDF) ? 3
|
|
|
|
: (firstByte > 0xBF) ? 2
|
|
|
|
: 1;
|
|
|
|
|
|
|
|
if (i + bytesPerSequence <= end) {
|
|
|
|
var secondByte, thirdByte, fourthByte, tempCodePoint;
|
|
|
|
|
|
|
|
switch (bytesPerSequence) {
|
|
|
|
case 1:
|
|
|
|
if (firstByte < 0x80) {
|
|
|
|
codePoint = firstByte;
|
|
|
|
}
|
|
|
|
break
|
|
|
|
case 2:
|
|
|
|
secondByte = buf[i + 1];
|
|
|
|
if ((secondByte & 0xC0) === 0x80) {
|
|
|
|
tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F);
|
|
|
|
if (tempCodePoint > 0x7F) {
|
|
|
|
codePoint = tempCodePoint;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
break
|
|
|
|
case 3:
|
|
|
|
secondByte = buf[i + 1];
|
|
|
|
thirdByte = buf[i + 2];
|
|
|
|
if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) {
|
|
|
|
tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F);
|
|
|
|
if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) {
|
|
|
|
codePoint = tempCodePoint;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
break
|
|
|
|
case 4:
|
|
|
|
secondByte = buf[i + 1];
|
|
|
|
thirdByte = buf[i + 2];
|
|
|
|
fourthByte = buf[i + 3];
|
|
|
|
if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) {
|
|
|
|
tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F);
|
|
|
|
if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) {
|
|
|
|
codePoint = tempCodePoint;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if (codePoint === null) {
|
|
|
|
// we did not generate a valid codePoint so insert a
|
|
|
|
// replacement char (U+FFFD) and advance only 1 byte
|
|
|
|
codePoint = 0xFFFD;
|
|
|
|
bytesPerSequence = 1;
|
|
|
|
} else if (codePoint > 0xFFFF) {
|
|
|
|
// encode to utf16 (surrogate pair dance)
|
|
|
|
codePoint -= 0x10000;
|
|
|
|
res.push(codePoint >>> 10 & 0x3FF | 0xD800);
|
|
|
|
codePoint = 0xDC00 | codePoint & 0x3FF;
|
|
|
|
}
|
|
|
|
|
|
|
|
res.push(codePoint);
|
|
|
|
i += bytesPerSequence;
|
|
|
|
}
|
|
|
|
|
|
|
|
return decodeCodePointsArray(res)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Based on http://stackoverflow.com/a/22747272/680742, the browser with
|
|
|
|
// the lowest limit is Chrome, with 0x10000 args.
|
|
|
|
// We go 1 magnitude less, for safety
|
|
|
|
var MAX_ARGUMENTS_LENGTH = 0x1000;
|
|
|
|
|
|
|
|
function decodeCodePointsArray (codePoints) {
|
|
|
|
var len = codePoints.length;
|
|
|
|
if (len <= MAX_ARGUMENTS_LENGTH) {
|
|
|
|
return String.fromCharCode.apply(String, codePoints) // avoid extra slice()
|
|
|
|
}
|
|
|
|
|
|
|
|
// Decode in chunks to avoid "call stack size exceeded".
|
|
|
|
var res = '';
|
|
|
|
var i = 0;
|
|
|
|
while (i < len) {
|
|
|
|
res += String.fromCharCode.apply(
|
|
|
|
String,
|
|
|
|
codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH)
|
|
|
|
);
|
|
|
|
}
|
|
|
|
return res
|
|
|
|
}
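// Illustrative note (added comment, not in the original bundle): the chunking above exists
// because String.fromCharCode.apply passes every code point as a separate argument, and
// engines cap how many arguments a call may take. A minimal sketch of the same idea,
// assuming a plain array of UTF-16 code units, kept as a comment so it does not run here:
//
//   function codeUnitsToString (units) {
//     var CHUNK = 0x1000; // mirrors MAX_ARGUMENTS_LENGTH above
//     var out = '';
//     for (var i = 0; i < units.length; i += CHUNK) {
//       out += String.fromCharCode.apply(String, units.slice(i, i + CHUNK));
//     }
//     return out;
//   }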
|
|
|
|
|
|
|
|
function asciiSlice (buf, start, end) {
|
|
|
|
var ret = '';
|
|
|
|
end = Math.min(buf.length, end);
|
|
|
|
|
|
|
|
for (var i = start; i < end; ++i) {
|
|
|
|
ret += String.fromCharCode(buf[i] & 0x7F);
|
|
|
|
}
|
|
|
|
return ret
|
|
|
|
}
|
|
|
|
|
|
|
|
function latin1Slice (buf, start, end) {
|
|
|
|
var ret = '';
|
|
|
|
end = Math.min(buf.length, end);
|
|
|
|
|
|
|
|
for (var i = start; i < end; ++i) {
|
|
|
|
ret += String.fromCharCode(buf[i]);
|
|
|
|
}
|
|
|
|
return ret
|
|
|
|
}
|
|
|
|
|
|
|
|
function hexSlice (buf, start, end) {
|
|
|
|
var len = buf.length;
|
|
|
|
|
|
|
|
if (!start || start < 0) start = 0;
|
|
|
|
if (!end || end < 0 || end > len) end = len;
|
|
|
|
|
|
|
|
var out = '';
|
|
|
|
for (var i = start; i < end; ++i) {
|
|
|
|
out += toHex(buf[i]);
|
|
|
|
}
|
|
|
|
return out
|
|
|
|
}
|
|
|
|
|
|
|
|
function utf16leSlice (buf, start, end) {
|
|
|
|
var bytes = buf.slice(start, end);
|
|
|
|
var res = '';
|
|
|
|
for (var i = 0; i < bytes.length; i += 2) {
|
|
|
|
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
|
|
|
}
|
|
|
|
return res
|
|
|
|
}
|
|
|
|
|
|
|
|
Buffer.prototype.slice = function slice (start, end) {
|
|
|
|
var len = this.length;
|
|
|
|
start = ~~start;
|
|
|
|
end = end === undefined ? len : ~~end;
|
|
|
|
|
|
|
|
if (start < 0) {
|
|
|
|
start += len;
|
|
|
|
if (start < 0) start = 0;
|
|
|
|
} else if (start > len) {
|
|
|
|
start = len;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (end < 0) {
|
|
|
|
end += len;
|
|
|
|
if (end < 0) end = 0;
|
|
|
|
} else if (end > len) {
|
|
|
|
end = len;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (end < start) end = start;
|
|
|
|
|
|
|
|
var newBuf;
|
|
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
|
|
newBuf = this.subarray(start, end);
|
|
|
|
newBuf.__proto__ = Buffer.prototype;
|
|
|
|
} else {
|
|
|
|
var sliceLen = end - start;
|
|
|
|
newBuf = new Buffer(sliceLen, undefined);
|
|
|
|
for (var i = 0; i < sliceLen; ++i) {
|
|
|
|
newBuf[i] = this[i + start];
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return newBuf
|
|
|
|
};
|
|
|
|
|
|
|
|
/*
|
|
|
|
* Need to make sure that buffer isn't trying to write out of bounds.
|
|
|
|
*/
|
|
|
|
function checkOffset (offset, ext, length) {
|
|
|
|
if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint')
|
|
|
|
if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length')
|
|
|
|
}
|
|
|
|
|
|
|
|
Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) {
|
|
|
|
offset = offset | 0;
|
|
|
|
byteLength = byteLength | 0;
|
|
|
|
if (!noAssert) checkOffset(offset, byteLength, this.length);
|
|
|
|
|
|
|
|
var val = this[offset];
|
|
|
|
var mul = 1;
|
|
|
|
var i = 0;
|
|
|
|
while (++i < byteLength && (mul *= 0x100)) {
|
|
|
|
val += this[offset + i] * mul;
|
|
|
|
}
|
|
|
|
|
|
|
|
return val
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) {
|
|
|
|
offset = offset | 0;
|
|
|
|
byteLength = byteLength | 0;
|
|
|
|
if (!noAssert) {
|
|
|
|
checkOffset(offset, byteLength, this.length);
|
|
|
|
}
|
|
|
|
|
|
|
|
var val = this[offset + --byteLength];
|
|
|
|
var mul = 1;
|
|
|
|
while (byteLength > 0 && (mul *= 0x100)) {
|
|
|
|
val += this[offset + --byteLength] * mul;
|
|
|
|
}
|
|
|
|
|
|
|
|
return val
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) {
|
|
|
|
if (!noAssert) checkOffset(offset, 1, this.length);
|
|
|
|
return this[offset]
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) {
|
|
|
|
if (!noAssert) checkOffset(offset, 2, this.length);
|
|
|
|
return this[offset] | (this[offset + 1] << 8)
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) {
|
|
|
|
if (!noAssert) checkOffset(offset, 2, this.length);
|
|
|
|
return (this[offset] << 8) | this[offset + 1]
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) {
|
|
|
|
if (!noAssert) checkOffset(offset, 4, this.length);
|
|
|
|
|
|
|
|
return ((this[offset]) |
|
|
|
|
(this[offset + 1] << 8) |
|
|
|
|
(this[offset + 2] << 16)) +
|
|
|
|
(this[offset + 3] * 0x1000000)
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) {
|
|
|
|
if (!noAssert) checkOffset(offset, 4, this.length);
|
|
|
|
|
|
|
|
return (this[offset] * 0x1000000) +
|
|
|
|
((this[offset + 1] << 16) |
|
|
|
|
(this[offset + 2] << 8) |
|
|
|
|
this[offset + 3])
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) {
|
|
|
|
offset = offset | 0;
|
|
|
|
byteLength = byteLength | 0;
|
|
|
|
if (!noAssert) checkOffset(offset, byteLength, this.length);
|
|
|
|
|
|
|
|
var val = this[offset];
|
|
|
|
var mul = 1;
|
|
|
|
var i = 0;
|
|
|
|
while (++i < byteLength && (mul *= 0x100)) {
|
|
|
|
val += this[offset + i] * mul;
|
|
|
|
}
|
|
|
|
mul *= 0x80;
|
|
|
|
|
|
|
|
if (val >= mul) val -= Math.pow(2, 8 * byteLength);
|
|
|
|
|
|
|
|
return val
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) {
|
|
|
|
offset = offset | 0;
|
|
|
|
byteLength = byteLength | 0;
|
|
|
|
if (!noAssert) checkOffset(offset, byteLength, this.length);
|
|
|
|
|
|
|
|
var i = byteLength;
|
|
|
|
var mul = 1;
|
|
|
|
var val = this[offset + --i];
|
|
|
|
while (i > 0 && (mul *= 0x100)) {
|
|
|
|
val += this[offset + --i] * mul;
|
|
|
|
}
|
|
|
|
mul *= 0x80;
|
|
|
|
|
|
|
|
if (val >= mul) val -= Math.pow(2, 8 * byteLength);
|
|
|
|
|
|
|
|
return val
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) {
|
|
|
|
if (!noAssert) checkOffset(offset, 1, this.length);
|
|
|
|
if (!(this[offset] & 0x80)) return (this[offset])
|
|
|
|
return ((0xff - this[offset] + 1) * -1)
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) {
|
|
|
|
if (!noAssert) checkOffset(offset, 2, this.length);
|
|
|
|
var val = this[offset] | (this[offset + 1] << 8);
|
|
|
|
return (val & 0x8000) ? val | 0xFFFF0000 : val
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) {
|
|
|
|
if (!noAssert) checkOffset(offset, 2, this.length);
|
|
|
|
var val = this[offset + 1] | (this[offset] << 8);
|
|
|
|
return (val & 0x8000) ? val | 0xFFFF0000 : val
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) {
|
|
|
|
if (!noAssert) checkOffset(offset, 4, this.length);
|
|
|
|
|
|
|
|
return (this[offset]) |
|
|
|
|
(this[offset + 1] << 8) |
|
|
|
|
(this[offset + 2] << 16) |
|
|
|
|
(this[offset + 3] << 24)
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) {
|
|
|
|
if (!noAssert) checkOffset(offset, 4, this.length);
|
|
|
|
|
|
|
|
return (this[offset] << 24) |
|
|
|
|
(this[offset + 1] << 16) |
|
|
|
|
(this[offset + 2] << 8) |
|
|
|
|
(this[offset + 3])
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) {
|
|
|
|
if (!noAssert) checkOffset(offset, 4, this.length);
|
|
|
|
return read(this, offset, true, 23, 4)
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) {
|
|
|
|
if (!noAssert) checkOffset(offset, 4, this.length);
|
|
|
|
return read(this, offset, false, 23, 4)
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) {
|
|
|
|
if (!noAssert) checkOffset(offset, 8, this.length);
|
|
|
|
return read(this, offset, true, 52, 8)
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) {
|
|
|
|
if (!noAssert) checkOffset(offset, 8, this.length);
|
|
|
|
return read(this, offset, false, 52, 8)
|
|
|
|
};
|
|
|
|
|
|
|
|
function checkInt (buf, value, offset, ext, max, min) {
|
|
|
|
if (!internalIsBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance')
|
|
|
|
if (value > max || value < min) throw new RangeError('"value" argument is out of bounds')
|
|
|
|
if (offset + ext > buf.length) throw new RangeError('Index out of range')
|
|
|
|
}
|
|
|
|
|
|
|
|
Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) {
|
|
|
|
value = +value;
|
|
|
|
offset = offset | 0;
|
|
|
|
byteLength = byteLength | 0;
|
|
|
|
if (!noAssert) {
|
|
|
|
var maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
|
|
|
checkInt(this, value, offset, byteLength, maxBytes, 0);
|
|
|
|
}
|
|
|
|
|
|
|
|
var mul = 1;
|
|
|
|
var i = 0;
|
|
|
|
this[offset] = value & 0xFF;
|
|
|
|
while (++i < byteLength && (mul *= 0x100)) {
|
|
|
|
this[offset + i] = (value / mul) & 0xFF;
|
|
|
|
}
|
|
|
|
|
|
|
|
return offset + byteLength
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) {
|
|
|
|
value = +value;
|
|
|
|
offset = offset | 0;
|
|
|
|
byteLength = byteLength | 0;
|
|
|
|
if (!noAssert) {
|
|
|
|
var maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
|
|
|
checkInt(this, value, offset, byteLength, maxBytes, 0);
|
|
|
|
}
|
|
|
|
|
|
|
|
var i = byteLength - 1;
|
|
|
|
var mul = 1;
|
|
|
|
this[offset + i] = value & 0xFF;
|
|
|
|
while (--i >= 0 && (mul *= 0x100)) {
|
|
|
|
this[offset + i] = (value / mul) & 0xFF;
|
|
|
|
}
|
|
|
|
|
|
|
|
return offset + byteLength
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) {
|
|
|
|
value = +value;
|
|
|
|
offset = offset | 0;
|
|
|
|
if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0);
|
|
|
|
if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value);
|
|
|
|
this[offset] = (value & 0xff);
|
|
|
|
return offset + 1
|
|
|
|
};
|
|
|
|
|
|
|
|
function objectWriteUInt16 (buf, value, offset, littleEndian) {
|
|
|
|
if (value < 0) value = 0xffff + value + 1;
|
|
|
|
for (var i = 0, j = Math.min(buf.length - offset, 2); i < j; ++i) {
|
|
|
|
buf[offset + i] = (value & (0xff << (8 * (littleEndian ? i : 1 - i)))) >>>
|
|
|
|
(littleEndian ? i : 1 - i) * 8;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) {
|
|
|
|
value = +value;
|
|
|
|
offset = offset | 0;
|
|
|
|
if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0);
|
|
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
|
|
this[offset] = (value & 0xff);
|
|
|
|
this[offset + 1] = (value >>> 8);
|
|
|
|
} else {
|
|
|
|
objectWriteUInt16(this, value, offset, true);
|
|
|
|
}
|
|
|
|
return offset + 2
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) {
|
|
|
|
value = +value;
|
|
|
|
offset = offset | 0;
|
|
|
|
if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0);
|
|
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
|
|
this[offset] = (value >>> 8);
|
|
|
|
this[offset + 1] = (value & 0xff);
|
|
|
|
} else {
|
|
|
|
objectWriteUInt16(this, value, offset, false);
|
|
|
|
}
|
|
|
|
return offset + 2
|
|
|
|
};
|
|
|
|
|
|
|
|
function objectWriteUInt32 (buf, value, offset, littleEndian) {
|
|
|
|
if (value < 0) value = 0xffffffff + value + 1;
|
|
|
|
for (var i = 0, j = Math.min(buf.length - offset, 4); i < j; ++i) {
|
|
|
|
buf[offset + i] = (value >>> (littleEndian ? i : 3 - i) * 8) & 0xff;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) {
|
|
|
|
value = +value;
|
|
|
|
offset = offset | 0;
|
|
|
|
if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0);
|
|
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
|
|
this[offset + 3] = (value >>> 24);
|
|
|
|
this[offset + 2] = (value >>> 16);
|
|
|
|
this[offset + 1] = (value >>> 8);
|
|
|
|
this[offset] = (value & 0xff);
|
|
|
|
} else {
|
|
|
|
objectWriteUInt32(this, value, offset, true);
|
|
|
|
}
|
|
|
|
return offset + 4
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) {
|
|
|
|
value = +value;
|
|
|
|
offset = offset | 0;
|
|
|
|
if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0);
|
|
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
|
|
this[offset] = (value >>> 24);
|
|
|
|
this[offset + 1] = (value >>> 16);
|
|
|
|
this[offset + 2] = (value >>> 8);
|
|
|
|
this[offset + 3] = (value & 0xff);
|
|
|
|
} else {
|
|
|
|
objectWriteUInt32(this, value, offset, false);
|
|
|
|
}
|
|
|
|
return offset + 4
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) {
|
|
|
|
value = +value;
|
|
|
|
offset = offset | 0;
|
|
|
|
if (!noAssert) {
|
|
|
|
var limit = Math.pow(2, 8 * byteLength - 1);
|
|
|
|
|
|
|
|
checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
|
|
|
}
|
|
|
|
|
|
|
|
var i = 0;
|
|
|
|
var mul = 1;
|
|
|
|
var sub = 0;
|
|
|
|
this[offset] = value & 0xFF;
|
|
|
|
while (++i < byteLength && (mul *= 0x100)) {
|
|
|
|
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
|
|
|
|
sub = 1;
|
|
|
|
}
|
|
|
|
this[offset + i] = ((value / mul) >> 0) - sub & 0xFF;
|
|
|
|
}
|
|
|
|
|
|
|
|
return offset + byteLength
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) {
|
|
|
|
value = +value;
|
|
|
|
offset = offset | 0;
|
|
|
|
if (!noAssert) {
|
|
|
|
var limit = Math.pow(2, 8 * byteLength - 1);
|
|
|
|
|
|
|
|
checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
|
|
|
}
|
|
|
|
|
|
|
|
var i = byteLength - 1;
|
|
|
|
var mul = 1;
|
|
|
|
var sub = 0;
|
|
|
|
this[offset + i] = value & 0xFF;
|
|
|
|
while (--i >= 0 && (mul *= 0x100)) {
|
|
|
|
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
|
|
|
|
sub = 1;
|
|
|
|
}
|
|
|
|
this[offset + i] = ((value / mul) >> 0) - sub & 0xFF;
|
|
|
|
}
|
|
|
|
|
|
|
|
return offset + byteLength
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) {
|
|
|
|
value = +value;
|
|
|
|
offset = offset | 0;
|
|
|
|
if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80);
|
|
|
|
if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value);
|
|
|
|
if (value < 0) value = 0xff + value + 1;
|
|
|
|
this[offset] = (value & 0xff);
|
|
|
|
return offset + 1
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) {
|
|
|
|
value = +value;
|
|
|
|
offset = offset | 0;
|
|
|
|
if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000);
|
|
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
|
|
this[offset] = (value & 0xff);
|
|
|
|
this[offset + 1] = (value >>> 8);
|
|
|
|
} else {
|
|
|
|
objectWriteUInt16(this, value, offset, true);
|
|
|
|
}
|
|
|
|
return offset + 2
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) {
|
|
|
|
value = +value;
|
|
|
|
offset = offset | 0;
|
|
|
|
if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000);
|
|
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
|
|
this[offset] = (value >>> 8);
|
|
|
|
this[offset + 1] = (value & 0xff);
|
|
|
|
} else {
|
|
|
|
objectWriteUInt16(this, value, offset, false);
|
|
|
|
}
|
|
|
|
return offset + 2
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) {
|
|
|
|
value = +value;
|
|
|
|
offset = offset | 0;
|
|
|
|
if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000);
|
|
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
|
|
this[offset] = (value & 0xff);
|
|
|
|
this[offset + 1] = (value >>> 8);
|
|
|
|
this[offset + 2] = (value >>> 16);
|
|
|
|
this[offset + 3] = (value >>> 24);
|
|
|
|
} else {
|
|
|
|
objectWriteUInt32(this, value, offset, true);
|
|
|
|
}
|
|
|
|
return offset + 4
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) {
|
|
|
|
value = +value;
|
|
|
|
offset = offset | 0;
|
|
|
|
if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000);
|
|
|
|
if (value < 0) value = 0xffffffff + value + 1;
|
|
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
|
|
this[offset] = (value >>> 24);
|
|
|
|
this[offset + 1] = (value >>> 16);
|
|
|
|
this[offset + 2] = (value >>> 8);
|
|
|
|
this[offset + 3] = (value & 0xff);
|
|
|
|
} else {
|
|
|
|
objectWriteUInt32(this, value, offset, false);
|
|
|
|
}
|
|
|
|
return offset + 4
|
|
|
|
};
|
|
|
|
|
|
|
|
function checkIEEE754 (buf, value, offset, ext, max, min) {
|
|
|
|
if (offset + ext > buf.length) throw new RangeError('Index out of range')
|
|
|
|
if (offset < 0) throw new RangeError('Index out of range')
|
|
|
|
}
|
|
|
|
|
|
|
|
function writeFloat (buf, value, offset, littleEndian, noAssert) {
|
|
|
|
if (!noAssert) {
|
|
|
|
checkIEEE754(buf, value, offset, 4);
|
|
|
|
}
|
|
|
|
write(buf, value, offset, littleEndian, 23, 4);
|
|
|
|
return offset + 4
|
|
|
|
}
|
|
|
|
|
|
|
|
Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) {
|
|
|
|
return writeFloat(this, value, offset, true, noAssert)
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) {
|
|
|
|
return writeFloat(this, value, offset, false, noAssert)
|
|
|
|
};
|
|
|
|
|
|
|
|
function writeDouble (buf, value, offset, littleEndian, noAssert) {
|
|
|
|
if (!noAssert) {
|
|
|
|
checkIEEE754(buf, value, offset, 8);
|
|
|
|
}
|
|
|
|
write(buf, value, offset, littleEndian, 52, 8);
|
|
|
|
return offset + 8
|
|
|
|
}
|
|
|
|
|
|
|
|
Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) {
|
|
|
|
return writeDouble(this, value, offset, true, noAssert)
|
|
|
|
};
|
|
|
|
|
|
|
|
Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) {
|
|
|
|
return writeDouble(this, value, offset, false, noAssert)
|
|
|
|
};
|
|
|
|
|
|
|
|
// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)
|
|
|
|
Buffer.prototype.copy = function copy (target, targetStart, start, end) {
|
|
|
|
if (!start) start = 0;
|
|
|
|
if (!end && end !== 0) end = this.length;
|
|
|
|
if (targetStart >= target.length) targetStart = target.length;
|
|
|
|
if (!targetStart) targetStart = 0;
|
|
|
|
if (end > 0 && end < start) end = start;
|
|
|
|
|
|
|
|
// Copy 0 bytes; we're done
|
|
|
|
if (end === start) return 0
|
|
|
|
if (target.length === 0 || this.length === 0) return 0
|
|
|
|
|
|
|
|
// Fatal error conditions
|
|
|
|
if (targetStart < 0) {
|
|
|
|
throw new RangeError('targetStart out of bounds')
|
|
|
|
}
|
|
|
|
if (start < 0 || start >= this.length) throw new RangeError('sourceStart out of bounds')
|
|
|
|
if (end < 0) throw new RangeError('sourceEnd out of bounds')
|
|
|
|
|
|
|
|
// Are we oob?
|
|
|
|
if (end > this.length) end = this.length;
|
|
|
|
if (target.length - targetStart < end - start) {
|
|
|
|
end = target.length - targetStart + start;
|
|
|
|
}
|
|
|
|
|
|
|
|
var len = end - start;
|
|
|
|
var i;
|
|
|
|
|
|
|
|
if (this === target && start < targetStart && targetStart < end) {
|
|
|
|
// descending copy from end
|
|
|
|
for (i = len - 1; i >= 0; --i) {
|
|
|
|
target[i + targetStart] = this[i + start];
|
|
|
|
}
|
|
|
|
} else if (len < 1000 || !Buffer.TYPED_ARRAY_SUPPORT) {
|
|
|
|
// ascending copy from start
|
|
|
|
for (i = 0; i < len; ++i) {
|
|
|
|
target[i + targetStart] = this[i + start];
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
Uint8Array.prototype.set.call(
|
|
|
|
target,
|
|
|
|
this.subarray(start, start + len),
|
|
|
|
targetStart
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
return len
|
|
|
|
};
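// Illustrative usage of Buffer#copy above (added comment, not in the original source);
// the byte values are arbitrary examples:
//
//   var src = new Buffer([1, 2, 3, 4]);
//   var dst = new Buffer(4);
//   dst.fill(0);
//   src.copy(dst, 1, 0, 3);  // copies source bytes 0..2 into dst starting at index 1
//   // dst now contains <00 01 02 03>; copy returns the number of bytes copied (3)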
|
|
|
|
|
|
|
|
// Usage:
|
|
|
|
// buffer.fill(number[, offset[, end]])
|
|
|
|
// buffer.fill(buffer[, offset[, end]])
|
|
|
|
// buffer.fill(string[, offset[, end]][, encoding])
|
|
|
|
Buffer.prototype.fill = function fill (val, start, end, encoding) {
|
|
|
|
// Handle string cases:
|
|
|
|
if (typeof val === 'string') {
|
|
|
|
if (typeof start === 'string') {
|
|
|
|
encoding = start;
|
|
|
|
start = 0;
|
|
|
|
end = this.length;
|
|
|
|
} else if (typeof end === 'string') {
|
|
|
|
encoding = end;
|
|
|
|
end = this.length;
|
|
|
|
}
|
|
|
|
if (val.length === 1) {
|
|
|
|
var code = val.charCodeAt(0);
|
|
|
|
if (code < 256) {
|
|
|
|
val = code;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if (encoding !== undefined && typeof encoding !== 'string') {
|
|
|
|
throw new TypeError('encoding must be a string')
|
|
|
|
}
|
|
|
|
if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) {
|
|
|
|
throw new TypeError('Unknown encoding: ' + encoding)
|
|
|
|
}
|
|
|
|
} else if (typeof val === 'number') {
|
|
|
|
val = val & 255;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Invalid ranges are not set to a default, so can range check early.
|
|
|
|
if (start < 0 || this.length < start || this.length < end) {
|
|
|
|
throw new RangeError('Out of range index')
|
|
|
|
}
|
|
|
|
|
|
|
|
if (end <= start) {
|
|
|
|
return this
|
|
|
|
}
|
|
|
|
|
|
|
|
start = start >>> 0;
|
|
|
|
end = end === undefined ? this.length : end >>> 0;
|
|
|
|
|
|
|
|
if (!val) val = 0;
|
|
|
|
|
|
|
|
var i;
|
|
|
|
if (typeof val === 'number') {
|
|
|
|
for (i = start; i < end; ++i) {
|
|
|
|
this[i] = val;
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
var bytes = internalIsBuffer(val)
|
|
|
|
? val
|
|
|
|
: utf8ToBytes(new Buffer(val, encoding).toString());
|
|
|
|
var len = bytes.length;
|
|
|
|
for (i = 0; i < end - start; ++i) {
|
|
|
|
this[i + start] = bytes[i % len];
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return this
|
|
|
|
};
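// Illustrative usage of Buffer#fill above (added comment, not in the original source):
//
//   var b = new Buffer(6);
//   b.fill(0);           // <00 00 00 00 00 00>
//   b.fill('ab', 1, 5);  // bytes 1..4 become the repeating pattern 'a','b','a','b'
//   b.fill(0xff, 5);     // the last byte becomes 0xff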
|
|
|
|
|
|
|
|
// HELPER FUNCTIONS
|
|
|
|
// ================
|
|
|
|
|
|
|
|
var INVALID_BASE64_RE = /[^+\/0-9A-Za-z-_]/g;
|
|
|
|
|
|
|
|
function base64clean (str) {
|
|
|
|
// Node strips out invalid characters like \n and \t from the string, base64-js does not
|
|
|
|
str = stringtrim(str).replace(INVALID_BASE64_RE, '');
|
|
|
|
// Node converts strings with length < 2 to ''
|
|
|
|
if (str.length < 2) return ''
|
|
|
|
// Node allows for non-padded base64 strings (missing trailing ===), base64-js does not
|
|
|
|
while (str.length % 4 !== 0) {
|
|
|
|
str = str + '=';
|
|
|
|
}
|
|
|
|
return str
|
|
|
|
}
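// Illustrative behaviour of base64clean above (added comment, not in the original source):
//
//   base64clean(' aGk= ')  // -> 'aGk='  (whitespace/invalid chars stripped, padding restored)
//   base64clean('aGk')     // -> 'aGk='  (padded so length % 4 === 0)
//   base64clean('a')       // -> ''      (strings shorter than 2 chars become empty)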
|
|
|
|
|
|
|
|
function stringtrim (str) {
|
|
|
|
if (str.trim) return str.trim()
|
|
|
|
return str.replace(/^\s+|\s+$/g, '')
|
|
|
|
}
|
|
|
|
|
|
|
|
function toHex (n) {
|
|
|
|
if (n < 16) return '0' + n.toString(16)
|
|
|
|
return n.toString(16)
|
|
|
|
}
|
|
|
|
|
|
|
|
function utf8ToBytes (string, units) {
|
|
|
|
units = units || Infinity;
|
|
|
|
var codePoint;
|
|
|
|
var length = string.length;
|
|
|
|
var leadSurrogate = null;
|
|
|
|
var bytes = [];
|
|
|
|
|
|
|
|
for (var i = 0; i < length; ++i) {
|
|
|
|
codePoint = string.charCodeAt(i);
|
|
|
|
|
|
|
|
// is surrogate component
|
|
|
|
if (codePoint > 0xD7FF && codePoint < 0xE000) {
|
|
|
|
// last char was a lead
|
|
|
|
if (!leadSurrogate) {
|
|
|
|
// no lead yet
|
|
|
|
if (codePoint > 0xDBFF) {
|
|
|
|
// unexpected trail
|
|
|
|
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD);
|
|
|
|
continue
|
|
|
|
} else if (i + 1 === length) {
|
|
|
|
// unpaired lead
|
|
|
|
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD);
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
// valid lead
|
|
|
|
leadSurrogate = codePoint;
|
|
|
|
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
// 2 leads in a row
|
|
|
|
if (codePoint < 0xDC00) {
|
|
|
|
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD);
|
|
|
|
leadSurrogate = codePoint;
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
// valid surrogate pair
|
|
|
|
codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000;
|
|
|
|
} else if (leadSurrogate) {
|
|
|
|
// valid bmp char, but last char was a lead
|
|
|
|
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD);
|
|
|
|
}
|
|
|
|
|
|
|
|
leadSurrogate = null;
|
|
|
|
|
|
|
|
// encode utf8
|
|
|
|
if (codePoint < 0x80) {
|
|
|
|
if ((units -= 1) < 0) break
|
|
|
|
bytes.push(codePoint);
|
|
|
|
} else if (codePoint < 0x800) {
|
|
|
|
if ((units -= 2) < 0) break
|
|
|
|
bytes.push(
|
|
|
|
codePoint >> 0x6 | 0xC0,
|
|
|
|
codePoint & 0x3F | 0x80
|
|
|
|
);
|
|
|
|
} else if (codePoint < 0x10000) {
|
|
|
|
if ((units -= 3) < 0) break
|
|
|
|
bytes.push(
|
|
|
|
codePoint >> 0xC | 0xE0,
|
|
|
|
codePoint >> 0x6 & 0x3F | 0x80,
|
|
|
|
codePoint & 0x3F | 0x80
|
|
|
|
);
|
|
|
|
} else if (codePoint < 0x110000) {
|
|
|
|
if ((units -= 4) < 0) break
|
|
|
|
bytes.push(
|
|
|
|
codePoint >> 0x12 | 0xF0,
|
|
|
|
codePoint >> 0xC & 0x3F | 0x80,
|
|
|
|
codePoint >> 0x6 & 0x3F | 0x80,
|
|
|
|
codePoint & 0x3F | 0x80
|
|
|
|
);
|
|
|
|
} else {
|
|
|
|
throw new Error('Invalid code point')
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return bytes
|
|
|
|
}
|
|
|
|
|
|
|
|
function asciiToBytes (str) {
|
|
|
|
var byteArray = [];
|
|
|
|
for (var i = 0; i < str.length; ++i) {
|
|
|
|
// Node's code seems to be doing this and not & 0x7F..
|
|
|
|
byteArray.push(str.charCodeAt(i) & 0xFF);
|
|
|
|
}
|
|
|
|
return byteArray
|
|
|
|
}
|
|
|
|
|
|
|
|
function utf16leToBytes (str, units) {
|
|
|
|
var c, hi, lo;
|
|
|
|
var byteArray = [];
|
|
|
|
for (var i = 0; i < str.length; ++i) {
|
|
|
|
if ((units -= 2) < 0) break
|
|
|
|
|
|
|
|
c = str.charCodeAt(i);
|
|
|
|
hi = c >> 8;
|
|
|
|
lo = c % 256;
|
|
|
|
byteArray.push(lo);
|
|
|
|
byteArray.push(hi);
|
|
|
|
}
|
|
|
|
|
|
|
|
return byteArray
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
function base64ToBytes (str) {
|
|
|
|
return toByteArray(base64clean(str))
|
|
|
|
}
|
|
|
|
|
|
|
|
function blitBuffer (src, dst, offset, length) {
|
|
|
|
for (var i = 0; i < length; ++i) {
|
|
|
|
if ((i + offset >= dst.length) || (i >= src.length)) break
|
|
|
|
dst[i + offset] = src[i];
|
|
|
|
}
|
|
|
|
return i
|
|
|
|
}
|
|
|
|
|
|
|
|
function isnan (val) {
|
|
|
|
return val !== val // eslint-disable-line no-self-compare
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// the following is from is-buffer, also by Feross Aboukhadijeh and with the same license
// The _isBuffer check is for Safari 5-7 support, because it's missing
// Object.prototype.constructor. Remove this eventually
function isBuffer(obj) {
  return obj != null && (!!obj._isBuffer || isFastBuffer(obj) || isSlowBuffer(obj))
}
|
|
|
|
|
|
|
|
function isFastBuffer (obj) {
|
|
|
|
return !!obj.constructor && typeof obj.constructor.isBuffer === 'function' && obj.constructor.isBuffer(obj)
|
|
|
|
}
|
|
|
|
|
|
|
|
// For Node v0.10 support. Remove this eventually.
|
|
|
|
function isSlowBuffer (obj) {
|
|
|
|
return typeof obj.readFloatLE === 'function' && typeof obj.slice === 'function' && isFastBuffer(obj.slice(0, 0))
|
|
|
|
}
|
|
|
|
|
|
|
|
// Copyright Joyent, Inc. and other Node contributors.
var isBufferEncoding = Buffer.isEncoding
  || function(encoding) {
       switch (encoding && encoding.toLowerCase()) {
         case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': case 'raw': return true;
         default: return false;
       }
     };
|
|
|
|
|
|
|
|
|
|
|
|
function assertEncoding(encoding) {
|
|
|
|
if (encoding && !isBufferEncoding(encoding)) {
|
|
|
|
throw new Error('Unknown encoding: ' + encoding);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// StringDecoder provides an interface for efficiently splitting a series of
|
|
|
|
// buffers into a series of JS strings without breaking apart multi-byte
|
|
|
|
// characters. CESU-8 is handled as part of the UTF-8 encoding.
|
|
|
|
//
|
|
|
|
// @TODO Handling all encodings inside a single object makes it very difficult
|
|
|
|
// to reason about this code, so it should be split up in the future.
|
|
|
|
// @TODO There should be a utf8-strict encoding that rejects invalid UTF-8 code
|
|
|
|
// points as used by CESU-8.
|
|
|
|
function StringDecoder(encoding) {
|
|
|
|
this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, '');
|
|
|
|
assertEncoding(encoding);
|
|
|
|
switch (this.encoding) {
|
|
|
|
case 'utf8':
|
|
|
|
// CESU-8 represents each of Surrogate Pair by 3-bytes
|
|
|
|
this.surrogateSize = 3;
|
|
|
|
break;
|
|
|
|
case 'ucs2':
|
|
|
|
case 'utf16le':
|
|
|
|
// UTF-16 represents each of Surrogate Pair by 2-bytes
|
|
|
|
this.surrogateSize = 2;
|
|
|
|
this.detectIncompleteChar = utf16DetectIncompleteChar;
|
|
|
|
break;
|
|
|
|
case 'base64':
|
|
|
|
// Base-64 stores 3 bytes in 4 chars, and pads the remainder.
|
|
|
|
this.surrogateSize = 3;
|
|
|
|
this.detectIncompleteChar = base64DetectIncompleteChar;
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
this.write = passThroughWrite;
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Enough space to store all bytes of a single character. UTF-8 needs 4
|
|
|
|
// bytes, but CESU-8 may require up to 6 (3 bytes per surrogate).
|
|
|
|
this.charBuffer = new Buffer(6);
|
|
|
|
// Number of bytes received for the current incomplete multi-byte character.
|
|
|
|
this.charReceived = 0;
|
|
|
|
// Number of bytes expected for the current incomplete multi-byte character.
|
|
|
|
this.charLength = 0;
|
|
|
|
}
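// Illustrative usage of StringDecoder above (added comment, not in the original source):
// a multi-byte UTF-8 character split across two buffers is held back until it is complete.
//
//   var decoder = new StringDecoder('utf8');
//   var euro = new Buffer([0xE2, 0x82, 0xAC]);  // '€' is 3 bytes in UTF-8
//   decoder.write(euro.slice(0, 1));            // -> ''  (incomplete, buffered)
//   decoder.write(euro.slice(1));               // -> '€'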
|
|
|
|
|
|
|
|
// write decodes the given buffer and returns it as JS string that is
|
|
|
|
// guaranteed to not contain any partial multi-byte characters. Any partial
|
|
|
|
// character found at the end of the buffer is buffered up, and will be
|
|
|
|
// returned when calling write again with the remaining bytes.
|
|
|
|
//
|
|
|
|
// Note: Converting a Buffer containing an orphan surrogate to a String
|
|
|
|
// currently works, but converting a String to a Buffer (via `new Buffer`, or
|
|
|
|
// Buffer#write) will replace incomplete surrogates with the unicode
|
|
|
|
// replacement character. See https://codereview.chromium.org/121173009/ .
|
|
|
|
StringDecoder.prototype.write = function(buffer) {
|
|
|
|
var charStr = '';
|
|
|
|
// if our last write ended with an incomplete multibyte character
|
|
|
|
while (this.charLength) {
|
|
|
|
// determine how many remaining bytes this buffer has to offer for this char
|
|
|
|
var available = (buffer.length >= this.charLength - this.charReceived) ?
|
|
|
|
this.charLength - this.charReceived :
|
|
|
|
buffer.length;
|
|
|
|
|
|
|
|
// add the new bytes to the char buffer
|
|
|
|
buffer.copy(this.charBuffer, this.charReceived, 0, available);
|
|
|
|
this.charReceived += available;
|
|
|
|
|
|
|
|
if (this.charReceived < this.charLength) {
|
|
|
|
// still not enough chars in this buffer? wait for more ...
|
|
|
|
return '';
|
|
|
|
}
|
|
|
|
|
|
|
|
// remove bytes belonging to the current character from the buffer
|
|
|
|
buffer = buffer.slice(available, buffer.length);
|
|
|
|
|
|
|
|
// get the character that was split
|
|
|
|
charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding);
|
|
|
|
|
|
|
|
// CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
|
|
|
|
var charCode = charStr.charCodeAt(charStr.length - 1);
|
|
|
|
if (charCode >= 0xD800 && charCode <= 0xDBFF) {
|
|
|
|
this.charLength += this.surrogateSize;
|
|
|
|
charStr = '';
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
this.charReceived = this.charLength = 0;
|
|
|
|
|
|
|
|
// if there are no more bytes in this buffer, just emit our char
|
|
|
|
if (buffer.length === 0) {
|
|
|
|
return charStr;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
// determine and set charLength / charReceived
|
|
|
|
this.detectIncompleteChar(buffer);
|
|
|
|
|
|
|
|
var end = buffer.length;
|
|
|
|
if (this.charLength) {
|
|
|
|
// buffer the incomplete character bytes we got
|
|
|
|
buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end);
|
|
|
|
end -= this.charReceived;
|
|
|
|
}
|
|
|
|
|
|
|
|
charStr += buffer.toString(this.encoding, 0, end);
|
|
|
|
|
|
|
|
var end = charStr.length - 1;
|
|
|
|
var charCode = charStr.charCodeAt(end);
|
|
|
|
// CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
|
|
|
|
if (charCode >= 0xD800 && charCode <= 0xDBFF) {
|
|
|
|
var size = this.surrogateSize;
|
|
|
|
this.charLength += size;
|
|
|
|
this.charReceived += size;
|
|
|
|
this.charBuffer.copy(this.charBuffer, size, 0, size);
|
|
|
|
buffer.copy(this.charBuffer, 0, 0, size);
|
|
|
|
return charStr.substring(0, end);
|
|
|
|
}
|
|
|
|
|
|
|
|
// or just emit the charStr
|
|
|
|
return charStr;
|
|
|
|
};
|
|
|
|
|
|
|
|
// detectIncompleteChar determines if there is an incomplete UTF-8 character at
// the end of the given buffer. If so, it sets this.charLength to the byte
// length of that character, and sets this.charReceived to the number of bytes
// that are available for this character.
|
|
|
|
StringDecoder.prototype.detectIncompleteChar = function(buffer) {
|
|
|
|
// determine how many bytes we have to check at the end of this buffer
|
|
|
|
var i = (buffer.length >= 3) ? 3 : buffer.length;
|
|
|
|
|
|
|
|
// Figure out if one of the last i bytes of our buffer announces an
|
|
|
|
// incomplete char.
|
|
|
|
for (; i > 0; i--) {
|
|
|
|
var c = buffer[buffer.length - i];
|
|
|
|
|
|
|
|
// See http://en.wikipedia.org/wiki/UTF-8#Description
|
|
|
|
|
|
|
|
// 110XXXXX
|
|
|
|
if (i == 1 && c >> 5 == 0x06) {
|
|
|
|
this.charLength = 2;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
// 1110XXXX
|
|
|
|
if (i <= 2 && c >> 4 == 0x0E) {
|
|
|
|
this.charLength = 3;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
// 11110XXX
|
|
|
|
if (i <= 3 && c >> 3 == 0x1E) {
|
|
|
|
this.charLength = 4;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
this.charReceived = i;
|
|
|
|
};
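// Worked example for detectIncompleteChar above (added comment, not in the original source):
// the checks mirror the UTF-8 lead-byte patterns - 110xxxxx starts a 2-byte sequence,
// 1110xxxx a 3-byte sequence, 11110xxx a 4-byte sequence. A buffer ending in
// [..., 0xE2, 0x82] therefore yields charLength = 3 and charReceived = 2, so write()
// withholds those two bytes until the third arrives.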
|
|
|
|
|
|
|
|
StringDecoder.prototype.end = function(buffer) {
|
|
|
|
var res = '';
|
|
|
|
if (buffer && buffer.length)
|
|
|
|
res = this.write(buffer);
|
|
|
|
|
|
|
|
if (this.charReceived) {
|
|
|
|
var cr = this.charReceived;
|
|
|
|
var buf = this.charBuffer;
|
|
|
|
var enc = this.encoding;
|
|
|
|
res += buf.slice(0, cr).toString(enc);
|
|
|
|
}
|
|
|
|
|
|
|
|
return res;
|
|
|
|
};
|
|
|
|
|
|
|
|
function passThroughWrite(buffer) {
|
|
|
|
return buffer.toString(this.encoding);
|
|
|
|
}
|
|
|
|
|
|
|
|
function utf16DetectIncompleteChar(buffer) {
|
|
|
|
this.charReceived = buffer.length % 2;
|
|
|
|
this.charLength = this.charReceived ? 2 : 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
function base64DetectIncompleteChar(buffer) {
|
|
|
|
this.charReceived = buffer.length % 3;
|
|
|
|
this.charLength = this.charReceived ? 3 : 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
const BUFFER_MAX_BYTES = 524288; // 0.5Mb

const CONTINUE_READING_RECORDS = "CONTINUE_READING";
const READ_REMAINING_TEXT = "READ_REMAINING";
const CANCEL_READ = "CANCEL";

const getIndexWriter = (hierarchy, indexNode, readableStream, writableStream, end) => {
  const schema = generateSchema(hierarchy, indexNode);

  return ({
    read: read$1(readableStream, schema),
    updateIndex: updateIndex(readableStream, writableStream, schema)
  });
};
|
|
|
|
|
|
|
|
const getIndexReader = (hierarchy, indexNode, readableStream) =>
|
|
|
|
read$1(
|
|
|
|
readableStream,
|
|
|
|
generateSchema(hierarchy, indexNode)
|
|
|
|
);
|
|
|
|
|
|
|
|
const updateIndex = (readableStream, writableStream, schema) => async (itemsToWrite, keysToRemove) => {
|
|
|
|
const write = newOutputWriter(BUFFER_MAX_BYTES, writableStream);
|
|
|
|
const writtenItems = [];
|
|
|
|
await read$1(readableStream, schema)(
|
|
|
|
async indexedItem => {
|
|
|
|
const updated = find(i => indexedItem.key === i.key)(itemsToWrite);
|
|
|
|
const removed = find(k => indexedItem.key === k)(keysToRemove);
|
|
|
|
|
|
|
|
if(isSomething(removed))
|
|
|
|
return CONTINUE_READING_RECORDS;
|
|
|
|
|
|
|
|
if(isSomething(updated)) {
|
|
|
|
const serializedItem = serializeItem(schema, updated);
|
|
|
|
await write(serializedItem);
|
|
|
|
writtenItems.push(updated);
|
|
|
|
} else {
|
|
|
|
await write(
|
|
|
|
serializeItem(schema, indexedItem)
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
return CONTINUE_READING_RECORDS;
|
|
|
|
|
|
|
|
},
|
|
|
|
async text => await write(text)
|
|
|
|
);
|
|
|
|
|
|
|
|
if(writtenItems.length !== itemsToWrite.length) {
|
|
|
|
const toAdd = difference(itemsToWrite, writtenItems);
|
|
|
|
for(let added of toAdd) {
|
|
|
|
await write(
|
|
|
|
serializeItem(schema, added)
|
|
|
|
);
|
|
|
|
}
|
|
|
|
  } else if(writtenItems.length === 0) {
    // there may be no records at all - write an empty index file
    await write("");
  }
|
|
|
|
|
|
|
|
await write();
|
|
|
|
await writableStream.end();
|
|
|
|
};
|
|
|
|
|
|
|
|
const read$1 = (readableStream, schema) => async (onGetItem, onGetText) => {
|
|
|
|
const readInput = newInputReader(readableStream);
|
|
|
|
let text = await readInput();
|
|
|
|
let status = CONTINUE_READING_RECORDS;
|
|
|
|
while(text.length > 0) {
|
|
|
|
|
|
|
|
if(status === READ_REMAINING_TEXT) {
|
|
|
|
await onGetText(text);
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
|
|
|
if(status === CANCEL_READ) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
let rowText = "";
|
|
|
|
let currentCharIndex=0;
|
|
|
|
for(let currentChar of text) {
|
|
|
|
rowText += currentChar;
|
|
|
|
if(currentChar === "\r") {
|
|
|
|
status = await onGetItem(
|
|
|
|
deserializeRow(schema, rowText)
|
|
|
|
);
|
|
|
|
rowText = "";
|
|
|
|
if(status === READ_REMAINING_TEXT) {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
currentCharIndex++;
|
|
|
|
}
|
|
|
|
|
|
|
|
if(currentCharIndex < text.length -1) {
|
|
|
|
await onGetText(text.substring(currentCharIndex + 1));
|
|
|
|
}
|
|
|
|
|
|
|
|
text = await readInput();
|
|
|
|
}
|
|
|
|
|
|
|
|
await readableStream.destroy();
|
|
|
|
|
|
|
|
};
|
|
|
|
|
|
|
|
const newOutputWriter = (flushBoundary, writableStream) => {

  let currentBuffer = null;

  return async (text) => {

    if(isString(text) && currentBuffer === null)
      currentBuffer = Buffer$1.from(text, "utf8");
    else if(isString(text))
      currentBuffer = Buffer$1.concat([
        currentBuffer,
        Buffer$1.from(text, "utf8")
      ]);

    if(currentBuffer !== null &&
        (currentBuffer.length > flushBoundary
         || !isString(text))) {

      await writableStream.write(currentBuffer);
      currentBuffer = null;
    }
  }
};
|
|
|
|
|
|
|
|
const newInputReader = (readableStream) => {
|
|
|
|
|
|
|
|
const decoder = new StringDecoder('utf8');
|
|
|
|
let remainingBytes = [];
|
|
|
|
|
|
|
|
return async () => {
|
|
|
|
|
|
|
|
let nextBytesBuffer = await readableStream.read(BUFFER_MAX_BYTES);
|
|
|
|
const remainingBuffer = Buffer$1.from(remainingBytes);
|
|
|
|
|
|
|
|
if(!nextBytesBuffer) nextBytesBuffer = Buffer$1.from([]);
|
|
|
|
|
|
|
|
const moreToRead = nextBytesBuffer.length === BUFFER_MAX_BYTES;
|
|
|
|
|
|
|
|
const buffer = Buffer$1.concat(
|
|
|
|
[remainingBuffer, nextBytesBuffer],
|
|
|
|
remainingBuffer.length + nextBytesBuffer.length);
|
|
|
|
|
|
|
|
const text = decoder.write(buffer);
|
|
|
|
remainingBytes = decoder.end(buffer);
|
|
|
|
|
|
|
|
    if(!moreToRead && remainingBytes.length > 0) {
      // if, for any reason, we have remaining bytes at the end
      // of the stream, just discard them - we don't see why this
      // should ever happen, but if it does, it could cause a stack overflow
      remainingBytes = [];
    }
|
|
|
|
|
|
|
|
return text;
|
|
|
|
};
|
|
|
|
};
|
|
|
|
|
|
|
|
const deserializeRow = (schema, rowText) => {
|
|
|
|
let currentPropIndex = 0;
|
|
|
|
let currentCharIndex = 0;
|
|
|
|
let currentValueText = "";
|
|
|
|
let isEscaped = false;
|
|
|
|
const item = {};
|
|
|
|
|
|
|
|
const setCurrentProp = () => {
|
|
|
|
const currentProp = schema[currentPropIndex];
|
|
|
|
const type = getType(currentProp.type);
|
|
|
|
const value = currentValueText === ""
|
|
|
|
? type.getDefaultValue()
|
|
|
|
: type.safeParseValue(
|
|
|
|
currentValueText);
|
|
|
|
item[currentProp.name] = value;
|
|
|
|
};
|
|
|
|
|
|
|
|
while(currentPropIndex < schema.length) {
|
|
|
|
|
|
|
|
if(currentCharIndex < rowText.length) {
|
|
|
|
const currentChar = rowText[currentCharIndex];
|
|
|
|
if(isEscaped) {
|
|
|
|
if(currentChar === "r") {
|
|
|
|
currentValueText += "\r";
|
|
|
|
} else {
|
|
|
|
currentValueText += currentChar;
|
|
|
|
}
|
|
|
|
isEscaped = false;
|
|
|
|
} else {
|
|
|
|
if(currentChar === ",") {
|
|
|
|
setCurrentProp();
|
|
|
|
currentValueText = "";
|
|
|
|
currentPropIndex++;
|
|
|
|
} else if(currentChar === "\\") {
|
|
|
|
isEscaped = true;
|
|
|
|
} else {
|
|
|
|
currentValueText += currentChar;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
currentCharIndex++;
|
|
|
|
} else {
|
|
|
|
currentValueText = "";
|
|
|
|
setCurrentProp();
|
|
|
|
currentPropIndex++;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return item;
|
|
|
|
};
|
|
|
|
|
|
|
|
const serializeItem = (schema, item) => {

  let rowText = "";

  for(let prop of schema) {
    const type = getType(prop.type);
    const value = has(prop.name)(item)
      ? item[prop.name]
      : type.getDefaultValue();

    const valStr = type.stringify(value);

    for(let i = 0; i < valStr.length; i++) {
      const currentChar = valStr[i];
      if(currentChar === ","
         || currentChar === "\r"
         || currentChar === "\\") {
        rowText += "\\";
      }

      if(currentChar === "\r") {
        rowText += "r";
      } else {
        rowText += currentChar;
      }
    }

    rowText += ",";
  }

  rowText += "\r";
  return rowText;
};
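// Illustrative round trip for serializeItem / deserializeRow above (added comment, not in
// the original source). The schema and values are made-up examples: fields are comma
// separated, rows end with "\r", and ",", "\r" and "\\" inside a value are escaped with a
// backslash ("\r" itself is written as the two characters "\" and "r"):
//
//   // given a schema like [{ name: 'surname', type: 'string' }, { name: 'age', type: 'number' }]
//   // serializeItem(schema, { surname: 'Smith, John', age: 42 })
//   //   -> 'Smith\\, John,42,\r'
//   // deserializeRow(schema, 'Smith\\, John,42,\r')
//   //   -> { surname: 'Smith, John', age: 42 }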
|
|
|
|
|
|
|
|
const readIndex$1 = async (hierarchy, datastore, index, indexedDataKey) => {
|
|
|
|
const records = [];
|
|
|
|
const doRead = iterateIndex(
|
|
|
|
async item => {
|
|
|
|
records.push(item);
|
|
|
|
return CONTINUE_READING_RECORDS;
|
|
|
|
},
|
|
|
|
async () => records
|
|
|
|
);
|
|
|
|
|
|
|
|
return await doRead(hierarchy, datastore, index, indexedDataKey);
|
|
|
|
};
|
|
|
|
|
|
|
|
const searchIndex = async (hierarchy, datastore, index, indexedDataKey, searchPhrase) => {
|
|
|
|
const records = [];
|
|
|
|
const schema = generateSchema(hierarchy, index);
|
|
|
|
const doRead = iterateIndex(
|
|
|
|
async item => {
|
|
|
|
const idx = lunr(function () {
|
|
|
|
this.ref('key');
|
|
|
|
for (const field of schema) {
|
|
|
|
this.field(field.name);
|
|
|
|
}
|
|
|
|
this.add(item);
|
|
|
|
});
|
|
|
|
const searchResults = idx.search(searchPhrase);
|
|
|
|
if (searchResults.length === 1) {
|
|
|
|
item._searchResult = searchResults[0];
|
|
|
|
records.push(item);
|
|
|
|
}
|
|
|
|
return CONTINUE_READING_RECORDS;
|
|
|
|
},
|
|
|
|
async () => records
|
|
|
|
);
|
|
|
|
|
|
|
|
return await doRead(hierarchy, datastore, index, indexedDataKey);
|
|
|
|
};
|
|
|
|
|
|
|
|
const iterateIndex = (onGetItem, getFinalResult) => async (hierarchy, datastore, index, indexedDataKey) => {
|
|
|
|
try {
|
|
|
|
const readableStream = promiseReadableStream(
|
|
|
|
await datastore.readableFileStream(indexedDataKey)
|
|
|
|
);
|
|
|
|
|
|
|
|
const read = getIndexReader(hierarchy, index, readableStream);
|
|
|
|
await read(onGetItem);
|
|
|
|
return getFinalResult();
|
|
|
|
} catch (e) {
|
|
|
|
if (await datastore.exists(indexedDataKey)) {
|
|
|
|
throw e;
|
|
|
|
} else {
|
|
|
|
await createIndexFile(
|
|
|
|
datastore,
|
|
|
|
indexedDataKey,
|
|
|
|
index,
|
|
|
|
);
|
|
|
|
}
|
|
|
|
return [];
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
const getIndexDir = (hierarchy, indexKey) => {

  const parentKey = getParentKey(indexKey);

  if(parentKey === "") return indexKey;
  if(parentKey === keySep) return indexKey;

  const recordInfo = getRecordInfo(
    hierarchy,
    parentKey);

  return recordInfo.child(
    getLastPartInKey(indexKey));
};
|
|
|
|
|
const listItems = app => async (indexKey, options) => {
  indexKey = safeKey(indexKey);
  return apiWrapper(
    app,
    events.indexApi.listItems,
    permission.readIndex.isAuthorized(indexKey),
    { indexKey, options },
    _listItems, app, indexKey, options,
  );
};
|
|
|
|
|
|
|
const defaultOptions = { rangeStartParams: null, rangeEndParams: null, searchPhrase: null };

const _listItems = async (app, indexKey, options = defaultOptions) => {
  const { searchPhrase, rangeStartParams, rangeEndParams } = $({}, [
    merge$1(options),
    merge$1(defaultOptions),
  ]);

  const getItems = async indexedDataKey => (isNonEmptyString(searchPhrase)
    ? await searchIndex(
      app.hierarchy,
      app.datastore,
      indexNode,
      indexedDataKey,
      searchPhrase,
    )
    : await readIndex$1(
      app.hierarchy,
      app.datastore,
      indexNode,
      indexedDataKey,
    ));

  indexKey = safeKey(indexKey);
  const indexNode = getExactNodeForKey(app.hierarchy)(indexKey);
  const indexDir = getIndexDir(app.hierarchy, indexKey);

  if (!isIndex(indexNode)) { throw new Error('supplied key is not an index'); }

  if (isShardedIndex(indexNode)) {
    const shardKeys = await getShardKeysInRange(
      app, indexNode, indexDir, rangeStartParams, rangeEndParams,
    );
    const items = [];
    for (const k of shardKeys) {
      items.push(await getItems(k));
    }
    return flatten(items);
  }
  return await getItems(
    getUnshardedIndexDataKey(indexDir),
  );
};
|
|
|
|
|
const getContext = app => recordKey => {
  recordKey = safeKey(recordKey);
  return apiWrapperSync(
    app,
    events.recordApi.getContext,
    permission.readRecord.isAuthorized(recordKey),
    { recordKey },
    _getContext, app, recordKey,
  );
};
|
|
|
|
|
|
|
const _getContext = (app, recordKey) => {
  recordKey = safeKey(recordKey);
  const recordNode = getExactNodeForKey(app.hierarchy)(recordKey);

  const cachedReferenceIndexes = {};

  const lazyLoadReferenceIndex = async (typeOptions) => {
    if (!has(typeOptions.indexNodeKey)(cachedReferenceIndexes)) {
      cachedReferenceIndexes[typeOptions.indexNodeKey] = {
        typeOptions,
        data: await readReferenceIndex(
          app, recordKey, typeOptions,
        ),
      };
    }

    return cachedReferenceIndexes[typeOptions.indexNodeKey];
  };

  const getTypeOptions = typeOptions_or_fieldName => (isString(typeOptions_or_fieldName)
    ? findField(recordNode, typeOptions_or_fieldName)
      .typeOptions
    : typeOptions_or_fieldName);

  return {
    referenceExists: async (typeOptions_or_fieldName, key) => {
      const typeOptions = getTypeOptions(typeOptions_or_fieldName);
      const { data } = await lazyLoadReferenceIndex(typeOptions);
      return some(i => i.key === key)(data);
    },
    referenceOptions: async (typeOptions_or_fieldName) => {
      const typeOptions = getTypeOptions(typeOptions_or_fieldName);
      const { data } = await lazyLoadReferenceIndex(typeOptions);
      return data;
    },
    recordNode,
  };
};
|
|
|
|
|
|
|
|
const readReferenceIndex = async (app, recordKey, typeOptions) => {
|
|
|
|
const indexNode = getNode(app.hierarchy, typeOptions.indexNodeKey);
|
|
|
|
const indexKey = isGlobalIndex(indexNode)
|
|
|
|
? indexNode.nodeKey()
|
|
|
|
: getIndexKey_BasedOnDecendant(
|
|
|
|
recordKey, indexNode,
|
|
|
|
);
|
|
|
|
|
|
|
|
const items = await listItems(app)(indexKey);
|
|
|
|
return $(items, [
|
|
|
|
map(i => ({
|
|
|
|
key: i.key,
|
|
|
|
value: i[typeOptions.displayValue],
|
|
|
|
})),
|
|
|
|
]);
|
|
|
|
};
|
|
|
|
|
|
|
|
const fieldParseError = (fieldName, value) => ({
|
|
|
|
fields: [fieldName],
|
|
|
|
message: `Could not parse field ${fieldName}:${value}`,
|
|
|
|
});
|
|
|
|
|
|
|
|
const validateAllFieldParse = (record, recordNode) => $(recordNode.fields, [
|
|
|
|
map(f => ({ name: f.name, parseResult: validateFieldParse(f, record) })),
|
|
|
|
reduce((errors, f) => {
|
|
|
|
if (f.parseResult.success) return errors;
|
|
|
|
errors.push(
|
|
|
|
fieldParseError(f.name, f.parseResult.value),
|
|
|
|
);
|
|
|
|
return errors;
|
|
|
|
}, []),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const validateAllTypeConstraints = async (record, recordNode, context) => {
|
|
|
|
const errors = [];
|
|
|
|
for (const field of recordNode.fields) {
|
|
|
|
$(await validateTypeConstraints$1(field, record, context), [
|
|
|
|
filter(isNonEmptyString),
|
|
|
|
map(m => ({ message: m, fields: [field.name] })),
|
|
|
|
each(e => errors.push(e)),
|
|
|
|
]);
|
|
|
|
}
|
|
|
|
return errors;
|
|
|
|
};
|
|
|
|
|
|
|
|
const runRecordValidationRules = (record, recordNode) => {
|
|
|
|
const runValidationRule = (rule) => {
|
|
|
|
const isValid = compileExpression$1(rule.expressionWhenValid);
|
|
|
|
const expressionContext = { record, _ };
|
|
|
|
return (isValid(expressionContext)
|
|
|
|
? { valid: true }
|
|
|
|
: ({
|
|
|
|
valid: false,
|
|
|
|
fields: rule.invalidFields,
|
|
|
|
message: rule.messageWhenInvalid,
|
|
|
|
}));
|
|
|
|
};
|
|
|
|
|
|
|
|
return $(recordNode.validationRules, [
|
|
|
|
map(runValidationRule),
|
|
|
|
flatten,
|
|
|
|
filter(r => r.valid === false),
|
|
|
|
map(r => ({ fields: r.fields, message: r.message })),
|
|
|
|
]);
|
|
|
|
};
|
|
|
|
|
|
|
|
const validate = app => async (record, context) => {
  context = isNothing(context)
    ? _getContext(app, record.key)
    : context;

  const recordNode = getExactNodeForKey(app.hierarchy)(record.key);
  const fieldParseFails = validateAllFieldParse(record, recordNode);

  // non parsing would cause further issues - exit here
  if (!isEmpty(fieldParseFails)) { return ({ isValid: false, errors: fieldParseFails }); }

  const recordValidationRuleFails = runRecordValidationRules(record, recordNode);
  const typeContraintFails = await validateAllTypeConstraints(record, recordNode, context);

  if (isEmpty(fieldParseFails)
     && isEmpty(recordValidationRuleFails)
     && isEmpty(typeContraintFails)) {
    return ({ isValid: true, errors: [] });
  }

  return ({
    isValid: false,
    errors: _.union(fieldParseFails, typeContraintFails, recordValidationRuleFails),
  });
};
|
|
|
|
|
const ensureCollectionIsInitialised = async (datastore, node, dir) => {
  if (!await datastore.exists(dir)) {
    await datastore.createFolder(dir);
    await datastore.createFolder(joinKey(dir, node.nodeId));
  }
};
|
|
|
|
|
|
|
|
const initialiseRootCollections = async (datastore, hierarchy) => {
|
|
|
|
const rootCollectionRecord = allTrue(
|
|
|
|
n => isRoot(n.parent()),
|
|
|
|
isCollectionRecord,
|
|
|
|
);
|
|
|
|
|
|
|
|
const flathierarchy = getFlattenedHierarchy(hierarchy);
|
|
|
|
|
|
|
|
const collectionRecords = $(flathierarchy, [
|
|
|
|
filter(rootCollectionRecord),
|
|
|
|
]);
|
|
|
|
|
|
|
|
  for (const col of collectionRecords) {
    await ensureCollectionIsInitialised(
      datastore,
      col,
      col.collectionPathRegx()
    );
  }
};
|
|
|
|
|
const initialiseChildCollections = async (app, recordInfo) => {
  const childCollectionRecords = $(recordInfo.recordNode, [
    n => n.children,
    filter(isCollectionRecord),
  ]);

  for (const child of childCollectionRecords) {
    await ensureCollectionIsInitialised(
      app.datastore,
      child,
      recordInfo.child(child.collectionName),
    );
  }
};
|
|
|
|
|
|
|
|
const TRANSACTIONS_FOLDER = `${keySep}.transactions`;
|
|
|
|
const LOCK_FILENAME = 'lock';
|
|
|
|
const LOCK_FILE_KEY = joinKey(
|
|
|
|
TRANSACTIONS_FOLDER, LOCK_FILENAME,
|
|
|
|
);
|
|
|
|
const idSep = '$';
|
|
|
|
|
|
|
|
const isOfType = typ => trans => trans.transactionType === typ;
|
|
|
|
|
|
|
|
const CREATE_RECORD_TRANSACTION = 'create';
|
|
|
|
const UPDATE_RECORD_TRANSACTION = 'update';
|
|
|
|
const DELETE_RECORD_TRANSACTION = 'delete';
|
|
|
|
const BUILD_INDEX_TRANSACTION = 'build';
|
|
|
|
|
|
|
|
const isUpdate = isOfType(UPDATE_RECORD_TRANSACTION);
|
|
|
|
const isDelete = isOfType(DELETE_RECORD_TRANSACTION);
|
|
|
|
const isCreate = isOfType(CREATE_RECORD_TRANSACTION);
|
|
|
|
const isBuildIndex = isOfType(BUILD_INDEX_TRANSACTION);
|
|
|
|
|
|
|
|
const keyToFolderName = nodeKey => getHashCode(nodeKey);
|
|
|
|
|
|
|
|
const getTransactionId = (recordId, transactionType, uniqueId) =>
|
|
|
|
`${recordId}${idSep}${transactionType}${idSep}${uniqueId}`;
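// Illustrative shape of a transaction id produced by getTransactionId above (added comment,
// not in the original source); the record id and unique id are made-up values:
//
//   getTransactionId('1-abcd', 'update', 'xK9f2')  // -> '1-abcd$update$xK9f2'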
|
|
|
|
|
|
|
|
const buildIndexFolder = '.BUILD-';
|
|
|
|
const nodeKeyHashFromBuildFolder = folder => folder.replace(buildIndexFolder, '');
|
|
|
|
|
|
|
|
const isBuildIndexFolder = key => getLastPartInKey(key).startsWith(buildIndexFolder);
|
|
|
|
|
|
|
|
const IndexNodeKeyFolder = indexNodeKey => joinKey(
|
|
|
|
TRANSACTIONS_FOLDER,
|
|
|
|
buildIndexFolder + keyToFolderName(indexNodeKey),
|
|
|
|
);
|
|
|
|
|
|
|
|
const IndexNodeKeyBatchFolder = (indexNodeKey, count) =>
|
|
|
|
joinKey(IndexNodeKeyFolder(indexNodeKey), Math.floor(count / BUILDINDEX_BATCH_COUNT).toString());
|
|
|
|
|
|
|
|
const BUILDINDEX_BATCH_COUNT = 1000;
|
|
|
|
const timeoutMilliseconds = 30 * 1000; // 30 secs
|
|
|
|
const maxLockRetries = 1;
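// Illustrative sketch of the naming scheme above (not executed; ids, keys and
// the hash are made up). A transaction id is "<recordId>$<type>$<uniqueId>",
// and build-index transactions are batched into numbered sub-folders of 1000:
//
//   getTransactionId('1-abc123', CREATE_RECORD_TRANSACTION, 'x9Yz')
//     // -> '1-abc123$create$x9Yz'
//   IndexNodeKeyBatchFolder('/customers/1-{id}/customer_index', 2500)
//     // -> '/.transactions/.BUILD-<hash>/2'   (2500 / 1000, floored)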

const transactionForCreateRecord = async (app, record) => await transaction(
  app.datastore, CREATE_RECORD_TRANSACTION,
  record.key, { record },
  getTransactionKey_Records,
);

const transactionForUpdateRecord = async (app, oldRecord, newRecord) => await transaction(
  app.datastore, UPDATE_RECORD_TRANSACTION,
  newRecord.key, { oldRecord, record: newRecord },
  getTransactionKey_Records,
);

const transactionForDeleteRecord = async (app, record) => await transaction(
  app.datastore, DELETE_RECORD_TRANSACTION,
  record.key, { record },
  getTransactionKey_Records,
);

const transactionForBuildIndex = async (app, indexNodeKey, recordKey, count) => {
  const transactionFolder = IndexNodeKeyBatchFolder(indexNodeKey, count);
  if (count % BUILDINDEX_BATCH_COUNT === 0) {
    await app.datastore.createFolder(transactionFolder);
  }

  return await transaction(
    app.datastore, BUILD_INDEX_TRANSACTION,
    recordKey, { recordKey },
    id => joinKey(transactionFolder, id),
  );
};

const createBuildIndexFolder = async (datastore, indexNodeKey) => await datastore.createFolder(
  IndexNodeKeyFolder(indexNodeKey),
);

const getTransactionKey_Records = id => joinKey(TRANSACTIONS_FOLDER, id);

const transaction = async (datastore, transactionType, recordKey, data, getTransactionKey) => {
  const recordId = getLastPartInKey(recordKey);
  const uniqueId = generate();
  const id = getTransactionId(
    recordId, transactionType, uniqueId,
  );

  const key = getTransactionKey(id);

  const trans = {
    transactionType,
    recordKey,
    ...data,
    id,
  };

  await datastore.createJson(
    key, trans,
  );

  return trans;
};
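// Illustrative sketch (not executed): saving a new record queues a 'create'
// transaction as a JSON file under /.transactions, to be picked up later by
// cleanupTransactions and the index machinery. Key and ids below are made up.
//
//   const trans = await transactionForCreateRecord(app, record);
//   // the datastore now contains something like:
//   //   /.transactions/1-abc123$create$x9Yz
//   // holding { transactionType: 'create', recordKey: record.key, record, id }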

const initialiseIndex = async (datastore, dir, index) => {
  const indexDir = joinKey(dir, index.name);

  await datastore.createFolder(indexDir);

  if (isShardedIndex(index)) {
    await datastore.createFile(
      getShardMapKey(indexDir),
      '[]',
    );
  } else {
    await createIndexFile(
      datastore,
      getUnshardedIndexDataKey(indexDir),
      index,
    );
  }
};

const save = app => async (record, context) => apiWrapper(
  app,
  events.recordApi.save,
  record.isNew
    ? permission.createRecord.isAuthorized(record.key)
    : permission.updateRecord.isAuthorized(record.key), { record },
  _save, app, record, context, false,
);

const _save = async (app, record, context, skipValidation = false) => {
  const recordClone = cloneDeep(record);
  if (!skipValidation) {
    const validationResult = await validate(app)(recordClone, context);
    if (!validationResult.isValid) {
      await app.publish(events.recordApi.save.onInvalid, { record, validationResult });
      throw new BadRequestError(`Save : Record Invalid : ${
        JSON.stringify(validationResult.errors)}`);
    }
  }

  const recordInfo = getRecordInfo(app.hierarchy, record.key);
  const {
    recordNode, pathInfo,
    recordJson, files,
  } = recordInfo;

  if (recordClone.isNew) {
    if (!recordNode) { throw new Error('Cannot find node for ' + record.key); }

    const transaction = await transactionForCreateRecord(
      app, recordClone,
    );
    recordClone.transactionId = transaction.id;
    await createRecordFolderPath(app.datastore, pathInfo);
    await app.datastore.createFolder(files);
    await app.datastore.createJson(recordJson, recordClone);
    await initialiseReverseReferenceIndexes(app, recordInfo);
    await initialiseAncestorIndexes(app, recordInfo);
    await initialiseChildCollections(app, recordInfo);
    await app.publish(events.recordApi.save.onRecordCreated, {
      record: recordClone,
    });
  } else {
    const oldRecord = await _loadFromInfo(app, recordInfo);
    const transaction = await transactionForUpdateRecord(
      app, oldRecord, recordClone,
    );
    recordClone.transactionId = transaction.id;
    await app.datastore.updateJson(
      recordJson,
      recordClone,
    );
    await app.publish(events.recordApi.save.onRecordUpdated, {
      old: oldRecord,
      new: recordClone,
    });
  }

  await app.cleanupTransactions();

  const returnedClone = cloneDeep(recordClone);
  returnedClone.isNew = false;
  return returnedClone;
};

const initialiseAncestorIndexes = async (app, recordInfo) => {
  for (const index of recordInfo.recordNode.indexes) {
    const indexKey = recordInfo.child(index.name);
    if (!await app.datastore.exists(indexKey)) {
      await initialiseIndex(app.datastore, recordInfo.dir, index);
    }
  }
};

const initialiseReverseReferenceIndexes = async (app, recordInfo) => {
  const indexNodes = $(fieldsThatReferenceThisRecord(app, recordInfo.recordNode), [
    map(f => $(f.typeOptions.reverseIndexNodeKeys, [
      map(n => getNode(
        app.hierarchy,
        n,
      )),
    ])),
    flatten,
  ]);

  for (const indexNode of indexNodes) {
    await initialiseIndex(
      app.datastore, recordInfo.dir, indexNode,
    );
  }
};

const fieldsThatReferenceThisRecord = (app, recordNode) => $(app.hierarchy, [
  getFlattenedHierarchy,
  filter(isRecord),
  map(n => n.fields),
  flatten,
  filter(fieldReversesReferenceToNode(recordNode)),
]);

const createRecordFolderPath = async (datastore, pathInfo) => {
  const recursiveCreateFolder = async (subdirs, dirsThatNeedCreated = undefined) => {
    // iterate backwards through the directory hierarchy
    // until we reach a folder that exists, then create the rest
    // e.g.
    // - some/folder/here
    // - some/folder
    // - some
    const thisFolder = joinKey(pathInfo.base, ...subdirs);

    if (await datastore.exists(thisFolder)) {
      let creationFolder = thisFolder;
      for (const nextDir of (dirsThatNeedCreated || [])) {
        creationFolder = joinKey(creationFolder, nextDir);
        await datastore.createFolder(creationFolder);
      }
    } else if (!dirsThatNeedCreated || dirsThatNeedCreated.length > 0) {
      dirsThatNeedCreated = !dirsThatNeedCreated
        ? []
        : dirsThatNeedCreated;

      await recursiveCreateFolder(
        take(subdirs.length - 1)(subdirs),
        [...takeRight$1(1)(subdirs), ...dirsThatNeedCreated],
      );
    }
  };

  await recursiveCreateFolder(pathInfo.subdirs);

  return joinKey(pathInfo.base, ...pathInfo.subdirs);
};
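// Walk-through sketch of the recursion above (values are made up). Given
// pathInfo = { base: '/data', subdirs: ['a1', 'b2', 'c3'] } and only '/data/a1'
// existing, the calls proceed roughly as:
//
//   recursiveCreateFolder(['a1','b2','c3'])        // '/data/a1/b2/c3' missing
//   recursiveCreateFolder(['a1','b2'], ['c3'])     // '/data/a1/b2' missing
//   recursiveCreateFolder(['a1'], ['b2','c3'])     // '/data/a1' exists
//     -> createFolder('/data/a1/b2'); createFolder('/data/a1/b2/c3')
//
// and the function returns '/data/a1/b2/c3'.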

const deleteCollection = (app, disableCleanup = false) => async key => apiWrapper(
  app,
  events.collectionApi.delete,
  permission.manageCollection.isAuthorized,
  { key },
  _deleteCollection, app, key, disableCleanup,
);

/*
  const recordNode = getCollectionNode(app.hierarchy, key);
*/

const _deleteCollection = async (app, key, disableCleanup) => {
  key = safeKey(key);
  const collectionDir = getCollectionDir(app.hierarchy, key);
  await deleteRecords(app, key);
  await deleteCollectionFolder(app, collectionDir);
  if (!disableCleanup) { await app.cleanupTransactions(); }
};

const deleteCollectionFolder = async (app, dir) =>
  await app.datastore.deleteFolder(dir);

const deleteRecords = async (app, key) => {
  const iterate = await getAllIdsIterator(app)(key);

  let ids = await iterate();
  while (!ids.done) {
    if (ids.result.collectionKey === key) {
      for (const id of ids.result.ids) {
        await _deleteRecord(
          app,
          joinKey(key, id),
          true,
        );
      }
    }

    ids = await iterate();
  }
};

const deleteRecord$1 = (app, disableCleanup = false) => async key => {
  key = safeKey(key);
  return apiWrapper(
    app,
    events.recordApi.delete,
    permission.deleteRecord.isAuthorized(key),
    { key },
    _deleteRecord, app, key, disableCleanup,
  );
};

// called deleteRecord because delete is a keyword
const _deleteRecord = async (app, key, disableCleanup) => {
  const recordInfo = getRecordInfo(app.hierarchy, key);
  key = recordInfo.key;
  const node = getExactNodeForKey(app.hierarchy)(key);

  const record = await _load(app, key);
  await transactionForDeleteRecord(app, record);

  for (const collectionRecord of node.children) {
    const collectionKey = joinKey(
      key, collectionRecord.collectionName,
    );
    await _deleteCollection(app, collectionKey, true);
  }

  await app.datastore.deleteFolder(recordInfo.dir);

  if (!disableCleanup) { await app.cleanupTransactions(); }
};

const uploadFile = app => async (recordKey, readableStream, relativeFilePath) => apiWrapper(
  app,
  events.recordApi.uploadFile,
  permission.updateRecord.isAuthorized(recordKey),
  { recordKey, readableStream, relativeFilePath },
  _uploadFile, app, recordKey, readableStream, relativeFilePath,
);

const _uploadFile = async (app, recordKey, readableStream, relativeFilePath) => {
  if (isNothing(recordKey)) { throw new BadRequestError('Record Key not supplied'); }
  if (isNothing(relativeFilePath)) { throw new BadRequestError('file path not supplied'); }
  if (!isLegalFilename(relativeFilePath)) { throw new BadRequestError('Illegal filename'); }

  const recordInfo = getRecordInfo(app.hierarchy, recordKey);
  const record = await _loadFromInfo(app, recordInfo);

  const fullFilePath = safeGetFullFilePath(
    recordInfo.dir, relativeFilePath,
  );

  const tempFilePath = `${fullFilePath}_${generate()}.temp`;

  const outputStream = await app.datastore.writableFileStream(
    tempFilePath,
  );

  return new Promise((resolve, reject) => {
    readableStream.pipe(outputStream);
    outputStream.on('error', reject);
    outputStream.on('finish', resolve);
  })
    .then(() => app.datastore.getFileSize(tempFilePath))
    .then(size => {
      const isExpectedFileSize = checkFileSizeAgainstFields(
        app, record, relativeFilePath, size,
      );
      // the names of the failing fields are not available in this scope,
      // so the error only names the file
      if (!isExpectedFileSize) { throw new BadRequestError(`Fields for ${relativeFilePath} do not have expected size`); }
    })
    .then(() => tryAwaitOrIgnore(app.datastore.deleteFile, fullFilePath))
    .then(() => app.datastore.renameFile(tempFilePath, fullFilePath));
};

const checkFileSizeAgainstFields = (app, record, relativeFilePath, expectedSize) => {
  const recordNode = getExactNodeForKey(app.hierarchy)(record.key);

  const incorrectFileFields = $(recordNode.fields, [
    filter(f => f.type === 'file'
      && record[f.name].relativePath === relativeFilePath
      && record[f.name].size !== expectedSize),
    map(f => f.name),
  ]);

  const incorrectFileArrayFields = $(recordNode.fields, [
    filter(a => a.type === 'array<file>'
      && $(record[a.name], [
        // each element of an array<file> field is itself a file object
        some(f => f.relativePath === relativeFilePath
          && f.size !== expectedSize),
      ])),
    map(f => f.name),
  ]);

  const incorrectFields = [
    ...incorrectFileFields,
    ...incorrectFileArrayFields,
  ];

  if (incorrectFields.length > 0) {
    return false;
  }

  return true;
};

const safeGetFullFilePath = (recordDir, relativeFilePath) => {
  const naughtyUser = () => { throw new ForbiddenError('naughty naughty'); };

  if (relativeFilePath.startsWith('..')) naughtyUser();

  const pathParts = splitKey(relativeFilePath);

  if (includes('..')(pathParts)) naughtyUser();

  const recordKeyParts = splitKey(recordDir);

  const fullPathParts = [
    ...recordKeyParts,
    'files',
    ...filter(p => p !== '.')(pathParts),
  ];

  return joinKey(fullPathParts);
};
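// Illustrative sketch of the traversal guard above (paths are made up):
//
//   safeGetFullFilePath('/customers/1-abc123', 'photos/face.jpg')
//     // -> '/customers/1-abc123/files/photos/face.jpg'
//   safeGetFullFilePath('/customers/1-abc123', '../../.config/secrets')
//     // -> throws ForbiddenError ('..' is rejected both as a prefix and
//     //    as any individual path segment)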

const downloadFile = app => async (recordKey, relativePath) => apiWrapper(
  app,
  events.recordApi.downloadFile,
  permission.readRecord.isAuthorized(recordKey),
  { recordKey, relativePath },//remove dupe key 'recordKey' from object
  _downloadFile, app, recordKey, relativePath,
);

const _downloadFile = async (app, recordKey, relativePath) => {
  if (isNothing(recordKey)) { throw new BadRequestError('Record Key not supplied'); }
  if (isNothing(relativePath)) { throw new BadRequestError('file path not supplied'); }

  const { dir } = getRecordInfo(app.hierarchy, recordKey);
  return await app.datastore.readableFileStream(
    safeGetFullFilePath(
      dir, relativePath,
    ),
  );
};

const customId = app => (nodeName, id) => {
  const node = $(app.hierarchy, [
    getFlattenedHierarchy,
    find(n => n.name === nodeName),
  ]);

  if (!node) throw new NotFoundError(`Cannot find node ${nodeName}`);

  return `${node.nodeId}-${id}`;
};

const setCustomId = app => (record, id) => {
  record.id = customId(app)(record.type, id);

  const keyParts = splitKey(record.key);

  record.key = $(keyParts, [
    take(keyParts.length - 1),
    union([record.id]),
    joinKey,
  ]);

  return record;
};

const api = app => ({
  getNew: getNew(app),
  getNewChild: getNewChild(app),
  save: save(app),
  load: load(app),
  delete: deleteRecord$1(app, false),
  validate: validate(app),
  getContext: getContext(app),
  uploadFile: uploadFile(app),
  downloadFile: downloadFile(app),
  customId: customId(app),
  setCustomId: setCustomId(app),
});

const getRecordApi = app => api(app);
|
|
|
|
|
|
|
|
const getAllowedRecordTypes = app => key => apiWrapperSync(
|
|
|
|
app,
|
|
|
|
events.collectionApi.getAllowedRecordTypes,
|
|
|
|
alwaysAuthorized,
|
|
|
|
{ key },
|
|
|
|
_getAllowedRecordTypes, app, key,
|
|
|
|
);
|
|
|
|
|
|
|
|
const _getAllowedRecordTypes = (app, key) => {
|
|
|
|
key = safeKey(key);
|
|
|
|
const node = getNodeForCollectionPath(app.hierarchy)(key);
|
|
|
|
return isNothing(node) ? [] : [node.name];
|
|
|
|
};
|
|
|
|
|
|
|
|
const getCollectionApi = app => ({
|
|
|
|
getAllowedRecordTypes: getAllowedRecordTypes(app),
|
|
|
|
getAllIdsIterator: getAllIdsIterator(app),
|
|
|
|
delete: deleteCollection(app),
|
|
|
|
});
|
|
|
|
|
|
|
|
/** rebuilds an index
|
|
|
|
* @param {object} app - the application container
|
|
|
|
* @param {string} indexNodeKey - node key of the index, which the index belongs to
|
|
|
|
*/
|
|
|
|
const buildIndex = app => async indexNodeKey => apiWrapper(
|
|
|
|
app,
|
|
|
|
events.indexApi.buildIndex,
|
|
|
|
permission.manageIndex.isAuthorized,
|
|
|
|
{ indexNodeKey },
|
|
|
|
_buildIndex, app, indexNodeKey,
|
|
|
|
);
|
|
|
|
|
|
|
|
const _buildIndex = async (app, indexNodeKey) => {
|
|
|
|
const indexNode = getNode(app.hierarchy, indexNodeKey);
|
|
|
|
|
|
|
|
await createBuildIndexFolder(app.datastore, indexNodeKey);
|
|
|
|
|
|
|
|
if (!isIndex(indexNode)) { throw new BadRequestError('BuildIndex: must supply an indexnode'); }
|
|
|
|
|
|
|
|
if (indexNode.indexType === 'reference') {
|
|
|
|
await buildReverseReferenceIndex(
|
|
|
|
app, indexNode,
|
|
|
|
);
|
|
|
|
} else {
|
|
|
|
await buildHeirarchalIndex(
|
|
|
|
app, indexNode,
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
await app.cleanupTransactions();
|
|
|
|
};
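// Illustrative usage sketch (not part of the bundle; the index node key is
// made up). buildIndex queues one 'build' transaction per matching record and
// then runs cleanupTransactions, which writes the index data:
//
//   const indexApi = getIndexApi(app);
//   await indexApi.buildIndex('/customers/1-{id}/customer_index');
//   // listItems on that index should now reflect all existing customer records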
|
|
|
|
|
|
|
|
const buildReverseReferenceIndex = async (app, indexNode) => {
|
|
|
|
// Iterate through all referencING records,
|
|
|
|
// and update referenced index for each record
|
|
|
|
let recordCount = 0;
|
|
|
|
const referencingNodes = $(app.hierarchy, [
|
|
|
|
getFlattenedHierarchy,
|
|
|
|
filter(n => isRecord(n)
|
|
|
|
&& some(fieldReversesReferenceToIndex(indexNode))(n.fields)),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const createTransactionsForReferencingNode = async (referencingNode) => {
|
|
|
|
const iterateReferencingNodes = await getAllIdsIterator(app)(referencingNode.collectionNodeKey());
|
|
|
|
|
|
|
|
let referencingIdIterator = await iterateReferencingNodes();
|
|
|
|
while (!referencingIdIterator.done) {
|
|
|
|
const { result } = referencingIdIterator;
|
|
|
|
for (const id of result.ids) {
|
|
|
|
const recordKey = joinKey(result.collectionKey, id);
|
|
|
|
await transactionForBuildIndex(app, indexNode.nodeKey(), recordKey, recordCount);
|
|
|
|
recordCount++;
|
|
|
|
}
|
|
|
|
referencingIdIterator = await iterateReferencingNodes();
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
for (const referencingNode of referencingNodes) {
|
|
|
|
await createTransactionsForReferencingNode(referencingNode);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2019-12-30 19:08:50 +01:00
|
|
|
/*
|
|
|
|
const getAllowedParentCollectionNodes = (hierarchy, indexNode) => $(getAllowedRecordNodesForIndex(hierarchy, indexNode), [
|
|
|
|
map(n => n.parent()),
|
|
|
|
]);
|
|
|
|
*/
|
|
|
|
|
2019-09-10 10:49:22 +02:00
|
|
|
const buildHeirarchalIndex = async (app, indexNode) => {
|
|
|
|
let recordCount = 0;
|
|
|
|
|
|
|
|
const createTransactionsForIds = async (collectionKey, ids) => {
|
|
|
|
for (const recordId of ids) {
|
|
|
|
const recordKey = joinKey(collectionKey, recordId);
|
|
|
|
|
|
|
|
const recordNode = getRecordNodeById(
|
|
|
|
app.hierarchy,
|
|
|
|
recordId,
|
|
|
|
);
|
|
|
|
|
|
|
|
if (recordNodeApplies(indexNode)(recordNode)) {
|
|
|
|
await transactionForBuildIndex(
|
|
|
|
app, indexNode.nodeKey(),
|
|
|
|
recordKey, recordCount,
|
|
|
|
);
|
|
|
|
recordCount++;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
const collectionRecords = getAllowedRecordNodesForIndex(app.hierarchy, indexNode);
|
|
|
|
|
|
|
|
for (const targetCollectionRecordNode of collectionRecords) {
|
|
|
|
const allIdsIterator = await getAllIdsIterator(app)(targetCollectionRecordNode.collectionNodeKey());
|
|
|
|
|
|
|
|
let allIds = await allIdsIterator();
|
|
|
|
while (allIds.done === false) {
|
|
|
|
await createTransactionsForIds(
|
|
|
|
allIds.result.collectionKey,
|
|
|
|
allIds.result.ids,
|
|
|
|
);
|
|
|
|
allIds = await allIdsIterator();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return recordCount;
|
|
|
|
};
|
|
|
|
|
2019-12-30 19:08:50 +01:00
|
|
|
// const chooseChildRecordNodeByKey = (collectionNode, recordId) => find(c => recordId.startsWith(c.nodeId))(collectionNode.children);
|
|
|
|
|
2019-09-28 06:28:11 +02:00
|
|
|
const recordNodeApplies = indexNode => recordNode => includes(recordNode.nodeId)(indexNode.allowedRecordNodeIds);
|
2019-09-10 10:49:22 +02:00
|
|
|
|
|
|
|
const aggregates = app => async (indexKey, rangeStartParams = null, rangeEndParams = null) => apiWrapper(
|
|
|
|
app,
|
|
|
|
events.indexApi.aggregates,
|
|
|
|
permission.readIndex.isAuthorized(indexKey),
|
|
|
|
{ indexKey, rangeStartParams, rangeEndParams },
|
|
|
|
_aggregates, app, indexKey, rangeStartParams, rangeEndParams,
|
|
|
|
);
|
|
|
|
|
|
|
|
const _aggregates = async (app, indexKey, rangeStartParams, rangeEndParams) => {
|
|
|
|
indexKey = safeKey(indexKey);
|
2019-12-22 08:12:21 +01:00
|
|
|
const indexNode = getExactNodeForKey(app.hierarchy)(indexKey);
|
2019-12-30 19:08:50 +01:00
|
|
|
const indexDir = getIndexDir(app.hierarchy, indexKey);
|
2019-09-10 10:49:22 +02:00
|
|
|
|
|
|
|
if (!isIndex(indexNode)) { throw new BadRequestError('supplied key is not an index'); }
|
|
|
|
|
|
|
|
if (isShardedIndex(indexNode)) {
|
|
|
|
const shardKeys = await getShardKeysInRange(
|
2019-12-30 19:08:50 +01:00
|
|
|
app, indexNode, indexDir, rangeStartParams, rangeEndParams,
|
2019-09-10 10:49:22 +02:00
|
|
|
);
|
|
|
|
let aggregateResult = null;
|
|
|
|
for (const k of shardKeys) {
|
|
|
|
const shardResult = await getAggregates(app.hierarchy, app.datastore, indexNode, k);
|
|
|
|
if (aggregateResult === null) {
|
|
|
|
aggregateResult = shardResult;
|
|
|
|
} else {
|
|
|
|
aggregateResult = mergeShardAggregate(
|
|
|
|
aggregateResult,
|
|
|
|
shardResult,
|
|
|
|
);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return aggregateResult;
|
|
|
|
}
|
|
|
|
return await getAggregates(
|
|
|
|
app.hierarchy,
|
|
|
|
app.datastore,
|
|
|
|
indexNode,
|
2019-12-30 19:08:50 +01:00
|
|
|
getUnshardedIndexDataKey(indexDir),
|
2019-09-10 10:49:22 +02:00
|
|
|
);
|
|
|
|
};
|
|
|
|
|
|
|
|
const mergeShardAggregate = (totals, shard) => {
|
|
|
|
const mergeGrouping = (tot, shr) => {
|
|
|
|
tot.count += shr.count;
|
|
|
|
for (const aggName in tot) {
|
|
|
|
if (aggName === 'count') continue;
|
|
|
|
const totagg = tot[aggName];
|
|
|
|
const shragg = shr[aggName];
|
|
|
|
totagg.sum += shragg.sum;
|
|
|
|
totagg.max = totagg.max > shragg.max
|
|
|
|
? totagg.max
|
|
|
|
: shragg.max;
|
|
|
|
totagg.min = totagg.min < shragg.min
|
|
|
|
? totagg.min
|
|
|
|
: shragg.min;
|
|
|
|
totagg.mean = totagg.sum / tot.count;
|
|
|
|
}
|
|
|
|
return tot;
|
|
|
|
};
|
|
|
|
|
|
|
|
for (const aggGroupDef in totals) {
|
|
|
|
for (const grouping in shard[aggGroupDef]) {
|
|
|
|
const groupingTotal = totals[aggGroupDef][grouping];
|
|
|
|
totals[aggGroupDef][grouping] = isUndefined(groupingTotal)
|
|
|
|
? shard[aggGroupDef][grouping]
|
|
|
|
: mergeGrouping(
|
|
|
|
totals[aggGroupDef][grouping],
|
|
|
|
shard[aggGroupDef][grouping],
|
|
|
|
);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return totals;
|
|
|
|
};
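// Sketch of how two shard results are combined (figures are made up).
// Counts and sums add, min/max take the extremes, and mean is recomputed
// from the merged sum and count:
//
//   mergeShardAggregate(
//     { byStatus: { open: { count: 2, amount: { sum: 10, max: 8,  min: 2, mean: 5 } } } },
//     { byStatus: { open: { count: 3, amount: { sum: 30, max: 20, min: 1, mean: 10 } } } },
//   )
//   // -> { byStatus: { open: { count: 5, amount: { sum: 40, max: 20, min: 1, mean: 8 } } } }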
|
|
|
|
|
|
|
|
const getAggregates = async (hierarchy, datastore, index, indexedDataKey) => {
|
|
|
|
const aggregateResult = {};
|
|
|
|
const doRead = iterateIndex(
|
|
|
|
async item => {
|
|
|
|
applyItemToAggregateResult(
|
|
|
|
index, aggregateResult, item,
|
|
|
|
);
|
|
|
|
return CONTINUE_READING_RECORDS;
|
|
|
|
},
|
|
|
|
async () => aggregateResult
|
|
|
|
);
|
|
|
|
|
|
|
|
return await doRead(hierarchy, datastore, index, indexedDataKey);
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
const applyItemToAggregateResult = (indexNode, result, item) => {
|
|
|
|
const getInitialAggregateResult = () => ({
|
|
|
|
sum: 0, mean: null, max: null, min: null,
|
|
|
|
});
|
|
|
|
|
|
|
|
const applyAggregateResult = (agg, existing, count) => {
|
|
|
|
const value = compileCode$1(agg.aggregatedValue)({ record: item });
|
|
|
|
|
2019-09-28 06:28:11 +02:00
|
|
|
if (!isNumber(value)) return existing;
|
2019-09-10 10:49:22 +02:00
|
|
|
|
|
|
|
existing.sum += value;
|
|
|
|
existing.max = value > existing.max || existing.max === null
|
|
|
|
? value
|
|
|
|
: existing.max;
|
|
|
|
existing.min = value < existing.min || existing.min === null
|
|
|
|
? value
|
|
|
|
: existing.min;
|
|
|
|
existing.mean = existing.sum / count;
|
|
|
|
return existing;
|
|
|
|
};
|
|
|
|
|
|
|
|
for (const aggGroup of indexNode.aggregateGroups) {
|
2019-09-28 06:28:11 +02:00
|
|
|
if (!has(aggGroup.name)(result)) {
|
2019-09-10 10:49:22 +02:00
|
|
|
result[aggGroup.name] = {};
|
|
|
|
}
|
|
|
|
|
|
|
|
const thisGroupResult = result[aggGroup.name];
|
|
|
|
|
|
|
|
if (isNonEmptyString(aggGroup.condition)) {
|
|
|
|
if (!compileExpression$1(aggGroup.condition)({ record: item })) {
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
let group = isNonEmptyString(aggGroup.groupBy)
|
|
|
|
? compileCode$1(aggGroup.groupBy)({ record: item })
|
|
|
|
: 'all';
|
|
|
|
if (!isNonEmptyString(group)) {
|
|
|
|
group = '(none)';
|
|
|
|
}
|
|
|
|
|
2019-09-28 06:28:11 +02:00
|
|
|
if (!has(group)(thisGroupResult)) {
|
2019-09-10 10:49:22 +02:00
|
|
|
thisGroupResult[group] = { count: 0 };
|
|
|
|
for (const agg of aggGroup.aggregates) {
|
|
|
|
thisGroupResult[group][agg.name] = getInitialAggregateResult();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
thisGroupResult[group].count++;
|
|
|
|
|
|
|
|
for (const agg of aggGroup.aggregates) {
|
|
|
|
const existingValues = thisGroupResult[group][agg.name];
|
|
|
|
thisGroupResult[group][agg.name] = applyAggregateResult(
|
|
|
|
agg, existingValues,
|
|
|
|
thisGroupResult[group].count,
|
|
|
|
);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const getIndexApi = app => ({
|
|
|
|
listItems: listItems(app),
|
|
|
|
buildIndex: buildIndex(app),
|
|
|
|
aggregates: aggregates(app),
|
|
|
|
});
|
|
|
|
|
|
|
|
const createNodeErrors = {
|
|
|
|
indexCannotBeParent: 'Index template cannot be a parent',
|
|
|
|
allNonRootNodesMustHaveParent: 'Only the root node may have no parent',
|
|
|
|
indexParentMustBeRecordOrRoot: 'An index may only have a record or root as a parent',
|
|
|
|
aggregateParentMustBeAnIndex: 'aggregateGroup parent must be an index',
|
|
|
|
};
|
|
|
|
|
|
|
|
const pathRegxMaker = node => () => node.nodeKey().replace(/{id}/g, '[a-zA-Z0-9_-]+');
|
|
|
|
|
|
|
|
const nodeKeyMaker = node => () => switchCase(
|
|
|
|
|
|
|
|
[n => isRecord(n) && !isSingleRecord(n),
|
|
|
|
n => joinKey(
|
|
|
|
node.parent().nodeKey(),
|
|
|
|
node.collectionName,
|
|
|
|
`${n.nodeId}-{id}`,
|
|
|
|
)],
|
|
|
|
|
|
|
|
[isRoot,
|
|
|
|
constant('/')],
|
|
|
|
|
|
|
|
[defaultCase,
|
|
|
|
n => joinKey(node.parent().nodeKey(), n.name)],
|
|
|
|
|
|
|
|
)(node);
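// Sketch of the key/regex scheme above (names and ids are made up). A record
// node "customer" with collectionName "customers" and nodeId 1, parented on
// the root ('/'), yields:
//
//   node.nodeKey()  // -> '/customers/1-{id}'
//   node.pathRegx() // -> '/customers/1-[a-zA-Z0-9_-]+'
//
// so a concrete record key such as '/customers/1-x9Yz' matches the regex.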
|
|
|
|
|
|
|
|
|
|
|
|
const validate$1 = parent => (node) => {
|
|
|
|
if (isIndex(node)
|
|
|
|
&& isSomething(parent)
|
|
|
|
&& !isRoot(parent)
|
|
|
|
&& !isRecord(parent)) {
|
|
|
|
throw new BadRequestError(createNodeErrors.indexParentMustBeRecordOrRoot);
|
|
|
|
}
|
|
|
|
|
|
|
|
if (isaggregateGroup(node)
|
|
|
|
&& isSomething(parent)
|
|
|
|
&& !isIndex(parent)) {
|
|
|
|
throw new BadRequestError(createNodeErrors.aggregateParentMustBeAnIndex);
|
|
|
|
}
|
|
|
|
|
|
|
|
if (isNothing(parent) && !isRoot(node)) { throw new BadRequestError(createNodeErrors.allNonRootNodesMustHaveParent); }
|
|
|
|
|
|
|
|
return node;
|
|
|
|
};
|
|
|
|
|
|
|
|
const construct = parent => (node) => {
|
|
|
|
node.nodeKey = nodeKeyMaker(node);
|
|
|
|
node.pathRegx = pathRegxMaker(node);
|
|
|
|
node.parent = constant(parent);
|
|
|
|
node.isRoot = () => isNothing(parent)
|
|
|
|
&& node.name === 'root'
|
|
|
|
&& node.type === 'root';
|
|
|
|
if (isCollectionRecord(node)) {
|
|
|
|
node.collectionNodeKey = () => joinKey(
|
|
|
|
parent.nodeKey(), node.collectionName,
|
|
|
|
);
|
|
|
|
node.collectionPathRegx = () => joinKey(
|
|
|
|
parent.pathRegx(), node.collectionName,
|
|
|
|
);
|
|
|
|
}
|
|
|
|
return node;
|
|
|
|
};
|
|
|
|
|
|
|
|
const addToParent = (obj) => {
|
|
|
|
const parent = obj.parent();
|
|
|
|
if (isSomething(parent)) {
|
|
|
|
if (isIndex(obj))
|
|
|
|
// Q: why are indexes not children ?
|
|
|
|
// A: because they cannot have children of their own.
|
2019-11-05 14:30:36 +01:00
|
|
|
{
|
|
|
|
parent.indexes.push(obj);
|
|
|
|
}
|
|
|
|
else if (isaggregateGroup(obj))
|
|
|
|
{
|
|
|
|
parent.aggregateGroups.push(obj);
|
|
|
|
} else {
|
|
|
|
parent.children.push(obj);
|
|
|
|
}
|
2019-09-10 10:49:22 +02:00
|
|
|
|
|
|
|
if (isRecord(obj)) {
|
|
|
|
const defaultIndex = find$1(
|
|
|
|
parent.indexes,
|
|
|
|
i => i.name === `${parent.name}_index`,
|
|
|
|
);
|
|
|
|
if (defaultIndex) {
|
|
|
|
defaultIndex.allowedRecordNodeIds.push(obj.nodeId);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return obj;
|
|
|
|
};
|
|
|
|
|
|
|
|
const constructNode = (parent, obj) => $(obj, [
|
|
|
|
construct(parent),
|
|
|
|
validate$1(parent),
|
|
|
|
addToParent,
|
|
|
|
]);
|
|
|
|
|
|
|
|
const getNodeId = (parentNode) => {
|
|
|
|
// this case is handled better elsewhere
|
|
|
|
if (!parentNode) return null;
|
|
|
|
const findRoot = n => (isRoot(n) ? n : findRoot(n.parent()));
|
|
|
|
const root = findRoot(parentNode);
|
|
|
|
|
|
|
|
return ($(root, [
|
|
|
|
getFlattenedHierarchy,
|
|
|
|
map(n => n.nodeId),
|
|
|
|
max]) + 1);
|
|
|
|
};
|
|
|
|
|
|
|
|
const constructHierarchy = (node, parent) => {
|
|
|
|
construct(parent)(node);
|
|
|
|
if (node.indexes) {
|
|
|
|
each$1(node.indexes,
|
|
|
|
child => constructHierarchy(child, node));
|
|
|
|
}
|
|
|
|
if (node.aggregateGroups) {
|
|
|
|
each$1(node.aggregateGroups,
|
|
|
|
child => constructHierarchy(child, node));
|
|
|
|
}
|
|
|
|
if (node.children && node.children.length > 0) {
|
|
|
|
each$1(node.children,
|
|
|
|
child => constructHierarchy(child, node));
|
|
|
|
}
|
|
|
|
if (node.fields) {
|
|
|
|
each$1(node.fields,
|
|
|
|
f => each$1(f.typeOptions, (val, key) => {
|
|
|
|
const def = all$1[f.type].optionDefinitions[key];
|
|
|
|
if (!def) {
|
|
|
|
// unknown typeOption
|
|
|
|
delete f.typeOptions[key];
|
|
|
|
} else {
|
|
|
|
f.typeOptions[key] = def.parse(val);
|
|
|
|
}
|
|
|
|
}));
|
|
|
|
}
|
|
|
|
return node;
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
const getNewRootLevel = () => construct()({
|
|
|
|
name: 'root',
|
|
|
|
type: 'root',
|
|
|
|
children: [],
|
|
|
|
pathMaps: [],
|
|
|
|
indexes: [],
|
|
|
|
nodeId: 0,
|
|
|
|
});
|
|
|
|
|
|
|
|
const _getNewRecordTemplate = (parent, name, createDefaultIndex, isSingle) => {
|
|
|
|
const node = constructNode(parent, {
|
|
|
|
name,
|
|
|
|
type: 'record',
|
|
|
|
fields: [],
|
|
|
|
children: [],
|
|
|
|
validationRules: [],
|
|
|
|
nodeId: getNodeId(parent),
|
|
|
|
indexes: [],
|
2019-12-30 19:08:50 +01:00
|
|
|
estimatedRecordCount: isRecord(parent) ? 500 : 1000000,
|
2019-09-10 10:49:22 +02:00
|
|
|
collectionName: '',
|
|
|
|
isSingle,
|
|
|
|
});
|
|
|
|
|
|
|
|
if (createDefaultIndex) {
|
|
|
|
const defaultIndex = getNewIndexTemplate(parent);
|
|
|
|
defaultIndex.name = `${name}_index`;
|
|
|
|
defaultIndex.allowedRecordNodeIds.push(node.nodeId);
|
|
|
|
}
|
|
|
|
|
|
|
|
return node;
|
|
|
|
};
|
|
|
|
|
|
|
|
const getNewRecordTemplate = (parent, name = '', createDefaultIndex = true) => _getNewRecordTemplate(parent, name, createDefaultIndex, false);
|
|
|
|
|
|
|
|
const getNewSingleRecordTemplate = parent => _getNewRecordTemplate(parent, '', false, true);
|
|
|
|
|
|
|
|
const getNewIndexTemplate = (parent, type = 'ancestor') => constructNode(parent, {
|
|
|
|
name: '',
|
|
|
|
type: 'index',
|
|
|
|
map: 'return {...record};',
|
|
|
|
filter: '',
|
|
|
|
indexType: type,
|
|
|
|
getShardName: '',
|
|
|
|
getSortKey: 'record.id',
|
|
|
|
aggregateGroups: [],
|
|
|
|
allowedRecordNodeIds: [],
|
|
|
|
nodeId: getNodeId(parent),
|
|
|
|
});
|
|
|
|
|
|
|
|
const getNewAggregateGroupTemplate = index => constructNode(index, {
|
|
|
|
name: '',
|
|
|
|
type: 'aggregateGroup',
|
|
|
|
groupBy: '',
|
|
|
|
aggregates: [],
|
|
|
|
condition: '',
|
|
|
|
nodeId: getNodeId(index),
|
|
|
|
});
|
|
|
|
|
|
|
|
const getNewAggregateTemplate = (set) => {
|
|
|
|
const aggregatedValue = {
|
|
|
|
name: '',
|
|
|
|
aggregatedValue: '',
|
|
|
|
};
|
|
|
|
set.aggregates.push(aggregatedValue);
|
|
|
|
return aggregatedValue;
|
|
|
|
};
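// Illustrative sketch of building a small hierarchy with the templates above
// (not executed; names are made up, and field setup via addField is defined
// further down):
//
//   const root = getNewRootLevel();
//   const customer = getNewRecordTemplate(root, 'customer'); // also creates 'customer_index'
//   const invoice = getNewRecordTemplate(customer, 'invoice');
//   const allInvoices = getNewIndexTemplate(root);
//   allInvoices.name = 'all_invoices';
//   allInvoices.allowedRecordNodeIds.push(invoice.nodeId);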
|
|
|
|
|
|
|
|
const fieldErrors = {
|
|
|
|
AddFieldValidationFailed: 'Add field validation: ',
|
|
|
|
};
|
|
|
|
|
2019-09-28 06:28:11 +02:00
|
|
|
const allowedTypes = () => keys(all$1);
|
2019-09-10 10:49:22 +02:00
|
|
|
|
|
|
|
const getNewField = type => ({
|
|
|
|
name: '', // how field is referenced internally
|
|
|
|
type,
|
|
|
|
typeOptions: getDefaultOptions$1(type),
|
|
|
|
label: '', // how field is displayed
|
|
|
|
getInitialValue: 'default', // function that gets value when initially created
|
|
|
|
getUndefinedValue: 'default', // function that gets value when field undefined on record
|
|
|
|
});
|
|
|
|
|
|
|
|
const fieldRules = allFields => [
|
|
|
|
makerule('name', 'field name is not set',
|
|
|
|
f => isNonEmptyString(f.name)),
|
|
|
|
makerule('type', 'field type is not set',
|
|
|
|
f => isNonEmptyString(f.type)),
|
|
|
|
makerule('label', 'field label is not set',
|
|
|
|
f => isNonEmptyString(f.label)),
|
|
|
|
makerule('getInitialValue', 'getInitialValue function is not set',
|
|
|
|
f => isNonEmptyString(f.getInitialValue)),
|
|
|
|
makerule('getUndefinedValue', 'getUndefinedValue function is not set',
|
|
|
|
f => isNonEmptyString(f.getUndefinedValue)),
|
|
|
|
makerule('name', 'field name is duplicated',
|
|
|
|
f => isNothingOrEmpty(f.name)
|
|
|
|
|| countBy('name')(allFields)[f.name] === 1),
|
|
|
|
makerule('type', 'type is unknown',
|
|
|
|
f => isNothingOrEmpty(f.type)
|
|
|
|
|| some(t => f.type === t)(allowedTypes())),
|
|
|
|
];
|
|
|
|
|
|
|
|
const typeOptionsRules = (field) => {
|
|
|
|
const type = all$1[field.type];
|
|
|
|
if (isNothing(type)) return [];
|
|
|
|
|
|
|
|
const def = optName => type.optionDefinitions[optName];
|
|
|
|
|
|
|
|
return $(field.typeOptions, [
|
2019-09-28 06:28:11 +02:00
|
|
|
keys,
|
2019-09-10 10:49:22 +02:00
|
|
|
filter(o => isSomething(def(o))
|
|
|
|
&& isSomething(def(o).isValid)),
|
|
|
|
map(o => makerule(
|
|
|
|
`typeOptions.${o}`,
|
|
|
|
`${def(o).requirementDescription}`,
|
|
|
|
field => def(o).isValid(field.typeOptions[o]),
|
|
|
|
)),
|
|
|
|
]);
|
|
|
|
};
|
|
|
|
|
|
|
|
const validateField = allFields => (field) => {
|
2019-09-28 06:28:11 +02:00
|
|
|
const everySingleField = includes(field)(allFields) ? allFields : [...allFields, field];
|
2019-09-10 10:49:22 +02:00
|
|
|
return applyRuleSet([...fieldRules(everySingleField), ...typeOptionsRules(field)])(field);
|
|
|
|
};
|
|
|
|
|
|
|
|
const validateAllFields = recordNode => $(recordNode.fields, [
|
|
|
|
map(validateField(recordNode.fields)),
|
|
|
|
flatten,
|
|
|
|
]);
|
|
|
|
|
|
|
|
const addField = (recordTemplate, field) => {
|
|
|
|
if (isNothingOrEmpty(field.label)) {
|
|
|
|
field.label = field.name;
|
|
|
|
}
|
|
|
|
const validationMessages = validateField([...recordTemplate.fields, field])(field);
|
|
|
|
if (validationMessages.length > 0) {
|
|
|
|
const errors = map(m => m.error)(validationMessages);
|
|
|
|
throw new BadRequestError(`${fieldErrors.AddFieldValidationFailed} ${errors.join(', ')}`);
|
|
|
|
}
|
|
|
|
recordTemplate.fields.push(field);
|
|
|
|
};
|
|
|
|
|
|
|
|
const getNewRecordValidationRule = (invalidFields,
|
|
|
|
messageWhenInvalid,
|
|
|
|
expressionWhenValid) => ({
|
|
|
|
invalidFields, messageWhenInvalid, expressionWhenValid,
|
|
|
|
});
|
|
|
|
|
|
|
|
const getStaticValue = switchCase(
|
2019-09-28 06:28:11 +02:00
|
|
|
[isNumber, v => v.toString()],
|
|
|
|
[isBoolean, v => v.toString()],
|
2019-09-10 10:49:22 +02:00
|
|
|
[defaultCase$1, v => `'${v}'`],
|
|
|
|
);
|
|
|
|
|
|
|
|
const commonRecordValidationRules = ({
|
|
|
|
|
|
|
|
fieldNotEmpty: fieldName => getNewRecordValidationRule(
|
|
|
|
[fieldName],
|
|
|
|
`${fieldName} is empty`,
|
|
|
|
`!_.isEmpty(record['${fieldName}'])`,
|
|
|
|
),
|
|
|
|
|
|
|
|
fieldBetween: (fieldName, min, max) => getNewRecordValidationRule(
|
|
|
|
[fieldName],
|
|
|
|
`${fieldName} must be between ${min.toString()} and ${max.toString()}`,
|
|
|
|
`record['${fieldName}'] >= ${getStaticValue(min)} && record['${fieldName}'] <= ${getStaticValue(max)} `,
|
|
|
|
),
|
|
|
|
|
|
|
|
fieldGreaterThan: (fieldName, min, max) => getNewRecordValidationRule(
|
|
|
|
[fieldName],
|
|
|
|
    `${fieldName} must be greater than or equal to ${min.toString()}`,
|
|
|
|
`record['${fieldName}'] >= ${getStaticValue(min)} `,
|
|
|
|
),
|
|
|
|
});
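// Sketch of how these helpers are used (record node and field are made up;
// addRecordValidationRule is defined just below):
//
//   addRecordValidationRule(customerNode)(
//     commonRecordValidationRules.fieldNotEmpty('surname'),
//   );
//   // stores { invalidFields: ['surname'],
//   //          messageWhenInvalid: 'surname is empty',
//   //          expressionWhenValid: "!_.isEmpty(record['surname'])" }
//   // the expression is compiled and evaluated against { record } at validate time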
|
|
|
|
|
|
|
|
const addRecordValidationRule = recordNode => rule => recordNode.validationRules.push(rule);
|
|
|
|
|
|
|
|
const createTrigger = () => ({
|
|
|
|
actionName: '',
|
|
|
|
eventName: '',
|
|
|
|
// function, has access to event context,
|
|
|
|
// returns object that is used as parameter to action
|
|
|
|
// only used if triggered by event
|
|
|
|
optionsCreator: '',
|
|
|
|
// action runs if true,
|
|
|
|
// has access to event context
|
|
|
|
condition: '',
|
|
|
|
});
|
|
|
|
|
|
|
|
const createAction = () => ({
|
|
|
|
name: '',
|
|
|
|
behaviourSource: '',
|
|
|
|
// name of function in actionSource
|
|
|
|
behaviourName: '',
|
|
|
|
// parameter passed into behaviour.
|
|
|
|
// any other parms passed at runtime e.g.
|
|
|
|
// by trigger, or manually, will be merged into this
|
|
|
|
initialOptions: {},
|
|
|
|
});
|
|
|
|
|
|
|
|
const aggregateRules = [
|
|
|
|
makerule('name', 'choose a name for the aggregate',
|
|
|
|
a => isNonEmptyString(a.name)),
|
|
|
|
makerule('aggregatedValue', 'aggregatedValue does not compile',
|
|
|
|
a => isEmpty(a.aggregatedValue)
|
|
|
|
|| executesWithoutException(
|
|
|
|
() => compileCode$1(a.aggregatedValue),
|
|
|
|
)),
|
|
|
|
];
|
|
|
|
|
|
|
|
const validateAggregate = aggregate => applyRuleSet(aggregateRules)(aggregate);
|
|
|
|
|
|
|
|
const validateAllAggregates = all => $(all, [
|
|
|
|
map(validateAggregate),
|
|
|
|
flatten,
|
|
|
|
]);
|
|
|
|
|
2019-09-28 06:28:11 +02:00
|
|
|
const ruleSet = (...sets) => constant(flatten([...sets]));
|
2019-09-10 10:49:22 +02:00
|
|
|
|
|
|
|
const commonRules = [
|
|
|
|
makerule('name', 'node name is not set',
|
|
|
|
node => stringNotEmpty(node.name)),
|
|
|
|
makerule('type', 'node type not recognised',
|
|
|
|
anyTrue(isRecord, isRoot, isIndex, isaggregateGroup)),
|
|
|
|
];
|
|
|
|
|
|
|
|
const recordRules = [
|
|
|
|
makerule('fields', 'no fields have been added to the record',
|
|
|
|
node => isNonEmptyArray(node.fields)),
|
|
|
|
makerule('validationRules', "validation rule is missing a 'messageWhenValid' member",
|
2019-09-28 06:28:11 +02:00
|
|
|
node => every(r => has('messageWhenInvalid')(r))(node.validationRules)),
|
2019-09-10 10:49:22 +02:00
|
|
|
makerule('validationRules', "validation rule is missing a 'expressionWhenValid' member",
|
2019-09-28 06:28:11 +02:00
|
|
|
node => every(r => has('expressionWhenValid')(r))(node.validationRules)),
|
2019-09-10 10:49:22 +02:00
|
|
|
];
|
|
|
|
|
|
|
|
|
|
|
|
const aggregateGroupRules = [
|
|
|
|
makerule('condition', 'condition does not compile',
|
2019-09-28 06:28:11 +02:00
|
|
|
a => isEmpty(a.condition)
|
2019-09-10 10:49:22 +02:00
|
|
|
|| executesWithoutException(
|
|
|
|
() => compileExpression$1(a.condition),
|
|
|
|
)),
|
|
|
|
];
|
|
|
|
|
|
|
|
const getRuleSet = node => switchCase(
|
|
|
|
|
|
|
|
[isRecord, ruleSet(
|
|
|
|
commonRules,
|
|
|
|
recordRules,
|
|
|
|
)],
|
|
|
|
|
|
|
|
[isIndex, ruleSet(
|
|
|
|
commonRules,
|
|
|
|
indexRuleSet,
|
|
|
|
)],
|
|
|
|
|
|
|
|
[isaggregateGroup, ruleSet(
|
|
|
|
commonRules,
|
|
|
|
aggregateGroupRules,
|
|
|
|
)],
|
|
|
|
|
|
|
|
[defaultCase, ruleSet(commonRules, [])],
|
|
|
|
)(node);
|
|
|
|
|
|
|
|
const validateNode = node => applyRuleSet(getRuleSet(node))(node);
|
|
|
|
|
|
|
|
const validateAll = (appHierarchy) => {
|
|
|
|
const flattened = getFlattenedHierarchy(
|
|
|
|
appHierarchy,
|
|
|
|
);
|
|
|
|
|
|
|
|
const duplicateNameRule = makerule(
|
|
|
|
'name', 'node names must be unique under shared parent',
|
|
|
|
n => filter(f => f.parent() === n.parent()
|
|
|
|
&& f.name === n.name)(flattened).length === 1,
|
|
|
|
);
|
|
|
|
|
|
|
|
const duplicateNodeKeyErrors = $(flattened, [
|
|
|
|
map(n => applyRuleSet([duplicateNameRule])(n)),
|
|
|
|
filter(isSomething),
|
|
|
|
flatten,
|
|
|
|
]);
|
|
|
|
|
|
|
|
const fieldErrors = $(flattened, [
|
|
|
|
filter(isRecord),
|
|
|
|
map(validateAllFields),
|
|
|
|
flatten,
|
|
|
|
]);
|
|
|
|
|
|
|
|
const aggregateErrors = $(flattened, [
|
|
|
|
filter(isaggregateGroup),
|
|
|
|
map(s => validateAllAggregates(
|
|
|
|
s.aggregates,
|
|
|
|
)),
|
|
|
|
flatten,
|
|
|
|
]);
|
|
|
|
|
|
|
|
return $(flattened, [
|
|
|
|
map(validateNode),
|
|
|
|
flatten,
|
|
|
|
union(duplicateNodeKeyErrors),
|
|
|
|
union(fieldErrors),
|
|
|
|
union(aggregateErrors),
|
|
|
|
]);
|
|
|
|
};
|
|
|
|
|
|
|
|
const actionRules = [
|
|
|
|
makerule('name', 'action must have a name',
|
|
|
|
a => isNonEmptyString(a.name)),
|
|
|
|
makerule('behaviourName', 'must supply a behaviour name to the action',
|
|
|
|
a => isNonEmptyString(a.behaviourName)),
|
|
|
|
makerule('behaviourSource', 'must supply a behaviour source for the action',
|
|
|
|
a => isNonEmptyString(a.behaviourSource)),
|
|
|
|
];
|
|
|
|
|
|
|
|
const duplicateActionRule = makerule('', 'action name must be unique', () => {});
|
|
|
|
|
|
|
|
const validateAction = action => applyRuleSet(actionRules)(action);
|
|
|
|
|
|
|
|
|
|
|
|
const validateActions = (allActions) => {
|
|
|
|
const duplicateActions = $(allActions, [
|
|
|
|
filter(a => filter(a2 => a2.name === a.name)(allActions).length > 1),
|
|
|
|
map(a => validationError(duplicateActionRule, a)),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const errors = $(allActions, [
|
|
|
|
map(validateAction),
|
|
|
|
flatten,
|
|
|
|
union(duplicateActions),
|
|
|
|
uniqBy('name'),
|
|
|
|
]);
|
|
|
|
|
|
|
|
return errors;
|
|
|
|
};
|
|
|
|
|
|
|
|
const triggerRules = actions => ([
|
|
|
|
makerule('actionName', 'must specify an action',
|
|
|
|
t => isNonEmptyString(t.actionName)),
|
|
|
|
  makerule('eventName', 'must specify an event',
|
|
|
|
t => isNonEmptyString(t.eventName)),
|
|
|
|
makerule('actionName', 'specified action not supplied',
|
|
|
|
t => !t.actionName
|
|
|
|
|| some(a => a.name === t.actionName)(actions)),
|
|
|
|
makerule('eventName', 'invalid Event Name',
|
|
|
|
t => !t.eventName
|
2019-09-28 06:28:11 +02:00
|
|
|
|| includes(t.eventName)(eventsList)),
|
2019-09-10 10:49:22 +02:00
|
|
|
makerule('optionsCreator', 'Options Creator does not compile - check your expression',
|
|
|
|
(t) => {
|
|
|
|
if (!t.optionsCreator) return true;
|
|
|
|
try {
|
|
|
|
compileCode$1(t.optionsCreator);
|
|
|
|
return true;
|
|
|
|
} catch (_) { return false; }
|
|
|
|
}),
|
|
|
|
makerule('condition', 'Trigger condition does not compile - check your expression',
|
|
|
|
(t) => {
|
|
|
|
if (!t.condition) return true;
|
|
|
|
try {
|
|
|
|
compileExpression$1(t.condition);
|
|
|
|
return true;
|
|
|
|
} catch (_) { return false; }
|
|
|
|
}),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const validateTrigger = (trigger, allActions) => {
|
|
|
|
const errors = applyRuleSet(triggerRules(allActions))(trigger);
|
|
|
|
|
|
|
|
return errors;
|
|
|
|
};
|
|
|
|
|
|
|
|
const validateTriggers = (triggers, allActions) => $(triggers, [
|
|
|
|
map(t => validateTrigger(t, allActions)),
|
|
|
|
flatten,
|
|
|
|
]);
|
|
|
|
|
|
|
|
const getApplicationDefinition = datastore => async () => {
|
|
|
|
const exists = await datastore.exists(appDefinitionFile);
|
|
|
|
|
|
|
|
if (!exists) throw new Error('Application definition does not exist');
|
|
|
|
|
|
|
|
const appDefinition = await datastore.loadJson(appDefinitionFile);
|
|
|
|
appDefinition.hierarchy = constructHierarchy(
|
|
|
|
appDefinition.hierarchy,
|
|
|
|
);
|
|
|
|
return appDefinition;
|
|
|
|
};
|
|
|
|
|
|
|
|
const saveApplicationHierarchy = app => async hierarchy => apiWrapper(
|
|
|
|
app,
|
|
|
|
events.templateApi.saveApplicationHierarchy,
|
|
|
|
permission.writeTemplates.isAuthorized,
|
|
|
|
{ hierarchy },
|
|
|
|
_saveApplicationHierarchy, app.datastore, hierarchy,
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
|
|
const _saveApplicationHierarchy = async (datastore, hierarchy) => {
|
|
|
|
const validationErrors = await validateAll(hierarchy);
|
|
|
|
if (validationErrors.length > 0) {
|
2019-09-28 06:28:11 +02:00
|
|
|
throw new Error(`Hierarchy is invalid: ${join$2(
|
2019-09-10 10:49:22 +02:00
|
|
|
validationErrors.map(e => `${e.item.nodeKey ? e.item.nodeKey() : ''} : ${e.error}`),
|
|
|
|
',',
|
|
|
|
)}`);
|
|
|
|
}
|
|
|
|
|
|
|
|
if (await datastore.exists(appDefinitionFile)) {
|
|
|
|
const appDefinition = await datastore.loadJson(appDefinitionFile);
|
|
|
|
appDefinition.hierarchy = hierarchy;
|
|
|
|
await datastore.updateJson(appDefinitionFile, appDefinition);
|
|
|
|
} else {
|
|
|
|
await datastore.createFolder('/.config');
|
|
|
|
const appDefinition = { actions: [], triggers: [], hierarchy };
|
|
|
|
await datastore.createJson(appDefinitionFile, appDefinition);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const saveActionsAndTriggers = app => async (actions, triggers) => apiWrapper(
|
|
|
|
app,
|
|
|
|
events.templateApi.saveActionsAndTriggers,
|
|
|
|
permission.writeTemplates.isAuthorized,
|
|
|
|
{ actions, triggers },
|
|
|
|
_saveActionsAndTriggers, app.datastore, actions, triggers,
|
|
|
|
);
|
|
|
|
|
|
|
|
const _saveActionsAndTriggers = async (datastore, actions, triggers) => {
|
|
|
|
if (await datastore.exists(appDefinitionFile)) {
|
|
|
|
const appDefinition = await datastore.loadJson(appDefinitionFile);
|
|
|
|
appDefinition.actions = actions;
|
|
|
|
appDefinition.triggers = triggers;
|
|
|
|
|
|
|
|
const actionValidErrs = map(e => e.error)(validateActions(actions));
|
|
|
|
|
|
|
|
if (actionValidErrs.length > 0) {
|
2019-09-28 06:28:11 +02:00
|
|
|
throw new BadRequestError(`Actions are invalid: ${join$2(actionValidErrs, ', ')}`);
|
2019-09-10 10:49:22 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
const triggerValidErrs = map(e => e.error)(validateTriggers(triggers, actions));
|
|
|
|
|
|
|
|
if (triggerValidErrs.length > 0) {
|
2019-09-28 06:28:11 +02:00
|
|
|
throw new BadRequestError(`Triggers are invalid: ${join$2(triggerValidErrs, ', ')}`);
|
2019-09-10 10:49:22 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
await datastore.updateJson(appDefinitionFile, appDefinition);
|
|
|
|
} else {
|
|
|
|
throw new BadRequestError('Cannot save actions: Application definition does not exist');
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const getBehaviourSources = async (datastore) => {
|
|
|
|
await datastore.loadFile('/.config/behaviourSources.js');
|
|
|
|
};
|
|
|
|
|
|
|
|
const api$1 = app => ({
|
|
|
|
|
|
|
|
getApplicationDefinition: getApplicationDefinition(app.datastore),
|
|
|
|
saveApplicationHierarchy: saveApplicationHierarchy(app),
|
|
|
|
saveActionsAndTriggers: saveActionsAndTriggers(app),
|
|
|
|
getBehaviourSources: () => getBehaviourSources(app.datastore),
|
|
|
|
getNewRootLevel,
|
|
|
|
constructNode,
|
|
|
|
getNewIndexTemplate,
|
|
|
|
getNewRecordTemplate,
|
|
|
|
getNewField,
|
|
|
|
validateField,
|
|
|
|
addField,
|
|
|
|
fieldErrors,
|
|
|
|
getNewRecordValidationRule,
|
|
|
|
commonRecordValidationRules,
|
|
|
|
addRecordValidationRule,
|
|
|
|
createAction,
|
|
|
|
createTrigger,
|
|
|
|
validateActions,
|
|
|
|
validateTrigger,
|
|
|
|
getNewAggregateGroupTemplate,
|
|
|
|
getNewAggregateTemplate,
|
|
|
|
constructHierarchy,
|
|
|
|
getNewSingleRecordTemplate,
|
|
|
|
allTypes: all$1,
|
|
|
|
validateNode,
|
|
|
|
validateAll,
|
|
|
|
validateTriggers,
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
|
|
const getTemplateApi = app => api$1(app);
|
|
|
|
|
|
|
|
const getUsers = app => async () => apiWrapper(
|
|
|
|
app,
|
|
|
|
events.authApi.getUsers,
|
|
|
|
permission.listUsers.isAuthorized,
|
|
|
|
{},
|
|
|
|
_getUsers, app,
|
|
|
|
);
|
|
|
|
|
|
|
|
const _getUsers = async app => $(await app.datastore.loadJson(USERS_LIST_FILE), [
|
|
|
|
map(stripUserOfSensitiveStuff),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const loadAccessLevels = app => async () => apiWrapper(
|
|
|
|
app,
|
|
|
|
events.authApi.loadAccessLevels,
|
|
|
|
permission.listAccessLevels.isAuthorized,
|
|
|
|
{},
|
|
|
|
_loadAccessLevels, app,
|
|
|
|
);
|
|
|
|
|
|
|
|
const _loadAccessLevels = async app => await app.datastore.loadJson(ACCESS_LEVELS_FILE);
|
|
|
|
|
|
|
|
const dummyHash = '$argon2i$v=19$m=4096,t=3,p=1$UZRo409UYBGjHJS3CV6Uxw$rU84qUqPeORFzKYmYY0ceBLDaPO+JWSH4PfNiKXfIKk';
|
|
|
|
|
|
|
|
const authenticate = app => async (username, password) => apiWrapper(
|
|
|
|
app,
|
|
|
|
events.authApi.authenticate,
|
|
|
|
alwaysAuthorized,
|
|
|
|
{ username, password },
|
|
|
|
_authenticate, app, username, password,
|
|
|
|
);
|
|
|
|
|
|
|
|
const _authenticate = async (app, username, password) => {
|
|
|
|
if (isNothingOrEmpty(username) || isNothingOrEmpty(password)) { return null; }
|
|
|
|
|
|
|
|
const allUsers = await _getUsers(app);
|
|
|
|
let user = getUserByName(
|
|
|
|
allUsers,
|
|
|
|
username,
|
|
|
|
);
|
|
|
|
|
|
|
|
const notAUser = 'not-a-user';
|
|
|
|
// continue with non-user - so time to verify remains consistent
|
|
|
|
// with verification of a valid user
|
|
|
|
if (!user || !user.enabled) { user = notAUser; }
|
|
|
|
|
|
|
|
let userAuth;
|
|
|
|
try {
|
|
|
|
userAuth = await app.datastore.loadJson(
|
|
|
|
userAuthFile(username),
|
|
|
|
);
|
|
|
|
} catch (_) {
|
|
|
|
userAuth = { accessLevels: [], passwordHash: dummyHash };
|
|
|
|
}
|
|
|
|
|
|
|
|
const permissions = await buildUserPermissions(app, user.accessLevels);
|
|
|
|
|
|
|
|
const verified = await app.crypto.verify(
|
|
|
|
userAuth.passwordHash,
|
|
|
|
password,
|
|
|
|
);
|
|
|
|
|
|
|
|
if (user === notAUser) { return null; }
|
|
|
|
|
|
|
|
return verified
|
|
|
|
? {
|
|
|
|
...user, permissions, temp: false, isUser: true,
|
|
|
|
}
|
|
|
|
: null;
|
|
|
|
};
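// Note on the dummyHash fallback above: verification always runs against some
// argon2 hash, even for unknown or disabled usernames, so response time does
// not reveal whether an account exists. Sketch (username is made up, and the
// auth api accessor name is an assumption):
//
//   await authApi.authenticate('no-such-user', 'whatever');
//   // no auth file loads -> falls back to dummyHash, crypto.verify still runs,
//   // then null is returned because user === 'not-a-user'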
|
|
|
|
|
|
|
|
const authenticateTemporaryAccess = app => async (tempAccessCode) => {
|
|
|
|
if (isNothingOrEmpty(tempAccessCode)) { return null; }
|
|
|
|
|
|
|
|
const temp = parseTemporaryCode(tempAccessCode);
|
|
|
|
let user = $(await _getUsers(app), [
|
|
|
|
find(u => u.temporaryAccessId === temp.id),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const notAUser = 'not-a-user';
|
|
|
|
if (!user || !user.enabled) { user = notAUser; }
|
|
|
|
|
|
|
|
let userAuth;
|
|
|
|
try {
|
|
|
|
userAuth = await app.datastore.loadJson(
|
|
|
|
userAuthFile(user.name),
|
|
|
|
);
|
|
|
|
} catch (e) {
|
|
|
|
userAuth = {
|
|
|
|
temporaryAccessHash: dummyHash,
|
|
|
|
temporaryAccessExpiryEpoch: (await app.getEpochTime() + 10000),
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
|
|
|
if (userAuth.temporaryAccessExpiryEpoch < await app.getEpochTime()) { user = notAUser; }
|
|
|
|
|
|
|
|
const tempCode = !temp.code ? generate() : temp.code;
|
|
|
|
const verified = await app.crypto.verify(
|
|
|
|
userAuth.temporaryAccessHash,
|
|
|
|
tempCode,
|
|
|
|
);
|
|
|
|
|
|
|
|
if (user === notAUser) { return null; }
|
|
|
|
|
|
|
|
return verified
|
|
|
|
? {
|
|
|
|
...user,
|
|
|
|
permissions: [],
|
|
|
|
temp: true,
|
|
|
|
isUser: true,
|
|
|
|
}
|
|
|
|
: null;
|
|
|
|
};
|
|
|
|
|
|
|
|
const buildUserPermissions = async (app, userAccessLevels) => {
|
|
|
|
const allAccessLevels = await _loadAccessLevels(app);
|
|
|
|
|
|
|
|
return $(allAccessLevels.levels, [
|
|
|
|
filter(l => some(ua => l.name === ua)(userAccessLevels)),
|
|
|
|
map(l => l.permissions),
|
|
|
|
flatten,
|
|
|
|
]);
|
|
|
|
};
|
|
|
|
|
|
|
|
const createTemporaryAccess$1 = app => async userName => apiWrapper(
|
|
|
|
app,
|
|
|
|
events.authApi.createTemporaryAccess,
|
|
|
|
alwaysAuthorized,
|
|
|
|
{ userName },
|
|
|
|
_createTemporaryAccess, app, userName,
|
|
|
|
);
|
|
|
|
|
|
|
|
const _createTemporaryAccess = async (app, userName) => {
|
|
|
|
const tempCode = await getTemporaryCode(app);
|
|
|
|
|
|
|
|
const lock = await getLock(
|
|
|
|
app, USERS_LOCK_FILE, 1000, 2,
|
|
|
|
);
|
|
|
|
|
|
|
|
if (isNolock(lock)) { throw new Error('Unable to create temporary access, could not get lock - try again'); }
|
|
|
|
|
|
|
|
try {
|
|
|
|
const users = await app.datastore.loadJson(USERS_LIST_FILE);
|
|
|
|
|
|
|
|
const user = getUserByName(users, userName);
|
|
|
|
user.temporaryAccessId = tempCode.temporaryAccessId;
|
|
|
|
|
|
|
|
await app.datastore.updateJson(
|
|
|
|
USERS_LIST_FILE,
|
|
|
|
users,
|
|
|
|
);
|
|
|
|
} finally {
|
|
|
|
await releaseLock(app, lock);
|
|
|
|
}
|
|
|
|
|
|
|
|
const userAuth = await app.datastore.loadJson(
|
|
|
|
userAuthFile(userName),
|
|
|
|
);
|
|
|
|
userAuth.temporaryAccessHash = tempCode.temporaryAccessHash;
|
|
|
|
|
|
|
|
userAuth.temporaryAccessExpiryEpoch = tempCode.temporaryAccessExpiryEpoch;
|
|
|
|
|
|
|
|
await app.datastore.updateJson(
|
|
|
|
userAuthFile(userName),
|
|
|
|
userAuth,
|
|
|
|
);
|
|
|
|
|
|
|
|
return tempCode.tempCode;
|
|
|
|
};
|
|
|
|
|
|
|
|
const getTemporaryCode = async (app) => {
|
|
|
|
const tempCode = generate()
|
|
|
|
+ generate()
|
|
|
|
+ generate()
|
|
|
|
+ generate();
|
|
|
|
|
|
|
|
const tempId = generate();
|
|
|
|
|
|
|
|
return {
|
|
|
|
temporaryAccessHash: await app.crypto.hash(
|
|
|
|
tempCode,
|
|
|
|
),
|
|
|
|
temporaryAccessExpiryEpoch:
|
|
|
|
(await app.getEpochTime()) + tempCodeExpiryLength,
|
|
|
|
tempCode: `tmp:${tempId}:${tempCode}`,
|
|
|
|
temporaryAccessId: tempId,
|
|
|
|
};
|
|
|
|
};
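// Sketch of the temporary-access structure produced above (values are made
// up). The code handed to the user is 'tmp:<temporaryAccessId>:<secret>';
// only the hash of <secret>, the id and the expiry are persisted:
//
//   {
//     tempCode: 'tmp:h2Kd9a_xP:Jk3...TzQ',       // returned to the caller
//     temporaryAccessId: 'h2Kd9a_xP',            // stored on the user record
//     temporaryAccessHash: '$argon2i$...',       // stored in the user auth file
//     temporaryAccessExpiryEpoch: 1577836800000, // now + tempCodeExpiryLength
//   }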
|
|
|
|
|
|
|
|
const userRules = allUsers => [
|
|
|
|
makerule('name', 'username must be set',
|
|
|
|
u => isNonEmptyString(u.name)),
|
|
|
|
makerule('accessLevels', 'user must have at least one access level',
|
|
|
|
u => u.accessLevels.length > 0),
|
|
|
|
makerule('name', 'username must be unique',
|
|
|
|
u => filter(u2 => insensitiveEquals(u2.name, u.name))(allUsers).length === 1),
|
|
|
|
  makerule('accessLevels', 'access levels must only contain strings',
|
|
|
|
u => all(isNonEmptyString)(u.accessLevels)),
|
|
|
|
];
|
|
|
|
|
|
|
|
const validateUser = () => (allusers, user) => applyRuleSet(userRules(allusers))(user);
|
|
|
|
|
|
|
|
const getNewUser = app => () => apiWrapperSync(
|
|
|
|
app,
|
|
|
|
events.authApi.getNewUser,
|
|
|
|
permission.createUser.isAuthorized,
|
|
|
|
{},
|
|
|
|
_getNewUser, app,
|
|
|
|
);
|
|
|
|
|
|
|
|
const _getNewUser = () => ({
|
|
|
|
name: '',
|
|
|
|
accessLevels: [],
|
|
|
|
enabled: true,
|
|
|
|
temporaryAccessId: '',
|
|
|
|
});
|
|
|
|
|
|
|
|
const getNewUserAuth = app => () => apiWrapperSync(
|
|
|
|
app,
|
|
|
|
events.authApi.getNewUserAuth,
|
|
|
|
permission.createUser.isAuthorized,
|
|
|
|
{},
|
|
|
|
_getNewUserAuth, app,
|
|
|
|
);
|
|
|
|
|
|
|
|
const _getNewUserAuth = () => ({
|
|
|
|
passwordHash: '',
|
|
|
|
temporaryAccessHash: '',
|
|
|
|
temporaryAccessExpiryEpoch: 0,
|
|
|
|
});
|
|
|
|
|
|
|
|
const isValidPassword = app => password => apiWrapperSync(
|
|
|
|
app,
|
|
|
|
events.authApi.isValidPassword,
|
|
|
|
alwaysAuthorized,
|
|
|
|
{ password },
|
|
|
|
_isValidPassword, app, password,
|
|
|
|
);
|
|
|
|
|
|
|
|
const _isValidPassword = (app, password) => _scorePassword(password).score > 30;
|
|
|
|
|
|
|
|
const changeMyPassword = app => async (currentPw, newpassword) => apiWrapper(
|
|
|
|
app,
|
|
|
|
events.authApi.changeMyPassword,
|
|
|
|
alwaysAuthorized,
|
|
|
|
{ currentPw, newpassword },
|
|
|
|
_changeMyPassword, app, currentPw, newpassword,
|
|
|
|
);
|
|
|
|
|
|
|
|
const _changeMyPassword = async (app, currentPw, newpassword) => {
|
|
|
|
const existingAuth = await app.datastore.loadJson(
|
|
|
|
userAuthFile(app.user.name),
|
|
|
|
);
|
|
|
|
|
|
|
|
if (isSomething(existingAuth.passwordHash)) {
|
|
|
|
const verified = await app.crypto.verify(
|
|
|
|
existingAuth.passwordHash,
|
|
|
|
currentPw,
|
|
|
|
);
|
|
|
|
|
|
|
|
if (verified) {
|
|
|
|
      await doSet(
|
|
|
|
app, existingAuth,
|
|
|
|
app.user.name, newpassword,
|
|
|
|
);
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return false;
|
|
|
|
};
|
|
|
|
|
|
|
|
const setPasswordFromTemporaryCode = app => async (tempCode, newpassword) => apiWrapper(
|
|
|
|
app,
|
|
|
|
events.authApi.setPasswordFromTemporaryCode,
|
|
|
|
alwaysAuthorized,
|
|
|
|
{ tempCode, newpassword },
|
|
|
|
_setPasswordFromTemporaryCode, app, tempCode, newpassword,
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
|
|
const _setPasswordFromTemporaryCode = async (app, tempCode, newpassword) => {
|
|
|
|
const currentTime = await app.getEpochTime();
|
|
|
|
|
|
|
|
const temp = parseTemporaryCode(tempCode);
|
|
|
|
|
|
|
|
const user = $(await _getUsers(app), [
|
|
|
|
find(u => u.temporaryAccessId === temp.id),
|
|
|
|
]);
|
|
|
|
|
|
|
|
if (!user) { return false; }
|
|
|
|
|
|
|
|
const existingAuth = await app.datastore.loadJson(
|
|
|
|
userAuthFile(user.name),
|
|
|
|
);
|
|
|
|
|
|
|
|
if (isSomething(existingAuth.temporaryAccessHash)
|
|
|
|
&& existingAuth.temporaryAccessExpiryEpoch > currentTime) {
|
|
|
|
const verified = await app.crypto.verify(
|
|
|
|
existingAuth.temporaryAccessHash,
|
|
|
|
temp.code,
|
|
|
|
);
|
|
|
|
|
|
|
|
if (verified) {
|
|
|
|
await doSet(
|
|
|
|
app, existingAuth,
|
|
|
|
user.name, newpassword,
|
|
|
|
);
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return false;
|
|
|
|
};
|
|
|
|
|
|
|
|
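// Persists a new password hash to the user's auth file and clears any outstanding
// temporary access code.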
const doSet = async (app, auth, username, newpassword) => {
|
|
|
|
auth.temporaryAccessHash = '';
|
|
|
|
auth.temporaryAccessExpiryEpoch = 0;
|
|
|
|
auth.passwordHash = await app.crypto.hash(
|
|
|
|
newpassword,
|
|
|
|
);
|
|
|
|
await app.datastore.updateJson(
|
|
|
|
userAuthFile(username),
|
|
|
|
auth,
|
|
|
|
);
|
|
|
|
};
|
|
|
|
|
|
|
|
const scorePassword = app => password => apiWrapperSync(
|
|
|
|
app,
|
|
|
|
events.authApi.scorePassword,
|
|
|
|
alwaysAuthorized,
|
|
|
|
{ password },
|
|
|
|
_scorePassword, password,
|
|
|
|
);
|
|
|
|
|
|
|
|
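// Heuristic strength score: each character adds 5 / (occurrences of that character
// so far), plus 10 points per character class (digits/lower/upper/non-word) beyond
// the first. For example 'abc' scores 15 ('very weak'); _isValidPassword above
// requires a score above 30.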
const _scorePassword = (password) => {
|
|
|
|
// from https://stackoverflow.com/questions/948172/password-strength-meter
|
|
|
|
// thank you https://stackoverflow.com/users/46617/tm-lv
|
|
|
|
|
|
|
|
let score = 0;
|
|
|
|
if (!password) { return score; }
|
|
|
|
|
|
|
|
// award every unique letter until 5 repetitions
|
|
|
|
  const letters = {};
|
|
|
|
for (let i = 0; i < password.length; i++) {
|
|
|
|
letters[password[i]] = (letters[password[i]] || 0) + 1;
|
|
|
|
score += 5.0 / letters[password[i]];
|
|
|
|
}
|
|
|
|
|
|
|
|
// bonus points for mixing it up
|
|
|
|
const variations = {
|
|
|
|
digits: /\d/.test(password),
|
|
|
|
lower: /[a-z]/.test(password),
|
|
|
|
upper: /[A-Z]/.test(password),
|
|
|
|
nonWords: /\W/.test(password),
|
|
|
|
};
|
|
|
|
|
|
|
|
let variationCount = 0;
|
|
|
|
for (const check in variations) {
|
|
|
|
    variationCount += (variations[check] === true) ? 1 : 0;
|
|
|
|
}
|
|
|
|
score += (variationCount - 1) * 10;
|
|
|
|
|
|
|
|
const strengthText = score > 80
|
|
|
|
? 'strong'
|
|
|
|
: score > 60
|
|
|
|
? 'good'
|
|
|
|
: score >= 30
|
|
|
|
? 'weak'
|
|
|
|
: 'very weak';
|
|
|
|
|
|
|
|
return {
|
|
|
|
score: parseInt(score),
|
|
|
|
strengthText,
|
|
|
|
};
|
|
|
|
};
|
|
|
|
|
|
|
|
const createUser$1 = app => async (user, password = null) => apiWrapper(
|
|
|
|
app,
|
|
|
|
events.authApi.createUser,
|
|
|
|
permission.createUser.isAuthorized,
|
|
|
|
{ user, password },
|
|
|
|
_createUser, app, user, password,
|
|
|
|
);
|
|
|
|
|
|
|
|
const _createUser = async (app, user, password = null) => {
|
|
|
|
const lock = await getLock(
|
|
|
|
app, USERS_LOCK_FILE, 1000, 2,
|
|
|
|
);
|
|
|
|
|
|
|
|
if (isNolock(lock)) { throw new Error('Unable to create user, could not get lock - try again'); }
|
|
|
|
|
|
|
|
const users = await app.datastore.loadJson(USERS_LIST_FILE);
|
|
|
|
|
|
|
|
const userErrors = validateUser()([...users, user], user);
if (userErrors.length > 0) { throw new BadRequestError(`User is invalid. ${join$1('; ')(userErrors)}`); }
const { auth, tempCode, temporaryAccessId } = await getAccess(
|
|
|
|
app, password,
|
|
|
|
);
|
|
|
|
user.tempCode = tempCode;
|
|
|
|
user.temporaryAccessId = temporaryAccessId;
|
|
|
|
|
|
|
|
if (some(u => insensitiveEquals(u.name, user.name))(users)) {
|
|
|
|
throw new BadRequestError('User already exists');
|
|
|
|
}
|
|
|
|
|
|
|
|
users.push(
|
|
|
|
stripUserOfSensitiveStuff(user),
|
|
|
|
);
|
|
|
|
|
|
|
|
await app.datastore.updateJson(
|
|
|
|
USERS_LIST_FILE,
|
|
|
|
users,
|
|
|
|
);
|
|
|
|
|
|
|
|
try {
|
|
|
|
await app.datastore.createJson(
|
|
|
|
userAuthFile(user.name),
|
|
|
|
auth,
|
|
|
|
);
|
|
|
|
} catch (_) {
|
|
|
|
await app.datastore.updateJson(
|
|
|
|
userAuthFile(user.name),
|
|
|
|
auth,
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
await releaseLock(app, lock);
|
|
|
|
|
|
|
|
return user;
|
|
|
|
};
|
|
|
|
|
|
|
|
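// Builds the initial auth record for a new user: if a password is supplied (and
// passes the strength check) it is hashed immediately; otherwise a temporary access
// code is generated so the user can set their own password later.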
const getAccess = async (app, password) => {
|
|
|
|
const auth = getNewUserAuth(app)();
|
|
|
|
|
|
|
|
if (isNonEmptyString(password)) {
|
|
|
|
    if (_isValidPassword(app, password)) {
|
|
|
|
auth.passwordHash = await app.crypto.hash(password);
|
|
|
|
auth.temporaryAccessHash = '';
|
|
|
|
auth.temporaryAccessId = '';
|
|
|
|
auth.temporaryAccessExpiryEpoch = 0;
|
|
|
|
return { auth };
|
|
|
|
}
|
|
|
|
throw new BadRequestError('Password does not meet requirements');
|
|
|
|
} else {
|
|
|
|
const tempAccess = await getTemporaryCode(app);
|
|
|
|
auth.temporaryAccessHash = tempAccess.temporaryAccessHash;
|
|
|
|
auth.temporaryAccessExpiryEpoch = tempAccess.temporaryAccessExpiryEpoch;
|
|
|
|
auth.passwordHash = '';
|
|
|
|
return ({
|
|
|
|
auth,
|
|
|
|
tempCode: tempAccess.tempCode,
|
|
|
|
temporaryAccessId: tempAccess.temporaryAccessId,
|
|
|
|
});
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const enableUser = app => async username => apiWrapper(
|
|
|
|
app,
|
|
|
|
events.authApi.enableUser,
|
|
|
|
permission.enableDisableUser.isAuthorized,
|
|
|
|
{ username },
|
|
|
|
_enableUser, app, username,
|
|
|
|
);
|
|
|
|
|
|
|
|
const disableUser = app => async username => apiWrapper(
|
|
|
|
app,
|
|
|
|
events.authApi.disableUser,
|
|
|
|
permission.enableDisableUser.isAuthorized,
|
|
|
|
{ username },
|
|
|
|
_disableUser, app, username,
|
|
|
|
);
|
|
|
|
|
|
|
|
const _enableUser = async (app, username) => await toggleUser(app, username, true);
|
|
|
|
|
|
|
|
const _disableUser = async (app, username) => await toggleUser(app, username, false);
|
|
|
|
|
|
|
|
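// Shared implementation of enable/disable: take the users-list lock, flip the
// `enabled` flag only if it actually changes, then release the lock.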
const toggleUser = async (app, username, enabled) => {
|
|
|
|
const lock = await getLock(app, USERS_LOCK_FILE, 1000, 1, 0);
|
|
|
|
|
|
|
|
const actionName = enabled ? 'enable' : 'disable';
|
|
|
|
|
|
|
|
if (isNolock(lock)) { throw new Error(`Could not ${actionName} user - cannot get lock`); }
|
|
|
|
|
|
|
|
try {
|
|
|
|
const users = await app.datastore.loadJson(USERS_LIST_FILE);
|
|
|
|
const user = getUserByName(users, username);
|
|
|
|
if (!user) { throw new NotFoundError(`Could not find user to ${actionName}`); }
|
|
|
|
|
|
|
|
if (user.enabled === !enabled) {
|
|
|
|
user.enabled = enabled;
|
|
|
|
await app.datastore.updateJson(USERS_LIST_FILE, users);
|
|
|
|
}
|
|
|
|
} finally {
|
|
|
|
    await releaseLock(app, lock);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const getNewAccessLevel = () => () => ({
|
|
|
|
name: '',
|
|
|
|
permissions: [],
|
|
|
|
  default: false
|
|
|
|
});
|
|
|
|
|
|
|
|
const isAllowedType = t => $(permissionTypes, [
|
|
|
|
values,
includes(t),
]);
|
|
|
|
|
|
|
|
const isRecordOrIndexType = t => some(p => p === t)([
|
|
|
|
permissionTypes.CREATE_RECORD,
|
|
|
|
permissionTypes.UPDATE_RECORD,
|
|
|
|
permissionTypes.DELETE_RECORD,
|
|
|
|
permissionTypes.READ_RECORD,
|
|
|
|
permissionTypes.READ_INDEX,
|
|
|
|
permissionTypes.EXECUTE_ACTION,
|
|
|
|
]);
|
|
|
|
|
|
|
|
|
|
|
|
const permissionRules = app => ([
|
|
|
|
makerule('type', 'type must be one of allowed types',
|
|
|
|
p => isAllowedType(p.type)),
|
|
|
|
makerule('nodeKey', 'record and index permissions must include a valid nodeKey',
|
|
|
|
p => (!isRecordOrIndexType(p.type))
|
|
|
|
|| isSomething(getNode(app.hierarchy, p.nodeKey))),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const applyPermissionRules = app => applyRuleSet(permissionRules(app));
|
|
|
|
|
|
|
|
const accessLevelRules = allLevels => ([
|
|
|
|
makerule('name', 'name must be set',
|
|
|
|
l => isNonEmptyString(l.name)),
|
|
|
|
makerule('name', 'access level names must be unique',
l => isEmpty(l.name)
|| filter(a => insensitiveEquals(l.name, a.name))(allLevels).length === 1),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const applyLevelRules = allLevels => applyRuleSet(accessLevelRules(allLevels));
|
|
|
|
|
|
|
|
const validateAccessLevel = app => (allLevels, level) => {
|
|
|
|
const errs = $(level.permissions, [
|
|
|
|
map(applyPermissionRules(app)),
|
|
|
|
flatten,
|
|
|
|
concat(
|
|
|
|
applyLevelRules(allLevels)(level),
|
|
|
|
),
|
|
|
|
]);
|
|
|
|
|
|
|
|
return errs;
|
|
|
|
};
|
|
|
|
|
|
|
|
const validateAccessLevels = app => allLevels => apiWrapperSync(
|
|
|
|
app,
|
|
|
|
events.authApi.validateAccessLevels,
|
|
|
|
alwaysAuthorized,
|
|
|
|
{ allLevels },
|
|
|
|
_validateAccessLevels, app, allLevels,
|
|
|
|
);
|
|
|
|
|
|
|
|
const _validateAccessLevels = (app, allLevels) => $(allLevels, [
|
|
|
|
map(l => validateAccessLevel(app)(allLevels, l)),
|
|
|
|
flatten,
|
|
|
|
uniqWith((x, y) => x.field === y.field
|
|
|
|
&& x.item === y.item
|
|
|
|
&& x.error === y.error),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const saveAccessLevels = app => async accessLevels => apiWrapper(
|
|
|
|
app,
|
|
|
|
events.authApi.saveAccessLevels,
|
|
|
|
permission.writeAccessLevels.isAuthorized,
|
|
|
|
{ accessLevels },
|
|
|
|
_saveAccessLevels, app, accessLevels,
|
|
|
|
);
|
|
|
|
|
|
|
|
const _saveAccessLevels = async (app, accessLevels) => {
|
|
|
|
const validationErrors = validateAccessLevels(app)(accessLevels.levels);
|
|
|
|
if (validationErrors.length > 0) {
|
|
|
|
const errs = $(validationErrors, [
|
|
|
|
map(e => e.error),
join$1(', '),
]);
|
|
|
|
throw new Error(
|
|
|
|
`Access Levels Invalid: ${errs}`,
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
const lock = await getLock(
|
|
|
|
app, ACCESS_LEVELS_LOCK_FILE, 2000, 2,
|
|
|
|
);
|
|
|
|
|
|
|
|
if (isNolock(lock)) { throw new Error('Could not get lock to save access levels'); }
|
|
|
|
|
|
|
|
try {
|
|
|
|
const existing = await app.datastore.loadJson(ACCESS_LEVELS_FILE);
|
|
|
|
    if (existing.version !== accessLevels.version) { throw new Error('Access levels have already been updated since you loaded them'); }
|
|
|
|
|
|
|
|
accessLevels.version++;
|
|
|
|
|
|
|
|
    await app.datastore.updateJson(ACCESS_LEVELS_FILE, accessLevels);
|
|
|
|
} finally {
|
|
|
|
await releaseLock(app, lock);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
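// Produces a permission list covering every record and index node in the hierarchy,
// every registered action and all non-node-specific permissions. Used for the
// built-in full-access user (see userWithFullAccess at the bottom of this module).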
const generateFullPermissions = (app) => {
|
|
|
|
const allNodes = getFlattenedHierarchy(app.hierarchy);
|
|
|
|
const accessLevel = { permissions: [] };
|
|
|
|
|
|
|
|
const recordNodes = $(allNodes, [
|
|
|
|
filter(isRecord),
|
|
|
|
]);
|
|
|
|
|
|
|
|
for (const n of recordNodes) {
|
|
|
|
permission.createRecord.add(n.nodeKey(), accessLevel);
|
|
|
|
permission.updateRecord.add(n.nodeKey(), accessLevel);
|
|
|
|
permission.deleteRecord.add(n.nodeKey(), accessLevel);
|
|
|
|
permission.readRecord.add(n.nodeKey(), accessLevel);
|
|
|
|
}
|
|
|
|
|
|
|
|
const indexNodes = $(allNodes, [
|
|
|
|
filter(isIndex),
|
|
|
|
]);
|
|
|
|
|
|
|
|
for (const n of indexNodes) {
|
|
|
|
permission.readIndex.add(n.nodeKey(), accessLevel);
|
|
|
|
}
for (const a of keys(app.actions)) {
permission.executeAction.add(a, accessLevel);
|
|
|
|
}
|
|
|
|
|
|
|
|
$(permission, [
|
|
|
|
values,
|
|
|
|
filter(p => !p.isNode),
|
|
|
|
each(p => p.add(accessLevel)),
|
|
|
|
]);
|
|
|
|
|
|
|
|
return accessLevel.permissions;
|
|
|
|
};
|
|
|
|
|
|
|
|
const setUserAccessLevels$1 = app => async (userName, accessLevels) => apiWrapper(
|
|
|
|
app,
|
|
|
|
events.authApi.setUserAccessLevels,
|
|
|
|
permission.setUserAccessLevels.isAuthorized,
|
|
|
|
{ userName, accessLevels },
|
|
|
|
_setUserAccessLevels, app, userName, accessLevels,
|
|
|
|
);
|
|
|
|
|
|
|
|
const _setUserAccessLevels = async (app, username, accessLevels) => {
|
|
|
|
const lock = await getLock(app, USERS_LOCK_FILE, 1000, 1, 0);
|
|
|
|
|
|
|
|
const actualAccessLevels = $(
|
|
|
|
await app.datastore.loadJson(ACCESS_LEVELS_FILE),
|
|
|
|
[
|
|
|
|
l => l.levels,
|
|
|
|
map(l => l.name),
|
|
|
|
],
|
|
|
|
);
|
|
|
|
|
|
|
|
const missing = difference(accessLevels)(actualAccessLevels);
|
|
|
|
if (missing.length > 0) {
throw new Error(`Invalid access levels supplied: ${join$1(', ', missing)}`);
}
|
|
|
|
|
|
|
|
  if (isNolock(lock)) { throw new Error('Could not set user access levels - could not get lock'); }
|
|
|
|
|
|
|
|
try {
|
|
|
|
const users = await app.datastore.loadJson(USERS_LIST_FILE);
|
|
|
|
const user = getUserByName(users, username);
|
|
|
|
    if (!user) { throw new NotFoundError(`Could not find user ${username}`); }
|
|
|
|
|
|
|
|
user.accessLevels = accessLevels;
|
|
|
|
await app.datastore.updateJson(USERS_LIST_FILE, users);
|
|
|
|
} finally {
|
|
|
|
    await releaseLock(app, lock);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const getAuthApi = app => ({
|
|
|
|
authenticate: authenticate(app),
|
|
|
|
authenticateTemporaryAccess: authenticateTemporaryAccess(app),
|
|
|
|
createTemporaryAccess: createTemporaryAccess$1(app),
|
|
|
|
createUser: createUser$1(app),
|
|
|
|
loadAccessLevels: loadAccessLevels(app),
|
|
|
|
enableUser: enableUser(app),
|
|
|
|
disableUser: disableUser(app),
|
|
|
|
getNewAccessLevel: getNewAccessLevel(),
|
|
|
|
getNewUser: getNewUser(app),
|
|
|
|
getNewUserAuth: getNewUserAuth(app),
|
|
|
|
getUsers: getUsers(app),
|
|
|
|
saveAccessLevels: saveAccessLevels(app),
|
|
|
|
isAuthorized: isAuthorized(app),
|
|
|
|
changeMyPassword: changeMyPassword(app),
|
|
|
|
setPasswordFromTemporaryCode: setPasswordFromTemporaryCode(app),
|
|
|
|
scorePassword,
|
|
|
|
isValidPassword: isValidPassword(app),
|
|
|
|
validateUser: validateUser(),
|
|
|
|
validateAccessLevels: validateAccessLevels(app),
|
|
|
|
generateFullPermissions: () => generateFullPermissions(app),
|
|
|
|
setUserAccessLevels: setUserAccessLevels$1(app),
|
|
|
|
});
|
|
|
|
|
|
|
|
const executeAction$1 = app => (actionName, options) => {
|
|
|
|
apiWrapperSync(
|
|
|
|
app,
|
|
|
|
events.actionsApi.execute,
|
|
|
|
permission.executeAction.isAuthorized(actionName),
|
|
|
|
{ actionName, options },
|
|
|
|
app.actions[actionName], options,
|
|
|
|
);
|
|
|
|
};
|
|
|
|
|
|
|
|
const _executeAction = (behaviourSources, action, options) => behaviourSources[action.behaviourSource][action.behaviourName](options);
|
|
|
|
|
|
|
|
const getActionsApi = app => ({
|
|
|
|
execute: executeAction$1(app),
|
|
|
|
});
|
|
|
|
|
|
|
|
const publish = handlers => async (eventName, context = {}) => {
if (!has(eventName)(handlers)) return;
for (const handler of handlers[eventName]) {
|
|
|
|
await handler(eventName, context);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const subscribe = handlers => (eventName, handler) => {
if (!has(eventName)(handlers)) {
handlers[eventName] = [];
|
|
|
|
}
|
|
|
|
handlers[eventName].push(handler);
|
|
|
|
};
|
|
|
|
|
|
|
|
const createEventAggregator = () => {
|
|
|
|
const handlers = {};
|
|
|
|
const eventAggregator = ({
|
|
|
|
publish: publish(handlers),
|
|
|
|
subscribe: subscribe(handlers),
|
|
|
|
});
|
|
|
|
return eventAggregator;
|
|
|
|
};
const createJson = originalCreateFile => async (key, obj, retries = 2, delay = 100) => await retry(originalCreateFile, retries, delay, key, JSON.stringify(obj));
const createNewFile = originalCreateFile => async (path, content, retries = 2, delay = 100) => await retry(originalCreateFile, retries, delay, path, content);
const loadJson = datastore => async (key, retries = 3, delay = 100) => {
try {
|
|
|
|
return await retry(JSON.parse, retries, delay, await datastore.loadFile(key));
|
|
|
|
} catch (err) {
const newErr = new NotFoundError(err.message);
|
|
|
|
newErr.stack = err.stack;
|
|
|
|
throw(newErr);
}
|
|
|
|
};
const updateJson = datastore => async (key, obj, retries = 3, delay = 100) => {
try {
|
|
|
|
return await retry(datastore.updateFile, retries, delay, key, JSON.stringify(obj));
|
|
|
|
} catch (err) {
const newErr = new NotFoundError(err.message);
|
|
|
|
newErr.stack = err.stack;
|
|
|
|
throw(newErr);
}
|
|
|
|
};
|
|
|
|
|
|
|
|
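// Decorates a raw datastore (createFile/loadFile/updateFile/...) with JSON helpers
// that serialise/deserialise and retry on transient failures.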
const setupDatastore = (datastore) => {
|
|
|
|
const originalCreateFile = datastore.createFile;
|
|
|
|
datastore.loadJson = loadJson(datastore);
|
|
|
|
datastore.createJson = createJson(originalCreateFile);
|
|
|
|
datastore.updateJson = updateJson(datastore);
|
|
|
|
datastore.createFile = createNewFile(originalCreateFile);
|
|
|
|
if (datastore.createEmptyDb) { delete datastore.createEmptyDb; }
|
|
|
|
return datastore;
|
|
|
|
};
|
|
|
|
|
|
|
|
const compileCode = code => {
|
|
|
|
let func;
|
|
|
|
|
|
|
|
try {
|
|
|
|
func = compileCode$1(code);
|
|
|
|
} catch(e) {
|
|
|
|
e.message = `Error compiling code : ${code} : ${e.message}`;
|
|
|
|
throw e;
|
|
|
|
}
|
|
|
|
|
|
|
|
return func;
|
|
|
|
};
|
|
|
|
|
|
|
|
const compileExpression = code => {
|
|
|
|
let func;
|
|
|
|
|
|
|
|
try {
|
|
|
|
func = compileExpression$1(code);
|
|
|
|
} catch(e) {
|
|
|
|
e.message = `Error compiling expression : ${code} : ${e.message}`;
|
|
|
|
throw e;
|
|
|
|
}
|
|
|
|
|
|
|
|
return func;
|
|
|
|
};
|
|
|
|
|
|
|
|
const initialiseActions = (subscribe, behaviourSources, actions, triggers, apis) => {
|
|
|
|
validateSources(behaviourSources, actions);
|
|
|
|
subscribeTriggers(subscribe, behaviourSources, actions, triggers, apis);
|
|
|
|
return createActionsCollection(behaviourSources, actions);
|
|
|
|
};
|
|
|
|
|
|
|
|
const createActionsCollection = (behaviourSources, actions) => $(actions, [
|
|
|
|
reduce((all, a) => {
|
|
|
|
all[a.name] = opts => _executeAction(behaviourSources, a, opts);
|
|
|
|
return all;
|
|
|
|
}, {}),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const subscribeTriggers = (subscribe, behaviourSources, actions, triggers, apis) => {
|
|
|
|
const createOptions = (optionsCreator, eventContext) => {
|
|
|
|
if (!optionsCreator) return {};
|
|
|
|
const create = compileCode(optionsCreator);
|
|
|
|
return create({ context: eventContext, apis });
|
|
|
|
};
|
|
|
|
|
|
|
|
const shouldRunTrigger = (trigger, eventContext) => {
|
|
|
|
if (!trigger.condition) return true;
|
|
|
|
const shouldRun = compileExpression(trigger.condition);
|
|
|
|
return shouldRun({ context: eventContext });
|
|
|
|
};
|
|
|
|
|
|
|
|
for (let trig of triggers) {
|
|
|
|
subscribe(trig.eventName, async (ev, ctx) => {
|
|
|
|
if (shouldRunTrigger(trig, ctx)) {
|
|
|
|
await _executeAction(
|
|
|
|
behaviourSources,
|
|
|
|
find(a => a.name === trig.actionName)(actions),
|
|
|
|
createOptions(trig.optionsCreator, ctx),
|
|
|
|
);
|
|
|
|
}
|
|
|
|
});
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const validateSources = (behaviourSources, actions) => {
|
|
|
|
const declaredSources = $(actions, [
|
|
|
|
uniqBy(a => a.behaviourSource),
|
|
|
|
map(a => a.behaviourSource),
|
|
|
|
]);
const suppliedSources = keys(behaviourSources);
const missingSources = difference(
|
|
|
|
declaredSources, suppliedSources,
|
|
|
|
);
|
|
|
|
|
|
|
|
if (missingSources.length > 0) {
throw new BadRequestError(`Declared behaviour sources are not supplied: ${join$1(', ', missingSources)}`);
}
|
|
|
|
|
|
|
|
const missingBehaviours = $(actions, [
filter(a => !isFunction(behaviourSources[a.behaviourSource][a.behaviourName])),
map(a => `Action: ${a.name} : ${a.behaviourSource}.${a.behaviourName}`),
|
|
|
|
]);
|
|
|
|
|
|
|
|
if (missingBehaviours.length > 0) {
throw new NotFoundError(`Missing behaviours: could not find behaviour functions: ${join$1(', ', missingBehaviours)}`);
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const retrieve = async (app) => {
|
|
|
|
const transactionFiles = await app.datastore.getFolderContents(
|
|
|
|
TRANSACTIONS_FOLDER,
|
|
|
|
);
|
|
|
|
|
|
|
|
let transactions = [];
|
|
|
|
|
|
|
|
if (some(isBuildIndexFolder)(transactionFiles)) {
|
|
|
|
const buildIndexFolder = find(isBuildIndexFolder)(transactionFiles);
|
|
|
|
|
|
|
|
transactions = await retrieveBuildIndexTransactions(
|
|
|
|
app,
|
|
|
|
joinKey(TRANSACTIONS_FOLDER, buildIndexFolder),
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
if (transactions.length > 0) return transactions;
|
|
|
|
|
|
|
|
return await retrieveStandardTransactions(
|
|
|
|
app, transactionFiles,
|
|
|
|
);
|
|
|
|
};
|
|
|
|
|
|
|
|
const retrieveBuildIndexTransactions = async (app, buildIndexFolder) => {
|
|
|
|
const childFolders = await app.datastore.getFolderContents(buildIndexFolder);
|
|
|
|
if (childFolders.length === 0) {
|
|
|
|
// cleanup
|
|
|
|
await app.datastore.deleteFolder(buildIndexFolder);
|
|
|
|
return [];
|
|
|
|
}
|
|
|
|
|
|
|
|
const getTransactionFiles = async (childFolderIndex = 0) => {
|
|
|
|
if (childFolderIndex >= childFolders.length) return [];
|
|
|
|
|
|
|
|
const childFolderKey = joinKey(buildIndexFolder, childFolders[childFolderIndex]);
|
|
|
|
const files = await app.datastore.getFolderContents(
|
|
|
|
childFolderKey,
|
|
|
|
);
|
|
|
|
|
|
|
|
if (files.length === 0) {
|
|
|
|
await app.datastore.deleteFolder(childFolderKey);
|
|
|
|
return await getTransactionFiles(childFolderIndex + 1);
|
|
|
|
}
|
|
|
|
|
|
|
|
return { childFolderKey, files };
|
|
|
|
};
|
|
|
|
|
|
|
|
const transactionFiles = await getTransactionFiles();
|
|
|
|
|
|
|
|
if (transactionFiles.files.length === 0) return [];
|
|
|
|
|
|
|
|
const transactions = $(transactionFiles.files, [
|
|
|
|
map(parseTransactionId),
|
|
|
|
]);
|
|
|
|
|
|
|
|
for (const t of transactions) {
|
|
|
|
const transactionContent = await app.datastore.loadJson(
|
|
|
|
joinKey(
|
|
|
|
transactionFiles.childFolderKey,
|
|
|
|
t.fullId,
|
|
|
|
),
|
|
|
|
);
|
|
|
|
t.record = await _load(app, transactionContent.recordKey);
|
|
|
|
}
|
|
|
|
|
|
|
|
transactions.indexNode = $(buildIndexFolder, [
|
|
|
|
getLastPartInKey,
|
|
|
|
nodeKeyHashFromBuildFolder,
|
|
|
|
getNodeFromNodeKeyHash(app.hierarchy),
|
|
|
|
]);
|
|
|
|
|
|
|
|
transactions.folderKey = transactionFiles.childFolderKey;
|
|
|
|
|
|
|
|
return transactions;
|
|
|
|
};
|
|
|
|
|
|
|
|
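// Reads pending (non build-index) transaction files and de-duplicates them per
// record: a lone transaction is kept as-is, a delete wins over updates and creates,
// otherwise a single verified update or create is chosen. Superseded transaction
// files are deleted before the survivors are returned.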
const retrieveStandardTransactions = async (app, transactionFiles) => {
|
|
|
|
const transactionIds = $(transactionFiles, [
|
|
|
|
filter(f => f !== LOCK_FILENAME
|
|
|
|
&& !isBuildIndexFolder(f)),
|
|
|
|
map(parseTransactionId),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const transactionIdsByRecord = $(transactionIds, [
|
|
|
|
groupBy('recordId'),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const dedupedTransactions = [];
|
|
|
|
|
|
|
|
const verify = async (t) => {
|
|
|
|
if (t.verified === true) return t;
|
|
|
|
|
|
|
|
const id = getTransactionId(
|
|
|
|
t.recordId,
|
|
|
|
t.transactionType,
|
|
|
|
t.uniqueId,
|
|
|
|
);
|
|
|
|
|
|
|
|
const transaction = await app.datastore.loadJson(
|
|
|
|
joinKey(TRANSACTIONS_FOLDER, id),
|
|
|
|
);
|
|
|
|
|
|
|
|
if (isDelete(t)) {
|
|
|
|
t.record = transaction.record;
|
|
|
|
t.verified = true;
|
|
|
|
return t;
|
|
|
|
}
|
|
|
|
|
|
|
|
const rec = await _load(
|
|
|
|
app,
|
|
|
|
transaction.recordKey,
|
|
|
|
);
|
|
|
|
if (rec.transactionId === id) {
|
|
|
|
t.record = rec;
|
|
|
|
if (transaction.oldRecord) { t.oldRecord = transaction.oldRecord; }
|
|
|
|
t.verified = true;
|
|
|
|
} else {
|
|
|
|
t.verified = false;
|
|
|
|
}
|
|
|
|
|
|
|
|
return t;
|
|
|
|
};
|
|
|
|
|
|
|
|
const pickOne = async (trans, forType) => {
|
|
|
|
const transForType = filter(forType)(trans);
|
|
|
|
if (transForType.length === 1) {
|
|
|
|
const t = await verify(transForType[0]);
|
|
|
|
return (t.verified === true ? t : null);
|
|
|
|
}
|
|
|
|
for (let t of transForType) {
|
|
|
|
t = await verify(t);
|
|
|
|
if (t.verified === true) { return t; }
|
|
|
|
}
|
|
|
|
|
|
|
|
return null;
|
|
|
|
};
|
|
|
|
|
|
|
|
for (const recordId in transactionIdsByRecord) {
|
|
|
|
const transIdsForRecord = transactionIdsByRecord[recordId];
|
|
|
|
if (transIdsForRecord.length === 1) {
|
|
|
|
const t = await verify(transIdsForRecord[0]);
|
|
|
|
if (t.verified) { dedupedTransactions.push(t); }
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
if (some(isDelete)(transIdsForRecord)) {
|
|
|
|
const t = await verify(find(isDelete)(transIdsForRecord));
|
|
|
|
if (t.verified) { dedupedTransactions.push(t); }
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
if (some(isUpdate)(transIdsForRecord)) {
|
|
|
|
const upd = await pickOne(transIdsForRecord, isUpdate);
|
|
|
|
if (isSomething(upd) && upd.verified) { dedupedTransactions.push(upd); }
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
if (some(isCreate)(transIdsForRecord)) {
|
|
|
|
const cre = await pickOne(transIdsForRecord, isCreate);
|
|
|
|
if (isSomething(cre)) { dedupedTransactions.push(cre); }
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
const duplicates = $(transactionIds, [
|
|
|
|
filter(t => none(ddt => ddt.uniqueId === t.uniqueId)(dedupedTransactions)),
|
|
|
|
]);
|
|
|
|
|
|
|
|
|
|
|
|
const deletePromises = map(t => app.datastore.deleteFile(
|
|
|
|
joinKey(
|
|
|
|
TRANSACTIONS_FOLDER,
|
|
|
|
getTransactionId(
|
|
|
|
t.recordId,
|
|
|
|
t.transactionType,
|
|
|
|
t.uniqueId,
|
|
|
|
),
|
|
|
|
),
|
|
|
|
))(duplicates);
|
|
|
|
|
|
|
|
await Promise.all(deletePromises);
|
|
|
|
|
|
|
|
return dedupedTransactions;
|
|
|
|
};
|
|
|
|
|
|
|
|
const parseTransactionId = (id) => {
|
|
|
|
const splitId = split(idSep)(id);
|
|
|
|
return ({
|
|
|
|
recordId: splitId[0],
|
|
|
|
transactionType: splitId[1],
|
|
|
|
uniqueId: splitId[2],
|
|
|
|
fullId: id,
|
|
|
|
});
|
|
|
|
};
|
|
|
|
|
2019-12-30 19:08:50 +01:00
|
|
|
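// For a record, finds every ancestor index that could contain it: global (root)
// indexes plus ancestor-type indexes on each record node along the record key's
// path, honouring each index's allowedRecordNodeIds.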
const getRelevantAncestorIndexes = (hierarchy, record) => {
|
2019-09-10 10:49:22 +02:00
|
|
|
const key = record.key;
|
|
|
|
const keyParts = splitKey(key);
|
|
|
|
const nodeId = getRecordNodeId(key);
|
|
|
|
|
2019-12-30 19:08:50 +01:00
|
|
|
const flatHierarchy = orderBy$1(getFlattenedHierarchy(hierarchy),
|
2019-09-10 10:49:22 +02:00
|
|
|
[node => node.pathRegx().length],
|
|
|
|
['desc']);
|
|
|
|
|
2019-12-30 19:08:50 +01:00
|
|
|
const makeindexNodeAndDir_ForAncestorIndex = (indexNode, parentRecordDir) => makeIndexNodeAndDir(indexNode, joinKey(parentRecordDir, indexNode.name));
|
2019-09-10 10:49:22 +02:00
|
|
|
|
|
|
|
const traverseAncestorIndexesInPath = () => reduce((acc, part) => {
|
|
|
|
const currentIndexKey = joinKey(acc.lastIndexKey, part);
|
|
|
|
acc.lastIndexKey = currentIndexKey;
|
|
|
|
const testPathRegx = p => new RegExp(`${p.pathRegx()}$`).test(currentIndexKey);
|
|
|
|
const nodeMatch = find(testPathRegx)(flatHierarchy);
|
|
|
|
|
|
|
|
if (isNothing(nodeMatch)) { return acc; }
|
|
|
|
|
|
|
|
if (!isRecord(nodeMatch)
|
|
|
|
|| nodeMatch.indexes.length === 0) { return acc; }
|
|
|
|
|
|
|
|
const indexes = $(nodeMatch.indexes, [
|
|
|
|
filter(i => i.indexType === indexTypes.ancestor
|
|
|
|
&& (i.allowedRecordNodeIds.length === 0
|
2019-09-28 06:28:11 +02:00
|
|
|
|| includes(nodeId)(i.allowedRecordNodeIds))),
|
2019-09-10 10:49:22 +02:00
|
|
|
]);
|
|
|
|
|
2019-12-30 19:08:50 +01:00
|
|
|
const currentRecordDir = getRecordInfo(hierarchy, currentIndexKey).dir;
|
|
|
|
|
2019-09-10 10:49:22 +02:00
|
|
|
each(v => acc.nodesAndKeys.push(
|
2019-12-30 19:08:50 +01:00
|
|
|
makeindexNodeAndDir_ForAncestorIndex(v, currentRecordDir),
|
2019-09-10 10:49:22 +02:00
|
|
|
))(indexes);
|
|
|
|
|
|
|
|
return acc;
|
|
|
|
}, { lastIndexKey: '', nodesAndKeys: [] })(keyParts).nodesAndKeys;
|
|
|
|
|
|
|
|
const rootIndexes = $(flatHierarchy, [
|
|
|
|
filter(n => isGlobalIndex(n) && recordNodeIdIsAllowed(n)(nodeId)),
|
2019-12-30 19:08:50 +01:00
|
|
|
map(i => makeIndexNodeAndDir(
|
|
|
|
i,
|
|
|
|
getIndexDir(hierarchy, i.nodeKey()))),
|
2019-09-10 10:49:22 +02:00
|
|
|
]);
|
|
|
|
|
|
|
|
return union(traverseAncestorIndexesInPath())(rootIndexes);
|
|
|
|
};
|
|
|
|
|
2019-12-30 19:08:50 +01:00
|
|
|
const getRelevantReverseReferenceIndexes = (hierarchy, record) => $(record.key, [
|
|
|
|
getExactNodeForKey(hierarchy),
|
2019-09-10 10:49:22 +02:00
|
|
|
n => n.fields,
|
|
|
|
filter(f => f.type === 'reference'
|
|
|
|
&& isSomething(record[f.name])
|
|
|
|
&& isNonEmptyString(record[f.name].key)),
|
|
|
|
map(f => $(f.typeOptions.reverseIndexNodeKeys, [
|
|
|
|
map(n => ({
|
2019-12-30 19:08:50 +01:00
|
|
|
recordNode: getNode(hierarchy, n),
|
2019-09-10 10:49:22 +02:00
|
|
|
field: f,
|
|
|
|
})),
|
|
|
|
])),
|
|
|
|
flatten,
|
2019-12-30 19:08:50 +01:00
|
|
|
map(n => makeIndexNodeAndDir(
|
2019-09-10 10:49:22 +02:00
|
|
|
n.recordNode,
|
2019-12-30 19:08:50 +01:00
|
|
|
joinKey(
|
|
|
|
getRecordInfo(hierarchy, record[n.field.name].key).dir,
|
|
|
|
n.recordNode.name),
|
2019-09-10 10:49:22 +02:00
|
|
|
)),
|
|
|
|
]);
|
|
|
|
|
2019-12-30 19:08:50 +01:00
|
|
|
const makeIndexNodeAndDir = (indexNode, indexDir) => ({ indexNode, indexDir });
|
2019-09-10 10:49:22 +02:00
|
|
|
|
|
|
|
// adapted from https://github.com/dex4er/js-promise-writable
|
|
|
|
// Thank you :)
|
|
|
|
const promiseWriteableStream = stream => {
|
|
|
|
|
|
|
|
let _errored;
|
|
|
|
|
|
|
|
const _errorHandler = err => {
|
|
|
|
_errored = err;
|
|
|
|
};
|
|
|
|
|
|
|
|
stream.on("error", _errorHandler);
|
|
|
|
|
|
|
|
const write = chunk => {
|
|
|
|
let rejected = false;
|
|
|
|
|
|
|
|
return new Promise((resolve, reject) => {
|
|
|
|
if (_errored) {
|
|
|
|
const err = _errored;
|
|
|
|
_errored = undefined;
|
|
|
|
return reject(err);
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!stream.writable || stream.closed || stream.destroyed) {
|
|
|
|
return reject(new Error("write after end"));
|
|
|
|
}
|
|
|
|
|
|
|
|
const writeErrorHandler = err => {
|
|
|
|
_errored = undefined;
|
|
|
|
rejected = true;
|
|
|
|
reject(err);
|
|
|
|
};
|
|
|
|
|
|
|
|
stream.once("error", writeErrorHandler);
|
|
|
|
|
|
|
|
const canWrite = stream.write(chunk);
|
|
|
|
|
|
|
|
stream.removeListener("error", writeErrorHandler);
|
|
|
|
|
|
|
|
if (canWrite) {
|
|
|
|
if (!rejected) {
|
|
|
|
resolve(chunk.length);
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
const errorHandler = err => {
|
|
|
|
_errored = undefined;
|
|
|
|
removeListeners();
|
|
|
|
reject(err);
|
|
|
|
};
|
|
|
|
|
|
|
|
const drainHandler = () => {
|
|
|
|
removeListeners();
|
|
|
|
resolve(chunk.length);
|
|
|
|
};
|
|
|
|
|
|
|
|
const closeHandler = () => {
|
|
|
|
removeListeners();
|
|
|
|
resolve(chunk.length);
|
|
|
|
};
|
|
|
|
|
|
|
|
const finishHandler = () => {
|
|
|
|
removeListeners();
|
|
|
|
resolve(chunk.length);
|
|
|
|
};
|
|
|
|
|
|
|
|
const removeListeners = () => {
|
|
|
|
stream.removeListener("close", closeHandler);
|
|
|
|
stream.removeListener("drain", drainHandler);
|
|
|
|
stream.removeListener("error", errorHandler);
|
|
|
|
stream.removeListener("finish", finishHandler);
|
|
|
|
};
|
|
|
|
|
|
|
|
stream.on("close", closeHandler);
|
|
|
|
stream.on("drain", drainHandler);
|
|
|
|
stream.on("error", errorHandler);
|
|
|
|
stream.on("finish", finishHandler);
|
|
|
|
}
|
|
|
|
})
|
|
|
|
};
|
|
|
|
|
|
|
|
const end = () => {
|
|
|
|
|
|
|
|
return new Promise((resolve, reject) => {
|
|
|
|
if (_errored) {
|
|
|
|
const err = _errored;
|
|
|
|
_errored = undefined;
|
|
|
|
return reject(err);
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!stream.writable || stream.closed || stream.destroyed) {
|
|
|
|
return resolve();
|
|
|
|
}
|
|
|
|
|
|
|
|
const finishHandler = () => {
|
|
|
|
removeListeners();
|
|
|
|
resolve();
|
|
|
|
};
|
|
|
|
|
|
|
|
const errorHandler = (err) => {
|
|
|
|
_errored = undefined;
|
|
|
|
removeListeners();
|
|
|
|
reject(err);
|
|
|
|
};
|
|
|
|
|
|
|
|
const removeListeners = () => {
|
|
|
|
stream.removeListener("error", errorHandler);
|
|
|
|
stream.removeListener("finish", finishHandler);
|
|
|
|
};
|
|
|
|
|
|
|
|
stream.on("finish", finishHandler);
|
|
|
|
stream.on("error", errorHandler);
|
|
|
|
|
|
|
|
stream.end();
|
|
|
|
})
|
|
|
|
};
|
|
|
|
|
|
|
|
return {write, end};
|
|
|
|
};
|
|
|
|
|
2019-12-30 19:08:50 +01:00
|
|
|
const applyToShard = async (hierarchy, store, indexDir,
|
2019-09-10 10:49:22 +02:00
|
|
|
indexNode, indexShardKey, recordsToWrite, keysToRemove) => {
|
|
|
|
const createIfNotExists = recordsToWrite.length > 0;
|
2019-12-30 19:08:50 +01:00
|
|
|
const writer = await getWriter(hierarchy, store, indexDir, indexShardKey, indexNode, createIfNotExists);
|
2019-09-10 10:49:22 +02:00
|
|
|
if (writer === SHARD_DELETED) return;
|
|
|
|
|
|
|
|
await writer.updateIndex(recordsToWrite, keysToRemove);
|
|
|
|
await swapTempFileIn(store, indexShardKey);
|
|
|
|
};
|
|
|
|
|
|
|
|
const SHARD_DELETED = 'SHARD_DELETED';
|
2019-12-30 19:08:50 +01:00
|
|
|
const getWriter = async (hierarchy, store, indexDir, indexedDataKey, indexNode, createIfNotExists) => {
|
2019-09-10 10:49:22 +02:00
|
|
|
let readableStream = null;
|
|
|
|
|
|
|
|
if (isShardedIndex(indexNode)) {
|
2019-12-30 19:08:50 +01:00
|
|
|
await ensureShardNameIsInShardMap(store, indexDir, indexedDataKey);
|
2019-09-10 10:49:22 +02:00
|
|
|
if(!await store.exists(indexedDataKey)) {
|
2019-12-30 19:08:50 +01:00
|
|
|
if (await store.exists(getParentKey(indexedDataKey))) {
|
|
|
|
await store.createFile(indexedDataKey, "");
|
|
|
|
} else {
|
|
|
|
return SHARD_DELETED;
|
|
|
|
}
|
2019-09-10 10:49:22 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
try {
|
|
|
|
|
|
|
|
readableStream = promiseReadableStream(
|
|
|
|
await store.readableFileStream(indexedDataKey)
|
|
|
|
);
|
|
|
|
|
|
|
|
} catch (e) {
|
|
|
|
|
|
|
|
if (await store.exists(indexedDataKey)) {
|
|
|
|
throw e;
|
|
|
|
} else {
|
|
|
|
if (createIfNotExists) {
|
2019-12-30 19:08:50 +01:00
|
|
|
if(await store.exists(getParentKey(indexedDataKey))) {
|
|
|
|
await store.createFile(indexedDataKey, '');
|
|
|
|
} else {
|
|
|
|
return SHARD_DELETED;
|
|
|
|
}
|
2019-09-10 10:49:22 +02:00
|
|
|
} else {
|
|
|
|
return SHARD_DELETED;
|
|
|
|
}
|
|
|
|
|
|
|
|
readableStream = promiseReadableStream(
|
|
|
|
await store.readableFileStream(indexedDataKey)
|
|
|
|
);
|
|
|
|
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
const writableStream = promiseWriteableStream(
|
|
|
|
await store.writableFileStream(indexedDataKey + ".temp")
|
|
|
|
);
|
|
|
|
|
|
|
|
return getIndexWriter(
|
|
|
|
hierarchy, indexNode,
|
|
|
|
readableStream, writableStream
|
|
|
|
);
|
|
|
|
};
|
|
|
|
|
|
|
|
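// Index shards are written to `<shard>.temp` and then swapped in over the original
// file. The single retry guards against the rename failing for a transient reason
// after the original file has been deleted.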
const swapTempFileIn = async (store, indexedDataKey, isRetry = false) => {
|
|
|
|
const tempFile = `${indexedDataKey}.temp`;
|
|
|
|
try {
|
|
|
|
await store.deleteFile(indexedDataKey);
|
|
|
|
} catch (e) {
|
|
|
|
    // ignore failure, in case it has not been created yet
|
2019-12-30 19:08:50 +01:00
|
|
|
|
|
|
|
// if parent folder does not exist, assume that this index
|
|
|
|
// should not be there
|
|
|
|
if(!await store.exists(getParentKey(indexedDataKey))) {
|
|
|
|
return;
|
|
|
|
}
|
2019-09-10 10:49:22 +02:00
|
|
|
}
|
|
|
|
try {
|
|
|
|
await store.renameFile(tempFile, indexedDataKey);
|
|
|
|
} catch (e) {
|
|
|
|
// retrying in case delete failure was for some other reason
|
|
|
|
if (!isRetry) {
|
|
|
|
await swapTempFileIn(store, indexedDataKey, true);
|
|
|
|
} else {
|
|
|
|
throw new Error("Failed to swap in index filed: " + e.message);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const executeTransactions = app => async (transactions) => {
|
|
|
|
const recordsByShard = mappedRecordsByIndexShard(app.hierarchy, transactions);
for (const shard of keys(recordsByShard)) {
await applyToShard(
|
|
|
|
app.hierarchy, app.datastore,
recordsByShard[shard].indexDir,
recordsByShard[shard].indexNode,
|
|
|
|
shard,
|
|
|
|
recordsByShard[shard].writes,
|
|
|
|
recordsByShard[shard].removes,
|
|
|
|
);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
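// Groups all index writes and removes produced by a batch of transactions by the
// index shard they touch, so each shard file is opened and rewritten only once.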
const mappedRecordsByIndexShard = (hierarchy, transactions) => {
|
|
|
|
const updates = getUpdateTransactionsByShard(
|
|
|
|
hierarchy, transactions,
|
|
|
|
);
|
|
|
|
|
|
|
|
const created = getCreateTransactionsByShard(
|
|
|
|
hierarchy, transactions,
|
|
|
|
);
|
|
|
|
const deletes = getDeleteTransactionsByShard(
|
|
|
|
hierarchy, transactions,
|
|
|
|
);
|
|
|
|
|
|
|
|
const indexBuild = getBuildIndexTransactionsByShard(
|
|
|
|
hierarchy,
|
|
|
|
transactions,
|
|
|
|
);
|
|
|
|
|
|
|
|
const toRemove = [
|
|
|
|
...deletes,
|
|
|
|
...updates.toRemove,
|
|
|
|
];
|
|
|
|
|
|
|
|
const toWrite = [
|
|
|
|
...created,
|
|
|
|
...updates.toWrite,
|
|
|
|
...indexBuild,
|
|
|
|
];
|
|
|
|
|
|
|
|
const transByShard = {};
|
|
|
|
|
|
|
|
const initialiseShard = (t) => {
|
|
|
|
if (isUndefined(transByShard[t.indexShardKey])) {
|
|
|
|
transByShard[t.indexShardKey] = {
|
|
|
|
writes: [],
|
|
|
|
removes: [],
|
2019-12-30 19:08:50 +01:00
|
|
|
indexDir: t.indexDir,
|
|
|
|
indexNodeKey: t.indexNode.nodeKey(),
|
2019-09-10 10:49:22 +02:00
|
|
|
indexNode: t.indexNode,
|
|
|
|
};
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
for (const trans of toWrite) {
|
|
|
|
initialiseShard(trans);
|
|
|
|
transByShard[trans.indexShardKey].writes.push(
|
|
|
|
trans.mappedRecord.result,
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
for (const trans of toRemove) {
|
|
|
|
initialiseShard(trans);
|
|
|
|
transByShard[trans.indexShardKey].removes.push(
|
|
|
|
trans.mappedRecord.result.key,
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
return transByShard;
|
|
|
|
};
|
|
|
|
|
|
|
|
const getUpdateTransactionsByShard = (hierarchy, transactions) => {
|
|
|
|
const updateTransactions = $(transactions, [filter(isUpdate)]);
|
|
|
|
|
|
|
|
const evaluateIndex = (record, indexNodeAndPath) => {
|
|
|
|
const mappedRecord = evaluate(record)(indexNodeAndPath.indexNode);
|
|
|
|
return ({
|
|
|
|
mappedRecord,
|
|
|
|
indexNode: indexNodeAndPath.indexNode,
|
2019-12-30 19:08:50 +01:00
|
|
|
indexDir: indexNodeAndPath.indexDir,
|
2019-09-10 10:49:22 +02:00
|
|
|
indexShardKey: getIndexedDataKey(
|
|
|
|
indexNodeAndPath.indexNode,
|
2019-12-30 19:08:50 +01:00
|
|
|
indexNodeAndPath.indexDir,
|
2019-09-10 10:49:22 +02:00
|
|
|
mappedRecord.result,
|
|
|
|
),
|
|
|
|
});
|
|
|
|
};
|
|
|
|
|
|
|
|
const getIndexNodesToApply = indexFilter => (t, indexes) => $(indexes, [
|
|
|
|
map(n => ({
|
|
|
|
old: evaluateIndex(t.oldRecord, n),
|
|
|
|
new: evaluateIndex(t.record, n),
|
|
|
|
})),
|
|
|
|
filter(indexFilter),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const toRemoveFilter = (n, isUnreferenced) => n.old.mappedRecord.passedFilter === true
|
|
|
|
&& (n.new.mappedRecord.passedFilter === false
|
|
|
|
|| isUnreferenced);
|
|
|
|
|
|
|
|
const toAddFilter = (n, isNewlyReferenced) => (n.old.mappedRecord.passedFilter === false
|
|
|
|
|| isNewlyReferenced)
|
|
|
|
&& n.new.mappedRecord.passedFilter === true;
|
|
|
|
|
|
|
|
const toUpdateFilter = n => n.new.mappedRecord.passedFilter === true
|
|
|
|
&& n.old.mappedRecord.passedFilter === true
|
|
|
|
&& !isEqual(n.old.mappedRecord.result,
|
|
|
|
n.new.mappedRecord.result);
|
|
|
|
|
|
|
|
const toRemove = [];
|
|
|
|
const toWrite = [];
|
|
|
|
|
|
|
|
for (const t of updateTransactions) {
|
|
|
|
const ancestorIdxs = getRelevantAncestorIndexes(
|
|
|
|
hierarchy, t.record,
|
|
|
|
);
|
|
|
|
|
|
|
|
const referenceChanges = diffReverseRefForUpdate(
|
|
|
|
hierarchy, t.oldRecord, t.record,
|
|
|
|
);
|
|
|
|
|
|
|
|
// old records to remove (filtered out)
|
|
|
|
const filteredOut_toRemove = union$1(
|
|
|
|
getIndexNodesToApply(toRemoveFilter)(t, ancestorIdxs),
|
|
|
|
// still referenced - check filter
|
|
|
|
getIndexNodesToApply(toRemoveFilter)(t, referenceChanges.notChanged),
|
|
|
|
// un referenced - remove if in there already
|
|
|
|
getIndexNodesToApply(n => toRemoveFilter(n, true))(t, referenceChanges.unReferenced),
|
|
|
|
);
|
|
|
|
|
|
|
|
// new records to add (filtered in)
|
|
|
|
const filteredIn_toAdd = union$1(
|
|
|
|
getIndexNodesToApply(toAddFilter)(t, ancestorIdxs),
|
|
|
|
// newly referenced - check filter
|
|
|
|
getIndexNodesToApply(n => toAddFilter(n, true))(t, referenceChanges.newlyReferenced),
|
|
|
|
// reference unchanged - rerun filter in case something else changed
|
|
|
|
getIndexNodesToApply(toAddFilter)(t, referenceChanges.notChanged),
|
|
|
|
);
|
|
|
|
|
|
|
|
const changed = union$1(
|
|
|
|
getIndexNodesToApply(toUpdateFilter)(t, ancestorIdxs),
|
|
|
|
// still referenced - recheck filter
|
|
|
|
getIndexNodesToApply(toUpdateFilter)(t, referenceChanges.notChanged),
|
|
|
|
);
|
|
|
|
|
|
|
|
const shardKeyChanged = $(changed, [
|
|
|
|
filter(c => c.old.indexShardKey !== c.new.indexShardKey),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const changedInSameShard = $(shardKeyChanged, [
|
|
|
|
difference(changed),
|
|
|
|
]);
|
|
|
|
|
|
|
|
for (const res of shardKeyChanged) {
|
|
|
|
pull(res)(changed);
|
|
|
|
filteredOut_toRemove.push(res);
|
|
|
|
filteredIn_toAdd.push(res);
|
|
|
|
}
|
|
|
|
|
|
|
|
toRemove.push(
|
|
|
|
$(filteredOut_toRemove, [
|
|
|
|
map(i => i.old),
|
|
|
|
]),
|
|
|
|
);
|
|
|
|
|
|
|
|
toWrite.push(
|
|
|
|
$(filteredIn_toAdd, [
|
|
|
|
map(i => i.new),
|
|
|
|
]),
|
|
|
|
);
|
|
|
|
|
|
|
|
toWrite.push(
|
|
|
|
$(changedInSameShard, [
|
|
|
|
map(i => i.new),
|
|
|
|
]),
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
return ({
|
|
|
|
toRemove: flatten(toRemove),
|
|
|
|
toWrite: flatten(toWrite),
|
|
|
|
});
|
|
|
|
};
|
|
|
|
|
|
|
|
const getBuildIndexTransactionsByShard = (hierarchy, transactions) => {
|
|
|
|
const buildTransactions = $(transactions, [filter(isBuildIndex)]);
|
|
|
|
if (!isNonEmptyArray(buildTransactions)) return [];
|
|
|
|
const indexNode = transactions.indexNode;
|
|
|
|
|
2019-12-30 19:08:50 +01:00
|
|
|
const getIndexDirs = (t) => {
|
2019-09-10 10:49:22 +02:00
|
|
|
if (isGlobalIndex(indexNode)) {
|
|
|
|
return [indexNode.nodeKey()];
|
|
|
|
}
|
|
|
|
|
|
|
|
if (isReferenceIndex(indexNode)) {
|
2019-12-22 08:12:21 +01:00
|
|
|
const recordNode = getExactNodeForKey(hierarchy)(t.record.key);
|
2019-09-10 10:49:22 +02:00
|
|
|
const refFields = $(recordNode.fields, [
|
|
|
|
filter(fieldReversesReferenceToIndex(indexNode)),
|
|
|
|
]);
|
2019-12-30 19:08:50 +01:00
|
|
|
const indexDirs = [];
|
2019-09-10 10:49:22 +02:00
|
|
|
for (const refField of refFields) {
|
|
|
|
const refValue = t.record[refField.name];
|
|
|
|
if (isSomething(refValue)
|
|
|
|
&& isNonEmptyString(refValue.key)) {
|
2019-12-30 19:08:50 +01:00
|
|
|
const indexDir = joinKey(
|
|
|
|
getRecordInfo(hierarchy, refValue.key).dir,
|
2019-09-10 10:49:22 +02:00
|
|
|
indexNode.name,
|
|
|
|
);
|
|
|
|
|
2019-12-30 19:08:50 +01:00
|
|
|
if (!includes(indexDir)(indexDirs)) { indexDirs.push(indexDir); }
|
2019-09-10 10:49:22 +02:00
|
|
|
}
|
|
|
|
}
|
2019-12-30 19:08:50 +01:00
|
|
|
return indexDirs;
|
2019-09-10 10:49:22 +02:00
|
|
|
}
|
|
|
|
|
2019-12-30 19:08:50 +01:00
|
|
|
const indexKey = joinKey(
|
2019-09-10 10:49:22 +02:00
|
|
|
getActualKeyOfParent(
|
|
|
|
indexNode.parent().nodeKey(),
|
|
|
|
t.record.key,
|
|
|
|
),
|
|
|
|
indexNode.name,
|
2019-12-30 19:08:50 +01:00
|
|
|
);
|
|
|
|
|
|
|
|
return [getIndexDir(hierarchy, indexKey)];
|
2019-09-10 10:49:22 +02:00
|
|
|
};
|
|
|
|
|
|
|
|
return $(buildTransactions, [
|
|
|
|
map((t) => {
|
|
|
|
const mappedRecord = evaluate(t.record)(indexNode);
|
|
|
|
if (!mappedRecord.passedFilter) return null;
|
2019-12-30 19:08:50 +01:00
|
|
|
const indexDirs = getIndexDirs(t);
|
|
|
|
return $(indexDirs, [
|
|
|
|
map(indexDir => ({
|
2019-09-10 10:49:22 +02:00
|
|
|
mappedRecord,
|
|
|
|
indexNode,
|
2019-12-30 19:08:50 +01:00
|
|
|
indexDir,
|
2019-09-10 10:49:22 +02:00
|
|
|
indexShardKey: getIndexedDataKey(
|
|
|
|
indexNode,
|
2019-12-30 19:08:50 +01:00
|
|
|
indexDir,
|
2019-09-10 10:49:22 +02:00
|
|
|
mappedRecord.result,
|
|
|
|
),
|
|
|
|
})),
|
|
|
|
]);
|
|
|
|
}),
|
|
|
|
flatten,
|
|
|
|
filter(isSomething),
|
|
|
|
]);
|
|
|
|
};
|
|
|
|
|
|
|
|
const get_Create_Delete_TransactionsByShard = pred => (hierarchy, transactions) => {
|
|
|
|
const createTransactions = $(transactions, [filter(pred)]);
|
|
|
|
|
|
|
|
const getIndexNodesToApply = (t, indexes) => $(indexes, [
|
|
|
|
map((n) => {
|
|
|
|
const mappedRecord = evaluate(t.record)(n.indexNode);
|
|
|
|
return ({
|
|
|
|
mappedRecord,
|
|
|
|
indexNode: n.indexNode,
|
2019-12-30 19:08:50 +01:00
|
|
|
indexDir: n.indexDir,
|
2019-09-10 10:49:22 +02:00
|
|
|
indexShardKey: getIndexedDataKey(
|
|
|
|
n.indexNode,
|
2019-12-30 19:08:50 +01:00
|
|
|
n.indexDir,
|
2019-09-10 10:49:22 +02:00
|
|
|
mappedRecord.result,
|
|
|
|
),
|
|
|
|
});
|
|
|
|
}),
|
|
|
|
filter(n => n.mappedRecord.passedFilter),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const allToApply = [];
|
|
|
|
|
|
|
|
for (const t of createTransactions) {
|
|
|
|
const ancestorIdxs = getRelevantAncestorIndexes(hierarchy, t.record);
|
|
|
|
const reverseRef = getRelevantReverseReferenceIndexes(hierarchy, t.record);
|
|
|
|
|
|
|
|
allToApply.push(
|
|
|
|
getIndexNodesToApply(t, ancestorIdxs),
|
|
|
|
);
|
|
|
|
allToApply.push(
|
|
|
|
getIndexNodesToApply(t, reverseRef),
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
return flatten(allToApply);
|
|
|
|
};
|
|
|
|
|
|
|
|
const getDeleteTransactionsByShard = get_Create_Delete_TransactionsByShard(isDelete);
|
|
|
|
|
|
|
|
const getCreateTransactionsByShard = get_Create_Delete_TransactionsByShard(isCreate);
|
|
|
|
|
|
|
|
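// Compares the reverse-reference indexes relevant to the old and new versions of a
// record, classifying each as unreferenced (remove from index), newly referenced
// (add to index) or unchanged (re-evaluate the filter only).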
const diffReverseRefForUpdate = (appHierarchy, oldRecord, newRecord) => {
|
|
|
|
const oldIndexes = getRelevantReverseReferenceIndexes(
|
|
|
|
appHierarchy, oldRecord,
|
|
|
|
);
|
|
|
|
const newIndexes = getRelevantReverseReferenceIndexes(
|
|
|
|
appHierarchy, newRecord,
|
|
|
|
);
|
|
|
|
|
|
|
|
const unReferenced = differenceBy(
|
2019-12-30 19:08:50 +01:00
|
|
|
i => i.indexDir,
|
2019-09-10 10:49:22 +02:00
|
|
|
oldIndexes, newIndexes,
|
|
|
|
);
|
|
|
|
|
|
|
|
const newlyReferenced = differenceBy(
|
2019-12-30 19:08:50 +01:00
|
|
|
i => i.indexDir,
|
2019-09-10 10:49:22 +02:00
|
|
|
newIndexes, oldIndexes,
|
|
|
|
);
|
|
|
|
|
|
|
|
const notChanged = intersectionBy(
|
2019-12-30 19:08:50 +01:00
|
|
|
i => i.indexDir,
|
2019-09-10 10:49:22 +02:00
|
|
|
newIndexes, oldIndexes,
|
|
|
|
);
|
|
|
|
|
|
|
|
return {
|
|
|
|
unReferenced,
|
|
|
|
newlyReferenced,
|
|
|
|
notChanged,
|
|
|
|
};
|
|
|
|
};
|
|
|
|
|
|
|
|
const cleanup = async (app) => {
|
|
|
|
const lock = await getTransactionLock(app);
|
|
|
|
if (isNolock(lock)) return;
|
|
|
|
|
|
|
|
try {
|
|
|
|
const transactions = await retrieve(app);
|
|
|
|
if (transactions.length > 0) {
|
|
|
|
await executeTransactions(app)(transactions);
|
|
|
|
|
|
|
|
const folder = transactions.folderKey
|
|
|
|
? transactions.folderKey
|
|
|
|
: TRANSACTIONS_FOLDER;
|
|
|
|
|
|
|
|
const deleteFiles = $(transactions, [
|
|
|
|
map(t => joinKey(
|
|
|
|
folder,
|
|
|
|
getTransactionId(
|
|
|
|
t.recordId, t.transactionType,
|
|
|
|
t.uniqueId,
|
|
|
|
),
|
|
|
|
)),
|
|
|
|
map(app.datastore.deleteFile),
|
|
|
|
]);
|
|
|
|
|
|
|
|
await Promise.all(deleteFiles);
|
|
|
|
}
|
|
|
|
} finally {
|
|
|
|
await releaseLock(app, lock);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const getTransactionLock = async app => await getLock(
|
|
|
|
app, LOCK_FILE_KEY,
|
|
|
|
timeoutMilliseconds, maxLockRetries,
|
|
|
|
);
|
|
|
|
|
|
|
|
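// One-off setup of a fresh datastore: writes the application definition, root
// collections and indexes, the transactions folder, auth files and any root
// single-record nodes.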
const initialiseData = async (datastore, applicationDefinition, accessLevels) => {
|
|
|
|
await datastore.createFolder(configFolder);
|
|
|
|
await datastore.createJson(appDefinitionFile, applicationDefinition);
|
|
|
|
|
|
|
|
await initialiseRootCollections(datastore, applicationDefinition.hierarchy);
|
|
|
|
await initialiseRootIndexes(datastore, applicationDefinition.hierarchy);
|
|
|
|
|
|
|
|
await datastore.createFolder(TRANSACTIONS_FOLDER);
|
|
|
|
|
|
|
|
await datastore.createFolder(AUTH_FOLDER);
|
|
|
|
|
|
|
|
await datastore.createJson(USERS_LIST_FILE, []);
|
|
|
|
|
|
|
|
await datastore.createJson(
|
|
|
|
ACCESS_LEVELS_FILE,
|
|
|
|
accessLevels ? accessLevels : { version: 0, levels: [] });
await initialiseRootSingleRecords(datastore, applicationDefinition.hierarchy);
};
|
|
|
|
|
|
|
|
const initialiseRootIndexes = async (datastore, hierarchy) => {
|
|
|
|
const flathierarchy = getFlattenedHierarchy(hierarchy);
|
|
|
|
const globalIndexes = $(flathierarchy, [
|
|
|
|
filter(isGlobalIndex),
|
|
|
|
]);
|
|
|
|
|
|
|
|
for (const index of globalIndexes) {
|
2019-12-30 19:08:50 +01:00
|
|
|
if (!await datastore.exists(index.nodeKey())) {
|
|
|
|
await initialiseIndex(datastore, '', index);
|
|
|
|
}
|
2019-09-10 10:49:22 +02:00
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2019-11-05 14:30:36 +01:00
|
|
|
const initialiseRootSingleRecords = async (datastore, hierarchy) => {
|
|
|
|
const app = {
|
|
|
|
publish:()=>{},
|
|
|
|
cleanupTransactions: () => {},
|
|
|
|
datastore, hierarchy
|
|
|
|
};
|
|
|
|
|
|
|
|
const flathierarchy = getFlattenedHierarchy(hierarchy);
|
2019-09-10 10:49:22 +02:00
|
|
|
const singleRecords = $(flathierarchy, [
|
|
|
|
filter(isSingleRecord),
|
|
|
|
]);
|
|
|
|
|
2019-11-05 14:30:36 +01:00
|
|
|
for (let record of singleRecords) {
|
2019-12-30 19:08:50 +01:00
|
|
|
await datastore.createFolder(record.nodeKey());
|
2019-11-05 14:30:36 +01:00
|
|
|
const result = _getNew(record, "");
|
|
|
|
await _save(app,result);
|
|
|
|
}
|
2019-09-10 10:49:22 +02:00
|
|
|
};
|
|
|
|
|
|
|
|
const getDatabaseManager = databaseManager => ({
|
|
|
|
createEmptyMasterDb: createEmptyMasterDb(databaseManager),
|
|
|
|
createEmptyInstanceDb: createEmptyInstanceDb(databaseManager),
|
|
|
|
getInstanceDbRootConfig: databaseManager.getInstanceDbRootConfig,
|
|
|
|
masterDatastoreConfig: getMasterDatastoreConfig(databaseManager),
|
|
|
|
getInstanceDatastoreConfig: getInstanceDatastoreConfig(databaseManager),
|
|
|
|
});
|
|
|
|
|
|
|
|
const getMasterDatastoreConfig = databaseManager => databaseManager.getDatastoreConfig('master');
|
|
|
|
|
|
|
|
const getInstanceDatastoreConfig = databaseManager => (applicationId, instanceId) => databaseManager.getDatastoreConfig(
|
|
|
|
applicationId, instanceId,
|
|
|
|
);
|
|
|
|
|
|
|
|
const createEmptyMasterDb = databaseManager => async () => await databaseManager.createEmptyDb('master');
|
|
|
|
|
|
|
|
const createEmptyInstanceDb = databaseManager => async (applicationId, instanceId) => {
|
|
|
|
if (isNothing(applicationId)) { throw new Error('CreateDb: application id not supplied'); }
|
|
|
|
if (isNothing(instanceId)) { throw new Error('CreateDb: instance id not supplied'); }
|
|
|
|
|
|
|
|
return await databaseManager.createEmptyDb(
|
|
|
|
applicationId,
|
|
|
|
instanceId,
|
|
|
|
);
|
|
|
|
};
|
|
|
|
|
|
|
|
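// Entry point: wires a datastore, the event aggregator and all sub-APIs together.
// Illustrative usage sketch (assumes `memoryDatastore` implements the datastore
// interface and that initialiseData(memoryDatastore, appDefinition) has already run):
//
//   const apis = await getAppApis(memoryDatastore);
//   apis.withFullAccess();
//   const user = apis.authApi.getNewUser();
//   user.name = 'bob';
//   user.accessLevels = ['admin'];
//   await apis.authApi.createUser(user, 'a-Strong-Password-1!');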
const getAppApis = async (store, behaviourSources = null,
|
|
|
|
cleanupTransactions = null,
|
|
|
|
getEpochTime = null,
|
|
|
|
crypto = null,
|
|
|
|
appDefinition = null) => {
|
|
|
|
|
|
|
|
store = setupDatastore(store);
|
|
|
|
|
|
|
|
if(!appDefinition)
|
|
|
|
appDefinition = await getApplicationDefinition(store)();
|
|
|
|
|
|
|
|
if(!behaviourSources)
|
|
|
|
behaviourSources = await getBehaviourSources(store);
|
|
|
|
|
|
|
|
const eventAggregator = createEventAggregator();
|
|
|
|
|
|
|
|
const app = {
|
|
|
|
datastore:store,
|
|
|
|
crypto,
|
|
|
|
publish:eventAggregator.publish,
|
|
|
|
hierarchy:appDefinition.hierarchy,
|
|
|
|
actions:appDefinition.actions
|
|
|
|
};
|
|
|
|
|
|
|
|
const templateApi = getTemplateApi(app);
|
|
|
|
|
|
|
|
app.cleanupTransactions = isSomething(cleanupTransactions)
|
|
|
|
? cleanupTransactions
|
|
|
|
: async () => await cleanup(app);
|
|
|
|
|
|
|
|
app.getEpochTime = isSomething(getEpochTime)
|
|
|
|
? getEpochTime
|
|
|
|
: async () => (new Date()).getTime();
|
|
|
|
|
|
|
|
const recordApi = getRecordApi(app);
|
|
|
|
const collectionApi = getCollectionApi(app);
|
|
|
|
const indexApi = getIndexApi(app);
|
|
|
|
const authApi = getAuthApi(app);
|
|
|
|
const actionsApi = getActionsApi(app);
|
|
|
|
|
|
|
|
const authenticateAs = async (username, password) => {
|
|
|
|
app.user = await authApi.authenticate(username, password);
|
|
|
|
};
|
|
|
|
|
2019-11-05 14:30:36 +01:00
|
|
|
const withFullAccess = () =>
|
|
|
|
userWithFullAccess(app);
|
2019-09-10 10:49:22 +02:00
|
|
|
|
|
|
|
const asUser = (user) => {
|
|
|
|
app.user = user;
|
2019-11-05 14:30:36 +01:00
|
|
|
};
|
2019-09-10 10:49:22 +02:00
|
|
|
|
|
|
|
let apis = {
|
|
|
|
recordApi,
|
|
|
|
templateApi,
|
|
|
|
collectionApi,
|
|
|
|
indexApi,
|
|
|
|
authApi,
|
|
|
|
actionsApi,
|
|
|
|
subscribe: eventAggregator.subscribe,
|
|
|
|
authenticateAs,
|
|
|
|
withFullAccess,
|
|
|
|
asUser
|
|
|
|
};
|
|
|
|
|
|
|
|
apis.actions = initialiseActions(
|
|
|
|
eventAggregator.subscribe,
|
|
|
|
behaviourSources,
|
|
|
|
appDefinition.actions,
|
|
|
|
appDefinition.triggers,
|
|
|
|
apis);
|
|
|
|
|
|
|
|
|
|
|
|
return apis;
|
|
|
|
};
|
|
|
|
|
2019-11-05 14:30:36 +01:00
|
|
|
const userWithFullAccess = (app) => {
|
|
|
|
app.user = {
|
|
|
|
name: "app",
|
|
|
|
permissions : generateFullPermissions(app),
|
|
|
|
isUser:false,
|
|
|
|
temp:false
|
|
|
|
};
|
|
|
|
return app.user;
|
|
|
|
};
|
|
|
|
|
2019-09-10 10:49:22 +02:00
|
|
|
export default getAppApis;
export { index as common, events, eventsList, getActionsApi, getAppApis, getAuthApi, getCollectionApi, getDatabaseManager, getIndexApi, getRecordApi, getTemplateApi, hierarchy, initialiseData, setupDatastore, userWithFullAccess };