Merge branch 'master' of https://github.com/Budibase/budibase

commit 1e5749187f
@@ -34,7 +34,7 @@ const lodash_fp_exports = ["union", "reduce", "isUndefined", "cloneDeep", "split
 "take", "first", "intersection", "mapValues", "isNull", "has", "isInteger", "isNumber", "isString", "isBoolean", "isDate", "isArray", "isObject", "clone", "values", "keyBy", "isNaN",
 "keys", "orderBy", "concat", "reverse", "difference", "merge", "flatten", "each", "pull", "join", "defaultCase", "uniqBy", "every", "uniqWith", "isFunction", "groupBy",
 "differenceBy", "intersectionBy", "isEqual", "max", "sortBy", "assign", "uniq", "trimChars", "trimCharsStart", "isObjectLike", "flattenDeep", "indexOf", "isPlainObject",
-"toNumber"];
+"toNumber", "takeRight"];
 
 const lodash_exports = ["flow", "join", "replace", "trim", "dropRight", "takeRight", "head", "reduce",
 "tail", "startsWith", "findIndex", "merge",
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,23 +1,27 @@
 import { retry } from '../common/index';
 import { NotFoundError } from '../common/errors';
 
-const createJson = originalCreateFile => async (key, obj, retries = 5, delay = 500) => await retry(originalCreateFile, retries, delay, key, JSON.stringify(obj));
+const createJson = originalCreateFile => async (key, obj, retries = 2, delay = 100) => await retry(originalCreateFile, retries, delay, key, JSON.stringify(obj));
 
-const createNewFile = originalCreateFile => async (path, content, retries = 5, delay = 500) => await retry(originalCreateFile, retries, delay, path, content);
+const createNewFile = originalCreateFile => async (path, content, retries = 2, delay = 100) => await retry(originalCreateFile, retries, delay, path, content);
 
-const loadJson = datastore => async (key, retries = 5, delay = 500) => {
+const loadJson = datastore => async (key, retries = 3, delay = 100) => {
   try {
     return await retry(JSON.parse, retries, delay, await datastore.loadFile(key));
   } catch (err) {
-    throw new NotFoundError(err.message);
+    const newErr = new NotFoundError(err.message);
+    newErr.stack = err.stack;
+    throw(newErr);
   }
 }
 
-const updateJson = datastore => async (key, obj, retries = 5, delay = 500) => {
+const updateJson = datastore => async (key, obj, retries = 3, delay = 100) => {
   try {
     return await retry(datastore.updateFile, retries, delay, key, JSON.stringify(obj));
   } catch (err) {
-    throw new NotFoundError(err.message);
+    const newErr = new NotFoundError(err.message);
+    newErr.stack = err.stack;
+    throw(newErr);
   }
 }
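Reviewer note: the tighter retry defaults above lean on the retry helper in src/common, whose signature, retry(fn, retries, delay, ...args), and the pause helper both appear later in this diff. A minimal sketch of the semantics these call sites assume (not the literal implementation):

const pause = async duration => new Promise(res => setTimeout(res, duration));

// sketch: try fn, and on failure wait `delay` ms and recurse until attempts run out
const retry = async (fn, retries, delay, ...args) => {
  try {
    return await fn(...args);
  } catch (err) {
    if (retries > 1) {
      await pause(delay);
      return await retry(fn, retries - 1, delay, ...args);
    }
    throw err;
  }
};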
@@ -35,7 +35,9 @@ const initialiseRootIndexes = async (datastore, hierarchy) => {
   ]);
 
   for (const index of globalIndexes) {
-    if (!await datastore.exists(index.nodeKey())) { await initialiseIndex(datastore, '', index); }
+    if (!await datastore.exists(index.nodeKey())) {
+      await initialiseIndex(datastore, '', index);
+    }
   }
 };
 
@@ -52,6 +54,7 @@ const initialiseRootSingleRecords = async (datastore, hierarchy) => {
   ]);
 
   for (let record of singleRecords) {
     await datastore.createFolder(record.nodeKey());
     const result = _getNew(record, "");
     await _save(app,result);
   }
 
@@ -1,12 +1,11 @@
 import { includes } from 'lodash/fp';
 import { getNodeForCollectionPath } from '../templateApi/hierarchy';
 import {
   safeKey, apiWrapper,
   events, joinKey,
 } from '../common';
 import { _deleteRecord } from '../recordApi/delete';
-import { getAllIdsIterator, getAllIdsShardKey } from '../indexing/allIds';
+import { getAllIdsIterator } from '../indexing/allIds';
 import { permission } from '../authApi/permissions';
+import { getCollectionDir } from "../recordApi/recordInfo";
 
 export const deleteCollection = (app, disableCleanup = false) => async key => apiWrapper(
   app,

@@ -16,49 +15,24 @@ export const deleteCollection = (app, disableCleanup = false) => async key => ap
   _deleteCollection, app, key, disableCleanup,
 );
 
 /*
   const recordNode = getCollectionNode(app.hierarchy, key);
 */
 
 export const _deleteCollection = async (app, key, disableCleanup) => {
   key = safeKey(key);
-  const node = getNodeForCollectionPath(app.hierarchy)(key);
-
+  const collectionDir = getCollectionDir(app.hierarchy, key);
   await deleteRecords(app, key);
-  await deleteAllIdsFolders(app, node, key);
-  await deleteCollectionFolder(app, key);
+  await deleteCollectionFolder(app, collectionDir);
   if (!disableCleanup) { await app.cleanupTransactions(); }
 };
 
-const deleteCollectionFolder = async (app, key) => await app.datastore.deleteFolder(key);
-
-
-const deleteAllIdsFolders = async (app, node, key) => {
-  await app.datastore.deleteFolder(
-    joinKey(
-      key, 'allids',
-      node.nodeId,
-    ),
-  );
-
-  await app.datastore.deleteFolder(
-    joinKey(key, 'allids'),
-  );
-};
+const deleteCollectionFolder = async (app, dir) =>
+  await app.datastore.deleteFolder(dir);
 
 const deleteRecords = async (app, key) => {
-  const deletedAllIdsShards = [];
-  const deleteAllIdsShard = async (recordId) => {
-    const shardKey = getAllIdsShardKey(
-      app.hierarchy, key, recordId,
-    );
-
-    if (includes(shardKey)(deletedAllIdsShards)) {
-      return;
-    }
-
-    deletedAllIdsShards.push(shardKey);
-
-    await app.datastore.deleteFile(shardKey);
-  };
-
-
   const iterate = await getAllIdsIterator(app)(key);
 
   let ids = await iterate();

@@ -70,7 +44,6 @@ const deleteRecords = async (app, key) => {
       joinKey(key, id),
       true,
     );
-    await deleteAllIdsShard(id);
   }
 }
 
@@ -3,23 +3,13 @@ import {
   getFlattenedHierarchy,
   isCollectionRecord,
   isRoot,
-  getExactNodeForPath,
 } from '../templateApi/hierarchy';
 import { $, allTrue, joinKey } from '../common';
 
-const ensureCollectionIsInitialised = async (datastore, node, parentKey) => {
-  if (!await datastore.exists(parentKey)) {
-    await datastore.createFolder(parentKey);
-    await datastore.createFolder(
-      joinKey(parentKey, 'allids'),
-    );
-    await datastore.createFolder(
-      joinKey(
-        parentKey,
-        'allids',
-        node.nodeId.toString(),
-      ),
-    );
+const ensureCollectionIsInitialised = async (datastore, node, dir) => {
+  if (!await datastore.exists(dir)) {
+    await datastore.createFolder(dir);
+    await datastore.createFolder(joinKey(dir, node.nodeId));
   }
 };
 

@@ -39,14 +29,13 @@ export const initialiseRootCollections = async (datastore, hierarchy) => {
     await ensureCollectionIsInitialised(
       datastore,
       col,
-      col.collectionPathRegx(),
+      col.collectionPathRegx()
     );
   }
 };
 
-export const initialiseChildCollections = async (app, recordKey) => {
-  const childCollectionRecords = $(recordKey, [
-    getExactNodeForPath(app.hierarchy),
+export const initialiseChildCollections = async (app, recordInfo) => {
+  const childCollectionRecords = $(recordInfo.recordNode, [
     n => n.children,
     filter(isCollectionRecord),
   ]);

@@ -55,7 +44,7 @@ export const initialiseChildCollections = async (app, recordKey) => {
     await ensureCollectionIsInitialised(
       app.datastore,
       child,
-      joinKey(recordKey, child.collectionName),
+      recordInfo.child(child.collectionName),
     );
   }
 };
 
@@ -4,13 +4,12 @@ import {
   tail, findIndex, startsWith,
   dropRight, flow, takeRight, trim,
   replace
-
 } from 'lodash';
 import {
   some, reduce, isEmpty, isArray, join,
   isString, isInteger, isDate, toNumber,
   isUndefined, isNaN, isNull, constant,
-  split, includes
+  split, includes, filter
 } from 'lodash/fp';
 import { events, eventsList } from './events';
 import { apiWrapper } from './apiWrapper';

@@ -32,7 +31,13 @@ export const safeKey = key => replace(`${keySep}${trimKeySep(key)}`, `${keySep}$
 export const joinKey = (...strs) => {
   const paramsOrArray = strs.length === 1 & isArray(strs[0])
     ? strs[0] : strs;
-  return safeKey(join(keySep)(paramsOrArray));
+  return $(paramsOrArray, [
+    filter(s => !isUndefined(s)
+      && !isNull(s)
+      && s.toString().length > 0),
+    join(keySep),
+    safeKey
+  ]);
 };
 export const splitKey = $$(trimKeySep, splitByKeySep);
 export const getDirFomKey = $$(splitKey, dropRight, p => joinKey(...p));

@@ -183,6 +188,10 @@ export const toNumberOrNull = s => (isNull(s) ? null
 
 export const isArrayOfString = opts => isArray(opts) && all(isString)(opts);
 
+export const pushAll = (target, items) => {
+  for(let i of items) target.push(i);
+}
+
 export const pause = async duration => new Promise(res => setTimeout(res, duration));
 
 export const retry = async (fn, retries, delay, ...args) => {

@@ -267,4 +276,5 @@ export default {
   insensitiveEquals,
   pause,
   retry,
+  pushAll
 };
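Reviewer note: the reworked joinKey now filters out undefined, null and empty segments before joining, so a missing path part no longer lands in the key as the string "undefined". Assuming keySep is '/', hypothetical calls would behave like:

joinKey('customers', undefined, '1-abc');  // => '/customers/1-abc'
joinKey(['customers', '', '1-abc']);       // => '/customers/1-abc' (array form, via the strs.length === 1 branch)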
@@ -10,12 +10,13 @@
   getShardKeysInRange,
 } from '../indexing/sharding';
 import {
-  getExactNodeForPath, isIndex,
+  getExactNodeForKey, isIndex,
   isShardedIndex,
 } from '../templateApi/hierarchy';
 import { CONTINUE_READING_RECORDS } from '../indexing/serializer';
 import { permission } from '../authApi/permissions';
 import { BadRequestError } from '../common/errors';
+import { getIndexDir } from "./getIndexDir";
 
 export const aggregates = app => async (indexKey, rangeStartParams = null, rangeEndParams = null) => apiWrapper(
   app,

@@ -27,13 +28,14 @@ export const aggregates = app => async (indexKey, rangeStartParams = null, range
 
 const _aggregates = async (app, indexKey, rangeStartParams, rangeEndParams) => {
   indexKey = safeKey(indexKey);
-  const indexNode = getExactNodeForPath(app.hierarchy)(indexKey);
+  const indexNode = getExactNodeForKey(app.hierarchy)(indexKey);
+  const indexDir = getIndexDir(app.hierarchy, indexKey);
 
   if (!isIndex(indexNode)) { throw new BadRequestError('supplied key is not an index'); }
 
   if (isShardedIndex(indexNode)) {
     const shardKeys = await getShardKeysInRange(
-      app, indexKey, rangeStartParams, rangeEndParams,
+      app, indexNode, indexDir, rangeStartParams, rangeEndParams,
     );
     let aggregateResult = null;
     for (const k of shardKeys) {

@@ -53,7 +55,7 @@ const _aggregates = async (app, indexKey, rangeStartParams, rangeEndParams) => {
     app.hierarchy,
     app.datastore,
     indexNode,
-    getUnshardedIndexDataKey(indexKey),
+    getUnshardedIndexDataKey(indexDir),
   );
 };
 
@@ -1,16 +1,16 @@
 import {
-  find, filter,
+  filter,
   includes, some,
 } from 'lodash/fp';
 import { getAllIdsIterator } from '../indexing/allIds';
 import {
   getFlattenedHierarchy, getRecordNodeById,
-  getCollectionNodeByKeyOrNodeKey, getNode, isIndex,
-  isRecord, isDecendant, getAllowedRecordNodesForIndex,
+  getNode, isIndex,
+  isRecord, getAllowedRecordNodesForIndex,
   fieldReversesReferenceToIndex,
 } from '../templateApi/hierarchy';
 import {
-  joinKey, apiWrapper, events, $, allTrue,
+  joinKey, apiWrapper, events, $
 } from '../common';
 import {
   createBuildIndexFolder,

@@ -82,9 +82,11 @@ const buildReverseReferenceIndex = async (app, indexNode) => {
   }
 };
 
+/*
 const getAllowedParentCollectionNodes = (hierarchy, indexNode) => $(getAllowedRecordNodesForIndex(hierarchy, indexNode), [
   map(n => n.parent()),
 ]);
+*/
 
 const buildHeirarchalIndex = async (app, indexNode) => {
   let recordCount = 0;

@@ -127,10 +129,11 @@ const buildHeirarchalIndex = async (app, indexNode) => {
   return recordCount;
 };
 
-const chooseChildRecordNodeByKey = (collectionNode, recordId) => find(c => recordId.startsWith(c.nodeId))(collectionNode.children);
+// const chooseChildRecordNodeByKey = (collectionNode, recordId) => find(c => recordId.startsWith(c.nodeId))(collectionNode.children);
 
 const recordNodeApplies = indexNode => recordNode => includes(recordNode.nodeId)(indexNode.allowedRecordNodeIds);
 
+/*
 const hasApplicableDecendant = (hierarchy, ancestorNode, indexNode) => $(hierarchy, [
   getFlattenedHierarchy,
   filter(

@@ -141,7 +144,9 @@ const hasApplicableDecendant = (hierarchy, ancestorNode, indexNode) => $(hierarc
     ),
   ),
 ]);
+*/
 
+/*
 const applyAllDecendantRecords = async (app, collection_Key_or_NodeKey,
   indexNode, indexKey, currentIndexedData,
   currentIndexedDataKey, recordCount = 0) => {

@@ -194,5 +199,6 @@ const applyAllDecendantRecords = async (app, collection_Key_or_NodeKey,
 
   return recordCount;
 };
+*/
 
 export default buildIndex;
@@ -4,21 +4,23 @@ import {
 } from '../common';
 import {
   isIndex, isShardedIndex,
-  getExactNodeForPath,
+  getExactNodeForKey,
 } from '../templateApi/hierarchy';
 import {
   getAllShardKeys, getShardMapKey,
   getUnshardedIndexDataKey,
 } from '../indexing/sharding';
+import { getIndexDir } from "./getIndexDir";
 
 export const _deleteIndex = async (app, indexKey, includeFolder) => {
   indexKey = safeKey(indexKey);
-  const indexNode = getExactNodeForPath(app.hierarchy)(indexKey);
+  const indexNode = getExactNodeForKey(app.hierarchy)(indexKey);
+  const indexDir = getIndexDir(app.hierarchy, indexKey);
 
   if (!isIndex(indexNode)) { throw new Error('Supplied key is not an index'); }
 
   if (isShardedIndex(indexNode)) {
-    const shardKeys = await getAllShardKeys(app, indexKey);
+    const shardKeys = await getAllShardKeys(app, indexNode, indexDir);
     for (const k of shardKeys) {
       await tryAwaitOrIgnore(
         app.datastore.deleteFile(k),

@@ -26,20 +28,20 @@ export const _deleteIndex = async (app, indexKey, includeFolder) => {
     }
     tryAwaitOrIgnore(
       await app.datastore.deleteFile(
-        getShardMapKey(indexKey),
+        getShardMapKey(indexDir),
       ),
     );
   } else {
     await tryAwaitOrIgnore(
       app.datastore.deleteFile(
-        getUnshardedIndexDataKey(indexKey),
+        getUnshardedIndexDataKey(indexDir),
       ),
     );
   }
 
   if (includeFolder) {
     tryAwaitOrIgnore(
-      await app.datastore.deleteFolder(indexKey),
+      await app.datastore.deleteFolder(indexDir),
     );
   }
 };
@@ -0,0 +1,20 @@
+import { getRecordInfo } from "../recordApi/recordInfo";
+import {
+  getParentKey, getLastPartInKey
+} from "../templateApi/hierarchy";
+import { keySep } from "../common";
+
+export const getIndexDir = (hierarchy, indexKey) => {
+
+  const parentKey = getParentKey(indexKey);
+
+  if(parentKey === "") return indexKey;
+  if(parentKey === keySep) return indexKey;
+
+  const recordInfo = getRecordInfo(
+    hierarchy,
+    parentKey);
+
+  return recordInfo.child(
+    getLastPartInKey(indexKey));
+}
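Reviewer note: getIndexDir distinguishes global indexes from indexes that hang off a record. With hypothetical keys, and assuming keySep is '/':

// global index: parent is the root, so the key is already the directory
getIndexDir(hierarchy, '/customer_index');
// => '/customer_index'

// index under a record: resolve the record's physical directory first,
// then take the index folder as a child of it
getIndexDir(hierarchy, '/customers/1-abc123/invoice_index');
// => getRecordInfo(hierarchy, '/customers/1-abc123').child('invoice_index')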
@@ -9,10 +9,11 @@ import {
   getShardKeysInRange,
 } from '../indexing/sharding';
 import {
-  getExactNodeForPath, isIndex,
+  getExactNodeForKey, isIndex,
   isShardedIndex,
 } from '../templateApi/hierarchy';
 import { permission } from '../authApi/permissions';
+import { getIndexDir } from "./getIndexDir";
 
 export const listItems = app => async (indexKey, options) => {
   indexKey = safeKey(indexKey);

@@ -33,29 +34,30 @@ const _listItems = async (app, indexKey, options = defaultOptions) => {
     merge(defaultOptions),
   ]);
 
-  const getItems = async key => (isNonEmptyString(searchPhrase)
+  const getItems = async indexedDataKey => (isNonEmptyString(searchPhrase)
     ? await searchIndex(
       app.hierarchy,
       app.datastore,
       indexNode,
-      key,
+      indexedDataKey,
      searchPhrase,
     )
     : await readIndex(
      app.hierarchy,
      app.datastore,
      indexNode,
-      key,
+      indexedDataKey,
    ));
 
   indexKey = safeKey(indexKey);
-  const indexNode = getExactNodeForPath(app.hierarchy)(indexKey);
+  const indexNode = getExactNodeForKey(app.hierarchy)(indexKey);
+  const indexDir = getIndexDir(app.hierarchy, indexKey);
 
   if (!isIndex(indexNode)) { throw new Error('supplied key is not an index'); }
 
   if (isShardedIndex(indexNode)) {
     const shardKeys = await getShardKeysInRange(
-      app, indexKey, rangeStartParams, rangeEndParams,
+      app, indexNode, indexDir, rangeStartParams, rangeEndParams,
     );
     const items = [];
     for (const k of shardKeys) {

@@ -64,6 +66,6 @@ const _listItems = async (app, indexKey, options = defaultOptions) => {
   return flatten(items);
   }
   return await getItems(
-    getUnshardedIndexDataKey(indexKey),
+    getUnshardedIndexDataKey(indexDir),
   );
 };
@@ -1,145 +1,231 @@
 import {
-  join, pull,
-  map, flatten, orderBy,
-  filter, find,
+  flatten, orderBy,
+  filter, isUndefined
 } from 'lodash/fp';
-import {
-  getParentKey,
+import hierarchy, {
   getFlattenedHierarchy,
-  getCollectionNodeByKeyOrNodeKey, getNodeForCollectionPath,
+  getCollectionNodeByKeyOrNodeKey,
   isCollectionRecord, isAncestor,
 } from '../templateApi/hierarchy';
 import { joinKey, safeKey, $ } from '../common';
+import { getCollectionDir } from "../recordApi/recordInfo";
 
-const allIdChars = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_-';
+export const RECORDS_PER_FOLDER = 1000;
+export const allIdChars = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_-';
 
-const allIdsStringsForFactor = (collectionNode) => {
-  const factor = collectionNode.allidsShardFactor;
-  const charRangePerShard = 64 / factor;
-  const allIdStrings = [];
-  let index = 0;
-  let currentIdsShard = '';
-  while (index < 64) {
-    currentIdsShard += allIdChars[index];
-    if ((index + 1) % charRangePerShard === 0) {
-      allIdStrings.push(currentIdsShard);
-      currentIdsShard = '';
-    }
-    index++;
+// this should never be changed - ever
+// - existing databases depend on the order of chars this string
+
+/**
+ * folderStructureArray should return an array like
+ * - [1] = all records fit into one folder
+ * - [2] = all records fite into 2 folders
+ * - [64, 3] = all records fit into 64 * 3 folders
+ * - [64, 64, 10] = all records fit into 64 * 64 * 10 folder
+ * (there are 64 possible chars in allIsChars)
+ */
+export const folderStructureArray = (recordNode) => {
+
+  const totalFolders = Math.ceil(recordNode.estimatedRecordCount / 1000);
+  const folderArray = [];
+  let levelCount = 1;
+  while(64**levelCount < totalFolders) {
+    levelCount += 1;
+    folderArray.push(64);
   }
 
-  return allIdStrings;
-};
+  const parentFactor = (64**folderArray.length);
+  if(parentFactor < totalFolders) {
+    folderArray.push(
+      Math.ceil(totalFolders / parentFactor)
+    );
+  }
 
-export const getAllIdsShardNames = (appHierarchy, collectionKey) => {
-  const collectionRecordNode = getNodeForCollectionPath(appHierarchy)(collectionKey);
-  return $(collectionRecordNode, [
-    c => [c.nodeId],
-    map(i => map(c => _allIdsShardKey(collectionKey, i, c))(allIdsStringsForFactor(collectionRecordNode))),
-    flatten,
-  ]);
-};
+  return folderArray;
 
-const _allIdsShardKey = (collectionKey, childNo, shardKey) => joinKey(
-  collectionKey,
-  'allids',
-  childNo,
-  shardKey,
-);
+  /*
+  const maxRecords = currentFolderPosition === 0
+    ? RECORDS_PER_FOLDER
+    : currentFolderPosition * 64 * RECORDS_PER_FOLDER;
 
-export const getAllIdsShardKey = (appHierarchy, collectionKey, recordId) => {
-  const indexOfFirstDash = recordId.indexOf('-');
+  if(maxRecords < recordNode.estimatedRecordCount) {
+    return folderStructureArray(
+      recordNode,
+      [...currentArray, 64],
+      currentFolderPosition + 1);
+  } else {
+    const childFolderCount = Math.ceil(recordNode.estimatedRecordCount / maxRecords );
+    return [...currentArray, childFolderCount]
+  }*/
+}
 
-  const collectionNode = getNodeForCollectionPath(appHierarchy)(collectionKey);
-
-  const idFirstChar = recordId[indexOfFirstDash + 1];
-  const allIdsShardId = $(collectionNode, [
-    allIdsStringsForFactor,
-    find(i => i.includes(idFirstChar)),
-  ]);
-
-  return _allIdsShardKey(
-    collectionKey,
-    recordId.slice(0, indexOfFirstDash),
-    allIdsShardId,
-  );
-};
-
-const getOrCreateShardFile = async (datastore, allIdsKey) => {
-  try {
-    return await datastore.loadFile(allIdsKey);
-  } catch (eLoad) {
-    try {
-      await datastore.createFile(allIdsKey, '');
-      return '';
-    } catch (eCreate) {
-      throw new Error(
-        `Error loading, then creating allIds ${allIdsKey
-        } : LOAD : ${eLoad.message
-        } : CREATE : ${eCreate}`,
-      );
-    }
-  }
-};
-
-const getShardFile = async (datastore, allIdsKey) => {
-  try {
-    return await datastore.loadFile(allIdsKey);
-  } catch (eLoad) {
-    return '';
-  }
-};
-
-export const addToAllIds = (appHierarchy, datastore) => async (record) => {
-  const allIdsKey = getAllIdsShardKey(
-    appHierarchy,
-    getParentKey(record.key),
-    record.id,
-  );
-
-  let allIds = await getOrCreateShardFile(datastore, allIdsKey);
-
-  allIds += `${allIds.length > 0 ? ',' : ''}${record.id}`;
-
-  await datastore.updateFile(allIdsKey, allIds);
-};
-
 export const getAllIdsIterator = app => async (collection_Key_or_NodeKey) => {
   collection_Key_or_NodeKey = safeKey(collection_Key_or_NodeKey);
-  const targetNode = getCollectionNodeByKeyOrNodeKey(
+  const recordNode = getCollectionNodeByKeyOrNodeKey(
     app.hierarchy,
     collection_Key_or_NodeKey,
   );
 
-  const getAllIdsIteratorForCollectionKey = async (collectionKey) => {
-    const all_allIdsKeys = getAllIdsShardNames(app.hierarchy, collectionKey);
-    let shardIndex = 0;
+  const getAllIdsIteratorForCollectionKey = async (recordNode, collectionKey) => {
+
+    const folderStructure = folderStructureArray(recordNode)
 
-    const allIdsFromShardIterator = async () => {
-      if (shardIndex === all_allIdsKeys.length) { return ({ done: true, result: { ids: [], collectionKey } }); }
+    let currentFolderContents = [];
+    let currentPosition = [];
 
-      const shardKey = all_allIdsKeys[shardIndex];
+    const collectionDir = getCollectionDir(app.hierarchy, collectionKey);
+    const basePath = joinKey(
+      collectionDir, recordNode.nodeId.toString());
 
-      const allIds = await getAllIdsFromShard(app.datastore, shardKey);
+    // "folderStructure" determines the top, sharding folders
+    // we need to add one, for the collection root folder, which
+    // always exists
+    const levels = folderStructure.length + 1;
+    const topLevel = levels -1;
 
-      shardIndex++;
+    /* populate initial directory structure in form:
+    [
+      {path: "/a", contents: ["b", "c", "d"]},
+      {path: "/a/b", contents: ["e","f","g"]},
+      {path: "/a/b/e", contents: ["1-abcd","2-cdef","3-efgh"]},
+    ]
+    // stores contents on each parent level
+    // top level has ID folders
+    */
+    const firstFolder = async () => {
+
+      let folderLevel = 0;
+
+      const lastPathHasContent = () =>
+        folderLevel === 0
+        || currentFolderContents[folderLevel - 1].contents.length > 0;
+
+      while (folderLevel <= topLevel && lastPathHasContent()) {
+
+        let thisPath = basePath;
+        for(let lev = 0; lev < currentPosition.length; lev++) {
+          thisPath = joinKey(
+            thisPath, currentFolderContents[lev].contents[0]);
+        }
+
+        const contentsThisLevel =
+          await app.datastore.getFolderContents(thisPath);
+        currentFolderContents.push({
+          contents:contentsThisLevel,
+          path: thisPath
+        });
+
+        // should start as something like [0,0]
+        if(folderLevel < topLevel)
+          currentPosition.push(0);
+
+        folderLevel+=1;
+      }
+
+      return (currentPosition.length === levels - 1);
+    }
+
+    const isOnLastFolder = level => {
+
+      const result = currentPosition[level] === currentFolderContents[level].contents.length - 1;
+      return result;
+    }
+
+    const getNextFolder = async (lev=undefined) => {
+      lev = isUndefined(lev) ? topLevel : lev;
+      const parentLev = lev - 1;
+
+      if(parentLev < 0) return false;
+
+      if(isOnLastFolder(parentLev)) {
+        return await getNextFolder(parentLev);
+      }
+
+      const newPosition = currentPosition[parentLev] + 1;
+      currentPosition[parentLev] = newPosition;
+
+      const nextFolder = joinKey(
+        currentFolderContents[parentLev].path,
+        currentFolderContents[parentLev].contents[newPosition]);
+      currentFolderContents[lev].contents = await app.datastore.getFolderContents(
+        nextFolder
+      );
+      currentFolderContents[lev].path = nextFolder;
+
+      if(lev !== topLevel) {
+
+        // we just advanced a parent folder, so now need to
+        // do the same to the next levels
+        let loopLevel = lev + 1;
+        while(loopLevel <= topLevel) {
+          const loopParentLevel = loopLevel-1;
+
+          currentPosition[loopParentLevel] = 0;
+          const nextLoopFolder = joinKey(
+            currentFolderContents[loopParentLevel].path,
+            currentFolderContents[loopParentLevel].contents[0]);
+          currentFolderContents[loopLevel].contents = await app.datastore.getFolderContents(
+            nextLoopFolder
+          );
+          currentFolderContents[loopLevel].path = nextLoopFolder;
+          loopLevel+=1;
+        }
+      }
+
+      // true ==has more ids... (just loaded more)
+      return true;
+    }
+
+    const idsCurrentFolder = () =>
+      currentFolderContents[currentFolderContents.length - 1].contents;
+
+    const fininshedResult = ({ done: true, result: { ids: [], collectionKey } });
+
+    let hasStarted = false;
+    let hasMore = true;
+    const getIdsFromCurrentfolder = async () => {
+
+      if(!hasMore) {
+        return fininshedResult;
+      }
+
+      if(!hasStarted) {
+        hasMore = await firstFolder();
+        hasStarted = true;
+        return ({
+          result: {
+            ids: idsCurrentFolder(),
+            collectionKey
+          },
+          done: false
+        })
+      }
+
+      hasMore = await getNextFolder();
 
       return ({
         result: {
-          ids: allIds,
-          collectionKey,
+          ids: hasMore ? idsCurrentFolder() : [],
+          collectionKey
         },
-        done: false,
+        done: !hasMore
       });
     };
+    }
 
-    return allIdsFromShardIterator;
+    return getIdsFromCurrentfolder;
+
   };
 
   const ancestors = $(getFlattenedHierarchy(app.hierarchy), [
     filter(isCollectionRecord),
-    filter(n => isAncestor(targetNode)(n)
-      || n.nodeKey() === targetNode.nodeKey()),
+    filter(n => isAncestor(recordNode)(n)
+      || n.nodeKey() === recordNode.nodeKey()),
     orderBy([n => n.nodeKey().length], ['asc']),
   ]); // parents first
 

@@ -149,14 +235,16 @@ export const getAllIdsIterator = app => async (collection_Key_or_NodeKey) => {
       parentRecordKey,
       currentNode.collectionName,
     );
-    if (currentNode.nodeKey() === targetNode.nodeKey()) {
+    if (currentNode.nodeKey() === recordNode.nodeKey()) {
       return [
         await getAllIdsIteratorForCollectionKey(
+          currentNode,
          currentCollectionKey,
        )];
     }
     const allIterators = [];
     const currentIterator = await getAllIdsIteratorForCollectionKey(
+      currentNode,
       currentCollectionKey,
     );
 

@@ -191,39 +279,5 @@ export const getAllIdsIterator = app => async (collection_Key_or_NodeKey) => {
   };
 };
 
-const getAllIdsFromShard = async (datastore, shardKey) => {
-  const allIdsStr = await getShardFile(datastore, shardKey);
-
-  const allIds = [];
-  let currentId = '';
-  for (let i = 0; i < allIdsStr.length; i++) {
-    const currentChar = allIdsStr.charAt(i);
-    const isLast = (i === allIdsStr.length - 1);
-    if (currentChar === ',' || isLast) {
-      if (isLast) currentId += currentChar;
-      allIds.push(currentId);
-      currentId = '';
-    } else {
-      currentId += currentChar;
-    }
-  }
-  return allIds;
-};
-
-export const removeFromAllIds = (appHierarchy, datastore) => async (record) => {
-  const shardKey = getAllIdsShardKey(
-    appHierarchy,
-    getParentKey(record.key),
-    record.id,
-  );
-  const allIds = await getAllIdsFromShard(datastore, shardKey);
-
-  const newIds = $(allIds, [
-    pull(record.id),
-    join(','),
-  ]);
-
-  await datastore.updateFile(shardKey, newIds);
-};
-
 export default getAllIdsIterator;
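Reviewer note: the docblock states the intent of folderStructureArray; working the arithmetic through (record counts are illustrative) gives:

// totalFolders = Math.ceil(estimatedRecordCount / 1000), i.e. per RECORDS_PER_FOLDER
folderStructureArray({ estimatedRecordCount: 1000 });     // totalFolders 1     => []
folderStructureArray({ estimatedRecordCount: 50000 });    // totalFolders 50    => [50]
folderStructureArray({ estimatedRecordCount: 100000 });   // totalFolders 100   => [64, 2]
folderStructureArray({ estimatedRecordCount: 10000000 }); // totalFolders 10000 => [64, 64, 3]

The rewritten iterator keeps the paging contract that callers elsewhere in this diff depend on, { result: { ids, collectionKey }, done }, so consuming code still loops like:

const iterate = await getAllIdsIterator(app)('/customers'); // hypothetical collection key
let page = await iterate();
while (!page.done) {
  for (const id of page.result.ids) {
    // joinKey(page.result.collectionKey, id) locates one record
  }
  page = await iterate();
}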
@@ -1,13 +1,13 @@
 import { ensureShardNameIsInShardMap } from './sharding';
 import { getIndexWriter } from './serializer';
-import { isShardedIndex } from '../templateApi/hierarchy';
+import { isShardedIndex, getParentKey } from '../templateApi/hierarchy';
 import {promiseWriteableStream} from "./promiseWritableStream";
 import {promiseReadableStream} from "./promiseReadableStream";
 
-export const applyToShard = async (hierarchy, store, indexKey,
+export const applyToShard = async (hierarchy, store, indexDir,
   indexNode, indexShardKey, recordsToWrite, keysToRemove) => {
   const createIfNotExists = recordsToWrite.length > 0;
-  const writer = await getWriter(hierarchy, store, indexKey, indexShardKey, indexNode, createIfNotExists);
+  const writer = await getWriter(hierarchy, store, indexDir, indexShardKey, indexNode, createIfNotExists);
   if (writer === SHARD_DELETED) return;
 
   await writer.updateIndex(recordsToWrite, keysToRemove);

@@ -15,13 +15,17 @@ export const applyToShard = async (hierarchy, store, indexKey,
 };
 
 const SHARD_DELETED = 'SHARD_DELETED';
-const getWriter = async (hierarchy, store, indexKey, indexedDataKey, indexNode, createIfNotExists) => {
+const getWriter = async (hierarchy, store, indexDir, indexedDataKey, indexNode, createIfNotExists) => {
   let readableStream = null;
 
   if (isShardedIndex(indexNode)) {
-    await ensureShardNameIsInShardMap(store, indexKey, indexedDataKey);
+    await ensureShardNameIsInShardMap(store, indexDir, indexedDataKey);
     if(!await store.exists(indexedDataKey)) {
-      await store.createFile(indexedDataKey, "");
+      if (await store.exists(getParentKey(indexedDataKey))) {
+        await store.createFile(indexedDataKey, "");
+      } else {
+        return SHARD_DELETED;
+      }
     }
   }
 

@@ -37,7 +41,11 @@ const getWriter = async (hierarchy, store, indexKey, indexedDataKey, indexNode,
       throw e;
     } else {
       if (createIfNotExists) {
-        await store.createFile(indexedDataKey, '');
+        if(await store.exists(getParentKey(indexedDataKey))) {
+          await store.createFile(indexedDataKey, '');
+        } else {
+          return SHARD_DELETED;
+        }
       } else {
         return SHARD_DELETED;
       }

@@ -65,6 +73,12 @@ const swapTempFileIn = async (store, indexedDataKey, isRetry = false) => {
     await store.deleteFile(indexedDataKey);
   } catch (e) {
     // ignore failure, incase it has not been created yet
+
+    // if parent folder does not exist, assume that this index
+    // should not be there
+    if(!await store.exists(getParentKey(indexedDataKey))) {
+      return;
+    }
   }
   try {
     await store.renameFile(tempFile, indexedDataKey);
@@ -2,20 +2,20 @@ import { isShardedIndex } from '../templateApi/hierarchy';
 import { joinKey } from '../common';
 import { getShardMapKey, getUnshardedIndexDataKey, createIndexFile } from './sharding';
 
-export const initialiseIndex = async (datastore, parentKey, index) => {
-  const indexKey = joinKey(parentKey, index.name);
+export const initialiseIndex = async (datastore, dir, index) => {
+  const indexDir = joinKey(dir, index.name);
 
-  await datastore.createFolder(indexKey);
+  await datastore.createFolder(indexDir);
 
   if (isShardedIndex(index)) {
     await datastore.createFile(
-      getShardMapKey(indexKey),
+      getShardMapKey(indexDir),
       '[]',
     );
   } else {
     await createIndexFile(
       datastore,
-      getUnshardedIndexDataKey(indexKey),
+      getUnshardedIndexDataKey(indexDir),
       index,
     );
   }
@@ -1,12 +1,4 @@
 import lunr from 'lunr';
-import {
-  getHashCode,
-  joinKey
-} from '../common';
-import {
-  getActualKeyOfParent,
-  isGlobalIndex,
-} from '../templateApi/hierarchy';
 import {promiseReadableStream} from "./promiseReadableStream";
 import { createIndexFile } from './sharding';
 import { generateSchema } from './indexSchemaCreator';

@@ -50,31 +42,6 @@ export const searchIndex = async (hierarchy, datastore, index, indexedDataKey, s
   return await doRead(hierarchy, datastore, index, indexedDataKey);
 };
 
-export const getIndexedDataKey_fromIndexKey = (indexKey) =>
-  `${indexKey}${indexKey.endsWith('.csv') ? '' : '.csv'}`;
-
-export const uniqueIndexName = index => `idx_${
-  getHashCode(`${index.filter}${index.map}`)
-}.csv`;
-
-export const getIndexedDataKey = (decendantKey, indexNode) => {
-  if (isGlobalIndex(indexNode)) { return `${indexNode.nodeKey()}.csv`; }
-
-  const indexedDataParentKey = getActualKeyOfParent(
-    indexNode.parent().nodeKey(),
-    decendantKey,
-  );
-
-  const indexName = indexNode.name
-    ? `${indexNode.name}.csv`
-    : uniqueIndexName(indexNode);
-
-  return joinKey(
-    indexedDataParentKey,
-    indexName,
-  );
-};
-
 export const iterateIndex = (onGetItem, getFinalResult) => async (hierarchy, datastore, index, indexedDataKey) => {
   try {
     const readableStream = promiseReadableStream(
@@ -9,21 +9,23 @@
 } from '../common';
 import {
   getFlattenedHierarchy, getNode, getRecordNodeId,
-  getExactNodeForPath, recordNodeIdIsAllowed,
+  getExactNodeForKey, recordNodeIdIsAllowed,
   isRecord, isGlobalIndex,
 } from '../templateApi/hierarchy';
 import { indexTypes } from '../templateApi/indexes';
+import { getIndexDir } from "../indexApi/getIndexDir";
+import { getRecordInfo} from "../recordApi/recordInfo";
 
-export const getRelevantAncestorIndexes = (appHierarchy, record) => {
+export const getRelevantAncestorIndexes = (hierarchy, record) => {
   const key = record.key;
   const keyParts = splitKey(key);
   const nodeId = getRecordNodeId(key);
 
-  const flatHierarchy = orderBy(getFlattenedHierarchy(appHierarchy),
+  const flatHierarchy = orderBy(getFlattenedHierarchy(hierarchy),
     [node => node.pathRegx().length],
     ['desc']);
 
-  const makeindexNodeAndKey_ForAncestorIndex = (indexNode, indexKey) => makeIndexNodeAndKey(indexNode, joinKey(indexKey, indexNode.name));
+  const makeindexNodeAndDir_ForAncestorIndex = (indexNode, parentRecordDir) => makeIndexNodeAndDir(indexNode, joinKey(parentRecordDir, indexNode.name));
 
   const traverseAncestorIndexesInPath = () => reduce((acc, part) => {
     const currentIndexKey = joinKey(acc.lastIndexKey, part);

@@ -42,8 +44,10 @@ export const getRelevantAncestorIndexes = (appHierarchy, record) => {
         || includes(nodeId)(i.allowedRecordNodeIds))),
     ]);
 
+    const currentRecordDir = getRecordInfo(hierarchy, currentIndexKey).dir;
+
     each(v => acc.nodesAndKeys.push(
-      makeindexNodeAndKey_ForAncestorIndex(v, currentIndexKey),
+      makeindexNodeAndDir_ForAncestorIndex(v, currentRecordDir),
     ))(indexes);
 
     return acc;

@@ -51,31 +55,35 @@ export const getRelevantAncestorIndexes = (appHierarchy, record) => {
 
   const rootIndexes = $(flatHierarchy, [
     filter(n => isGlobalIndex(n) && recordNodeIdIsAllowed(n)(nodeId)),
-    map(i => makeIndexNodeAndKey(i, i.nodeKey())),
+    map(i => makeIndexNodeAndDir(
+      i,
+      getIndexDir(hierarchy, i.nodeKey()))),
   ]);
 
   return union(traverseAncestorIndexesInPath())(rootIndexes);
 };
 
-export const getRelevantReverseReferenceIndexes = (appHierarchy, record) => $(record.key, [
-  getExactNodeForPath(appHierarchy),
+export const getRelevantReverseReferenceIndexes = (hierarchy, record) => $(record.key, [
+  getExactNodeForKey(hierarchy),
   n => n.fields,
   filter(f => f.type === 'reference'
     && isSomething(record[f.name])
    && isNonEmptyString(record[f.name].key)),
   map(f => $(f.typeOptions.reverseIndexNodeKeys, [
     map(n => ({
-      recordNode: getNode(appHierarchy, n),
+      recordNode: getNode(hierarchy, n),
      field: f,
    })),
  ])),
   flatten,
-  map(n => makeIndexNodeAndKey(
+  map(n => makeIndexNodeAndDir(
     n.recordNode,
-    joinKey(record[n.field.name].key, n.recordNode.name),
+    joinKey(
+      getRecordInfo(hierarchy, record[n.field.name].key).dir,
+      n.recordNode.name),
   )),
 ]);
 
-const makeIndexNodeAndKey = (indexNode, indexKey) => ({ indexNode, indexKey });
+const makeIndexNodeAndDir = (indexNode, indexDir) => ({ indexNode, indexDir });
 
 export default getRelevantAncestorIndexes;
@@ -5,13 +5,14 @@ import {
 import {
   getActualKeyOfParent, isGlobalIndex,
   getParentKey, isShardedIndex,
-  getExactNodeForPath,
+  getExactNodeForKey,
 } from '../templateApi/hierarchy';
 import {
   joinKey, isNonEmptyString, splitKey, $,
 } from '../common';
 
-export const getIndexedDataKey = (indexNode, indexKey, record) => {
+export const getIndexedDataKey = (indexNode, indexDir, record) => {
+
   const getShardName = (indexNode, record) => {
     const shardNameFunc = compileCode(indexNode.getShardName);
     try {

@@ -27,18 +28,16 @@ export const getIndexedDataKey = (indexNode, indexKey, record) => {
     ? `${getShardName(indexNode, record)}.csv`
     : 'index.csv';
 
-  return joinKey(indexKey, shardName);
+  return joinKey(indexDir, shardName);
 };
 
-export const getShardKeysInRange = async (app, indexKey, startRecord = null, endRecord = null) => {
-  const indexNode = getExactNodeForPath(app.hierarchy)(indexKey);
-
+export const getShardKeysInRange = async (app, indexNode, indexDir, startRecord = null, endRecord = null) => {
   const startShardName = !startRecord
     ? null
     : shardNameFromKey(
       getIndexedDataKey(
         indexNode,
-        indexKey,
+        indexDir,
         startRecord,
       ),
     );

@@ -48,29 +47,29 @@ export const getShardKeysInRange = async (app, indexKey, startRecord = null, end
     : shardNameFromKey(
       getIndexedDataKey(
         indexNode,
-        indexKey,
+        indexDir,
         endRecord,
       ),
     );
 
-  return $(await getShardMap(app.datastore, indexKey), [
+  return $(await getShardMap(app.datastore, indexDir), [
     filter(k => (startRecord === null || k >= startShardName)
       && (endRecord === null || k <= endShardName)),
-    map(k => joinKey(indexKey, `${k}.csv`)),
+    map(k => joinKey(indexDir, `${k}.csv`)),
   ]);
 };
 
-export const ensureShardNameIsInShardMap = async (store, indexKey, indexedDataKey) => {
-  const map = await getShardMap(store, indexKey);
+export const ensureShardNameIsInShardMap = async (store, indexDir, indexedDataKey) => {
+  const map = await getShardMap(store, indexDir);
   const shardName = shardNameFromKey(indexedDataKey);
   if (!includes(shardName)(map)) {
     map.push(shardName);
-    await writeShardMap(store, indexKey, map);
+    await writeShardMap(store, indexDir, map);
   }
 };
 
-export const getShardMap = async (datastore, indexKey) => {
-  const shardMapKey = getShardMapKey(indexKey);
+export const getShardMap = async (datastore, indexDir) => {
+  const shardMapKey = getShardMapKey(indexDir);
   try {
     return await datastore.loadJson(shardMapKey);
   } catch (_) {

@@ -79,27 +78,26 @@ export const getShardMap = async (datastore, indexKey) => {
   }
 };
 
-export const writeShardMap = async (datastore, indexKey, shardMap) => await datastore.updateJson(
-  getShardMapKey(indexKey),
+export const writeShardMap = async (datastore, indexDir, shardMap) => await datastore.updateJson(
+  getShardMapKey(indexDir),
   shardMap,
 );
 
-export const getAllShardKeys = async (app, indexKey) => await getShardKeysInRange(app, indexKey);
+export const getAllShardKeys = async (app, indexNode, indexDir) =>
+  await getShardKeysInRange(app, indexNode, indexDir);
 
-export const getShardMapKey = indexKey => joinKey(indexKey, 'shardMap.json');
+export const getShardMapKey = indexDir => joinKey(indexDir, 'shardMap.json');
 
-export const getUnshardedIndexDataKey = indexKey => joinKey(indexKey, 'index.csv');
-
-export const getIndexFolderKey = indexKey => indexKey;
+export const getUnshardedIndexDataKey = indexDir => joinKey(indexDir, 'index.csv');
 
 export const createIndexFile = async (datastore, indexedDataKey, index) => {
   if (isShardedIndex(index)) {
-    const indexKey = getParentKey(indexedDataKey);
-    const shardMap = await getShardMap(datastore, indexKey);
+    const indexDir = getParentKey(indexedDataKey);
+    const shardMap = await getShardMap(datastore, indexDir);
     shardMap.push(
       shardNameFromKey(indexedDataKey),
     );
-    await writeShardMap(datastore, indexKey, shardMap);
+    await writeShardMap(datastore, indexDir, shardMap);
   }
   await datastore.createFile(indexedDataKey, '');
 };
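Reviewer note: after this rename the sharding helpers all take the index's physical directory rather than its logical key. The on-disk layout they manage, for a hypothetical index directory, looks like:

<indexDir>/shardMap.json    JSON array of shard names (getShardMap / writeShardMap)
<indexDir>/<shardName>.csv  one data file per shard (getShardKeysInRange)
<indexDir>/index.csv        the single data file of an unsharded index (getUnshardedIndexDataKey)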
@@ -2,15 +2,14 @@ import {
   safeKey, apiWrapper,
   events, joinKey,
 } from '../common';
-import { _load, getRecordFileName } from './load';
+import { _load } from './load';
 import { _deleteCollection } from '../collectionApi/delete';
 import {
-  getExactNodeForPath
+  getExactNodeForKey
 } from '../templateApi/hierarchy';
 import { _deleteIndex } from '../indexApi/delete';
 import { transactionForDeleteRecord } from '../transactions/create';
-import { removeFromAllIds } from '../indexing/allIds';
 import { permission } from '../authApi/permissions';
+import { getRecordInfo } from './recordInfo';
 
 export const deleteRecord = (app, disableCleanup = false) => async key => {
   key = safeKey(key);

@@ -25,8 +24,9 @@ export const deleteRecord = (app, disableCleanup = false) => async key => {
 
 // called deleteRecord because delete is a keyword
 export const _deleteRecord = async (app, key, disableCleanup) => {
-  key = safeKey(key);
-  const node = getExactNodeForPath(app.hierarchy)(key);
+  const recordInfo = getRecordInfo(app.hierarchy, key);
+  key = recordInfo.key;
+  const node = getExactNodeForKey(app.hierarchy)(key);
 
   const record = await _load(app, key);
   await transactionForDeleteRecord(app, record);

@@ -38,60 +38,8 @@ export const _deleteRecord = async (app, key, disableCleanup) => {
     await _deleteCollection(app, collectionKey, true);
   }
 
-  await app.datastore.deleteFile(
-    getRecordFileName(key),
-  );
-
-  await deleteFiles(app, key);
-
-  await removeFromAllIds(app.hierarchy, app.datastore)(record);
+  await app.datastore.deleteFolder(recordInfo.dir);
 
   if (!disableCleanup) { await app.cleanupTransactions(); }
-
-  await app.datastore.deleteFolder(key);
-  await deleteIndexes(app, key);
 };
 
-const deleteIndexes = async (app, key) => {
-  const node = getExactNodeForPath(app.hierarchy)(key);
-  /* const reverseIndexKeys = $(app.hierarchy, [
-    getFlattenedHierarchy,
-    map(n => n.fields),
-    flatten,
-    filter(isSomething),
-    filter(fieldReversesReferenceToNode(node)),
-    map(f => $(f.typeOptions.reverseIndexNodeKeys, [
-      map(n => getNode(
-        app.hierarchy,
-        n))
-      ])
-    ),
-    flatten,
-    map(n => joinKey(key, n.name))
-  ]);
-
-  for(let i of reverseIndexKeys) {
-    await _deleteIndex(app, i, true);
-  } */
-
-
-  for (const index of node.indexes) {
-    const indexKey = joinKey(key, index.name);
-    await _deleteIndex(app, indexKey, true);
-  }
-};
-
-const deleteFiles = async (app, key) => {
-  const filesFolder = joinKey(key, 'files');
-  const allFiles = await app.datastore.getFolderContents(
-    filesFolder,
-  );
-
-  for (const file of allFiles) {
-    await app.datastore.deleteFile(file);
-  }
-
-  await app.datastore.deleteFolder(
-    joinKey(key, 'files'),
-  );
-};
@@ -2,6 +2,7 @@ import { apiWrapper, events, isNothing } from '../common';
 import { permission } from '../authApi/permissions';
 import { safeGetFullFilePath } from './uploadFile';
 import { BadRequestError } from '../common/errors';
+import { getRecordInfo } from "./recordInfo";
 
 export const downloadFile = app => async (recordKey, relativePath) => apiWrapper(
   app,

@@ -16,9 +17,10 @@ const _downloadFile = async (app, recordKey, relativePath) => {
   if (isNothing(recordKey)) { throw new BadRequestError('Record Key not supplied'); }
   if (isNothing(relativePath)) { throw new BadRequestError('file path not supplied'); }
 
+  const {dir} = getRecordInfo(app.hierarchy, recordKey);
   return await app.datastore.readableFileStream(
     safeGetFullFilePath(
-      recordKey, relativePath,
+      dir, relativePath,
     ),
   );
 };
@@ -1,6 +1,6 @@
 import { map, isString, has, some } from 'lodash/fp';
 import {
-  getExactNodeForPath,
+  getExactNodeForKey,
   findField, getNode, isGlobalIndex,
 } from '../templateApi/hierarchy';
 import { listItems } from '../indexApi/listItems';

@@ -23,7 +23,7 @@ export const getContext = app => recordKey => {
 
 export const _getContext = (app, recordKey) => {
   recordKey = safeKey(recordKey);
-  const recordNode = getExactNodeForPath(app.hierarchy)(recordKey);
+  const recordNode = getExactNodeForKey(app.hierarchy)(recordKey);
 
   const cachedReferenceIndexes = {};
 
@@ -2,7 +2,7 @@ import {
   keyBy, mapValues, filter,
   map, includes, last,
 } from 'lodash/fp';
-import { getExactNodeForPath, getNode } from '../templateApi/hierarchy';
+import { getExactNodeForKey, getNode } from '../templateApi/hierarchy';
 import { safeParseField } from '../types';
 import {
   $, splitKey, safeKey, isNonEmptyString,

@@ -10,6 +10,7 @@ import {
 } from '../common';
 import { mapRecord } from '../indexing/evaluate';
 import { permission } from '../authApi/permissions';
+import { getRecordInfo } from "./recordInfo";
 
 export const getRecordFileName = key => joinKey(key, 'record.json');
 

@@ -24,12 +25,10 @@ export const load = app => async key => {
   );
 }
 
-export const _load = async (app, key, keyStack = []) => {
-  key = safeKey(key);
-  const recordNode = getExactNodeForPath(app.hierarchy)(key);
-  const storedData = await app.datastore.loadJson(
-    getRecordFileName(key),
-  );
+export const _loadFromInfo = async (app, recordInfo, keyStack = []) => {
+  const key = recordInfo.key;
+  const {recordNode, recordJson} = recordInfo;
+  const storedData = await app.datastore.loadJson(recordJson);
 
   const loadedRecord = $(recordNode.fields, [
     keyBy('name'),

@@ -70,4 +69,11 @@ export const _load = async (app, key, keyStack = []) => {
   return loadedRecord;
 };
 
+export const _load = async (app, key, keyStack = []) =>
+  _loadFromInfo(
+    app,
+    getRecordInfo(app.hierarchy, key),
+    keyStack);
+
+
 export default load;
@@ -0,0 +1,117 @@
+import {
+  getExactNodeForKey, getActualKeyOfParent,
+  isRoot, isSingleRecord, getNodeForCollectionPath
+} from '../templateApi/hierarchy';
+import {
+  reduce, find, filter, take
+} from 'lodash/fp';
+import {
+  $, getFileFromKey, joinKey, safeKey, keySep
+} from '../common';
+import {
+  folderStructureArray, allIdChars
+} from "../indexing/allIds";
+
+export const getRecordInfo = (hierarchy, key) => {
+  const recordNode = getExactNodeForKey(hierarchy)(key);
+  const pathInfo = getRecordDirectory(recordNode, key);
+  const dir = joinKey(pathInfo.base, ...pathInfo.subdirs);
+
+  return {
+    recordJson: recordJson(dir),
+    files: files(dir),
+    child:(name) => joinKey(dir, name),
+    key: safeKey(key),
+    recordNode, pathInfo, dir
+  };
+}
+
+export const getCollectionDir = (hierarchy, collectionKey) => {
+  const recordNode = getNodeForCollectionPath(hierarchy)(collectionKey);
+  const dummyRecordKey = joinKey(collectionKey, "1-abcd");
+  const pathInfo = getRecordDirectory(recordNode, dummyRecordKey);
+  return pathInfo.base;
+}
+
+const recordJson = (dir) =>
+  joinKey(dir, "record.json")
+
+const files = (dir) =>
+  joinKey(dir, "files")
+
+const getRecordDirectory = (recordNode, key) => {
+  const id = getFileFromKey(key);
+
+  const traverseParentKeys = (n, parents=[]) => {
+    if(isRoot(n)) return parents;
+    const k = getActualKeyOfParent(n.nodeKey(), key);
+    const thisNodeDir = {
+      node:n,
+      relativeDir: joinKey(
+        recordRelativeDirectory(n, getFileFromKey(k)))
+    };
+    return traverseParentKeys(
+      n.parent(),
+      [thisNodeDir, ...parents]);
+  }
+
+  const parentDirs = $(recordNode.parent(), [
+    traverseParentKeys,
+    reduce((key, item) => {
+      return joinKey(key, item.node.collectionName, item.relativeDir)
+    }, keySep)
+  ]);
+
+  const subdirs = isSingleRecord(recordNode)
+    ? []
+    : recordRelativeDirectory(recordNode, id);
+  const base = isSingleRecord(recordNode)
+    ? joinKey(parentDirs, recordNode.name)
+    : joinKey(parentDirs, recordNode.collectionName);
+
+  return ({
+    subdirs, base
+  });
+}
+
+const recordRelativeDirectory = (recordNode, id) => {
+  const folderStructure = folderStructureArray(recordNode);
+  const strippedId = id.substring(recordNode.nodeId.toString().length + 1);
+  const subfolders = $(folderStructure, [
+    reduce((result, currentCount) => {
+      result.folders.push(
+        folderForChar(strippedId[result.level], currentCount)
+      );
+      return {level:result.level+1, folders:result.folders};
+    }, {level:0, folders:[]}),
+    f => f.folders,
+    filter(f => !!f)
+  ]);
+
+  return [recordNode.nodeId.toString(), ...subfolders, id]
+}
+
+const folderForChar = (char, folderCount) =>
+  folderCount === 1 ? ""
+    : $(folderCount, [
+      idFoldersForFolderCount,
+      find(f => f.includes(char))
+    ]);
+
+const idFoldersForFolderCount = (folderCount) => {
+  const charRangePerShard = 64 / folderCount;
+  const idFolders = [];
+  let index = 0;
+  let currentIdsShard = '';
+  while (index < 64) {
+    currentIdsShard += allIdChars[index];
+    if ((index + 1) % charRangePerShard === 0) {
+      idFolders.push(currentIdsShard);
+      currentIdsShard = '';
+    }
+    index++;
+  }
+
+  return idFolders;
+};
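Reviewer note: a worked example of what getRecordInfo resolves (key, nodeId and record counts are hypothetical):

const info = getRecordInfo(hierarchy, '/customers/1-abc123');
// with nodeId 1 and estimatedRecordCount <= 1000, folderStructureArray gives [],
// so no shard folders are interposed:
// info.dir        => '/customers/1/1-abc123'
// info.recordJson => '/customers/1/1-abc123/record.json'
// info.files      => '/customers/1/1-abc123/files'
// info.child('customer_index') => '/customers/1/1-abc123/customer_index'
// at estimatedRecordCount around 100000 the structure is [64, 2], so two levels
// of shard folder (chosen from the id's leading characters) sit between
// '/customers/1' and the record folder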
@ -1,22 +1,17 @@
import {
  cloneDeep,
  flatten,
  map,
  filter,
  isEqual
  cloneDeep, take, takeRight,
  flatten, map, filter
} from 'lodash/fp';
import { initialiseChildCollections } from '../collectionApi/initialise';
import { validate } from './validate';
import { _load, getRecordFileName } from './load';
import { _loadFromInfo } from './load';
import {
  apiWrapper, events, $, joinKey,
} from '../common';
import {
  getFlattenedHierarchy, getExactNodeForPath,
  isRecord, getNode, isSingleRecord,
  getFlattenedHierarchy, isRecord, getNode,
  fieldReversesReferenceToNode,
} from '../templateApi/hierarchy';
import { addToAllIds } from '../indexing/allIds';
import {
  transactionForCreateRecord,
  transactionForUpdateRecord,

@ -24,6 +19,7 @@ import {
import { permission } from '../authApi/permissions';
import { initialiseIndex } from '../indexing/initialiseIndex';
import { BadRequestError } from '../common/errors';
import { getRecordInfo } from "./recordInfo";

export const save = app => async (record, context) => apiWrapper(
  app,

@ -46,40 +42,38 @@ export const _save = async (app, record, context, skipValidation = false) => {
    }
  }

  const recordInfo = getRecordInfo(app.hierarchy, record.key);
  const {
    recordNode, pathInfo,
    recordJson, files,
  } = recordInfo;

  if (recordClone.isNew) {
    const recordNode = getExactNodeForPath(app.hierarchy)(record.key);

    if(!recordNode)
      throw new Error("Cannot find node for " + record.key);

    if(!isSingleRecord(recordNode))
      await addToAllIds(app.hierarchy, app.datastore)(recordClone);

    const transaction = await transactionForCreateRecord(
      app, recordClone,
    );
    recordClone.transactionId = transaction.id;
    await app.datastore.createFolder(recordClone.key);
    await app.datastore.createFolder(
      joinKey(recordClone.key, 'files'),
    );
    await app.datastore.createJson(
      getRecordFileName(recordClone.key),
      recordClone,
    );
    await initialiseReverseReferenceIndexes(app, record);
    await initialiseAncestorIndexes(app, record);
    await initialiseChildCollections(app, recordClone.key);
    await createRecordFolderPath(app.datastore, pathInfo);
    await app.datastore.createFolder(files);
    await app.datastore.createJson(recordJson, recordClone);
    await initialiseReverseReferenceIndexes(app, recordInfo);
    await initialiseAncestorIndexes(app, recordInfo);
    await initialiseChildCollections(app, recordInfo);
    await app.publish(events.recordApi.save.onRecordCreated, {
      record: recordClone,
    });
  } else {
    const oldRecord = await _load(app, recordClone.key);
    const oldRecord = await _loadFromInfo(app, recordInfo);
    const transaction = await transactionForUpdateRecord(
      app, oldRecord, recordClone,
    );
    recordClone.transactionId = transaction.id;
    await app.datastore.updateJson(
      getRecordFileName(recordClone.key),
      recordJson,
      recordClone,
    );
    await app.publish(events.recordApi.save.onRecordUpdated, {

@ -95,19 +89,18 @@ export const _save = async (app, record, context, skipValidation = false) => {
  return returnedClone;
};

const initialiseAncestorIndexes = async (app, record) => {
  const recordNode = getExactNodeForPath(app.hierarchy)(record.key);

  for (const index of recordNode.indexes) {
    const indexKey = joinKey(record.key, index.name);
    if (!await app.datastore.exists(indexKey)) { await initialiseIndex(app.datastore, record.key, index); }
const initialiseAncestorIndexes = async (app, recordInfo) => {
  for (const index of recordInfo.recordNode.indexes) {
    const indexKey = recordInfo.child(index.name);
    if (!await app.datastore.exists(indexKey)) {
      await initialiseIndex(app.datastore, recordInfo.dir, index);
    }
  }
};

const initialiseReverseReferenceIndexes = async (app, record) => {
  const recordNode = getExactNodeForPath(app.hierarchy)(record.key);
const initialiseReverseReferenceIndexes = async (app, recordInfo) => {

  const indexNodes = $(fieldsThatReferenceThisRecord(app, recordNode), [
  const indexNodes = $(fieldsThatReferenceThisRecord(app, recordInfo.recordNode), [
    map(f => $(f.typeOptions.reverseIndexNodeKeys, [
      map(n => getNode(
        app.hierarchy,

@ -119,7 +112,7 @@ const initialiseReverseReferenceIndexes = async (app, record) => {
  for (const indexNode of indexNodes) {
    await initialiseIndex(
      app.datastore, record.key, indexNode,
      app.datastore, recordInfo.dir, indexNode,
    );
  }
};

@ -131,3 +124,42 @@ const fieldsThatReferenceThisRecord = (app, recordNode) => $(app.hierarchy, [
  flatten,
  filter(fieldReversesReferenceToNode(recordNode)),
]);

const createRecordFolderPath = async (datastore, pathInfo) => {

  const recursiveCreateFolder = async (subdirs, dirsThatNeedCreated=undefined) => {

    // iterate backwards through the directory hierarchy
    // until we get to a folder that exists, then create the rest
    // e.g.
    // - some/folder/here
    // - some/folder
    // - some
    const thisFolder = joinKey(pathInfo.base, ...subdirs);

    if(await datastore.exists(thisFolder)) {

      let creationFolder = thisFolder;
      for(let nextDir of (dirsThatNeedCreated || []) ) {
        creationFolder = joinKey(creationFolder, nextDir);
        await datastore.createFolder(creationFolder);
      }

    } else if(!dirsThatNeedCreated || dirsThatNeedCreated.length > 0) {

      dirsThatNeedCreated = !dirsThatNeedCreated
        ? []
        : dirsThatNeedCreated;

      await recursiveCreateFolder(
        take(subdirs.length - 1)(subdirs),
        [...takeRight(1)(subdirs), ...dirsThatNeedCreated]
      );
    }
  }

  await recursiveCreateFolder(pathInfo.subdirs);

  return joinKey(pathInfo.base, ...pathInfo.subdirs);

}
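The recursion above can be hard to follow from the code alone, so here is a runnable sketch of the same walk. The datastore and joinKey here are minimal stand-ins, not the real implementations:

// stand-in datastore implementing just exists/createFolder
const dirs = new Set(["/customers", "/customers/7"]);
const datastore = {
  exists: async p => dirs.has(p),
  createFolder: async p => { dirs.add(p); },
};
const joinKey = (...parts) => parts.filter(p => p !== "").join("/");

// walk back from base + subdirs until an existing folder is found,
// then create the missing tail in order (same shape as the code above)
const ensurePath = async (base, subdirs, toCreate = undefined) => {
  const thisFolder = joinKey(base, ...subdirs);
  if (await datastore.exists(thisFolder)) {
    let f = thisFolder;
    for (const next of toCreate || []) {
      f = joinKey(f, next);
      await datastore.createFolder(f);
    }
  } else if (!toCreate || toCreate.length > 0) {
    await ensurePath(base, subdirs.slice(0, -1),
      [...subdirs.slice(-1), ...(toCreate || [])]);
  }
};

// ensurePath("/customers/7", ["ab", "1-abcd"]) checks
// /customers/7/ab/1-abcd, then /customers/7/ab, then /customers/7 (exists),
// then creates /customers/7/ab followed by /customers/7/ab/1-abcd.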
@ -3,15 +3,16 @@ import {
  map, some,
} from 'lodash/fp';
import { generate } from 'shortid';
import { _load } from './load';
import { _loadFromInfo } from './load';
import {
  apiWrapper, events, splitKey,
  $, joinKey, isNothing, tryAwaitOrIgnore,
} from '../common';
import { getExactNodeForPath } from '../templateApi/hierarchy';
import { getExactNodeForKey } from '../templateApi/hierarchy';
import { permission } from '../authApi/permissions';
import { isLegalFilename } from '../types/file';
import { BadRequestError, ForbiddenError } from '../common/errors';
import { getRecordInfo } from "./recordInfo";

export const uploadFile = app => async (recordKey, readableStream, relativeFilePath) => apiWrapper(
  app,

@ -26,10 +27,11 @@ const _uploadFile = async (app, recordKey, readableStream, relativeFilePath) =>
  if (isNothing(relativeFilePath)) { throw new BadRequestError('file path not supplied'); }
  if (!isLegalFilename(relativeFilePath)) { throw new BadRequestError('Illegal filename'); }

  const record = await _load(app, recordKey);
  const recordInfo = getRecordInfo(app.hierarchy, recordKey);
  const record = await _loadFromInfo(app, recordInfo);

  const fullFilePath = safeGetFullFilePath(
    recordKey, relativeFilePath,
    recordInfo.dir, relativeFilePath,
  );

  const tempFilePath = `${fullFilePath}_${generate()}.temp`;

@ -54,30 +56,10 @@ const _uploadFile = async (app, recordKey, readableStream, relativeFilePath) =>
    .then(() => tryAwaitOrIgnore(app.datastore.deleteFile, fullFilePath))
    .then(() => app.datastore.renameFile(tempFilePath, fullFilePath));

  /*
  readableStream.pipe(outputStream);

  await new Promise(fulfill => outputStream.on('finish', fulfill));

  const isExpectedFileSize = checkFileSizeAgainstFields(
    app,
    record, relativeFilePath,
    await app.datastore.getFileSize(tempFilePath),
  );

  if (!isExpectedFileSize) {
    throw new Error(
      `Fields for ${relativeFilePath} do not have expected size`);
  }

  await tryAwaitOrIgnore(app.datastore.deleteFile, fullFilePath);

  await app.datastore.renameFile(tempFilePath, fullFilePath);
  */
};

const checkFileSizeAgainstFields = (app, record, relativeFilePath, expectedSize) => {
  const recordNode = getExactNodeForPath(app.hierarchy)(record.key);
  const recordNode = getExactNodeForKey(app.hierarchy)(record.key);

  const incorrectFileFields = $(recordNode.fields, [
    filter(f => f.type === 'file'

@ -107,7 +89,7 @@ const checkFileSizeAgainstFields = (app, record, relativeFilePath, expectedSize)
  return true;
};

export const safeGetFullFilePath = (recordKey, relativeFilePath) => {
export const safeGetFullFilePath = (recordDir, relativeFilePath) => {
  const naughtyUser = () => { throw new ForbiddenError('naughty naughty'); };

  if (relativeFilePath.startsWith('..')) naughtyUser();

@ -116,7 +98,7 @@ export const safeGetFullFilePath = (recordKey, relativeFilePath) => {
  if (includes('..')(pathParts)) naughtyUser();

  const recordKeyParts = splitKey(recordKey);
  const recordKeyParts = splitKey(recordDir);

  const fullPathParts = [
    ...recordKeyParts,
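The safeGetFullFilePath guard above rejects any relative path that could climb out of the record's directory. A minimal sketch of the same idea, assuming '/'-separated keys; the "files" subfolder and the local ForbiddenError class are illustrative stand-ins, not the module's actual path layout:

class ForbiddenError extends Error {}

const safeFilePath = (recordDir, relativeFilePath) => {
  const naughtyUser = () => { throw new ForbiddenError("naughty naughty"); };
  // reject both a leading ".." and any ".." segment inside the path
  if (relativeFilePath.startsWith("..")) naughtyUser();
  if (relativeFilePath.split("/").includes("..")) naughtyUser();
  return [recordDir, "files", relativeFilePath].join("/");
};

safeFilePath("/customers/1/1-abcd", "photo.jpg");           // ok
// safeFilePath("/customers/1/1-abcd", "a/../../record.json") // throws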
@ -4,7 +4,7 @@ import {
} from 'lodash/fp';
import { compileExpression } from '@nx-js/compiler-util';
import _ from 'lodash';
import { getExactNodeForPath } from '../templateApi/hierarchy';
import { getExactNodeForKey } from '../templateApi/hierarchy';
import { validateFieldParse, validateTypeConstraints } from '../types';
import { $, isNothing, isNonEmptyString } from '../common';
import { _getContext } from './getContext';

@ -63,7 +63,7 @@ export const validate = app => async (record, context) => {
    ? _getContext(app, record.key)
    : context;

  const recordNode = getExactNodeForPath(app.hierarchy)(record.key);
  const recordNode = getExactNodeForKey(app.hierarchy)(record.key);
  const fieldParseFails = validateAllFieldParse(record, recordNode);

  // non parsing would cause further issues - exit here
@ -171,7 +171,7 @@ const _getNewRecordTemplate = (parent, name, createDefaultIndex, isSingle) => {
  validationRules: [],
  nodeId: getNodeId(parent),
  indexes: [],
  allidsShardFactor: isRecord(parent) ? 1 : 64,
  estimatedRecordCount: isRecord(parent) ? 500 : 1000000,
  collectionName: '',
  isSingle,
});
@ -49,7 +49,7 @@ export const getNodesInPath = appHierarchy => key => $(appHierarchy, [
  filter(n => new RegExp(`${n.pathRegx()}`).test(key)),
]);

export const getExactNodeForPath = appHierarchy => key => $(appHierarchy, [
export const getExactNodeForKey = appHierarchy => key => $(appHierarchy, [
  getFlattenedHierarchy,
  find(n => new RegExp(`${n.pathRegx()}$`).test(key)),
]);

@ -87,7 +87,7 @@ export const getCollectionNode = (appHierarchy, nodeKey) => $(appHierarchy, [
]);

export const getNodeByKeyOrNodeKey = (appHierarchy, keyOrNodeKey) => {
  const nodeByKey = getExactNodeForPath(appHierarchy)(keyOrNodeKey);
  const nodeByKey = getExactNodeForKey(appHierarchy)(keyOrNodeKey);
  return isNothing(nodeByKey)
    ? getNode(appHierarchy, keyOrNodeKey)
    : nodeByKey;

@ -100,13 +100,14 @@ export const getCollectionNodeByKeyOrNodeKey = (appHierarchy, keyOrNodeKey) => {
    : nodeByKey;
};

export const isNode = (appHierarchy, key) => isSomething(getExactNodeForPath(appHierarchy)(key));
export const isNode = (appHierarchy, key) => isSomething(getExactNodeForKey(appHierarchy)(key));

export const getActualKeyOfParent = (parentNodeKey, actualChildKey) => $(actualChildKey, [
  splitKey,
  take(splitKey(parentNodeKey).length),
  ks => joinKey(...ks),
]);
export const getActualKeyOfParent = (parentNodeKey, actualChildKey) =>
  $(actualChildKey, [
    splitKey,
    take(splitKey(parentNodeKey).length),
    ks => joinKey(...ks),
  ]);

export const getParentKey = (key) => {
  return $(key, [

@ -199,7 +200,7 @@ export const fieldReversesReferenceToIndex = indexNode => field => field.type ==
export default {
  getLastPartInKey,
  getNodesInPath,
  getExactNodeForPath,
  getExactNodeForKey,
  hasMatchingAncestor,
  getNode,
  getNodeByKeyOrNodeKey,
@ -24,8 +24,10 @@ import { applyToShard } from '../indexing/apply';
import {
  getActualKeyOfParent,
  isGlobalIndex, fieldReversesReferenceToIndex, isReferenceIndex,
  getExactNodeForPath,
  getExactNodeForKey,
} from '../templateApi/hierarchy';
import { getRecordInfo } from "../recordApi/recordInfo";
import { getIndexDir } from '../indexApi/getIndexDir';

export const executeTransactions = app => async (transactions) => {
  const recordsByShard = mappedRecordsByIndexShard(app.hierarchy, transactions);

@ -33,7 +35,7 @@ export const executeTransactions = app => async (transactions) => {
  for (const shard of keys(recordsByShard)) {
    await applyToShard(
      app.hierarchy, app.datastore,
      recordsByShard[shard].indexKey,
      recordsByShard[shard].indexDir,
      recordsByShard[shard].indexNode,
      shard,
      recordsByShard[shard].writes,

@ -77,8 +79,8 @@ const mappedRecordsByIndexShard = (hierarchy, transactions) => {
    transByShard[t.indexShardKey] = {
      writes: [],
      removes: [],
      indexKey: t.indexKey,
      indexNodeKey: t.indexNodeKey,
      indexDir: t.indexDir,
      indexNodeKey: t.indexNode.nodeKey(),
      indexNode: t.indexNode,
    };
  }

@ -109,10 +111,10 @@ const getUpdateTransactionsByShard = (hierarchy, transactions) => {
    return ({
      mappedRecord,
      indexNode: indexNodeAndPath.indexNode,
      indexKey: indexNodeAndPath.indexKey,
      indexDir: indexNodeAndPath.indexDir,
      indexShardKey: getIndexedDataKey(
        indexNodeAndPath.indexNode,
        indexNodeAndPath.indexKey,
        indexNodeAndPath.indexDir,
        mappedRecord.result,
      ),
    });

@ -219,54 +221,56 @@ const getBuildIndexTransactionsByShard = (hierarchy, transactions) => {
  if (!isNonEmptyArray(buildTransactions)) return [];
  const indexNode = transactions.indexNode;

  const getIndexKeys = (t) => {
  const getIndexDirs = (t) => {
    if (isGlobalIndex(indexNode)) {
      return [indexNode.nodeKey()];
    }

    if (isReferenceIndex(indexNode)) {
      const recordNode = getExactNodeForPath(hierarchy)(t.record.key);
      const recordNode = getExactNodeForKey(hierarchy)(t.record.key);
      const refFields = $(recordNode.fields, [
        filter(fieldReversesReferenceToIndex(indexNode)),
      ]);
      const indexKeys = [];
      const indexDirs = [];
      for (const refField of refFields) {
        const refValue = t.record[refField.name];
        if (isSomething(refValue)
          && isNonEmptyString(refValue.key)) {
          const indexKey = joinKey(
            refValue.key,
          const indexDir = joinKey(
            getRecordInfo(hierarchy, refValue.key).dir,
            indexNode.name,
          );

          if (!includes(indexKey)(indexKeys)) { indexKeys.push(indexKey); }
          if (!includes(indexDir)(indexDirs)) { indexDirs.push(indexDir); }
        }
      }
      return indexKeys;
      return indexDirs;
    }

    return [joinKey(
    const indexKey = joinKey(
      getActualKeyOfParent(
        indexNode.parent().nodeKey(),
        t.record.key,
      ),
      indexNode.name,
    )];
    );

    return [getIndexDir(hierarchy, indexKey)];
  };

  return $(buildTransactions, [
    map((t) => {
      const mappedRecord = evaluate(t.record)(indexNode);
      if (!mappedRecord.passedFilter) return null;
      const indexKeys = getIndexKeys(t);
      return $(indexKeys, [
        map(indexKey => ({
      const indexDirs = getIndexDirs(t);
      return $(indexDirs, [
        map(indexDir => ({
          mappedRecord,
          indexNode,
          indexKey,
          indexDir,
          indexShardKey: getIndexedDataKey(
            indexNode,
            indexKey,
            indexDir,
            mappedRecord.result,
          ),
        })),

@ -286,10 +290,10 @@ const get_Create_Delete_TransactionsByShard = pred => (hierarchy, transactions)
    return ({
      mappedRecord,
      indexNode: n.indexNode,
      indexKey: n.indexKey,
      indexDir: n.indexDir,
      indexShardKey: getIndexedDataKey(
        n.indexNode,
        n.indexKey,
        n.indexDir,
        mappedRecord.result,
      ),
    });

@ -327,17 +331,17 @@ const diffReverseRefForUpdate = (appHierarchy, oldRecord, newRecord) => {
  );

  const unReferenced = differenceBy(
    i => i.indexKey,
    i => i.indexDir,
    oldIndexes, newIndexes,
  );

  const newlyReferenced = differenceBy(
    i => i.indexKey,
    i => i.indexDir,
    newIndexes, oldIndexes,
  );

  const notChanged = intersectionBy(
    i => i.indexKey,
    i => i.indexDir,
    newIndexes, oldIndexes,
  );
@ -1,5 +1,4 @@
import {setupApphierarchy, basicAppHierarchyCreator_WithFields,
  basicAppHierarchyCreator_WithFields_AndIndexes} from "./specHelpers";
import {setupApphierarchy, basicAppHierarchyCreator_WithFields} from "./specHelpers";
import {includes, union} from "lodash";
import {joinKey} from "../src/common";

@ -4,7 +4,7 @@ import { joinKey } from "../src/common";
import {some} from "lodash";
import {_deleteIndex} from "../src/indexApi/delete";
import {permission} from "../src/authApi/permissions";
import { getExactNodeForPath } from "../src/templateApi/hierarchy";
import { getExactNodeForKey } from "../src/templateApi/hierarchy";

describe("buildIndex > Global index", () => {

@ -213,7 +213,7 @@ describe("buildIndex > nested collection", () => {
  const indexKey = joinKey(customer.key, "invoice_index");
  await _deleteIndex(app, indexKey, false);

  const indexNode = getExactNodeForPath(appHierarchy.root)(indexKey);
  const indexNode = getExactNodeForKey(appHierarchy.root)(indexKey);
  await indexApi.buildIndex(indexNode.nodeKey());
  const indexItems = await indexApi.listItems(indexKey);

@ -269,7 +269,7 @@ describe("buildIndex > nested collection", () => {
  const indexKey = joinKey(customer.key, "invoice_index");
  await _deleteIndex(app, indexKey, false);

  const indexNode = getExactNodeForPath(appHierarchy.root)(indexKey);
  const indexNode = getExactNodeForKey(appHierarchy.root)(indexKey);
  await indexApi.buildIndex(
    indexNode.nodeKey());
  const indexItems = await indexApi.listItems(indexKey);

@ -5,7 +5,7 @@ import {getLockFileContent} from "../src/common/lock";
import {some, isArray} from "lodash";
import {cleanup} from "../src/transactions/cleanup";
import {LOCK_FILE_KEY} from "../src/transactions/transactionsCommon";
import { getRecordInfo } from "../src/recordApi/recordInfo";

describe("cleanup transactions", () => {

@ -96,7 +96,7 @@ describe("cleanup transactions", () => {

  it("should not reindex when transactionId does not match that of the record", async () => {

    const {recordApi, app,
    const {recordApi, app,
      indexApi} = await setupApphierarchy(basicAppHierarchyCreator_WithFields_AndIndexes, true);
    const record = recordApi.getNew("/customers", "customer");
    record.surname = "Ledog";

@ -109,8 +109,10 @@ describe("cleanup transactions", () => {
    await recordApi.save(savedRecord);

    savedRecord.transactionId = "something else";

    const recordInfo = getRecordInfo(app.hierarchy, savedRecord.key);
    await recordApi._storeHandle.updateJson(
      joinKey(savedRecord.key, "record.json"),
      recordInfo.child("record.json"),
      savedRecord);

    await cleanup(app);

@ -123,7 +125,7 @@ describe("cleanup transactions", () => {

  it("should not reindex when transactionId does not match that of the record, and has multiple transactions", async () => {

    const {recordApi, app,
    const {recordApi, app,
      indexApi} = await setupApphierarchy(basicAppHierarchyCreator_WithFields_AndIndexes, true);
    const record = recordApi.getNew("/customers", "customer");
    record.surname = "Ledog";

@ -139,8 +141,10 @@ describe("cleanup transactions", () => {
    await recordApi.save(savedRecord);

    savedRecord.transactionId = "something else";

    const recordInfo = getRecordInfo(app.hierarchy, savedRecord.key);
    await recordApi._storeHandle.updateJson(
      joinKey(savedRecord.key, "record.json"),
      recordInfo.child("record.json"),
      savedRecord);

    await cleanup(app);

@ -228,7 +232,7 @@ describe("cleanup transactions", () => {
    indexApi} = await setupApphierarchy(basicAppHierarchyCreator_WithFields_AndIndexes, true);
    const record = recordApi.getNew("/customers", "customer");
    record.surname = "Ledog";
    const savedRecord = await recordApi.save(record);
    await recordApi.save(record);
    const currentTime = await app.getEpochTime();
    await recordApi._storeHandle.createFile(
      LOCK_FILE_KEY,

@ -252,7 +256,7 @@ describe("cleanup transactions", () => {
    indexApi} = await setupApphierarchy(basicAppHierarchyCreator_WithFields_AndIndexes, true);
    const record = recordApi.getNew("/customers", "customer");
    record.surname = "Ledog";
    const savedRecord = await recordApi.save(record);
    await recordApi.save(record);
    await recordApi._storeHandle.createFile(
      LOCK_FILE_KEY,
      getLockFileContent(30000, (new Date(1990,1,1,0,0,0,0).getTime()))

@ -1,11 +1,14 @@
import {getMemoryTemplateApi,
  basicAppHierarchyCreator_WithFields,
  setupApphierarchy,
  basicAppHierarchyCreator_WithFields_AndIndexes} from "./specHelpers";
import {getRelevantReverseReferenceIndexes,
  getRelevantAncestorIndexes} from "../src/indexing/relevant";
import {
  setupApphierarchy,
  basicAppHierarchyCreator_WithFields_AndIndexes
} from "./specHelpers";
import {
  getRelevantReverseReferenceIndexes,
  getRelevantAncestorIndexes
} from "../src/indexing/relevant";
import {some} from "lodash";
import {joinKey} from "../src/common";
import { getRecordInfo } from "../src/recordApi/recordInfo";

describe("getRelevantIndexes", () => {

@ -45,7 +48,7 @@ describe("getRelevantIndexes", () => {
  expect(indexes.length).toBe(4);

  const indexExists = key =>
    some(indexes, c => c.indexKey === key);
    some(indexes, c => c.indexDir === key);

  expect(indexExists("/customer_index")).toBeTruthy();
  expect(indexExists("/deceased")).toBeTruthy();

@ -64,7 +67,7 @@ describe("getRelevantIndexes", () => {
    appHierarchy.root, invoice);

  const indexExists = key =>
    some(indexes, c => c.indexKey === key);
    some(indexes, c => c.indexDir === key);

  expect(indexExists("/customersBySurname")).toBeFalsy();
});

@ -82,7 +85,7 @@ describe("getRelevantIndexes", () => {
  expect(indexes.length).toBe(4);

  const indexExists = key =>
    some(indexes, c => c.indexKey === key);
    some(indexes, c => c.indexDir === key);

  expect(indexExists("/customersBySurname")).toBeTruthy();
});

@ -96,14 +99,14 @@ describe("getRelevantIndexes", () => {

  const indexes = getRelevantAncestorIndexes(
    appHierarchy.root, invoice);

  const {dir} = getRecordInfo(appHierarchy.root, `/customers/${nodeid}-1234`);
  expect(indexes.length).toBe(4);
  expect(some(indexes, i => i.indexKey === `/customer_invoices`)).toBeTruthy();
  expect(some(indexes, i => i.indexKey === `/customers/${nodeid}-1234/invoice_index`)).toBeTruthy();
  expect(some(indexes, i => i.indexDir === `/customer_invoices`)).toBeTruthy();
  expect(some(indexes, i => i.indexDir === `${dir}/invoice_index`)).toBeTruthy();
});

it("should get reverseReferenceIndex accross hierarchy branches", async () => {
  const {appHierarchy,
  const {appHierarchy,
    recordApi} = await setupApphierarchy(basicAppHierarchyCreator_WithFields_AndIndexes);

  const partner = recordApi.getNew("/partners", "partner");

@ -118,8 +121,9 @@ describe("getRelevantIndexes", () => {
  const indexes = getRelevantReverseReferenceIndexes(
    appHierarchy.root, customer);
  expect(indexes.length).toBe(1);
  expect(indexes[0].indexKey)
    .toBe(joinKey(partner.key, appHierarchy.partnerCustomersReverseIndex.name));
  const partnerdir = getRecordInfo(appHierarchy.root, partner.key).dir;
  expect(indexes[0].indexDir)
    .toBe(joinKey(partnerdir, appHierarchy.partnerCustomersReverseIndex.name));

});

@ -136,8 +140,11 @@ describe("getRelevantIndexes", () => {

  const indexes = getRelevantReverseReferenceIndexes(
    appHierarchy.root, referredToCustomer);

  const referredByCustomerDir = getRecordInfo(appHierarchy.root, referredByCustomer.key).dir;

  expect(indexes.length).toBe(1);
  expect(indexes[0].indexKey)
    .toBe(joinKey(referredByCustomer.key, appHierarchy.referredToCustomersReverseIndex.name));
  expect(indexes[0].indexDir)
    .toBe(joinKey(referredByCustomerDir, appHierarchy.referredToCustomersReverseIndex.name));
});
});

@ -1,5 +1,5 @@
import {generateSchema} from "../src/indexing/indexSchemaCreator";
import {setupApphierarchy, findCollectionDefaultIndex} from "./specHelpers";
import {setupApphierarchy} from "./specHelpers";
import {find} from "lodash";
import {indexTypes} from "../src/templateApi/indexes";

@ -21,17 +21,6 @@ describe("initialiseData", () => {
  expect(await datastore.exists(`/customers`)).toBeTruthy();
});

it("should create allids folders", async () => {
  const {appDef, datastore, h} = getApplicationDefinition();
  await initialiseData(datastore, appDef);

  const allIdsTypeFolder = "/customers/allids/" + h.customerRecord.nodeId;
  const allIdsFolder = "/customers/allids";
  expect(await datastore.exists(allIdsTypeFolder)).toBeTruthy();
  expect(await datastore.exists(allIdsFolder)).toBeTruthy();
});

it("should create transactions folder", async () => {
  const {appDef, datastore} = getApplicationDefinition();
  await initialiseData(datastore, appDef);
@ -2,11 +2,16 @@ import {isUndefined, has} from "lodash";
import {take} from "lodash/fp";
import {Readable, Writable} from "readable-stream";
import { Buffer } from "safe-buffer";
import {splitKey, joinKey, $} from "../src/common";
import {splitKey, joinKey, $, keySep, getFileFromKey} from "../src/common";
import {getLastPartInKey} from "../src/templateApi/hierarchy";

const folderMarker = "OH-YES-ITSA-FOLDER-";
const isFolder = val => val.includes(folderMarker);
const isFolder = val => {
  if(isUndefined(val)) {
    throw new Error("Passed undefined value for folder");
  }
  return val.includes(folderMarker);
}

const getParentFolderKey = key =>
  $(key, [

@ -16,7 +21,9 @@ const getParentFolderKey = key =>
  ]);

const getParentFolder = (data,key) => {
  if(key === keySep) return null;
  const parentKey = getParentFolderKey(key);
  if(parentKey === keySep) return null;
  if(data[parentKey] === undefined)
    throw new Error("Parent folder for " + key + " does not exist (" + parentKey + ")");
  return JSON.parse(data[parentKey]);

@ -39,12 +46,18 @@ export const createFile = data => async (path, content) => {
};
export const updateFile = data => async (path, content) => {
  // putting this check in to force use of create
  if(!await exists(data)(path)) throw new Error("cannot update " + path + " - does not exist");
  if(!await exists(data)(path)) {
    throw new Error("cannot update " + path + " - does not exist");
  }
  data[path] = content;
}

export const writableFileStream = data => async (path) => {
  //if(!await exists(data)(path)) throw new Error("cannot write stream to " + path + " - does not exist");
  if(!getParentFolder(data, path)) {
    throw new Error("Parent folder for " + path + " does not exist");
  }

  const stream = Writable();
  stream._write = (chunk, encoding, done) => {
    data[path] = data[path] === undefined

@ -52,6 +65,9 @@ export const writableFileStream = data => async (path) => {
    data[path] = [...data[path], ...chunk];
    done();
  };

  addItemToParentFolder(data, path);

  return stream;
};

@ -77,11 +93,19 @@ export const renameFile = data => async (oldKey, newKey) => {
  if(await exists(data)(newKey)) throw new Error("cannot rename path: " + newKey + " ... already exists");
  data[newKey] = data[oldKey];
  delete data[oldKey];

  const parent = getParentFolder(data, newKey);
  const oldFileName = getFileFromKey(oldKey);
  const newFileName = getFileFromKey(newKey);
  parent.items = [...parent.items.filter(i => i !== oldFileName), newFileName];
  data[getParentFolderKey(newKey)] = JSON.stringify(parent);
};

export const loadFile = data => async (path) => {
  const result = data[path];
  if(isUndefined(result)) throw new Error("Load failed - path " + path + " does not exist");
  if(isUndefined(result)) {
    throw new Error("Load failed - path " + path + " does not exist");
  }
  return result;
};
export const exists = data => async (path) => has(data, path);

@ -95,7 +119,8 @@ export const deleteFile = data => async (path) => {
  delete data[path];
}
export const createFolder = data => async (path) => {
  if(await exists(data)(path)) throw new Error("Cannot create folder, path " + path + " already exists");
  if(await exists(data)(path))
    throw new Error("Cannot create folder, path " + path + " already exists");
  addItemToParentFolder(data, path);
  data[path] = JSON.stringify({folderMarker, items:[]});
}

@ -103,14 +128,30 @@ export const deleteFolder = data => async (path) => {
  if(!await exists(data)(path)) throw new Error("Cannot delete folder, path " + path + " does not exist");
  if(!isFolder(data[path]))
    throw new Error("DeleteFolder: Path " + path + " is not a folder");

  for(let item of JSON.parse(data[path]).items) {
    const fullItemPath = `${path}/${item}`;
    if(isFolder(data[fullItemPath])) {
      await deleteFolder(data)(fullItemPath);
    } else {
      await deleteFile(data)(fullItemPath);
    }
  }

  const parent = getParentFolder(data, path);
  if(parent) {
    parent.items = parent.items.filter(f => f !== getLastPartInKey(path));
    data[getParentFolderKey(path)] = JSON.stringify(parent);
  }

  delete data[path];
}

export const getFolderContents = data => async (folderPath) => {
  if(!isFolder(data[folderPath]))
    throw new Error("Not a folder: " + folderPath);
export const getFolderContents = data => async (folderPath) => {
  if(!await exists(data)(folderPath))
    throw new Error("Folder does not exist: " + folderPath);
  if(!isFolder(data[folderPath]))
    throw new Error("Not a folder: " + folderPath);
  return JSON.parse(data[folderPath]).items;
};
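The in-memory test datastore above models folders as JSON strings stored under their own key, carrying a marker string and an items list, while files are stored as raw content. A minimal sketch of that layout (illustrative only, operating on the data object directly rather than through the exported functions):

// folders are JSON blobs under their key; files are plain values
const folderMarker = "OH-YES-ITSA-FOLDER-";
const data = {};

data["/customers"] = JSON.stringify({ folderMarker, items: [] });

// creating a file means writing its content and appending its name
// to the parent folder's items list
const parent = JSON.parse(data["/customers"]);
parent.items.push("record.json");
data["/customers"] = JSON.stringify(parent);
data["/customers/record.json"] = "{\"surname\":\"Ledog\"}";

console.log(JSON.parse(data["/customers"]).items); // ["record.json"]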
@ -1,8 +1,5 @@
import {setupApphierarchy,
  basicAppHierarchyCreator_WithFields} from "./specHelpers";
import {keys, filter} from "lodash/fp";
import {$} from "../src/common";
import {permission} from "../src/authApi/permissions";
import {Readable} from "readable-stream";

@ -0,0 +1,93 @@
import {setupApphierarchy, basicAppHierarchyCreator_WithFields,
  getNewFieldAndAdd} from "./specHelpers";
import {isNonEmptyString} from "../src/common";
import { isBoolean } from "util";
import {permission} from "../src/authApi/permissions";
import { _getNew } from "../src/recordApi/getNew";

describe("recordApi > getNew", () => {

  it("should get object with generated id and key (full path)", async () => {
    const {recordApi} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);
    const record = recordApi.getNew("/customers", "customer");

    expect(record.id).toBeDefined();
    expect(isNonEmptyString(record.id)).toBeTruthy();

    expect(record.key).toBeDefined();
    expect(isNonEmptyString(record.key)).toBeTruthy();
    expect(record.key).toBe(`/customers/${record.id}`);
  });

  it("should create object with all declared fields, using default values", async () => {
    const {recordApi} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);

    const newRecord = recordApi.getNew("/customers", "customer")

    expect(newRecord.surname).toBe(null);
    expect(newRecord.isalive).toBe(true);
    expect(newRecord.createddate).toBe(null);
    expect(newRecord.age).toBe(null);
  });

  it("should create object with all declared fields, and use inital values", async () => {
    const {recordApi} = await setupApphierarchy(templateApi => {
      const hierarchy = basicAppHierarchyCreator_WithFields(templateApi);
      const {customerRecord} = hierarchy;

      customerRecord.fields = [];

      const newField = getNewFieldAndAdd(templateApi, customerRecord);
      newField("surname", "string", "hello");
      newField("isalive", "bool", "true");
      newField("age", "number", "999");

      return hierarchy;
    });

    const newRecord = recordApi.getNew("/customers", "customer")

    expect(newRecord.surname).toBe("hello");
    expect(newRecord.isalive).toBe(true);
    expect(newRecord.age).toBe(999);
  });

  it("should add a function 'isNew' which always returns true", async () => {
    const {recordApi} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);
    const record = recordApi.getNew("/customers", "customer");

    expect(record.isNew).toBeDefined();
    expect(isBoolean(record.isNew)).toBeTruthy();
    expect(record.isNew).toBeTruthy();
  });

  it("should add a function 'type' returns type", async () => {
    const {recordApi} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);
    const record = recordApi.getNew("/customers", "customer");

    expect(record.type).toBeDefined();
    expect(isNonEmptyString(record.type)).toBeTruthy();
    expect(record.type).toBe("customer");
  });

  it("should throw error, user user does not have permission", async () => {
    const {recordApi, app, appHierarchy} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);
    app.removePermission(permission.createRecord.get(appHierarchy.customerRecord.nodeKey()));
    expect(() => recordApi.getNew("/customers", "customer")).toThrow(/Unauthorized/);
  });

  it("should not depend on having any other permissions", async () => {
    const {recordApi, app, appHierarchy} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);
    app.withOnlyThisPermission(permission.createRecord.get(appHierarchy.customerRecord.nodeKey()));
    recordApi.getNew("/customers", "customer");
  });

  it("for 'single record' type, should create with key ending in node name", async () => {
    const {appHierarchy} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);
    const {settingsRecord} = appHierarchy;
    const result = _getNew(settingsRecord, "");
    expect(result.key).toBe("/settings")
  })
});
@ -0,0 +1,124 @@
import { folderStructureArray } from "../src/indexing/allIds";
import { getRecordInfo } from "../src/recordApi/recordInfo";
import {setupApphierarchy} from "./specHelpers";

describe("getRecordInfo", () => {

  it("dir should not be sharded when record count = 1000", async () => {
    const {root} = (await setup({parentCount: 1000})).appHierarchy;
    const {dir} = getRecordInfo(root, "/parents/1-abcd");
    expect(dir).toBe("/parents/1/1-abcd");
  });

  it("dir should be sharded when record count = 1001", async () => {
    const {root} = (await setup({parentCount: 1001})).appHierarchy;
    const {dir} = getRecordInfo(root, "/parents/1-abcd");
    expect(dir).toBe("/parents/1/0123456789abcdefghijklmnopqrstuv/1-abcd");
  });

  it("dir should be sharded to one char per folder when record count = 64,000 (64*1000)", async () => {
    const {root} = (await setup({parentCount: 64000})).appHierarchy;
    const {dir} = getRecordInfo(root, "/parents/1-abcd");
    expect(dir).toBe("/parents/1/a/1-abcd");
  });

  it("dir should be sharded to one char per folder, on 2 levels when record count = 4096000 (64*64*1000)", async () => {
    const {root} = (await setup({parentCount: 4096000})).appHierarchy;
    const {dir} = getRecordInfo(root, "/parents/1-abcd");
    expect(dir).toBe("/parents/1/a/b/1-abcd");
  });

  it("child dir should not be sharded when record count = 1000", async () => {
    const {root, child} = (await setup({parentCount: 4096000, childCount: 1000})).appHierarchy;
    const {dir} = getRecordInfo(root, `/parents/1-abcd/children/${child.nodeId}-defg`);
    expect(dir).toBe(`/parents/1/a/b/1-abcd/children/${child.nodeId}/${child.nodeId}-defg`);
  });

  it("grandchild dir should not be sharded when record count = 1000", async () => {
    const {root, child, grandchild} = (await setup({parentCount: 4096000, childCount: 4096000})).appHierarchy;
    const {dir} = getRecordInfo(root, `/parents/1-abcd/children/${child.nodeId}-defg/grandchildren/${grandchild.nodeId}-hijk`);
    expect(dir).toBe(`/parents/1/a/b/1-abcd/children/${child.nodeId}/d/e/${child.nodeId}-defg/grandchildren/${grandchild.nodeId}/${grandchild.nodeId}-hijk`);
  });

  it("grandchild dir should be sharded when record count = 4096000", async () => {
    const {root, child, grandchild} = (await setup({parentCount: 4096000, childCount: 4096000, grandChildCount: 4096000})).appHierarchy;
    const {dir} = getRecordInfo(root, `/parents/1-abcd/children/${child.nodeId}-defg/grandchildren/${grandchild.nodeId}-hijk`);
    expect(dir).toBe(`/parents/1/a/b/1-abcd/children/${child.nodeId}/d/e/${child.nodeId}-defg/grandchildren/${grandchild.nodeId}/h/i/${grandchild.nodeId}-hijk`);
  });

  it("child levels can be sharded, with parent not", async () => {
    const {root, child, grandchild} = (await setup({parentCount: 1000, childCount: 4096000, grandChildCount: 4096000})).appHierarchy;
    const {dir} = getRecordInfo(root, `/parents/1-abcd/children/${child.nodeId}-defg/grandchildren/${grandchild.nodeId}-hijk`);
    expect(dir).toBe(`/parents/1/1-abcd/children/${child.nodeId}/d/e/${child.nodeId}-defg/grandchildren/${grandchild.nodeId}/h/i/${grandchild.nodeId}-hijk`);
  });

});

describe("folderStructureArray", () => {

  const recordNode = (count) => ({estimatedRecordCount: count});

  it("should return [] when folder count < 1000", () => {
    const result = folderStructureArray(recordNode(999));
    expect(result).toEqual([]);
  });

  it("should return [4] when folder count between 3000 - 4000", () => {
    const result = folderStructureArray(recordNode(3456));
    expect(result).toEqual([4]);
  })

  it("should return [64, 2] when folder count between 64000 - 65000", () => {
    const result = folderStructureArray(recordNode(64001));
    expect(result).toEqual([64, 2]);
  })

  it("should return [64, 64] when folder = 4095999", () => {
    const result = folderStructureArray(recordNode(4095999));
    expect(result).toEqual([64, 64]);
  });

  it("should return [64, 64] when folder = 4096000", () => {
    const result = folderStructureArray(recordNode(4096000));
    expect(result).toEqual([64, 64]);
  });

  it("should return [64, 64, 2] when folder = 4096001", () => {
    const result = folderStructureArray(recordNode(4096001));
    expect(result).toEqual([64, 64, 2]);
  });

});

const setup = ({parentCount, childCount, grandChildCount}) =>
  setupApphierarchy((templateApi) => {

    const root = templateApi.getNewRootLevel();

    const addField = (recordNode) => {
      const field = templateApi.getNewField("string");
      field.name = "test";
      templateApi.addField(recordNode, field);
      return field;
    };

    const parent = templateApi.getNewRecordTemplate(root, "parent");
    parent.estimatedRecordCount = parentCount || 1000;
    parent.collectionName = "parents";
    addField(parent);
    const child = templateApi.getNewRecordTemplate(parent, "child");
    child.estimatedRecordCount = childCount || 1000;
    child.collectionName = "children";
    addField(child);
    const grandchild = templateApi.getNewRecordTemplate(child, "grandchild");
    grandchild.estimatedRecordCount = grandChildCount || 1000;
    grandchild.collectionName = "grandchildren";
    addField(grandchild);

    return ({
      parent, child, grandchild, root
    });
  });
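The expectations above pin down the sharding math: with a target of roughly 1000 records per leaf folder and a 64-way fan-out per level, each entry in the array is one level's folder count. The following is a sketch that reproduces those expectations under that assumption; it is not the module's actual implementation of folderStructureArray:

// sketch (assumption): 1000 records per leaf folder, 64-way sharding per level
const folderStructureArraySketch = ({ estimatedRecordCount }) => {
  let folders = Math.ceil(estimatedRecordCount / 1000);
  const structure = [];
  while (folders > 1) {
    structure.push(Math.min(folders, 64)); // this level's fan-out
    folders = Math.ceil(folders / 64);     // folders still to distribute
  }
  return structure;
};

console.log(folderStructureArraySketch({ estimatedRecordCount: 64001 }));
// -> [64, 2] : 65 leaf folders need one full 64-way level plus a 2-way level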
@ -1,94 +1,8 @@
import {setupApphierarchy, basicAppHierarchyCreator_WithFields,
  getNewFieldAndAdd, stubEventHandler} from "./specHelpers";
  stubEventHandler} from "./specHelpers";
import {events, isNonEmptyString} from "../src/common";
import { isBoolean } from "util";
import {permission} from "../src/authApi/permissions";
import { _getNew } from "../src/recordApi/getNew";

describe("recordApi > getNew", () => {

  it("should get object with generated id and key (full path)", async () => {
    const {recordApi} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);
    const record = recordApi.getNew("/customers", "customer");

    expect(record.id).toBeDefined();
    expect(isNonEmptyString(record.id)).toBeTruthy();

    expect(record.key).toBeDefined();
    expect(isNonEmptyString(record.key)).toBeTruthy();
    expect(record.key).toBe(`/customers/${record.id}`);
  });

  it("should create object with all declared fields, using default values", async () => {
    const {recordApi} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);

    const newRecord = recordApi.getNew("/customers", "customer")

    expect(newRecord.surname).toBe(null);
    expect(newRecord.isalive).toBe(true);
    expect(newRecord.createddate).toBe(null);
    expect(newRecord.age).toBe(null);
  });

  it("should create object with all declared fields, and use inital values", async () => {
    const {recordApi} = await setupApphierarchy(templateApi => {
      const hierarchy = basicAppHierarchyCreator_WithFields(templateApi);
      const {customerRecord} = hierarchy;

      customerRecord.fields = [];

      const newField = getNewFieldAndAdd(templateApi, customerRecord);
      newField("surname", "string", "hello");
      newField("isalive", "bool", "true");
      newField("age", "number", "999");

      return hierarchy;
    });

    const newRecord = recordApi.getNew("/customers", "customer")

    expect(newRecord.surname).toBe("hello");
    expect(newRecord.isalive).toBe(true);
    expect(newRecord.age).toBe(999);
  });

  it("should add a function 'isNew' which always returns true", async () => {
    const {recordApi} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);
    const record = recordApi.getNew("/customers", "customer");

    expect(record.isNew).toBeDefined();
    expect(isBoolean(record.isNew)).toBeTruthy();
    expect(record.isNew).toBeTruthy();
  });

  it("should add a function 'type' returns type", async () => {
    const {recordApi} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);
    const record = recordApi.getNew("/customers", "customer");

    expect(record.type).toBeDefined();
    expect(isNonEmptyString(record.type)).toBeTruthy();
    expect(record.type).toBe("customer");
  });

  it("should throw error, user user does not have permission", async () => {
    const {recordApi, app, appHierarchy} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);
    app.removePermission(permission.createRecord.get(appHierarchy.customerRecord.nodeKey()));
    expect(() => recordApi.getNew("/customers", "customer")).toThrow(/Unauthorized/);
  });

  it("should not depend on having any other permissions", async () => {
    const {recordApi, app, appHierarchy} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);
    app.withOnlyThisPermission(permission.createRecord.get(appHierarchy.customerRecord.nodeKey()));
    recordApi.getNew("/customers", "customer");
  });

  it("for 'single record' type, should create with key ending in node name", async () => {
    const {appHierarchy} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);
    const {settingsRecord} = appHierarchy;
    const result = _getNew(settingsRecord, "");
    expect(result.key).toBe("/settings")
  })
});
import { getRecordInfo } from "../src/recordApi/recordInfo";

describe('recordApi > save then load', () => {

@ -288,48 +202,41 @@ describe("save", () => {
});

it("should create folder and index for subcollection", async () => {
  const {recordApi} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);
  const record = recordApi.getNew("/customers", "customer");
  record.surname = "Ledog";

  const savedRecord = await recordApi.save(record);
  expect(await recordApi._storeHandle.exists(`${record.key}/invoice_index/index.csv`)).toBeTruthy()
  expect(await recordApi._storeHandle.exists(`${record.key}/invoice_index`)).toBeTruthy()
  expect(await recordApi._storeHandle.exists(`${record.key}/invoices`)).toBeTruthy()
});

it("should create index folder and shardMap for sharded reverse reference index", async () => {
  const {recordApi} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);
  const record = recordApi.getNew("/customers", "customer");
  record.surname = "Ledog";

  await recordApi.save(record);
  expect(await recordApi._storeHandle.exists(`${record.key}/referredToCustomers/shardMap.json`)).toBeTruthy();
  expect(await recordApi._storeHandle.exists(`${record.key}/referredToCustomers`)).toBeTruthy();
});

it("should create folder for record", async () => {
  const {recordApi} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);
  const record = recordApi.getNew("/customers", "customer");
  record.surname = "Ledog";

  const savedRecord = await recordApi.save(record);
  expect(await recordApi._storeHandle.exists(`${record.key}`)).toBeTruthy();
  expect(await recordApi._storeHandle.exists(`${record.key}/record.json`)).toBeTruthy();
});

it("should create allids file", async () => {
  const {recordApi, appHierarchy} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);
  const record = recordApi.getNew("/customers", "customer");
  record.surname = "Ledog";

  await recordApi.save(record);

  const allIdsPath = `/customers/allids/${appHierarchy.customerRecord.nodeId}/${record.id[2]}`;
  expect(await recordApi._storeHandle.exists(allIdsPath)).toBeTruthy();

  const recordDir = getRecordInfo(appHierarchy.root, record.key).dir;
  expect(await recordApi._storeHandle.exists(`${recordDir}/invoice_index/index.csv`)).toBeTruthy()
  expect(await recordApi._storeHandle.exists(`${recordDir}/invoice_index`)).toBeTruthy()
  expect(await recordApi._storeHandle.exists(`${recordDir}/invoices`)).toBeTruthy()
});

it("should create index folder and shardMap for sharded reverse reference index", async () => {
  const {recordApi, appHierarchy} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);
  const record = recordApi.getNew("/customers", "customer");
  record.surname = "Ledog";

  await recordApi.save(record);
  const recordDir = getRecordInfo(appHierarchy.root, record.key).dir;
  expect(await recordApi._storeHandle.exists(`${recordDir}/referredToCustomers/shardMap.json`)).toBeTruthy();
  expect(await recordApi._storeHandle.exists(`${recordDir}/referredToCustomers`)).toBeTruthy();
});

it("should create folder for record", async () => {
  const {recordApi, appHierarchy} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);
  const record = recordApi.getNew("/customers", "customer");
  record.surname = "Ledog";

  await recordApi.save(record);
  const recordDir = getRecordInfo(appHierarchy.root, record.key).dir;

  expect(await recordApi._storeHandle.exists(`${recordDir}`)).toBeTruthy();
  expect(await recordApi._storeHandle.exists(`${recordDir}/record.json`)).toBeTruthy();
});

it("create should throw error, user user does not have permission", async () => {
  const {recordApi, app, appHierarchy} = await setupApphierarchy(basicAppHierarchyCreator_WithFields);
  const record = recordApi.getNew("/customers", "customer");
@ -22,12 +22,10 @@ import {permission} from "../src/authApi/permissions";
import {generateFullPermissions} from "../src/authApi/generateFullPermissions"
import {initialiseData} from "../src/appInitialise/initialiseData";

const exp = module.exports;

export const testFileArea = (testNameArea) => path.join("test", "fs_test_area", testNameArea);
export const testConfigFolder = (testAreaName) => path.join(exp.testFileArea(testAreaName), configFolder);
export const testFieldDefinitionsPath = (testAreaName) => path.join(exp.testFileArea(testAreaName), fieldDefinitions);
export const testTemplatesPath = (testAreaName) => path.join(exp.testFileArea(testAreaName), templateDefinitions);
export const testConfigFolder = (testAreaName) => path.join(testFileArea(testAreaName), configFolder);
export const testFieldDefinitionsPath = (testAreaName) => path.join(testFileArea(testAreaName), fieldDefinitions);
export const testTemplatesPath = (testAreaName) => path.join(testFileArea(testAreaName), templateDefinitions);

export const getMemoryStore = () => setupDatastore(memory({}));
export const getMemoryTemplateApi = () => {
@ -27,7 +27,7 @@ describe("hierarchy node creation", () => {
  expect(record.indexes).toEqual([]);
  expect(record.parent()).toBe(root);
  expect(record.collectionName).toBe("");
  expect(record.allidsShardFactor).toBe(64);
  expect(record.estimatedRecordCount).toBe(1000000);
  expect(record.isSingle).toBe(false);

  record.collectionName = "records";
File diff suppressed because one or more lines are too long

@ -131,6 +131,29 @@
    lodash "^4.17.13"
    to-fast-properties "^2.0.0"

"@budibase/client@^0.0.15":
  version "0.0.15"
  resolved "https://registry.yarnpkg.com/@budibase/client/-/client-0.0.15.tgz#4bf7af751802a5703e72176ba2b7648f6983931f"
  integrity sha512-D+r0vrKaxjUITi+4BNpn06aQwrhazYLLyt4yAVBkPjabJwS6DRVYgGYElXkDrgqqHnst6jRAiZVhlbJDq9CHTQ==
  dependencies:
    "@nx-js/compiler-util" "^2.0.0"
    lodash "^4.17.15"
    lunr "^2.3.5"
    shortid "^2.2.8"
    svelte "^3.9.2"

"@budibase/core@^0.0.15":
  version "0.0.15"
  resolved "https://registry.yarnpkg.com/@budibase/core/-/core-0.0.15.tgz#aab510246804c59085de588cb1ff0cc116306204"
  integrity sha512-TV0oCCTJww3BEDAN5y/GAe0aLmekCqt4/adBLaeZ8z9fyZwPTZZ1ggoyy4DTSmljO4fABAoFnCHEC7dLNyAQRg==
  dependencies:
    "@nx-js/compiler-util" "^2.0.0"
    date-fns "^1.29.0"
    lodash "^4.17.13"
    lunr "^2.3.5"
    safe-buffer "^5.1.2"
    shortid "^2.2.8"

"@cnakazawa/watch@^1.0.3":
  version "1.0.3"
  resolved "https://registry.yarnpkg.com/@cnakazawa/watch/-/watch-1.0.3.tgz#099139eaec7ebf07a27c1786a3ff64f39464d2ef"

@ -299,6 +322,11 @@
    path-to-regexp "^1.1.1"
    urijs "^1.19.0"

"@nx-js/compiler-util@^2.0.0":
  version "2.0.0"
  resolved "https://registry.yarnpkg.com/@nx-js/compiler-util/-/compiler-util-2.0.0.tgz#c74c12165fa2f017a292bb79af007e8fce0af297"
  integrity sha512-AxSQbwj9zqt8DYPZ6LwZdytqnwfiOEdcFdq4l8sdjkZmU2clTht7RDLCI8xvkp7KqgcNaOGlTeCM55TULWruyQ==

"@phc/format@^0.5.0":
  version "0.5.0"
  resolved "https://registry.yarnpkg.com/@phc/format/-/format-0.5.0.tgz#a99d27a83d78b3100a191412adda04315e2e3aba"

@ -845,10 +873,10 @@ combined-stream@^1.0.6, combined-stream@~1.0.6:
  dependencies:
    delayed-stream "~1.0.0"

commander@~2.20.0:
  version "2.20.0"
  resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.0.tgz#d58bb2b5c1ee8f87b0d340027e9e94e222c5a422"
  integrity sha512-7j2y+40w61zy6YC2iRNpUe/NwhNyoXrYpHMrSunaMG64nRnaf96zO/KMQR4OyN/UnE5KLyEBnKHd4aG3rskjpQ==
commander@~2.20.3:
  version "2.20.3"
  resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33"
  integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==

component-emitter@^1.2.0, component-emitter@^1.2.1:
  version "1.3.0"

@ -953,6 +981,11 @@ data-urls@^1.0.0:
    whatwg-mimetype "^2.2.0"
    whatwg-url "^7.0.0"

date-fns@^1.29.0:
  version "1.30.1"
  resolved "https://registry.yarnpkg.com/date-fns/-/date-fns-1.30.1.tgz#2e71bf0b119153dbb4cc4e88d9ea5acfb50dc05c"
  integrity sha512-hBSVCvSmWC+QypYObzwGOd9wqdDpOt+0wl0KbU+R+uuZBS1jN8VsD1ss3irQDknRj5NvxiTF6oj/nDRnN/UQNw==

debug@^2.2.0, debug@^2.3.3:
  version "2.6.9"
  resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"

@ -1453,9 +1486,9 @@ growly@^1.3.0:
  integrity sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE=

handlebars@^4.1.2:
  version "4.1.2"
  resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.1.2.tgz#b6b37c1ced0306b221e094fc7aca3ec23b131b67"
  integrity sha512-nvfrjqvt9xQ8Z/w0ijewdD/vvWDTOweBUm96NTr66Wfvo1mJenBLwcYmPs3TIBP5ruzYGD7Hx/DaM9RmhroGPw==
  version "4.5.3"
  resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.5.3.tgz#5cf75bd8714f7605713511a56be7c349becb0482"
  integrity sha512-3yPecJoJHK/4c6aZhSvxOyG4vJKDshV36VHp0iVCDVh7o9w2vwi3NSnL2MMPj3YdduqaBcu7cGbggJQM0br9xA==
  dependencies:
    neo-async "^2.6.0"
    optimist "^0.6.1"

@ -2504,7 +2537,7 @@ lodash.sortby@^4.7.0:
  resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438"
  integrity sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=

lodash@^4.17.11, lodash@^4.17.13:
lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.15:
  version "4.17.15"
  resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548"
  integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==

@ -2516,6 +2549,11 @@ loose-envify@^1.0.0:
  dependencies:
    js-tokens "^3.0.0 || ^4.0.0"

lunr@^2.3.5:
  version "2.3.8"
  resolved "https://registry.yarnpkg.com/lunr/-/lunr-2.3.8.tgz#a8b89c31f30b5a044b97d2d28e2da191b6ba2072"
  integrity sha512-oxMeX/Y35PNFuZoHp+jUj5OSEmLCaIH4KTFJh7a93cHBoFmpw2IoPs22VIz7vyO2YUnx2Tn9dzIwO2P/4quIRg==

make-dir@^2.1.0:
  version "2.1.0"
  resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5"

@ -2670,6 +2708,11 @@ nan@^2.12.1:
  resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c"
  integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==

nanoid@^2.1.0:
  version "2.1.8"
  resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-2.1.8.tgz#2dbb0224231b246e3b4c819de7bfea6384dabf08"
  integrity sha512-g1z+n5s26w0TGKh7gjn7HCqurNKMZWzH08elXzh/gM/csQHd/UqDV6uxMghQYg9IvqRPm1QpeMk50YMofHvEjQ==

nanomatch@^1.2.9:
  version "1.2.13"
  resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119"

@ -3423,6 +3466,13 @@ shellwords@^0.1.1:
  resolved "https://registry.yarnpkg.com/shellwords/-/shellwords-0.1.1.tgz#d6b9181c1a48d397324c84871efbcfc73fc0654b"
  integrity sha512-vFwSUfQvqybiICwZY5+DAWIPLKsWO31Q91JSKl3UYv+K5c2QRPzn0qzec6QPu1Qc9eHYItiP3NdJqNVqetYAww==

shortid@^2.2.8:
  version "2.2.15"
  resolved "https://registry.yarnpkg.com/shortid/-/shortid-2.2.15.tgz#2b902eaa93a69b11120373cd42a1f1fe4437c122"
  integrity sha512-5EaCy2mx2Jgc/Fdn9uuDuNIIfWBpzY4XIlhoqtXF6qsf+/+SGZ+FxDdX/ZsMZiWupIWNqAEmiNY4RC+LSmCeOw==
  dependencies:
    nanoid "^2.1.0"

signal-exit@^3.0.0, signal-exit@^3.0.2:
  version "3.0.2"
  resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d"

@ -3700,6 +3750,11 @@ supports-color@^6.1.0:
  dependencies:
    has-flag "^3.0.0"

svelte@^3.9.2:
  version "3.16.7"
  resolved "https://registry.yarnpkg.com/svelte/-/svelte-3.16.7.tgz#9ade80a4bbbac95595c676dd817222f632fa2c07"
  integrity sha512-egrva1UklB1n7KAv179IhDpQzMGAvubJUlOQ9PitmmZmAfrCUEgrQnx2vPxn2s+mGV3aYegXvJ/yQ35N2SfnYQ==

symbol-tree@^3.2.2:
  version "3.2.4"
  resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2"

@ -3864,11 +3919,11 @@ type-is@^1.6.14, type-is@^1.6.16:
    mime-types "~2.1.24"

uglify-js@^3.1.4:
  version "3.6.0"
  resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.6.0.tgz#704681345c53a8b2079fb6cec294b05ead242ff5"
  integrity sha512-W+jrUHJr3DXKhrsS7NUVxn3zqMOFn0hL/Ei6v0anCIMoKC93TjcflTagwIHLW7SfMFfiQuktQyFVCFHGUE0+yg==
  version "3.7.3"
  resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.7.3.tgz#f918fce9182f466d5140f24bb0ff35c2d32dcc6a"
  integrity sha512-7tINm46/3puUA4hCkKYo4Xdts+JDaVC9ZPRcG8Xw9R4nhO/gZgUM3TENq8IF4Vatk8qCig4MzP/c8G4u2BkVQg==
  dependencies:
    commander "~2.20.0"
    commander "~2.20.3"
    source-map "~0.6.1"

union-value@^1.0.0: