mono repo

michael shanks 2019-06-07 14:18:10 +01:00
parent 04eef6ab4d
commit 2fc0bc7156
38 changed files with 3611 additions and 3637 deletions

.gitmodules vendored Normal file
View File

@@ -0,0 +1,3 @@
[submodule "budibase-core"]
path = packages/core
url = git@github.com:Budibase/budibase-core.git

View File

@@ -0,0 +1,35 @@
commit 78a241d11c55ae717ba844c3d853e594d805e0a2 (HEAD -> master)
Author: michael shanks <mike@budibase.com>
Date: Fri Jun 7 14:11:23 2019 +0100
mono-repo
commit 04eef6ab4d291621114949c14097987ec1804274 (bb/master)
Author: michael shanks <mike@budibase.com>
Date: Fri Jun 7 12:22:23 2019 +0100
license AGPL
commit 5bcf5157e0010aea34be4bc338057a75ae0befec
Author: michael shanks <mike@budibase.com>
Date: Fri Jun 7 12:09:06 2019 +0100
making OS friendly :)
commit d5129fbda3c0fbb35e7275420aee0b2e8a4f6c2b
Author: michael shanks <mike@budibase.com>
Date: Fri Jun 7 12:02:48 2019 +0100
initialise master database working
commit 36f9e7c64faf9c1ce27a1586337b4bd168fe88ec (origin/master, origin/HEAD)
Author: Michael Shanks <mjashanks@hotmail.com>
Date: Fri Apr 5 16:30:29 2019 +0100
backup..
commit a8aa18d01d0640db6fb0f7bcb20e713b38b5610b
Author: Michael Shanks <mjashanks@hotmail.com>
Date: Tue Mar 19 21:45:21 2019 +0000
initial commit

lerna.json Normal file
View File

@@ -0,0 +1,6 @@
{
"packages": [
"packages/*"
],
"version": "0.0.0"
}

View File

@@ -1,20 +1,7 @@
{
"name": "budibase",
"version": "0.0.1",
"description": "budibase wrapper repo for development",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [
"budibase"
],
"author": "Michael Shanks",
"license": "AGPL-3.0-or-later",
"dependencies": {
"argon2": "^0.23.0",
"budibase-core": "git+ssh://git@github.com:Budibase/budibase-core-dist.git",
"rimraf": "^2.6.3",
"yargs": "^13.2.4"
"name": "root",
"private": true,
"devDependencies": {
"lerna": "^3.14.1"
}
}

View File

@@ -0,0 +1,14 @@
{
"name": "budibase-client-library",
"version": "0.0.1",
"description": "Client library for talking to budibase web server",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [
"budibase"
],
"author": "Michael Shanks",
"license": "MPL-2.0"
}

packages/core Submodule

@@ -0,0 +1 @@
Subproject commit 896f3b6c062fcac68de67e33a07732e1293a3ae2

View File

@@ -1,5 +1,5 @@
{
"presets": ["@babel/env"],
"sourceMaps": "inline",
"retainLines": true
}

View File

@@ -1,20 +1,20 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "launch",
"name": "debug memory",
"program": "${workspaceFolder}/index.js",
"runtimeExecutable": "${workspaceFolder}/node_modules/.bin/babel-node",
"runtimeArgs": ["--nolazy"],
"args":["memory"],
"skipFiles": [
"<node_internals>/**/*.js"
]
}
]
}

View File

@@ -1,37 +1,37 @@
import fs from "fs";
import {join} from "path";
import {promisify} from 'es6-promisify';
import _rimraf from "rimraf";
const mkdir = promisify(fs.mkdir);
const rmdir = promisify(fs.rmdir);
const rimraf = promisify(_rimraf);
const getConfig = async () => {
const config = {
local: {
root: "./output/local/files"
},
memory: {
root:"./output/memory"
},
azure: {
root:"./output/azure"
}
};
await rimraf("./output");
await mkdir("./output");
for(let type in config) {
await mkdir(join("output", type));
}
await mkdir("./output/local/files");
return config;
};
export default getConfig;

View File

@@ -1,32 +1,32 @@
import fs from "fs";
import {mkdir} from "fs";
import {join} from "path";
import {promisify} from 'es6-promisify';
mkdirp = promisify(mkdir);
const getConfig = async () => {
const config = {
local: {
root: "./output/local/files"
},
memory: {}
};
try {
await mkdir("./output");
} catch(e){}
for(let type in config) {
await mkdir(join("output", type));
}
await mkdir("./output/local/files");
return config;
};
export default getConfig;

View File

@@ -1,86 +1,86 @@
import {SharedKeyCredential, BlockBlobURL,
BlobURL, ContainerURL, ServiceURL,
StorageURL, Aborter} from "@azure/storage-blob";
export const createFile = ({containerUrl}) => async (key, content) => {
const blobURL = BlobURL.fromContainerURL(containerURL, key);
const blockBlobURL = BlockBlobURL.fromBlobURL(blobURL);
await blockBlobURL.upload(
Aborter.none,
content,
content.length
);
};
export const updateFile = opts => async (path, content) =>
createFile(opts)(path,content);
export const loadFile = ({containerUrl}) => async key => {
const blobURL = BlobURL.fromContainerURL(
containerUrl, key);
const downloadBlockBlobResponse =
await blobURL.download(Aborter.none, 0);
return downloadBlockBlobResponse
.readableStreamBody
.read(content.length)
.toString();
};
export const exists = ({containerURL}) => async (key) => {
const blobURL = BlobURL.fromContainerURL(containerURL, key);
const getPropsResponse = await blobURL.getProperties();
return getPropsResponse._response.StatusCode === 200;
}
export const deleteFile = ({containerURL}) => async key => {
const blobURL = BlobURL.fromContainerURL(
containerURL, key);
await blobURL.delete(Aborter.none);
}
export const createContainer = ({containerUrl}) => async () =>
await containerUrl.create(Aborter.none);
export const deleteContainer = ({containerUrl}) => async () =>
await containerUrl.delete(Aborter.none);
const initialise = opts => {
const sharedKeyCredential = new SharedKeyCredential(
opts.account,
opts.accountKey
);
const pipeline = StorageURL.newPipeline(sharedKeyCredential);
const serviceURL = new ServiceURL(
`https://${account}.blob.core.windows.net`,
pipeline
);
const containerURL = ContainerURL.fromServiceURL(
serviceURL,
opts.containerName
);
return ({
containerURL
});
};
export default opts => {
const access = initialise(opts);
return ({
createFile : createFile(access),
updateFile : updateFile(access),
loadFile : loadFile(access),
exists : exists(access),
datastoreType : "azure-blob-storage",
datastoreDescription: "",
data
});
};

View File

@@ -1,126 +1,126 @@
const {promisify} = require('util');
const fs = require("fs");
const {join} = require("path");
const readFile = promisify(fs.readFile);
const writeFile = (path, content) =>
promisify(fs.writeFile)(path, content, "utf8");
const access = promisify(fs.access);
const mkdir = promisify(fs.mkdir);
const rmdir = promisify(fs.rmdir);
const unlink = promisify(fs.unlink);
const readdir = promisify(fs.readdir);
const rename = promisify(fs.rename);
const updateFile = root => async (path, file) =>
await writeFile(
join(root,path),
file
);
const createFile = updateFile;
const loadFile = root => async (path) =>
await readFile(
join(root,path)
, "utf8");
const exists = root => async (path) => {
try {
await access(
join(root,path)
);
} catch (e) {
return false;
}
return true;
};
const createFolder = root => async (path) =>
await mkdir(
join(root, path));
const deleteFile = root => async (path) =>
await unlink(
join(root, path)
);
module.exports.deleteFile = deleteFile;
const deleteFolder = root => async (path) =>
await rmdir(
join(root, path));
const readableFileStream = root => async path =>
fs.createReadStream(
join(root, path), "utf8"
);
const writableFileStream = root => async path =>
fs.createWriteStream(
join(root, path), "utf8"
);
const getFolderContents = root => async path => {
await readdir(
join(root, path)
);
};
const renameFile = root => async (oldPath, newPath) =>
await rename(
join(root, oldPath),
join(root, newPath)
);
const datastoreFolder = (applicationId, instanceId) =>
applicationId === "master" ? "master"
: `app.${applicationId}.${instanceId}`;
const createEmptyDb = (rootConfig) => async (applicationId, instanceId) => {
const folder = datastoreFolder(applicationId, instanceId);
const dbRootConfig = getDbRootConfig(rootConfig, applicationId, instanceId);
await createFolder(dbRootConfig)(folder);
return folder;
};
const getDatastoreConfig = (rootConfig) => (applicationId, instanceId) =>
join(rootConfig.rootPath,
datastoreFolder(
applicationId, instanceId
));
const getMasterDbRootConfig = (rootConfig) => () => rootConfig.rootPath;
const getInstanceDbRootConfig = (rootConfig) => async (applicationId, instanceId) => rootConfig.rootPath;
const getDbRootConfig = (rootConfig, applicationId, instanceId) =>
applicationId === "master"
? getMasterDbRootConfig(rootConfig)()
: getInstanceDbRootConfig(rootConfig)(applicationId, instanceId);
module.exports.databaseManager = rootConfig => ({
createEmptyDb:createEmptyDb(rootConfig),
getDatastoreConfig:getDatastoreConfig(rootConfig),
getMasterDbRootConfig:getMasterDbRootConfig(rootConfig),
getInstanceDbRootConfig:getInstanceDbRootConfig(rootConfig)
});
module.exports.getDatastore = rootFolderPath => ({
createFile : createFile(rootFolderPath),
updateFile : updateFile(rootFolderPath),
loadFile : loadFile(rootFolderPath),
exists : exists(rootFolderPath),
deleteFile : deleteFile(rootFolderPath),
createFolder: createFolder(rootFolderPath),
deleteFolder: deleteFolder(rootFolderPath),
readableFileStream: readableFileStream(rootFolderPath),
writableFileStream: writableFileStream(rootFolderPath),
renameFile: renameFile(rootFolderPath),
getFolderContents: getFolderContents(rootFolderPath),
createEmptyDb: createEmptyDb(rootFolderPath),
datastoreType : "local",
datastoreDescription: rootFolderPath
});
module.exports.configParameters = {
rootPath: "Root Data Folder"
};
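
A minimal usage sketch of the datastore interface exported above; the require path is an assumption, and the root folder must already exist since the functions join paths onto it without creating it:

const { getDatastore } = require("./datastores/datastores/local"); // path is an assumption

const store = getDatastore("./output/local/files"); // root folder assumed to exist already

(async () => {
  await store.createFolder("clients");                    // mkdir <root>/clients
  await store.createFile("clients/1234.json", "{}");      // write a file under the root
  console.log(await store.exists("clients/1234.json"));   // true
  console.log(await store.loadFile("clients/1234.json")); // "{}"
})();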

View File

@@ -1,128 +1,128 @@
import {isUndefined, has} from "lodash";
import {take} from "lodash/fp";
import {Readable, Writable} from "readable-stream";
import { Buffer } from "safe-buffer";
import {splitKey, joinKey, $} from "../src/common";
import {getLastPartInKey} from "../src/templateApi/heirarchy";
const folderMarker = "OH-YES-ITSA-FOLDER-";
const isFolder = val => val.includes(folderMarker);
const getParentFolderKey = key =>
$(key, [
splitKey,
take((splitKey(key).length - 1)),
joinKey,
]);
const getParentFolder = (data,key) => {
const parentKey = getParentFolderKey(key);
if(data[parentKey] === undefined)
throw new Error("Parent folder for " + key + " does not exist (" + parentKey + ")");
return JSON.parse(data[parentKey]);
}
const addItemToParentFolder = (data, path) => {
if(getParentFolderKey(path) === "/") return;
const parentFolder = getParentFolder(data, path);
parentFolder.items.push(
getLastPartInKey(path));
data[getParentFolderKey(path)] = JSON.stringify(parentFolder);
}
export const createFile = data => async (path, content) => {
if(await exists(data)(path)) {
throw new Error(path + " already exists");
}
addItemToParentFolder(data, path);
data[path] = content;
};
export const updateFile = data => async (path, content) => {
// putting this check in to force use of create
if(!await exists(data)(path)) throw new Error("cannot update " + path + " - does not exist");
data[path] = content;
}
export const writableFileStream = data => async (path) => {
//if(!await exists(data)(path)) throw new Error("cannot write stream to " + path + " - does not exist");
const stream = Writable();
stream._write = (chunk, encoding, done) => {
data[path] = data[path] === undefined
? [] : data[path];
data[path] = [...data[path], ...chunk];
done();
};
return stream;
};
export const readableFileStream = data => async (path) => {
if(!await exists(data)(path))
throw new Error("cannot read stream from " + path + " - does not exist");
const s = new Readable();
s._read = () => {
s.push(Buffer.from(data[path]));
s.push(null);
};
return s;
};
export const renameFile = data => async (oldKey, newKey) => {
if(!await exists(data)(oldKey)) throw new Error("cannot rename path: " + oldKey + " ... does not exist");
if(await exists(data)(newKey)) throw new Error("cannot rename path: " + newKey + " ... already exists");
data[newKey] = data[oldKey];
delete data[oldKey];
};
export const loadFile = data => async (path) => {
const result = data[path];
if(isUndefined(result)) throw new Error("Load failed - path " + path + " does not exist");
return result;
};
export const exists = data => async (path) => has(data, path);
export const deleteFile = data => async (path) => {
if(!await exists(data)(path))
throw new Error("Cannot delete file, path " + path + " does not exist");
if(isFolder(data[path])) throw new Error("DeleteFile: Path " + path + " is a folder, not a file");
const parentFolder = getParentFolder(data, path);
parentFolder.items = parentFolder.items.filter(i => i !== getLastPartInKey(path));
data[getParentFolderKey(path)] = JSON.stringify(parentFolder);
delete data[path];
}
export const createFolder = data => async (path) => {
if(await exists(data)(path)) throw new Error("Cannot create folder, path " + path + " already exists");
addItemToParentFolder(data, path);
data[path] = JSON.stringify({folderMarker, items:[]});
}
export const deleteFolder = data => async (path) => {
if(!await exists(data)(path)) throw new Error("Cannot delete folder, path " + path + " does not exist");
if(!isFolder(data[path]))
throw new Error("DeleteFolder: Path " + path + " is not a folder");
delete data[path];
}
export const getFolderContents = data => async (folderPath) => {
if(!isFolder(data[folderPath]))
throw new Error("Not a folder: " + folderPath);
if(!await exists(data)(folderPath))
throw new Error("Folder does not exist: " + folderPath);
return JSON.parse(data[folderPath]).items;
};
export default data => {
return {
createFile : createFile(data),
updateFile : updateFile(data),
loadFile : loadFile(data),
exists : exists(data),
deleteFile : deleteFile(data),
createFolder: createFolder(data),
deleteFolder: deleteFolder(data),
readableFileStream: readableFileStream(data),
writableFileStream: writableFileStream(data),
renameFile: renameFile(data),
getFolderContents: getFolderContents(data),
datastoreType : "memory",
datastoreDescription: "",
data
};
};
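
A similar sketch for the in-memory datastore above; the import path is an assumption, and it assumes budibase-core's key helpers resolve the parent of a top-level key to "/" (as the early return in addItemToParentFolder implies):

import memory from "./datastores/datastores/memory"; // path is an assumption

const backing = {};            // plain object that holds every key
const store = memory(backing);

(async () => {
  await store.createFolder("/clients");                    // stored as a JSON folder record
  await store.createFile("/clients/1234.json", "{}");
  console.log(await store.loadFile("/clients/1234.json")); // "{}"
  console.log(await store.getFolderContents("/clients"));  // ["1234.json"]
})();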

View File

@@ -1,29 +1,29 @@
import local from "./datastores/local";
import azureBlob from "./datastores/azure-blob";
import memory from "./datastores/memory";
import getConfig from "./config";
import tests from "./tests";
const initialise = async () => {
const type = process.argv[2];
const config = (await getConfig())[type];
switch (type) {
case "local":
return {datastore:local(config.root), config};
case "memory":
return {datastore:memory(config), config};
case "azure":
return {datastore:azureBlob(config), config};
default:
break;
}
}
initialise()
.then(init => {
return tests(init.datastore, init.config);
})
.then(_ => console.log("done"))
.catch(e => console.log(e));

View File

@@ -1,39 +1,39 @@
import {eventsList} from "budibase-core";
import {filter,union,has,map} from "lodash/fp";
import records from "./records";
const allEventsOfType = type =>
filter(
e => e.endsWith(`:${type}`)
)(eventsList);
const getEventNamespace = ev => {
const parts = ev.split(":");
return `${parts[0]}:${parts[1]}`;
}
const hasRecord = has("record");
export const register = (app, logTimeElapsed, eventNamespaces = []) => {
const onCompleteEvents =
eventNamespaces.length === 0
? allEventsOfType("onComplete")
: map(e => `${e}:onComplete`)(eventNamespaces)
const onErrorEvents =
eventNamespaces.length === 0
? allEventsOfType("onError")
: map(e => `${e}:onError`)(eventNamespaces)
for(let ev of union(onCompleteEvents)(onErrorEvents)) {
app.subscribe(ev, (_, ctx) => {
const info =
hasRecord(ctx)
? ctx.record.type()
: "";
logTimeElapsed(
ev, ctx.elapsed, info);
});
}
};

View File

@@ -1,9 +1,9 @@
import {map} from "lodash";
export const action = (name, run, iterator=iterateActionTimes(1)) => ({name, run, iterator});
export const iterateActionTimes = times => run =>
map([...Array(times).keys()], run);
export const iterateCollection = getCollection => run =>
map(getCollection(), run);

View File

@@ -1,57 +1,57 @@
import setup from "./setup";
import records from "./records";
import {register} from "./diagnosticPlugin";
import pLimit from "p-limit";
import papa from "papaparse";
import {writeFileSync} from "fs";
const limit = pLimit(1);
const iterateActions = async (apis,getIterator) => {
const iterator = getIterator(apis);
let result = iterator();
while(!result.done) {
try {
const runPromises = result.action.iterator(i =>
limit(() => result.action.run(i))
);
let n = 1;
await Promise.all(runPromises);
result = iterator();
} catch(e) {
e.message = `FAILED: ${result.action.name}: ${e.message}`;
throw e;
}
}
}
export default async (datastore,config) => {
const apis = await setup(datastore);
const diagnostics = [];
let currentRecordCount = 0;
register(apis, (ev, elapsed, info) => {
if(ev === "recordApi:save:onComplete") {
currentRecordCount++;
} else if(ev === "recordApi:delete:onComplete") {
currentRecordCount--;
}
diagnostics.push(
{method:ev, elapsed, info, count:currentRecordCount}
);
console.log(`${ev} ${info} ${elapsed/1000} s`);
}, [
"recordApi:save",
"recordApi:load",
"viewApi:listItems",
"recordApi:delete"]);
await iterateActions(apis, records);
const diagnosticscsv = papa.unparse(diagnostics);
writeFileSync(config.root + "\\results.csv", diagnosticscsv, {encoding:"utf8"});
};

View File

@@ -1,96 +1,96 @@
import {action, iterateActionTimes, iterateCollection} from "./helpers";
import {isUndefined, union, takeRight} from "lodash";
const createClient = (apis, getState) => async (i) => {
const client = apis.recordApi.getNew("/clients", "client");
client.FamilyName = "Humperdink";
client.Address1 = `${i} Mainitucetts Avenue`;
client.Address2 = "Longerton Road South";
client.Address3 = "Chalico City";
client.Address4 = "Northern Humranistan";
client.Postcode = "BY71 5FR";
client.CreatedDate = new Date();
const state = getState();
if(isUndefined(state.clientKeys)) state.clientKeys = [];
state.clientKeys.push(client.key());
await apis.recordApi.save(client);
return client.key();
}
const getClient = (apis, getState) => async (k) => {
const state = getState();
if(isUndefined(state.clients)) state.clients = [];
const client = await apis.recordApi.load(k);
state.clients.push(client);
return `key: ${k} , add1: ${client.Address1} , count: ${state.clients.length}`;
}
const listClients = (apis, getState) => async () => {
const clients = await apis.viewApi.listItems("/clients/default");
const state = getState();
if(state.clientKeys.length !== clients.length) {
throw new Error(
"list CLients, expected "
+ state.clientKeys.length.toString()
+ " clients, actual "
+ clients.length.toString()
)
}
}
const deleteClient = (apis, getState) => async k => {
await apis.recordApi.delete(k);
const state = getState();
state.clientKeys = state.clientKeys.filter(key => key !== k);
}
export default (apis) => {
const state = {};
const getState = () => state;
const noOfRecords = 10000;
const recordsPerIteration = 10;
const noOfIterations = noOfRecords / recordsPerIteration;
const actionsInOneIteration = () => ([
action("Create client", createClient(apis, getState),
iterateActionTimes(recordsPerIteration)),
/*action("Get client", getClient(apis, getState),
iterateCollection(() => takeRight(getState().clientKeys, recordsPerIteration))),*/
action("List Clients", listClients(apis, getState))
]);
let actions = [];
for (let index = 0; index < noOfIterations; index++) {
actions = union(actions, actionsInOneIteration());
}
/*
for (let index = 0; index < noOfIterations; index++) {
actions.push(
action("Delete Clients", deleteClient(apis, getState),
iterateCollection(() => takeRight(getState().clientKeys, recordsPerIteration))),
action("List Clients", listClients(apis, getState))
);
}*/
let actionIndex = 0;
return () => {
if(actionIndex == actions.length) {
return {done:true};
}
const result = {action:actions[actionIndex], done:false};
actionIndex++;
return result;
};
};

View File

@@ -1,70 +1,70 @@
import {getAppApis, getTemplateApi, setupDatastore} from "budibase-core";
import {action} from "./helpers";
const addField = templateApi => type => (record, name) => {
const field = templateApi.getNewField(type);
field.name = name;
field.type = type;
field.label = name;
templateApi.addField(
record,
field);
}
export default async (datastore) => {
datastore = setupDatastore(datastore);
const templateApi = await getTemplateApi(datastore);
const addStringField = addField(templateApi)("string");
const addDateField = addField(templateApi)("datetime");
const addBoolField = addField(templateApi)("bool");
const root = templateApi.getNewRootLevel();
const clients = templateApi.getNewCollectionTemplate(root);
clients.name = "clients";
const client = templateApi.getNewRecordTemplate(clients);
client.name = "client"
addStringField(client, "FamilyName");
addStringField(client, "Address1");
addStringField(client, "Address2");
addStringField(client, "Address3");
addStringField(client, "Address4");
addStringField(client, "Postcode");
addDateField(client, "CreatedDate");
const children = templateApi.getNewCollectionTemplate(client);
children.name = "children";
const child = templateApi.getNewRecordTemplate(children);
child.name = "child";
addStringField(child, "FirstName");
addStringField(child, "Surname");
addDateField(child, "DateOfBirth");
addBoolField(child, "Current");
const contacts = templateApi.getNewCollectionTemplate(client);
contacts.name = "contacts";
const contact = templateApi.getNewRecordTemplate(contacts);
contact.name = "contact";
addStringField(contact, "Name");
addStringField(contact, "relationship");
addStringField(contact, "phone1");
addStringField(contact, "phone2");
addBoolField(contact, "active");
await templateApi.saveApplicationHeirarchy(root);
const apis = await getAppApis(datastore);
await apis.collectionApi.initialiseAll();
return apis;
}

File diff suppressed because it is too large.

View File

@@ -1,8 +1,8 @@
const Koa = require('koa');
const app = new Koa();
app.use(async ctx => {
ctx.body = 'Hello World';
});
app.listen(3000);

View File

@@ -1,41 +1,41 @@
const {initialiseData, setupDatastore,
getTemplateApi} = require("budibase-core");
const {getApisForUser, getDatabaseManager,
getApisWithFullAccess} = require("./helpers");
const masterDbAppDefinition = require("../appPackages/master/appDefinition.json");
const masterDbAccessLevels = require("../appPackages/master/access_levels.json");
module.exports = async (datastoreModule, rootConfig, username, password) => {
try {
const databaseManager = getDatabaseManager(datastoreModule, rootConfig);
await databaseManager.createEmptyMasterDb();
const masterDbConfig = databaseManager.masterDatastoreConfig;
const datastore = setupDatastore(
datastoreModule.getDatastore(masterDbConfig)
);
const templateApi = getTemplateApi({datastore});
await initialiseData(datastore, {
heirarchy:templateApi.constructHeirarchy(masterDbAppDefinition.hierarchy),
actions:masterDbAppDefinition.actions,
triggers:masterDbAppDefinition.triggers
});
const bbMaster = await getApisWithFullAccess(datastore);
await bbMaster.authApi.saveAccessLevels(masterDbAccessLevels);
const user = bbMaster.authApi.getNewUser();
user.name = username;
user.accessLevels= ["owner"];
await bbMaster.authApi.createUser(user, password);
return await getApisForUser(datastore, username, password);
} catch(e) {
throw e;
}
};
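
A hedged usage sketch of the master-database initialiser above; the file name is hypothetical and the rootPath value is an assumption, while the datastore module path mirrors the require used by the CLI further down:

const initialiseMasterDb = require("./masterDbInitialise");        // hypothetical file name
const datastoreModule = require("../datastores/datastores/local"); // local file datastore

initialiseMasterDb(datastoreModule, { rootPath: "./data" }, "admin", "s3cret")
  .then(() => console.log("master db initialised and owner user created"))
  .catch(console.error);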

View File

@@ -1,29 +1,29 @@
const crypto = require("../server/nodeCrypto");
const {getDatabaseManager, getAppApis} = require("budibase-core");
module.exports.getApisWithFullAccess = async (datastore) => {
const bb = await getAppApis(
datastore,
null, null, null,
crypto
);
bb.withFullAccess();
return bb;
};
module.exports.getApisForUser = async (datastore, username, password) => {
const bb = await getAppApis(
datastore,
null, null, null,
crypto
);
await bb.authenticateAs(username, password);
return bb;
}
module.exports.getDatabaseManager = (datastoreModule, datastoreConfig) =>
getDatabaseManager(datastoreModule.databaseManager(datastoreConfig));
const crypto = require("../nodeCrypto");
const {getDatabaseManager, getAppApis} = require("budibase-core");
module.exports.getApisWithFullAccess = async (datastore) => {
const bb = await getAppApis(
datastore,
null, null, null,
crypto
);
bb.withFullAccess();
return bb;
};
module.exports.getApisForUser = async (datastore, username, password) => {
const bb = await getAppApis(
datastore,
null, null, null,
crypto
);
await bb.authenticateAs(username, password);
return bb;
}
module.exports.getDatabaseManager = (datastoreModule, datastoreConfig) =>
getDatabaseManager(datastoreModule.databaseManager(datastoreConfig));

View File

@@ -49,7 +49,7 @@ const question = async (q) => {
if(!username) throw new Error("Username not supplied!");
if(!password) throw new Error("Password not supplied!");
var datastoreModule = require("../datastores/datastores/" + datastore);
var datastoreModule = require("../../datastores/datastores/" + datastore);
const rootconfig = {};
for(let parameter in datastoreModule.configParameters) {

View File

@@ -1,34 +1,34 @@
export const budibaseRouting = (options) => {
return async (ctx, next) => {
ctx.request.path
};
};
/* api Routes (all /api/..)
POST executeAction/<name> {}
POST authenticate {}
POST authenticateTemporaryAccess {}
POST createUser {}
POST enabledUser {}
POST disableUser {}
GET users
GET accessLevels
POST accessLevels {}
POST changeMyPassword {}
POST setPasswordFromTemporaryCode {}
POST listItems/index/key {}
POST aggregates/index/key {}
POST record/key/to/rec {}
GET record/key/to/rec
DELETE record/key/to/rec
POST appHeirarchy {}
POST actionsAndTriggers {}
GET appDefinition
*/
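
A hedged sketch of what a client call to one of the routes listed above could look like; the host, port, payload shape and node-fetch dependency are assumptions, only the route path comes from the list:

const fetch = require("node-fetch");

const authenticate = async (username, password) => {
  const res = await fetch("http://localhost:3000/api/authenticate", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ username, password }) // payload shape is an assumption
  });
  if (!res.ok) throw new Error(`authenticate failed: ${res.status}`);
  return res.json();
};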

View File

@@ -1,41 +1,41 @@
const {getAppApis} = require("budibase-core");
module.exports = (datastoreConfig, datastoreModule, method, path) => {
const datastore = datastoreModule.getDatastore(
datastoreConfig);
const bb = getAppApis(
datastore
)
}
/* api Routes (all /api/..)
POST executeAction/<name> {}
POST authenticate {}
POST authenticateTemporaryAccess {}
POST createUser {}
POST enabledUser {}
POST disableUser {}
GET users
GET accessLevels
POST accessLevels {}
POST changeMyPassword {}
POST setPasswordFromTemporaryCode {}
POST listItems/index/key {}
POST aggregates/index/key {}
POST record/key/to/rec {}
GET record/key/to/rec
DELETE record/key/to/rec
POST appHeirarchy {}
POST actionsAndTriggers {}
GET appDefinition
*/

View File

@@ -1,5 +1,5 @@
const {hash, verify} = require("argon2");
module.exports = {
hash, verify
};
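
A minimal sketch of the hash/verify pair re-exported above (argon2's promise-based API); the require path is an assumption:

const { hash, verify } = require("./nodeCrypto"); // path is an assumption

(async () => {
  const hashed = await hash("my-password");            // encoded argon2 hash string
  console.log(await verify(hashed, "my-password"));    // true
  console.log(await verify(hashed, "wrong-password")); // false
})();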

View File

@@ -0,0 +1,20 @@
{
"name": "budibase",
"version": "0.0.1",
"description": "budibase wrapper repo for development",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [
"budibase"
],
"author": "Michael Shanks",
"license": "AGPL-3.0-or-later",
"dependencies": {
"argon2": "^0.23.0",
"budibase-core": "file:../budibase-core/dist",
"rimraf": "^2.6.3",
"yargs": "^13.2.4"
}
}

File diff suppressed because it is too large.

server/.gitignore vendored
View File

@@ -1,73 +0,0 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# TypeScript v1 declaration files
typings/
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
# parcel-bundler cache (https://parceljs.org/)
.cache
# next.js build output
.next
# nuxt.js build output
.nuxt
# vuepress build output
.vuepress/dist
# Serverless directories
.serverless

View File

@@ -1,19 +0,0 @@
{
"name": "budibase-server",
"version": "0.0.1",
"description": "budibase http api",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"build-core": "cd ./node_modules/budibase-core && rollup --config rollup.config.js"
},
"keywords": [
"budibase"
],
"author": "Michael Shanks",
"license": "ISC",
"dependencies": {
"budibase-core": "git+ssh://git@gitlab.com/budibase-dist/budibase-core.git",
"koa": "^2.7.0"
}
}