Merge branch 'master' of github.com:Budibase/budibase into backend-ag-grid

commit bc58c8ebf5
@@ -1,5 +1,5 @@
 {
-  "version": "0.2.5",
+  "version": "0.2.6",
   "npmClient": "yarn",
   "packages": [
     "packages/*"
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/builder",
-  "version": "0.2.5",
+  "version": "0.2.6",
   "license": "AGPL-3.0",
   "private": true,
   "scripts": {
@@ -64,7 +64,7 @@
   },
   "dependencies": {
     "@budibase/bbui": "^1.44.1",
-    "@budibase/client": "^0.2.5",
+    "@budibase/client": "^0.2.6",
     "@budibase/colorpicker": "^1.0.1",
     "@budibase/svelte-ag-grid": "^0.0.13",
     "@fortawesome/fontawesome-free": "^5.14.0",
@@ -86,10 +86,12 @@ const contextToBindables = (tables, walkResult) => context => {
   }

   const newBindable = ([key, fieldSchema]) => {
-    // Replace link bindings with a new property representing the count
+    // Replace certain bindings with a new property to help display components
     let runtimeBoundKey = key
     if (fieldSchema.type === "link") {
      runtimeBoundKey = `${key}_count`
+    } else if (fieldSchema.type === "attachment") {
+      runtimeBoundKey = `${key}_first`
    }
    return {
      type: "context",
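
A minimal sketch of the binding-key rule this hunk introduces, assuming a fieldSchema that only carries a type property (runtimeKeyFor is a made-up helper name for illustration):

    // illustrative only: mirrors the binding-key rules in the hunk above
    function runtimeKeyFor(key, fieldSchema) {
      if (fieldSchema.type === "link") return `${key}_count` // link cells bind to a row count
      if (fieldSchema.type === "attachment") return `${key}_first` // attachment cells bind to the first file
      return key
    }

    runtimeKeyFor("orders", { type: "link" }) // => "orders_count"
    runtimeKeyFor("photos", { type: "attachment" }) // => "photos_first"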
@@ -233,7 +233,7 @@ const createLink = store => async (url, title) => {

   // Save page and regenerate all CSS because otherwise weird things happen
   nav._children = [...nav._children, newLink]
-  setCurrentPage("main")
+  state.currentPageName = "main"
   regenerateCssForScreen(state.pages.main)
   for (let screen of state.pages.main._screens) {
     regenerateCssForScreen(screen)
@@ -1,5 +1,5 @@
 <script>
-  import { Heading, Body, Button, Select } from "@budibase/bbui"
+  import { Heading, Body, Button, Select, Label } from "@budibase/bbui"
   import { notifier } from "builderStore/store/notifications"
   import { FIELDS } from "constants/backend"
   import api from "builderStore/api"
@@ -14,15 +14,17 @@
     schema: {},
   }

-  let parseResult
+  let csvString
+  let primaryDisplay
+  let schema = {}
+  let fields = []

-  $: schema = parseResult && parseResult.schema
-  $: valid =
-    !schema || Object.keys(schema).every(column => schema[column].success)
+  $: valid = !schema || fields.every(column => schema[column].success)
   $: dataImport = {
     valid,
     schema: buildTableSchema(schema),
-    path: files[0] && files[0].path,
+    csvString,
+    primaryDisplay,
   }

   function buildTableSchema(schema) {
@@ -43,11 +45,20 @@

   async function validateCSV() {
     const response = await api.post("/api/tables/csv/validate", {
-      file: files[0],
+      csvString,
       schema: schema || {},
     })

-    parseResult = await response.json()
+    const parseResult = await response.json()
+    schema = parseResult && parseResult.schema
+    fields = Object.keys(schema || {}).filter(
+      key => schema[key].type !== "omit"
+    )
+
+    // Check primary display is valid
+    if (!primaryDisplay || fields.indexOf(primaryDisplay) === -1) {
+      primaryDisplay = fields[0]
+    }
+
     if (response.status !== 200) {
       notifier.danger("CSV Invalid, please try another CSV file")
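
A hedged sketch of the round trip after this change: the builder now posts the raw CSV text rather than a file path, and the endpoint replies with a per-column schema (the success/type shape comes from the parser tests further down; the plain fetch call and sample CSV are illustrative stand-ins for the builder's api helper):

    // illustrative only
    const response = await fetch("/api/tables/csv/validate", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ csvString: "Name,Age\nBert,36", schema: {} }),
    })
    const { schema } = await response.json()
    // e.g. { Name: { success: true, type: "string" }, Age: { success: true, type: "number" } }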
@@ -57,13 +68,7 @@

   async function handleFile(evt) {
     const fileArray = Array.from(evt.target.files)
-    const filesToProcess = fileArray.map(({ name, path, size }) => ({
-      name,
-      path,
-      size,
-    }))
-
-    if (filesToProcess.some(file => file.size >= FILE_SIZE_LIMIT)) {
+    if (fileArray.some(file => file.size >= FILE_SIZE_LIMIT)) {
       notifier.danger(
         `Files cannot exceed ${FILE_SIZE_LIMIT /
           BYTES_IN_MB}MB. Please try again with smaller files.`
@@ -71,9 +76,14 @@
       return
     }

-    files = filesToProcess
-
-    await validateCSV()
+    // Read CSV as plain text to upload alongside schema
+    let reader = new FileReader()
+    reader.addEventListener("load", function(e) {
+      csvString = e.target.result
+      files = fileArray
+      validateCSV()
+    })
+    reader.readAsBinaryString(fileArray[0])
   }

   async function omitColumn(columnName) {
@@ -94,8 +104,8 @@
   </label>
 </div>
 <div class="schema-fields">
-  {#if schema}
-    {#each Object.keys(schema).filter(key => schema[key].type !== 'omit') as columnName}
+  {#if fields.length}
+    {#each fields as columnName}
       <div class="field">
         <span>{columnName}</span>
         <Select
@@ -117,6 +127,16 @@
     {/each}
   {/if}
 </div>
+{#if fields.length}
+  <div class="display-column">
+    <Label extraSmall grey>Display Column</Label>
+    <Select thin secondary bind:value={primaryDisplay}>
+      {#each fields as field}
+        <option value={field}>{field}</option>
+      {/each}
+    </Select>
+  </div>
+{/if}

 <style>
   .dropzone {
@@ -188,4 +208,8 @@
     grid-gap: var(--spacing-m);
     font-size: var(--font-size-xs);
   }
+
+  .display-column {
+    margin-top: var(--spacing-xl);
+  }
 </style>
@@ -38,12 +38,19 @@
   }

   async function saveTable() {
-    // Create table
-    const table = await backendUiStore.actions.tables.save({
+    let newTable = {
       name,
       schema: dataImport.schema || {},
       dataImport,
-    })
+    }
+
+    // Only set primary display if defined
+    if (dataImport.primaryDisplay && dataImport.primaryDisplay.length) {
+      newTable.primaryDisplay = dataImport.primaryDisplay
+    }
+
+    // Create table
+    const table = await backendUiStore.actions.tables.save(newTable)
     notifier.success(`Table ${name} created successfully.`)
     analytics.captureEvent("Table Created", { name })
@@ -5,11 +5,11 @@
 </script>

 <div class="spinner-container">
   <Circle {size} color="#000000" unit="px" />
 </div>

 <style>
   .spinner-container {
     display: block;
   }
 </style>
@@ -19,7 +19,7 @@
     hour12: true,
   },
 }
-const POLL_INTERVAL = 1000
+const POLL_INTERVAL = 5000

 export let appId

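
The deployment list now polls every five seconds instead of every second. A minimal sketch of how such a poll is typically wired in Svelte; fetchDeployments and the /api/deployments URL are assumptions for illustration, not the component's actual names:

    // illustrative only
    import { onMount } from "svelte"

    const POLL_INTERVAL = 5000
    let deployments = []

    async function fetchDeployments() {
      // hypothetical loader: refresh deployment history from the server
      deployments = await (await fetch("/api/deployments")).json()
    }

    onMount(() => {
      const poll = setInterval(fetchDeployments, POLL_INTERVAL)
      return () => clearInterval(poll) // Svelte runs this cleanup on unmount
    })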
@@ -68,7 +68,7 @@
       </span>
     </div>
     <div class="deployment-right">
-      {#if deployment.status.toLowerCase() === "pending"}
+      {#if deployment.status.toLowerCase() === 'pending'}
         <Spinner size="10" />
       {/if}
       <div class={`deployment-status ${deployment.status}`}>
@@ -135,6 +135,9 @@

   .toprightnav {
     display: flex;
+    flex-direction: row;
+    justify-content: flex-end;
+    align-items: center;
   }

   .topleftnav {
@@ -1,6 +1,6 @@
 {
   "name": "budibase",
-  "version": "0.2.5",
+  "version": "0.2.6",
   "description": "Budibase CLI",
   "repository": "https://github.com/Budibase/Budibase",
   "homepage": "https://www.budibase.com",
@@ -17,7 +17,7 @@
   "author": "Budibase",
   "license": "AGPL-3.0-or-later",
   "dependencies": {
-    "@budibase/server": "^0.2.5",
+    "@budibase/server": "^0.2.6",
     "@inquirer/password": "^0.0.6-alpha.0",
     "chalk": "^2.4.2",
     "dotenv": "^8.2.0",
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/client",
-  "version": "0.2.5",
+  "version": "0.2.6",
   "license": "MPL-2.0",
   "main": "dist/budibase-client.js",
   "module": "dist/budibase-client.esm.mjs",
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/server",
-  "version": "0.2.5",
+  "version": "0.2.6",
   "description": "Budibase Web Server",
   "main": "src/electron.js",
   "repository": {
@@ -42,7 +42,7 @@
   "author": "Michael Shanks",
   "license": "AGPL-3.0-or-later",
   "dependencies": {
-    "@budibase/client": "^0.2.5",
+    "@budibase/client": "^0.2.6",
     "@koa/router": "^8.0.0",
     "@sendgrid/mail": "^7.1.1",
     "@sentry/node": "^5.19.2",
@@ -57,6 +57,7 @@
     "electron-updater": "^4.3.1",
     "fix-path": "^3.0.0",
     "fs-extra": "^8.1.0",
+    "jimp": "^0.16.1",
     "joi": "^17.2.1",
     "jsonwebtoken": "^8.5.1",
     "koa": "^2.7.0",
@@ -73,7 +74,7 @@
     "pouchdb": "^7.2.1",
     "pouchdb-all-dbs": "^1.0.2",
     "pouchdb-replication-stream": "^1.2.9",
-    "sharp": "^0.26.0",
+    "sanitize-s3-objectkey": "^0.0.1",
     "squirrelly": "^7.5.0",
     "tar-fs": "^2.1.0",
     "uuid": "^3.3.2",
@@ -1,19 +1,13 @@
 const fs = require("fs")
 const { join } = require("../../../utilities/centralPath")
-let { wait } = require("../../../utilities")
 const AWS = require("aws-sdk")
 const fetch = require("node-fetch")
 const uuid = require("uuid")
+const sanitize = require("sanitize-s3-objectkey")
 const { budibaseAppsDir } = require("../../../utilities/budibaseDir")
 const PouchDB = require("../../../db")
 const environment = require("../../../environment")

-const MAX_INVALIDATE_WAIT_MS = 120000
-const INVALIDATE_WAIT_PERIODS_MS = 5000
-
-// export so main deploy functions can use too
-exports.MAX_INVALIDATE_WAIT_MS = MAX_INVALIDATE_WAIT_MS
-
 async function invalidateCDN(cfDistribution, appId) {
   const cf = new AWS.CloudFront({})
   const resp = await cf
@@ -28,28 +22,24 @@ async function invalidateCDN(cfDistribution, appId) {
       },
     })
     .promise()
-  let totalWaitTimeMs = 0
-  let complete = false
-  do {
-    try {
-      const state = await cf
-        .getInvalidation({
-          DistributionId: cfDistribution,
-          Id: resp.Invalidation.Id,
-        })
-        .promise()
-      if (state.Invalidation.Status === "Completed") {
-        complete = true
-      }
-    } catch (err) {
-      console.log()
-    }
-    await wait(INVALIDATE_WAIT_PERIODS_MS)
-    totalWaitTimeMs += INVALIDATE_WAIT_PERIODS_MS
-  } while (totalWaitTimeMs <= MAX_INVALIDATE_WAIT_MS && !complete)
-  if (!complete) {
-    throw "Unable to invalidate old app version"
+  return resp.Invalidation.Id
+}
+
+exports.isInvalidationComplete = async function(
+  distributionId,
+  invalidationId
+) {
+  if (distributionId == null || invalidationId == null) {
+    return false
   }
+  const cf = new AWS.CloudFront({})
+  const resp = await cf
+    .getInvalidation({
+      DistributionId: distributionId,
+      Id: invalidationId,
+    })
+    .promise()
+  return resp.Invalidation.Status === "Completed"
 }

 exports.updateDeploymentQuota = async function(quota) {
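
A short sketch of how the refactored functions compose: invalidateCDN now returns the invalidation id instead of blocking on it, and isInvalidationComplete checks it later (both functions come from the hunk above; assumes AWS credentials are already configured):

    // illustrative only
    const invalidationId = await invalidateCDN(cfDistribution, appId)
    // ...later, e.g. when deployment history is fetched:
    const done = await isInvalidationComplete(cfDistribution, invalidationId)
    console.log(done ? "CDN invalidation complete" : "still pending")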
@@ -102,6 +92,18 @@ exports.verifyDeployment = async function({ instanceId, appId, quota }) {
   }

   const json = await response.json()
+  if (json.errors) {
+    throw new Error(json.errors)
+  }
+
+  // set credentials here, means any time we're verified we're ready to go
+  if (json.credentials) {
+    AWS.config.update({
+      accessKeyId: json.credentials.AccessKeyId,
+      secretAccessKey: json.credentials.SecretAccessKey,
+      sessionToken: json.credentials.SessionToken,
+    })
+  }

   return json
 }
@@ -136,7 +138,8 @@ async function prepareUploadForS3({ s3Key, metadata, s3, file }) {

   const upload = await s3
     .upload({
-      Key: s3Key,
+      // windows filepaths need to be converted to forward slashes for s3
+      Key: sanitize(s3Key).replace(/\\/g, "/"),
       Body: fileBytes,
       ContentType: file.type || CONTENT_TYPE_MAP[extension.toLowerCase()],
       Metadata: metadata,
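
A small standalone sketch of the new key handling, assuming sanitize-s3-objectkey's default behaviour of stripping characters S3 considers unsafe (the raw key here is invented):

    // illustrative only
    const sanitize = require("sanitize-s3-objectkey")

    const rawKey = "assets\\app_123\\index.html" // hypothetical key built with path.join on Windows
    const s3Key = sanitize(rawKey).replace(/\\/g, "/")
    // remaining backslash separators become "/" so S3 treats them as key prefixes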
@@ -157,17 +160,10 @@ exports.prepareUploadForS3 = prepareUploadForS3

 exports.uploadAppAssets = async function({
   appId,
   instanceId,
-  credentials,
   bucket,
   cfDistribution,
   accountId,
 }) {
-  AWS.config.update({
-    accessKeyId: credentials.AccessKeyId,
-    secretAccessKey: credentials.SecretAccessKey,
-    sessionToken: credentials.SessionToken,
-  })
-
   const s3 = new AWS.S3({
     params: {
       Bucket: bucket,
@@ -225,7 +221,7 @@ exports.uploadAppAssets = async function({

   try {
     await Promise.all(uploads)
-    await invalidateCDN(cfDistribution, appId)
+    return await invalidateCDN(cfDistribution, appId)
   } catch (err) {
     console.error("Error uploading budibase app assets to s3", err)
     throw err
@@ -4,17 +4,73 @@ const {
   uploadAppAssets,
   verifyDeployment,
   updateDeploymentQuota,
-  MAX_INVALIDATE_WAIT_MS,
+  isInvalidationComplete,
 } = require("./aws")
 const { DocumentTypes, SEPARATOR, UNICODE_MAX } = require("../../../db/utils")
 const newid = require("../../../db/newid")

+// the max time we can wait for an invalidation to complete before considering it failed
+const MAX_PENDING_TIME_MS = 30 * 60000
+
 const DeploymentStatus = {
   SUCCESS: "SUCCESS",
   PENDING: "PENDING",
   FAILURE: "FAILURE",
 }

+// checks that deployments are in a good state, any pending will be updated
+async function checkAllDeployments(deployments, user) {
+  let updated = false
+  function update(deployment, status) {
+    deployment.status = status
+    delete deployment.invalidationId
+    delete deployment.cfDistribution
+    updated = true
+  }
+
+  for (let deployment of Object.values(deployments.history)) {
+    // check that no deployments have crashed etc and are now stuck
+    if (
+      deployment.status === DeploymentStatus.PENDING &&
+      Date.now() - deployment.updatedAt > MAX_PENDING_TIME_MS
+    ) {
+      update(deployment, DeploymentStatus.FAILURE)
+    }
+    // if pending but not past failure point need to update them
+    else if (deployment.status === DeploymentStatus.PENDING) {
+      let complete = false
+      try {
+        complete = await isInvalidationComplete(
+          deployment.cfDistribution,
+          deployment.invalidationId
+        )
+      } catch (err) {
+        // system may have restarted, need to re-verify
+        if (
+          err !== undefined &&
+          err.code === "InvalidClientTokenId" &&
+          deployment.quota
+        ) {
+          await verifyDeployment({
+            ...user,
+            quota: deployment.quota,
+          })
+          complete = await isInvalidationComplete(
+            deployment.cfDistribution,
+            deployment.invalidationId
+          )
+        } else {
+          throw err
+        }
+      }
+      if (complete) {
+        update(deployment, DeploymentStatus.SUCCESS)
+      }
+    }
+  }
+  return { updated, deployments }
+}
+
 function replicate(local, remote) {
   return new Promise((resolve, reject) => {
     const replication = local.sync(remote)
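
For orientation, the rough shape of the _local/deployments document that checkAllDeployments walks, inferred from the fields it reads and deletes above; the key and values inside history are invented:

    // illustrative only
    const deployments = {
      history: {
        deployment_1234: {
          status: "PENDING",           // compared against DeploymentStatus
          updatedAt: 1600000000000,    // compared against MAX_PENDING_TIME_MS
          invalidationId: "I2EXAMPLE", // CloudFront invalidation to poll
          cfDistribution: "E1EXAMPLE", // distribution the invalidation ran on
          quota: {},                   // used to re-verify if credentials expired
        },
      },
    }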
@@ -102,7 +158,7 @@ async function storeLocalDeploymentHistory(deployment) {
 async function deployApp({ instanceId, appId, clientId, deploymentId }) {
   try {
     const instanceQuota = await getCurrentInstanceQuota(instanceId)
-    const credentials = await verifyDeployment({
+    const verification = await verifyDeployment({
       instanceId,
       appId,
       quota: instanceQuota,
@@ -110,31 +166,36 @@ async function deployApp({ instanceId, appId, clientId, deploymentId }) {

     console.log(`Uploading assets for appID ${appId} assets to s3..`)

-    if (credentials.errors) throw new Error(credentials.errors)
-
-    await uploadAppAssets({ clientId, appId, instanceId, ...credentials })
+    const invalidationId = await uploadAppAssets({
+      clientId,
+      appId,
+      instanceId,
+      ...verification,
+    })

     // replicate the DB to the couchDB cluster in prod
     console.log("Replicating local PouchDB to remote..")
     await replicateCouch({
       instanceId,
       clientId,
-      session: credentials.couchDbSession,
+      session: verification.couchDbSession,
     })

-    await updateDeploymentQuota(credentials.quota)
+    await updateDeploymentQuota(verification.quota)

     await storeLocalDeploymentHistory({
       _id: deploymentId,
       instanceId,
-      quota: credentials.quota,
-      status: "SUCCESS",
+      invalidationId,
+      cfDistribution: verification.cfDistribution,
+      quota: verification.quota,
+      status: DeploymentStatus.PENDING,
     })
   } catch (err) {
     await storeLocalDeploymentHistory({
       _id: deploymentId,
       instanceId,
-      status: "FAILURE",
+      status: DeploymentStatus.FAILURE,
       err: err.message,
     })
     throw new Error(`Deployment Failed: ${err.message}`)
@@ -145,21 +206,14 @@ exports.fetchDeployments = async function(ctx) {
   try {
     const db = new PouchDB(ctx.user.instanceId)
     const deploymentDoc = await db.get("_local/deployments")
-    // check that no deployments have crashed etc and are now stuck
-    let changed = false
-    for (let deployment of Object.values(deploymentDoc.history)) {
-      if (
-        deployment.status === DeploymentStatus.PENDING &&
-        Date.now() - deployment.updatedAt > MAX_INVALIDATE_WAIT_MS
-      ) {
-        deployment.status = DeploymentStatus.FAILURE
-        changed = true
-      }
-    }
-    if (changed) {
-      await db.put(deploymentDoc)
+    const { updated, deployments } = await checkAllDeployments(
+      deploymentDoc,
+      ctx.user
+    )
+    if (updated) {
+      await db.put(deployments)
     }
-    ctx.body = Object.values(deploymentDoc.history).reverse()
+    ctx.body = Object.values(deployments.history).reverse()
   } catch (err) {
     ctx.body = []
   }
@@ -185,10 +239,10 @@ exports.deployApp = async function(ctx) {
   const deployment = await storeLocalDeploymentHistory({
     instanceId: ctx.user.instanceId,
     appId: ctx.user.appId,
-    status: "PENDING",
+    status: DeploymentStatus.PENDING,
   })

-  deployApp({
+  await deployApp({
     ...ctx.user,
     clientId,
     deploymentId: deployment._id,
@@ -163,15 +163,14 @@ exports.serveApp = async function(ctx) {

 exports.serveAttachment = async function(ctx) {
   const appId = ctx.user.appId

   const attachmentsPath = resolve(budibaseAppsDir(), appId, "attachments")

   // Serve from CloudFront
   if (process.env.CLOUD) {
     const S3_URL = `https://cdn.app.budi.live/assets/${appId}/attachments/${ctx.file}`

     const response = await fetch(S3_URL)
     const body = await response.text()
-
+    ctx.set("Content-Type", response.headers.get("Content-Type"))
     ctx.body = body
     return
   }
@@ -109,7 +109,7 @@ exports.save = async function(ctx) {
   ctx.eventEmitter &&
     ctx.eventEmitter.emitTable(`table:save`, instanceId, tableToSave)

-  if (dataImport && dataImport.path) {
+  if (dataImport && dataImport.csvString) {
     // Populate the table with rows imported from CSV in a bulk update
     const data = await csvParser.transform(dataImport)

@@ -156,10 +156,7 @@ exports.destroy = async function(ctx) {
 }

 exports.validateCSVSchema = async function(ctx) {
-  const { file, schema = {} } = ctx.request.body
-  const result = await csvParser.parse(file.path, schema)
-  ctx.body = {
-    schema: result,
-    path: file.path,
-  }
+  const { csvString, schema = {} } = ctx.request.body
+  const result = await csvParser.parse(csvString, schema)
+  ctx.body = { schema: result }
 }
@@ -11,8 +11,8 @@ const PARSERS = {
   datetime: attribute => new Date(attribute).toISOString(),
 }

-function parse(path, parsers) {
-  const result = csv().fromFile(path)
+function parse(csvString, parsers) {
+  const result = csv().fromString(csvString)

   const schema = {}

@@ -52,7 +52,7 @@ function parse(csvString, parsers) {
   })
 }

-async function transform({ schema, path }) {
+async function transform({ schema, csvString }) {
   const colParser = {}

   for (let key in schema) {
@@ -60,7 +60,7 @@ async function transform({ schema, csvString }) {
   }

   try {
-    const json = await csv({ colParser }).fromFile(path)
+    const json = await csv({ colParser }).fromString(csvString)
     return json
   } catch (err) {
     console.error(`Error transforming CSV to JSON for data import`, err)
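
A standalone sketch of the csvtojson call the parser now relies on; fromString and colParser are part of the csvtojson API, and the sample data is invented ("omit" is also a valid colParser value, which is what the schema filter above depends on):

    // illustrative only
    const csv = require("csvtojson")

    csv({ colParser: { Age: "number" } })
      .fromString("Name,Age\nBert,36\nErnie,34")
      .then(rows => console.log(rows))
    // => [ { Name: 'Bert', Age: 36 }, { Name: 'Ernie', Age: 34 } ]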
@@ -1,24 +1,21 @@
 const fs = require("fs")
-const sharp = require("sharp")
+const jimp = require("jimp")
 const fsPromises = fs.promises

 const FORMATS = {
-  IMAGES: ["png", "jpg", "jpeg", "gif", "svg", "tiff", "raw"],
+  IMAGES: ["png", "jpg", "jpeg", "gif", "bmp", "tiff"],
 }

-async function processImage(file) {
-  const imgMeta = await sharp(file.path)
-    .resize(300)
-    .toFile(file.outputPath)
-  return {
-    ...file,
-    ...imgMeta,
-  }
+function processImage(file) {
+  return jimp.read(file.path).then(img => {
+    return img.resize(300, jimp.AUTO).write(file.outputPath)
+  })
 }

 async function process(file) {
   if (FORMATS.IMAGES.includes(file.extension.toLowerCase())) {
-    return await processImage(file)
+    await processImage(file)
+    return file
   }

   // No processing required
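
A hedged standalone sketch of the jimp pipeline that replaces sharp here: read, resize with jimp.AUTO to preserve the aspect ratio, then write (these calls exist in jimp ^0.16; the file paths are invented):

    // illustrative only: shrink an image to 300px wide, keeping its aspect ratio
    const jimp = require("jimp")

    jimp
      .read("uploads/photo.png")
      .then(img => img.resize(300, jimp.AUTO).write("uploads/photo-small.png"))
      .catch(err => console.error("image processing failed", err))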
@@ -1,6 +1,7 @@
-const csvParser = require("../csvParser");
+const fs = require("fs")
+const csvParser = require("../csvParser")

-const CSV_PATH = __dirname + "/test.csv";
+const CSV_PATH = __dirname + "/test.csv"

 const SCHEMAS = {
   VALID: {
@@ -27,16 +28,16 @@ const SCHEMAS = {
   BROKEN: {
     Address: {
       type: "datetime",
-    }
+    },
   },
-};
+}

 describe("CSV Parser", () => {
+  const csvString = fs.readFileSync(CSV_PATH, "utf8")
+
   describe("parsing", () => {
     it("returns status and types for a valid CSV transformation", async () => {
-      expect(
-        await csvParser.parse(CSV_PATH, SCHEMAS.VALID)
-      ).toEqual({
+      expect(await csvParser.parse(csvString, SCHEMAS.VALID)).toEqual({
         Address: {
           success: true,
           type: "string",
@@ -49,13 +50,11 @@ describe("CSV Parser", () => {
         success: true,
         type: "string",
         },
-      });
-    });
+      })
+    })

     it("returns status and types for an invalid CSV transformation", async () => {
-      expect(
-        await csvParser.parse(CSV_PATH, SCHEMAS.INVALID)
-      ).toEqual({
+      expect(await csvParser.parse(csvString, SCHEMAS.INVALID)).toEqual({
         Address: {
           success: false,
           type: "number",
@@ -68,41 +67,43 @@ describe("CSV Parser", () => {
         success: true,
         type: "string",
         },
-      });
-    });
-  });
+      })
+    })
+  })

   describe("transformation", () => {
     it("transforms a CSV file into JSON", async () => {
       expect(
         await csvParser.transform({
           schema: SCHEMAS.VALID,
-          path: CSV_PATH,
+          csvString,
         })
-      ).toMatchSnapshot();
-    });
+      ).toMatchSnapshot()
+    })

     it("transforms a CSV file into JSON ignoring certain fields", async () => {
       expect(
         await csvParser.transform({
           schema: SCHEMAS.IGNORE,
-          path: CSV_PATH,
+          csvString,
         })
       ).toEqual([
         {
-          Name: "Bert"
+          Name: "Bert",
         },
         {
-          Name: "Ernie"
+          Name: "Ernie",
         },
         {
-          Name: "Big Bird"
-        }
-      ]);
-    });
+          Name: "Big Bird",
+        },
+      ])
+    })

     it("throws an error on invalid schema", async () => {
-      await expect(csvParser.transform({ schema: SCHEMAS.BROKEN, path: CSV_PATH })).rejects.toThrow()
-    });
-  });
+      await expect(
+        csvParser.transform({ schema: SCHEMAS.BROKEN, csvString })
+      ).rejects.toThrow()
+    })
+  })
+})
(File diff suppressed because it is too large)
@@ -13,7 +13,7 @@
     "dev:builder": "rollup -cw"
   },
   "devDependencies": {
-    "@budibase/client": "^0.2.5",
+    "@budibase/client": "^0.2.6",
     "@rollup/plugin-commonjs": "^11.1.0",
     "lodash": "^4.17.15",
     "rollup": "^2.11.2",
@@ -31,7 +31,7 @@
   "keywords": [
     "svelte"
   ],
-  "version": "0.2.5",
+  "version": "0.2.6",
   "license": "MIT",
   "gitHead": "284cceb9b703c38566c6e6363c022f79a08d5691",
   "dependencies": {
@@ -51,8 +51,15 @@
   // Fetch table schema so we can check for linked rows
   const tableObj = await fetchTable(row.tableId)
   for (let key of Object.keys(tableObj.schema)) {
-    if (tableObj.schema[key].type === "link") {
+    const type = tableObj.schema[key].type
+    if (type === "link") {
       row[`${key}_count`] = Array.isArray(row[key]) ? row[key].length : 0
+    } else if (type === "attachment") {
+      let url = null
+      if (Array.isArray(row[key]) && row[key][0] != null) {
+        url = row[key][0].url
+      }
+      row[`${key}_first`] = url
     }
   }

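
This enrichment appears twice on the client, here and in fetchData (see the next hunks). A before/after sketch of what it does to a row, with invented field names and URL:

    // illustrative only
    const row = {
      photos: [{ url: "https://cdn.example.com/a.png" }], // attachment column
      orders: [{ _id: "row_1" }, { _id: "row_2" }],       // link column
    }
    // after the loop above runs, display-friendly keys sit alongside the originals:
    // row.photos_first === "https://cdn.example.com/a.png"
    // row.orders_count === 2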
@@ -6,11 +6,11 @@ export default async function fetchData(datasource, store) {
   if (name) {
     let rows = []
     if (type === "table") {
-      rows = fetchTableData()
+      rows = await fetchTableData()
     } else if (type === "view") {
-      rows = fetchViewData()
+      rows = await fetchViewData()
     } else if (type === "link") {
-      rows = fetchLinkedRowsData()
+      rows = await fetchLinkedRowsData()
     }

     // Fetch table schema so we can check for linked rows
@@ -19,8 +19,15 @@ export default async function fetchData(datasource, store) {
     const keys = Object.keys(table.schema)
     rows.forEach(row => {
       for (let key of keys) {
-        if (table.schema[key].type === "link") {
+        const type = table.schema[key].type
+        if (type === "link") {
           row[`${key}_count`] = Array.isArray(row[key]) ? row[key].length : 0
+        } else if (type === "attachment") {
+          let url = null
+          if (Array.isArray(row[key]) && row[key][0] != null) {
+            url = row[key][0].url
+          }
+          row[`${key}_first`] = url
         }
       }
     })