Merge branch 'linked-records' of github.com:Budibase/budibase into linked-records

Andrew Kingston 2020-10-02 16:43:08 +01:00
commit 8706561bd4
60 changed files with 1069 additions and 630 deletions

View File

@ -1,5 +1,5 @@
{
"version": "0.1.21",
"version": "0.1.22",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@ -14,6 +14,7 @@ rimraf.sync(homedir)
process.env.BUDIBASE_API_KEY = "6BE826CB-6B30-4AEC-8777-2E90464633DE"
process.env.NODE_ENV = "cypress"
process.env.ENABLE_ANALYTICS = "false"
initialiseBudibase({ dir: homedir, clientId: "cypress-test" })
.then(() => {

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
"version": "0.1.21",
"version": "0.1.22",
"license": "AGPL-3.0",
"private": true,
"scripts": {
@ -64,7 +64,7 @@
},
"dependencies": {
"@budibase/bbui": "^1.39.0",
"@budibase/client": "^0.1.21",
"@budibase/client": "^0.1.22",
"@budibase/colorpicker": "^1.0.1",
"@fortawesome/fontawesome-free": "^5.14.0",
"@sentry/browser": "5.19.1",
@ -75,7 +75,7 @@
"fast-sort": "^2.2.0",
"lodash": "^4.17.13",
"mustache": "^4.0.1",
"posthog-js": "1.3.1",
"posthog-js": "1.4.5",
"shortid": "^2.2.15",
"svelte-loading-spinners": "^0.1.1",
"svelte-portal": "^0.1.0",

View File

@ -158,6 +158,10 @@ export default {
find: "constants",
replacement: path.resolve(projectRootDir, "src/constants"),
},
{
find: "analytics",
replacement: path.resolve(projectRootDir, "src/analytics"),
},
],
customResolver,
}),

View File

@ -1,25 +1,71 @@
import * as Sentry from "@sentry/browser"
import posthog from "posthog-js"
import api from "builderStore/api"
function activate() {
Sentry.init({ dsn: process.env.SENTRY_DSN })
if (!process.env.POSTHOG_TOKEN) return
posthog.init(process.env.POSTHOG_TOKEN, {
api_host: process.env.POSTHOG_URL,
})
let analyticsEnabled
const posthogConfigured = process.env.POSTHOG_TOKEN && process.env.POSTHOG_URL
const sentryConfigured = process.env.SENTRY_DSN
async function activate() {
if (analyticsEnabled === undefined) {
// only the server knows the true NODE_ENV
// this was an issue as NODE_ENV = 'cypress' on the server,
// but 'production' on the client
const response = await api.get("/api/analytics")
analyticsEnabled = (await response.json()) === true
}
if (!analyticsEnabled) return
if (sentryConfigured) Sentry.init({ dsn: process.env.SENTRY_DSN })
if (posthogConfigured) {
posthog.init(process.env.POSTHOG_TOKEN, {
api_host: process.env.POSTHOG_URL,
})
posthog.set_config({ persistence: "cookie" })
}
}
function identify(id) {
if (!analyticsEnabled || !id) return
if (posthogConfigured) posthog.identify(id)
if (sentryConfigured)
Sentry.configureScope(scope => {
scope.setUser({ id: id })
})
}
async function identifyByApiKey(apiKey) {
if (!analyticsEnabled) return true
const response = await fetch(
`https://03gaine137.execute-api.eu-west-1.amazonaws.com/prod/account/id?api_key=${apiKey.trim()}`
)
if (response.status === 200) {
const id = await response.json()
await api.put("/api/keys/userId", { value: id })
identify(id)
return true
}
return false
}
function captureException(err) {
if (!analyticsEnabled) return
Sentry.captureException(err)
captureEvent("Error", { error: err.message ? err.message : err })
}
function captureEvent(event) {
if (!process.env.POSTHOG_TOKEN) return
posthog.capture(event)
function captureEvent(eventName, props = {}) {
if (!analyticsEnabled || !process.env.POSTHOG_TOKEN) return
props.sourceApp = "builder"
posthog.capture(eventName, props)
}
export default {
activate,
identify,
identifyByApiKey,
captureException,
captureEvent,
}

View File

@ -1,7 +1,7 @@
import { getStore } from "./store"
import { getBackendUiStore } from "./store/backend"
import { getAutomationStore } from "./store/automation/"
import analytics from "../analytics"
import analytics from "analytics"
export const store = getStore()
export const backendUiStore = getBackendUiStore()
@ -9,9 +9,8 @@ export const automationStore = getAutomationStore()
export const initialise = async () => {
try {
if (process.env.NODE_ENV === "production") {
analytics.activate()
}
analytics.activate()
analytics.captureEvent("Builder Started")
} catch (err) {
console.log(err)
}

View File

@ -14,6 +14,7 @@ import { fetchComponentLibDefinitions } from "../loadComponentLibraries"
import { buildCodeForScreens } from "../buildCodeForScreens"
import { generate_screen_css } from "../generate_css"
import { insertCodeMetadata } from "../insertCodeMetadata"
import analytics from "analytics"
import { uuid } from "../uuid"
import {
selectComponent as _selectComponent,
@ -308,7 +309,9 @@ const addChildComponent = store => (componentToAdd, presetProps = {}) => {
state.currentView = "component"
state.currentComponentInfo = newComponent.props
analytics.captureEvent("Added Component", {
name: newComponent.props._component,
})
return state
})
}

View File

@ -3,6 +3,7 @@
import { notifier } from "builderStore/store/notifications"
import ActionButton from "components/common/ActionButton.svelte"
import { Input } from "@budibase/bbui"
import analytics from "analytics"
export let onClosed
@ -19,6 +20,7 @@
})
onClosed()
notifier.success(`Automation ${name} created.`)
analytics.captureEvent("Automation Created", { name })
}
</script>

View File

@ -1,5 +1,6 @@
<script>
import { automationStore } from "builderStore"
import analytics from "analytics"
export let blockDefinition
export let stepId
@ -12,6 +13,9 @@
stepId,
type: blockType,
})
analytics.captureEvent("Added Automation Block", {
name: blockDefinition.name,
})
}
</script>

View File

@ -2,6 +2,7 @@
import { Button, Input, Select } from "@budibase/bbui"
import { backendUiStore } from "builderStore"
import { notifier } from "builderStore/store/notifications"
import analytics from "analytics"
const CALCULATIONS = [
{
@ -26,6 +27,7 @@
backendUiStore.actions.views.save(view)
notifier.success(`View ${view.name} saved.`)
onClosed()
analytics.captureEvent("Added View Calculate", { field: view.field })
}
</script>

View File

@ -3,6 +3,7 @@
import { goto } from "@sveltech/routify"
import { backendUiStore } from "builderStore"
import { notifier } from "builderStore/store/notifications"
import analytics from "analytics"
export let onClosed
@ -28,6 +29,7 @@
})
notifier.success(`View ${name} created`)
onClosed()
analytics.captureEvent("View Created", { name })
$goto(`../../../view/${name}`)
}
</script>

View File

@ -2,6 +2,7 @@
import { Button, Input, Select } from "@budibase/bbui"
import { backendUiStore } from "builderStore"
import { notifier } from "builderStore/store/notifications"
import analytics from "analytics"
const CONDITIONS = [
{
@ -53,6 +54,9 @@
backendUiStore.actions.views.save(view)
notifier.success(`View ${view.name} saved.`)
onClosed()
analytics.captureEvent("Added View Filter", {
filters: JSON.stringify(view.filters),
})
}
function removeFilter(idx) {

View File

@ -3,6 +3,7 @@
import { backendUiStore } from "builderStore"
import { notifier } from "builderStore/store/notifications"
import { DropdownMenu, Button, Icon, Input, Select } from "@budibase/bbui"
import analytics from "analytics"
export let table
@ -19,6 +20,7 @@
$goto(`./model/${model._id}`)
name = ""
dropdown.hide()
analytics.captureEvent("Table Created", { name })
}
const onClosed = () => {

View File

@ -1,299 +1,32 @@
<script>
import { notifier } from "builderStore/store/notifications"
import { Heading, Body, Button } from "@budibase/bbui"
import { FILE_TYPES } from "constants/backend"
import { Heading, Body, Button, Dropzone } from "@budibase/bbui"
import api from "builderStore/api"
const BYTES_IN_KB = 1000
const BYTES_IN_MB = 1000000
export let files = []
export let fileSizeLimit = BYTES_IN_MB * 20
let selectedImageIdx = 0
let fileDragged = false
$: selectedImage = files[selectedImageIdx]
function determineFileIcon(extension) {
const ext = extension.toLowerCase()
if (FILE_TYPES.IMAGE.includes(ext)) return "ri-image-2-line"
if (FILE_TYPES.CODE.includes(ext)) return "ri-terminal-box-line"
return "ri-file-line"
function handleFileTooLarge() {
notifier.danger(
`Files cannot exceed ${fileSizeLimit /
BYTES_IN_MB}MB. Please try again with smaller files.`
)
}
async function processFiles(fileList) {
const fileArray = Array.from(fileList)
if (fileArray.some(file => file.size >= fileSizeLimit)) {
notifier.danger(
`Files cannot exceed ${fileSizeLimit /
BYTES_IN_MB}MB. Please try again with smaller files.`
)
return
}
const filesToProcess = fileArray.map(({ name, path, size }) => ({
const filesToProcess = fileArray.map(({ name, path, size, type }) => ({
name,
path,
size,
type,
}))
const response = await api.post(`/api/attachments/process`, {
files: filesToProcess,
})
const processedFiles = await response.json()
files = [...processedFiles, ...files]
selectedImageIdx = 0
}
async function removeFile() {
files.splice(selectedImageIdx, 1)
files = files
selectedImageIdx = 0
}
function navigateLeft() {
selectedImageIdx -= 1
}
function navigateRight() {
selectedImageIdx += 1
}
function handleFile(evt) {
processFiles(evt.target.files)
}
function handleDragOver(evt) {
evt.preventDefault()
fileDragged = true
}
function handleDragLeave(evt) {
evt.preventDefault()
fileDragged = false
}
function handleDrop(evt) {
evt.preventDefault()
processFiles(evt.dataTransfer.files)
fileDragged = false
return await response.json()
}
</script>
<div
class="dropzone"
on:dragover={handleDragOver}
on:dragleave={handleDragLeave}
on:dragenter={handleDragOver}
on:drop={handleDrop}
class:fileDragged>
<ul>
{#if selectedImage}
<li>
<header>
<div>
<i
class={`file-icon ${determineFileIcon(selectedImage.extension)}`} />
<span class="filename">{selectedImage.name}</span>
</div>
<p>
{#if selectedImage.size <= BYTES_IN_MB}
{selectedImage.size / BYTES_IN_KB}KB
{:else}{selectedImage.size / BYTES_IN_MB}MB{/if}
</p>
</header>
<div class="delete-button" on:click={removeFile}>
<i class="ri-close-line" />
</div>
{#if selectedImageIdx !== 0}
<div class="nav left" on:click={navigateLeft}>
<i class="ri-arrow-left-line" />
</div>
{/if}
<img src={selectedImage.url} />
{#if selectedImageIdx !== files.length - 1}
<div class="nav right" on:click={navigateRight}>
<i class="ri-arrow-right-line" />
</div>
{/if}
</li>
{/if}
</ul>
<i class="ri-folder-upload-line" />
<input id="file-upload" type="file" multiple on:change={handleFile} />
<label for="file-upload">Upload</label>
</div>
<style>
.dropzone {
padding: var(--spacing-l);
border: 2px dashed var(--grey-7);
text-align: center;
display: flex;
align-items: center;
flex-direction: column;
border-radius: 10px;
transition: all 0.3s;
}
.fileDragged {
border: 2px dashed var(--grey-7);
transform: scale(1.03);
background: var(--blue-light);
}
input[type="file"] {
display: none;
}
label {
font-family: var(--font-sans);
cursor: pointer;
font-weight: 600;
box-sizing: border-box;
overflow: hidden;
border-radius: var(--border-radius-s);
color: var(--white);
padding: var(--spacing-s) var(--spacing-l);
transition: all 0.2s ease 0s;
display: inline-flex;
text-rendering: optimizeLegibility;
min-width: auto;
outline: none;
font-feature-settings: "case" 1, "rlig" 1, "calt" 0;
-webkit-box-align: center;
user-select: none;
flex-shrink: 0;
align-items: center;
justify-content: center;
margin-top: 10px;
width: 100%;
border: solid 1.5px var(--ink);
background-color: var(--ink);
}
div.nav {
position: absolute;
background: black;
color: var(--white);
display: flex;
align-items: center;
bottom: var(--spacing-s);
border-radius: 10px;
transition: 0.2s transform;
}
.nav:hover {
cursor: pointer;
transform: scale(1.1);
}
.left {
left: var(--spacing-s);
}
.right {
right: var(--spacing-s);
}
li {
position: relative;
height: 300px;
background: var(--grey-7);
display: flex;
justify-content: center;
border-radius: 10px;
}
img {
border-radius: 10px;
width: 100%;
box-shadow: 0 var(--spacing-s) 12px rgba(0, 0, 0, 0.15);
object-fit: contain;
}
i {
font-size: 3em;
}
.file-icon {
color: var(--white);
font-size: 2em;
margin-right: var(--spacing-s);
}
ul {
padding: 0;
display: grid;
grid-gap: var(--spacing-s);
list-style-type: none;
width: 100%;
}
header {
display: flex;
align-items: center;
justify-content: space-between;
position: absolute;
background: linear-gradient(
180deg,
rgba(12, 12, 12, 1),
rgba(60, 60, 60, 0)
);
width: 100%;
border-top-left-radius: 10px;
border-top-right-radius: 10px;
height: 60px;
}
header > div {
color: var(--white);
display: flex;
align-items: center;
font-size: 15px;
margin-left: var(--spacing-m);
width: 60%;
overflow: hidden;
white-space: nowrap;
text-overflow: ellipsis;
}
.filename {
overflow: hidden;
text-overflow: ellipsis;
}
header > p {
color: var(--grey-5);
margin-right: var(--spacing-m);
}
.delete-button {
position: absolute;
top: var(--spacing-s);
right: var(--spacing-s);
padding: var(--spacing-s);
border-radius: 10px;
opacity: 0;
transition: all 0.3s;
color: var(--white);
}
.delete-button i {
font-size: 2em;
}
.delete-button:hover {
opacity: 1;
cursor: pointer;
background: linear-gradient(
to top right,
rgba(60, 60, 60, 0),
rgba(255, 0, 0, 0.2)
);
}
</style>
<Dropzone bind:files {processFiles} {handleFileTooLarge} />

View File

@ -0,0 +1,196 @@
<script>
import { onMount } from "svelte"
import fsort from "fast-sort"
import getOr from "lodash/fp/getOr"
import { store, backendUiStore } from "builderStore"
import { Button, Icon } from "@budibase/bbui"
import ActionButton from "components/common/ActionButton.svelte"
import LinkedRecord from "./LinkedRecord.svelte"
import AttachmentList from "./AttachmentList.svelte"
import TablePagination from "./TablePagination.svelte"
import { DeleteRecordModal, CreateEditRecordModal } from "./modals"
import RowPopover from "./popovers/Row.svelte"
import ColumnPopover from "./popovers/Column.svelte"
import ViewPopover from "./popovers/View.svelte"
import ExportPopover from "./popovers/Export.svelte"
import ColumnHeaderPopover from "./popovers/ColumnHeader.svelte"
import EditRowPopover from "./popovers/EditRow.svelte"
import * as api from "./api"
const ITEMS_PER_PAGE = 10
// Internal headers we want to hide from the user
const INTERNAL_HEADERS = ["_id", "_rev", "modelId", "type"]
let modalOpen = false
let data = []
let headers = []
let currentPage = 0
let search
$: {
if (
$backendUiStore.selectedView &&
$backendUiStore.selectedView.name.startsWith("all_")
) {
api.fetchDataForView($backendUiStore.selectedView).then(records => {
data = records || []
})
}
}
$: sort = $backendUiStore.sort
$: sorted = sort ? fsort(data)[sort.direction](sort.column) : data
$: paginatedData = sorted
? sorted.slice(
currentPage * ITEMS_PER_PAGE,
currentPage * ITEMS_PER_PAGE + ITEMS_PER_PAGE
)
: []
$: headers = Object.keys($backendUiStore.selectedModel.schema)
.sort()
.filter(id => !INTERNAL_HEADERS.includes(id))
$: schema = $backendUiStore.selectedModel.schema
$: modelView = {
schema: $backendUiStore.selectedModel.schema,
name: $backendUiStore.selectedView.name,
}
</script>
<section>
<div class="table-controls">
<h2 class="title">{$backendUiStore.selectedModel.name}</h2>
<div class="popovers">
<ColumnPopover />
{#if Object.keys($backendUiStore.selectedModel.schema).length > 0}
<RowPopover />
<ViewPopover />
<ExportPopover view={modelView} />
{/if}
</div>
</div>
<table class="bb-table">
<thead>
<tr>
<th class="edit-header">
<div>Edit</div>
</th>
{#each headers as header}
<th>
<ColumnHeaderPopover
field={$backendUiStore.selectedModel.schema[header]} />
</th>
{/each}
</tr>
</thead>
<tbody>
{#if paginatedData.length === 0}
<div class="no-data">No Data.</div>
{/if}
{#each paginatedData as row}
<tr>
<td>
<EditRowPopover {row} />
</td>
{#each headers as header}
<td>
{#if schema[header].type === 'link'}
<LinkedRecord field={schema[header]} ids={row[header]} />
{:else if schema[header].type === 'attachment'}
<AttachmentList files={row[header] || []} />
{:else}{getOr('', header, row)}{/if}
</td>
{/each}
</tr>
{/each}
</tbody>
</table>
<TablePagination
{data}
bind:currentPage
pageItemCount={paginatedData.length}
{ITEMS_PER_PAGE} />
</section>
<style>
section {
margin-bottom: 20px;
}
.title {
font-size: 24px;
font-weight: 600;
text-rendering: optimizeLegibility;
text-transform: capitalize;
}
table {
border: 1px solid var(--grey-4);
background: #fff;
border-radius: 3px;
border-collapse: collapse;
}
thead {
height: 40px;
background: var(--grey-3);
border: 1px solid var(--grey-4);
}
thead th {
color: var(--ink);
text-transform: capitalize;
font-weight: 500;
font-size: 14px;
text-rendering: optimizeLegibility;
transition: 0.5s all;
vertical-align: middle;
}
.edit-header {
width: 100px;
cursor: default;
}
.edit-header:hover {
color: var(--ink);
}
th:hover {
color: var(--blue);
cursor: pointer;
}
td {
max-width: 200px;
text-overflow: ellipsis;
border: 1px solid var(--grey-4);
overflow: hidden;
white-space: pre;
box-sizing: border-box;
}
tbody tr {
border-bottom: 1px solid var(--grey-4);
transition: 0.3s background-color;
color: var(--ink);
font-size: 12px;
}
tbody tr:hover {
background: var(--grey-1);
}
.table-controls {
width: 100%;
}
.popovers {
display: flex;
}
.no-data {
padding: 14px;
}
</style>

View File

@ -0,0 +1,56 @@
<script>
import { onMount } from "svelte"
import fsort from "fast-sort"
import getOr from "lodash/fp/getOr"
import { store, backendUiStore } from "builderStore"
import api from "builderStore/api"
import { Button, Icon } from "@budibase/bbui"
import Table from "./Table.svelte"
import ActionButton from "components/common/ActionButton.svelte"
import LinkedRecord from "./LinkedRecord.svelte"
import TablePagination from "./TablePagination.svelte"
import { DeleteRecordModal, CreateEditRecordModal } from "./modals"
import RowPopover from "./popovers/Row.svelte"
import ColumnPopover from "./popovers/Column.svelte"
import ViewPopover from "./popovers/View.svelte"
import ColumnHeaderPopover from "./popovers/ColumnHeader.svelte"
import EditRowPopover from "./popovers/EditRow.svelte"
import CalculationPopover from "./popovers/Calculate.svelte"
import GroupByPopover from "./popovers/GroupBy.svelte"
import FilterPopover from "./popovers/Filter.svelte"
import ExportPopover from "./popovers/Export.svelte"
export let view = {}
let data = []
$: name = view.name
$: filters = view.filters
$: field = view.field
$: groupBy = view.groupBy
$: !name.startsWith("all_") && filters && fetchViewData(name, field, groupBy)
async function fetchViewData(name, field, groupBy) {
const params = new URLSearchParams()
if (field) {
params.set("field", field)
params.set("stats", true)
}
if (groupBy) params.set("group", groupBy)
let QUERY_VIEW_URL = `/api/views/${name}?${params}`
const response = await api.get(QUERY_VIEW_URL)
data = await response.json()
}
</script>
<Table title={decodeURI(name)} schema={view.schema} {data}>
<FilterPopover {view} />
<CalculationPopover {view} />
{#if view.calculation}
<GroupByPopover {view} />
{/if}
<ExportPopover {view} />
</Table>

View File

@ -0,0 +1,71 @@
<script>
import {
TextButton,
Button,
Icon,
Input,
Select,
Popover,
} from "@budibase/bbui"
import { backendUiStore } from "builderStore"
import { notifier } from "builderStore/store/notifications"
import api from "builderStore/api"
const FORMATS = [
{
name: "CSV",
key: "csv",
},
{
name: "JSON",
key: "json",
},
]
export let view
let anchor
let dropdown
let exportFormat
async function exportView() {
const response = await api.post(
`/api/views/export?format=${exportFormat}`,
view
)
const downloadInfo = await response.json()
window.location = downloadInfo.url
}
</script>
<div bind:this={anchor}>
<TextButton text small on:click={dropdown.show}>
<Icon name="download" />
Export
</TextButton>
</div>
<Popover bind:this={dropdown} {anchor} align="left">
<h5>Export Format</h5>
<Select secondary thin bind:value={exportFormat}>
<option value={''}>Select an option</option>
{#each FORMATS as format}
<option value={format.key}>{format.name}</option>
{/each}
</Select>
<div class="button-group">
<Button secondary on:click={dropdown.hide}>Cancel</Button>
<Button primary on:click={exportView}>Export</Button>
</div>
</Popover>
<style>
h5 {
margin-top: 0;
}
.button-group {
margin-top: var(--spacing-l);
display: flex;
justify-content: flex-end;
}
</style>

View File

@ -15,6 +15,7 @@
name="Name"
placeholder="Username" />
<Select disabled={!editMode} bind:value={user.accessLevelId} thin secondary>
<option value="">Choose an option</option>
<option value="ADMIN">Admin</option>
<option value="POWER_USER">Power User</option>
</Select>

View File

@ -3,13 +3,21 @@
import { store } from "builderStore"
import api from "builderStore/api"
import posthog from "posthog-js"
import analytics from "analytics"
let keys = { budibase: "", sendGrid: "" }
async function updateKey([key, value]) {
if (key === "budibase") {
const isValid = await analytics.identifyByApiKey(value)
if (!isValid) {
// TODO: add validation message
keys = { ...keys }
return
}
}
const response = await api.put(`/api/keys/${key}`, { value })
const res = await response.json()
if (key === "budibase") posthog.identify(value)
keys = { ...keys, ...res }
}
@ -17,6 +25,8 @@
async function fetchKeys() {
const response = await api.get(`/api/keys/`)
const res = await response.json()
// don't want this to ever be editable, as it's fetched based on the API key
if (res.userId) delete res.userId
keys = res
}

View File

@ -62,6 +62,7 @@
name="Password"
placeholder="Password" />
<Select secondary bind:value={accessLevelId} thin>
<option value="">Choose an option</option>
<option value="ADMIN">Admin</option>
<option value="POWER_USER">Power User</option>
</Select>

View File

@ -14,7 +14,7 @@
import { getContext } from "svelte"
import { fade } from "svelte/transition"
import { post } from "builderStore/api"
import analytics from "../../analytics"
import analytics from "analytics"
const { open, close } = getContext("simple-modal")
//Move this to context="module" once svelte-forms is updated so that it can bind to stores correctly
@ -22,12 +22,34 @@
export let hasKey
let isApiKeyValid
let lastApiKey
let fetchApiKeyPromise
const validateApiKey = async apiKey => {
if (!apiKey) return false
// make sure we only fetch once, unless API Key is changed
if (isApiKeyValid === undefined || apiKey !== lastApiKey) {
lastApiKey = apiKey
// svelte reactivity was causing a request to get fired multiple times
// so, we make everything await the same promise, if one exists
if (!fetchApiKeyPromise) {
fetchApiKeyPromise = analytics.identifyByApiKey(apiKey)
}
isApiKeyValid = await fetchApiKeyPromise
fetchApiKeyPromise = undefined
}
return isApiKeyValid
}
let submitting = false
let errors = {}
let validationErrors = {}
let validationSchemas = [
{
apiKey: string().required("Please enter your API key."),
apiKey: string()
.required("Please enter your API key.")
.test("valid-apikey", "This API key is invalid", validateApiKey),
},
{
applicationName: string().required("Your application must have a name."),
@ -122,7 +144,7 @@
name: $createAppStore.values.applicationName,
})
const appJson = await appResp.json()
analytics.captureEvent("web_app_created", {
analytics.captureEvent("App Created", {
name,
appId: appJson._id,
})
@ -160,6 +182,7 @@
}
function extractErrors({ inner }) {
if (!inner) return {}
return inner.reduce((acc, err) => {
return { ...acc, [err.path]: err.message }
}, {})

View File

@ -1,139 +0,0 @@
<script>
import { Button, Select } from "@budibase/bbui"
import StateBindingCascader from "./StateBindingCascader.svelte"
import { find, map, keys, reduce, keyBy } from "lodash/fp"
import { pipe } from "components/common/core"
import {
EVENT_TYPE_MEMBER_NAME,
allHandlers,
} from "components/common/eventHandlers"
import { store } from "builderStore"
export let handler
export let onCreate
export let onChanged
export let onRemoved
export let index
export let newHandler
let eventOptions
let handlerType
let parameters = []
$: eventOptions = allHandlers()
$: {
if (handler) {
handlerType = handler[EVENT_TYPE_MEMBER_NAME]
parameters = Object.entries(handler.parameters).map(([name, value]) => ({
name,
value,
}))
} else {
// Empty Handler
handlerType = ""
parameters = []
}
}
const handlerChanged = (type, params) => {
const handlerParams = {}
for (let param of params) {
handlerParams[param.name] = param.value
}
const updatedHandler = {
[EVENT_TYPE_MEMBER_NAME]: type,
parameters: handlerParams,
}
onChanged(updatedHandler, index)
}
const handlerTypeChanged = e => {
const handlerType = eventOptions.find(
handler => handler.name === e.target.value
)
const defaultParams = handlerType.parameters.map(param => ({
name: param,
value: "",
}))
handlerChanged(handlerType.name, defaultParams)
}
const onParameterChanged = index => e => {
const value = e.target ? e.target.value : e
const newParams = [...parameters]
newParams[index].value = value
handlerChanged(handlerType, newParams)
}
</script>
<div class="type-selector-container {newHandler && 'new-handler'}">
<div class="handler-controls">
<div class="handler-option">
<span>Action</span>
<Select value={handlerType} on:change={handlerTypeChanged}>
<option />
{#each eventOptions as option}
<option value={option.name}>{option.name}</option>
{/each}
</Select>
</div>
{#if parameters}
<br />
{#each parameters as parameter, idx}
<StateBindingCascader on:change={onParameterChanged(idx)} {parameter} />
{/each}
{/if}
{#if parameters.length > 0}
<div class="button-container">
{#if newHandler}
<Button primary thin on:click={onCreate}>Add Action</Button>
{:else}
<Button outline thin on:click={onRemoved}>Remove Action</Button>
{/if}
</div>
{/if}
</div>
</div>
<style>
.type-selector-container {
display: grid;
grid-gap: 20px;
width: 100%;
background: rgba(223, 223, 223, 0.5);
border: 1px solid #dfdfdf;
margin-bottom: 18px;
}
.handler-option {
display: flex;
flex-direction: column;
}
.new-handler {
background: #fff;
}
.handler-controls {
display: grid;
grid-template-columns: 1fr;
grid-gap: 20px;
padding: 22px;
}
.button-container {
display: grid;
justify-items: end;
}
span {
font-size: 18px;
margin-bottom: 10px;
font-weight: 500;
}
</style>

View File

@ -4,7 +4,7 @@
import { notifier } from "builderStore/store/notifications"
import api from "builderStore/api"
import Spinner from "components/common/Spinner.svelte"
import analytics from "../../../analytics"
import analytics from "analytics"
let deployed = false
let loading = false
@ -26,10 +26,13 @@
notifier.success(`Your Deployment is Complete.`)
deployed = true
loading = false
analytics.captureEvent("web_app_deployment", {
analytics.captureEvent("Deployed App", {
appId,
})
} catch (err) {
analytics.captureEvent("Deploy App Failed", {
appId,
})
analytics.captureException(err)
notifier.danger("Deployment unsuccessful. Please try again later.")
loading = false

View File

@ -8,8 +8,8 @@
import { get } from "builderStore/api"
import Spinner from "components/common/Spinner.svelte"
import CreateAppModal from "components/start/CreateAppModal.svelte"
import { Button } from "@budibase/bbui"
import { Heading } from "@budibase/bbui"
import { Button, Heading } from "@budibase/bbui"
import analytics from "analytics"
let promise = getApps()
@ -28,16 +28,18 @@
async function fetchKeys() {
const response = await api.get(`/api/keys/`)
const res = await response.json()
return res.budibase
return await response.json()
}
async function checkIfKeysAndApps() {
const key = await fetchKeys()
const keys = await fetchKeys()
const apps = await getApps()
if (key) {
if (keys.userId) {
hasKey = true
} else {
analytics.identify(keys.userId)
}
if (!keys.budibase) {
showCreateAppModal()
}
}

View File

@ -4847,9 +4847,10 @@ posix-character-classes@^0.1.0:
version "0.1.1"
resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab"
posthog-js@1.3.1:
version "1.3.1"
resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.3.1.tgz#970acec1423eaa5dba0d2603410c9c70294e16da"
posthog-js@1.4.5:
version "1.4.5"
resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.4.5.tgz#b16235afe47938bd71eaed4ede3790c8b910ed71"
integrity sha512-Rzc5/DpuX55BqwNEbZB0tLav1gEinnr5H+82cbLiMtXLADlxmCwZiEaVXcC3XOqW0x8bcAEehicx1TbpfBamzA==
prelude-ls@~1.1.2:
version "1.1.2"

View File

@ -1,6 +1,6 @@
{
"name": "budibase",
"version": "0.1.21",
"version": "0.1.22",
"description": "Budibase CLI",
"repository": "https://github.com/Budibase/Budibase",
"homepage": "https://www.budibase.com",
@ -17,7 +17,7 @@
"author": "Budibase",
"license": "AGPL-3.0-or-later",
"dependencies": {
"@budibase/server": "^0.1.21",
"@budibase/server": "^0.1.22",
"@inquirer/password": "^0.0.6-alpha.0",
"chalk": "^2.4.2",
"dotenv": "^8.2.0",

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/client",
"version": "0.1.21",
"version": "0.1.22",
"license": "MPL-2.0",
"main": "dist/budibase-client.js",
"module": "dist/budibase-client.esm.mjs",

View File

@ -16,4 +16,5 @@ LOG_LEVEL=error
DEPLOYMENT_CREDENTIALS_URL="https://dt4mpwwap8.execute-api.eu-west-1.amazonaws.com/prod/"
DEPLOYMENT_DB_URL="https://couchdb.budi.live:5984"
SENTRY_DSN=https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
SENTRY_DSN=https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
ENABLE_ANALYTICS="true"

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/server",
"version": "0.1.21",
"version": "0.1.22",
"description": "Budibase Web Server",
"main": "src/electron.js",
"repository": {
@ -42,7 +42,7 @@
"author": "Michael Shanks",
"license": "AGPL-3.0-or-later",
"dependencies": {
"@budibase/client": "^0.1.21",
"@budibase/client": "^0.1.22",
"@koa/router": "^8.0.0",
"@sendgrid/mail": "^7.1.1",
"@sentry/node": "^5.19.2",
@ -59,7 +59,7 @@
"joi": "^17.2.1",
"jsonwebtoken": "^8.5.1",
"koa": "^2.7.0",
"koa-body": "^4.1.0",
"koa-body": "^4.2.0",
"koa-compress": "^4.0.1",
"koa-pino-logger": "^3.0.0",
"koa-send": "^5.0.0",
@ -92,9 +92,6 @@
"server-destroy": "^1.0.1",
"supertest": "^4.0.2"
},
"nodemonConfig": {
"delay": "1000"
},
"jest": {
"testEnvironment": "node",
"setupFiles": [

View File

@ -1,18 +1,22 @@
const CouchDB = require("../../db")
const newid = require("../../db/newid")
const {
generateAdminPermissions,
generatePowerUserPermissions,
POWERUSER_LEVEL_ID,
ADMIN_LEVEL_ID,
} = require("../../utilities/accessLevels")
const {
generateAccessLevelID,
getAccessLevelParams,
} = require("../../db/utils")
exports.fetch = async function(ctx) {
const db = new CouchDB(ctx.user.instanceId)
const body = await db.query("database/by_type", {
include_docs: true,
key: ["accesslevel"],
})
const body = await db.allDocs(
getAccessLevelParams(null, {
include_docs: true,
})
)
const customAccessLevels = body.rows.map(row => row.doc)
const staticAccessLevels = [
@ -90,7 +94,7 @@ exports.create = async function(ctx) {
name: ctx.request.body.name,
_rev: ctx.request.body._rev,
permissions: ctx.request.body.permissions || [],
_id: newid(),
_id: generateAccessLevelID(),
type: "accesslevel",
}

View File

@ -0,0 +1,3 @@
exports.isEnabled = async function(ctx) {
ctx.body = JSON.stringify(process.env.ENABLE_ANALYTICS === "true")
}

View File

@ -8,6 +8,7 @@ exports.fetch = async function(ctx) {
ctx.body = {
budibase: process.env.BUDIBASE_API_KEY,
sendgrid: process.env.SENDGRID_API_KEY,
userId: process.env.USERID_API_KEY,
}
}

View File

@ -1,7 +1,6 @@
const CouchDB = require("../../db")
const ClientDb = require("../../db/clientDb")
const { getPackageForBuilder, buildPage } = require("../../utilities/builder")
const newid = require("../../db/newid")
const env = require("../../environment")
const instanceController = require("./instance")
const { resolve, join } = require("path")
@ -12,17 +11,18 @@ const setBuilderToken = require("../../utilities/builder/setBuilderToken")
const fs = require("fs-extra")
const { promisify } = require("util")
const chmodr = require("chmodr")
const { generateAppID, getAppParams } = require("../../db/utils")
const {
downloadExtractComponentLibraries,
} = require("../../utilities/createAppPackage")
exports.fetch = async function(ctx) {
const db = new CouchDB(ClientDb.name(getClientId(ctx)))
const body = await db.query("client/by_type", {
include_docs: true,
key: ["app"],
})
const body = await db.allDocs(
getAppParams(null, {
include_docs: true,
})
)
ctx.body = body.rows.map(row => row.doc)
}
@ -48,7 +48,7 @@ exports.create = async function(ctx) {
if (!clientId) {
ctx.throw(400, "ClientId not suplied")
}
const appId = newid()
const appId = generateAppID()
// insert an appId -> clientId lookup
const masterDb = new CouchDB("client_app_lookup")

View File

@ -2,6 +2,7 @@ const jwt = require("jsonwebtoken")
const CouchDB = require("../../db")
const ClientDb = require("../../db/clientDb")
const bcrypt = require("../../utilities/bcrypt")
const { generateUserID } = require("../../db/utils")
exports.authenticate = async ctx => {
if (!ctx.user.appId) ctx.throw(400, "No appId")
@ -35,7 +36,7 @@ exports.authenticate = async ctx => {
let dbUser
try {
dbUser = await instanceDb.get(`user_${username}`)
dbUser = await instanceDb.get(generateUserID(username))
} catch (_) {
// do not want to throw a 404 - as this could be
// used to determine valid usernames

View File

@ -1,8 +1,8 @@
const CouchDB = require("../../db")
const newid = require("../../db/newid")
const actions = require("../../automations/actions")
const logic = require("../../automations/logic")
const triggers = require("../../automations/triggers")
const { getAutomationParams, generateAutomationID } = require("../../db/utils")
/*************************
* *
@ -34,7 +34,7 @@ exports.create = async function(ctx) {
const db = new CouchDB(ctx.user.instanceId)
let automation = ctx.request.body
automation._id = newid()
automation._id = generateAutomationID()
automation.type = "automation"
automation = cleanAutomationInputs(automation)
@ -72,10 +72,11 @@ exports.update = async function(ctx) {
exports.fetch = async function(ctx) {
const db = new CouchDB(ctx.user.instanceId)
const response = await db.query(`database/by_type`, {
key: ["automation"],
include_docs: true,
})
const response = await db.allDocs(
getAutomationParams(null, {
include_docs: true,
})
)
ctx.body = response.rows.map(row => row.doc)
}

View File

@ -64,19 +64,30 @@ function walkDir(dirPath, callback) {
}
}
function prepareUploadForS3({ filePath, s3Key, metadata, s3 }) {
const fileExtension = [...filePath.split(".")].pop()
const fileBytes = fs.readFileSync(filePath)
return s3
async function prepareUploadForS3({ s3Key, metadata, s3, file }) {
const extension = [...file.name.split(".")].pop()
const fileBytes = fs.readFileSync(file.path)
const upload = await s3
.upload({
Key: s3Key,
Body: fileBytes,
ContentType: CONTENT_TYPE_MAP[fileExtension.toLowerCase()],
ContentType: file.type || CONTENT_TYPE_MAP[extension.toLowerCase()],
Metadata: metadata,
})
.promise()
return {
size: file.size,
name: file.name,
extension,
url: upload.Location,
key: upload.Key,
}
}
exports.prepareUploadForS3 = prepareUploadForS3
exports.uploadAppAssets = async function({
appId,
instanceId,
@ -107,7 +118,10 @@ exports.uploadAppAssets = async function({
// Upload HTML, CSS and JS for each page of the web app
walkDir(`${appAssetsPath}/${page}`, function(filePath) {
const appAssetUpload = prepareUploadForS3({
filePath,
file: {
path: filePath,
name: [...filePath.split("/")].pop(),
},
s3Key: filePath.replace(appAssetsPath, `assets/${appId}`),
s3,
metadata: { accountId },
@ -124,8 +138,8 @@ exports.uploadAppAssets = async function({
if (file.uploaded) continue
const attachmentUpload = prepareUploadForS3({
filePath: file.path,
s3Key: `assets/${appId}/attachments/${file.name}`,
file,
s3Key: `assets/${appId}/attachments/${file.processedFileName}`,
s3,
metadata: { accountId },
})

View File

@ -19,32 +19,9 @@ exports.create = async function(ctx) {
clientId,
applicationId: appId,
},
views: {
// view collation information, read before writing any complex views:
// https://docs.couchdb.org/en/master/ddocs/views/collation.html#collation-specification
by_username: {
map: function(doc) {
if (doc.type === "user") {
emit([doc.username], doc._id)
}
}.toString(),
},
by_type: {
map: function(doc) {
emit([doc.type], doc._id)
}.toString(),
},
by_automation_trigger: {
map: function(doc) {
if (doc.type === "automation") {
const trigger = doc.definition.trigger
if (trigger) {
emit([trigger.event], trigger)
}
}
}.toString(),
},
},
// view collation information, read before writing any complex views:
// https://docs.couchdb.org/en/master/ddocs/views/collation.html#collation-specification
views: {},
})
// add view for linked records
await createLinkView(instanceId)

View File

@ -1,13 +1,18 @@
const CouchDB = require("../../db")
const newid = require("../../db/newid")
const linkRecords = require("../../db/linkedRecords")
const {
getRecordParams,
getModelParams,
generateModelID,
} = require("../../db/utils")
exports.fetch = async function(ctx) {
const db = new CouchDB(ctx.user.instanceId)
const body = await db.query("database/by_type", {
include_docs: true,
key: ["model"],
})
const body = await db.allDocs(
getModelParams(null, {
include_docs: true,
})
)
ctx.body = body.rows.map(row => row.doc)
}
@ -22,7 +27,7 @@ exports.save = async function(ctx) {
const oldModelId = ctx.request.body._id
const modelToSave = {
type: "model",
_id: newid(),
_id: generateModelID(),
views: {},
...ctx.request.body,
}
@ -39,9 +44,12 @@ exports.save = async function(ctx) {
} else if (_rename && modelToSave.primaryDisplay === _rename.old) {
throw "Cannot rename the primary display field."
} else if (_rename) {
const records = await db.query(`database/all_${modelToSave._id}`, {
include_docs: true,
})
const records = await db.allDocs(
getRecordParams(modelToSave._id, null, {
include_docs: true,
})
)
const docs = records.rows.map(({ doc }) => {
doc[_rename.updated] = doc[_rename.old]
delete doc[_rename.old]
@ -64,19 +72,6 @@ exports.save = async function(ctx) {
const result = await db.post(modelToSave)
modelToSave._rev = result.rev
const designDoc = await db.get("_design/database")
/** TODO: should we include the doc type here - currently it is possible for anything
with a modelId in it to be returned */
designDoc.views = {
...designDoc.views,
[`all_${modelToSave._id}`]: {
map: `function(doc) {
if (doc.modelId === "${modelToSave._id}") {
emit(doc._id);
}
}`,
},
}
// update linked records
await linkRecords.updateLinks({
instanceId,
@ -86,7 +81,6 @@ exports.save = async function(ctx) {
model: modelToSave,
oldModel: oldModel,
})
await db.put(designDoc)
ctx.eventEmitter &&
ctx.eventEmitter.emitModel(`model:save`, instanceId, modelToSave)
@ -103,10 +97,12 @@ exports.destroy = async function(ctx) {
await db.remove(modelToDelete)
const modelViewId = `all_${ctx.params.modelId}`
// Delete all records for that model
const records = await db.query(`database/${modelViewId}`)
const records = await db.allDocs(
getRecordParams(ctx.params.modelId, null, {
include_docs: true,
})
)
await db.bulkDocs(
records.rows.map(record => ({ _id: record.id, _deleted: true }))
)
@ -117,10 +113,6 @@ exports.destroy = async function(ctx) {
eventType: linkRecords.EventType.MODEL_DELETE,
model: modelToDelete,
})
// delete the "all" view
const designDoc = await db.get("_design/database")
delete designDoc.views[modelViewId]
await db.put(designDoc)
ctx.eventEmitter &&
ctx.eventEmitter.emitModel(`model:delete`, instanceId, modelToDelete)

View File

@ -1,7 +1,9 @@
const CouchDB = require("../../db")
const validateJs = require("validate.js")
const newid = require("../../db/newid")
const linkRecords = require("../../db/linkedRecords")
const { getRecordParams, generateRecordID } = require("../../db/utils")
const MODEL_VIEW_BEGINS_WITH = "all_model:"
validateJs.extend(validateJs.validators.datetime, {
parse: function(value) {
@ -65,7 +67,7 @@ exports.save = async function(ctx) {
record.modelId = ctx.params.modelId
if (!record._rev && !record._id) {
record._id = newid()
record._id = generateRecordID(record.modelId)
}
const model = await db.get(record.modelId)
@ -120,7 +122,16 @@ exports.fetchView = async function(ctx) {
const instanceId = ctx.user.instanceId
const db = new CouchDB(instanceId)
const { stats, group, field } = ctx.query
const response = await db.query(`database/${ctx.params.viewName}`, {
const viewName = ctx.params.viewName
// if this is a model's built-in "all_" view, strip the prefix and delegate to fetchModelRecords
if (viewName.indexOf(MODEL_VIEW_BEGINS_WITH) === 0) {
ctx.params.modelId = viewName.substring(4)
await exports.fetchModelRecords(ctx)
return
}
const response = await db.query(`database/${viewName}`, {
include_docs: !stats,
group,
})
@ -141,11 +152,14 @@ exports.fetchView = async function(ctx) {
exports.fetchModelRecords = async function(ctx) {
const instanceId = ctx.user.instanceId
const db = new CouchDB(instanceId)
const response = await db.query(`database/all_${ctx.params.modelId}`, {
include_docs: true,
})
ctx.body = await linkRecords.attachLinkInfo(
const db = new CouchDB(instanceId)
const response = await db.allDocs(
getRecordParams(ctx.params.modelId, null, {
include_docs: true,
})
)
ctx.body = response.rows.map(row => row.doc)
ctx.body = await linkRecords.attachLinkInfo(
instanceId,
response.rows.map(row => row.doc)
)

View File

@ -4,6 +4,8 @@ const jwt = require("jsonwebtoken")
const fetch = require("node-fetch")
const fs = require("fs")
const uuid = require("uuid")
const AWS = require("aws-sdk")
const { prepareUploadForS3 } = require("./deploy/aws")
const {
budibaseAppsDir,
@ -22,8 +24,12 @@ exports.serveBuilder = async function(ctx) {
await send(ctx, ctx.file, { root: ctx.devPath || builderPath })
}
exports.processLocalFileUpload = async function(ctx) {
const { files } = ctx.request.body
exports.uploadFile = async function(ctx) {
let files
files =
ctx.request.files.file.length > 1
? Array.from(ctx.request.files.file)
: [ctx.request.files.file]
const attachmentsPath = resolve(
budibaseAppsDir(),
@ -31,52 +37,99 @@ exports.processLocalFileUpload = async function(ctx) {
"attachments"
)
if (process.env.CLOUD) {
// remote upload
const s3 = new AWS.S3({
params: {
Bucket: "prod-budi-app-assets",
},
})
const uploads = files.map(file => {
const fileExtension = [...file.name.split(".")].pop()
const processedFileName = `${uuid.v4()}.${fileExtension}`
return prepareUploadForS3({
file,
s3Key: `assets/${ctx.user.appId}/attachments/${processedFileName}`,
s3,
})
})
ctx.body = await Promise.all(uploads)
return
}
ctx.body = await processLocalFileUploads({
files,
outputPath: attachmentsPath,
instanceId: ctx.user.instanceId,
})
}
async function processLocalFileUploads({ files, outputPath, instanceId }) {
// create attachments dir if it doesn't exist
!fs.existsSync(attachmentsPath) &&
fs.mkdirSync(attachmentsPath, { recursive: true })
!fs.existsSync(outputPath) && fs.mkdirSync(outputPath, { recursive: true })
const filesToProcess = files.map(file => {
const fileExtension = [...file.path.split(".")].pop()
const fileExtension = [...file.name.split(".")].pop()
// filenames converted to UUIDs so they are unique
const fileName = `${uuid.v4()}.${fileExtension}`
const processedFileName = `${uuid.v4()}.${fileExtension}`
return {
...file,
fileName,
name: file.name,
path: file.path,
size: file.size,
type: file.type,
processedFileName,
extension: fileExtension,
outputPath: join(attachmentsPath, fileName),
url: join("/attachments", fileName),
outputPath: join(outputPath, processedFileName),
url: join("/attachments", processedFileName),
}
})
const fileProcessOperations = filesToProcess.map(file =>
fileProcessor.process(file)
const fileProcessOperations = filesToProcess.map(fileProcessor.process)
const processedFiles = await Promise.all(fileProcessOperations)
let pendingFileUploads
// local document used to track which files need to be uploaded
// db.get throws an error if the document doesn't exist
// need to use a promise to default
const db = new CouchDB(instanceId)
await db
.get("_local/fileuploads")
.then(data => {
pendingFileUploads = data
})
.catch(() => {
pendingFileUploads = { _id: "_local/fileuploads", uploads: [] }
})
pendingFileUploads.uploads = [
...processedFiles,
...pendingFileUploads.uploads,
]
await db.put(pendingFileUploads)
return processedFiles
}
exports.performLocalFileProcessing = async function(ctx) {
const { files } = ctx.request.body
const processedFileOutputPath = resolve(
budibaseAppsDir(),
ctx.user.appId,
"attachments"
)
try {
const processedFiles = await Promise.all(fileProcessOperations)
let pendingFileUploads
// local document used to track which files need to be uploaded
// db.get throws an error if the document doesn't exist
// need to use a promise to default
const db = new CouchDB(ctx.user.instanceId)
await db
.get("_local/fileuploads")
.then(data => {
pendingFileUploads = data
})
.catch(() => {
pendingFileUploads = { _id: "_local/fileuploads", uploads: [] }
})
pendingFileUploads.uploads = [
...processedFiles,
...pendingFileUploads.uploads,
]
await db.put(pendingFileUploads)
ctx.body = processedFiles
ctx.body = await processLocalFileUploads({
files,
outputPath: processedFileOutputPath,
instanceId: ctx.user.instanceId,
})
} catch (err) {
ctx.throw(500, err)
}

View File

@ -1,7 +1,7 @@
const CouchDB = require("../../db")
const clientDb = require("../../db/clientDb")
const bcrypt = require("../../utilities/bcrypt")
const getUserId = userName => `user_${userName}`
const { generateUserID, getUserParams } = require("../../db/utils")
const {
POWERUSER_LEVEL_ID,
ADMIN_LEVEL_ID,
@ -9,11 +9,11 @@ const {
exports.fetch = async function(ctx) {
const database = new CouchDB(ctx.user.instanceId)
const data = await database.query("database/by_type", {
include_docs: true,
key: ["user"],
})
const data = await database.allDocs(
getUserParams(null, {
include_docs: true,
})
)
ctx.body = data.rows.map(row => row.doc)
}
@ -31,7 +31,7 @@ exports.create = async function(ctx) {
if (!accessLevel) ctx.throw(400, "Invalid Access Level")
const user = {
_id: getUserId(username),
_id: generateUserID(username),
username,
password: await bcrypt.hash(password),
name: name || username,
@ -80,14 +80,14 @@ exports.update = async function(ctx) {
exports.destroy = async function(ctx) {
const database = new CouchDB(ctx.user.instanceId)
await database.destroy(getUserId(ctx.params.username))
await database.destroy(generateUserID(ctx.params.username))
ctx.message = `User ${ctx.params.username} deleted.`
ctx.status = 200
}
exports.find = async function(ctx) {
const database = new CouchDB(ctx.user.instanceId)
const user = await database.get(getUserId(ctx.params.username))
const user = await database.get(generateUserID(ctx.params.username))
ctx.body = {
username: user.username,
name: user.name,

View File

@ -0,0 +1,14 @@
exports.csv = function(headers, rows) {
let csv = headers.map(key => `"${key}"`).join(",")
for (let row of rows) {
csv = `${csv}\n${headers
.map(header => `"${row[header]}"`.trim())
.join(",")}`
}
return csv
}
exports.json = function(headers, rows) {
return JSON.stringify(rows, undefined, 2)
}
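A minimal usage sketch for these exporters; the headers, rows, and console.log calls below are illustrative additions, not part of this commit:
const exporters = require("./exporters")
const headers = ["name", "age"]
const rows = [
  { name: "Alice", age: 30 },
  { name: "Bob", age: 25 },
]
// csv() quotes every header and cell, one record per line:
// "name","age"
// "Alice","30"
// "Bob","25"
console.log(exporters.csv(headers, rows))
// json() ignores the headers and pretty-prints the rows as a JSON array
console.log(exporters.json(headers, rows))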

View File

@ -1,5 +1,9 @@
const CouchDB = require("../../../db")
const viewTemplate = require("./viewBuilder")
const fs = require("fs")
const path = require("path")
const os = require("os")
const exporters = require("./exporters")
const controller = {
fetch: async ctx => {
@ -7,18 +11,11 @@ const controller = {
const designDoc = await db.get("_design/database")
const response = []
for (let name in designDoc.views) {
if (
!name.startsWith("all") &&
name !== "by_type" &&
name !== "by_username" &&
name !== "by_automation_trigger"
) {
response.push({
name,
...designDoc.views[name],
})
}
for (let name of Object.keys(designDoc.views)) {
response.push({
name,
...designDoc.views[name],
})
}
ctx.body = response
@ -79,6 +76,48 @@ const controller = {
ctx.body = view
ctx.message = `View ${ctx.params.viewName} saved successfully.`
},
exportView: async ctx => {
const db = new CouchDB(ctx.user.instanceId)
const view = ctx.request.body
const format = ctx.query.format
// fetch records for the view
const response = await db.query(`database/${view.name}`, {
include_docs: !view.calculation,
group: view.groupBy,
})
if (view.calculation === "stats") {
response.rows = response.rows.map(row => ({
group: row.key,
field: view.field,
...row.value,
avg: row.value.sum / row.value.count,
}))
} else {
response.rows = response.rows.map(row => row.doc)
}
let headers = Object.keys(view.schema)
const exporter = exporters[format]
const exportedFile = exporter(headers, response.rows)
const filename = `${view.name}.${format}`
fs.writeFileSync(path.join(os.tmpdir(), filename), exportedFile)
ctx.body = {
url: `/api/views/export/download/${filename}`,
name: view.name,
}
},
downloadExport: async ctx => {
const filename = ctx.params.fileName
ctx.attachment(filename)
ctx.body = fs.createReadStream(path.join(os.tmpdir(), filename))
},
}
module.exports = controller

View File

@ -19,6 +19,7 @@ const {
automationRoutes,
accesslevelRoutes,
apiKeysRoutes,
analyticsRoutes,
} = require("./routes")
const router = new Router()
@ -109,6 +110,9 @@ router.use(accesslevelRoutes.allowedMethods())
router.use(apiKeysRoutes.routes())
router.use(apiKeysRoutes.allowedMethods())
router.use(analyticsRoutes.routes())
router.use(analyticsRoutes.allowedMethods())
router.use(staticRoutes.routes())
router.use(staticRoutes.allowedMethods())

View File

@ -0,0 +1,10 @@
const Router = require("@koa/router")
const authorized = require("../../middleware/authorized")
const { BUILDER } = require("../../utilities/accessLevels")
const controller = require("../controllers/analytics")
const router = Router()
router.get("/api/analytics", authorized(BUILDER), controller.isEnabled)
module.exports = router

View File

@ -13,6 +13,7 @@ const automationRoutes = require("./automation")
const accesslevelRoutes = require("./accesslevel")
const deployRoutes = require("./deploy")
const apiKeysRoutes = require("./apikeys")
const analyticsRoutes = require("./analytics")
module.exports = {
deployRoutes,
@ -30,4 +31,5 @@ module.exports = {
automationRoutes,
accesslevelRoutes,
apiKeysRoutes,
analyticsRoutes,
}

View File

@ -26,8 +26,9 @@ router
.post(
"/api/attachments/process",
authorized(BUILDER),
controller.processLocalFileUpload
controller.performLocalFileProcessing
)
.post("/api/attachments/upload", controller.uploadFile)
.get("/componentlibrary", controller.serveComponentLibrary)
.get("/assets/:file*", controller.serveAppAsset)
.get("/attachments/:file*", controller.serveAttachment)

View File

@ -10,12 +10,14 @@ const {
destroyDocument,
builderEndpointShouldBlockNormalUsers
} = require("./couchTestUtils")
let { generateAutomationID } = require("../../../db/utils")
const { delay } = require("./testUtils")
const MAX_RETRIES = 4
const AUTOMATION_ID = generateAutomationID()
const TEST_AUTOMATION = {
_id: "Test Automation",
_id: AUTOMATION_ID,
name: "My Automation",
pageId: "123123123",
screenId: "kasdkfldsafkl",
@ -206,7 +208,7 @@ describe("/automations", () => {
.expect('Content-Type', /json/)
.expect(200)
expect(res.body.message).toEqual("Automation Test Automation updated successfully.")
expect(res.body.message).toEqual(`Automation ${AUTOMATION_ID} updated successfully.`)
expect(res.body.automation.name).toEqual("Updated Name")
})
})

View File

@ -15,5 +15,11 @@ router
.get("/api/views", authorized(BUILDER), viewController.fetch)
.delete("/api/views/:viewName", authorized(BUILDER), viewController.destroy)
.post("/api/views", authorized(BUILDER), viewController.save)
.post("/api/views/export", authorized(BUILDER), viewController.exportView)
.get(
"/api/views/export/download/:fileName",
authorized(BUILDER),
viewController.downloadExport
)
module.exports = router

View File

@ -1,6 +1,7 @@
const CouchDB = require("../db")
const emitter = require("../events/index")
const InMemoryQueue = require("../utilities/queue/inMemoryQueue")
const { getAutomationParams } = require("../db/utils")
let automationQueue = new InMemoryQueue("automationQueue")
@ -89,15 +90,18 @@ async function queueRelevantRecordAutomations(event, eventType) {
throw `No instanceId specified for ${eventType} - check event emitters.`
}
const db = new CouchDB(event.instanceId)
const automationsToTrigger = await db.query(
"database/by_automation_trigger",
{
key: [eventType],
include_docs: true,
}
let automations = await db.allDocs(
getAutomationParams(null, { include_docs: true })
)
const automations = automationsToTrigger.rows.map(wf => wf.doc)
// filter down to the correct event type
automations = automations.rows
.map(automation => automation.doc)
.filter(automation => {
const trigger = automation.definition.trigger
return trigger && trigger.event === eventType
})
for (let automation of automations) {
let automationDef = automation.definition
let automationTrigger = automationDef ? automationDef.trigger : {}

View File

@ -57,12 +57,12 @@ exports.createLinkView = async instanceId => {
* @returns {Promise<object[]>} This will return an array of the linking documents that were found
* (if any).
*/
exports.getLinkDocuments = async ({
exports.getLinkDocuments = async function({
instanceId,
modelId,
recordId,
includeDocs,
}) => {
}) {
const db = new CouchDB(instanceId)
let params
if (recordId != null) {
@ -84,6 +84,7 @@ exports.getLinkDocuments = async ({
// check if the view doesn't exist, it should for all new instances
if (err != null && err.name === "not_found") {
await exports.createLinkView(instanceId)
return exports.getLinkDocuments(arguments[0])
} else {
Sentry.captureException(err)
}

View File

@ -0,0 +1,152 @@
const newid = require("./newid")
const DocumentTypes = {
MODEL: "model",
RECORD: "record",
USER: "user",
AUTOMATION: "automation",
LINK: "link",
APP: "app",
ACCESS_LEVEL: "accesslevel",
}
exports.DocumentTypes = DocumentTypes
const UNICODE_MAX = "\ufff0"
/**
* Builds DB allDocs/query params for documents whose ID has a single top-level component. This is
* usually the case, as most of our docs are top level, e.g. models, automations, users and so on.
* More complex cases, such as link docs and records whose IDs are made up of multiple components,
* need their own functions to build the allDocs parameters.
* @param {string} docType The type of document which input params are being built for, e.g. user,
* link, app, model and so on.
* @param {string|null} docId The ID of the document minus its type - this is only needed if looking
* for a singular document.
* @param {object} otherProps Add any other properties onto the request, e.g. include_docs.
* @returns {object} Parameters which can then be used with an allDocs request.
*/
function getDocParams(docType, docId = null, otherProps = {}) {
if (docId == null) {
docId = ""
}
return {
...otherProps,
startkey: `${docType}:${docId}`,
endkey: `${docType}:${docId}${UNICODE_MAX}`,
}
}
/**
* Gets parameters for retrieving models, this is a utility function for the getDocParams function.
*/
exports.getModelParams = (modelId = null, otherProps = {}) => {
return getDocParams(DocumentTypes.MODEL, modelId, otherProps)
}
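A minimal sketch, not part of this commit, of how these params are typically consumed; it assumes a PouchDB/CouchDB-style handle like the one exported from ../../db, and the fetchAllModels name is illustrative:
const CouchDB = require("../../db")
const { getModelParams } = require("../../db/utils")
async function fetchAllModels(instanceId) {
  const db = new CouchDB(instanceId)
  // startkey "model:" and endkey "model:\ufff0" match every doc whose _id
  // begins with the model prefix; include_docs returns the docs themselves
  const body = await db.allDocs(getModelParams(null, { include_docs: true }))
  return body.rows.map(row => row.doc)
}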
/**
* Generates a new model ID.
* @returns {string} The new model ID which the model doc can be stored under.
*/
exports.generateModelID = () => {
return `${DocumentTypes.MODEL}:${newid()}`
}
/**
* Gets the DB allDocs/query params for retrieving a record.
* @param {string} modelId The model in which the records have been stored.
* @param {string|null} recordId The ID of the record which is being specifically queried for. This can be
* left null to get all the records in the model.
* @param {object} otherProps Any other properties to add to the request.
* @returns {object} Parameters which can then be used with an allDocs request.
*/
exports.getRecordParams = (modelId, recordId = null, otherProps = {}) => {
if (modelId == null) {
throw "Cannot build params for records without a model ID"
}
const endOfKey = recordId == null ? `${modelId}:` : `${modelId}:${recordId}`
return getDocParams(DocumentTypes.RECORD, endOfKey, otherProps)
}
/**
* Gets a new record ID for the specified model.
* @param {string} modelId The model which the record is being created for.
* @returns {string} The new ID which a record doc can be stored under.
*/
exports.generateRecordID = modelId => {
return `${DocumentTypes.RECORD}:${modelId}:${newid()}`
}
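A similar sketch, also not from the commit, showing how a record ID embeds its model ID and how getRecordParams scopes an allDocs call to a single model (the "model:1a2b3c" ID is made up):
const { generateRecordID, getRecordParams } = require("../../db/utils")
// e.g. "record:model:1a2b3c:<newid>" - the model ID sits inside the record ID
const recordId = generateRecordID("model:1a2b3c")
// startkey "record:model:1a2b3c:" / endkey "record:model:1a2b3c:\ufff0"
// match only the records stored under that model
const params = getRecordParams("model:1a2b3c", null, { include_docs: true })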
/**
* Gets parameters for retrieving users, this is a utility function for the getDocParams function.
*/
exports.getUserParams = (username = null, otherProps = {}) => {
return getDocParams(DocumentTypes.USER, username, otherProps)
}
/**
* Generates a new user ID based on the passed in username.
* @param {string} username The username which the ID is going to be built up of.
* @returns {string} The new user ID which the user doc can be stored under.
*/
exports.generateUserID = username => {
return `${DocumentTypes.USER}:${username}`
}
/**
* Gets parameters for retrieving automations, this is a utility function for the getDocParams function.
*/
exports.getAutomationParams = (automationId = null, otherProps = {}) => {
return getDocParams(DocumentTypes.AUTOMATION, automationId, otherProps)
}
/**
* Generates a new automation ID.
* @returns {string} The new automation ID which the automation doc can be stored under.
*/
exports.generateAutomationID = () => {
return `${DocumentTypes.AUTOMATION}:${newid()}`
}
/**
* Generates a new link doc ID. This is currently not usable with the allDocs call;
* instead a view is built to make walking the tree easier.
* @param {string} modelId1 The ID of the linker model.
* @param {string} modelId2 The ID of the linked model.
* @param {string} recordId1 The ID of the linker record.
* @param {string} recordId2 The ID of the linked record.
* @returns {string} The new link ID which the link doc can be stored under.
*/
exports.generateLinkID = (modelId1, modelId2, recordId1, recordId2) => {
return `${DocumentTypes.LINK}:${modelId1}:${modelId2}:${recordId1}:${recordId2}`
}
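// Illustrative example, not part of the original file: the link ID simply
// concatenates both model IDs and both record IDs under the link prefix, e.g.
//   generateLinkID("model:a", "model:b", "record:model:a:1", "record:model:b:2")
//   => "link:model:a:model:b:record:model:a:1:record:model:b:2"
// As noted above, these IDs are walked via a view rather than an allDocs range.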
/**
* Generates a new app ID.
* @returns {string} The new app ID which the app doc can be stored under.
*/
exports.generateAppID = () => {
return `${DocumentTypes.APP}:${newid()}`
}
/**
* Gets parameters for retrieving apps; a thin utility wrapper around getDocParams.
*/
exports.getAppParams = (appId = null, otherProps = {}) => {
return getDocParams(DocumentTypes.APP, appId, otherProps)
}
/**
* Generates a new access level ID.
* @returns {string} The new access level ID which the access level doc can be stored under.
*/
exports.generateAccessLevelID = () => {
return `${DocumentTypes.ACCESS_LEVEL}:${newid()}`
}
/**
* Gets parameters for retrieving an access level; a thin utility wrapper around getDocParams.
*/
exports.getAccessLevelParams = (accessLevelId = null, otherProps = {}) => {
return getDocParams(DocumentTypes.ACCESS_LEVEL, accessLevelId, otherProps)
}

View File

@ -530,10 +530,12 @@
"@types/events@*":
version "3.0.0"
resolved "https://registry.yarnpkg.com/@types/events/-/events-3.0.0.tgz#2862f3f58a9a7f7c3e78d79f130dd4d71c25c2a7"
integrity sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g==
"@types/formidable@^1.0.31":
version "1.0.31"
resolved "https://registry.yarnpkg.com/@types/formidable/-/formidable-1.0.31.tgz#274f9dc2d0a1a9ce1feef48c24ca0859e7ec947b"
integrity sha512-dIhM5t8lRP0oWe2HF8MuPvdd1TpPTjhDMAqemcq6oIZQCBQTovhBAdTQ5L5veJB4pdQChadmHuxtB0YzqvfU3Q==
dependencies:
"@types/events" "*"
"@types/node" "*"
@ -3946,9 +3948,10 @@ kleur@^3.0.3:
version "3.0.3"
resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e"
koa-body@^4.1.0:
version "4.1.1"
resolved "https://registry.yarnpkg.com/koa-body/-/koa-body-4.1.1.tgz#50686d290891fc6f1acb986cf7cfcd605f855ef0"
koa-body@^4.2.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/koa-body/-/koa-body-4.2.0.tgz#37229208b820761aca5822d14c5fc55cee31b26f"
integrity sha512-wdGu7b9amk4Fnk/ytH8GuWwfs4fsB5iNkY8kZPpgQVb04QZSv85T0M8reb+cJmvLE8cjPYvBzRikD3s6qz8OoA==
dependencies:
"@types/formidable" "^1.0.31"
co-body "^5.1.1"

View File

@ -13,7 +13,7 @@
"dev:builder": "rollup -cw"
},
"devDependencies": {
"@budibase/client": "^0.1.21",
"@budibase/client": "^0.1.22",
"@rollup/plugin-commonjs": "^11.1.0",
"lodash": "^4.17.15",
"rollup": "^1.11.0",
@ -31,13 +31,13 @@
"keywords": [
"svelte"
],
"version": "0.1.21",
"version": "0.1.22",
"license": "MIT",
"gitHead": "284cceb9b703c38566c6e6363c022f79a08d5691",
"dependencies": {
"@beyonk/svelte-googlemaps": "^2.2.0",
"@fortawesome/fontawesome-free": "^5.14.0",
"@budibase/bbui": "^1.39.0",
"@fortawesome/fontawesome-free": "^5.14.0",
"britecharts": "^2.16.1",
"d3-selection": "^1.4.2",
"fast-sort": "^2.2.0",

View File

@ -2,6 +2,7 @@
import { onMount } from "svelte"
import { fade } from "svelte/transition"
import { Label, DatePicker } from "@budibase/bbui"
import Dropzone from "./attachments/Dropzone.svelte"
import debounce from "lodash.debounce"
export let _bb
@ -54,8 +55,9 @@
const save = debounce(async () => {
for (let field of fields) {
// Assign defaults to empty fields to prevent validation issues
if (!(field in record))
if (!(field in record)) {
record[field] = DEFAULTS_FOR_TYPE[schema[field].type]
}
}
const SAVE_RECORD_URL = `/api/${model}/records`
@ -132,6 +134,8 @@
<input class="input" type="number" bind:value={record[field]} />
{:else if schema[field].type === 'string'}
<input class="input" type="text" bind:value={record[field]} />
{:else if schema[field].type === 'attachment'}
<Dropzone bind:files={record[field]} />
{/if}
</div>
<hr />

View File

@ -6,6 +6,7 @@
import fsort from "fast-sort"
import fetchData from "./fetchData.js"
import { isEmpty } from "lodash/fp"
import AttachmentList from "./attachments/AttachmentList.svelte"
export let backgroundColor
export let color
@ -17,6 +18,7 @@
let headers = []
let sort = {}
let sorted = []
let schema = {}
$: cssVariables = {
backgroundColor,
@ -83,7 +85,10 @@
{#each sorted as row (row._id)}
<tr>
{#each headers as header}
{#if row[header]}
<!-- Rudimentary handling for attachments, based on the array type, given this entire table will be replaced by AG Grid -->
{#if Array.isArray(row[header])}
<AttachmentList files={row[header]} />
{:else if row[header]}
<td>{row[header]}</td>
{/if}
{/each}

View File

@ -1,7 +1,10 @@
const apiCall = method => async (url, body) => {
const headers = {
const apiCall = method => async (
url,
body,
headers = {
"Content-Type": "application/json",
}
) => {
const response = await fetch(url, {
method: method,
body: body && JSON.stringify(body),
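// A minimal usage sketch, not part of the diff: the reworked signature lets a
// caller override the default JSON headers per request. The `post` wrapper and
// the "/api/example" URL below are assumptions for illustration, mirroring the
// usual `const post = apiCall("POST")` style helpers.
//   const post = apiCall("POST")
//   await post("/api/example", { name: "demo" }) // default JSON headers apply
//   await post("/api/example", { name: "demo" }, { Accept: "application/json" })
//   // note: a custom object replaces the whole default headers object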

View File

@ -0,0 +1,64 @@
<script>
import { FILE_TYPES } from "./fileTypes"
export let files
export let height = "70"
export let width = "70"
</script>
<div class="file-list">
{#each files as file}
<a href={file.url} target="_blank" rel="noopener noreferrer">
<div class="file">
{#if FILE_TYPES.IMAGE.includes(file.extension.toLowerCase())}
<img {width} {height} src={file.url} alt={file.name} />
{:else}
<i class="far fa-file" />
{/if}
</div>
<span>{file.name}</span>
</a>
{/each}
</div>
<style>
.file-list {
display: grid;
grid-auto-flow: column;
grid-gap: var(--spacing-m);
grid-template-columns: repeat(10, 1fr);
}
img {
object-fit: contain;
}
i {
margin-bottom: var(--spacing-m);
}
a {
color: var(--ink);
text-decoration: none;
}
.file {
position: relative;
height: 75px;
width: 75px;
border: 2px dashed var(--grey-7);
padding: var(--spacing-xs);
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
overflow: hidden;
text-overflow: ellipsis;
}
span {
width: 75px;
overflow: hidden;
text-overflow: ellipsis;
}
</style>

View File

@ -0,0 +1,35 @@
<script>
import { Heading, Body, Button, Dropzone } from "@budibase/bbui"
import { FILE_TYPES } from "./fileTypes"
const BYTES_IN_KB = 1000
const BYTES_IN_MB = BYTES_IN_KB * 1000
export let files = []
function handleFileTooLarge(fileSizeLimit) {
alert(
`Files cannot exceed ${fileSizeLimit /
BYTES_IN_MB}MB. Please try again with smaller files.`
)
}
async function processFiles(fileList) {
let data = new FormData()
for (let i = 0; i < fileList.length; i++) {
data.append("file", fileList[i])
}
const response = await fetch("/api/attachments/upload", {
method: "POST",
body: data,
headers: {
Accept: "application/json",
},
})
const processedFiles = await response.json()
return processedFiles
}
</script>
<Dropzone bind:files {processFiles} {handleFileTooLarge} />

View File

@ -0,0 +1,5 @@
export const FILE_TYPES = {
IMAGE: ["png", "tiff", "gif", "raw", "jpg", "jpeg"],
CODE: ["js", "rs", "py", "java", "rb", "hs", "yml"],
DOCUMENT: ["odf", "docx", "doc", "pdf", "csv"],
}
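// Illustrative helper, not part of this commit: one way these lists might be used
// to classify an uploaded file by extension (category names are the keys above).
export const getFileCategory = extension => {
  const ext = extension.toLowerCase()
  return (
    Object.keys(FILE_TYPES).find(category => FILE_TYPES[category].includes(ext)) ||
    "OTHER"
  )
}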