Merge branch 'master' into prefill-cards

commit d39ec25f0c

@@ -1,4 +1,5 @@
 {
+  "root": true,
   "env": {
     "browser": true,
     "es6": true,

@@ -170,7 +170,8 @@ jobs:
 docker pull mongo:7.0-jammy &
 docker pull mariadb:lts &
 docker pull testcontainers/ryuk:0.5.1 &
-docker pull budibase/couchdb:v3.2.1-sql &
+docker pull budibase/couchdb:v3.2.1-sqs &
+docker pull minio/minio &
 docker pull redis &

 wait $(jobs -p)

@@ -12,4 +12,5 @@ packages/pro/coverage
 packages/account-portal/packages/ui/build
 packages/account-portal/packages/ui/.routify
 packages/account-portal/packages/server/build
+packages/account-portal/packages/server/coverage
 **/*.ivm.bundle.js

@@ -2,7 +2,7 @@
 apiVersion: {{ ternary "autoscaling/v2" "autoscaling/v2beta2" (.Capabilities.APIVersions.Has "autoscaling/v2") }}
 kind: HorizontalPodAutoscaler
 metadata:
-  name: {{ include "budibase.fullname" . }}-apps
+  name: {{ include "budibase.fullname" . }}-automation-worker
   labels:
     {{- include "budibase.labels" . | nindent 4 }}
 spec:

@@ -46,7 +46,7 @@ export default async function setup() {
   await killContainers(containers)

   try {
-    let couchdb = new GenericContainer("budibase/couchdb:v3.2.1-sqs")
+    const couchdb = new GenericContainer("budibase/couchdb:v3.2.1-sqs")
       .withExposedPorts(5984, 4984)
       .withEnvironment({
         COUCHDB_PASSWORD: "budibase",

@@ -69,7 +69,20 @@ export default async function setup() {
       ).withStartupTimeout(20000)
     )

-    await couchdb.start()
+    const minio = new GenericContainer("minio/minio")
+      .withExposedPorts(9000)
+      .withCommand(["server", "/data"])
+      .withEnvironment({
+        MINIO_ACCESS_KEY: "budibase",
+        MINIO_SECRET_KEY: "budibase",
+      })
+      .withLabels({ "com.budibase": "true" })
+      .withReuse()
+      .withWaitStrategy(
+        Wait.forHttp("/minio/health/ready", 9000).withStartupTimeout(10000)
+      )
+
+    await Promise.all([couchdb.start(), minio.start()])
   } finally {
     lockfile.unlockSync(lockPath)
   }

@@ -70,10 +70,10 @@ sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/clouseau/clouse
 /opt/clouseau/bin/clouseau > /dev/stdout 2>&1 &

 # Start CouchDB.
-/docker-entrypoint.sh /opt/couchdb/bin/couchdb &
+/docker-entrypoint.sh /opt/couchdb/bin/couchdb > /dev/stdout 2>&1 &

-# Start SQS.
-/opt/sqs/sqs --server "http://localhost:5984" --data-dir ${DATA_DIR}/sqs --bind-address=0.0.0.0 &
+# Start SQS. Use 127.0.0.1 instead of localhost to avoid IPv6 issues.
+/opt/sqs/sqs --server "http://127.0.0.1:5984" --data-dir ${DATA_DIR}/sqs --bind-address=0.0.0.0 > /dev/stdout 2>&1 &

 # Wait for CouchDB to start up.
 while [[ $(curl -s -w "%{http_code}\n" http://localhost:5984/_up -o /dev/null) -ne 200 ]]; do

@@ -1,5 +1,5 @@
 {
-  "version": "2.23.12",
+  "version": "2.25.0",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

@@ -281,7 +281,7 @@ export function doInScimContext(task: any) {
   return newContext(updates, task)
 }

-export async function ensureSnippetContext() {
+export async function ensureSnippetContext(enabled = !env.isTest()) {
   const ctx = getCurrentContext()

   // If we've already added snippets to context, continue

@@ -292,7 +292,7 @@ export async function ensureSnippetContext() {
   // Otherwise get snippets for this app and update context
   let snippets: Snippet[] | undefined
   const db = getAppDB()
-  if (db && !env.isTest()) {
+  if (db && enabled) {
     const app = await db.get<App>(DocumentType.APP_METADATA)
     snippets = app.snippets
   }

@@ -3,11 +3,11 @@ import {
   AllDocsResponse,
   AnyDocument,
   Database,
-  DatabaseOpts,
-  DatabaseQueryOpts,
-  DatabasePutOpts,
   DatabaseCreateIndexOpts,
   DatabaseDeleteIndexOpts,
+  DatabaseOpts,
+  DatabasePutOpts,
+  DatabaseQueryOpts,
   Document,
   isDocument,
   RowResponse,

@@ -17,7 +17,7 @@ import {
 import { getCouchInfo } from "./connections"
 import { directCouchUrlCall } from "./utils"
 import { getPouchDB } from "./pouchDB"
-import { WriteStream, ReadStream } from "fs"
+import { ReadStream, WriteStream } from "fs"
 import { newid } from "../../docIds/newid"
 import { SQLITE_DESIGN_DOC_ID } from "../../constants"
 import { DDInstrumentedDatabase } from "../instrumentation"
@@ -38,6 +38,39 @@ function buildNano(couchInfo: { url: string; cookie: string }) {

 type DBCall<T> = () => Promise<T>

+class CouchDBError extends Error {
+  status: number
+  statusCode: number
+  reason: string
+  name: string
+  errid: string
+  error: string
+  description: string
+
+  constructor(
+    message: string,
+    info: {
+      status: number | undefined
+      statusCode: number | undefined
+      name: string
+      errid: string
+      description: string
+      reason: string
+      error: string
+    }
+  ) {
+    super(message)
+    const statusCode = info.status || info.statusCode || 500
+    this.status = statusCode
+    this.statusCode = statusCode
+    this.reason = info.reason
+    this.name = info.name
+    this.errid = info.errid
+    this.description = info.description
+    this.error = info.error
+  }
+}
+
 export function DatabaseWithConnection(
   dbName: string,
   connection: string,

@@ -119,7 +152,7 @@ export class DatabaseImpl implements Database {
     } catch (err: any) {
       // Handling race conditions
       if (err.statusCode !== 412) {
-        throw err
+        throw new CouchDBError(err.message, err)
       }
     }
   }

@@ -138,10 +171,9 @@ export class DatabaseImpl implements Database {
      if (err.statusCode === 404 && err.reason === DATABASE_NOT_FOUND) {
        await this.checkAndCreateDb()
        return await this.performCall(call)
-     } else if (err.statusCode) {
-       err.status = err.statusCode
      }
-     throw err
+     // stripping the error down the props which are safe/useful, drop everything else
+     throw new CouchDBError(`CouchDB error: ${err.message}`, err)
    }
  }

@@ -288,7 +320,7 @@ export class DatabaseImpl implements Database {
      if (err.statusCode === 404) {
        return
      } else {
-       throw { ...err, status: err.statusCode }
+       throw new CouchDBError(err.message, err)
      }
    }
  }

@@ -12,6 +12,10 @@ import { dataFilters } from "@budibase/shared-core"

 export const removeKeyNumbering = dataFilters.removeKeyNumbering

+function isEmpty(value: any) {
+  return value == null || value === ""
+}
+
 /**
  * Class to build lucene query URLs.
  * Optionally takes a base lucene query object.

@@ -282,15 +286,14 @@ export class QueryBuilder<T> {
    }

    const equal = (key: string, value: any) => {
-     // 0 evaluates to false, which means we would return all rows if we don't check it
-     if (!value && value !== 0) {
+     if (isEmpty(value)) {
        return null
      }
      return `${key}:${builder.preprocess(value, allPreProcessingOpts)}`
    }

    const contains = (key: string, value: any, mode = "AND") => {
-     if (!value || (Array.isArray(value) && value.length === 0)) {
+     if (isEmpty(value)) {
        return null
      }
      if (!Array.isArray(value)) {

@@ -306,7 +309,7 @@ export class QueryBuilder<T> {
    }

    const fuzzy = (key: string, value: any) => {
-     if (!value) {
+     if (isEmpty(value)) {
        return null
      }
      value = builder.preprocess(value, {

@@ -328,7 +331,7 @@ export class QueryBuilder<T> {
    }

    const oneOf = (key: string, value: any) => {
-     if (!value) {
+     if (isEmpty(value)) {
        return `*:*`
      }
      if (!Array.isArray(value)) {

@@ -386,7 +389,7 @@ export class QueryBuilder<T> {
    // Construct the actual lucene search query string from JSON structure
    if (this.#query.string) {
      build(this.#query.string, (key: string, value: any) => {
-       if (!value) {
+       if (isEmpty(value)) {
          return null
        }
        value = builder.preprocess(value, {

@@ -399,7 +402,7 @@ export class QueryBuilder<T> {
    }
    if (this.#query.range) {
      build(this.#query.range, (key: string, value: any) => {
-       if (!value) {
+       if (isEmpty(value)) {
          return null
        }
        if (value.low == null || value.low === "") {

@@ -421,7 +424,7 @@ export class QueryBuilder<T> {
    }
    if (this.#query.notEqual) {
      build(this.#query.notEqual, (key: string, value: any) => {
-       if (!value) {
+       if (isEmpty(value)) {
          return null
        }
        if (typeof value === "boolean") {

@@ -431,10 +434,28 @@ export class QueryBuilder<T> {
      })
    }
    if (this.#query.empty) {
-     build(this.#query.empty, (key: string) => `(*:* -${key}:["" TO *])`)
+     build(this.#query.empty, (key: string) => {
+       // Because the structure of an empty filter looks like this:
+       // { empty: { someKey: null } }
+       //
+       // The check inside of `build` does not set `allFiltersEmpty`, which results
+       // in weird behaviour when the empty filter is the only filter. We get around
+       // this by setting `allFiltersEmpty` to false here.
+       allFiltersEmpty = false
+       return `(*:* -${key}:["" TO *])`
+     })
    }
    if (this.#query.notEmpty) {
-     build(this.#query.notEmpty, (key: string) => `${key}:["" TO *]`)
+     build(this.#query.notEmpty, (key: string) => {
+       // Because the structure of a notEmpty filter looks like this:
+       // { notEmpty: { someKey: null } }
+       //
+       // The check inside of `build` does not set `allFiltersEmpty`, which results
+       // in weird behaviour when the empty filter is the only filter. We get around
+       // this by setting `allFiltersEmpty` to false here.
+       allFiltersEmpty = false
+       return `${key}:["" TO *]`
+     })
    }
    if (this.#query.oneOf) {
      build(this.#query.oneOf, oneOf)
@@ -13,13 +13,14 @@ import { bucketTTLConfig, budibaseTempDir } from "./utils"
 import { v4 } from "uuid"
 import { APP_PREFIX, APP_DEV_PREFIX } from "../db"
 import fsp from "fs/promises"
+import { HeadObjectOutput } from "aws-sdk/clients/s3"

 const streamPipeline = promisify(stream.pipeline)
 // use this as a temporary store of buckets that are being created
 const STATE = {
   bucketCreationPromises: {},
 }
-const signedFilePrefix = "/files/signed"
+export const SIGNED_FILE_PREFIX = "/files/signed"

 type ListParams = {
   ContinuationToken?: string

@@ -40,8 +41,13 @@ type UploadParams = BaseUploadParams & {
   path?: string | PathLike
 }

-type StreamUploadParams = BaseUploadParams & {
-  stream: ReadStream
+export type StreamTypes =
+  | ReadStream
+  | NodeJS.ReadableStream
+  | ReadableStream<Uint8Array>
+
+export type StreamUploadParams = BaseUploadParams & {
+  stream?: StreamTypes
 }

 const CONTENT_TYPE_MAP: any = {

@@ -83,7 +89,7 @@ export function ObjectStore(
   bucket: string,
   opts: { presigning: boolean } = { presigning: false }
 ) {
-  const config: any = {
+  const config: AWS.S3.ClientConfiguration = {
     s3ForcePathStyle: true,
     signatureVersion: "v4",
     apiVersion: "2006-03-01",

@@ -174,11 +180,9 @@ export async function upload({
   const objectStore = ObjectStore(bucketName)
   const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)

-  if (ttl && (bucketCreated.created || bucketCreated.exists)) {
+  if (ttl && bucketCreated.created) {
     let ttlConfig = bucketTTLConfig(bucketName, ttl)
-    if (objectStore.putBucketLifecycleConfiguration) {
-      await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
-    }
+    await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
   }

   let contentType = type

@@ -222,11 +226,9 @@ export async function streamUpload({
   const objectStore = ObjectStore(bucketName)
   const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)

-  if (ttl && (bucketCreated.created || bucketCreated.exists)) {
+  if (ttl && bucketCreated.created) {
     let ttlConfig = bucketTTLConfig(bucketName, ttl)
-    if (objectStore.putBucketLifecycleConfiguration) {
-      await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
-    }
+    await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
   }

   // Set content type for certain known extensions

@@ -333,7 +335,7 @@ export function getPresignedUrl(
     const signedUrl = new URL(url)
     const path = signedUrl.pathname
     const query = signedUrl.search
-    return `${signedFilePrefix}${path}${query}`
+    return `${SIGNED_FILE_PREFIX}${path}${query}`
   }
 }

@@ -521,6 +523,26 @@ export async function getReadStream(
   return client.getObject(params).createReadStream()
 }

+export async function getObjectMetadata(
+  bucket: string,
+  path: string
+): Promise<HeadObjectOutput> {
+  bucket = sanitizeBucket(bucket)
+  path = sanitizeKey(path)
+
+  const client = ObjectStore(bucket)
+  const params = {
+    Bucket: bucket,
+    Key: path,
+  }
+
+  try {
+    return await client.headObject(params).promise()
+  } catch (err: any) {
+    throw new Error("Unable to retrieve metadata from object")
+  }
+}
+
 /*
   Given a signed url like '/files/signed/tmp-files-attachments/app_123456/myfile.txt' extract
   the bucket and the path from it

@@ -530,7 +552,9 @@ export function extractBucketAndPath(
 ): { bucket: string; path: string } | null {
   const baseUrl = url.split("?")[0]

-  const regex = new RegExp(`^${signedFilePrefix}/(?<bucket>[^/]+)/(?<path>.+)$`)
+  const regex = new RegExp(
+    `^${SIGNED_FILE_PREFIX}/(?<bucket>[^/]+)/(?<path>.+)$`
+  )
   const match = baseUrl.match(regex)

   if (match && match.groups) {

@@ -1,9 +1,14 @@
-import { join } from "path"
+import path, { join } from "path"
 import { tmpdir } from "os"
 import fs from "fs"
 import env from "../environment"
 import { PutBucketLifecycleConfigurationRequest } from "aws-sdk/clients/s3"
+import * as objectStore from "./objectStore"
+import {
+  AutomationAttachment,
+  AutomationAttachmentContent,
+  BucketedContent,
+} from "@budibase/types"
 /****************************************************
  * NOTE: When adding a new bucket - name *
  * sure that S3 usages (like budibase-infra) *

@@ -55,3 +60,50 @@ export const bucketTTLConfig = (

   return params
 }
+
+async function processUrlAttachment(
+  attachment: AutomationAttachment
+): Promise<AutomationAttachmentContent> {
+  const response = await fetch(attachment.url)
+  if (!response.ok || !response.body) {
+    throw new Error(`Unexpected response ${response.statusText}`)
+  }
+  const fallbackFilename = path.basename(new URL(attachment.url).pathname)
+  return {
+    filename: attachment.filename || fallbackFilename,
+    content: response.body,
+  }
+}
+
+export async function processObjectStoreAttachment(
+  attachment: AutomationAttachment
+): Promise<BucketedContent> {
+  const result = objectStore.extractBucketAndPath(attachment.url)
+
+  if (result === null) {
+    throw new Error("Invalid signed URL")
+  }
+
+  const { bucket, path: objectPath } = result
+  const readStream = await objectStore.getReadStream(bucket, objectPath)
+  const fallbackFilename = path.basename(objectPath)
+  return {
+    bucket,
+    path: objectPath,
+    filename: attachment.filename || fallbackFilename,
+    content: readStream,
+  }
+}
+
+export async function processAutomationAttachment(
+  attachment: AutomationAttachment
+): Promise<AutomationAttachmentContent | BucketedContent> {
+  const isFullyFormedUrl =
+    attachment.url?.startsWith("http://") ||
+    attachment.url?.startsWith("https://")
+  if (isFullyFormedUrl) {
+    return await processUrlAttachment(attachment)
+  } else {
+    return await processObjectStoreAttachment(attachment)
+  }
+}

@@ -4,6 +4,3 @@ export { generator } from "./structures"
 export * as testContainerUtils from "./testContainerUtils"
 export * as utils from "./utils"
 export * from "./jestUtils"
-import * as minio from "./minio"
-
-export const objectStoreTestProviders = { minio }
@ -1,34 +0,0 @@
|
||||||
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
|
|
||||||
import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
|
|
||||||
import env from "../../../src/environment"
|
|
||||||
|
|
||||||
let container: StartedTestContainer | undefined
|
|
||||||
|
|
||||||
class ObjectStoreWaitStrategy extends AbstractWaitStrategy {
|
|
||||||
async waitUntilReady(container: any, boundPorts: any, startTime?: Date) {
|
|
||||||
const logs = Wait.forListeningPorts()
|
|
||||||
await logs.waitUntilReady(container, boundPorts, startTime)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function start(): Promise<void> {
|
|
||||||
container = await new GenericContainer("minio/minio")
|
|
||||||
.withExposedPorts(9000)
|
|
||||||
.withCommand(["server", "/data"])
|
|
||||||
.withEnvironment({
|
|
||||||
MINIO_ACCESS_KEY: "budibase",
|
|
||||||
MINIO_SECRET_KEY: "budibase",
|
|
||||||
})
|
|
||||||
.withWaitStrategy(new ObjectStoreWaitStrategy().withStartupTimeout(30000))
|
|
||||||
.start()
|
|
||||||
|
|
||||||
const port = container.getMappedPort(9000)
|
|
||||||
env._set("MINIO_URL", `http://0.0.0.0:${port}`)
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function stop() {
|
|
||||||
if (container) {
|
|
||||||
await container.stop()
|
|
||||||
container = undefined
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -86,10 +86,18 @@ export function setupEnv(...envs: any[]) {
|
||||||
throw new Error("CouchDB SQL port not found")
|
throw new Error("CouchDB SQL port not found")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const minio = getContainerByImage("minio/minio")
|
||||||
|
|
||||||
|
const minioPort = getExposedV4Port(minio, 9000)
|
||||||
|
if (!minioPort) {
|
||||||
|
throw new Error("Minio port not found")
|
||||||
|
}
|
||||||
|
|
||||||
const configs = [
|
const configs = [
|
||||||
{ key: "COUCH_DB_PORT", value: `${couchPort}` },
|
{ key: "COUCH_DB_PORT", value: `${couchPort}` },
|
||||||
{ key: "COUCH_DB_URL", value: `http://127.0.0.1:${couchPort}` },
|
{ key: "COUCH_DB_URL", value: `http://127.0.0.1:${couchPort}` },
|
||||||
{ key: "COUCH_DB_SQL_URL", value: `http://127.0.0.1:${couchSqlPort}` },
|
{ key: "COUCH_DB_SQL_URL", value: `http://127.0.0.1:${couchSqlPort}` },
|
||||||
|
{ key: "MINIO_URL", value: `http://127.0.0.1:${minioPort}` },
|
||||||
]
|
]
|
||||||
|
|
||||||
for (const config of configs.filter(x => !!x.value)) {
|
for (const config of configs.filter(x => !!x.value)) {
|
||||||
|
|
|
@ -83,7 +83,6 @@
|
||||||
"dayjs": "^1.10.8",
|
"dayjs": "^1.10.8",
|
||||||
"easymde": "^2.16.1",
|
"easymde": "^2.16.1",
|
||||||
"svelte-dnd-action": "^0.9.8",
|
"svelte-dnd-action": "^0.9.8",
|
||||||
"svelte-flatpickr": "3.2.3",
|
|
||||||
"svelte-portal": "^1.0.0"
|
"svelte-portal": "^1.0.0"
|
||||||
},
|
},
|
||||||
"resolutions": {
|
"resolutions": {
|
||||||
|
|
|
@ -1,23 +1,25 @@
|
||||||
|
// These class names will never trigger a callback if clicked, no matter what
|
||||||
const ignoredClasses = [
|
const ignoredClasses = [
|
||||||
".download-js-link",
|
".download-js-link",
|
||||||
".flatpickr-calendar",
|
|
||||||
".spectrum-Menu",
|
".spectrum-Menu",
|
||||||
".date-time-popover",
|
".date-time-popover",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
// These class names will only trigger a callback when clicked if the registered
|
||||||
|
// component is not nested inside them. For example, clicking inside a modal
|
||||||
|
// will not close the modal, or clicking inside a popover will not close the
|
||||||
|
// popover.
|
||||||
const conditionallyIgnoredClasses = [
|
const conditionallyIgnoredClasses = [
|
||||||
".spectrum-Underlay",
|
".spectrum-Underlay",
|
||||||
".drawer-wrapper",
|
".drawer-wrapper",
|
||||||
".spectrum-Popover",
|
".spectrum-Popover",
|
||||||
]
|
]
|
||||||
let clickHandlers = []
|
let clickHandlers = []
|
||||||
|
let candidateTarget
|
||||||
|
|
||||||
/**
|
// Processes a "click outside" event and invokes callbacks if our source element
|
||||||
* Handle a body click event
|
// is valid
|
||||||
*/
|
|
||||||
const handleClick = event => {
|
const handleClick = event => {
|
||||||
// Treat right clicks (context menu events) as normal clicks
|
|
||||||
const eventType = event.type === "contextmenu" ? "click" : event.type
|
|
||||||
|
|
||||||
// Ignore click if this is an ignored class
|
// Ignore click if this is an ignored class
|
||||||
if (event.target.closest('[data-ignore-click-outside="true"]')) {
|
if (event.target.closest('[data-ignore-click-outside="true"]')) {
|
||||||
return
|
return
|
||||||
|
@ -30,11 +32,6 @@ const handleClick = event => {
|
||||||
|
|
||||||
// Process handlers
|
// Process handlers
|
||||||
clickHandlers.forEach(handler => {
|
clickHandlers.forEach(handler => {
|
||||||
// Check that we're the right kind of click event
|
|
||||||
if (handler.allowedType && eventType !== handler.allowedType) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check that the click isn't inside the target
|
// Check that the click isn't inside the target
|
||||||
if (handler.element.contains(event.target)) {
|
if (handler.element.contains(event.target)) {
|
||||||
return
|
return
|
||||||
|
@ -52,17 +49,43 @@ const handleClick = event => {
|
||||||
handler.callback?.(event)
|
handler.callback?.(event)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
document.documentElement.addEventListener("click", handleClick, true)
|
|
||||||
document.documentElement.addEventListener("mousedown", handleClick, true)
|
// On mouse up we only trigger a "click outside" callback if we targetted the
|
||||||
document.documentElement.addEventListener("contextmenu", handleClick, true)
|
// same element that we did on mouse down. This fixes all sorts of issues where
|
||||||
|
// we get annoying callbacks firing when we drag to select text.
|
||||||
|
const handleMouseUp = e => {
|
||||||
|
if (candidateTarget === e.target) {
|
||||||
|
handleClick(e)
|
||||||
|
}
|
||||||
|
candidateTarget = null
|
||||||
|
}
|
||||||
|
|
||||||
|
// On mouse down we store which element was targetted for comparison later
|
||||||
|
const handleMouseDown = e => {
|
||||||
|
// Only handle the primary mouse button here.
|
||||||
|
// We handle context menu (right click) events in another handler.
|
||||||
|
if (e.button !== 0) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
candidateTarget = e.target
|
||||||
|
|
||||||
|
// Clear any previous listeners in case of multiple down events, and register
|
||||||
|
// a single mouse up listener
|
||||||
|
document.removeEventListener("mouseup", handleMouseUp)
|
||||||
|
document.addEventListener("mouseup", handleMouseUp, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Global singleton listeners for our events
|
||||||
|
document.addEventListener("mousedown", handleMouseDown)
|
||||||
|
document.addEventListener("contextmenu", handleClick)
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Adds or updates a click handler
|
* Adds or updates a click handler
|
||||||
*/
|
*/
|
||||||
const updateHandler = (id, element, anchor, callback, allowedType) => {
|
const updateHandler = (id, element, anchor, callback) => {
|
||||||
let existingHandler = clickHandlers.find(x => x.id === id)
|
let existingHandler = clickHandlers.find(x => x.id === id)
|
||||||
if (!existingHandler) {
|
if (!existingHandler) {
|
||||||
clickHandlers.push({ id, element, anchor, callback, allowedType })
|
clickHandlers.push({ id, element, anchor, callback })
|
||||||
} else {
|
} else {
|
||||||
existingHandler.callback = callback
|
existingHandler.callback = callback
|
||||||
}
|
}
|
||||||
|
@ -89,8 +112,7 @@ export default (element, opts) => {
|
||||||
const callback =
|
const callback =
|
||||||
newOpts?.callback || (typeof newOpts === "function" ? newOpts : null)
|
newOpts?.callback || (typeof newOpts === "function" ? newOpts : null)
|
||||||
const anchor = newOpts?.anchor || element
|
const anchor = newOpts?.anchor || element
|
||||||
const allowedType = newOpts?.allowedType || "click"
|
updateHandler(id, element, anchor, callback)
|
||||||
updateHandler(id, element, anchor, callback, allowedType)
|
|
||||||
}
|
}
|
||||||
update(opts)
|
update(opts)
|
||||||
return {
|
return {
|
||||||
|
|
|
@ -8,6 +8,8 @@
|
||||||
export let size = "S"
|
export let size = "S"
|
||||||
export let extraButtonText
|
export let extraButtonText
|
||||||
export let extraButtonAction
|
export let extraButtonAction
|
||||||
|
export let extraLinkText
|
||||||
|
export let extraLinkAction
|
||||||
export let showCloseButton = true
|
export let showCloseButton = true
|
||||||
|
|
||||||
let show = true
|
let show = true
|
||||||
|
@ -28,8 +30,13 @@
|
||||||
<use xlink:href="#spectrum-icon-18-{icon}" />
|
<use xlink:href="#spectrum-icon-18-{icon}" />
|
||||||
</svg>
|
</svg>
|
||||||
<div class="spectrum-Toast-body">
|
<div class="spectrum-Toast-body">
|
||||||
<div class="spectrum-Toast-content">
|
<div class="spectrum-Toast-content row-content">
|
||||||
<slot />
|
<slot />
|
||||||
|
{#if extraLinkText}
|
||||||
|
<button class="link" on:click={extraLinkAction}>
|
||||||
|
<u>{extraLinkText}</u>
|
||||||
|
</button>
|
||||||
|
{/if}
|
||||||
</div>
|
</div>
|
||||||
{#if extraButtonText && extraButtonAction}
|
{#if extraButtonText && extraButtonAction}
|
||||||
<button
|
<button
|
||||||
|
@ -73,4 +80,23 @@
|
||||||
.spectrum-Button {
|
.spectrum-Button {
|
||||||
border: 1px solid rgba(255, 255, 255, 0.2);
|
border: 1px solid rgba(255, 255, 255, 0.2);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.row-content {
|
||||||
|
display: flex;
|
||||||
|
}
|
||||||
|
|
||||||
|
.link {
|
||||||
|
background: none;
|
||||||
|
border: none;
|
||||||
|
margin: 0;
|
||||||
|
margin-left: 0.5em;
|
||||||
|
padding: 0;
|
||||||
|
cursor: pointer;
|
||||||
|
color: white;
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
|
|
||||||
|
u {
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
</style>
|
</style>
|
||||||
|
|
|
@ -11,6 +11,7 @@
|
||||||
export let error = null
|
export let error = null
|
||||||
export let validate = null
|
export let validate = null
|
||||||
export let suffix = null
|
export let suffix = null
|
||||||
|
export let validateOn = "change"
|
||||||
|
|
||||||
const dispatch = createEventDispatcher()
|
const dispatch = createEventDispatcher()
|
||||||
|
|
||||||
|
@ -24,7 +25,16 @@
|
||||||
const newValue = e.target.value
|
const newValue = e.target.value
|
||||||
dispatch("change", newValue)
|
dispatch("change", newValue)
|
||||||
value = newValue
|
value = newValue
|
||||||
if (validate) {
|
if (validate && (error || validateOn === "change")) {
|
||||||
|
error = validate(newValue)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const onBlur = e => {
|
||||||
|
focused = false
|
||||||
|
const newValue = e.target.value
|
||||||
|
dispatch("blur", newValue)
|
||||||
|
if (validate && validateOn === "blur") {
|
||||||
error = validate(newValue)
|
error = validate(newValue)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -61,7 +71,7 @@
|
||||||
type={type || "text"}
|
type={type || "text"}
|
||||||
on:input={onChange}
|
on:input={onChange}
|
||||||
on:focus={() => (focused = true)}
|
on:focus={() => (focused = true)}
|
||||||
on:blur={() => (focused = false)}
|
on:blur={onBlur}
|
||||||
class:placeholder
|
class:placeholder
|
||||||
bind:this={ref}
|
bind:this={ref}
|
||||||
/>
|
/>
|
||||||
|
|
|
@ -4,6 +4,9 @@
|
||||||
import dayjs from "dayjs"
|
import dayjs from "dayjs"
|
||||||
import NumberInput from "./NumberInput.svelte"
|
import NumberInput from "./NumberInput.svelte"
|
||||||
import { createEventDispatcher } from "svelte"
|
import { createEventDispatcher } from "svelte"
|
||||||
|
import isoWeek from "dayjs/plugin/isoWeek"
|
||||||
|
|
||||||
|
dayjs.extend(isoWeek)
|
||||||
|
|
||||||
export let value
|
export let value
|
||||||
|
|
||||||
|
@ -43,7 +46,7 @@
|
||||||
return []
|
return []
|
||||||
}
|
}
|
||||||
let monthEnd = monthStart.endOf("month")
|
let monthEnd = monthStart.endOf("month")
|
||||||
let calendarStart = monthStart.startOf("week")
|
let calendarStart = monthStart.startOf("isoWeek")
|
||||||
const numWeeks = Math.ceil((monthEnd.diff(calendarStart, "day") + 1) / 7)
|
const numWeeks = Math.ceil((monthEnd.diff(calendarStart, "day") + 1) / 7)
|
||||||
|
|
||||||
let mondays = []
|
let mondays = []
|
||||||
|
|
|
@ -7,11 +7,11 @@
|
||||||
export let narrower = false
|
export let narrower = false
|
||||||
export let noPadding = false
|
export let noPadding = false
|
||||||
|
|
||||||
let sidePanelVisble = false
|
let sidePanelVisible = false
|
||||||
|
|
||||||
setContext("side-panel", {
|
setContext("side-panel", {
|
||||||
open: () => (sidePanelVisble = true),
|
open: () => (sidePanelVisible = true),
|
||||||
close: () => (sidePanelVisble = false),
|
close: () => (sidePanelVisible = false),
|
||||||
})
|
})
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
|
@ -24,9 +24,9 @@
|
||||||
</div>
|
</div>
|
||||||
<div
|
<div
|
||||||
id="side-panel"
|
id="side-panel"
|
||||||
class:visible={sidePanelVisble}
|
class:visible={sidePanelVisible}
|
||||||
use:clickOutside={() => {
|
use:clickOutside={() => {
|
||||||
sidePanelVisble = false
|
sidePanelVisible = false
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<slot name="side-panel" />
|
<slot name="side-panel" />
|
||||||
|
|
|
@ -154,7 +154,7 @@ export const parseDate = (value, { enableTime = true }) => {
|
||||||
// schema flags
|
// schema flags
|
||||||
export const stringifyDate = (
|
export const stringifyDate = (
|
||||||
value,
|
value,
|
||||||
{ enableTime = true, timeOnly = false, ignoreTimezones = false }
|
{ enableTime = true, timeOnly = false, ignoreTimezones = false } = {}
|
||||||
) => {
|
) => {
|
||||||
if (!value) {
|
if (!value) {
|
||||||
return null
|
return null
|
||||||
|
@ -210,7 +210,7 @@ const localeDateFormat = new Intl.DateTimeFormat()
|
||||||
// Formats a dayjs date according to schema flags
|
// Formats a dayjs date according to schema flags
|
||||||
export const getDateDisplayValue = (
|
export const getDateDisplayValue = (
|
||||||
value,
|
value,
|
||||||
{ enableTime = true, timeOnly = false }
|
{ enableTime = true, timeOnly = false } = {}
|
||||||
) => {
|
) => {
|
||||||
if (!value?.isValid()) {
|
if (!value?.isValid()) {
|
||||||
return ""
|
return ""
|
||||||
|
|
File diff suppressed because one or more lines are too long
After Width: | Height: | Size: 804 KiB |
|
@ -358,7 +358,8 @@
|
||||||
value.customType !== "cron" &&
|
value.customType !== "cron" &&
|
||||||
value.customType !== "triggerSchema" &&
|
value.customType !== "triggerSchema" &&
|
||||||
value.customType !== "automationFields" &&
|
value.customType !== "automationFields" &&
|
||||||
value.type !== "attachment"
|
value.type !== "attachment" &&
|
||||||
|
value.type !== "attachment_single"
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -2,6 +2,8 @@
|
||||||
import { tables } from "stores/builder"
|
import { tables } from "stores/builder"
|
||||||
import { Select, Checkbox, Label } from "@budibase/bbui"
|
import { Select, Checkbox, Label } from "@budibase/bbui"
|
||||||
import { createEventDispatcher } from "svelte"
|
import { createEventDispatcher } from "svelte"
|
||||||
|
import { FieldType } from "@budibase/types"
|
||||||
|
|
||||||
import RowSelectorTypes from "./RowSelectorTypes.svelte"
|
import RowSelectorTypes from "./RowSelectorTypes.svelte"
|
||||||
import DrawerBindableSlot from "../../common/bindings/DrawerBindableSlot.svelte"
|
import DrawerBindableSlot from "../../common/bindings/DrawerBindableSlot.svelte"
|
||||||
import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
|
import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
|
||||||
|
@ -14,7 +16,6 @@
|
||||||
export let bindings
|
export let bindings
|
||||||
export let isTestModal
|
export let isTestModal
|
||||||
export let isUpdateRow
|
export let isUpdateRow
|
||||||
|
|
||||||
$: parsedBindings = bindings.map(binding => {
|
$: parsedBindings = bindings.map(binding => {
|
||||||
let clone = Object.assign({}, binding)
|
let clone = Object.assign({}, binding)
|
||||||
clone.icon = "ShareAndroid"
|
clone.icon = "ShareAndroid"
|
||||||
|
@ -26,15 +27,19 @@
|
||||||
|
|
||||||
$: {
|
$: {
|
||||||
table = $tables.list.find(table => table._id === value?.tableId)
|
table = $tables.list.find(table => table._id === value?.tableId)
|
||||||
schemaFields = Object.entries(table?.schema ?? {})
|
|
||||||
// surface the schema so the user can see it in the json
|
// Just sorting attachment types to the bottom here for a cleaner UX
|
||||||
schemaFields.map(([, schema]) => {
|
schemaFields = Object.entries(table?.schema ?? {}).sort(
|
||||||
|
([, schemaA], [, schemaB]) =>
|
||||||
|
(schemaA.type === "attachment") - (schemaB.type === "attachment")
|
||||||
|
)
|
||||||
|
|
||||||
|
schemaFields.forEach(([, schema]) => {
|
||||||
if (!schema.autocolumn && !value[schema.name]) {
|
if (!schema.autocolumn && !value[schema.name]) {
|
||||||
value[schema.name] = ""
|
value[schema.name] = ""
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
const onChangeTable = e => {
|
const onChangeTable = e => {
|
||||||
value["tableId"] = e.detail
|
value["tableId"] = e.detail
|
||||||
dispatch("change", value)
|
dispatch("change", value)
|
||||||
|
@ -114,10 +119,16 @@
|
||||||
</div>
|
</div>
|
||||||
{#if schemaFields.length}
|
{#if schemaFields.length}
|
||||||
{#each schemaFields as [field, schema]}
|
{#each schemaFields as [field, schema]}
|
||||||
{#if !schema.autocolumn && schema.type !== "attachment"}
|
{#if !schema.autocolumn}
|
||||||
<div class="schema-fields">
|
<div
|
||||||
|
class:schema-fields={schema.type !== FieldType.ATTACHMENTS &&
|
||||||
|
schema.type !== FieldType.ATTACHMENT_SINGLE}
|
||||||
|
>
|
||||||
<Label>{field}</Label>
|
<Label>{field}</Label>
|
||||||
<div class="field-width">
|
<div
|
||||||
|
class:field-width={schema.type !== FieldType.ATTACHMENTS &&
|
||||||
|
schema.type !== FieldType.ATTACHMENT_SINGLE}
|
||||||
|
>
|
||||||
{#if isTestModal}
|
{#if isTestModal}
|
||||||
<RowSelectorTypes
|
<RowSelectorTypes
|
||||||
{isTestModal}
|
{isTestModal}
|
||||||
|
|
|
@ -1,10 +1,12 @@
|
||||||
<script>
|
<script>
|
||||||
import { Select, DatePicker, Multiselect, TextArea } from "@budibase/bbui"
|
import { Select, DatePicker, Multiselect, TextArea } from "@budibase/bbui"
|
||||||
|
import { FieldType } from "@budibase/types"
|
||||||
import LinkedRowSelector from "components/common/LinkedRowSelector.svelte"
|
import LinkedRowSelector from "components/common/LinkedRowSelector.svelte"
|
||||||
import DrawerBindableInput from "../../common/bindings/DrawerBindableInput.svelte"
|
import DrawerBindableInput from "../../common/bindings/DrawerBindableInput.svelte"
|
||||||
import ModalBindableInput from "../../common/bindings/ModalBindableInput.svelte"
|
import ModalBindableInput from "../../common/bindings/ModalBindableInput.svelte"
|
||||||
import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
|
import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
|
||||||
import Editor from "components/integration/QueryEditor.svelte"
|
import Editor from "components/integration/QueryEditor.svelte"
|
||||||
|
import KeyValueBuilder from "components/integration/KeyValueBuilder.svelte"
|
||||||
|
|
||||||
export let onChange
|
export let onChange
|
||||||
export let field
|
export let field
|
||||||
|
@ -22,6 +24,27 @@
|
||||||
function schemaHasOptions(schema) {
|
function schemaHasOptions(schema) {
|
||||||
return !!schema.constraints?.inclusion?.length
|
return !!schema.constraints?.inclusion?.length
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const handleAttachmentParams = keyValuObj => {
|
||||||
|
let params = {}
|
||||||
|
|
||||||
|
if (
|
||||||
|
schema.type === FieldType.ATTACHMENT_SINGLE &&
|
||||||
|
Object.keys(keyValuObj).length === 0
|
||||||
|
) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
if (!Array.isArray(keyValuObj)) {
|
||||||
|
keyValuObj = [keyValuObj]
|
||||||
|
}
|
||||||
|
|
||||||
|
if (keyValuObj.length) {
|
||||||
|
for (let param of keyValuObj) {
|
||||||
|
params[param.url] = param.filename
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return params
|
||||||
|
}
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
{#if schemaHasOptions(schema) && schema.type !== "array"}
|
{#if schemaHasOptions(schema) && schema.type !== "array"}
|
||||||
|
@ -77,6 +100,35 @@
|
||||||
on:change={e => onChange(e, field)}
|
on:change={e => onChange(e, field)}
|
||||||
useLabel={false}
|
useLabel={false}
|
||||||
/>
|
/>
|
||||||
|
{:else if schema.type === FieldType.ATTACHMENTS || schema.type === FieldType.ATTACHMENT_SINGLE}
|
||||||
|
<div class="attachment-field-spacinng">
|
||||||
|
<KeyValueBuilder
|
||||||
|
on:change={e =>
|
||||||
|
onChange(
|
||||||
|
{
|
||||||
|
detail:
|
||||||
|
schema.type === FieldType.ATTACHMENT_SINGLE
|
||||||
|
? e.detail.length > 0
|
||||||
|
? { url: e.detail[0].name, filename: e.detail[0].value }
|
||||||
|
: {}
|
||||||
|
: e.detail.map(({ name, value }) => ({
|
||||||
|
url: name,
|
||||||
|
filename: value,
|
||||||
|
})),
|
||||||
|
},
|
||||||
|
field
|
||||||
|
)}
|
||||||
|
object={handleAttachmentParams(value[field])}
|
||||||
|
allowJS
|
||||||
|
{bindings}
|
||||||
|
keyBindings
|
||||||
|
customButtonText={"Add attachment"}
|
||||||
|
keyPlaceholder={"URL"}
|
||||||
|
valuePlaceholder={"Filename"}
|
||||||
|
actionButtonDisabled={schema.type === FieldType.ATTACHMENT_SINGLE &&
|
||||||
|
Object.keys(value[field]).length >= 1}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
{:else if ["string", "number", "bigint", "barcodeqr", "array"].includes(schema.type)}
|
{:else if ["string", "number", "bigint", "barcodeqr", "array"].includes(schema.type)}
|
||||||
<svelte:component
|
<svelte:component
|
||||||
this={isTestModal ? ModalBindableInput : DrawerBindableInput}
|
this={isTestModal ? ModalBindableInput : DrawerBindableInput}
|
||||||
|
@ -90,3 +142,10 @@
|
||||||
title={schema.name}
|
title={schema.name}
|
||||||
/>
|
/>
|
||||||
{/if}
|
{/if}
|
||||||
|
|
||||||
|
<style>
|
||||||
|
.attachment-field-spacinng {
|
||||||
|
margin-top: var(--spacing-s);
|
||||||
|
margin-bottom: var(--spacing-l);
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
|
|
@ -1,7 +1,9 @@
|
||||||
<script>
|
<script>
|
||||||
import { createEventDispatcher } from "svelte"
|
import { createEventDispatcher } from "svelte"
|
||||||
import { ActionButton, Modal, ModalContent } from "@budibase/bbui"
|
import { ActionButton, Drawer, DrawerContent, Button } from "@budibase/bbui"
|
||||||
import FilterBuilder from "components/design/settings/controls/FilterEditor/FilterBuilder.svelte"
|
import FilterBuilder from "components/design/settings/controls/FilterEditor/FilterBuilder.svelte"
|
||||||
|
import { getUserBindings } from "dataBinding"
|
||||||
|
import { makePropSafe } from "@budibase/string-templates"
|
||||||
|
|
||||||
export let schema
|
export let schema
|
||||||
export let filters
|
export let filters
|
||||||
|
@ -10,7 +12,7 @@
|
||||||
|
|
||||||
const dispatch = createEventDispatcher()
|
const dispatch = createEventDispatcher()
|
||||||
|
|
||||||
let modal
|
let drawer
|
||||||
|
|
||||||
$: tempValue = filters || []
|
$: tempValue = filters || []
|
||||||
$: schemaFields = Object.entries(schema || {}).map(
|
$: schemaFields = Object.entries(schema || {}).map(
|
||||||
|
@ -22,37 +24,53 @@
|
||||||
|
|
||||||
$: text = getText(filters)
|
$: text = getText(filters)
|
||||||
$: selected = tempValue.filter(x => !x.onEmptyFilter)?.length > 0
|
$: selected = tempValue.filter(x => !x.onEmptyFilter)?.length > 0
|
||||||
|
$: bindings = [
|
||||||
|
{
|
||||||
|
type: "context",
|
||||||
|
runtimeBinding: `${makePropSafe("now")}`,
|
||||||
|
readableBinding: `Date`,
|
||||||
|
category: "Date",
|
||||||
|
icon: "Date",
|
||||||
|
display: {
|
||||||
|
name: "Server date",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
...getUserBindings(),
|
||||||
|
]
|
||||||
const getText = filters => {
|
const getText = filters => {
|
||||||
const count = filters?.filter(filter => filter.field)?.length
|
const count = filters?.filter(filter => filter.field)?.length
|
||||||
return count ? `Filter (${count})` : "Filter"
|
return count ? `Filter (${count})` : "Filter"
|
||||||
}
|
}
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<ActionButton icon="Filter" quiet {disabled} on:click={modal.show} {selected}>
|
<ActionButton icon="Filter" quiet {disabled} on:click={drawer.show} {selected}>
|
||||||
{text}
|
{text}
|
||||||
</ActionButton>
|
</ActionButton>
|
||||||
<Modal bind:this={modal}>
|
|
||||||
<ModalContent
|
|
||||||
title="Filter"
|
|
||||||
confirmText="Save"
|
|
||||||
size="XL"
|
|
||||||
onConfirm={() => dispatch("change", tempValue)}
|
|
||||||
>
|
|
||||||
<div class="wrapper">
|
|
||||||
<FilterBuilder
|
|
||||||
allowBindings={false}
|
|
||||||
{filters}
|
|
||||||
{schemaFields}
|
|
||||||
datasource={{ type: "table", tableId }}
|
|
||||||
on:change={e => (tempValue = e.detail)}
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</ModalContent>
|
|
||||||
</Modal>
|
|
||||||
|
|
||||||
<style>
|
<Drawer
|
||||||
.wrapper :global(.main) {
|
bind:this={drawer}
|
||||||
padding: 0;
|
title="Filtering"
|
||||||
}
|
on:drawerHide
|
||||||
</style>
|
on:drawerShow
|
||||||
|
forceModal
|
||||||
|
>
|
||||||
|
<Button
|
||||||
|
cta
|
||||||
|
slot="buttons"
|
||||||
|
on:click={() => {
|
||||||
|
dispatch("change", tempValue)
|
||||||
|
drawer.hide()
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
Save
|
||||||
|
</Button>
|
||||||
|
<DrawerContent slot="body">
|
||||||
|
<FilterBuilder
|
||||||
|
{filters}
|
||||||
|
{schemaFields}
|
||||||
|
datasource={{ type: "table", tableId }}
|
||||||
|
on:change={e => (tempValue = e.detail)}
|
||||||
|
{bindings}
|
||||||
|
/>
|
||||||
|
</DrawerContent>
|
||||||
|
</Drawer>
|
||||||
|
|
|
@ -4,6 +4,7 @@
|
||||||
readableToRuntimeBinding,
|
readableToRuntimeBinding,
|
||||||
runtimeToReadableBinding,
|
runtimeToReadableBinding,
|
||||||
} from "dataBinding"
|
} from "dataBinding"
|
||||||
|
import { FieldType } from "@budibase/types"
|
||||||
|
|
||||||
import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte"
|
import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte"
|
||||||
import { createEventDispatcher, setContext } from "svelte"
|
import { createEventDispatcher, setContext } from "svelte"
|
||||||
|
@ -102,6 +103,8 @@
|
||||||
longform: value => !isJSBinding(value),
|
longform: value => !isJSBinding(value),
|
||||||
json: value => !isJSBinding(value),
|
json: value => !isJSBinding(value),
|
||||||
boolean: isValidBoolean,
|
boolean: isValidBoolean,
|
||||||
|
attachment: false,
|
||||||
|
attachment_single: false,
|
||||||
}
|
}
|
||||||
|
|
||||||
const isValid = value => {
|
const isValid = value => {
|
||||||
|
@ -116,7 +119,16 @@
|
||||||
if (type === "json" && !isJSBinding(value)) {
|
if (type === "json" && !isJSBinding(value)) {
|
||||||
return "json-slot-icon"
|
return "json-slot-icon"
|
||||||
}
|
}
|
||||||
if (!["string", "number", "bigint", "barcodeqr"].includes(type)) {
|
if (
|
||||||
|
![
|
||||||
|
"string",
|
||||||
|
"number",
|
||||||
|
"bigint",
|
||||||
|
"barcodeqr",
|
||||||
|
"attachment",
|
||||||
|
"attachment_single",
|
||||||
|
].includes(type)
|
||||||
|
) {
|
||||||
return "slot-icon"
|
return "slot-icon"
|
||||||
}
|
}
|
||||||
return ""
|
return ""
|
||||||
|
@ -157,7 +169,7 @@
|
||||||
{updateOnChange}
|
{updateOnChange}
|
||||||
/>
|
/>
|
||||||
{/if}
|
{/if}
|
||||||
{#if !disabled && type !== "formula"}
|
{#if !disabled && type !== "formula" && !disabled && type !== FieldType.ATTACHMENTS && !disabled && type !== FieldType.ATTACHMENT_SINGLE}
|
||||||
<div
|
<div
|
||||||
class={`icon ${getIconClass(value, type)}`}
|
class={`icon ${getIconClass(value, type)}`}
|
||||||
on:click={() => {
|
on:click={() => {
|
||||||
|
|
|
@ -1,8 +1,10 @@
|
||||||
<script>
|
<script>
|
||||||
import { Body, Label, Input } from "@budibase/bbui"
|
import { Body, Label } from "@budibase/bbui"
|
||||||
import { onMount } from "svelte"
|
import { onMount } from "svelte"
|
||||||
|
import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte"
|
||||||
|
|
||||||
export let parameters
|
export let parameters
|
||||||
|
export let bindings
|
||||||
|
|
||||||
onMount(() => {
|
onMount(() => {
|
||||||
if (!parameters.confirm) {
|
if (!parameters.confirm) {
|
||||||
|
@ -15,11 +17,18 @@
|
||||||
<Body size="S">Enter the message you wish to display to the user.</Body>
|
<Body size="S">Enter the message you wish to display to the user.</Body>
|
||||||
<div class="params">
|
<div class="params">
|
||||||
<Label small>Title</Label>
|
<Label small>Title</Label>
|
||||||
<Input placeholder="Prompt User" bind:value={parameters.customTitleText} />
|
<DrawerBindableInput
|
||||||
|
placeholder="Title"
|
||||||
|
value={parameters.customTitleText}
|
||||||
|
on:change={e => (parameters.customTitleText = e.detail)}
|
||||||
|
{bindings}
|
||||||
|
/>
|
||||||
<Label small>Message</Label>
|
<Label small>Message</Label>
|
||||||
<Input
|
<DrawerBindableInput
|
||||||
placeholder="Are you sure you want to continue?"
|
placeholder="Are you sure you want to continue?"
|
||||||
bind:value={parameters.confirmText}
|
value={parameters.confirmText}
|
||||||
|
on:change={e => (parameters.confirmText = e.detail)}
|
||||||
|
{bindings}
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
|
@@ -21,26 +21,24 @@
   const currentStep = derived(multiStepStore, state => state.currentStep)
   const componentType = "@budibase/standard-components/multistepformblockstep"

+  setContext("multi-step-form-block", multiStepStore)
+
   let cachedValue
   let cachedInstance = {}

   $: if (!isEqual(cachedValue, value)) {
     cachedValue = value
   }

   $: if (!isEqual(componentInstance, cachedInstance)) {
     cachedInstance = componentInstance
   }

-  setContext("multi-step-form-block", multiStepStore)
-
   $: stepCount = cachedValue?.length || 0
   $: updateStore(stepCount)
   $: dataSource = getDatasourceForProvider($selectedScreen, cachedInstance)
   $: emitCurrentStep($currentStep)
   $: stepLabel = getStepLabel($multiStepStore)
   $: stepDef = getDefinition(stepLabel)
-  $: stepSettings = cachedValue?.[$currentStep] || {}
+  $: savedInstance = cachedValue?.[$currentStep] || {}
   $: defaults = Utils.buildMultiStepFormBlockDefaultProps({
     _id: cachedInstance._id,
     stepCount: $multiStepStore.stepCount,
@@ -48,14 +46,16 @@
     actionType: cachedInstance.actionType,
     dataSource: cachedInstance.dataSource,
   })
+  // For backwards compatibility we need to sometimes manually set base
+  // properties like _id and _component as we didn't used to save these
   $: stepInstance = {
-    _id: Helpers.uuid(),
-    _component: componentType,
+    _id: savedInstance._id || Helpers.uuid(),
+    _component: savedInstance._component || componentType,
     _instanceName: `Step ${currentStep + 1}`,
-    title: stepSettings.title ?? defaults?.title,
-    buttons: stepSettings.buttons || defaults?.buttons,
-    fields: stepSettings.fields,
-    desc: stepSettings.desc,
+    title: savedInstance.title ?? defaults?.title,
+    buttons: savedInstance.buttons || defaults?.buttons,
+    fields: savedInstance.fields,
+    desc: savedInstance.desc,

     // Needed for field configuration
     dataSource,
@@ -92,7 +92,8 @@
   }

   const addStep = () => {
-    value = value.toSpliced($currentStep + 1, 0, {})
+    const newInstance = componentStore.createInstance(componentType)
+    value = value.toSpliced($currentStep + 1, 0, newInstance)
     dispatch("change", value)
     multiStepStore.update(state => ({
       ...state,

@@ -1,6 +1,6 @@
 <script>
   import { createEventDispatcher, getContext } from "svelte"
-  import { ActionButton } from "@budibase/bbui"
+  import { ActionButton, AbsTooltip } from "@budibase/bbui"

   const multiStepStore = getContext("multi-step-form-block")
   const dispatch = createEventDispatcher()
@@ -28,45 +28,49 @@
   </div>
 {:else}
   <div class="step-actions">
-    <ActionButton
-      size="S"
-      secondary
-      icon="ChevronLeft"
-      disabled={currentStep === 0}
-      on:click={() => {
-        stepAction("previousStep")
-      }}
-      tooltip={"Previous step"}
-    />
-    <ActionButton
-      size="S"
-      secondary
-      disabled={currentStep === stepCount - 1}
-      icon="ChevronRight"
-      on:click={() => {
-        stepAction("nextStep")
-      }}
-      tooltip={"Next step"}
-    />
-    <ActionButton
-      size="S"
-      secondary
-      icon="Close"
-      disabled={stepCount === 1}
-      on:click={() => {
-        stepAction("removeStep")
-      }}
-      tooltip={"Remove step"}
-    />
-    <ActionButton
-      size="S"
-      secondary
-      icon="MultipleAdd"
-      on:click={() => {
-        stepAction("addStep")
-      }}
-      tooltip={"Add step"}
-    />
+    <AbsTooltip text="Previous step" noWrap>
+      <ActionButton
+        size="S"
+        secondary
+        icon="ChevronLeft"
+        disabled={currentStep === 0}
+        on:click={() => {
+          stepAction("previousStep")
+        }}
+      />
+    </AbsTooltip>
+    <AbsTooltip text="Next step" noWrap>
+      <ActionButton
+        size="S"
+        secondary
+        disabled={currentStep === stepCount - 1}
+        icon="ChevronRight"
+        on:click={() => {
+          stepAction("nextStep")
+        }}
+      />
+    </AbsTooltip>
+    <AbsTooltip text="Remove step" noWrap>
+      <ActionButton
+        size="S"
+        secondary
+        icon="Close"
+        disabled={stepCount === 1}
+        on:click={() => {
+          stepAction("removeStep")
+        }}
+      />
+    </AbsTooltip>
+    <AbsTooltip text="Add step" noWrap>
+      <ActionButton
+        size="S"
+        secondary
+        icon="MultipleAdd"
+        on:click={() => {
+          stepAction("addStep")
+        }}
+      />
+    </AbsTooltip>
   </div>
 {/if}

@@ -75,6 +75,7 @@ const toDraggableListFormat = (gridFormatColumns, createComponent, schema) => {
     return createComponent(
       "@budibase/standard-components/labelfield",
       {
+        _id: column.field,
         _instanceName: column.field,
         active: column.active,
         field: column.field,

@ -65,6 +65,7 @@ describe("getColumns", () => {
|
||||||
it("returns the selected and unselected fields in the modern format, respecting the original order", ctx => {
|
it("returns the selected and unselected fields in the modern format, respecting the original order", ctx => {
|
||||||
expect(ctx.columns.sortable).toEqual([
|
expect(ctx.columns.sortable).toEqual([
|
||||||
{
|
{
|
||||||
|
_id: "three",
|
||||||
_instanceName: "three",
|
_instanceName: "three",
|
||||||
active: true,
|
active: true,
|
||||||
columnType: "foo",
|
columnType: "foo",
|
||||||
|
@ -73,6 +74,7 @@ describe("getColumns", () => {
|
||||||
label: "three label",
|
label: "three label",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
_id: "two",
|
||||||
_instanceName: "two",
|
_instanceName: "two",
|
||||||
active: true,
|
active: true,
|
||||||
columnType: "foo",
|
columnType: "foo",
|
||||||
|
@ -81,6 +83,7 @@ describe("getColumns", () => {
|
||||||
label: "two label",
|
label: "two label",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
_id: "one",
|
||||||
_instanceName: "one",
|
_instanceName: "one",
|
||||||
active: false,
|
active: false,
|
||||||
columnType: "foo",
|
columnType: "foo",
|
||||||
|
@ -91,6 +94,7 @@ describe("getColumns", () => {
|
||||||
])
|
])
|
||||||
|
|
||||||
expect(ctx.columns.primary).toEqual({
|
expect(ctx.columns.primary).toEqual({
|
||||||
|
_id: "four",
|
||||||
_instanceName: "four",
|
_instanceName: "four",
|
||||||
active: true,
|
active: true,
|
||||||
columnType: "foo",
|
columnType: "foo",
|
||||||
|
@ -115,6 +119,7 @@ describe("getColumns", () => {
|
||||||
it("returns all columns, with non-hidden columns automatically selected", ctx => {
|
it("returns all columns, with non-hidden columns automatically selected", ctx => {
|
||||||
expect(ctx.columns.sortable).toEqual([
|
expect(ctx.columns.sortable).toEqual([
|
||||||
{
|
{
|
||||||
|
_id: "two",
|
||||||
_instanceName: "two",
|
_instanceName: "two",
|
||||||
active: true,
|
active: true,
|
||||||
columnType: "foo",
|
columnType: "foo",
|
||||||
|
@ -123,6 +128,7 @@ describe("getColumns", () => {
|
||||||
label: "two",
|
label: "two",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
_id: "three",
|
||||||
_instanceName: "three",
|
_instanceName: "three",
|
||||||
active: true,
|
active: true,
|
||||||
columnType: "foo",
|
columnType: "foo",
|
||||||
|
@ -131,6 +137,7 @@ describe("getColumns", () => {
|
||||||
label: "three",
|
label: "three",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
_id: "one",
|
||||||
_instanceName: "one",
|
_instanceName: "one",
|
||||||
active: false,
|
active: false,
|
||||||
columnType: "foo",
|
columnType: "foo",
|
||||||
|
@ -141,6 +148,7 @@ describe("getColumns", () => {
|
||||||
])
|
])
|
||||||
|
|
||||||
expect(ctx.columns.primary).toEqual({
|
expect(ctx.columns.primary).toEqual({
|
||||||
|
_id: "four",
|
||||||
_instanceName: "four",
|
_instanceName: "four",
|
||||||
active: true,
|
active: true,
|
||||||
columnType: "foo",
|
columnType: "foo",
|
||||||
|
@ -173,6 +181,7 @@ describe("getColumns", () => {
|
||||||
it("returns all columns, including those missing from the initial data", ctx => {
|
it("returns all columns, including those missing from the initial data", ctx => {
|
||||||
expect(ctx.columns.sortable).toEqual([
|
expect(ctx.columns.sortable).toEqual([
|
||||||
{
|
{
|
||||||
|
_id: "three",
|
||||||
_instanceName: "three",
|
_instanceName: "three",
|
||||||
active: true,
|
active: true,
|
||||||
columnType: "foo",
|
columnType: "foo",
|
||||||
|
@ -181,6 +190,7 @@ describe("getColumns", () => {
|
||||||
label: "three label",
|
label: "three label",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
_id: "two",
|
||||||
_instanceName: "two",
|
_instanceName: "two",
|
||||||
active: false,
|
active: false,
|
||||||
columnType: "foo",
|
columnType: "foo",
|
||||||
|
@ -189,6 +199,7 @@ describe("getColumns", () => {
|
||||||
label: "two",
|
label: "two",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
_id: "one",
|
||||||
_instanceName: "one",
|
_instanceName: "one",
|
||||||
active: false,
|
active: false,
|
||||||
columnType: "foo",
|
columnType: "foo",
|
||||||
|
@ -199,6 +210,7 @@ describe("getColumns", () => {
|
||||||
])
|
])
|
||||||
|
|
||||||
expect(ctx.columns.primary).toEqual({
|
expect(ctx.columns.primary).toEqual({
|
||||||
|
_id: "four",
|
||||||
_instanceName: "four",
|
_instanceName: "four",
|
||||||
active: true,
|
active: true,
|
||||||
columnType: "foo",
|
columnType: "foo",
|
||||||
|
@ -228,6 +240,7 @@ describe("getColumns", () => {
|
||||||
it("returns all valid columns, excluding those that aren't valid for the schema", ctx => {
|
it("returns all valid columns, excluding those that aren't valid for the schema", ctx => {
|
||||||
expect(ctx.columns.sortable).toEqual([
|
expect(ctx.columns.sortable).toEqual([
|
||||||
{
|
{
|
||||||
|
_id: "three",
|
||||||
_instanceName: "three",
|
_instanceName: "three",
|
||||||
active: true,
|
active: true,
|
||||||
columnType: "foo",
|
columnType: "foo",
|
||||||
|
@ -236,6 +249,7 @@ describe("getColumns", () => {
|
||||||
label: "three label",
|
label: "three label",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
_id: "two",
|
||||||
_instanceName: "two",
|
_instanceName: "two",
|
||||||
active: false,
|
active: false,
|
||||||
columnType: "foo",
|
columnType: "foo",
|
||||||
|
@ -244,6 +258,7 @@ describe("getColumns", () => {
|
||||||
label: "two",
|
label: "two",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
_id: "one",
|
||||||
_instanceName: "one",
|
_instanceName: "one",
|
||||||
active: false,
|
active: false,
|
||||||
columnType: "foo",
|
columnType: "foo",
|
||||||
|
@ -254,6 +269,7 @@ describe("getColumns", () => {
|
||||||
])
|
])
|
||||||
|
|
||||||
expect(ctx.columns.primary).toEqual({
|
expect(ctx.columns.primary).toEqual({
|
||||||
|
_id: "four",
|
||||||
_instanceName: "four",
|
_instanceName: "four",
|
||||||
active: true,
|
active: true,
|
||||||
columnType: "foo",
|
columnType: "foo",
|
||||||
|
@ -318,6 +334,7 @@ describe("getColumns", () => {
|
||||||
beforeEach(ctx => {
|
beforeEach(ctx => {
|
||||||
ctx.updateSortable([
|
ctx.updateSortable([
|
||||||
{
|
{
|
||||||
|
_id: "three",
|
||||||
_instanceName: "three",
|
_instanceName: "three",
|
||||||
active: true,
|
active: true,
|
||||||
columnType: "foo",
|
columnType: "foo",
|
||||||
|
@ -326,6 +343,7 @@ describe("getColumns", () => {
|
||||||
label: "three",
|
label: "three",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
_id: "one",
|
||||||
_instanceName: "one",
|
_instanceName: "one",
|
||||||
active: true,
|
active: true,
|
||||||
columnType: "foo",
|
columnType: "foo",
|
||||||
|
@ -334,6 +352,7 @@ describe("getColumns", () => {
|
||||||
label: "one",
|
label: "one",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
_id: "two",
|
||||||
_instanceName: "two",
|
_instanceName: "two",
|
||||||
active: false,
|
active: false,
|
||||||
columnType: "foo",
|
columnType: "foo",
|
||||||
|
|
|
@@ -37,6 +37,7 @@
   export let customButtonText = null
   export let keyBindings = false
   export let allowJS = false
+  export let actionButtonDisabled = false
   export let compare = (option, value) => option === value

   let fields = Object.entries(object || {}).map(([name, value]) => ({
@@ -189,7 +190,14 @@
 {/if}
 {#if !readOnly && !noAddButton}
   <div>
-    <ActionButton icon="Add" secondary thin outline on:click={addEntry}>
+    <ActionButton
+      disabled={actionButtonDisabled}
+      icon="Add"
+      secondary
+      thin
+      outline
+      on:click={addEntry}
+    >
       {#if customButtonText}
         {customButtonText}
       {:else}

@@ -25,6 +25,6 @@
   name="field"
   headings
   options={SchemaTypeOptionsExpanded}
-  compare={(option, value) => option.type === value.type}
+  compare={(option, value) => option.type === value?.type}
 />
 {/key}

@@ -695,7 +695,7 @@
   menuItems={schemaMenuItems}
   showMenu={!schemaReadOnly}
   readOnly={schemaReadOnly}
-  compare={(option, value) => option.type === value.type}
+  compare={(option, value) => option.type === value?.type}
 />
 </Tab>
 {/if}

@@ -0,0 +1,43 @@
+<script>
+  import "@spectrum-css/toast/dist/index-vars.css"
+  import Portal from "svelte-portal"
+  import { fly } from "svelte/transition"
+  import { Banner, BANNER_TYPES } from "@budibase/bbui"
+  import { licensing } from "stores/portal"
+
+  export let show = true
+
+  const oneDayInSeconds = 86400
+
+  $: license = $licensing.license
+
+  function daysUntilCancel() {
+    const cancelAt = license?.billing?.subscription?.cancelAt
+    const diffTime = Math.abs(cancelAt - new Date().getTime()) / 1000
+    return Math.floor(diffTime / oneDayInSeconds)
+  }
+</script>
+
+<Portal target=".banner-container">
+  <div class="banner">
+    {#if show}
+      <div transition:fly={{ y: -30 }}>
+        <Banner
+          type={BANNER_TYPES.INFO}
+          extraLinkText={"Please select a plan."}
+          extraLinkAction={$licensing.goToUpgradePage}
+          showCloseButton={false}
+        >
+          Your free trial will end in {daysUntilCancel()} days.
+        </Banner>
+      </div>
+    {/if}
+  </div>
+</Portal>
+
+<style>
+  .banner {
+    pointer-events: none;
+    width: 100%;
+  }
+</style>

@@ -12,7 +12,7 @@ const defaultCacheFn = key => {
 const upgradeAction = key => {
   return defaultNavigateAction(
     key,
-    "Upgrade Plan",
+    "Upgrade",
     `${get(admin).accountPortalUrl}/portal/upgrade`
   )
 }

@@ -0,0 +1,66 @@
+<script>
+  import { Modal, ModalContent } from "@budibase/bbui"
+  import FreeTrial from "../../../../assets/FreeTrial.svelte"
+  import { get } from "svelte/store"
+  import { auth, licensing } from "stores/portal"
+  import { API } from "api"
+  import { PlanType } from "@budibase/types"
+  import { sdk } from "@budibase/shared-core"
+
+  let freeTrialModal
+
+  $: planType = $licensing?.license?.plan?.type
+  $: showFreeTrialModal(planType, freeTrialModal)
+
+  const showFreeTrialModal = (planType, freeTrialModal) => {
+    if (
+      planType === PlanType.ENTERPRISE_BASIC_TRIAL &&
+      !$auth.user?.freeTrialConfirmedAt &&
+      sdk.users.isAdmin($auth.user)
+    ) {
+      freeTrialModal?.show()
+    }
+  }
+</script>
+
+<Modal bind:this={freeTrialModal} disableCancel={true}>
+  <ModalContent
+    confirmText="Get started"
+    size="M"
+    showCancelButton={false}
+    showCloseIcon={false}
+    onConfirm={async () => {
+      if (get(auth).user) {
+        try {
+          await API.updateSelf({
+            freeTrialConfirmedAt: new Date().toISOString(),
+          })
+          // Update the cached user
+          await auth.getSelf()
+        } finally {
+          freeTrialModal.hide()
+        }
+      }
+    }}
+  >
+    <h1>Experience all of Budibase with a free 14-day trial</h1>
+    <div class="free-trial-text">
+      We've upgraded you to a free 14-day trial that allows you to try all our
+      features before deciding which plan is right for you.
+      <p>
+        At the end of your trial, we'll automatically downgrade you to the Free
+        plan unless you choose to upgrade.
+      </p>
+    </div>
+    <FreeTrial />
+  </ModalContent>
+</Modal>
+
+<style>
+  h1 {
+    font-size: 26px;
+  }
+  .free-trial-text {
+    font-size: 16px;
+  }
+</style>

@@ -253,6 +253,7 @@ export const SchemaTypeOptions = [
   { label: "Number", value: FieldType.NUMBER },
   { label: "Boolean", value: FieldType.BOOLEAN },
   { label: "Datetime", value: FieldType.DATETIME },
+  { label: "JSON", value: FieldType.JSON },
 ]

 export const SchemaTypeOptionsExpanded = SchemaTypeOptions.map(el => ({

@@ -1106,50 +1106,51 @@ export const getAllStateVariables = () => {
   getAllAssets().forEach(asset => {
     findAllMatchingComponents(asset.props, component => {
       const settings = componentStore.getComponentSettings(component._component)
+      const nestedTypes = [
+        "buttonConfiguration",
+        "fieldConfiguration",
+        "stepConfiguration",
+      ]
+
+      // Extracts all event settings from a component instance.
+      // Recurses into nested types to find all event-like settings at any
+      // depth.
       const parseEventSettings = (settings, comp) => {
+        if (!settings?.length) {
+          return
+        }
+
+        // Extract top level event settings
         settings
           .filter(setting => setting.type === "event")
           .forEach(setting => {
             eventSettings.push(comp[setting.key])
           })
-      }
-
-      const parseComponentSettings = (settings, component) => {
-        // Parse the nested button configurations
+
+        // Recurse into any nested instance types
         settings
-          .filter(setting => setting.type === "buttonConfiguration")
+          .filter(setting => nestedTypes.includes(setting.type))
           .forEach(setting => {
-            const buttonConfig = component[setting.key]
-
-            if (Array.isArray(buttonConfig)) {
-              buttonConfig.forEach(button => {
-                const nestedSettings = componentStore.getComponentSettings(
-                  button._component
-                )
-                parseEventSettings(nestedSettings, button)
+            const instances = comp[setting.key]
+            if (Array.isArray(instances) && instances.length) {
+              instances.forEach(instance => {
+                let type = instance?._component
+
+                // Backwards compatibility for multi-step form blocks which
+                // didn't set a proper component type previously.
+                if (setting.type === "stepConfiguration" && !type) {
+                  type = "@budibase/standard-components/multistepformblockstep"
+                }
+
+                // Parse nested component instances inside this setting
+                const nestedSettings = componentStore.getComponentSettings(type)
+                parseEventSettings(nestedSettings, instance)
               })
             }
           })
-
-        parseEventSettings(settings, component)
       }

-      // Parse the base component settings
-      parseComponentSettings(settings, component)
-
-      // Parse step configuration
-      const stepSetting = settings.find(
-        setting => setting.type === "stepConfiguration"
-      )
-      const steps = stepSetting ? component[stepSetting.key] : []
-      const stepDefinition = componentStore.getComponentSettings(
-        "@budibase/standard-components/multistepformblockstep"
-      )
-
-      steps?.forEach(step => {
-        parseComponentSettings(stepDefinition, step)
-      })
+      parseEventSettings(settings, component)
     })
   })

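Note on the refactor above: the old code special-cased buttonConfiguration and stepConfiguration, while the new parseEventSettings recurses over any setting whose type appears in nestedTypes. As a rough, hypothetical illustration (the component shape and setting names below are assumed, not taken from this diff):

    // Hypothetical instance with one nested buttonConfiguration setting ("buttons")
    const component = {
      _component: "@budibase/standard-components/formblock",
      onClick: [],            // top-level "event" setting, collected first
      buttons: [
        { _component: "@budibase/standard-components/button", onClick: [] },
      ],
    }
    // parseEventSettings(settings, component) pushes component.onClick, then
    // recurses into each entry of component.buttons using that entry's own
    // component settings, collecting its onClick handlers as well.
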
@@ -20,6 +20,9 @@ export function getFormattedPlanName(userPlanType) {
     case PlanType.ENTERPRISE:
       planName = "Enterprise"
       break
+    case PlanType.ENTERPRISE_BASIC_TRIAL:
+      planName = "Trial"
+      break
     default:
       planName = "Free" // Default to "Free" if the type is not explicitly handled
   }

@@ -32,6 +32,7 @@
   import { UserAvatars } from "@budibase/frontend-core"
   import { TOUR_KEYS } from "components/portal/onboarding/tours.js"
   import PreviewOverlay from "./_components/PreviewOverlay.svelte"
+  import EnterpriseBasicTrialModal from "components/portal/onboarding/EnterpriseBasicTrialModal.svelte"

   export let application

@@ -192,6 +193,8 @@
   <CommandPalette />
 </Modal>

+<EnterpriseBasicTrialModal />
+
 <style>
   .back-to-apps {
     display: contents;

@@ -98,14 +98,22 @@
     })
   }

-  async function fetchBackups(filters, page, dateRange) {
-    const response = await backups.searchBackups({
+  async function fetchBackups(filters, page, dateRange = []) {
+    const body = {
       appId: $appStore.appId,
       ...filters,
       page,
-      startDate: dateRange[0],
-      endDate: dateRange[1],
-    })
+    }
+
+    const [startDate, endDate] = dateRange
+    if (startDate) {
+      body.startDate = startDate
+    }
+    if (endDate) {
+      body.endDate = endDate
+    }
+
+    const response = await backups.searchBackups(body)
     pageInfo.fetched(response.hasNextPage, response.nextPage)

     // flatten so we have an easier structure to use for the table schema
@@ -120,7 +128,7 @@
       })
       await fetchBackups(filterOpt, page)
       notifications.success(response.message)
-    } catch {
+    } catch (err) {
       notifications.error("Unable to create backup")
     }
   }

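The dateRange default above is what lets callers skip the date filter entirely: destructuring an empty array yields undefined for both bounds, so neither key is added to the request body. A small sketch of the two call shapes, using the same argument names as the code above:

    await fetchBackups(filterOpt, page)                        // no date filter sent
    await fetchBackups(filterOpt, page, [startDate, endDate])  // both bounds included
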
@@ -6,7 +6,7 @@
   import { sdk } from "@budibase/shared-core"
 </script>

-{#if isEnabled(TENANT_FEATURE_FLAGS.LICENSING) && !$licensing.isEnterprisePlan}
+{#if isEnabled(TENANT_FEATURE_FLAGS.LICENSING) && !$licensing.isEnterprisePlan && !$licensing.isEnterpriseTrial}
   {#if $admin.cloud && $auth?.user?.accountPortalAccess}
     <Button
       cta

@@ -1,7 +1,7 @@
 <script>
   import { isActive, redirect, goto, url } from "@roxi/routify"
   import { Icon, notifications, Tabs, Tab } from "@budibase/bbui"
-  import { organisation, auth, menu, appsStore } from "stores/portal"
+  import { organisation, auth, menu, appsStore, licensing } from "stores/portal"
   import { onMount } from "svelte"
   import UpgradeButton from "./_components/UpgradeButton.svelte"
   import MobileMenu from "./_components/MobileMenu.svelte"
@@ -10,6 +10,8 @@
   import HelpMenu from "components/common/HelpMenu.svelte"
   import VerificationPromptBanner from "components/common/VerificationPromptBanner.svelte"
   import { sdk } from "@budibase/shared-core"
+  import EnterpriseBasicTrialBanner from "components/portal/licensing/EnterpriseBasicTrialBanner.svelte"
+  import { Constants } from "@budibase/frontend-core"

   let loaded = false
   let mobileMenuVisible = false
@@ -33,6 +35,14 @@
   const showMobileMenu = () => (mobileMenuVisible = true)
   const hideMobileMenu = () => (mobileMenuVisible = false)

+  const showFreeTrialBanner = () => {
+    return (
+      $licensing.license?.plan?.type ===
+        Constants.PlanType.ENTERPRISE_BASIC_TRIAL &&
+      sdk.users.isAdmin($auth.user)
+    )
+  }
+
   onMount(async () => {
     // Prevent non-builders from accessing the portal
     if ($auth.user) {
@@ -58,6 +68,7 @@
 <HelpMenu />
 <div class="container">
   <VerificationPromptBanner />
+  <EnterpriseBasicTrialBanner show={showFreeTrialBanner()} />
   <div class="nav">
     <div class="branding">
       <Logo />

@@ -29,6 +29,7 @@
   const manageUrl = `${$admin.accountPortalUrl}/portal/billing`

   const WARN_USAGE = ["Queries", "Automations", "Rows", "Day Passes", "Users"]
+  const oneDayInSeconds = 86400

   const EXCLUDE_QUOTAS = {
     Queries: () => true,
@@ -104,24 +105,17 @@
     if (!timestamp) {
       return
     }
-    const now = new Date()
-    now.setHours(0)
-    now.setMinutes(0)
-
-    const thenDate = new Date(timestamp)
-    thenDate.setHours(0)
-    thenDate.setMinutes(0)
-
-    const difference = thenDate.getTime() - now
-    // return the difference in days
-    return (difference / (1000 * 3600 * 24)).toFixed(0)
+    const diffTime = Math.abs(timestamp - new Date().getTime()) / 1000
+    return Math.floor(diffTime / oneDayInSeconds)
   }

   const setTextRows = () => {
     textRows = []

     if (cancelAt && !usesInvoicing) {
-      textRows.push({ message: "Subscription has been cancelled" })
+      if (plan?.type !== Constants.PlanType.ENTERPRISE_BASIC_TRIAL) {
+        textRows.push({ message: "Subscription has been cancelled" })
+      }
       textRows.push({
         message: `${getDaysRemaining(cancelAt)} days remaining`,
         tooltip: new Date(cancelAt),

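The rewritten getDaysRemaining works directly on epoch milliseconds instead of zeroing hours and minutes on Date objects. A quick worked check of the arithmetic (my own example, not part of the diff):

    const oneDayInSeconds = 86400
    const cancelAt = Date.now() + 14 * 24 * 60 * 60 * 1000             // cancel in 14 days
    const diffTime = Math.abs(cancelAt - new Date().getTime()) / 1000  // about 1,209,600 s
    Math.floor(diffTime / oneDayInSeconds)                             // => 14
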
@@ -103,6 +103,8 @@ export const createLicensingStore = () => {
       const isEnterprisePlan = planType === Constants.PlanType.ENTERPRISE
       const isFreePlan = planType === Constants.PlanType.FREE
       const isBusinessPlan = planType === Constants.PlanType.BUSINESS
+      const isEnterpriseTrial =
+        planType === Constants.PlanType.ENTERPRISE_BASIC_TRIAL
       const groupsEnabled = license.features.includes(
         Constants.Features.USER_GROUPS
       )
@@ -143,6 +145,7 @@ export const createLicensingStore = () => {
         isEnterprisePlan,
         isFreePlan,
         isBusinessPlan,
+        isEnterpriseTrial,
         groupsEnabled,
         backupsEnabled,
         brandingEnabled,

@@ -4220,8 +4220,8 @@
     ]
   },
   "attachmentfield": {
-    "name": "Attachment list",
-    "icon": "Attach",
+    "name": "Attachment List",
+    "icon": "DocumentFragmentGroup",
     "styles": ["size"],
     "requiredAncestors": ["form"],
     "editable": true,
@@ -4318,7 +4318,7 @@
   },
   "attachmentsinglefield": {
     "name": "Single Attachment",
-    "icon": "Attach",
+    "icon": "DocumentFragment",
     "styles": ["size"],
     "requiredAncestors": ["form"],
     "editable": true,
@@ -6718,7 +6718,20 @@
     "illegalChildren": ["section", "sidepanel"],
     "showEmptyState": false,
     "draggable": false,
-    "info": "Side panels are hidden by default. They will only be revealed when triggered by the 'Open Side Panel' action."
+    "info": "Side panels are hidden by default. They will only be revealed when triggered by the 'Open Side Panel' action.",
+    "settings": [
+      {
+        "type": "boolean",
+        "key": "ignoreClicksOutside",
+        "label": "Ignore clicks outside",
+        "defaultValue": false
+      },
+      {
+        "type": "event",
+        "key": "onClose",
+        "label": "On close"
+      }
+    ]
   },
   "rowexplorer": {
     "block": true,

@@ -34,7 +34,6 @@
     "screenfull": "^6.0.1",
     "shortid": "^2.2.15",
     "svelte-apexcharts": "^1.0.2",
-    "svelte-flatpickr": "^3.3.4",
     "svelte-spa-router": "^4.0.1"
   },
   "devDependencies": {

@@ -206,13 +206,6 @@
   />
 {/key}

-<!--
-  Flatpickr needs to be inside the theme wrapper.
-  It also needs its own container because otherwise it hijacks
-  key events on the whole page. It is painful to work with.
--->
-<div id="flatpickr-root" />
-
 <!-- Modal container to ensure they sit on top -->
 <div class="modal-container" />

@@ -60,16 +60,6 @@
     --spectrum-link-primary-m-text-color-hover: var(--primaryColorHover);
   }

-  /* Theme flatpickr */
-  :global(.flatpickr-day.selected) {
-    background: var(--primaryColor);
-    border-color: var(--primaryColor);
-  }
-  :global(.flatpickr-day.selected:hover) {
-    background: var(--primaryColorHover);
-    border-color: var(--primaryColorHover);
-  }
-
   /* Custom scrollbars */
   :global(::-webkit-scrollbar) {
     width: 8px;

@@ -73,7 +73,10 @@
     $context.device.width,
     $context.device.height
   )
-  $: autoCloseSidePanel = !$builderStore.inBuilder && $sidePanelStore.open
+  $: autoCloseSidePanel =
+    !$builderStore.inBuilder &&
+    $sidePanelStore.open &&
+    !$sidePanelStore.ignoreClicksOutside
   $: screenId = $builderStore.inBuilder
     ? `${$builderStore.screen?._id}-screen`
     : "screen"
@@ -191,6 +194,11 @@
     }
     return url
   }
+
+  const handleClickLink = () => {
+    mobileOpen = false
+    sidePanelStore.actions.close()
+  }
 </script>

 <!-- svelte-ignore a11y-no-static-element-interactions -->
@@ -281,7 +289,7 @@
   url={navItem.url}
   subLinks={navItem.subLinks}
   internalLink={navItem.internalLink}
-  on:clickLink={() => (mobileOpen = false)}
+  on:clickLink={handleClickLink}
   leftNav={navigation === "Left"}
   {mobile}
   {navStateStore}
@@ -316,10 +324,7 @@
 <div
   id="side-panel-container"
   class:open={$sidePanelStore.open}
-  use:clickOutside={{
-    callback: autoCloseSidePanel ? sidePanelStore.actions.close : null,
-    allowedType: "mousedown",
-  }}
+  use:clickOutside={autoCloseSidePanel ? sidePanelStore.actions.close : null}
   class:builder={$builderStore.inBuilder}
 >
   <div class="side-panel-header">

@@ -5,6 +5,9 @@
   const { styleable, sidePanelStore, builderStore, dndIsDragging } =
     getContext("sdk")

+  export let onClose
+  export let ignoreClicksOutside
+
   // Automatically show and hide the side panel when inside the builder.
   // For some unknown reason, svelte reactivity breaks if we reference the
   // reactive variable "open" inside the following expression, or if we define
@@ -26,6 +29,10 @@
     }
   }

+  // $: {
+
+  // }
+
   // Derive visibility
   $: open = $sidePanelStore.contentId === $component.id

@@ -36,10 +43,17 @@
   let renderKey = null
   $: {
     if (open) {
+      sidePanelStore.actions.setIgnoreClicksOutside(ignoreClicksOutside)
       renderKey = Math.random()
     }
   }

+  const handleSidePanelClose = async () => {
+    if (onClose) {
+      await onClose()
+    }
+  }
+
   const showInSidePanel = (el, visible) => {
     const update = visible => {
       const target = document.getElementById("side-panel-container")
@@ -51,6 +65,7 @@
       } else {
         if (target.contains(node)) {
           target.removeChild(node)
+          handleSidePanelClose()
         }
       }
     }

@@ -49,7 +49,6 @@
   readonly={fieldState.readonly}
   error={fieldState.error}
   id={fieldState.fieldId}
-  appendTo={document.getElementById("flatpickr-root")}
   {enableTime}
   {timeOnly}
   {time24hr}

@@ -1,5 +1,6 @@
-import flatpickr from "flatpickr"
+import dayjs from "dayjs"
 import { FieldTypes } from "../../../constants"
+import { Helpers } from "@budibase/bbui"

 /**
  * Creates a validation function from a combination of schema-level constraints
@@ -81,7 +82,7 @@ export const createValidatorFromConstraints = (
   // Date constraint
   if (exists(schemaConstraints.datetime?.earliest)) {
     const limit = schemaConstraints.datetime.earliest
-    const limitString = flatpickr.formatDate(new Date(limit), "F j Y, H:i")
+    const limitString = Helpers.getDateDisplayValue(dayjs(limit))
     rules.push({
       type: "datetime",
       constraint: "minValue",
@@ -91,7 +92,7 @@ export const createValidatorFromConstraints = (
   }
   if (exists(schemaConstraints.datetime?.latest)) {
     const limit = schemaConstraints.datetime.latest
-    const limitString = flatpickr.formatDate(new Date(limit), "F j Y, H:i")
+    const limitString = Helpers.getDateDisplayValue(dayjs(limit))
     rules.push({
       type: "datetime",
       constraint: "maxValue",

@@ -3,6 +3,7 @@ import { writable, derived } from "svelte/store"
 export const createSidePanelStore = () => {
   const initialState = {
     contentId: null,
+    ignoreClicksOutside: true,
   }
   const store = writable(initialState)
   const derivedStore = derived(store, $store => {
@@ -32,11 +33,18 @@ export const createSidePanelStore = () => {
     }, 50)
   }

+  const setIgnoreClicksOutside = bool => {
+    store.update(state => {
+      state.ignoreClicksOutside = bool
+      return state
+    })
+  }
+
   return {
     subscribe: derivedStore.subscribe,
     actions: {
       open,
       close,
+      setIgnoreClicksOutside,
     },
   }
 }

@@ -240,6 +240,7 @@ const triggerAutomationHandler = async action => {
 const navigationHandler = action => {
   const { url, peek, externalNewTab } = action.parameters
   routeStore.actions.navigate(url, peek, externalNewTab)
+  closeSidePanelHandler()
 }

 const queryExecutionHandler = async action => {
@@ -541,16 +542,22 @@ export const enrichButtonActions = (actions, context) => {
           // then execute the rest of the actions in the chain
           const result = await callback()
           if (result !== false) {
-            // Generate a new total context to pass into the next enrichment
+            // Generate a new total context for the next enrichment
             buttonContext.push(result)
             const newContext = { ...context, actions: buttonContext }

-            // Enrich and call the next button action if there is more than one action remaining
+            // Enrich and call the next button action if there is more
+            // than one action remaining
             const next = enrichButtonActions(
               actions.slice(i + 1),
               newContext
             )
-            resolve(typeof next === "function" ? await next() : true)
+            if (typeof next === "function") {
+              // Pass the event context back into the new action chain
+              resolve(await next(eventContext))
+            } else {
+              resolve(true)
+            }
           } else {
             resolve(false)
           }

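The behavioural change above is that when a later action in the chain exists, the original eventContext is now threaded into it rather than the chain simply resolving to true. A minimal sketch of how a caller consumes the enriched chain (shapes assumed from the surrounding code, not a definitive API):

    const run = enrichButtonActions(actions, context)  // async function, or false if invalid
    if (typeof run === "function") {
      await run(eventContext)  // the same event context now reaches every nested chain
    }
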
@@ -124,6 +124,7 @@
     const fieldSchema = schemaFields.find(x => x.name === filter.field)
     filter.type = fieldSchema?.type
     filter.subtype = fieldSchema?.subtype
+    filter.formulaType = fieldSchema?.formulaType

     // Update external type based on field
     filter.externalType = getSchema(filter)?.externalType
@@ -288,6 +289,7 @@
       OperatorOptions.ContainsAny.value,
     ].includes(filter.operator)}
     disabled={filter.noValue}
+    type={filter.valueType}
   />
 {:else}
   <Input disabled />
@@ -324,8 +326,6 @@
 <style>
   .container {
     width: 100%;
-    max-width: 1000px;
-    margin: 0 auto;
   }
   .fields {
     display: grid;

@@ -4,6 +4,7 @@
   import { createAPIClient } from "../api"

   export let API = createAPIClient()
+
   export let value = null
   export let disabled
   export let multiselect = false
@@ -23,12 +24,14 @@
   $: component = multiselect ? Multiselect : Select
 </script>

-<svelte:component
-  this={component}
-  bind:value
-  autocomplete
-  {options}
-  getOptionLabel={option => option.email}
-  getOptionValue={option => option._id}
-  {disabled}
-/>
+<div class="user-control">
+  <svelte:component
+    this={component}
+    bind:value
+    autocomplete
+    {options}
+    getOptionLabel={option => option.email}
+    getOptionValue={option => option._id}
+    {disabled}
+  />
+</div>

@@ -121,8 +121,14 @@

   const onContextMenu = e => {
     e.preventDefault()
-    ui.actions.blur()
-    open = !open
+
+    // The timeout allows time for clickoutside to close other open popovers
+    // before we show this one. Without the timeout, this popover closes again
+    // before it's even visible as clickoutside closes it.
+    setTimeout(() => {
+      ui.actions.blur()
+      open = !open
+    }, 10)
   }

   const sortAscending = () => {

@@ -18,7 +18,7 @@
     focusedCellAPI,
     focusedRowId,
     notifications,
-    isDatasourcePlus,
+    hasBudibaseIdentifiers,
   } = getContext("grid")

   let anchor
@@ -82,7 +82,7 @@
   </MenuItem>
   <MenuItem
     icon="Copy"
-    disabled={isNewRow || !$focusedRow?._id || !$isDatasourcePlus}
+    disabled={isNewRow || !$focusedRow?._id || !$hasBudibaseIdentifiers}
     on:click={() => copyToClipboard($focusedRow?._id)}
     on:click={menu.actions.close}
   >
@@ -90,7 +90,7 @@
   </MenuItem>
   <MenuItem
     icon="Copy"
-    disabled={isNewRow || !$focusedRow?._rev}
+    disabled={isNewRow || !$focusedRow?._rev || !$hasBudibaseIdentifiers}
     on:click={() => copyToClipboard($focusedRow?._rev)}
     on:click={menu.actions.close}
   >

@@ -75,14 +75,18 @@ export const deriveStores = context => {
     }
   )

-  const isDatasourcePlus = derived(datasource, $datasource => {
-    return ["table", "viewV2"].includes($datasource?.type)
+  const hasBudibaseIdentifiers = derived(datasource, $datasource => {
+    let type = $datasource?.type
+    if (type === "provider") {
+      type = $datasource.value?.datasource?.type
+    }
+    return ["table", "viewV2", "link"].includes(type)
   })

   return {
     schema,
     enrichedSchema,
-    isDatasourcePlus,
+    hasBudibaseIdentifiers,
   }
 }

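hasBudibaseIdentifiers broadens the old isDatasourcePlus check: provider datasources are unwrapped to their inner datasource type, and relationship ("link") sources now count as carrying Budibase _id/_rev identifiers. Illustrative inputs and results (example values assumed for this sketch):

    // { type: "table" }                                              -> true
    // { type: "provider", value: { datasource: { type: "link" } } }  -> true
    // { type: "custom" }                                             -> false
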
@@ -17,6 +17,7 @@ export const createActions = context => {

   const open = (cellId, e) => {
     e.preventDefault()
+    e.stopPropagation()

     // Get DOM node for grid data wrapper to compute relative position to
     const gridNode = document.getElementById(gridID)

@@ -83,7 +83,7 @@ export const createActions = context => {
     error,
     notifications,
     fetch,
-    isDatasourcePlus,
+    hasBudibaseIdentifiers,
     refreshing,
   } = context
   const instanceLoaded = writable(false)
@@ -196,9 +196,16 @@ export const createActions = context => {
   // Handles validation errors from the rows API and updates local validation
   // state, storing error messages against relevant cells
   const handleValidationError = (rowId, error) => {
+    let errorString
+    if (typeof error === "string") {
+      errorString = error
+    } else if (typeof error?.message === "string") {
+      errorString = error.message
+    }
+
     // If the server doesn't reply with a valid error, assume that the source
     // of the error is the focused cell's column
-    if (!error?.json?.validationErrors && error?.message) {
+    if (!error?.json?.validationErrors && errorString) {
       const focusedColumn = get(focusedCellId)?.split("-")[1]
       if (focusedColumn) {
         error = {
@@ -261,7 +268,7 @@ export const createActions = context => {
       focusedCellId.set(`${rowId}-${erroredColumns[0]}`)
     }
   } else {
-    get(notifications).error(error?.message || "An unknown error occurred")
+    get(notifications).error(errorString || "An unknown error occurred")
   }
 }

@@ -458,14 +465,14 @@ export const createActions = context => {
     }
     let rowsToAppend = []
     let newRow
-    const $isDatasourcePlus = get(isDatasourcePlus)
+    const $hasBudibaseIdentifiers = get(hasBudibaseIdentifiers)
     for (let i = 0; i < newRows.length; i++) {
       newRow = newRows[i]

       // Ensure we have a unique _id.
       // This means generating one for non DS+, overwriting any that may already
       // exist as we cannot allow duplicates.
-      if (!$isDatasourcePlus) {
+      if (!$hasBudibaseIdentifiers) {
         newRow._id = Helpers.uuid()
       }

@@ -510,7 +517,7 @@ export const createActions = context => {
   const cleanRow = row => {
     let clone = { ...row }
     delete clone.__idx
-    if (!get(isDatasourcePlus)) {
+    if (!get(hasBudibaseIdentifiers)) {
       delete clone._id
     }
     return clone

@@ -57,6 +57,7 @@ export const PlanType = {
   PRO: "pro",
   BUSINESS: "business",
   ENTERPRISE: "enterprise",
+  ENTERPRISE_BASIC_TRIAL: "enterprise_basic_trial",
 }

 /**
@@ -124,8 +125,8 @@ export const TypeIconMap = {
   [FieldType.ARRAY]: "Duplicate",
   [FieldType.NUMBER]: "123",
   [FieldType.BOOLEAN]: "Boolean",
-  [FieldType.ATTACHMENTS]: "Attach",
-  [FieldType.ATTACHMENT_SINGLE]: "Attach",
+  [FieldType.ATTACHMENTS]: "DocumentFragmentGroup",
+  [FieldType.ATTACHMENT_SINGLE]: "DocumentFragment",
   [FieldType.LINK]: "DataCorrelated",
   [FieldType.FORMULA]: "Calculator",
   [FieldType.JSON]: "Brackets",

@@ -1 +1 @@
-Subproject commit 479879246aac5dd3073cc695945c62c41fae5b0e
+Subproject commit ff397e5454ad3361b25efdf14746c36dcbd3f409

@@ -52,6 +52,8 @@ import {
   FetchAppPackageResponse,
   DuplicateAppRequest,
   DuplicateAppResponse,
+  UpdateAppRequest,
+  UpdateAppResponse,
 } from "@budibase/types"
 import { BASE_LAYOUT_PROP_IDS } from "../../constants/layouts"
 import sdk from "../../sdk"
@@ -450,7 +452,7 @@ export async function create(ctx: UserCtx<CreateAppRequest, App>) {
 // This endpoint currently operates as a PATCH rather than a PUT
 // Thus name and url fields are handled only if present
 export async function update(
-  ctx: UserCtx<{ name?: string; url?: string }, App>
+  ctx: UserCtx<UpdateAppRequest, UpdateAppResponse>
 ) {
   const apps = (await dbCore.getAllApps({ dev: true })) as App[]
   // validation

@ -2,7 +2,7 @@ import stream from "stream"
|
||||||
import archiver from "archiver"
|
import archiver from "archiver"
|
||||||
|
|
||||||
import { quotas } from "@budibase/pro"
|
import { quotas } from "@budibase/pro"
|
||||||
import { objectStore } from "@budibase/backend-core"
|
import { objectStore, context } from "@budibase/backend-core"
|
||||||
import * as internal from "./internal"
|
import * as internal from "./internal"
|
||||||
import * as external from "./external"
|
import * as external from "./external"
|
||||||
import { isExternalTableID } from "../../../integrations/utils"
|
import { isExternalTableID } from "../../../integrations/utils"
|
||||||
|
@ -198,8 +198,18 @@ export async function destroy(ctx: UserCtx<DeleteRowRequest>) {
|
||||||
export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
|
export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
|
||||||
const tableId = utils.getTableId(ctx)
|
const tableId = utils.getTableId(ctx)
|
||||||
|
|
||||||
|
await context.ensureSnippetContext(true)
|
||||||
|
|
||||||
|
const enrichedQuery = await utils.enrichSearchContext(
|
||||||
|
{ ...ctx.request.body.query },
|
||||||
|
{
|
||||||
|
user: sdk.users.getUserContextBindings(ctx.user),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
const searchParams: RowSearchParams = {
|
const searchParams: RowSearchParams = {
|
||||||
...ctx.request.body,
|
...ctx.request.body,
|
||||||
|
query: enrichedQuery,
|
||||||
tableId,
|
tableId,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -1,20 +1,11 @@
 import { getRowParams } from "../../../db/utils"
 import {
   outputProcessing,
-  processAutoColumn,
   processFormulas,
 } from "../../../utilities/rowProcessor"
-import { context, locks } from "@budibase/backend-core"
-import {
-  Table,
-  Row,
-  LockType,
-  LockName,
-  FormulaType,
-  FieldType,
-} from "@budibase/types"
+import { context } from "@budibase/backend-core"
+import { Table, Row, FormulaType, FieldType } from "@budibase/types"
 import * as linkRows from "../../../db/linkedRows"
-import sdk from "../../../sdk"
 import isEqual from "lodash/isEqual"
 import { cloneDeep } from "lodash/fp"

@@ -151,30 +142,7 @@ export async function finaliseRow(
   // if another row has been written since processing this will
   // handle the auto ID clash
   if (oldTable && !isEqual(oldTable, table)) {
-    try {
-      await db.put(table)
-    } catch (err: any) {
-      if (err.status === 409) {
-        // Some conflicts with the autocolumns occurred, we need to refetch the table and recalculate
-        await locks.doWithLock(
-          {
-            type: LockType.AUTO_EXTEND,
-            name: LockName.PROCESS_AUTO_COLUMNS,
-            resource: table._id,
-          },
-          async () => {
-            const latestTable = await sdk.tables.getTable(table._id!)
-            let response = processAutoColumn(null, latestTable, row, {
-              reprocessing: true,
-            })
-            await db.put(response.table)
-            row = response.row
-          }
-        )
-      } else {
-        throw err
-      }
-    }
+    await db.put(table)
   }
   const response = await db.put(row)
   // for response, calculate the formulas for the enriched row

@@ -73,12 +73,15 @@ export function basicProcessing({
   // filter the row down to what is actually the row (not joined)
   for (let field of Object.values(table.schema)) {
     const fieldName = field.name
-    const value = extractFieldValue({
+    let value = extractFieldValue({
       row,
       tableName: table.name,
       fieldName,
       isLinked,
     })
+    if (value instanceof Buffer) {
+      value = value.toString()
+    }
     // all responses include "select col as table.col" so that overlaps are handled
     if (value != null) {
       thisRow[fieldName] = value

@@ -22,7 +22,7 @@ import {
   getInternalRowId,
 } from "./basic"
 import sdk from "../../../../sdk"
+import { processStringSync } from "@budibase/string-templates"
 import validateJs from "validate.js"
 
 validateJs.extend(validateJs.validators.datetime, {

@@ -117,6 +117,19 @@ export async function validate(
   })
 }
 
+function fixBooleanFields({ row, table }: { row: Row; table: Table }) {
+  for (let col of Object.values(table.schema)) {
+    if (col.type === FieldType.BOOLEAN) {
+      if (row[col.name] === 1) {
+        row[col.name] = true
+      } else if (row[col.name] === 0) {
+        row[col.name] = false
+      }
+    }
+  }
+  return row
+}
+
 export async function sqlOutputProcessing(
   rows: DatasourcePlusQueryResponse,
   table: Table,

@@ -161,7 +174,9 @@ export async function sqlOutputProcessing(
     if (thisRow._id == null) {
       throw new Error("Unable to generate row ID for SQL rows")
     }
-    finalRows[thisRow._id] = thisRow
+    finalRows[thisRow._id] = fixBooleanFields({ row: thisRow, table })
+
     // do this at end once its been added to the final rows
     finalRows = await updateRelationshipColumns(
       table,
@ -189,3 +204,63 @@ export async function sqlOutputProcessing(
|
||||||
export function isUserMetadataTable(tableId: string) {
|
export function isUserMetadataTable(tableId: string) {
|
||||||
return tableId === InternalTables.USER_METADATA
|
return tableId === InternalTables.USER_METADATA
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export async function enrichArrayContext(
|
||||||
|
fields: any[],
|
||||||
|
inputs = {},
|
||||||
|
helpers = true
|
||||||
|
): Promise<any[]> {
|
||||||
|
const map: Record<string, any> = {}
|
||||||
|
for (let index in fields) {
|
||||||
|
map[index] = fields[index]
|
||||||
|
}
|
||||||
|
const output = await enrichSearchContext(map, inputs, helpers)
|
||||||
|
const outputArray: any[] = []
|
||||||
|
for (let [key, value] of Object.entries(output)) {
|
||||||
|
outputArray[parseInt(key)] = value
|
||||||
|
}
|
||||||
|
return outputArray
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function enrichSearchContext(
|
||||||
|
fields: Record<string, any>,
|
||||||
|
inputs = {},
|
||||||
|
helpers = true
|
||||||
|
): Promise<Record<string, any>> {
|
||||||
|
const enrichedQuery: Record<string, any> = {}
|
||||||
|
if (!fields || !inputs) {
|
||||||
|
return enrichedQuery
|
||||||
|
}
|
||||||
|
const parameters = { ...inputs }
|
||||||
|
|
||||||
|
if (Array.isArray(fields)) {
|
||||||
|
return enrichArrayContext(fields, inputs, helpers)
|
||||||
|
}
|
||||||
|
|
||||||
|
// enrich the fields with dynamic parameters
|
||||||
|
for (let key of Object.keys(fields)) {
|
||||||
|
if (fields[key] == null) {
|
||||||
|
enrichedQuery[key] = null
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if (typeof fields[key] === "object") {
|
||||||
|
// enrich nested fields object
|
||||||
|
enrichedQuery[key] = await enrichSearchContext(
|
||||||
|
fields[key],
|
||||||
|
parameters,
|
||||||
|
helpers
|
||||||
|
)
|
||||||
|
} else if (typeof fields[key] === "string") {
|
||||||
|
// enrich string value as normal
|
||||||
|
enrichedQuery[key] = processStringSync(fields[key], parameters, {
|
||||||
|
noEscaping: true,
|
||||||
|
noHelpers: !helpers,
|
||||||
|
escapeNewlines: true,
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
enrichedQuery[key] = fields[key]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return enrichedQuery
|
||||||
|
}
|
||||||
|
|
|
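Note (illustrative, not part of the diff): enrichSearchContext added above walks a search query object and resolves any handlebars string values against the supplied inputs, recursing into nested objects and delegating arrays to enrichArrayContext. A hedged usage sketch; the query values and user object are invented, and the "{{ [now] }}" binding only resolves because helpers are left enabled:

    const query = {
      equal: { name: "{{ [user].firstName }}" },
      range: {
        appointment: { low: "{{ [now] }}", high: "9999-00-00T00:00:00.000Z" },
      },
    }

    // Controllers pass the session user as the binding context.
    const enriched = await enrichSearchContext(query, {
      user: { firstName: "Ada", _id: "us_123" }, // illustrative session user
    })
    // enriched.equal.name is now the literal string "Ada", and the nested
    // range object has been walked so "{{ [now] }}" resolves to a timestamp.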
@@ -9,7 +9,8 @@ import {
 } from "@budibase/types"
 import { dataFilters } from "@budibase/shared-core"
 import sdk from "../../../sdk"
-import { db } from "@budibase/backend-core"
+import { db, context } from "@budibase/backend-core"
+import { enrichSearchContext } from "./utils"
 
 export async function searchView(
   ctx: UserCtx<SearchViewRowRequest, SearchRowResponse>

@@ -56,10 +57,16 @@ export async function searchView(
     })
   }
 
+  await context.ensureSnippetContext(true)
+
+  const enrichedQuery = await enrichSearchContext(query, {
+    user: sdk.users.getUserContextBindings(ctx.user),
+  })
+
   const searchOptions: RequiredKeys<SearchViewRowRequest> &
     RequiredKeys<Pick<RowSearchParams, "tableId" | "query" | "fields">> = {
     tableId: view.tableId,
-    query,
+    query: enrichedQuery,
     fields: viewFields,
     ...getSortOptions(body, view),
     limit: body.limit,

@@ -128,6 +128,7 @@ export async function importToRows(
   for (let i = 0; i < data.length; i++) {
     let row = data[i]
     row._id = generateRowID(table._id!)
+    row.type = "row"
     row.tableId = table._id
 
     // We use a reference to table here and update it after input processing,
@@ -4,10 +4,12 @@ import { APIError } from "@budibase/types"
 describe("/api/applications/:appId/sync", () => {
   let config = setup.getConfig()
 
-  afterAll(setup.afterAll)
   beforeAll(async () => {
     await config.init()
   })
+  afterAll(async () => {
+    setup.afterAll()
+  })
 
   describe("/api/attachments/process", () => {
     it("should accept an image file upload", async () => {

@@ -18,7 +20,8 @@ describe("/api/applications/:appId/sync", () => {
       expect(resp.length).toBe(1)
 
       let upload = resp[0]
-      expect(upload.url.endsWith(".jpg")).toBe(true)
+      expect(upload.url.split("?")[0].endsWith(".jpg")).toBe(true)
       expect(upload.extension).toBe("jpg")
       expect(upload.size).toBe(1)
       expect(upload.name).toBe("1px.jpg")
@@ -1,16 +1,18 @@
+import { mocks } from "@budibase/backend-core/tests"
 import tk from "timekeeper"
 import * as setup from "./utilities"
 import { events } from "@budibase/backend-core"
 import sdk from "../../../sdk"
 import { checkBuilderEndpoint } from "./utilities/TestFunctions"
-import { mocks } from "@budibase/backend-core/tests"
 
 mocks.licenses.useBackups()
 
 describe("/backups", () => {
   let config = setup.getConfig()
 
-  afterAll(setup.afterAll)
+  afterAll(async () => {
+    setup.afterAll()
+  })
 
   beforeEach(async () => {
     tk.reset()
@@ -32,8 +32,6 @@ import * as uuid from "uuid"
 const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
 tk.freeze(timestamp)
 
-jest.unmock("mssql")
-
 describe.each([
   ["internal", undefined],
   [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],

@@ -131,7 +129,13 @@ describe.each([
 
   const assertRowUsage = async (expected: number) => {
     const usage = await getRowUsage()
-    expect(usage).toBe(expected)
+
+    // Because our quota tracking is not perfect, we allow a 10% margin of
+    // error. This is to account for the fact that parallel writes can result
+    // in some quota updates getting lost. We don't have any need to solve this
+    // right now, so we just allow for some error.
+    expect(usage).toBeGreaterThan(expected * 0.9)
+    expect(usage).toBeLessThan(expected * 1.1)
   }
 
   const defaultRowFields = isInternal
@@ -194,39 +198,99 @@ describe.each([
       await assertRowUsage(rowUsage)
     })
 
-    it("increment row autoId per create row request", async () => {
-      const rowUsage = await getRowUsage()
+    isInternal &&
+      it("increment row autoId per create row request", async () => {
+        const rowUsage = await getRowUsage()
 
         const newTable = await config.api.table.save(
           saveTableRequest({
             schema: {
               "Row ID": {
                 name: "Row ID",
                 type: FieldType.NUMBER,
                 subtype: AutoFieldSubType.AUTO_ID,
                 icon: "ri-magic-line",
                 autocolumn: true,
                 constraints: {
                   type: "number",
                   presence: true,
                   numericality: {
                     greaterThanOrEqualTo: "",
                     lessThanOrEqualTo: "",
+                  },
                 },
               },
             },
-          },
-      })
-    )
+          })
+        )
 
         let previousId = 0
         for (let i = 0; i < 10; i++) {
           const row = await config.api.row.save(newTable._id!, {})
           expect(row["Row ID"]).toBeGreaterThan(previousId)
           previousId = row["Row ID"]
         }
         await assertRowUsage(rowUsage + 10)
       })
+
+    isInternal &&
+      it("should increment auto ID correctly when creating rows in parallel", async () => {
+        const table = await config.api.table.save(
+          saveTableRequest({
+            schema: {
+              "Row ID": {
+                name: "Row ID",
+                type: FieldType.NUMBER,
+                subtype: AutoFieldSubType.AUTO_ID,
+                icon: "ri-magic-line",
+                autocolumn: true,
+                constraints: {
+                  type: "number",
+                  presence: true,
+                  numericality: {
+                    greaterThanOrEqualTo: "",
+                    lessThanOrEqualTo: "",
+                  },
+                },
+              },
+            },
+          })
+        )
+
+        const sequence = Array(50)
+          .fill(0)
+          .map((_, i) => i + 1)
+
+        // This block of code is simulating users creating auto ID rows at the
+        // same time. It's expected that this operation will sometimes return
+        // a document conflict error (409), but the idea is to retry in those
+        // situations. The code below does this a large number of times with
+        // small, random delays between them to try and get through the list
+        // as quickly as possible.
+        await Promise.all(
+          sequence.map(async () => {
+            const attempts = 20
+            for (let attempt = 0; attempt < attempts; attempt++) {
+              try {
+                await config.api.row.save(table._id!, {})
+                return
+              } catch (e) {
+                await new Promise(r => setTimeout(r, Math.random() * 15))
+              }
+            }
+            throw new Error(`Failed to create row after ${attempts} attempts`)
+          })
+        )
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows).toHaveLength(50)
+
+        // The main purpose of this test is to ensure that even under pressure,
+        // we maintain data integrity. An auto ID column should hand out
+        // monotonically increasing unique integers no matter what.
+        const ids = rows.map(r => r["Row ID"])
+        expect(ids).toEqual(expect.arrayContaining(sequence))
+      })
 
     isInternal &&
       it("row values are coerced", async () => {
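Note (illustrative, not part of the diff): the parallel auto ID test above inlines a retry loop because concurrent writes to the same table document can fail with a 409 conflict. The same pattern, pulled out as a small hypothetical helper (the name and delay values are invented for illustration):

    async function retryOnConflict<T>(fn: () => Promise<T>, attempts = 20): Promise<T> {
      for (let attempt = 0; attempt < attempts; attempt++) {
        try {
          return await fn()
        } catch (e) {
          // back off for a short, random interval before retrying
          await new Promise(resolve => setTimeout(resolve, Math.random() * 15))
        }
      }
      throw new Error(`Operation failed after ${attempts} attempts`)
    }

    // e.g. await retryOnConflict(() => config.api.row.save(table._id!, {}))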
@@ -856,7 +920,7 @@ describe.each([
       await config.withEnv({ SELF_HOSTED: "true" }, async () => {
         return context.doInAppContext(config.getAppId(), async () => {
           const enriched = await outputProcessing(table, [row])
-          expect((enriched as Row[])[0].attachment.url).toBe(
+          expect((enriched as Row[])[0].attachment.url.split("?")[0]).toBe(
             `/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}`
           )
         })

@@ -889,7 +953,7 @@ describe.each([
       await config.withEnv({ SELF_HOSTED: "true" }, async () => {
         return context.doInAppContext(config.getAppId(), async () => {
           const enriched = await outputProcessing(table, [row])
-          expect((enriched as Row[])[0].attachment[0].url).toBe(
+          expect((enriched as Row[])[0].attachment[0].url.split("?")[0]).toBe(
             `/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}`
           )
         })
@@ -1,10 +1,13 @@
 import { tableForDatasource } from "../../../tests/utilities/structures"
 import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
+import { db as dbCore } from "@budibase/backend-core"
 
 import * as setup from "./utilities"
 import {
+  AutoFieldSubType,
   Datasource,
   EmptyFilterOption,
+  BBReferenceFieldSubType,
   FieldType,
   RowSearchParams,
   SearchFilters,

@@ -12,32 +15,51 @@ import {
   SortType,
   Table,
   TableSchema,
+  User,
 } from "@budibase/types"
 import _ from "lodash"
+import tk from "timekeeper"
+import { encodeJSBinding } from "@budibase/string-templates"
 
-jest.unmock("mssql")
+const serverTime = new Date("2024-05-06T00:00:00.000Z")
+tk.freeze(serverTime)
 
 describe.each([
-  ["internal", undefined],
-  ["internal-sqs", undefined],
+  ["lucene", undefined],
+  ["sqs", undefined],
   [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
   [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
   [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
   [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
 ])("/api/:sourceId/search (%s)", (name, dsProvider) => {
-  const isSqs = name === "internal-sqs"
-  const isInternal = name === "internal"
+  const isSqs = name === "sqs"
+  const isLucene = name === "lucene"
+  const isInternal = isSqs || isLucene
   const config = setup.getConfig()
 
   let envCleanup: (() => void) | undefined
   let datasource: Datasource | undefined
   let table: Table
 
+  const snippets = [
+    {
+      name: "WeeksAgo",
+      code: "return function (weeks) {\n const currentTime = new Date();\n currentTime.setDate(currentTime.getDate()-(7 * (weeks || 1)));\n return currentTime.toISOString();\n}",
+    },
+  ]
+
   beforeAll(async () => {
     if (isSqs) {
       envCleanup = config.setEnv({ SQS_SEARCH_ENABLE: "true" })
     }
     await config.init()
+
+    if (config.app?.appId) {
+      config.app = await config.api.application.update(config.app?.appId, {
+        snippets,
+      })
+    }
 
     if (dsProvider) {
       datasource = await config.createDatasource({
         datasource: await dsProvider,

@@ -59,12 +81,28 @@ describe.each([
   }
 
   async function createRows(rows: Record<string, any>[]) {
-    await Promise.all(rows.map(r => config.api.row.save(table._id!, r)))
+    await config.api.row.bulkImport(table._id!, { rows })
   }
 
   class SearchAssertion {
     constructor(private readonly query: RowSearchParams) {}
 
+    private findRow(expectedRow: any, foundRows: any[]) {
+      const row = foundRows.find(foundRow => _.isMatch(foundRow, expectedRow))
+      if (!row) {
+        const fields = Object.keys(expectedRow)
+        // To make the error message more readable, we only include the fields
+        // that are present in the expected row.
+        const searchedObjects = foundRows.map(row => _.pick(row, fields))
+        throw new Error(
+          `Failed to find row: ${JSON.stringify(
+            expectedRow
+          )} in ${JSON.stringify(searchedObjects)}`
+        )
+      }
+      return row
+    }
+
     // Asserts that the query returns rows matching exactly the set of rows
     // passed in. The order of the rows matters. Rows returned in an order
     // different to the one passed in will cause the assertion to fail. Extra
@@ -80,9 +118,7 @@ describe.each([
     // eslint-disable-next-line jest/no-standalone-expect
     expect(foundRows).toEqual(
       expectedRows.map((expectedRow: any) =>
-        expect.objectContaining(
-          foundRows.find(foundRow => _.isMatch(foundRow, expectedRow))
-        )
+        expect.objectContaining(this.findRow(expectedRow, foundRows))
       )
     )
   }

@@ -102,9 +138,7 @@ describe.each([
     expect(foundRows).toEqual(
       expect.arrayContaining(
         expectedRows.map((expectedRow: any) =>
-          expect.objectContaining(
-            foundRows.find(foundRow => _.isMatch(foundRow, expectedRow))
-          )
+          expect.objectContaining(this.findRow(expectedRow, foundRows))
        )
      )
    )

@@ -123,9 +157,7 @@ describe.each([
     expect(foundRows).toEqual(
       expect.arrayContaining(
         expectedRows.map((expectedRow: any) =>
-          expect.objectContaining(
-            foundRows.find(foundRow => _.isMatch(foundRow, expectedRow))
-          )
+          expect.objectContaining(this.findRow(expectedRow, foundRows))
        )
      )
    )
|
||||||
return expectSearch({ query })
|
return expectSearch({ query })
|
||||||
}
|
}
|
||||||
|
|
||||||
describe("strings", () => {
|
describe("boolean", () => {
|
||||||
|
beforeAll(async () => {
|
||||||
|
await createTable({
|
||||||
|
isTrue: { name: "isTrue", type: FieldType.BOOLEAN },
|
||||||
|
})
|
||||||
|
await createRows([{ isTrue: true }, { isTrue: false }])
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("equal", () => {
|
||||||
|
it("successfully finds true row", () =>
|
||||||
|
expectQuery({ equal: { isTrue: true } }).toMatchExactly([
|
||||||
|
{ isTrue: true },
|
||||||
|
]))
|
||||||
|
|
||||||
|
it("successfully finds false row", () =>
|
||||||
|
expectQuery({ equal: { isTrue: false } }).toMatchExactly([
|
||||||
|
{ isTrue: false },
|
||||||
|
]))
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("notEqual", () => {
|
||||||
|
it("successfully finds false row", () =>
|
||||||
|
expectQuery({ notEqual: { isTrue: true } }).toContainExactly([
|
||||||
|
{ isTrue: false },
|
||||||
|
]))
|
||||||
|
|
||||||
|
it("successfully finds true row", () =>
|
||||||
|
expectQuery({ notEqual: { isTrue: false } }).toContainExactly([
|
||||||
|
{ isTrue: true },
|
||||||
|
]))
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("oneOf", () => {
|
||||||
|
it("successfully finds true row", () =>
|
||||||
|
expectQuery({ oneOf: { isTrue: [true] } }).toContainExactly([
|
||||||
|
{ isTrue: true },
|
||||||
|
]))
|
||||||
|
|
||||||
|
it("successfully finds false row", () =>
|
||||||
|
expectQuery({ oneOf: { isTrue: [false] } }).toContainExactly([
|
||||||
|
{ isTrue: false },
|
||||||
|
]))
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("sort", () => {
|
||||||
|
it("sorts ascending", () =>
|
||||||
|
expectSearch({
|
||||||
|
query: {},
|
||||||
|
sort: "isTrue",
|
||||||
|
sortOrder: SortOrder.ASCENDING,
|
||||||
|
}).toMatchExactly([{ isTrue: false }, { isTrue: true }]))
|
||||||
|
|
||||||
|
it("sorts descending", () =>
|
||||||
|
expectSearch({
|
||||||
|
query: {},
|
||||||
|
sort: "isTrue",
|
||||||
|
sortOrder: SortOrder.DESCENDING,
|
||||||
|
}).toMatchExactly([{ isTrue: true }, { isTrue: false }]))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// Ensure all bindings resolve and perform as expected
|
||||||
|
describe("bindings", () => {
|
||||||
|
let globalUsers: any = []
|
||||||
|
|
||||||
|
const future = new Date(serverTime.getTime())
|
||||||
|
future.setDate(future.getDate() + 30)
|
||||||
|
|
||||||
|
const rows = (currentUser: User) => {
|
||||||
|
return [
|
||||||
|
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
||||||
|
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
||||||
|
{ name: currentUser.firstName, appointment: future.toISOString() },
|
||||||
|
{ name: "serverDate", appointment: serverTime.toISOString() },
|
||||||
|
{
|
||||||
|
name: "single user, session user",
|
||||||
|
single_user: JSON.stringify([currentUser]),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "single user",
|
||||||
|
single_user: JSON.stringify([globalUsers[0]]),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "multi user",
|
||||||
|
multi_user: JSON.stringify(globalUsers),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "multi user with session user",
|
||||||
|
multi_user: JSON.stringify([...globalUsers, currentUser]),
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
// Set up some global users
|
||||||
|
globalUsers = await Promise.all(
|
||||||
|
Array(2)
|
||||||
|
.fill(0)
|
||||||
|
.map(async () => {
|
||||||
|
const globalUser = await config.globalUser()
|
||||||
|
const userMedataId = globalUser._id
|
||||||
|
? dbCore.generateUserMetadataID(globalUser._id)
|
||||||
|
: null
|
||||||
|
return {
|
||||||
|
_id: globalUser._id,
|
||||||
|
_meta: userMedataId,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
await createTable({
|
||||||
|
name: { name: "name", type: FieldType.STRING },
|
||||||
|
appointment: { name: "appointment", type: FieldType.DATETIME },
|
||||||
|
single_user: {
|
||||||
|
name: "single_user",
|
||||||
|
type: FieldType.BB_REFERENCE,
|
||||||
|
subtype: BBReferenceFieldSubType.USER,
|
||||||
|
},
|
||||||
|
multi_user: {
|
||||||
|
name: "multi_user",
|
||||||
|
type: FieldType.BB_REFERENCE,
|
||||||
|
subtype: BBReferenceFieldSubType.USERS,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
await createRows(rows(config.getUser()))
|
||||||
|
})
|
||||||
|
|
||||||
|
// !! Current User is auto generated per run
|
||||||
|
it("should return all rows matching the session user firstname", async () => {
|
||||||
|
await expectQuery({
|
||||||
|
equal: { name: "{{ [user].firstName }}" },
|
||||||
|
}).toContainExactly([
|
||||||
|
{
|
||||||
|
name: config.getUser().firstName,
|
||||||
|
appointment: future.toISOString(),
|
||||||
|
},
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should parse the date binding and return all rows after the resolved value", async () => {
|
||||||
|
await expectQuery({
|
||||||
|
range: {
|
||||||
|
appointment: {
|
||||||
|
low: "{{ [now] }}",
|
||||||
|
high: "9999-00-00T00:00:00.000Z",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}).toContainExactly([
|
||||||
|
{
|
||||||
|
name: config.getUser().firstName,
|
||||||
|
appointment: future.toISOString(),
|
||||||
|
},
|
||||||
|
{ name: "serverDate", appointment: serverTime.toISOString() },
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should parse the date binding and return all rows before the resolved value", async () => {
|
||||||
|
await expectQuery({
|
||||||
|
range: {
|
||||||
|
appointment: {
|
||||||
|
low: "0000-00-00T00:00:00.000Z",
|
||||||
|
high: "{{ [now] }}",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}).toContainExactly([
|
||||||
|
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
||||||
|
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
||||||
|
{ name: "serverDate", appointment: serverTime.toISOString() },
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should parse the encoded js snippet. Return rows with appointments up to 1 week in the past", async () => {
|
||||||
|
const jsBinding = "return snippets.WeeksAgo();"
|
||||||
|
const encodedBinding = encodeJSBinding(jsBinding)
|
||||||
|
|
||||||
|
await expectQuery({
|
||||||
|
range: {
|
||||||
|
appointment: {
|
||||||
|
low: "0000-00-00T00:00:00.000Z",
|
||||||
|
high: encodedBinding,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}).toContainExactly([
|
||||||
|
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
||||||
|
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should parse the encoded js binding. Return rows with appointments 2 weeks in the past", async () => {
|
||||||
|
const jsBinding =
|
||||||
|
"const currentTime = new Date()\ncurrentTime.setDate(currentTime.getDate()-14);\nreturn currentTime.toISOString();"
|
||||||
|
const encodedBinding = encodeJSBinding(jsBinding)
|
||||||
|
|
||||||
|
await expectQuery({
|
||||||
|
range: {
|
||||||
|
appointment: {
|
||||||
|
low: "0000-00-00T00:00:00.000Z",
|
||||||
|
high: encodedBinding,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}).toContainExactly([
|
||||||
|
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
||||||
|
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should match a single user row by the session user id", async () => {
|
||||||
|
await expectQuery({
|
||||||
|
equal: { single_user: "{{ [user]._id }}" },
|
||||||
|
}).toContainExactly([
|
||||||
|
{
|
||||||
|
name: "single user, session user",
|
||||||
|
single_user: [{ _id: config.getUser()._id }],
|
||||||
|
},
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
// TODO(samwho): fix for SQS
|
||||||
|
!isSqs &&
|
||||||
|
it("should match the session user id in a multi user field", async () => {
|
||||||
|
const allUsers = [...globalUsers, config.getUser()].map((user: any) => {
|
||||||
|
return { _id: user._id }
|
||||||
|
})
|
||||||
|
|
||||||
|
await expectQuery({
|
||||||
|
contains: { multi_user: ["{{ [user]._id }}"] },
|
||||||
|
}).toContainExactly([
|
||||||
|
{
|
||||||
|
name: "multi user with session user",
|
||||||
|
multi_user: allUsers,
|
||||||
|
},
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
// TODO(samwho): fix for SQS
|
||||||
|
!isSqs &&
|
||||||
|
it("should not match the session user id in a multi user field", async () => {
|
||||||
|
await expectQuery({
|
||||||
|
notContains: { multi_user: ["{{ [user]._id }}"] },
|
||||||
|
notEmpty: { multi_user: true },
|
||||||
|
}).toContainExactly([
|
||||||
|
{
|
||||||
|
name: "multi user",
|
||||||
|
multi_user: globalUsers.map((user: any) => {
|
||||||
|
return { _id: user._id }
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should match the session user id and a user table row id using helpers, user binding and a static user id.", async () => {
|
||||||
|
await expectQuery({
|
||||||
|
oneOf: {
|
||||||
|
single_user: [
|
||||||
|
"{{ default [user]._id '_empty_' }}",
|
||||||
|
globalUsers[0]._id,
|
||||||
|
],
|
||||||
|
},
|
||||||
|
}).toContainExactly([
|
||||||
|
{
|
||||||
|
name: "single user, session user",
|
||||||
|
single_user: [{ _id: config.getUser()._id }],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "single user",
|
||||||
|
single_user: [{ _id: globalUsers[0]._id }],
|
||||||
|
},
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should resolve 'default' helper to '_empty_' when binding resolves to nothing", async () => {
|
||||||
|
await expectQuery({
|
||||||
|
oneOf: {
|
||||||
|
single_user: [
|
||||||
|
"{{ default [user]._idx '_empty_' }}",
|
||||||
|
globalUsers[0]._id,
|
||||||
|
],
|
||||||
|
},
|
||||||
|
}).toContainExactly([
|
||||||
|
{
|
||||||
|
name: "single user",
|
||||||
|
single_user: [{ _id: globalUsers[0]._id }],
|
||||||
|
},
|
||||||
|
])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe.each([FieldType.STRING, FieldType.LONGFORM])("%s", () => {
|
||||||
beforeAll(async () => {
|
beforeAll(async () => {
|
||||||
await createTable({
|
await createTable({
|
||||||
name: { name: "name", type: FieldType.STRING },
|
name: { name: "name", type: FieldType.STRING },
|
||||||
|
@@ -250,6 +569,31 @@ describe.each([
       }).toFindNothing())
     })
 
+    describe("empty", () => {
+      it("finds no empty rows", () =>
+        expectQuery({ empty: { name: null } }).toFindNothing())
+
+      it("should not be affected by when filter empty behaviour", () =>
+        expectQuery({
+          empty: { name: null },
+          onEmptyFilter: EmptyFilterOption.RETURN_ALL,
+        }).toFindNothing())
+    })
+
+    describe("notEmpty", () => {
+      it("finds all non-empty rows", () =>
+        expectQuery({ notEmpty: { name: null } }).toContainExactly([
+          { name: "foo" },
+          { name: "bar" },
+        ]))
+
+      it("should not be affected by when filter empty behaviour", () =>
+        expectQuery({
+          notEmpty: { name: null },
+          onEmptyFilter: EmptyFilterOption.RETURN_NONE,
+        }).toContainExactly([{ name: "foo" }, { name: "bar" }]))
+    })
+
     describe("sort", () => {
       it("sorts ascending", () =>
         expectSearch({

@@ -339,14 +683,14 @@ describe.each([
       }).toFindNothing())
 
     // We never implemented half-open ranges in Lucene.
-    !isInternal &&
+    !isLucene &&
       it("can search using just a low value", () =>
         expectQuery({
           range: { age: { low: 5 } },
         }).toContainExactly([{ age: 10 }]))
 
     // We never implemented half-open ranges in Lucene.
-    !isInternal &&
+    !isLucene &&
      it("can search using just a high value", () =>
        expectQuery({
          range: { age: { high: 5 } },

@@ -457,14 +801,14 @@ describe.each([
       }).toFindNothing())
 
     // We never implemented half-open ranges in Lucene.
-    !isInternal &&
+    !isLucene &&
       it("can search using just a low value", () =>
         expectQuery({
           range: { dob: { low: JAN_5TH } },
         }).toContainExactly([{ dob: JAN_10TH }]))
 
     // We never implemented half-open ranges in Lucene.
-    !isInternal &&
+    !isLucene &&
      it("can search using just a high value", () =>
        expectQuery({
          range: { dob: { high: JAN_5TH } },

@@ -506,7 +850,7 @@ describe.each([
     })
   })
 
-  describe("array of strings", () => {
+  describe.each([FieldType.ARRAY, FieldType.OPTIONS])("%s", () => {
     beforeAll(async () => {
       await createTable({
         numbers: {

@@ -642,7 +986,7 @@ describe.each([
   // Range searches against bigints don't seem to work at all in Lucene, and I
   // couldn't figure out why. Given that we're replacing Lucene with SQS,
   // we've decided not to spend time on it.
-  !isInternal &&
+  !isLucene &&
     describe("range", () => {
       it("successfully finds a row", () =>
         expectQuery({
@@ -675,4 +1019,137 @@ describe.each([
         }).toContainExactly([{ num: SMALL }, { num: MEDIUM }]))
     })
   })
+
+  isInternal &&
+    describe("auto", () => {
+      beforeAll(async () => {
+        await createTable({
+          auto: {
+            name: "auto",
+            type: FieldType.AUTO,
+            autocolumn: true,
+            subtype: AutoFieldSubType.AUTO_ID,
+          },
+        })
+        await createRows(new Array(10).fill({}))
+      })
+
+      describe("equal", () => {
+        it("successfully finds a row", () =>
+          expectQuery({ equal: { auto: 1 } }).toContainExactly([{ auto: 1 }]))
+
+        it("fails to find nonexistent row", () =>
+          expectQuery({ equal: { auto: 0 } }).toFindNothing())
+      })
+
+      describe("not equal", () => {
+        it("successfully finds a row", () =>
+          expectQuery({ notEqual: { auto: 1 } }).toContainExactly([
+            { auto: 2 },
+            { auto: 3 },
+            { auto: 4 },
+            { auto: 5 },
+            { auto: 6 },
+            { auto: 7 },
+            { auto: 8 },
+            { auto: 9 },
+            { auto: 10 },
+          ]))
+
+        it("fails to find nonexistent row", () =>
+          expectQuery({ notEqual: { auto: 0 } }).toContainExactly([
+            { auto: 1 },
+            { auto: 2 },
+            { auto: 3 },
+            { auto: 4 },
+            { auto: 5 },
+            { auto: 6 },
+            { auto: 7 },
+            { auto: 8 },
+            { auto: 9 },
+            { auto: 10 },
+          ]))
+      })
+
+      describe("oneOf", () => {
+        it("successfully finds a row", () =>
+          expectQuery({ oneOf: { auto: [1] } }).toContainExactly([{ auto: 1 }]))
+
+        it("fails to find nonexistent row", () =>
+          expectQuery({ oneOf: { auto: [0] } }).toFindNothing())
+      })
+
+      describe("range", () => {
+        it("successfully finds a row", () =>
+          expectQuery({
+            range: { auto: { low: 1, high: 1 } },
+          }).toContainExactly([{ auto: 1 }]))
+
+        it("successfully finds multiple rows", () =>
+          expectQuery({
+            range: { auto: { low: 1, high: 2 } },
+          }).toContainExactly([{ auto: 1 }, { auto: 2 }]))
+
+        it("successfully finds a row with a high bound", () =>
+          expectQuery({
+            range: { auto: { low: 2, high: 2 } },
+          }).toContainExactly([{ auto: 2 }]))
+
+        it("successfully finds no rows", () =>
+          expectQuery({
+            range: { auto: { low: 0, high: 0 } },
+          }).toFindNothing())
+
+        isSqs &&
+          it("can search using just a low value", () =>
+            expectQuery({
+              range: { auto: { low: 9 } },
+            }).toContainExactly([{ auto: 9 }, { auto: 10 }]))
+
+        isSqs &&
+          it("can search using just a high value", () =>
+            expectQuery({
+              range: { auto: { high: 2 } },
+            }).toContainExactly([{ auto: 1 }, { auto: 2 }]))
+      })
+
+      isSqs &&
+        describe("sort", () => {
+          it("sorts ascending", () =>
+            expectSearch({
+              query: {},
+              sort: "auto",
+              sortOrder: SortOrder.ASCENDING,
+            }).toMatchExactly([
+              { auto: 1 },
+              { auto: 2 },
+              { auto: 3 },
+              { auto: 4 },
+              { auto: 5 },
+              { auto: 6 },
+              { auto: 7 },
+              { auto: 8 },
+              { auto: 9 },
+              { auto: 10 },
+            ]))
+
+          it("sorts descending", () =>
+            expectSearch({
+              query: {},
+              sort: "auto",
+              sortOrder: SortOrder.DESCENDING,
+            }).toMatchExactly([
+              { auto: 10 },
+              { auto: 9 },
+              { auto: 8 },
+              { auto: 7 },
+              { auto: 6 },
+              { auto: 5 },
+              { auto: 4 },
+              { auto: 3 },
+              { auto: 2 },
+              { auto: 1 },
+            ]))
+        })
+    })
 })
@@ -1,3 +1,13 @@
+// Directly mock the AWS SDK
+jest.mock("aws-sdk", () => ({
+  S3: jest.fn(() => ({
+    getSignedUrl: jest.fn(
+      (operation, params) => `http://example.com/${params.Bucket}/${params.Key}`
+    ),
+    upload: jest.fn(() => ({ Contents: {} })),
+  })),
+}))
+
 const setup = require("./utilities")
 const { constants } = require("@budibase/backend-core")
 
@@ -1,8 +1,8 @@
 import { context, events } from "@budibase/backend-core"
 import {
   AutoFieldSubType,
-  Datasource,
   BBReferenceFieldSubType,
+  Datasource,
   FieldType,
   INTERNAL_TABLE_SOURCE_ID,
   InternalTable,

@@ -149,58 +149,59 @@ describe.each([
     expect(res.name).toBeUndefined()
   })
 
-  it("updates only the passed fields", async () => {
-    await timekeeper.withFreeze(new Date(2021, 1, 1), async () => {
-      const table = await config.api.table.save(
-        tableForDatasource(datasource, {
-          schema: {
-            autoId: {
-              name: "id",
-              type: FieldType.NUMBER,
-              subtype: AutoFieldSubType.AUTO_ID,
-              autocolumn: true,
-              constraints: {
-                type: "number",
-                presence: false,
-              },
-            },
-          },
-        })
-      )
-
-      const newName = generator.guid()
-
-      const updatedTable = await config.api.table.save({
-        ...table,
-        name: newName,
-      })
-
-      let expected: Table = {
-        ...table,
-        name: newName,
-        _id: expect.any(String),
-      }
-      if (isInternal) {
-        expected._rev = expect.stringMatching(/^2-.+/)
-      }
-
-      expect(updatedTable).toEqual(expected)
-
-      const persistedTable = await config.api.table.get(updatedTable._id!)
-      expected = {
-        ...table,
-        name: newName,
-        _id: updatedTable._id,
-      }
-      if (datasource?.isSQL) {
-        expected.sql = true
-      }
-      if (isInternal) {
-        expected._rev = expect.stringMatching(/^2-.+/)
-      }
-      expect(persistedTable).toEqual(expected)
-    })
-  })
+  isInternal &&
+    it("updates only the passed fields", async () => {
+      await timekeeper.withFreeze(new Date(2021, 1, 1), async () => {
+        const table = await config.api.table.save(
+          tableForDatasource(datasource, {
+            schema: {
+              autoId: {
+                name: "id",
+                type: FieldType.NUMBER,
+                subtype: AutoFieldSubType.AUTO_ID,
+                autocolumn: true,
+                constraints: {
+                  type: "number",
+                  presence: false,
+                },
+              },
+            },
+          })
+        )
+
+        const newName = generator.guid()
+
+        const updatedTable = await config.api.table.save({
+          ...table,
+          name: newName,
+        })
+
+        let expected: Table = {
+          ...table,
+          name: newName,
+          _id: expect.any(String),
+        }
+        if (isInternal) {
+          expected._rev = expect.stringMatching(/^2-.+/)
+        }
+
+        expect(updatedTable).toEqual(expected)
+
+        const persistedTable = await config.api.table.get(updatedTable._id!)
+        expected = {
+          ...table,
+          name: newName,
+          _id: updatedTable._id,
+        }
+        if (datasource?.isSQL) {
+          expected.sql = true
+        }
+        if (isInternal) {
+          expected._rev = expect.stringMatching(/^2-.+/)
+        }
+        expect(persistedTable).toEqual(expected)
+      })
+    })
 
   describe("user table", () => {
     isInternal &&
@@ -214,6 +215,57 @@ describe.each([
     })
   })
 
+  describe("external table validation", () => {
+    !isInternal &&
+      it("should error if column is of type auto", async () => {
+        const table = basicTable(datasource)
+        await config.api.table.save(
+          {
+            ...table,
+            schema: {
+              ...table.schema,
+              auto: {
+                name: "auto",
+                autocolumn: true,
+                type: FieldType.AUTO,
+              },
+            },
+          },
+          {
+            status: 400,
+            body: {
+              message: `Column "auto" has type "${FieldType.AUTO}" - this is not supported.`,
+            },
+          }
+        )
+      })
+
+    !isInternal &&
+      it("should error if column has auto subtype", async () => {
+        const table = basicTable(datasource)
+        await config.api.table.save(
+          {
+            ...table,
+            schema: {
+              ...table.schema,
+              auto: {
+                name: "auto",
+                autocolumn: true,
+                type: FieldType.NUMBER,
+                subtype: AutoFieldSubType.AUTO_ID,
+              },
+            },
+          },
+          {
+            status: 400,
+            body: {
+              message: `Column "auto" has subtype "${AutoFieldSubType.AUTO_ID}" - this is not supported.`,
+            },
+          }
+        )
+      })
+  })
+
   it("should add a new column for an internal DB table", async () => {
     const saveTableRequest: SaveTableRequest = {
       ...basicTable(),
@@ -24,8 +24,6 @@ import merge from "lodash/merge"
 import { quotas } from "@budibase/pro"
 import { roles } from "@budibase/backend-core"
 
-jest.unmock("mssql")
-
 describe.each([
   ["internal", undefined],
   [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
@@ -4,8 +4,11 @@ import {
   encodeJSBinding,
 } from "@budibase/string-templates"
 import sdk from "../sdk"
-import { Row } from "@budibase/types"
+import { AutomationAttachment, FieldType, Row } from "@budibase/types"
 import { LoopInput, LoopStepType } from "../definitions/automations"
+import { objectStore, context } from "@budibase/backend-core"
+import * as uuid from "uuid"
+import path from "path"
 
 /**
  * When values are input to the system generally they will be of type string as this is required for template strings.

@@ -96,6 +99,98 @@ export function getError(err: any) {
   return typeof err !== "string" ? err.toString() : err
 }
 
+export async function sendAutomationAttachmentsToStorage(
+  tableId: string,
+  row: Row
+): Promise<Row> {
+  const table = await sdk.tables.getTable(tableId)
+  const attachmentRows: Record<
+    string,
+    AutomationAttachment[] | AutomationAttachment
+  > = {}
+
+  for (const [prop, value] of Object.entries(row)) {
+    const schema = table.schema[prop]
+    if (
+      schema?.type === FieldType.ATTACHMENTS ||
+      schema?.type === FieldType.ATTACHMENT_SINGLE
+    ) {
+      attachmentRows[prop] = value
+    }
+  }
+  for (const [prop, attachments] of Object.entries(attachmentRows)) {
+    if (Array.isArray(attachments)) {
+      if (attachments.length) {
+        row[prop] = await Promise.all(
+          attachments.map(attachment => generateAttachmentRow(attachment))
+        )
+      }
+    } else if (Object.keys(row[prop]).length > 0) {
+      row[prop] = await generateAttachmentRow(attachments)
+    }
+  }
+
+  return row
+}
+
+async function generateAttachmentRow(attachment: AutomationAttachment) {
+  const prodAppId = context.getProdAppId()
+
+  async function uploadToS3(
+    extension: string,
+    content: objectStore.StreamTypes
+  ) {
+    const fileName = `${uuid.v4()}${extension}`
+    const s3Key = `${prodAppId}/attachments/${fileName}`
+
+    await objectStore.streamUpload({
+      bucket: objectStore.ObjectStoreBuckets.APPS,
+      stream: content,
+      filename: s3Key,
+    })
+
+    return s3Key
+  }
+
+  async function getSize(s3Key: string) {
+    return (
+      await objectStore.getObjectMetadata(
+        objectStore.ObjectStoreBuckets.APPS,
+        s3Key
+      )
+    ).ContentLength
+  }
+
+  try {
+    const { filename } = attachment
+    const extension = path.extname(filename)
+    const attachmentResult = await objectStore.processAutomationAttachment(
+      attachment
+    )
+
+    let s3Key = ""
+    if (
+      "path" in attachmentResult &&
+      attachmentResult.path.startsWith(`${prodAppId}/attachments/`)
+    ) {
+      s3Key = attachmentResult.path
+    } else {
+      s3Key = await uploadToS3(extension, attachmentResult.content)
+    }
+
+    const size = await getSize(s3Key)
+
+    return {
+      size,
+      name: filename,
+      extension,
+      key: s3Key,
+    }
+  } catch (error) {
+    console.error("Failed to process attachment:", error)
+    throw error
+  }
+}
 export function substituteLoopStep(hbsString: string, substitute: string) {
   let checkForJS = isJSBinding(hbsString)
   let substitutedHbsString = ""
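Note (illustrative, not part of the diff): sendAutomationAttachmentsToStorage above is what lets automation-created rows accept attachment values given as external URLs. A hedged sketch of the flow it implements; the table ID and file URL are invented for illustration:

    const inputRow: Row = {
      tableId: "ta_attachments_example", // illustrative table ID
      file_attachment: [
        { url: "https://example.com/report.pdf", filename: "report.pdf" },
      ],
    }

    // Each attachment is streamed into the app's object storage, then the value
    // is replaced with a stored-file reference before the row is saved.
    const processed = await sendAutomationAttachmentsToStorage(
      inputRow.tableId as string,
      inputRow
    )
    // processed.file_attachment[0] now looks like:
    // { key: "<prodAppId>/attachments/<uuid>.pdf", name: "report.pdf", extension: ".pdf", size: <bytes> }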
@@ -1,5 +1,9 @@
 import { save } from "../../api/controllers/row"
-import { cleanUpRow, getError } from "../automationUtils"
+import {
+  cleanUpRow,
+  getError,
+  sendAutomationAttachmentsToStorage,
+} from "../automationUtils"
 import { buildCtx } from "./utils"
 import {
   AutomationActionStepId,

@@ -89,6 +93,10 @@ export async function run({ inputs, appId, emitter }: AutomationStepInput) {
 
   try {
     inputs.row = await cleanUpRow(inputs.row.tableId, inputs.row)
+    inputs.row = await sendAutomationAttachmentsToStorage(
+      inputs.row.tableId,
+      inputs.row
+    )
     await save(ctx)
     return {
       row: inputs.row,

@@ -108,7 +108,15 @@ export async function run({ inputs, appId, emitter }: AutomationStepInput) {
 
   try {
     if (tableId) {
-      inputs.row = await automationUtils.cleanUpRow(tableId, inputs.row)
+      inputs.row = await automationUtils.cleanUpRow(
+        inputs.row.tableId,
+        inputs.row
+      )
+
+      inputs.row = await automationUtils.sendAutomationAttachmentsToStorage(
+        inputs.row.tableId,
+        inputs.row
+      )
     }
     await rowController.patch(ctx)
     return {
@@ -1,5 +1,18 @@
 import * as setup from "./utilities"
+import { basicTableWithAttachmentField } from "../../tests/utilities/structures"
+import { objectStore } from "@budibase/backend-core"
+
+async function uploadTestFile(filename: string) {
+  let bucket = "testbucket"
+  await objectStore.upload({
+    bucket,
+    filename,
+    body: Buffer.from("test data"),
+  })
+  let presignedUrl = await objectStore.getPresignedUrl(bucket, filename, 60000)
+
+  return presignedUrl
+}

 describe("test the create row action", () => {
   let table: any
   let row: any
@@ -43,4 +56,76 @@ describe("test the create row action", () => {
     const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {})
     expect(res.success).toEqual(false)
   })
+
+  it("should check that an attachment field is sent to storage and parsed", async () => {
+    let attachmentTable = await config.createTable(
+      basicTableWithAttachmentField()
+    )
+
+    let attachmentRow: any = {
+      tableId: attachmentTable._id,
+    }
+
+    let filename = "test1.txt"
+    let presignedUrl = await uploadTestFile(filename)
+    let attachmentObject = [
+      {
+        url: presignedUrl,
+        filename,
+      },
+    ]
+
+    attachmentRow.file_attachment = attachmentObject
+    const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+      row: attachmentRow,
+    })
+
+    expect(res.success).toEqual(true)
+    expect(res.row.file_attachment[0]).toHaveProperty("key")
+    let s3Key = res.row.file_attachment[0].key
+
+    const client = objectStore.ObjectStore(objectStore.ObjectStoreBuckets.APPS)
+
+    const objectData = await client
+      .headObject({ Bucket: objectStore.ObjectStoreBuckets.APPS, Key: s3Key })
+      .promise()
+
+    expect(objectData).toBeDefined()
+    expect(objectData.ContentLength).toBeGreaterThan(0)
+  })
+
+  it("should check that an single attachment field is sent to storage and parsed", async () => {
+    let attachmentTable = await config.createTable(
+      basicTableWithAttachmentField()
+    )
+
+    let attachmentRow: any = {
+      tableId: attachmentTable._id,
+    }
+
+    let filename = "test2.txt"
+    let presignedUrl = await uploadTestFile(filename)
+    let attachmentObject = {
+      url: presignedUrl,
+      filename,
+    }
+
+    attachmentRow.single_file_attachment = attachmentObject
+    const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+      row: attachmentRow,
+    })
+
+    expect(res.success).toEqual(true)
+    expect(res.row.single_file_attachment).toHaveProperty("key")
+    let s3Key = res.row.single_file_attachment.key
+
+    const client = objectStore.ObjectStore(objectStore.ObjectStoreBuckets.APPS)
+
+    const objectData = await client
+      .headObject({ Bucket: objectStore.ObjectStoreBuckets.APPS, Key: s3Key })
+      .promise()
+
+    expect(objectData).toBeDefined()
+    expect(objectData.ContentLength).toBeGreaterThan(0)
+  })
 })
@@ -1,14 +1,4 @@
-// lucene searching not supported in test due to use of PouchDB
-let rows: Row[] = []
-jest.mock("../../sdk/app/rows/search/internalSearch", () => ({
-  fullSearch: jest.fn(() => {
-    return {
-      rows,
-    }
-  }),
-  paginatedSearch: jest.fn(),
-}))
-import { Row, Table } from "@budibase/types"
+import { Table } from "@budibase/types"
 import * as setup from "./utilities"

 const NAME = "Test"
@@ -25,8 +15,8 @@ describe("Test a query step automation", () => {
       description: "original description",
       tableId: table._id,
     }
-    rows.push(await config.createRow(row))
-    rows.push(await config.createRow(row))
+    await config.createRow(row)
+    await config.createRow(row)
   })

   afterAll(setup.afterAll)
@@ -1200,4 +1200,38 @@ describe("postgres integrations", () => {
       expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
     })
   })
+
+  describe("check custom column types", () => {
+    beforeAll(async () => {
+      await rawQuery(
+        rawDatasource,
+        `CREATE TABLE binaryTable (
+          id BYTEA PRIMARY KEY,
+          column1 TEXT,
+          column2 INT
+        );
+        `
+      )
+    })
+
+    it("should handle binary columns", async () => {
+      const response = await makeRequest(
+        "post",
+        `/api/datasources/${datasource._id}/schema`
+      )
+      expect(response.body).toBeDefined()
+      expect(response.body.datasource.entities).toBeDefined()
+      const table = response.body.datasource.entities["binarytable"]
+      expect(table).toBeDefined()
+      expect(table.schema.id.externalType).toBe("bytea")
+      const row = await config.api.row.save(table._id, {
+        id: "1111",
+        column1: "hello",
+        column2: 222,
+      })
+      expect(row._id).toBeDefined()
+      const decoded = decodeURIComponent(row._id!).replace(/'/g, '"')
+      expect(JSON.parse(decoded)[0]).toBe("1111")
+    })
+  })
 })
@@ -150,6 +150,22 @@ function getTableName(table?: Table): string | undefined {
   }
 }

+function convertBooleans(query: SqlQuery | SqlQuery[]): SqlQuery | SqlQuery[] {
+  if (Array.isArray(query)) {
+    return query.map((q: SqlQuery) => convertBooleans(q) as SqlQuery)
+  } else {
+    if (query.bindings) {
+      query.bindings = query.bindings.map(binding => {
+        if (typeof binding === "boolean") {
+          return binding ? 1 : 0
+        }
+        return binding
+      })
+    }
+  }
+  return query
+}
+
 class InternalBuilder {
   private readonly client: string

@@ -654,7 +670,11 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
     if (opts?.disableBindings) {
       return { sql: query.toString() }
     } else {
-      return getNativeSql(query)
+      let native = getNativeSql(query)
+      if (sqlClient === SqlClient.SQL_LITE) {
+        native = convertBooleans(native)
+      }
+      return native
     }
   }
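
A small usage sketch of the boolean conversion introduced above for the SQLite client; the query literal is invented for the example and the shape is reduced to `sql` plus `bindings`, so this is illustrative rather than the real call site.

// Illustrative only: SQLite has no boolean column type, so boolean bindings
// are rewritten to 1/0 before the statement reaches the driver.
const before = {
  sql: "select * from rows where archived = ? and deleted = ?",
  bindings: [true, false] as any[],
}

const after = {
  ...before,
  bindings: before.bindings.map(b => (typeof b === "boolean" ? (b ? 1 : 0) : b)),
}
// after.bindings is now [1, 0]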
@@ -26,13 +26,13 @@ import { parse } from "content-disposition"
 import path from "path"
 import { Builder as XmlBuilder } from "xml2js"

-const BodyTypes = {
-  NONE: "none",
-  FORM_DATA: "form",
-  XML: "xml",
-  ENCODED: "encoded",
-  JSON: "json",
-  TEXT: "text",
+enum BodyType {
+  NONE = "none",
+  FORM_DATA = "form",
+  XML = "xml",
+  ENCODED = "encoded",
+  JSON = "json",
+  TEXT = "text",
 }

 const coreFields = {
@@ -54,7 +54,7 @@ const coreFields = {
   },
   bodyType: {
     type: DatasourceFieldType.STRING,
-    enum: Object.values(BodyTypes),
+    enum: Object.values(BodyType),
   },
   pagination: {
     type: DatasourceFieldType.OBJECT,
@@ -131,25 +131,28 @@ class RestIntegration implements IntegrationBase {
   }

   async parseResponse(response: any, pagination: PaginationConfig | null) {
-    let data, raw, headers, filename
+    let data: any[] | string | undefined,
+      raw: string | undefined,
+      headers: Record<string, string> = {},
+      filename: string | undefined

     const contentType = response.headers.get("content-type") || ""
     const contentDisposition = response.headers.get("content-disposition") || ""
     if (
+      contentDisposition.includes("filename") ||
       contentDisposition.includes("attachment") ||
       contentDisposition.includes("form-data")
     ) {
       filename =
         path.basename(parse(contentDisposition).parameters?.filename) || ""
     }

     try {
       if (filename) {
         return handleFileResponse(response, filename, this.startTimeMs)
       } else {
         if (response.status === 204) {
           data = []
-          raw = []
+          raw = ""
         } else if (contentType.includes("application/json")) {
           data = await response.json()
           raw = JSON.stringify(data)
@@ -162,16 +165,18 @@
           raw = xmlResponse.rawXml
         } else {
           data = await response.text()
-          raw = data
+          raw = data as string
         }
       }
     } catch (err) {
       throw `Failed to parse response body: ${err}`
     }

-    const size = formatBytes(
-      response.headers.get("content-length") || Buffer.byteLength(raw, "utf8")
-    )
+    let contentLength: string = response.headers.get("content-length")
+    if (!contentLength && raw) {
+      contentLength = Buffer.byteLength(raw, "utf8").toString()
+    }
+    const size = formatBytes(contentLength || "0")
     const time = `${Math.round(performance.now() - this.startTimeMs)}ms`
     headers = response.headers.raw()
     for (let [key, value] of Object.entries(headers)) {
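
As an aside, the content-length fallback above boils down to the following; the sample body is invented and `formatBytes` is the existing helper, so the sketch stops at producing the string it is fed.

// Illustrative only: prefer the content-length header, otherwise measure the
// parsed raw body in bytes.
function sizeInput(headerValue: string | null, raw?: string): string {
  let contentLength = headerValue || ""
  if (!contentLength && raw) {
    contentLength = Buffer.byteLength(raw, "utf8").toString()
  }
  return contentLength || "0"
}

// sizeInput(null, '{"ok":true}') === "11"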
@@ -255,7 +260,7 @@
     if (!input.headers) {
       input.headers = {}
     }
-    if (bodyType === BodyTypes.NONE) {
+    if (bodyType === BodyType.NONE) {
       return input
     }
     let error,
@@ -283,11 +288,11 @@
     }

     switch (bodyType) {
-      case BodyTypes.TEXT:
+      case BodyType.TEXT:
         // content type defaults to plaintext
         input.body = string
         break
-      case BodyTypes.ENCODED: {
+      case BodyType.ENCODED: {
         const params = new URLSearchParams()
         for (let [key, value] of Object.entries(object)) {
           params.append(key, value as string)
@@ -298,7 +303,7 @@
         input.body = params
         break
       }
-      case BodyTypes.FORM_DATA: {
+      case BodyType.FORM_DATA: {
         const form = new FormData()
         for (let [key, value] of Object.entries(object)) {
           form.append(key, value)
@@ -309,14 +314,14 @@
         input.body = form
         break
       }
-      case BodyTypes.XML:
+      case BodyType.XML:
         if (object != null && Object.keys(object).length) {
           string = new XmlBuilder().buildObject(object)
         }
         input.body = string
         input.headers["Content-Type"] = "application/xml"
         break
-      case BodyTypes.JSON:
+      case BodyType.JSON:
         // if JSON error, throw it
         if (error) {
           throw "Invalid JSON for request body"
@@ -1,6 +1,3 @@
-import fs from "fs"
-import { join } from "path"
-
 const response = (body: any, extra?: any) => () => ({
   promise: () => body,
   ...extra,
@@ -62,9 +59,7 @@ class S3 {
       Body: "",
     },
     {
-      createReadStream: jest
-        .fn()
-        .mockReturnValue(fs.createReadStream(join(__dirname, "aws-sdk.ts"))),
+      createReadStream: jest.fn().mockReturnValue("stream"),
     }
   )
 )
@@ -1,7 +1,6 @@
+jest.mock("aws-sdk", () => require("./aws-sdk.mock"))
 import { default as DynamoDBIntegration } from "../dynamodb"

-jest.mock("aws-sdk")
-
 class TestConfiguration {
   integration: any

@@ -28,7 +28,6 @@ jest.mock("uuid", () => ({ v4: () => "00000000-0000-0000-0000-000000000000" }))
 import { default as RestIntegration } from "../rest"
 import { RestAuthType } from "@budibase/types"
 import fetch from "node-fetch"
-import { objectStoreTestProviders } from "@budibase/backend-core/tests"
 import { Readable } from "stream"

 const FormData = require("form-data")
@@ -246,13 +245,13 @@ describe("REST Integration", () => {
     expect(output.extra.headers["content-type"]).toEqual("application/xml")
   })

-  test.each(contentTypes)(
+  test.each([...contentTypes, undefined])(
     "should not throw an error on 204 no content",
     async contentType => {
       const input = buildInput(undefined, null, contentType, 204)
       const output = await config.integration.parseResponse(input)
       expect(output.data).toEqual([])
-      expect(output.extra.raw).toEqual([])
+      expect(output.extra.raw).toEqual("")
       expect(output.info.code).toEqual(204)
       expect(output.extra.headers["content-type"]).toEqual(contentType)
     }
@@ -627,15 +626,6 @@
   })

   describe("File Handling", () => {
-    beforeAll(async () => {
-      jest.unmock("aws-sdk")
-      await objectStoreTestProviders.minio.start()
-    })
-
-    afterAll(async () => {
-      await objectStoreTestProviders.minio.stop()
-    })
-
     it("uploads file to object store and returns signed URL", async () => {
       const responseData = Buffer.from("teest file contnt")
       const filename = "test.tar.gz"
@@ -1,7 +1,6 @@
+jest.mock("aws-sdk", () => require("./aws-sdk.mock"))
 import { default as S3Integration } from "../s3"

-jest.mock("aws-sdk")
-
 class TestConfiguration {
   integration: any

|
@ -192,6 +192,11 @@ export function generateRowIdField(keyProps: any[] = []) {
|
||||||
if (!Array.isArray(keyProps)) {
|
if (!Array.isArray(keyProps)) {
|
||||||
keyProps = [keyProps]
|
keyProps = [keyProps]
|
||||||
}
|
}
|
||||||
|
for (let index in keyProps) {
|
||||||
|
if (keyProps[index] instanceof Buffer) {
|
||||||
|
keyProps[index] = keyProps[index].toString()
|
||||||
|
}
|
||||||
|
}
|
||||||
// this conserves order and types
|
// this conserves order and types
|
||||||
// we have to swap the double quotes to single quotes for use in HBS statements
|
// we have to swap the double quotes to single quotes for use in HBS statements
|
||||||
// when using the literal helper the double quotes can break things
|
// when using the literal helper the double quotes can break things
|
||||||
|
|
|
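
For context, the Buffer handling added to `generateRowIdField` matters once a binary (e.g. bytea) primary key is folded into a row ID. The sketch below paraphrases how such a key part ends up in the ID and is not the exact implementation.

// Illustrative only: binary key parts are stringified so JSON.stringify never
// sees a raw Buffer, then quotes are swapped for HBS safety and URI-encoded.
function exampleRowId(keyProps: any[]): string {
  for (const index in keyProps) {
    if (keyProps[index] instanceof Buffer) {
      keyProps[index] = keyProps[index].toString()
    }
  }
  return encodeURIComponent(JSON.stringify(keyProps).replace(/"/g, "'"))
}

// exampleRowId([Buffer.from("1111")]) === "%5B'1111'%5D", which the
// binary-column test above reverses with decodeURIComponent + JSON.parse.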
@@ -131,11 +131,6 @@ export async function search(
     },
     relationships,
   }
-  // make sure only rows returned
-  request.filters!.equal = {
-    ...request.filters?.equal,
-    type: "row",
-  }

   if (params.sort) {
     const sortField = table.schema[params.sort]
@@ -169,8 +164,8 @@
     throw new Error("SQS cannot currently handle multiple queries")
   }

-  let sql = query.sql,
-    bindings = query.bindings
+  let sql = query.sql
+  let bindings = query.bindings

   // quick hack for docIds
   sql = sql.replace(/`doc1`.`rowId`/g, "`doc1.rowId`")
@@ -1,117 +0,0 @@
-import {
-  FieldType,
-  Row,
-  Table,
-  RowSearchParams,
-  INTERNAL_TABLE_SOURCE_ID,
-  TableSourceType,
-} from "@budibase/types"
-import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
-import { search } from "../internal"
-import {
-  expectAnyInternalColsAttributes,
-  generator,
-} from "@budibase/backend-core/tests"
-
-describe("internal", () => {
-  const config = new TestConfiguration()
-
-  const tableData: Table = {
-    name: generator.word(),
-    type: "table",
-    sourceId: INTERNAL_TABLE_SOURCE_ID,
-    sourceType: TableSourceType.INTERNAL,
-    schema: {
-      name: {
-        name: "name",
-        type: FieldType.STRING,
-        constraints: {
-          type: FieldType.STRING,
-        },
-      },
-      surname: {
-        name: "surname",
-        type: FieldType.STRING,
-        constraints: {
-          type: FieldType.STRING,
-        },
-      },
-      age: {
-        name: "age",
-        type: FieldType.NUMBER,
-        constraints: {
-          type: FieldType.NUMBER,
-        },
-      },
-      address: {
-        name: "address",
-        type: FieldType.STRING,
-        constraints: {
-          type: FieldType.STRING,
-        },
-      },
-    },
-  }
-
-  beforeAll(async () => {
-    await config.init()
-  })
-
-  describe("search", () => {
-    const rows: Row[] = []
-    beforeAll(async () => {
-      await config.createTable(tableData)
-      for (let i = 0; i < 10; i++) {
-        rows.push(
-          await config.createRow({
-            name: generator.first(),
-            surname: generator.last(),
-            age: generator.age(),
-            address: generator.address(),
-          })
-        )
-      }
-    })
-
-    it("default search returns all the data", async () => {
-      await config.doInContext(config.appId, async () => {
-        const tableId = config.table!._id!
-
-        const searchParams: RowSearchParams = {
-          tableId,
-          query: {},
-        }
-        const result = await search(searchParams, config.table!)
-
-        expect(result.rows).toHaveLength(10)
-        expect(result.rows).toEqual(
-          expect.arrayContaining(rows.map(r => expect.objectContaining(r)))
-        )
-      })
-    })
-
-    it("querying by fields will always return data attribute columns", async () => {
-      await config.doInContext(config.appId, async () => {
-        const tableId = config.table!._id!
-
-        const searchParams: RowSearchParams = {
-          tableId,
-          query: {},
-          fields: ["name", "age"],
-        }
-        const result = await search(searchParams, config.table!)
-
-        expect(result.rows).toHaveLength(10)
-        expect(result.rows).toEqual(
-          expect.arrayContaining(
-            rows.map(r => ({
-              ...expectAnyInternalColsAttributes,
-              name: r.name,
-              age: r.age,
-            }))
-          )
-        )
-      })
-    })
-  })
-})
@@ -1,228 +0,0 @@
-import tk from "timekeeper"
-import * as internalSdk from "../internal"
-
-import { generator } from "@budibase/backend-core/tests"
-import {
-  INTERNAL_TABLE_SOURCE_ID,
-  TableSourceType,
-  FieldType,
-  Table,
-  AutoFieldSubType,
-  AutoColumnFieldMetadata,
-} from "@budibase/types"
-
-import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
-
-tk.freeze(Date.now())
-
-describe("sdk >> rows >> internal", () => {
-  const config = new TestConfiguration()
-
-  beforeAll(async () => {
-    await config.init()
-  })
-
-  function makeRow() {
-    return {
-      name: generator.first(),
-      surname: generator.last(),
-      age: generator.age(),
-      address: generator.address(),
-    }
-  }
-
-  describe("save", () => {
-    const tableData: Table = {
-      name: generator.word(),
-      type: "table",
-      sourceId: INTERNAL_TABLE_SOURCE_ID,
-      sourceType: TableSourceType.INTERNAL,
-      schema: {
-        name: {
-          name: "name",
-          type: FieldType.STRING,
-          constraints: {
-            type: FieldType.STRING,
-          },
-        },
-        surname: {
-          name: "surname",
-          type: FieldType.STRING,
-          constraints: {
-            type: FieldType.STRING,
-          },
-        },
-        age: {
-          name: "age",
-          type: FieldType.NUMBER,
-          constraints: {
-            type: FieldType.NUMBER,
-          },
-        },
-        address: {
-          name: "address",
-          type: FieldType.STRING,
-          constraints: {
-            type: FieldType.STRING,
-          },
-        },
-      },
-    }
-
-    beforeEach(() => {
-      jest.clearAllMocks()
-    })
-
-    it("save will persist the row properly", async () => {
-      const table = await config.createTable(tableData)
-      const row = makeRow()
-
-      await config.doInContext(config.appId, async () => {
-        const response = await internalSdk.save(
-          table._id!,
-          row,
-          config.getUser()._id
-        )
-
-        expect(response).toEqual({
-          table,
-          row: {
-            ...row,
-            type: "row",
-            _rev: expect.stringMatching("1-.*"),
-          },
-          squashed: {
-            ...row,
-            type: "row",
-            _rev: expect.stringMatching("1-.*"),
-          },
-        })
-
-        const persistedRow = await config.api.row.get(
-          table._id!,
-          response.row._id!
-        )
-        expect(persistedRow).toEqual({
-          ...row,
-          type: "row",
-          _rev: expect.stringMatching("1-.*"),
-          createdAt: expect.any(String),
-          updatedAt: expect.any(String),
-        })
-      })
-    })
-
-    it("auto ids will update when creating new rows", async () => {
-      const table = await config.createTable({
-        ...tableData,
-        schema: {
-          ...tableData.schema,
-          id: {
-            name: "id",
-            type: FieldType.AUTO,
-            subtype: AutoFieldSubType.AUTO_ID,
-            autocolumn: true,
-            lastID: 0,
-          },
-        },
-      })
-      const row = makeRow()
-
-      await config.doInContext(config.appId, async () => {
-        const response = await internalSdk.save(
-          table._id!,
-          row,
-          config.getUser()._id
-        )
-
-        expect(response).toEqual({
-          table: {
-            ...table,
-            schema: {
-              ...table.schema,
-              id: {
-                ...table.schema.id,
-                lastID: 1,
-              },
-            },
-          },
-          row: {
-            ...row,
-            id: 1,
-            type: "row",
-            _rev: expect.stringMatching("1-.*"),
-          },
-          squashed: {
-            ...row,
-            id: 1,
-            type: "row",
-            _rev: expect.stringMatching("1-.*"),
-          },
-        })
-
-        const persistedRow = await config.api.row.get(
-          table._id!,
-          response.row._id!
-        )
-        expect(persistedRow).toEqual({
-          ...row,
-          type: "row",
-          id: 1,
-          _rev: expect.stringMatching("1-.*"),
-          createdAt: expect.any(String),
-          updatedAt: expect.any(String),
-        })
-      })
-    })
-
-    it("auto ids will update when creating new rows in parallel", async () => {
-      function makeRows(count: number) {
-        return Array.from({ length: count }, () => makeRow())
-      }
-
-      const table = await config.createTable({
-        ...tableData,
-        schema: {
-          ...tableData.schema,
-          id: {
-            name: "id",
-            type: FieldType.AUTO,
-            subtype: AutoFieldSubType.AUTO_ID,
-            autocolumn: true,
-            lastID: 0,
-          },
-        },
-      })
-
-      await config.doInContext(config.appId, async () => {
-        for (const row of makeRows(5)) {
-          await internalSdk.save(table._id!, row, config.getUser()._id)
-        }
-        await Promise.all(
-          makeRows(10).map(row =>
-            internalSdk.save(table._id!, row, config.getUser()._id)
-          )
-        )
-        for (const row of makeRows(5)) {
-          await internalSdk.save(table._id!, row, config.getUser()._id)
-        }
-      })
-
-      const persistedRows = await config.getRows(table._id!)
-      expect(persistedRows).toHaveLength(20)
-      expect(persistedRows).toEqual(
-        expect.arrayContaining(
-          Array.from({ length: 20 }).map((_, i) =>
-            expect.objectContaining({ id: i + 1 })
-          )
-        )
-      )
-
-      const persistedTable = await config.getTable(table._id)
-      expect((table.schema.id as AutoColumnFieldMetadata).lastID).toBe(0)
-      expect((persistedTable.schema.id as AutoColumnFieldMetadata).lastID).toBe(
-        20
-      )
-    })
-  })
-})
@@ -6,6 +6,7 @@ import {
   Table,
   TableRequest,
   ViewV2,
+  AutoFieldSubType,
 } from "@budibase/types"
 import { context } from "@budibase/backend-core"
 import { buildExternalTableId } from "../../../../integrations/utils"
@@ -29,6 +30,52 @@ import { populateExternalTableSchemas } from "../validation"
 import datasourceSdk from "../../datasources"
 import * as viewSdk from "../../views"

+const DEFAULT_PRIMARY_COLUMN = "id"
+
+function noPrimaryKey(table: Table) {
+  return table.primary == null || table.primary.length === 0
+}
+
+function validate(table: Table, oldTable?: Table) {
+  if (
+    !oldTable &&
+    table.schema[DEFAULT_PRIMARY_COLUMN] &&
+    noPrimaryKey(table)
+  ) {
+    throw new Error(
+      "External tables with no `primary` column set will define an `id` column, but we found an `id` column in the supplied schema. Either set a `primary` column or remove the `id` column."
+    )
+  }
+
+  if (hasTypeChanged(table, oldTable)) {
+    throw new Error("A column type has changed.")
+  }
+
+  const autoSubTypes = Object.values(AutoFieldSubType)
+  // check for auto columns, they are not allowed
+  for (let [key, column] of Object.entries(table.schema)) {
+    // this column is a special case, do not validate it
+    if (key === DEFAULT_PRIMARY_COLUMN) {
+      continue
+    }
+    // the auto-column type should never be used
+    if (column.type === FieldType.AUTO) {
+      throw new Error(
+        `Column "${key}" has type "${FieldType.AUTO}" - this is not supported.`
+      )
+    }
+
+    if (
+      column.subtype &&
+      autoSubTypes.includes(column.subtype as AutoFieldSubType)
+    ) {
+      throw new Error(
+        `Column "${key}" has subtype "${column.subtype}" - this is not supported.`
+      )
+    }
+  }
+}
+
 export async function save(
   datasourceId: string,
   update: Table,
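
To make the new `validate` helper concrete, here is a hypothetical external-table payload it would reject; the string literals stand in for the `FieldType`/`AutoFieldSubType` enum members and the example is not taken from the codebase.

// Illustrative only: auto columns are not allowed on external tables.
const badTable: any = {
  name: "orders",
  primary: ["order_id"],
  schema: {
    order_id: { name: "order_id", type: "number" },
    seq: { name: "seq", type: "auto", autocolumn: true },
  },
}
// validate(badTable) would throw:
//   Column "seq" has type "auto" - this is not supported.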
@@ -47,28 +94,18 @@
     oldTable = await getTable(tableId)
   }

-  if (
-    !oldTable &&
-    (tableToSave.primary == null || tableToSave.primary.length === 0)
-  ) {
-    if (tableToSave.schema.id) {
-      throw new Error(
-        "External tables with no `primary` column set will define an `id` column, but we found an `id` column in the supplied schema. Either set a `primary` column or remove the `id` column."
-      )
-    }
-
-    tableToSave.primary = ["id"]
-    tableToSave.schema.id = {
+  // this will throw an error if something is wrong
+  validate(tableToSave, oldTable)
+
+  if (!oldTable && noPrimaryKey(tableToSave)) {
+    tableToSave.primary = [DEFAULT_PRIMARY_COLUMN]
+    tableToSave.schema[DEFAULT_PRIMARY_COLUMN] = {
       type: FieldType.NUMBER,
       autocolumn: true,
-      name: "id",
+      name: DEFAULT_PRIMARY_COLUMN,
     }
   }

-  if (hasTypeChanged(tableToSave, oldTable)) {
-    throw new Error("A column type has changed.")
-  }
-
   for (let view in tableToSave.views) {
     const tableView = tableToSave.views[view]
     if (!tableView || !viewSdk.isV2(tableView)) continue
@@ -33,7 +33,7 @@ const FieldTypeMap: Record<FieldType, SQLiteType> = {
   [FieldType.LONGFORM]: SQLiteType.TEXT,
   [FieldType.NUMBER]: SQLiteType.REAL,
   [FieldType.STRING]: SQLiteType.TEXT,
-  [FieldType.AUTO]: SQLiteType.TEXT,
+  [FieldType.AUTO]: SQLiteType.REAL,
   [FieldType.OPTIONS]: SQLiteType.TEXT,
   [FieldType.JSON]: SQLiteType.BLOB,
   [FieldType.INTERNAL]: SQLiteType.BLOB,
Some files were not shown because too many files have changed in this diff.