Add support for accessing unlimited levels of nested JSON arrays
commit b1cc72c54a
parent 7146b994ff
@@ -15,7 +15,10 @@ import {
   encodeJSBinding,
 } from "@budibase/string-templates"
 import { TableNames } from "../constants"
-import { convertJSONSchemaToTableSchema } from "./jsonUtils"
+import {
+  convertJSONSchemaToTableSchema,
+  getJSONArrayDatasourceSchema,
+} from "./jsonUtils"
 
 // Regex to match all instances of template strings
 const CAPTURE_VAR_INSIDE_TEMPLATE = /{{([^}]+)}}/g
@@ -218,7 +221,9 @@ const getProviderContextBindings = (asset, dataProviders) => {
     Object.keys(schema).forEach(fieldKey => {
       const fieldSchema = schema[fieldKey]
       if (fieldSchema.type === "json") {
-        const jsonSchema = convertJSONSchemaToTableSchema(fieldSchema, true)
+        const jsonSchema = convertJSONSchemaToTableSchema(fieldSchema, {
+          squashObjects: true,
+        })
         Object.keys(jsonSchema).forEach(jsonKey => {
           jsonAdditions[`${fieldKey}.${jsonKey}`] = {
             type: jsonSchema[jsonKey].type,
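For illustration, here is what that loop produces for a hypothetical JSON field (the field and key names are invented, not part of this commit):

// Hypothetical: a JSON field "customer" whose squashed schema yields
//   { "address.city": { type: "string" }, "orders": { type: "jsonarray" } }
// The loop above registers each squashed key under a dotted path:
//   jsonAdditions["customer.address.city"] = { type: "string", ... }
//   jsonAdditions["customer.orders"] = { type: "jsonarray", ... }
// so nested JSON values and nested arrays bind like ordinary table columns.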
@@ -419,19 +424,8 @@ export const getSchemaForDatasource = (asset, datasource, isForm = false) => {
   // "jsonarray" datasources are arrays inside JSON fields
   else if (type === "jsonarray") {
     table = tables.find(table => table._id === datasource.tableId)
-
-    // We parse the label of the datasource to work out where we are inside
-    // the structure. We can use this to know which part of the schema
-    // is available underneath our current position.
-    const keysToSchema = datasource.label.split(".").slice(2)
-    let jsonSchema = table?.schema
-    for (let i = 0; i < keysToSchema.length; i++) {
-      jsonSchema = jsonSchema[keysToSchema[i]].schema
-    }
-
-    // We need to convert the JSON schema into a more typical looking table
-    // schema so that it works with the rest of the platform
-    schema = convertJSONSchemaToTableSchema(jsonSchema, true)
+    let tableSchema = table?.schema
+    schema = getJSONArrayDatasourceSchema(tableSchema, datasource)
   }
 
   // Otherwise we assume we're targeting an internal table or a plus
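As a concrete (entirely hypothetical) example of what this branch must now resolve, consider a table schema where arrays nest inside a JSON field at several levels; a "jsonarray" datasource can target either array, at any depth:

// A hypothetical table schema: the JSON field "customer" nests an
// "orders" array, which itself contains a further "items" array.
const tableSchema = {
  customer: {
    type: "json",
    schema: {
      orders: {
        type: "array",
        schema: {
          total: { type: "number" },
          items: {
            type: "array",
            schema: { sku: { type: "string" } },
          },
        },
      },
    },
  },
}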
@@ -1,10 +1,59 @@
-export const convertJSONSchemaToTableSchema = (
-  jsonSchema,
-  squashObjects = false
-) => {
+/**
+ * Gets the schema for a datasource which is targeting a JSON array, including
+ * nested JSON arrays. The returned schema is a squashed, table-like schema
+ * which is fully compatible with the rest of the platform.
+ * @param tableSchema the full schema for the table this JSON field is in
+ * @param datasource the datasource configuration
+ */
+export const getJSONArrayDatasourceSchema = (tableSchema, datasource) => {
+  let jsonSchema = tableSchema
+  let keysToSchema = []
+
+  // If we are already deep inside a JSON field then we need to account
+  // for the keys that brought us here, so we can get the schema for the
+  // depth we're actually at
+  if (datasource.prefixKeys) {
+    keysToSchema = datasource.prefixKeys.concat(["schema"])
+  }
+
+  // We parse the label of the datasource to work out where we are inside
+  // the structure. We can use this to know which part of the schema
+  // is available underneath our current position.
+  keysToSchema = keysToSchema.concat(datasource.label.split(".").slice(2))
+
+  // Follow the JSON key path until we reach the schema for the level
+  // we are at
+  for (let i = 0; i < keysToSchema.length; i++) {
+    jsonSchema = jsonSchema?.[keysToSchema[i]]
+    if (jsonSchema?.schema) {
+      jsonSchema = jsonSchema.schema
+    }
+  }
+
+  // We need to convert the JSON schema into a more typical looking table
+  // schema so that it works with the rest of the platform
+  return convertJSONSchemaToTableSchema(jsonSchema, {
+    squashObjects: true,
+    prefixKeys: keysToSchema,
+  })
+}
+
+/**
+ * Converts a JSON field schema (or sub-schema of a nested field) into a schema
+ * that looks like a typical table schema.
+ * @param jsonSchema the JSON field schema or sub-schema
+ * @param options
+ */
+export const convertJSONSchemaToTableSchema = (jsonSchema, options) => {
   if (!jsonSchema) {
     return null
   }
 
+  // Add default options
+  options = { squashObjects: false, prefixKeys: null, ...options }
+
   // Immediately strip the wrapper schema for objects, or wrap shallow values in
   // a fake "value" schema
   if (jsonSchema.schema) {
     jsonSchema = jsonSchema.schema
   } else {
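To make the traversal concrete, here is a hedged walk-through against the hypothetical "customer" schema sketched earlier (the key path is assumed; the exact label format depends on the caller):

// Suppose keysToSchema resolves to ["customer", "orders"]. The loop walks:
//   tableSchema.customer  -> has a .schema wrapper, so unwrap it
//   .orders               -> has a .schema wrapper, so unwrap it
// leaving the schema of a single order:
//   { total: { type: "number" }, items: { type: "array", schema: {...} } }
// convertJSONSchemaToTableSchema then flattens this, so "total" stays a
// "number" column while "items" surfaces as a "jsonarray" column that a
// further nested datasource can target, carrying keysToSchema along as
// its prefixKeys.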
@@ -12,34 +61,60 @@ export const convertJSONSchemaToTableSchema = (
       value: jsonSchema,
     }
   }
-  const keys = extractJSONSchemaKeys(jsonSchema, squashObjects)
+
+  // Extract all deep keys from the schema
+  const keys = extractJSONSchemaKeys(jsonSchema, options.squashObjects)
+
+  // Form a full schema from all the deep schema keys
   let schema = {}
   keys.forEach(({ key, type }) => {
-    schema[key] = { type, name: key }
+    schema[key] = { type, name: key, prefixKeys: options.prefixKeys }
   })
   return schema
 }
 
+/**
+ * Recursively builds paths to all leaf fields in a JSON field schema structure,
+ * stopping when leaf nodes or arrays are reached.
+ * @param jsonSchema the JSON field schema or sub-schema
+ * @param squashObjects whether to recurse into objects or not
+ */
 const extractJSONSchemaKeys = (jsonSchema, squashObjects = false) => {
   if (!jsonSchema || !Object.keys(jsonSchema).length) {
     return []
   }
+
+  // Iterate through every schema key
   let keys = []
   Object.keys(jsonSchema).forEach(key => {
     const type = jsonSchema[key].type
+
+    // If we encounter an object, then only go deeper if we want to squash
+    // object paths
     if (type === "json" && squashObjects) {
+      // Find all keys within this object's schema
       const childKeys = extractJSONSchemaKeys(
         jsonSchema[key].schema,
         squashObjects
       )
+
+      // Append child paths onto the current path to build the full path
       keys = keys.concat(
         childKeys.map(childKey => ({
           key: `${key}.${childKey.key}`,
           type: childKey.type,
         }))
       )
-    } else if (type !== "array") {
-      keys.push({ key, type })
     }
+
+    // Otherwise add this as a leaf node.
+    // We transform array types from "array" into "jsonarray" here to avoid
+    // confusion with the existing "array" type that represents a multi-select.
+    else {
+      keys.push({
+        key,
+        type: type === "array" ? "jsonarray" : type,
+      })
+    }
   })
   return keys
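A small illustration of the extraction rules (sample schema invented; extractJSONSchemaKeys is module-private, so this is conceptual rather than a real external call):

const sample = {
  name: { type: "string" },
  address: {
    type: "json",
    schema: { city: { type: "string" } },
  },
  orders: { type: "array", schema: { total: { type: "number" } } },
}

extractJSONSchemaKeys(sample, true)
// => [
//      { key: "name", type: "string" },          // plain leaf, kept as-is
//      { key: "address.city", type: "string" },  // object squashed into a path
//      { key: "orders", type: "jsonarray" },     // array re-typed, no recursion
//    ]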
@@ -105,53 +105,22 @@
         value: `{{ literal ${runtimeBinding} }}`,
       }
     })
-  $: jsonArrays = findJSONArrays(bindings)
-
-  const findJSONArrays = bindings => {
-    let arrays = []
-    const jsonBindings = bindings.filter(x => x.fieldSchema?.type === "json")
-    jsonBindings.forEach(binding => {
-      const {
-        providerId,
-        readableBinding,
-        runtimeBinding,
-        fieldSchema,
-        tableId,
-      } = binding
-      const { name, type } = fieldSchema
-      const schemaArrays = findArraysInSchema(fieldSchema).map(path => {
-        const safePath = path.split(".").map(makePropSafe).join(".")
+  $: jsonArrays = bindings
+    .filter(x => x.fieldSchema?.type === "jsonarray")
+    .map(binding => {
+      const { providerId, readableBinding, runtimeBinding, tableId } = binding
+      const { name, type, prefixKeys } = binding.fieldSchema
       return {
         providerId,
-        label: `${readableBinding}.${path}`,
+        label: readableBinding,
         fieldName: name,
         fieldType: type,
         tableId,
+        prefixKeys,
         type: "jsonarray",
-        value: `{{ literal ${runtimeBinding}.${safePath} }}`,
+        value: `{{ literal ${runtimeBinding} }}`,
       }
     })
-      arrays = arrays.concat(schemaArrays)
-    })
-
-    return arrays
-  }
-
-  const findArraysInSchema = (schema, path) => {
-    if (!schema?.schema || !Object.keys(schema.schema).length) {
-      return []
-    }
-    if (schema.type === "array") {
-      return [path]
-    }
-    let arrays = []
-    Object.keys(schema.schema).forEach(key => {
-      const newPath = `${path ? `${path}.` : ""}${key}`
-      const childArrays = findArraysInSchema(schema.schema[key], newPath)
-      arrays = arrays.concat(childArrays)
-    })
-    return arrays
-  }
-
   const handleSelected = selected => {
     dispatch("change", selected)
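For reference, one entry of the reworked jsonArrays list might look like this (all IDs, names, and the runtime binding shape are hypothetical; the schema layer now does the heavy lifting, so the binding passes through mostly unchanged):

const exampleArrayBinding = {
  providerId: "c73fd21a",                       // hypothetical component ID
  label: "Data Provider.Rows.customer.orders",  // readable binding, used as-is
  fieldName: "customer.orders",                 // squashed field path
  fieldType: "jsonarray",
  tableId: "ta_customers",
  prefixKeys: null,       // null for a top-level JSON field, set at deeper levels
  type: "jsonarray",
  value: "{{ literal [c73fd21a].[customer.orders] }}",  // hypothetical binding
}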
@@ -4,7 +4,7 @@ import { fetchViewData } from "./views"
 import { fetchRelationshipData } from "./relationships"
 import { FieldTypes } from "../constants"
 import { executeQuery, fetchQueryDefinition } from "./queries"
-import { convertJSONSchemaToTableSchema } from "builder/src/builderStore/jsonUtils"
+import { getJSONArrayDatasourceSchema } from "builder/src/builderStore/jsonUtils"
 
 /**
  * Fetches all rows for a particular Budibase data source.
@@ -80,12 +80,7 @@ export const fetchDatasourceSchema = async dataSource => {
   // We can then extract their schema as a subset of the table schema.
   if (type === "jsonarray") {
     const table = await fetchTableDefinition(dataSource.tableId)
-    const keysToSchema = dataSource.label.split(".").slice(2)
-    let schema = table?.schema
-    for (let i = 0; i < keysToSchema.length; i++) {
-      schema = schema[keysToSchema[i]].schema
-    }
-    return convertJSONSchemaToTableSchema(schema, true)
+    return getJSONArrayDatasourceSchema(table?.schema, dataSource)
   }
 
   // Tables, views and links can be fetched by table ID
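A hedged usage sketch of the client-side flow (the datasource shape and values are assumed for illustration; fetchTableDefinition and getJSONArrayDatasourceSchema are the helpers referenced in the diff):

// Hypothetical "jsonarray" datasource saved against a component:
const dataSource = {
  type: "jsonarray",
  tableId: "ta_customers",                      // hypothetical table ID
  label: "Data Provider.Rows.customer.orders",  // parsed for the key path
  prefixKeys: null,                             // set when nested a level deeper
}

// The schema fetch now delegates all key-path walking to the shared helper,
// so the nesting depth of the target array no longer matters here:
const table = await fetchTableDefinition(dataSource.tableId)
const schema = getJSONArrayDatasourceSchema(table?.schema, dataSource)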