Fix array type missing from query schema selector (#12772)
* Tidy MongoDB aggregation pipeline view
* Remove unused code
* WIP
* Add bindings for bindings drawer
* Is not external table if it's a query
* Add QueryArrayFetch
* Bug fix
* JavaScript is the worst
* Refactor
* Add array label to query schema
* Remove console log
* Type fix
* Don't include Array in SchemaTypeOptions, but show label
* Fix bindings
* Refactor
* Rename isObject to hasSchema
* WIP
* Typing WIP
* Type not Types
* Unused import
* Type fix
* Handle json array subtype
* Support queryarray datasource type
* Refactor
* yarn lock
* Update account portal

Co-authored-by: Sam Rose <hello@samwho.dev>
This commit is contained in:
parent 36aef404a0
commit c12e5fd196
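Note: in short, when a query is previewed, any response field that is an array of objects is now reported as a JSON field with an "array" subtype plus a captured nested schema, instead of falling out of the schema selector. The builder shows it as a read-only "Array" type and exposes it as a bindable "queryarray" datasource. The sketch below only illustrates that data flow; the example row and field names are invented, not taken from the PR.

// Hypothetical preview response row (invented data)
const row = { name: "Joe", pets: [{ animal: "cat", age: 2 }] }

// What the preview endpoint now derives for each key (see getSchemaFields below):
// an array of objects becomes a JSON field with the "array" subtype...
const previewSchema = {
  name: { type: "string", name: "name" },
  pets: { type: "json", name: "pets", subtype: "array" },
}

// ...and the nested object keys are captured separately, so the builder can
// later rebuild a table-like schema for the "queryarray" datasource.
const nestedSchemaFields = {
  pets: {
    animal: { type: "string", name: "animal" },
    age: { type: "number", name: "age" },
  },
}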
@@ -3,6 +3,7 @@ import {
   Event,
   Datasource,
   Query,
+  QueryPreview,
   QueryCreatedEvent,
   QueryUpdatedEvent,
   QueryDeletedEvent,
@@ -68,9 +69,9 @@ const run = async (count: number, timestamp?: string | number) => {
   await publishEvent(Event.QUERIES_RUN, properties, timestamp)
 }
 
-const previewed = async (datasource: Datasource, query: Query) => {
+const previewed = async (datasource: Datasource, query: QueryPreview) => {
   const properties: QueryPreviewedEvent = {
-    queryId: query._id,
+    queryId: query.queryId,
     datasourceId: datasource._id as string,
     source: datasource.source,
     queryVerb: query.queryVerb,
@@ -127,10 +127,14 @@
     }
   })
   $: jsonArrays = bindings
-    .filter(x => x.fieldSchema?.type === "jsonarray")
+    .filter(
+      x =>
+        x.fieldSchema?.type === "jsonarray" ||
+        (x.fieldSchema?.type === "json" && x.fieldSchema?.subtype === "array")
+    )
     .map(binding => {
       const { providerId, readableBinding, runtimeBinding, tableId } = binding
-      const { name, type, prefixKeys } = binding.fieldSchema
+      const { name, type, prefixKeys, subtype } = binding.fieldSchema
       return {
         providerId,
         label: readableBinding,
@@ -138,7 +142,8 @@
         fieldType: type,
         tableId,
         prefixKeys,
-        type: "jsonarray",
+        type: type === "jsonarray" ? "jsonarray" : "queryarray",
+        subtype,
         value: `{{ literal ${runtimeBinding} }}`,
       }
     })
@@ -85,6 +85,16 @@
     activity = newActivity
     dispatch("change", fields)
   }
+
+  function isJsonArray(value) {
+    if (!value || typeof value === "string") {
+      return false
+    }
+    if (value.type === "array") {
+      return true
+    }
+    return value.type === "json" && value.subtype === "array"
+  }
 </script>
 
 <!-- Builds Objects with Key Value Pairs. Useful for building things like Request Headers. -->
@@ -112,7 +122,9 @@
         bind:value={field.name}
         on:blur={changed}
       />
-      {#if options}
+      {#if isJsonArray(field.value)}
+        <Select readonly={true} value="Array" options={["Array"]} />
+      {:else if options}
         <Select
           bind:value={field.value}
           {compare}
@@ -40,6 +40,7 @@
   let schemaType
 
   let autoSchema = {}
+  let nestedSchemaFields = {}
   let rows = []
   let keys = {}
 
@@ -83,13 +84,14 @@
       return
     }
 
+    nestedSchemaFields = response.nestedSchemaFields
 
     if (Object.keys(newQuery.schema).length === 0) {
       // Assign this to a variable instead of directly to the newQuery.schema so that a user
       // can change the table they're querying and have the schema update until they first
       // edit it
       autoSchema = response.schema
     }
 
     rows = response.rows
 
     notifications.success("Query executed successfully")
@@ -120,6 +122,7 @@
         Object.keys(newQuery.schema).length === 0
           ? autoSchema
           : newQuery.schema,
+      nestedSchemaFields,
     })
 
     notifications.success("Query saved successfully")
@@ -5,7 +5,6 @@
   Label,
   Input,
   Select,
-  Divider,
   Layout,
   Icon,
   Button,
@@ -124,7 +123,6 @@
     {#each query.fields.steps ?? [] as step, index}
       <div class="block">
         <div class="subblock">
-          <Divider noMargin />
           <div class="blockSection">
             <div class="block-options">
               Stage {index + 1}
@@ -310,6 +310,7 @@ export const BannedSearchTypes = [
   "formula",
   "json",
   "jsonarray",
+  "queryarray",
 ]
 
 export const DatasourceTypes = {
@@ -425,7 +425,7 @@ const generateComponentContextBindings = (asset, componentContext) => {
     table = info.table
 
     // Determine what to prefix bindings with
-    if (datasource.type === "jsonarray") {
+    if (datasource.type === "jsonarray" || datasource.type === "queryarray") {
       // For JSON arrays, use the array name as the readable prefix
       const split = datasource.label.split(".")
       readablePrefix = split[split.length - 1]
@@ -904,6 +904,19 @@ export const getSchemaForDatasource = (asset, datasource, options) => {
       schema = JSONUtils.getJSONArrayDatasourceSchema(tableSchema, datasource)
     }
 
+    // "queryarray" datasources are arrays inside JSON responses
+    else if (type === "queryarray") {
+      const queries = get(queriesStores).list
+      table = queries.find(query => query._id === datasource.tableId)
+      let tableSchema = table?.schema
+      let nestedSchemaFields = table?.nestedSchemaFields
+      schema = JSONUtils.generateQueryArraySchemas(
+        tableSchema,
+        nestedSchemaFields
+      )
+      schema = JSONUtils.getJSONArrayDatasourceSchema(schema, datasource)
+    }
+
     // Otherwise we assume we're targeting an internal table or a plus
     // datasource, and we can treat it as a table with a schema
     else {
@@ -84,7 +84,7 @@
 
   // Fetches the form schema from this form's dataSource
   const fetchSchema = async dataSource => {
-    if (dataSource?.tableId && dataSource?.type !== "query") {
+    if (dataSource?.tableId && !dataSource?.type?.startsWith("query")) {
       try {
         table = await API.fetchTableDefinition(dataSource.tableId)
       } catch (error) {
@@ -7,6 +7,7 @@ import NestedProviderFetch from "@budibase/frontend-core/src/fetch/NestedProviderFetch.js"
 import FieldFetch from "@budibase/frontend-core/src/fetch/FieldFetch.js"
 import JSONArrayFetch from "@budibase/frontend-core/src/fetch/JSONArrayFetch.js"
 import ViewV2Fetch from "@budibase/frontend-core/src/fetch/ViewV2Fetch.js"
+import QueryArrayFetch from "@budibase/frontend-core/src/fetch/QueryArrayFetch"
 
 /**
  * Fetches the schema of any kind of datasource.
@@ -28,6 +29,7 @@ export const fetchDatasourceSchema = async (
     provider: NestedProviderFetch,
     field: FieldFetch,
     jsonarray: JSONArrayFetch,
+    queryarray: QueryArrayFetch,
   }[datasource?.type]
   if (!handler) {
     return null
@@ -0,0 +1,25 @@
+import FieldFetch from "./FieldFetch.js"
+import {
+  getJSONArrayDatasourceSchema,
+  generateQueryArraySchemas,
+} from "../utils/json"
+
+export default class QueryArrayFetch extends FieldFetch {
+  async getDefinition(datasource) {
+    if (!datasource?.tableId) {
+      return null
+    }
+    // JSON arrays need their table definitions fetched.
+    // We can then extract their schema as a subset of the table schema.
+    try {
+      const table = await this.API.fetchQueryDefinition(datasource.tableId)
+      const schema = generateQueryArraySchemas(
+        table?.schema,
+        table?.nestedSchemaFields
+      )
+      return { schema: getJSONArrayDatasourceSchema(schema, datasource) }
+    } catch (error) {
+      return null
+    }
+  }
+}
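Note: for context, a rough sketch of the datasource object this new fetch class is meant to receive. The exact property names are an assumption based on how the handler map and getDefinition use them, not a verbatim shape from the PR: type selects the handler, tableId is the saved query's _id, and the label carries the dotted path to the array field.

// Assumed shape only, for illustration
const datasource = {
  type: "queryarray", // routes to QueryArrayFetch via DataFetchMap
  tableId: "query_abc123", // hypothetical _id of the saved query
  label: "My Query.pets", // dotted path used to pick the array field out of the schema
}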
@@ -9,6 +9,7 @@ import JSONArrayFetch from "./JSONArrayFetch.js"
 import UserFetch from "./UserFetch.js"
 import GroupUserFetch from "./GroupUserFetch.js"
 import CustomFetch from "./CustomFetch.js"
+import QueryArrayFetch from "./QueryArrayFetch.js"
 
 const DataFetchMap = {
   table: TableFetch,
@@ -24,6 +25,7 @@ const DataFetchMap = {
   provider: NestedProviderFetch,
   field: FieldFetch,
   jsonarray: JSONArrayFetch,
+  queryarray: QueryArrayFetch,
 }
 
 // Constructs a new fetch model for a certain datasource
@@ -1,3 +1,5 @@
+import { utils } from "@budibase/shared-core"
+
 /**
  * Gets the schema for a datasource which is targeting a JSON array, including
  * nested JSON arrays. The returned schema is a squashed, table-like schema
@@ -119,3 +121,33 @@ const extractJSONSchemaKeys = (jsonSchema, squashObjects = false) => {
   })
   return keys
 }
+
+export const generateQueryArraySchemas = (schema, nestedSchemaFields) => {
+  for (let key in schema) {
+    if (
+      schema[key]?.type === "json" &&
+      schema[key]?.subtype === "array" &&
+      utils.hasSchema(nestedSchemaFields[key])
+    ) {
+      schema[key] = {
+        schema: {
+          schema: Object.entries(nestedSchemaFields[key] || {}).reduce(
+            (acc, [nestedKey, fieldSchema]) => {
+              acc[nestedKey] = {
+                name: nestedKey,
+                type: fieldSchema.type,
+                subtype: fieldSchema.subtype,
+              }
+              return acc
+            },
+            {}
+          ),
+          type: "json",
+        },
+        type: "json",
+        subtype: "array",
+      }
+    }
+  }
+  return schema
+}
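Note: to make the transformation above concrete, here is a rough before/after for a single key, assuming a query whose schema marks "pets" as a JSON array and whose nested fields were captured at preview time. The example values are invented for illustration.

// Input (invented example)
const schema = { pets: { type: "json", subtype: "array" } }
const nestedSchemaFields = { pets: { animal: { type: "string" } } }

// After generateQueryArraySchemas(schema, nestedSchemaFields), "pets" is expanded
// into a nested, table-like schema that the queryarray fetch can consume:
const expected = {
  pets: {
    type: "json",
    subtype: "array",
    schema: {
      type: "json",
      schema: {
        animal: { name: "animal", type: "string", subtype: undefined },
      },
    },
  },
}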
@@ -1,5 +1,4 @@
 import { generateQueryID } from "../../../db/utils"
-import { BaseQueryVerbs } from "../../../constants"
 import { Thread, ThreadType } from "../../../threads"
 import { save as saveDatasource } from "../datasource"
 import { RestImporter } from "./import"
@@ -7,36 +6,27 @@ import { invalidateDynamicVariables } from "../../../threads/utils"
 import env from "../../../environment"
 import { events, context, utils, constants } from "@budibase/backend-core"
 import sdk from "../../../sdk"
-import { QueryEvent, QueryResponse } from "../../../threads/definitions"
+import { QueryEvent } from "../../../threads/definitions"
 import {
   ConfigType,
   Query,
   UserCtx,
   SessionCookie,
+  JsonFieldSubType,
+  QueryResponse,
+  QueryPreview,
   QuerySchema,
   FieldType,
   type ExecuteQueryRequest,
   type ExecuteQueryResponse,
   type Row,
 } from "@budibase/types"
-import { ValidQueryNameRegex } from "@budibase/shared-core"
+import { ValidQueryNameRegex, utils as JsonUtils } from "@budibase/shared-core"
 
 const Runner = new Thread(ThreadType.QUERY, {
   timeoutMs: env.QUERY_THREAD_TIMEOUT,
 })
 
-// simple function to append "readable" to all read queries
-function enrichQueries(input: any) {
-  const wasArray = Array.isArray(input)
-  const queries = wasArray ? input : [input]
-  for (let query of queries) {
-    if (query.queryVerb === BaseQueryVerbs.READ) {
-      query.readable = true
-    }
-  }
-  return wasArray ? queries : queries[0]
-}
-
 export async function fetch(ctx: UserCtx) {
   ctx.body = await sdk.queries.fetch()
 }
@@ -84,7 +74,7 @@ export { _import as import }
 
 export async function save(ctx: UserCtx) {
   const db = context.getAppDB()
-  const query = ctx.request.body
+  const query: Query = ctx.request.body
 
   // Validate query name
   if (!query?.name.match(ValidQueryNameRegex)) {
@@ -100,7 +90,6 @@ export async function save(ctx: UserCtx) {
   } else {
     eventFn = () => events.query.updated(datasource, query)
   }
-
   const response = await db.put(query)
   await eventFn()
   query._rev = response.rev
@@ -133,7 +122,7 @@ export async function preview(ctx: UserCtx) {
   const { datasource, envVars } = await sdk.datasources.getWithEnvVars(
     ctx.request.body.datasourceId
   )
-  const query = ctx.request.body
+  const query: QueryPreview = ctx.request.body
   // preview may not have a queryId as it hasn't been saved, but if it does
   // this stops dynamic variables from calling the same query
   const { fields, parameters, queryVerb, transformer, queryId, schema } = query
@@ -153,6 +142,69 @@ export async function preview(ctx: UserCtx) {
 
   const authConfigCtx: any = getAuthConfig(ctx)
 
+  function getSchemaFields(
+    rows: any[],
+    keys: string[]
+  ): {
+    previewSchema: Record<string, string | QuerySchema>
+    nestedSchemaFields: {
+      [key: string]: Record<string, string | QuerySchema>
+    }
+  } {
+    const previewSchema: Record<string, string | QuerySchema> = {}
+    const nestedSchemaFields: {
+      [key: string]: Record<string, string | QuerySchema>
+    } = {}
+    const makeQuerySchema = (
+      type: FieldType,
+      name: string,
+      subtype?: string
+    ): QuerySchema => ({
+      type,
+      name,
+      subtype,
+    })
+    if (rows?.length > 0) {
+      for (let key of [...new Set(keys)] as string[]) {
+        const field = rows[0][key]
+        let type = typeof field,
+          fieldMetadata = makeQuerySchema(FieldType.STRING, key)
+        if (field)
+          switch (type) {
+            case "boolean":
+              fieldMetadata = makeQuerySchema(FieldType.BOOLEAN, key)
+              break
+            case "object":
+              if (field instanceof Date) {
+                fieldMetadata = makeQuerySchema(FieldType.DATETIME, key)
+              } else if (Array.isArray(field)) {
+                if (JsonUtils.hasSchema(field[0])) {
+                  fieldMetadata = makeQuerySchema(
+                    FieldType.JSON,
+                    key,
+                    JsonFieldSubType.ARRAY
+                  )
+                } else {
+                  fieldMetadata = makeQuerySchema(FieldType.ARRAY, key)
+                }
+                nestedSchemaFields[key] = getSchemaFields(
+                  field,
+                  Object.keys(field[0])
+                ).previewSchema
+              } else {
+                fieldMetadata = makeQuerySchema(FieldType.JSON, key)
+              }
+              break
+            case "number":
+              fieldMetadata = makeQuerySchema(FieldType.NUMBER, key)
+              break
+          }
+        previewSchema[key] = fieldMetadata
+      }
+    }
+    return { previewSchema, nestedSchemaFields }
+  }
+
   try {
     const inputs: QueryEvent = {
       appId: ctx.appId,
@@ -171,38 +223,11 @@ export async function preview(ctx: UserCtx) {
       },
     }
 
-    const { rows, keys, info, extra } = await Runner.run<QueryResponse>(inputs)
-    const previewSchema: Record<string, QuerySchema> = {}
-    const makeQuerySchema = (type: FieldType, name: string): QuerySchema => ({
-      type,
-      name,
-    })
-    if (rows?.length > 0) {
-      for (let key of [...new Set(keys)] as string[]) {
-        const field = rows[0][key]
-        let type = typeof field,
-          fieldMetadata = makeQuerySchema(FieldType.STRING, key)
-        if (field)
-          switch (type) {
-            case "boolean":
-              fieldMetadata = makeQuerySchema(FieldType.BOOLEAN, key)
-              break
-            case "object":
-              if (field instanceof Date) {
-                fieldMetadata = makeQuerySchema(FieldType.DATETIME, key)
-              } else if (Array.isArray(field)) {
-                fieldMetadata = makeQuerySchema(FieldType.ARRAY, key)
-              } else {
-                fieldMetadata = makeQuerySchema(FieldType.JSON, key)
-              }
-              break
-            case "number":
-              fieldMetadata = makeQuerySchema(FieldType.NUMBER, key)
-              break
-          }
-        previewSchema[key] = fieldMetadata
-      }
-    }
+    const { rows, keys, info, extra } = (await Runner.run(
+      inputs
+    )) as QueryResponse
+    const { previewSchema, nestedSchemaFields } = getSchemaFields(rows, keys)
+
     // if existing schema, update to include any previous schema keys
     if (existingSchema) {
       for (let key of Object.keys(previewSchema)) {
@@ -216,6 +241,7 @@ export async function preview(ctx: UserCtx) {
     await events.query.previewed(datasource, query)
     ctx.body = {
       rows,
+      nestedSchemaFields,
      schema: previewSchema,
       info,
       extra,
@@ -57,3 +57,13 @@ export function filterValueToLabel() {
     {}
   )
 }
+
+export function hasSchema(test: any) {
+  return (
+    typeof test === "object" &&
+    !Array.isArray(test) &&
+    test !== null &&
+    !(test instanceof Date) &&
+    Object.keys(test).length > 0
+  )
+}
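Note: a quick truth table for the new helper. The import mirrors how the JSON utilities above consume it from shared-core; the example values are arbitrary.

import { utils } from "@budibase/shared-core"

utils.hasSchema({ animal: "cat" }) // true  - plain object with at least one key
utils.hasSchema({}) // false - no keys
utils.hasSchema([{ animal: "cat" }]) // false - arrays are excluded
utils.hasSchema("cat") // false - primitives are excluded
utils.hasSchema(new Date()) // false - Dates are excluded
utils.hasSchema(null) // false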
@@ -4,6 +4,7 @@ import type { Row } from "./row"
 export interface QuerySchema {
   name?: string
   type: string
+  subtype?: string
 }
 
 export interface Query extends Document {
@@ -17,11 +18,23 @@ export interface Query extends Document {
   queryVerb: string
 }
 
+export interface QueryPreview extends Omit<Query, "_id"> {
+  queryId: string
+}
+
 export interface QueryParameter {
   name: string
   default: string
 }
 
+export interface QueryResponse {
+  rows: any[]
+  keys: string[]
+  info: any
+  extra: any
+  pagination: any
+}
+
 export interface RestQueryFields {
   path: string
   queryString?: string
@@ -16,6 +16,10 @@ export enum AutoFieldSubType {
   AUTO_ID = "autoID",
 }
 
+export enum JsonFieldSubType {
+  ARRAY = "array",
+}
+
 export enum FormulaType {
   STATIC = "static",
   DYNAMIC = "dynamic",
@@ -5,6 +5,7 @@ import {
   AutoFieldSubType,
   AutoReason,
   FormulaType,
+  JsonFieldSubType,
   RelationshipType,
 } from "./constants"
 
|
@ -81,6 +82,11 @@ export interface NumberFieldMetadata extends Omit<BaseFieldSchema, "subtype"> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface JsonFieldMetadata extends Omit<BaseFieldSchema, "subtype"> {
|
||||||
|
type: FieldType.JSON
|
||||||
|
subtype?: JsonFieldSubType.ARRAY
|
||||||
|
}
|
||||||
|
|
||||||
export interface DateFieldMetadata extends Omit<BaseFieldSchema, "subtype"> {
|
export interface DateFieldMetadata extends Omit<BaseFieldSchema, "subtype"> {
|
||||||
type: FieldType.DATETIME
|
type: FieldType.DATETIME
|
||||||
ignoreTimezones?: boolean
|
ignoreTimezones?: boolean
|
||||||
|
@@ -162,6 +168,7 @@ export type FieldSchema =
   | NumberFieldMetadata
   | LongFormFieldMetadata
   | BBReferenceFieldMetadata
+  | JsonFieldMetadata
 
 export interface TableSchema {
   [key: string]: FieldSchema
@@ -5572,9 +5572,9 @@
   integrity sha512-7GgtHCs/QZrBrDzgIJnQtuSvhFSwhyYSI2uafSwZoNt1iOGhEN5fwNrQMjtONyHm9+/LoA4453jH0CMYcr06Pg==
 
 "@types/node@^18.11.18":
-  version "18.19.10"
-  resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.10.tgz#4de314ab66faf6bc8ba691021a091ddcdf13a158"
-  integrity sha512-IZD8kAM02AW1HRDTPOlz3npFava678pr8Ie9Vp8uRhBROXAv8MXT2pCnGZZAKYdromsNQLHQcfWQ6EOatVLtqA==
+  version "18.19.13"
+  resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.13.tgz#c3e989ca967b862a1f6c8c4148fe31865eedaf1a"
+  integrity sha512-kgnbRDj8ioDyGxoiaXsiu1Ybm/K14ajCgMOkwiqpHrnF7d7QiYRoRqHIpglMMs3DwXinlK4qJ8TZGlj4hfleJg==
   dependencies:
     undici-types "~5.26.4"
 
@@ -10763,7 +10763,7 @@ fetch-cookie@0.11.0:
   dependencies:
     tough-cookie "^2.3.3 || ^3.0.1 || ^4.0.0"
 
-fflate@^0.4.1:
+fflate@^0.4.1, fflate@^0.4.8:
   version "0.4.8"
   resolved "https://registry.yarnpkg.com/fflate/-/fflate-0.4.8.tgz#f90b82aefbd8ac174213abb338bd7ef848f0f5ae"
   integrity sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA==