Merge remote-tracking branch 'refs/remotes/origin/fix/branding-cookie-update' into fix/branding-cookie-update
commit 00dfaf5f45

@@ -98,7 +98,6 @@ services:
   couchdb-service:
     restart: unless-stopped
     image: budibase/couchdb
-    pull_policy: always
     environment:
       - COUCHDB_PASSWORD=${COUCH_DB_PASSWORD}
       - COUCHDB_USER=${COUCH_DB_USER}

@@ -1,5 +1,5 @@
 {
-  "version": "2.19.5",
+  "version": "2.19.6",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

@@ -3,6 +3,7 @@ import {
   Event,
   Datasource,
   Query,
+  QueryPreview,
   QueryCreatedEvent,
   QueryUpdatedEvent,
   QueryDeletedEvent,

@@ -68,9 +69,9 @@ const run = async (count: number, timestamp?: string | number) => {
   await publishEvent(Event.QUERIES_RUN, properties, timestamp)
 }
 
-const previewed = async (datasource: Datasource, query: Query) => {
+const previewed = async (datasource: Datasource, query: QueryPreview) => {
   const properties: QueryPreviewedEvent = {
-    queryId: query._id,
+    queryId: query.queryId,
     datasourceId: datasource._id as string,
     source: datasource.source,
     queryVerb: query.queryVerb,

@@ -6,6 +6,7 @@ import * as context from "./context"
 import semver from "semver"
 import { bustCache, withCache, TTL, CacheKey } from "./cache/generic"
 import environment from "./environment"
+import { logAlert } from "./logging"
 
 export const getInstall = async (): Promise<Installation> => {
   return withCache(CacheKey.INSTALLATION, TTL.ONE_DAY, getInstallFromDB, {

@@ -80,27 +81,35 @@ export const checkInstallVersion = async (): Promise<void> => {
   const currentVersion = install.version
   const newVersion = environment.VERSION
 
-  if (currentVersion !== newVersion) {
-    const isUpgrade = semver.gt(newVersion, currentVersion)
-    const isDowngrade = semver.lt(newVersion, currentVersion)
+  try {
+    if (currentVersion !== newVersion) {
+      const isUpgrade = semver.gt(newVersion, currentVersion)
+      const isDowngrade = semver.lt(newVersion, currentVersion)
 
-    const success = await updateVersion(newVersion)
+      const success = await updateVersion(newVersion)
 
-    if (success) {
-      await context.doInIdentityContext(
-        {
-          _id: install.installId,
-          type: IdentityType.INSTALLATION,
-        },
-        async () => {
-          if (isUpgrade) {
-            await events.installation.upgraded(currentVersion, newVersion)
-          } else if (isDowngrade) {
-            await events.installation.downgraded(currentVersion, newVersion)
-          }
-        }
-      )
-      await events.identification.identifyInstallationGroup(install.installId)
-    }
-  }
+      if (success) {
+        await context.doInIdentityContext(
+          {
+            _id: install.installId,
+            type: IdentityType.INSTALLATION,
+          },
+          async () => {
+            if (isUpgrade) {
+              await events.installation.upgraded(currentVersion, newVersion)
+            } else if (isDowngrade) {
+              await events.installation.downgraded(currentVersion, newVersion)
+            }
+          }
+        )
+        await events.identification.identifyInstallationGroup(install.installId)
+      }
+    }
+  } catch (err: any) {
+    if (err?.message?.includes("Invalid Version")) {
+      logAlert(`Invalid version "${newVersion}" - is it semver?`)
+    } else {
+      logAlert("Failed to retrieve version", err)
+    }
+  }
 }

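Note on the try/catch introduced above: semver's comparison helpers throw rather than returning false when either argument is not valid semver, which is exactly the failure the new "Invalid Version" branch reports. A minimal sketch of the failure mode (version strings hypothetical):

    import semver from "semver"

    semver.gt("2.19.6", "2.19.5") // true
    semver.gt("latest", "2.19.5") // throws TypeError: Invalid Version: latest
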
@@ -127,10 +127,14 @@
     }
   })
   $: jsonArrays = bindings
-    .filter(x => x.fieldSchema?.type === "jsonarray")
+    .filter(
+      x =>
+        x.fieldSchema?.type === "jsonarray" ||
+        (x.fieldSchema?.type === "json" && x.fieldSchema?.subtype === "array")
+    )
     .map(binding => {
       const { providerId, readableBinding, runtimeBinding, tableId } = binding
-      const { name, type, prefixKeys } = binding.fieldSchema
+      const { name, type, prefixKeys, subtype } = binding.fieldSchema
       return {
         providerId,
         label: readableBinding,

@@ -138,7 +142,8 @@
         fieldType: type,
         tableId,
         prefixKeys,
-        type: "jsonarray",
+        type: type === "jsonarray" ? "jsonarray" : "queryarray",
+        subtype,
         value: `{{ literal ${runtimeBinding} }}`,
       }
     })

@@ -85,6 +85,16 @@
     activity = newActivity
     dispatch("change", fields)
   }
+
+  function isJsonArray(value) {
+    if (!value || typeof value === "string") {
+      return false
+    }
+    if (value.type === "array") {
+      return true
+    }
+    return value.type === "json" && value.subtype === "array"
+  }
 </script>
 
 <!-- Builds Objects with Key Value Pairs. Useful for building things like Request Headers. -->

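For clarity, the new isJsonArray helper accepts both the legacy "array" type and the newer "json" type with an "array" subtype; a few hypothetical probes (illustration, not part of the diff):

    isJsonArray(undefined)                          // false
    isJsonArray("{{ binding }}")                    // false - string bindings pass through
    isJsonArray({ type: "array" })                  // true
    isJsonArray({ type: "json", subtype: "array" }) // true
    isJsonArray({ type: "json" })                   // false
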
@@ -112,7 +122,9 @@
         bind:value={field.name}
         on:blur={changed}
       />
-      {#if options}
+      {#if isJsonArray(field.value)}
+        <Select readonly={true} value="Array" options={["Array"]} />
+      {:else if options}
         <Select
           bind:value={field.value}
           {compare}

@@ -40,6 +40,7 @@
   let schemaType
 
   let autoSchema = {}
+  let nestedSchemaFields = {}
   let rows = []
   let keys = {}
 

@@ -83,13 +84,14 @@
       return
     }
 
+    nestedSchemaFields = response.nestedSchemaFields
+
     if (Object.keys(newQuery.schema).length === 0) {
       // Assign this to a variable instead of directly to the newQuery.schema so that a user
       // can change the table they're querying and have the schema update until they first
       // edit it
       autoSchema = response.schema
     }
-
     rows = response.rows
 
     notifications.success("Query executed successfully")

@@ -120,6 +122,7 @@
         Object.keys(newQuery.schema).length === 0
           ? autoSchema
           : newQuery.schema,
+      nestedSchemaFields,
     })
 
     notifications.success("Query saved successfully")

@@ -5,7 +5,6 @@
   Label,
   Input,
   Select,
-  Divider,
   Layout,
   Icon,
   Button,

@@ -124,7 +123,6 @@
   {#each query.fields.steps ?? [] as step, index}
     <div class="block">
       <div class="subblock">
-        <Divider noMargin />
         <div class="blockSection">
           <div class="block-options">
             Stage {index + 1}

@@ -310,6 +310,7 @@ export const BannedSearchTypes = [
   "formula",
   "json",
   "jsonarray",
+  "queryarray",
 ]
 
 export const DatasourceTypes = {

@@ -425,7 +425,7 @@ const generateComponentContextBindings = (asset, componentContext) => {
       table = info.table
 
       // Determine what to prefix bindings with
-      if (datasource.type === "jsonarray") {
+      if (datasource.type === "jsonarray" || datasource.type === "queryarray") {
         // For JSON arrays, use the array name as the readable prefix
         const split = datasource.label.split(".")
         readablePrefix = split[split.length - 1]

@@ -904,6 +904,19 @@ export const getSchemaForDatasource = (asset, datasource, options) => {
     schema = JSONUtils.getJSONArrayDatasourceSchema(tableSchema, datasource)
   }
 
+  // "queryarray" datasources are arrays inside JSON responses
+  else if (type === "queryarray") {
+    const queries = get(queriesStores).list
+    table = queries.find(query => query._id === datasource.tableId)
+    let tableSchema = table?.schema
+    let nestedSchemaFields = table?.nestedSchemaFields
+    schema = JSONUtils.generateQueryArraySchemas(
+      tableSchema,
+      nestedSchemaFields
+    )
+    schema = JSONUtils.getJSONArrayDatasourceSchema(schema, datasource)
+  }
+
   // Otherwise we assume we're targeting an internal table or a plus
   // datasource, and we can treat it as a table with a schema
   else {

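For reference, a "queryarray" datasource consumed by this branch is expected to look roughly like the sketch below (values hypothetical): tableId holds the saved query's _id, and label carries the dotted path whose last segment becomes the readable binding prefix:

    const datasource = {
      type: "queryarray",
      tableId: "query_abc123", // _id of the saved query (hypothetical)
      label: "My Query.items", // dotted path down to the nested array
    }
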
@@ -13,7 +13,12 @@ import { COMPOSE_PATH } from "./makeFiles"
 import { info, success } from "../utils"
 import { start } from "./start"
 
-const BB_COMPOSE_SERVICES = ["app-service", "worker-service", "proxy-service"]
+const BB_COMPOSE_SERVICES = [
+  "app-service",
+  "worker-service",
+  "proxy-service",
+  "couchdb-service",
+]
 const BB_SINGLE_SERVICE = ["budibase"]
 
 export async function update() {

@@ -84,7 +84,7 @@
 
   // Fetches the form schema from this form's dataSource
   const fetchSchema = async dataSource => {
-    if (dataSource?.tableId && dataSource?.type !== "query") {
+    if (dataSource?.tableId && !dataSource?.type?.startsWith("query")) {
       try {
         table = await API.fetchTableDefinition(dataSource.tableId)
       } catch (error) {

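The widened condition skips the table-definition fetch for every query-backed type, since both "query" and the new "queryarray" share the same prefix (illustration, not part of the diff):

    "query".startsWith("query")      // true
    "queryarray".startsWith("query") // true
    "table".startsWith("query")      // false
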
@@ -7,6 +7,7 @@ import NestedProviderFetch from "@budibase/frontend-core/src/fetch/NestedProviderFetch.js"
 import FieldFetch from "@budibase/frontend-core/src/fetch/FieldFetch.js"
 import JSONArrayFetch from "@budibase/frontend-core/src/fetch/JSONArrayFetch.js"
 import ViewV2Fetch from "@budibase/frontend-core/src/fetch/ViewV2Fetch.js"
+import QueryArrayFetch from "@budibase/frontend-core/src/fetch/QueryArrayFetch"
 
 /**
  * Fetches the schema of any kind of datasource.

@@ -28,6 +29,7 @@ export const fetchDatasourceSchema = async (
     provider: NestedProviderFetch,
     field: FieldFetch,
     jsonarray: JSONArrayFetch,
+    queryarray: QueryArrayFetch,
   }[datasource?.type]
   if (!handler) {
     return null

@@ -0,0 +1,25 @@
+import FieldFetch from "./FieldFetch.js"
+import {
+  getJSONArrayDatasourceSchema,
+  generateQueryArraySchemas,
+} from "../utils/json"
+
+export default class QueryArrayFetch extends FieldFetch {
+  async getDefinition(datasource) {
+    if (!datasource?.tableId) {
+      return null
+    }
+    // JSON arrays need their table definitions fetched.
+    // We can then extract their schema as a subset of the table schema.
+    try {
+      const table = await this.API.fetchQueryDefinition(datasource.tableId)
+      const schema = generateQueryArraySchemas(
+        table?.schema,
+        table?.nestedSchemaFields
+      )
+      return { schema: getJSONArrayDatasourceSchema(schema, datasource) }
+    } catch (error) {
+      return null
+    }
+  }
+}

@@ -9,6 +9,7 @@ import JSONArrayFetch from "./JSONArrayFetch.js"
 import UserFetch from "./UserFetch.js"
 import GroupUserFetch from "./GroupUserFetch.js"
 import CustomFetch from "./CustomFetch.js"
+import QueryArrayFetch from "./QueryArrayFetch.js"
 
 const DataFetchMap = {
   table: TableFetch,

@@ -24,6 +25,7 @@ const DataFetchMap = {
   provider: NestedProviderFetch,
   field: FieldFetch,
   jsonarray: JSONArrayFetch,
+  queryarray: QueryArrayFetch,
 }
 
 // Constructs a new fetch model for a certain datasource

@@ -1,3 +1,5 @@
+import { utils } from "@budibase/shared-core"
+
 /**
  * Gets the schema for a datasource which is targeting a JSON array, including
  * nested JSON arrays. The returned schema is a squashed, table-like schema

@@ -119,3 +121,33 @@ const extractJSONSchemaKeys = (jsonSchema, squashObjects = false) => {
   })
   return keys
 }
+
+export const generateQueryArraySchemas = (schema, nestedSchemaFields) => {
+  for (let key in schema) {
+    if (
+      schema[key]?.type === "json" &&
+      schema[key]?.subtype === "array" &&
+      utils.hasSchema(nestedSchemaFields[key])
+    ) {
+      schema[key] = {
+        schema: {
+          schema: Object.entries(nestedSchemaFields[key] || {}).reduce(
+            (acc, [nestedKey, fieldSchema]) => {
+              acc[nestedKey] = {
+                name: nestedKey,
+                type: fieldSchema.type,
+                subtype: fieldSchema.subtype,
+              }
+              return acc
+            },
+            {}
+          ),
+          type: "json",
+        },
+        type: "json",
+        subtype: "array",
+      }
+    }
+  }
+  return schema
+}

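To illustrate generateQueryArraySchemas (field names hypothetical): a "json" field with subtype "array" and known nested fields is rewritten into the doubly nested shape that getJSONArrayDatasourceSchema can then walk:

    const schema = { items: { type: "json", subtype: "array" } }
    const nested = { items: { price: { type: "number" } } }

    generateQueryArraySchemas(schema, nested)
    // => {
    //   items: {
    //     schema: {
    //       schema: { price: { name: "price", type: "number", subtype: undefined } },
    //       type: "json",
    //     },
    //     type: "json",
    //     subtype: "array",
    //   },
    // }
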
@@ -1,5 +1,4 @@
 import { generateQueryID } from "../../../db/utils"
-import { BaseQueryVerbs } from "../../../constants"
 import { Thread, ThreadType } from "../../../threads"
 import { save as saveDatasource } from "../datasource"
 import { RestImporter } from "./import"

@@ -7,36 +6,27 @@ import { invalidateDynamicVariables } from "../../../threads/utils"
 import env from "../../../environment"
 import { events, context, utils, constants } from "@budibase/backend-core"
 import sdk from "../../../sdk"
-import { QueryEvent, QueryResponse } from "../../../threads/definitions"
+import { QueryEvent } from "../../../threads/definitions"
 import {
   ConfigType,
   Query,
   UserCtx,
   SessionCookie,
+  JsonFieldSubType,
+  QueryResponse,
+  QueryPreview,
   QuerySchema,
   FieldType,
   type ExecuteQueryRequest,
   type ExecuteQueryResponse,
   type Row,
 } from "@budibase/types"
-import { ValidQueryNameRegex } from "@budibase/shared-core"
+import { ValidQueryNameRegex, utils as JsonUtils } from "@budibase/shared-core"
 
 const Runner = new Thread(ThreadType.QUERY, {
   timeoutMs: env.QUERY_THREAD_TIMEOUT,
 })
 
-// simple function to append "readable" to all read queries
-function enrichQueries(input: any) {
-  const wasArray = Array.isArray(input)
-  const queries = wasArray ? input : [input]
-  for (let query of queries) {
-    if (query.queryVerb === BaseQueryVerbs.READ) {
-      query.readable = true
-    }
-  }
-  return wasArray ? queries : queries[0]
-}
-
 export async function fetch(ctx: UserCtx) {
   ctx.body = await sdk.queries.fetch()
 }

@@ -84,7 +74,7 @@ export { _import as import }
 
 export async function save(ctx: UserCtx) {
   const db = context.getAppDB()
-  const query = ctx.request.body
+  const query: Query = ctx.request.body
 
   // Validate query name
   if (!query?.name.match(ValidQueryNameRegex)) {

@@ -100,7 +90,6 @@ export async function save(ctx: UserCtx) {
   } else {
     eventFn = () => events.query.updated(datasource, query)
   }
-
   const response = await db.put(query)
   await eventFn()
   query._rev = response.rev

@@ -133,7 +122,7 @@ export async function preview(ctx: UserCtx) {
   const { datasource, envVars } = await sdk.datasources.getWithEnvVars(
     ctx.request.body.datasourceId
   )
-  const query = ctx.request.body
+  const query: QueryPreview = ctx.request.body
   // preview may not have a queryId as it hasn't been saved, but if it does
   // this stops dynamic variables from calling the same query
   const { fields, parameters, queryVerb, transformer, queryId, schema } = query

@@ -153,6 +142,69 @@ export async function preview(ctx: UserCtx) {
 
   const authConfigCtx: any = getAuthConfig(ctx)
 
+  function getSchemaFields(
+    rows: any[],
+    keys: string[]
+  ): {
+    previewSchema: Record<string, string | QuerySchema>
+    nestedSchemaFields: {
+      [key: string]: Record<string, string | QuerySchema>
+    }
+  } {
+    const previewSchema: Record<string, string | QuerySchema> = {}
+    const nestedSchemaFields: {
+      [key: string]: Record<string, string | QuerySchema>
+    } = {}
+    const makeQuerySchema = (
+      type: FieldType,
+      name: string,
+      subtype?: string
+    ): QuerySchema => ({
+      type,
+      name,
+      subtype,
+    })
+    if (rows?.length > 0) {
+      for (let key of [...new Set(keys)] as string[]) {
+        const field = rows[0][key]
+        let type = typeof field,
+          fieldMetadata = makeQuerySchema(FieldType.STRING, key)
+        if (field)
+          switch (type) {
+            case "boolean":
+              fieldMetadata = makeQuerySchema(FieldType.BOOLEAN, key)
+              break
+            case "object":
+              if (field instanceof Date) {
+                fieldMetadata = makeQuerySchema(FieldType.DATETIME, key)
+              } else if (Array.isArray(field)) {
+                if (JsonUtils.hasSchema(field[0])) {
+                  fieldMetadata = makeQuerySchema(
+                    FieldType.JSON,
+                    key,
+                    JsonFieldSubType.ARRAY
+                  )
+                } else {
+                  fieldMetadata = makeQuerySchema(FieldType.ARRAY, key)
+                }
+                nestedSchemaFields[key] = getSchemaFields(
+                  field,
+                  Object.keys(field[0])
+                ).previewSchema
+              } else {
+                fieldMetadata = makeQuerySchema(FieldType.JSON, key)
+              }
+              break
+            case "number":
+              fieldMetadata = makeQuerySchema(FieldType.NUMBER, key)
+              break
+          }
+        previewSchema[key] = fieldMetadata
+      }
+    }
+    return { previewSchema, nestedSchemaFields }
+  }
+
   try {
     const inputs: QueryEvent = {
       appId: ctx.appId,

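A sketch of getSchemaFields on hypothetical preview rows: scalars map directly to field types, while an array of keyed objects becomes a JSON field with the "array" subtype, its element schema recorded recursively in nestedSchemaFields:

    const rows = [{ name: "a", count: 1, items: [{ price: 10 }] }]
    const { previewSchema, nestedSchemaFields } = getSchemaFields(rows, [
      "name",
      "count",
      "items",
    ])
    // previewSchema.name       -> { type: FieldType.STRING, name: "name", subtype: undefined }
    // previewSchema.count      -> { type: FieldType.NUMBER, name: "count", subtype: undefined }
    // previewSchema.items      -> { type: FieldType.JSON, name: "items", subtype: JsonFieldSubType.ARRAY }
    // nestedSchemaFields.items -> { price: { type: FieldType.NUMBER, name: "price", subtype: undefined } }
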
@@ -171,38 +223,11 @@ export async function preview(ctx: UserCtx) {
       },
     }
 
-    const { rows, keys, info, extra } = await Runner.run<QueryResponse>(inputs)
-    const previewSchema: Record<string, QuerySchema> = {}
-    const makeQuerySchema = (type: FieldType, name: string): QuerySchema => ({
-      type,
-      name,
-    })
-    if (rows?.length > 0) {
-      for (let key of [...new Set(keys)] as string[]) {
-        const field = rows[0][key]
-        let type = typeof field,
-          fieldMetadata = makeQuerySchema(FieldType.STRING, key)
-        if (field)
-          switch (type) {
-            case "boolean":
-              fieldMetadata = makeQuerySchema(FieldType.BOOLEAN, key)
-              break
-            case "object":
-              if (field instanceof Date) {
-                fieldMetadata = makeQuerySchema(FieldType.DATETIME, key)
-              } else if (Array.isArray(field)) {
-                fieldMetadata = makeQuerySchema(FieldType.ARRAY, key)
-              } else {
-                fieldMetadata = makeQuerySchema(FieldType.JSON, key)
-              }
-              break
-            case "number":
-              fieldMetadata = makeQuerySchema(FieldType.NUMBER, key)
-              break
-          }
-        previewSchema[key] = fieldMetadata
-      }
-    }
+    const { rows, keys, info, extra } = (await Runner.run(
+      inputs
+    )) as QueryResponse
+    const { previewSchema, nestedSchemaFields } = getSchemaFields(rows, keys)
+
     // if existing schema, update to include any previous schema keys
     if (existingSchema) {
       for (let key of Object.keys(previewSchema)) {

@@ -216,6 +241,7 @@ export async function preview(ctx: UserCtx) {
     await events.query.previewed(datasource, query)
     ctx.body = {
       rows,
+      nestedSchemaFields,
       schema: previewSchema,
       info,
       extra,

@@ -11,7 +11,9 @@ export async function start(): Promise<StartedTestContainer> {
       MONGO_INITDB_ROOT_PASSWORD: "password",
     })
     .withWaitStrategy(
-      Wait.forSuccessfulCommand(`mongosh --eval "db.version()"`)
+      Wait.forSuccessfulCommand(
+        `mongosh --eval "db.version()"`
+      ).withStartupTimeout(10000)
     )
     .start()
 }

@@ -1,26 +1,34 @@
 import { Datasource, SourceName } from "@budibase/types"
 import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
+import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
 
 let container: StartedTestContainer | undefined
 
+class MySQLWaitStrategy extends AbstractWaitStrategy {
+  async waitUntilReady(container: any, boundPorts: any, startTime?: Date) {
+    // Because MySQL first starts itself up, runs an init script, then restarts,
+    // it's possible for the mysqladmin ping to succeed early and then tests to
+    // run against a MySQL that's mid-restart and fail. To get around this, we
+    // wait for logs and then do a ping check.
+
+    const logs = Wait.forLogMessage(
+      "/usr/sbin/mysqld: ready for connections",
+      2
+    )
+    await logs.waitUntilReady(container, boundPorts, startTime)
+
+    const command = Wait.forSuccessfulCommand(
+      `mysqladmin ping -h localhost -P 3306 -u root -ppassword`
+    )
+    await command.waitUntilReady(container)
+  }
+}
+
 export async function start(): Promise<StartedTestContainer> {
   return await new GenericContainer("mysql:8.3")
     .withExposedPorts(3306)
     .withEnvironment({ MYSQL_ROOT_PASSWORD: "password" })
-    .withWaitStrategy(
-      Wait.forSuccessfulCommand(
-        // Because MySQL first starts itself up, runs an init script, then restarts,
-        // it's possible for the mysqladmin ping to succeed early and then tests to
-        // run against a MySQL that's mid-restart and fail. To avoid this, we run
-        // the ping command three times with a small delay between each.
-        `
-        mysqladmin ping -h localhost -P 3306 -u root -ppassword && sleep 1 &&
-        mysqladmin ping -h localhost -P 3306 -u root -ppassword && sleep 1 &&
-        mysqladmin ping -h localhost -P 3306 -u root -ppassword && sleep 1 &&
-        mysqladmin ping -h localhost -P 3306 -u root -ppassword
-        `
-      )
-    )
+    .withWaitStrategy(new MySQLWaitStrategy().withStartupTimeout(10000))
     .start()
 }

@@ -8,7 +8,9 @@ export async function start(): Promise<StartedTestContainer> {
     .withExposedPorts(5432)
     .withEnvironment({ POSTGRES_PASSWORD: "password" })
     .withWaitStrategy(
-      Wait.forSuccessfulCommand("pg_isready -h localhost -p 5432")
+      Wait.forSuccessfulCommand(
+        "pg_isready -h localhost -p 5432"
+      ).withStartupTimeout(10000)
     )
     .start()
 }

@@ -57,3 +57,13 @@ export function filterValueToLabel() {
     {}
   )
 }
+
+export function hasSchema(test: any) {
+  return (
+    typeof test === "object" &&
+    !Array.isArray(test) &&
+    test !== null &&
+    !(test instanceof Date) &&
+    Object.keys(test).length > 0
+  )
+}

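The new hasSchema guard is true only for plain, non-empty objects, so only keyed rows count as having a schema; a few hypothetical probes:

    hasSchema({ price: "number" }) // true
    hasSchema({})                  // false - no keys
    hasSchema([1, 2])              // false - arrays are excluded
    hasSchema(new Date())          // false - Dates are excluded
    hasSchema(null)                // false
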
@@ -4,6 +4,7 @@ import type { Row } from "./row"
 export interface QuerySchema {
   name?: string
   type: string
+  subtype?: string
 }
 
 export interface Query extends Document {

@@ -17,11 +18,23 @@ export interface Query extends Document {
   queryVerb: string
 }
 
+export interface QueryPreview extends Omit<Query, "_id"> {
+  queryId: string
+}
+
 export interface QueryParameter {
   name: string
   default: string
 }
 
+export interface QueryResponse {
+  rows: any[]
+  keys: string[]
+  info: any
+  extra: any
+  pagination: any
+}
+
 export interface RestQueryFields {
   path: string
   queryString?: string

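Since QueryPreview extends Omit<Query, "_id">, a preview payload carries the saved-query fields without _id, plus an explicit queryId; a hypothetical value for illustration (field values assumed, not taken from the diff):

    const preview: QueryPreview = {
      name: "Fetch products", // hypothetical
      datasourceId: "ds_abc123",
      parameters: [],
      fields: {},
      transformer: "return data",
      schema: {},
      queryVerb: "read",
      queryId: "query_abc123", // set when previewing an already-saved query
    }
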
@@ -16,6 +16,10 @@ export enum AutoFieldSubType {
   AUTO_ID = "autoID",
 }
 
+export enum JsonFieldSubType {
+  ARRAY = "array",
+}
+
 export enum FormulaType {
   STATIC = "static",
   DYNAMIC = "dynamic",

@@ -5,6 +5,7 @@ import {
   AutoFieldSubType,
   AutoReason,
   FormulaType,
+  JsonFieldSubType,
   RelationshipType,
 } from "./constants"
 

@@ -81,6 +82,11 @@ export interface NumberFieldMetadata extends Omit<BaseFieldSchema, "subtype"> {
   }
 }
 
+export interface JsonFieldMetadata extends Omit<BaseFieldSchema, "subtype"> {
+  type: FieldType.JSON
+  subtype?: JsonFieldSubType.ARRAY
+}
+
 export interface DateFieldMetadata extends Omit<BaseFieldSchema, "subtype"> {
   type: FieldType.DATETIME
   ignoreTimezones?: boolean

@@ -162,6 +168,7 @@ export type FieldSchema =
   | NumberFieldMetadata
   | LongFormFieldMetadata
   | BBReferenceFieldMetadata
+  | JsonFieldMetadata
 
 export interface TableSchema {
   [key: string]: FieldSchema

@@ -5572,9 +5572,9 @@
   integrity sha512-7GgtHCs/QZrBrDzgIJnQtuSvhFSwhyYSI2uafSwZoNt1iOGhEN5fwNrQMjtONyHm9+/LoA4453jH0CMYcr06Pg==
 
 "@types/node@^18.11.18":
-  version "18.19.10"
-  resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.10.tgz#4de314ab66faf6bc8ba691021a091ddcdf13a158"
-  integrity sha512-IZD8kAM02AW1HRDTPOlz3npFava678pr8Ie9Vp8uRhBROXAv8MXT2pCnGZZAKYdromsNQLHQcfWQ6EOatVLtqA==
+  version "18.19.13"
+  resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.13.tgz#c3e989ca967b862a1f6c8c4148fe31865eedaf1a"
+  integrity sha512-kgnbRDj8ioDyGxoiaXsiu1Ybm/K14ajCgMOkwiqpHrnF7d7QiYRoRqHIpglMMs3DwXinlK4qJ8TZGlj4hfleJg==
   dependencies:
     undici-types "~5.26.4"

@@ -10763,7 +10763,7 @@ fetch-cookie@0.11.0:
   dependencies:
     tough-cookie "^2.3.3 || ^3.0.1 || ^4.0.0"
 
-fflate@^0.4.1:
+fflate@^0.4.1, fflate@^0.4.8:
   version "0.4.8"
   resolved "https://registry.yarnpkg.com/fflate/-/fflate-0.4.8.tgz#f90b82aefbd8ac174213abb338bd7ef848f0f5ae"
   integrity sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA==