commit 0945ad5603
Merge remote-tracking branch 'origin/master' into feature/filter-bindings
@@ -42,6 +42,8 @@
       },
       "rules": {
         "no-unused-vars": "off",
+        "local-rules/no-budibase-imports": "error",
+        "local-rules/no-console-error": "error",
         "@typescript-eslint/no-unused-vars": [
           "error",
           {
@@ -1,4 +1,25 @@
 module.exports = {
+  "no-console-error": {
+    create: function(context) {
+      return {
+        CallExpression(node) {
+          if (
+            node.callee.type === "MemberExpression" &&
+            node.callee.object.name === "console" &&
+            node.callee.property.name === "error" &&
+            node.arguments.length === 1 &&
+            node.arguments[0].name &&
+            node.arguments[0].name.startsWith("err")
+          ) {
+            context.report({
+              node,
+              message: 'Using console.error(err) on its own is not allowed. Either provide context to the error (console.error(msg, err)) or throw it.',
+            })
+          }
+        },
+      };
+    },
+  },
   "no-budibase-imports": {
     create: function (context) {
       return {
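
For reference, a minimal RuleTester sketch of what the new no-console-error rule reports versus allows (illustrative only - not part of this commit; the require path and test setup are assumptions):

    const { RuleTester } = require("eslint")
    // Assumption: run from the repo root, so the local-rules module added above resolves here.
    const localRules = require("./eslint-local-rules")

    new RuleTester().run("no-console-error", localRules["no-console-error"], {
      valid: [
        // context is supplied alongside the error object
        'console.error("failed to refresh token", err)',
        // the single argument is not an identifier starting with "err"
        "console.error(message)",
      ],
      invalid: [{ code: "console.error(err)", errors: 1 }],
    })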
@@ -1,5 +1,5 @@
 {
-  "version": "2.23.9",
+  "version": "2.23.10",
   "npmClient": "yarn",
   "packages": [
     "packages/*",
@@ -1 +1 @@
-Subproject commit eb7d5da233885c5cffd9c255d3e954d0cd39185e
+Subproject commit c167c331ff9b8161fc18e2ecbaaf1ea5815ba964
@@ -64,7 +64,6 @@ async function refreshOIDCAccessToken(
     }
     strategy = await oidc.strategyFactory(enrichedConfig, ssoSaveUserNoOp)
   } catch (err) {
-    console.error(err)
     throw new Error("Could not refresh OAuth Token")
   }
 
@@ -99,7 +98,6 @@ async function refreshGoogleAccessToken(
       ssoSaveUserNoOp
     )
   } catch (err: any) {
-    console.error(err)
     throw new Error(
       `Error constructing OIDC refresh strategy: message=${err.message}`
     )
@@ -138,7 +138,6 @@ export default function (
     } catch (err: any) {
       authenticated = false
       console.error(`Auth Error: ${err.message}`)
-      console.error(err)
       // remove the cookie as the user does not exist anymore
       clearCookie(ctx, Cookie.Auth)
     }
@@ -187,7 +186,6 @@ export default function (
       }
     } catch (err: any) {
       console.error(`Auth Error: ${err.message}`)
-      console.error(err)
       // invalid token, clear the cookie
       if (err?.name === "JsonWebTokenError") {
         clearCookie(ctx, Cookie.Auth)
@@ -12,7 +12,7 @@ export async function errorHandling(ctx: any, next: any) {
   if (status >= 400 && status < 500) {
     console.warn(err)
   } else {
-    console.error(err)
+    console.error("Got 400 response code", err)
   }
 
   let error: APIError = {
@@ -68,7 +68,6 @@ export async function strategyFactory(
       verify
     )
   } catch (err: any) {
-    console.error(err)
     throw new Error(`Error constructing google authentication strategy: ${err}`)
   }
 }
@@ -103,7 +103,6 @@ export async function strategyFactory(
     strategy.name = "oidc"
     return strategy
   } catch (err: any) {
-    console.error(err)
     throw new Error(`Error constructing OIDC authentication strategy - ${err}`)
   }
 }
@@ -142,7 +141,6 @@ export async function fetchStrategyConfig(
       callbackURL: callbackUrl,
     }
   } catch (err) {
-    console.error(err)
     throw new Error(
       `Error constructing OIDC authentication configuration - ${err}`
     )
@@ -26,7 +26,6 @@ export const getMigrationsDoc = async (db: any) => {
     if (err.status && err.status === 404) {
       return { _id: DocumentType.MIGRATIONS }
     } else {
-      console.error(err)
       throw err
     }
   }
@@ -189,6 +189,7 @@
         <Select
           options={settingOptions}
           bind:value={condition.setting}
+          on:change={() => delete condition.settingValue}
         />
         <div>TO</div>
         {#if definition}
@@ -76,7 +76,7 @@ function writeFile(output: any, filename: string) {
     console.log(`Wrote spec to ${path}`)
     return path
   } catch (err) {
-    console.error(err)
+    console.error("Error writing spec file", err)
   }
 }
 
@@ -6,7 +6,19 @@ import sdk from "../../../sdk"
 
 import tk from "timekeeper"
 import { mocks } from "@budibase/backend-core/tests"
-import { QueryPreview, SourceName } from "@budibase/types"
+import {
+  Datasource,
+  FieldSchema,
+  FieldSubtype,
+  FieldType,
+  QueryPreview,
+  RelationshipType,
+  SourceName,
+  Table,
+  TableSchema,
+} from "@budibase/types"
+import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
+import { tableForDatasource } from "../../../tests/utilities/structures"
 
 tk.freeze(mocks.date.MOCK_DATE)
 
@@ -223,4 +235,152 @@ describe("/datasources", () => {
       })
     })
   })
+
+  describe.each([
+    [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
+    [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
+    [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
+    [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
+  ])("fetch schema (%s)", (_, dsProvider) => {
+    beforeAll(async () => {
+      datasource = await config.api.datasource.create(await dsProvider)
+    })
+
+    it("fetching schema will not drop tables or columns", async () => {
+      const datasourceId = datasource!._id!
+
+      const simpleTable = await config.api.table.save(
+        tableForDatasource(datasource, {
+          name: "simple",
+          schema: {
+            name: {
+              name: "name",
+              type: FieldType.STRING,
+            },
+          },
+        })
+      )
+
+      type SupportedSqlTypes =
+        | FieldType.STRING
+        | FieldType.BARCODEQR
+        | FieldType.LONGFORM
+        | FieldType.OPTIONS
+        | FieldType.DATETIME
+        | FieldType.NUMBER
+        | FieldType.BOOLEAN
+        | FieldType.FORMULA
+        | FieldType.BIGINT
+        | FieldType.BB_REFERENCE
+        | FieldType.LINK
+        | FieldType.ARRAY
+
+      const fullSchema: {
+        [type in SupportedSqlTypes]: FieldSchema & { type: type }
+      } = {
+        [FieldType.STRING]: {
+          name: "string",
+          type: FieldType.STRING,
+          constraints: {
+            presence: true,
+          },
+        },
+        [FieldType.LONGFORM]: {
+          name: "longform",
+          type: FieldType.LONGFORM,
+        },
+        [FieldType.OPTIONS]: {
+          name: "options",
+          type: FieldType.OPTIONS,
+          constraints: {
+            presence: { allowEmpty: false },
+          },
+        },
+        [FieldType.NUMBER]: {
+          name: "number",
+          type: FieldType.NUMBER,
+        },
+        [FieldType.BOOLEAN]: {
+          name: "boolean",
+          type: FieldType.BOOLEAN,
+        },
+        [FieldType.ARRAY]: {
+          name: "array",
+          type: FieldType.ARRAY,
+        },
+        [FieldType.DATETIME]: {
+          name: "datetime",
+          type: FieldType.DATETIME,
+          dateOnly: true,
+          timeOnly: false,
+        },
+        [FieldType.LINK]: {
+          name: "link",
+          type: FieldType.LINK,
+          tableId: simpleTable._id!,
+          relationshipType: RelationshipType.ONE_TO_MANY,
+          fieldName: "link",
+        },
+        [FieldType.FORMULA]: {
+          name: "formula",
+          type: FieldType.FORMULA,
+          formula: "any formula",
+        },
+        [FieldType.BARCODEQR]: {
+          name: "barcodeqr",
+          type: FieldType.BARCODEQR,
+        },
+        [FieldType.BIGINT]: {
+          name: "bigint",
+          type: FieldType.BIGINT,
+        },
+        [FieldType.BB_REFERENCE]: {
+          name: "bb_reference",
+          type: FieldType.BB_REFERENCE,
+          subtype: FieldSubtype.USERS,
+        },
+      }
+
+      await config.api.table.save(
+        tableForDatasource(datasource, {
+          name: "full",
+          schema: fullSchema,
+        })
+      )
+
+      const persisted = await config.api.datasource.get(datasourceId)
+      await config.api.datasource.fetchSchema(datasourceId)
+
+      const updated = await config.api.datasource.get(datasourceId)
+      const expected: Datasource = {
+        ...persisted,
+        entities:
+          persisted?.entities &&
+          Object.entries(persisted.entities).reduce<Record<string, Table>>(
+            (acc, [tableName, table]) => {
+              acc[tableName] = {
+                ...table,
+                primaryDisplay: expect.not.stringMatching(
+                  new RegExp(`^${table.primaryDisplay || ""}$`)
+                ),
+                schema: Object.entries(table.schema).reduce<TableSchema>(
+                  (acc, [fieldName, field]) => {
+                    acc[fieldName] = expect.objectContaining({
+                      ...field,
+                    })
+                    return acc
+                  },
+                  {}
+                ),
+              }
+              return acc
+            },
+            {}
+          ),
+
+        _rev: expect.any(String),
+      }
+      expect(updated).toEqual(expected)
+    })
+  })
 })
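
The expected object above leans on Jest asymmetric matchers: every column saved before the refresh must still be present afterwards, while primaryDisplay and _rev are allowed to change. A condensed restatement of the schema assertion (illustrative only, reusing the persisted/updated variables from the test; not additional code in the commit):

    for (const [tableName, table] of Object.entries(persisted?.entities ?? {})) {
      for (const [fieldName, field] of Object.entries(table.schema)) {
        // each previously persisted field must survive the schema refresh
        expect(updated?.entities?.[tableName]?.schema[fieldName]).toEqual(
          expect.objectContaining({ ...field })
        )
      }
    }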
@@ -34,7 +34,7 @@ describe.each([
   [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
   [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
 ])("/tables (%s)", (_, dsProvider) => {
-  let isInternal: boolean
+  const isInternal: boolean = !dsProvider
   let datasource: Datasource | undefined
   let config = setup.getConfig()
 
@@ -44,9 +44,6 @@ describe.each([
     await config.init()
     if (dsProvider) {
       datasource = await config.api.datasource.create(await dsProvider)
-      isInternal = false
-    } else {
-      isInternal = true
     }
   })
 
@@ -378,7 +378,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
       try {
         await connection.close()
       } catch (err) {
-        console.error(err)
+        console.error("Error connecting to Oracle", err)
       }
     }
   }
@@ -4,10 +4,11 @@ import {
   Datasource,
   FieldType,
   TableSourceType,
+  FieldSchema,
 } from "@budibase/types"
 import { DocumentType, SEPARATOR } from "../../db/utils"
 import { InvalidColumns, DEFAULT_BB_DATASOURCE_ID } from "../../constants"
-import { SWITCHABLE_TYPES, helpers } from "@budibase/shared-core"
+import { helpers, utils } from "@budibase/shared-core"
 import env from "../../environment"
 import { Knex } from "knex"
 
@@ -15,7 +16,28 @@ const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
 const ROW_ID_REGEX = /^\[.*]$/g
 const ENCODED_SPACE = encodeURIComponent(" ")
 
-const SQL_NUMBER_TYPE_MAP = {
+type PrimitiveTypes =
+  | FieldType.STRING
+  | FieldType.NUMBER
+  | FieldType.BOOLEAN
+  | FieldType.DATETIME
+  | FieldType.JSON
+  | FieldType.BIGINT
+  | FieldType.OPTIONS
+
+function isPrimitiveType(type: FieldType): type is PrimitiveTypes {
+  return [
+    FieldType.STRING,
+    FieldType.NUMBER,
+    FieldType.BOOLEAN,
+    FieldType.DATETIME,
+    FieldType.JSON,
+    FieldType.BIGINT,
+    FieldType.OPTIONS,
+  ].includes(type)
+}
+
+const SQL_NUMBER_TYPE_MAP: Record<string, PrimitiveTypes> = {
   integer: FieldType.NUMBER,
   int: FieldType.NUMBER,
   decimal: FieldType.NUMBER,
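
A small usage sketch of the new PrimitiveTypes guard together with the now-typed SQL maps (illustrative only; externalType is a hypothetical raw column type reported by a database, not a value from this diff):

    const externalType = "varchar"
    const mapped = SQL_TYPE_MAP[externalType.toLowerCase()] // may be undefined for unknown types
    if (mapped !== undefined && isPrimitiveType(mapped)) {
      // narrowed to PrimitiveTypes, so it is safe to use wherever a primitive FieldType is required
      console.log(`external type ${externalType} maps to ${mapped}`)
    }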
@@ -35,7 +57,7 @@ const SQL_NUMBER_TYPE_MAP = {
   smallmoney: FieldType.NUMBER,
 }
 
-const SQL_DATE_TYPE_MAP = {
+const SQL_DATE_TYPE_MAP: Record<string, PrimitiveTypes> = {
   timestamp: FieldType.DATETIME,
   time: FieldType.DATETIME,
   datetime: FieldType.DATETIME,
@@ -46,7 +68,7 @@ const SQL_DATE_TYPE_MAP = {
 const SQL_DATE_ONLY_TYPES = ["date"]
 const SQL_TIME_ONLY_TYPES = ["time"]
 
-const SQL_STRING_TYPE_MAP = {
+const SQL_STRING_TYPE_MAP: Record<string, PrimitiveTypes> = {
   varchar: FieldType.STRING,
   char: FieldType.STRING,
   nchar: FieldType.STRING,
@@ -58,22 +80,22 @@ const SQL_STRING_TYPE_MAP = {
   text: FieldType.STRING,
 }
 
-const SQL_BOOLEAN_TYPE_MAP = {
+const SQL_BOOLEAN_TYPE_MAP: Record<string, PrimitiveTypes> = {
   boolean: FieldType.BOOLEAN,
   bit: FieldType.BOOLEAN,
   tinyint: FieldType.BOOLEAN,
 }
 
-const SQL_OPTIONS_TYPE_MAP = {
+const SQL_OPTIONS_TYPE_MAP: Record<string, PrimitiveTypes> = {
   "user-defined": FieldType.OPTIONS,
 }
 
-const SQL_MISC_TYPE_MAP = {
+const SQL_MISC_TYPE_MAP: Record<string, PrimitiveTypes> = {
   json: FieldType.JSON,
   bigint: FieldType.BIGINT,
 }
 
-const SQL_TYPE_MAP = {
+const SQL_TYPE_MAP: Record<string, PrimitiveTypes> = {
   ...SQL_NUMBER_TYPE_MAP,
   ...SQL_DATE_TYPE_MAP,
   ...SQL_STRING_TYPE_MAP,
@@ -239,14 +261,14 @@ export function generateColumnDefinition(config: {
     constraints.inclusion = options
   }
 
-  const schema: any = {
+  const schema: FieldSchema = {
     type: foundType,
     externalType,
     autocolumn,
     name,
    constraints,
  }
-  if (foundType === FieldType.DATETIME) {
+  if (schema.type === FieldType.DATETIME) {
     schema.dateOnly = SQL_DATE_ONLY_TYPES.includes(lowerCaseType)
     schema.timeOnly = SQL_TIME_ONLY_TYPES.includes(lowerCaseType)
   }
@@ -274,49 +296,6 @@ export function isIsoDateString(str: string) {
   return d.toISOString() === trimmedValue
 }
 
-/**
- * This function will determine whether a column is a relationship and whether it
- * is currently valid. The reason for the validity check is that tables can be deleted
- * outside of Budibase control and if this is the case it will break Budibase relationships.
- * The tableIds is a list passed down from the main finalise tables function, which is
- * based on the tables that have just been fetched. This will only really be used on subsequent
- * fetches to the first one - if the user is periodically refreshing Budibase knowledge of tables.
- * @param column The column to check, to see if it is a valid relationship.
- * @param tableIds The IDs of the tables which currently exist.
- */
-function shouldCopyRelationship(
-  column: { type: FieldType.LINK; tableId?: string },
-  tableIds: string[]
-) {
-  return (
-    column.type === FieldType.LINK &&
-    column.tableId &&
-    tableIds.includes(column.tableId)
-  )
-}
-
-/**
- * Similar function to the shouldCopyRelationship function, but instead this looks for options and boolean
- * types. It is possible to switch a string -> options and a number -> boolean (and vice versus) need to make
- * sure that these get copied over when tables are fetched. Also checks whether they are still valid, if a
- * column has changed type in the external database then copying it over may not be possible.
- * @param column The column to check for options or boolean type.
- * @param fetchedColumn The fetched column to check for the type in the external database.
- */
-function shouldCopySpecialColumn(
-  column: { type: FieldType },
-  fetchedColumn: { type: FieldType } | undefined
-) {
-  const isFormula = column.type === FieldType.FORMULA
-  // column has been deleted, remove - formulas will never exist, always copy
-  if (!isFormula && column && !fetchedColumn) {
-    return false
-  }
-  const fetchedIsNumber =
-    !fetchedColumn || fetchedColumn.type === FieldType.NUMBER
-  return fetchedIsNumber && column.type === FieldType.BOOLEAN
-}
-
 /**
  * Looks for columns which need to be copied over into the new table definitions, like relationships,
  * options types and views.
@@ -338,6 +317,9 @@ function copyExistingPropsOver(
     if (entities[tableName]?.created) {
       table.created = entities[tableName]?.created
     }
+    if (entities[tableName]?.constrained) {
+      table.constrained = entities[tableName]?.constrained
+    }
 
     table.views = entities[tableName].views
 
@@ -346,45 +328,73 @@ function copyExistingPropsOver(
       if (!Object.prototype.hasOwnProperty.call(existingTableSchema, key)) {
         continue
       }
 
       const column = existingTableSchema[key]
 
       const existingColumnType = column?.type
       const updatedColumnType = table.schema[key]?.type
 
-      // If the db column type changed to a non-compatible one, we want to re-fetch it
-      if (
-        updatedColumnType !== existingColumnType &&
-        !SWITCHABLE_TYPES[updatedColumnType]?.includes(existingColumnType)
-      ) {
-        continue
+      const keepIfType = (...validTypes: PrimitiveTypes[]) => {
+        return (
+          isPrimitiveType(updatedColumnType) &&
+          table.schema[key] &&
+          validTypes.includes(updatedColumnType)
+        )
       }
 
-      if (
-        column.type === FieldType.LINK &&
-        !shouldCopyRelationship(column, tableIds)
-      ) {
-        continue
+      let shouldKeepSchema = false
+      switch (existingColumnType) {
+        case FieldType.FORMULA:
+        case FieldType.AUTO:
+        case FieldType.INTERNAL:
+          shouldKeepSchema = true
+          break
+
+        case FieldType.LINK:
+          shouldKeepSchema =
+            existingColumnType === FieldType.LINK &&
+            tableIds.includes(column.tableId)
+          break
+
+        case FieldType.STRING:
+        case FieldType.OPTIONS:
+        case FieldType.LONGFORM:
+        case FieldType.BARCODEQR:
+          shouldKeepSchema = keepIfType(FieldType.STRING)
+          break
+
+        case FieldType.NUMBER:
+        case FieldType.BOOLEAN:
+          shouldKeepSchema = keepIfType(FieldType.BOOLEAN, FieldType.NUMBER)
+          break
+
+        case FieldType.ARRAY:
+        case FieldType.ATTACHMENTS:
+        case FieldType.ATTACHMENT_SINGLE:
+        case FieldType.JSON:
+        case FieldType.BB_REFERENCE:
+          shouldKeepSchema = keepIfType(FieldType.JSON, FieldType.STRING)
+          break
+
+        case FieldType.DATETIME:
+          shouldKeepSchema = keepIfType(FieldType.DATETIME, FieldType.STRING)
+          break
+
+        case FieldType.BIGINT:
+          shouldKeepSchema = keepIfType(FieldType.BIGINT, FieldType.NUMBER)
+          break
+
+        default:
+          utils.unreachable(existingColumnType)
       }
 
-      const specialTypes = [
-        FieldType.OPTIONS,
-        FieldType.LONGFORM,
-        FieldType.ARRAY,
-        FieldType.FORMULA,
-        FieldType.BB_REFERENCE,
-      ]
-      if (
-        specialTypes.includes(column.type) &&
-        !shouldCopySpecialColumn(column, table.schema[key])
-      ) {
-        continue
-      }
-
-      table.schema[key] = {
-        ...existingTableSchema[key],
-        externalType:
-          existingTableSchema[key].externalType ||
-          table.schema[key].externalType,
+      if (shouldKeepSchema) {
+        table.schema[key] = {
+          ...existingTableSchema[key],
+          externalType:
+            existingTableSchema[key].externalType ||
+            table.schema[key]?.externalType,
+        }
       }
     }
   }
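
As a worked example of the keep/re-fetch decision above (illustrative values only; this is not code from the commit):

    // An existing OPTIONS column whose refreshed type still maps to STRING is kept,
    // since keepIfType(FieldType.STRING) holds; an existing BOOLEAN column whose
    // refreshed type now maps to JSON fails keepIfType(FieldType.BOOLEAN, FieldType.NUMBER)
    // and is re-fetched from the database instead of copied over.
    const examples = [
      { existing: "options", refreshed: "string", kept: true },
      { existing: "boolean", refreshed: "json", kept: false },
    ]
    for (const e of examples) {
      console.log(`${e.existing} -> ${e.refreshed}: ${e.kept ? "kept" : "re-fetched"}`)
    }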
@@ -125,7 +125,7 @@ describe("validation and update of external table schemas", () => {
   }
 
   it("should correctly set utilised foreign keys to autocolumns", () => {
-    const response = populateExternalTableSchemas(cloneDeep(SCHEMA) as any)
+    const response = populateExternalTableSchemas(cloneDeep(SCHEMA))
     const foreignKey = getForeignKeyColumn(response)
     expect(foreignKey.autocolumn).toBe(true)
     expect(foreignKey.autoReason).toBe(AutoReason.FOREIGN_KEY)
@@ -133,7 +133,7 @@ describe("validation and update of external table schemas", () => {
   })
 
   it("should correctly unset foreign keys when no longer used", () => {
-    const setResponse = populateExternalTableSchemas(cloneDeep(SCHEMA) as any)
+    const setResponse = populateExternalTableSchemas(cloneDeep(SCHEMA))
     const beforeFk = getForeignKeyColumn(setResponse)
     delete setResponse.entities!.client.schema.project
     delete setResponse.entities!.project.schema.client
@@ -44,7 +44,10 @@ function checkForeignKeysAreAutoColumns(datasource: Datasource) {
       if (shouldBeForeign && !column.autocolumn) {
         column.autocolumn = true
         column.autoReason = AutoReason.FOREIGN_KEY
-      } else if (column.autoReason === AutoReason.FOREIGN_KEY) {
+      } else if (
+        !shouldBeForeign &&
+        column.autoReason === AutoReason.FOREIGN_KEY
+      ) {
         delete column.autocolumn
         delete column.autoReason
       }
@@ -5,6 +5,7 @@ import {
   UpdateDatasourceResponse,
   UpdateDatasourceRequest,
   QueryJson,
+  BuildSchemaFromSourceResponse,
 } from "@budibase/types"
 import { Expectations, TestAPI } from "./base"
 
@@ -69,4 +70,13 @@ export class DatasourceAPI extends TestAPI {
       expectations,
     })
   }
+
+  fetchSchema = async (id: string, expectations?: Expectations) => {
+    return await this._post<BuildSchemaFromSourceResponse>(
+      `/api/datasources/${id}/schema`,
+      {
+        expectations,
+      }
+    )
+  }
 }
@@ -43,7 +43,7 @@ export const checkDevelopmentEnvironment = () => {
     error = "Must run via yarn once to generate environment."
   }
   if (error) {
-    console.error(error)
+    console.error("Error during development environment check", error)
     process.exit(-1)
   }
 }
@@ -13,9 +13,7 @@ export interface Datasource extends Document {
   config?: Record<string, any>
   plus?: boolean
   isSQL?: boolean
-  entities?: {
-    [key: string]: Table
-  }
+  entities?: Record<string, Table>
 }
 
 export enum RestAuthType {
@@ -91,6 +91,7 @@ export interface DateFieldMetadata extends Omit<BaseFieldSchema, "subtype"> {
   type: FieldType.DATETIME
   ignoreTimezones?: boolean
   timeOnly?: boolean
+  dateOnly?: boolean
   subtype?: AutoFieldSubType.CREATED_AT | AutoFieldSubType.UPDATED_AT
 }
 
@@ -35,8 +35,7 @@ async function passportCallback(
   info: { message: string } | null = null
 ) {
   if (err) {
-    console.error("Authentication error")
-    console.error(err)
+    console.error("Authentication error", err)
     console.trace(err)
     return ctx.throw(403, info ? info : "Unauthorized")
   }