Merge branch 'master' into fix/conditional-ui-updates
commit 987858d4ed
@@ -42,6 +42,8 @@
},
"rules": {
"no-unused-vars": "off",
"local-rules/no-budibase-imports": "error",
"local-rules/no-console-error": "error",
"@typescript-eslint/no-unused-vars": [
"error",
{
@@ -1,4 +1,25 @@
module.exports = {
"no-console-error": {
create: function(context) {
return {
CallExpression(node) {
if (
node.callee.type === "MemberExpression" &&
node.callee.object.name === "console" &&
node.callee.property.name === "error" &&
node.arguments.length === 1 &&
node.arguments[0].name &&
node.arguments[0].name.startsWith("err")
) {
context.report({
node,
message: 'Using console.error(err) on its own is not allowed. Either provide context to the error (console.error(msg, err)) or throw it.',
})
}
},
};
},
},
"no-budibase-imports": {
create: function (context) {
return {
return {
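
For context, a minimal sketch of what the new no-console-error rule flags versus allows, based on the check above (doWork is an illustrative name, not from this diff):

// Flagged: exactly one argument, an identifier starting with "err"
try {
  await doWork()
} catch (err) {
  console.error(err)
}

// Allowed: a contextual message is passed alongside the error
try {
  await doWork()
} catch (err) {
  console.error("Failed to do work", err)
}
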
@@ -1,5 +1,5 @@
{
"version": "2.23.9",
"version": "2.23.10",
"npmClient": "yarn",
"packages": [
"packages/*",
@@ -1 +1 @@
Subproject commit eb7d5da233885c5cffd9c255d3e954d0cd39185e
Subproject commit c167c331ff9b8161fc18e2ecbaaf1ea5815ba964
@@ -64,7 +64,6 @@ async function refreshOIDCAccessToken(
}
strategy = await oidc.strategyFactory(enrichedConfig, ssoSaveUserNoOp)
} catch (err) {
console.error(err)
throw new Error("Could not refresh OAuth Token")
}
@@ -99,7 +98,6 @@ async function refreshGoogleAccessToken(
ssoSaveUserNoOp
)
} catch (err: any) {
console.error(err)
throw new Error(
`Error constructing OIDC refresh strategy: message=${err.message}`
)
@@ -138,7 +138,6 @@ export default function (
} catch (err: any) {
authenticated = false
console.error(`Auth Error: ${err.message}`)
console.error(err)
// remove the cookie as the user does not exist anymore
clearCookie(ctx, Cookie.Auth)
}
@@ -187,7 +186,6 @@ export default function (
}
} catch (err: any) {
console.error(`Auth Error: ${err.message}`)
console.error(err)
// invalid token, clear the cookie
if (err?.name === "JsonWebTokenError") {
clearCookie(ctx, Cookie.Auth)
@@ -12,7 +12,7 @@ export async function errorHandling(ctx: any, next: any) {
if (status >= 400 && status < 500) {
console.warn(err)
} else {
console.error(err)
console.error("Got 400 response code", err)
}

let error: APIError = {
@@ -68,7 +68,6 @@ export async function strategyFactory(
verify
)
} catch (err: any) {
console.error(err)
throw new Error(`Error constructing google authentication strategy: ${err}`)
}
}
@@ -103,7 +103,6 @@ export async function strategyFactory(
strategy.name = "oidc"
return strategy
} catch (err: any) {
console.error(err)
throw new Error(`Error constructing OIDC authentication strategy - ${err}`)
}
}
@@ -142,7 +141,6 @@ export async function fetchStrategyConfig(
callbackURL: callbackUrl,
}
} catch (err) {
console.error(err)
throw new Error(
`Error constructing OIDC authentication configuration - ${err}`
)
@@ -26,7 +26,6 @@ export const getMigrationsDoc = async (db: any) => {
if (err.status && err.status === 404) {
return { _id: DocumentType.MIGRATIONS }
} else {
console.error(err)
throw err
}
}
@@ -119,14 +119,15 @@
{/if}
</svelte:head>

<div
{#if dataLoaded}
<div
id="spectrum-root"
lang="en"
dir="ltr"
class="spectrum spectrum--medium {$themeStore.baseTheme} {$themeStore.theme}"
class:builder={$builderStore.inBuilder}
class:show={fontsLoaded && dataLoaded}
>
>
{#if $environmentStore.maintenance.length > 0}
<MaintenanceScreen maintenanceList={$environmentStore.maintenance} />
{:else}
@@ -251,8 +252,9 @@
</UserBindingsProvider>
</DeviceBindingsProvider>
{/if}
</div>
<KeyboardManager />
</div>
<KeyboardManager />
{/if}

<style>
#spectrum-root {
@@ -76,7 +76,7 @@ function writeFile(output: any, filename: string) {
console.log(`Wrote spec to ${path}`)
return path
} catch (err) {
console.error(err)
console.error("Error writing spec file", err)
}
}
@@ -6,7 +6,19 @@ import sdk from "../../../sdk"

import tk from "timekeeper"
import { mocks } from "@budibase/backend-core/tests"
import { QueryPreview, SourceName } from "@budibase/types"
import {
Datasource,
FieldSchema,
FieldSubtype,
FieldType,
QueryPreview,
RelationshipType,
SourceName,
Table,
TableSchema,
} from "@budibase/types"
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
import { tableForDatasource } from "../../../tests/utilities/structures"

tk.freeze(mocks.date.MOCK_DATE)
@@ -223,4 +235,152 @@ describe("/datasources", () => {
})
})
})

describe.each([
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("fetch schema (%s)", (_, dsProvider) => {
beforeAll(async () => {
datasource = await config.api.datasource.create(await dsProvider)
})

it("fetching schema will not drop tables or columns", async () => {
const datasourceId = datasource!._id!

const simpleTable = await config.api.table.save(
tableForDatasource(datasource, {
name: "simple",
schema: {
name: {
name: "name",
type: FieldType.STRING,
},
},
})
)

type SupportedSqlTypes =
| FieldType.STRING
| FieldType.BARCODEQR
| FieldType.LONGFORM
| FieldType.OPTIONS
| FieldType.DATETIME
| FieldType.NUMBER
| FieldType.BOOLEAN
| FieldType.FORMULA
| FieldType.BIGINT
| FieldType.BB_REFERENCE
| FieldType.LINK
| FieldType.ARRAY

const fullSchema: {
[type in SupportedSqlTypes]: FieldSchema & { type: type }
} = {
[FieldType.STRING]: {
name: "string",
type: FieldType.STRING,
constraints: {
presence: true,
},
},
[FieldType.LONGFORM]: {
name: "longform",
type: FieldType.LONGFORM,
},
[FieldType.OPTIONS]: {
name: "options",
type: FieldType.OPTIONS,
constraints: {
presence: { allowEmpty: false },
},
},
[FieldType.NUMBER]: {
name: "number",
type: FieldType.NUMBER,
},
[FieldType.BOOLEAN]: {
name: "boolean",
type: FieldType.BOOLEAN,
},
[FieldType.ARRAY]: {
name: "array",
type: FieldType.ARRAY,
},
[FieldType.DATETIME]: {
name: "datetime",
type: FieldType.DATETIME,
dateOnly: true,
timeOnly: false,
},
[FieldType.LINK]: {
name: "link",
type: FieldType.LINK,
tableId: simpleTable._id!,
relationshipType: RelationshipType.ONE_TO_MANY,
fieldName: "link",
},
[FieldType.FORMULA]: {
name: "formula",
type: FieldType.FORMULA,
formula: "any formula",
},
[FieldType.BARCODEQR]: {
name: "barcodeqr",
type: FieldType.BARCODEQR,
},
[FieldType.BIGINT]: {
name: "bigint",
type: FieldType.BIGINT,
},
[FieldType.BB_REFERENCE]: {
name: "bb_reference",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
}

await config.api.table.save(
tableForDatasource(datasource, {
name: "full",
schema: fullSchema,
})
)

const persisted = await config.api.datasource.get(datasourceId)
await config.api.datasource.fetchSchema(datasourceId)

const updated = await config.api.datasource.get(datasourceId)
const expected: Datasource = {
...persisted,
entities:
persisted?.entities &&
Object.entries(persisted.entities).reduce<Record<string, Table>>(
(acc, [tableName, table]) => {
acc[tableName] = {
...table,
primaryDisplay: expect.not.stringMatching(
new RegExp(`^${table.primaryDisplay || ""}$`)
),
schema: Object.entries(table.schema).reduce<TableSchema>(
(acc, [fieldName, field]) => {
acc[fieldName] = expect.objectContaining({
...field,
})
return acc
},
{}
),
}
return acc
},
{}
),

_rev: expect.any(String),
}
expect(updated).toEqual(expected)
})
})
})
@@ -34,7 +34,7 @@ describe.each([
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/tables (%s)", (_, dsProvider) => {
let isInternal: boolean
const isInternal: boolean = !dsProvider
let datasource: Datasource | undefined
let config = setup.getConfig()
@@ -44,9 +44,6 @@ describe.each([
await config.init()
if (dsProvider) {
datasource = await config.api.datasource.create(await dsProvider)
isInternal = false
} else {
isInternal = true
}
})
@@ -378,7 +378,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
try {
await connection.close()
} catch (err) {
console.error(err)
console.error("Error connecting to Oracle", err)
}
}
}
@@ -4,10 +4,11 @@ import {
Datasource,
FieldType,
TableSourceType,
FieldSchema,
} from "@budibase/types"
import { DocumentType, SEPARATOR } from "../../db/utils"
import { InvalidColumns, DEFAULT_BB_DATASOURCE_ID } from "../../constants"
import { SWITCHABLE_TYPES, helpers } from "@budibase/shared-core"
import { helpers, utils } from "@budibase/shared-core"
import env from "../../environment"
import { Knex } from "knex"
@@ -15,7 +16,28 @@ const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
const ROW_ID_REGEX = /^\[.*]$/g
const ENCODED_SPACE = encodeURIComponent(" ")

const SQL_NUMBER_TYPE_MAP = {
type PrimitiveTypes =
| FieldType.STRING
| FieldType.NUMBER
| FieldType.BOOLEAN
| FieldType.DATETIME
| FieldType.JSON
| FieldType.BIGINT
| FieldType.OPTIONS

function isPrimitiveType(type: FieldType): type is PrimitiveTypes {
return [
FieldType.STRING,
FieldType.NUMBER,
FieldType.BOOLEAN,
FieldType.DATETIME,
FieldType.JSON,
FieldType.BIGINT,
FieldType.OPTIONS,
].includes(type)
}

const SQL_NUMBER_TYPE_MAP: Record<string, PrimitiveTypes> = {
integer: FieldType.NUMBER,
int: FieldType.NUMBER,
decimal: FieldType.NUMBER,
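
A small usage sketch of the new type guard (hypothetical call site, not part of this diff): because isPrimitiveType narrows FieldType to PrimitiveTypes, a narrowed value can be stored in the Record<string, PrimitiveTypes> SQL type maps in this file without a cast.

declare const externalType: string
declare const candidate: FieldType
if (isPrimitiveType(candidate)) {
  // candidate is now typed as PrimitiveTypes, so this assignment type-checks
  const extraMapping: Record<string, PrimitiveTypes> = { [externalType]: candidate }
}
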
@@ -35,7 +57,7 @@ const SQL_NUMBER_TYPE_MAP = {
smallmoney: FieldType.NUMBER,
}

const SQL_DATE_TYPE_MAP = {
const SQL_DATE_TYPE_MAP: Record<string, PrimitiveTypes> = {
timestamp: FieldType.DATETIME,
time: FieldType.DATETIME,
datetime: FieldType.DATETIME,
@@ -46,7 +68,7 @@ const SQL_DATE_TYPE_MAP = {
const SQL_DATE_ONLY_TYPES = ["date"]
const SQL_TIME_ONLY_TYPES = ["time"]

const SQL_STRING_TYPE_MAP = {
const SQL_STRING_TYPE_MAP: Record<string, PrimitiveTypes> = {
varchar: FieldType.STRING,
char: FieldType.STRING,
nchar: FieldType.STRING,
@@ -58,22 +80,22 @@ const SQL_STRING_TYPE_MAP = {
text: FieldType.STRING,
}

const SQL_BOOLEAN_TYPE_MAP = {
const SQL_BOOLEAN_TYPE_MAP: Record<string, PrimitiveTypes> = {
boolean: FieldType.BOOLEAN,
bit: FieldType.BOOLEAN,
tinyint: FieldType.BOOLEAN,
}

const SQL_OPTIONS_TYPE_MAP = {
const SQL_OPTIONS_TYPE_MAP: Record<string, PrimitiveTypes> = {
"user-defined": FieldType.OPTIONS,
}

const SQL_MISC_TYPE_MAP = {
const SQL_MISC_TYPE_MAP: Record<string, PrimitiveTypes> = {
json: FieldType.JSON,
bigint: FieldType.BIGINT,
}

const SQL_TYPE_MAP = {
const SQL_TYPE_MAP: Record<string, PrimitiveTypes> = {
...SQL_NUMBER_TYPE_MAP,
...SQL_DATE_TYPE_MAP,
...SQL_STRING_TYPE_MAP,
@@ -239,14 +261,14 @@ export function generateColumnDefinition(config: {
constraints.inclusion = options
}

const schema: any = {
const schema: FieldSchema = {
type: foundType,
externalType,
autocolumn,
name,
constraints,
}
if (foundType === FieldType.DATETIME) {
if (schema.type === FieldType.DATETIME) {
schema.dateOnly = SQL_DATE_ONLY_TYPES.includes(lowerCaseType)
schema.timeOnly = SQL_TIME_ONLY_TYPES.includes(lowerCaseType)
}
@@ -274,49 +296,6 @@ export function isIsoDateString(str: string) {
return d.toISOString() === trimmedValue
}

/**
* This function will determine whether a column is a relationship and whether it
* is currently valid. The reason for the validity check is that tables can be deleted
* outside of Budibase control and if this is the case it will break Budibase relationships.
* The tableIds is a list passed down from the main finalise tables function, which is
* based on the tables that have just been fetched. This will only really be used on subsequent
* fetches to the first one - if the user is periodically refreshing Budibase knowledge of tables.
* @param column The column to check, to see if it is a valid relationship.
* @param tableIds The IDs of the tables which currently exist.
*/
function shouldCopyRelationship(
column: { type: FieldType.LINK; tableId?: string },
tableIds: string[]
) {
return (
column.type === FieldType.LINK &&
column.tableId &&
tableIds.includes(column.tableId)
)
}

/**
* Similar function to the shouldCopyRelationship function, but instead this looks for options and boolean
* types. It is possible to switch a string -> options and a number -> boolean (and vice versus) need to make
* sure that these get copied over when tables are fetched. Also checks whether they are still valid, if a
* column has changed type in the external database then copying it over may not be possible.
* @param column The column to check for options or boolean type.
* @param fetchedColumn The fetched column to check for the type in the external database.
*/
function shouldCopySpecialColumn(
column: { type: FieldType },
fetchedColumn: { type: FieldType } | undefined
) {
const isFormula = column.type === FieldType.FORMULA
// column has been deleted, remove - formulas will never exist, always copy
if (!isFormula && column && !fetchedColumn) {
return false
}
const fetchedIsNumber =
!fetchedColumn || fetchedColumn.type === FieldType.NUMBER
return fetchedIsNumber && column.type === FieldType.BOOLEAN
}

/**
* Looks for columns which need to be copied over into the new table definitions, like relationships,
* options types and views.
@@ -338,6 +317,9 @@ function copyExistingPropsOver(
if (entities[tableName]?.created) {
table.created = entities[tableName]?.created
}
if (entities[tableName]?.constrained) {
table.constrained = entities[tableName]?.constrained
}

table.views = entities[tableName].views
@@ -346,45 +328,73 @@
if (!Object.prototype.hasOwnProperty.call(existingTableSchema, key)) {
continue
}

const column = existingTableSchema[key]

const existingColumnType = column?.type
const updatedColumnType = table.schema[key]?.type

// If the db column type changed to a non-compatible one, we want to re-fetch it
if (
updatedColumnType !== existingColumnType &&
!SWITCHABLE_TYPES[updatedColumnType]?.includes(existingColumnType)
) {
continue
const keepIfType = (...validTypes: PrimitiveTypes[]) => {
return (
isPrimitiveType(updatedColumnType) &&
table.schema[key] &&
validTypes.includes(updatedColumnType)
)
}

if (
column.type === FieldType.LINK &&
!shouldCopyRelationship(column, tableIds)
) {
continue
}

const specialTypes = [
FieldType.OPTIONS,
FieldType.LONGFORM,
FieldType.ARRAY,
FieldType.FORMULA,
FieldType.BB_REFERENCE,
]
if (
specialTypes.includes(column.type) &&
!shouldCopySpecialColumn(column, table.schema[key])
) {
continue
let shouldKeepSchema = false
switch (existingColumnType) {
case FieldType.FORMULA:
case FieldType.AUTO:
case FieldType.INTERNAL:
shouldKeepSchema = true
break

case FieldType.LINK:
shouldKeepSchema =
existingColumnType === FieldType.LINK &&
tableIds.includes(column.tableId)
break

case FieldType.STRING:
case FieldType.OPTIONS:
case FieldType.LONGFORM:
case FieldType.BARCODEQR:
shouldKeepSchema = keepIfType(FieldType.STRING)
break

case FieldType.NUMBER:
case FieldType.BOOLEAN:
shouldKeepSchema = keepIfType(FieldType.BOOLEAN, FieldType.NUMBER)
break

case FieldType.ARRAY:
case FieldType.ATTACHMENTS:
case FieldType.ATTACHMENT_SINGLE:
case FieldType.JSON:
case FieldType.BB_REFERENCE:
shouldKeepSchema = keepIfType(FieldType.JSON, FieldType.STRING)
break

case FieldType.DATETIME:
shouldKeepSchema = keepIfType(FieldType.DATETIME, FieldType.STRING)
break

case FieldType.BIGINT:
shouldKeepSchema = keepIfType(FieldType.BIGINT, FieldType.NUMBER)
break

default:
utils.unreachable(existingColumnType)
}

if (shouldKeepSchema) {
table.schema[key] = {
...existingTableSchema[key],
externalType:
existingTableSchema[key].externalType ||
table.schema[key].externalType,
table.schema[key]?.externalType,
}
}
}
}
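
Reading the new switch as a sketch (illustrative values, not from this diff): an existing column's schema is only carried over when the freshly fetched column reports a compatible primitive type; the real keepIfType also requires that the fetched column still exists.

// e.g. an existing OPTIONS column whose database column is re-fetched as a string type
const updatedColumnType: FieldType = FieldType.STRING
const validTypes: PrimitiveTypes[] = [FieldType.STRING] // what keepIfType receives for OPTIONS
const keep = isPrimitiveType(updatedColumnType) && validTypes.includes(updatedColumnType)
// keep === true, so the existing OPTIONS definition is copied over; had the
// database reported BOOLEAN instead, keep would be false and the fetched type would win
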
@@ -125,7 +125,7 @@ describe("validation and update of external table schemas", () => {
}

it("should correctly set utilised foreign keys to autocolumns", () => {
const response = populateExternalTableSchemas(cloneDeep(SCHEMA) as any)
const response = populateExternalTableSchemas(cloneDeep(SCHEMA))
const foreignKey = getForeignKeyColumn(response)
expect(foreignKey.autocolumn).toBe(true)
expect(foreignKey.autoReason).toBe(AutoReason.FOREIGN_KEY)
@@ -133,7 +133,7 @@ describe("validation and update of external table schemas", () => {
})

it("should correctly unset foreign keys when no longer used", () => {
const setResponse = populateExternalTableSchemas(cloneDeep(SCHEMA) as any)
const setResponse = populateExternalTableSchemas(cloneDeep(SCHEMA))
const beforeFk = getForeignKeyColumn(setResponse)
delete setResponse.entities!.client.schema.project
delete setResponse.entities!.project.schema.client
@@ -44,7 +44,10 @@ function checkForeignKeysAreAutoColumns(datasource: Datasource) {
if (shouldBeForeign && !column.autocolumn) {
column.autocolumn = true
column.autoReason = AutoReason.FOREIGN_KEY
} else if (column.autoReason === AutoReason.FOREIGN_KEY) {
} else if (
!shouldBeForeign &&
column.autoReason === AutoReason.FOREIGN_KEY
) {
delete column.autocolumn
delete column.autoReason
}
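
A sketch of the behavioural change (hypothetical column values, not from this diff): with the extra !shouldBeForeign condition, a column that is still a utilised foreign key no longer has its autocolumn flags stripped by the else branch.

const column = { autocolumn: true, autoReason: AutoReason.FOREIGN_KEY }
const shouldBeForeign = true
const deletedBefore = column.autoReason === AutoReason.FOREIGN_KEY // true: flags were stripped
const deletedAfter = !shouldBeForeign && column.autoReason === AutoReason.FOREIGN_KEY // false: flags are kept
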
@@ -5,6 +5,7 @@ import {
UpdateDatasourceResponse,
UpdateDatasourceRequest,
QueryJson,
BuildSchemaFromSourceResponse,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"
@@ -69,4 +70,13 @@ export class DatasourceAPI extends TestAPI {
expectations,
})
}

fetchSchema = async (id: string, expectations?: Expectations) => {
return await this._post<BuildSchemaFromSourceResponse>(
`/api/datasources/${id}/schema`,
{
expectations,
}
)
}
}
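
A hedged usage sketch of the new helper (assuming a test where config and datasourceId are already set up, as in the fetch schema test above):

await config.api.datasource.fetchSchema(datasourceId)
const updated = await config.api.datasource.get(datasourceId)
// assertions against `updated` can then verify that no tables or columns were dropped
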
@@ -43,7 +43,7 @@ export const checkDevelopmentEnvironment = () => {
error = "Must run via yarn once to generate environment."
}
if (error) {
console.error(error)
console.error("Error during development environment check", error)
process.exit(-1)
}
}
@@ -13,9 +13,7 @@ export interface Datasource extends Document {
config?: Record<string, any>
plus?: boolean
isSQL?: boolean
entities?: {
[key: string]: Table
}
entities?: Record<string, Table>
}

export enum RestAuthType {
@@ -91,6 +91,7 @@ export interface DateFieldMetadata extends Omit<BaseFieldSchema, "subtype"> {
type: FieldType.DATETIME
ignoreTimezones?: boolean
timeOnly?: boolean
dateOnly?: boolean
subtype?: AutoFieldSubType.CREATED_AT | AutoFieldSubType.UPDATED_AT
}
@@ -35,8 +35,7 @@ async function passportCallback(
info: { message: string } | null = null
) {
if (err) {
console.error("Authentication error")
console.error(err)
console.error("Authentication error", err)
console.trace(err)
return ctx.throw(403, info ? info : "Unauthorized")
}