Merge pull request #15106 from Budibase/fix-options-with-commas-and-types
Fix MySQL options column imports that have commas or other type names in them.
Commit 07b5dc6e57
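
Background on the fix: MySQL reports an ENUM column's type as a single string such as enum('Lincoln, Abraham','Washington, George','Fred'). The previous import code split that string on every comma, so an option that itself contained a comma was broken into fragments. The sketch below is illustrative only (it is not code from this repository) and contrasts the old and new parsing of such a type string:

// Illustrative sketch of the parsing change; runnable on its own, not taken
// verbatim from the Budibase codebase.
const columnType = "enum('Lincoln, Abraham','Washington, George','Fred')"

// Old approach: strip "enum(" and ")", split on every comma, then strip quotes.
// Options containing commas are cut into fragments and keep stray quotes.
const oldOptions = columnType
  .substring(5, columnType.length - 1)
  .split(",")
  .map(str => str.replace(/^'(.*)'$/, "$1"))
// => ["'Lincoln", " Abraham'", "'Washington", " George'", "Fred"]

// New approach: strip the "enum('" prefix and "')" suffix, then split on the
// "','" separator between options, so embedded commas survive.
const newOptions = columnType
  .substring(6, columnType.length - 2)
  .split("','")
// => ["Lincoln, Abraham", "Washington, George", "Fred"]

console.log(oldOptions, newOptions)

The new tests in the first hunk below cover exactly these cases, including options whose values collide with SQL type names.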

@@ -169,331 +169,422 @@ const descriptions = datasourceDescribe({
})

if (descriptions.length) {
  describe.each(descriptions)(
    "$dbName",
    ({ config, dsProvider, isOracle, isMSSQL }) => {
      let datasource: Datasource
      let rawDatasource: Datasource
      let client: Knex

      beforeEach(async () => {
        const ds = await dsProvider()
        rawDatasource = ds.rawDatasource!
        datasource = ds.datasource!
        client = ds.client!

        jest.clearAllMocks()
        nock.cleanAll()
      })

      describe("get", () => {
        it("should be able to get a datasource", async () => {
          const ds = await config.api.datasource.get(datasource._id!)
          expect(ds).toEqual({
            config: expect.any(Object),
            plus: datasource.plus,
            source: datasource.source,
            isSQL: true,
            type: "datasource_plus",
            _id: datasource._id,
            _rev: expect.any(String),
            createdAt: expect.any(String),
            updatedAt: expect.any(String),
          })
        })

        it("should not return database password", async () => {
          const ds = await config.api.datasource.get(datasource._id!)
          expect(ds.config!.password).toBe("--secret-value--")
        })
      })

      describe("list", () => {
        it("returns all the datasources", async () => {
          const datasources = await config.api.datasource.fetch()
          expect(datasources).toContainEqual(
            expect.objectContaining(datasource)
          )
        })
      })

      describe("put", () => {
        it("should update an existing datasource", async () => {
          const newName = generator.guid()
          datasource.name = newName
          const updatedDs = await config.api.datasource.update(datasource)
          expect(updatedDs.name).toEqual(newName)
          expect(events.datasource.updated).toHaveBeenCalledTimes(1)
        })

        it("should not overwrite database password with --secret-value--", async () => {
          const password = await context.doInAppContext(
            config.getAppId(),
            async () => {
              const ds = await sdk.datasources.get(datasource._id!)
              return ds.config!.password
            }
          )

          expect(password).not.toBe("--secret-value--")

          const ds = await config.api.datasource.get(datasource._id!)
          expect(ds.config!.password).toBe("--secret-value--")

          await config.api.datasource.update(
            await config.api.datasource.get(datasource._id!)
          )

          const newPassword = await context.doInAppContext(
            config.getAppId(),
            async () => {
              const ds = await sdk.datasources.get(datasource._id!)
              return ds.config!.password
            }
          )

          expect(newPassword).not.toBe("--secret-value--")
          expect(newPassword).toBe(password)
        })
      })

      describe("destroy", () => {
        it("deletes queries for the datasource after deletion and returns a success message", async () => {
          await config.api.query.save({
            datasourceId: datasource._id!,
            name: "Test Query",
            parameters: [],
            fields: {},
            schema: {},
            queryVerb: "read",
            transformer: null,
            readable: true,
          })

          await config.api.datasource.delete(datasource)
          const datasources = await config.api.datasource.fetch()
          expect(datasources).not.toContainEqual(
            expect.objectContaining(datasource)
          )
          expect(events.datasource.deleted).toHaveBeenCalledTimes(1)
        })
      })

      describe("schema", () => {
        it("fetching schema will not drop tables or columns", async () => {
          const datasourceId = datasource!._id!

          const simpleTable = await config.api.table.save(
            tableForDatasource(datasource, {
              name: "simple",
              schema: {
                name: {
                  name: "name",
                  type: FieldType.STRING,
                },
              },
            })
          )

          const stringName = "string"
          const fullSchema: {
            [type in SupportedSqlTypes]: FieldSchema & { type: type }
          } = {
            [FieldType.STRING]: {
              name: stringName,
              type: FieldType.STRING,
            },
            [FieldType.LONGFORM]: {
              name: "longform",
              type: FieldType.LONGFORM,
            },
            [FieldType.OPTIONS]: {
              name: "options",
              type: FieldType.OPTIONS,
              constraints: {
                presence: {
                  allowEmpty: false,
                },
                inclusion: ["1", "2", "3"],
              },
            },
            [FieldType.NUMBER]: {
              name: "number",
              type: FieldType.NUMBER,
            },
            [FieldType.BOOLEAN]: {
              name: "boolean",
              type: FieldType.BOOLEAN,
            },
            [FieldType.ARRAY]: {
              name: "array",
              type: FieldType.ARRAY,
              constraints: {
                type: JsonFieldSubType.ARRAY,
                inclusion: [],
              },
            },
            [FieldType.DATETIME]: {
              name: "datetime",
              type: FieldType.DATETIME,
              dateOnly: true,
              timeOnly: false,
            },
            [FieldType.LINK]: {
              name: "link",
              type: FieldType.LINK,
              tableId: simpleTable._id!,
              relationshipType: RelationshipType.ONE_TO_MANY,
              fieldName: "link",
            },
            [FieldType.FORMULA]: {
              name: "formula",
              type: FieldType.FORMULA,
              formula: "any formula",
            },
            [FieldType.BARCODEQR]: {
              name: "barcodeqr",
              type: FieldType.BARCODEQR,
            },
            [FieldType.BIGINT]: {
              name: "bigint",
              type: FieldType.BIGINT,
            },
            [FieldType.BB_REFERENCE]: {
              name: "bb_reference",
              type: FieldType.BB_REFERENCE,
              subtype: BBReferenceFieldSubType.USER,
            },
            [FieldType.BB_REFERENCE_SINGLE]: {
              name: "bb_reference_single",
              type: FieldType.BB_REFERENCE_SINGLE,
              subtype: BBReferenceFieldSubType.USER,
            },
          }

          await config.api.table.save(
            tableForDatasource(datasource, {
              name: "full",
              schema: fullSchema,
            })
          )

          const persisted = await config.api.datasource.get(datasourceId)
          await config.api.datasource.fetchSchema({ datasourceId })

          const updated = await config.api.datasource.get(datasourceId)
          const expected: Datasource = {
            ...persisted,
            entities:
              persisted?.entities &&
              Object.entries(persisted.entities).reduce<Record<string, Table>>(
                (acc, [tableName, table]) => {
                  acc[tableName] = expect.objectContaining({
                    ...table,
                    primaryDisplay: expect.not.stringMatching(
                      new RegExp(`^${table.primaryDisplay || ""}$`)
                    ),
                    schema: Object.entries(table.schema).reduce<TableSchema>(
                      (acc, [fieldName, field]) => {
                        acc[fieldName] = {
                          ...field,
                          externalType: allowUndefined(expect.any(String)),
                          constraints: allowUndefined(expect.any(Object)),
                          autocolumn: allowUndefined(expect.any(Boolean)),
                        }
                        return acc
                      },
                      {}
                    ),
                  })
                  return acc
                },
                {}
              ),

            _rev: expect.any(String),
            updatedAt: expect.any(String),
          }
          expect(updated).toEqual(expected)
        })

        !isOracle &&
          !isMSSQL &&
          it("can fetch options columns with a large number of options", async () => {
            const enumOptions = new Array(1000)
              .fill(0)
              .map((_, i) => i.toString())
              .toSorted()
            await client.schema.createTable("options", table => {
              table.increments("id").primary()
              table.enum("enum", enumOptions, {
                useNative: true,
                enumName: "enum",
              })
            })

            const resp = await config.api.datasource.fetchSchema({
              datasourceId: datasource._id!,
            })
            expect(resp.errors).toEqual({})

            const table = resp.datasource.entities!.options
            expect(
              table.schema.enum.constraints!.inclusion!.toSorted()
            ).toEqual(enumOptions)
          })

        !isOracle &&
          !isMSSQL &&
          it("can fetch options with commas in them", async () => {
            const enumOptions = [
              "Lincoln, Abraham",
              "Washington, George",
              "Fred",
              "Bob",
            ].toSorted()
            await client.schema.createTable("options", table => {
              table.increments("id").primary()
              table.enum("enum", enumOptions, {
                useNative: true,
                enumName: "enum",
              })
            })

            const resp = await config.api.datasource.fetchSchema({
              datasourceId: datasource._id!,
            })
            expect(resp.errors).toEqual({})

            const table = resp.datasource.entities!.options
            expect(
              table.schema.enum.constraints!.inclusion!.toSorted()
            ).toEqual(enumOptions)
          })

        !isOracle &&
          !isMSSQL &&
          it("can fetch options that may include other type names", async () => {
            const enumOptions = [
              "int",
              "bigint",
              "float",
              "numeric",
              "json",
              "map",
            ].toSorted()

            await client.schema.createTable("options", table => {
              table.increments("id").primary()
              table.enum("enum", enumOptions, {
                useNative: true,
                enumName: "enum",
              })
            })

            const resp = await config.api.datasource.fetchSchema({
              datasourceId: datasource._id!,
            })

            expect(resp.errors).toEqual({})

            const table = resp.datasource.entities!.options
            expect(
              table.schema.enum.constraints!.inclusion!.toSorted()
            ).toEqual(enumOptions)
          })
      })

      describe("verify", () => {
        it("should be able to verify the connection", async () => {
          await config.api.datasource.verify(
            {
              datasource: rawDatasource,
            },
            {
              body: {
                connected: true,
              },
            }
          )
        })

        it("should state an invalid datasource cannot connect", async () => {
          await config.api.datasource.verify(
            {
              datasource: {
                ...rawDatasource,
                config: {
                  ...rawDatasource.config,
                  password: "wrongpassword",
                },
              },
            },
            {
              body: {
                connected: false,
                error: /.*/, // error message differs between databases
              },
            }
          )
        })
      })

      describe("info", () => {
        it("should fetch information about a datasource with a single table", async () => {
          const existingTableNames = (
            await config.api.datasource.info(datasource)
          ).tableNames

          const tableName = generator.guid()
          await client.schema.createTable(tableName, table => {
            table.increments("id").primary()
            table.string("name")
          })

          const info = await config.api.datasource.info(datasource)
          expect(info.tableNames).toEqual(
            expect.arrayContaining([tableName, ...existingTableNames])
          )
          expect(info.tableNames).toHaveLength(existingTableNames.length + 1)
        })

        it("should fetch information about a datasource with multiple tables", async () => {
          const existingTableNames = (
            await config.api.datasource.info(datasource)
          ).tableNames

          const tableNames = [
            generator.guid(),
            generator.guid(),
            generator.guid(),
            generator.guid(),
          ]
          for (const tableName of tableNames) {
            await client.schema.createTable(tableName, table => {
              table.increments("id").primary()
              table.string("name")
            })
          }

          const info = await config.api.datasource.info(datasource)
          expect(info.tableNames).toEqual(
            expect.arrayContaining([...tableNames, ...existingTableNames])
          )
          expect(info.tableNames).toHaveLength(
            existingTableNames.length + tableNames.length
          )
        })
      })
    }
  )
}

@@ -322,9 +322,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
         presence: required && !isAuto && !hasDefault,
         externalType: column.Type,
         options: column.Type.startsWith("enum")
-          ? column.Type.substring(5, column.Type.length - 1)
-              .split(",")
-              .map(str => str.replace(/^'(.*)'$/, "$1"))
+          ? column.Type.substring(6, column.Type.length - 2).split("','")
           : undefined,
       })
     }
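
A related failure shows up when the ENUM options are themselves SQL type names, as in the new test above: the column's external type becomes a string like enum('int','bigint','float',...), and substring matching against the SQL type map then matches several unrelated types. The sketch below uses a stand-in SQL_TYPE_MAP (the real map lives in the schema utilities changed in the next hunk) purely to illustrate the collision:

// Illustrative only; SQL_TYPE_MAP and its values here are stand-ins, not the
// real Budibase mapping.
const externalType = "enum('int','bigint','float','numeric','json','map')"
const lowerCaseType = externalType.toLowerCase()

const SQL_TYPE_MAP: Record<string, string> = {
  int: "number",
  bigint: "bigint",
  float: "number",
  json: "json",
  enum: "options",
}

// Substring matching alone matches every type name that appears among the
// enum's options, not just "enum" itself.
const matches = Object.keys(SQL_TYPE_MAP).filter(external =>
  lowerCaseType.includes(external)
)
console.log(matches) // => ["int", "bigint", "float", "json", "enum"]

This is why the next hunk checks for an enum column before running the generic type-map loop.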

@@ -138,12 +138,22 @@ export function generateColumnDefinition(config: {
   let { externalType, autocolumn, name, presence, options } = config
   let foundType = FieldType.STRING
   const lowerCaseType = externalType.toLowerCase()
-  let matchingTypes = []
-  for (let [external, internal] of Object.entries(SQL_TYPE_MAP)) {
-    if (lowerCaseType.includes(external)) {
-      matchingTypes.push({ external, internal })
+  let matchingTypes: { external: string; internal: PrimitiveTypes }[] = []
+
+  // In at least MySQL, the external type of an ENUM column is "enum('option1',
+  // 'option2', ...)", which can potentially contain any type name as a
+  // substring. To get around this interfering with the loop below, we first
+  // check for an enum column and handle that separately.
+  if (lowerCaseType.startsWith("enum")) {
+    matchingTypes.push({ external: "enum", internal: FieldType.OPTIONS })
+  } else {
+    for (let [external, internal] of Object.entries(SQL_TYPE_MAP)) {
+      if (lowerCaseType.includes(external)) {
+        matchingTypes.push({ external, internal })
+      }
     }
   }
+
   // Set the foundType based the longest match
   if (matchingTypes.length > 0) {
     foundType = matchingTypes.reduce((acc, val) => {
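
The hunk ends mid-statement at the longest-match reduction. For context only, a continuation consistent with the comment above might look like the following sketch; this is an assumption, not the repository's actual code:

// Hypothetical continuation, shown only to make the cut-off context readable.
// Prefers the longest matching key so that, for example, "bigint" beats "int".
foundType = matchingTypes.reduce((acc, val) =>
  val.external.length > acc.external.length ? val : acc
).internal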