Linting and updating csv parser test case to match new functionality.
parent 86eec3bb52
commit 0cf612029e
@@ -601,7 +601,10 @@ module External {
       throw `Unable to process query, table "${tableName}" not defined.`
     }
     // look for specific components of config which may not be considered acceptable
-    let { id, row, filters, sort, paginate, rows } = cleanupConfig(config, table)
+    let { id, row, filters, sort, paginate, rows } = cleanupConfig(
+      config,
+      table
+    )
     filters = buildFilters(id, filters || {}, table)
     const relationships = this.buildRelationships(table)
     // clean up row on ingress using schema
@@ -29,10 +29,7 @@ function generateSchema(
   for (let [key, column] of Object.entries(table.schema)) {
     // skip things that are already correct
     const oldColumn = oldTable ? oldTable.schema[key] : null
-    if (
-      (oldColumn && oldColumn.type) ||
-      (primaryKey === key && !isJunction)
-    ) {
+    if ((oldColumn && oldColumn.type) || (primaryKey === key && !isJunction)) {
       continue
     }
     switch (column.type) {
@@ -130,7 +130,7 @@ module PostgresModule {
  public tables: Record<string, Table> = {}
  public schemaErrors: Record<string, string> = {}

  COLUMNS_SQL!: string
  COLUMNS_SQL!: string

  PRIMARY_KEYS_SQL = `
    select tc.table_schema, tc.table_name, kc.column_name as primary_key
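The `!` in `COLUMNS_SQL!: string` is TypeScript's definite assignment assertion: it tells the compiler the property is initialised outside the constructor, which here happens in `setSchema()` below. A minimal standalone sketch of that pattern (class and option names are illustrative, not the module's real ones):

class PostgresLikeDatasource {
  // declared without an initialiser; "!" promises the compiler it is
  // assigned before it is read – setSchema() below does the assignment
  COLUMNS_SQL!: string

  constructor(private config: { schema?: string }) {}

  setSchema() {
    const schema = this.config.schema || "public"
    // scope the information_schema lookup to the configured schema
    this.COLUMNS_SQL = `select * from information_schema.columns where table_schema = '${schema}'`
  }
}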
@@ -165,11 +165,11 @@ module PostgresModule {

   setSchema() {
     if (!this.config.schema) {
-      this.config.schema = 'public'
+      this.config.schema = "public"
     }
-    this.client.on('connect', (client: any) => {
-      client.query(`SET search_path TO ${this.config.schema}`);
-    });
+    this.client.on("connect", (client: any) => {
+      client.query(`SET search_path TO ${this.config.schema}`)
+    })
     this.COLUMNS_SQL = `select * from information_schema.columns where table_schema = '${this.config.schema}'`
   }
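The `connect` handler relies on node-postgres emitting a `connect` event for every client the pool creates, so each connection runs `SET search_path` before it is used. A minimal sketch under that assumption (the `appSchema` variable and connection settings are illustrative, not the module's config):

import { Pool } from "pg"

// the schema name should come from trusted configuration, since it is
// interpolated straight into the SQL string
const appSchema = process.env.PG_SCHEMA || "public"

const pool = new Pool({ connectionString: process.env.DATABASE_URL })

// every pooled client defaults to the configured schema, so later queries
// such as the information_schema lookup stay scoped to it
pool.on("connect", client => {
  client.query(`SET search_path TO ${appSchema}`)
})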
@@ -95,7 +95,9 @@ async function transform({ schema, csvString, existingTable }) {
   const colParser = {}

   // make sure the table has all the columns required for import
-  schema = updateSchema({ schema, existingTable })
+  if (existingTable) {
+    schema = updateSchema({ schema, existingTable })
+  }

   for (let key of Object.keys(schema)) {
     colParser[key] = PARSERS[schema[key].type] || schema[key].type
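Wrapping the call in `if (existingTable)` means the CSV-derived schema is only reconciled against a table when one is actually being imported into; a fresh import keeps the schema untouched. If `updateSchema` behaves roughly like the merge sketched below (a stand-in written purely for illustration, not the real helper), the guard skips a merge that would have nothing to add:

type ColumnSchema = Record<string, { type: string }>

// hypothetical stand-in for updateSchema, named differently on purpose:
// make sure every column already on the table is present for the import
function mergeWithExistingTable(
  schema: ColumnSchema,
  existingTable: { schema: ColumnSchema }
): ColumnSchema {
  return { ...schema, ...existingTable.schema }
}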
@@ -3,19 +3,13 @@
exports[`CSV Parser transformation transforms a CSV file into JSON 1`] = `
Array [
  Object {
    "Address": "5 Sesame Street",
    "Age": 4324,
    "Name": "Bertå",
  },
  Object {
    "Address": "1 World Trade Center",
    "Age": 34,
    "Name": "Ernie",
  },
  Object {
    "Address": "44 Second Avenue",
    "Age": 23423,
    "Name": "Big Bird",
  },
]
`;
@@ -24,6 +24,9 @@ const SCHEMAS = {
+    Age: {
+      type: "omit",
+    },
     Name: {
       type: "string",
     },
   },
   BROKEN: {
     Address: {
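The new `Age: { type: "omit" }` entry exercises a column type that is excluded from the parsed output. If the parser underneath is csvtojson (an assumption based on the `colParser` object built in `transform`), `"omit"` is one of its built-in column directives:

import csv from "csvtojson"

// illustrative data matching the test fixtures; the real spec reads a file
const csvString = "Name,Age\nBertå,4324\nErnie,34"

csv({ colParser: { Age: "omit" } })
  .fromString(csvString)
  .then(rows => {
    // Age is dropped by the "omit" directive: [{ Name: "Bertå" }, { Name: "Ernie" }]
    console.log(rows)
  })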