Merge pull request #3165 from Budibase/fix/sql-issues

Skip importing SQL tables that have no primary key and display an error instead; disable HBS helpers in query bindings
Michael Drury 2021-10-27 16:14:13 +01:00 committed by GitHub
commit 88c34a42c5
21 changed files with 311 additions and 922 deletions

View File

@@ -51,6 +51,7 @@
     "@spectrum-css/fieldlabel": "^3.0.1",
     "@spectrum-css/icon": "^3.0.1",
     "@spectrum-css/illustratedmessage": "^3.0.2",
+    "@spectrum-css/inlinealert": "^2.0.1",
     "@spectrum-css/inputgroup": "^3.0.2",
     "@spectrum-css/label": "^2.0.10",
     "@spectrum-css/link": "^3.1.1",

View File

@@ -0,0 +1,51 @@
+<script>
+  import "@spectrum-css/inlinealert/dist/index-vars.css"
+  import Button from "../Button/Button.svelte"
+
+  export let type = "info"
+  export let header = ""
+  export let message = ""
+  export let onConfirm = undefined
+
+  $: icon = selectIcon(type)
+
+  function selectIcon(alertType) {
+    switch (alertType) {
+      case "error":
+      case "negative":
+        return "Alert"
+      case "success":
+        return "CheckmarkCircle"
+      case "help":
+        return "Help"
+      default:
+        return "Info"
+    }
+  }
+</script>
+
+<div class="spectrum-InLineAlert spectrum-InLineAlert--{type}">
+  <svg
+    class="spectrum-Icon spectrum-Icon--sizeM spectrum-InLineAlert-icon"
+    focusable="false"
+    aria-hidden="true"
+  >
+    <use xlink:href="#spectrum-icon-18-{icon}" />
+  </svg>
+  <div class="spectrum-InLineAlert-header">{header}</div>
+  <div class="spectrum-InLineAlert-content">{message}</div>
+  {#if onConfirm}
+    <div class="spectrum-InLineAlert-footer">
+      <Button secondary on:click={onConfirm}>OK</Button>
+    </div>
+  {/if}
+</div>
+
+<style>
+  .spectrum-InLineAlert {
+    --spectrum-semantic-negative-border-color: #e34850;
+    --spectrum-semantic-positive-border-color: #2d9d78;
+    --spectrum-semantic-positive-icon-color: #2d9d78;
+    --spectrum-semantic-negative-icon-color: #e34850;
+  }
+</style>

View File

@@ -58,6 +58,7 @@ export { default as Pagination } from "./Pagination/Pagination.svelte"
 export { default as Badge } from "./Badge/Badge.svelte"
 export { default as StatusLight } from "./StatusLight/StatusLight.svelte"
 export { default as ColorPicker } from "./ColorPicker/ColorPicker.svelte"
+export { default as InlineAlert } from "./InlineAlert/InlineAlert.svelte"
 
 // Typography
 export { default as Body } from "./Typography/Body.svelte"

View File

@@ -136,6 +136,11 @@
   resolved "https://registry.yarnpkg.com/@spectrum-css/illustratedmessage/-/illustratedmessage-3.0.2.tgz#6a480be98b027e050b086e7899e40d87adb0a8c0"
   integrity sha512-dqnE8X27bGcO0HN8+dYx8O4o0dNNIAqeivOzDHhe2El+V4dTzMrNIerF6G0NLm3GjVf6XliwmitsZK+K6FmbtA==
 
+"@spectrum-css/inlinealert@^2.0.1":
+  version "2.0.1"
+  resolved "https://registry.yarnpkg.com/@spectrum-css/inlinealert/-/inlinealert-2.0.1.tgz#7521f88f6c845806403cc7d925773c7414e204a2"
+  integrity sha512-Xy5RCOwgurqUXuGQCsEDUduDd5408bmEpmFg+feynG7VFUgLFZWBeylSENB/OqjlFtO76PHXNVdHkhDscPIHTA==
+
 "@spectrum-css/inputgroup@^3.0.2":
   version "3.0.2"
   resolved "https://registry.yarnpkg.com/@spectrum-css/inputgroup/-/inputgroup-3.0.2.tgz#f1b13603832cbd22394f3d898af13203961f8691"

View File

@@ -1,6 +1,14 @@
 <script>
   import { goto } from "@roxi/routify"
-  import { Button, Heading, Body, Divider, Layout, Modal } from "@budibase/bbui"
+  import {
+    Button,
+    Heading,
+    Body,
+    Divider,
+    Layout,
+    Modal,
+    InlineAlert,
+  } from "@budibase/bbui"
   import { datasources, integrations, queries, tables } from "stores/backend"
   import { notifications } from "@budibase/bbui"
   import IntegrationConfigForm from "components/backend/DatasourceNavigator/TableIntegrationMenu/IntegrationConfigForm.svelte"

@@ -19,6 +27,7 @@
     ? Object.values(datasource.entities || {})
     : []
   $: relationships = getRelationships(plusTables)
+  $: schemaError = $datasources.schemaError
 
   function getRelationships(tables) {
     if (!tables || !Array.isArray(tables)) {

@@ -171,6 +180,14 @@
         your tables directly from the database and you can use them without
         having to write any queries at all.
       </Body>
+      {#if schemaError}
+        <InlineAlert
+          type="error"
+          header="Error fetching tables"
+          message={schemaError}
+          onConfirm={datasources.removeSchemaError}
+        />
+      {/if}
       <div class="query-list">
         {#each plusTables as table}
           <div class="query-list-item" on:click={() => onClickTable(table)}>

View File

@@ -5,12 +5,35 @@ import api from "../../builderStore/api"
 export const INITIAL_DATASOURCE_VALUES = {
   list: [],
   selected: null,
+  schemaError: null,
 }
 
 export function createDatasourcesStore() {
   const store = writable(INITIAL_DATASOURCE_VALUES)
   const { subscribe, update, set } = store
 
+  async function updateDatasource(response) {
+    if (response.status !== 200) {
+      throw new Error(await response.text())
+    }
+    const { datasource, error } = await response.json()
+    update(state => {
+      const currentIdx = state.list.findIndex(ds => ds._id === datasource._id)
+      const sources = state.list
+      if (currentIdx >= 0) {
+        sources.splice(currentIdx, 1, datasource)
+      } else {
+        sources.push(datasource)
+      }
+      return { list: sources, selected: datasource._id, schemaError: error }
+    })
+    return datasource
+  }
+
   return {
     subscribe,
     update,

@@ -46,61 +69,20 @@ export function createDatasourcesStore() {
       let url = `/api/datasources/${datasource._id}/schema`
       const response = await api.post(url)
-      const json = await response.json()
-
-      if (response.status !== 200) {
-        throw new Error(json.message)
-      }
-
-      update(state => {
-        const currentIdx = state.list.findIndex(ds => ds._id === json._id)
-        const sources = state.list
-        if (currentIdx >= 0) {
-          sources.splice(currentIdx, 1, json)
-        } else {
-          sources.push(json)
-        }
-        return { list: sources, selected: json._id }
-      })
-      return json
+      return updateDatasource(response)
     },
-    save: async (datasource, fetchSchema = false) => {
+    save: async (body, fetchSchema = false) => {
       let response
-      if (datasource._id) {
-        response = await api.put(
-          `/api/datasources/${datasource._id}`,
-          datasource
-        )
+      if (body._id) {
+        response = await api.put(`/api/datasources/${body._id}`, body)
       } else {
         response = await api.post("/api/datasources", {
-          datasource: datasource,
+          datasource: body,
           fetchSchema,
         })
       }
-      const json = await response.json()
-
-      if (response.status !== 200) {
-        throw new Error(json.message)
-      }
-
-      update(state => {
-        const currentIdx = state.list.findIndex(ds => ds._id === json._id)
-        const sources = state.list
-        if (currentIdx >= 0) {
-          sources.splice(currentIdx, 1, json)
-        } else {
-          sources.push(json)
-        }
-        return { list: sources, selected: json._id }
-      })
-      return json
+      return updateDatasource(response)
     },
     delete: async datasource => {
       const response = await api.delete(

@@ -115,6 +97,11 @@ export function createDatasourcesStore() {
       return response
     },
+    removeSchemaError: () => {
+      update(state => {
+        return { ...state, schemaError: null }
+      })
+    },
   }
 }
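
For context, a minimal sketch of how a caller might consume the store's new response shape; the saveAndReportErrors wrapper below is hypothetical and not part of this PR, though save() and schemaError are:

// Hypothetical consumer of the updated datasources store (sketch only).
import { get } from "svelte/store"
import { datasources } from "stores/backend"

async function saveAndReportErrors(body: any) {
  const datasource = await datasources.save(body, true) // fetchSchema = true
  const { schemaError } = get(datasources)
  if (schemaError) {
    // the builder UI surfaces this via InlineAlert and clears it with
    // datasources.removeSchemaError()
    console.warn(`Schema built with problems: ${schemaError}`)
  }
  return datasource
}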

View File

@@ -53,7 +53,7 @@ describe("Datasources Store", () => {
     })
-    expect(get(store).list).toEqual(expect.arrayContaining([SAVE_DATASOURCE]))
+    expect(get(store).list).toEqual(expect.arrayContaining([SAVE_DATASOURCE.datasource]))
   })
 
   it("deletes a datasource, updates the store and returns status message", async () => {
     api.get.mockReturnValue({ json: () => SOME_DATASOURCE})

View File

@@ -13,13 +13,15 @@ export const SOME_DATASOURCE = [
 ]
 
 export const SAVE_DATASOURCE = {
-  type: "datasource",
-  name: "CoolDB",
-  source: "REST",
-  config: {
-    url: "localhost",
-    defaultHeaders: {},
+  datasource: {
+    type: "datasource",
+    name: "CoolDB",
+    source: "REST",
+    config: {
+      url: "localhost",
+      defaultHeaders: {},
+    },
+    _id: "datasource_04b003a7b4a8428eadd3bb2f7eae0255",
+    _rev: "1-4e72002f1011e9392e655948469b7908",
   },
-  _id: "datasource_04b003a7b4a8428eadd3bb2f7eae0255",
-  _rev: "1-4e72002f1011e9392e655948469b7908",
 }

File diff suppressed because it is too large

View File

@@ -7,6 +7,7 @@ const {
   BudibaseInternalDB,
   getTableParams,
 } = require("../../db/utils")
+const { BuildSchemaErrors } = require("../../constants")
 const { integrations } = require("../../integrations")
 const { makeExternalQuery } = require("./row/utils")

@@ -43,13 +44,17 @@ exports.buildSchemaFromDb = async function (ctx) {
   const db = new CouchDB(ctx.appId)
   const datasource = await db.get(ctx.params.datasourceId)
 
-  const tables = await buildSchemaHelper(datasource)
+  const { tables, error } = await buildSchemaHelper(datasource)
   datasource.entities = tables
 
-  const response = await db.put(datasource)
-  datasource._rev = response.rev
-
-  ctx.body = datasource
+  const dbResp = await db.put(datasource)
+  datasource._rev = dbResp.rev
+
+  const response = { datasource }
+  if (error) {
+    response.error = error
+  }
+  ctx.body = response
 }
 
 exports.update = async function (ctx) {

@@ -85,13 +90,15 @@ exports.save = async function (ctx) {
     ...ctx.request.body.datasource,
   }
 
+  let schemaError = null
   if (fetchSchema) {
-    let tables = await buildSchemaHelper(datasource)
+    const { tables, error } = await buildSchemaHelper(datasource)
+    schemaError = error
     datasource.entities = tables
   }
 
-  const response = await db.put(datasource)
-  datasource._rev = response.rev
+  const dbResp = await db.put(datasource)
+  datasource._rev = dbResp.rev
 
   // Drain connection pools when configuration is changed
   if (datasource.source) {

@@ -101,9 +108,11 @@
     }
   }
 
-  ctx.status = 200
-  ctx.message = "Datasource saved successfully."
-  ctx.body = datasource
+  const response = { datasource }
+  if (schemaError) {
+    response.error = schemaError
+  }
+  ctx.body = response
 }
 
 exports.destroy = async function (ctx) {

@@ -143,5 +152,15 @@ const buildSchemaHelper = async datasource => {
   await connector.buildSchema(datasource._id, datasource.entities)
   datasource.entities = connector.tables
 
-  return connector.tables
+  const errors = connector.schemaErrors
+  let error = null
+  if (errors && Object.keys(errors).length > 0) {
+    const noKeyTables = Object.entries(errors)
+      .filter(entry => entry[1] === BuildSchemaErrors.NO_KEY)
+      .map(([name]) => name)
+    error = `No primary key constraint found for the following: ${noKeyTables.join(
+      ", "
+    )}`
+  }
+  return { tables: connector.tables, error }
 }
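
The save and schema endpoints now return the datasource wrapped in an object, with an optional error string. A minimal sketch of a client reading the new shape (the fetch wrapper below is illustrative, not part of this PR; the URL comes from the store change above):

// Illustrative only: reading the { datasource, error } response shape.
async function buildSchema(datasourceId: string) {
  const res = await fetch(`/api/datasources/${datasourceId}/schema`, {
    method: "POST",
  })
  const { datasource, error } = await res.json()
  if (error) {
    // e.g. "No primary key constraint found for the following: logs"
    console.warn(error)
  }
  return datasource
}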

View File

@@ -101,7 +101,9 @@ async function enrichQueryFields(fields, parameters = {}) {
       enrichedQuery[key] = await enrichQueryFields(fields[key], parameters)
     } else if (typeof fields[key] === "string") {
       // enrich string value as normal
-      enrichedQuery[key] = await processString(fields[key], parameters)
+      enrichedQuery[key] = await processString(fields[key], parameters, {
+        noHelpers: true,
+      })
     } else {
       enrichedQuery[key] = fields[key]
     }
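
To illustrate the effect of the noHelpers flag on query enrichment (a sketch, not code from this PR): plain context bindings in query fields still resolve, while helper calls are no longer executed.

import { processString } from "@budibase/string-templates"

async function demo() {
  // plain context bindings behave as before
  const sql = await processString(
    "SELECT * FROM users WHERE id = {{ id }}",
    { id: 1 },
    { noHelpers: true }
  )
  // helper syntax is not executed inside queries; the new test further
  // down expects "Invalid Binding" for this case
  const blocked = await processString("{{ avg 1 1 1 }}", {}, { noHelpers: true })
  return { sql, blocked }
}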

View File

@@ -26,8 +26,8 @@ describe("/datasources", () => {
         .expect('Content-Type', /json/)
         .expect(200)
 
-      expect(res.res.statusMessage).toEqual("Datasource saved successfully.")
-      expect(res.body.name).toEqual("Test")
+      expect(res.body.datasource.name).toEqual("Test")
+      expect(res.body.errors).toBeUndefined()
     })
   })

View File

@@ -1,7 +1,6 @@
 // mock out postgres for this
 jest.mock("pg")
-const { findLastKey } = require("lodash/fp")
 const setup = require("./utilities")
 const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
 const { basicQuery, basicDatasource } = setup.structures

View File

@@ -152,5 +152,9 @@ exports.MetadataTypes = {
   AUTOMATION_TEST_HISTORY: "automationTestHistory",
 }
 
+exports.BuildSchemaErrors = {
+  NO_KEY: "no_key",
+}
+
 // pass through the list from the auth/core lib
 exports.ObjectStoreBuckets = ObjectStoreBuckets

View File

@@ -0,0 +1,8 @@
+import { Table } from "../../definitions/common"
+
+export interface DatasourcePlus {
+  tables: Record<string, Table>
+  schemaErrors: Record<string, string>
+  buildSchema(datasourceId: string, entities: Record<string, Table>): any
+}
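
For illustration only (not part of this diff): a minimal class satisfying the interface might look like the sketch below. The class name, import paths and values are made up; the MySQL and Postgres changes further down show the real implementations.

import { Table } from "../definitions/common"
import { DatasourcePlus } from "./base/datasourcePlus"

// Hypothetical integration, shown only to demonstrate the contract:
// buildSchema fills `tables` and records per-table problems in `schemaErrors`.
class ExampleIntegration implements DatasourcePlus {
  public tables: Record<string, Table> = {}
  public schemaErrors: Record<string, string> = {}

  async buildSchema(datasourceId: string, entities: Record<string, Table>) {
    // pretend introspection found a single table with no primary key
    this.tables = {}
    this.schemaErrors = { logs: "no_key" }
  }
}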

View File

@@ -8,6 +8,7 @@ import {
 } from "../definitions/datasource"
 import { Table, TableSchema } from "../definitions/common"
 import { getSqlQuery } from "./utils"
+import { DatasourcePlus } from "./base/datasourcePlus"
 
 module MySQLModule {
   const mysql = require("mysql2")

@@ -15,7 +16,7 @@ module MySQLModule {
   const {
     buildExternalTableId,
     convertType,
-    copyExistingPropsOver,
+    finaliseExternalTables,
   } = require("./utils")
   const { FieldTypes } = require("../constants")

@@ -131,9 +132,11 @@
     })
   }
 
-  class MySQLIntegration extends Sql {
+  class MySQLIntegration extends Sql implements DatasourcePlus {
     private config: MySQLConfig
     private readonly client: any
+    public tables: Record<string, Table> = {}
+    public schemaErrors: Record<string, string> = {}
 
     constructor(config: MySQLConfig) {
       super("mysql")

@@ -185,10 +188,6 @@
             constraints,
           }
         }
-        // for now just default to first column
-        if (primaryKeys.length === 0) {
-          primaryKeys.push(descResp[0].Field)
-        }
         if (!tables[tableName]) {
           tables[tableName] = {
             _id: buildExternalTableId(datasourceId, tableName),

@@ -197,12 +196,12 @@
             schema,
           }
         }
-        copyExistingPropsOver(tableName, tables, entities)
       }
 
       this.client.end()
-      this.tables = tables
+      const final = finaliseExternalTables(tables, entities)
+      this.tables = final.tables
+      this.schemaErrors = final.errors
     }
 
     async create(query: SqlQuery | string) {

View File

@@ -7,6 +7,7 @@ import {
 } from "../definitions/datasource"
 import { Table } from "../definitions/common"
 import { getSqlQuery } from "./utils"
+import { DatasourcePlus } from "./base/datasourcePlus"
 
 module PostgresModule {
   const { Pool } = require("pg")

@@ -15,7 +16,7 @@ module PostgresModule {
   const {
     buildExternalTableId,
     convertType,
-    copyExistingPropsOver,
+    finaliseExternalTables,
   } = require("./utils")
   const { escapeDangerousCharacters } = require("../utilities")

@@ -132,10 +133,12 @@
     }
   }
 
-  class PostgresIntegration extends Sql {
+  class PostgresIntegration extends Sql implements DatasourcePlus {
     static pool: any
     private readonly client: any
     private readonly config: PostgresConfig
+    public tables: Record<string, Table> = {}
+    public schemaErrors: Record<string, string> = {}
 
     COLUMNS_SQL =
       "select * from information_schema.columns where not table_schema = 'information_schema' and not table_schema = 'pg_catalog'"

@@ -207,7 +210,7 @@
       if (!tables[tableName] || !tables[tableName].schema) {
         tables[tableName] = {
           _id: buildExternalTableId(datasourceId, tableName),
-          primary: tableKeys[tableName] || ["id"],
+          primary: tableKeys[tableName] || [],
           name: tableName,
           schema: {},
         }

@@ -232,10 +235,9 @@
         }
       }
 
-      for (let tableName of Object.keys(tables)) {
-        copyExistingPropsOver(tableName, tables, entities)
-      }
-      this.tables = tables
+      const final = finaliseExternalTables(tables, entities)
+      this.tables = final.tables
+      this.schemaErrors = final.errors
     }
 
     async create(query: SqlQuery | string) {

View File

@@ -1,8 +1,8 @@
 import { SqlQuery } from "../definitions/datasource"
-import { Datasource } from "../definitions/common"
+import { Datasource, Table } from "../definitions/common"
 import { SourceNames } from "../definitions/datasource"
 const { DocumentTypes, SEPARATOR } = require("../db/utils")
-const { FieldTypes } = require("../constants")
+const { FieldTypes, BuildSchemaErrors } = require("../constants")
 
 const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
 const ROW_ID_REGEX = /^\[.*]$/g

@@ -102,14 +102,14 @@ export function isIsoDateString(str: string) {
 }
 
 // add the existing relationships from the entities if they exist, to prevent them from being overridden
-export function copyExistingPropsOver(
+function copyExistingPropsOver(
   tableName: string,
-  tables: { [key: string]: any },
+  table: Table,
   entities: { [key: string]: any }
 ) {
   if (entities && entities[tableName]) {
     if (entities[tableName].primaryDisplay) {
-      tables[tableName].primaryDisplay = entities[tableName].primaryDisplay
+      table.primaryDisplay = entities[tableName].primaryDisplay
     }
     const existingTableSchema = entities[tableName].schema
     for (let key in existingTableSchema) {

@@ -117,8 +117,24 @@ export function copyExistingPropsOver(
         continue
       }
       if (existingTableSchema[key].type === "link") {
-        tables[tableName].schema[key] = existingTableSchema[key]
+        table.schema[key] = existingTableSchema[key]
      }
    }
  }
+  return table
+}
+
+export function finaliseExternalTables(tables: { [key: string]: any }, entities: { [key: string]: any }) {
+  const finalTables: { [key: string]: any } = {}
+  const errors: { [key: string]: string } = {}
+  for (let [name, table] of Object.entries(tables)) {
+    // make sure every table has a key
+    if (table.primary == null || table.primary.length === 0) {
+      errors[name] = BuildSchemaErrors.NO_KEY
+      continue
+    }
+    // make sure all previous props have been added back
+    finalTables[name] = copyExistingPropsOver(name, table, entities)
+  }
+  return { tables: finalTables, errors }
 }
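
A quick sketch of the intended behaviour of finaliseExternalTables (the table values below are illustrative): tables without a primary key are dropped from the result and reported in errors, which the datasource controller turns into the user-facing message.

import { finaliseExternalTables } from "./utils"

// Hypothetical introspection output: "users" has a key, "logs" does not.
const tables = {
  users: { _id: "tbl_users", name: "users", primary: ["id"], schema: {} },
  logs: { _id: "tbl_logs", name: "logs", primary: [], schema: {} },
}

const { tables: finalTables, errors } = finaliseExternalTables(tables, {})
// finalTables only contains "users"; errors is { logs: "no_key" }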

View File

@@ -311,7 +311,8 @@ class TestConfiguration {
   async createDatasource(config = null) {
     config = config || basicDatasource()
-    this.datasource = await this._req(config, null, controllers.datasource.save)
+    const response = await this._req(config, null, controllers.datasource.save)
+    this.datasource = response.datasource
     return this.datasource
   }

View File

@@ -6,6 +6,7 @@ const manifest = require("../manifest.json")
 
 const hbsInstance = handlebars.create()
 registerAll(hbsInstance)
+const hbsInstanceNoHelpers = handlebars.create()
 
 /**
  * utility function to check if the object is valid

@@ -24,17 +25,30 @@ function testObject(object) {
  * @param {object|array} object The input structure which is to be recursed, it is important to note that
  * if the structure contains any cycles then this will fail.
  * @param {object} context The context that handlebars should fill data from.
+ * @param {object} opts optional - specify some options for processing.
  * @returns {Promise<object|array>} The structure input, as fully updated as possible.
  */
-module.exports.processObject = async (object, context) => {
+module.exports.processObject = async (
+  object,
+  context,
+  opts = { noHelpers: false }
+) => {
   testObject(object)
   for (let key of Object.keys(object || {})) {
     if (object[key] != null) {
       let val = object[key]
       if (typeof val === "string") {
-        object[key] = await module.exports.processString(object[key], context)
+        object[key] = await module.exports.processString(
+          object[key],
+          context,
+          opts
+        )
       } else if (typeof val === "object") {
-        object[key] = await module.exports.processObject(object[key], context)
+        object[key] = await module.exports.processObject(
+          object[key],
+          context,
+          opts
+        )
       }
     }
   }

@@ -46,11 +60,16 @@ module.exports.processObject = async (object, context) => {
  * then nothing will occur.
  * @param {string} string The template string which is the filled from the context object.
  * @param {object} context An object of information which will be used to enrich the string.
+ * @param {object} opts optional - specify some options for processing.
  * @returns {Promise<string>} The enriched string, all templates should have been replaced if they can be.
  */
-module.exports.processString = async (string, context) => {
+module.exports.processString = async (
+  string,
+  context,
+  opts = { noHelpers: false }
+) => {
   // TODO: carry out any async calls before carrying out async call
-  return module.exports.processStringSync(string, context)
+  return module.exports.processStringSync(string, context, opts)
 }
 
 /**

@@ -59,16 +78,21 @@ module.exports.processString = async (string, context) => {
  * @param {object|array} object The input structure which is to be recursed, it is important to note that
  * if the structure contains any cycles then this will fail.
  * @param {object} context The context that handlebars should fill data from.
+ * @param {object} opts optional - specify some options for processing.
  * @returns {object|array} The structure input, as fully updated as possible.
  */
-module.exports.processObjectSync = (object, context) => {
+module.exports.processObjectSync = (
+  object,
+  context,
+  opts = { noHelpers: false }
+) => {
   testObject(object)
   for (let key of Object.keys(object || {})) {
     let val = object[key]
     if (typeof val === "string") {
-      object[key] = module.exports.processStringSync(object[key], context)
+      object[key] = module.exports.processStringSync(object[key], context, opts)
     } else if (typeof val === "object") {
-      object[key] = module.exports.processObjectSync(object[key], context)
+      object[key] = module.exports.processObjectSync(object[key], context, opts)
     }
   }
   return object

@@ -79,9 +103,14 @@ module.exports.processObjectSync = (object, context) => {
  * then nothing will occur. This is a pure sync call and therefore does not have the full functionality of the async call.
  * @param {string} string The template string which is the filled from the context object.
  * @param {object} context An object of information which will be used to enrich the string.
+ * @param {object} opts optional - specify some options for processing.
  * @returns {string} The enriched string, all templates should have been replaced if they can be.
  */
-module.exports.processStringSync = (string, context) => {
+module.exports.processStringSync = (
+  string,
+  context,
+  opts = { noHelpers: false }
+) => {
   if (!exports.isValid(string)) {
     return string
   }

@@ -91,9 +120,13 @@ module.exports.processStringSync = (string, context) => {
     throw "Cannot process non-string types."
   }
   try {
-    string = processors.preprocess(string)
+    const noHelpers = opts && opts.noHelpers
+    // finalising adds a helper, can't do this with no helpers
+    const shouldFinalise = !noHelpers
+    string = processors.preprocess(string, shouldFinalise)
     // this does not throw an error when template can't be fulfilled, have to try correct beforehand
-    const template = hbsInstance.compile(string, {
+    const instance = noHelpers ? hbsInstanceNoHelpers : hbsInstance
+    const template = instance.compile(string, {
       strict: false,
     })
     return processors.postprocess(

@@ -119,9 +152,10 @@ module.exports.makePropSafe = property => {
 /**
  * Checks whether or not a template string contains totally valid syntax (simply tries running it)
  * @param string The string to test for valid syntax - this may contain no templates and will be considered valid.
+ * @param opts optional - specify some options for processing.
  * @returns {boolean} Whether or not the input string is valid.
  */
-module.exports.isValid = string => {
+module.exports.isValid = (string, opts = { noHelpers: false }) => {
   const validCases = [
     "string",
     "number",

@@ -135,7 +169,8 @@
   // don't really need a real context to check if its valid
   const context = {}
   try {
-    hbsInstance.compile(processors.preprocess(string, false))(context)
+    const instance = opts && opts.noHelpers ? hbsInstanceNoHelpers : hbsInstance
+    instance.compile(processors.preprocess(string, false))(context)
     return true
   } catch (err) {
     const msg = err && err.message ? err.message : err
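
A short sketch of how the new opts parameter flows through the object variants (example values only, not code from this PR): every nested string is processed with the same options, so helpers can be disabled for a whole structure in one call.

import { processObjectSync } from "@budibase/string-templates"

const fields = {
  sql: "SELECT * FROM users WHERE name = {{ name }}",
  nested: { header: "{{ uppercase name }}" }, // helper call, not executed
}

// opts is passed down to every processStringSync call
const enriched = processObjectSync(fields, { name: "bob" }, { noHelpers: true })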

View File

@@ -11,6 +11,15 @@ describe("test the custom helpers we have applied", () => {
   })
 })
 
+describe("test that it can run without helpers", () => {
+  it("should be able to run without helpers", async () => {
+    const output = await processString("{{ avg 1 1 1 }}", {}, { noHelpers: true })
+    const valid = await processString("{{ avg 1 1 1 }}", {})
+    expect(valid).toBe("1")
+    expect(output).toBe("Invalid Binding")
+  })
+})
+
 describe("test the math helpers", () => {
   it("should be able to produce an absolute", async () => {
     const output = await processString("{{abs a}}", {

@@ -267,6 +276,7 @@ describe("test the comparison helpers", () => {
     )
     expect(output).toBe("Success")
   }
+
   it("should allow use of the lt helper", async () => {
     await compare("lt", 10, 15)
   })