Merge branch 'master' of github.com:budibase/budibase into budi-8483-consolidate-feature-flags-into-a-single-endpoint

Sam Rose 2024-07-24 16:10:45 +01:00
commit 46cd06bacf
37 changed files with 265 additions and 218 deletions

View File

@@ -164,7 +164,7 @@ jobs:
       - name: Pull testcontainers images
         run: |
-          docker pull mcr.microsoft.com/mssql/server:2022-latest &
+          docker pull mcr.microsoft.com/mssql/server:2022-CU13-ubuntu-22.04 &
           docker pull mysql:8.3 &
           docker pull postgres:16.1-bullseye &
           docker pull mongo:7.0-jammy &

View File

@@ -64,11 +64,6 @@ RUN mkdir -p /var/log/nginx && \
     touch /var/run/nginx.pid && \
     usermod -a -G tty www-data

-WORKDIR /
-RUN mkdir -p scripts/integrations/oracle
-COPY packages/server/scripts/integrations/oracle scripts/integrations/oracle
-RUN /bin/bash -e ./scripts/integrations/oracle/instantclient/linux/install.sh
-
 # setup minio
 WORKDIR /minio
 COPY scripts/install-minio.sh ./install.sh

View File

@ -1,6 +1,6 @@
{ {
"$schema": "node_modules/lerna/schemas/lerna-schema.json", "$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "2.29.22", "version": "2.29.24",
"npmClient": "yarn", "npmClient": "yarn",
"packages": [ "packages": [
"packages/*", "packages/*",

nx.json
View File

@@ -10,7 +10,18 @@
   },
   "targetDefaults": {
     "build": {
-      "inputs": ["{workspaceRoot}/scripts/*", "{workspaceRoot}/lerna.json"]
+      "inputs": [
+        "{workspaceRoot}/scripts/*",
+        "{workspaceRoot}/lerna.json",
+        "{workspaceRoot}/.github/workflows/*"
+      ]
+    },
+    "test": {
+      "inputs": [
+        "{workspaceRoot}/scripts/*",
+        "{workspaceRoot}/lerna.json",
+        "{workspaceRoot}/.github/workflows/*"
+      ]
     }
   },
   "namedInputs": {

View File

@ -435,6 +435,11 @@ class InternalBuilder {
)} = ? THEN 1 ELSE 0 END = 1`, )} = ? THEN 1 ELSE 0 END = 1`,
[value] [value]
) )
} else if (this.client === SqlClient.ORACLE) {
query = query[fnc](
`COALESCE(${quotedIdentifier(this.client, key)}, -1) = ?`,
[value]
)
} else { } else {
query = query[fnc]( query = query[fnc](
`COALESCE(${quotedIdentifier(this.client, key)} = ?, FALSE)`, `COALESCE(${quotedIdentifier(this.client, key)} = ?, FALSE)`,
@ -454,6 +459,11 @@ class InternalBuilder {
)} = ? THEN 1 ELSE 0 END = 0`, )} = ? THEN 1 ELSE 0 END = 0`,
[value] [value]
) )
} else if (this.client === SqlClient.ORACLE) {
query = query[fnc](
`COALESCE(${quotedIdentifier(this.client, key)}, -1) != ?`,
[value]
)
} else { } else {
query = query[fnc]( query = query[fnc](
`COALESCE(${quotedIdentifier(this.client, key)} != ?, TRUE)`, `COALESCE(${quotedIdentifier(this.client, key)} != ?, TRUE)`,
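Why the new branch exists: Oracle's SQL dialect (prior to 23ai) has no boolean expressions, so the generic `COALESCE(column = ?, FALSE)` form in the else branch is a syntax error on Oracle; coalescing the column itself to a sentinel preserves the null-safe comparison instead. A minimal standalone sketch of the idea with knex, where the `quote` helper stands in for the real `quotedIdentifier(this.client, key)`:

```typescript
import { Knex } from "knex"

// Illustrative identifier quoting only; the builder above uses
// quotedIdentifier(this.client, key).
const quote = (key: string) => `"${key.replace(/"/g, '""')}"`

// Oracle cannot evaluate COALESCE(col = ?, FALSE) because comparisons are
// not boolean-typed expressions there. Coalescing the column to a sentinel
// (-1) makes NULL rows compare as -1 and never match the bound value.
// Caveat: a bound value of -1 would collide with the sentinel.
function oracleNullSafeEquals(
  query: Knex.QueryBuilder,
  key: string,
  value: unknown
): Knex.QueryBuilder {
  return query.whereRaw(`COALESCE(${quote(key)}, -1) = ?`, [value])
}
```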

View File

@@ -12,7 +12,6 @@
   export let listItemKey
   export let draggable = true
   export let focus
-  export let bindings = []

   let zoneType = generate()
@@ -127,7 +126,6 @@
           anchor={anchors[draggableItem.id]}
           item={draggableItem.item}
           {...listTypeProps}
-          {bindings}
           on:change={onItemChanged}
         />
       </div>

View File

@@ -64,7 +64,9 @@
   items={columns.sortable}
   listItemKey={"_id"}
   listType={FieldSetting}
-  {bindings}
+  listTypeProps={{
+    bindings,
+  }}
 />

 <style>

View File

@@ -106,12 +106,24 @@
   }

   const handleHashChange = () => {
     const { open: sidePanelOpen } = $sidePanelStore
-    if (sidePanelOpen) {
+    // only close if the side panel is open and there are no onLoad "Open Side Panel" actions on the screen
+    if (
+      sidePanelOpen &&
+      !$screenStore.activeScreen.onLoad?.some(
+        item => item["##eventHandlerType"] === "Open Side Panel"
+      )
+    ) {
       sidePanelStore.actions.close()
     }
     const { open: modalOpen } = $modalStore
-    if (modalOpen) {
+    // only close if the modal is open and there are no onLoad "Open Modal" actions on the screen
+    if (
+      modalOpen &&
+      !$screenStore.activeScreen.onLoad?.some(
+        item => item["##eventHandlerType"] === "Open Modal"
+      )
+    ) {
       modalStore.actions.close()
     }
   }
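For context, a sketch of the screen shape the new guard inspects; the `##eventHandlerType` key comes from the diff above, while the rest of the structure is assumed for illustration:

```typescript
// Assumed shape of a screen's onLoad actions, for illustration only.
type OnLoadAction = { "##eventHandlerType": string; parameters?: unknown }

const activeScreen: { onLoad?: OnLoadAction[] } = {
  onLoad: [{ "##eventHandlerType": "Open Side Panel" }],
}

// The hashchange handler now skips closing when the screen itself opens
// the side panel on load, so a programmatically opened panel survives
// hash-based navigation.
const openedByScreen = activeScreen.onLoad?.some(
  item => item["##eventHandlerType"] === "Open Side Panel"
)
console.log(openedByScreen) // true -> handler leaves the panel open
```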

View File

@ -21,11 +21,6 @@ RUN apt-get update \
&& apt-get install -y --no-install-recommends g++ make python3 jq && apt-get install -y --no-install-recommends g++ make python3 jq
RUN yarn global add pm2 RUN yarn global add pm2
# Install client for oracle datasource
RUN apt-get install unzip libaio1
COPY packages/server/scripts/integrations/oracle/ scripts/integrations/oracle/
RUN /bin/bash -e scripts/integrations/oracle/instantclient/linux/x86-64/install.sh
# Install postgres client for pg_dump utils # Install postgres client for pg_dump utils
RUN apt update && apt upgrade -y \ RUN apt update && apt upgrade -y \
&& apt install software-properties-common apt-transport-https curl gpg -y \ && apt install software-properties-common apt-transport-https curl gpg -y \

View File

@@ -101,6 +101,7 @@
     "object-sizeof": "2.6.1",
     "openai": "^4.52.1",
     "openapi-types": "9.3.1",
+    "oracledb": "6.5.1",
     "pg": "8.10.0",
     "pouchdb": "7.3.0",
     "pouchdb-all-dbs": "1.1.1",
@@ -111,12 +112,12 @@
     "snowflake-promise": "^4.5.0",
     "socket.io": "4.7.5",
     "tar": "6.2.1",
+    "tmp": "0.2.3",
     "to-json-schema": "0.2.5",
     "uuid": "^8.3.2",
     "validate.js": "0.13.1",
     "worker-farm": "1.7.0",
-    "xml2js": "0.5.0",
-    "tmp": "0.2.3"
+    "xml2js": "0.5.0"
   },
   "devDependencies": {
     "@babel/preset-env": "7.16.11",
@@ -131,13 +132,13 @@
     "@types/lodash": "4.14.200",
     "@types/mssql": "9.1.4",
     "@types/node-fetch": "2.6.4",
-    "@types/oracledb": "5.2.2",
+    "@types/oracledb": "6.5.1",
     "@types/pg": "8.6.6",
     "@types/server-destroy": "1.0.1",
     "@types/supertest": "2.0.14",
     "@types/tar": "6.1.5",
-    "@types/uuid": "8.3.4",
     "@types/tmp": "0.2.6",
+    "@types/uuid": "8.3.4",
     "copyfiles": "2.4.1",
     "docker-compose": "0.23.17",
     "jest": "29.7.0",
@@ -156,9 +157,6 @@
     "update-dotenv": "1.1.1",
     "yargs": "13.2.4"
   },
-  "optionalDependencies": {
-    "oracledb": "5.3.0"
-  },
   "nx": {
     "targets": {
       "dev": {

View File

@@ -16,4 +16,4 @@ services:
       - oracle_data:/opt/oracle/oradata
 volumes:
   oracle_data:

View File

@@ -1,23 +0,0 @@
-#!/bin/bash
-
-# Must be root to continue
-if [[ $(id -u) -ne 0 ]] ; then echo "Please run as root" ; exit 1 ; fi
-
-# Allow for re-runs
-rm -rf /opt/oracle
-
-echo "Installing oracle instant client"
-
-# copy and unzip package
-mkdir -p /opt/oracle
-cp scripts/integrations/oracle/instantclient/linux/arm64/basiclite-19.10.zip /opt/oracle
-cd /opt/oracle
-unzip -qq basiclite-19.10.zip -d .
-rm *.zip
-mv instantclient* instantclient
-
-# update runtime link path
-sh -c "echo /opt/oracle/instantclient > /etc/ld.so.conf.d/oracle-instantclient.conf"
-ldconfig /etc/ld.so.conf.d
-
-echo "Installation complete"

View File

@@ -1,10 +0,0 @@
-#!/bin/bash
-SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]:-$0}"; )" &> /dev/null && pwd 2> /dev/null; )"
-if [[ $TARGETARCH == arm* ]] ;
-then
-  echo "Installing ARM Oracle instant client..."
-  $SCRIPT_DIR/arm64/install.sh
-else
-  echo "Installing x86-64 Oracle instant client..."
-  $SCRIPT_DIR/x86-64/install.sh
-fi

View File

@@ -1,23 +0,0 @@
-#!/bin/bash
-
-# Must be root to continue
-if [[ $(id -u) -ne 0 ]] ; then echo "Please run as root" ; exit 1 ; fi
-
-# Allow for re-runs
-rm -rf /opt/oracle
-
-echo "Installing oracle instant client"
-
-# copy and unzip package
-mkdir -p /opt/oracle
-cp scripts/integrations/oracle/instantclient/linux/x86-64/basiclite-21.4.zip /opt/oracle
-cd /opt/oracle
-unzip -qq basiclite-21.4.zip -d .
-rm *.zip
-mv instantclient* instantclient
-
-# update runtime link path
-sh -c "echo /opt/oracle/instantclient > /etc/ld.so.conf.d/oracle-instantclient.conf"
-ldconfig /etc/ld.so.conf.d
-
-echo "Installation complete"

View File

@@ -10,16 +10,16 @@ To install oracle express edition simply run `docker-compose up`
 - A single instance pluggable database (PDB) will be created named `xepdb1`
 - The default password is configured in the compose file as `oracle`
 - The `system` and `pdbadmin` users share this password

 ## Instant Client

 Before oracle can be connected to from nodejs, the oracle client must be installed.
 For more information see https://www.oracle.com/database/technologies/instant-client/downloads.html

 **Important**
 - Oracle client is supported only on **x86-64 architecture**
 - Oracle client is **not supported on Mac ARM architecture**

 ### Linux

 Run the provided install script for linux from the `server` root path:
@@ -29,7 +29,7 @@ sudo /bin/bash -e scripts/integrations/oracle/instantclient/linux/x86-64/install
 ```
 For more information see: https://www.oracle.com/database/technologies/instant-client/linux-x86-64-downloads.html#ic_x64_inst

 ### Mac
 **This has not yet been tested**
 See: https://www.oracle.com/database/technologies/instant-client/macos-intel-x86-downloads.html#ic_osx_inst
@@ -64,7 +64,7 @@ grant create session,
 to &USERNAME;
 ```
 To set the password for the sales schema use:
 ```sql
 define USERNAME = sales
@@ -80,11 +80,10 @@ docker exec -it oracle-xe sqlplus -l sales/sales@localhost:1521/xepdb1
 ## HR Schema
 The `HR` schema is populated with dummy data by default in oracle for testing purposes.
 To connect to the HR schema first update the user password and unlock the account by performing
 ```sql
 ALTER USER hr ACCOUNT UNLOCK;
 ALTER USER hr IDENTIFIED BY hr;
 ```
 You should now be able to connect to the hr schema using the credentials hr/hr

View File

@@ -92,25 +92,6 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
   }
 }

-export async function find(ctx: UserCtx): Promise<Row> {
-  const id = ctx.params.rowId
-  const tableId = utils.getTableId(ctx)
-  const row = await sdk.rows.external.getRow(tableId, id, {
-    relationships: true,
-  })
-  if (!row) {
-    ctx.throw(404)
-  }
-  const table = await sdk.tables.getTable(tableId)
-  // Preserving links, as the outputProcessing does not support external rows yet and we don't need it in this use case
-  return await outputProcessing(table, row, {
-    squash: true,
-    preserveLinks: true,
-  })
-}
-
 export async function destroy(ctx: UserCtx) {
   const tableId = utils.getTableId(ctx)
   const _id = ctx.request.body._id

View File

@@ -117,7 +117,9 @@ export async function fetch(ctx: any) {
 export async function find(ctx: UserCtx<void, GetRowResponse>) {
   const tableId = utils.getTableId(ctx)
-  ctx.body = await pickApi(tableId).find(ctx)
+  const rowId = ctx.params.rowId
+
+  ctx.body = await sdk.rows.find(tableId, rowId)
 }

 function isDeleteRows(input: any): input is DeleteRows {
@@ -278,7 +280,8 @@ export async function downloadAttachment(ctx: UserCtx) {
   const { columnName } = ctx.params
   const tableId = utils.getTableId(ctx)
-  const row = await pickApi(tableId).find(ctx)
+  const rowId = ctx.params.rowId
+  const row = await sdk.rows.find(tableId, rowId)
   const table = await sdk.tables.getTable(tableId)
   const columnSchema = table.schema[columnName]

View File

@@ -32,7 +32,7 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
   try {
     oldRow = await outputProcessing(
       dbTable,
-      await utils.findRow(ctx, tableId, inputs._id!)
+      await utils.findRow(tableId, inputs._id!)
     )
   } catch (err) {
     if (isUserTable) {
@@ -96,15 +96,6 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
   return { ...result, oldRow }
 }

-export async function find(ctx: UserCtx): Promise<Row> {
-  const tableId = utils.getTableId(ctx),
-    rowId = ctx.params.rowId
-  const table = await sdk.tables.getTable(tableId)
-  let row = await utils.findRow(ctx, tableId, rowId)
-  row = await outputProcessing(table, row)
-  return row
-}
-
 export async function destroy(ctx: UserCtx) {
   const db = context.getAppDB()
   const tableId = utils.getTableId(ctx)
@@ -195,7 +186,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
     sdk.tables.getTable(tableId),
     linkRows.getLinkDocuments({ tableId, rowId, fieldName }),
   ])
-  let row = await utils.findRow(ctx, tableId, rowId)
+  let row = await utils.findRow(tableId, rowId)
   row = await outputProcessing(table, row)
   const linkVals = links as LinkDocumentValue[]

View File

@@ -1,5 +1,5 @@
 import { InternalTables } from "../../../../db/utils"
-import * as userController from "../../user"
 import { context } from "@budibase/backend-core"
 import {
   Ctx,
@@ -8,7 +8,6 @@ import {
   RelationshipsJson,
   Row,
   Table,
-  UserCtx,
 } from "@budibase/types"
 import {
   processDates,
@@ -24,6 +23,7 @@ import {
 import sdk from "../../../../sdk"
 import { processStringSync } from "@budibase/string-templates"
 import validateJs from "validate.js"
+import { getFullUser } from "../../../../utilities/users"

 validateJs.extend(validateJs.validators.datetime, {
   parse: function (value: string) {
@@ -63,16 +63,12 @@ export async function processRelationshipFields(
   return row
 }

-export async function findRow(ctx: UserCtx, tableId: string, rowId: string) {
+export async function findRow(tableId: string, rowId: string) {
   const db = context.getAppDB()
   let row: Row
   // TODO remove special user case in future
   if (tableId === InternalTables.USER_METADATA) {
-    ctx.params = {
-      id: rowId,
-    }
-    await userController.findMetadata(ctx)
-    row = ctx.body
+    row = await getFullUser(rowId)
   } else {
     row = await db.get(rowId)
   }

View File

@@ -36,6 +36,7 @@ import { generator, mocks } from "@budibase/backend-core/tests"
 import _, { merge } from "lodash"
 import * as uuid from "uuid"
 import { Knex } from "knex"
+import { InternalTables } from "../../../db/utils"

 const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
 tk.freeze(timestamp)
@@ -804,6 +805,23 @@
         status: 404,
       })
     })
+
+    isInternal &&
+      it("can search row from user table", async () => {
+        const res = await config.api.row.get(
+          InternalTables.USER_METADATA,
+          config.userMetadataId!
+        )
+
+        expect(res).toEqual({
+          ...config.getUser(),
+          _id: config.userMetadataId!,
+          _rev: expect.any(String),
+          roles: undefined,
+          roleId: "ADMIN",
+          tableId: InternalTables.USER_METADATA,
+        })
+      })
   })

   describe("fetch", () => {

View File

@@ -41,7 +41,7 @@ const DEFINITIONS: Record<SourceName, Integration | undefined> = {
   [SourceName.GOOGLE_SHEETS]: googlesheets.schema,
   [SourceName.REDIS]: redis.schema,
   [SourceName.SNOWFLAKE]: snowflake.schema,
-  [SourceName.ORACLE]: undefined,
+  [SourceName.ORACLE]: oracle.schema,
   [SourceName.BUDIBASE]: undefined,
 }
@@ -64,20 +64,10 @@ const INTEGRATIONS: Record<SourceName, IntegrationBaseConstructor | undefined> =
   [SourceName.GOOGLE_SHEETS]: googlesheets.integration,
   [SourceName.REDIS]: redis.integration,
   [SourceName.SNOWFLAKE]: snowflake.integration,
-  [SourceName.ORACLE]: undefined,
+  [SourceName.ORACLE]: oracle.integration,
   [SourceName.BUDIBASE]: undefined,
 }

-// optionally add oracle integration if the oracle binary can be installed
-if (
-  process.arch &&
-  !process.arch.startsWith("arm") &&
-  oracle.integration.isInstalled()
-) {
-  DEFINITIONS[SourceName.ORACLE] = oracle.schema
-  INTEGRATIONS[SourceName.ORACLE] = oracle.integration
-}
-
 export async function getDefinition(
   source: SourceName
 ): Promise<Integration | undefined> {

View File

@@ -24,7 +24,7 @@ import {
   getSqlQuery,
   HOST_ADDRESS,
 } from "./utils"
-import {
+import oracledb, {
   BindParameters,
   Connection,
   ConnectionAttributes,
@@ -36,13 +36,7 @@ import { sql } from "@budibase/backend-core"

 const Sql = sql.Sql

-let oracledb: any
-try {
-  oracledb = require("oracledb")
-  oracledb.outFormat = oracledb.OUT_FORMAT_OBJECT
-} catch (err) {
-  console.log("ORACLEDB is not installed")
-}
+oracledb.outFormat = oracledb.OUT_FORMAT_OBJECT

 interface OracleConfig {
   host: string
@@ -158,10 +152,6 @@ class OracleIntegration extends Sql implements DatasourcePlus {
     return parts.join(" || ")
   }

-  static isInstalled() {
-    return oracledb != null
-  }
-
   /**
    * Map the flat tabular columns and constraints data into a nested object
    */
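The unconditional `import oracledb` works because node-oracledb 6.x runs in its pure-JavaScript "Thin" mode by default and no longer needs the Instant Client libraries, which is what lets this commit delete the install scripts and the `isInstalled()` guard. A minimal connection sketch; the credentials and connect string mirror the docker-compose README above, everything else is illustrative:

```typescript
import oracledb from "oracledb"

// node-oracledb 6 defaults to Thin mode: no native Instant Client needed.
oracledb.outFormat = oracledb.OUT_FORMAT_OBJECT

async function ping(): Promise<void> {
  const connection = await oracledb.getConnection({
    user: "sales",
    password: "sales",
    connectString: "localhost:1521/xepdb1",
  })
  try {
    const result = await connection.execute("SELECT 1 AS OK FROM DUAL")
    console.log(result.rows) // [ { OK: 1 } ]
  } finally {
    await connection.close()
  }
}

ping().catch(console.error)
```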

View File

@@ -208,4 +208,38 @@ describe("SQL query builder", () => {
       sql: `select * from (select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`,
     })
   })
+
+  it("should use an oracle compatible coalesce query for oracle when using the equals filter", () => {
+    let query = new Sql(SqlClient.ORACLE, limit)._query(
+      generateReadJson({
+        filters: {
+          equal: {
+            name: "John",
+          },
+        },
+      })
+    )
+
+    expect(query).toEqual({
+      bindings: ["John", limit, 5000],
+      sql: `select * from (select * from (select * from (select * from "test" where COALESCE("test"."name", -1) = :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`,
+    })
+  })
+
+  it("should use an oracle compatible coalesce query for oracle when using the not equals filter", () => {
+    let query = new Sql(SqlClient.ORACLE, limit)._query(
+      generateReadJson({
+        filters: {
+          notEqual: {
+            name: "John",
+          },
+        },
+      })
+    )
+
+    expect(query).toEqual({
+      bindings: ["John", limit, 5000],
+      sql: `select * from (select * from (select * from (select * from "test" where COALESCE("test"."name", -1) != :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`,
+    })
+  })
 })

View File

@@ -9,7 +9,9 @@ let ports: Promise<testContainerUtils.Port[]>
 export async function getDatasource(): Promise<Datasource> {
   if (!ports) {
     ports = startContainer(
-      new GenericContainer("mcr.microsoft.com/mssql/server:2022-latest")
+      new GenericContainer(
+        "mcr.microsoft.com/mssql/server:2022-CU13-ubuntu-22.04"
+      )
         .withExposedPorts(1433)
         .withEnvironment({
           ACCEPT_EULA: "Y",

View File

@@ -1,4 +1,5 @@
 import { IncludeRelationship, Operation, Row } from "@budibase/types"
+import { HTTPError } from "@budibase/backend-core"
 import { handleRequest } from "../../../api/controllers/row/external"
 import { breakRowIdField } from "../../../integrations/utils"
 import sdk from "../../../sdk"
@@ -53,7 +54,7 @@ export async function save(
   const rowId = response.row._id
   if (rowId) {
-    const row = await sdk.rows.external.getRow(tableId, rowId, {
+    const row = await getRow(tableId, rowId, {
       relationships: true,
     })
     return {
@@ -67,3 +68,20 @@ export async function save(
     return response
   }
 }
+
+export async function find(tableId: string, rowId: string): Promise<Row> {
+  const row = await getRow(tableId, rowId, {
+    relationships: true,
+  })
+
+  if (!row) {
+    throw new HTTPError("Row not found", 404)
+  }
+
+  const table = await sdk.tables.getTable(tableId)
+  // Preserving links, as the outputProcessing does not support external rows yet and we don't need it in this use case
+  return await outputProcessing(table, row, {
+    squash: true,
+    preserveLinks: true,
+  })
+}
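Throwing `HTTPError` instead of calling `ctx.throw(404)` (as the deleted controller version did) keeps the sdk layer free of any Koa context. A hypothetical error middleware showing how such an error could be mapped back to a response; the `status` field on `HTTPError` is assumed:

```typescript
import { HTTPError } from "@budibase/backend-core"

// Hypothetical Koa-style middleware: the sdk throws a transport-agnostic
// HTTPError and the HTTP layer translates it into a status code.
async function errorHandler(ctx: any, next: () => Promise<void>) {
  try {
    await next()
  } catch (err) {
    if (err instanceof HTTPError) {
      ctx.status = err.status // assumed field carrying the 404 above
      ctx.body = { message: err.message }
    } else {
      throw err
    }
  }
}
```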

View File

@@ -1,10 +1,15 @@
-import { db } from "@budibase/backend-core"
+import { context, db } from "@budibase/backend-core"
 import { Row } from "@budibase/types"
 import sdk from "../../../sdk"
 import cloneDeep from "lodash/fp/cloneDeep"
 import { finaliseRow } from "../../../api/controllers/row/staticFormula"
-import { inputProcessing } from "../../../utilities/rowProcessor"
+import {
+  inputProcessing,
+  outputProcessing,
+} from "../../../utilities/rowProcessor"
 import * as linkRows from "../../../db/linkedRows"
+import { InternalTables } from "../../../db/utils"
+import { getFullUser } from "../../../utilities/users"

 export async function save(
   tableId: string,
@@ -47,3 +52,26 @@ export async function save(
     updateFormula: true,
   })
 }
+
+export async function find(tableId: string, rowId: string): Promise<Row> {
+  const table = await sdk.tables.getTable(tableId)
+  let row = await findRow(tableId, rowId)
+  row = await outputProcessing(table, row)
+  return row
+}
+
+async function findRow(tableId: string, rowId: string) {
+  const db = context.getAppDB()
+  let row: Row
+  // TODO remove special user case in future
+  if (tableId === InternalTables.USER_METADATA) {
+    row = await getFullUser(rowId)
+  } else {
+    row = await db.get(rowId)
+  }
+  if (row.tableId !== tableId) {
+    throw "Supplied tableId does not match the rows tableId"
+  }
+  return row
+}

View File

@ -34,3 +34,7 @@ export async function save(
) { ) {
return pickApi(tableId).save(tableId, row, userId) return pickApi(tableId).save(tableId, row, userId)
} }
export async function find(tableId: string, rowId: string) {
return pickApi(tableId).find(tableId, rowId)
}
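The new `find` reuses the same `pickApi` dispatch as `save` above, so callers never care whether a table is internal or external. `pickApi` itself is not part of this diff; a sketch of its assumed shape:

```typescript
import * as external from "./external"
import * as internal from "./internal"

// Assumed: external table IDs embed their datasource ID, so the table ID
// alone is enough to pick an implementation. The prefix check below is
// illustrative, not the real helper.
const isExternalTableID = (tableId: string) => tableId.startsWith("datasource")

function pickApi(tableId: string) {
  return isExternalTableID(tableId) ? external : internal
}
```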

View File

@@ -80,18 +80,31 @@ export async function startup(
     const address = server.address() as AddressInfo
     env._set("PORT", address.port)
   }
+
+  console.log("Emitting port event")
   eventEmitter.emitPort(env.PORT)
+
+  console.log("Initialising file system")
   fileSystem.init()
+
+  console.log("Initialising redis")
   await redis.init()
+
+  console.log("Initialising writethrough cache")
   cache.docWritethrough.init()
+
+  console.log("Initialising events")
   eventInit()
+
   if (app && server) {
+    console.log("Initialising websockets")
     initialiseWebsockets(app, server)
   }

   // run migrations on startup if not done via http
   // not recommended in a clustered environment
   if (!env.HTTP_MIGRATIONS && !env.isTest()) {
+    console.log("Running migrations")
     try {
       await migrations.migrate()
     } catch (e) {
@@ -107,12 +120,15 @@
     env.PLUGINS_DIR &&
     fs.existsSync(env.PLUGINS_DIR)
   ) {
+    console.log("Monitoring plugin directory")
     watch()
   }

   // check for version updates
+  console.log("Checking for version updates")
   await installation.checkInstallVersion()

+  console.log("Initialising queues")
   // get the references to the queue promises, don't await as
   // they will never end, unless the processing stops
   let queuePromises = []
@@ -126,6 +142,7 @@
   }
   queuePromises.push(initPro())
   if (app) {
+    console.log("Initialising routes")
     // bring routes online as final step once everything ready
     await initRoutes(app)
   }
@@ -141,6 +158,7 @@
     bbAdminEmail &&
     bbAdminPassword
   ) {
+    console.log("Initialising admin user")
     const tenantId = tenancy.getTenantId()
     await tenancy.doInTenant(tenantId, async () => {
       const exists = await users.doesUserExist(bbAdminEmail)
@@ -171,5 +189,6 @@
     })
   }

+  console.log("Initialising JS runner")
   jsRunner.init()
 }

View File

@@ -1,4 +1,7 @@
-import { HelperFunctionBuiltin } from "../src/helpers/constants"
+import {
+  HelperFunctionBuiltin,
+  EXTERNAL_FUNCTION_COLLECTIONS,
+} from "../src/helpers/constants"
 import { readFileSync, writeFileSync } from "fs"
 import { marked } from "marked"
 import { join, dirname } from "path"
@@ -14,21 +17,6 @@ type HelperInfo = {
   tags?: any[]
 }

-/**
- * full list of supported helpers can be found here:
- * https://github.com/budibase/handlebars-helpers
- */
-const COLLECTIONS = [
-  "math",
-  "array",
-  "number",
-  "url",
-  "string",
-  "comparison",
-  "object",
-  "uuid",
-]
 const FILENAME = join(__dirname, "..", "src", "manifest.json")
 const outputJSON: any = {}
 const ADDED_HELPERS = {
@@ -140,7 +128,7 @@ const excludeFunctions: Record<string, string[]> = { string: ["raw"] }
  */
 function run() {
   const foundNames: string[] = []
-  for (let collection of COLLECTIONS) {
+  for (let collection of EXTERNAL_FUNCTION_COLLECTIONS) {
     const collectionFile = readFileSync(
       `${dirname(
         require.resolve("@budibase/handlebars-helpers")

View File

@@ -58,7 +58,7 @@ function buildList(parts: string[], value: any) {
   if (!value) {
     return parts.length > 1 ? `${build()}` : build()
   } else {
-    return parts.length === 0 ? value : `${value}, ${build()}`
+    return parts.length === 0 ? value : `${build()}, ${value}`
   }
 }
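The swapped interpolation fixes argument ordering when a nested sub-expression follows plain arguments in a helper call: previously the just-built nested value was emitted before the arguments that preceded it in the template (the updated tests further down show the effect). A condensed sketch of the corrected function; the `build` helper body is assumed:

```typescript
// parts: already-converted arguments; value: a just-converted nested
// sub-expression, if any. The fix keeps template order: earlier arguments
// first, then the nested expression.
function buildList(parts: string[], value: any): string {
  const build = () => parts.join(", ") // assumed join; not shown in the diff
  if (!value) {
    return parts.length > 1 ? `${build()}` : build()
  } else {
    return parts.length === 0 ? value : `${build()}, ${value}`
  }
}

// buildList(['$("val1")'], 'helpers.avg($("val2"), $("val3"))')
//   -> '$("val1"), helpers.avg($("val2"), $("val3"))'
```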

View File

@ -15,6 +15,22 @@ export const HelperFunctionBuiltin = [
"with", "with",
] ]
/**
* full list of supported helpers can be found here:
* https://github.com/Budibase/handlebars-helpers
*/
export const EXTERNAL_FUNCTION_COLLECTIONS = [
"math",
"array",
"number",
"url",
"string",
"comparison",
"object",
"regex",
"uuid",
]
export const HelperFunctionNames = { export const HelperFunctionNames = {
OBJECT: "object", OBJECT: "object",
ALL: "all", ALL: "all",

View File

@@ -2,26 +2,12 @@

 import helpers from "@budibase/handlebars-helpers"
 import { date, duration } from "./date"
-import { HelperFunctionBuiltin } from "./constants"
+import {
+  HelperFunctionBuiltin,
+  EXTERNAL_FUNCTION_COLLECTIONS,
+} from "./constants"
 import Handlebars from "handlebars"

-/**
- * full list of supported helpers can be found here:
- * https://github.com/Budibase/handlebars-helpers
- */
-const EXTERNAL_FUNCTION_COLLECTIONS = [
-  "math",
-  "array",
-  "number",
-  "url",
-  "string",
-  "comparison",
-  "object",
-  "regex",
-  "uuid",
-]
-
 const ADDED_HELPERS = {
   date: date,
   duration: duration,
@@ -40,7 +26,7 @@ export function registerAll(handlebars: typeof Handlebars) {
   let hbsHelperInfo = helpers[collection]()
   for (let entry of Object.entries(hbsHelperInfo)) {
     const name = entry[0]
-    // skip built in functions and ones seen already
+    // skip built-in functions and ones seen already
     if (
       HelperFunctionBuiltin.indexOf(name) !== -1 ||
       externalNames.indexOf(name) !== -1

View File

@@ -1312,6 +1312,26 @@
       "requiresBlock": false
     }
   },
+  "regex": {
+    "toRegex": {
+      "args": [
+        "str"
+      ],
+      "numArgs": 1,
+      "example": "{{toRegex 'foo'}} -> /foo/",
+      "description": "<p>Convert the given string to a regular expression.</p>\n",
+      "requiresBlock": false
+    },
+    "test": {
+      "args": [
+        "str"
+      ],
+      "numArgs": 1,
+      "example": "{{test 'foobar' (toRegex 'foo')}} -> true",
+      "description": "<p>Returns true if the given <code>str</code> matches the given regex. A regex can be passed on the context, or using the <a href=\"#toregex\">toRegex</a> helper as a subexpression.</p>\n",
+      "requiresBlock": false
+    }
+  },
   "uuid": {
     "uuid": {
       "args": [],

View File

@@ -93,10 +93,10 @@ describe("Test that the string processing works correctly", () => {
   it("should handle a complex statement", () => {
     const response = convertToJS(
-      "This is the average: {{ join ( avg val1 val2 val3 ) val4 }}"
+      "This is the average: {{ join val1 ( avg val2 val3 val4 ) }}"
     )
     checkLines(response, [
-      'const var1 = helpers.join(helpers.avg($("val1"), $("val2"), $("val3")), $("val4"));',
+      'const var1 = helpers.join($("val1"), helpers.avg($("val2"), $("val3"), $("val4")));',
       "return `This is the average: ${var1}`;",
     ])
   })
@@ -119,10 +119,10 @@ describe("Test that the string processing works correctly", () => {
   it("should handle multiple complex statements", () => {
     const response = convertToJS(
-      "average: {{ avg ( abs val1 ) val2 }} add: {{ add 1 2 }}"
+      "average: {{ avg val1 ( abs val2 ) }} add: {{ add 1 2 }}"
     )
     checkLines(response, [
-      'const var1 = helpers.avg(helpers.abs($("val1")), $("val2"));',
+      'const var1 = helpers.avg($("val1"), helpers.abs($("val2")));',
       "const var2 = helpers.add(1, 2);",
       "return `average: ${var1} add: ${var2}`;",
     ])

View File

@@ -2038,7 +2038,7 @@
   resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
   integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==

-"@budibase/backend-core@2.23.12":
+"@budibase/backend-core@2.29.22":
   version "0.0.0"
   dependencies:
     "@budibase/nano" "10.1.5"
@@ -2119,14 +2119,14 @@
     through2 "^2.0.0"

 "@budibase/pro@npm:@budibase/pro@latest":
-  version "2.23.12"
-  resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.23.12.tgz#b2e813c547a5ed22b5bd86b1158159fe4b918260"
-  integrity sha512-DMtfkrJDSIF9V7AL6brpuWWw7Ot5XxO4YQ32ggmr0264uU9KYsTFvlFXFP3MSF2H+247ZYUouSJU76+XeC13qQ==
+  version "2.29.22"
+  resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.29.22.tgz#2608b2a76be0426879068e5a61100d6b8dde6f3a"
+  integrity sha512-flMVIpWQb9w3f4aiBSM73aLcYmfoPIf+kP8JXgRWO0k3nGrUGaMQNBKzXwC7soTkTrJCZjBh8uaY75AxTP2RdA==
   dependencies:
-    "@budibase/backend-core" "2.23.12"
-    "@budibase/shared-core" "2.23.12"
-    "@budibase/string-templates" "2.23.12"
-    "@budibase/types" "2.23.12"
+    "@budibase/backend-core" "2.29.22"
+    "@budibase/shared-core" "2.29.22"
+    "@budibase/string-templates" "2.29.22"
+    "@budibase/types" "2.29.22"
     "@koa/router" "8.0.8"
     bull "4.10.1"
     joi "17.6.0"
@@ -2137,13 +2137,13 @@
     scim-patch "^0.8.1"
     scim2-parse-filter "^0.2.8"

-"@budibase/shared-core@2.23.12":
+"@budibase/shared-core@2.29.22":
   version "0.0.0"
   dependencies:
     "@budibase/types" "0.0.0"
     cron-validate "1.4.5"

-"@budibase/string-templates@2.23.12":
+"@budibase/string-templates@2.29.22":
   version "0.0.0"
   dependencies:
     "@budibase/handlebars-helpers" "^0.13.2"
@@ -2151,7 +2151,7 @@
   handlebars "^4.7.8"
   lodash.clonedeep "^4.5.0"

-"@budibase/types@2.23.12":
+"@budibase/types@2.29.22":
   version "0.0.0"
   dependencies:
     scim-patch "^0.8.1"
@@ -5731,13 +5731,12 @@
   dependencies:
     "@types/node" "*"

-"@types/oracledb@5.2.2":
-  version "5.2.2"
-  resolved "https://registry.yarnpkg.com/@types/oracledb/-/oracledb-5.2.2.tgz#ae7ba795969e3bbd8d57ab141873a1aa012b86cd"
-  integrity sha512-aYb2DdZOQVIgSCSXjXNikQuyiHAY09SkRA4cjwoj+F/mhLJDahdjNeBmvQvfFojyChCKLuupSJHqoAXPExgV5w==
+"@types/oracledb@6.5.1":
+  version "6.5.1"
+  resolved "https://registry.yarnpkg.com/@types/oracledb/-/oracledb-6.5.1.tgz#17d021cabc9d216dfa6d3d65ae3ee585c33baab3"
+  integrity sha512-Ll0bKGXmCZVngBL3juSaytA8Jeocx0VghDHTt+FEC2bs8fdl9pzoaBXYWXjBUxCCT8Y/69m5AzuTgBd79j24WA==
   dependencies:
     "@types/node" "*"
-    dotenv "^8.2.0"

 "@types/passport-google-oauth@^1.0.42":
   version "1.0.45"
@@ -16660,10 +16659,10 @@ ora@^5.4.1:
     strip-ansi "^6.0.0"
     wcwidth "^1.0.1"

-oracledb@5.3.0:
-  version "5.3.0"
-  resolved "https://registry.yarnpkg.com/oracledb/-/oracledb-5.3.0.tgz#a15e6cd16757d8711a2c006a28bd7ecd3b8466f7"
-  integrity sha512-HMJzQ6lCf287ztvvehTEmjCWA21FQ3RMvM+mgoqd4i8pkREuqFWO+y3ovsGR9moJUg4T0xjcwS8rl4mggWPxmg==
+oracledb@6.5.1:
+  version "6.5.1"
+  resolved "https://registry.yarnpkg.com/oracledb/-/oracledb-6.5.1.tgz#814d985035acdb1a6470b1152af0ca3767569ede"
+  integrity sha512-JzoSGei1wnvmqgKnAZK1W650mzHTZXx+7hClV4mwsbY/ZjUtrpnojNJMYJ2jkOhj7XG5oJPfXc4GqDKaNzkxqg==

 os-locale@^3.1.0:
   version "3.1.0"