commit a7731930bd

@@ -34,4 +34,5 @@ exports.Configs = {
   OIDC_LOGOS: "logos_oidc",
 }

+exports.MAX_VALID_DATE = new Date(2147483647000)
 exports.DEFAULT_TENANT_ID = "default"
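
For reference, 2147483647000 is 2^31 - 1 seconds expressed in milliseconds, i.e. the largest 32-bit Unix timestamp, so MAX_VALID_DATE acts as an "effectively never expires" date. A minimal sketch (plain Node, not project code):

```js
// 2^31 - 1 seconds after the epoch, converted to milliseconds
const MAX_VALID_DATE = new Date(2147483647000)

console.log(MAX_VALID_DATE.toISOString()) // 2038-01-19T03:14:07.000Z
```
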
@@ -1,6 +1,7 @@
 const redis = require("../redis/authRedis")

-const EXPIRY_SECONDS = 86400
+// a week in seconds
+const EXPIRY_SECONDS = 86400 * 7

 async function getSessionsForUser(userId) {
   const client = await redis.getSessionClient()
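
The authRedis wrapper itself is not part of this diff; as a hedged sketch, this is roughly how a one-week TTL would be applied when writing a session key with the plain node-redis client (key names and payload shape are illustrative):

```js
const { createClient } = require("redis") // assumes the node-redis package

// a week in seconds
const EXPIRY_SECONDS = 86400 * 7

async function storeSession(userId, sessionId, session) {
  const client = createClient()
  await client.connect()
  // EX gives the key a time-to-live, so stale sessions disappear after a week
  await client.set(`session:${userId}:${sessionId}`, JSON.stringify(session), {
    EX: EXPIRY_SECONDS,
  })
  await client.quit()
}
```
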
@@ -7,7 +7,7 @@ const {
 const jwt = require("jsonwebtoken")
 const { options } = require("./middleware/passport/jwt")
 const { createUserEmailView } = require("./db/views")
-const { Headers, UserStatus, Cookies } = require("./constants")
+const { Headers, UserStatus, Cookies, MAX_VALID_DATE } = require("./constants")
 const {
   getGlobalDB,
   updateTenantId,
@@ -83,14 +83,15 @@ exports.getCookie = (ctx, name) => {
  * @param {object} ctx The request which is to be manipulated.
  * @param {string} name The name of the cookie to set.
  * @param {string|object} value The value of cookie which will be set.
+ * @param {object} opts options like whether to sign.
  */
-exports.setCookie = (ctx, value, name = "builder") => {
-  if (value) {
+exports.setCookie = (ctx, value, name = "builder", opts = { sign: true }) => {
+  if (value && opts && opts.sign) {
     value = jwt.sign(value, options.secretOrKey)
   }

   const config = {
-    maxAge: Number.MAX_SAFE_INTEGER,
+    expires: MAX_VALID_DATE,
     path: "/",
     httpOnly: false,
     overwrite: true,
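
A standalone sketch of the new signing behaviour, assuming a Koa-style ctx and the jsonwebtoken package; the secret lookup here (process.env.JWT_SECRET) stands in for the real options.secretOrKey from the passport config:

```js
const jwt = require("jsonwebtoken")

const MAX_VALID_DATE = new Date(2147483647000)

function setCookie(ctx, value, name = "builder", opts = { sign: true }) {
  if (value && opts && opts.sign) {
    // object payloads are wrapped in a signed JWT; pre-signed tokens skip this
    value = jwt.sign(value, process.env.JWT_SECRET)
  }
  ctx.cookies.set(name, value, {
    expires: MAX_VALID_DATE, // valid until the 32-bit timestamp limit in 2038
    path: "/",
    httpOnly: false,
    overwrite: true,
  })
}

// e.g. storing an already-signed session token verbatim, as the auth code does:
// setCookie(ctx, user.token, Cookies.Auth, { sign: false })
```
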
@@ -75,6 +75,7 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
   if (!dataImport || !dataImport.csvString) {
     return table
   }

   const db = new CouchDB(appId)
   // Populate the table with rows imported from CSV in a bulk update
   const data = await csvParser.transform({
@@ -5,6 +5,7 @@ const exporters = require("./exporters")
 const { saveView, getView, getViews, deleteView } = require("./utils")
 const { fetchView } = require("../row")
 const { getTable } = require("../table/utils")
+const { FieldTypes } = require("../../../constants")

 exports.fetch = async ctx => {
   const db = new CouchDB(ctx.appId)
@@ -77,6 +78,7 @@ exports.exportView = async ctx => {
   }

   await fetchView(ctx)
+  let rows = ctx.body

   let schema = view && view.meta && view.meta.schema
   if (!schema) {
@@ -85,11 +87,23 @@ exports.exportView = async ctx => {
     schema = table.schema
   }

+  // remove any relationships
+  const relationships = Object.entries(schema)
+    .filter(entry => entry[1].type === FieldTypes.LINK)
+    .map(entry => entry[0])
+  // iterate relationship columns and remove from and row and schema
+  relationships.forEach(column => {
+    rows.forEach(row => {
+      delete row[column]
+    })
+    delete schema[column]
+  })
+
   // make sure no "undefined" entries appear in the CSV
   if (format === exporters.ExportFormats.CSV) {
     const schemaKeys = Object.keys(schema)
     for (let key of schemaKeys) {
-      for (let row of ctx.body) {
+      for (let row of rows) {
         if (row[key] == null) {
           row[key] = ""
         }
@@ -103,5 +117,5 @@ exports.exportView = async ctx => {
   const filename = `${viewName}.${format}`
   // send down the file
   ctx.attachment(filename)
-  ctx.body = apiFileReturn(exporter(headers, ctx.body))
+  ctx.body = apiFileReturn(exporter(headers, rows))
 }
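
Taken together, the exportView changes strip relationship (link) columns and blank out missing values before handing rows to the exporter. A hedged standalone sketch of that clean-up, with FieldTypes and the row/schema shapes simplified:

```js
const FieldTypes = { LINK: "link" } // simplified stand-in for the real constant

function prepareForExport(schema, rows) {
  // link columns hold references to other rows and can't be flattened into CSV
  const relationships = Object.entries(schema)
    .filter(([, field]) => field.type === FieldTypes.LINK)
    .map(([name]) => name)
  for (const column of relationships) {
    for (const row of rows) {
      delete row[column]
    }
    delete schema[column]
  }
  // replace missing cells so the output never contains the string "undefined"
  for (const key of Object.keys(schema)) {
    for (const row of rows) {
      if (row[key] == null) {
        row[key] = ""
      }
    }
  }
  return { schema, rows }
}
```

Exporting then becomes a matter of passing the cleaned rows, rather than the raw ctx.body, to the chosen exporter.
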
@@ -102,8 +102,11 @@ async function transform({ schema, csvString, existingTable }) {
     schema = updateSchema({ schema, existingTable })
   }

-  for (let key of Object.keys(schema)) {
-    colParser[key] = PARSERS[schema[key].type] || schema[key].type
+  for (let [key, field] of Object.entries(schema)) {
+    // don't import data to auto columns
+    if (!field.autocolumn) {
+      colParser[key] = PARSERS[field.type] || field.type
+    }
   }

   try {
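
The surrounding parser wiring is outside this hunk; assuming the csvtojson package (whose colParser option maps column names to parsers), the auto-column guard looks roughly like this, with PARSERS shown as an illustrative subset:

```js
const csv = require("csvtojson") // assumed CSV parsing library

// illustrative parsers for a couple of field types
const PARSERS = {
  number: value => (value ? parseFloat(value) : null),
  datetime: value => (value ? new Date(value).toISOString() : null),
}

async function transform(schema, csvString) {
  const colParser = {}
  for (const [key, field] of Object.entries(schema)) {
    // auto columns (auto IDs, created-at, etc.) are generated by the server,
    // so no parser is registered for them during import
    if (!field.autocolumn) {
      colParser[key] = PARSERS[field.type] || field.type
    }
  }
  return csv({ colParser }).fromString(csvString)
}
```
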
@@ -56,26 +56,11 @@ async function authInternal(ctx, user, err = null, info = null) {
     return ctx.throw(403, info ? info : "Unauthorized")
   }

-  const expires = new Date()
-  expires.setDate(expires.getDate() + 1)
-
   if (!user) {
     return ctx.throw(403, info ? info : "Unauthorized")
   }

-  const config = {
-    expires,
-    path: "/",
-    httpOnly: false,
-    overwrite: true,
-  }
-
-  if (env.COOKIE_DOMAIN) {
-    config.domain = env.COOKIE_DOMAIN
-  }
-
-  // just store the user ID
-  ctx.cookies.set(Cookies.Auth, user.token, config)
+  setCookie(ctx, user.token, Cookies.Auth, { sign: false })
   // get rid of any app cookies on login
   // have to check test because this breaks cypress
   if (!env.isTest()) {
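
In effect the hand-rolled cookie block, with its one-day expiry and per-call COOKIE_DOMAIN handling, is replaced by the shared setCookie helper; sign: false is passed presumably because user.token is already a signed session token rather than a payload that needs wrapping in another JWT.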