diff --git a/hosting/.env b/hosting/.env deleted file mode 120000 index bb1b54ad77..0000000000 --- a/hosting/.env +++ /dev/null @@ -1 +0,0 @@ -hosting.properties \ No newline at end of file diff --git a/hosting/digitalocean/README.md b/hosting/digitalocean/README.md new file mode 100644 index 0000000000..72c1950d17 --- /dev/null +++ b/hosting/digitalocean/README.md @@ -0,0 +1,19 @@ +# Budibase DigitalOcean One Click +This directory contains the configuration for packaging and creating a snapshot for the Budibase DigitalOcean 1-Click build. We use this configuration to produce an immutable, reproducible build package for DigitalOcean that rarely needs to be updated. + +## Prerequisites +You must install HashiCorp's `packer` to build the snapshot for DigitalOcean. Follow the instructions to install packer [here](https://learn.hashicorp.com/tutorials/packer/get-started-install-cli). + +You must have the `DIGITALOCEAN_TOKEN` environment variable set, so that packer can reach the DigitalOcean API for build information. + +## Building +Just run the following command: +``` +yarn build:digitalocean +``` + +## Uploading to Marketplace +You can upload the snapshot to the DigitalOcean vendor portal at the following link (requires a vendor account): + +https://marketplace.digitalocean.com/vendorportal + diff --git a/hosting/digitalocean/build.sh b/hosting/digitalocean/build.sh new file mode 100755 index 0000000000..743629ca12 --- /dev/null +++ b/hosting/digitalocean/build.sh @@ -0,0 +1,2 @@ +#!/bin/bash +packer build template.json diff --git a/hosting/digitalocean/files/etc/update-motd.d/99-one-click b/hosting/digitalocean/files/etc/update-motd.d/99-one-click new file mode 100644 index 0000000000..0f087a26ee --- /dev/null +++ b/hosting/digitalocean/files/etc/update-motd.d/99-one-click @@ -0,0 +1,19 @@ +#!/bin/sh +# +# Configured as part of the DigitalOcean 1-Click Image build process + +myip=$(hostname -I | awk '{print$1}') +cat <> .env +done /root/.bash_history +unset HISTFILE +find /var/log -mtime -1 -type f -exec truncate -s 0 {} \; +rm -rf /var/log/*.gz /var/log/*.[0-9] /var/log/*-???????? +rm -rf /var/lib/cloud/instances/* +rm -f /root/.ssh/authorized_keys /etc/ssh/*key* +touch /etc/ssh/revoked_keys +chmod 600 /etc/ssh/revoked_keys + +# Securely erase the unused portion of the filesystem +GREEN='\033[0;32m' +NC='\033[0m' +printf "\n${GREEN}Writing zeros to the remaining disk space to securely +erase the unused portion of the file system. +Depending on your disk size this may take several minutes. +The secure erase will complete successfully when you see:${NC} + dd: writing to '/zerofile': No space left on device\n +Beginning secure erase now\n" + +dd if=/dev/zero of=/zerofile bs=4096 || rm /zerofile diff --git a/hosting/digitalocean/scripts/99-img_check.sh b/hosting/digitalocean/scripts/99-img_check.sh new file mode 100644 index 0000000000..32a9e77eac --- /dev/null +++ b/hosting/digitalocean/scripts/99-img_check.sh @@ -0,0 +1,617 @@ +#!/bin/bash + +# DigitalOcean Marketplace Image Validation Tool +# © 2021 DigitalOcean LLC. +# This code is licensed under Apache 2.0 license (see LICENSE.md for details) + +VERSION="v. 1.6" +RUNDATE=$( date ) + +# Script should be run with SUDO +if [ "$EUID" -ne 0 ] + then echo "[Error] - This script must be run with sudo or as the root user."
+ exit 1 +fi + +STATUS=0 +PASS=0 +WARN=0 +FAIL=0 + +# $1 == command to check for +# returns: 0 == true, 1 == false +cmdExists() { + if command -v "$1" > /dev/null 2>&1; then + return 0 + else + return 1 + fi +} + +function getDistro { + if [ -f /etc/os-release ]; then + # freedesktop.org and systemd + . /etc/os-release + OS=$NAME + VER=$VERSION_ID +elif type lsb_release >/dev/null 2>&1; then + # linuxbase.org + OS=$(lsb_release -si) + VER=$(lsb_release -sr) +elif [ -f /etc/lsb-release ]; then + # For some versions of Debian/Ubuntu without lsb_release command + . /etc/lsb-release + OS=$DISTRIB_ID + VER=$DISTRIB_RELEASE +elif [ -f /etc/debian_version ]; then + # Older Debian/Ubuntu/etc. + OS=Debian + VER=$(cat /etc/debian_version) +elif [ -f /etc/SuSe-release ]; then + # Older SuSE/etc. + : +elif [ -f /etc/redhat-release ]; then + # Older Red Hat, CentOS, etc. + VER=$( cat /etc/redhat-release | cut -d" " -f3 | cut -d "." -f1) + d=$( cat /etc/redhat-release | cut -d" " -f1 | cut -d "." -f1) + if [[ $d == "CentOS" ]]; then + OS="CentOS Linux" + fi +else + # Fall back to uname, e.g. "Linux ", also works for BSD, etc. + OS=$(uname -s) + VER=$(uname -r) +fi +} +function loadPasswords { +SHADOW=$(cat /etc/shadow) +} + +function checkAgent { + # Check for the presence of the do-agent in the filesystem + if [ -d /var/opt/digitalocean/do-agent ];then + echo -en "\e[41m[FAIL]\e[0m DigitalOcean Monitoring Agent detected.\n" + ((FAIL++)) + STATUS=2 + if [[ $OS == "CentOS Linux" ]] || [[ $OS == "CentOS Stream" ]] || [[ $OS == "Rocky Linux" ]]; then + echo "The agent can be removed with 'sudo yum remove do-agent' " + elif [[ $OS == "Ubuntu" ]]; then + echo "The agent can be removed with 'sudo apt-get purge do-agent' " + fi + else + echo -en "\e[32m[PASS]\e[0m DigitalOcean Monitoring agent was not found\n" + ((PASS++)) + fi +} + +function checkLogs { + cp_ignore="/var/log/cpanel-install.log" + echo -en "\nChecking for log files in /var/log\n\n" + # Check if there are log archives or log files that have not been recently cleared. + for f in /var/log/*-????????; do + [[ -e $f ]] || break + if [ $f != $cp_ignore ]; then + echo -en "\e[93m[WARN]\e[0m Log archive ${f} found\n" + ((WARN++)) + if [[ $STATUS != 2 ]]; then + STATUS=1 + fi + fi + done + for f in /var/log/*.[0-9];do + [[ -e $f ]] || break + echo -en "\e[93m[WARN]\e[0m Log archive ${f} found\n" + ((WARN++)) + if [[ $STATUS != 2 ]]; then + STATUS=1 + fi + done + for f in /var/log/*.log; do + [[ -e $f ]] || break + if [[ "${f}" = '/var/log/lfd.log' && "$( cat "${f}" | egrep -v '/var/log/messages has been reset| Watching /var/log/messages' | wc -c)" -gt 50 ]]; then + if [ $f != $cp_ignore ]; then + echo -en "\e[93m[WARN]\e[0m un-cleared log file, ${f} found\n" + ((WARN++)) + if [[ $STATUS != 2 ]]; then + STATUS=1 + fi + fi + elif [[ "${f}" != '/var/log/lfd.log' && "$( cat "${f}" | wc -c)" -gt 50 ]]; then + if [ $f != $cp_ignore ]; then + echo -en "\e[93m[WARN]\e[0m un-cleared log file, ${f} found\n" + ((WARN++)) + if [[ $STATUS != 2 ]]; then + STATUS=1 + fi + fi + fi + done +} +function checkTMP { + # Check the /tmp directory to ensure it is empty. Warn on any files found. + return 1 +} +function checkRoot { + user="root" + uhome="/root" + for usr in $SHADOW + do + IFS=':' read -r -a u <<< "$usr" + if [[ "${u[0]}" == "${user}" ]]; then + if [[ ${u[1]} == "!" ]] || [[ ${u[1]} == "!!" 
]] || [[ ${u[1]} == "*" ]]; then + echo -en "\e[32m[PASS]\e[0m User ${user} has no password set.\n" + ((PASS++)) + else + echo -en "\e[41m[FAIL]\e[0m User ${user} has a password set on their account.\n" + ((FAIL++)) + STATUS=2 + fi + fi + done + if [ -d ${uhome}/ ]; then + if [ -d ${uhome}/.ssh/ ]; then + if ls ${uhome}/.ssh/*> /dev/null 2>&1; then + for key in ${uhome}/.ssh/* + do + if [ "${key}" == "${uhome}/.ssh/authorized_keys" ]; then + + if [ "$( cat "${key}" | wc -c)" -gt 50 ]; then + echo -en "\e[41m[FAIL]\e[0m User \e[1m${user}\e[0m has a populated authorized_keys file in \e[93m${key}\e[0m\n" + akey=$(cat ${key}) + echo "File Contents:" + echo $akey + echo "--------------" + ((FAIL++)) + STATUS=2 + fi + elif [ "${key}" == "${uhome}/.ssh/id_rsa" ]; then + if [ "$( cat "${key}" | wc -c)" -gt 0 ]; then + echo -en "\e[41m[FAIL]\e[0m User \e[1m${user}\e[0m has a private key file in \e[93m${key}\e[0m\n" + akey=$(cat ${key}) + echo "File Contents:" + echo $akey + echo "--------------" + ((FAIL++)) + STATUS=2 + else + echo -en "\e[93m[WARN]\e[0m User \e[1m${user}\e[0m has empty private key file in \e[93m${key}\e[0m\n" + ((WARN++)) + if [[ $STATUS != 2 ]]; then + STATUS=1 + fi + fi + elif [ "${key}" != "${uhome}/.ssh/known_hosts" ]; then + echo -en "\e[93m[WARN]\e[0m User \e[1m${user}\e[0m has a file in their .ssh directory at \e[93m${key}\e[0m\n" + ((WARN++)) + if [[ $STATUS != 2 ]]; then + STATUS=1 + fi + else + if [ "$( cat "${key}" | wc -c)" -gt 50 ]; then + echo -en "\e[93m[WARN]\e[0m User \e[1m${user}\e[0m has a populated known_hosts file in \e[93m${key}\e[0m\n" + ((WARN++)) + if [[ $STATUS != 2 ]]; then + STATUS=1 + fi + fi + fi + done + else + echo -en "\e[32m[ OK ]\e[0m User \e[1m${user}\e[0m has no SSH keys present\n" + fi + else + echo -en "\e[32m[ OK ]\e[0m User \e[1m${user}\e[0m does not have an .ssh directory\n" + fi + if [ -f /root/.bash_history ];then + + BH_S=$( cat /root/.bash_history | wc -c) + + if [[ $BH_S -lt 200 ]]; then + echo -en "\e[32m[PASS]\e[0m ${user}'s Bash History appears to have been cleared\n" + ((PASS++)) + else + echo -en "\e[41m[FAIL]\e[0m ${user}'s Bash History should be cleared to prevent sensitive information from leaking\n" + ((FAIL++)) + STATUS=2 + fi + + return 1; + else + echo -en "\e[32m[PASS]\e[0m The Root User's Bash History is not present\n" + ((PASS++)) + fi + else + echo -en "\e[32m[ OK ]\e[0m User \e[1m${user}\e[0m does not have a directory in /home\n" + fi + echo -en "\n\n" + return 1 +} + +function checkUsers { + # Check each user-created account + for user in $(awk -F: '$3 >= 1000 && $1 != "nobody" {print $1}' /etc/passwd;) + do + # Skip some other non-user system accounts + if [[ $user == "centos" ]]; then + : + elif [[ $user == "nfsnobody" ]]; then + : + else + echo -en "\nChecking user: ${user}...\n" + for usr in $SHADOW + do + IFS=':' read -r -a u <<< "$usr" + if [[ "${u[0]}" == "${user}" ]]; then + if [[ ${u[1]} == "!" ]] || [[ ${u[1]} == "!!" ]] || [[ ${u[1]} == "*" ]]; then + echo -en "\e[32m[PASS]\e[0m User ${user} has no password set.\n" + ((PASS++)) + else + echo -en "\e[41m[FAIL]\e[0m User ${user} has a password set on their account. 
Only system users are allowed on the image.\n" + ((FAIL++)) + STATUS=2 + fi + fi + done + #echo "User Found: ${user}" + uhome="/home/${user}" + if [ -d "${uhome}/" ]; then + if [ -d "${uhome}/.ssh/" ]; then + if ls "${uhome}/.ssh/*"> /dev/null 2>&1; then + for key in ${uhome}/.ssh/* + do + if [ "${key}" == "${uhome}/.ssh/authorized_keys" ]; then + if [ "$( cat "${key}" | wc -c)" -gt 50 ]; then + echo -en "\e[41m[FAIL]\e[0m User \e[1m${user}\e[0m has a populated authorized_keys file in \e[93m${key}\e[0m\n" + akey=$(cat ${key}) + echo "File Contents:" + echo $akey + echo "--------------" + ((FAIL++)) + STATUS=2 + fi + elif [ "${key}" == "${uhome}/.ssh/id_rsa" ]; then + if [ "$( cat "${key}" | wc -c)" -gt 0 ]; then + echo -en "\e[41m[FAIL]\e[0m User \e[1m${user}\e[0m has a private key file in \e[93m${key}\e[0m\n" + akey=$(cat ${key}) + echo "File Contents:" + echo $akey + echo "--------------" + ((FAIL++)) + STATUS=2 + else + echo -en "\e[93m[WARN]\e[0m User \e[1m${user}\e[0m has empty private key file in \e[93m${key}\e[0m\n" + ((WARN++)) + if [[ $STATUS != 2 ]]; then + STATUS=1 + fi + fi + elif [ "${key}" != "${uhome}/.ssh/known_hosts" ]; then + + echo -en "\e[93m[WARN]\e[0m User \e[1m${user}\e[0m has a file in their .ssh directory named \e[93m${key}\e[0m\n" + ((WARN++)) + if [[ $STATUS != 2 ]]; then + STATUS=1 + fi + + else + if [ "$( cat "${key}" | wc -c)" -gt 50 ]; then + echo -en "\e[93m[WARN]\e[0m User \e[1m${user}\e[0m has a known_hosts file in \e[93m${key}\e[0m\n" + ((WARN++)) + if [[ $STATUS != 2 ]]; then + STATUS=1 + fi + fi + fi + + + done + else + echo -en "\e[32m[ OK ]\e[0m User \e[1m${user}\e[0m has no SSH keys present\n" + fi + else + echo -en "\e[32m[ OK ]\e[0m User \e[1m${user}\e[0m does not have an .ssh directory\n" + fi + else + echo -en "\e[32m[ OK ]\e[0m User \e[1m${user}\e[0m does not have a directory in /home\n" + fi + + # Check for an uncleared .bash_history for this user + if [ -f "${uhome}/.bash_history" ]; then + BH_S=$( cat "${uhome}/.bash_history" | wc -c ) + + if [[ $BH_S -lt 200 ]]; then + echo -en "\e[32m[PASS]\e[0m ${user}'s Bash History appears to have been cleared\n" + ((PASS++)) + else + echo -en "\e[41m[FAIL]\e[0m ${user}'s Bash History should be cleared to prevent sensitive information from leaking\n" + ((FAIL++)) + STATUS=2 + + fi + echo -en "\n\n" + fi + fi + done +} +function checkFirewall { + + if [[ $OS == "Ubuntu" ]]; then + fw="ufw" + ufwa=$(ufw status |head -1| sed -e "s/^Status:\ //") + if [[ $ufwa == "active" ]]; then + FW_VER="\e[32m[PASS]\e[0m Firewall service (${fw}) is active\n" + ((PASS++)) + else + FW_VER="\e[93m[WARN]\e[0m No firewall is configured. Ensure ${fw} is installed and configured\n" + ((WARN++)) + fi + elif [[ $OS == "CentOS Linux" ]] || [[ $OS == "CentOS Stream" ]] || [[ $OS == "Rocky Linux" ]]; then + if [ -f /usr/lib/systemd/system/csf.service ]; then + fw="csf" + if [[ $(systemctl status $fw >/dev/null 2>&1) ]]; then + + FW_VER="\e[32m[PASS]\e[0m Firewall service (${fw}) is active\n" + ((PASS++)) + elif cmdExists "firewall-cmd"; then + if [[ $(systemctl is-active firewalld >/dev/null 2>&1 && echo 1 || echo 0) ]]; then + FW_VER="\e[32m[PASS]\e[0m Firewall service (${fw}) is active\n" + ((PASS++)) + else + FW_VER="\e[93m[WARN]\e[0m No firewall is configured. Ensure ${fw} is installed and configured\n" + ((WARN++)) + fi + else + FW_VER="\e[93m[WARN]\e[0m No firewall is configured. 
Ensure ${fw} is installed and configured\n" + ((WARN++)) + fi + else + fw="firewalld" + if [[ $(systemctl is-active firewalld >/dev/null 2>&1 && echo 1 || echo 0) ]]; then + FW_VER="\e[32m[PASS]\e[0m Firewall service (${fw}) is active\n" + ((PASS++)) + else + FW_VER="\e[93m[WARN]\e[0m No firewall is configured. Ensure ${fw} is installed and configured\n" + ((WARN++)) + fi + fi + elif [[ "$OS" =~ Debian.* ]]; then + # user could be using a number of different services for managing their firewall + # we will check some of the most common + if cmdExists 'ufw'; then + fw="ufw" + ufwa=$(ufw status |head -1| sed -e "s/^Status:\ //") + if [[ $ufwa == "active" ]]; then + FW_VER="\e[32m[PASS]\e[0m Firewall service (${fw}) is active\n" + ((PASS++)) + else + FW_VER="\e[93m[WARN]\e[0m No firewall is configured. Ensure ${fw} is installed and configured\n" + ((WARN++)) + fi + elif cmdExists "firewall-cmd"; then + fw="firewalld" + if [[ $(systemctl is-active --quiet $fw) ]]; then + FW_VER="\e[32m[PASS]\e[0m Firewall service (${fw}) is active\n" + ((PASS++)) + else + FW_VER="\e[93m[WARN]\e[0m No firewall is configured. Ensure ${fw} is installed and configured\n" + ((WARN++)) + fi + else + # user could be using vanilla iptables, check if kernel module is loaded + fw="iptables" + if [[ $(lsmod | grep -q '^ip_tables' 2>/dev/null) ]]; then + FW_VER="\e[32m[PASS]\e[0m Firewall service (${fw}) is active\n" + ((PASS++)) + else + FW_VER="\e[93m[WARN]\e[0m No firewall is configured. Ensure ${fw} is installed and configured\n" + ((WARN++)) + fi + fi + fi + +} +function checkUpdates { + if [[ $OS == "Ubuntu" ]] || [[ "$OS" =~ Debian.* ]]; then + # Ensure /tmp exists and has the proper permissions before + # checking for security updates + # https://github.com/digitalocean/marketplace-partners/issues/94 + if [[ ! 
-d /tmp ]]; then + mkdir /tmp + fi + chmod 1777 /tmp + + echo -en "\nUpdating apt package database to check for security updates, this may take a minute...\n\n" + apt-get -y update > /dev/null + + uc=$(apt-get --just-print upgrade | grep -i "security" | wc -l) + if [[ $uc -gt 0 ]]; then + update_count=$(( ${uc} / 2 )) + else + update_count=0 + fi + + if [[ $update_count -gt 0 ]]; then + echo -en "\e[41m[FAIL]\e[0m There are ${update_count} security updates available for this image that have not been installed.\n" + echo -en + echo -en "Here is a list of the security updates that are not installed:\n" + sleep 2 + apt-get --just-print upgrade | grep -i security | awk '{print $2}' | awk '!seen[$0]++' + echo -en + ((FAIL++)) + STATUS=2 + else + echo -en "\e[32m[PASS]\e[0m There are no pending security updates for this image.\n\n" + fi + elif [[ $OS == "CentOS Linux" ]] || [[ $OS == "CentOS Stream" ]] || [[ $OS == "Rocky Linux" ]]; then + echo -en "\nChecking for available security updates, this may take a minute...\n\n" + + update_count=$(yum check-update --security --quiet | wc -l) + if [[ $update_count -gt 0 ]]; then + echo -en "\e[41m[FAIL]\e[0m There are ${update_count} security updates available for this image that have not been installed.\n" + ((FAIL++)) + STATUS=2 + else + echo -en "\e[32m[PASS]\e[0m There are no pending security updates for this image.\n" + ((PASS++)) + fi + else + echo "Error encountered" + exit 1 + fi + + return 1; +} +function checkCloudInit { + + if hash cloud-init 2>/dev/null; then + CI="\e[32m[PASS]\e[0m Cloud-init is installed.\n" + ((PASS++)) + else + CI="\e[41m[FAIL]\e[0m No valid verison of cloud-init was found.\n" + ((FAIL++)) + STATUS=2 + fi + return 1 +} + +function version_gt() { test "$(printf '%s\n' "$@" | sort -V | head -n 1)" != "$1"; } + + +clear +echo "DigitalOcean Marketplace Image Validation Tool ${VERSION}" +echo "Executed on: ${RUNDATE}" +echo "Checking local system for Marketplace compatibility..." + +getDistro + +echo -en "\n\e[1mDistribution:\e[0m ${OS}\n" +echo -en "\e[1mVersion:\e[0m ${VER}\n\n" + +ost=0 +osv=0 + +if [[ $OS == "Ubuntu" ]]; then + ost=1 + if [[ $VER == "20.04" ]]; then + osv=1 + elif [[ $VER == "18.04" ]]; then + osv=1 + elif [[ $VER == "16.04" ]]; then + osv=1 + else + osv=0 + fi + +elif [[ "$OS" =~ Debian.* ]]; then + ost=1 + case "$VER" in + 9) + osv=1 + ;; + 10) + osv=1 + ;; + *) + osv=2 + ;; + esac + +elif [[ $OS == "CentOS Linux" ]]; then + ost=1 + if [[ $VER == "8" ]]; then + osv=1 + elif [[ $VER == "7" ]]; then + osv=1 + elif [[ $VER == "6" ]]; then + osv=1 + else + osv=2 + fi +elif [[ $OS == "CentOS Stream" ]]; then + ost=1 + if [[ $VER == "8" ]]; then + osv=1 + else + osv=2 + fi +elif [[ $OS == "Rocky Linux" ]]; then + ost=1 + if [[ $VER =~ "8." ]]; then + osv=1 + else + osv=2 + fi +else + ost=0 +fi + +if [[ $ost == 1 ]]; then + echo -en "\e[32m[PASS]\e[0m Supported Operating System Detected: ${OS}\n" + ((PASS++)) +else + echo -en "\e[41m[FAIL]\e[0m ${OS} is not a supported Operating System\n" + ((FAIL++)) + STATUS=2 +fi + +if [[ $osv == 1 ]]; then + echo -en "\e[32m[PASS]\e[0m Supported Release Detected: ${VER}\n" + ((PASS++)) +elif [[ $ost == 1 ]]; then + echo -en "\e[41m[FAIL]\e[0m ${OS} ${VER} is not a supported Operating System Version\n" + ((FAIL++)) + STATUS=2 +else + echo "Exiting..." 
+ exit 1 +fi + +checkCloudInit + +echo -en "${CI}" + +checkFirewall + +echo -en "${FW_VER}" + +checkUpdates + +loadPasswords + +checkLogs + +echo -en "\n\nChecking all user-created accounts...\n" +checkUsers + +echo -en "\n\nChecking the root account...\n" +checkRoot + +checkAgent + + +# Summary +echo -en "\n\n---------------------------------------------------------------------------------------------------\n" + +if [[ $STATUS == 0 ]]; then + echo -en "Scan Complete.\n\e[32mAll Tests Passed!\e[0m\n" +elif [[ $STATUS == 1 ]]; then + echo -en "Scan Complete. \n\e[93mSome non-critical tests failed. Please review these items.\e[0m\e[0m\n" +else + echo -en "Scan Complete. \n\e[41mOne or more tests failed. Please review these items and re-test.\e[0m\n" +fi +echo "---------------------------------------------------------------------------------------------------" +echo -en "\e[1m${PASS} Tests PASSED\e[0m\n" +echo -en "\e[1m${WARN} WARNINGS\e[0m\n" +echo -en "\e[1m${FAIL} Tests FAILED\e[0m\n" +echo -en "---------------------------------------------------------------------------------------------------\n" + +if [[ $STATUS == 0 ]]; then + echo -en "We did not detect any issues with this image. Please be sure to manually ensure that all software installed on the base system is functional, secure and properly configured (or facilities for configuration on first-boot have been created).\n\n" + exit 0 +elif [[ $STATUS == 1 ]]; then + echo -en "Please review all [WARN] items above and ensure they are intended or resolved. If you do not have a specific requirement, we recommend resolving these items before image submission\n\n" + exit 0 +else + echo -en "Some critical tests failed. These items must be resolved and this scan re-run before you submit your image to the DigitalOcean Marketplace.\n\n" + exit 1 +fi diff --git a/hosting/digitalocean/template.json b/hosting/digitalocean/template.json new file mode 100644 index 0000000000..bc3679abdc --- /dev/null +++ b/hosting/digitalocean/template.json @@ -0,0 +1,65 @@ +{ + "variables": { + "token": "{{env `DIGITALOCEAN_TOKEN`}}", + "image_name": "budibase-marketplace-snapshot-{{timestamp}}", + "apt_packages": "jq" + }, + "builders": [ + { + "type": "digitalocean", + "api_token": "{{user `token`}}", + "image": "docker-20-04", + "region": "lon1", + "size": "s-1vcpu-1gb", + "ssh_username": "root", + "snapshot_name": "{{user `image_name`}}" + } + ], + "provisioners": [ + { + "type": "shell", + "inline": [ + "cloud-init status --wait" + ] + }, + { + "type": "file", + "source": "files/etc/", + "destination": "/etc/" + }, + { + "type": "file", + "source": "files/var/", + "destination": "/var/" + }, + { + "type": "shell", + "environment_vars": [ + "DEBIAN_FRONTEND=noninteractive", + "LC_ALL=C", + "LANG=en_US.UTF-8", + "LC_CTYPE=en_US.UTF-8" + ], + "inline": [ + "apt -qqy update", + "apt -qqy -o Dpkg::Options::='--force-confdef' -o Dpkg::Options::='--force-confold' full-upgrade", + "apt -qqy -o Dpkg::Options::='--force-confdef' -o Dpkg::Options::='--force-confold' install {{user `apt_packages`}}" + ] + }, + { + "type": "shell", + "environment_vars": [ + "application_name={{user `application_name`}}", + "application_version={{user `application_version`}}", + "DEBIAN_FRONTEND=noninteractive", + "LC_ALL=C", + "LANG=en_US.UTF-8", + "LC_CTYPE=en_US.UTF-8" + ], + "scripts": [ + "scripts/90-cleanup.sh", + "scripts/99-img_check.sh" + ] + } + ] +} diff --git a/lerna.json b/lerna.json index 1a7e49a54e..bfd6b5f7b6 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { - 
"version": "1.0.5-alpha.0", + "version": "1.0.8-alpha.0", "npmClient": "yarn", "packages": [ "packages/*" diff --git a/package.json b/package.json index e0b567ce59..4be3fe1401 100644 --- a/package.json +++ b/package.json @@ -47,6 +47,7 @@ "build:docker:selfhost": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -", "build:docker:develop": "node scripts/pinVersions && lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -", "build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild", + "build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -", "build:docs": "lerna run build:docs", "release:helm": "./scripts/release_helm_chart.sh", "env:multi:enable": "lerna run env:multi:enable", diff --git a/packages/auth/package.json b/packages/auth/package.json index 0748fadcf2..761c5ee304 100644 --- a/packages/auth/package.json +++ b/packages/auth/package.json @@ -1,6 +1,6 @@ { "name": "@budibase/auth", - "version": "1.0.5-alpha.0", + "version": "1.0.8-alpha.0", "description": "Authentication middlewares for budibase builder and apps", "main": "src/index.js", "author": "Budibase", diff --git a/packages/auth/src/constants.js b/packages/auth/src/constants.js index 9892275bec..363274eda5 100644 --- a/packages/auth/src/constants.js +++ b/packages/auth/src/constants.js @@ -34,4 +34,5 @@ exports.Configs = { OIDC_LOGOS: "logos_oidc", } +exports.MAX_VALID_DATE = new Date(2147483647000) exports.DEFAULT_TENANT_ID = "default" diff --git a/packages/auth/src/security/sessions.js b/packages/auth/src/security/sessions.js index 93c2d0a9ca..ad21627bd9 100644 --- a/packages/auth/src/security/sessions.js +++ b/packages/auth/src/security/sessions.js @@ -1,6 +1,7 @@ const redis = require("../redis/authRedis") -const EXPIRY_SECONDS = 86400 +// a week in seconds +const EXPIRY_SECONDS = 86400 * 7 async function getSessionsForUser(userId) { const client = await redis.getSessionClient() diff --git a/packages/auth/src/utils.js b/packages/auth/src/utils.js index f7ab5d6990..b8fa7b9588 100644 --- a/packages/auth/src/utils.js +++ b/packages/auth/src/utils.js @@ -7,7 +7,7 @@ const { const jwt = require("jsonwebtoken") const { options } = require("./middleware/passport/jwt") const { createUserEmailView } = require("./db/views") -const { Headers, UserStatus, Cookies } = require("./constants") +const { Headers, UserStatus, Cookies, MAX_VALID_DATE } = require("./constants") const { getGlobalDB, updateTenantId, @@ -83,14 +83,15 @@ exports.getCookie = (ctx, name) => { * @param {object} ctx The request which is to be manipulated. * @param {string} name The name of the cookie to set. * @param {string|object} value The value of cookie which will be set. + * @param {object} opts options like whether to sign. 
*/ -exports.setCookie = (ctx, value, name = "builder") => { - if (value) { +exports.setCookie = (ctx, value, name = "builder", opts = { sign: true }) => { + if (value && opts && opts.sign) { value = jwt.sign(value, options.secretOrKey) } const config = { - maxAge: Number.MAX_SAFE_INTEGER, + expires: MAX_VALID_DATE, path: "/", httpOnly: false, overwrite: true, diff --git a/packages/bbui/package.json b/packages/bbui/package.json index 44f3536011..cc246957d0 100644 --- a/packages/bbui/package.json +++ b/packages/bbui/package.json @@ -1,7 +1,7 @@ { "name": "@budibase/bbui", "description": "A UI solution used in the different Budibase projects.", - "version": "1.0.5-alpha.0", + "version": "1.0.8-alpha.0", "license": "MPL-2.0", "svelte": "src/index.js", "module": "dist/bbui.es.js", diff --git a/packages/builder/package.json b/packages/builder/package.json index 3145a9e5ed..372d9627ab 100644 --- a/packages/builder/package.json +++ b/packages/builder/package.json @@ -1,6 +1,6 @@ { "name": "@budibase/builder", - "version": "1.0.5-alpha.0", + "version": "1.0.8-alpha.0", "license": "GPL-3.0", "private": true, "scripts": { @@ -14,7 +14,7 @@ "cy:setup": "node ./cypress/setup.js", "cy:run": "cypress run", "cy:open": "cypress open", - "cy:run:ci": "cypress run --record --key f308590b-6070-41af-b970-794a3823d451", + "cy:run:ci": "cypress run --record", "cy:test": "start-server-and-test cy:setup http://localhost:10001/builder cy:run", "cy:ci": "start-server-and-test cy:setup http://localhost:10001/builder cy:run", "cy:debug": "start-server-and-test cy:setup http://localhost:10001/builder cy:open" @@ -65,10 +65,10 @@ } }, "dependencies": { - "@budibase/bbui": "^1.0.5-alpha.0", - "@budibase/client": "^1.0.5-alpha.0", + "@budibase/bbui": "^1.0.8-alpha.0", + "@budibase/client": "^1.0.8-alpha.0", "@budibase/colorpicker": "1.1.2", - "@budibase/string-templates": "^1.0.5-alpha.0", + "@budibase/string-templates": "^1.0.8-alpha.0", "@sentry/browser": "5.19.1", "@spectrum-css/page": "^3.0.1", "@spectrum-css/vars": "^3.0.1", diff --git a/packages/builder/src/builderStore/store/frontend.js b/packages/builder/src/builderStore/store/frontend.js index b6227f4bd6..e0ec84591c 100644 --- a/packages/builder/src/builderStore/store/frontend.js +++ b/packages/builder/src/builderStore/store/frontend.js @@ -82,7 +82,7 @@ export const getFrontendStore = () => { libraries: application.componentLibraries, components, clientFeatures: { - ...state.clientFeatures, + ...INITIAL_FRONTEND_STATE.clientFeatures, ...components.features, }, name: application.name, diff --git a/packages/builder/src/components/design/PropertiesPanel/PropertyControls/DataSourceSelect.svelte b/packages/builder/src/components/design/PropertiesPanel/PropertyControls/DataSourceSelect.svelte index 4c173b619e..02d1e6160d 100644 --- a/packages/builder/src/components/design/PropertiesPanel/PropertyControls/DataSourceSelect.svelte +++ b/packages/builder/src/components/design/PropertiesPanel/PropertyControls/DataSourceSelect.svelte @@ -197,7 +197,7 @@ - {#if getQueryParams(value._id).length > 0} + {#if getQueryParams(value).length > 0} { if (!dataImport || !dataImport.csvString) { return table } + const db = new CouchDB(appId) // Populate the table with rows imported from CSV in a bulk update const data = await csvParser.transform({ diff --git a/packages/server/src/api/controllers/view/index.js b/packages/server/src/api/controllers/view/index.js index 3b43ef2408..e3232323bf 100644 --- a/packages/server/src/api/controllers/view/index.js +++ 
b/packages/server/src/api/controllers/view/index.js @@ -5,6 +5,7 @@ const exporters = require("./exporters") const { saveView, getView, getViews, deleteView } = require("./utils") const { fetchView } = require("../row") const { getTable } = require("../table/utils") +const { FieldTypes } = require("../../../constants") exports.fetch = async ctx => { const db = new CouchDB(ctx.appId) @@ -77,6 +78,7 @@ exports.exportView = async ctx => { } await fetchView(ctx) + let rows = ctx.body let schema = view && view.meta && view.meta.schema if (!schema) { @@ -85,11 +87,23 @@ schema = table.schema } + // remove any relationships + const relationships = Object.entries(schema) + .filter(entry => entry[1].type === FieldTypes.LINK) + .map(entry => entry[0]) + // iterate over the relationship columns and remove them from the rows and the schema + relationships.forEach(column => { + rows.forEach(row => { + delete row[column] + }) + delete schema[column] + }) + // make sure no "undefined" entries appear in the CSV if (format === exporters.ExportFormats.CSV) { const schemaKeys = Object.keys(schema) for (let key of schemaKeys) { - for (let row of ctx.body) { + for (let row of rows) { if (row[key] == null) { row[key] = "" } @@ -103,5 +117,5 @@ const filename = `${viewName}.${format}` // send down the file ctx.attachment(filename) - ctx.body = apiFileReturn(exporter(headers, ctx.body)) + ctx.body = apiFileReturn(exporter(headers, rows)) } diff --git a/packages/server/src/integrations/oracle.ts b/packages/server/src/integrations/oracle.ts index bf8e83350e..afaa902655 100644 --- a/packages/server/src/integrations/oracle.ts +++ b/packages/server/src/integrations/oracle.ts @@ -381,7 +381,7 @@ module OracleModule { }` const attributes: ConnectionAttributes = { user: this.config.user, - password: this.config.user, + password: this.config.password, connectString, } return oracledb.getConnection(attributes) diff --git a/packages/server/src/utilities/csvParser.js b/packages/server/src/utilities/csvParser.js index c548a71758..8f9b3373c9 100644 --- a/packages/server/src/utilities/csvParser.js +++ b/packages/server/src/utilities/csvParser.js @@ -102,8 +102,11 @@ async function transform({ schema, csvString, existingTable }) { schema = updateSchema({ schema, existingTable }) } - for (let key of Object.keys(schema)) { - colParser[key] = PARSERS[schema[key].type] || schema[key].type + for (let [key, field] of Object.entries(schema)) { + // don't import data to auto columns + if (!field.autocolumn) { + colParser[key] = PARSERS[field.type] || field.type + } } try { diff --git a/packages/string-templates/package.json b/packages/string-templates/package.json index 145800dbb5..586af4820c 100644 --- a/packages/string-templates/package.json +++ b/packages/string-templates/package.json @@ -1,6 +1,6 @@ { "name": "@budibase/string-templates", - "version": "1.0.5-alpha.0", + "version": "1.0.8-alpha.0", "description": "Handlebars wrapper for Budibase templating.", "main": "src/index.cjs", "module": "dist/bundle.mjs", diff --git a/packages/worker/package.json b/packages/worker/package.json index 6917856ceb..62b52d35c0 100644 --- a/packages/worker/package.json +++ b/packages/worker/package.json @@ -1,7 +1,7 @@ { "name": "@budibase/worker", "email": "hi@budibase.com", - "version": "1.0.5-alpha.0", + "version": "1.0.8-alpha.0", "description": "Budibase background service", "main": "src/index.js", "repository": { @@ -29,8 +29,8 @@ "author": "Budibase", "license": "GPL-3.0", "dependencies": {
- "@budibase/auth": "^1.0.5-alpha.0", - "@budibase/string-templates": "^1.0.5-alpha.0", + "@budibase/auth": "^1.0.8-alpha.0", + "@budibase/string-templates": "^1.0.8-alpha.0", "@koa/router": "^8.0.0", "@sentry/node": "^6.0.0", "@techpass/passport-openidconnect": "^0.3.0", diff --git a/packages/worker/src/api/controllers/global/auth.js b/packages/worker/src/api/controllers/global/auth.js index 592787c5ea..cd7d8abcee 100644 --- a/packages/worker/src/api/controllers/global/auth.js +++ b/packages/worker/src/api/controllers/global/auth.js @@ -56,26 +56,11 @@ async function authInternal(ctx, user, err = null, info = null) { return ctx.throw(403, info ? info : "Unauthorized") } - const expires = new Date() - expires.setDate(expires.getDate() + 1) - if (!user) { return ctx.throw(403, info ? info : "Unauthorized") } - const config = { - expires, - path: "/", - httpOnly: false, - overwrite: true, - } - - if (env.COOKIE_DOMAIN) { - config.domain = env.COOKIE_DOMAIN - } - - // just store the user ID - ctx.cookies.set(Cookies.Auth, user.token, config) + setCookie(ctx, user.token, Cookies.Auth, { sign: false }) // get rid of any app cookies on login // have to check test because this breaks cypress if (!env.isTest()) {