cherry pick nginx change (#9316)
Co-authored-by: Rory Powell <rory.codes@gmail.com>
parent 0e1ce7789f
commit def34d5b90
@@ -38,17 +38,6 @@ jobs:
fi
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV

- name: Tag and release Proxy service docker image
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
yarn build:docker:proxy:prod
docker tag proxy-service budibase/proxy:$PROD_TAG
docker push budibase/proxy:$PROD_TAG
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
PROD_TAG: k8s

- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
@@ -28,17 +28,6 @@ jobs:
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV

- name: Tag and release Proxy service docker image
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
yarn build:docker:proxy:preprod
docker tag proxy-service budibase/proxy:$PREPROD_TAG
docker push budibase/proxy:$PREPROD_TAG
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
PREPROD_TAG: k8s-preprod

- name: Pull values.yaml from budibase-infra
run: |
curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \
@@ -29,17 +29,6 @@ jobs:
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV

- name: Tag and release Proxy service docker image
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
yarn build:docker:proxy:release
docker tag proxy-service budibase/proxy:$RELEASE_TAG
docker push budibase/proxy:$RELEASE_TAG
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
RELEASE_TAG: k8s-release

- name: Pull values.yaml from budibase-infra
run: |
curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \
@@ -76,22 +76,25 @@ jobs:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}

- name: Get the latest budibase release version
deploy-to-release-env:
needs: [release-images]
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v2

- name: Get the current budibase release version
id: version
run: |
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV

- name: Tag and release Proxy service docker image
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
yarn build:docker:proxy:release
docker tag proxy-service budibase/proxy:$RELEASE_TAG
docker push budibase/proxy:$RELEASE_TAG
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
RELEASE_TAG: k8s-release
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: eu-west-1

- name: Pull values.yaml from budibase-infra
run: |
@@ -98,17 +98,6 @@ jobs:
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: eu-west-1

- name: Tag and release Proxy service docker image
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
yarn build:docker:proxy:preprod
docker tag proxy-service budibase/proxy:$PREPROD_TAG
docker push budibase/proxy:$PREPROD_TAG
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
PREPROD_TAG: k8s-preprod

- name: Pull values.yaml from budibase-infra
run: |
curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \
@@ -66,8 +66,6 @@ typings/
.env
!qa-core/.env
!hosting/.env
hosting/.generated-nginx.dev.conf
hosting/proxy/.generated-nginx.prod.conf

# parcel-bundler cache (https://parceljs.org/)
.cache
@@ -105,5 +103,7 @@ stats.html

# TypeScript cache
*.tsbuildinfo

# plugins
budibase-component
budibase-datasource
@@ -28,11 +28,26 @@ spec:
app.kubernetes.io/name: budibase-proxy
spec:
containers:
- image: budibase/proxy:{{ .Values.services.proxy.tag | default "k8s" }}
- image: budibase/proxy:{{ .Values.globals.appVersion }}
imagePullPolicy: Always
name: proxy-service
ports:
- containerPort: {{ .Values.services.proxy.port }}
env:
- name: APPS_UPSTREAM_URL
value: {{ tpl .Values.services.proxy.upstreams.apps . | quote }}
- name: WORKER_UPSTREAM_URL
value: {{ tpl .Values.services.proxy.upstreams.worker . | quote }}
- name: MINIO_UPSTREAM_URL
value: {{ tpl .Values.services.proxy.upstreams.minio . | quote }}
- name: COUCHDB_UPSTREAM_URL
value: {{ .Values.services.couchdb.url | default (tpl .Values.services.proxy.upstreams.couchdb .) | quote }}
- name: RESOLVER
{{ if .Values.services.proxy.resolver }}
value: {{ .Values.services.proxy.resolver }}
{{ else }}
value: kube-dns.kube-system.svc.{{ .Values.services.dns }}
{{ end }}
{{ with .Values.services.proxy.resources }}
resources:
{{- toYaml . | nindent 10 }}
@@ -124,6 +124,11 @@ services:
proxy:
port: 10000
replicaCount: 1
upstreams:
apps: 'http://app-service.{{ .Release.Namespace }}.svc.{{ .Values.services.dns }}:{{ .Values.services.apps.port }}'
worker: 'http://worker-service.{{ .Release.Namespace }}.svc.{{ .Values.services.dns }}:{{ .Values.services.worker.port }}'
minio: 'http://minio-service.{{ .Release.Namespace }}.svc.{{ .Values.services.dns }}:{{ .Values.services.objectStore.port }}'
couchdb: 'http://{{ .Release.Name }}-svc-couchdb:{{ .Values.services.couchdb.port }}'
resources: {}

apps:
@@ -27,7 +27,7 @@ services:
restart: on-failure
image: nginx:latest
volumes:
- ./.generated-nginx.dev.conf:/etc/nginx/nginx.conf
- ./nginx.dev.conf:/etc/nginx/nginx.conf
- ./proxy/error.html:/usr/share/nginx/html/error.html
ports:
- "${MAIN_PORT}:10000"
@@ -36,6 +36,8 @@ services:
- couchdb-service
extra_hosts:
- "host.docker.internal:host-gateway"
environment:
- PROXY_ADDRESS=${PROXY_ADDRESS}

couchdb-service:
# platform: linux/amd64
@@ -82,6 +82,12 @@ services:
environment:
- PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
- PROXY_RATE_LIMIT_API_PER_SECOND=20
- APPS_UPSTREAM_URL=http://app-service:4002
- WORKER_UPSTREAM_URL=http://worker-service:4003
- MINIO_UPSTREAM_URL=http://minio-service:9000
- COUCHDB_UPSTREAM_URL=http://couchdb-service:5984
- WATCHTOWER_UPSTREAM_URL=http://watchtower-service:8080
- RESOLVER=127.0.0.11
depends_on:
- minio-service
- worker-service
@@ -25,17 +25,17 @@ http {
}

upstream app-service {
server {{address}}:4001;
server ${PROXY_ADDRESS}:4001;
keepalive 32;
}

upstream worker-service {
server {{address}}:4002;
server ${PROXY_ADDRESS}:4002;
keepalive 32;
}

upstream builder {
server {{address}}:3000;
server ${PROXY_ADDRESS}:3000;
keepalive 32;
}
@@ -4,7 +4,7 @@ FROM nginx:latest
# use the default nginx behaviour for *.template files which are processed with envsubst
# override the output dir to output directly to /etc/nginx instead of /etc/nginx/conf.d
ENV NGINX_ENVSUBST_OUTPUT_DIR=/etc/nginx
COPY .generated-nginx.prod.conf /etc/nginx/templates/nginx.conf.template
COPY nginx.prod.conf /etc/nginx/templates/nginx.conf.template

# IPv6 removal needs to happen after envsubst
RUN rm -rf /docker-entrypoint.d/10-listen-on-ipv6-by-default.sh
@@ -16,4 +16,11 @@ COPY error.html /usr/share/nginx/html/error.html

# Default environment
ENV PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
ENV PROXY_RATE_LIMIT_API_PER_SECOND=20
ENV PROXY_RATE_LIMIT_API_PER_SECOND=20
# Use docker-compose values as defaults for backwards compatibility
ENV APPS_UPSTREAM_URL=http://app-service:4002
ENV WORKER_UPSTREAM_URL=http://worker-service:4003
ENV MINIO_UPSTREAM_URL=http://minio-service:9000
ENV COUCHDB_UPSTREAM_URL=http://couchdb-service:5984
ENV WATCHTOWER_UPSTREAM_URL=http://watchtower-service:8080
ENV RESOLVER=127.0.0.11
@@ -23,7 +23,7 @@ http {
tcp_nodelay on;
server_tokens off;
types_hash_max_size 2048;
resolver {{ resolver }} valid=10s ipv6=off;
resolver ${RESOLVER} valid=10s ipv6=off;

# buffering
client_header_buffer_size 1k;
@@ -76,27 +76,23 @@ http {
add_header Content-Security-Policy "${csp_default}; ${csp_script}; ${csp_style}; ${csp_object}; ${csp_base_uri}; ${csp_connect}; ${csp_font}; ${csp_frame}; ${csp_img}; ${csp_manifest}; ${csp_media}; ${csp_worker};" always;

# upstreams
set $apps {{ apps }};
set $worker {{ worker }};
set $minio {{ minio }};
set $couchdb {{ couchdb }};
{{#if watchtower}}
set $watchtower {{ watchtower }};
{{/if}}
set $apps ${APPS_UPSTREAM_URL};
set $worker ${WORKER_UPSTREAM_URL};
set $minio ${MINIO_UPSTREAM_URL};
set $couchdb ${COUCHDB_UPSTREAM_URL};
set $watchtower ${WATCHTOWER_UPSTREAM_URL};

location /app {
proxy_pass http://$apps:4002;
proxy_pass $apps;
}

location = / {
proxy_pass http://$apps:4002;
proxy_pass $apps;
}

{{#if watchtower}}
location = /v1/update {
proxy_pass http://$watchtower:8080;
proxy_pass $watchtower;
}
{{/if}}

location ~ ^/(builder|app_) {
proxy_http_version 1.1;
@@ -107,19 +103,17 @@ http {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $host;

proxy_pass http://$apps:4002;
proxy_pass $apps;
}

location ~ ^/api/(system|admin|global)/ {
proxy_set_header Host $host;

proxy_pass http://$worker:4003;
proxy_pass $worker;
}

location /worker/ {
proxy_set_header Host $host;

proxy_pass http://$worker:4003;
proxy_pass $worker;
rewrite ^/worker/(.*)$ /$1 break;
}
@@ -138,7 +132,7 @@ http {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

proxy_pass http://$apps:4002;
proxy_pass $apps;
}

location /api/ {
@@ -157,7 +151,7 @@ http {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $host;

proxy_pass http://$apps:4002;
proxy_pass $apps;
}

location /api/webhooks/ {
@@ -177,11 +171,11 @@ http {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $host;

proxy_pass http://$apps:4002;
proxy_pass $apps;
}

location /db/ {
proxy_pass http://$couchdb:5984;
proxy_pass $couchdb;
rewrite ^/db/(.*)$ /$1 break;
}
@@ -191,7 +185,7 @@ http {
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://$apps:4002;
proxy_pass $apps;
}

location / {
@@ -205,7 +199,7 @@ http {
proxy_set_header Connection "";
chunked_transfer_encoding off;

proxy_pass http://$minio:9000;
proxy_pass $minio;
}

client_header_timeout 60;
package.json
@@ -55,15 +55,11 @@
"test:e2e:ci:record": "lerna run cy:ci:record --stream",
"test:e2e:ci:notify": "lerna run cy:ci:notify",
"build:specs": "lerna run specs",
"build:docker": "lerna run build:docker && npm run build:docker:proxy:compose && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
"build:docker": "lerna run build:docker && npm run build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
"build:docker:pre": "lerna run build && lerna run predocker",
"build:docker:proxy": "docker build hosting/proxy -t proxy-service",
"build:docker:proxy:compose": "node scripts/proxy/generateProxyConfig compose && npm run build:docker:proxy",
"build:docker:proxy:preprod": "node scripts/proxy/generateProxyConfig preprod && npm run build:docker:proxy",
"build:docker:proxy:release": "node scripts/proxy/generateProxyConfig release && npm run build:docker:proxy",
"build:docker:proxy:prod": "node scripts/proxy/generateProxyConfig prod && npm run build:docker:proxy",
"build:docker:selfhost": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
"build:docker:develop": "node scripts/pinVersions && lerna run build:docker && npm run build:docker:proxy:compose && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
"build:docker:develop": "node scripts/pinVersions && lerna run build:docker && npm run build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
"build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
"build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",
"build:docker:single:multiarch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/single/Dockerfile -t budibase:latest .",
@@ -87,4 +83,4 @@
"install:pro": "bash scripts/pro/install.sh",
"dep:clean": "yarn clean && yarn bootstrap"
}
}
}
@@ -3,7 +3,6 @@ const compose = require("docker-compose")
const path = require("path")
const fs = require("fs")
const isWsl = require("is-wsl")
const { processStringSync } = require("@budibase/string-templates")

function isLinux() {
return !isWsl && process.platform !== "darwin" && process.platform !== "win32"
@@ -23,16 +22,6 @@ const Commands = {
}

async function init() {
// generate nginx file, always do this incase it has changed
const hostingPath = path.join(process.cwd(), "..", "..", "hosting")
const nginxHbsPath = path.join(hostingPath, "nginx.dev.conf.hbs")
const nginxOutputPath = path.join(hostingPath, ".generated-nginx.dev.conf")
const contents = fs.readFileSync(nginxHbsPath, "utf8")
const config = {
address: isLinux() ? "172.17.0.1" : "host.docker.internal",
}
fs.writeFileSync(nginxOutputPath, processStringSync(contents, config))

const envFilePath = path.join(process.cwd(), ".env")
if (!fs.existsSync(envFilePath)) {
const envFileJson = {
@@ -60,6 +49,7 @@ async function init() {
BB_ADMIN_USER_PASSWORD: "",
PLUGINS_DIR: "",
TENANT_FEATURE_FLAGS: "*:LICENSING,*:USER_GROUPS",
PROXY_ADDRESS: isLinux() ? "172.17.0.1" : "host.docker.internal",
}
let envFile = ""
Object.keys(envFileJson).forEach(key => {
@@ -1,111 +0,0 @@
#!/usr/bin/env node
const path = require("path")
const fs = require("fs")

function processStringSync(string, env) {
let output = ""

// process if statements
let removal = false
for (let line of string.split("\n")) {
if (new RegExp(`{{\/if}}`, "g").test(line)) {
removal = false
continue
}

if (!removal) {
const match = line.match(new RegExp(`{{#if (.*)}}`))
if (match) {
const key = match[1]
// check the if statement is true
if (!env[key]) {
removal = true
}
continue
}
output += line + "\n"
}
}

for (let key in env) {
// replace variables
const rgx = new RegExp(`{{\\s*${key}\\s*}}`, "g")
output = output.replace(rgx, env[key])
}

return output
}

const Configs = {
prod: {
apps: "app-service.budibase.svc.cluster.local",
worker: "worker-service.budibase.svc.cluster.local",
minio: "minio-service.budibase.svc.cluster.local",
couchdb: "budibase-prod-svc-couchdb",
resolver: "kube-dns.kube-system.svc.cluster.local"
},
preprod: {
apps: "app-service.budibase.svc.cluster.local",
worker: "worker-service.budibase.svc.cluster.local",
minio: "minio-service.budibase.svc.cluster.local",
couchdb: "budibase-preprod-svc-couchdb",
resolver: "kube-dns.kube-system.svc.cluster.local"
},
release: {
apps: "app-service.budibase.svc.cluster.local",
worker: "worker-service.budibase.svc.cluster.local",
minio: "minio-service.budibase.svc.cluster.local",
couchdb: "budibase-release-svc-couchdb",
resolver: "kube-dns.kube-system.svc.cluster.local"
},
compose: {
apps: "app-service",
worker: "worker-service",
minio: "minio-service",
couchdb: "couchdb-service",
watchtower: "watchtower-service",
resolver: "127.0.0.11"
},
}

const Commands = {
Prod: "prod",
Preprod: "preprod",
Release: "release",
Compose: "compose",
}

async function init(managementCommand) {
const config = Configs[managementCommand]
const hostingPath = path.join(process.cwd(), "hosting")
const nginxHbsPath = path.join(hostingPath, "nginx.prod.conf.hbs")
const nginxOutputPath = path.join(
hostingPath,
"proxy",
".generated-nginx.prod.conf"
)
const contents = fs.readFileSync(nginxHbsPath, "utf8")
fs.writeFileSync(nginxOutputPath, processStringSync(contents, config))
}

const managementCommand = process.argv.slice(2)[0]

if (
!managementCommand ||
!Object.values(Commands).some(command => managementCommand === command)
) {
throw new Error(
"You must supply either a 'compose', 'preprod' or 'prod' commmand to generate an NGINX config."
)
}

init(managementCommand)
.then(() => {
console.log("Done! 🎉")
})
.catch(err => {
console.error(
"Something went wrong while creating the nginx configuration",
err.message
)
})