Compare commits

..

2 Commits

Author SHA1 Message Date
f2e0434ad6 PR AB: complete Phoenix deployment scaffolding (add 3 files referenced by main 4a1f69a) (#32)
Some checks failed
Deploy to Phoenix / deploy (push) Failing after 7s
Adds webapp-nginx.conf, systemd/currencicombo-orchestrator.service, and install-prune-cron.sh — all three referenced by main's existing install.sh / deploy script / webapp.service / README but missing from the 4a1f69a commit. Byte-identical to PR #31 branch ded7d24.

Closes gap so CT 8604 can boot cleanly.
2026-04-23 04:39:36 +00:00
defiQUG
4a1f69a8e5 deploy: make Phoenix redeploys archive-safe
Some checks failed
Deploy to Phoenix / deploy (push) Failing after 5s
phoenix-deploy Deploy failed: Command failed: bash scripts/deployment/phoenix-deploy-currencicombo-from-workspace.sh [currencicombo-phoenix] packing s
2026-04-22 20:05:35 -07:00
8 changed files with 116 additions and 35 deletions

View File

@@ -0,0 +1,22 @@
# Deploys every push to main/master (or a manual dispatch) by POSTing the
# commit metadata to the Phoenix deploy webhook.
# Secrets required: PHOENIX_DEPLOY_URL (webhook endpoint) and
# PHOENIX_DEPLOY_TOKEN (bearer token for that endpoint).
name: Deploy to Phoenix

on:
  push:
    branches: [main, master]
  workflow_dispatch:

jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Trigger Phoenix deployment
        run: |
          SHA="$(git rev-parse HEAD)"
          # checkout@v4 checks the commit out as a detached HEAD on push
          # events, so `git rev-parse --abbrev-ref HEAD` would print the
          # literal "HEAD". Prefer the runner-provided ref name and only
          # fall back to git when it is unset (e.g. odd local runners).
          BRANCH="${GITHUB_REF_NAME:-$(git rev-parse --abbrev-ref HEAD)}"
          curl -sSf -X POST "${{ secrets.PHOENIX_DEPLOY_URL }}" \
            -H "Authorization: Bearer ${{ secrets.PHOENIX_DEPLOY_TOKEN }}" \
            -H "Content-Type: application/json" \
            -d "{\"repo\":\"${{ gitea.repository }}\",\"sha\":\"${SHA}\",\"branch\":\"${BRANCH}\",\"target\":\"default\"}"

View File

@@ -1,27 +1,42 @@
import { z } from "zod";
/**
 * Preprocessor for zod env fields: trims string values and maps
 * blank/whitespace-only strings to `undefined`, so an env var that is
 * present but empty validates the same as one that is unset.
 * Non-string values pass through unchanged.
 */
const emptyToUndefined = (value: unknown) => {
  if (typeof value !== "string") {
    return value;
  }
  const normalized = value.trim();
  return normalized.length === 0 ? undefined : normalized;
};
// Schema factories for optional env vars: run emptyToUndefined first so a
// variable that is defined but blank ("" or whitespace) validates as absent
// instead of failing the string/url check.
const optionalString = () => z.preprocess(emptyToUndefined, z.string().optional());
const optionalUrl = () => z.preprocess(emptyToUndefined, z.string().url().optional());
/**
* Environment variable validation schema
*/
const envSchema = z.object({
NODE_ENV: z.enum(["development", "production", "test"]).default("development"),
PORT: z.string().transform(Number).pipe(z.number().int().positive()),
DATABASE_URL: z.string().url().optional(),
API_KEYS: z.string().optional(),
REDIS_URL: z.string().url().optional(),
DATABASE_URL: optionalUrl(),
API_KEYS: optionalString(),
REDIS_URL: optionalUrl(),
LOG_LEVEL: z.enum(["error", "warn", "info", "debug"]).default("info"),
ALLOWED_IPS: z.string().optional(),
ALLOWED_IPS: optionalString(),
SESSION_SECRET: z.string().min(32),
JWT_SECRET: z.string().min(32).optional(),
AZURE_KEY_VAULT_URL: z.string().url().optional(),
AWS_SECRETS_MANAGER_REGION: z.string().optional(),
SENTRY_DSN: z.string().url().optional(),
JWT_SECRET: z.preprocess(emptyToUndefined, z.string().min(32).optional()),
AZURE_KEY_VAULT_URL: optionalUrl(),
AWS_SECRETS_MANAGER_REGION: optionalString(),
SENTRY_DSN: optionalUrl(),
// Chain-138 + NotaryRegistry wiring (arch §4.5). All optional; when
// absent the notary adapter falls back to its deterministic mock.
CHAIN_138_RPC_URL: z.string().url().optional(),
CHAIN_138_CHAIN_ID: z.string().regex(/^\d+$/).optional(),
NOTARY_REGISTRY_ADDRESS: z.string().regex(/^0x[0-9a-fA-F]{40}$/).optional(),
ORCHESTRATOR_PRIVATE_KEY: z.string().regex(/^0x[0-9a-fA-F]{64}$/).optional(),
CHAIN_138_RPC_URL: optionalUrl(),
CHAIN_138_CHAIN_ID: z.preprocess(emptyToUndefined, z.string().regex(/^\d+$/).optional()),
NOTARY_REGISTRY_ADDRESS: z.preprocess(
emptyToUndefined,
z.string().regex(/^0x[0-9a-fA-F]{40}$/).optional(),
),
ORCHESTRATOR_PRIVATE_KEY: z.preprocess(
emptyToUndefined,
z.string().regex(/^0x[0-9a-fA-F]{64}$/).optional(),
),
});
/**
@@ -31,7 +46,7 @@ export const env = envSchema.parse({
NODE_ENV: process.env.NODE_ENV,
PORT: process.env.PORT || "8080",
DATABASE_URL: process.env.DATABASE_URL,
API_KEYS: process.env.API_KEYS,
API_KEYS: process.env.API_KEYS || process.env.ORCHESTRATOR_API_KEYS,
REDIS_URL: process.env.REDIS_URL,
LOG_LEVEL: process.env.LOG_LEVEL,
ALLOWED_IPS: process.env.ALLOWED_IPS,
@@ -56,7 +71,7 @@ export function validateEnv() {
NODE_ENV: process.env.NODE_ENV || "development",
PORT: process.env.PORT || "8080",
DATABASE_URL: process.env.DATABASE_URL,
API_KEYS: process.env.API_KEYS,
API_KEYS: process.env.API_KEYS || process.env.ORCHESTRATOR_API_KEYS,
REDIS_URL: process.env.REDIS_URL,
LOG_LEVEL: process.env.LOG_LEVEL || "info",
ALLOWED_IPS: process.env.ALLOWED_IPS,
@@ -65,6 +80,10 @@ export function validateEnv() {
AZURE_KEY_VAULT_URL: process.env.AZURE_KEY_VAULT_URL,
AWS_SECRETS_MANAGER_REGION: process.env.AWS_SECRETS_MANAGER_REGION,
SENTRY_DSN: process.env.SENTRY_DSN,
CHAIN_138_RPC_URL: process.env.CHAIN_138_RPC_URL,
CHAIN_138_CHAIN_ID: process.env.CHAIN_138_CHAIN_ID,
NOTARY_REGISTRY_ADDRESS: process.env.NOTARY_REGISTRY_ADDRESS,
ORCHESTRATOR_PRIVATE_KEY: process.env.ORCHESTRATOR_PRIVATE_KEY,
};
envSchema.parse(envWithDefaults);
console.log("✅ Environment variables validated");
@@ -79,4 +98,3 @@ export function validateEnv() {
throw error;
}
}

View File

@@ -70,16 +70,28 @@ app.get("/health", async (req, res) => {
const health = await healthCheck();
res.status(health.status === "healthy" ? 200 : 503).json(health);
});
// Aliases for the bare health/readiness/liveness routes under /api/*, so
// probes routed through the reverse proxy's "/api" rule hit the same checks.
app.get("/api/health", async (_req, res) => {
  const report = await healthCheck();
  const code = report.status === "healthy" ? 200 : 503;
  res.status(code).json(report);
});
app.get("/ready", async (_req, res) => {
  const isReady = await readinessCheck();
  res.status(isReady ? 200 : 503).json({ ready: isReady });
});
app.get("/api/ready", async (_req, res) => {
  const isReady = await readinessCheck();
  res.status(isReady ? 200 : 503).json({ ready: isReady });
});
app.get("/live", async (_req, res) => {
  const isAlive = await livenessCheck();
  res.status(isAlive ? 200 : 503).json({ alive: isAlive });
});
app.get("/api/live", async (_req, res) => {
  const isAlive = await livenessCheck();
  res.status(isAlive ? 200 : 503).json({ alive: isAlive });
});
// Metrics endpoint
app.get("/metrics", async (req, res) => {
@@ -87,6 +99,11 @@ app.get("/metrics", async (req, res) => {
const metrics = await getMetrics();
res.send(metrics);
});
app.get("/api/metrics", async (req, res) => {
res.setHeader("Content-Type", register.contentType);
const metrics = await getMetrics();
res.send(metrics);
});
// API routes with rate limiting
app.use("/api", apiLimiter);
@@ -173,4 +190,3 @@ async function start() {
}
start();

View File

@@ -23,7 +23,7 @@ HOST=0.0.0.0
############################################################
# Postgres (local to the CT per install.sh)
############################################################
DATABASE_URL=postgresql://currencicombo@127.0.0.1:5432/currencicombo
DATABASE_URL=postgresql://currencicombo:replace-me-on-install@127.0.0.1:5432/currencicombo
############################################################
# Redis (local to the CT per install.sh)
@@ -41,7 +41,7 @@ EVENT_SIGNING_SECRET=
# initiator/settler/auditor keys on first run unless set.
# Format: key1:role1,key2:role2,...
############################################################
ORCHESTRATOR_API_KEYS=
API_KEYS=
############################################################
# Chain 138 — resolves EXT-CHAIN138-CI-RPC (already resolved).

View File

@@ -112,7 +112,7 @@ already under `/api/*` — no separate `/events/*` rule is needed):
| location | upstream | proxy settings |
|---|---|---|
| `/api/*` | `http://10.160.0.14:8080` | **SSE-friendly settings apply here because the SSE route `/api/plans/:id/events/stream` is under /api/**. Set: `proxy_http_version 1.1;`, `proxy_set_header Connection "";`, `proxy_buffering off;`, `proxy_cache off;`, `proxy_read_timeout 24h;`, `proxy_send_timeout 24h;`. Standard forwarding: `proxy_set_header Host $host;`, `X-Real-IP $remote_addr;`, `X-Forwarded-For $proxy_add_x_forwarded_for;`, `X-Forwarded-Proto $scheme;`. The slight overhead of `proxy_buffering off` on plain REST calls is negligible for this workload. |
| `/api/*` | `http://10.160.0.14:8080` | **SSE-friendly settings apply here because the SSE route `/api/plans/:id/events/stream` is under /api/**. Use `proxy_pass http://10.160.0.14:8080;` with **no trailing slash** so `/api/...` reaches the orchestrator unchanged. Set: `proxy_http_version 1.1;`, `proxy_set_header Connection "";`, `proxy_buffering off;`, `proxy_cache off;`, `proxy_read_timeout 24h;`, `proxy_send_timeout 24h;`. Standard forwarding: `proxy_set_header Host $host;`, `X-Real-IP $remote_addr;`, `X-Forwarded-For $proxy_add_x_forwarded_for;`, `X-Forwarded-Proto $scheme;`. The slight overhead of `proxy_buffering off` on plain REST calls is negligible for this workload. |
| `/` | `http://10.160.0.14:3000` | Vite SPA. Default upstream. No special settings. |
If you skip the `/api/*` rule, the nginx in `webapp-nginx.conf`
@@ -237,7 +237,7 @@ Phoenix previously had an older Next.js "ISO-20022 Combo Flow" app in
```
3. Run `install.sh` (writes the new units, new nginx, new env). On an already-set-up host this is idempotent: it preserves `/etc/currencicombo/orchestrator.env` if it already exists.
4. Run `deploy-currencicombo-8604.sh`.
5. Apply the NPMplus `/api` + `/events` path rules.
5. Apply the NPMplus `/api` + `/` path rules.
6. Smoke from outside the CT: `curl -skI https://curucombo.xn--vov0g.com/ && curl -sk https://curucombo.xn--vov0g.com/api/ready`.
## Proxmox-side follow-up (not in this PR)

View File

@@ -107,20 +107,31 @@ if [[ "${DO_ROLLBACK}" -eq 1 ]]; then
fi
# ----- 1. git ---------------------------------------------------------
if [[ ! -d "${CC_REPO_DIR}/.git" ]]; then
run "install -d -o '${CC_USER}' -g '${CC_USER}' -m 0755 '${CC_REPO_DIR}'"
run "chown -R '${CC_USER}:${CC_USER}' '${CC_REPO_DIR}'"
if [[ ! -d "${CC_REPO_DIR}/.git" && "${CC_GIT_REF}" != "local" ]]; then
log "cloning ${CC_GIT_REMOTE}${CC_REPO_DIR}"
run "install -d -o '${CC_USER}' -g '${CC_USER}' -m 0755 '${CC_REPO_DIR}'"
runcc "git clone '${CC_GIT_REMOTE}' '${CC_REPO_DIR}'"
fi
runcc "cd '${CC_REPO_DIR}' && git fetch --prune origin"
runcc "cd '${CC_REPO_DIR}' && git reset --hard 'origin/${CC_GIT_REF}'"
REF_SHA="$(sudo -u "${CC_USER}" git -C "${CC_REPO_DIR}" rev-parse --short HEAD 2>/dev/null || echo unknown)"
log "repo at ${CC_GIT_REF} = ${REF_SHA}"
if [[ -d "${CC_REPO_DIR}/.git" && "${CC_GIT_REF}" != "local" ]]; then
runcc "cd '${CC_REPO_DIR}' && git fetch --prune origin"
runcc "cd '${CC_REPO_DIR}' && git reset --hard 'origin/${CC_GIT_REF}'"
REF_SHA="$(sudo -u "${CC_USER}" git -C "${CC_REPO_DIR}" rev-parse --short HEAD 2>/dev/null || echo unknown)"
log "repo at ${CC_GIT_REF} = ${REF_SHA}"
else
REF_SHA="local"
log "using staged local workspace from ${CC_REPO_DIR}"
fi
# ----- 2. orchestrator build -----------------------------------------
if [[ "${SKIP_BUILD}" -eq 0 ]]; then
log "building orchestrator"
runcc "cd '${CC_REPO_DIR}/orchestrator' && npm ci --no-audit --no-fund"
if [[ -f "${CC_REPO_DIR}/orchestrator/package-lock.json" ]]; then
runcc "cd '${CC_REPO_DIR}/orchestrator' && npm ci --no-audit --no-fund"
else
runcc "cd '${CC_REPO_DIR}/orchestrator' && npm install --no-audit --no-fund"
fi
runcc "cd '${CC_REPO_DIR}/orchestrator' && npm run build"
log "building portal (VITE_ORCHESTRATOR_URL=${VITE_ORCHESTRATOR_URL})"
runcc "cd '${CC_REPO_DIR}' && npm ci --include=optional --no-audit --no-fund || npm ci --include=optional --force --no-audit --no-fund"
@@ -158,7 +169,7 @@ log "rsyncing new build into ${CC_APP_HOME}"
runcc "rsync -a --delete '${CC_REPO_DIR}/orchestrator/dist/' '${CC_APP_HOME}/orchestrator/dist/'"
runcc "rsync -a '${CC_REPO_DIR}/orchestrator/node_modules/' '${CC_APP_HOME}/orchestrator/node_modules/'"
runcc "cp '${CC_REPO_DIR}/orchestrator/package.json' '${CC_APP_HOME}/orchestrator/package.json'"
runcc "cp '${CC_REPO_DIR}/orchestrator/package-lock.json' '${CC_APP_HOME}/orchestrator/package-lock.json'"
runcc "if [[ -f '${CC_REPO_DIR}/orchestrator/package-lock.json' ]]; then cp '${CC_REPO_DIR}/orchestrator/package-lock.json' '${CC_APP_HOME}/orchestrator/package-lock.json'; else rm -f '${CC_APP_HOME}/orchestrator/package-lock.json'; fi"
# Webapp: dist/
runcc "rsync -a --delete '${CC_REPO_DIR}/dist/' '${CC_APP_HOME}/webapp/dist/'"

View File

@@ -44,6 +44,9 @@ log() { printf '[install] %s\n' "$*" >&2; }
warn() { printf '[install][WARN] %s\n' "$*" >&2; }
die() { printf '[install][FATAL] %s\n' "$*" >&2; exit 1; }
run() { if [[ "${DRY_RUN}" -eq 1 ]]; then printf '[install][dry-run] %s\n' "$*" >&2; else eval "$*"; fi; }
# sql_escape VALUE — emit VALUE with every single quote doubled, the SQL
# standard escape, so it can be spliced into a single-quoted SQL literal.
sql_escape() {
  local raw="$1"
  printf "%s" "${raw}" | sed "s/'/''/g"
}
usage() {
cat <<'USAGE'
@@ -141,16 +144,16 @@ if ! pg_db_exists; then
log "creating Postgres database ${APP_USER}"
run "sudo -u postgres psql -c \"CREATE DATABASE ${APP_USER} OWNER ${APP_USER};\""
fi
# Peer auth from the currencicombo OS user → currencicombo DB role "just works"
# on Debian-style pg_hba (local all all peer). No password needed.
# ----------------------------------------------------------------------
# 4. Redis
# ----------------------------------------------------------------------
if systemctl list-unit-files | grep -q '^redis-server\.service'; then
run systemctl enable --now redis-server
run "systemctl start redis-server.service || true"
run "systemctl enable redis-server.service >/dev/null 2>&1 || true"
elif systemctl list-unit-files | grep -q '^redis\.service'; then
run systemctl enable --now redis
run "systemctl start redis.service || true"
run "systemctl enable redis.service >/dev/null 2>&1 || true"
elif command -v redis-cli >/dev/null 2>&1; then
warn "redis-cli present but no redis-server.service / redis.service unit — assuming external Redis"
else
@@ -171,8 +174,16 @@ else
INIT_KEY="$(openssl rand -hex 24)"
SETT_KEY="$(openssl rand -hex 24)"
AUD_KEY="$(openssl rand -hex 24)"
DB_PASSWORD="$(openssl rand -hex 24)"
DB_PASSWORD_SQL="$(sql_escape "${DB_PASSWORD}")"
API_KEYS_VALUE="${INIT_KEY}:initiator,${SETT_KEY}:settler,${AUD_KEY}:auditor"
DATABASE_URL="postgresql://${APP_USER}:${DB_PASSWORD}@127.0.0.1:5432/${APP_USER}"
log "setting Postgres password for role ${APP_USER}"
run "sudo -u postgres psql -c \"ALTER ROLE ${APP_USER} WITH LOGIN PASSWORD '${DB_PASSWORD_SQL}';\""
run "sed -i 's|^EVENT_SIGNING_SECRET=.*|EVENT_SIGNING_SECRET=${SECRET}|' '${ENV_FILE}'"
run "sed -i 's|^ORCHESTRATOR_API_KEYS=.*|ORCHESTRATOR_API_KEYS=${INIT_KEY}:initiator,${SETT_KEY}:settler,${AUD_KEY}:auditor|' '${ENV_FILE}'"
run "sed -i 's|^API_KEYS=.*|API_KEYS=${API_KEYS_VALUE}|' '${ENV_FILE}'"
run "sed -i 's|^DATABASE_URL=.*|DATABASE_URL=${DATABASE_URL}|' '${ENV_FILE}'"
run "grep -q '^ORCHESTRATOR_API_KEYS=' '${ENV_FILE}' && sed -i 's|^ORCHESTRATOR_API_KEYS=.*|ORCHESTRATOR_API_KEYS=${API_KEYS_VALUE}|' '${ENV_FILE}' || printf '\nORCHESTRATOR_API_KEYS=%s\n' '${API_KEYS_VALUE}' >> '${ENV_FILE}'"
# Write a root-only handoff file so ops can grab the keys without
# scraping journald or reading the env file. The canonical copy lives
# in ${ENV_FILE}; delete this file once the keys are in your password
@@ -197,9 +208,11 @@ EVENT_SIGNING_SECRET=${SECRET}
ORCHESTRATOR_API_KEY_INITIATOR=${INIT_KEY}
ORCHESTRATOR_API_KEY_SETTLER=${SETT_KEY}
ORCHESTRATOR_API_KEY_AUDITOR=${AUD_KEY}
DATABASE_URL=${DATABASE_URL}
# As it appears in ${ENV_FILE}:
ORCHESTRATOR_API_KEYS=${INIT_KEY}:initiator,${SETT_KEY}:settler,${AUD_KEY}:auditor
API_KEYS=${API_KEYS_VALUE}
ORCHESTRATOR_API_KEYS=${API_KEYS_VALUE}
EOF
chmod 0600 "${FIRST_KEYS_FILE}"
chown root:root "${FIRST_KEYS_FILE}"
@@ -208,6 +221,7 @@ EOF
fi
log " generated EVENT_SIGNING_SECRET (64 hex)"
log " generated 3 API keys (initiator/settler/auditor)"
log " generated local Postgres password for ${APP_USER}"
log " initial secrets written to ${FIRST_KEYS_FILE} (0600) — record in password manager, then 'shred -u ${FIRST_KEYS_FILE}'"
fi

View File

@@ -10,7 +10,7 @@ User=currencicombo
Group=currencicombo
RuntimeDirectory=currencicombo-webapp
RuntimeDirectoryMode=0755
ExecStart=/usr/sbin/nginx -c /etc/currencicombo/webapp-nginx.conf -e /var/log/currencicombo/webapp-nginx.error.log -g 'daemon off; pid /run/currencicombo-webapp/nginx.pid;'
ExecStart=/usr/sbin/nginx -c /etc/currencicombo/webapp-nginx.conf -g 'daemon off; pid /run/currencicombo-webapp/nginx.pid;'
ExecReload=/usr/sbin/nginx -c /etc/currencicombo/webapp-nginx.conf -s reload
Restart=on-failure
RestartSec=5