chore(repo): sync operator workspace (config, scripts, docs, multi-chain)
Some checks failed
Deploy to Phoenix / validate (push) Failing after 1s
Deploy to Phoenix / deploy (push) Has been skipped
Deploy to Phoenix / deploy-atomic-swap-dapp (push) Has been skipped
Deploy to Phoenix / cloudflare (push) Has been skipped

Add optional Cosmos/Engine-X/act-runner templates, CWUSDC/EI-matrix tooling,
non-EVM route planner in multi-chain-execution (tests passing), token list and
extraction updates, and documentation (MetaMask matrix, GRU/CWUSDC packets).

Ignore institutional evidence tarballs/sha256 under reports/status.

Validated with: bash scripts/verify/run-all-validation.sh --skip-genesis

Co-authored-by: Cursor <cursoragent@cursor.com>
This commit is contained in:
defiQUG
2026-05-11 16:25:08 -07:00
parent a5f7400955
commit 4ebf2d7902
292 changed files with 21574 additions and 1146 deletions

View File

@@ -82,7 +82,7 @@ if [ "$WETH9_VERIFIED" != "verified" ]; then
log_success "✓ CCIPWETH9Bridge verification submitted!"
else
log_warn "⚠️ Automated verification failed - manual verification required"
log_info "Manual verification URL: https://explorer.d-bis.org/address/$WETH9_BRIDGE_138#verify"
log_info "Manual verification URL: https://explorer.d-bis.org/addresses/$WETH9_BRIDGE_138#verify"
log_info "See docs/BLOCKSCOUT_BRIDGE_ADDRESSES_UPDATE.md for instructions"
fi
fi
@@ -109,7 +109,7 @@ if [ "$WETH10_VERIFIED" != "verified" ]; then
log_success "✓ CCIPWETH10Bridge verification submitted!"
else
log_warn "⚠️ Automated verification failed - manual verification required"
log_info "Manual verification URL: https://explorer.d-bis.org/address/$WETH10_BRIDGE_138#verify"
log_info "Manual verification URL: https://explorer.d-bis.org/addresses/$WETH10_BRIDGE_138#verify"
log_info "See docs/BLOCKSCOUT_BRIDGE_ADDRESSES_UPDATE.md for instructions"
fi
fi
@@ -120,7 +120,7 @@ log_success "Verification Attempt Complete"
log_success "========================================="
log_info ""
log_info "Blockscout Links:"
log_info " CCIPWETH9Bridge: https://explorer.d-bis.org/address/$WETH9_BRIDGE_138"
log_info " CCIPWETH10Bridge: https://explorer.d-bis.org/address/$WETH10_BRIDGE_138"
log_info " CCIPWETH9Bridge: https://explorer.d-bis.org/addresses/$WETH9_BRIDGE_138"
log_info " CCIPWETH10Bridge: https://explorer.d-bis.org/addresses/$WETH10_BRIDGE_138"
log_info ""

View File

@@ -147,7 +147,7 @@ echo ""
echo "✅ If you have transaction hashes or addresses:"
echo " https://explorer.d-bis.org/tx/<TRANSACTION_HASH>"
echo " https://explorer.d-bis.org/address/<ADDRESS>"
echo " https://explorer.d-bis.org/addresses/<ADDRESS>"
echo ""
log_info "Note: Some Blockscout versions serve the web interface"

View File

@@ -150,7 +150,7 @@ echo "⚠️ WEB INTERFACE ROUTES (May return 404 until more data):"
echo " - https://explorer.d-bis.org/ (root)"
echo " - https://explorer.d-bis.org/blocks"
echo " - https://explorer.d-bis.org/transactions"
echo " - https://explorer.d-bis.org/address/<ADDRESS>"
echo " - https://explorer.d-bis.org/addresses/<ADDRESS>"
echo ""
log_info "The web interface may need more indexed data or time to fully initialize"
echo ""

View File

@@ -213,6 +213,6 @@ done
echo ""
log_info "View contracts on Blockscout:"
log_info " https://explorer.d-bis.org/address/<CONTRACT_ADDRESS>"
log_info " https://explorer.d-bis.org/addresses/<CONTRACT_ADDRESS>"
echo ""

View File

@@ -65,7 +65,7 @@ if forge verify-contract \
--via-ir \
2>&1 | tee /tmp/weth9-bridge-138-verify.log; then
log_success "✓ CCIPWETH9Bridge verification submitted!"
log_info "View on Blockscout: https://explorer.d-bis.org/address/$WETH9_BRIDGE_138"
log_info "View on Blockscout: https://explorer.d-bis.org/addresses/$WETH9_BRIDGE_138"
else
log_warn "⚠️ CCIPWETH9Bridge verification failed or already verified"
log_info "Check /tmp/weth9-bridge-138-verify.log for details"
@@ -90,7 +90,7 @@ if forge verify-contract \
--via-ir \
2>&1 | tee /tmp/weth10-bridge-138-verify.log; then
log_success "✓ CCIPWETH10Bridge verification submitted!"
log_info "View on Blockscout: https://explorer.d-bis.org/address/$WETH10_BRIDGE_138"
log_info "View on Blockscout: https://explorer.d-bis.org/addresses/$WETH10_BRIDGE_138"
else
log_warn "⚠️ CCIPWETH10Bridge verification failed or already verified"
log_info "Check /tmp/weth10-bridge-138-verify.log for details"
@@ -102,7 +102,7 @@ log_success "Verification Complete!"
log_success "========================================="
log_info ""
log_info "Blockscout Links:"
log_info " CCIPWETH9Bridge: https://explorer.d-bis.org/address/$WETH9_BRIDGE_138"
log_info " CCIPWETH10Bridge: https://explorer.d-bis.org/address/$WETH10_BRIDGE_138"
log_info " CCIPWETH9Bridge: https://explorer.d-bis.org/addresses/$WETH9_BRIDGE_138"
log_info " CCIPWETH10Bridge: https://explorer.d-bis.org/addresses/$WETH10_BRIDGE_138"
log_info ""

View File

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Resume mainnet cWUSDC EI matrix transfers from ei-matrix-cwusdc-send-last-idx.txt + 1.
# Thin wrapper: forwards every argument to the sender script with --resume-next,
# replacing this process via exec.
set -euo pipefail
here="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
exec "$here/send-cwusdc-ei-matrix-wallets.sh" --resume-next "$@"

View File

View File

@@ -0,0 +1,391 @@
#!/usr/bin/env bash
# Deploy/configure CWMultiTokenBridgeL2 receivers for active public cW chains.
# Defaults to dry-run. Use --apply to broadcast.
set -euo pipefail
# Resolve repo layout relative to this script's own location.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
SMOM_ROOT="$PROJECT_ROOT/smom-dbis-138"
ENV_FILE="$SMOM_ROOT/.env"
REPORT_DIR="$PROJECT_ROOT/reports/status"
# CLI-controlled state, set by the flag parser below.
APPLY=false
FULL_FAMILY=false
CHAIN_FILTER=""
# UTC timestamp naming this run's JSONL manifest under reports/status.
TS="$(date -u +%Y%m%dT%H%M%SZ)"
MANIFEST="$REPORT_DIR/cw-multitoken-l2-remediation-${TS}.jsonl"
usage() {
# Print CLI help. Heredoc body and terminator must stay at column 0
# (plain <<USAGE, not <<-USAGE).
cat <<USAGE
Usage: $0 [--apply] [--full-family] [--chain <chainId>]
Without --apply this prints actions only. With --apply it broadcasts deployments
and configuration transactions, then updates smom-dbis-138/.env CW_BRIDGE_*.
USAGE
}
# Parse CLI flags; any unknown argument is fatal.
while [[ $# -gt 0 ]]; do
case "$1" in
--apply) APPLY=true ;;
--full-family) FULL_FAMILY=true ;;
# --chain consumes the following argument; --chain=<id> is also accepted.
--chain) CHAIN_FILTER="${2:-}"; shift ;;
--chain=*) CHAIN_FILTER="${1#*=}" ;;
-h|--help) usage; exit 0 ;;
*) echo "Unknown arg: $1" >&2; usage >&2; exit 1 ;;
esac
shift
done
# Best-effort project env load; hard requirements are validated explicitly below.
# shellcheck disable=SC1091
source "$PROJECT_ROOT/scripts/lib/load-project-env.sh" >/dev/null 2>&1 || true
[[ -n "${PRIVATE_KEY:-}" ]] || { echo "PRIVATE_KEY is required" >&2; exit 1; }
# Foundry (cast/forge) and jq are hard requirements for this script.
command -v cast >/dev/null 2>&1 || { echo "cast is required" >&2; exit 1; }
command -v forge >/dev/null 2>&1 || { echo "forge is required" >&2; exit 1; }
command -v jq >/dev/null 2>&1 || { echo "jq is required" >&2; exit 1; }
mkdir -p "$REPORT_DIR"
# Create/truncate this run's manifest file.
: > "$MANIFEST"
DEPLOYER="${DEPLOYER_ADDRESS:-$(cast wallet address "$PRIVATE_KEY")}"
L1_BRIDGE="${CW_L1_BRIDGE_CHAIN138:-}"
RPC_138="${RPC_URL_138:-${CHAIN138_RPC:-${CHAIN138_RPC_URL:-}}}"
[[ -n "$L1_BRIDGE" && "$L1_BRIDGE" != "0x0000000000000000000000000000000000000000" ]] || { echo "CW_L1_BRIDGE_CHAIN138 is required" >&2; exit 1; }
[[ -n "$RPC_138" ]] || { echo "RPC_URL_138/CHAIN138_RPC is required" >&2; exit 1; }
# AccessControl role hashes later granted on mirrored tokens via grantRole.
MINTER_ROLE="$(cast keccak "MINTER_ROLE")"
BURNER_ROLE="$(cast keccak "BURNER_ROLE")"
# Human-readable chain names keyed by EVM chain id.
declare -A CHAIN_NAME=(
[10]="Optimism"
[25]="Cronos"
[56]="BSC"
[100]="Gnosis"
[137]="Polygon"
[8453]="Base"
[42161]="Arbitrum"
[42220]="Celo"
)
# Env-var suffixes used to look up per-chain RPC URLs, bridges, and routers.
declare -A CHAIN_SUFFIX=(
[10]="OPTIMISM"
[25]="CRONOS"
[56]="BSC"
[100]="GNOSIS"
[137]="POLYGON"
[8453]="BASE"
[42161]="ARBITRUM"
[42220]="CELO"
)
# Chainlink CCIP chain selectors keyed by chain id.
# NOTE(review): verify the Cronos selector against the official CCIP chain
# selector directory — it shares a suspicious digit suffix with Optimism's.
declare -A CHAIN_SELECTOR=(
[10]="3734403246176062136"
[25]="1456215246176062136"
[56]="11344663589394136015"
[100]="465200170687744372"
[137]="4051577828743386545"
[8453]="15971525489660198786"
[42161]="4949039107694359620"
[42220]="1346049177634351622"
)
set_env_value() {
  # Insert or replace a KEY=VALUE line in an env file (appends when the key
  # is absent, creating the file if needed via >>).
  local file="$1" key="$2" value="$3"
  if grep -qE "^${key}=" "$file"; then
    # Escape sed replacement metacharacters (\, & and the | delimiter) so
    # values containing them — RPC URLs, pipes, ampersands — do not corrupt
    # the substitution or break the sed expression.
    local escaped
    escaped="$(printf '%s' "$value" | sed -e 's/[\\&|]/\\&/g')"
    sed -i "s|^${key}=.*|${key}=${escaped}|" "$file"
  else
    printf '\n%s=%s\n' "$key" "$value" >> "$file"
  fi
}
var_value() {
  # Print the value of the shell variable named by $1; empty string when it
  # is unset. No trailing newline is emitted.
  local ref="$1"
  printf '%s' "${!ref:-}"
}
first_env() {
  # Print the value of the first non-empty variable among the names given as
  # arguments; return 1 when none of them carry a value.
  local name
  for name in "$@"; do
    local candidate="${!name:-}"
    if [[ -n "$candidate" ]]; then
      printf '%s\n' "$candidate"
      return 0
    fi
  done
  return 1
}
rpc_for_chain() {
  # Resolve the RPC URL for a chain suffix by probing its candidate env vars
  # in priority order. Unknown suffixes print nothing and return 0, matching
  # an unmatched case statement.
  local suffix="$1" candidates=""
  case "$suffix" in
    OPTIMISM) candidates="OPTIMISM_RPC_URL OPTIMISM_MAINNET_RPC" ;;
    CRONOS) candidates="CRONOS_RPC_URL CRONOS_RPC CRONOS_MAINNET_RPC" ;;
    BSC) candidates="BSC_RPC_URL BSC_MAINNET_RPC" ;;
    GNOSIS) candidates="GNOSIS_RPC_URL GNOSIS_MAINNET_RPC GNOSIS_RPC" ;;
    POLYGON) candidates="POLYGON_RPC_URL POLYGON_MAINNET_RPC" ;;
    BASE) candidates="BASE_RPC_URL BASE_MAINNET_RPC" ;;
    ARBITRUM) candidates="ARBITRUM_RPC_URL ARBITRUM_MAINNET_RPC" ;;
    CELO) candidates="CELO_RPC_URL CELO_MAINNET_RPC CELO_RPC" ;;
  esac
  [[ -z "$candidates" ]] && return 0
  # shellcheck disable=SC2086
  first_env $candidates
}
old_bridge_for_suffix() {
  # Echo the previously-configured CW_BRIDGE_<SUFFIX> address from the
  # environment (empty string when unset).
  local envkey="CW_BRIDGE_${1}"
  printf '%s' "${!envkey:-}"
}
old_bridge_call() {
  # Read-only cast call against a legacy bridge; prints the first whitespace
  # token of the first non-empty output line. Fails fast when either the
  # bridge address or the RPC URL is empty.
  local bridge_addr="$1" rpc_url="$2" selector="$3"
  if [[ -z "$bridge_addr" || -z "$rpc_url" ]]; then
    return 1
  fi
  cast call "$bridge_addr" "$selector" --rpc-url "$rpc_url" 2>/dev/null | awk 'NF{print $1; exit}'
}
send_router_for_chain() {
  # CCIP send router for a chain: env override first, then fall back to the
  # old bridge's ccipRouter() view.
  local sfx="$1" legacy_bridge="$2" rpc_url="$3"
  local configured
  configured="$(first_env "CCIP_${sfx}_ROUTER" "CCIP_ROUTER_${sfx}" 2>/dev/null || true)"
  if [[ -n "$configured" ]]; then
    printf '%s\n' "$configured"
    return
  fi
  old_bridge_call "$legacy_bridge" "$rpc_url" "ccipRouter()(address)"
}
receive_router_for_chain() {
  # CCIP receive router for a chain: env override when present, otherwise
  # reuse the send router passed in by the caller.
  local sfx="$1" fallback_router="$2"
  local configured
  configured="$(first_env "CCIP_RELAY_ROUTER_${sfx}_CW" "CCIP_RELAY_ROUTER_${sfx}" 2>/dev/null || true)"
  if [[ -n "$configured" ]]; then
    printf '%s\n' "$configured"
  else
    printf '%s\n' "$fallback_router"
  fi
}
fee_token_for_chain() {
  # CCIP fee token (e.g. LINK) for a chain: env override, else the old
  # bridge's feeToken() view, else the zero address.
  local sfx="$1" legacy_bridge="$2" rpc_url="$3"
  local configured
  configured="$(first_env "CCIP_${sfx}_LINK_TOKEN" "LINK_TOKEN_${sfx}" "LINK_${sfx}" 2>/dev/null || true)"
  if [[ -n "$configured" ]]; then
    printf '%s\n' "$configured"
    return
  fi
  old_bridge_call "$legacy_bridge" "$rpc_url" "feeToken()(address)" || printf '0x0000000000000000000000000000000000000000\n'
}
# Append one JSON object, built from key=value arguments, to the run manifest.
# Each argument is split only on the first '=', so values may contain '='.
# Arguments without any '=' would raise in the dict() construction; all call
# sites in this script pass key=value pairs.
json_log() {
python3 - "$MANIFEST" "$@" <<'PY'
import json, sys
path = sys.argv[1]
items = dict(arg.split("=", 1) for arg in sys.argv[2:])
with open(path, "a", encoding="utf-8") as fh:
fh.write(json.dumps(items, sort_keys=True) + "\n")
PY
}
# Emit TSV rows "<key>\t<canonicalAddr>\t<mirroredAddr>" for 138 -> <chain_id>
# token pairs from config/token-mapping-multichain.json. Only the compliant
# cW USDT/USDC pairs are emitted unless --full-family was passed, which
# widens the filter to every *_cW token; rows with a missing or zero target
# address are dropped.
token_rows_for_chain() {
local chain_id="$1"
# $FULL_FAMILY expands to the shell builtin `true` or `false`, whose exit
# status drives the && / || chain that yields the jq boolean for $full.
jq -r --argjson cid "$chain_id" --argjson full "$($FULL_FAMILY && echo true || echo false)" '
.pairs[]
| select(.fromChainId == 138 and .toChainId == $cid)
| .tokens[]
| select((.key == "Compliant_USDT_cW") or (.key == "Compliant_USDC_cW") or ($full and (.key | endswith("_cW"))))
| select(.addressFrom and .addressTo and .addressTo != "0x0000000000000000000000000000000000000000")
| [.key, .addressFrom, .addressTo] | @tsv
' "$PROJECT_ROOT/config/token-mapping-multichain.json"
}
# Broadcast a cast send with retry/fallback handling.
#   $1   RPC URL; remaining args are passed through to `cast send`.
# In dry-run ($APPLY=false) the fully-quoted command is printed with the
# private key redacted, and nothing is sent. In apply mode it first tries a
# legacy (pre-EIP-1559) transaction, falling back to EIP-1559 params when the
# node rejects the legacy type; transient errors (base-fee, nonce/replacement)
# are retried up to 3 times per mode with backoff.
send_tx() {
local rpc="$1"; shift
local base_cmd=(cast send --rpc-url "$rpc" --private-key "$PRIVATE_KEY" --gas-limit 500000 "$@")
if ! $APPLY; then
printf '[dry-run] '
# Pattern-substitute the raw key for a literal $PRIVATE_KEY placeholder.
printf '%q ' "${base_cmd[@]/$PRIVATE_KEY/\$PRIVATE_KEY}"
printf '\n'
return 0
fi
local attempt mode out rc
for mode in legacy eip1559; do
local cmd=("${base_cmd[@]}")
if [[ "$mode" == "legacy" ]]; then
cmd=(cast send --rpc-url "$rpc" --private-key "$PRIVATE_KEY" --legacy --gas-limit 500000 "$@")
fi
for attempt in 1 2 3; do
# Temporarily disable errexit so a failed send can be inspected.
set +e
out="$("${cmd[@]}" 2>&1)"
rc=$?
set -e
printf '%s\n' "$out"
if [[ "$rc" -eq 0 ]]; then
sleep 3
return 0
fi
if [[ "$mode" == "legacy" && ( "$out" == *"Invalid params"* || "$out" == *"legacy"* || "$out" == *"transaction type"* ) ]]; then
echo " legacy tx rejected; retrying with EIP-1559 tx params"
break
fi
if [[ "$out" == *"max fee per gas less than block base fee"* ]]; then
local gas_price
gas_price="$(cast gas-price --rpc-url "$rpc" 2>/dev/null || echo 25000000)"
gas_price="$((gas_price * 2))"
echo " gas price below base fee; retrying with --gas-price $gas_price"
# NOTE(review): this rebuilt command omits --legacy even when mode=legacy,
# silently switching tx type on the base-fee retry — confirm intended.
cmd=(cast send --rpc-url "$rpc" --private-key "$PRIVATE_KEY" --gas-limit 500000 --gas-price "$gas_price" "$@")
sleep $((attempt * 3))
continue
fi
if [[ "$out" == *"replacement transaction underpriced"* || "$out" == *"nonce too low"* || "$out" == *"invalid nonce"* || "$out" == *"invalid sequence"* || "$out" == *"already known"* ]]; then
echo " retryable tx error (attempt $attempt); waiting before retry"
sleep $((attempt * 10))
continue
fi
return "$rc"
done
done
# Both modes exhausted: propagate the last nonzero exit status.
return "$rc"
}
# Deploy CWMultiTokenBridgeL2 via `forge create`, first with a legacy tx and
# then with EIP-1559 params if the node rejects the legacy type.
#   $1 rpc  $2 chain id  $3 send router  $4 receive router  $5 fee token
# All forge output (including the "Deployed to:" line the caller parses) is
# echoed to stdout; returns forge's exit status on unrecoverable failure.
forge_create_l2() {
local rpc="$1" chain_id="$2" send_router="$3" receive_router="$4" fee_token="$5"
local mode out rc
for mode in legacy eip1559; do
# errexit suspended so a rejected broadcast can be classified below.
set +e
if [[ "$mode" == "legacy" ]]; then
out="$(
cd "$SMOM_ROOT" &&
forge create contracts/bridge/CWMultiTokenBridgeL2.sol:CWMultiTokenBridgeL2 \
--rpc-url "$rpc" --chain-id "$chain_id" --broadcast --private-key "$PRIVATE_KEY" --legacy \
--constructor-args "$send_router" "$receive_router" "$fee_token" 2>&1
)"
else
out="$(
cd "$SMOM_ROOT" &&
forge create contracts/bridge/CWMultiTokenBridgeL2.sol:CWMultiTokenBridgeL2 \
--rpc-url "$rpc" --chain-id "$chain_id" --broadcast --private-key "$PRIVATE_KEY" \
--constructor-args "$send_router" "$receive_router" "$fee_token" 2>&1
)"
fi
rc=$?
set -e
printf '%s\n' "$out"
if [[ "$rc" -eq 0 ]]; then
return 0
fi
if [[ "$mode" == "legacy" && ( "$out" == *"Invalid params"* || "$out" == *"legacy"* || "$out" == *"transaction type"* ) ]]; then
echo " legacy deploy rejected; retrying with EIP-1559 tx params"
continue
fi
return "$rc"
done
return "$rc"
}
ensure_token_pair() {
  # Ensure the L2 bridge maps canonical -> mirrored for one token pair,
  # skipping the tx when the on-chain mapping already matches
  # (case-insensitive address comparison).
  local rpc_url="$1" bridge_addr="$2" token_key="$3" canonical_addr="$4" mirrored_addr="$5"
  local onchain
  onchain="$(cast call "$bridge_addr" "canonicalToMirrored(address)(address)" "$canonical_addr" --rpc-url "$rpc_url" 2>/dev/null || true)"
  if [[ "${onchain,,}" != "${mirrored_addr,,}" ]]; then
    echo " configure pair $token_key"
    send_tx "$rpc_url" "$bridge_addr" "configureTokenPair(address,address)" "$canonical_addr" "$mirrored_addr"
  else
    echo " pair ok $token_key"
  fi
}
ensure_l2_destination() {
  # Ensure destination selector 138 on the L2 bridge already points at the
  # L1 bridge and is enabled; otherwise submit configureDestination.
  local rpc_url="$1" bridge_addr="$2"
  local onchain lowered
  onchain="$(cast call "$bridge_addr" "destinations(uint64)((address,bool))" 138 --rpc-url "$rpc_url" 2>/dev/null || true)"
  lowered="${onchain,,}"
  if [[ "$lowered" == *"${L1_BRIDGE,,}"* && "$lowered" == *"true"* ]]; then
    echo " L2 destination 138 ok"
    return
  fi
  echo " configure L2 destination 138"
  send_tx "$rpc_url" "$bridge_addr" "configureDestination(uint64,address,bool)" 138 "$L1_BRIDGE" true
}
ensure_l1_destination() {
  # Ensure the L1 bridge routes <canonical, selector> to the given L2 bridge
  # and the route is enabled; otherwise submit configureDestination on L1.
  local chain_selector="$1" token_key="$2" canonical_addr="$3" l2_bridge="$4"
  local onchain lowered
  onchain="$(cast call "$L1_BRIDGE" "destinations(address,uint64)((address,bool))" "$canonical_addr" "$chain_selector" --rpc-url "$RPC_138" 2>/dev/null || true)"
  lowered="${onchain,,}"
  if [[ "$lowered" == *"${l2_bridge,,}"* && "$lowered" == *"true"* ]]; then
    echo " L1 destination ok $token_key"
    return
  fi
  echo " configure L1 destination $token_key selector=$chain_selector"
  send_tx "$RPC_138" "$L1_BRIDGE" "configureDestination(address,uint64,address,bool)" "$canonical_addr" "$chain_selector" "$l2_bridge" true
}
ensure_role() {
  # Grant an AccessControl role on a token to a holder, unless hasRole
  # already reports true (a failed read is treated as "not granted").
  local rpc_url="$1" token_addr="$2" role_label="$3" role_hash="$4" grantee="$5"
  local has_role
  has_role="$(cast call "$token_addr" "hasRole(bytes32,address)(bool)" "$role_hash" "$grantee" --rpc-url "$rpc_url" 2>/dev/null || echo false)"
  if [[ "$has_role" != "true" ]]; then
    echo " grant $role_label on $token_addr"
    send_tx "$rpc_url" "$token_addr" "grantRole(bytes32,address)" "$role_hash" "$grantee"
  else
    echo " role ok $role_label $token_addr"
  fi
}
# Deploy a CWMultiTokenBridgeL2 on one chain. The caller captures stdout and
# takes the LAST line as the bridge address (`| tail -1`), so the address must
# always be printed last. In dry-run a synthetic placeholder address derived
# from the chain id is printed instead of broadcasting.
deploy_l2() {
local chain_id="$1" suffix="$2" rpc="$3" send_router="$4" receive_router="$5" fee_token="$6"
echo " deploying CWMultiTokenBridgeL2 send=$send_router receive=$receive_router fee=$fee_token"
if ! $APPLY; then
echo " [dry-run] forge script DeployCWMultiTokenBridgeL2"
printf '0x000000000000000000000000000000000000%04x\n' "$chain_id"
return
fi
local out addr
out="$(forge_create_l2 "$rpc" "$chain_id" "$send_router" "$receive_router" "$fee_token")"
printf '%s\n' "$out"
# Parse the address from either forge's "Deployed to:" line or a
# "CWMultiTokenBridgeL2:" labelled line; keep the last match.
addr="$(printf '%s\n' "$out" | sed -nE 's/.*Deployed to:[[:space:]]*(0x[a-fA-F0-9]{40}).*/\1/p; s/.*CWMultiTokenBridgeL2:[[:space:]]*(0x[a-fA-F0-9]{40}).*/\1/p' | tail -1)"
[[ -n "$addr" ]] || { echo "Could not parse deployed bridge address for $suffix" >&2; return 1; }
printf '%s\n' "$addr"
}
is_cw_multitoken_l2() {
  # Heuristic probe: treat the address as a CWMultiTokenBridgeL2 when all
  # three characteristic view calls succeed. An empty address fails fast.
  local rpc_url="$1" candidate="$2"
  [[ -n "$candidate" ]] || return 1
  local probe
  for probe in "sendRouter()(address)" "receiveRouter()(address)"; do
    cast call "$candidate" "$probe" --rpc-url "$rpc_url" >/dev/null 2>&1 || return 1
  done
  cast call "$candidate" "canonicalToMirrored(address)(address)" "0x93E66202A11B1772E55407B32B44e5Cd8eda7f22" --rpc-url "$rpc_url" >/dev/null 2>&1
}
echo "CWMultiToken L2 remediation"
echo "Apply: $APPLY"
echo "Full family: $FULL_FAMILY"
echo "Deployer: $DEPLOYER"
echo "Manifest: $MANIFEST"
echo
# Per-chain remediation: resolve routers, reuse or deploy the L2 bridge,
# then configure destinations, token pairs, and token roles.
for chain_id in 10 25 56 100 137 8453 42161 42220; do
[[ -n "$CHAIN_FILTER" && "$CHAIN_FILTER" != "$chain_id" ]] && continue
suffix="${CHAIN_SUFFIX[$chain_id]}"
rpc="$(rpc_for_chain "$suffix" || true)"
old_bridge="$(old_bridge_for_suffix "$suffix")"
selector="${CHAIN_SELECTOR[$chain_id]}"
echo "=== $chain_id ${CHAIN_NAME[$chain_id]} ($suffix) ==="
if [[ -z "$rpc" ]]; then
echo " skip: missing RPC"
json_log chainId="$chain_id" suffix="$suffix" status="skipped" reason="missing_rpc"
continue
fi
send_router="$(send_router_for_chain "$suffix" "$old_bridge" "$rpc" || true)"
receive_router="$(receive_router_for_chain "$suffix" "$send_router")"
fee_token="$(fee_token_for_chain "$suffix" "$old_bridge" "$rpc")"
if [[ -z "$send_router" || -z "$receive_router" || -z "$fee_token" ]]; then
echo " skip: missing router or fee token"
json_log chainId="$chain_id" suffix="$suffix" status="skipped" reason="missing_router_or_fee"
continue
fi
# Reuse the configured bridge when it already answers like a
# CWMultiTokenBridgeL2; otherwise deploy a fresh one (deploy_l2 prints the
# address last, hence tail -1).
current_bridge="$(old_bridge_for_suffix "$suffix")"
if is_cw_multitoken_l2 "$rpc" "$current_bridge"; then
bridge="$current_bridge"
echo " using existing CWMultiTokenBridgeL2: $bridge"
else
bridge="$(deploy_l2 "$chain_id" "$suffix" "$rpc" "$send_router" "$receive_router" "$fee_token" | tail -1)"
echo " new bridge: $bridge"
fi
# Persist the bridge address to smom-dbis-138/.env only when broadcasting.
if $APPLY; then
set_env_value "$ENV_FILE" "CW_BRIDGE_${suffix}" "$bridge"
fi
ensure_l2_destination "$rpc" "$bridge"
# One pass per token pair: L2 mapping, L1 route, and mint/burn roles.
while IFS=$'\t' read -r key canonical mirrored; do
ensure_token_pair "$rpc" "$bridge" "$key" "$canonical" "$mirrored"
ensure_l1_destination "$selector" "$key" "$canonical" "$bridge"
ensure_role "$rpc" "$mirrored" "MINTER_ROLE" "$MINTER_ROLE" "$bridge"
ensure_role "$rpc" "$mirrored" "BURNER_ROLE" "$BURNER_ROLE" "$bridge"
done < <(token_rows_for_chain "$chain_id")
json_log chainId="$chain_id" suffix="$suffix" status="configured" bridge="$bridge" sendRouter="$send_router" receiveRouter="$receive_router" feeToken="$fee_token"
done
echo
echo "Done. Manifest: $MANIFEST"
echo "Next: pnpm cw:bridge-e2e-readiness && pnpm cw:full-readiness"

View File

@@ -0,0 +1,89 @@
#!/usr/bin/env bash
# Deploy the Engine X single-sided DODO cWUSDC wrapper vault on Ethereum
# mainnet via `forge create`. Dry-run by default; set EXECUTE=1 to broadcast.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/load-project-env.sh
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh"
: "${ETHEREUM_MAINNET_RPC:?ETHEREUM_MAINNET_RPC is required}"
# Constructor inputs; env overrides fall back to mainnet defaults.
CWUSDC="${CWUSDC_MAINNET:-0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a}"
QUOTE_TOKEN="${ENGINE_X_SINGLE_SIDED_DODO_QUOTE_TOKEN:-${WETH9_MAINNET:-0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2}}"
DODO_INTEGRATION="${ENGINE_X_SINGLE_SIDED_DODO_INTEGRATION:-${DODO_PMM_INTEGRATION_MAINNET:-${CHAIN_1_DODO_PMM_INTEGRATION:-}}}"
VERIFY="${VERIFY:-1}"
EXECUTE="${EXECUTE:-0}"
OWNER="${ENGINE_X_SINGLE_SIDED_DODO_OWNER:-${DEPLOYER_ADDRESS:-}}"
# A provided PRIVATE_KEY wins: the owner becomes the key's address.
if [[ -n "${PRIVATE_KEY:-}" ]]; then
OWNER="$(cast wallet address --private-key "${PRIVATE_KEY}")"
fi
if [[ -z "${OWNER}" ]]; then
echo "Set PRIVATE_KEY, DEPLOYER_ADDRESS, or ENGINE_X_SINGLE_SIDED_DODO_OWNER" >&2
exit 1
fi
if [[ -z "${DODO_INTEGRATION}" ]]; then
echo "Set DODO_PMM_INTEGRATION_MAINNET or ENGINE_X_SINGLE_SIDED_DODO_INTEGRATION" >&2
exit 1
fi
if [[ "${EXECUTE}" == "1" && -z "${PRIVATE_KEY:-}" ]]; then
echo "PRIVATE_KEY is required when EXECUTE=1" >&2
exit 1
fi
VERIFY_ARGS=()
if [[ "${VERIFY}" == "1" ]]; then
VERIFY_ARGS+=(--verify)
fi
# NOTE(review): expanding "${VERIFY_ARGS[@]}" while the array is empty errors
# under `set -u` on bash < 4.4 — confirm the target bash if VERIFY=0 must work.
CREATE_CMD_EXEC=(
forge create
--broadcast
--rpc-url "${ETHEREUM_MAINNET_RPC}"
--private-key "${PRIVATE_KEY:-}"
"${VERIFY_ARGS[@]}"
contracts/flash/DBISEngineXSingleSidedDodoCwusdcVault.sol:DBISEngineXSingleSidedDodoCwusdcVault
--constructor-args
"${CWUSDC}" "${QUOTE_TOKEN}" "${DODO_INTEGRATION}" "${OWNER}"
)
cat <<EOF
Engine X single-sided DODO cWUSDC wrapper deployment plan
mode EXECUTE: ${EXECUTE}
owner: ${OWNER}
cWUSDC: ${CWUSDC}
quote token: ${QUOTE_TOKEN}
DODO integration: ${DODO_INTEGRATION}
Boundary:
This wrapper may hold cWUSDC-only inventory as Engine X accounted support
inventory. It does not make cWUSDC-only inventory executable DODO liquidity.
Promotion to DODO requires nonzero cWUSDC and nonzero quote-token inventory,
a configured cWUSDC/quote DODO pool, and passing querySellBase/querySellQuote
canary guards.
EOF
if [[ "${EXECUTE}" != "1" ]]; then
cat <<EOF
Dry-run only. Review command:
cd smom-dbis-138
forge create --broadcast --rpc-url "\$ETHEREUM_MAINNET_RPC" --private-key "\$PRIVATE_KEY" ${VERIFY_ARGS[*]} contracts/flash/DBISEngineXSingleSidedDodoCwusdcVault.sol:DBISEngineXSingleSidedDodoCwusdcVault --constructor-args "${CWUSDC}" "${QUOTE_TOKEN}" "${DODO_INTEGRATION}" "${OWNER}"
EOF
exit 0
fi
# Broadcast from the Foundry project root, then parse "Deployed to: 0x..."
pushd "${PROJECT_ROOT}/smom-dbis-138" >/dev/null
DEPLOY_OUT="$("${CREATE_CMD_EXEC[@]}")"
popd >/dev/null
printf '%s\n' "${DEPLOY_OUT}"
VAULT="$(printf '%s\n' "${DEPLOY_OUT}" | grep -oE 'Deployed to: 0x[a-fA-F0-9]{40}' | awk '{print $3}' | tail -1)"
if [[ -z "${VAULT}" ]]; then
echo "Could not parse deployed single-sided DODO cWUSDC wrapper address" >&2
exit 1
fi
echo "DBIS_ENGINE_X_SINGLE_SIDED_DODO_CWUSDC_VAULT=${VAULT}"

View File

@@ -3,7 +3,8 @@
# Usage: ./scripts/deployment/deploy-sankofa-studio-lxc.sh [--dry-run] [--skip-create]
# --dry-run Print commands only.
# --skip-create Use existing container 7805 (only install Docker / compose / deploy app).
# Env: PROXMOX_HOST, NODE, VMID, HOSTNAME, IP_SANKOFA_STUDIO, REPO_URL or REPO_PATH, ENV_FILE.
# Env: PROXMOX_HOST (defaults from VMID), NODE, VMID, HOSTNAME, IP_SANKOFA_STUDIO, REPO_URL or REPO_PATH, ENV_FILE.
# DEPLOY_PCT_ON_LOCAL_PVE=1 — only on a Proxmox node: run pct locally (no SSH).
# See: docs/03-deployment/SANKOFA_STUDIO_DEPLOYMENT.md
set -euo pipefail
@@ -45,6 +46,11 @@ for a in "$@"; do
[[ "$a" == "--skip-create" ]] && SKIP_CREATE=true
done
PROXMOX_MONOREPO_ROOT="$PROXMOX_ROOT"
# shellcheck disable=SC1091
source "$PROXMOX_ROOT/scripts/lib/require-proxmox-ssh-for-pct.sh"
require_proxmox_ssh_for_pct || exit 1
run_cmd() {
if [[ -n "$PROXMOX_HOST" ]]; then
ssh $SSH_OPTS root@"$PROXMOX_HOST" "$@"
@@ -72,17 +78,6 @@ echo "URL: https://studio.sankofa.nexus → http://${IP}:8000"
echo "IP: $IP | Memory: ${MEMORY_MB}MB | Cores: $CORES | Disk: ${DISK_GB}G"
echo ""
# pct runs only on Proxmox hosts; from another machine set PROXMOX_HOST to SSH there
if ! $DRY_RUN && [[ -z "${PROXMOX_HOST:-}" ]] && ! command -v pct &>/dev/null; then
echo "ERROR: 'pct' not found. This script must run on a Proxmox host or with PROXMOX_HOST set."
echo ""
echo "From your current machine, run:"
echo " PROXMOX_HOST=192.168.11.11 REPO_URL='https://gitea.d-bis.org/d-bis/FusionAI-Creator.git' $0"
echo ""
echo "Or SSH to the Proxmox host and run the script there (with REPO_URL set)."
exit 1
fi
if ! $SKIP_CREATE; then
if $DRY_RUN; then
echo "[DRY-RUN] Would create LXC $VMID with hostname=$HOSTNAME, ip=$IP/24 (Docker + FusionAI Creator)"

View File

@@ -0,0 +1,172 @@
#!/usr/bin/env node
/**
* Execute the coffee-money gas top-up packet generated by
* scripts/deployment/plan-coffee-money-gas-topups.mjs.
*
* Default mode is dry-run. Pass --execute to approve exact source-token allowance
* and submit the bounded LiFi bridge transactions.
*/
import { mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { resolve } from "node:path";
import { fileURLToPath } from "node:url";
import { Contract, JsonRpcProvider, Wallet } from "ethers";
// --- Paths, CLI flags, and environment-derived configuration ---
// Use fileURLToPath instead of URL.pathname so the repo root resolves
// correctly when the checkout path contains spaces or percent-encoded
// characters, and on Windows drive paths (URL.pathname leaves them encoded).
const repoRoot = resolve(fileURLToPath(new URL("../..", import.meta.url)));
const planPath = resolve(repoRoot, "reports/status/coffee-money-gas-topup-plan-latest.json");
const outJson = resolve(repoRoot, "reports/status/coffee-money-gas-topup-execution-latest.json");
const outMd = resolve(repoRoot, "reports/status/coffee-money-gas-topup-execution-latest.md");
// Dry-run unless --execute is passed explicitly.
const execute = process.argv.includes("--execute");
const confirmations = Number(process.env.COFFEE_MONEY_CONFIRMATIONS || "1");
const rpcUrl = process.env.ETHEREUM_MAINNET_RPC || process.env.RPC_URL_1 || process.env.RPC_URL_MAINNET || "https://ethereum.publicnode.com";
const privateKey = process.env.PRIVATE_KEY;
// Minimal ERC-20 surface needed for allowance checks and exact approvals.
const erc20Abi = [
"function allowance(address owner,address spender) view returns (uint256)",
"function approve(address spender,uint256 amount) returns (bool)",
"function balanceOf(address owner) view returns (uint256)",
];
const plan = JSON.parse(readFileSync(planPath, "utf8"));
// Guard rails: refuse to run from a stale or unfunded plan.
if (execute && !privateKey) {
console.error("PRIVATE_KEY is required for --execute");
process.exit(1);
}
if (!plan.readiness?.sourceTokenSufficient && !plan.readiness?.sourceUsdcSufficient) {
console.error("Plan source token balance is not sufficient. Re-run the planner and inspect readiness.");
process.exit(1);
}
if (!plan.readiness?.mainnetEthGasSufficientForQuotedBridgeTxs || !plan.readiness?.allQuotesOk) {
console.error("Plan is not ready for execution. Re-run the planner and inspect readiness.");
process.exit(1);
}
const provider = new JsonRpcProvider(rpcUrl, 1);
const wallet = execute ? new Wallet(privateKey, provider) : null;
// Dry-run trusts the plan's deployer; execute mode requires the signing key
// to resolve to exactly that address.
const operator = execute ? await wallet.getAddress() : plan.deployer;
if (operator.toLowerCase() !== String(plan.deployer).toLowerCase()) {
console.error(`PRIVATE_KEY resolves to ${operator}, expected ${plan.deployer}`);
process.exit(1);
}
const sourceTokenAddress = plan.source?.tokenAddress;
const sourceTokenSymbol = plan.source?.token || "source token";
if (!sourceTokenAddress) {
console.error("Plan is missing source.tokenAddress. Re-run the planner with the updated planner script.");
process.exit(1);
}
const token = new Contract(sourceTokenAddress, erc20Abi, execute ? wallet : provider);
// Execution journal, written to reports/status at the end of the run.
const execution = {
generatedAt: new Date().toISOString(),
mode: execute ? "execute_broadcast" : "dry_run_no_broadcast",
deployer: plan.deployer,
plan: "reports/status/coffee-money-gas-topup-plan-latest.json",
approvals: [],
transactions: [],
totals: plan.totals,
};
// Approvals: grant each spender exactly the required allowance, skipping
// spenders whose current on-chain allowance already covers the plan.
for (const allowancePlan of plan.allowances ?? []) {
const spender = allowancePlan.spender;
const required = BigInt(allowancePlan.requiredRaw);
const current = await token.allowance(plan.deployer, spender);
const approvalRecord = {
spender,
requiredRaw: required.toString(),
currentRaw: current.toString(),
action: current >= required ? "skip_allowance_sufficient" : "approve_exact_required",
txHash: null,
status: "pending",
};
if (execute && current < required) {
const tx = await token.approve(spender, required, { gasLimit: 100_000n });
approvalRecord.txHash = tx.hash;
const receipt = await tx.wait(confirmations);
approvalRecord.status = receipt?.status === 1 ? "confirmed" : "failed";
approvalRecord.blockNumber = receipt?.blockNumber ?? null;
if (receipt?.status !== 1) {
// Record the failed approval before aborting so the journal shows it.
execution.approvals.push(approvalRecord);
throw new Error(`Approval failed: ${tx.hash}`);
}
} else {
// NOTE(review): in execute mode a skipped (already-sufficient) approval is
// also marked "confirmed" even though no tx was sent; the action field
// disambiguates.
approvalRecord.status = execute ? "confirmed" : "dry_run";
}
execution.approvals.push(approvalRecord);
}
// Bridge transactions: replay each planner-quoted LiFi transactionRequest
// verbatim (to/data/value/gas) on mainnet (chainId 1), waiting for the
// configured number of confirmations between sends.
for (const row of plan.rows) {
const request = row.transactionRequest;
const record = {
chainId: row.chainId,
nativeSymbol: row.nativeSymbol,
spend: row.spend,
spendUSDC: row.spendUSDC,
expectedOutNative: row.toAmountNative,
tool: row.tool,
to: request?.to ?? null,
value: request?.value ?? null,
gasLimit: request?.gasLimit ?? null,
gasPrice: request?.gasPrice ?? null,
txHash: null,
status: execute ? "pending" : "dry_run",
};
if (execute) {
const tx = await wallet.sendTransaction({
to: request.to,
data: request.data,
value: BigInt(request.value || "0"),
gasLimit: BigInt(request.gasLimit),
gasPrice: BigInt(request.gasPrice),
chainId: 1,
});
record.txHash = tx.hash;
const receipt = await tx.wait(confirmations);
record.status = receipt?.status === 1 ? "confirmed" : "failed";
record.blockNumber = receipt?.blockNumber ?? null;
if (receipt?.status !== 1) {
// Record the failed bridge tx before aborting so the journal shows it.
execution.transactions.push(record);
throw new Error(`Bridge tx failed: ${tx.hash}`);
}
}
execution.transactions.push(record);
}
// Render a markdown table. Header cells are joined as-is; body cells are
// stringified (null/undefined -> "") with pipes escaped.
function table(headers, rows) {
const headerLine = `| ${headers.join(" | ")} |`;
const dividerLine = `| ${headers.map(() => "---").join(" | ")} |`;
const bodyLines = rows.map(
(cells) => `| ${cells.map((cell) => String(cell ?? "").replace(/\|/g, "\\|")).join(" | ")} |`,
);
return [headerLine, dividerLine, ...bodyLines].join("\n");
}
// Assemble the markdown summary and persist both JSON and markdown journals
// under reports/status; print the JSON path for callers.
const md = [
"# Coffee-Money Gas Top-Up Execution",
"",
`- Generated: \`${execution.generatedAt}\``,
`- Mode: \`${execution.mode}\``,
`- Deployer: \`${execution.deployer}\``,
`- Planned spend: \`${execution.totals.spend || execution.totals.spendUSDC} ${sourceTokenSymbol}\``,
`- Mainnet gas estimate from plan: \`${execution.totals.mainnetGasCostETH} ETH\``,
"",
"## Approvals",
"",
table(
["Spender", "Required raw", "Action", "Status", "Tx"],
execution.approvals.map((row) => [row.spender, row.requiredRaw, row.action, row.status, row.txHash]),
),
"",
"## Bridge Transactions",
"",
table(
["Destination", "Spend", "Expected out", "Tool", "Status", "Tx"],
execution.transactions.map((row) => [row.chainId, `${row.spend || row.spendUSDC} ${sourceTokenSymbol}`, `${row.expectedOutNative} ${row.nativeSymbol}`, row.tool, row.status, row.txHash]),
),
].join("\n");
mkdirSync(resolve(repoRoot, "reports/status"), { recursive: true });
writeFileSync(outJson, `${JSON.stringify(execution, null, 2)}\n`);
writeFileSync(outMd, `${md}\n`);
console.log(outJson);

View File

@@ -0,0 +1,63 @@
#!/usr/bin/env bash
# Recursively chown /srv/projects inside Dev CT 5700 to the primary dev user.
# Use when rsync --delete-remote fails with Permission denied (root-owned files on VM).
#
# Requires: SSH as root to the Proxmox node that hosts VMID 5700 (default r630-04).
#
# Usage:
# ./scripts/deployment/fix-dev-vm-srv-projects-ownership.sh --dry-run
# ./scripts/deployment/fix-dev-vm-srv-projects-ownership.sh
#
# Env:
# DEV_VM_CTID — LXC ID (default 5700)
# DEV_VM_USER — owning user inside CT (default dev1)
# DEV_VM_PVE_HOST — override Proxmox node IP/hostname (default: get_host_for_vmid + R630_04 fallback)
# Do not use generic PROXMOX_HOST here; it may point at the wrong node.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
export PROJECT_ROOT
# Best-effort env load; provides get_host_for_vmid and PROXMOX_HOST_* when present.
# shellcheck source=/dev/null
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh" 2>/dev/null || true
DRY_RUN=0
while [[ $# -gt 0 ]]; do
case "$1" in
--dry-run) DRY_RUN=1; shift ;;
--help|-h)
# Reprint this file's header comment (lines 2-22) as the help text.
sed -n '1,22p' "$0" | tail -n +2
exit 0
;;
*)
echo "ERROR: unknown argument: $1 (try --help)" >&2
exit 1
;;
esac
done
CTID="${DEV_VM_CTID:-5700}"
OWNER="${DEV_VM_USER:-dev1}"
# Node resolution order: explicit override, VMID lookup, then static fallback.
if [[ -n "${DEV_VM_PVE_HOST:-}" ]]; then
NODE="$DEV_VM_PVE_HOST"
else
NODE="$(get_host_for_vmid "$CTID" 2>/dev/null || true)"
fi
NODE="${NODE:-${PROXMOX_HOST_R630_04:-192.168.11.14}}"
REMOTE_CMD="pct exec $CTID -- chown -R ${OWNER}:${OWNER} /srv/projects"
echo "=== Fix Dev VM /srv/projects ownership ==="
echo "Node: root@${NODE}"
echo "CT: $CTID"
echo "Owner: $OWNER"
echo ""
if [[ "$DRY_RUN" == "1" ]]; then
echo "DRY-RUN: ssh root@${NODE} \"$REMOTE_CMD\""
exit 0
fi
# Non-interactive SSH: fail fast rather than prompting for credentials.
ssh -o BatchMode=yes -o ConnectTimeout=15 -o StrictHostKeyChecking=accept-new "root@${NODE}" "$REMOTE_CMD"
echo "Done."

View File

@@ -6,6 +6,8 @@ PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/load-project-env.sh
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh"
# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/mev-protection.sh
source "${PROJECT_ROOT}/scripts/lib/mev-protection.sh"
: "${ETHEREUM_MAINNET_RPC:?ETHEREUM_MAINNET_RPC is required}"
@@ -103,8 +105,24 @@ available_usdc = vault_usdc if include_lender == "1" else max(vault_usdc - lende
amount = min(pool_cw, pool_usdc, vault_cw, available_usdc)
if override_amount:
amount = int(override_amount)
def units(raw: int) -> str:
return f"{Decimal(raw) / Decimal(10**6):f}"
def emit(name, value):
print(f"{name}='{value}'")
emit("POOL_CWUSDC_UNITS", units(pool_cw))
emit("POOL_USDC_UNITS", units(pool_usdc))
emit("LENDER_USDC_UNITS", units(lender_usdc))
emit("VAULT_CWUSDC_UNITS", units(vault_cw))
emit("VAULT_USDC_UNITS", units(vault_usdc))
emit("POOL_USDC_AVAILABLE_FOR_MIGRATION_RAW", available_usdc)
emit("POOL_USDC_AVAILABLE_FOR_MIGRATION_UNITS", units(available_usdc))
if amount <= 0:
raise SystemExit("no balanced vault liquidity is available to migrate")
emit("NO_MIGRATION", "1")
emit("NO_MIGRATION_REASON", "no balanced vault liquidity is available to migrate")
raise SystemExit(0)
if amount > vault_cw:
raise SystemExit("migration amount exceeds vault cWUSDC balance")
if amount > vault_usdc:
@@ -117,28 +135,40 @@ token0, token1 = addrs
amount0 = amount if token0 == cwusdc.lower() else amount
amount1 = amount if token1 == usdc.lower() else amount
def units(raw: int) -> str:
return f"{Decimal(raw) / Decimal(10**6):f}"
def emit(name, value):
print(f"{name}='{value}'")
emit("NO_MIGRATION", "0")
emit("TOKEN0", token0)
emit("TOKEN1", token1)
emit("AMOUNT0_RAW", amount0)
emit("AMOUNT1_RAW", amount1)
emit("MIGRATE_RAW", amount)
emit("MIGRATE_UNITS", units(amount))
emit("POOL_CWUSDC_UNITS", units(pool_cw))
emit("POOL_USDC_UNITS", units(pool_usdc))
emit("LENDER_USDC_UNITS", units(lender_usdc))
emit("VAULT_CWUSDC_UNITS", units(vault_cw))
emit("VAULT_USDC_UNITS", units(vault_usdc))
emit("POOL_USDC_AVAILABLE_FOR_MIGRATION_RAW", available_usdc)
emit("POOL_USDC_AVAILABLE_FOR_MIGRATION_UNITS", units(available_usdc))
PY
)"
if [[ "${NO_MIGRATION}" == "1" ]]; then
cat <<EOF
Engine X indexed-liquidity migration plan
mode: ${EXECUTE}
vault: ${VAULT}
vault owner: ${OWNER}
signer/recipient: ${SIGNER}
Engine X virtual vault state
accounted pool: ${POOL_CWUSDC_UNITS} cWUSDC / ${POOL_USDC_UNITS} USDC
lender bucket: ${LENDER_USDC_UNITS} USDC
actual token balances: ${VAULT_CWUSDC_UNITS} cWUSDC / ${VAULT_USDC_UNITS} USDC
USDC available without lender bucket: ${POOL_USDC_AVAILABLE_FOR_MIGRATION_UNITS}
No migration commands emitted:
${NO_MIGRATION_REASON}
Boundary
The previous tiny Engine X public-liquidity attempt appears already swept or spent.
Rerun only after an upgraded vault is seeded with balanced cWUSDC and official Mainnet USDC.
EOF
exit 0
fi
POOL="$(cast call "${FACTORY}" 'getPool(address,address,uint24)(address)' "${TOKEN0}" "${TOKEN1}" "${FEE}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | grep -oE '0x[a-fA-F0-9]{40}' | head -1)"
SIMULATED_POOL=""
if [[ "${POOL}" == "0x0000000000000000000000000000000000000000" ]]; then
@@ -216,21 +246,19 @@ EOF
exit 0
fi
cast send "${VAULT}" 'withdrawPoolLiquidity(address,uint256,uint256)' "${SIGNER}" "${MIGRATE_RAW}" "${MIGRATE_RAW}" \
--rpc-url "${ETHEREUM_MAINNET_RPC}" --private-key "${PRIVATE_KEY}"
mev_require_private_for_action "engine-x-univ3-indexed-lp-migration"
cast send "${POSITION_MANAGER}" 'createAndInitializePoolIfNecessary(address,address,uint24,uint160)' \
"${TOKEN0}" "${TOKEN1}" "${FEE}" "${SQRT_PRICE_X96}" \
--rpc-url "${ETHEREUM_MAINNET_RPC}" --private-key "${PRIVATE_KEY}"
mev_cast_send "${VAULT}" 'withdrawPoolLiquidity(address,uint256,uint256)' "${SIGNER}" "${MIGRATE_RAW}" "${MIGRATE_RAW}"
cast send "${TOKEN0}" 'approve(address,uint256)' "${POSITION_MANAGER}" "${AMOUNT0_RAW}" \
--rpc-url "${ETHEREUM_MAINNET_RPC}" --private-key "${PRIVATE_KEY}"
cast send "${TOKEN1}" 'approve(address,uint256)' "${POSITION_MANAGER}" "${AMOUNT1_RAW}" \
--rpc-url "${ETHEREUM_MAINNET_RPC}" --private-key "${PRIVATE_KEY}"
mev_cast_send "${POSITION_MANAGER}" 'createAndInitializePoolIfNecessary(address,address,uint24,uint160)' \
"${TOKEN0}" "${TOKEN1}" "${FEE}" "${SQRT_PRICE_X96}"
cast send "${POSITION_MANAGER}" 'mint((address,address,uint24,int24,int24,uint256,uint256,uint256,uint256,address,uint256))' \
mev_cast_send "${TOKEN0}" 'approve(address,uint256)' "${POSITION_MANAGER}" "${AMOUNT0_RAW}"
mev_cast_send "${TOKEN1}" 'approve(address,uint256)' "${POSITION_MANAGER}" "${AMOUNT1_RAW}"
mev_cast_send "${POSITION_MANAGER}" 'mint((address,address,uint24,int24,int24,uint256,uint256,uint256,uint256,address,uint256))' \
"(${TOKEN0},${TOKEN1},${FEE},${TICK_LOWER},${TICK_UPPER},${AMOUNT0_RAW},${AMOUNT1_RAW},0,0,${SIGNER},${DEADLINE})" \
--rpc-url "${ETHEREUM_MAINNET_RPC}" --private-key "${PRIVATE_KEY}" -vv
-vv
NEW_POOL="$(cast call "${FACTORY}" 'getPool(address,address,uint24)(address)' "${TOKEN0}" "${TOKEN1}" "${FEE}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | grep -oE '0x[a-fA-F0-9]{40}' | head -1)"
echo "Post-migration UniV3 pool: ${NEW_POOL}"

35
scripts/deployment/mint-cwusdc-ei-matrix-wallets.sh Normal file → Executable file
View File

@@ -305,19 +305,28 @@ if ! $DRY_RUN && [[ "${EI_MATRIX_SKIP_GAS_CHECK:-}" != "1" ]]; then
fi
echo ""
echo "Sample (first 3, last 3):"
_s_idx=$OFFSET
while IFS= read -r s_addr && IFS= read -r s_raw <&3; do
h=$(python3 -c "d=int('$DECIMALS'); a=int('$s_raw'); print(f'{a / (10**d):.6f}')" 2>/dev/null || echo "$s_raw")
echo " idx=$_s_idx $s_addr raw=$s_raw (~$h cWUSDC)"
_s_idx=$((_s_idx + 1))
done < <(head -3 "$ADDR_TMP") 3< <(head -3 "$AMOUNTS_TMP")
_s_idx=$((OFFSET + WALLET_COUNT - 3))
while IFS= read -r s_addr && IFS= read -r s_raw <&3; do
h=$(python3 -c "d=int('$DECIMALS'); a=int('$s_raw'); print(f'{a / (10**d):.6f}')" 2>/dev/null || echo "$s_raw")
echo " idx=$_s_idx $s_addr raw=$s_raw (~$h cWUSDC)"
_s_idx=$((_s_idx + 1))
done < <(tail -3 "$ADDR_TMP") 3< <(tail -3 "$AMOUNTS_TMP")
echo "Sample mints:"
if [[ "$WALLET_COUNT" -le 6 ]]; then
_s_idx=$OFFSET
while IFS=$'\t' read -r s_addr s_raw; do
h=$(python3 -c "d=int('$DECIMALS'); a=int('$s_raw'); print(f'{a / (10**d):.6f}')" 2>/dev/null || echo "$s_raw")
echo " idx=$_s_idx $s_addr raw=$s_raw (~$h cWUSDC)"
_s_idx=$((_s_idx + 1))
done < <(paste -d $'\t' "$ADDR_TMP" "$AMOUNTS_TMP")
else
_s_idx=$OFFSET
while IFS=$'\t' read -r s_addr s_raw; do
h=$(python3 -c "d=int('$DECIMALS'); a=int('$s_raw'); print(f'{a / (10**d):.6f}')" 2>/dev/null || echo "$s_raw")
echo " idx=$_s_idx $s_addr raw=$s_raw (~$h cWUSDC)"
_s_idx=$((_s_idx + 1))
done < <(paste -d $'\t' "$ADDR_TMP" "$AMOUNTS_TMP" | head -3)
_s_idx=$((OFFSET + WALLET_COUNT - 3))
while IFS=$'\t' read -r s_addr s_raw; do
h=$(python3 -c "d=int('$DECIMALS'); a=int('$s_raw'); print(f'{a / (10**d):.6f}')" 2>/dev/null || echo "$s_raw")
echo " idx=$_s_idx $s_addr raw=$s_raw (~$h cWUSDC)"
_s_idx=$((_s_idx + 1))
done < <(paste -d $'\t' "$ADDR_TMP" "$AMOUNTS_TMP" | tail -3)
fi
echo ""
sent=0

View File

@@ -0,0 +1,30 @@
#!/usr/bin/env bash
# Mirror a GitHub fork into a Gitea repo via `git push --mirror`.
# Run from the operator LAN while Gitea is reachable.
#
# Required env:
#   GITEA_REMOTE — Gitea repo URL (embed USER:TOKEN credentials if needed),
#                  e.g. https://USER:TOKEN@gitea.d-bis.org/d-bis/DefiLlama-Adapters.git
# Optional env:
#   GITHUB_FORK  — source fork (defaults to the DefiLlama-Adapters fork)
#   DRY_RUN=1    — clone only; skip the push to Gitea
set -euo pipefail

# Default the source fork when the caller did not provide one.
: "${GITHUB_FORK:=https://github.com/Defi-Oracle-Meta-Blockchain/DefiLlama-Adapters.git}"

if [[ -z "${GITEA_REMOTE:-}" ]]; then
  echo "Set GITEA_REMOTE to your Gitea repo URL (with credentials if needed)." >&2
  exit 1
fi

# Scratch bare clone; removed on every exit path (success, error, signal).
TMP="${TMPDIR:-/tmp}/mirror-defillama-$$"
trap 'rm -rf "$TMP"' EXIT

git clone --mirror "$GITHUB_FORK" "$TMP"

if [[ "${DRY_RUN:-}" == "1" ]]; then
  echo "DRY_RUN=1: mirror clone OK; skip push to GITEA_REMOTE"
  exit 0
fi

git -C "$TMP" push --mirror "$GITEA_REMOTE"
echo "Mirror push completed."

15
scripts/deployment/pipeline-ei-matrix-mint-cwusdc.sh Normal file → Executable file
View File

@@ -49,17 +49,18 @@ if [[ "$CID" != "1" ]]; then
exit 1
fi
ROLE=$(cast keccak "MINTER_ROLE()")
# AccessControl MINTER_ROLE is keccak256 of the string "MINTER_ROLE" for OZ — use cast keccak
ROLE=$(cast keccak "MINTER_ROLE")
if HR=$(cast call "$CWUSDC" "hasRole(bytes32,address)(bool)" "$ROLE" "$FROM" --rpc-url "$RPC" 2>/dev/null); then
if [[ "${HR,,}" != *true* ]]; then
echo "[WARN] hasRole(MINTER_ROLE) returned false for signer — mints will likely revert." >&2
else
if [[ "${SKIP_EI_MATRIX_MINT_PREFLIGHT:-}" == "1" ]]; then
echo "Preflight: skipped (SKIP_EI_MATRIX_MINT_PREFLIGHT=1)"
elif HR=$(cast call "$CWUSDC" "hasRole(bytes32,address)(bool)" "$ROLE" "$FROM" --rpc-url "$RPC" 2>/dev/null); then
HR_TR=$(echo "$HR" | tr -d '[:space:]' | tr '[:upper:]' '[:lower:]')
if [[ "$HR_TR" == *true* || "$HR_TR" == *0x0000000000000000000000000000000000000000000000000000000000000001* ]]; then
echo "Preflight: MINTER_ROLE on cWUSDC for signer — OK"
else
echo "[WARN] hasRole(MINTER_ROLE) not true for signer (got: $HR) — mints may revert if minter is elsewhere." >&2
fi
else
echo "[WARN] Could not call hasRole (ABI may differ) — continuing." >&2
echo "[WARN] Could not call hasRole — continuing (token ABI may differ)." >&2
fi
echo ""

View File

@@ -0,0 +1,104 @@
#!/usr/bin/env bash
# Run full-grid readiness audit (optional), then remediate mainnet cWUSDC gaps.
#
# Modes:
#   Default:     fixed --send-raw to each gap index (send-cwusdc-ei-matrix-targeted.sh).
#   --multicall: per-wallet deficit TSV + Multicall3 batches (cheapest; see send-cwusdc-ei-matrix-multicall-batches.sh).
#
# Usage:
#   ./scripts/deployment/pipeline-ei-matrix-remediate-cwusdc-from-audit.sh --dry-run --send-raw 5000000
#   ./scripts/deployment/pipeline-ei-matrix-remediate-cwusdc-from-audit.sh --dry-run --multicall
#   ./scripts/deployment/pipeline-ei-matrix-remediate-cwusdc-from-audit.sh --execute --multicall
#   SKIP_EI_MATRIX_REMEDIATE_AUDIT=1 ./scripts/deployment/pipeline-ei-matrix-remediate-cwusdc-from-audit.sh --multicall --execute
#
# Env:
#   SKIP_EI_MATRIX_REMEDIATE_AUDIT=1
#   EI_MATRIX_REMEDIATE_MULTICALL=1       Same as --multicall
#   EI_MATRIX_REMEDIATE_TOPUP_TSV=path    Default: reports/status/ei-matrix-cwusdc-topup-amounts.tsv
#   Rebuild TSV after audit: scripts/verify/build-ei-matrix-cwusdc-topup-tsv-from-audit-json.sh
#
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT"

# CLI state. DRY is a plain boolean string ("true"/"false") tested with [[ ]],
# rather than the `DRY=(true)` / `if $DRY` idiom of executing true/false as
# commands, which is fragile and obscures intent.
DRY=false
SEND_RAW=""
INDICES_OVERRIDE=""
USE_MULTICALL=false
PASS=()  # extra args forwarded verbatim to the targeted sender
while [[ $# -gt 0 ]]; do
  case "$1" in
    --dry-run) DRY=true; PASS+=(--dry-run); shift ;;
    --send-raw) SEND_RAW="${2:?}"; shift 2 ;;
    --indices-file) INDICES_OVERRIDE="${2:?}"; shift 2 ;;
    --multicall) USE_MULTICALL=true; shift ;;
    --) shift; PASS+=("$@"); break ;;
    *) PASS+=("$1"); shift ;;
  esac
done
if [[ "${EI_MATRIX_REMEDIATE_MULTICALL:-}" == "1" ]]; then
  USE_MULTICALL=true
fi

AUDIT_SH="$PROJECT_ROOT/scripts/verify/run-ei-matrix-full-readiness-audit.sh"
TARGET_SH="$PROJECT_ROOT/scripts/deployment/send-cwusdc-ei-matrix-targeted.sh"
MULTICALL_SH="$PROJECT_ROOT/scripts/deployment/send-cwusdc-ei-matrix-multicall-batches.sh"
BUILD_TSV_SH="$PROJECT_ROOT/scripts/verify/build-ei-matrix-cwusdc-topup-tsv-from-audit-json.sh"

# Mode validation: multicall takes per-row TSV amounts; fixed mode needs --send-raw.
if [[ "$USE_MULTICALL" == true ]]; then
  if [[ -n "$SEND_RAW" ]]; then
    echo "Do not combine --multicall with --send-raw (multicall uses per-row TSV amounts)." >&2
    exit 1
  fi
else
  if [[ -z "$SEND_RAW" ]]; then
    echo "Required: --send-raw R, or use --multicall with a top-up TSV." >&2
    exit 1
  fi
fi

if [[ "${SKIP_EI_MATRIX_REMEDIATE_AUDIT:-}" != "1" ]]; then
  echo "→ Step 1: full readiness audit (refreshes gap files; exit 1 means gaps below policy — OK)"
  # Audit exit 1 just means "gaps found" and is expected here; anything >1
  # signals an RPC/config failure and aborts the pipeline.
  _audit_rc=0
  "$AUDIT_SH" || _audit_rc=$?
  if [[ "$_audit_rc" -gt 1 ]]; then
    echo "Audit failed with exit $_audit_rc (RPC/config?)." >&2
    exit "$_audit_rc"
  fi
  if [[ "$USE_MULTICALL" == true ]]; then
    echo "→ Step 1b: rebuild top-up TSV from ei-matrix-readiness-audit-latest.json"
    "$BUILD_TSV_SH"
  fi
else
  echo "→ Step 1: skipped (SKIP_EI_MATRIX_REMEDIATE_AUDIT=1)"
fi

if [[ "$USE_MULTICALL" == true ]]; then
  TSV="${EI_MATRIX_REMEDIATE_TOPUP_TSV:-$PROJECT_ROOT/reports/status/ei-matrix-cwusdc-topup-amounts.tsv}"
  if [[ ! -f "$TSV" ]]; then
    echo "Missing top-up TSV: $TSV — run $BUILD_TSV_SH or audit without SKIP." >&2
    exit 1
  fi
  N=$(wc -l <"$TSV" | tr -d ' ')
  echo "→ Step 2: Multicall3 batches ($N rows) $TSV"
  if [[ "$DRY" == true ]]; then
    exec "$MULTICALL_SH" --dry-run --tsv "$TSV"
  else
    exec "$MULTICALL_SH" --execute --tsv "$TSV"
  fi
fi

GAPS="${INDICES_OVERRIDE:-${EI_MATRIX_AUDIT_GAPS_MAINNET:-${PROJECT_ROOT}/reports/status/ei-matrix-readiness-gaps-mainnet-indices.txt}}"
if [[ ! -f "$GAPS" ]]; then
  echo "Missing gap file: $GAPS" >&2
  exit 1
fi
# BUG FIX: `grep -c` prints "0" AND exits 1 when nothing matches, so the old
# `|| echo 0` produced a two-line "0\n0" and broke the numeric test below.
# Suppress the exit status instead, and default only when grep printed nothing
# (e.g. unreadable file, exit 2).
N=$(grep -cE '^[0-9]+$' "$GAPS" 2>/dev/null || true)
N="${N:-0}"
if [[ "$N" -eq 0 ]]; then
  echo "No mainnet gap indices in $GAPS — nothing to remediate."
  exit 0
fi
echo "→ Step 2: targeted send to $N indices ($GAPS) --send-raw $SEND_RAW"
exec "$TARGET_SH" "${PASS[@]}" --send-raw "$SEND_RAW" --indices-file "$GAPS"

View File

@@ -0,0 +1,325 @@
#!/usr/bin/env node
/**
* Read-only coffee-money gas top-up planner.
*
* Quotes a Mainnet source token -> destination native gas via LiFi for the chains that
* currently block token-aggregation stability work. It does not approve,
* bridge, sign, or broadcast transactions.
*/
import { mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { resolve } from "node:path";
// Repo root derived from this module's location; all artifacts live under reports/status.
const repoRoot = resolve(new URL("../..", import.meta.url).pathname);
// Input: latest liquidity-gap funding plan (read below); outputs: JSON + markdown plan.
const fundingPlanPath = resolve(repoRoot, "reports/status/token-aggregation-liquidity-gap-funding-plan-latest.json");
const jsonOut = resolve(repoRoot, "reports/status/coffee-money-gas-topup-plan-latest.json");
const mdOut = resolve(repoRoot, "reports/status/coffee-money-gas-topup-plan-latest.md");
// All values below are env-overridable with hard-coded fallbacks.
const deployer = process.env.DEPLOYER_ADDRESS || "0x4A666F96fC8764181194447A7dFdb7d471b301C8";
const sourceChain = 1;
const sourceToken = process.env.COFFEE_MONEY_SOURCE_SYMBOL || "USDC";
const sourceTokenAddress = process.env.COFFEE_MONEY_SOURCE_TOKEN_ADDRESS || "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48";
const sourceDecimals = Number(process.env.COFFEE_MONEY_SOURCE_DECIMALS || "6");
// Spend sizing policy: safety multiplier in basis points, spend clamped to [min, max] raw units.
const safetyMultiplierBps = BigInt(process.env.COFFEE_MONEY_TOPUP_SAFETY_BPS || "15000");
const minSpendRaw = BigInt(process.env.COFFEE_MONEY_MIN_SPEND_USDC_RAW || "1000000");
const maxSpendRaw = BigInt(process.env.COFFEE_MONEY_MAX_SPEND_USDC_RAW || "4000000");
// Destination chain IDs to consider (comma-separated env override).
const targetChains = new Set(
(process.env.COFFEE_MONEY_TARGET_CHAINS || "10,56,137,42161")
.split(",")
.map((value) => Number(value.trim()))
.filter(Boolean),
);
const nativeTokenAddress = "0x0000000000000000000000000000000000000000";
const nativeTokenOverrides = {
// LiFi models CELO as the canonical CELO token on Celo, not the zero address.
42220: "0x471EcE3750Da237f93B8E339c536989b8978a438",
};
const ethereumRpc = process.env.ETHEREUM_MAINNET_RPC || process.env.RPC_URL_1 || "https://ethereum.publicnode.com";
// Funding plan is required; a missing file fails fast here.
const fundingPlan = JSON.parse(readFileSync(fundingPlanPath, "utf8"));
/**
 * Parse a decimal string/number (e.g. "1.5") into integer base units as a BigInt.
 * Extra fractional digits beyond `decimals` are truncated.
 * FIX: the previous version applied the sign only to the whole part, so
 * "-1.5" parsed as (-1 + 0.5) units; negatives are now handled correctly.
 * Backward compatible for all non-negative inputs.
 *
 * @param {string|number} value - decimal amount, optionally signed
 * @param {number} [decimals=18] - base-unit scale
 * @returns {bigint} value scaled by 10**decimals
 */
function parseUnitsDecimal(value, decimals = 18) {
  const text = String(value).trim();
  const negative = text.startsWith("-");
  const unsigned = negative ? text.slice(1) : text;
  const [whole, frac = ""] = unsigned.split(".");
  // Right-pad the fractional digits to exactly `decimals` places.
  const padded = `${frac}${"0".repeat(decimals)}`.slice(0, decimals);
  const magnitude = BigInt(whole || "0") * 10n ** BigInt(decimals) + BigInt(padded || "0");
  return negative ? -magnitude : magnitude;
}
/**
 * Render a raw BigInt amount as a human-readable decimal string,
 * trimming trailing zeros (1500000n, 6 -> "1.5"; exact wholes drop the dot).
 */
function decimalUnits(raw, decimals) {
  const divisor = 10n ** BigInt(decimals);
  const integerPart = (raw / divisor).toString();
  let fractionPart = (raw % divisor).toString().padStart(decimals, "0");
  while (fractionPart.endsWith("0")) fractionPart = fractionPart.slice(0, -1);
  return fractionPart.length > 0 ? `${integerPart}.${fractionPart}` : integerPart;
}
// Strip an optional 0x/0X prefix and left-pad to a 64-hex-char ABI word.
function padAddress(address) {
  const stripped = String(address).replace(/^0x/i, "");
  return stripped.padStart(64, "0");
}
/**
 * Minimal JSON-RPC 2.0 POST with a 12-second abort timeout.
 * Resolves to { ok: true, result } or { ok: false, error } — never throws.
 */
async function rpcCall(rpcUrl, method, params) {
  const abort = new AbortController();
  const timer = setTimeout(() => abort.abort(), 12_000);
  try {
    const res = await fetch(rpcUrl, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({ jsonrpc: "2.0", method, params, id: 1 }),
      signal: abort.signal,
    });
    const body = await res.json();
    if (body.error) {
      return { ok: false, error: body.error.message || JSON.stringify(body.error) };
    }
    return { ok: true, result: body.result };
  } catch (err) {
    return { ok: false, error: err.message };
  } finally {
    // Always clear the timer so the process can exit promptly.
    clearTimeout(timer);
  }
}
// Parse a 0x-prefixed hex quantity; an empty or missing payload counts as zero.
function bigintFromHex(hex) {
  return hex && hex !== "0x" ? BigInt(hex) : 0n;
}
/**
 * Read ERC-20 allowance(owner, spender) on Mainnet via eth_call.
 * Returns { ok, raw, units, error }; a failed call reports 0 with the error set.
 */
async function erc20Allowance(token, owner, spender) {
  if (!spender) return { ok: false, raw: "0", units: "0", error: "missing_spender" };
  // allowance(address,address) selector followed by two ABI-padded address words.
  const callData = `0xdd62ed3e${padAddress(owner)}${padAddress(spender)}`;
  const response = await rpcCall(ethereumRpc, "eth_call", [{ to: token, data: callData }, "latest"]);
  const amount = response.ok ? bigintFromHex(response.result) : 0n;
  return {
    ok: response.ok,
    raw: amount.toString(),
    units: decimalUnits(amount, sourceDecimals),
    error: response.ok ? null : response.error,
  };
}
/**
 * Size the source-token spend that covers a native-gas shortfall at the rate
 * implied by `quote`, scaled by the safety multiplier (bps) and clamped to
 * [minSpendRaw, maxSpendRaw]. Falls back to the max when the quote has no
 * usable from/to amounts.
 */
function spendForShortfall(shortfallNative, quote) {
  const neededNativeRaw = parseUnitsDecimal(shortfallNative, 18);
  const outRaw = BigInt(quote.estimate?.toAmount || "0");
  const inRaw = BigInt(quote.estimate?.fromAmount || "0");
  if (outRaw === 0n || inRaw === 0n) return maxSpendRaw;
  // Ceil-divide at each step so rounding never undershoots the target.
  const targetOutRaw = (neededNativeRaw * safetyMultiplierBps + 9_999n) / 10_000n;
  const uncapped = (targetOutRaw * inRaw + outRaw - 1n) / outRaw;
  if (uncapped < minSpendRaw) return minSpendRaw;
  return uncapped > maxSpendRaw ? maxSpendRaw : uncapped;
}
/**
 * Fetch a LiFi /v1/quote for sourceToken (Mainnet) -> destination-chain native gas.
 * 20-second abort timeout; never throws. Returns { ok, statusCode, url, data, error };
 * non-JSON responses are preserved as { raw: text }.
 */
async function lifiQuote(toChain, fromAmountRaw) {
  const endpoint = new URL("https://li.quest/v1/quote");
  const query = {
    fromChain: String(sourceChain),
    toChain: String(toChain),
    fromToken: sourceTokenAddress,
    // Some chains (e.g. Celo) model native gas as a canonical ERC-20, not address(0).
    toToken: nativeTokenOverrides[toChain] || nativeTokenAddress,
    fromAmount: String(fromAmountRaw),
    fromAddress: deployer,
  };
  for (const [key, value] of Object.entries(query)) endpoint.searchParams.set(key, value);
  const abort = new AbortController();
  const timer = setTimeout(() => abort.abort(), 20_000);
  try {
    const response = await fetch(endpoint, { signal: abort.signal });
    const text = await response.text();
    let data;
    try {
      data = JSON.parse(text);
    } catch {
      data = { raw: text };
    }
    return {
      ok: response.ok,
      statusCode: response.status,
      url: endpoint.toString(),
      data,
      error: response.ok ? null : text.slice(0, 500),
    };
  } catch (error) {
    return { ok: false, statusCode: 0, url: endpoint.toString(), data: null, error: error.message };
  } finally {
    clearTimeout(timer);
  }
}
// Only remediate target chains whose gas-budget row is flagged as a shortfall.
const shortfalls = fundingPlan.chainGasBudgetRows.filter((row) => (
targetChains.has(Number(row.chainId)) && row.status === "chain_gas_budget_shortfall"
));
// Locate the Mainnet source-token entry in the funding plan's inventory,
// matched by symbol or (case-insensitive) address.
const sourceInventory = fundingPlan.ethereumSourceInventory?.tokens?.find((token) => (
token.symbol === sourceToken || String(token.address).toLowerCase() === String(sourceTokenAddress).toLowerCase()
));
const sourceBalanceRaw = BigInt(sourceInventory?.balanceRaw || "0");
const rows = [];
// Per-shortfall quoting: probe at the minimum spend first, then re-quote at
// the sized spend when it differs. Sequential awaits keep request rate low.
for (const shortfall of shortfalls) {
const oneDollarQuote = await lifiQuote(shortfall.chainId, minSpendRaw);
let spendRaw = minSpendRaw;
let finalQuote = oneDollarQuote;
if (oneDollarQuote.ok) {
spendRaw = spendForShortfall(shortfall.shortfallNative, oneDollarQuote.data);
if (spendRaw !== minSpendRaw) finalQuote = await lifiQuote(shortfall.chainId, spendRaw);
}
const estimate = finalQuote.data?.estimate ?? {};
const tx = finalQuote.data?.transactionRequest ?? null;
// Flatten the quote into a report row; raw values stay strings for JSON safety.
rows.push({
chainId: shortfall.chainId,
symbols: shortfall.symbols,
nativeSymbol: shortfall.nativeSymbol,
shortfallNative: shortfall.shortfallNative,
spendRaw: spendRaw.toString(),
spend: decimalUnits(spendRaw, sourceDecimals),
spendUSDC: sourceToken === "USDC" ? decimalUnits(spendRaw, sourceDecimals) : null,
quoteOk: finalQuote.ok,
quoteStatusCode: finalQuote.statusCode,
tool: finalQuote.data?.tool ?? estimate.tool ?? null,
toAmountRaw: estimate.toAmount ?? null,
toAmountMinRaw: estimate.toAmountMin ?? null,
toAmountNative: estimate.toAmount ? decimalUnits(BigInt(estimate.toAmount), 18) : null,
fromAmountUSD: estimate.fromAmountUSD ?? null,
toAmountUSD: estimate.toAmountUSD ?? null,
mainnetGasCostRaw: estimate.gasCosts?.[0]?.amount ?? null,
mainnetGasCostETH: estimate.gasCosts?.[0]?.amount ? decimalUnits(BigInt(estimate.gasCosts[0].amount), 18) : null,
approvalAddress: estimate.approvalAddress ?? null,
transactionTo: tx?.to ?? null,
transactionValue: tx?.value ?? null,
transactionGasLimit: tx?.gasLimit ?? null,
transactionDataPresent: Boolean(tx?.data),
transactionRequest: tx ? {
chainId: tx.chainId,
to: tx.to,
value: tx.value,
gasLimit: tx.gasLimit,
gasPrice: tx.gasPrice,
data: tx.data,
} : null,
quoteUrl: finalQuote.url,
blocker: finalQuote.ok ? null : finalQuote.error,
});
}
// For each distinct approval spender, compare on-chain allowance against the
// summed required spend across its rows.
const allowanceBySpender = {};
for (const spender of [...new Set(rows.map((row) => row.approvalAddress).filter(Boolean))]) {
const allowance = await erc20Allowance(sourceTokenAddress, deployer, spender);
const requiredRaw = rows
.filter((row) => row.approvalAddress === spender)
.reduce((sum, row) => sum + BigInt(row.spendRaw || "0"), 0n);
allowanceBySpender[spender] = {
spender,
requiredRaw: requiredRaw.toString(),
required: decimalUnits(requiredRaw, sourceDecimals),
requiredUSDC: sourceToken === "USDC" ? decimalUnits(requiredRaw, sourceDecimals) : null,
allowanceRaw: allowance.raw,
allowance: allowance.units,
allowanceUSDC: sourceToken === "USDC" ? allowance.units : null,
sufficient: BigInt(allowance.raw || "0") >= requiredRaw,
error: allowance.error,
};
}
// Totals across all quoted rows: planned spend and quoted Mainnet bridge gas.
const totalSpendRaw = rows.reduce((sum, row) => sum + BigInt(row.spendRaw || "0"), 0n);
const totalMainnetGasRaw = rows.reduce((sum, row) => sum + BigInt(row.mainnetGasCostRaw || "0"), 0n);
const mainnetEth = fundingPlan.ethereumSourceInventory?.native?.balanceRaw ? BigInt(fundingPlan.ethereumSourceInventory.native.balanceRaw) : 0n;
// Final artifact payload: read-only plan — readiness.broadcastReady is
// intentionally hard-coded false (this planner never signs or broadcasts).
const payload = {
generatedAt: new Date().toISOString(),
mode: "read_only_no_broadcast",
deployer,
source: {
chainId: sourceChain,
token: sourceToken,
tokenAddress: sourceTokenAddress,
decimals: sourceDecimals,
balanceRaw: sourceBalanceRaw.toString(),
balance: decimalUnits(sourceBalanceRaw, sourceDecimals),
balanceUSDC: sourceToken === "USDC" ? decimalUnits(sourceBalanceRaw, sourceDecimals) : null,
},
policy: {
safetyMultiplierBps: Number(safetyMultiplierBps),
minSpend: decimalUnits(minSpendRaw, sourceDecimals),
maxSpend: decimalUnits(maxSpendRaw, sourceDecimals),
unit: sourceToken,
minSpendUSDC: sourceToken === "USDC" ? decimalUnits(minSpendRaw, sourceDecimals) : null,
maxSpendUSDC: sourceToken === "USDC" ? decimalUnits(maxSpendRaw, sourceDecimals) : null,
},
totals: {
spendRaw: totalSpendRaw.toString(),
spend: decimalUnits(totalSpendRaw, sourceDecimals),
spendUSDC: sourceToken === "USDC" ? decimalUnits(totalSpendRaw, sourceDecimals) : null,
sourceBalanceAfterRaw: sourceBalanceRaw > totalSpendRaw ? (sourceBalanceRaw - totalSpendRaw).toString() : "0",
sourceBalanceAfter: decimalUnits(sourceBalanceRaw > totalSpendRaw ? sourceBalanceRaw - totalSpendRaw : 0n, sourceDecimals),
sourceBalanceAfterUSDC: sourceToken === "USDC" ? decimalUnits(sourceBalanceRaw > totalSpendRaw ? sourceBalanceRaw - totalSpendRaw : 0n, sourceDecimals) : null,
mainnetGasCostRaw: totalMainnetGasRaw.toString(),
mainnetGasCostETH: decimalUnits(totalMainnetGasRaw, 18),
mainnetEthBalanceRaw: mainnetEth.toString(),
mainnetEthBalance: decimalUnits(mainnetEth, 18),
mainnetEthAfterBridgeGasRaw: mainnetEth > totalMainnetGasRaw ? (mainnetEth - totalMainnetGasRaw).toString() : "0",
mainnetEthAfterBridgeGas: decimalUnits(mainnetEth > totalMainnetGasRaw ? mainnetEth - totalMainnetGasRaw : 0n, 18),
},
readiness: {
sourceTokenSufficient: sourceBalanceRaw >= totalSpendRaw,
sourceUsdcSufficient: sourceToken === "USDC" ? sourceBalanceRaw >= totalSpendRaw : null,
mainnetEthGasSufficientForQuotedBridgeTxs: mainnetEth >= totalMainnetGasRaw,
allQuotesOk: rows.every((row) => row.quoteOk),
sourceAllowancesSufficient: Object.values(allowanceBySpender).every((row) => row.sufficient),
usdcAllowancesSufficient: sourceToken === "USDC" ? Object.values(allowanceBySpender).every((row) => row.sufficient) : null,
broadcastReady: false,
broadcastBoundary: "Quotes include transaction data, but this planner intentionally does not sign, approve, or broadcast.",
},
allowances: Object.values(allowanceBySpender),
rows,
};
// Render a GitHub-flavored markdown table. Pipes inside data cells are
// escaped; null/undefined cells render empty. Headers are used verbatim.
function table(headers, tableRows) {
  const renderRow = (cells) => `| ${cells.join(" | ")} |`;
  const escapeCell = (cell) => String(cell ?? "").replace(/\|/g, "\\|");
  const lines = [renderRow(headers), renderRow(headers.map(() => "---"))];
  for (const row of tableRows) lines.push(renderRow(row.map(escapeCell)));
  return lines.join("\n");
}
// Markdown companion report: headline stats, readiness checks, allowances,
// per-chain quotes, and an explicit execution boundary.
const md = [
"# Coffee-Money Gas Top-Up Plan",
"",
`- Generated: \`${payload.generatedAt}\``,
`- Mode: \`${payload.mode}\``,
`- Deployer: \`${deployer}\``,
`- Source: \`${payload.source.balance} ${sourceToken}\` on Ethereum Mainnet`,
`- Planned spend: \`${payload.totals.spend} ${sourceToken}\``,
`- Mainnet bridge gas estimate: \`${payload.totals.mainnetGasCostETH} ETH\``,
`- Mainnet ETH after quoted bridge gas: \`${payload.totals.mainnetEthAfterBridgeGas} ETH\``,
"",
table(
["Check", "Value"],
Object.entries(payload.readiness).map(([key, value]) => [key, value]),
),
"",
`## ${sourceToken} Allowances`,
"",
table(
["Spender", "Required", "Allowance", "Sufficient"],
payload.allowances.map((row) => [row.spender, `${row.required} ${sourceToken}`, `${row.allowance} ${sourceToken}`, row.sufficient]),
),
"",
"## Top-Up Quotes",
"",
table(
["Chain", "Symbols", "Need", "Spend", "Tool", "Out", "Mainnet gas", "Tx data"],
rows.map((row) => [
row.chainId,
row.symbols,
`${row.shortfallNative} ${row.nativeSymbol}`,
`${row.spend} ${sourceToken}`,
row.tool,
row.toAmountNative ? `${row.toAmountNative} ${row.nativeSymbol}` : "quote_failed",
row.mainnetGasCostETH ? `${row.mainnetGasCostETH} ETH` : "",
row.transactionDataPresent,
]),
),
"",
"## Execution Boundary",
"",
"This artifact proves route availability and bounded spend only. Before broadcast, each row still needs allowance/approval handling, final quote refresh, private-key signing, and tx submission.",
].join("\n");
// Persist both artifacts and print the JSON path for callers/pipelines.
mkdirSync(resolve(repoRoot, "reports/status"), { recursive: true });
writeFileSync(jsonOut, `${JSON.stringify(payload, null, 2)}\n`);
writeFileSync(mdOut, `${md}\n`);
console.log(jsonOut);

View File

@@ -0,0 +1,73 @@
#!/usr/bin/env bash
# Probe Dev VM SSH: LAN IP vs Cloudflare FQDN (tunnel + Access).
# See: docs/04-configuration/DEV_VM_SSH_REMOTE_ACCESS.md
#
# Usage:
# ./scripts/deployment/probe-dev-vm-ssh.sh
# DEV_VM_USER=dev1 DEV_VM_FQDN=ssh.dev.d-bis.org ./scripts/deployment/probe-dev-vm-ssh.sh
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# shellcheck source=/dev/null
# Best-effort load of IP config; defaults below cover a missing file.
source "${PROJECT_ROOT}/config/ip-addresses.conf" 2>/dev/null || true
USER_NAME="${DEV_VM_USER:-dev1}"
IP="${IP_DEV_VM:-192.168.11.59}"
FQDN="${DEV_VM_FQDN:-ssh.dev.d-bis.org}"
echo "=== Dev VM SSH probes (user=$USER_NAME) ==="
echo ""
# Probe 1: direct LAN SSH (BatchMode = no password prompts).
echo "1) LAN: BatchMode SSH to $IP"
if ssh -o BatchMode=yes -o ConnectTimeout=8 -o StrictHostKeyChecking=accept-new "${USER_NAME}@${IP}" true 2>/dev/null; then
echo " OK ${USER_NAME}@${IP}"
else
echo " FAIL ${USER_NAME}@${IP} (no route, firewall, or key not accepted)"
fi
echo ""
# Probe 2: DNS resolution of the Cloudflare hostname (dig is optional).
echo "2) DNS: $FQDN"
if command -v dig >/dev/null 2>&1; then
dig +short "$FQDN" A 2>/dev/null | head -3 | sed 's/^/ A: /' || true
dig +short "$FQDN" AAAA 2>/dev/null | head -2 | sed 's/^/ AAAA: /' || true
else
echo " (dig not installed; skip)"
fi
echo ""
# Probe 3: plain SSH to the FQDN — expected to fail behind a Cloudflare tunnel.
echo "3) Plain SSH to $FQDN:22 (usually FAILS behind Cloudflare — tunnel expects cloudflared client)"
# Temporarily disable errexit so a failed probe doesn't abort the script.
set +e
out=$(ssh -4 -o BatchMode=yes -o ConnectTimeout=12 -o StrictHostKeyChecking=accept-new "${USER_NAME}@${FQDN}" true 2>&1)
code=$?
set -e
if [[ "$code" -eq 0 ]]; then
echo " OK (unexpected for CF tunnel host — you may be using port-forward / direct)"
else
echo " FAIL (expected for tunnel hostname): $out"
fi
echo ""
# Probe 4: the supported path — SSH through `cloudflared access ssh` ProxyCommand.
echo "4) FQDN via cloudflared access ssh (needs cloudflared on PATH + Access policy / service token)"
# Include ~/bin so a user-local cloudflared install is found.
PATH="$HOME/bin:$PATH"
if command -v cloudflared >/dev/null 2>&1; then
set +e
out=$(ssh -o BatchMode=yes -o ConnectTimeout=25 \
-o ProxyCommand="cloudflared access ssh --hostname %h" \
-o StrictHostKeyChecking=accept-new \
"${USER_NAME}@${FQDN}" true 2>&1)
code=$?
set -e
if [[ "$code" -eq 0 ]]; then
echo " OK ProxyCommand → ${USER_NAME}@${FQDN}"
else
echo " FAIL: $out"
fi
else
echo " SKIP: cloudflared not in PATH"
echo " Install: https://developers.cloudflare.com/cloudflare-one/connections/connect-apps/install-and-setup/installation/"
echo " Then set CF_ACCESS_CLIENT_ID / CF_ACCESS_CLIENT_SECRET if using service tokens (see DEV_VM_SSH_REMOTE_ACCESS.md)."
fi
echo ""
echo "Done."

View File

@@ -6,6 +6,8 @@ PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/load-project-env.sh
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh"
# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/mev-protection.sh
source "${PROJECT_ROOT}/scripts/lib/mev-protection.sh"
: "${ETHEREUM_MAINNET_RPC:?ETHEREUM_MAINNET_RPC is required}"
: "${PRIVATE_KEY:?PRIVATE_KEY is required}"
@@ -131,24 +133,21 @@ EOF
exit 0
fi
mev_require_private_for_action "mainnet-cwusdc-usdc-univ2-canary-repair"
DEADLINE="$(( $(date +%s) + 1800 ))"
cast send "${USDC}" 'approve(address,uint256)(bool)' "${ROUTER}" "${QUOTE_IN_RAW}" \
--private-key "${PRIVATE_KEY}" --rpc-url "${ETHEREUM_MAINNET_RPC}"
mev_cast_send "${USDC}" 'approve(address,uint256)(bool)' "${ROUTER}" "${QUOTE_IN_RAW}"
cast send "${ROUTER}" 'swapExactTokensForTokens(uint256,uint256,address[],address,uint256)' \
mev_cast_send "${ROUTER}" 'swapExactTokensForTokens(uint256,uint256,address[],address,uint256)' \
"${QUOTE_IN_RAW}" "${MIN_BASE_OUT_RAW}" "[${USDC},${CWUSDC}]" "${SIGNER}" "${DEADLINE}" \
--private-key "${PRIVATE_KEY}" --rpc-url "${ETHEREUM_MAINNET_RPC}"
if [[ "${BALANCED_ADD_RAW}" != "0" ]]; then
cast send "${CWUSDC}" 'approve(address,uint256)(bool)' "${ROUTER}" "${BALANCED_ADD_RAW}" \
--private-key "${PRIVATE_KEY}" --rpc-url "${ETHEREUM_MAINNET_RPC}"
cast send "${USDC}" 'approve(address,uint256)(bool)' "${ROUTER}" "${BALANCED_ADD_RAW}" \
--private-key "${PRIVATE_KEY}" --rpc-url "${ETHEREUM_MAINNET_RPC}"
cast send "${ROUTER}" 'addLiquidity(address,address,uint256,uint256,uint256,uint256,address,uint256)' \
mev_cast_send "${CWUSDC}" 'approve(address,uint256)(bool)' "${ROUTER}" "${BALANCED_ADD_RAW}"
mev_cast_send "${USDC}" 'approve(address,uint256)(bool)' "${ROUTER}" "${BALANCED_ADD_RAW}"
mev_cast_send "${ROUTER}" 'addLiquidity(address,address,uint256,uint256,uint256,uint256,address,uint256)' \
"${CWUSDC}" "${USDC}" "${BALANCED_ADD_RAW}" "${BALANCED_ADD_RAW}" \
"${MIN_BALANCED_ADD_RAW}" "${MIN_BALANCED_ADD_RAW}" "${SIGNER}" "${DEADLINE}" \
--private-key "${PRIVATE_KEY}" --rpc-url "${ETHEREUM_MAINNET_RPC}"
fi
bash "${PROJECT_ROOT}/scripts/verify/snapshot-mainnet-cwusdc-usdc-preflight.sh"

View File

@@ -0,0 +1,74 @@
#!/usr/bin/env bash
# Chunked continuation of send-cwusdc-ei-matrix-targeted.sh using the full topup TSV.
# Uses ei-matrix-cwusdc-targeted-last-idx.txt: next index is last+1 (TSV line last+2).
# Chooses chunk size from current ETH and gas price (never exceeds --max-chunk).
# Stops when TSV exhausted, chunk size < 1, or the targeted script exits non-zero.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT"
# Tunables / inputs (all overridable via environment).
MAX_CHUNK="${EI_MATRIX_TARGETED_MAX_CHUNK:-500}"
TSV="${EI_MATRIX_TARGETED_TSV:-${PROJECT_ROOT}/reports/status/ei-matrix-cwusdc-topup-amounts.tsv}"
LAST_FILE="${PROJECT_ROOT}/reports/status/ei-matrix-cwusdc-targeted-last-idx.txt"
LOG="${EI_MATRIX_TARGETED_CHUNK_LOG:-${PROJECT_ROOT}/reports/status/ei-matrix-cwusdc-targeted-chunked.log}"
# shellcheck disable=SC1091
source "$PROJECT_ROOT/scripts/lib/load-project-env.sh"
[[ -f "$TSV" ]] || { echo "Missing TSV: $TSV" >&2; exit 1; }
[[ -f "$LAST_FILE" ]] || { echo "Missing progress file: $LAST_FILE" >&2; exit 1; }
command -v cast &>/dev/null || { echo "cast required" >&2; exit 1; }
RPC="${ETHEREUM_MAINNET_RPC:-${RPC_URL_1:-}}"
[[ -n "$RPC" ]] || { echo "ETHEREUM_MAINNET_RPC or RPC_URL_1 required" >&2; exit 1; }
[[ -n "${PRIVATE_KEY:-}" ]] || { echo "PRIVATE_KEY required" >&2; exit 1; }
D=$(cast wallet address --private-key "$PRIVATE_KEY")
TOTAL_LINES=$(wc -l <"$TSV" | tr -d ' ')
# All further output (ours and the child script's) appends to the chunk log.
exec >>"$LOG" 2>&1
echo "======== $(date -Is) chunked targeted cWUSDC start signer=$D max_chunk=$MAX_CHUNK tsv=$TSV"
while true; do
  # Read the last completed linear index. BUGFIX: an empty or garbled progress
  # file previously slipped through as "" (treated as 0 by shell arithmetic),
  # silently resuming from TSV line 2 and skipping index 0. Any non-integer
  # content now falls back to -1 (start of the TSV).
  LAST=$(tr -d '[:space:]' <"$LAST_FILE" || echo "-1")
  [[ "$LAST" =~ ^-?[0-9]+$ ]] || LAST="-1"
  # Index i lives on TSV line i+1, so the next unprocessed line is LAST+2.
  START=$((LAST + 2))
  REM=$((TOTAL_LINES - START + 1))
  if [[ "$REM" -le 0 ]]; then
    echo "======== $(date -Is) COMPLETE: all indices done (last_idx=$LAST lines=$TOTAL_LINES)"
    exit 0
  fi
  ETH_WEI=$(cast balance "$D" --rpc-url "$RPC" | awk '{print $1}')
  GP=$(cast gas-price --rpc-url "$RPC" | awk '{print $1}' | head -1)
  # Size the chunk so the worst-case fee spend stays within 85% of balance
  # (python3 for arbitrary-precision wei arithmetic).
  CHUNK=$(python3 -c "
eth = int('${ETH_WEI}')
gp = int('${GP}')
rem = int('${REM}')
mx = int('${MAX_CHUNK}')
# ~72k gas per ERC-20 transfer + 20% headroom on fee
cost = max(1, 72000 * gp * 12 // 10)
# spend at most 85% of ETH on this chunk
n = eth * 85 // 100 // cost
chunk = max(0, min(n, rem, mx))
print(chunk)
")
  if [[ "$CHUNK" -lt 1 ]]; then
    echo "======== $(date -Is) STOP: not enough ETH for one transfer (rem=$REM wei=$ETH_WEI gp=$GP). Top up and re-run."
    exit 2
  fi
  echo "-------- $(date -Is) last=$LAST start_line=$START chunk=$CHUNK rem=$REM eth_wei=$ETH_WEI gp=$GP"
  # Avoid tail|head under pipefail (SIGPIPE → exit 141).
  _end=$((START + CHUNK - 1))
  sed -n "${START},${_end}p" "$TSV" >"${PROJECT_ROOT}/reports/status/ei-matrix-cwusdc-chunk.tsv"
  awk '{print $1}' "${PROJECT_ROOT}/reports/status/ei-matrix-cwusdc-chunk.tsv" >"${PROJECT_ROOT}/reports/status/ei-matrix-cwusdc-chunk.idx"
  # The chunk is already sized against gas above, so skip the child's
  # aggregate gas preflight. `set -e` aborts this loop if the child fails.
  EI_MATRIX_SKIP_GAS_CHECK=1 \
    "$SCRIPT_DIR/send-cwusdc-ei-matrix-targeted.sh" \
    --amounts-tsv "${PROJECT_ROOT}/reports/status/ei-matrix-cwusdc-chunk.tsv" \
    --indices-file "${PROJECT_ROOT}/reports/status/ei-matrix-cwusdc-chunk.idx"
done

View File

@@ -0,0 +1,169 @@
#!/usr/bin/env bash
# Engine X UniV2 "public indexed loop" on Ethereum mainnet.
#
# Plans — and with EXECUTE=1 broadcasts — a round trip on the cWUSDC/USDC
# Uniswap v2 pair: swap cWUSDC for exactly TARGET_USDC_OUT_RAW USDC, then swap
# that USDC back to cWUSDC, with SLIPPAGE_BPS bounds on both legs. A JSON
# record of the plan (and, when executed, tx hashes plus post-state) is
# written under reports/status/.
#
# Env: ETHEREUM_MAINNET_RPC (required), PRIVATE_KEY or DEPLOYER_ADDRESS,
#      TARGET_USDC_OUT_RAW, SLIPPAGE_BPS, DEADLINE_SECONDS, EXECUTE=0|1.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/load-project-env.sh
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh"
# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/mev-protection.sh
source "${PROJECT_ROOT}/scripts/lib/mev-protection.sh"
: "${ETHEREUM_MAINNET_RPC:?ETHEREUM_MAINNET_RPC is required}"
# Mainnet contract addresses (env-overridable).
CWUSDC="${CWUSDC_MAINNET:-0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a}"
USDC="${USDC_MAINNET:-0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48}"
PAIR="${MAINNET_CWUSDC_USDC_UNIV2_PAIR:-0xC28706F899266b36BC43cc072b3a921BDf2C48D9}"
ROUTER="${CHAIN_1_UNISWAP_V2_ROUTER:-0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D}"
# Trade parameters.
TARGET_USDC_OUT_RAW="${TARGET_USDC_OUT_RAW:-10000}"   # raw USDC units bought, then sold back
SLIPPAGE_BPS="${SLIPPAGE_BPS:-100}"                   # per-leg tolerance, basis points
DEADLINE_SECONDS="${DEADLINE_SECONDS:-900}"           # swap deadline horizon
EXECUTE="${EXECUTE:-0}"                               # 0 = plan only, 1 = broadcast
STAMP="${ENGINE_X_UNIV2_LOOP_STAMP:-$(date -u +%Y%m%dT%H%M%SZ)}"
OUT_JSON="${OUT_JSON:-reports/status/engine-x-univ2-public-indexed-loop-${STAMP}.json}"
LATEST_JSON="${LATEST_JSON:-reports/status/engine-x-univ2-public-indexed-loop-latest.json}"
# Resolve the acting signer: derive from PRIVATE_KEY when set, otherwise fall
# back to DEPLOYER_ADDRESS (sufficient for plan-only runs).
if [[ -n "${PRIVATE_KEY:-}" ]]; then
  SIGNER="$(cast wallet address --private-key "${PRIVATE_KEY}")"
else
  SIGNER="${DEPLOYER_ADDRESS:-}"
fi
if [[ -z "${SIGNER}" ]]; then
  echo "Set PRIVATE_KEY or DEPLOYER_ADDRESS" >&2
  exit 1
fi
if [[ "${EXECUTE}" == "1" && -z "${PRIVATE_KEY:-}" ]]; then
  echo "PRIVATE_KEY is required when EXECUTE=1" >&2
  exit 1
fi
# Guard against a misconfigured pair: orientation must be token0=cWUSDC, token1=USDC.
TOKEN0="$(cast call "${PAIR}" 'token0()(address)' --rpc-url "${ETHEREUM_MAINNET_RPC}" | grep -oE '0x[a-fA-F0-9]{40}' | head -1)"
TOKEN1="$(cast call "${PAIR}" 'token1()(address)' --rpc-url "${ETHEREUM_MAINNET_RPC}" | grep -oE '0x[a-fA-F0-9]{40}' | head -1)"
if [[ "${TOKEN0,,}" != "${CWUSDC,,}" || "${TOKEN1,,}" != "${USDC,,}" ]]; then
  echo "Configured pair is not token0=cWUSDC/token1=USDC: ${PAIR}" >&2
  exit 1
fi
# Snapshot pre-trade state (reserves, balances, allowances, gas, block) for the record.
RESERVES_BEFORE="$(cast call "${PAIR}" 'getReserves()(uint112,uint112,uint32)' --rpc-url "${ETHEREUM_MAINNET_RPC}" | tr '\n' ' ')"
CW_BAL_BEFORE="$(cast call "${CWUSDC}" 'balanceOf(address)(uint256)' "${SIGNER}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | awk '{print $1}')"
USDC_BAL_BEFORE="$(cast call "${USDC}" 'balanceOf(address)(uint256)' "${SIGNER}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | awk '{print $1}')"
CW_ALLOW_BEFORE="$(cast call "${CWUSDC}" 'allowance(address,address)(uint256)' "${SIGNER}" "${ROUTER}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | awk '{print $1}')"
USDC_ALLOW_BEFORE="$(cast call "${USDC}" 'allowance(address,address)(uint256)' "${SIGNER}" "${ROUTER}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | awk '{print $1}')"
ETH_BEFORE="$(cast balance "${SIGNER}" --rpc-url "${ETHEREUM_MAINNET_RPC}")"
GAS_PRICE_WEI="$(cast gas-price --rpc-url "${ETHEREUM_MAINNET_RPC}")"
BLOCK_BEFORE="$(cast block-number --rpc-url "${ETHEREUM_MAINNET_RPC}")"
# Router quotes: cWUSDC needed for the exact-out leg, and cWUSDC expected back.
CW_IN_RAW="$(cast call --json "${ROUTER}" 'getAmountsIn(uint256,address[])(uint256[])' "${TARGET_USDC_OUT_RAW}" "[${CWUSDC},${USDC}]" --rpc-url "${ETHEREUM_MAINNET_RPC}" | jq -r '.[0][0]')"
ROUNDTRIP_CW_OUT_RAW="$(cast call --json "${ROUTER}" 'getAmountsOut(uint256,address[])(uint256[])' "${TARGET_USDC_OUT_RAW}" "[${USDC},${CWUSDC}]" --rpc-url "${ETHEREUM_MAINNET_RPC}" | jq -r '.[0][-1]')"
# Slippage bounds. NOTE(review): shell arithmetic is 64-bit — fine for raw
# amounts at this scale, but revisit before reusing with 18-decimal tokens.
MAX_CW_IN_RAW="$(( CW_IN_RAW * (10000 + SLIPPAGE_BPS) / 10000 + 1 ))"
MIN_CW_BACK_RAW="$(( ROUNDTRIP_CW_OUT_RAW * (10000 - SLIPPAGE_BPS) / 10000 ))"
if (( CW_BAL_BEFORE < MAX_CW_IN_RAW )); then
  echo "Insufficient cWUSDC: need ${MAX_CW_IN_RAW}, have ${CW_BAL_BEFORE}" >&2
  exit 1
fi
# Human-readable plan summary on stdout.
cat <<EOF
Engine X UniV2 public indexed loop plan
mode: ${EXECUTE}
pair: ${PAIR}
router: ${ROUTER}
signer: ${SIGNER}
target USDC out raw: ${TARGET_USDC_OUT_RAW}
cWUSDC max input raw: ${MAX_CW_IN_RAW}
expected cWUSDC input raw: ${CW_IN_RAW}
expected cWUSDC back raw: ${ROUNDTRIP_CW_OUT_RAW}
min cWUSDC back raw: ${MIN_CW_BACK_RAW}
reserves before: ${RESERVES_BEFORE}
EOF
mkdir -p "$(dirname "${OUT_JSON}")"
# Phase 1 of the JSON record: the plan (transactions filled in after execution).
python3 - "${OUT_JSON}" "${LATEST_JSON}" \
  "${EXECUTE}" "${STAMP}" "${BLOCK_BEFORE}" "${GAS_PRICE_WEI}" "${SIGNER}" "${PAIR}" "${ROUTER}" \
  "${CWUSDC}" "${USDC}" "${TARGET_USDC_OUT_RAW}" "${CW_IN_RAW}" "${MAX_CW_IN_RAW}" \
  "${ROUNDTRIP_CW_OUT_RAW}" "${MIN_CW_BACK_RAW}" "${RESERVES_BEFORE}" "${CW_BAL_BEFORE}" "${USDC_BAL_BEFORE}" \
  "${CW_ALLOW_BEFORE}" "${USDC_ALLOW_BEFORE}" "${ETH_BEFORE}" <<'PY'
import json
from pathlib import Path
import sys
(
    out_json, latest_json, execute, stamp, block, gas_price, signer, pair, router, cw, usdc,
    target, cw_in, max_cw_in, cw_back, min_cw_back, reserves, cw_bal, usdc_bal, cw_allow,
    usdc_allow, eth,
) = sys.argv[1:]
payload = {
    "schema": "engine-x-univ2-public-indexed-loop/v1",
    "executed": execute == "1",
    "stamp": stamp,
    "blockBefore": block,
    "gasPriceWei": gas_price,
    "signer": signer,
    "pair": pair,
    "router": router,
    "tokens": {"cwusdc": cw, "usdc": usdc},
    "targetUsdcOutRaw": target,
    "expectedCwusdcInputRaw": cw_in,
    "maxCwusdcInputRaw": max_cw_in,
    "expectedCwusdcBackRaw": cw_back,
    "minCwusdcBackRaw": min_cw_back,
    "reservesBefore": reserves,
    "balancesBefore": {"ethWei": eth, "cwusdcRaw": cw_bal, "usdcRaw": usdc_bal},
    "allowancesBefore": {"cwusdcRaw": cw_allow, "usdcRaw": usdc_allow},
    "transactions": {},
}
Path(out_json).write_text(json.dumps(payload, indent=2) + "\n")
Path(latest_json).write_text(json.dumps(payload, indent=2) + "\n")
PY
if [[ "${EXECUTE}" != "1" ]]; then
  cat <<EOF
Dry-run only. To broadcast this exact public indexed loop:
EXECUTE=1 TARGET_USDC_OUT_RAW=${TARGET_USDC_OUT_RAW} SLIPPAGE_BPS=${SLIPPAGE_BPS} \\
bash scripts/deployment/run-engine-x-univ2-public-indexed-loop.sh
EOF
  exit 0
fi
# Broadcast path — gate through the mev-protection helpers sourced above.
mev_require_private_for_action "engine-x-univ2-public-indexed-loop"
DEADLINE="$(( $(date +%s) + DEADLINE_SECONDS ))"
CW_APPROVE_TX=""
USDC_APPROVE_TX=""
FORWARD_TX=""
REVERSE_TX=""
# Approve only when the current allowance is short of the max input.
if (( CW_ALLOW_BEFORE < MAX_CW_IN_RAW )); then
  CW_APPROVE_TX="$(mev_cast_send "${CWUSDC}" 'approve(address,uint256)(bool)' "${ROUTER}" "${MAX_CW_IN_RAW}" --json | jq -r '.transactionHash')"
fi
# Leg 1: cWUSDC -> exact TARGET_USDC_OUT_RAW USDC, spending at most MAX_CW_IN_RAW.
FORWARD_TX="$(mev_cast_send "${ROUTER}" 'swapTokensForExactTokens(uint256,uint256,address[],address,uint256)' "${TARGET_USDC_OUT_RAW}" "${MAX_CW_IN_RAW}" "[${CWUSDC},${USDC}]" "${SIGNER}" "${DEADLINE}" --json | jq -r '.transactionHash')"
USDC_ALLOW_AFTER_FORWARD="$(cast call "${USDC}" 'allowance(address,address)(uint256)' "${SIGNER}" "${ROUTER}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | awk '{print $1}')"
if (( USDC_ALLOW_AFTER_FORWARD < TARGET_USDC_OUT_RAW )); then
  USDC_APPROVE_TX="$(mev_cast_send "${USDC}" 'approve(address,uint256)(bool)' "${ROUTER}" "${TARGET_USDC_OUT_RAW}" --json | jq -r '.transactionHash')"
fi
# Leg 2: the exact USDC amount back to cWUSDC, requiring at least MIN_CW_BACK_RAW.
REVERSE_TX="$(mev_cast_send "${ROUTER}" 'swapExactTokensForTokens(uint256,uint256,address[],address,uint256)' "${TARGET_USDC_OUT_RAW}" "${MIN_CW_BACK_RAW}" "[${USDC},${CWUSDC}]" "${SIGNER}" "${DEADLINE}" --json | jq -r '.transactionHash')"
# Post-trade snapshot, then phase 2: merge tx hashes and after-state into the record.
RESERVES_AFTER="$(cast call "${PAIR}" 'getReserves()(uint112,uint112,uint32)' --rpc-url "${ETHEREUM_MAINNET_RPC}" | tr '\n' ' ')"
CW_BAL_AFTER="$(cast call "${CWUSDC}" 'balanceOf(address)(uint256)' "${SIGNER}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | awk '{print $1}')"
USDC_BAL_AFTER="$(cast call "${USDC}" 'balanceOf(address)(uint256)' "${SIGNER}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | awk '{print $1}')"
ETH_AFTER="$(cast balance "${SIGNER}" --rpc-url "${ETHEREUM_MAINNET_RPC}")"
python3 - "${OUT_JSON}" "${LATEST_JSON}" "${CW_APPROVE_TX}" "${FORWARD_TX}" "${USDC_APPROVE_TX}" "${REVERSE_TX}" "${RESERVES_AFTER}" "${CW_BAL_AFTER}" "${USDC_BAL_AFTER}" "${ETH_AFTER}" <<'PY'
import json
from pathlib import Path
import sys
out_json, latest_json, cw_approve, forward, usdc_approve, reverse, reserves, cw_bal, usdc_bal, eth = sys.argv[1:]
payload = json.loads(Path(out_json).read_text())
payload["executed"] = True
payload["transactions"] = {
    "cwusdcApprove": cw_approve or None,
    "forwardCwusdcToUsdc": forward,
    "usdcApprove": usdc_approve or None,
    "reverseUsdcToCwusdc": reverse,
}
payload["reservesAfter"] = reserves
payload["balancesAfter"] = {"ethWei": eth, "cwusdcRaw": cw_bal, "usdcRaw": usdc_bal}
Path(out_json).write_text(json.dumps(payload, indent=2) + "\n")
Path(latest_json).write_text(json.dumps(payload, indent=2) + "\n")
print(json.dumps(payload["transactions"], indent=2))
PY

View File

@@ -6,6 +6,8 @@ PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/load-project-env.sh
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh"
# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/mev-protection.sh
source "${PROJECT_ROOT}/scripts/lib/mev-protection.sh"
: "${ETHEREUM_MAINNET_RPC:?ETHEREUM_MAINNET_RPC is required}"
@@ -107,7 +109,8 @@ EOF
exit 0
fi
cast send "${TOKEN_IN}" 'approve(address,uint256)' "${ROUTER}" "${AMOUNT_IN_RAW}" --rpc-url "${ETHEREUM_MAINNET_RPC}" --private-key "${PRIVATE_KEY}"
cast send "${ROUTER}" 'exactInputSingle((address,address,uint24,address,uint256,uint256,uint256,uint160))(uint256)' \
mev_require_private_for_action "engine-x-univ3-public-swap-proof"
mev_cast_send "${TOKEN_IN}" 'approve(address,uint256)' "${ROUTER}" "${AMOUNT_IN_RAW}"
mev_cast_send "${ROUTER}" 'exactInputSingle((address,address,uint24,address,uint256,uint256,uint256,uint160))(uint256)' \
"(${TOKEN_IN},${TOKEN_OUT},${FEE},${SIGNER},${DEADLINE},${AMOUNT_IN_RAW},${MIN_OUT_RAW},0)" \
--rpc-url "${ETHEREUM_MAINNET_RPC}" --private-key "${PRIVATE_KEY}"

View File

@@ -0,0 +1,32 @@
#!/usr/bin/env bash
# Fast/cheap EI matrix mainnet cWUSDC distribution via Multicall3 aggregate3 +
# transferFrom: a single approve(Multicall3, budget), then one on-chain tx per
# chunk (default 200 transfers). This wrapper only loads the project env,
# takes a singleton lock, and hands off to the Python core.
#
# Requires: cast, PRIVATE_KEY, ETHEREUM_MAINNET_RPC; token must allow transferFrom.
#
# Usage:
#   ./scripts/deployment/send-cwusdc-ei-matrix-multicall-batches.sh --dry-run \
#       --tsv reports/status/ei-matrix-cwusdc-topup-amounts.tsv
#   ./scripts/deployment/send-cwusdc-ei-matrix-multicall-batches.sh --execute \
#       --tsv reports/status/ei-matrix-cwusdc-topup-amounts.tsv
#
# Env: EI_MATRIX_MC_CHUNK (default 200), MULTICALL3_MAINNET, CWUSDC_MAINNET,
#      DEPLOYER_ADDRESS (dry-run only)
#
# Core: scripts/lib/ei_matrix_multicall3_cwusdc_batch.py
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT"
# shellcheck disable=SC1091
source "$PROJECT_ROOT/scripts/lib/load-project-env.sh"

# Swallow a conventional "--" separator before the pass-through args.
if [[ "${1:-}" == "--" ]]; then
  shift
fi

# Singleton guard: fd 9 holds a non-blocking flock for the process lifetime
# (it stays open across the exec into python3 below).
LOCK_FILE="${PROJECT_ROOT}/reports/status/ei-matrix-multicall3-send.lock"
mkdir -p "$(dirname "$LOCK_FILE")"
exec 9>"$LOCK_FILE"
flock -n 9 || {
  echo "Another send-cwusdc-ei-matrix-multicall-batches.sh is running (lock: $LOCK_FILE)." >&2
  exit 1
}

# Replace this shell with the batching core; all remaining CLI args pass through.
exec python3 "$PROJECT_ROOT/scripts/lib/ei_matrix_multicall3_cwusdc_batch.py" "$@"

View File

@@ -0,0 +1,297 @@
#!/usr/bin/env bash
# Targeted mainnet cWUSDC transfers to a subset of EI matrix wallets by linear index.
# Intended for triage: feed indices from run-ei-matrix-full-readiness-audit.sh gap files.
#
# Usage:
#   ./scripts/deployment/send-cwusdc-ei-matrix-targeted.sh [--dry-run] --send-raw R \
#     [--indices-file PATH] [--amounts-tsv PATH] [--resume-next] [--quiet-dry-run] [--legacy]
#
#   --indices-file   Newline-separated linear indices (default:
#                    reports/status/ei-matrix-readiness-gaps-mainnet-indices.txt).
#                    Empty lines and # comments ignored.
#   --send-raw R     Amount (raw, 6 decimals) per wallet when not using --amounts-tsv.
#   --amounts-tsv F  Tab-separated: linearIndex <TAB> amountRaw (must cover every
#                    index in the indices file).
#   --resume-next    Continue from reports/status/ei-matrix-cwusdc-targeted-last-idx.txt + 1
#                    (skips indices at or below the last completed one).
#
# Env: same as send-cwusdc-ei-matrix-wallets.sh (PRIVATE_KEY, ETHEREUM_MAINNET_RPC, CWUSDC_MAINNET, …)
# Lock:     reports/status/ei-matrix-cwusdc-targeted-send.lock
# Progress: reports/status/ei-matrix-cwusdc-targeted-last-idx.txt
# Failures: reports/status/ei-matrix-cwusdc-targeted-failures.log
#
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT"
# CLI defaults (flags below may override).
DRY_RUN=false
QUIET_DRY_RUN=false
CAST_LEGACY=false
RESUME_NEXT=false
INDICES_FILE="${EI_MATRIX_TARGETED_INDICES_FILE:-${PROJECT_ROOT}/reports/status/ei-matrix-readiness-gaps-mainnet-indices.txt}"
AMOUNTS_TSV=""
SEND_RAW=""
LAST_IDX_FILE="${PROJECT_ROOT}/reports/status/ei-matrix-cwusdc-targeted-last-idx.txt"
while [[ $# -gt 0 ]]; do
  case "$1" in
    --dry-run) DRY_RUN=true; shift ;;
    --quiet-dry-run) QUIET_DRY_RUN=true; shift ;;
    --legacy) CAST_LEGACY=true; shift ;;
    --resume-next) RESUME_NEXT=true; shift ;;
    --indices-file) INDICES_FILE="${2:?}"; shift 2 ;;
    --amounts-tsv) AMOUNTS_TSV="${2:?}"; shift 2 ;;
    --send-raw) SEND_RAW="${2:?}"; shift 2 ;;
    *) echo "Unknown arg: $1" >&2; exit 1 ;;
  esac
done
# Exactly one amount mode is required.
if [[ -z "$SEND_RAW" && -z "$AMOUNTS_TSV" ]]; then
  echo "Set --send-raw R or --amounts-tsv PATH." >&2
  exit 1
fi
if [[ -n "$SEND_RAW" && -n "$AMOUNTS_TSV" ]]; then
  echo "Use only one of --send-raw or --amounts-tsv." >&2
  exit 1
fi
# shellcheck disable=SC1091
source "$PROJECT_ROOT/scripts/lib/load-project-env.sh"
CWUSDC="${CWUSDC_MAINNET:-0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a}"
PUBLIC_ETH_RPC="${ETHEREUM_MAINNET_PUBLIC_RPC:-https://ethereum-rpc.publicnode.com}"
RPC="${ETHEREUM_MAINNET_RPC:-${RPC_URL_1:-${ETH_MAINNET_RPC_URL:-$PUBLIC_ETH_RPC}}}"
BALANCE_RPC="${EI_MATRIX_BALANCE_RPC:-$RPC}"   # read-only queries may use a separate endpoint
GRID="$PROJECT_ROOT/config/pmm-soak-wallet-grid.json"
[[ -f "$INDICES_FILE" ]] || { echo "Missing indices file: $INDICES_FILE" >&2; exit 1; }
[[ -f "$GRID" ]] || { echo "Missing $GRID" >&2; exit 1; }
command -v cast &>/dev/null || { echo "cast required" >&2; exit 1; }
command -v jq &>/dev/null || { echo "jq required" >&2; exit 1; }
[[ -n "${PRIVATE_KEY:-}" ]] || { echo "PRIVATE_KEY not set" >&2; exit 1; }
# Singleton lock on fd 200, held for the lifetime of the process.
LOCK_FILE="${PROJECT_ROOT}/reports/status/ei-matrix-cwusdc-targeted-send.lock"
mkdir -p "${PROJECT_ROOT}/reports/status"
exec 200>"$LOCK_FILE"
if ! flock -n 200; then
  echo "Another send-cwusdc-ei-matrix-targeted.sh is already running (lock: $LOCK_FILE)." >&2
  exit 1
fi
FROM_ADDR=$(cast wallet address --private-key "$PRIVATE_KEY")
# Warn (but proceed) when the RPC does not report Ethereum mainnet (chain id 1).
CHAIN_ID=$(cast chain-id --rpc-url "$RPC" 2>/dev/null | tr -d '[:space:]' || true)
[[ -n "$CHAIN_ID" ]] || CHAIN_ID="1"
if [[ "$CHAIN_ID" != "1" ]]; then
  echo "[WARN] chain-id=$CHAIN_ID (expected 1)." >&2
fi
pending_nonce() {
  # Signer's nonce at the "pending" tag, fetched over raw JSON-RPC and emitted
  # in decimal. Returns non-zero when the call fails or yields no result.
  local body reply nonce_hex
  body="{\"jsonrpc\":\"2.0\",\"method\":\"eth_getTransactionCount\",\"params\":[\"${FROM_ADDR}\",\"pending\"],\"id\":1}"
  reply=$(curl -sS -X POST "$RPC" -H "Content-Type: application/json" -d "$body" 2>/dev/null) || return 1
  nonce_hex=$(jq -r '.result // empty' <<<"$reply")
  [[ -n "$nonce_hex" ]] || return 1
  cast to-dec "$nonce_hex"
}
token_decimals() {
  # ERC-20 decimals() of the cWUSDC token via read-only eth_call.
  # Callers guard failure with `|| fallback` (RPC errors are suppressed here).
  cast call "$CWUSDC" 'decimals()(uint8)' --rpc-url "$BALANCE_RPC" 2>/dev/null | awk '{print $1}'
}
token_balance_raw() {
  # Signer's raw cWUSDC balance (integer token units) via read-only eth_call.
  cast call "$CWUSDC" "balanceOf(address)(uint256)" "$FROM_ADDR" --rpc-url "$BALANCE_RPC" 2>/dev/null | awk '{print $1}'
}
ERR_LOG="${PROJECT_ROOT}/reports/status/ei-matrix-cwusdc-targeted-failures.log"
# Normalize the indices file into IND_TMP: strip blank lines and # comments,
# keep purely numeric tokens, sorted and de-duplicated.
IND_TMP=$(mktemp)
grep -v '^[[:space:]]*$' "$INDICES_FILE" | sed 's/#.*//' \
  | awk '{ gsub(/[[:space:]]/, "", $0); if ($0 ~ /^[0-9]+$/) print $0 }' \
  | sort -n -u >"$IND_TMP" || true
if [[ ! -s "$IND_TMP" ]]; then
  echo "No numeric indices in $INDICES_FILE — nothing to do."
  rm -f "$IND_TMP"
  exit 0
fi
# --resume-next: drop every index at or below the recorded checkpoint.
if $RESUME_NEXT; then
  [[ -f "$LAST_IDX_FILE" ]] || { echo "Missing $LAST_IDX_FILE for --resume-next" >&2; rm -f "$IND_TMP"; exit 1; }
  _last="$(tr -d '[:space:]' < "$LAST_IDX_FILE" || echo "")"
  [[ -n "$_last" ]] || { echo "Empty $LAST_IDX_FILE" >&2; rm -f "$IND_TMP"; exit 1; }
  _filtered=$(mktemp)
  while read -r line; do
    [[ "$line" =~ ^[0-9]+$ ]] || continue
    if [[ "$line" -gt "$_last" ]]; then
      echo "$line"
    fi
  done <"$IND_TMP" >"$_filtered"
  mv "$_filtered" "$IND_TMP"
  echo "Resume-next: last completed idx=$_last; remaining indices: $(wc -l <"$IND_TMP" | tr -d ' ')"
fi
N_IND=$(wc -l <"$IND_TMP" | tr -d ' ')
if [[ "$N_IND" -eq 0 ]]; then
  echo "No indices to process after filters."
  rm -f "$IND_TMP"
  exit 0
fi
PAIR_TMP=$(mktemp)
# Early trap so temp files are removed even if the amounts join below fails;
# re-registered later once ADDR_AMT_TMP also exists.
cleanup() {
  [[ -f "$IND_TMP" ]] && rm -f "$IND_TMP"
  [[ -f "$PAIR_TMP" ]] && rm -f "$PAIR_TMP"
}
trap cleanup EXIT
if [[ -n "$AMOUNTS_TSV" ]]; then
  [[ -f "$AMOUNTS_TSV" ]] || { echo "Missing amounts TSV: $AMOUNTS_TSV" >&2; exit 1; }
  # Build map idx->amount in Python for validation + join
  python3 - "$IND_TMP" "$AMOUNTS_TSV" "$PAIR_TMP" <<'PY'
import sys
from pathlib import Path
ind_path = Path(sys.argv[1])
amt_path = Path(sys.argv[2])
out_path = Path(sys.argv[3])
wanted = [int(x) for x in ind_path.read_text().split() if x.strip().isdigit()]
wanted_set = set(wanted)
amap: dict[int, int] = {}
for line in amt_path.read_text().splitlines():
    # Tolerate trailing comments and either tab- or whitespace-separated columns.
    line = line.split("#", 1)[0].strip()
    if not line:
        continue
    parts = line.split("\t")
    if len(parts) < 2:
        parts = line.split()
    if len(parts) < 2:
        print(f"Bad amounts line: {line!r}", file=sys.stderr)
        sys.exit(1)
    idx = int(parts[0].strip())
    raw = int(parts[1].strip())
    amap[idx] = raw
# Fail fast if any requested index has no amount (partial sends are worse).
missing = sorted(wanted_set - set(amap))
if missing:
    print(f"Amounts TSV missing {len(missing)} indices (first 20): {missing[:20]}", file=sys.stderr)
    sys.exit(1)
lines = []
for idx in sorted(wanted_set):
    lines.append(f"{idx}\t{amap[idx]}")
out_path.write_text("\n".join(lines) + "\n", encoding="utf-8")
PY
  # PAIR_TMP now: idx \t raw per line sorted by idx
  # NOTE(review): awk accumulates in IEEE doubles — exact only below 2^53.
  # Fine for 6-decimal raws at this scale; verify before reusing with
  # 18-decimal token amounts.
  BUDGET_RAW=$(awk '{s+=$2} END {print s}' "$PAIR_TMP")
else
  # Flat mode: every selected index gets the same raw amount.
  while read -r idx; do
    echo -e "${idx}\t${SEND_RAW}"
  done <"$IND_TMP" | sort -n -t $'\t' -k1,1 >"$PAIR_TMP"
  BUDGET_RAW=$(python3 -c "print(int('$SEND_RAW') * int('$N_IND'))")
fi
ADDR_AMT_TMP=$(mktemp)
# Re-register cleanup now that ADDR_AMT_TMP exists too.
cleanup() {
  [[ -f "$IND_TMP" ]] && rm -f "$IND_TMP"
  [[ -f "$PAIR_TMP" ]] && rm -f "$PAIR_TMP"
  [[ -f "$ADDR_AMT_TMP" ]] && rm -f "$ADDR_AMT_TMP"
}
trap cleanup EXIT
# Resolve each linear index to its wallet address from the grid JSON.
while IFS=$'\t' read -r idx raw_amt; do
  addr=$(jq -r --argjson i "$idx" '.wallets[$i].address // empty' "$GRID")
  [[ -n "$addr" && "$addr" != null ]] || { echo "No address for index $idx in grid" >&2; exit 1; }
  echo -e "$addr\t$raw_amt\t$idx"
done <"$PAIR_TMP" >"$ADDR_AMT_TMP"
DECIMALS=$(token_decimals || echo "6")                # fall back to 6 if the RPC read fails
GAS_EST="${EI_MATRIX_SEND_GAS_EST:-70000}"            # assumed gas per ERC-20 transfer
HEADROOM_BPS="${EI_MATRIX_GAS_HEADROOM_BPS:-10500}"   # 10500 bps = 5% headroom on gas budget
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "EI matrix cWUSDC targeted transfer (mainnet)"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "RPC: $RPC"
echo "Token: $CWUSDC"
echo "Signer: $FROM_ADDR"
echo "Indices: $N_IND (from $INDICES_FILE)"
echo "Budget: $BUDGET_RAW raw total"
echo "Dry-run: $DRY_RUN"
echo ""
ETH_WEI=$(cast balance "$FROM_ADDR" --rpc-url "$BALANCE_RPC" 2>/dev/null | awk '{print $1}' || echo "0")
TOKEN_BAL=$(token_balance_raw || echo "0")
echo "Signer ETH: $ETH_WEI wei"
echo "Signer cWUSDC (raw): $TOKEN_BAL"
# Preflight: token balance must cover the whole budget. python3 is used for
# arbitrary-precision integer comparison (values may exceed 64-bit range).
if ! $DRY_RUN && [[ "${EI_MATRIX_SKIP_BALANCE_CHECK:-}" != "1" ]]; then
  if ! python3 -c "import sys; sys.exit(0 if int('$TOKEN_BAL') >= int('$BUDGET_RAW') else 1)"; then
    echo "Insufficient cWUSDC for budget $BUDGET_RAW raw." >&2
    exit 1
  fi
fi
# Preflight: ETH must cover count * gas_est * gas_price * headroom.
if ! $DRY_RUN && [[ "${EI_MATRIX_SKIP_GAS_CHECK:-}" != "1" ]]; then
  GAS_PRICE_WEI=$(cast gas-price --rpc-url "$RPC" 2>/dev/null | awk '{print $1}' | head -1)
  [[ -n "$GAS_PRICE_WEI" ]] || GAS_PRICE_WEI=0
  MIN_WEI=$(python3 -c "c=int('$N_IND'); g=int('$GAS_EST'); p=int('$GAS_PRICE_WEI'); b=int('$HEADROOM_BPS'); print(c*g*p*b//10000)")
  if ! python3 -c "import sys; sys.exit(0 if int('$ETH_WEI') >= int('$MIN_WEI') else 1)"; then
    echo "Insufficient ETH for gas (need ≈ $MIN_WEI wei)." >&2
    exit 1
  fi
fi
matrix_try_transfer() {
  # Send `raw_amt` cWUSDC to `addr` (grid linear index `idx`) using the
  # manually tracked global NONCE. Mutates globals: sent, failed, NONCE; on
  # success records `idx` in $LAST_IDX_FILE as the resume checkpoint.
  # Always returns 0 so the caller's read loop continues under `set -e`;
  # failures are appended to $ERR_LOG instead.
  local addr="$1" raw_amt="$2" idx="$3"
  local dec human out tx attempt=1
  dec="${DECIMALS:-6}"
  [[ "$raw_amt" != "0" ]] || { echo "[skip] idx=$idx zero"; return 0; }
  # Human-readable amount for logs only; falls back to the raw value.
  human=$(python3 -c "d=int('$dec'); a=int('$raw_amt'); print(f'{a / (10**d):.6f}')" 2>/dev/null || echo "$raw_amt")
  if $DRY_RUN; then
    if ! $QUIET_DRY_RUN; then
      echo "[dry-run] idx=$idx $addr raw=$raw_amt (~$human)"
    fi
    return 0
  fi
  local cast_extra=()
  $CAST_LEGACY && cast_extra+=(--legacy)
  # Up to two attempts: on a "nonce too low" error, refresh the pending nonce
  # once and retry; any other failure is logged and we move on.
  while [[ "$attempt" -le 2 ]]; do
    if out=$(cast send "$CWUSDC" "transfer(address,uint256)" "$addr" "$raw_amt" \
      --rpc-url "$RPC" --private-key "$PRIVATE_KEY" \
      --nonce "$NONCE" "${cast_extra[@]}" 2>&1); then
      # NOTE(review): assumes the tx hash is cast's last output line — confirm
      # against the installed cast version's `send` output format.
      tx=$(echo "$out" | tail -n1)
      echo "[ok] idx=$idx nonce=$NONCE $addr raw=$raw_amt tx=$tx"
      sent=$((sent + 1))
      NONCE=$((NONCE + 1))
      echo "$idx" >"$LAST_IDX_FILE"
      return 0
    fi
    if [[ "$attempt" -eq 1 ]] && echo "$out" | grep -qi 'nonce too low'; then
      NONCE=$(pending_nonce) || true
      attempt=$((attempt + 1))
      continue
    fi
    echo "[fail] idx=$idx $out" | tee -a "$ERR_LOG" >&2
    failed=$((failed + 1))
    # Best-effort nonce re-sync before the next wallet.
    NONCE=$(pending_nonce) || true
    return 0
  done
}
# Counters mutated by matrix_try_transfer.
sent=0
failed=0
NONCE=$(pending_nonce) || { echo "Could not read nonce" >&2; exit 1; }
echo "Starting nonce: $NONCE"
echo ""
# One transfer per resolved (address, amount, index) row.
while IFS=$'\t' read -r addr raw_amt idx; do
  matrix_try_transfer "$addr" "$raw_amt" "$idx"
done <"$ADDR_AMT_TMP"
if $DRY_RUN; then
  echo "Dry-run complete ($N_IND wallets)."
else
  echo "Done. sent=$sent failed=$failed"
fi

View File

@@ -0,0 +1,355 @@
#!/usr/bin/env bash
# Transfer Ethereum mainnet cWUSDC from PRIVATE_KEY holder to each address in
# config/pmm-soak-wallet-grid.json (EI matrix slice). Uses ERC-20 transfer(address,uint256).
#
# Modes (exactly one):
#   --send-raw R        Same raw units sent to every wallet in the slice.
#   --total-send-raw B  Total sent across the slice, split with ±spread then renormalized to B.
#
# Usage:
#   ./scripts/deployment/send-cwusdc-ei-matrix-wallets.sh [--dry-run] [--limit N] [--offset N|--resume-next]
#     (--send-raw R | --total-send-raw B [--spread-pct S])
#
#   --quiet-dry-run  With --dry-run, suppress per-wallet lines.
#   --legacy         Pass --legacy to cast send.
#
# Env: ETHEREUM_MAINNET_RPC, CWUSDC_MAINNET, PRIVATE_KEY,
#      EI_MATRIX_SEND_GAS_EST (default 70000), EI_MATRIX_GAS_HEADROOM_BPS (default 10500),
#      EI_MATRIX_SKIP_GAS_CHECK=1, EI_MATRIX_SKIP_BALANCE_CHECK=1 (operator risk).
#
# Progress: reports/status/ei-matrix-cwusdc-send-last-idx.txt
# Failures: reports/status/ei-matrix-cwusdc-send-failures.log
# Lock:     reports/status/ei-matrix-cwusdc-send.lock
#
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT"
# CLI defaults (flags below may override).
DRY_RUN=false
LIMIT=""
OFFSET="0"
OFFSET_EXPLICIT=false
RESUME_NEXT=false
SPREAD_PCT="${EI_MATRIX_SPREAD_PCT:-15}"
CAST_LEGACY=false
QUIET_DRY_RUN=false
SEND_RAW=""
TOTAL_SEND_RAW=""
while [[ $# -gt 0 ]]; do
  case "$1" in
    --dry-run) DRY_RUN=true; shift ;;
    --quiet-dry-run) QUIET_DRY_RUN=true; shift ;;
    --limit) LIMIT="${2:?}"; shift 2 ;;
    --resume-next) RESUME_NEXT=true; shift ;;
    --offset) OFFSET="${2:?}"; OFFSET_EXPLICIT=true; shift 2 ;;
    --spread-pct) SPREAD_PCT="${2:?}"; shift 2 ;;
    --send-raw) SEND_RAW="${2:?}"; shift 2 ;;
    --total-send-raw) TOTAL_SEND_RAW="${2:?}"; shift 2 ;;
    --legacy) CAST_LEGACY=true; shift ;;
    *) echo "Unknown arg: $1" >&2; exit 1 ;;
  esac
done
LAST_IDX_FILE="${EI_MATRIX_CWUSDC_SEND_LAST_IDX_FILE:-${PROJECT_ROOT}/reports/status/ei-matrix-cwusdc-send-last-idx.txt}"
# --offset and --resume-next are mutually exclusive ways to pick the start index.
if $RESUME_NEXT && $OFFSET_EXPLICIT; then
  echo "Use only one of --offset or --resume-next." >&2
  exit 1
fi
if $RESUME_NEXT; then
  [[ -f "$LAST_IDX_FILE" ]] || { echo "Missing last-index file for --resume-next: $LAST_IDX_FILE" >&2; exit 1; }
  _last="$(tr -d '[:space:]' < "$LAST_IDX_FILE" || echo "")"
  [[ -n "$_last" ]] || { echo "Empty $LAST_IDX_FILE" >&2; exit 1; }
  OFFSET=$((_last + 1))
  echo "Resume-next (send): last completed idx=$_last → offset=$OFFSET"
fi
# Exactly one amount mode is required.
if [[ -n "$SEND_RAW" && -n "$TOTAL_SEND_RAW" ]]; then
  echo "Use only one of --send-raw or --total-send-raw." >&2
  exit 1
fi
if [[ -z "$SEND_RAW" && -z "$TOTAL_SEND_RAW" ]]; then
  echo "Set --send-raw or --total-send-raw." >&2
  exit 1
fi
# shellcheck disable=SC1091
source "$PROJECT_ROOT/scripts/lib/load-project-env.sh"
CWUSDC="${CWUSDC_MAINNET:-0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a}"
PUBLIC_ETH_RPC="${ETHEREUM_MAINNET_PUBLIC_RPC:-https://ethereum-rpc.publicnode.com}"
RPC="${ETHEREUM_MAINNET_RPC:-${RPC_URL_1:-${ETH_MAINNET_RPC_URL:-$PUBLIC_ETH_RPC}}}"
BALANCE_RPC="${EI_MATRIX_BALANCE_RPC:-$RPC}"   # read-only queries may use a separate endpoint
# Singleton lock on fd 200, held for the lifetime of the process.
LOCK_FILE="${PROJECT_ROOT}/reports/status/ei-matrix-cwusdc-send.lock"
MANIFEST_DIR="${PROJECT_ROOT}/reports/status"
mkdir -p "$MANIFEST_DIR"
exec 200>"$LOCK_FILE"
if ! flock -n 200; then
  echo "Another send-cwusdc-ei-matrix-wallets.sh is already running (lock: $LOCK_FILE)." >&2
  exit 1
fi
GRID="$PROJECT_ROOT/config/pmm-soak-wallet-grid.json"
[[ -f "$GRID" ]] || { echo "Missing $GRID" >&2; exit 1; }
command -v cast &>/dev/null || { echo "cast required" >&2; exit 1; }
command -v jq &>/dev/null || { echo "jq required" >&2; exit 1; }
[[ -n "${PRIVATE_KEY:-}" ]] || { echo "PRIVATE_KEY not set" >&2; exit 1; }
FROM_ADDR=$(cast wallet address --private-key "$PRIVATE_KEY")
# Warn (but proceed) when the RPC does not report Ethereum mainnet (chain id 1).
CHAIN_ID=$(cast chain-id --rpc-url "$RPC" 2>/dev/null | tr -d '[:space:]' || true)
[[ -n "$CHAIN_ID" ]] || CHAIN_ID="1"
if [[ "$CHAIN_ID" != "1" ]]; then
  echo "[WARN] chain-id=$CHAIN_ID (expected 1)." >&2
fi
pending_nonce() {
  # Signer's nonce at the "pending" tag, fetched over raw JSON-RPC and emitted
  # in decimal. Returns non-zero when the call fails or yields no result.
  local body reply nonce_hex
  body="{\"jsonrpc\":\"2.0\",\"method\":\"eth_getTransactionCount\",\"params\":[\"${FROM_ADDR}\",\"pending\"],\"id\":1}"
  reply=$(curl -sS -X POST "$RPC" -H "Content-Type: application/json" -d "$body" 2>/dev/null) || return 1
  nonce_hex=$(jq -r '.result // empty' <<<"$reply")
  [[ -n "$nonce_hex" ]] || return 1
  cast to-dec "$nonce_hex"
}
token_decimals() {
  # ERC-20 decimals() of the cWUSDC token via read-only eth_call.
  # Callers guard failure with `|| fallback` (RPC errors are suppressed here).
  cast call "$CWUSDC" 'decimals()(uint8)' --rpc-url "$BALANCE_RPC" 2>/dev/null | awk '{print $1}'
}
token_balance_raw() {
  # Signer's raw cWUSDC balance (integer token units) via read-only eth_call.
  cast call "$CWUSDC" "balanceOf(address)(uint256)" "$FROM_ADDR" --rpc-url "$BALANCE_RPC" 2>/dev/null | awk '{print $1}'
}
generate_spread_amounts_raw() {
  # Print `count` raw amounts (one per line) that sum to exactly `budget`,
  # each drawn from a uniform ±spread% weight before renormalization.
  # Arguments: count (positive int), budget (non-negative int raw units),
  # spread (percent in [0, 100]). Randomness is unseeded, so runs differ.
  local count="$1" budget="$2" spread="$3"
  python3 - "$count" "$budget" "$spread" <<'PY'
import random
import sys
n = int(sys.argv[1])
budget = int(sys.argv[2])
spread = float(sys.argv[3])
if n <= 0:
    sys.exit("count must be positive")
if budget < 0:
    sys.exit("budget must be non-negative")
if spread < 0 or spread > 100:
    sys.exit("spread-pct must be in [0, 100]")
# Integer weights in [100-spread, 100+spread] percent, scaled by `base`
# so fractional spread values survive the integer math.
base = 10000
low_w = max(1, (100 * base - int(spread * base)) // 100)
high_w = (100 * base + int(spread * base)) // 100
w = [random.randint(low_w, high_w) for _ in range(n)]
s = sum(w)
raw = [(budget * wi) // s for wi in w]
# Floor division under-allocates by less than n in total; hand the remainder
# out one unit at a time so the amounts sum to exactly `budget`.
rem = budget - sum(raw)
for i in range(rem):
    raw[i % n] += 1
for x in raw:
    print(x)
PY
}
# Emit one destination address per line from the wallet grid JSON, slicing
# .wallets by $OFFSET and — when set — $LIMIT.
stream_addresses() {
  if [[ -z "${LIMIT:-}" ]]; then
    jq -r --argjson o "$OFFSET" '.wallets[$o:][] | .address' "$GRID"
  else
    jq -r --argjson o "$OFFSET" --argjson l "$LIMIT" '.wallets[$o:$o+$l][] | .address' "$GRID"
  fi
}
# Append-only failure log and last-successfully-sent index checkpoint
# (supports resuming a partially completed run via --offset).
ERR_LOG="${PROJECT_ROOT}/reports/status/ei-matrix-cwusdc-send-failures.log"
LAST_IDX="${PROJECT_ROOT}/reports/status/ei-matrix-cwusdc-send-last-idx.txt"
# matrix_try_transfer ADDR RAW_AMOUNT IDX
# Send RAW_AMOUNT of the token to ADDR using the current global $NONCE.
# Mutates globals: NONCE (incremented on success, refreshed from the chain on
# failure), sent/failed counters; writes IDX to $LAST_IDX on success.
# Always returns 0 so the caller's loop continues past individual failures.
matrix_try_transfer() {
  local addr="$1" raw_amt="$2" idx="$3"
  local dec human out tx attempt=1
  dec="${DECIMALS:-6}"
  # Zero allocations can occur when spreading a small budget over many wallets.
  if [[ "$raw_amt" == "0" ]]; then
    echo "[skip] idx=$idx $addr zero raw"
    return 0
  fi
  # Best-effort human-readable amount for log lines only (falls back to raw).
  human=$(python3 -c "d=int('$dec'); a=int('$raw_amt'); print(f'{a / (10**d):.{min(d,8)}f}')" 2>/dev/null || echo "$raw_amt")
  if $DRY_RUN; then
    if ! $QUIET_DRY_RUN; then
      echo "[dry-run] idx=$idx $addr raw=$raw_amt (~$human)"
    fi
    return 0
  fi
  local cast_extra=()
  $CAST_LEGACY && cast_extra+=(--legacy)
  # At most two attempts: a "nonce too low" error on the first attempt
  # triggers one nonce refresh + retry; any other error is terminal for
  # this wallet (but not for the run).
  while [[ "$attempt" -le 2 ]]; do
    if out=$(cast send "$CWUSDC" "transfer(address,uint256)" "$addr" "$raw_amt" \
      --rpc-url "$RPC" --private-key "$PRIVATE_KEY" \
      --nonce "$NONCE" "${cast_extra[@]}" 2>&1); then
      # NOTE(review): assumes the tx hash is the last line of cast output — confirm.
      tx=$(echo "$out" | tail -n1)
      echo "[ok] idx=$idx nonce=$NONCE $addr raw=$raw_amt (~$human) tx=$tx"
      sent=$((sent + 1))
      NONCE=$((NONCE + 1))
      echo "$idx" > "$LAST_IDX"
      return 0
    fi
    if [[ "$attempt" -eq 1 ]] && echo "$out" | grep -qi 'nonce too low'; then
      NONCE=$(pending_nonce) || true
      echo "[retry] idx=$idx nonce refreshed to $NONCE (nonce too low)" >&2
      attempt=$((attempt + 1))
      continue
    fi
    # Record the failure and resync the nonce; deliberately do not abort the run.
    echo "[fail] idx=$idx nonce=$NONCE $addr $out" | tee -a "$ERR_LOG" >&2
    failed=$((failed + 1))
    NONCE=$(pending_nonce) || true
    return 0
  done
}
# --- Run banner -------------------------------------------------------------
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "EI matrix cWUSDC transfer (mainnet)"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "RPC: $RPC"
echo "Token: $CWUSDC"
echo "Signer: $FROM_ADDR"
echo "Grid: $GRID"
echo "Dry-run: $DRY_RUN Quiet: $QUIET_DRY_RUN"
echo "Offset: $OFFSET Limit: ${LIMIT:-all}"
if [[ -n "$SEND_RAW" ]]; then
  echo "Mode: fixed --send-raw $SEND_RAW per wallet"
else
  echo "Mode: --total-send-raw $TOTAL_SEND_RAW spread: ±${SPREAD_PCT}% normalized"
fi
echo ""
# Token decimals (display only); fall back to 6 when the read yields nothing.
DECIMALS=$(token_decimals || echo "6")
# Work files: one address per line, and the matching raw amount per line.
ADDR_TMP=$(mktemp)
AMOUNTS_TMP=$(mktemp)
# Remove the temp files on any exit path.
cleanup_tmp() {
  [[ -f "$ADDR_TMP" ]] && rm -f "$ADDR_TMP"
  [[ -f "$AMOUNTS_TMP" ]] && rm -f "$AMOUNTS_TMP"
}
trap cleanup_tmp EXIT
stream_addresses > "$ADDR_TMP"
WALLET_COUNT=$(wc -l < "$ADDR_TMP" | tr -d '[:space:]')
if [[ -z "$WALLET_COUNT" || "$WALLET_COUNT" -eq 0 ]]; then
  echo "No wallets in range (offset=$OFFSET limit=${LIMIT:-all})." >&2
  exit 1
fi
# Amount plan: either a fixed per-wallet amount, or a randomized spread of a
# total budget across the slice (sum is exact by construction).
if [[ -n "$SEND_RAW" ]]; then
  awk -v r="$SEND_RAW" '{print r}' "$ADDR_TMP" > "$AMOUNTS_TMP"
  BUDGET_RAW=$((SEND_RAW * WALLET_COUNT))
else
  BUDGET_RAW="$TOTAL_SEND_RAW"
  if [[ "$BUDGET_RAW" -le 0 ]]; then
    echo "total-send-raw must be positive." >&2
    exit 1
  fi
  generate_spread_amounts_raw "$WALLET_COUNT" "$BUDGET_RAW" "$SPREAD_PCT" > "$AMOUNTS_TMP"
fi
# Invariant check: planned amounts must sum exactly to the budget.
# NOTE(review): awk sums with doubles — totals above 2^53 raw units could
# mis-compare; confirm expected budget magnitudes.
SUM_CHECK=$(awk '{s+=$1} END {print s}' "$AMOUNTS_TMP")
if [[ "$SUM_CHECK" != "$BUDGET_RAW" ]]; then
  echo "INTERNAL: amount sum $SUM_CHECK != budget $BUDGET_RAW" >&2
  exit 1
fi
# Audit manifest: run parameters plus a fingerprint of the amount plan.
AMOUNTS_SHA256=$(sha256sum "$AMOUNTS_TMP" | awk '{print $1}')
TS=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
MANIFEST="$MANIFEST_DIR/ei-matrix-cwusdc-send-manifest-${TS//:/-}.json"
cat >"$MANIFEST" <<EOF
{
  "version": 1,
  "kind": "ei-matrix-cwusdc-transfer",
  "timestamp": "$TS",
  "chainId": 1,
  "token": "$CWUSDC",
  "signer": "$FROM_ADDR",
  "offset": $OFFSET,
  "limit": ${LIMIT:-null},
  "walletCount": $WALLET_COUNT,
  "budgetRaw": "$BUDGET_RAW",
  "fixedSendRaw": ${SEND_RAW:-null},
  "spreadPct": ${SPREAD_PCT},
  "amountsSha256": "$AMOUNTS_SHA256",
  "manifestPath": "$MANIFEST"
}
EOF
echo "Manifest: $MANIFEST"
echo "Amounts SHA256: $AMOUNTS_SHA256"
echo ""
# Gas preflight parameters: per-tx gas estimate and headroom in basis points
# (10500 = 5% above the straight estimate).
GAS_EST="${EI_MATRIX_SEND_GAS_EST:-70000}"
HEADROOM_BPS="${EI_MATRIX_GAS_HEADROOM_BPS:-10500}"
ETH_WEI=$(cast balance "$FROM_ADDR" --rpc-url "$BALANCE_RPC" 2>/dev/null | awk '{print $1}' || echo "0")
ETH_HUMAN=$(python3 -c "print(f'{int(\"$ETH_WEI\") / 1e18:.6f}')" 2>/dev/null || echo "?")
echo "Signer ETH (gas): ${ETH_WEI} wei (~$ETH_HUMAN ETH)"
TOKEN_BAL=$(token_balance_raw || echo "0")
echo "Signer cWUSDC balance (raw): $TOKEN_BAL (need >= $BUDGET_RAW for this slice)"
# Token preflight (live runs only): refuse to start when the signer cannot
# cover the full slice. Comparison is done in python3 for arbitrary precision.
if ! $DRY_RUN && [[ "${EI_MATRIX_SKIP_BALANCE_CHECK:-}" != "1" ]]; then
  if python3 -c "import sys; sys.exit(0 if int('$TOKEN_BAL') >= int('$BUDGET_RAW') else 1)"; then
    echo "Token preflight OK: balance covers budget $BUDGET_RAW raw."
  else
    echo "Insufficient cWUSDC: have $TOKEN_BAL raw, need $BUDGET_RAW raw." >&2
    echo "Set EI_MATRIX_SKIP_BALANCE_CHECK=1 to override (operator risk)." >&2
    exit 1
  fi
fi
# Gas preflight (live runs only): count × gasEst × gasPrice × headroom/10000.
# A missing gas price degrades to 0, i.e. the check passes trivially.
if ! $DRY_RUN && [[ "${EI_MATRIX_SKIP_GAS_CHECK:-}" != "1" ]]; then
  GAS_PRICE_WEI=$(cast gas-price --rpc-url "$RPC" 2>/dev/null | awk '{print $1}' | head -1)
  [[ -n "$GAS_PRICE_WEI" ]] || GAS_PRICE_WEI=0
  MIN_WEI=$(python3 -c "c=int('$WALLET_COUNT'); g=int('$GAS_EST'); p=int('$GAS_PRICE_WEI'); b=int('$HEADROOM_BPS'); print(c*g*p*b//10000)")
  if python3 -c "import sys; sys.exit(0 if int('$ETH_WEI') >= int('$MIN_WEI') else 1)"; then
    echo "Gas preflight OK: est ${GAS_EST} gas/tx × $WALLET_COUNT × gasPrice $GAS_PRICE_WEI × (${HEADROOM_BPS}/10000) ≈ $MIN_WEI wei."
  else
    echo "Insufficient ETH for gas preflight. Need ≈ $MIN_WEI wei." >&2
    echo "Set EI_MATRIX_SKIP_GAS_CHECK=1 to override (operator risk)." >&2
    exit 1
  fi
fi
echo ""
# Preview: list every transfer when the slice is small (<= 6), otherwise the
# first three and last three entries.
echo "Sample transfers:"
if [[ "$WALLET_COUNT" -le 6 ]]; then
  _s_idx=$OFFSET
  while IFS=$'\t' read -r s_addr s_raw; do
    h=$(python3 -c "d=int('$DECIMALS'); a=int('$s_raw'); print(f'{a / (10**d):.6f}')" 2>/dev/null || echo "$s_raw")
    echo " idx=$_s_idx $s_addr raw=$s_raw (~$h cWUSDC)"
    _s_idx=$((_s_idx + 1))
  done < <(paste -d $'\t' "$ADDR_TMP" "$AMOUNTS_TMP")
else
  _s_idx=$OFFSET
  while IFS=$'\t' read -r s_addr s_raw; do
    h=$(python3 -c "d=int('$DECIMALS'); a=int('$s_raw'); print(f'{a / (10**d):.6f}')" 2>/dev/null || echo "$s_raw")
    echo " idx=$_s_idx $s_addr raw=$s_raw (~$h cWUSDC)"
    _s_idx=$((_s_idx + 1))
  done < <(paste -d $'\t' "$ADDR_TMP" "$AMOUNTS_TMP" | head -3)
  # Recompute the index for the tail slice.
  _s_idx=$((OFFSET + WALLET_COUNT - 3))
  while IFS=$'\t' read -r s_addr s_raw; do
    h=$(python3 -c "d=int('$DECIMALS'); a=int('$s_raw'); print(f'{a / (10**d):.6f}')" 2>/dev/null || echo "$s_raw")
    echo " idx=$_s_idx $s_addr raw=$s_raw (~$h cWUSDC)"
    _s_idx=$((_s_idx + 1))
  done < <(paste -d $'\t' "$ADDR_TMP" "$AMOUNTS_TMP" | tail -3)
fi
echo ""
# Main send loop: walk (address, amount) pairs in lockstep; counters and the
# nonce are globals mutated by matrix_try_transfer.
sent=0
failed=0
idx=$OFFSET
NONCE=$(pending_nonce) || { echo "Could not read pending nonce" >&2; exit 1; }
echo "Starting nonce (pending): $NONCE"
echo ""
while IFS=$'\t' read -r addr raw_amt; do
  matrix_try_transfer "$addr" "$raw_amt" "$idx"
  idx=$((idx + 1))
done < <(paste -d $'\t' "$ADDR_TMP" "$AMOUNTS_TMP")
if $DRY_RUN; then
  echo "Dry-run complete. Indices covered: $OFFSET..$((idx - 1))."
else
  echo "Done. Transfer txs attempted: sent=$sent failed=$failed"
fi

View File

@@ -0,0 +1,148 @@
#!/usr/bin/env python3
"""
Transfer native SOL (lamports) on Solana mainnet-beta (or any RPC you pass).
Loads ``SOLANA_RPC_URL`` and ``SOLANA_KEYPAIR_PATH`` from the environment when
set (after ``source scripts/lib/load-project-env.sh``). Submits via
``solana_jsonrpc.send_transaction_wire`` (``scripts/lib/solana_jsonrpc.py``) so
RPCs that return only a signature string for ``sendTransaction`` do not hit
``solana-py``'s ``SendTransactionResp`` parser (which can panic on ``missing field 'data'``).
Install (venv recommended)::
pip install -r scripts/lib/requirements-solana-ops.txt
"""
from __future__ import annotations
import argparse
import base64
import json
import os
import sys
from pathlib import Path
# ``scripts/lib`` is not a Python package; load ``solana_jsonrpc`` by path.
_LIB = Path(__file__).resolve().parents[1] / "lib"
if str(_LIB) not in sys.path:
sys.path.insert(0, str(_LIB))
import solana_jsonrpc # noqa: E402
def _load_keypair(path: Path):
with path.open() as f:
raw = json.load(f)
if not isinstance(raw, list) or len(raw) != 64:
raise SystemExit("keypair JSON must be a 64-element byte array (Solana CLI format)")
from solders.keypair import Keypair
return Keypair.from_bytes(bytes(raw))
def main() -> None:
    """Parse CLI args, build and sign a native SOL transfer, then submit it.

    Modes:
      * ``--lamports N``  — send a fixed amount (balance preflight applied).
      * ``--sweep-all``   — send the full balance minus ``--fee-lamports``.
      * ``--dry-run``     — build/sign only; print the base64 wire, no send.
    Exits with status 2 on configuration errors (missing deps/RPC/keypair).
    """
    p = argparse.ArgumentParser(description="Send native SOL via JSON-RPC (robust sendTransaction parsing).")
    p.add_argument("--to", required=True, help="Destination base58 pubkey")
    p.add_argument(
        "--lamports",
        type=int,
        help="Amount to send (excludes fee; payer pays fee separately). Omit with --sweep-all",
    )
    p.add_argument(
        "--sweep-all",
        action="store_true",
        help="Send entire balance minus 5000 lamports legacy fee reserve",
    )
    p.add_argument("--fee-lamports", type=int, default=5000, help="Reserved for fee when using --sweep-all")
    p.add_argument("--rpc-url", default=os.environ.get("SOLANA_RPC_URL", "").strip())
    p.add_argument(
        "--keypair",
        type=Path,
        default=Path(os.environ.get("SOLANA_KEYPAIR_PATH", "").strip() or "."),
        help="Solana CLI JSON keypair path (default: SOLANA_KEYPAIR_PATH)",
    )
    p.add_argument("--skip-preflight", action="store_true")
    p.add_argument(
        "--dry-run",
        action="store_true",
        help="Print base64 wire and exit without sending",
    )
    p.add_argument(
        "--no-wait",
        action="store_true",
        help="Do not poll getSignatureStatuses after send (default: wait up to 90s)",
    )
    args = p.parse_args()
    # solders is imported lazily so argparse --help works without the dep.
    try:
        from solders.hash import Hash
        from solders.pubkey import Pubkey
        from solders.system_program import TransferParams, transfer
        from solders.transaction import Transaction
    except ImportError:
        print(
            "Missing dependency: install with\n"
            " pip install -r scripts/lib/requirements-solana-ops.txt",
            file=sys.stderr,
        )
        raise SystemExit(2) from None
    if not args.rpc_url:
        print("Set SOLANA_RPC_URL or pass --rpc-url", file=sys.stderr)
        raise SystemExit(2)
    if not args.keypair.is_file():
        print(f"Keypair not found: {args.keypair}", file=sys.stderr)
        raise SystemExit(2)
    kp = _load_keypair(args.keypair)
    dest = Pubkey.from_string(args.to)
    src = kp.pubkey()
    # Determine the send amount. Dry-run never queries the RPC, so it requires
    # an explicit --lamports; live runs fetch the balance for preflight/sweep.
    if args.dry_run:
        if args.sweep_all:
            raise SystemExit("--dry-run requires explicit --lamports (no balance query)")
        if args.lamports is None:
            raise SystemExit("Pass --lamports N with --dry-run")
        send_lamports = args.lamports
    else:
        bal = solana_jsonrpc.get_balance_lamports(args.rpc_url, str(src))
        # --sweep-all takes precedence over --lamports when both are given.
        if args.sweep_all:
            send_lamports = bal - args.fee_lamports
            if send_lamports <= 0:
                raise SystemExit("Nothing to sweep after fee reserve")
        elif args.lamports is not None:
            send_lamports = args.lamports
            if send_lamports <= 0:
                raise SystemExit("--lamports must be positive")
            if bal < send_lamports + args.fee_lamports:
                raise SystemExit(
                    f"Insufficient balance: have {bal} lamports, need {send_lamports + args.fee_lamports}"
                )
        else:
            raise SystemExit("Pass --lamports N or --sweep-all")
    # Build and sign a single system-program transfer against a fresh blockhash.
    bh_str = solana_jsonrpc.get_latest_blockhash(args.rpc_url)
    bh = Hash.from_string(bh_str)
    ix = transfer(TransferParams(from_pubkey=src, to_pubkey=dest, lamports=send_lamports))
    tx = Transaction.new_signed_with_payer([ix], src, [kp], bh)
    wire = bytes(tx)
    if args.dry_run:
        print("blockhash", bh_str)
        print("wire_b64", base64.b64encode(wire).decode("ascii"))
        return
    # Submit the raw wire bytes; signature goes to stdout for scripting.
    sig = solana_jsonrpc.send_transaction_wire(
        args.rpc_url,
        wire,
        skip_preflight=args.skip_preflight,
        preflight_commitment="confirmed",
    )
    print(sig)
    # Default behavior: block until the signature is confirmed; status lines
    # go to stderr so stdout stays machine-readable.
    if not args.no_wait:
        st = solana_jsonrpc.wait_until_signature_confirmed(args.rpc_url, sig)
        print("confirmationStatus", st.get("confirmationStatus"), "slot", st.get("slot"), file=sys.stderr)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,127 @@
#!/usr/bin/env bash
# Sync Gov Portals monorepo from Gitea to CT 7804 (gov-portals-dev), install deps,
# build DBIS + ICCC (and OMNL/XOM when they define a "build" script), restart systemd units.
#
# CT 7804 typically runs on r630-04 (192.168.11.14); tarball deploys omit .git, so
# in-container "git pull" is not enough — this script refreshes a local clone then
# streams the tree into the container.
#
# Usage (from proxmox repo root):
# export GITEA_TOKEN=... # or ensure it is in .env (see .env.master.example)
# bash scripts/deployment/sync-gov-portals-ct-7804-from-git.sh
#
# Options:
# --skip-fetch Use GOV_PORTALS_SOURCE as-is (no git fetch; no token required)
# --dry-run Print steps only
#
# Env:
# GOV_PORTALS_SOURCE Default: /home/intlc/projects/gov-portals-monorepo
# GOV_PORTALS_REPO_URL Default: https://gitea.d-bis.org/Gov_Web_Portals/gov-portals-monorepo.git
# GOV_PORTALS_REF Default: main
# PROXMOX_HOST / DBIS_PORTAL_PROXMOX_HOST / PROXMOX_HOST_GOV_PORTALS Default: 192.168.11.14
# VMID_GOV_PORTALS Default: 7804
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# Optional IP map and root .env (for GITEA_TOKEN). nounset is relaxed around
# the .env source because it may reference unset variables.
# NOTE(review): the `a && b || c && d` chain relies on left-to-right list
# grouping; `set -u` is re-enabled on every path, but an `if` block would be
# clearer.
# shellcheck disable=SC1090
source "$PROJECT_ROOT/config/ip-addresses.conf" 2>/dev/null || true
# shellcheck disable=SC1090
[ -f "$PROJECT_ROOT/.env" ] && set +u && source "$PROJECT_ROOT/.env" 2>/dev/null || true && set -u
# Tunables — see the header comment for descriptions.
GOV_PORTALS_SOURCE="${GOV_PORTALS_SOURCE:-/home/intlc/projects/gov-portals-monorepo}"
GOV_PORTALS_REPO_URL="${GOV_PORTALS_REPO_URL:-https://gitea.d-bis.org/Gov_Web_Portals/gov-portals-monorepo.git}"
GOV_PORTALS_REF="${GOV_PORTALS_REF:-main}"
VMID_GOV_PORTALS="${VMID_GOV_PORTALS:-7804}"
PROXMOX_HOST="${DBIS_PORTAL_PROXMOX_HOST:-${PROXMOX_HOST_GOV_PORTALS:-192.168.11.14}}"
SKIP_FETCH=false
DRY_RUN=false
# Flag parsing: unknown arguments are silently ignored.
for arg in "$@"; do
  [[ "$arg" == "--skip-fetch" ]] && SKIP_FETCH=true
  [[ "$arg" == "--dry-run" ]] && DRY_RUN=true
done
# die MESSAGE… — print an ERROR line to stderr and abort the script.
die() { printf 'ERROR: %s\n' "$*" >&2; exit 1; }
# log MESSAGE… — print a timestamped (HH:MM:SS) progress line to stdout.
log() { printf '[%s] %s\n' "$(date +%H:%M:%S)" "$*"; }
[[ -d "$GOV_PORTALS_SOURCE" ]] || die "GOV_PORTALS_SOURCE is not a directory: $GOV_PORTALS_SOURCE"
# Authenticated git via an extra HTTP header (keeps the token out of the URL).
git_auth_args=()
if [[ -n "${GITEA_TOKEN:-}" ]]; then
  git_auth_args=(-c "http.extraHeader=Authorization: token ${GITEA_TOKEN}")
fi
if [[ "$SKIP_FETCH" != "true" ]]; then
  [[ -d "$GOV_PORTALS_SOURCE/.git" ]] || die "Not a git clone: $GOV_PORTALS_SOURCE (use --skip-fetch to rsync only)"
  if [[ ${#git_auth_args[@]} -eq 0 ]]; then
    die "GITEA_TOKEN is unset. Add it to $PROJECT_ROOT/.env or run: export GITEA_TOKEN=... (Or use --skip-fetch.)"
  fi
  if [[ "$DRY_RUN" == "true" ]]; then
    log "DRY: would git fetch $GOV_PORTALS_REF and submodule update in $GOV_PORTALS_SOURCE"
  else
    log "Fetching $GOV_PORTALS_REF and updating submodules in $GOV_PORTALS_SOURCE"
    git -C "$GOV_PORTALS_SOURCE" "${git_auth_args[@]}" fetch origin
    # Hard reset to the remote ref — any local commits in the clone are discarded.
    git -C "$GOV_PORTALS_SOURCE" reset --hard "origin/$GOV_PORTALS_REF"
    git -C "$GOV_PORTALS_SOURCE" "${git_auth_args[@]}" submodule update --init --recursive --force
    log "Monorepo HEAD: $(git -C "$GOV_PORTALS_SOURCE" log -1 --oneline)"
    if [[ -e "$GOV_PORTALS_SOURCE/DBIS/.git" ]]; then
      log "DBIS HEAD: $(git -C "$GOV_PORTALS_SOURCE/DBIS" log -1 --oneline)"
    fi
  fi
else
  log "Skipping git fetch (--skip-fetch)"
fi
# Stage under a unique /tmp dir on the Proxmox host, then stream into the CT.
SYNC_ID="gov-portals-ct-${VMID_GOV_PORTALS}-$(date +%s)"
REMOTE_SYNC="/tmp/$SYNC_ID"
if [[ "$DRY_RUN" == "true" ]]; then
  log "DRY: would rsync to root@$PROXMOX_HOST:$REMOTE_SYNC/ and tar into CT $VMID_GOV_PORTALS"
  exit 0
fi
log "Rsync to $PROXMOX_HOST:$REMOTE_SYNC/"
# Exclude VCS metadata and build caches at both the top level and one level
# down (nested packages).
rsync -az --delete \
  --exclude 'node_modules' --exclude '.next' --exclude '.git' \
  --exclude '*/node_modules' --exclude '*/.next' --exclude '*/.git' \
  "$GOV_PORTALS_SOURCE/" "root@$PROXMOX_HOST:$REMOTE_SYNC/"
# run_pve CMD… — execute a command on the Proxmox node over SSH.
# stdin is inherited, so callers may pipe/redirect data to remote commands.
run_pve() {
  local target="root@$PROXMOX_HOST"
  ssh -o ConnectTimeout=20 -o StrictHostKeyChecking=accept-new "$target" "$@"
}
VMID="$VMID_GOV_PORTALS"
# Preserve portal env files across the wipe: copy out, wipe the tree, then
# restore each file only when the synced tree did not bring its own copy.
run_pve "pct exec $VMID -- mkdir -p /srv/gov-portals /tmp/gov-env-7804"
for portal in DBIS ICCC; do
  for f in .env .env.local .env.production; do
    run_pve "pct exec $VMID -- bash -c '[ -f /srv/gov-portals/${portal}/${f} ] && cp -a /srv/gov-portals/${portal}/${f} /tmp/gov-env-7804/${portal}_${f} || true'"
  done
done
# Empty /srv/gov-portals (but keep the directory itself and its mountpoint).
run_pve "pct exec $VMID -- bash -c 'if [ -d /srv/gov-portals ]; then find /srv/gov-portals -mindepth 1 -maxdepth 1 -exec rm -rf {} +; else mkdir -p /srv/gov-portals; fi'"
# Stream the staged tree into the CT (tar over the pct exec pipe).
run_pve "bash -c 'cd $REMOTE_SYNC && tar cf - . | pct exec $VMID -- tar xf - -C /srv/gov-portals'"
for portal in DBIS ICCC; do
  for f in .env .env.local .env.production; do
    run_pve "pct exec $VMID -- bash -c '[ -f /tmp/gov-env-7804/${portal}_${f} ] && [ ! -f /srv/gov-portals/${portal}/${f} ] && cp -a /tmp/gov-env-7804/${portal}_${f} /srv/gov-portals/${portal}/${f} || true'"
  done
done
# Install deps once at the workspace root, then build + restart each portal.
run_pve "pct exec $VMID -- bash -lc 'export PATH=/usr/local/bin:/usr/bin:/bin:\$PATH; cd /srv/gov-portals && (pnpm install --frozen-lockfile || pnpm install)'"
run_pve "pct exec $VMID -- bash -lc 'export PATH=/usr/local/bin:/usr/bin:/bin:\$PATH; cd /srv/gov-portals/DBIS && pnpm run build && systemctl restart gov-portal-DBIS'"
run_pve "pct exec $VMID -- bash -lc 'export PATH=/usr/local/bin:/usr/bin:/bin:\$PATH; cd /srv/gov-portals/ICCC && pnpm run build && systemctl restart gov-portal-ICCC'"
# OMNL/XOM: build + restart only when the package defines a "build" script;
# failures there are non-fatal.
run_pve "pct exec $VMID -- bash -lc 'export PATH=/usr/local/bin:/usr/bin:/bin:\$PATH; for p in OMNL XOM; do d=/srv/gov-portals/\$p; if [ -f \"\$d/package.json\" ] && grep -qF \"\\\"build\\\"\" \"\$d/package.json\" 2>/dev/null; then (cd \"\$d\" && pnpm run build && systemctl restart gov-portal-\$p) || true; fi; done'"
# Health summary: unit states plus HTTP status codes from each portal port.
run_pve "pct exec $VMID -- bash -lc 'systemctl is-active gov-portal-DBIS gov-portal-ICCC gov-portal-OMNL gov-portal-XOM || true; printf DBIS:; curl -s -o /dev/null -w %{http_code} http://127.0.0.1:3001/; echo; printf ICCC:; curl -s -o /dev/null -w %{http_code} http://127.0.0.1:3002/; echo'"
# Clean up the staging dir on the Proxmox host.
run_pve "rm -rf $REMOTE_SYNC"
log "Removed $PROXMOX_HOST:$REMOTE_SYNC"
log "Done. CT $VMID_GOV_PORTALS on $PROXMOX_HOST updated."

View File

@@ -0,0 +1,154 @@
#!/usr/bin/env bash
# One-way sync: local workstation project tree → Dev VM (5700) /srv/projects
# for coordinated development over SSH (Cursor Remote-SSH, shared tree).
#
# Prerequisites:
# - CT 5700 exists, ssh dev1@IP_DEV_VM works, /srv/projects is writable (see setup-dev-vm-users-and-gitea.sh).
# - Run from a machine that has your local clone (default: ~/projects).
#
# Usage:
# ./scripts/deployment/sync-local-projects-to-dev-vm.sh --dry-run
# ./scripts/deployment/sync-local-projects-to-dev-vm.sh
# ./scripts/deployment/sync-local-projects-to-dev-vm.sh --delete-remote # mirror: remove remote files absent locally
# RSYNC_RSH='ssh -o ProxyCommand="cloudflared access ssh --hostname ssh.dev.d-bis.org"' \
# DEV_VM_HOST=ssh.dev.d-bis.org ./scripts/deployment/sync-local-projects-to-dev-vm.sh
#
# Env:
# SOURCE — local directory (default: ~/projects)
# DEV_VM_USER — SSH user on dev VM (default: dev1)
# DEV_VM_HOST — override IP/hostname (default: IP_DEV_VM from config)
# DEV_VM_PROJECTS — remote path (default: /srv/projects)
# RSYNC_EXTRA_OPTS — extra rsync args (quoted string)
# RSYNC_RSH — passed to rsync as SSH transport (e.g. cloudflared ProxyCommand)
#
# If --delete-remote fails with rsync code 23 (Permission denied on delete), run:
# ./scripts/deployment/fix-dev-vm-srv-projects-ownership.sh
# See: docs/04-configuration/DEV_VM_WORKSTATION_MIGRATION_RUNBOOK.md
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# Best-effort env/IP loading; both files are optional for this script.
# shellcheck source=/dev/null
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh" 2>/dev/null || true
# shellcheck source=/dev/null
source "${PROJECT_ROOT}/config/ip-addresses.conf" 2>/dev/null || true
# Tunables — see the header comment for descriptions.
SOURCE="${SOURCE:-${HOME}/projects}"
DEV_VM_USER="${DEV_VM_USER:-dev1}"
DEV_VM_HOST="${DEV_VM_HOST:-${IP_DEV_VM:-192.168.11.59}}"
DEV_VM_PROJECTS="${DEV_VM_PROJECTS:-/srv/projects}"
RSYNC_EXTRA_OPTS="${RSYNC_EXTRA_OPTS:-}"
DRY_RUN=()
DELETE_REMOTE=0
# Minimal flag parser: --dry-run maps to rsync -n; --delete-remote mirrors.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --dry-run) DRY_RUN=(-n); shift ;;
    --delete-remote) DELETE_REMOTE=1; shift ;;
    --help|-h)
      # Print this file's header comment (lines 2-35) as the usage text.
      sed -n '1,35p' "$0" | tail -n +2
      exit 0
      ;;
    *)
      echo "ERROR: unknown argument: $1 (try --help)" >&2
      exit 1
      ;;
  esac
done
if [[ ! -d "$SOURCE" ]]; then
  echo "ERROR: SOURCE is not a directory: $SOURCE" >&2
  exit 1
fi
REMOTE="${DEV_VM_USER}@${DEV_VM_HOST}:${DEV_VM_PROJECTS}/"
RSYNC_DELETE=()
if [[ "$DELETE_REMOTE" == "1" ]]; then
  # --delete-delay defers deletions until after the transfer, which is safer
  # if the run is interrupted.
  RSYNC_DELETE=(--delete-delay)
  echo "WARNING: --delete-remote — files on Dev VM under DEST not present locally (after excludes) will be REMOVED."
  echo ""
else
  echo "Safe mode: no remote delete (omit files only on VM). Use --delete-remote to mirror (destructive)."
  echo ""
fi
echo "=== Sync local projects → Dev VM ==="
echo "SOURCE: $SOURCE"
echo "DEST: $REMOTE"
echo "Mode: ${DRY_RUN[*]:-live}"
if [[ -n "${RSYNC_RSH:-}" ]]; then
  echo "RSYNC_RSH: set (custom SSH / cloudflared)"
fi
echo ""
echo "NOTE: Review secrets after sync — chmod 600 remote .env; share only with trusted dev users."
echo ""
# Heavy / reproducible artifacts (keep .git; omit bulky caches)
RSYNC_EXCLUDES=(
  --exclude=node_modules
  --exclude=__pycache__
  --exclude=.pnpm-store
  --exclude=.pnpm
  --exclude=venv
  --exclude=.venv
  --exclude=.venv-*
  --exclude=dist
  --exclude=build
  --exclude=.next
  --exclude=out
  --exclude=.turbo
  --exclude=.cache
  --exclude=.parcel-cache
  --exclude=coverage
  --exclude=.pytest_cache
  --exclude=.mypy_cache
  --exclude=.ruff_cache
  --exclude=forge-cache
  --exclude=artifacts
  --exclude=broadcast
  --exclude=tmp
  --exclude=.tmp
  --exclude=.codex-artifacts
)
# Optional: skip known multi-GB trees (re-clone or sync later with --no-skip-large)
if [[ "${SKIP_LARGE_LOCAL_TREES:-1}" == "1" ]]; then
  RSYNC_EXCLUDES+=(
    --exclude=MEV_Bot
    --exclude=the-order
  )
  echo "SKIP_LARGE_LOCAL_TREES=1: excluding MEV_Bot, the-order (set SKIP_LARGE_LOCAL_TREES=0 to include)."
  echo ""
fi
# Export RSYNC_RSH only when explicitly provided; rsync reads it from the env.
if [[ -n "${RSYNC_RSH:-}" ]]; then
  export RSYNC_RSH
else
  unset RSYNC_RSH 2>/dev/null || true
fi
# Trace the rsync invocation; suspend errexit so the exit code can be
# reported (and explained) instead of killing the script silently.
# RSYNC_EXTRA_OPTS is intentionally unquoted so multiple options word-split.
set -x
# shellcheck disable=SC2086
set +e
rsync -av "${DRY_RUN[@]}" "${RSYNC_DELETE[@]}" --omit-dir-times \
  "${RSYNC_EXCLUDES[@]}" \
  ${RSYNC_EXTRA_OPTS} \
  "${SOURCE}/" "${REMOTE}"
rsync_ec=$?
set -e
set +x
if [[ "$rsync_ec" -ne 0 ]]; then
  echo "" >&2
  echo "ERROR: rsync exited with code $rsync_ec." >&2
  # rsync exit 23 = partial transfer; combined with deletes this is usually
  # root-owned remote files blocking removal.
  if [[ "$rsync_ec" -eq 23 && "$DELETE_REMOTE" == "1" ]]; then
    echo " Common cause: root-owned files on the VM block deletes. From repo root (Proxmox root SSH to the node that runs CT 5700):" >&2
    echo " ./scripts/deployment/fix-dev-vm-srv-projects-ownership.sh" >&2
  fi
  exit "$rsync_ec"
fi
echo ""
echo "Done. Next: SSH to dev VM, open /srv/projects/proxmox in Cursor (Remote-SSH), run pnpm/npm install where needed."

View File

@@ -0,0 +1,26 @@
#!/usr/bin/env bash
# Quick RAM / Docker snapshot for act_runner CTs (5700 + 5701). Host loadavg inside LXCs
# tracks the Proxmox host — use docker stats for job CPU when containers are running.
#
# Usage:
# bash scripts/dev-vm/act-runner-resource-snapshot.sh
#
# Env:
# PROXMOX_HOST_R630_04 — default 192.168.11.14
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# Optional LAN IP map; falls back to the hard-coded default below.
[[ -f "${PROJECT_ROOT}/config/ip-addresses.conf" ]] && source "${PROJECT_ROOT}/config/ip-addresses.conf"
PVE="${PROXMOX_HOST_R630_04:-192.168.11.14}"
# print_ct VMID — print one CT's resource snapshot: hostname, CPU count,
# memory, loadavg, act-runner service accounting, and docker stats, gathered
# via `pct exec` on the Proxmox host.
print_ct() {
  local vmid="$1"
  local remote_cmd="pct exec ${vmid} -- bash -lc 'hostname; nproc; free -h | head -2; echo loadavg: \$(cat /proc/loadavg); systemctl show act-runner -p MemoryCurrent -p CPUUsageNSec --no-pager 2>/dev/null || true; docker stats --no-stream 2>/dev/null | head -12 || echo \"(no docker stats / no containers)\"'"
  echo "=== CT ${vmid} ==="
  ssh -o BatchMode=yes -o ConnectTimeout=12 "root@${PVE}" "$remote_cmd"
  echo ""
}
# Snapshot both runner CTs: 5700 (heavy pool) and 5701 (standard pool).
print_ct 5700
print_ct 5701

View File

@@ -0,0 +1,29 @@
#!/usr/bin/env bash
# Copy repo-managed act_runner YAML to CT 5700 / 5701 and restart services.
# Requires SSH to the Proxmox node that hosts both CTs (default r630-04).
#
# Usage (repo root):
# bash scripts/dev-vm/apply-act-runner-config.sh
#
# Env:
# PROXMOX_HOST_R630_04 — override Proxmox host IP (default 192.168.11.14)
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# Optional LAN IP map; falls back to the hard-coded default below.
[[ -f "${PROJECT_ROOT}/config/ip-addresses.conf" ]] && source "${PROJECT_ROOT}/config/ip-addresses.conf"
PVE="${PROXMOX_HOST_R630_04:-192.168.11.14}"
CFG_DIR="${PROJECT_ROOT}/config/gitea-act-runner"
# pve CMD… — run a command on the Proxmox node. The five invocations below
# previously repeated the same ssh option list verbatim; factored here (DRY).
# stdin is inherited, so callers can redirect a local file into a remote cat.
pve() {
  ssh -o BatchMode=yes -o StrictHostKeyChecking=accept-new "root@${PVE}" "$@"
}
# Ensure the target config directory exists in both CTs.
pve "pct exec 5700 -- mkdir -p /etc/act_runner"
pve "pct exec 5701 -- mkdir -p /etc/act_runner"
# Stream the repo-managed templates into place (heavy pool vs standard pool).
pve "pct exec 5700 -- bash -c 'cat > /etc/act_runner/config.yaml'" < "${CFG_DIR}/config-5700-heavy.yaml"
pve "pct exec 5701 -- bash -c 'cat > /etc/act_runner/config.yaml'" < "${CFG_DIR}/config-5701-standard.yaml"
# Restart both runners so the new configuration takes effect.
pve "pct exec 5700 -- systemctl restart act-runner && pct exec 5701 -- systemctl restart act-runner"
echo "Applied templates and restarted act-runner on 5700 and 5701 (${PVE})."

View File

@@ -0,0 +1,19 @@
#!/usr/bin/env bash
# Standard runner on CT 5701 (gitea-runner-1): ubuntu-latest / 22.04 / 20.04 for org-wide jobs.
# Gitea HTTP is on dev-vm — use LAN URL from inside 5701.
#
# Usage (repo root with GITEA_TOKEN in .env):
# bash scripts/dev-vm/bootstrap-gitea-act-runner-secondary-lan.sh
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# Optional LAN IP map; falls back to the hard-coded default below.
[[ -f "${PROJECT_ROOT}/config/ip-addresses.conf" ]] && source "${PROJECT_ROOT}/config/ip-addresses.conf"
# Gitea HTTP lives on the dev VM; CT 5701 reaches it over the LAN.
IP_DEV="${IP_DEV_VM:-192.168.11.59}"
# Pool selection for the shared bootstrap: CT 5701, LAN instance URL, and the
# standard ubuntu-latest/22.04/20.04 label set (all overridable via env).
export DEV_VM_VMID="${DEV_VM_VMID:-5701}"
export GITEA_RUNNER_INSTANCE="${GITEA_RUNNER_INSTANCE:-http://${IP_DEV}:3000}"
export RUNNER_LABELS="${RUNNER_LABELS:-ubuntu-latest:docker://docker.gitea.com/runner-images:ubuntu-latest,ubuntu-22.04:docker://docker.gitea.com/runner-images:ubuntu-22.04,ubuntu-20.04:docker://docker.gitea.com/runner-images:ubuntu-20.04}"
# Delegate to the shared registration script.
exec bash "${SCRIPT_DIR}/bootstrap-gitea-act-runner.sh"

View File

@@ -1,70 +1,14 @@
#!/usr/bin/env bash
# Site-wide Gitea Actions runner: use admin GITEA_TOKEN from root .env to fetch the
# instance registration token, then register act_runner on dev-vm (5700) with ubuntu-latest.
#
# Requires: SSH to Proxmox (BatchMode), CT 5700 running Gitea + act_runner under /opt/act_runner.
# Env (from .env via load-project-env): GITEA_TOKEN, optional GITEA_URL, RUNNER_LABELS,
# RUNNER_FORCE_REREGISTER=1 to drop .runner and re-register, DEV_VM_VMID (default 5700).
#
# Usage (repo root):
# bash scripts/dev-vm/bootstrap-gitea-act-runner-site-wide.sh
# RUNNER_FORCE_REREGISTER=1 bash scripts/dev-vm/bootstrap-gitea-act-runner-site-wide.sh
# Heavy runner on CT 5700 (dev-vm): registers ubuntu-latest-heavy for monorepo / validation workflows.
# See scripts/dev-vm/bootstrap-gitea-act-runner.sh
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# Load only root .env + IPs (avoid full load-project-env if another dotenv exits non-zero under set -e).
[[ -f "${PROJECT_ROOT}/.env" ]] && set -a && source "${PROJECT_ROOT}/.env" && set +a
[[ -f "${PROJECT_ROOT}/config/ip-addresses.conf" ]] && source "${PROJECT_ROOT}/config/ip-addresses.conf"
PROXMOX_HOST_R630_01="${PROXMOX_R630_01:-${PROXMOX_HOST_R630_01:-192.168.11.11}}"
PROXMOX_HOST_R630_02="${PROXMOX_R630_02:-${PROXMOX_HOST_R630_02:-192.168.11.12}}"
PROXMOX_HOST_ML110="${PROXMOX_ML110:-${PROXMOX_HOST_ML110:-192.168.11.10}}"
get_host_for_vmid() {
case "$1" in
5000|5700|7810|2201|2303|2401|6200|6201|10234|10237|5800|5801) echo "${PROXMOX_HOST_R630_02}";;
5400|5401|5402|5403|5410|5411|5412|5413|5414|5415|5416|5417|5418|5419|5420|5421|5422|5423|5424|5425|5440|5441|5442|5443|5444|5445|5446|5447|5448|5449|5450|5451|5452|5453|5454|5455|5470|5471|5472|5473|5474|5475|5476) echo "${PROXMOX_HOST_R630_02}";;
2101|10130|10150|10151|106|107|108|10000|10001|10020|10100|10101|10120|10233|10235) echo "${PROXMOX_HOST_R630_01}";;
2301|2400|1504|2503|2504|2505) echo "${PROXMOX_HOST_ML110}";;
*) echo "${PROXMOX_HOST_R630_01}";;
esac
}
GITEA_URL="${GITEA_URL:-https://gitea.d-bis.org}"
GITEA_URL="${GITEA_URL%/}"
VMID="${DEV_VM_VMID:-5700}"
RUNNER_LABELS="${RUNNER_LABELS:-ubuntu-latest}"
export DEV_VM_VMID="${DEV_VM_VMID:-5700}"
export GITEA_RUNNER_INSTANCE="${GITEA_RUNNER_INSTANCE:-http://127.0.0.1:3000}"
export RUNNER_LABELS="${RUNNER_LABELS:-ubuntu-latest-heavy:docker://docker.gitea.com/runner-images:ubuntu-latest}"
if [[ -z "${GITEA_TOKEN:-}" ]]; then
echo "ERROR: GITEA_TOKEN not set (root .env)." >&2
exit 1
fi
REG_JSON="$(curl -sS -H "Authorization: token ${GITEA_TOKEN}" \
"${GITEA_URL}/api/v1/admin/runners/registration-token")"
REG_TOKEN="$(printf '%s' "$REG_JSON" | sed -n 's/.*"token"[[:space:]]*:[[:space:]]*"\([^"]*\)".*/\1/p')"
if [[ -z "$REG_TOKEN" || "$REG_TOKEN" == "null" ]]; then
echo "ERROR: Could not get admin registration token. Response:" >&2
printf '%s\n' "$REG_JSON" >&2
echo "Ensure GITEA_TOKEN is an admin token with access to GET /api/v1/admin/runners/registration-token" >&2
exit 1
fi
PROXMOX_HOST="$(get_host_for_vmid "$VMID")"
echo "Using Proxmox host ${PROXMOX_HOST} for VMID ${VMID}."
if [[ "${RUNNER_FORCE_REREGISTER:-0}" == "1" ]]; then
ssh -o BatchMode=yes -o StrictHostKeyChecking=accept-new "root@${PROXMOX_HOST}" \
"pct exec ${VMID} -- bash -lc 'rm -f /opt/act_runner/.runner; systemctl stop act-runner 2>/dev/null || true'"
fi
# Pass registration token into the container without embedding raw secret in ssh argv (still reversible from b64).
TB64="$(printf '%s' "$REG_TOKEN" | base64 | tr -d '\n')"
ssh -o BatchMode=yes -o StrictHostKeyChecking=accept-new "root@${PROXMOX_HOST}" \
"pct exec ${VMID} -- bash -c 'export GITEA_RUNNER_REGISTRATION_TOKEN=\$(printf %s \"${TB64}\" | base64 -d); export RUNNER_LABELS=\"${RUNNER_LABELS}\"; bash -s'" \
< "${SCRIPT_DIR}/setup-act-runner.sh"
ssh -o BatchMode=yes -o StrictHostKeyChecking=accept-new "root@${PROXMOX_HOST}" \
"pct exec ${VMID} -- bash -s" < "${SCRIPT_DIR}/install-act-runner-systemd.sh"
echo "Done. Check Gitea Admin → Actions → Runners for an online runner with labels including: ${RUNNER_LABELS}"
exec bash "${SCRIPT_DIR}/bootstrap-gitea-act-runner.sh"

View File

@@ -0,0 +1,82 @@
#!/usr/bin/env bash
# Register/re-register Gitea act_runner on a Proxmox LXC (5700 heavy pool or 5701 standard pool).
#
# Env (required unless noted):
# GITEA_TOKEN — admin token (root .env)
# DEV_VM_VMID — default 5700
# GITEA_RUNNER_INSTANCE — URL passed to act_runner register --instance (5700: http://127.0.0.1:3000)
# RUNNER_LABELS — comma-separated labels (docker image refs); default set by wrappers
# Optional:
# GITEA_URL — default https://gitea.d-bis.org
# RUNNER_FORCE_REREGISTER=1 — remove .runner before register
#
# Usage:
# bash scripts/dev-vm/bootstrap-gitea-act-runner-site-wide.sh
# bash scripts/dev-vm/bootstrap-gitea-act-runner-secondary-lan.sh
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# Load secrets (GITEA_TOKEN) from root .env, then the LAN IP map when present.
[[ -f "${PROJECT_ROOT}/.env" ]] && set -a && source "${PROJECT_ROOT}/.env" && set +a
[[ -f "${PROJECT_ROOT}/config/ip-addresses.conf" ]] && source "${PROJECT_ROOT}/config/ip-addresses.conf"
# Proxmox node IPs; legacy PROXMOX_R630_0x names take precedence when set.
PROXMOX_HOST_R630_04="${PROXMOX_HOST_R630_04:-192.168.11.14}"
PROXMOX_HOST_R630_01="${PROXMOX_R630_01:-${PROXMOX_HOST_R630_01:-192.168.11.11}}"
PROXMOX_HOST_R630_02="${PROXMOX_R630_02:-${PROXMOX_HOST_R630_02:-192.168.11.12}}"
# Map a container VMID to the Proxmox node that hosts it: 5700/5701 live on
# r630-04, a known set on r630-02, everything else defaults to r630-01.
get_host_for_vmid() {
  local vmid="$1"
  case "$vmid" in
    5700 | 5701)
      printf '%s\n' "${PROXMOX_HOST_R630_04}"
      ;;
    5000 | 7810 | 2201 | 2303 | 2401 | 6200 | 6201 | 10234 | 10237 | 5800 | 5801)
      printf '%s\n' "${PROXMOX_HOST_R630_02}"
      ;;
    *)
      printf '%s\n' "${PROXMOX_HOST_R630_01}"
      ;;
  esac
}
GITEA_URL="${GITEA_URL:-https://gitea.d-bis.org}"
GITEA_URL="${GITEA_URL%/}"
VMID="${DEV_VM_VMID:-5700}"
GITEA_RUNNER_INSTANCE="${GITEA_RUNNER_INSTANCE:-http://127.0.0.1:3000}"
# Labels are intentionally not defaulted here — the wrapper scripts choose
# which pool (heavy vs standard) this registration belongs to.
if [[ -z "${RUNNER_LABELS:-}" ]]; then
  echo "ERROR: RUNNER_LABELS must be set (use a wrapper script or export explicitly)." >&2
  exit 1
fi
if [[ -z "${GITEA_TOKEN:-}" ]]; then
  echo "ERROR: GITEA_TOKEN not set (root .env)." >&2
  exit 1
fi
# Exchange the admin token for a runner registration token via the Admin API.
REG_JSON="$(curl -sS -H "Authorization: token ${GITEA_TOKEN}" \
  "${GITEA_URL}/api/v1/admin/runners/registration-token")"
# Extract "token" with sed so the script works on hosts without jq.
REG_TOKEN="$(printf '%s' "$REG_JSON" | sed -n 's/.*"token"[[:space:]]*:[[:space:]]*"\([^"]*\)".*/\1/p')"
if [[ -z "$REG_TOKEN" || "$REG_TOKEN" == "null" ]]; then
  echo "ERROR: Could not get admin registration token. Response:" >&2
  printf '%s\n' "$REG_JSON" >&2
  exit 1
fi
PROXMOX_HOST="$(get_host_for_vmid "$VMID")"
echo "Using Proxmox host ${PROXMOX_HOST} for VMID ${VMID}."
# Base64-wrap values that cross the ssh/pct argv boundary to dodge quoting
# issues (still reversible — this is not a secrecy measure).
TB64="$(printf '%s' "$REG_TOKEN" | base64 | tr -d '\n')"
LB64="$(printf '%s' "$RUNNER_LABELS" | base64 | tr -d '\n')"
IB64="$(printf '%s' "$GITEA_RUNNER_INSTANCE" | base64 | tr -d '\n')"
# Optional clean slate: drop the existing registration and stop the service.
if [[ "${RUNNER_FORCE_REREGISTER:-0}" == "1" ]]; then
  ssh -o BatchMode=yes -o StrictHostKeyChecking=accept-new "root@${PROXMOX_HOST}" \
    "pct exec ${VMID} -- bash -lc 'rm -f /opt/act_runner/.runner; systemctl stop act-runner 2>/dev/null || true'"
fi
# Run setup-act-runner.sh inside the CT with the decoded values exported.
ssh -o BatchMode=yes -o StrictHostKeyChecking=accept-new "root@${PROXMOX_HOST}" \
  "pct exec ${VMID} -- bash -c 'export GITEA_RUNNER_REGISTRATION_TOKEN=\$(printf %s \"${TB64}\" | base64 -d); export RUNNER_LABELS=\$(printf %s \"${LB64}\" | base64 -d); export INSTANCE=\$(printf %s \"${IB64}\" | base64 -d); bash -s'" \
  < "${SCRIPT_DIR}/setup-act-runner.sh"
# Install/refresh the systemd unit pointing at the chosen config + instance.
ACT_RUNNER_CONFIG="${ACT_RUNNER_CONFIG:-/etc/act_runner/config.yaml}"
ssh -o BatchMode=yes -o StrictHostKeyChecking=accept-new "root@${PROXMOX_HOST}" \
  "pct exec ${VMID} -- env ACT_RUNNER_CONFIG=${ACT_RUNNER_CONFIG} GITEA_ACTION_URL=${GITEA_RUNNER_INSTANCE} bash -s" \
  < "${SCRIPT_DIR}/install-act-runner-systemd.sh"
echo "Done. VMID ${VMID} — labels: ${RUNNER_LABELS}"

View File

@@ -0,0 +1,72 @@
#!/usr/bin/env bash
# Delete Gitea Actions runners that are **offline** (stale rows after re-register).
# Uses Admin API — requires GITEA_TOKEN (admin) in repo root .env.
#
# Usage (repo root):
#   bash scripts/dev-vm/delete-offline-gitea-actions-runners.sh --dry-run
#   bash scripts/dev-vm/delete-offline-gitea-actions-runners.sh --apply
#
# Over SSH (from a host with this repo and .env):
#   ssh user@workstation 'cd /path/to/proxmox && bash scripts/dev-vm/delete-offline-gitea-actions-runners.sh --apply'
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# Best-effort env load: set -a exports everything the .env defines.
[[ -f "${PROJECT_ROOT}/.env" ]] && set -a && source "${PROJECT_ROOT}/.env" && set +a
GITEA_URL="${GITEA_URL:-https://gitea.d-bis.org}"
# Strip a single trailing slash so URL joins below stay clean.
GITEA_URL="${GITEA_URL%/}"
MODE="${1:-}"
if [[ -z "${GITEA_TOKEN:-}" ]]; then
  echo "ERROR: GITEA_TOKEN not set (root .env)." >&2
  exit 1
fi
if [[ "$MODE" != "--dry-run" && "$MODE" != "--apply" ]]; then
  echo "Usage: $0 --dry-run | --apply" >&2
  exit 1
fi
# Hand the config to the embedded Python via the environment (no shell quoting issues).
export GITEA_URL GITEA_TOKEN
export DELETE_MODE="$MODE"
python3 <<'PY'
import json, os, sys, urllib.request
base = os.environ["GITEA_URL"].rstrip("/")
token = os.environ["GITEA_TOKEN"]
mode = os.environ["DELETE_MODE"]
req = urllib.request.Request(
    f"{base}/api/v1/admin/actions/runners?limit=100",
    headers={"Authorization": f"token {token}"},
)
with urllib.request.urlopen(req, timeout=60) as resp:
    data = json.loads(resp.read().decode())
runners = data.get("runners") or []
offline = [r for r in runners if r.get("status") == "offline"]
if not offline:
    print("No offline runners.")
    sys.exit(0)
for r in offline:
    print(f"offline id={r.get('id')} name={r.get('name')!r}")
if mode == "--dry-run":
    print("--dry-run: no DELETE issued.")
    sys.exit(0)
for r in offline:
    rid = r["id"]
    dreq = urllib.request.Request(
        f"{base}/api/v1/admin/actions/runners/{rid}",
        method="DELETE",
        headers={"Authorization": f"token {token}"},
    )
    with urllib.request.urlopen(dreq, timeout=60) as resp:
        print(f"DELETE runner id={rid} -> HTTP {resp.status}")
PY

View File

@@ -10,6 +10,8 @@ set -euo pipefail
WORK_DIR="${WORK_DIR:-/opt/act_runner}"
GITEA_ACTION_URL="${GITEA_ACTION_URL:-http://127.0.0.1:3000}"
ACT_RUNNER_CONFIG="${ACT_RUNNER_CONFIG:-/etc/act_runner/config.yaml}"
mkdir -p "$(dirname "${ACT_RUNNER_CONFIG}")"
if [ ! -x "${WORK_DIR}/act_runner" ]; then
echo "Missing ${WORK_DIR}/act_runner — run setup-act-runner.sh with GITEA_RUNNER_REGISTRATION_TOKEN first."
@@ -30,7 +32,7 @@ After=network.target
Type=simple
User=root
WorkingDirectory=${WORK_DIR}
ExecStart=${WORK_DIR}/act_runner daemon
ExecStart=${WORK_DIR}/act_runner daemon -c ${ACT_RUNNER_CONFIG}
Restart=on-failure
RestartSec=10
Environment=GITEA_ACTION_URL=${GITEA_ACTION_URL}

View File

@@ -7,10 +7,10 @@ set -euo pipefail
ACT_RUNNER_VERSION="${ACT_RUNNER_VERSION:-0.2.13}"
# Gitea root URL as seen from this host (same LXC as Gitea → 127.0.0.1)
INSTANCE="${INSTANCE:-http://127.0.0.1:3000}"
INSTANCE="${INSTANCE:-${GITEA_RUNNER_INSTANCE:-http://127.0.0.1:3000}}"
WORK_DIR="${WORK_DIR:-/opt/act_runner}"
TOKEN="${GITEA_RUNNER_REGISTRATION_TOKEN:-}"
# Workflows commonly use runs-on: ubuntu-latest; labels must match.
# Labels must match workflow runs-on (e.g. ubuntu-latest or ubuntu-latest-heavy); comma-separated.
RUNNER_LABELS="${RUNNER_LABELS:-ubuntu-latest}"
if [ -z "$TOKEN" ]; then

View File

@@ -44,6 +44,6 @@ ENDSSH
echo ""
echo "Verifier URL: http://${IP}/api/"
echo "If forge still fails with 'module and action required', use manual verification:"
echo " https://explorer.d-bis.org/address/<CONTRACT>#verify-contract"
echo " https://explorer.d-bis.org/addresses/<CONTRACT>#verify-contract"
echo ""
echo "Test: source smom-dbis-138/.env && ./scripts/verify-contracts-blockscout.sh"

0
scripts/health/check-rpc-vms-health.sh Normal file → Executable file
View File

View File

@@ -0,0 +1,246 @@
#!/usr/bin/env python3
"""
Batch mainnet cWUSDC to EI matrix wallets via canonical Multicall3 aggregate3.
Each inner call is transferFrom(deployer, recipient, amount) on the token, so
msg.sender is Multicall3. Requires a prior approve(deployer -> Multicall3) for
at least the sum of amounts in this run (one tx before batches).
Default Multicall3 (Ethereum): 0xcA11bde05977b3631167028862bE2a173976CA11
Examples:
python3 scripts/lib/ei_matrix_multicall3_cwusdc_batch.py --dry-run \\
--tsv reports/status/ei-matrix-cwusdc-topup-amounts.tsv
python3 scripts/lib/ei_matrix_multicall3_cwusdc_batch.py --execute \\
--tsv reports/status/ei-matrix-cwusdc-topup-amounts.tsv
Env: PRIVATE_KEY (or DEPLOYER_ADDRESS for dry-run calldata only), ETHEREUM_MAINNET_RPC,
CWUSDC_MAINNET (optional), MULTICALL3_MAINNET (optional), EI_MATRIX_MC_CHUNK (default 200).
"""
from __future__ import annotations
import argparse
import json
import os
import subprocess
import sys
import time
from pathlib import Path
MULTICALL3_MAINNET = "0xcA11bde05977b3631167028862bE2a173976CA11"
DEFAULT_CWUSDC = "0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a"
def _sh(cmd: list[str]) -> str:
r = subprocess.run(cmd, capture_output=True, text=True, check=False)
if r.returncode != 0:
raise RuntimeError(f"command failed: {' '.join(cmd)}\n{(r.stderr or r.stdout).strip()}")
return (r.stdout or "").strip()
def _deployer(pk: str | None) -> str:
if pk:
return _sh(["cast", "wallet", "address", "--private-key", pk])
env = (os.environ.get("DEPLOYER_ADDRESS") or os.environ.get("DEPLOYER") or "").strip()
if env:
return env
raise SystemExit("Set PRIVATE_KEY or DEPLOYER_ADDRESS for transferFrom(from=...)")
def _cast_calldata_transfer_from(from_addr: str, to_addr: str, amount: int) -> str:
    """ABI-encode transferFrom(from, to, amount) via `cast calldata`, 0x-prefixed."""
    encoded = _sh(["cast", "calldata", "transferFrom(address,address,uint256)", from_addr, to_addr, str(amount)])
    if encoded.startswith("0x"):
        return encoded
    return "0x" + encoded
def _cast_calldata_aggregate3(calls_tuple_str: str) -> str:
    """ABI-encode Multicall3.aggregate3 for a pre-rendered (address,bool,bytes)[] literal."""
    encoded = _sh(["cast", "calldata", "aggregate3((address,bool,bytes)[])", calls_tuple_str])
    if encoded.startswith("0x"):
        return encoded
    return "0x" + encoded
def _estimate_gas(from_addr: str, multicall: str, data: str, rpc_url: str) -> int:
    """eth_estimateGas for a call to the Multicall3 contract; returns gas units.

    The call object is passed as a single JSON argument to `cast rpc`, and the
    hex quantity in the reply is parsed base-16.
    """
    call_obj = json.dumps({"from": from_addr, "to": multicall, "data": data})
    reply = _sh(["cast", "rpc", "eth_estimateGas", call_obj, "--rpc-url", rpc_url])
    return int(reply, 16)
def _allowance(token: str, owner: str, spender: str, rpc_url: str) -> int:
    """Read ERC-20 allowance(owner, spender) via `cast call` and parse it as an int."""
    reply = _sh(["cast", "call", token, "allowance(address,address)(uint256)", owner, spender, "--rpc-url", rpc_url])
    # cast may append an annotation after the value; keep only the first field.
    first_field = reply.split()[0]
    return int(first_field, 0)
def _send_cast_send(to: str, sig: str, args: list[str], rpc_url: str, pk: str, gas_limit: str | None) -> None:
    """Broadcast one `cast send <to> <sig> <args...>` transaction.

    Exits the whole process with cast's return code on failure (caller does not
    continue after a failed broadcast).
    """
    cmd = ["cast", "send", to, sig, *args, "--rpc-url", rpc_url, "--private-key", pk]
    if gas_limit:
        cmd.extend(["--gas-limit", gas_limit])
    # Log only the first 8 tokens of the command (the private key comes later in
    # the argv, so it is not echoed for short arg lists).
    # NOTE(review): the empty-string print arguments look like stripped glyphs
    # (e.g. "→"/"…") from a lossy copy — confirm against the original encoding.
    print("", " ".join(cmd[:8]), "", file=sys.stderr)
    r = subprocess.run(cmd, env={**os.environ})
    if r.returncode != 0:
        sys.exit(r.returncode)
def _send_raw_calldata(to: str, data: str, rpc_url: str, pk: str, gas_limit: str) -> None:
    """Broadcast pre-encoded calldata to `to` with an explicit gas limit.

    Exits the whole process with cast's return code on failure.
    """
    cmd = ["cast", "send", to, data, "--rpc-url", rpc_url, "--private-key", pk, "--gas-limit", gas_limit]
    # Abbreviated target address in the log; calldata and key are not echoed.
    # NOTE(review): the trailing empty string looks like a stripped "…" glyph —
    # confirm against the original file encoding.
    print("→ cast send", to[:10] + "", "--gas-limit", gas_limit, file=sys.stderr)
    r = subprocess.run(cmd, env={**os.environ})
    if r.returncode != 0:
        sys.exit(r.returncode)
def main() -> int:
    """Batch cWUSDC transfers to EI-matrix wallets through Multicall3 aggregate3.

    Reads `linearIndex TAB amountRaw` rows from --tsv, resolves addresses from
    the wallet grid, chunks them into aggregate3 batches, and either prints the
    commands (--dry-run) or broadcasts them (--execute). Returns a process exit
    code (0 ok, 1 allowance failure, 2 usage error).
    """
    ap = argparse.ArgumentParser()
    ap.add_argument("--tsv", required=True, help="linearIndex TAB amountRaw")
    ap.add_argument("--grid", default="config/pmm-soak-wallet-grid.json")
    ap.add_argument("--chunk-size", type=int, default=int(os.environ.get("EI_MATRIX_MC_CHUNK", "200")))
    ap.add_argument("--multicall", default=os.environ.get("MULTICALL3_MAINNET", MULTICALL3_MAINNET))
    ap.add_argument("--token", default=os.environ.get("CWUSDC_MAINNET", DEFAULT_CWUSDC))
    ap.add_argument("--rpc-url", default=os.environ.get("ETHEREUM_MAINNET_RPC") or os.environ.get("RPC_URL_1") or "")
    ap.add_argument("--dry-run", action="store_true")
    ap.add_argument("--execute", action="store_true")
    ap.add_argument("--gas-headroom-bps", type=int, default=13000)
    ap.add_argument("--min-gas-per-batch", type=int, default=500_000)
    ap.add_argument("--start-batch", type=int, default=0)
    ap.add_argument("--max-batches", type=int, default=0, help="0 = all remaining")
    ap.add_argument("--progress-file", default="reports/status/ei-matrix-multicall3-batch-progress.txt")
    args = ap.parse_args()
    if not args.rpc_url:
        print("Need --rpc-url or ETHEREUM_MAINNET_RPC / RPC_URL_1", file=sys.stderr)
        return 2
    # Exactly one mode must be chosen (== catches both-set and neither-set).
    if args.dry_run == args.execute:
        print("Specify exactly one of --dry-run or --execute", file=sys.stderr)
        return 2
    # Repo root is two directories above this file (scripts/lib/).
    repo = Path(__file__).resolve().parents[2]
    grid_path = repo / args.grid if not os.path.isabs(args.grid) else Path(args.grid)
    tsv_path = repo / args.tsv if not os.path.isabs(args.tsv) else Path(args.tsv)
    wallets = json.loads(grid_path.read_text(encoding="utf-8"))["wallets"]
    # Parse TSV rows: strip '#' comments, accept tab- or whitespace-separated,
    # skip non-positive amounts.
    rows: list[tuple[str, int]] = []
    for line in tsv_path.read_text(encoding="utf-8").splitlines():
        line = line.split("#", 1)[0].strip()
        if not line:
            continue
        parts = line.split("\t")
        if len(parts) < 2:
            parts = line.split()
        if len(parts) < 2:
            continue
        idx = int(parts[0])
        amt = int(parts[1])
        if amt <= 0:
            continue
        addr = wallets[idx]["address"]
        rows.append((addr, amt))
    if not rows:
        print("No positive-amount rows in TSV.", file=sys.stderr)
        return 0
    pk = os.environ.get("PRIVATE_KEY", "").strip() or None
    if args.execute and not pk:
        print("PRIVATE_KEY required for --execute", file=sys.stderr)
        return 2
    deployer = _deployer(pk)
    mc = args.multicall
    token = args.token
    # Chunk the rows, then narrow to the requested [start-batch, +max-batches) window.
    all_chunks: list[list[tuple[str, int]]] = []
    for i in range(0, len(rows), args.chunk_size):
        all_chunks.append(rows[i : i + args.chunk_size])
    start_b = max(0, args.start_batch)
    if args.max_batches > 0:
        end_b = min(len(all_chunks), start_b + args.max_batches)
    else:
        end_b = len(all_chunks)
    chunks = all_chunks[start_b:end_b]
    # Total raw tokens needed by this run; also the allowance target below.
    budget_raw = sum(amt for c in chunks for _, amt in c)
    if not chunks:
        print("No batches in range.", file=sys.stderr)
        return 0
    print(
        f"batches {start_b}..{end_b - 1} of {len(all_chunks)} transfers={sum(len(c) for c in chunks)} "
        f"budget_raw={budget_raw}",
        file=sys.stderr,
    )
    if args.dry_run:
        # Dry run: report allowance gap, then print a copy-pastable approve +
        # sample batch-0 cast command; no transaction is broadcast.
        try:
            allow = _allowance(token, deployer, mc, args.rpc_url)
        except Exception:
            allow = 0
        print(f"# allowance Multicall3: {allow} budget_this_run: {budget_raw}", file=sys.stderr)
        if allow < budget_raw:
            print(
                f"cast send {token} \"approve(address,uint256)\" {mc} {budget_raw} \\\n"
                f" --rpc-url \"$ETHEREUM_MAINNET_RPC\" --private-key \"$PRIVATE_KEY\" --gas-limit 120000",
                file=sys.stderr,
            )
        chunk = chunks[0]
        parts = []
        for addr, amt in chunk:
            data = _cast_calldata_transfer_from(deployer, addr, amt)
            # allowFailure=false: any failed inner transfer reverts the batch.
            parts.append(f"({token},false,{data})")
        tuple_str = "[" + ",".join(parts) + "]"
        calldata = _cast_calldata_aggregate3(tuple_str)
        gl = args.min_gas_per_batch + 65_000 * len(chunk)
        sample_hex = repo / "reports/status/ei-matrix-multicall3-dryrun-sample-batch.hex"
        sample_hex.write_text(calldata + "\n", encoding="utf-8")
        rel = os.path.relpath(str(sample_hex), str(repo))
        print(f"\n# sample batch 0 n={len(chunk)} gas_limit~{gl}", file=sys.stderr)
        print(f"# calldata written: {rel}", file=sys.stderr)
        print(
            f"cast send {mc} $(cat {rel}) --rpc-url \"$ETHEREUM_MAINNET_RPC\" \\\n"
            f" --private-key \"$PRIVATE_KEY\" --gas-limit {gl}"
        )
        print(f"\n# … {len(chunks)} batches total (chunk_size={args.chunk_size})", file=sys.stderr)
        return 0
    # --execute path from here on; pk was validated above.
    assert pk is not None
    # Ensure Multicall3 has enough allowance for the whole run (one approve tx).
    allow = _allowance(token, deployer, mc, args.rpc_url)
    if allow < budget_raw:
        print(f"Approving Multicall3 for {budget_raw} raw (was {allow})", file=sys.stderr)
        _send_cast_send(token, "approve(address,uint256)", [mc, str(budget_raw)], args.rpc_url, pk, "120000")
        time.sleep(2)
        allow2 = _allowance(token, deployer, mc, args.rpc_url)
        if allow2 < budget_raw:
            print(f"Allowance insufficient: {allow2} < {budget_raw}", file=sys.stderr)
            return 1
    progress_path = repo / args.progress_file
    progress_path.parent.mkdir(parents=True, exist_ok=True)
    for bi, chunk in enumerate(chunks):
        global_batch_idx = start_b + bi
        parts = []
        for addr, amt in chunk:
            data = _cast_calldata_transfer_from(deployer, addr, amt)
            parts.append(f"({token},false,{data})")
        tuple_str = "[" + ",".join(parts) + "]"
        calldata = _cast_calldata_aggregate3(tuple_str)
        gas_est = args.min_gas_per_batch
        try:
            gas_est = _estimate_gas(deployer, mc, calldata, args.rpc_url)
        except Exception as e:
            # Fallback heuristic when estimateGas fails: per-transfer cost + base.
            print(f"[warn] estimateGas failed, fallback: {e}", file=sys.stderr)
            gas_est = 70_000 * len(chunk) + 400_000
        # Apply headroom in basis points, rounding up, floored at the batch minimum.
        gas_with_headroom = max(args.min_gas_per_batch, (gas_est * args.gas_headroom_bps + 9999) // 10000)
        print(f"Batch {global_batch_idx}: n={len(chunk)} estimate={gas_est} limit={gas_with_headroom}", file=sys.stderr)
        _send_raw_calldata(mc, calldata, args.rpc_url, pk, str(gas_with_headroom))
        # Record the last completed batch so an interrupted run can resume
        # with --start-batch.
        progress_path.write_text(f"{global_batch_idx}\n", encoding="utf-8")
        time.sleep(1)
    print("Done.", file=sys.stderr)
    return 0
# Script entry point: propagate main()'s return value as the process exit code.
if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,299 @@
#!/usr/bin/env python3
"""
On-chain readiness audit for EI matrix wallets (config/pmm-soak-wallet-grid.json).
Queries ERC-20 balanceOf for each address on one or both chains:
- Ethereum mainnet cWUSDC (default from env CWUSDC_MAINNET)
- Chain 138 cUSDC (default canonical CompliantUSDC)
Use for strength profiling: segment by class/lpbca via --report-by-class, find gaps vs thresholds.
Environment (optional defaults for thresholds):
EI_MATRIX_AUDIT_MIN_MAINNET_RAW, EI_MATRIX_AUDIT_MIN_138_RAW, EI_MATRIX_AUDIT_WORKERS
Examples:
python3 scripts/lib/ei_matrix_onchain_readiness_audit.py --mainnet-only --min-mainnet-raw 1
python3 scripts/lib/ei_matrix_onchain_readiness_audit.py --both \\
--shard-size 400 --min-mainnet-raw 12000000 --min-138-raw 0 --workers 3 \\
--report-by-class --json-out reports/status/ei-matrix-readiness-audit-latest.json
"""
from __future__ import annotations
import argparse
import json
import os
import sys
import urllib.error
import urllib.request
from concurrent.futures import ThreadPoolExecutor, as_completed
from pathlib import Path
# balanceOf(address) selector
BALANCE_OF = bytes.fromhex("70a08231")
ADDR_PAD = 12 * b"\x00"
def encode_balance_of_call(addr: str) -> str:
    """ABI-encode an eth_call payload for balanceOf(addr).

    Layout: 4-byte selector (0x70a08231) + 12 zero bytes + 20-byte address,
    returned as a lowercase 0x-hex string. Raises ValueError for an address
    that is not exactly 20 bytes of hex.
    """
    hexpart = addr.lower().removeprefix("0x")
    if len(hexpart) != 40:
        raise ValueError(f"bad address {addr}")
    payload = bytes.fromhex("70a08231") + bytes(12) + bytes.fromhex(hexpart)
    return "0x" + payload.hex()
def rpc_eth_call(to: str, data: str, rpc_url: str, timeout: float = 30.0) -> str:
    """POST a single JSON-RPC eth_call (latest block) and return the hex result.

    Raises RuntimeError when the node answers with an ``error`` member; an
    empty/missing ``result`` is normalized to "0x0".
    """
    envelope = {
        "jsonrpc": "2.0",
        "id": 1,
        "method": "eth_call",
        "params": [{"to": to, "data": data}, "latest"],
    }
    request = urllib.request.Request(
        rpc_url,
        data=json.dumps(envelope).encode(),
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    with urllib.request.urlopen(request, timeout=timeout) as resp:
        reply = json.loads(resp.read().decode())
    if "error" in reply:
        raise RuntimeError(str(reply["error"]))
    return reply.get("result") or "0x0"
def hex_to_int(h: str) -> int:
    """Parse a hex quantity string ('0x…'); empty or bare '0x' counts as zero."""
    stripped = h.strip()
    if not stripped or stripped == "0x":
        return 0
    return int(stripped, 16)
def collect_rows_for_slice(
    slice_items: list[tuple[int, dict]],
    *,
    do_main: bool,
    do_138: bool,
    mainnet_rpc: str,
    chain138_rpc: str,
    mainnet_token: str,
    chain138_cusdc: str,
    workers: int,
) -> list[dict]:
    """Query balanceOf for every (linearIndex, wallet) pair in the slice.

    Runs the per-wallet RPC lookups in a thread pool; rows come back in
    completion order (the caller sorts by linearIndex afterwards). Balances are
    None for a chain that was not queried.
    """
    def fetch_one(item: tuple[int, dict]) -> tuple[int, dict, int, int]:
        # One wallet: up to two eth_call balance reads (mainnet, chain 138).
        idx, w = item
        addr = w["address"]
        mbal, bbal = 0, 0
        if do_main:
            calldata = encode_balance_of_call(addr)
            res = rpc_eth_call(mainnet_token.lower(), calldata, mainnet_rpc)
            mbal = hex_to_int(res)
        if do_138:
            calldata = encode_balance_of_call(addr)
            res = rpc_eth_call(chain138_cusdc.lower(), calldata, chain138_rpc)
            bbal = hex_to_int(res)
        return idx, w, mbal, bbal
    rows: list[dict] = []
    with ThreadPoolExecutor(max_workers=max(1, workers)) as ex:
        futs = [ex.submit(fetch_one, it) for it in slice_items]
        for fut in as_completed(futs):
            # .result() re-raises any RPC failure from the worker thread.
            idx, w, mbal, bbal = fut.result()
            cls = int(w.get("class", 0))
            row = {
                "linearIndex": idx,
                "address": w["address"],
                "cellId": w.get("cellId"),
                "class": cls,
                "mainnetCwusdcRaw": mbal if do_main else None,
                "chain138CusdcRaw": bbal if do_138 else None,
            }
            rows.append(row)
    return rows
def write_indices(path: Path, indices: list[int]) -> None:
    """Write one index per line (trailing newline unless empty), creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    lines = [str(i) for i in indices]
    text = "\n".join(lines) + "\n" if lines else ""
    path.write_text(text, encoding="utf-8")
def main() -> int:
    """Audit EI-matrix wallet balances on mainnet and/or chain 138.

    Loads the wallet grid, reads balances (optionally sharded, threaded),
    segments by class, reports gaps below the configured minimums, and writes
    optional JSON / gap-index artifacts. Exit code: 0 = all wallets at or
    above minimums, 1 = gaps found, 2 = usage error.
    """
    ap = argparse.ArgumentParser()
    ap.add_argument("--grid", default="config/pmm-soak-wallet-grid.json")
    ap.add_argument("--offset", type=int, default=0)
    ap.add_argument("--limit", type=int, default=0, help="0 = all from offset to grid end")
    ap.add_argument(
        "--shard-size",
        type=int,
        default=int(os.environ.get("EI_MATRIX_AUDIT_SHARD_SIZE", "0")),
        help="If >0, query in sequential shards of this size (eases RPC load). 0 = single batch.",
    )
    ap.add_argument("--workers", type=int, default=int(os.environ.get("EI_MATRIX_AUDIT_WORKERS", "4")))
    ap.add_argument("--mainnet-only", action="store_true")
    ap.add_argument("--chain138-only", action="store_true")
    ap.add_argument("--both", action="store_true")
    ap.add_argument("--mainnet-rpc", default=os.environ.get("ETHEREUM_MAINNET_RPC") or os.environ.get("RPC_URL_1") or "")
    ap.add_argument("--chain138-rpc", default=os.environ.get("RPC_URL_138") or os.environ.get("CHAIN138_PUBLIC_RPC_URL") or "")
    ap.add_argument("--mainnet-token", default=os.environ.get("CWUSDC_MAINNET", "0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a"))
    ap.add_argument(
        "--chain138-cusdc",
        default=os.environ.get("CUSDC_CHAIN138", "0xf22258f57794CC8E06237084b353Ab30fFfa640b"),
    )
    ap.add_argument(
        "--min-mainnet-raw",
        type=int,
        default=int(os.environ.get("EI_MATRIX_AUDIT_MIN_MAINNET_RAW", "0")),
        help="fail wallets strictly below this (mainnet); env EI_MATRIX_AUDIT_MIN_MAINNET_RAW",
    )
    ap.add_argument(
        "--min-138-raw",
        type=int,
        default=int(os.environ.get("EI_MATRIX_AUDIT_MIN_138_RAW", "0")),
        help="fail wallets strictly below this (138); env EI_MATRIX_AUDIT_MIN_138_RAW",
    )
    ap.add_argument("--report-by-class", action="store_true", help="aggregate counts by matrix class 0..5")
    ap.add_argument("--json-out", default="", help="write full per-wallet rows + summary")
    ap.add_argument(
        "--gaps-mainnet-out",
        default="",
        help="write newline-separated linear indices below mainnet minimum (only if mainnet queried)",
    )
    ap.add_argument(
        "--gaps-138-out",
        default="",
        help="write newline-separated linear indices below 138 minimum (only if 138 queried)",
    )
    ap.add_argument("--max-list", type=int, default=200, help="max gap indices to print on stderr")
    args = ap.parse_args()
    # Repo root is two directories above this file (scripts/lib/).
    repo = Path(__file__).resolve().parents[2]
    grid_path = repo / args.grid if not os.path.isabs(args.grid) else Path(args.grid)
    data = json.loads(grid_path.read_text(encoding="utf-8"))
    wallets: list[dict] = data["wallets"]
    n = len(wallets)
    # Clamp the scan window [scan_start, scan_end) to the grid bounds.
    scan_end = n if args.limit <= 0 else min(n, args.offset + args.limit)
    scan_start = args.offset
    if scan_start < 0 or scan_start > n:
        print("Invalid --offset", file=sys.stderr)
        return 2
    if scan_end < scan_start:
        print("Invalid --limit / range", file=sys.stderr)
        return 2
    do_main = args.mainnet_only or args.both
    do_138 = args.chain138_only or args.both
    if not do_main and not do_138:
        print("Specify --mainnet-only, --chain138-only, or --both", file=sys.stderr)
        return 2
    if do_main and not args.mainnet_rpc:
        print("Need --mainnet-rpc or ETHEREUM_MAINNET_RPC / RPC_URL_1", file=sys.stderr)
        return 2
    if do_138 and not args.chain138_rpc:
        print("Need --chain138-rpc or RPC_URL_138", file=sys.stderr)
        return 2
    shard = max(0, args.shard_size)
    rows: list[dict] = []
    if shard <= 0:
        # Single batch: the whole window goes to the thread pool at once.
        slice_items = list(enumerate(wallets[scan_start:scan_end], start=scan_start))
        rows = collect_rows_for_slice(
            slice_items,
            do_main=do_main,
            do_138=do_138,
            mainnet_rpc=args.mainnet_rpc,
            chain138_rpc=args.chain138_rpc,
            mainnet_token=args.mainnet_token,
            chain138_cusdc=args.chain138_cusdc,
            workers=args.workers,
        )
    else:
        # Sharded: sequential slices keep the outstanding RPC load bounded.
        for start in range(scan_start, scan_end, shard):
            chunk_end = min(scan_end, start + shard)
            slice_items = list(enumerate(wallets[start:chunk_end], start=start))
            print(f"Shard {start}..{chunk_end} ({len(slice_items)} wallets)", file=sys.stderr)
            rows.extend(
                collect_rows_for_slice(
                    slice_items,
                    do_main=do_main,
                    do_138=do_138,
                    mainnet_rpc=args.mainnet_rpc,
                    chain138_rpc=args.chain138_rpc,
                    mainnet_token=args.mainnet_token,
                    chain138_cusdc=args.chain138_cusdc,
                    workers=args.workers,
                )
            )
    # Restore deterministic order (threads return in completion order).
    rows.sort(key=lambda r: r["linearIndex"])
    # Per-class counters (classes 0..5); only filled when --report-by-class.
    by_class: dict[int, dict] = {i: {"n": 0, "mainnet_below": 0, "138_below": 0} for i in range(6)}
    if args.report_by_class:
        for r in rows:
            cls = int(r.get("class", 0))
            if cls not in by_class:
                continue
            by_class[cls]["n"] += 1
            if do_main and r["mainnetCwusdcRaw"] < args.min_mainnet_raw:
                by_class[cls]["mainnet_below"] += 1
            if do_138 and r["chain138CusdcRaw"] < args.min_138_raw:
                by_class[cls]["138_below"] += 1
    # Wallets strictly below the per-chain minimums.
    gaps_main: list[int] = []
    gaps_138: list[int] = []
    for r in rows:
        if do_main and r["mainnetCwusdcRaw"] < args.min_mainnet_raw:
            gaps_main.append(r["linearIndex"])
        if do_138 and r["chain138CusdcRaw"] < args.min_138_raw:
            gaps_138.append(r["linearIndex"])
    summary = {
        "gridPath": str(grid_path),
        "slice": {"offset": scan_start, "endExclusive": scan_end, "count": len(rows)},
        "shardSize": shard if shard > 0 else None,
        "mainnet": {
            "token": args.mainnet_token if do_main else None,
            # Truncate long RPC URLs in the summary (avoids leaking full keyed URLs).
            "rpc": args.mainnet_rpc[:48] + "" if do_main and len(args.mainnet_rpc) > 48 else args.mainnet_rpc,
            "minRaw": args.min_mainnet_raw,
            "belowMin": len(gaps_main),
        },
        "chain138": {
            "token": args.chain138_cusdc if do_138 else None,
            "minRaw": args.min_138_raw,
            "belowMin": len(gaps_138),
        },
        "byClass": by_class if args.report_by_class else None,
    }
    print(json.dumps(summary, indent=2))
    if gaps_main:
        print(
            f"\nMainnet cWUSDC below min ({args.min_mainnet_raw}) — {len(gaps_main)} wallets "
            f"(first {args.max_list} indices):",
            file=sys.stderr,
        )
        print(", ".join(str(x) for x in gaps_main[: args.max_list]), file=sys.stderr)
    if gaps_138:
        print(
            f"\nChain 138 cUSDC below min ({args.min_138_raw}) — {len(gaps_138)} wallets "
            f"(first {args.max_list} indices):",
            file=sys.stderr,
        )
        print(", ".join(str(x) for x in gaps_138[: args.max_list]), file=sys.stderr)
    if args.json_out:
        outp = repo / args.json_out if not os.path.isabs(args.json_out) else Path(args.json_out)
        outp.parent.mkdir(parents=True, exist_ok=True)
        outp.write_text(json.dumps({"summary": summary, "rows": rows}, indent=2), encoding="utf-8")
        print(f"\nWrote {outp}", file=sys.stderr)
    if do_main and args.gaps_mainnet_out:
        gp = repo / args.gaps_mainnet_out if not os.path.isabs(args.gaps_mainnet_out) else Path(args.gaps_mainnet_out)
        write_indices(gp, gaps_main)
        print(f"Wrote mainnet gap indices ({len(gaps_main)}): {gp}", file=sys.stderr)
    if do_138 and args.gaps_138_out:
        gp = repo / args.gaps_138_out if not os.path.isabs(args.gaps_138_out) else Path(args.gaps_138_out)
        write_indices(gp, gaps_138)
        print(f"Wrote 138 gap indices ({len(gaps_138)}): {gp}", file=sys.stderr)
    # Non-zero exit when any wallet missed its threshold (usable in CI gating).
    fail = bool(gaps_main or gaps_138)
    return 1 if fail else 0
# Script entry point: propagate main()'s return value as the process exit code.
if __name__ == "__main__":
    raise SystemExit(main())

22
scripts/lib/find-repo-files.sh Executable file
View File

@@ -0,0 +1,22 @@
#!/usr/bin/env bash
# List files under the repo root without descending into node_modules or .git
# (avoids hanging on huge dependency trees).
#
# Usage:
#   scripts/lib/find-repo-files.sh
#   scripts/lib/find-repo-files.sh -name '*.md'
#   scripts/lib/find-repo-files.sh \( -name '*.ts' -o -name '*.tsx' \)
#
# Example — search text without scanning node_modules (prefer narrowing extensions;
# piping every file to grep can still be slow on very large trees):
#   scripts/lib/find-repo-files.sh -name '*.md' | xargs grep -l 'pattern' 2>/dev/null
#   scripts/lib/find-repo-files.sh \( -name '*.md' -o -name '*.sh' -o -name '*.ts' -o -name '*.json' \) \\
#     | xargs grep -l 'pattern' 2>/dev/null
set -euo pipefail
# Repo root = two levels above this script (scripts/lib/ -> repo root).
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
# ${1+"$@"} expands to nothing when no extra find(1) predicates were given
# (safe under `set -u` even on old bash), otherwise forwards them verbatim;
# the prune expression skips node_modules/.git before -type f applies.
# shellcheck disable=SC2086
exec find "$ROOT" \
  \( -name node_modules -o -name .git \) -prune -o \
  -type f ${1+"$@"} -print

View File

@@ -172,12 +172,13 @@ export DBIS_CORE_DIR="${DBIS_CORE_DIR:-${PROJECT_ROOT}/dbis_core}"
get_host_for_vmid() {
local vmid="$1"
case "$vmid" in
7800|7801|7802|7803|7804|7805|7806) echo "${PROXMOX_HOST_R630_01}";;
7800|7801|7802|7803|7805|7806) echo "${PROXMOX_HOST_R630_01}";;
7804) echo "${PROXMOX_HOST_R630_04:-192.168.11.14}";;
10130|10150|10151|106|107|108|10000|10001|10020|10100|10101|10120|10203|10233|10235) echo "${PROXMOX_HOST_R630_01}";;
1000|1001|1002|1500|1501|1502|2101|2103) echo "${PROXMOX_HOST_R630_01}";;
1003|1004|1503|1504|1505|1506|1507|1509|1510|2102|2301|2304|2400|2402|2403) echo "${PROXMOX_HOST_R630_03}";;
1508) echo "${PROXMOX_HOST_R630_04}";;
5700) echo "${PROXMOX_HOST_R630_04}";;
5700|5701) echo "${PROXMOX_HOST_R630_04:-192.168.11.14}";;
5000|7810|2201|2303|2305|2306|2307|2308|2401|6200|6201|6202|6203|6204|6205|10234|10237|5800|5801) echo "${PROXMOX_HOST_R630_02}";;
2420|2430|2440|2460|2470|2480) echo "${PROXMOX_HOST_R630_01}";;
5400|5401|5402|5403|5410|5411|5412|5413|5414|5415|5416|5417|5418|5419|5420|5421|5422|5423|5424|5425|5440|5441|5442|5443|5444|5445|5446|5447|5448|5449|5450|5451|5452|5453|5454|5455|5470|5471|5472|5473|5474|5475|5476) echo "${PROXMOX_HOST_R630_02}";;

94
scripts/lib/mev-protection.sh Executable file
View File

@@ -0,0 +1,94 @@
#!/usr/bin/env bash
# Shared protected-broadcast helpers for Engine X Mainnet actions.
#
# Source after scripts/lib/load-project-env.sh. Reads use the normal public RPC;
# sensitive writes should go through mev_cast_send so operators cannot
# accidentally broadcast quote-defense swaps through the public mempool.
mev_private_rpc_key() {
  # Echo the NAME of the first configured private/protected RPC env var, in
  # priority order; return 1 when none of them is set.
  local candidate
  for candidate in ENGINE_X_PRIVATE_TX_RPC MEV_BLOCKER_RPC_URL FLASHBOTS_RPC_URL BLOXROUTE_RPC_URL BLINK_RPC_URL; do
    # ${!candidate-} = indirect expansion; empty when unset.
    if [[ -n "${!candidate-}" ]]; then
      printf '%s\n' "${candidate}"
      return 0
    fi
  done
  return 1
}
mev_has_private_rpc() {
  # Boolean wrapper: succeed iff some protected RPC env var is configured.
  if mev_private_rpc_key >/dev/null 2>&1; then
    return 0
  fi
  return 1
}
mev_write_rpc_label() {
  # Human-readable label for whichever RPC writes would go through.
  local selected
  if ! selected="$(mev_private_rpc_key)"; then
    printf '%s\n' "public-mainnet-rpc"
    return 0
  fi
  case "${selected}" in
    ENGINE_X_PRIVATE_TX_RPC) printf '%s\n' "${ENGINE_X_PRIVATE_TX_RPC_LABEL:-engine-x-private-tx-rpc}" ;;
    MEV_BLOCKER_RPC_URL) printf '%s\n' "mev-blocker" ;;
    FLASHBOTS_RPC_URL) printf '%s\n' "flashbots" ;;
    BLOXROUTE_RPC_URL) printf '%s\n' "bloxroute" ;;
    BLINK_RPC_URL) printf '%s\n' "blink" ;;
    *) printf '%s\n' "${selected}" ;;
  esac
}
mev_write_rpc_url() {
  # Echo the RPC URL that writes must use: the protected endpoint when one is
  # configured; otherwise the public mainnet RPC, but only when policy allows
  # (protection disabled or public broadcast explicitly opted in). Fail otherwise.
  local selected
  if selected="$(mev_private_rpc_key)"; then
    printf '%s\n' "${!selected}"
    return 0
  fi
  if [[ "${ENGINE_X_MEV_PROTECTION:-1}" == "1" && "${ENGINE_X_ALLOW_PUBLIC_BROADCAST:-0}" != "1" ]]; then
    return 1
  fi
  [[ -n "${ETHEREUM_MAINNET_RPC:-}" ]] || return 1
  printf '%s\n' "${ETHEREUM_MAINNET_RPC}"
}
mev_require_private_for_action() {
  # Gate a sensitive action on having a protected RPC (or an explicit override).
  # Returns 0 when broadcast may proceed, 1 when the operator must configure one.
  local action="${1:-engine-x-sensitive-action}"
  # Override 1: protection globally disabled — warn but allow.
  if [[ "${ENGINE_X_MEV_PROTECTION:-1}" != "1" ]]; then
    echo "WARN: MEV protection disabled for ${action} (ENGINE_X_MEV_PROTECTION=0)." >&2
    return 0
  fi
  if mev_has_private_rpc; then
    return 0
  fi
  # Override 2: deliberate public-mempool broadcast (canary runs) — warn but allow.
  if [[ "${ENGINE_X_ALLOW_PUBLIC_BROADCAST:-0}" == "1" ]]; then
    echo "WARN: public broadcast explicitly allowed for ${action} (ENGINE_X_ALLOW_PUBLIC_BROADCAST=1)." >&2
    return 0
  fi
  # Unquoted EOF heredoc: ${action} expands inside the message body.
  cat >&2 <<EOF
MEV protected broadcast is required for ${action}, but no private/protected RPC is configured.
Set one of:
  ENGINE_X_PRIVATE_TX_RPC
  MEV_BLOCKER_RPC_URL
  FLASHBOTS_RPC_URL
  BLOXROUTE_RPC_URL
  BLINK_RPC_URL
For an intentional public-mempool canary only, set ENGINE_X_ALLOW_PUBLIC_BROADCAST=1.
EOF
  return 1
}
mev_cast_send() {
  # `cast send` wrapper that resolves the write RPC through mev_write_rpc_url
  # first, so sensitive txs never silently hit the public mempool.
  local target="${1:?target is required}"
  shift
  local rpc_url
  if ! rpc_url="$(mev_write_rpc_url)"; then
    echo "Unable to choose a write RPC; protected RPC required or ETHEREUM_MAINNET_RPC missing." >&2
    return 1
  fi
  cast send "${target}" "$@" --private-key "${PRIVATE_KEY:?PRIVATE_KEY is required}" --rpc-url "${rpc_url}"
}

View File

@@ -0,0 +1,59 @@
#!/usr/bin/env bash
# Resolve PROXMOX_HOST for pct-over-SSH so operator workstations do not run pct by mistake.
#
# Usage (after VMID is set):
# PROXMOX_MONOREPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)" # proxmox repo root
# source "${PROXMOX_MONOREPO_ROOT}/scripts/lib/require-proxmox-ssh-for-pct.sh"
# require_proxmox_ssh_for_pct
#
# Env:
# PROXMOX_HOST If set, use this host (ssh root@$PROXMOX_HOST … pct …).
# VMID Used with get_host_for_vmid when PROXMOX_HOST is unset.
# PROXMOX_MONOREPO_ROOT Proxmox monorepo root (directory containing scripts/lib/load-project-env.sh).
# PROJECT_ROOT Alternative to PROXMOX_MONOREPO_ROOT when sourcing load-project-env.
# DEPLOY_PCT_ON_LOCAL_PVE Set to 1 only on a real Proxmox node (/etc/pve/.members) to run local pct
# without SSH (hypervisor shell only).
require_proxmox_ssh_for_pct() {
  # Resolve and export PROXMOX_HOST so callers run pct via `ssh root@$PROXMOX_HOST`.
  # Returns 0 with PROXMOX_HOST exported (empty string in local-PVE mode), 1 on error.
  local vmid="${VMID:-}"
  # Local mode: only honored on a genuine PVE cluster member (/etc/pve/.members).
  if [[ "${DEPLOY_PCT_ON_LOCAL_PVE:-0}" == "1" ]]; then
    if [[ -r /etc/pve/.members ]]; then
      export PROXMOX_HOST=""
      echo "NOTE: DEPLOY_PCT_ON_LOCAL_PVE=1 — using pct on this Proxmox node (no SSH)." >&2
      return 0
    fi
    echo "ERROR: DEPLOY_PCT_ON_LOCAL_PVE=1 but this host is not a Proxmox cluster member (/etc/pve/.members missing)." >&2
    return 1
  fi
  # An explicitly set PROXMOX_HOST wins over VMID-based lookup.
  if [[ -n "${PROXMOX_HOST:-}" ]]; then
    echo "Using Proxmox target: ssh root@${PROXMOX_HOST} (VMID ${vmid:-n/a})" >&2
    return 0
  fi
  if [[ -z "$vmid" ]]; then
    echo "ERROR: PROXMOX_HOST is unset and VMID is empty — cannot choose a Proxmox host." >&2
    echo " Set PROXMOX_HOST (e.g. 192.168.11.12) or VMID, or run on a PVE node with DEPLOY_PCT_ON_LOCAL_PVE=1." >&2
    return 1
  fi
  local root="${PROXMOX_MONOREPO_ROOT:-${PROJECT_ROOT:-}}"
  if [[ -z "$root" || ! -f "$root/scripts/lib/load-project-env.sh" ]]; then
    echo "ERROR: Proxmox monorepo root not found (expected scripts/lib/load-project-env.sh under PROXMOX_MONOREPO_ROOT or PROJECT_ROOT)." >&2
    return 1
  fi
  # Source the env loader to get get_host_for_vmid.
  # NOTE(review): `VAR=val source file` scoping differs between bash modes (the
  # assignment can persist after sourcing) — presumably intended here; confirm.
  # shellcheck disable=SC1090
  PROJECT_ROOT="$root" source "$root/scripts/lib/load-project-env.sh"
  local chosen
  chosen="$(get_host_for_vmid "$vmid")"
  if [[ -z "$chosen" ]]; then
    echo "ERROR: get_host_for_vmid returned empty for VMID=$vmid" >&2
    return 1
  fi
  export PROXMOX_HOST="$chosen"
  echo "Auto-selected Proxmox host from VMID ${vmid}: ssh root@${PROXMOX_HOST}" >&2
  return 0
}

View File

@@ -0,0 +1,3 @@
# Used by scripts/deployment/solana-transfer-native.py (sign + serialize only).
# RPC calls use stdlib in scripts/lib/solana_jsonrpc.py (avoids solana-py sendTransaction parse panics on some hosts).
solders>=0.21.0,<0.26

View File

@@ -0,0 +1,188 @@
"""
Minimal Solana JSON-RPC over HTTP (stdlib only).
Some public RPCs return a bare string for ``sendTransaction`` ``result`` without
extra fields that ``solana-py``'s ``SendTransactionResp`` expects, which makes
``Client.send_raw_transaction`` panic while deserializing (missing JSON field
``data``). Use :func:`send_transaction_wire` for submission; keep ``solders``
(or ``solana-py``) only for signing and local serialization.
"""
from __future__ import annotations
import base64
import json
import time
import urllib.error
import urllib.request
from typing import Any
DEFAULT_USER_AGENT = "proxmox-scripts/solana-jsonrpc/1.0"
class SolanaJsonRpcError(RuntimeError):
    """JSON-RPC error object or unexpected HTTP / parse failure."""

    def __init__(self, message: str, *, payload: dict[str, Any] | None = None) -> None:
        # Keep the decoded RPC response (when available) so callers can inspect
        # the full error object, not just the message text.
        RuntimeError.__init__(self, message)
        self.payload = payload
def post_json_rpc(
    rpc_url: str,
    method: str,
    params: list[Any],
    *,
    request_id: int = 1,
    timeout_s: float = 90.0,
    user_agent: str = DEFAULT_USER_AGENT,
) -> dict[str, Any]:
    """POST one JSON-RPC 2.0 request and return the decoded response dict.

    Raises SolanaJsonRpcError for HTTP errors, non-JSON bodies, and responses
    carrying a truthy ``error`` member (full response attached as payload).
    """
    envelope = {"jsonrpc": "2.0", "id": request_id, "method": method, "params": params}
    request = urllib.request.Request(
        rpc_url,
        data=json.dumps(envelope).encode("utf-8"),
        headers={"Content-Type": "application/json", "User-Agent": user_agent},
        method="POST",
    )
    try:
        with urllib.request.urlopen(request, timeout=timeout_s) as resp:
            text = resp.read().decode("utf-8")
    except urllib.error.HTTPError as exc:
        # Best effort: surface the error body, falling back to the exception text.
        try:
            detail = exc.read().decode("utf-8", errors="replace")
        except Exception:
            detail = str(exc)
        raise SolanaJsonRpcError(f"HTTP {exc.code}: {detail}") from exc
    try:
        decoded: dict[str, Any] = json.loads(text)
    except json.JSONDecodeError as exc:
        raise SolanaJsonRpcError(f"invalid JSON from RPC: {text[:500]!r}") from exc
    rpc_error = decoded.get("error")
    if rpc_error:
        raise SolanaJsonRpcError(f"RPC error: {rpc_error}", payload=decoded)
    return decoded
def get_latest_blockhash(
    rpc_url: str, *, commitment: str = "confirmed", timeout_s: float = 30.0
) -> str:
    """Return the current blockhash (base58 string) at the given commitment."""
    response = post_json_rpc(
        rpc_url,
        "getLatestBlockhash",
        [{"commitment": commitment}],
        timeout_s=timeout_s,
    )
    try:
        blockhash = response["result"]["value"]["blockhash"]
    except (KeyError, TypeError) as e:
        raise SolanaJsonRpcError(f"unexpected getLatestBlockhash shape: {response!r}") from e
    return str(blockhash)
def get_balance_lamports(
    rpc_url: str,
    pubkey_b58: str,
    *,
    commitment: str = "confirmed",
    timeout_s: float = 90.0,
) -> int:
    """Return the lamport balance of ``pubkey_b58`` at the given commitment.

    ``timeout_s`` bounds the HTTP request; the default 90.0 matches what this
    function previously inherited implicitly from :func:`post_json_rpc`, so
    existing callers see no change. Raises :class:`SolanaJsonRpcError` on an
    unexpected response shape.
    """
    out = post_json_rpc(
        rpc_url,
        "getBalance",
        [pubkey_b58, {"commitment": commitment}],
        timeout_s=timeout_s,
    )
    try:
        return int(out["result"]["value"])
    except (KeyError, TypeError, ValueError) as e:
        raise SolanaJsonRpcError(f"unexpected getBalance shape: {out!r}") from e
def send_transaction_wire(
    rpc_url: str,
    signed_wire: bytes,
    *,
    skip_preflight: bool = False,
    preflight_commitment: str = "confirmed",
    max_retries: int | None = None,
    timeout_s: float = 90.0,
) -> str:
    """
    Submit a fully signed legacy or versioned transaction (wire bytes).
    Returns base58 transaction signature string from ``result``.
    """
    config: dict[str, Any] = {
        "encoding": "base64",
        "skipPreflight": skip_preflight,
        "preflightCommitment": preflight_commitment,
    }
    if max_retries is not None:
        config["maxRetries"] = max_retries
    encoded_tx = base64.b64encode(signed_wire).decode("ascii")
    out = post_json_rpc(rpc_url, "sendTransaction", [encoded_tx, config], timeout_s=timeout_s)
    signature = out.get("result")
    # Some providers return odd shapes; only a bare string signature is valid here.
    if not isinstance(signature, str):
        raise SolanaJsonRpcError(f"unexpected sendTransaction result: {out!r}")
    return signature
def get_signature_statuses(
    rpc_url: str,
    signatures: list[str],
    *,
    search_transaction_history: bool = False,
) -> list[dict[str, Any] | None]:
    """Return one status object (or null) per signature, same order as input."""
    params: list[Any] = [signatures]
    if search_transaction_history:
        params.append({"searchTransactionHistory": True})
    out = post_json_rpc(rpc_url, "getSignatureStatuses", params)
    try:
        value = out["result"]["value"]
    except (KeyError, TypeError) as e:
        raise SolanaJsonRpcError(f"unexpected getSignatureStatuses shape: {out!r}") from e
    if not isinstance(value, list):
        raise SolanaJsonRpcError(f"unexpected getSignatureStatuses value: {value!r}")
    statuses: list[dict[str, Any] | None] = []
    for entry in value:
        # Each entry must be either null (unknown signature) or a status dict.
        if entry is not None and not isinstance(entry, dict):
            raise SolanaJsonRpcError(f"unexpected status entry: {entry!r}")
        statuses.append(entry)
    return statuses
def wait_until_signature_confirmed(
    rpc_url: str,
    signature: str,
    *,
    timeout_s: float = 90.0,
    poll_interval_s: float = 1.0,
) -> dict[str, Any]:
    """
    Poll ``getSignatureStatuses`` until the signature has a terminal ``err`` or
    reaches ``confirmationStatus`` of ``confirmed`` / ``finalized``.
    """
    deadline = time.monotonic() + timeout_s
    last_status: dict[str, Any] | None = None
    while time.monotonic() < deadline:
        batch = get_signature_statuses(rpc_url, [signature])
        status = batch[0] if batch else None
        last_status = status
        if status is not None:
            failure = status.get("err")
            if failure:
                raise SolanaJsonRpcError(f"transaction failed: err={failure!r}", payload=status)
            if status.get("confirmationStatus") in ("confirmed", "finalized"):
                return status
        # Not yet visible or not yet confirmed: wait and poll again.
        time.sleep(poll_interval_s)
    raise SolanaJsonRpcError(
        f"timeout waiting for confirmation of {signature!r}; last={last_status!r}"
    )

View File

@@ -0,0 +1,45 @@
#!/usr/bin/env bash
# Show which Proxmox node hosts NPMplus CTs 10233-10236 (live SSH).
# Usage: bash scripts/maintenance/npmplus-cluster-placement-status.sh
# Requires: SSH BatchMode to cluster nodes (root@192.168.11.11, .12); extend HOSTS if needed.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# Optional central IP map; the HOSTS defaults below apply when it is absent.
# shellcheck source=/dev/null
[[ -f "$PROJECT_ROOT/config/ip-addresses.conf" ]] && source "$PROJECT_ROOT/config/ip-addresses.conf" 2>/dev/null || true

SSH_OPTS=(-o BatchMode=yes -o ConnectTimeout=12 -o StrictHostKeyChecking=no)
# "ip:label" pairs; env overrides come from ip-addresses.conf when sourced.
HOSTS=(
  "${PROXMOX_HOST_R630_01:-192.168.11.11}:r630-01"
  "${PROXMOX_HOST_R630_02:-192.168.11.12}:r630-02"
  "${PROXMOX_HOST_R630_03:-192.168.11.13}:r630-03"
  "${PROXMOX_HOST_R630_04:-192.168.11.14}:r630-04"
)

echo "=== NPMplus CT placement (VMIDs 10233-10236) ==="
echo ""
for entry in "${HOSTS[@]}"; do
  ip="${entry%%:*}"
  label="${entry##*:}"
  echo "--- $label ($ip) ---"
  # Only list the NPMplus CT rows; `|| true` keeps an empty grep from failing the remote command.
  if ssh "${SSH_OPTS[@]}" "root@$ip" "pct list | grep -E '^10233|^10234|^10235|^10236' || true" 2>/dev/null; then
    :
  else
    echo "(SSH skipped or failed — check key access to $ip)"
  fi
  echo ""
done

echo "=== Quick HTTP :81 (LAN, optional) ==="
for ip in 167 168 169 170; do
  code=$(curl -sS -m 4 -o /dev/null -w "%{http_code}" "http://192.168.11.$ip:81/" 2>/dev/null || echo "fail")
  echo "192.168.11.$ip:81 -> $code"
done
echo ""
echo "Target distribution: docs/04-configuration/NPMPLUS_MISSION_CRITICAL_DISTRIBUTION_AND_HA_PLAN.md"

View File

@@ -0,0 +1,25 @@
#!/usr/bin/env bash
# Stop dev helpers started from this repo (not IDE/Cursor, not system DNS).
# Safe to run before leaving a workstation; does not touch Besu/RPC/NPM on LAN.
#
# Usage: bash scripts/maintenance/stop-local-repo-dev-servers.sh
set -euo pipefail

# pgrep/pkill -f patterns for the repo-local dev helpers this script manages.
PATTERNS=(
  'forge-verification-proxy/server\.js'
  'serve_explorer_spa\.py'
)

_stopped=0
for sig in TERM KILL; do
  for pattern in "${PATTERNS[@]}"; do
    if pgrep -f "$pattern" >/dev/null 2>&1; then
      pkill -"$sig" -f "$pattern" 2>/dev/null || true
      _stopped=1
    fi
  done
  # Give processes a moment to exit cleanly after SIGTERM before escalating;
  # the KILL pass then only signals survivors (pgrep re-checks each pattern).
  [[ "$sig" == TERM && "$_stopped" == 1 ]] && sleep 1
done

if [[ "$_stopped" == 1 ]]; then
  echo "Stopped forge-verification-proxy and/or serve_explorer_spa.py (if running)."
else
  echo "No forge-verification-proxy or serve_explorer_spa.py processes found."
fi

0
scripts/run-wave0-from-lan.sh Normal file → Executable file
View File

View File

@@ -222,6 +222,25 @@ if [[ -n "$ADDR_TX_MIRROR" ]] && should_verify TransactionMirror; then
verify_one_explicit "$ADDR_TX_MIRROR" "contracts/mirror/TransactionMirror.sol:TransactionMirror" "TransactionMirror" "${enc:-}"
fi
# Optional: alternate CCIPWETH9Bridge deployment (must run after verify_one_explicit is defined).
# Constructor args read from chain. Enable: VERIFY_ALTERNATE_CCIPWETH9_BRIDGE=1 ./scripts/verify/run-contract-verification-with-proxy.sh
ADDR_CCIPWETH9_ALT="${VERIFY_CCIPWETH9_ALT_ADDRESS:-0x9cba0D04Ae5f6f16e3C599025aB97a05c4A593d5}"
if [[ "${VERIFY_ALTERNATE_CCIPWETH9_BRIDGE:-0}" == "1" ]] && should_verify CCIPWETH9BridgeAlt; then
if has_contract_bytecode "$ADDR_CCIPWETH9_ALT"; then
r=$(cast call "$ADDR_CCIPWETH9_ALT" "ccipRouter()(address)" --rpc-url "$RPC" 2>/dev/null | tr -d '\n\r \t') || r=""
w=$(cast call "$ADDR_CCIPWETH9_ALT" "weth9()(address)" --rpc-url "$RPC" 2>/dev/null | tr -d '\n\r \t') || w=""
f=$(cast call "$ADDR_CCIPWETH9_ALT" "feeToken()(address)" --rpc-url "$RPC" 2>/dev/null | tr -d '\n\r \t') || f=""
if [[ -n "$r" && -n "$w" && -n "$f" ]]; then
enc=$(cast abi-encode 'constructor(address,address,address)' "$r" "$w" "$f" 2>/dev/null | tr -d '\n\r \t') || enc=""
verify_one_explicit "$ADDR_CCIPWETH9_ALT" "contracts/ccip/CCIPWETH9Bridge.sol:CCIPWETH9Bridge" "CCIPWETH9Bridge (alternate $ADDR_CCIPWETH9_ALT)" "${enc:-}"
else
echo "CCIPWETH9Bridge (alternate): skip — could not read immutables from $ADDR_CCIPWETH9_ALT"
fi
else
echo "CCIPWETH9Bridge (alternate): skip — no bytecode at $ADDR_CCIPWETH9_ALT"
fi
fi
# CompliantFiatToken: one deployment per currency with distinct constructor args — verify per token in the Blockscout UI or add scripted entries when addresses are enumerated in env.
echo ""

View File

@@ -27,6 +27,10 @@ One-line install (Debian/Ubuntu): `sudo apt install -y sshpass rsync dnsutils ip
## Scripts
- `run-cwusdc-provider-nonmanual-checks.sh` - Run all public/read-only cWUSDC provider checks and write `reports/status/cwusdc-provider-handoff-latest.{json,md}`. Does not submit forms, approve tokens, add liquidity, swap, bridge, or broadcast transactions.
- `check-cwusdc-provider-readiness-ci.sh` - CI-safe cWUSDC provider gate: fails only when repo-controlled URL prerequisites fail; reports external provider blockers as advisory.
- `build-cwusdc-provider-handoff-report.py` - Build a concise cWUSDC provider handoff report from latest JSON probe outputs.
- `check-cwusdc-etherscan-prereq-urls.sh` - Refresh public URL prerequisite evidence for Etherscan profile submission; supports `--json-out`, `--md-out`, `--timeout`, and `--retries` (or env `CWUSDC_PROVIDER_URL_TIMEOUT` / `CWUSDC_PROVIDER_URL_RETRIES`).
- `backup-npmplus.sh` - Full NPMplus backup (database, API exports, certificates)
- `check-contracts-on-chain-138.sh` - Check that Chain 138 deployed contracts have bytecode on-chain (`cast code` for 31 addresses; requires `cast` and RPC access). Use `[RPC_URL]` or env `RPC_URL_138`; `--dry-run` lists addresses only (no RPC calls); `SKIP_EXIT=1` to exit 0 when RPC unreachable.
- `check-non-evm-network-health.sh` - Read-only live check for the public Solana, Tron, and XRPL endpoints used in repo docs. Prints a concise status table and can also write `reports/status/non-evm-network-health-latest.json`.

View File

@@ -0,0 +1,310 @@
#!/usr/bin/env python3
"""Audit Chain 138 cUSDC and Ethereum cWUSDC explorer feeds.
This produces an evidence packet for Etherscan/listing submissions. It does not
ask Etherscan to merge Chain 138 traffic into the Ethereum token tracker; rather,
it documents that Ethereum Mainnet cWUSDC is the wrapped public-network transport
representation of canonical Chain 138 cUSDC and summarizes both API feeds.
"""
from __future__ import annotations
import argparse
import datetime as dt
import json
import os
import sys
import urllib.parse
import urllib.request
from pathlib import Path
from typing import Any
CHAIN138_CUSDC = "0xf22258f57794CC8E06237084b353Ab30fFfa640b"
MAINNET_CWUSDC = "0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a"
CHAIN138_EXPLORER_API = "https://explorer.d-bis.org/api/v2"
ETHERSCAN_V2_API = "https://api.etherscan.io/v2/api"
REPORT_BASE = Path("reports/status/cusdc-cwusdc-etherscan-feed-audit-latest")
def fetch_json(url: str, timeout: int = 30) -> Any:
    """GET ``url`` and decode the response body as UTF-8 JSON."""
    headers = {"User-Agent": "dbis-cusdc-cwusdc-feed-audit/1.0"}
    request = urllib.request.Request(url, headers=headers)
    with urllib.request.urlopen(request, timeout=timeout) as response:
        body = response.read().decode("utf-8")
    return json.loads(body)
def human_units(raw: int, decimals: int) -> str:
    """Format a raw token amount as a thousands-separated decimal string.

    Trailing zeros in the fractional part are dropped, and a bare integer is
    returned when the fraction is zero (e.g. 1500000 @ 6 decimals -> "1.5").
    """
    magnitude = abs(raw)
    scale = 10**decimals
    whole_part, frac_part = divmod(magnitude, scale)
    prefix = "-" if raw < 0 else ""
    fraction = str(frac_part).rjust(decimals, "0").rstrip("0")
    if fraction:
        return f"{prefix}{whole_part:,}.{fraction}"
    return f"{prefix}{whole_part:,}"
def addresses_from_transfer(item: dict[str, Any], style: str) -> set[str]:
    """Extract lowercased from/to addresses from one transfer entry.

    ``style`` selects the feed shape: ``"blockscout"`` nests each address
    under ``{"hash": ...}``; anything else (Etherscan) stores it as a flat
    string. Missing or null participants are skipped instead of raising
    (previously ``item.get("from", {})`` raised AttributeError when the key
    was present but explicitly null).
    """
    if style == "blockscout":
        # `or {}` guards the present-but-null case; .get default does not.
        values = [
            (item.get("from") or {}).get("hash"),
            (item.get("to") or {}).get("hash"),
        ]
    else:
        values = [item.get("from"), item.get("to")]
    return {str(v).lower() for v in values if v}
def summarize_blockscout_transfers(items: list[dict[str, Any]], decimals: int) -> dict[str, Any]:
    """Summarize a sample of Blockscout token-transfer entries.

    Returns sample counts, raw/decimal volume, unique participant addresses,
    per-method counts, and a snapshot of the newest transfer. Items are
    expected newest-first (Blockscout API ordering), so ``items[0]`` is used
    as the latest transfer.
    """
    total_raw = 0
    addresses: set[str] = set()
    methods: dict[str, int] = {}
    latest = items[0] if items else None
    for item in items:
        value = (item.get("total") or {}).get("value", "0")
        try:
            total_raw += int(value)
        except (TypeError, ValueError):
            pass  # skip malformed values; keep the rest of the sample
        addresses.update(addresses_from_transfer(item, "blockscout"))
        method = item.get("method") or "unknown"
        methods[method] = methods.get(method, 0) + 1
    latest_summary = None
    if latest:
        raw_value = (latest.get("total") or {}).get("value")
        try:
            # Previously an unguarded int(...) here crashed on a null/garbage
            # value even though the volume loop tolerates them; report None.
            latest_units = human_units(int(raw_value if raw_value is not None else "0"), decimals)
        except (TypeError, ValueError):
            latest_units = None
        latest_summary = {
            "hash": latest.get("transaction_hash"),
            "timestamp": latest.get("timestamp"),
            "from": (latest.get("from") or {}).get("hash"),
            "to": (latest.get("to") or {}).get("hash"),
            "value_raw": raw_value,
            "value_units": latest_units,
            "method": latest.get("method"),
        }
    return {
        "sample_count": len(items),
        "sample_volume_raw": str(total_raw),
        "sample_volume_units": human_units(total_raw, decimals),
        "unique_addresses_in_sample": len(addresses),
        "method_counts": methods,
        "latest_transfer": latest_summary,
        "addresses": sorted(addresses),
    }
def summarize_etherscan_transfers(items: list[dict[str, Any]], decimals: int) -> dict[str, Any]:
    """Summarize a sample of Etherscan ``tokentx`` entries (newest-first).

    Returns the same aggregate shape as
    :func:`summarize_blockscout_transfers` so both feeds can be compared
    side by side in the report.
    """
    total_raw = 0
    addresses: set[str] = set()
    methods: dict[str, int] = {}
    latest = items[0] if items else None
    for item in items:
        try:
            total_raw += int(item.get("value", "0"))
        except (TypeError, ValueError):
            pass  # skip malformed values; keep the rest of the sample
        addresses.update(addresses_from_transfer(item, "etherscan"))
        method = item.get("methodId") or item.get("functionName") or "unknown"
        methods[method] = methods.get(method, 0) + 1
    latest_summary = None
    if latest:
        try:
            # Previously an unguarded int(...) here crashed on a null/garbage
            # "value" even though the volume loop tolerates them; report None.
            latest_units = human_units(int(latest.get("value", "0")), decimals)
        except (TypeError, ValueError):
            latest_units = None
        latest_summary = {
            "hash": latest.get("hash"),
            "blockNumber": latest.get("blockNumber"),
            "timeStamp": latest.get("timeStamp"),
            "from": latest.get("from"),
            "to": latest.get("to"),
            "value_raw": latest.get("value"),
            "value_units": latest_units,
            "methodId": latest.get("methodId"),
            "functionName": latest.get("functionName"),
        }
    return {
        "sample_count": len(items),
        "sample_volume_raw": str(total_raw),
        "sample_volume_units": human_units(total_raw, decimals),
        "unique_addresses_in_sample": len(addresses),
        "method_counts": methods,
        "latest_transfer": latest_summary,
        "addresses": sorted(addresses),
    }
def blockscout_token_metadata(address: str) -> dict[str, Any]:
    """Fetch Blockscout v2 token metadata for ``address`` on Chain 138."""
    url = f"{CHAIN138_EXPLORER_API}/tokens/{address}"
    return fetch_json(url)
def blockscout_transfers(address: str, pages: int) -> list[dict[str, Any]]:
    """Collect up to ``pages`` pages of Blockscout transfers for ``address``.

    Follows ``next_page_params`` cursors until the feed is exhausted or the
    page budget is spent; returns the concatenated ``items`` lists.
    """
    collected: list[dict[str, Any]] = []
    cursor: dict[str, Any] | None = None
    base_url = f"{CHAIN138_EXPLORER_API}/tokens/{address}/transfers"
    for _ in range(pages):
        url = base_url + ("?" + urllib.parse.urlencode(cursor) if cursor else "")
        payload = fetch_json(url)
        collected.extend(payload.get("items", []))
        cursor = payload.get("next_page_params")
        if not cursor:
            break
    return collected
def etherscan_call(params: dict[str, str], api_key: str) -> Any:
    """Call the Etherscan v2 API (chainid=1) and return its ``result``.

    Raises RuntimeError on API-level errors, except the benign
    "No transactions found" status which just yields the (empty) result.
    """
    query = {"chainid": "1", **params, "apikey": api_key}
    payload = fetch_json(f"{ETHERSCAN_V2_API}?{urllib.parse.urlencode(query)}")
    failed = payload.get("status") == "0"
    if failed and "No transactions found" not in str(payload.get("message")):
        raise RuntimeError(f"Etherscan API error: {payload.get('message')} {payload.get('result')}")
    return payload.get("result", [])
def build_report(args: argparse.Namespace) -> dict[str, Any]:
    """Assemble the dual-feed audit payload for Chain 138 cUSDC and mainnet cWUSDC.

    Fetches Chain 138 token metadata and transfers from Blockscout, Ethereum
    cWUSDC supply and transfers from the Etherscan v2 API, summarizes both
    feeds, and records the recent-sample address overlap. Requires an
    Etherscan API key via ``--etherscan-api-key`` or ``ETHERSCAN_API_KEY``;
    raises SystemExit when neither is set.
    """
    api_key = args.etherscan_api_key or os.environ.get("ETHERSCAN_API_KEY", "")
    if not api_key:
        raise SystemExit("ETHERSCAN_API_KEY is required for Ethereum cWUSDC Etherscan API checks")
    c138_meta = blockscout_token_metadata(args.chain138_cusdc)
    # Fall back to 6 decimals when metadata omits/zeroes the field.
    c138_decimals = int(c138_meta.get("decimals") or 6)
    c138_transfers = blockscout_transfers(args.chain138_cusdc, args.chain138_pages)
    cw_supply_raw = etherscan_call(
        {
            "module": "stats",
            "action": "tokensupply",
            "contractaddress": args.mainnet_cwusdc,
        },
        api_key,
    )
    cw_transfers = etherscan_call(
        {
            "module": "account",
            "action": "tokentx",
            "contractaddress": args.mainnet_cwusdc,
            "page": "1",
            "offset": str(args.etherscan_offset),
            "sort": "desc",
        },
        api_key,
    )
    # A "No transactions found" response yields a non-list result; normalize.
    if not isinstance(cw_transfers, list):
        cw_transfers = []
    c138_summary = summarize_blockscout_transfers(c138_transfers, c138_decimals)
    cw_summary = summarize_etherscan_transfers(cw_transfers, 6)
    # Participants seen in both recent samples (supporting evidence only).
    common_addresses = sorted(set(c138_summary["addresses"]) & set(cw_summary["addresses"]))
    # Drop the raw address lists from the published report; keep aggregates.
    c138_summary_public = {k: v for k, v in c138_summary.items() if k != "addresses"}
    cw_summary_public = {k: v for k, v in cw_summary.items() if k != "addresses"}
    return {
        "generatedAt": dt.datetime.now(dt.UTC).isoformat().replace("+00:00", "Z"),
        "purpose": "Evidence packet for Etherscan/listing feeds: Chain 138 cUSDC is the canonical source asset; Ethereum cWUSDC is the wrapped transport representation.",
        "canonicalRelationship": {
            "sourceChainId": 138,
            "sourceToken": {
                "symbol": "cUSDC",
                "name": "USD Coin (Compliant)",
                "address": args.chain138_cusdc,
                "explorer": f"https://explorer.d-bis.org/token/{args.chain138_cusdc}",
                "api": f"{CHAIN138_EXPLORER_API}/tokens/{args.chain138_cusdc}",
            },
            "wrappedChainId": 1,
            "wrappedToken": {
                "symbol": "cWUSDC",
                "name": "Wrapped cUSDC",
                "address": args.mainnet_cwusdc,
                "explorer": f"https://etherscan.io/token/{args.mainnet_cwusdc}",
                "api": ETHERSCAN_V2_API,
            },
            "mappingSource": "config/token-mapping-multichain.json: 138 cUSDC -> Ethereum Mainnet cWUSDC",
            "trackerLanguage": "cWUSDC is the Ethereum Mainnet compliant wrapped transport representation of canonical Chain 138 cUSDC. It is not Circle-issued USDC.",
        },
        "chain138Cusdc": {
            "metadata": {
                "name": c138_meta.get("name"),
                "symbol": c138_meta.get("symbol"),
                "decimals": c138_meta.get("decimals"),
                "holders": c138_meta.get("holders"),
                "totalSupplyRaw": c138_meta.get("total_supply"),
                "totalSupplyUnits": human_units(int(c138_meta.get("total_supply") or 0), c138_decimals),
            },
            "transferFeed": c138_summary_public,
        },
        "mainnetCwusdc": {
            "metadata": {
                "name": "Wrapped cUSDC",
                "symbol": "cWUSDC",
                "decimals": "6",
                "totalSupplyRaw": str(cw_supply_raw),
                "totalSupplyUnits": human_units(int(cw_supply_raw or 0), 6),
            },
            "transferFeed": cw_summary_public,
        },
        "crossFeedSignals": {
            "commonAddressesInRecentSamples": common_addresses,
            "commonAddressCount": len(common_addresses),
            "interpretation": "Common addresses are supporting evidence only. Canonical linkage is established by the token mapping, metadata registry, and bridge/listing documentation; Etherscan itself will only index Ethereum Mainnet cWUSDC traffic for the token page.",
        },
        "etherscanSubmissionNote": "Ask Etherscan to list the Ethereum token as Wrapped cUSDC (cWUSDC), with Chain 138 cUSDC identified as the canonical source asset in the description/supporting links. Do not ask Etherscan to add Chain 138 transfer counts to the Ethereum token tracker totals.",
    }
def write_markdown(report: dict[str, Any], path: Path) -> None:
    """Render the audit report as a human-readable Markdown file at ``path``."""
    rel = report["canonicalRelationship"]
    c138 = report["chain138Cusdc"]
    cw = report["mainnetCwusdc"]
    signals = report["crossFeedSignals"]
    c138_feed = c138["transferFeed"]
    cw_feed = cw["transferFeed"]
    lines: list[str] = []
    lines += ["# cUSDC / cWUSDC Etherscan Feed Audit", ""]
    lines += [f"Generated: `{report['generatedAt']}`", ""]
    lines += [
        "## Relationship",
        "",
        f"- Source asset: Chain 138 `cUSDC` at `{rel['sourceToken']['address']}`",
        f"- Wrapped transport asset: Ethereum Mainnet `cWUSDC` at `{rel['wrappedToken']['address']}`",
        f"- Mapping source: `{rel['mappingSource']}`",
        f"- Tracker language: {rel['trackerLanguage']}",
        "",
    ]
    lines += [
        "## API Feed Summary",
        "",
        "| Feed | Supply | Recent sample transfers | Recent sample volume | Unique addresses in sample |",
        "|---|---:|---:|---:|---:|",
        f"| Chain 138 cUSDC Blockscout | {c138['metadata']['totalSupplyUnits']} | {c138_feed['sample_count']} | {c138_feed['sample_volume_units']} | {c138_feed['unique_addresses_in_sample']} |",
        f"| Ethereum cWUSDC Etherscan | {cw['metadata']['totalSupplyUnits']} | {cw_feed['sample_count']} | {cw_feed['sample_volume_units']} | {cw_feed['unique_addresses_in_sample']} |",
        "",
    ]
    lines += [
        "## Latest Transfers",
        "",
        f"- Chain 138 cUSDC latest: `{(c138_feed['latest_transfer'] or {}).get('hash')}`",
        f"- Ethereum cWUSDC latest: `{(cw_feed['latest_transfer'] or {}).get('hash')}`",
        "",
    ]
    lines += [
        "## Cross-Feed Signal",
        "",
        f"- Common addresses in recent API samples: `{signals['commonAddressCount']}`",
        f"- Interpretation: {signals['interpretation']}",
        "",
    ]
    lines += ["## Etherscan Submission Note", "", report["etherscanSubmissionNote"], ""]
    path.write_text("\n".join(lines), encoding="utf-8")
def main() -> int:
    """CLI entry point: build the feed audit and write JSON + Markdown reports."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--chain138-cusdc", default=CHAIN138_CUSDC)
    parser.add_argument("--mainnet-cwusdc", default=MAINNET_CWUSDC)
    parser.add_argument("--etherscan-api-key", default="")
    parser.add_argument("--chain138-pages", type=int, default=3)
    parser.add_argument("--etherscan-offset", type=int, default=150)
    parser.add_argument("--json-out", default=f"{REPORT_BASE}.json")
    parser.add_argument("--md-out", default=f"{REPORT_BASE}.md")
    args = parser.parse_args()
    report = build_report(args)
    json_path, md_path = Path(args.json_out), Path(args.md_out)
    json_path.parent.mkdir(parents=True, exist_ok=True)
    json_path.write_text(json.dumps(report, indent=2) + "\n", encoding="utf-8")
    write_markdown(report, md_path)
    for written in (json_path, md_path):
        print(f"Wrote {written}")
    return 0
if __name__ == "__main__":
    sys.exit(main())

View File

@@ -0,0 +1,245 @@
#!/usr/bin/env python3
"""Read-only role/control audit for Ethereum Mainnet cWUSDC."""
from __future__ import annotations
import argparse
import datetime as dt
import json
import os
import subprocess
import urllib.parse
import urllib.request
from pathlib import Path
from typing import Any
ROOT = Path(__file__).resolve().parents[2]
REPORT_JSON = ROOT / "reports" / "status" / "cwusdc-mainnet-role-audit-latest.json"
REPORT_MD = ROOT / "reports" / "status" / "cwusdc-mainnet-role-audit-latest.md"
CWUSDC = "0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a"
DEPLOYER = "0x4A666F96fC8764181194447A7dFdb7d471b301C8"
CW_BRIDGE_MAINNET_FALLBACK = "0x2bF74583206A49Be07E0E8A94197C12987AbD7B5"
ETHERSCAN_V2_API = "https://api.etherscan.io/v2/api"
def load_dotenv(path: Path) -> None:
    """Load KEY=VALUE pairs from ``path`` into os.environ (non-destructive).

    Existing environment variables win; blank lines, ``#`` comments, and
    lines without ``=`` are ignored. Surrounding double/single quotes on the
    value are stripped. A missing file is a no-op.
    """
    if not path.exists():
        return
    for raw_line in path.read_text().splitlines():
        entry = raw_line.strip()
        if not entry or entry.startswith("#") or "=" not in entry:
            continue
        name, _, raw_value = entry.partition("=")
        name = name.strip()
        cleaned = raw_value.strip().strip('"').strip("'")
        if name and name not in os.environ:
            os.environ[name] = cleaned
def cast_call(contract: str, signature: str, *args: str, rpc_url: str) -> str:
    """Run ``cast call`` against ``rpc_url`` and return its trimmed stdout.

    Raises RuntimeError with the tool's stderr (or stdout) on failure.
    """
    command = ["cast", "call", contract, signature, *args, "--rpc-url", rpc_url]
    proc = subprocess.run(
        command,
        cwd=ROOT,
        text=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=False,
    )
    if proc.returncode != 0:
        raise RuntimeError(proc.stderr.strip() or proc.stdout.strip())
    return proc.stdout.strip()
def cast_keccak(signature: str) -> str:
    """Return ``cast keccak`` of ``signature`` (event/function topic hash).

    Raises RuntimeError with the tool's stderr (or stdout) on failure.
    """
    command = ["cast", "keccak", signature]
    proc = subprocess.run(
        command,
        cwd=ROOT,
        text=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=False,
    )
    if proc.returncode != 0:
        raise RuntimeError(proc.stderr.strip() or proc.stdout.strip())
    return proc.stdout.strip()
def fetch_json(url: str) -> Any:
    """GET ``url`` and decode the response body as UTF-8 JSON."""
    request = urllib.request.Request(url, headers={"User-Agent": "dbis-cwusdc-role-audit/1.0"})
    with urllib.request.urlopen(request, timeout=30) as response:
        body = response.read().decode("utf-8")
    return json.loads(body)
def etherscan_logs(api_key: str, address: str, topic0: str) -> list[dict[str, Any]]:
    """Fetch matching event logs for ``address``/``topic0`` via Etherscan v2.

    Returns an empty list when no API key is configured or the response
    shape is unexpected.
    """
    if not api_key:
        return []
    query = urllib.parse.urlencode(
        {
            "chainid": "1",
            "module": "logs",
            "action": "getLogs",
            "fromBlock": "0",
            "toBlock": "latest",
            "address": address,
            "topic0": topic0,
            "apikey": api_key,
        }
    )
    payload = fetch_json(f"{ETHERSCAN_V2_API}?{query}")
    result = payload.get("result") if isinstance(payload, dict) else None
    return result if isinstance(result, list) else []
def topic_to_address(topic: str) -> str:
    """Convert a 32-byte log topic (hex string) to its embedded address.

    An address occupies the low-order 20 bytes, i.e. the last 40 hex chars.
    """
    return f"0x{topic[-40:]}"
def bool_from_cast(value: str) -> bool:
    """Interpret ``cast call`` boolean output ("true"/"1", any case) as bool."""
    normalized = value.strip().lower()
    return normalized == "true" or normalized == "1"
def candidate_addresses() -> dict[str, str]:
    """Return labeled candidate addresses to probe for cWUSDC roles.

    Env overrides take precedence over the repo fallbacks; entries that are
    not 0x-prefixed 42-character strings are dropped (shape check only, no
    checksum validation).
    """
    raw = {
        "deployer": os.environ.get("DEPLOYER_ADDRESS") or DEPLOYER,
        "cwBridgeMainnet": os.environ.get("CW_BRIDGE_MAINNET", "") or CW_BRIDGE_MAINNET_FALLBACK,
        "ccipRelayBridgeMainnet": os.environ.get("CCIP_RELAY_BRIDGE_MAINNET", ""),
        "mainnetCcipWeth9Bridge": os.environ.get("MAINNET_CCIP_WETH9_BRIDGE", ""),
        "mainnetCcipWeth10Bridge": os.environ.get("MAINNET_CCIP_WETH10_BRIDGE", ""),
        "ccipEthRouter": os.environ.get("CCIP_ETH_ROUTER", ""),
        "uniswapV3CwusdcUsdcPool": "0x1Cf2e685682C7F7beF508F0Af15Dfb5CDda01ee3",
        "uniswapV2CwusdcUsdcPair": "0xC28706F899266b36BC43cc072b3a921BDf2C48D9",
        "engineXVirtualBatchVault": "0xf108586d1FC330EA1D4EA4ff8fd983cde94279B1",
    }

    def _looks_like_address(value: str) -> bool:
        return bool(value) and value.startswith("0x") and len(value) == 42

    return {label: value for label, value in raw.items() if _looks_like_address(value)}
def build(args: argparse.Namespace) -> dict[str, Any]:
    """Run the read-only cWUSDC role audit and return the report payload.

    Reads role IDs and per-candidate ``hasRole`` flags from the token via
    ``cast call``, then (when ETHERSCAN_API_KEY is set) replays
    RoleGranted/RoleRevoked logs to reconstruct effective role membership.
    Raises SystemExit when no mainnet RPC URL is configured.
    """
    # Load env files without overriding variables already set in the shell.
    load_dotenv(ROOT / ".env")
    load_dotenv(ROOT / "smom-dbis-138" / ".env")
    rpc_url = args.rpc_url or os.environ.get("ETHEREUM_MAINNET_RPC") or os.environ.get("MAINNET_RPC_URL")
    if not rpc_url:
        raise SystemExit("ETHEREUM_MAINNET_RPC or --rpc-url is required")
    # Role identifiers read directly from the token contract.
    roles = {
        "DEFAULT_ADMIN_ROLE": cast_call(args.token, "DEFAULT_ADMIN_ROLE()(bytes32)", rpc_url=rpc_url),
        "MINTER_ROLE": cast_call(args.token, "MINTER_ROLE()(bytes32)", rpc_url=rpc_url),
        "BURNER_ROLE": cast_call(args.token, "BURNER_ROLE()(bytes32)", rpc_url=rpc_url),
    }
    role_admins = {
        role_name: cast_call(args.token, "getRoleAdmin(bytes32)(bytes32)", role_id, rpc_url=rpc_url)
        for role_name, role_id in roles.items()
    }
    candidates = candidate_addresses()
    # hasRole(role, candidate) for every (candidate, role) pair.
    checks: dict[str, Any] = {}
    for label, address in candidates.items():
        checks[label] = {"address": address, "roles": {}}
        for role_name, role_id in roles.items():
            checks[label]["roles"][role_name] = bool_from_cast(
                cast_call(args.token, "hasRole(bytes32,address)(bool)", role_id, address, rpc_url=rpc_url)
            )
    # Candidates holding at least one role, with the roles they hold.
    privileged = [
        {
            "label": label,
            "address": data["address"],
            "roles": [role for role, has_role in data["roles"].items() if has_role],
        }
        for label, data in checks.items()
        if any(data["roles"].values())
    ]
    api_key = os.environ.get("ETHERSCAN_API_KEY", "")
    event_topics = {
        "RoleGranted": cast_keccak("RoleGranted(bytes32,address,address)"),
        "RoleRevoked": cast_keccak("RoleRevoked(bytes32,address,address)"),
    }
    events: list[dict[str, Any]] = []
    if api_key:
        for event_name, topic0 in event_topics.items():
            for item in etherscan_logs(api_key, args.token, topic0):
                topics = item.get("topics") or []
                # These events index role, account, and sender: topic0 + 3 topics.
                if len(topics) < 4:
                    continue
                role_id = topics[1]
                account = topic_to_address(topics[2])
                sender = topic_to_address(topics[3])
                # Map the raw role id back to a known role name when possible;
                # otherwise keep the raw id as the label.
                role_name = next((name for name, value in roles.items() if value.lower() == role_id.lower()), role_id)
                events.append(
                    {
                        "event": event_name,
                        "role": role_name,
                        "roleId": role_id,
                        "account": account,
                        "sender": sender,
                        # Etherscan returns hex block numbers; decode when 0x-prefixed.
                        "blockNumber": int(str(item.get("blockNumber", "0")), 16) if str(item.get("blockNumber", "")).startswith("0x") else item.get("blockNumber"),
                        "transactionHash": item.get("transactionHash"),
                        "logIndex": item.get("logIndex"),
                    }
                )
    # Replay grant/revoke events in (blockNumber, logIndex) order to compute
    # the currently-effective membership per known role.
    effective_from_events: dict[str, set[str]] = {role: set() for role in roles}
    for item in sorted(events, key=lambda row: (int(row.get("blockNumber") or 0), int(str(row.get("logIndex") or "0x0"), 16) if str(row.get("logIndex", "")).startswith("0x") else 0)):
        role = item["role"]
        if role not in effective_from_events:
            continue
        if item["event"] == "RoleGranted":
            effective_from_events[role].add(item["account"])
        elif item["event"] == "RoleRevoked":
            effective_from_events[role].discard(item["account"])
    return {
        "schema": "cwusdc-mainnet-role-audit/v1",
        "generatedAt": dt.datetime.now(dt.UTC).isoformat().replace("+00:00", "Z"),
        "network": {"chainId": 1, "name": "Ethereum Mainnet"},
        "token": {"address": args.token, "symbol": "cWUSDC", "name": "Wrapped cUSDC"},
        "roles": roles,
        "roleAdmins": role_admins,
        "candidateChecks": checks,
        "privilegedCandidates": privileged,
        "eventLogReview": {
            "checked": bool(api_key),
            "topics": event_topics,
            "eventCount": len(events),
            "events": events,
            "effectiveMembersFromEvents": {role: sorted(values) for role, values in effective_from_events.items()},
        },
        "limitations": [
            "This audit checks known candidate addresses only.",
            "Event-log reconstruction is included when ETHERSCAN_API_KEY is available, but provider log limits or pruned responses can still require manual verification.",
            "This is a read-only control snapshot, not a formal third-party audit.",
        ],
    }
def write_md(payload: dict[str, Any], path: Path) -> None:
    """Render the role-audit payload as a Markdown report at ``path``."""
    lines: list[str] = [
        "# cWUSDC Mainnet Role Audit",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Token: `{payload['token']['address']}`",
        "",
        "## Role IDs",
        "",
        "| Role | ID | Admin role ID |",
        "|---|---|---|",
    ]
    role_admins = payload["roleAdmins"]
    for role, role_id in payload["roles"].items():
        lines.append(f"| `{role}` | `{role_id}` | `{role_admins[role]}` |")
    lines += ["", "## Candidate Role Checks", "", "| Label | Address | Admin | Minter | Burner |", "|---|---|---:|---:|---:|"]
    for label, data in payload["candidateChecks"].items():
        role_flags = data["roles"]
        lines.append(
            f"| `{label}` | `{data['address']}` | `{role_flags['DEFAULT_ADMIN_ROLE']}` | `{role_flags['MINTER_ROLE']}` | `{role_flags['BURNER_ROLE']}` |"
        )
    review = payload["eventLogReview"]
    lines += ["", "## Event-Log Role Reconstruction", "", f"- Checked: `{review['checked']}`", f"- Event count: `{review['eventCount']}`", "", "| Role | Effective members from events |", "|---|---|"]
    for role, members in review["effectiveMembersFromEvents"].items():
        membership = ", ".join(members) if members else "none observed"
        lines.append(f"| `{role}` | `{membership}` |")
    lines += ["", "## Limitations", ""]
    lines += [f"- {item}" for item in payload["limitations"]]
    path.write_text("\n".join(lines) + "\n")
def main() -> int:
    """CLI entry point: run the role audit and write JSON + Markdown reports."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--token", default=CWUSDC)
    parser.add_argument("--rpc-url", default="")
    parser.add_argument("--json-out", type=Path, default=REPORT_JSON)
    parser.add_argument("--md-out", type=Path, default=REPORT_MD)
    args = parser.parse_args()
    payload = build(args)
    args.json_out.parent.mkdir(parents=True, exist_ok=True)
    args.json_out.write_text(json.dumps(payload, indent=2) + "\n")
    write_md(payload, args.md_out)
    for report_path in (args.json_out, args.md_out):
        print(f"Wrote {report_path.relative_to(ROOT)}")
    return 0
if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,17 @@
#!/usr/bin/env bash
# On-chain readiness audit: EI matrix vs mainnet cWUSDC and/or Chain 138 cUSDC.
# Loads scripts/lib/load-project-env.sh for RPCs and token defaults.
#
# Usage:
#   ./scripts/verify/audit-ei-matrix-onchain-readiness.sh --mainnet-only --min-mainnet-raw 12000000
#   ./scripts/verify/audit-ei-matrix-onchain-readiness.sh --both --min-mainnet-raw 1 --min-138-raw 1 --workers 6 --report-by-class
# Optional leading "--" is stripped (for shells that pass it through).
# Exit 1 if any wallet is below configured minima (CI gate). Use min 0 to only report.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT"

# Populate RPC endpoints and token defaults into the environment.
# shellcheck disable=SC1091
source "$PROJECT_ROOT/scripts/lib/load-project-env.sh"

# Drop a leading "--" separator if the caller's shell passed one through.
if [[ "${1:-}" == "--" ]]; then
  shift
fi

# Hand off to the Python audit with all remaining arguments.
exec python3 "$PROJECT_ROOT/scripts/lib/ei_matrix_onchain_readiness_audit.py" "$@"

View File

@@ -0,0 +1,226 @@
#!/usr/bin/env python3
"""Build a repo-side CMC top-10 ecosystem coverage matrix."""
from __future__ import annotations
import json
from datetime import datetime, timezone
from pathlib import Path
from typing import Any
# Output locations: machine-readable snapshot plus a human-facing matrix doc.
ROOT = Path(__file__).resolve().parents[2]
OUT_JSON = ROOT / "reports/status/cmc-top10-ecosystem-coverage-latest.json"
OUT_MD = ROOT / "docs/04-configuration/CMC_TOP10_ECOSYSTEM_ACCESSIBILITY_MATRIX.md"
# Provenance for the hand-captured CMC snapshot embedded below.
SOURCE = {
    "name": "CoinMarketCap coins page",
    "url": "https://coinmarketcap.com/coins/",
    "observedAt": "2026-05-11",
    "note": "Ranks are volatile; rerun or update this snapshot before external outreach.",
}
# One entry per observed CMC top-10 token. These are hand-maintained planning
# rows, not fetched data; update the list when the snapshot is refreshed.
TOKENS: list[dict[str, Any]] = [
    {
        "rank": 1,
        "symbol": "BTC",
        "name": "Bitcoin",
        "accessibility": "non_evm_wrapped_lane_required",
        "dbisTouchpoint": "cWBTC / BTC reserve-or-wrapper evidence lane",
        "timeframe": "1-2 weeks repo evidence after wallet/venue binding",
        "repoDoableNext": [
            "Bind canonical BTC custody/address evidence fields.",
            "Add BTC venue and wrapped-asset evidence placeholders.",
            "Keep claims as provenance-only until custody and liquidity are independently evidenced.",
        ],
        "requiresExternalHuman": False,
    },
    {
        "rank": 2,
        "symbol": "ETH",
        "name": "Ethereum",
        "accessibility": "native_evm_core_surface",
        "dbisTouchpoint": "WETH/cWETH, Ethereum Mainnet cWUSDC, gas/quote evidence",
        "timeframe": "1-3 days repo hardening",
        "repoDoableNext": [
            "Refresh Ethereum pool and quote-side evidence.",
            "Add CMC/Dex/Gecko sanity checks for ETH-paired surfaces.",
        ],
        "requiresExternalHuman": False,
    },
    {
        "rank": 3,
        "symbol": "USDT",
        "name": "Tether USDt",
        "accessibility": "evm_quote_asset_and_wrapped_transport",
        "dbisTouchpoint": "cUSDT / cWUSDT",
        "timeframe": "1-3 days repo-side; provider acceptance external",
        "repoDoableNext": [
            "Refresh cUSDT/cWUSDT provider packet fields.",
            "Validate official USDT quote addresses per chain.",
        ],
        "requiresExternalHuman": False,
    },
    {
        "rank": 4,
        "symbol": "XRP",
        "name": "XRP",
        "accessibility": "xrpl_lane_required",
        "dbisTouchpoint": "XRPLAdapter / wXRP / MintBurnController",
        "timeframe": "1-2 weeks after wallet/trustline binding",
        "repoDoableNext": [
            "Bind XRPL account and destination tag policy placeholders.",
            "Document XRP reserve, trustline, and issuer requirements.",
        ],
        "requiresExternalHuman": False,
    },
    {
        "rank": 5,
        "symbol": "BNB",
        "name": "BNB",
        "accessibility": "evm_compatible_bsc_lane",
        "dbisTouchpoint": "BSC cW* routing and gas surface",
        "timeframe": "2-5 days repo-side",
        "repoDoableNext": [
            "Refresh BSC cW* pool and official quote evidence.",
            "Check BNB gas budget and CMC report values.",
        ],
        "requiresExternalHuman": False,
    },
    {
        "rank": 6,
        "symbol": "USDC",
        "name": "USD Coin",
        "accessibility": "primary_focus_ready_for_submission",
        "dbisTouchpoint": "cUSDC / cWUSDC",
        "timeframe": "submission-ready now; price/listing acceptance external",
        "repoDoableNext": [
            "Keep Etherscan/CoinGecko/CMC/DexScreener packets current.",
            "Maintain exact CAIP-19 discipline for Mainnet cWUSDC.",
        ],
        "requiresExternalHuman": False,
    },
    {
        "rank": 7,
        "symbol": "SOL",
        "name": "Solana",
        "accessibility": "solana_spl_lane_required",
        "dbisTouchpoint": "SolanaAdapter and config/solana-gru-bridge-lineup.json",
        "timeframe": "3-7 days repo-side if mints are bound",
        "repoDoableNext": [
            "Bind SPL mint placeholders and minimum rent/gas targets.",
            "Separate confirmed Chain 138 adapter evidence from native Solana liquidity claims.",
        ],
        "requiresExternalHuman": False,
    },
    {
        "rank": 8,
        "symbol": "TRX",
        "name": "TRON",
        "accessibility": "tron_wallet_and_energy_lane_required",
        "dbisTouchpoint": "TronAdapter and derived/canonical Tron wallet evidence",
        "timeframe": "3-7 days after address confirmation",
        "repoDoableNext": [
            "Bind canonical Tron address policy placeholder.",
            "Document TRX energy/bandwidth and TRC-20 inventory requirements.",
        ],
        "requiresExternalHuman": False,
    },
    {
        "rank": 9,
        "symbol": "DOGE",
        "name": "Dogecoin",
        "accessibility": "new_non_evm_adapter_or_custody_lane",
        "dbisTouchpoint": "future DOGE wrapper/custody evidence lane",
        "timeframe": "1-3 weeks for serious repo evidence",
        "repoDoableNext": [
            "Create DOGE custody and bridge evidence stub.",
            "Keep DOGE out of provider claims until wallet, reserve, and venue evidence exist.",
        ],
        "requiresExternalHuman": False,
    },
    {
        "rank": 10,
        "symbol": "HYPE",
        "name": "Hyperliquid",
        "accessibility": "new_chain_or_venue_research_required",
        "dbisTouchpoint": "future Hyperliquid venue/asset touchpoint",
        "timeframe": "1-3 weeks for discovery/evidence",
        "repoDoableNext": [
            "Open a research stub for chain/asset identifiers and supported custody paths.",
            "Do not include HYPE in liquidity or settlement claims until identifiers are bound.",
        ],
        "requiresExternalHuman": False,
    },
]
def table(headers: list[str], rows: list[list[Any]]) -> str:
    """Render a Markdown table.

    List cells are joined with ``<br>``; pipes are escaped and embedded
    newlines become ``<br>`` so each cell stays on one Markdown line.
    """
    def cell(value: Any) -> str:
        if isinstance(value, list):
            value = "<br>".join(str(item) for item in value)
        return str(value).replace("|", "\\|").replace("\n", "<br>")
    header_line = "| " + " | ".join(cell(item) for item in headers) + " |"
    divider_line = "| " + " | ".join(["---"] * len(headers)) + " |"
    body_lines = ["| " + " | ".join(cell(item) for item in row) + " |" for row in rows]
    return "\n".join([header_line, divider_line, *body_lines])
def main() -> int:
    """Assemble the coverage payload from TOKENS and write the JSON snapshot and Markdown matrix."""
    payload = {
        "schema": "cmc-top10-ecosystem-coverage/v1",
        "generatedAt": datetime.now(timezone.utc).isoformat(),
        "source": SOURCE,
        "summary": {
            "tokenCount": len(TOKENS),
            "repoDoableWithoutOutsideHumanCount": sum(1 for token in TOKENS if not token["requiresExternalHuman"]),
            "externalAcceptanceStillRequired": [
                "CMC/CoinGecko/DexScreener/Etherscan listing and price acceptance",
                "Any custody, bank, exchange, or provider-side manual review",
            ],
        },
        "tokens": TOKENS,
    }
    OUT_JSON.parent.mkdir(parents=True, exist_ok=True)
    OUT_JSON.write_text(json.dumps(payload, indent=2) + "\n")
    # Markdown mirror of the same payload, rendered through table().
    lines = [
        "# CMC Top 10 Ecosystem Accessibility Matrix",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Source: [{SOURCE['name']}]({SOURCE['url']})",
        f"- Observed: `{SOURCE['observedAt']}`",
        f"- Boundary: {SOURCE['note']}",
        "",
        table(
            ["Rank", "Token", "Accessibility", "DBIS touchpoint", "Repo-side timeframe", "Repo-doable next work"],
            [
                [
                    token["rank"],
                    f"{token['symbol']} ({token['name']})",
                    token["accessibility"],
                    token["dbisTouchpoint"],
                    token["timeframe"],
                    token["repoDoableNext"],
                ]
                for token in TOKENS
            ],
        ),
        "",
        "## Operating Rule",
        "",
        "This matrix is a repo-side planning artifact. It improves DBIS coverage discipline, but it does not imply that any external tracker, wallet, exchange, custodian, or market-data provider has accepted a token.",
    ]
    OUT_MD.parent.mkdir(parents=True, exist_ok=True)
    OUT_MD.write_text("\n".join(lines) + "\n")
    print(f"Wrote {OUT_JSON.relative_to(ROOT)}")
    print(f"Wrote {OUT_MD.relative_to(ROOT)}")
    return 0
if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,528 @@
#!/usr/bin/env python3
"""Build a submission-ready cWUSDC Etherscan Value dossier.
The dossier intentionally separates Ethereum Mainnet cWUSDC evidence from
global cUSDC/cWUSDC family context. It is read-only: it runs monitors and proof
generators, then summarizes what can be submitted and what remains externally
blocked.
"""
from __future__ import annotations
import argparse
import datetime as dt
import json
import os
import subprocess
import urllib.parse
import urllib.request
from pathlib import Path
from typing import Any
# Repository root and the dossier's own output artifacts.
ROOT = Path(__file__).resolve().parents[2]
REPORT_JSON = ROOT / "reports" / "status" / "cwusdc-etherscan-value-dossier-latest.json"
REPORT_MD = ROOT / "reports" / "status" / "cwusdc-etherscan-value-dossier-latest.md"
# Ethereum Mainnet cWUSDC contract address (the dossier's target token).
CWUSDC = "0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a"
# Etherscan V2 endpoints used for chainlist and API calls.
ETHERSCAN_CHAINLIST_URL = "https://api.etherscan.io/v2/chainlist"
ETHERSCAN_V2_API = "https://api.etherscan.io/v2/api"
# Address used for L2 deposit evidence when neither --l2-deposit-address nor
# DEPLOYER_ADDRESS is provided.
DEPLOYER_FALLBACK = "0x4A666F96fC8764181194447A7dFdb7d471b301C8"
# Chains probed with the account.getdeposittxs action.
L2_DEPOSIT_CHAINS = {
    "10": "OP Mainnet",
    "42161": "Arbitrum One Mainnet",
}
# Upstream monitor/proof artifacts aggregated into the dossier.
ARTIFACTS = {
    "mainnetSupply": ROOT / "reports" / "status" / "cwusdc-supply-circulating-attestation-latest.json",
    "globalFamilySupply": ROOT / "reports" / "status" / "global-cusdc-cwusdc-family-supply-proof-latest.json",
    "feedAudit": ROOT / "reports" / "status" / "cusdc-cwusdc-etherscan-feed-audit-latest.json",
    "propagation": ROOT / "reports" / "status" / "cwusdc-etherscan-value-propagation-latest.json",
}
# Documentation packet whose presence is reported in readyEvidence.
DOCS = {
    "executionPlan": ROOT / "docs" / "04-configuration" / "etherscan" / "CWUSDC_ETHERSCAN_VALUE_EXECUTION_PLAN.md",
    "bridgeLayerMap": ROOT / "docs" / "04-configuration" / "etherscan" / "CWUSDC_ETHERSCAN_BRIDGE_CROSSCHAIN_LAYER_MAP.md",
    "profilePacket": ROOT / "docs" / "04-configuration" / "etherscan" / "CWUSDC_MAINNET_ETHERSCAN_PROFILE_PACKET.md",
    "e2eRecommendations": ROOT / "docs" / "04-configuration" / "etherscan" / "CWUSDC_ETHERSCAN_E2E_RECOMMENDATIONS.md",
    "trackerPacket": ROOT / "docs" / "04-configuration" / "coingecko" / "CWUSDC_MAINNET_TRACKER_SUBMISSION_PACKET.md",
}
def load_dotenv(path: Path, env: dict[str, str]) -> dict[str, str]:
    """Merge KEY=VALUE pairs from a dotenv file into a copy of ``env``.

    Keys already present in ``env`` are never overwritten. Blank lines,
    ``#`` comments, and lines without ``=`` are skipped; surrounding single
    or double quotes are stripped from values. When ``path`` does not exist
    the original ``env`` mapping is returned unchanged.
    """
    if not path.exists():
        return env
    merged = dict(env)
    for raw_line in path.read_text().splitlines():
        stripped = raw_line.strip()
        if not stripped or stripped.startswith("#") or "=" not in stripped:
            continue
        name, _, raw_value = stripped.partition("=")
        name = name.strip()
        value = raw_value.strip().strip('"').strip("'")
        if name and name not in merged:
            merged[name] = value
    return merged
def run_command(command: list[str], env: dict[str, str]) -> dict[str, Any]:
    """Run ``command`` from the repo root and summarize the result as a dict.

    Captures stdout/stderr (stripped), the return code, and an ``ok`` flag;
    never raises on a non-zero exit.
    """
    completed = subprocess.run(
        command,
        cwd=ROOT,
        env=env,
        text=True,
        capture_output=True,
        check=False,
    )
    summary: dict[str, Any] = {
        "command": command,
        "returncode": completed.returncode,
        "stdout": completed.stdout.strip(),
        "stderr": completed.stderr.strip(),
    }
    summary["ok"] = completed.returncode == 0
    return summary
def read_json(path: Path) -> Any | None:
if not path.exists():
return None
return json.loads(path.read_text())
def fetch_json_url(url: str, timeout: int = 30) -> Any:
    """GET ``url`` with the dossier User-Agent and decode the JSON body."""
    headers = {"User-Agent": "dbis-cwusdc-etherscan-dossier/1.0"}
    request = urllib.request.Request(url, headers=headers)
    with urllib.request.urlopen(request, timeout=timeout) as response:
        body = response.read().decode("utf-8")
    return json.loads(body)
def load_etherscan_chainlist() -> dict[str, Any]:
    """Fetch the Etherscan V2 chainlist and index it by chain id.

    On any fetch/parse error, returns an ``available: False`` stub with the
    error message instead of raising, so the dossier still completes.
    """
    try:
        payload = fetch_json_url(ETHERSCAN_CHAINLIST_URL)
    except Exception as exc:  # noqa: BLE001 - dossier should capture diagnostics instead of crashing
        return {
            "url": ETHERSCAN_CHAINLIST_URL,
            "available": False,
            "error": str(exc),
            "totalcount": None,
            "supportedChainIds": [],
            "statusByChainId": {},
        }
    result = payload.get("result") if isinstance(payload, dict) else None
    chains = result if isinstance(result, list) else []
    # Index each chain entry by its stringified chain id; rows without a
    # chainid are dropped.
    status_by_chain_id = {
        str(item.get("chainid")): {
            "chainname": item.get("chainname"),
            "blockexplorer": item.get("blockexplorer"),
            "apiurl": item.get("apiurl"),
            "status": item.get("status"),
            "comment": item.get("comment"),
        }
        for item in chains
        if isinstance(item, dict) and item.get("chainid") is not None
    }
    return {
        "url": ETHERSCAN_CHAINLIST_URL,
        "available": True,
        "comments": payload.get("comments") if isinstance(payload, dict) else None,
        "totalcount": payload.get("totalcount") if isinstance(payload, dict) else len(chains),
        # NOTE(review): mixing numeric and non-numeric ids in the sort key
        # would compare int with str and raise; assumes ids are uniform.
        "supportedChainIds": sorted(status_by_chain_id, key=lambda value: int(value) if value.isdigit() else value),
        "statusByChainId": status_by_chain_id,
    }
def human_token_value(raw: Any, token_address: str | None) -> str | None:
try:
raw_int = int(str(raw))
except (TypeError, ValueError):
return None
decimals = 18 if token_address == "ETH" else 6 if token_address and token_address.lower() == CWUSDC.lower() else 18
whole = raw_int // (10**decimals)
frac = str(raw_int % (10**decimals)).rjust(decimals, "0").rstrip("0")
return f"{whole}" + (f".{frac}" if frac else "")
def human_wei(raw: Any) -> str | None:
try:
raw_int = int(str(raw))
except (TypeError, ValueError):
return None
whole = raw_int // (10**18)
frac = str(raw_int % (10**18)).rjust(18, "0").rstrip("0")
return f"{whole}" + (f".{frac}" if frac else "")
def normalize_deposit_row(row: dict[str, Any]) -> dict[str, Any]:
    """Normalize one Etherscan getdeposittxs row into stable camelCase keys.

    Adds human-readable renderings: ``valueEth`` via human_wei and
    ``tokenValueUnits`` via human_token_value (decimals inferred from
    ``tokenAddress``).
    """
    token_address = row.get("tokenAddress")
    return {
        "hash": row.get("hash"),
        "l1TransactionHash": row.get("L1transactionhash"),
        "timeStamp": row.get("timeStamp"),
        "from": row.get("from"),
        "to": row.get("to"),
        "valueRaw": row.get("value"),
        "valueEth": human_wei(row.get("value")),
        "tokenAddress": token_address,
        "tokenValueRaw": row.get("tokenValue"),
        "tokenValueUnits": human_token_value(row.get("tokenValue"), token_address),
        "txreceiptStatus": row.get("txreceipt_status"),
        "isError": row.get("isError"),
    }
def etherscan_v2_call(params: dict[str, str], api_key: str) -> dict[str, Any]:
    """Call the Etherscan V2 API and wrap the response in a uniform envelope.

    The returned ``url`` always has the API key replaced with ``REDACTED`` so
    it is safe to persist in reports. A result string containing
    "No transactions found" is treated as a successful empty list.
    """
    query = {**params, "apikey": api_key}
    url = f"{ETHERSCAN_V2_API}?{urllib.parse.urlencode(query)}"
    redacted_query = {**params, "apikey": "REDACTED"}
    redacted_url = f"{ETHERSCAN_V2_API}?{urllib.parse.urlencode(redacted_query)}"
    try:
        payload = fetch_json_url(url)
    except Exception as exc:  # noqa: BLE001 - capture diagnostics instead of crashing the dossier
        return {"url": redacted_url, "ok": False, "error": str(exc), "status": None, "message": None, "result": None}
    status = str(payload.get("status")) if isinstance(payload, dict) else None
    message = payload.get("message") if isinstance(payload, dict) else None
    result = payload.get("result") if isinstance(payload, dict) else None
    # Etherscan signals "no rows" as an error-shaped string; normalize it.
    no_rows = isinstance(result, str) and "No transactions found" in result
    return {
        "url": redacted_url,
        "ok": status == "1" or no_rows,
        "status": status,
        "message": message,
        "result": [] if no_rows else result,
        "error": None,
    }
def load_l2_deposit_evidence(api_key: str, chainlist: dict[str, Any], address: str) -> dict[str, Any]:
    """Probe Etherscan-indexed L2 deposit transactions for ``address``.

    Skips entirely (``checked: False``) without an API key, and skips any
    chain in L2_DEPOSIT_CHAINS that is absent from the V2 chainlist. For each
    probed chain, records the latest normalized deposit row (if any).
    """
    if not api_key:
        return {
            "checked": False,
            "reason": "ETHERSCAN_API_KEY is not set.",
            "address": address,
            "chains": {},
        }
    supported = set(chainlist.get("statusByChainId", {}))
    chains: dict[str, Any] = {}
    for chain_id, chain_name in L2_DEPOSIT_CHAINS.items():
        if chain_id not in supported:
            chains[chain_id] = {
                "chainName": chain_name,
                "checked": False,
                "reason": "chain is not present in Etherscan V2 chainlist",
            }
            continue
        # Latest 10 deposits, newest first.
        response = etherscan_v2_call(
            {
                "chainid": chain_id,
                "module": "account",
                "action": "getdeposittxs",
                "address": address,
                "page": "1",
                "offset": "10",
                "sort": "desc",
            },
            api_key,
        )
        result = response.get("result")
        rows = result if isinstance(result, list) else []
        chains[chain_id] = {
            "chainName": chain_name,
            "checked": True,
            "ok": response.get("ok"),
            "status": response.get("status"),
            "message": response.get("message"),
            "sampleCount": len(rows),
            "latest": normalize_deposit_row(rows[0]) if rows else None,
            "url": response.get("url"),
            "error": response.get("error"),
        }
    return {
        "checked": True,
        "address": address,
        "scope": "Etherscan-indexed L2 deposits by address. This is bridge provenance only and does not set Mainnet cWUSDC USD Value.",
        "rawUnitNote": "tokenValue is returned as raw token units. ETH uses 18 decimals; ERC-20 rows must be normalized with that token contract's decimals.",
        "chains": chains,
    }
def load_contract_source_verification(api_key: str, address: str) -> dict[str, Any]:
    """Check Etherscan source verification for ``address`` on chain 1.

    ``verified`` is True only when a contract name, source code, and a usable
    ABI are all present. Without an API key, returns an unchecked stub.
    """
    if not api_key:
        return {
            "checked": False,
            "reason": "ETHERSCAN_API_KEY is not set.",
            "address": address,
            "verified": False,
        }
    response = etherscan_v2_call(
        {
            "chainid": "1",
            "module": "contract",
            "action": "getsourcecode",
            "address": address,
        },
        api_key,
    )
    result = response.get("result")
    # getsourcecode returns a one-element list; fall back to {} defensively.
    entry = result[0] if isinstance(result, list) and result and isinstance(result[0], dict) else {}
    source_code = str(entry.get("SourceCode") or "")
    abi = str(entry.get("ABI") or "")
    contract_name = str(entry.get("ContractName") or "")
    return {
        "checked": True,
        "address": address,
        "ok": response.get("ok"),
        "status": response.get("status"),
        "message": response.get("message"),
        "verified": bool(source_code and contract_name and abi and abi != "Contract source code not verified"),
        "contractName": contract_name or None,
        "compilerVersion": entry.get("CompilerVersion") or None,
        "optimizationUsed": entry.get("OptimizationUsed") or None,
        "runs": entry.get("Runs") or None,
        "constructorArgumentsPresent": bool(entry.get("ConstructorArguments")),
        "evmVersion": entry.get("EVMVersion") or None,
        "licenseType": entry.get("LicenseType") or None,
        "proxy": entry.get("Proxy") or None,
        "implementation": entry.get("Implementation") or None,
        "sourceCodeBytes": len(source_code),
        "abiAvailable": bool(abi and abi != "Contract source code not verified"),
        "url": response.get("url"),
        "error": response.get("error"),
    }
def rel(path: Path) -> str:
    """Render ``path`` relative to the repository root as a string.

    Falls back to the path as given when it lies outside ROOT, instead of
    raising ValueError — the same tolerant behavior as the ``rel`` helper in
    the provider-handoff report builder.
    """
    try:
        return str(path.relative_to(ROOT))
    except ValueError:
        # Path is not under the repo root; report it verbatim.
        return str(path)
def build(args: argparse.Namespace) -> dict[str, Any]:
    """Assemble the full dossier payload.

    Optionally re-runs the upstream monitor/proof scripts (``args.refresh``),
    then aggregates their JSON artifacts with live Etherscan chainlist,
    contract-verification, and L2 deposit evidence into one dict.
    """
    env = load_dotenv(ROOT / ".env", dict(os.environ))
    etherscan_api_key = env.get("ETHERSCAN_API_KEY", "")
    # Precedence: CLI flag, then DEPLOYER_ADDRESS from env, then the fallback.
    l2_deposit_address = args.l2_deposit_address or env.get("DEPLOYER_ADDRESS") or DEPLOYER_FALLBACK
    commands: list[dict[str, Any]] = []
    if args.refresh:
        commands = [
            run_command(["python3", "scripts/verify/generate-cwusdc-supply-circulating-attestation.py"], env),
            run_command(["python3", "scripts/verify/generate-global-cusdc-cwusdc-family-supply-proof.py"], env),
            run_command(["python3", "scripts/verify/audit-cusdc-cwusdc-etherscan-feeds.py"], env),
            run_command(["python3", "scripts/verify/monitor-cwusdc-etherscan-value-propagation.py"], env),
            run_command(["bash", "scripts/verify/check-cwusdc-etherscan-prereq-urls.sh"], env),
        ]
    artifacts = {key: read_json(path) for key, path in ARTIFACTS.items()}
    propagation = artifacts["propagation"] or {}
    supply = artifacts["mainnetSupply"] or {}
    global_family = artifacts["globalFamilySupply"] or {}
    feed_audit = artifacts["feedAudit"] or {}
    chainlist = load_etherscan_chainlist()
    l2_deposits = load_l2_deposit_evidence(etherscan_api_key, chainlist, l2_deposit_address)
    contract_source = load_contract_source_verification(etherscan_api_key, CWUSDC)
    # NOTE(review): the sort key mixes int and str results; assumes the family
    # chain ids are uniformly digit strings.
    family_chain_ids = sorted(
        {str(item.get("chainId")) for item in global_family.get("entries", []) if isinstance(item, dict) and item.get("chainId") is not None},
        key=lambda value: int(value) if value.isdigit() else value,
    )
    supported_family_chain_ids = [chain_id for chain_id in family_chain_ids if chain_id in chainlist.get("statusByChainId", {})]
    unsupported_family_chain_ids = [chain_id for chain_id in family_chain_ids if chain_id not in chainlist.get("statusByChainId", {})]
    blockers = list(((propagation.get("summary") or {}).get("blockers") or []))
    command_failures = [item for item in commands if not item["ok"]]
    for item in command_failures:
        blockers.append("Command failed: " + " ".join(item["command"]))
    ready_evidence = {
        "mainnetSupplyAttestation": bool(supply.get("supply")),
        "globalFamilySupplyContext": bool(global_family.get("summary")),
        "chain138MainnetFeedAudit": bool(feed_audit.get("canonicalRelationship")),
        "mainnetContractSourceVerified": bool(contract_source.get("verified")),
        "propagationMonitor": bool(propagation.get("checks")),
        # None when no refresh ran; otherwise True iff the prereq-URL check passed.
        "publicPrereqUrls": not any(
            item["command"] == ["bash", "scripts/verify/check-cwusdc-etherscan-prereq-urls.sh"] and not item["ok"]
            for item in commands
        )
        if commands
        else None,
        "documentationPacket": {key: path.exists() for key, path in DOCS.items()},
    }
    next_actions = []
    if blockers:
        next_actions.extend(
            [
                "Submit/update Etherscan token profile for the exact Ethereum Mainnet cWUSDC contract.",
                "Submit/update CoinGecko and CoinMarketCap listings with Mainnet supply proof, DEX evidence, and bridge-family context.",
                "Use the global family supply proof only as context; use the Ethereum Mainnet cWUSDC attestation as the token-page supply basis.",
                "Re-run this dossier after each external approval or tracker response.",
            ]
        )
    else:
        next_actions.append("No blockers detected by local monitors; capture Etherscan screenshots and continue propagation monitoring.")
    return {
        "schema": "cwusdc-etherscan-value-dossier/v1",
        "generatedAt": dt.datetime.now(dt.UTC).isoformat().replace("+00:00", "Z"),
        "purpose": "Single submission and monitoring packet for making Etherscan show USD value for Ethereum Mainnet cWUSDC.",
        "target": {
            "network": "Ethereum Mainnet",
            "chainId": 1,
            "contract": CWUSDC,
            "caip19": f"eip155:1/erc20:{CWUSDC}",
            "name": "Wrapped cUSDC",
            "symbol": "cWUSDC",
            "decimals": 6,
        },
        "readiness": {
            "readyForExternalSubmission": ready_evidence["mainnetSupplyAttestation"]
            and ready_evidence["chain138MainnetFeedAudit"]
            and ready_evidence["mainnetContractSourceVerified"]
            and ready_evidence["propagationMonitor"],
            "etherscanValueReady": (propagation.get("summary") or {}).get("etherscanValueReady"),
            "coinGeckoPriceReady": (propagation.get("summary") or {}).get("coingeckoPriceReady"),
            "blockers": blockers,
        },
        "evidence": {
            "artifacts": {key: rel(path) for key, path in ARTIFACTS.items()},
            "docs": {key: rel(path) for key, path in DOCS.items()},
            "readyEvidence": ready_evidence,
            "mainnetContractSourceVerification": contract_source,
            "mainnetSupply": supply.get("supply"),
            "globalFamilyWarning": (global_family.get("caveats") or ["Global family supply is context only; do not use it as Ethereum Etherscan token-page supply."])[0],
            "globalFamilySummary": global_family.get("summary"),
            "feedRelationship": feed_audit.get("canonicalRelationship"),
            "etherscanChainlist": {
                **chainlist,
                "familyChainIds": family_chain_ids,
                "etherscanSupportedFamilyChainIds": supported_family_chain_ids,
                "notEtherscanSupportedFamilyChainIds": unsupported_family_chain_ids,
                "chain138SupportedByEtherscanV2": "138" in chainlist.get("statusByChainId", {}),
                "interpretation": "Only chains present in Etherscan V2 chainlist should be described as first-class Etherscan-family API evidence. Chain 138 remains provenance/context evidence unless Etherscan adds chainid 138.",
            },
            "l2DepositTransactions": l2_deposits,
        },
        "commands": commands,
        "nextActions": next_actions,
    }
def write_md(payload: dict[str, Any], path: Path) -> None:
    """Render the dossier payload as the Markdown report at ``path``."""
    readiness = payload["readiness"]
    evidence = payload["evidence"]
    target = payload["target"]
    lines = [
        "# cWUSDC Etherscan Value Dossier",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Target: `{target['contract']}`",
        f"- CAIP-19: `{target['caip19']}`",
        f"- Ready for external submission: `{readiness['readyForExternalSubmission']}`",
        f"- Etherscan value ready: `{readiness['etherscanValueReady']}`",
        f"- CoinGecko price ready: `{readiness['coinGeckoPriceReady']}`",
        "",
        "## Blockers",
        "",
    ]
    if readiness["blockers"]:
        lines.extend(f"- {item}" for item in readiness["blockers"])
    else:
        lines.append("- None detected by this dossier.")
    lines.extend(
        [
            "",
            "## Evidence Artifacts",
            "",
            "| Artifact | Path |",
            "|---|---|",
        ]
    )
    for key, value in evidence["artifacts"].items():
        lines.append(f"| `{key}` | `{value}` |")
    lines.extend(["", "## Documentation Packet", "", "| Document | Path |", "|---|---|"])
    for key, value in evidence["docs"].items():
        lines.append(f"| `{key}` | `{value}` |")
    lines.extend(
        [
            "",
            "## Mainnet Contract Verification",
            "",
            f"- Checked: `{(evidence['mainnetContractSourceVerification'] or {}).get('checked')}`",
            f"- Verified: `{(evidence['mainnetContractSourceVerification'] or {}).get('verified')}`",
            f"- Contract name: `{(evidence['mainnetContractSourceVerification'] or {}).get('contractName')}`",
            f"- Compiler: `{(evidence['mainnetContractSourceVerification'] or {}).get('compilerVersion')}`",
            f"- License: `{(evidence['mainnetContractSourceVerification'] or {}).get('licenseType')}`",
            f"- Proxy: `{(evidence['mainnetContractSourceVerification'] or {}).get('proxy')}`",
            "",
            "## Supply Boundary",
            "",
            f"- Ethereum Mainnet cWUSDC supply basis: `{(evidence['mainnetSupply'] or {}).get('totalSupplyUnits')}`",
            f"- Circulating supply basis: `{(evidence['mainnetSupply'] or {}).get('circulatingSupplyUnits')}`",
            f"- Global family warning: {evidence['globalFamilyWarning']}",
            "",
            "## Etherscan Chainlist Boundary",
            "",
            f"- Etherscan V2 chainlist total: `{(evidence['etherscanChainlist'] or {}).get('totalcount')}`",
            f"- Family chain IDs: `{', '.join((evidence['etherscanChainlist'] or {}).get('familyChainIds') or [])}`",
            f"- Etherscan-supported family chain IDs: `{', '.join((evidence['etherscanChainlist'] or {}).get('etherscanSupportedFamilyChainIds') or [])}`",
            f"- Not Etherscan-supported family chain IDs: `{', '.join((evidence['etherscanChainlist'] or {}).get('notEtherscanSupportedFamilyChainIds') or [])}`",
            f"- Chain 138 supported by Etherscan V2: `{(evidence['etherscanChainlist'] or {}).get('chain138SupportedByEtherscanV2')}`",
            "",
            "## L2 Deposit Transaction Boundary",
            "",
            f"- Address checked: `{(evidence['l2DepositTransactions'] or {}).get('address')}`",
            f"- Checked: `{(evidence['l2DepositTransactions'] or {}).get('checked')}`",
            "- Scope: Etherscan-indexed OP/Arbitrum deposit provenance only; it does not set Mainnet cWUSDC USD Value.",
            "- Unit note: `value` is raw wei; `tokenValue` is raw token units. `1195403000000000` in `value` is `0.001195403 ETH`; `598200000000000` with `tokenAddress=ETH` is `0.0005982 ETH`.",
            "",
            "| Chain | Checked | Sample deposits | Latest tx | Native value | Token value |",
            "|---|---:|---:|---|---:|---:|",
        ]
    )
    # One table row per probed chain; columns go blank-ish (None) when a
    # chain was skipped or returned no rows.
    for chain_id, item in (evidence["l2DepositTransactions"].get("chains") or {}).items():
        latest = item.get("latest") or {}
        lines.append(
            f"| `{chain_id}` {item.get('chainName')} | `{item.get('checked')}` | `{item.get('sampleCount', 0)}` | `{latest.get('hash')}` | `{latest.get('valueEth')}` ETH | `{latest.get('tokenValueUnits')}` {latest.get('tokenAddress') or ''} |"
        )
    lines.extend(
        [
            "",
            "## Next Actions",
            "",
        ]
    )
    lines.extend(f"- {item}" for item in payload["nextActions"])
    if payload["commands"]:
        lines.extend(["", "## Command Results", "", "| Command | Exit |", "|---|---:|"])
        for item in payload["commands"]:
            lines.append(f"| `{' '.join(item['command'])}` | `{item['returncode']}` |")
    path.write_text("\n".join(lines) + "\n")
def main() -> int:
    """CLI entry point: build the dossier, write JSON/MD, and report blockers.

    Returns 1 only when --strict is set and blockers were detected.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--no-refresh", action="store_true", help="Only aggregate existing reports; do not rerun checks.")
    parser.add_argument("--json-out", type=Path, default=REPORT_JSON)
    parser.add_argument("--md-out", type=Path, default=REPORT_MD)
    parser.add_argument("--l2-deposit-address", default="", help="Address to check with Etherscan getdeposittxs.")
    parser.add_argument("--strict", action="store_true")
    args = parser.parse_args()
    # build() reads args.refresh; derive it from the inverse flag.
    args.refresh = not args.no_refresh
    payload = build(args)
    args.json_out.parent.mkdir(parents=True, exist_ok=True)
    args.json_out.write_text(json.dumps(payload, indent=2) + "\n")
    write_md(payload, args.md_out)
    print(f"Wrote {args.json_out.relative_to(ROOT)}")
    print(f"Wrote {args.md_out.relative_to(ROOT)}")
    print(f"readyForExternalSubmission={payload['readiness']['readyForExternalSubmission']}")
    if payload["readiness"]["blockers"]:
        print("Blockers: " + "; ".join(payload["readiness"]["blockers"]))
    if args.strict and payload["readiness"]["blockers"]:
        return 1
    return 0
if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,86 @@
#!/usr/bin/env bash
# Package the cWUSDC institutional evidence bundle: tars the required docs and
# reports (plus any optional extras that exist) and writes a sha256 checksum.
# Fails with exit 1 if any required file is missing.
set -euo pipefail
PROJECT_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$PROJECT_ROOT"
# Date tag is overridable via CWUSDC_EVIDENCE_BUNDLE_DATE (UTC YYYYMMDD default).
DATE_TAG="${CWUSDC_EVIDENCE_BUNDLE_DATE:-$(date -u +%Y%m%d)}"
OUT_DIR="reports/status"
BUNDLE="${OUT_DIR}/cwusdc-institutional-evidence-bundle-${DATE_TAG}.tar.gz"
CHECKSUM="${OUT_DIR}/cwusdc-institutional-evidence-bundle-${DATE_TAG}.sha256"
# Required evidence files: the bundle aborts if any of these is absent.
FILES=(
"docs/04-configuration/etherscan/CWUSDC_EVIDENCE_BUNDLE_INDEX.md"
"docs/04-configuration/etherscan/CWUSDC_SUPPLY_AND_CIRCULATING_METHODOLOGY.md"
"docs/04-configuration/etherscan/CWUSDC_SECURITY_AND_AUDIT_DISCLOSURE.md"
"docs/04-configuration/etherscan/CWUSDC_PROVIDER_RESPONSE_TRACKER.md"
"docs/04-configuration/etherscan/CWUSDC_LIQUIDITY_READINESS_NO_BROADCAST_PLAN.md"
"docs/04-configuration/CWUSDC_PROVIDER_SUBMISSION_PACKET.md"
"docs/04-configuration/etherscan/CWUSDC_MAINNET_ETHERSCAN_PROFILE_PACKET.md"
"docs/04-configuration/etherscan/CWUSDC_ETHERSCAN_E2E_RECOMMENDATIONS.md"
"docs/04-configuration/etherscan/CWUSDC_ETHERSCAN_VALUE_EXECUTION_PLAN.md"
"docs/04-configuration/etherscan/CWUSDC_ETHERSCAN_BRIDGE_CROSSCHAIN_LAYER_MAP.md"
"docs/04-configuration/coingecko/CWUSDC_MAINNET_TRACKER_SUBMISSION_PACKET.md"
"docs/04-configuration/dexscreener/CWUSDC_DEXSCREENER_INDEXING_AND_PROFILE_PACKET_20260509.md"
"docs/04-configuration/metamask/METAMASK_ASSET_PRICE_PROVIDER_SUBMISSION_MATRIX.md"
"reports/status/cwusdc-etherscan-value-dossier-latest.md"
"reports/status/cwusdc-etherscan-value-dossier-latest.json"
"reports/status/cwusdc-supply-circulating-attestation-latest.md"
"reports/status/cwusdc-supply-circulating-attestation-latest.json"
"reports/status/global-cusdc-cwusdc-family-supply-proof-latest.md"
"reports/status/global-cusdc-cwusdc-family-supply-proof-latest.json"
"reports/status/cusdc-cwusdc-etherscan-feed-audit-latest.md"
"reports/status/cusdc-cwusdc-etherscan-feed-audit-latest.json"
"reports/status/cwusdc-mainnet-role-audit-latest.md"
"reports/status/cwusdc-mainnet-role-audit-latest.json"
"reports/status/cwusdc-role-deployment-appendix-latest.md"
"reports/status/cwusdc-role-deployment-appendix-latest.json"
"reports/status/cwusdc-institutional-doc-link-check-latest.md"
"reports/status/cwusdc-institutional-doc-link-check-latest.json"
"reports/status/cwusdc-provider-submission-prefill-latest.md"
"reports/status/cwusdc-provider-submission-prefill-latest.json"
"reports/status/cwusdc-provider-handoff-latest.md"
"reports/status/cwusdc-provider-handoff-latest.json"
"reports/status/cwusdc-external-trackers-live-latest.md"
"reports/status/cwusdc-external-trackers-live-latest.json"
"reports/status/cwusdc-institutional-readiness-review-20260511.md"
"reports/status/cwusdc-institutional-hardening-completion-20260511.md"
)
# Optional extras: included when present, never cause a failure.
OPTIONAL_FILES=(
"reports/status/cwusdc-provider-monitoring-snapshot-latest.md"
"reports/status/cwusdc-provider-monitoring-snapshot-latest.json"
"reports/status/screenshots/cwusdc-etherscan-token-page.png"
"reports/status/screenshots/cwusdc-dbis-token-directory.png"
"reports/status/screenshots/cwusdc-logo-url.png"
"reports/status/screenshots/cwusdc-geckoterminal-univ3-pool.png"
)
missing=()
present=()
for file in "${FILES[@]}"; do
if [[ -f "$file" ]]; then
present+=("$file")
else
missing+=("$file")
fi
done
for file in "${OPTIONAL_FILES[@]}"; do
if [[ -f "$file" ]]; then
present+=("$file")
fi
done
# Hard gate: every required file must exist before we package anything.
if ((${#missing[@]} > 0)); then
printf 'Missing required evidence files:\n' >&2
printf ' - %s\n' "${missing[@]}" >&2
exit 1
fi
mkdir -p "$OUT_DIR"
tar -czf "$BUNDLE" "${present[@]}"
sha256sum "$BUNDLE" > "$CHECKSUM"
printf 'Wrote %s\n' "$BUNDLE"
printf 'Wrote %s\n' "$CHECKSUM"
cat "$CHECKSUM"

View File

@@ -0,0 +1,266 @@
#!/usr/bin/env python3
"""Build a concise cWUSDC provider handoff report from latest probe JSON."""
from __future__ import annotations
import argparse
import json
from datetime import datetime, timezone
from pathlib import Path
from typing import Any
# Repository root and default input/output artifact paths for the handoff report.
ROOT = Path(__file__).resolve().parents[2]
# Inputs: latest probe/planner JSON artifacts produced by other scripts.
DEFAULT_PREREQ_JSON = ROOT / "reports/status/cwusdc-etherscan-prereq-urls-latest.json"
DEFAULT_TRACKERS_JSON = ROOT / "reports/status/cwusdc-external-trackers-live-latest.json"
DEFAULT_LIQUIDITY_JSON = ROOT / "reports/status/token-aggregation-liquidity-gap-funding-plan-latest.json"
DEFAULT_CMC_SANITY_JSON = ROOT / "reports/status/cmc-provider-report-sanity-latest.json"
# Outputs: the handoff report in Markdown and JSON form.
DEFAULT_MD = ROOT / "reports/status/cwusdc-provider-handoff-latest.md"
DEFAULT_JSON = ROOT / "reports/status/cwusdc-provider-handoff-latest.json"
def read_json(path: Path) -> Any | None:
if not path.exists():
return None
return json.loads(path.read_text())
def rel(path: Path) -> str:
    """Render ``path`` relative to the repo root when possible, else as given."""
    if path.is_relative_to(ROOT):
        return str(path.relative_to(ROOT))
    return str(path)
def first(obj: dict[str, Any] | None, path: list[str], default: Any = None) -> Any:
cur: Any = obj
for part in path:
if not isinstance(cur, dict):
return default
cur = cur.get(part)
return cur if cur is not None else default
def table(headers: list[str], rows: list[list[Any]]) -> str:
    """Render a Markdown table; dict/list cells are serialized as sorted JSON,
    pipes are escaped, and newlines become ``<br>``."""
    def cell(value: Any) -> str:
        if isinstance(value, (dict, list)):
            value = json.dumps(value, sort_keys=True)
        rendered = str(value)
        return rendered.replace("|", "\\|").replace("\n", "<br>")
    header_line = "| " + " | ".join(cell(item) for item in headers) + " |"
    divider_line = "| " + " | ".join(["---"] * len(headers)) + " |"
    body_lines = ["| " + " | ".join(cell(item) for item in row) + " |" for row in rows]
    return "\n".join([header_line, divider_line, *body_lines])
def build_payload(
    prereq: Any,
    trackers: Any,
    liquidity: Any,
    cmc_sanity: Any,
    prereq_path: Path,
    trackers_path: Path,
    liquidity_path: Path,
    cmc_sanity_path: Path,
) -> dict[str, Any]:
    """Assemble the handoff payload from the four probe artifacts.

    Any probe argument may be None (its JSON file was missing); that case is
    recorded as a repo-controlled blocker rather than raising. The *_path
    arguments are only echoed back (relative to the repo root) for provenance.
    """
    tracker_summary = first(trackers, ["summary"], {})
    failed_required = tracker_summary.get("failedRequiredIds") if isinstance(tracker_summary, dict) else []
    blockers = []
    # Missing-input blockers: each absent artifact gets its own remediation hint.
    if prereq is None:
        blockers.append({
            "id": "missing_prereq_url_evidence",
            "type": "repo_controlled",
            "status": "blocked",
            "nextAction": "Run check-cwusdc-etherscan-prereq-urls.sh with JSON output.",
        })
    if trackers is None:
        blockers.append({
            "id": "missing_external_tracker_evidence",
            "type": "repo_controlled",
            "status": "blocked",
            "nextAction": "Run check-cwusdc-external-trackers-live.sh with JSON output.",
        })
    if liquidity is None:
        blockers.append({
            "id": "missing_liquidity_planner_evidence",
            "type": "repo_controlled",
            "status": "blocked",
            "nextAction": "Run plan-token-aggregation-liquidity-gap-funding.mjs.",
        })
    if cmc_sanity is None:
        blockers.append({
            "id": "missing_cmc_report_sanity_evidence",
            "type": "repo_controlled",
            "status": "blocked",
            "nextAction": "Run check-cmc-provider-report-sanity.py.",
        })
    # Content blockers derived from artifacts that are present.
    if prereq is not None and not first(prereq, ["summary", "allPassed"], False):
        blockers.append({
            "id": "repo_public_urls",
            "type": "repo_controlled",
            "status": "blocked",
            "nextAction": "Fix failing d-bis.org prerequisite URLs before external profile submission.",
        })
    # One blocker per failed required external tracker surface.
    for failed in failed_required or []:
        blockers.append({
            "id": failed,
            "type": "external_provider",
            "status": "blocked",
            "nextAction": "Submit/update provider packet or wait for provider indexing, then rerun tracker probe.",
        })
    if first(liquidity, ["summary", "nonEvmFundingRequirementRows"], 0):
        blockers.append({
            "id": "non_evm_funding_requirements",
            "type": "operator_bound",
            "status": "open",
            "nextAction": "Bind non-EVM wallets, asset IDs, and minimum funding targets before making non-EVM liquidity claims.",
        })
    if first(cmc_sanity, ["summary", "warningCount"], 0):
        blockers.append({
            "id": "cmc_report_sanity_warnings",
            "type": "repo_advisory",
            "status": "open",
            "nextAction": "Review CMC-shaped report warnings before using CMC fields as listing-quality liquidity or quote-asset evidence.",
        })
    repo_ready = bool(first(prereq, ["summary", "allPassed"], False))
    ready_for_etherscan_value = bool(first(trackers, ["summary", "readyForEtherscanUsdValue"], False))
    return {
        "schema": "cwusdc-provider-handoff/v1",
        "generatedAt": datetime.now(timezone.utc).isoformat(),
        "inputs": {
            "prereq": rel(prereq_path),
            "trackers": rel(trackers_path),
            "liquidity": rel(liquidity_path),
            "cmcSanity": rel(cmc_sanity_path),
        },
        "summary": {
            "repoControlledPrereqsPassed": repo_ready,
            "externalTrackersAllLive": bool(first(trackers, ["summary", "allTrackersLive"], False)),
            "readyForEtherscanUsdValue": ready_for_etherscan_value,
            # Counts stay None (not 0) when the tracker artifact was missing.
            "externalRequiredPassed": first(trackers, ["summary", "requiredPassedCount"], None),
            "externalRequiredCount": first(trackers, ["summary", "requiredCount"], None),
            "liquidityRows": first(liquidity, ["summary", "rows"], None),
            "nonEvmFundingRequirementRows": first(liquidity, ["summary", "nonEvmFundingRequirementRows"], None),
            "cmcSanityWarningCount": first(cmc_sanity, ["summary", "warningCount"], None),
            "cmcPromotedTokenCount": first(cmc_sanity, ["summary", "promotedTokenCount"], None),
            "blockerCount": len(blockers),
        },
        "blockers": blockers,
    }
def write_markdown(payload: dict[str, Any], prereq: Any, trackers: Any, liquidity: Any, cmc_sanity: Any, path: Path) -> None:
    """Render the handoff payload plus raw probe details as a markdown report at *path*.

    Any of the probe inputs may be None; each section falls back to a
    placeholder sentence when its source JSON is missing.
    """
    prereq_checks = first(prereq, ["checks"], []) or []
    tracker_checks = first(trackers, ["checks"], []) or []
    liquidity_summary = first(liquidity, ["summary"], {}) or {}
    cmc_summary = first(cmc_sanity, ["summary"], {}) or {}
    cmc_warnings = first(cmc_sanity, ["warnings"], []) or []
    lines = [
        "# cWUSDC Provider Handoff Report",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Repo-controlled prerequisites passed: `{payload['summary']['repoControlledPrereqsPassed']}`",
        f"- External trackers all live: `{payload['summary']['externalTrackersAllLive']}`",
        f"- Ready for Etherscan USD Value path: `{payload['summary']['readyForEtherscanUsdValue']}`",
        "",
        "## Inputs",
        "",
        table(["Input", "Path"], [[key, value] for key, value in payload["inputs"].items()]),
        "",
        "## Repo-Controlled URL Prerequisites",
        "",
        # One row per probed URL; '-' marks fields absent from older probe JSON.
        table(
            ["URL", "Passed", "HTTP", "Attempts", "curl status"],
            [[
                c.get("url"),
                f"`{c.get('passed')}`",
                f"`{c.get('status')}`",
                f"`{c.get('attempts', '-')}`",
                f"`{c.get('curlStatus', '-')}`",
            ] for c in prereq_checks],
        ) if prereq_checks else "No prerequisite URL JSON found.",
        "",
        "## External Tracker State",
        "",
        table(
            ["Surface", "Passed", "HTTP", "Details"],
            [[
                c.get("id"),
                f"`{c.get('passed')}`",
                f"`{c.get('status')}`",
                # Prefer joined detail strings, then the error, then a dash.
                "; ".join(c.get("details") or []) or c.get("error") or "-",
            ] for c in tracker_checks],
        ) if tracker_checks else "No external tracker JSON found.",
        "",
        "## Liquidity Planner Summary",
        "",
        table(["Metric", "Value"], [[key, value] for key, value in liquidity_summary.items()]),
        "",
        "## CMC Report Sanity",
        "",
        table(["Metric", "Value"], [[key, value] for key, value in cmc_summary.items()]) if cmc_summary else "No CMC sanity JSON found.",
        "",
        table(
            ["ID", "Symbol", "Severity", "Message"],
            [[w.get("id"), w.get("symbol", "-"), w.get("severity"), w.get("message")] for w in cmc_warnings],
        ) if cmc_warnings else "No CMC sanity warnings.",
        "",
        "## Blockers",
        "",
        table(
            ["ID", "Type", "Status", "Next action"],
            [[b["id"], b["type"], b["status"], b["nextAction"]] for b in payload["blockers"]],
        ) if payload["blockers"] else "No current blockers detected.",
        "",
        "## Submission Boundary",
        "",
        "This report is generated from public/read-only repo checks. It does not submit forms, approve tokens, add liquidity, swap, bridge, or broadcast transactions.",
    ]
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text("\n".join(lines) + "\n")
def main() -> int:
    """CLI entry: read the probe JSONs, write the handoff JSON + markdown reports.

    Returns 0 unconditionally; missing inputs surface as blockers in the
    payload rather than as a non-zero exit code.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--prereq-json", type=Path, default=DEFAULT_PREREQ_JSON)
    parser.add_argument("--trackers-json", type=Path, default=DEFAULT_TRACKERS_JSON)
    parser.add_argument("--liquidity-json", type=Path, default=DEFAULT_LIQUIDITY_JSON)
    parser.add_argument("--cmc-sanity-json", type=Path, default=DEFAULT_CMC_SANITY_JSON)
    parser.add_argument("--json-out", type=Path, default=DEFAULT_JSON)
    parser.add_argument("--md-out", type=Path, default=DEFAULT_MD)
    args = parser.parse_args()
    # Each input may be absent; read_json returns None in that case.
    prereq = read_json(args.prereq_json)
    trackers = read_json(args.trackers_json)
    liquidity = read_json(args.liquidity_json)
    cmc_sanity = read_json(args.cmc_sanity_json)
    payload = build_payload(
        prereq,
        trackers,
        liquidity,
        cmc_sanity,
        args.prereq_json,
        args.trackers_json,
        args.liquidity_json,
        args.cmc_sanity_json,
    )
    args.json_out.parent.mkdir(parents=True, exist_ok=True)
    args.json_out.write_text(json.dumps(payload, indent=2) + "\n")
    write_markdown(payload, prereq, trackers, liquidity, cmc_sanity, args.md_out)
    print(f"Wrote {rel(args.json_out)}")
    print(f"Wrote {rel(args.md_out)}")
    return 0
if __name__ == "__main__":
raise SystemExit(main())

View File

@@ -0,0 +1,207 @@
#!/usr/bin/env python3
"""Generate prefilled provider submission packets and screenshot checklist for cWUSDC."""
from __future__ import annotations
import datetime as dt
import json
from pathlib import Path
from typing import Any
ROOT = Path(__file__).resolve().parents[2]
REPORT_JSON = ROOT / "reports" / "status" / "cwusdc-provider-submission-prefill-latest.json"
REPORT_MD = ROOT / "reports" / "status" / "cwusdc-provider-submission-prefill-latest.md"
ASSET = {
"network": "Ethereum Mainnet",
"chainId": 1,
"caip19": "eip155:1/erc20:0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a",
"contract": "0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a",
"name": "Wrapped cUSDC",
"symbol": "cWUSDC",
"decimals": 6,
"website": "https://d-bis.org/",
"tokenDirectory": "https://d-bis.org/gru/tokens",
"logo": "https://d-bis.org/tokens/cwusdc.svg",
"contactEmail": "submissions@d-bis.org",
"supportEmail": "support@d-bis.org",
"securityUrl": "https://d-bis.org/security",
"nonAffiliation": "cWUSDC is not Circle-issued USDC and should not be represented as an official Circle asset.",
}
DESCRIPTION = (
"cWUSDC is the Ethereum Mainnet compliant wrapped transport representation of Chain 138 cUSDC "
"in the DBIS GRU asset family. It is used for public-network mirrored settlement, proof, and "
"interoperability workflows. cWUSDC is a DBIS/GRU transport asset and is not Circle-issued USDC."
)
PROVIDERS = {
"etherscan": {
"objective": "Token profile/logo/value evidence submission",
"fields": {
"contract": ASSET["contract"],
"website": ASSET["website"],
"email": ASSET["contactEmail"],
"logo": ASSET["logo"],
"description": DESCRIPTION,
},
"attachment": "docs/04-configuration/etherscan/CWUSDC_MAINNET_ETHERSCAN_PROFILE_PACKET.md",
},
"coingecko": {
"objective": "Token listing/update with supply and liquidity caveats",
"fields": {
"chain": ASSET["network"],
"contract": ASSET["contract"],
"symbol": ASSET["symbol"],
"website": ASSET["website"],
"logo": ASSET["logo"],
"description": DESCRIPTION,
},
"attachment": "docs/04-configuration/coingecko/CWUSDC_MAINNET_TRACKER_SUBMISSION_PACKET.md",
},
"coinmarketcap": {
"objective": "Token listing/update with DEX discoverability and supply proof",
"fields": {
"chain": ASSET["network"],
"contract": ASSET["contract"],
"symbol": ASSET["symbol"],
"website": ASSET["website"],
"logo": ASSET["logo"],
"description": DESCRIPTION,
},
"attachment": "docs/04-configuration/coingecko/CWUSDC_MAINNET_TRACKER_SUBMISSION_PACKET.md",
},
"dexscreener": {
"objective": "Token profile/indexing support packet",
"fields": {
"chain": "ethereum",
"tokenAddress": ASSET["contract"],
"website": ASSET["website"],
"logo": ASSET["logo"],
"description": DESCRIPTION,
},
"attachment": "docs/04-configuration/dexscreener/CWUSDC_DEXSCREENER_INDEXING_AND_PROFILE_PACKET_20260509.md",
},
"metamask": {
"objective": "Wallet metadata/price-provider support evidence",
"fields": {
"assetId": ASSET["caip19"],
"chainId": ASSET["chainId"],
"address": ASSET["contract"],
"symbol": ASSET["symbol"],
"logoURI": ASSET["logo"],
"description": DESCRIPTION,
},
"attachment": "docs/04-configuration/metamask/METAMASK_ASSET_PRICE_PROVIDER_SUBMISSION_MATRIX.md",
},
}
SCREENSHOTS = [
{
"id": "etherscan-token-page",
"url": f"https://etherscan.io/token/{ASSET['contract']}",
"target": "reports/status/screenshots/cwusdc-etherscan-token-page.png",
"reason": "Shows verified Mainnet token page and current value/market-cap state.",
},
{
"id": "dbis-token-directory",
"url": ASSET["tokenDirectory"],
"target": "reports/status/screenshots/cwusdc-dbis-token-directory.png",
"reason": "Shows official website token context.",
},
{
"id": "dbis-logo-url",
"url": ASSET["logo"],
"target": "reports/status/screenshots/cwusdc-logo-url.png",
"reason": "Shows hosted token logo asset.",
},
{
"id": "geckoterminal-univ3-pool",
"url": "https://www.geckoterminal.com/eth/pools/0x1cf2e685682c7f7bef508f0af15dfb5cdda01ee3",
"target": "reports/status/screenshots/cwusdc-geckoterminal-univ3-pool.png",
"reason": "Shows indexed public cWUSDC/USDC pool evidence.",
},
]
def load_optional(path: str) -> Any:
    """Read a repo-relative file; JSON files are parsed, others returned as text.

    Returns None when the file does not exist.
    """
    candidate = ROOT / path
    if not candidate.exists():
        return None
    text = candidate.read_text()
    return json.loads(text) if candidate.suffix == ".json" else text
def build() -> dict[str, Any]:
    """Assemble the full prefill payload, marking which screenshots exist on disk."""
    checklist = []
    for raw in SCREENSHOTS:
        entry = dict(raw)
        target_path = ROOT / entry["target"]
        captured = target_path.exists()
        entry["captured"] = captured
        entry["sizeBytes"] = target_path.stat().st_size if captured else 0
        checklist.append(entry)
    return {
        "schema": "cwusdc-provider-submission-prefill/v1",
        "generatedAt": dt.datetime.now(dt.UTC).isoformat().replace("+00:00", "Z"),
        "asset": ASSET,
        "description": DESCRIPTION,
        "providers": PROVIDERS,
        "screenshotChecklist": checklist,
        "currentReadiness": {
            "dossier": load_optional("reports/status/cwusdc-etherscan-value-dossier-latest.json"),
            "providerCi": load_optional("reports/status/cwusdc-provider-readiness-ci-latest.json"),
            "supply": load_optional("reports/status/cwusdc-supply-circulating-attestation-latest.json"),
        },
        "submissionBoundary": [
            "This packet is prefilled evidence only; it does not prove provider acceptance.",
            "Screenshots should be captured immediately before submission and after any provider response.",
            "Do not remove the non-Circle disclosure from provider forms.",
        ],
    }
def write_md(payload: dict[str, Any], path: Path) -> None:
    """Render the prefill payload as a markdown submission packet at *path*."""
    lines = [
        "# cWUSDC Provider Submission Prefill",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Contract: `{ASSET['contract']}`",
        f"- CAIP-19: `{ASSET['caip19']}`",
        "",
        "## Description",
        "",
        DESCRIPTION,
        "",
        "## Provider Prefill",
        "",
    ]
    # One section per provider: objective, attachment path, and a field table.
    for provider, data in payload["providers"].items():
        lines.extend([f"### {provider}", "", f"- Objective: {data['objective']}", f"- Attachment: `{data['attachment']}`", "", "| Field | Value |", "|---|---|"])
        for key, value in data["fields"].items():
            lines.append(f"| `{key}` | `{value}` |")
        lines.append("")
    lines.extend(["## Screenshot Checklist", "", "| ID | URL | Target | Captured | Reason |", "|---|---|---|---:|---|"])
    for item in payload["screenshotChecklist"]:
        lines.append(f"| `{item['id']}` | {item['url']} | `{item['target']}` | `{item['captured']}` | {item['reason']} |")
    # Ready-to-run Playwright commands matching the checklist targets.
    lines.extend(["", "## Screenshot Capture Commands", "", "```bash"])
    for item in payload["screenshotChecklist"]:
        lines.append(f"pnpm exec playwright screenshot --timeout=60000 {item['url']} {item['target']}")
    lines.extend(["```", "", "## Boundaries", ""])
    lines.extend(f"- {item}" for item in payload["submissionBoundary"])
    path.write_text("\n".join(lines) + "\n")
def main() -> int:
    """Write the prefill JSON and markdown reports; return the exit code."""
    payload = build()
    REPORT_JSON.parent.mkdir(parents=True, exist_ok=True)
    REPORT_JSON.write_text(json.dumps(payload, indent=2) + "\n")
    write_md(payload, REPORT_MD)
    for written in (REPORT_JSON, REPORT_MD):
        print(f"Wrote {written.relative_to(ROOT)}")
    return 0
if __name__ == "__main__":
raise SystemExit(main())

View File

@@ -0,0 +1,160 @@
#!/usr/bin/env python3
"""Build a formal role-event/deployment-record appendix for Mainnet cWUSDC."""
from __future__ import annotations
import datetime as dt
import json
import re
from pathlib import Path
from typing import Any
ROOT = Path(__file__).resolve().parents[2]
ROLE_AUDIT = ROOT / "reports" / "status" / "cwusdc-mainnet-role-audit-latest.json"
REPORT_JSON = ROOT / "reports" / "status" / "cwusdc-role-deployment-appendix-latest.json"
REPORT_MD = ROOT / "reports" / "status" / "cwusdc-role-deployment-appendix-latest.md"
SEARCH_ROOTS = [
ROOT / "docs",
ROOT / "reports" / "status",
ROOT / "config",
ROOT / "scripts",
]
def load_role_audit() -> dict[str, Any]:
    """Parse the latest role-audit JSON; abort with a clear message when absent."""
    if ROLE_AUDIT.exists():
        return json.loads(ROLE_AUDIT.read_text())
    raise SystemExit(f"Missing role audit: {ROLE_AUDIT.relative_to(ROOT)}")
def read_text(path: Path) -> str:
    """Best-effort file read: decoding errors ignored, any failure yields ''."""
    try:
        content = path.read_text(errors="ignore")
    except Exception:
        # Unreadable/missing files are silently treated as empty (scan skips them).
        content = ""
    return content
def candidate_files() -> list[Path]:
    """Collect text-like files (by suffix) under the configured search roots."""
    wanted = {".md", ".json", ".jsonl", ".sh", ".py", ".env", ".txt"}
    found: list[Path] = []
    for base in SEARCH_ROOTS:
        if not base.exists():
            continue
        found.extend(
            entry
            for entry in base.rglob("*")
            if entry.is_file() and entry.suffix.lower() in wanted
        )
    return found
def find_mentions(needles: list[str]) -> list[dict[str, Any]]:
    """Scan candidate files for any of *needles* (case-insensitive).

    Returns one finding per matching file, sorted by path, each with the
    matched needles and up to 8 sample lines (truncated to 240 chars).

    Skips this script's own generated appendix outputs (both the JSON and the
    markdown report) so a rerun does not report self-referential matches that
    a previous run wrote into reports/status.
    """
    lowered = [(needle, needle.lower()) for needle in needles if needle]
    # Both generated outputs contain the needles after the first run.
    self_outputs = {
        "cwusdc-role-deployment-appendix-latest.json",
        "cwusdc-role-deployment-appendix-latest.md",
    }
    findings: list[dict[str, Any]] = []
    for path in candidate_files():
        rel = str(path.relative_to(ROOT))
        if path.name in self_outputs:
            continue
        text = read_text(path)
        if not text:
            continue
        text_lower = text.lower()
        matches = [needle for needle, low in lowered if low in text_lower]
        if not matches:
            continue
        lines = []
        for index, line in enumerate(text.splitlines(), start=1):
            low_line = line.lower()
            if any(low in low_line for _, low in lowered):
                lines.append({"line": index, "text": line[:240]})
            # Cap sample evidence at 8 lines per file.
            if len(lines) >= 8:
                break
        findings.append({"path": rel, "matches": sorted(set(matches)), "sampleLines": lines})
    return sorted(findings, key=lambda item: item["path"])
def tx_url(tx_hash: str | None) -> str:
return f"https://etherscan.io/tx/{tx_hash}" if tx_hash else ""
def build() -> dict[str, Any]:
    """Reconcile on-chain role events with repo records into the appendix payload.

    Extracts transaction hashes and candidate addresses from the latest role
    audit, greps the repo for mentions of those needles, and flags files whose
    paths look like deployment records.
    """
    role_audit = load_role_audit()
    events = role_audit.get("eventLogReview", {}).get("events", [])
    tx_hashes = sorted({event.get("transactionHash") for event in events if event.get("transactionHash")})
    # Union of the token address, candidate-check addresses, and event actors.
    addresses = sorted(
        {
            role_audit.get("token", {}).get("address", ""),
            *[candidate.get("address", "") for candidate in role_audit.get("candidateChecks", {}).values()],
            *[event.get("account", "") for event in events],
            *[event.get("sender", "") for event in events],
        }
    )
    # Keep only well-formed 20-byte hex addresses (drops "" and malformed values).
    addresses = [address for address in addresses if re.fullmatch(r"0x[a-fA-F0-9]{40}", address or "")]
    needles = [role_audit.get("token", {}).get("address", ""), *tx_hashes, *addresses]
    mentions = find_mentions(needles)
    # Heuristic: path keywords that suggest a deployment/record document.
    deployment_record_candidates = [
        item
        for item in mentions
        if any(token in item["path"].lower() for token in ["deploy", "tracker", "technical", "completion", "readiness", "runbook", "bridge"])
    ]
    return {
        "schema": "cwusdc-role-deployment-appendix/v1",
        "generatedAt": dt.datetime.now(dt.UTC).isoformat().replace("+00:00", "Z"),
        "roleAudit": str(ROLE_AUDIT.relative_to(ROOT)),
        "token": role_audit.get("token", {}),
        "eventCount": len(events),
        "transactionHashes": tx_hashes,
        "effectiveMembersFromEvents": role_audit.get("eventLogReview", {}).get("effectiveMembersFromEvents", {}),
        "privilegedCandidates": role_audit.get("privilegedCandidates", []),
        "deploymentRecordCandidates": deployment_record_candidates,
        "allMentions": mentions,
        "limitations": [
            "This appendix reconciles on-chain role events with repository records discoverable by local text search.",
            "It is not a substitute for a signed third-party audit or a provider-side ownership verification flow.",
            "Operator notebooks, private emails, and provider form submissions are outside this local repository scan unless committed as evidence reports.",
        ],
    }
def write_md(payload: dict[str, Any], path: Path) -> None:
    """Render the appendix payload as a set of markdown tables at *path*."""
    lines = [
        "# cWUSDC Role Deployment Appendix",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Token: `{payload['token'].get('address')}`",
        f"- Source role audit: `{payload['roleAudit']}`",
        f"- On-chain role event count: `{payload['eventCount']}`",
        "",
        "## Effective Members From Events",
        "",
        "| Role | Members |",
        "|---|---|",
    ]
    for role, members in payload["effectiveMembersFromEvents"].items():
        lines.append(f"| `{role}` | `{', '.join(members) if members else 'none observed'}` |")
    lines.extend(["", "## Privileged Candidates", "", "| Label | Address | Roles |", "|---|---|---|"])
    for item in payload["privilegedCandidates"]:
        lines.append(f"| `{item['label']}` | `{item['address']}` | `{', '.join(item['roles'])}` |")
    lines.extend(["", "## Role Event Transactions", "", "| Transaction | Etherscan |", "|---|---|"])
    for tx_hash in payload["transactionHashes"]:
        lines.append(f"| `{tx_hash}` | {tx_url(tx_hash)} |")
    lines.extend(["", "## Deployment Record Candidates", "", "| Path | Matches | Sample |", "|---|---|---|"])
    for item in payload["deploymentRecordCandidates"]:
        # At most 3 sample lines per file, each rendered as "L<line>: <text>".
        sample = "; ".join(f"L{line['line']}: {line['text']}" for line in item["sampleLines"][:3])
        lines.append(f"| `{item['path']}` | `{', '.join(item['matches'][:4])}` | {sample} |")
    lines.extend(["", "## Limitations", ""])
    lines.extend(f"- {item}" for item in payload["limitations"])
    path.write_text("\n".join(lines) + "\n")
def main() -> int:
    """Generate the appendix JSON and markdown reports; return the exit code."""
    payload = build()
    REPORT_JSON.parent.mkdir(parents=True, exist_ok=True)
    REPORT_JSON.write_text(json.dumps(payload, indent=2) + "\n")
    write_md(payload, REPORT_MD)
    for written in (REPORT_JSON, REPORT_MD):
        print(f"Wrote {written.relative_to(ROOT)}")
    return 0
if __name__ == "__main__":
raise SystemExit(main())

View File

@@ -0,0 +1,37 @@
#!/usr/bin/env bash
# Build ei-matrix-cwusdc-topup-indices.txt + ei-matrix-cwusdc-topup-amounts.tsv from
# reports/status/ei-matrix-readiness-audit-latest.json (rows with mainnetCwusdcRaw < TARGET).
#
# Usage (repo root):
# EI_MATRIX_TOPUP_TARGET_RAW=12000000 ./scripts/verify/build-ei-matrix-cwusdc-topup-tsv-from-audit-json.sh
# ./scripts/verify/build-ei-matrix-cwusdc-topup-tsv-from-audit-json.sh /path/to/audit.json
set -euo pipefail
# Resolve the repo root from this script's location so it works from any CWD.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# Audit JSON may be passed as $1; defaults to the latest readiness audit.
JSON="${1:-$PROJECT_ROOT/reports/status/ei-matrix-readiness-audit-latest.json}"
# Per-wallet target balance in raw token units; override via env var.
TARGET="${EI_MATRIX_TOPUP_TARGET_RAW:-12000000}"
OUT_IDX="$PROJECT_ROOT/reports/status/ei-matrix-cwusdc-topup-indices.txt"
OUT_TSV="$PROJECT_ROOT/reports/status/ei-matrix-cwusdc-topup-amounts.tsv"
[[ -f "$JSON" ]] || { echo "Missing $JSON" >&2; exit 1; }
# Heredoc delimiter is quoted ('PY') so bash does not expand inside the Python
# body; inputs are passed through as argv instead.
python3 - "$JSON" "$TARGET" "$OUT_IDX" "$OUT_TSV" <<'PY'
import json
import sys
from pathlib import Path
p, target, out_idx, out_tsv = Path(sys.argv[1]), int(sys.argv[2]), Path(sys.argv[3]), Path(sys.argv[4])
data = json.loads(p.read_text(encoding="utf-8"))
rows = data["rows"]
gaps = []
total = 0
for r in rows:
    # Missing/null balances count as zero, so such rows receive a full top-up.
    cur = int(r.get("mainnetCwusdcRaw") or 0)
    if cur < target:
        need = target - cur
        idx = int(r["linearIndex"])
        gaps.append((idx, need))
        total += need
out_idx.parent.mkdir(parents=True, exist_ok=True)
# indices.txt: one linear index per line; amounts.tsv: "<index>\t<topup_raw>".
out_idx.write_text("\n".join(str(i) for i, _ in gaps) + "\n", encoding="utf-8")
out_tsv.write_text("\n".join(f"{i}\t{n}" for i, n in gaps) + "\n", encoding="utf-8")
print(f"wrote {out_idx} + {out_tsv} gap_wallets={len(gaps)} total_topup_raw={total}")
PY

View File

@@ -0,0 +1,157 @@
#!/usr/bin/env python3
"""Build an index of external submission packets and current probe artifacts."""
from __future__ import annotations
import json
from datetime import datetime, timezone
from pathlib import Path
from typing import Any
ROOT = Path(__file__).resolve().parents[2]
OUT_JSON = ROOT / "reports/status/external-submission-packet-index-latest.json"
OUT_MD = ROOT / "docs/04-configuration/EXTERNAL_SUBMISSION_PACKET_INDEX.md"
PACKETS: list[dict[str, Any]] = [
{
"provider": "Etherscan",
"status": "repo_ready_external_acceptance_pending",
"primaryPacket": "docs/04-configuration/etherscan/CWUSDC_MAINNET_ETHERSCAN_PROFILE_PACKET.md",
"supporting": [
"docs/04-configuration/etherscan/CWUSDC_ETHERSCAN_E2E_RECOMMENDATIONS.md",
"docs/04-configuration/etherscan/CWUSDC_ETHERSCAN_VALUE_EXECUTION_PLAN.md",
"reports/status/cwusdc-etherscan-value-dossier-latest.json",
],
"nextRepoAction": "Refresh dossier and capture post-submit response evidence.",
},
{
"provider": "CoinGecko",
"status": "repo_ready_external_price_entry_missing",
"primaryPacket": "docs/04-configuration/coingecko/CWUSDC_MAINNET_TRACKER_SUBMISSION_PACKET.md",
"supporting": [
"docs/04-configuration/coingecko/CWUSDC_MAINNET_EXTERNAL_SUBMISSION_CHECKLIST.md",
"docs/04-configuration/coingecko/submissions/cwusdc-coingecko-listing-request-20260509.json",
"reports/status/cwusdc-external-trackers-live-latest.json",
],
"nextRepoAction": "Keep token-price API blocker visible and attach current supply/liquidity caveats.",
},
{
"provider": "CoinMarketCap",
"status": "dex_page_visible_full_value_acceptance_pending",
"primaryPacket": "docs/04-configuration/coingecko/CWUSDC_MAINNET_TRACKER_SUBMISSION_PACKET.md",
"supporting": [
"reports/status/token-aggregation-cmc-report-chain1-latest.json",
"reports/status/cmc-provider-report-sanity-latest.json",
"reports/status/cmc-top10-ecosystem-coverage-latest.json",
],
"nextRepoAction": "Use CMC sanity report to avoid overclaiming liquidity or quote-asset identity.",
},
{
"provider": "DexScreener",
"status": "api_not_indexing_pairs",
"primaryPacket": "docs/04-configuration/dexscreener/CWUSDC_DEXSCREENER_INDEXING_AND_PROFILE_PACKET_20260509.md",
"supporting": [
"reports/status/cwusdc-external-trackers-live-latest.json",
"reports/status/cwusdc-provider-handoff-latest.md",
],
"nextRepoAction": "Keep pair/profile request evidence updated after fresh public swap/liquidity events.",
},
{
"provider": "GeckoTerminal",
"status": "pool_api_visible_low_reserve",
"primaryPacket": "docs/04-configuration/coingecko/CWUSDC_MAINNET_TRACKER_SUBMISSION_PACKET.md",
"supporting": [
"reports/status/cwusdc-external-trackers-live-latest.json",
"reports/status/cmc-provider-report-sanity-latest.json",
],
"nextRepoAction": "Track reserve USD and 24h volume separately from listing acceptance.",
},
{
"provider": "MetaMask",
"status": "metadata_path_ready_price_provider_external",
"primaryPacket": "docs/04-configuration/metamask/METAMASK_ASSET_PRICE_PROVIDER_SUBMISSION_MATRIX.md",
"supporting": [
"docs/04-configuration/metamask/METAMASK_EIP747_CONTRACT_METADATA_REFERENCE_PACKET.md",
"docs/04-configuration/metamask/METAMASK_CWUSDC_API_FEED_SPIDER_WEB_RESEARCH.md",
"reports/status/cwusdc-provider-readiness-ci-latest.json",
],
"nextRepoAction": "Keep CAIP-19, EIP-747, logo URL, and external price-provider blockers aligned.",
},
]
def exists(path: str) -> bool:
    """True when the repo-relative *path* is present on disk."""
    candidate = ROOT / path
    return candidate.exists()
def table(headers: list[str], rows: list[list[Any]]) -> str:
    """Render headers/rows as a markdown table; list cells join with <br>."""
    def cell(value: Any) -> str:
        if isinstance(value, list):
            value = "<br>".join(str(item) for item in value)
        return str(value).replace("|", "\\|").replace("\n", "<br>")
    rendered = [
        f"| {' | '.join(cell(header) for header in headers)} |",
        f"| {' | '.join('---' for _ in headers)} |",
    ]
    rendered.extend(f"| {' | '.join(cell(value) for value in row)} |" for row in rows)
    return "\n".join(rendered)
def main() -> int:
    """Build the packet index: recompute artifact presence, write JSON + markdown."""
    generated_at = datetime.now(timezone.utc).isoformat()
    packets = []
    for packet in PACKETS:
        # Copy so the static PACKETS template is never mutated.
        row = dict(packet)
        row["primaryExists"] = exists(row["primaryPacket"])
        row["supportingExists"] = [{"path": path, "exists": exists(path)} for path in row["supporting"]]
        row["allArtifactsPresent"] = row["primaryExists"] and all(item["exists"] for item in row["supportingExists"])
        packets.append(row)
    payload = {
        "schema": "external-submission-packet-index/v1",
        "generatedAt": generated_at,
        "summary": {
            "providerCount": len(packets),
            "allArtifactsPresent": all(row["allArtifactsPresent"] for row in packets),
            # Counts providers (not individual files) with at least one missing artifact.
            "missingArtifactCount": sum(1 for row in packets if not row["allArtifactsPresent"]),
        },
        "packets": packets,
    }
    OUT_JSON.parent.mkdir(parents=True, exist_ok=True)
    OUT_JSON.write_text(json.dumps(payload, indent=2) + "\n")
    lines = [
        "# External Submission Packet Index",
        "",
        f"- Generated: `{generated_at}`",
        f"- All artifacts present: `{payload['summary']['allArtifactsPresent']}`",
        "",
        table(
            ["Provider", "Status", "Primary packet", "Supporting artifacts", "Next repo action"],
            [
                [
                    row["provider"],
                    row["status"],
                    f"`{row['primaryPacket']}` ({row['primaryExists']})",
                    [f"`{item['path']}` ({item['exists']})" for item in row["supportingExists"]],
                    row["nextRepoAction"],
                ]
                for row in packets
            ],
        ),
        "",
        "## Boundary",
        "",
        "This index tracks repo-side evidence availability only. Provider submission, review, acceptance, and price propagation remain external states.",
    ]
    OUT_MD.parent.mkdir(parents=True, exist_ok=True)
    OUT_MD.write_text("\n".join(lines) + "\n")
    print(f"Wrote {OUT_JSON.relative_to(ROOT)}")
    print(f"Wrote {OUT_MD.relative_to(ROOT)}")
    return 0
if __name__ == "__main__":
raise SystemExit(main())

View File

@@ -0,0 +1,136 @@
#!/usr/bin/env python3
"""Write repo-side non-EVM funding and identity requirement stubs."""
from __future__ import annotations
import json
from datetime import datetime, timezone
from pathlib import Path
from typing import Any
ROOT = Path(__file__).resolve().parents[2]
CONFIG_OUT = ROOT / "config/non-evm-lane-requirements.json"
REPORT_JSON = ROOT / "reports/status/non-evm-lane-requirements-latest.json"
REPORT_MD = ROOT / "reports/status/non-evm-lane-requirements-latest.md"
LANES: list[dict[str, Any]] = [
{
"network": "solana",
"nativeAsset": "SOL",
"walletStatus": "bound_from_SOLANA_KEYPAIR_PATH_public_key",
"canonicalWallet": "9b4ebHVimuhMqbiCh6tUMMY2S48VyEHpqg5nxMMFe5Pf",
"requiredBindings": ["splMintAddresses", "rentReserveTarget", "venueMinimumLiquidity"],
"minimumFundingTarget": "TBD",
"claimBoundary": "Do not claim native Solana liquidity until SPL mints, rent/gas, and venue inventory are bound.",
},
{
"network": "tron",
"nativeAsset": "TRX",
"walletStatus": "derived_wallet_needs_canonical_confirmation",
"canonicalWallet": "TGkbidE5LfVJZ3QGj6DaPqzCTcTe9tJDxm",
"requiredBindings": ["canonicalWalletApproval", "energyBandwidthTarget", "trc20Inventory"],
"minimumFundingTarget": "TBD",
"claimBoundary": "Do not claim native Tron liquidity until the canonical wallet and TRC-20 inventory are confirmed.",
},
{
"network": "xrpl",
"nativeAsset": "XRP",
"walletStatus": "missing",
"canonicalWallet": None,
"requiredBindings": ["xrplAccount", "destinationTagPolicy", "trustlineIssuerPolicy", "xrpReserveTarget"],
"minimumFundingTarget": "TBD",
"claimBoundary": "Do not claim XRPL corridor readiness until account reserve, tags, trustlines, and wXRP controller evidence are closed.",
},
{
"network": "bitcoin",
"nativeAsset": "BTC",
"walletStatus": "missing",
"canonicalWallet": None,
"requiredBindings": ["btcCustodyAddress", "proofOfReservesPolicy", "wrappedAssetMapping", "venueTarget"],
"minimumFundingTarget": "TBD",
"claimBoundary": "Use BTC as a planning lane only until custody/reserve evidence and wrapping policy are bound.",
},
{
"network": "dogecoin",
"nativeAsset": "DOGE",
"walletStatus": "missing",
"canonicalWallet": None,
"requiredBindings": ["dogeCustodyAddress", "bridgeOrCustodyModel", "venueTarget"],
"minimumFundingTarget": "TBD",
"claimBoundary": "Use DOGE as a planning lane only until native custody and bridge model are bound.",
},
{
"network": "hyperliquid",
"nativeAsset": "HYPE",
"walletStatus": "research_required",
"canonicalWallet": None,
"requiredBindings": ["chainIdentifier", "assetIdentifier", "custodyPath", "venueOrApiEvidence"],
"minimumFundingTarget": "TBD",
"claimBoundary": "Use HYPE only as a market-cap watch item until identifiers and custody path are verified.",
},
]
def table(headers: list[str], rows: list[list[Any]]) -> str:
    """Markdown table renderer; lists join with <br>, None renders as TBD."""
    def cell(value: Any) -> str:
        # List check runs first so a list is joined before the None check.
        if isinstance(value, list):
            value = "<br>".join(str(item) for item in value)
        if value is None:
            value = "TBD"
        return str(value).replace("|", "\\|").replace("\n", "<br>")
    parts = [
        f"| {' | '.join(cell(header) for header in headers)} |",
        f"| {' | '.join('---' for _ in headers)} |",
    ]
    parts += [f"| {' | '.join(cell(value) for value in row)} |" for row in rows]
    return "\n".join(parts)
def main() -> int:
    """Write lane requirement stubs to config plus JSON/markdown status reports."""
    generated_at = datetime.now(timezone.utc).isoformat()
    payload = {
        "schema": "non-evm-lane-requirements/v1",
        "generatedAt": generated_at,
        "status": "stubs_bound_repo_side",
        "lanes": LANES,
        "validationRule": "A lane is claimable only after canonicalWallet, asset IDs, native gas/reserve target, venue target, and evidence source are non-TBD.",
    }
    # The identical payload is written to both the config stub and the report.
    for path in (CONFIG_OUT, REPORT_JSON):
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_text(json.dumps(payload, indent=2) + "\n")
    lines = [
        "# Non-EVM Lane Requirement Stubs",
        "",
        f"- Generated: `{generated_at}`",
        f"- Config source: `{CONFIG_OUT.relative_to(ROOT)}`",
        "",
        table(
            ["Network", "Native", "Wallet status", "Canonical wallet", "Required bindings", "Minimum target", "Claim boundary"],
            [
                [
                    lane["network"],
                    lane["nativeAsset"],
                    lane["walletStatus"],
                    lane["canonicalWallet"],
                    lane["requiredBindings"],
                    lane["minimumFundingTarget"],
                    lane["claimBoundary"],
                ]
                for lane in LANES
            ],
        ),
    ]
    REPORT_MD.parent.mkdir(parents=True, exist_ok=True)
    REPORT_MD.write_text("\n".join(lines) + "\n")
    print(f"Wrote {CONFIG_OUT.relative_to(ROOT)}")
    print(f"Wrote {REPORT_JSON.relative_to(ROOT)}")
    print(f"Wrote {REPORT_MD.relative_to(ROOT)}")
    return 0
if __name__ == "__main__":
raise SystemExit(main())

View File

@@ -0,0 +1,165 @@
#!/usr/bin/env python3
"""Advisory sanity checks for the repo CMC-shaped Mainnet report."""
from __future__ import annotations
import json
from datetime import datetime, timezone
from decimal import Decimal, InvalidOperation
from pathlib import Path
from typing import Any
# Repo root: two directories above this script (scripts/verify/ -> repo root).
ROOT = Path(__file__).resolve().parents[2]
# Input: the CMC-shaped Mainnet report this script sanity-checks (see module docstring).
CMC_REPORT = ROOT / "reports/status/token-aggregation-cmc-report-chain1-latest.json"
# Optional input: live external tracker check results (GeckoTerminal entries are read).
TRACKERS = ROOT / "reports/status/cwusdc-external-trackers-live-latest.json"
# Outputs: machine-readable payload plus human-readable Markdown summary.
OUT_JSON = ROOT / "reports/status/cmc-provider-report-sanity-latest.json"
OUT_MD = ROOT / "reports/status/cmc-provider-report-sanity-latest.md"
# Official quote-asset contract addresses (lowercase) mapped to their expected symbols.
OFFICIAL_QUOTES = {
    "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48": "USDC",
    "0xdac17f958d2ee523a2206206994597c13d831ec7": "USDT",
}
# Symbols that get a dedicated "promoted" row in the sanity output.
PROMOTED = {"cWUSDC", "cWUSDT", "cWXAUC", "cWXAUT", "cWBTC", "cWETH"}
def load(path: Path) -> dict[str, Any]:
    """Parse *path* as JSON; return an empty dict when the file is absent."""
    if not path.exists():
        return {}
    return json.loads(path.read_text())
def dec(value: Any) -> Decimal:
    """Coerce *value* to Decimal; falsy or unparseable input becomes Decimal(0)."""
    text = str(value or "0")
    try:
        return Decimal(text)
    except (InvalidOperation, ValueError):
        return Decimal(0)
def table(headers: list[str], rows: list[list[Any]]) -> str:
    """Render a Markdown table; list cells join with <br>, pipes/newlines escaped."""
    def cell(value: Any) -> str:
        if isinstance(value, list):
            value = "<br>".join(str(item) for item in value)
        return str(value).replace("|", "\\|").replace("\n", "<br>")

    head = " | ".join(cell(header) for header in headers)
    divider = " | ".join("---" for _ in headers)
    body = [" | ".join(cell(value) for value in row) for row in rows]
    return "\n".join([f"| {head} |", f"| {divider} |", *[f"| {line} |" for line in body]])
def main() -> int:
    """Run advisory sanity checks over the CMC-shaped report and write outputs.

    Reads CMC_REPORT and TRACKERS (both optional on disk), collects advisory
    warnings, writes OUT_JSON and OUT_MD, prints the written paths, and always
    returns 0 — warnings never fail the run.
    """
    cmc = load(CMC_REPORT)
    trackers = load(TRACKERS)
    tokens = cmc.get("tokens", [])
    warnings: list[dict[str, Any]] = []
    promoted_rows = []
    for token in tokens:
        symbol = token.get("symbol")
        # Lowercased so it can be compared against OFFICIAL_QUOTES keys.
        address = str(token.get("contract_address", "")).lower()
        liquidity = dec(token.get("liquidity_usd"))
        volume = dec(token.get("volume_24h"))
        pairs = token.get("pairs", [])
        # Official USDC/USDT contract presented under a different symbol -> warn.
        if address in OFFICIAL_QUOTES and symbol != OFFICIAL_QUOTES[address]:
            warnings.append(
                {
                    "id": "official_quote_symbol_alias",
                    "symbol": symbol,
                    "address": address,
                    "severity": "warning",
                    "message": f"Official {OFFICIAL_QUOTES[address]} address is presented with symbol {symbol}; keep provider packets explicit about official quote vs DBIS wrapped/compliant symbols.",
                }
            )
        if symbol in PROMOTED:
            promoted_rows.append(
                {
                    "symbol": symbol,
                    "address": address,
                    "liquidityUsd": str(liquidity),
                    "volume24hUsd": str(volume),
                    "pairCount": len(pairs),
                }
            )
        # Applies to every token row, promoted or not.
        if liquidity <= 0:
            warnings.append(
                {
                    "id": "zero_reported_liquidity",
                    "symbol": symbol,
                    "address": address,
                    "severity": "warning",
                    "message": "CMC-shaped report shows zero liquidity_usd; do not use it as listing-quality liquidity evidence.",
                }
            )
    # Cross-check: pull GeckoTerminal reserve evidence from the tracker report.
    gecko_reserves = []
    for check in trackers.get("checks", []):
        if not str(check.get("id", "")).startswith("geckoterminal"):
            continue
        # Defensive nested lookup: any missing level collapses to {}.
        attrs = (((check.get("jsonPreview") or {}).get("data") or {}).get("attributes") or {})
        gecko_reserves.append(
            {
                "id": check.get("id"),
                "pool": attrs.get("address"),
                "name": attrs.get("name"),
                "reserveUsd": attrs.get("reserve_in_usd"),
                "volume24hUsd": (attrs.get("volume_usd") or {}).get("h24"),
            }
        )
    payload = {
        "schema": "cmc-provider-report-sanity/v1",
        "generatedAt": datetime.now(timezone.utc).isoformat(),
        "inputs": {
            "cmcReport": str(CMC_REPORT.relative_to(ROOT)),
            "trackerReport": str(TRACKERS.relative_to(ROOT)) if TRACKERS.exists() else None,
        },
        "summary": {
            "tokenCount": len(tokens),
            "promotedTokenCount": len(promoted_rows),
            "warningCount": len(warnings),
            "geckoReserveEvidenceCount": len(gecko_reserves),
        },
        "promotedTokens": promoted_rows,
        "geckoReserveEvidence": gecko_reserves,
        "warnings": warnings,
    }
    OUT_JSON.parent.mkdir(parents=True, exist_ok=True)
    OUT_JSON.write_text(json.dumps(payload, indent=2) + "\n")
    lines = [
        "# CMC Provider Report Sanity",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Warnings: `{len(warnings)}`",
        "",
        "## Promoted Mainnet Rows",
        "",
        table(
            ["Symbol", "Address", "Liquidity USD", "24h volume USD", "Pairs"],
            [[row["symbol"], row["address"], row["liquidityUsd"], row["volume24hUsd"], row["pairCount"]] for row in promoted_rows],
        ),
        "",
        "## GeckoTerminal Reserve Cross-Check",
        "",
        table(
            ["Check", "Pool", "Name", "Reserve USD", "24h volume USD"],
            [[row["id"], row["pool"], row["name"], row["reserveUsd"], row["volume24hUsd"]] for row in gecko_reserves],
        ) if gecko_reserves else "No GeckoTerminal tracker reserve evidence found.",
        "",
        "## Advisory Warnings",
        "",
        table(
            ["ID", "Symbol", "Address", "Severity", "Message"],
            [[w["id"], w.get("symbol", "-"), w.get("address", "-"), w["severity"], w["message"]] for w in warnings],
        ) if warnings else "No warnings.",
    ]
    # OUT_MD lives in the same directory as OUT_JSON, created above.
    OUT_MD.write_text("\n".join(lines) + "\n")
    print(f"Wrote {OUT_JSON.relative_to(ROOT)}")
    print(f"Wrote {OUT_MD.relative_to(ROOT)}")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -157,7 +157,7 @@ for addr in "${ADDRESSES[@]}"; do
done
echo ""
echo "Total: $OK present, $MISS missing/empty (${#ADDRESSES[@]} addresses). Explorer: https://explorer.d-bis.org/address/<ADDR>"
echo "Total: $OK present, $MISS missing/empty (${#ADDRESSES[@]} addresses). Explorer: https://explorer.d-bis.org/addresses/<ADDR>"
if [[ $MISS -gt 0 && -z "$rpc_reachable" ]]; then
echo " → RPC was unreachable from this host; see WARN above. Run from LAN/VPN or pass a reachable RPC URL." >&2
fi

View File

@@ -0,0 +1,536 @@
#!/usr/bin/env python3
"""Build a read-only cW mesh operational readiness report.
This checker intentionally does not call RPC. It validates the source-of-truth
files that the deployment, bridge, liquidity, token aggregation, and tracker
runbooks consume, then records the remaining live/operator and external gates.
"""
from __future__ import annotations
import argparse
import json
import re
from datetime import datetime, timezone
from pathlib import Path
from typing import Any
# Repo root: two directories above this script (scripts/verify/ -> repo root).
ROOT = Path(__file__).resolve().parents[2]
# Source-of-truth inputs that the gates below read.
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
MESH_MATRIX = ROOT / "reports" / "status" / "cw-mesh-deployment-matrix-latest.json"
TOKEN_MAPPING = ROOT / "config" / "token-mapping-multichain.json"
CANONICAL_TOKENS = ROOT / "smom-dbis-138" / "services" / "token-aggregation" / "src" / "config" / "canonical-tokens.ts"
ENGINE_X_READINESS = ROOT / "reports" / "status" / "engine-x-public-indexed-readiness-latest.json"
# External tracker submission packet documents (existence-checked only).
ETHERSCAN_PACKET = ROOT / "docs" / "04-configuration" / "etherscan" / "CWUSDC_MAINNET_ETHERSCAN_PROFILE_PACKET.md"
COINGECKO_CHECKLIST = ROOT / "docs" / "04-configuration" / "coingecko" / "CWUSDC_MAINNET_EXTERNAL_SUBMISSION_CHECKLIST.md"
# Bridge implementation artifacts (existence-checked only).
CW_L1_CONTRACT = ROOT / "smom-dbis-138" / "contracts" / "bridge" / "CWMultiTokenBridgeL1.sol"
CW_L2_CONTRACT = ROOT / "smom-dbis-138" / "contracts" / "bridge" / "CWMultiTokenBridgeL2.sol"
CW_L1_DEPLOY = ROOT / "smom-dbis-138" / "script" / "DeployCWMultiTokenBridgeL1.s.sol"
CW_L2_DEPLOY = ROOT / "smom-dbis-138" / "script" / "DeployCWMultiTokenBridgeL2.s.sol"
CW_ROUTE_BOOTSTRAP = ROOT / "smom-dbis-138" / "scripts" / "deployment" / "cw-l1-bootstrap-gru-v2-ccip-routes.sh"
# Live-evidence candidates; evidence_file_status() stops at the first usable one.
BRIDGE_EVIDENCE_CANDIDATES = [
    ROOT / "reports" / "status" / "cw-multitoken-bridge-e2e-latest.json",
    ROOT / "reports" / "status" / "cw-bridge-live-e2e-latest.json",
]
TRACKER_EVIDENCE_CANDIDATES = [
    ROOT / "reports" / "status" / "cwusdc-tracker-profile-approval-latest.json",
    ROOT / "reports" / "status" / "cwusdc-external-trackers-live-latest.json",
]
# Active public mesh chain ids; planned/non-cW chains are listed but excluded
# from coverage accounting (see check_planned_chains).
ACTIVE_CHAIN_IDS = [1, 10, 56, 100, 137, 8453, 42161, 42220, 43114]
PLANNED_OR_NON_CW_CHAIN_IDS = [1111, 651940]
# Wrapped fiat/commodity symbols that must be deployed on every active chain.
REQUIRED_CW_SYMBOLS = [
    "cWUSDT",
    "cWUSDC",
    "cWEURC",
    "cWEURT",
    "cWGBPC",
    "cWGBPT",
    "cWAUDC",
    "cWJPYC",
    "cWCHFC",
    "cWCADC",
    "cWXAUC",
    "cWXAUT",
]
# Optional add-ons: absence downgrades the coverage gate to "warn", not "blocked".
OPTIONAL_ADDON_SYMBOLS = ["cWBTC"]
# cW symbol -> token-mapping key, used by check_token_mapping().
TOKEN_MAPPING_KEYS = {
    "cWUSDT": "Compliant_USDT_cW",
    "cWUSDC": "Compliant_USDC_cW",
    "cWEURC": "Compliant_EURC_cW",
    "cWEURT": "Compliant_EURT_cW",
    "cWGBPC": "Compliant_GBPC_cW",
    "cWGBPT": "Compliant_GBPT_cW",
    "cWAUDC": "Compliant_AUDC_cW",
    "cWJPYC": "Compliant_JPYC_cW",
    "cWCHFC": "Compliant_CHFC_cW",
    "cWCADC": "Compliant_CADC_cW",
    "cWXAUC": "Compliant_XAUC_cW",
    "cWXAUT": "Compliant_XAUT_cW",
}
def load_json(path: Path, default: Any = None) -> Any:
    """Parse *path* as JSON, returning *default* when the file does not exist."""
    if path.exists():
        return json.loads(path.read_text())
    return default
def rel(path: Path) -> str:
    """Repo-root-relative string for *path*; the path as given when outside ROOT."""
    if path.is_relative_to(ROOT):
        return str(path.relative_to(ROOT))
    return str(path)
def is_zero_address(value: str | None) -> bool:
if not value:
return True
return value.lower() == "0x0000000000000000000000000000000000000000"
def ok_gate(gate_id: str, title: str, details: list[str] | None = None, evidence: list[str] | None = None) -> dict[str, Any]:
return {
"id": gate_id,
"title": title,
"status": "pass",
"details": details or [],
"evidence": evidence or [],
}
def warn_gate(gate_id: str, title: str, details: list[str], evidence: list[str] | None = None) -> dict[str, Any]:
return {
"id": gate_id,
"title": title,
"status": "warn",
"details": details,
"evidence": evidence or [],
}
def fail_gate(gate_id: str, title: str, details: list[str], evidence: list[str] | None = None) -> dict[str, Any]:
return {
"id": gate_id,
"title": title,
"status": "blocked",
"details": details,
"evidence": evidence or [],
}
def check_source_of_truth(dep: dict[str, Any]) -> dict[str, Any]:
    """Gate: deployment-status names Chain 138 as home and lists every active chain."""
    chains = dep.get("chains") or {}
    issues: list[str] = []
    if dep.get("homeChainId") != 138:
        issues.append(f"deployment-status homeChainId is `{dep.get('homeChainId')}`, expected `138`.")
    if "138" not in chains:
        issues.append("deployment-status is missing Chain 138.")
    issues.extend(
        f"deployment-status is missing active chain `{chain_id}`."
        for chain_id in ACTIVE_CHAIN_IDS
        if str(chain_id) not in chains
    )
    evidence = [rel(DEPLOYMENT_STATUS)]
    if issues:
        return fail_gate("source_of_truth_chain_138", "Chain 138 source of truth", issues, evidence)
    return ok_gate(
        "source_of_truth_chain_138",
        "Chain 138 source of truth",
        ["Chain 138 is the home chain and all active public mesh chains are represented."],
        evidence,
    )
def check_deployment_coverage(dep: dict[str, Any]) -> dict[str, Any]:
    """Gate: every active chain carries all required cW tokens; optional add-ons warn."""
    chains = dep.get("chains") or {}
    issues: list[str] = []
    optional_missing: list[str] = []
    for chain_id in ACTIVE_CHAIN_IDS:
        cw_tokens = (chains.get(str(chain_id)) or {}).get("cwTokens") or {}
        issues.extend(
            f"chain `{chain_id}` missing `{symbol}` in deployment-status cwTokens."
            for symbol in REQUIRED_CW_SYMBOLS
            if is_zero_address(cw_tokens.get(symbol))
        )
        optional_missing.extend(
            f"chain `{chain_id}` missing optional add-on `{symbol}`."
            for symbol in OPTIONAL_ADDON_SYMBOLS
            if is_zero_address(cw_tokens.get(symbol))
        )
    evidence = [rel(DEPLOYMENT_STATUS)]
    if issues:
        return fail_gate("active_cw_token_coverage", "Active cW token coverage", issues, evidence)
    if optional_missing:
        return warn_gate(
            "active_cw_token_coverage",
            "Active cW token coverage",
            ["All required cW fiat/commodity symbols are present.", *optional_missing],
            evidence,
        )
    return ok_gate(
        "active_cw_token_coverage",
        "Active cW token coverage",
        ["All required cW fiat/commodity symbols and optional cWBTC add-on are present on active mesh chains."],
        evidence,
    )
def check_token_mapping(dep: dict[str, Any], mapping: dict[str, Any]) -> dict[str, Any]:
    """Gate: 138 -> chain mapping pairs exist, are non-zero, and agree with deployment-status."""
    chains = dep.get("chains") or {}
    pairs = mapping.get("pairs") or []
    # Index pairs originating at home chain 138 by destination chain id.
    pair_by_to = {int(p.get("toChainId")): p for p in pairs if p.get("fromChainId") == 138 and p.get("toChainId") is not None}
    issues: list[str] = []
    for chain_id in ACTIVE_CHAIN_IDS:
        pair = pair_by_to.get(chain_id)
        if not pair:
            issues.append(f"token-mapping missing 138 -> `{chain_id}` pair.")
            continue
        # key -> destination-chain address, per the mapping file.
        tokens = {t.get("key"): t.get("addressTo") for t in pair.get("tokens") or []}
        cw_tokens = (chains.get(str(chain_id)) or {}).get("cwTokens") or {}
        for symbol, key in TOKEN_MAPPING_KEYS.items():
            mapped = tokens.get(key)
            expected = cw_tokens.get(symbol)
            if is_zero_address(mapped):
                issues.append(f"138 -> `{chain_id}` token-mapping has empty `{key}`.")
            elif expected and mapped.lower() != expected.lower():
                # Case-insensitive cross-check against deployment-status.
                issues.append(f"138 -> `{chain_id}` `{key}` is `{mapped}`, deployment-status has `{expected}`.")
    evidence = [rel(TOKEN_MAPPING), rel(DEPLOYMENT_STATUS)]
    if issues:
        return fail_gate("token_mapping_mesh", "138 to public-chain token mapping", issues, evidence)
    return ok_gate(
        "token_mapping_mesh",
        "138 to public-chain token mapping",
        ["All required cW mapping entries are non-zero and match deployment-status for active mesh chains."],
        evidence,
    )
def parse_gru_chain_ids(text: str) -> list[int]:
    """Extract the integers in the GRU_CW_CHAIN_IDS array literal of *text*, or []."""
    found = re.search(r"const\s+GRU_CW_CHAIN_IDS\s*=\s*\[([^\]]+)\]", text)
    if found is None:
        return []
    return [int(number) for number in re.findall(r"\d+", found.group(1))]
def fallback_symbol_has_chain(text: str, symbol: str, chain_id: int) -> bool:
    """True when *symbol*'s object literal in *text* maps *chain_id* to a 40-hex-digit address."""
    entry = re.search(rf"\n\s+{re.escape(symbol)}:\s*\{{(?P<body>.*?)\n\s+\}},", text, flags=re.S)
    if entry is None:
        return False
    pattern = rf"\[\s*{chain_id}\s*\]\s*:\s*['\"]0x[a-fA-F0-9]{{40}}['\"]"
    return bool(re.search(pattern, entry.group("body")))
def check_token_aggregation() -> dict[str, Any]:
    """Gate: canonical-tokens.ts covers all active chains and required cW symbols."""
    if not CANONICAL_TOKENS.exists():
        return fail_gate("token_aggregation_registry", "Token aggregation canonical registry", ["canonical-tokens.ts is missing."], [rel(CANONICAL_TOKENS)])
    text = CANONICAL_TOKENS.read_text()
    issues: list[str] = []
    chain_ids = parse_gru_chain_ids(text)
    for chain_id in ACTIVE_CHAIN_IDS:
        if chain_id not in chain_ids:
            issues.append(f"GRU_CW_CHAIN_IDS missing active chain `{chain_id}`.")
    for symbol in REQUIRED_CW_SYMBOLS:
        # Accept either TypeScript quote style for the symbol literal.
        if f"symbol: '{symbol}'" not in text and f'symbol: "{symbol}"' not in text:
            issues.append(f"CANONICAL_TOKENS missing first-class `{symbol}` entry.")
        for chain_id in ACTIVE_CHAIN_IDS:
            if not fallback_symbol_has_chain(text, symbol, chain_id):
                issues.append(f"FALLBACK_ADDRESSES `{symbol}` missing chain `{chain_id}`.")
    evidence = [rel(CANONICAL_TOKENS), "smom-dbis-138/services/token-aggregation/src/config/canonical-tokens.test.ts"]
    if issues:
        return fail_gate("token_aggregation_registry", "Token aggregation canonical registry", issues, evidence)
    return ok_gate(
        "token_aggregation_registry",
        "Token aggregation canonical registry",
        ["Canonical token aggregation includes the active nine-chain promoted cW mesh and the full required wrapped family."],
        evidence,
    )
def check_liquidity_and_indexing(matrix: dict[str, Any], dep: dict[str, Any]) -> dict[str, Any]:
    """Gate: cWUSDT/cWUSDC UniV2 pairs live+healthy; rail gaps and unseeded pools warn."""
    rows = {int(r.get("chainId")): r for r in matrix.get("rows") or []}
    chains = dep.get("chains") or {}
    issues: list[str] = []
    missing_rails: list[str] = []
    unseeded_by_chain: dict[int, int] = {}
    unseeded_examples: dict[int, list[str]] = {}
    for chain_id in ACTIVE_CHAIN_IDS:
        row = rows.get(chain_id)
        if not row:
            issues.append(f"mesh matrix missing chain `{chain_id}`.")
            continue
        # Hard requirements: the core pair must be both live and healthy.
        if row.get("uniswapV2CWUSDTvsCWUSDCLive") is not True:
            issues.append(f"chain `{chain_id}` cWUSDT/cWUSDC UniV2 pair is not live in latest matrix.")
        if row.get("uniswapV2CWUSDTvsCWUSDCHealthy") is not True:
            issues.append(f"chain `{chain_id}` cWUSDT/cWUSDC UniV2 pair is not healthy in latest matrix.")
        # Soft requirement: at least one stable settlement rail per side.
        rails = set(row.get("pmmSettlementRails") or [])
        if not any(x in rails for x in ("cWUSDC/USDC", "cWUSDC/USDT")):
            missing_rails.append(f"chain `{chain_id}` lacks cWUSDC stable settlement PMM rail")
        if not any(x in rails for x in ("cWUSDT/USDT", "cWUSDT/USDC")):
            missing_rails.append(f"chain `{chain_id}` lacks cWUSDT stable settlement PMM rail")
        # Count pools flagged unseeded_pending; retain at most 3 examples per chain.
        for pool in (chains.get(str(chain_id)) or {}).get("pmmPools") or []:
            notes = pool.get("notes") or []
            if any("unseeded_pending" in str(note) for note in notes):
                unseeded_by_chain[chain_id] = unseeded_by_chain.get(chain_id, 0) + 1
                unseeded_examples.setdefault(chain_id, [])
                if len(unseeded_examples[chain_id]) < 3:
                    unseeded_examples[chain_id].append(f"{pool.get('base')}/{pool.get('quote')} {pool.get('poolAddress')}")
    evidence = [rel(MESH_MATRIX), rel(DEPLOYMENT_STATUS)]
    warnings: list[str] = []
    if missing_rails:
        warnings.append(
            "Stable settlement PMM rail gaps: "
            + "; ".join(missing_rails)
            + ". Core cWUSDT/cWUSDC pair indexing remains healthy."
        )
    if unseeded_by_chain:
        summary = ", ".join(f"{chain_id}: {count}" for chain_id, count in sorted(unseeded_by_chain.items()))
        warnings.append(f"Unseeded pending PMM pools by chain: {summary}.")
        for chain_id in sorted(unseeded_examples):
            warnings.append(f"chain `{chain_id}` examples: " + "; ".join(unseeded_examples[chain_id]))
    # Hard issues block the gate; rail gaps/unseeded pools only warn.
    if issues:
        return fail_gate("liquidity_and_indexing", "LP indexing and settlement rails", issues + warnings, evidence)
    if warnings:
        return warn_gate(
            "liquidity_and_indexing",
            "LP indexing and settlement rails",
            ["Core cWUSDT/cWUSDC pair indexing is present and healthy.", *warnings],
            evidence,
        )
    return ok_gate(
        "liquidity_and_indexing",
        "LP indexing and settlement rails",
        ["Core cWUSDT/cWUSDC pair indexing and stable settlement rails are present with no unseeded PMM warnings."],
        evidence,
    )
def check_bridge_implementation() -> dict[str, Any]:
    """Gate: the CW bridge contracts plus deploy/bootstrap scripts exist on disk."""
    required = [CW_L1_CONTRACT, CW_L2_CONTRACT, CW_L1_DEPLOY, CW_L2_DEPLOY, CW_ROUTE_BOOTSTRAP]
    evidence = [rel(path) for path in required]
    missing = [rel(path) for path in required if not path.exists()]
    if missing:
        return fail_gate(
            "cw_mint_burn_bridge_implementation",
            "cW mint/burn bridge implementation",
            [f"missing `{path}`" for path in missing],
            evidence,
        )
    return ok_gate(
        "cw_mint_burn_bridge_implementation",
        "cW mint/burn bridge implementation",
        ["CWMultiTokenBridgeL1/L2 contracts and deployment/bootstrap scripts are present."],
        evidence,
    )
def evidence_file_status(paths: list[Path], success_keys: list[str]) -> tuple[bool, str | None, list[str]]:
    """Inspect evidence files for a truthy success key.

    Only the first existing candidate that parses to a dict is evaluated;
    once one is found, later candidates are never read even on failure
    (NOTE(review): presumably an intentional "preferred file wins" order —
    confirm against the runbooks).

    Args:
        paths: candidate evidence files in preference order.
        success_keys: dotted key paths (e.g. "summary.allActiveChainsPassed")
            that must resolve to the literal True.

    Returns:
        (ok, evidence_path_or_None, details) where details explains a failure.
    """
    for path in paths:
        if not path.exists():
            continue
        data = load_json(path, {})
        if not isinstance(data, dict):
            continue
        for key in success_keys:
            value: Any = data
            # Walk the dotted path; a non-dict at any level collapses to None.
            for part in key.split("."):
                value = value.get(part) if isinstance(value, dict) else None
            if value is True:
                return True, rel(path), []
        details = [f"`{rel(path)}` exists but does not expose a true success key: {', '.join(success_keys)}."]
        summary = data.get("summary") if isinstance(data, dict) else None
        if isinstance(summary, dict):
            # Surface common failure lists and progress counters to aid triage.
            for key in ("failedChainIds", "failedRequiredIds", "blockedGateIds"):
                values = summary.get(key)
                if values:
                    details.append(f"{key}: {', '.join(str(x) for x in values)}.")
            for key in ("passedChainCount", "activeChainCount", "requiredPassedCount", "requiredCount"):
                if key in summary:
                    details.append(f"{key}: {summary[key]}.")
        return False, rel(path), details
    return False, None, [f"No evidence file found. Expected one of: {', '.join(rel(p) for p in paths)}."]
def check_bridge_live_e2e() -> dict[str, Any]:
    """Gate: live bridge E2E evidence reports success for all active chains."""
    passed, evidence_path, details = evidence_file_status(
        BRIDGE_EVIDENCE_CANDIDATES,
        ["summary.allActiveChainsPassed", "summary.readyForProduction", "allActiveChainsPassed"],
    )
    evidence = [evidence_path] if evidence_path else [rel(path) for path in BRIDGE_EVIDENCE_CANDIDATES]
    if passed:
        return ok_gate(
            "cw_mint_burn_bridge_live_e2e",
            "cW mint/burn bridge live E2E",
            ["Live E2E evidence reports all active chains passed."],
            evidence,
        )
    return fail_gate(
        "cw_mint_burn_bridge_live_e2e",
        "cW mint/burn bridge live E2E",
        [
            "Dedicated cW mint/burn bridge code is implemented, but live receiver deployment, role wiring, route bootstrap, and E2E bridge proof evidence are still required.",
            *details,
        ],
        evidence,
    )
def check_engine_x_readiness() -> dict[str, Any]:
    """Gate: the Engine X readiness report is green and lists no blockers."""
    data = load_json(ENGINE_X_READINESS, {}) or {}
    summary = data.get("summary") or {}
    blockers = summary.get("blockers") or []
    if summary.get("readyForPublicIndexedProof") is True and not blockers:
        return ok_gate(
            "engine_x_public_indexed_readiness",
            "Engine X cWUSDC public indexed readiness",
            ["Engine X readiness report is green."],
            [rel(ENGINE_X_READINESS)],
        )
    return fail_gate(
        "engine_x_public_indexed_readiness",
        "Engine X cWUSDC public indexed readiness",
        ["Engine X readiness report is not green.", *[str(x) for x in blockers]],
        [rel(ENGINE_X_READINESS)],
    )
def check_external_tracker_packet() -> dict[str, Any]:
    """Gate: Etherscan profile packet and external submission checklist exist."""
    packet_paths = [ETHERSCAN_PACKET, COINGECKO_CHECKLIST]
    evidence = [rel(path) for path in packet_paths]
    missing = [rel(path) for path in packet_paths if not path.exists()]
    if missing:
        return fail_gate(
            "external_tracker_submission_packet",
            "External tracker submission packet",
            [f"missing `{path}`" for path in missing],
            evidence,
        )
    return ok_gate(
        "external_tracker_submission_packet",
        "External tracker submission packet",
        ["Etherscan profile packet and CoinGecko/external tracker checklist are present."],
        evidence,
    )
def check_external_tracker_live() -> dict[str, Any]:
    """Gate: external tracker evidence shows live indexing / profile approval."""
    live, evidence_path, details = evidence_file_status(
        TRACKER_EVIDENCE_CANDIDATES,
        ["summary.allTrackersLive", "summary.readyForEtherscanUsdValue", "allTrackersLive"],
    )
    evidence = [evidence_path] if evidence_path else [rel(path) for path in TRACKER_EVIDENCE_CANDIDATES]
    if live:
        return ok_gate(
            "external_trackers_live",
            "External trackers live",
            ["External tracker evidence reports live indexing/profile approval."],
            evidence,
        )
    return fail_gate(
        "external_trackers_live",
        "External trackers live",
        [
            "Explorer/tracker packet is prepared, but Etherscan/CoinGecko/CMC/DexScreener approval or live tracker evidence is external and still pending.",
            *details,
        ],
        evidence,
    )
def check_planned_chains(dep: dict[str, Any]) -> dict[str, Any]:
    """Advisory gate: list planned / non-cW chains excluded from active coverage."""
    chains = dep.get("chains") or {}

    def describe(chain_id: int) -> str:
        info = chains.get(str(chain_id)) or {}
        state = info.get("activationState") or "not_active_cw_mesh"
        return f"chain `{chain_id}` `{info.get('name', '')}` is `{state}` and is not counted as active cW mesh coverage."

    details = [describe(chain_id) for chain_id in PLANNED_OR_NON_CW_CHAIN_IDS]
    return warn_gate("planned_or_non_cw_chains", "Planned or non-cW chain scope", details, [rel(DEPLOYMENT_STATUS)])
def build_report() -> dict[str, Any]:
    """Run every readiness gate over the source-of-truth files and summarize.

    The "cw_mint_burn_bridge_live_e2e" and "external_trackers_live" gates
    depend on live/external evidence, so they are excluded from the
    "inRepoSourceOfTruthConfigured" summary flag.
    """
    dep = load_json(DEPLOYMENT_STATUS, {})
    matrix = load_json(MESH_MATRIX, {"rows": []})
    mapping = load_json(TOKEN_MAPPING, {})
    gates = [
        check_source_of_truth(dep),
        check_deployment_coverage(dep),
        check_token_mapping(dep, mapping),
        check_token_aggregation(),
        check_liquidity_and_indexing(matrix, dep),
        check_bridge_implementation(),
        check_bridge_live_e2e(),
        check_engine_x_readiness(),
        check_external_tracker_packet(),
        check_external_tracker_live(),
        check_planned_chains(dep),
    ]
    blocked = [g for g in gates if g["status"] == "blocked"]
    warnings = [g for g in gates if g["status"] == "warn"]
    # Blockers fixable purely in-repo (live/external evidence gates excluded).
    in_repo_blockers = [
        g for g in blocked
        if g["id"] not in {"cw_mint_burn_bridge_live_e2e", "external_trackers_live"}
    ]
    return {
        "schema": "cw-full-operational-readiness/v1",
        "generatedAt": datetime.now(timezone.utc).isoformat(),
        "activeChainIds": ACTIVE_CHAIN_IDS,
        "requiredCwSymbols": REQUIRED_CW_SYMBOLS,
        "summary": {
            "inRepoSourceOfTruthConfigured": len(in_repo_blockers) == 0,
            "fullyOperationalAndLive": len(blocked) == 0,
            "blockedGateCount": len(blocked),
            "warningGateCount": len(warnings),
            "blockedGateIds": [g["id"] for g in blocked],
            "warningGateIds": [g["id"] for g in warnings],
        },
        "gates": gates,
        "nextActions": [
            "Deploy and configure CWMultiTokenBridgeL1 on Chain 138 and CWMultiTokenBridgeL2 on each active public mesh chain.",
            "Grant MINTER_ROLE and BURNER_ROLE for each cW* token to the active CWMultiTokenBridgeL2 receiver on its chain.",
            "Run cW route bootstrap/configuration and capture live E2E evidence to reports/status/cw-multitoken-bridge-e2e-latest.json.",
            "Submit/confirm Etherscan, CoinGecko, CMC, DexScreener tracker approvals and capture live evidence.",
            "Seed any PMM pools still marked unseeded_pending in deployment-status, prioritizing stable quote rails with tracker-visible volume.",
        ],
    }
def write_markdown(report: dict[str, Any], path: Path) -> None:
    """Render *report* to *path* as Markdown: header, gate table, next actions."""
    lines: list[str] = []
    summary = report["summary"]
    lines.append("# cW Full Operational Readiness")
    lines.append("")
    lines.append(f"- Generated: `{report['generatedAt']}`")
    lines.append(f"- Active chains: `{', '.join(str(x) for x in report['activeChainIds'])}`")
    lines.append(f"- In-repo source of truth configured: `{summary['inRepoSourceOfTruthConfigured']}`")
    lines.append(f"- Fully operational and live: `{summary['fullyOperationalAndLive']}`")
    lines.append(f"- Blocked gates: `{summary['blockedGateCount']}`")
    lines.append(f"- Warning gates: `{summary['warningGateCount']}`")
    lines.append("")
    lines.append("## Gates")
    lines.append("")
    lines.append("| Gate | Status | Details | Evidence |")
    lines.append("|---|---:|---|---|")
    for gate in report["gates"]:
        # Multi-entry cells join with <br>; empty cells render as "-".
        details = "<br>".join(gate["details"]) if gate["details"] else "-"
        evidence = "<br>".join(f"`{x}`" for x in gate["evidence"]) if gate["evidence"] else "-"
        lines.append(f"| {gate['title']} | `{gate['status']}` | {details} | {evidence} |")
    lines.append("")
    lines.append("## Next Actions")
    lines.append("")
    for action in report["nextActions"]:
        lines.append(f"- {action}")
    lines.append("")
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text("\n".join(lines) + "\n")
def main() -> int:
    """CLI entry point: build the report, write JSON + Markdown, print summary.

    Returns 1 only when --strict is passed and at least one gate is blocked;
    otherwise returns 0.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--json-out", type=Path, default=ROOT / "reports" / "status" / "cw-full-operational-readiness-latest.json")
    parser.add_argument("--md-out", type=Path, default=ROOT / "reports" / "status" / "cw-full-operational-readiness-latest.md")
    parser.add_argument("--strict", action="store_true", help="Exit non-zero if any gate is blocked.")
    args = parser.parse_args()
    report = build_report()
    args.json_out.parent.mkdir(parents=True, exist_ok=True)
    args.json_out.write_text(json.dumps(report, indent=2) + "\n")
    write_markdown(report, args.md_out)
    print(f"Wrote {rel(args.json_out)}")
    print(f"Wrote {rel(args.md_out)}")
    print(f"In-repo configured: {report['summary']['inRepoSourceOfTruthConfigured']}")
    print(f"Fully operational/live: {report['summary']['fullyOperationalAndLive']}")
    if report["summary"]["blockedGateIds"]:
        print("Blocked gates: " + ", ".join(report["summary"]["blockedGateIds"]))
    if args.strict and report["summary"]["blockedGateCount"]:
        return 1
    return 0


if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,8 @@
#!/usr/bin/env bash
# Read-only readiness gate for cW source-of-truth, liquidity/indexing, bridge, and tracker status.
# Thin wrapper: resolves the repo root (two directories above this script),
# changes into it, then replaces the shell with the Python checker, forwarding
# all CLI arguments unchanged.
set -euo pipefail
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$ROOT"
exec python3 scripts/verify/check-cw-full-operational-readiness.py "$@"

View File

@@ -0,0 +1,364 @@
#!/usr/bin/env python3
"""Read-only CWMultiToken bridge readiness evidence.
The script checks deployed bridge contracts, configured routes, canonical-to-
mirrored token mappings, and cW token MINTER/BURNER roles. It does not send
transactions or trigger bridge transfers.
"""
from __future__ import annotations
import argparse
import json
import os
import subprocess
from datetime import datetime, timezone
from pathlib import Path
from typing import Any
# Repo root: two directories above this script (scripts/verify/ -> repo root).
ROOT = Path(__file__).resolve().parents[2]
# Canonical 138 -> public-chain token mapping consumed by load_token_rows().
TOKEN_MAPPING = ROOT / "config" / "token-mapping-multichain.json"
# Default evidence output locations (JSON payload + Markdown summary).
DEFAULT_JSON = ROOT / "reports" / "status" / "cw-multitoken-bridge-e2e-latest.json"
DEFAULT_MD = ROOT / "reports" / "status" / "cw-multitoken-bridge-e2e-latest.md"
# Destination chains as (chainId, display name, env-var suffix, chain selector).
# The selector string is passed as the uint64 key of the bridge `destinations`
# mapping (see check_l1); values appear to be CCIP chain selectors — TODO confirm.
# NOTE(review): this list includes Cronos (25); confirm intended scope against
# other active-chain lists in the repo.
ACTIVE_CHAINS = [
    (1, "Ethereum Mainnet", "MAINNET", "5009297550715157269"),
    (10, "Optimism", "OPTIMISM", "3734403246176062136"),
    (25, "Cronos", "CRONOS", "1456215246176062136"),
    (56, "BSC", "BSC", "11344663589394136015"),
    (100, "Gnosis", "GNOSIS", "465200170687744372"),
    (137, "Polygon", "POLYGON", "4051577828743386545"),
    (8453, "Base", "BASE", "15971525489660198786"),
    (42161, "Arbitrum", "ARBITRUM", "4949039107694359620"),
    (42220, "Celo", "CELO", "1346049177634351622"),
    (43114, "Avalanche", "AVALANCHE", "6433500567565415381"),
]
# Ordered RPC env-var candidates per chain id; env_first() picks the first non-empty.
RPC_CANDIDATES = {
    1: ["ETHEREUM_MAINNET_RPC", "ETH_MAINNET_RPC_URL", "MAINNET_RPC_URL", "RPC_URL_MAINNET"],
    10: ["OPTIMISM_RPC_URL", "OPTIMISM_MAINNET_RPC"],
    25: ["CRONOS_RPC_URL", "CRONOS_MAINNET_RPC", "CRONOS_RPC"],
    56: ["BSC_RPC_URL", "BSC_MAINNET_RPC"],
    100: ["GNOSIS_RPC_URL", "GNOSIS_MAINNET_RPC", "GNOSIS_RPC"],
    137: ["POLYGON_RPC_URL", "POLYGON_MAINNET_RPC"],
    8453: ["BASE_RPC_URL", "BASE_MAINNET_RPC"],
    42161: ["ARBITRUM_RPC_URL", "ARBITRUM_MAINNET_RPC"],
    42220: ["CELO_RPC_URL", "CELO_MAINNET_RPC", "CELO_RPC"],
    43114: ["AVALANCHE_RPC_URL", "AVALANCHE_MAINNET_RPC", "AVALANCHE_RPC"],
}
# Token-mapping keys always kept by load_token_rows() even when full_family is False.
CORE_KEYS = {"Compliant_USDT_cW", "Compliant_USDC_cW"}
def rel(path: Path) -> str:
    """Repo-root-relative string for *path*; the path as given when outside ROOT."""
    if path.is_relative_to(ROOT):
        return str(path.relative_to(ROOT))
    return str(path)
def env_first(keys: list[str]) -> str:
    """Return the first non-empty (stripped) environment value among *keys*, else ''."""
    stripped = (os.environ.get(name, "").strip() for name in keys)
    return next((value for value in stripped if value), "")
def is_address(value: str) -> bool:
    """True when *value* is a plausible EVM address: "0x" plus exactly 40 hex digits.

    The previous check validated only the prefix and total length, so any
    42-character string starting with "0x" (including non-hex placeholders)
    was accepted and then handed to `cast`; require real hex digits.
    """
    if not value.startswith("0x") or len(value) != 42:
        return False
    return all(ch in "0123456789abcdefABCDEF" for ch in value[2:])
def run_cast(args: list[str], timeout: int = 18) -> tuple[bool, str]:
    """Run a foundry `cast` subcommand from the repo root.

    Returns (success, output): success is True only when cast exits 0; output
    is stripped stdout when non-empty, otherwise stripped stderr. Any runner
    failure (missing binary, timeout, ...) becomes (False, str(exception)).
    """
    try:
        proc = subprocess.run(["cast", *args], cwd=ROOT, text=True, capture_output=True, timeout=timeout, check=False)
    except Exception as exc:  # noqa: BLE001 - evidence should capture any runner failure
        return False, str(exc)
    out = (proc.stdout or proc.stderr or "").strip()
    return proc.returncode == 0, out
def cast_call(address: str, signature: str, params: list[str], rpc: str) -> tuple[bool, str]:
    """Read-only `cast call` of *signature* on *address* via *rpc*; see run_cast()."""
    return run_cast(["call", address, signature, *params, "--rpc-url", rpc])
def cast_code(address: str, rpc: str) -> tuple[bool, str]:
    """Fetch deployed bytecode via `cast code`; "" or "0x" output means no contract."""
    return run_cast(["code", address, "--rpc-url", rpc])
def cast_keccak(value: str) -> str:
    """Hash *value* via `cast keccak`; raises RuntimeError when the run fails.

    Only the first whitespace-delimited token of the output is returned.
    """
    ok, out = run_cast(["keccak", value], timeout=5)
    if not ok:
        raise RuntimeError(f"cast keccak failed for {value}: {out}")
    return out.split()[0]
def bool_from_cast(value: str) -> bool:
    """True when the final line of cast output (stripped, lowercased) is exactly 'true'."""
    lines = value.strip().lower().splitlines()
    return bool(lines) and lines[-1] == "true"
def address_in_cast(value: str, expected: str) -> bool:
    """Case-insensitive check that cast output *value* contains *expected*."""
    return value.lower().find(expected.lower()) >= 0
def load_token_rows(full_family: bool) -> dict[int, list[dict[str, str]]]:
    """Read 138 -> destination token rows from token-mapping-multichain.json.

    Keeps keys ending in `_cW`; when *full_family* is False the selection is
    further restricted to CORE_KEYS. A row is kept only when both addresses
    are well-formed and the destination address is non-zero.

    Returns:
        {destination chain id: [{key, name, canonical, mirrored}, ...]},
        omitting chains with no qualifying rows.
    """
    data = json.loads(TOKEN_MAPPING.read_text())
    out: dict[int, list[dict[str, str]]] = {}
    for pair in data.get("pairs") or []:
        if pair.get("fromChainId") != 138:
            continue
        # NOTE(review): assumes toChainId is always present/parseable on 138 pairs.
        chain_id = int(pair.get("toChainId"))
        rows: list[dict[str, str]] = []
        for token in pair.get("tokens") or []:
            key = token.get("key", "")
            if not key.endswith("_cW") and key not in CORE_KEYS:
                continue
            if not full_family and key not in CORE_KEYS:
                continue
            address_from = token.get("addressFrom", "")
            address_to = token.get("addressTo", "")
            # Both sides must be 0x-prefixed 42-char addresses; mirrored side non-zero.
            if is_address(address_from) and is_address(address_to) and int(address_to, 16) != 0:
                rows.append(
                    {
                        "key": key,
                        "name": token.get("name", key),
                        "canonical": address_from,
                        "mirrored": address_to,
                    }
                )
        if rows:
            out[chain_id] = rows
    return out
def check_l1(l1_bridge: str, rpc: str, token_rows: dict[int, list[dict[str, str]]]) -> dict[str, Any]:
    """Collect read-only readiness evidence for the Chain 138 L1 bridge.

    Verifies deployed code, readable send/receive router addresses, and a
    configured `destinations` entry for every (token, destination selector)
    pair. Short-circuits with an error when the bridge address or RPC is unset.

    Args:
        l1_bridge: CW_L1_BRIDGE_CHAIN138 address (validated via is_address).
        rpc: Chain 138 RPC URL.
        token_rows: per-destination-chain token rows from load_token_rows().

    Returns:
        Evidence dict with readability flags, destination checks, an overall
        "passed" flag, and configuration errors.
    """
    result: dict[str, Any] = {
        "address": l1_bridge,
        "rpcConfigured": bool(rpc),
        "hasCode": False,
        "sendRouterReadable": False,
        "receiveRouterReadable": False,
        "destinationChecks": [],
        "passed": False,
        "errors": [],
    }
    if not is_address(l1_bridge):
        result["errors"].append("CW_L1_BRIDGE_CHAIN138 is unset or invalid.")
        return result
    if not rpc:
        result["errors"].append("RPC_URL_138/CHAIN138_RPC is unset.")
        return result
    ok, code = cast_code(l1_bridge, rpc)
    result["hasCode"] = ok and code not in ("", "0x")
    for field in ["sendRouter", "receiveRouter"]:
        ok, out = cast_call(l1_bridge, f"{field}()(address)", [], rpc)
        # Bug fix: the previous code indexed out.splitlines()[-1] unguarded, so
        # a successful cast call with empty output raised IndexError
        # (splitlines() of "" is []). Compute the last line defensively.
        last_line = out.splitlines()[-1].strip() if out else ""
        result[f"{field}Readable"] = ok and is_address(last_line)
        result[field] = last_line if ok else ""
    for chain_id, _, _, selector in ACTIVE_CHAINS:
        rows = token_rows.get(chain_id) or []
        for token in rows:
            ok, out = cast_call(
                l1_bridge,
                "destinations(address,uint64)((address,bool))",
                [token["canonical"], selector],
                rpc,
            )
            result["destinationChecks"].append(
                {
                    "chainId": chain_id,
                    "selector": selector,
                    "token": token["key"],
                    "canonical": token["canonical"],
                    "raw": out,
                    "configured": ok and "true" in out.lower(),
                }
            )
    # NOTE(review): all([]) is True, so "passed" can be True with zero
    # destination checks when token_rows is empty — confirm this is intended.
    result["passed"] = (
        result["hasCode"]
        and result["sendRouterReadable"]
        and result["receiveRouterReadable"]
        and all(x["configured"] for x in result["destinationChecks"])
    )
    return result
def check_chain(
    chain_id: int,
    name: str,
    suffix: str,
    selector: str,
    token_rows: list[dict[str, str]],
    minter_role: str,
    burner_role: str,
) -> dict[str, Any]:
    """Verify one active chain's cW bridge end to end.

    Checks: deployed bytecode, readable sendRouter/receiveRouter/feeToken,
    a configured destination route to chain 138, the canonical->mirrored
    mapping for every token row, and MINTER/BURNER role grants for the
    bridge on each mirrored token.

    Returns:
        Evidence dict; ``passed`` is the rollup of all individual checks.
    """
    rpc = env_first(RPC_CANDIDATES[chain_id])
    bridge = os.environ.get(f"CW_BRIDGE_{suffix}", "").strip()
    result: dict[str, Any] = {
        "chainId": chain_id,
        "network": name,
        "selector": selector,
        "bridge": bridge,
        "rpcConfigured": bool(rpc),
        "hasCode": False,
        "sendRouterReadable": False,
        "receiveRouterReadable": False,
        "feeTokenReadable": False,
        "tokenPairChecks": [],
        "roleChecks": [],
        "destination138": {},
        "passed": False,
        "errors": [],
    }
    if not rpc:
        result["errors"].append("RPC unset.")
        return result
    if not is_address(bridge):
        result["errors"].append(f"CW_BRIDGE_{suffix} is unset or invalid.")
        return result
    ok, code = cast_code(bridge, rpc)
    result["hasCode"] = ok and code not in ("", "0x")
    for field in ["sendRouter", "receiveRouter", "feeToken"]:
        ok, out = cast_call(bridge, f"{field}()(address)", [], rpc)
        # `cast` may report success with an empty payload; guard before taking
        # splitlines()[-1], which raises IndexError on an empty string.
        last_line = out.splitlines()[-1].strip() if ok and out.splitlines() else ""
        result[f"{field}Readable"] = ok and is_address(last_line)
        result[field] = last_line
    ok, out = cast_call(bridge, "destinations(uint64)((address,bool))", ["138"], rpc)
    result["destination138"] = {"raw": out, "configured": ok and "true" in out.lower()}
    for token in token_rows:
        # The bridge must map the canonical token to the expected mirror.
        ok, out = cast_call(bridge, "canonicalToMirrored(address)(address)", [token["canonical"]], rpc)
        mapped = ok and address_in_cast(out, token["mirrored"])
        result["tokenPairChecks"].append(
            {
                "token": token["key"],
                "canonical": token["canonical"],
                "expectedMirrored": token["mirrored"],
                "raw": out,
                "configured": mapped,
            }
        )
        # The bridge must hold mint and burn rights on the mirrored token.
        for role_name, role in [("MINTER_ROLE", minter_role), ("BURNER_ROLE", burner_role)]:
            ok_role, out_role = cast_call(
                token["mirrored"],
                "hasRole(bytes32,address)(bool)",
                [role, bridge],
                rpc,
            )
            result["roleChecks"].append(
                {
                    "token": token["key"],
                    "mirrored": token["mirrored"],
                    "role": role_name,
                    "holder": bridge,
                    "granted": ok_role and bool_from_cast(out_role),
                    "raw": out_role,
                }
            )
    result["passed"] = (
        result["hasCode"]
        and result["sendRouterReadable"]
        and result["receiveRouterReadable"]
        and result["destination138"]["configured"]
        and all(x["configured"] for x in result["tokenPairChecks"])
        and all(x["granted"] for x in result["roleChecks"])
    )
    return result
def write_markdown(payload: dict[str, Any], path: Path) -> None:
    """Render the bridge-readiness payload as a markdown report at `path`."""

    def _chain_notes(row: dict[str, Any]) -> str:
        # Collect errors and specific misconfigurations; "ok" when clean.
        notes: list[str] = list(row["errors"])
        missing_pairs = [check["token"] for check in row["tokenPairChecks"] if not check["configured"]]
        missing_roles = [f"{check['token']} {check['role']}" for check in row["roleChecks"] if not check["granted"]]
        if missing_pairs:
            notes.append("missing token pairs: " + ", ".join(missing_pairs[:6]))
        if missing_roles:
            notes.append("missing roles: " + ", ".join(missing_roles[:6]))
        if not row["destination138"].get("configured"):
            notes.append("destination 138 not configured")
        return "; ".join(notes) or "ok"

    summary = payload["summary"]
    lines = [
        "# cW MultiToken Bridge E2E Readiness",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Scope: `{payload['scope']}`",
        f"- All active chains passed: `{summary['allActiveChainsPassed']}`",
        f"- L1 passed: `{summary['l1Passed']}`",
        f"- Chain pass count: `{summary['passedChainCount']} / {summary['activeChainCount']}`",
        "",
        "## Chain Status",
        "",
        "| Chain | Network | Passed | Bridge | Notes |",
        "|---:|---|---:|---|---|",
    ]
    for row in payload["chains"]:
        bridge = row["bridge"] or "<unset>"
        lines.append(
            f"| {row['chainId']} | {row['network']} | `{row['passed']}` | `{bridge}` | {_chain_notes(row)} |"
        )
    l1 = payload["l1"]
    lines += ["", "## L1", "", f"- Bridge: `{l1['address']}`", f"- Passed: `{l1['passed']}`"]
    if l1["errors"]:
        lines.append(f"- Errors: `{'; '.join(l1['errors'])}`")
    missing = [f"{check['chainId']} {check['token']}" for check in l1["destinationChecks"] if not check["configured"]]
    lines.append(f"- Missing destination checks: `{', '.join(missing[:30]) if missing else 'none'}`")
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text("\n".join(lines) + "\n")
def main() -> int:
    """Run the cW bridge readiness sweep and emit JSON + markdown evidence."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--full-family", action="store_true", help="Check every cW mapping, not only cWUSDT/cWUSDC canary routes.")
    parser.add_argument("--json-out", type=Path, default=DEFAULT_JSON)
    parser.add_argument("--md-out", type=Path, default=DEFAULT_MD)
    parser.add_argument("--strict", action="store_true")
    args = parser.parse_args()

    token_rows = load_token_rows(args.full_family)
    minter_role = cast_keccak("MINTER_ROLE")
    burner_role = cast_keccak("BURNER_ROLE")

    # Probe the L1 bridge first, then every active chain.
    l1 = check_l1(
        os.environ.get("CW_L1_BRIDGE_CHAIN138", "").strip(),
        env_first(["RPC_URL_138", "CHAIN138_RPC", "CHAIN138_RPC_URL", "RPC_URL"]),
        token_rows,
    )
    chains: list[dict[str, Any]] = []
    for chain_id, name, suffix, selector in ACTIVE_CHAINS:
        chains.append(
            check_chain(chain_id, name, suffix, selector, token_rows.get(chain_id) or [], minter_role, burner_role)
        )

    failed_ids = [row["chainId"] for row in chains if not row["passed"]]
    all_passed = l1["passed"] and not failed_ids
    payload = {
        "schema": "cw-multitoken-bridge-e2e-readiness/v1",
        "generatedAt": datetime.now(timezone.utc).isoformat(),
        "scope": "full-family" if args.full_family else "core-cwusdt-cwusdc",
        "summary": {
            "readyForProduction": all_passed,
            "allActiveChainsPassed": all_passed,
            "l1Passed": l1["passed"],
            "activeChainCount": len(chains),
            "passedChainCount": len(chains) - len(failed_ids),
            "failedChainIds": failed_ids,
        },
        "roles": {"MINTER_ROLE": minter_role, "BURNER_ROLE": burner_role},
        "l1": l1,
        "chains": chains,
    }
    args.json_out.parent.mkdir(parents=True, exist_ok=True)
    args.json_out.write_text(json.dumps(payload, indent=2) + "\n")
    write_markdown(payload, args.md_out)
    print(f"Wrote {rel(args.json_out)}")
    print(f"Wrote {rel(args.md_out)}")
    print(f"All active chains passed: {payload['summary']['allActiveChainsPassed']}")
    if failed_ids:
        print("Failed chains: " + ", ".join(str(x) for x in failed_ids))
    # --strict makes a failing sweep a non-zero exit for CI gating.
    if args.strict and not all_passed:
        return 1
    return 0


if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,11 @@
#!/usr/bin/env bash
# Read-only evidence producer for CWMultiTokenBridgeL1/L2 route, role, and config readiness.
set -euo pipefail
repo_root="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$repo_root"
# Best-effort env load; the checker reads whatever env vars are present.
# shellcheck disable=SC1091
source scripts/lib/load-project-env.sh >/dev/null 2>&1 || true
exec python3 scripts/verify/check-cw-multitoken-bridge-e2e-readiness.py "$@"

View File

@@ -0,0 +1,145 @@
#!/usr/bin/env bash
# Public URL prereq checks for cWUSDC Etherscan token profile (d-bis.org surfaces).
# See: docs/04-configuration/etherscan/CWUSDC_ETHERSCAN_E2E_RECOMMENDATIONS.md
# Usage: bash scripts/verify/check-cwusdc-etherscan-prereq-urls.sh [--json-out PATH] [--md-out PATH] [--timeout SEC] [--retries N]
# Exit: 0 if every URL returns HTTP 200; 1 otherwise.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT"
# Evidence outputs are optional; timeout/retries come from env or flags.
JSON_OUT=""
MD_OUT=""
TIMEOUT="${CWUSDC_PROVIDER_URL_TIMEOUT:-15}"
RETRIES="${CWUSDC_PROVIDER_URL_RETRIES:-1}"
while [[ $# -gt 0 ]]; do
  case "$1" in
    --json-out)
      [[ $# -ge 2 ]] || { echo "Missing value for --json-out" >&2; exit 1; }
      JSON_OUT="$2"
      shift 2
      ;;
    --md-out)
      [[ $# -ge 2 ]] || { echo "Missing value for --md-out" >&2; exit 1; }
      MD_OUT="$2"
      shift 2
      ;;
    --timeout)
      [[ $# -ge 2 ]] || { echo "Missing value for --timeout" >&2; exit 1; }
      TIMEOUT="$2"
      shift 2
      ;;
    --retries)
      [[ $# -ge 2 ]] || { echo "Missing value for --retries" >&2; exit 1; }
      RETRIES="$2"
      shift 2
      ;;
    -h|--help)
      # Usage text is the first five comment lines of this file; keep in sync.
      sed -n '1,5p' "$0"
      exit 0
      ;;
    *)
      echo "Unknown argument: $1" >&2
      exit 1
      ;;
  esac
done
# Each probe appends one TSV row: url, http_code, passed, attempts, curl_status.
TMP_TSV="$(mktemp)"
trap 'rm -f "$TMP_TSV"' EXIT
FAIL=0
while IFS= read -r url; do
  # Skip blank lines and comment lines in the URL list below.
  [[ -z "$url" || "$url" =~ ^# ]] && continue
  code="000"
  curl_status=0
  attempts=0
  max_attempts=$((RETRIES + 1))
  # Retry until HTTP 200 or attempts are exhausted; pause 1s between tries.
  while [[ "$attempts" -lt "$max_attempts" ]]; do
    attempts=$((attempts + 1))
    curl_status=0
    code=$(curl -L --max-time "$TIMEOUT" -o /dev/null -s -w "%{http_code}" "$url") || curl_status=$?
    [[ -n "$code" ]] || code="000"
    [[ "$code" == "200" ]] && break
    [[ "$attempts" -lt "$max_attempts" ]] && sleep 1
  done
  if [[ "$code" != "200" ]]; then
    echo "FAIL $code $url (attempts=$attempts curl_status=$curl_status)" >&2
    FAIL=1
    printf '%s\t%s\t%s\t%s\t%s\n' "$url" "$code" "false" "$attempts" "$curl_status" >> "$TMP_TSV"
  else
    echo "OK 200 $url (attempts=$attempts)"
    printf '%s\t%s\t%s\t%s\t%s\n' "$url" "$code" "true" "$attempts" "$curl_status" >> "$TMP_TSV"
  fi
done <<'URLS'
https://d-bis.org/
https://d-bis.org/contact
https://d-bis.org/leadership
https://d-bis.org/gru/tokens
https://d-bis.org/security
https://d-bis.org/.well-known/trust.json
https://d-bis.org/brand-assets
https://d-bis.org/tokens/cwusdc.svg
URLS
# Optionally convert the TSV rows into JSON/markdown evidence artifacts.
if [[ -n "$JSON_OUT" || -n "$MD_OUT" ]]; then
  python3 - "$TMP_TSV" "$JSON_OUT" "$MD_OUT" <<'PY'
import json
import sys
from datetime import datetime, timezone
from pathlib import Path

tsv = Path(sys.argv[1])
json_out = Path(sys.argv[2]) if sys.argv[2] else None
md_out = Path(sys.argv[3]) if sys.argv[3] else None
checks = []
for line in tsv.read_text().splitlines():
    url, status, passed, attempts, curl_status = line.split("\t")
    checks.append({
        "url": url,
        "status": int(status) if status.isdigit() else status,
        "passed": passed == "true",
        "attempts": int(attempts),
        "curlStatus": int(curl_status),
    })
payload = {
    "schema": "cwusdc-etherscan-prereq-urls/v1",
    "generatedAt": datetime.now(timezone.utc).isoformat(),
    "summary": {
        "allPassed": all(check["passed"] for check in checks),
        "requiredCount": len(checks),
        "passedCount": sum(1 for check in checks if check["passed"]),
        "failedUrls": [check["url"] for check in checks if not check["passed"]],
    },
    "checks": checks,
}
if json_out:
    json_out.parent.mkdir(parents=True, exist_ok=True)
    json_out.write_text(json.dumps(payload, indent=2) + "\n")
    print(f"Wrote {json_out}")
if md_out:
    md_out.parent.mkdir(parents=True, exist_ok=True)
    lines = [
        "# cWUSDC Etherscan Prerequisite URL Evidence",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- All passed: `{payload['summary']['allPassed']}`",
        f"- Passed: `{payload['summary']['passedCount']} / {payload['summary']['requiredCount']}`",
        "",
        "| URL | Passed | HTTP | Attempts | curl status |",
        "|---|---:|---:|---:|---:|",
    ]
    for check in checks:
        lines.append(f"| {check['url']} | `{check['passed']}` | `{check['status']}` | `{check['attempts']}` | `{check['curlStatus']}` |")
    md_out.write_text("\n".join(lines) + "\n")
    print(f"Wrote {md_out}")
PY
fi
exit "$FAIL"

View File

@@ -0,0 +1,265 @@
#!/usr/bin/env python3
"""Probe cWUSDC public tracker/indexing surfaces and write evidence JSON."""
from __future__ import annotations
import argparse
import json
import re
import time
import urllib.error
import urllib.request
from datetime import datetime, timezone
from pathlib import Path
from typing import Any
ROOT = Path(__file__).resolve().parents[2]
DEFAULT_JSON = ROOT / "reports" / "status" / "cwusdc-external-trackers-live-latest.json"
DEFAULT_MD = ROOT / "reports" / "status" / "cwusdc-external-trackers-live-latest.md"
CWUSDC = "0x2de5f116bfce3d0f922d9c8351e0c5fc24b9284a"
POOLS = [
"0x1cf2e685682c7f7bef508f0af15dfb5cdda01ee3",
"0xc28706f899266b36bc43cc072b3a921bdf2c48d9",
]
URLS = [
{
"id": "etherscan_token_page",
"kind": "explorer",
"url": f"https://etherscan.io/token/{CWUSDC}",
"required": True,
"mustContain": ["cWUSDC", "Contract"],
},
{
"id": "coingecko_token_price_api",
"kind": "listing_api",
"url": f"https://api.coingecko.com/api/v3/simple/token_price/ethereum?contract_addresses={CWUSDC}&vs_currencies=usd&include_market_cap=true&include_24hr_vol=true",
"required": True,
"jsonTokenKey": CWUSDC,
},
{
"id": "coinmarketcap_dex_token",
"kind": "dex_index",
"url": f"https://dex.coinmarketcap.com/token/ethereum/{CWUSDC}/",
"required": True,
"mustContain": ["cWUSDC", "ethereum"],
},
{
"id": "dexscreener_token_pairs_v1",
"kind": "dex_index",
"url": f"https://api.dexscreener.com/token-pairs/v1/ethereum/{CWUSDC}",
"required": True,
"jsonRootMinLength": 1,
},
{
"id": "dexscreener_tokens_v1",
"kind": "dex_index",
"url": f"https://api.dexscreener.com/tokens/v1/ethereum/{CWUSDC}",
"required": True,
"jsonRootMinLength": 1,
},
{
"id": "dexscreener_v3_pair_api_legacy",
"kind": "dex_index",
"url": f"https://api.dexscreener.com/latest/dex/pairs/ethereum/{POOLS[0]}",
"required": False,
"jsonPathPresent": ["pairs"],
},
{
"id": "dexscreener_v2_pair_api_legacy",
"kind": "dex_index",
"url": f"https://api.dexscreener.com/latest/dex/pairs/ethereum/{POOLS[1]}",
"required": False,
"jsonPathPresent": ["pairs"],
},
{
"id": "dexscreener_orders_profile",
"kind": "dex_profile",
"url": f"https://api.dexscreener.com/orders/v1/ethereum/{CWUSDC}",
"required": False,
"jsonPathPresent": ["orders"],
},
{
"id": "geckoterminal_v3_pool",
"kind": "dex_index",
"url": f"https://api.geckoterminal.com/api/v2/networks/eth/pools/{POOLS[0]}",
"required": True,
"jsonPathPresent": ["data"],
},
{
"id": "geckoterminal_v2_pool",
"kind": "dex_index",
"url": f"https://api.geckoterminal.com/api/v2/networks/eth/pools/{POOLS[1]}",
"required": True,
"jsonPathPresent": ["data"],
},
]
def rel(path: Path) -> str:
    """Render `path` relative to the repo root; absolute when outside it."""
    if path.is_relative_to(ROOT):
        return str(path.relative_to(ROOT))
    return str(path)
def fetch(url: str, timeout: int) -> dict[str, Any]:
    """GET `url` and capture status/body/timing evidence without raising.

    Returns a dict with keys ok/status/elapsedMs/contentType/text/error;
    network failures are reported in `error` rather than propagated.
    """
    request = urllib.request.Request(
        url,
        headers={
            "User-Agent": "Mozilla/5.0 DBIS-readiness-check/1.0",
            "Accept": "application/json,text/html;q=0.9,*/*;q=0.8",
        },
    )
    t0 = time.time()

    def _elapsed_ms() -> int:
        return int((time.time() - t0) * 1000)

    try:
        with urllib.request.urlopen(request, timeout=timeout) as response:
            # Cap the body at 512 KB so evidence stays bounded.
            body = response.read(512_000).decode("utf-8", errors="replace")
            status = response.status
            content_type = response.headers.get("content-type", "")
        return {
            "ok": 200 <= status < 300,
            "status": status,
            "elapsedMs": _elapsed_ms(),
            "contentType": content_type,
            "text": body,
            "error": "",
        }
    except urllib.error.HTTPError as exc:
        error_body = exc.read(64_000).decode("utf-8", errors="replace") if exc.fp else ""
        return {
            "ok": False,
            "status": exc.code,
            "elapsedMs": _elapsed_ms(),
            "contentType": exc.headers.get("content-type", "") if exc.headers else "",
            "text": error_body,
            "error": str(exc),
        }
    except Exception as exc:  # noqa: BLE001 - evidence should capture network errors
        return {
            "ok": False,
            "status": None,
            "elapsedMs": _elapsed_ms(),
            "contentType": "",
            "text": "",
            "error": str(exc),
        }
def json_path_present(data: Any, path: list[str]) -> bool:
    """Walk `path` through nested dicts; a terminal list must be non-empty."""
    node = data
    for key in path:
        if not isinstance(node, dict):
            return False
        node = node.get(key)
    # Non-list terminals only need to exist; lists must also have elements.
    return bool(node) if isinstance(node, list) else node is not None
def evaluate(spec: dict[str, Any], timeout: int) -> dict[str, Any]:
    """Fetch one tracker surface and grade it against the spec's expectations.

    `passed` requires an HTTP 2xx AND every expectation the spec declares:
    `mustContain` (case-insensitive substrings), `jsonTokenKey` (top-level
    dict key, case-insensitive), `jsonPathPresent` (nested key paths), and
    `jsonRootMinLength` (root array length floor).
    """
    raw = fetch(spec["url"], timeout)
    # Drop the (potentially large) body from the persisted evidence record.
    text = raw.pop("text")
    evidence: dict[str, Any] = {
        "id": spec["id"],
        "kind": spec["kind"],
        "url": spec["url"],
        "required": spec["required"],
        "httpOk": raw["ok"],
        "status": raw["status"],
        "elapsedMs": raw["elapsedMs"],
        "contentType": raw["contentType"],
        "passed": False,
        "error": raw["error"],
        "details": [],
    }
    data: Any = None
    # Parse JSON when either the content type or the body shape suggests it.
    if "json" in raw["contentType"] or text.strip().startswith(("{", "[")):
        try:
            data = json.loads(text)
            evidence["jsonPreview"] = data if len(text) < 5000 else "json-too-large"
        except json.JSONDecodeError as exc:
            evidence["details"].append(f"JSON parse failed: {exc}")
    passed = raw["ok"]
    # Case-insensitive substring expectations against the raw body.
    for needle in spec.get("mustContain") or []:
        found = re.search(re.escape(needle), text, flags=re.I) is not None
        evidence["details"].append(f"contains `{needle}`: {found}")
        passed = passed and found
    if spec.get("jsonTokenKey"):
        # Token addresses vary in casing across providers; compare lowercased keys.
        token_key = spec["jsonTokenKey"].lower()
        found = isinstance(data, dict) and token_key in {str(k).lower(): v for k, v in data.items()}
        evidence["details"].append(f"json token key `{token_key}` present: {found}")
        passed = passed and found
    for path in spec.get("jsonPathPresent") or []:
        present = json_path_present(data, path if isinstance(path, list) else [path])
        evidence["details"].append(f"json path `{'.'.join(path) if isinstance(path, list) else path}` present: {present}")
        passed = passed and present
    if spec.get("jsonRootMinLength") is not None:
        min_len = int(spec["jsonRootMinLength"])
        found = isinstance(data, list) and len(data) >= min_len
        evidence["details"].append(f"json root array length >= {min_len}: {found}")
        passed = passed and found
    evidence["passed"] = bool(passed)
    return evidence
def write_markdown(payload: dict[str, Any], path: Path) -> None:
    """Render tracker-probe evidence as a markdown table at `path`."""
    summary = payload["summary"]
    header = [
        "# cWUSDC External Trackers Live Evidence",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- All trackers live: `{summary['allTrackersLive']}`",
        f"- Required passed: `{summary['requiredPassedCount']} / {summary['requiredCount']}`",
        "",
        "| Surface | Passed | HTTP | URL | Details |",
        "|---|---:|---:|---|---|",
    ]
    rows: list[str] = []
    for check in payload["checks"]:
        # Prefer detail notes; fall back to the probe error, then a dash.
        detail_text = "; ".join(check["details"]) or check["error"] or "-"
        rows.append(f"| {check['id']} | `{check['passed']}` | `{check['status']}` | {check['url']} | {detail_text} |")
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text("\n".join(header + rows) + "\n")
def main() -> int:
    """Probe every configured tracker surface and write JSON/markdown evidence."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--json-out", type=Path, default=DEFAULT_JSON)
    parser.add_argument("--md-out", type=Path, default=DEFAULT_MD)
    parser.add_argument("--timeout", type=int, default=20)
    parser.add_argument("--strict", action="store_true")
    args = parser.parse_args()

    results = [evaluate(spec, args.timeout) for spec in URLS]
    required_checks = [check for check in results if check["required"]]
    failed_required = [check["id"] for check in required_checks if not check["passed"]]
    all_live = not failed_required

    payload = {
        "schema": "cwusdc-external-trackers-live/v1",
        "generatedAt": datetime.now(timezone.utc).isoformat(),
        "token": {"chainId": 1, "network": "ethereum", "address": CWUSDC, "symbol": "cWUSDC"},
        "summary": {
            "allTrackersLive": all_live,
            "readyForEtherscanUsdValue": all_live,
            "requiredCount": len(required_checks),
            "requiredPassedCount": len(required_checks) - len(failed_required),
            "failedRequiredIds": failed_required,
        },
        "checks": results,
    }
    args.json_out.parent.mkdir(parents=True, exist_ok=True)
    args.json_out.write_text(json.dumps(payload, indent=2) + "\n")
    write_markdown(payload, args.md_out)
    print(f"Wrote {rel(args.json_out)}")
    print(f"Wrote {rel(args.md_out)}")
    print(f"All trackers live: {payload['summary']['allTrackersLive']}")
    if failed_required:
        print("Failed required trackers: " + ", ".join(failed_required))
    # --strict turns missing required trackers into a non-zero exit for CI.
    if args.strict and not all_live:
        return 1
    return 0


if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,8 @@
#!/usr/bin/env bash
# Probe public cWUSDC tracker/indexing surfaces and write readiness evidence.
# Thin wrapper: resolves the repo root, then hands all arguments to the
# Python checker so flags like --json-out/--strict pass straight through.
set -euo pipefail
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$ROOT"
exec python3 scripts/verify/check-cwusdc-external-trackers-live.py "$@"

View File

@@ -0,0 +1,162 @@
#!/usr/bin/env python3
"""Lightweight link check for the cWUSDC institutional evidence packet."""
from __future__ import annotations
import argparse
import datetime as dt
import json
import re
import urllib.request
from pathlib import Path
from typing import Any
ROOT = Path(__file__).resolve().parents[2]
REPORT_JSON = ROOT / "reports" / "status" / "cwusdc-institutional-doc-link-check-latest.json"
REPORT_MD = ROOT / "reports" / "status" / "cwusdc-institutional-doc-link-check-latest.md"
DOCS = [
"docs/04-configuration/CWUSDC_PROVIDER_SUBMISSION_PACKET.md",
"docs/04-configuration/etherscan/CWUSDC_EVIDENCE_BUNDLE_INDEX.md",
"docs/04-configuration/etherscan/CWUSDC_SUPPLY_AND_CIRCULATING_METHODOLOGY.md",
"docs/04-configuration/etherscan/CWUSDC_SECURITY_AND_AUDIT_DISCLOSURE.md",
"docs/04-configuration/etherscan/CWUSDC_PROVIDER_RESPONSE_TRACKER.md",
"docs/04-configuration/etherscan/CWUSDC_LIQUIDITY_READINESS_NO_BROADCAST_PLAN.md",
"docs/04-configuration/etherscan/CWUSDC_MAINNET_ETHERSCAN_PROFILE_PACKET.md",
"docs/04-configuration/etherscan/CWUSDC_ETHERSCAN_E2E_RECOMMENDATIONS.md",
"docs/04-configuration/etherscan/CWUSDC_ETHERSCAN_VALUE_EXECUTION_PLAN.md",
"docs/04-configuration/etherscan/CWUSDC_ETHERSCAN_BRIDGE_CROSSCHAIN_LAYER_MAP.md",
"docs/04-configuration/coingecko/CWUSDC_MAINNET_TRACKER_SUBMISSION_PACKET.md",
"docs/04-configuration/dexscreener/CWUSDC_DEXSCREENER_INDEXING_AND_PROFILE_PACKET_20260509.md",
"docs/04-configuration/metamask/METAMASK_ASSET_PRICE_PROVIDER_SUBMISSION_MATRIX.md",
]
LINK_RE = re.compile(r"(?<!!)\[[^\]]+\]\(([^)]+)\)")
BARE_URL_RE = re.compile(r"(?<![<`])https?://[^\s)`]+")
def strip_fragment(target: str) -> str:
    """Drop any `#fragment` suffix from a link target."""
    base, _sep, _fragment = target.partition("#")
    return base
def is_skipped(target: str) -> bool:
    """True for link targets the checker ignores: empty, in-page anchors,
    mailto links, and app/plugin scheme URLs."""
    if not target:
        return True
    return target.startswith(("#", "mailto:", "app://", "plugin://"))
def resolve_local(source: Path, target: str) -> Path:
    """Resolve a doc link target to a repo path.

    Absolute targets ("/x") are treated as repo-root-relative; relative
    targets resolve against the source document's directory, falling back
    to repo-root-relative when the result escapes the repository.
    """
    cleaned = urllib.request.url2pathname(strip_fragment(target))
    if cleaned.startswith("/"):
        return ROOT / cleaned.lstrip("/")
    resolved = (source.parent / cleaned).resolve()
    try:
        resolved.relative_to(ROOT)
    except ValueError:
        return ROOT / cleaned
    return resolved
def http_status(url: str, timeout: int = 15) -> dict[str, Any]:
    """GET a public URL; report ok/status, or ok=False with the probe error."""
    request = urllib.request.Request(url, method="GET", headers={"User-Agent": "dbis-cwusdc-link-check/1.0"})
    try:
        with urllib.request.urlopen(request, timeout=timeout) as response:
            status = response.status
        return {"ok": 200 <= status < 400, "status": status}
    except Exception as exc:  # noqa: BLE001 - report exact probe failure.
        return {"ok": False, "error": str(exc)}
def collect_links(path: Path) -> list[str]:
    """Extract every markdown link target and bare URL from a document,
    de-duplicated and sorted."""
    text = path.read_text()
    found: set[str] = set()
    for match in LINK_RE.finditer(text):
        found.add(match.group(1).strip())
    for match in BARE_URL_RE.finditer(text):
        # Trim trailing punctuation that prose attaches to bare URLs.
        found.add(match.group(0).strip(".,"))
    return sorted(found)
def build(args: argparse.Namespace) -> dict[str, Any]:
    """Check every packet document's links and assemble the evidence payload.

    HTTP(S) links pass by default and are only probed when --check-http is
    given; local links must resolve to an existing file under the repo.
    """
    records: list[dict[str, Any]] = []
    for doc in DOCS:
        source = ROOT / doc
        if not source.exists():
            # A missing source document is itself recorded as a failure.
            records.append({"source": doc, "target": doc, "type": "source", "ok": False, "error": "source missing"})
            continue
        for target in collect_links(source):
            if is_skipped(target):
                continue
            if target.startswith("http://") or target.startswith("https://"):
                result = {"source": doc, "target": target, "type": "http", "ok": True, "checked": False}
                if args.check_http:
                    # http_status may overwrite "ok" and add "status"/"error".
                    result.update(http_status(target, timeout=args.timeout))
                    result["checked"] = True
                records.append(result)
            else:
                resolved = resolve_local(source, target)
                records.append(
                    {
                        "source": doc,
                        "target": target,
                        "type": "local",
                        "resolved": str(resolved.relative_to(ROOT)) if resolved.exists() else str(resolved),
                        "ok": resolved.exists(),
                    }
                )
    failures = [record for record in records if not record.get("ok")]
    return {
        "schema": "cwusdc-institutional-doc-link-check/v1",
        "generatedAt": dt.datetime.now(dt.UTC).isoformat().replace("+00:00", "Z"),
        "checkedHttp": args.check_http,
        "sourceCount": len(DOCS),
        "linkCount": len(records),
        "failureCount": len(failures),
        "status": "pass" if not failures else "fail",
        "failures": failures,
        "records": records,
    }
def write_md(payload: dict[str, Any], path: Path) -> None:
    """Render the link-check payload as markdown; lists failures when present.

    Note: does not create parent directories — the caller is responsible.
    """
    lines = [
        "# cWUSDC Institutional Doc Link Check",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Status: `{payload['status']}`",
        f"- Sources: `{payload['sourceCount']}`",
        f"- Links checked: `{payload['linkCount']}`",
        f"- HTTP checked: `{payload['checkedHttp']}`",
        f"- Failures: `{payload['failureCount']}`",
        "",
    ]
    failures = payload["failures"]
    if not failures:
        lines.append("No broken institutional packet links were found.")
    else:
        lines += ["## Failures", "", "| Source | Target | Error |", "|---|---|---|"]
        for failure in failures:
            # Prefer the explicit error; fall back to the resolved path note.
            reason = failure.get("error", failure.get("resolved", "missing"))
            lines.append(f"| `{failure.get('source')}` | `{failure.get('target')}` | `{reason}` |")
    path.write_text("\n".join(lines) + "\n")
def main() -> int:
    """CLI entry point: run the link check and write JSON + markdown evidence.

    Returns 0 when every link resolves, 1 otherwise.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--check-http", action="store_true", help="Probe public HTTP(S) links too.")
    parser.add_argument("--timeout", type=int, default=15)
    parser.add_argument("--json-out", type=Path, default=REPORT_JSON)
    parser.add_argument("--md-out", type=Path, default=REPORT_MD)
    args = parser.parse_args()
    payload = build(args)
    args.json_out.parent.mkdir(parents=True, exist_ok=True)
    args.json_out.write_text(json.dumps(payload, indent=2) + "\n")
    # write_md does not create directories itself; do it here so a custom
    # --md-out path outside the default report directory still works.
    args.md_out.parent.mkdir(parents=True, exist_ok=True)
    write_md(payload, args.md_out)
    for out_path in (args.json_out, args.md_out):
        # Out paths are not guaranteed to live under ROOT; fall back to the
        # path as given instead of letting relative_to() raise ValueError.
        try:
            shown = out_path.relative_to(ROOT)
        except ValueError:
            shown = out_path
        print(f"Wrote {shown}")
    return 0 if payload["status"] == "pass" else 1


if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,71 @@
#!/usr/bin/env bash
# CI-safe cWUSDC provider readiness gate.
# Fails only on repo-controlled prerequisites. External provider blockers are reported, not gated.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT"
JSON_OUT="reports/status/cwusdc-provider-readiness-ci-latest.json"
MD_OUT="reports/status/cwusdc-provider-readiness-ci-latest.md"
HANDOFF_JSON="reports/status/cwusdc-provider-handoff-latest.json"
DOC_LINK_JSON="reports/status/cwusdc-institutional-doc-link-check-latest.json"
WRAPPER_STATUS=0
# Run the non-manual provider checks; remember the status instead of aborting
# so the combined gate below still runs and writes CI evidence.
bash "$SCRIPT_DIR/run-cwusdc-provider-nonmanual-checks.sh" --strict-repo || WRAPPER_STATUS=$?
# The link checker exits non-zero on broken links but still writes its JSON
# report. Do not let `set -e` abort here: the Python gate below reads that
# report, writes the combined CI evidence, and is responsible for failing.
python3 "$SCRIPT_DIR/check-cwusdc-institutional-doc-links.py" || true
python3 - "$HANDOFF_JSON" "$DOC_LINK_JSON" "$JSON_OUT" "$MD_OUT" <<'PY'
import json
import sys
from datetime import datetime, timezone
from pathlib import Path

handoff_path = Path(sys.argv[1])
doc_link_path = Path(sys.argv[2])
json_out = Path(sys.argv[3])
md_out = Path(sys.argv[4])
handoff = json.loads(handoff_path.read_text())
doc_links = json.loads(doc_link_path.read_text())
repo_ok = bool(handoff["summary"]["repoControlledPrereqsPassed"])
doc_links_ok = doc_links.get("status") == "pass"
external_blockers = [b for b in handoff.get("blockers", []) if b.get("type") != "repo_controlled"]
payload = {
    "schema": "cwusdc-provider-readiness-ci/v1",
    "generatedAt": datetime.now(timezone.utc).isoformat(),
    "status": "success" if repo_ok and doc_links_ok else "failed",
    "repoControlledPrereqsPassed": repo_ok and doc_links_ok,
    "baseRepoControlledPrereqsPassed": repo_ok,
    "institutionalDocLinksPassed": doc_links_ok,
    "institutionalDocLinksReport": str(doc_link_path),
    "externalBlockersAdvisoryCount": len(external_blockers),
    "externalBlockersAdvisory": external_blockers,
    "handoffReport": str(handoff_path),
}
json_out.parent.mkdir(parents=True, exist_ok=True)
json_out.write_text(json.dumps(payload, indent=2) + "\n")
lines = [
    "# cWUSDC Provider Readiness CI",
    "",
    f"- Generated: `{payload['generatedAt']}`",
    f"- Status: `{payload['status']}`",
    f"- Repo-controlled prerequisites passed: `{payload['repoControlledPrereqsPassed']}`",
    f"- Base provider prerequisites passed: `{repo_ok}`",
    f"- Institutional doc links passed: `{doc_links_ok}`",
    f"- External blockers advisory count: `{len(external_blockers)}`",
    f"- Handoff report: `{handoff_path}`",
    "",
    "External provider blockers are advisory in this CI gate. They require provider acceptance or operator action and should not fail repo-controlled CI.",
]
md_out.write_text("\n".join(lines) + "\n")
print(f"Wrote {json_out}")
print(f"Wrote {md_out}")
if not repo_ok:
    raise SystemExit(1)
if not doc_links_ok:
    raise SystemExit(1)
PY
# Reached only when the Python gate passed; propagate the wrapper's status.
exit "$WRAPPER_STATUS"

View File

@@ -0,0 +1,116 @@
#!/usr/bin/env bash
# Engine X MEV-defense readiness probe (read-only): summarizes whether
# sensitive broadcasts have a private/protected RPC configured and writes
# JSON + markdown evidence under reports/status/.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/load-project-env.sh
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh"
# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/mev-protection.sh
source "${PROJECT_ROOT}/scripts/lib/mev-protection.sh"
POLICY_PATH="${POLICY_PATH:-config/extraction/mainnet-cwusdc-usdc-support-policy.json}"
OUT_JSON="${OUT_JSON:-reports/status/engine-x-mev-defense-readiness-latest.json}"
OUT_MD="${OUT_MD:-reports/status/engine-x-mev-defense-readiness-latest.md}"
mkdir -p "$(dirname "${OUT_JSON}")"
# Resolve the effective write-RPC label and any configured private RPC key
# via the shared mev-protection helpers; failure to resolve a key is benign.
RPC_LABEL="$(mev_write_rpc_label)"
PRIVATE_KEY_NAME="$(mev_private_rpc_key 2>/dev/null || true)"
HAS_PRIVATE_RPC=0
[[ -n "${PRIVATE_KEY_NAME}" ]] && HAS_PRIVATE_RPC=1
ALLOW_PUBLIC="${ENGINE_X_ALLOW_PUBLIC_BROADCAST:-0}"
MEV_ENABLED="${ENGINE_X_MEV_PROTECTION:-1}"
# Status precedence: operator disabled > private RPC ready > public override > blocked.
READY=0
STATUS="blocked"
if [[ "${MEV_ENABLED}" != "1" ]]; then
  STATUS="disabled_by_operator"
elif [[ "${HAS_PRIVATE_RPC}" == "1" ]]; then
  READY=1
  STATUS="ready"
elif [[ "${ALLOW_PUBLIC}" == "1" ]]; then
  STATUS="public_broadcast_override"
fi
python3 - "${POLICY_PATH}" "${OUT_JSON}" "${OUT_MD}" "${READY}" "${STATUS}" "${RPC_LABEL}" "${PRIVATE_KEY_NAME:-}" "${MEV_ENABLED}" "${ALLOW_PUBLIC}" <<'PY'
import json
import sys
from datetime import datetime, timezone
from pathlib import Path

policy_path, out_json, out_md, ready, status, rpc_label, private_key_name, mev_enabled, allow_public = sys.argv[1:]
policy = json.loads(Path(policy_path).read_text())
mev_policy = policy.get("mevDefense", {})
surfaces = policy.get("quoteDefenseSurfaces", [])
payload = {
    "schema": "engine-x-mev-defense-readiness/v1",
    "generatedAt": datetime.now(timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z"),
    "ready": ready == "1",
    "status": status,
    "policyPath": policy_path,
    "configuredWriteRpcLabel": rpc_label,
    "configuredPrivateRpcEnvKey": private_key_name or None,
    "mevProtectionEnabled": mev_enabled == "1",
    "publicBroadcastOverride": allow_public == "1",
    "sensitiveSurfaces": [
        {
            "id": surface.get("id"),
            "venue": surface.get("venue"),
            "role": surface.get("role"),
            "poolAddress": surface.get("poolAddress"),
            "defenseMode": surface.get("defenseMode"),
        }
        for surface in surfaces
        if "quote" in (surface.get("role") or "") or surface.get("venue") in {"uniswap_v2_pair", "uniswap_v3_pool", "dodo_pmm"}
    ],
    "policy": mev_policy,
    "blockers": [],
    "operatorEnvironment": {
        "acceptedPrivateRpcEnvKeys": [
            "ENGINE_X_PRIVATE_TX_RPC",
            "MEV_BLOCKER_RPC_URL",
            "FLASHBOTS_RPC_URL",
            "BLOXROUTE_RPC_URL",
            "BLINK_RPC_URL",
        ],
        "publicOverrideEnvKey": "ENGINE_X_ALLOW_PUBLIC_BROADCAST",
        "disableGuardEnvKey": "ENGINE_X_MEV_PROTECTION",
    },
}
if not payload["ready"]:
    if payload["status"] == "blocked":
        payload["blockers"].append("No private/protected transaction RPC is configured for sensitive Engine X broadcasts.")
    elif payload["status"] == "disabled_by_operator":
        payload["blockers"].append("MEV guard is disabled by operator environment.")
    elif payload["status"] == "public_broadcast_override":
        payload["blockers"].append("Public broadcast override is enabled; do not use for adversarially sensitive loops.")
Path(out_json).write_text(json.dumps(payload, indent=2) + "\n")
lines = [
    "# Engine X MEV Defense Readiness",
    "",
    f"- generatedAt: `{payload['generatedAt']}`",
    f"- ready: `{str(payload['ready']).lower()}`",
    f"- status: `{payload['status']}`",
    f"- write RPC label: `{payload['configuredWriteRpcLabel']}`",
    f"- private RPC env key: `{payload['configuredPrivateRpcEnvKey'] or 'none'}`",
    f"- public override: `{str(payload['publicBroadcastOverride']).lower()}`",
    "",
    "## Sensitive Surfaces",
]
for surface in payload["sensitiveSurfaces"]:
    lines.append(f"- `{surface['id']}` / `{surface['venue']}` / `{surface['role']}` / `{surface['poolAddress']}`")
lines.extend(["", "## Blockers"])
if payload["blockers"]:
    lines.extend(f"- {blocker}" for blocker in payload["blockers"])
else:
    lines.append("- none")
lines.extend([
    "",
    "## Operator Rule",
    "",
    "Sensitive Engine X swaps, LP migrations, and quote-defense repairs must use `mev_cast_send`; scripts fail closed unless a protected RPC is configured or the operator explicitly enables the public broadcast override.",
])
Path(out_md).write_text("\n".join(lines) + "\n")
print(json.dumps({"ready": payload["ready"], "status": payload["status"], "writeRpcLabel": payload["configuredWriteRpcLabel"]}, indent=2))
PY

View File

@@ -90,7 +90,13 @@ from datetime import datetime, timezone
def units(raw):
return str(Decimal(int(raw or 0)) / Decimal(10**6))
v2_nums = [int(x) for x in re.findall(r"\b\d+\b", v2_reserves or "")]
v2_nums = []
for line in (v2_reserves or "").splitlines():
match = re.search(r"\b\d+\b", line)
if match:
v2_nums.append(int(match.group(0)))
if len(v2_nums) < 2:
v2_nums = [int(x) for x in re.findall(r"\b\d+\b", v2_reserves or "")]
blockers = []
if accounting_aware != "1":
blockers.append("configured Engine X vault is not accounting-aware")

View File

@@ -75,7 +75,13 @@ def parse_uint(value: str) -> int:
def parse_uints(value: str, count: int) -> list[int]:
matches = [int(match) for match in UINT_RE.findall(value)]
matches: list[int] = []
for line in value.splitlines():
line_matches = UINT_RE.findall(line)
if line_matches:
matches.append(int(line_matches[0]))
if len(matches) < count:
matches = [int(match) for match in UINT_RE.findall(value)]
if len(matches) < count:
raise ValueError(f"expected at least {count} integers, got {matches!r}")
return matches[:count]
@@ -198,6 +204,71 @@ def query_dodo_health(rpc_url: str, defended_venue: dict) -> dict:
}
def query_uniswap_v3_health(rpc_url: str, surface: dict) -> dict:
"""Snapshot a Uniswap v3 pool's live state for a quote-defense surface.

Reads token0/token1, fee, slot0 (price + tick), active liquidity, raw and
normalized token balances via `cast_call`, then classifies the current tick
against the surface's preferred range. Any RPC/parsing failure is caught and
reported as ``{"live": False, "error": ...}`` rather than raised.
"""
pool_address = surface["poolAddress"]
try:
token0 = parse_address(cast_call(rpc_url, pool_address, "token0()(address)"))
token1 = parse_address(cast_call(rpc_url, pool_address, "token1()(address)"))
fee = parse_uint(cast_call(rpc_url, pool_address, "fee()(uint24)"))
slot0 = cast_call(rpc_url, pool_address, "slot0()(uint160,int24,uint16,uint16,uint16,uint8,bool)")
slot_values = parse_uints(slot0, 2)
sqrt_price_x96 = slot_values[0]
# The tick may be signed; parse it directly from the second line before falling back.
# (parse_uints only extracts unsigned magnitudes, so its value is a fallback only.)
slot_lines = [line.strip().split()[0] for line in slot0.splitlines() if line.strip()]
tick = int(slot_lines[1]) if len(slot_lines) > 1 else int(slot_values[1])
liquidity = parse_uint(cast_call(rpc_url, pool_address, "liquidity()(uint128)"))
token0_balance = parse_uint(cast_call(rpc_url, token0, "balanceOf(address)(uint256)", pool_address))
token1_balance = parse_uint(cast_call(rpc_url, token1, "balanceOf(address)(uint256)", pool_address))
decimals0 = parse_uint(cast_call(rpc_url, token0, "decimals()(uint8)"))
decimals1 = parse_uint(cast_call(rpc_url, token1, "decimals()(uint8)"))
except Exception as exc:
return {"live": False, "poolAddress": pool_address, "error": str(exc)}
# When the policy supplies no activeRange, lower/upper default to the current
# tick, so the pool reports inside_preferred_range by construction.
preferred = surface.get("activeRange", {})
lower_tick = int(preferred.get("preferredLowerTick", tick))
upper_tick = int(preferred.get("preferredUpperTick", tick))
target_tick = int(preferred.get("targetTick", 0))
if tick < lower_tick:
range_status = "below_preferred_range"
elif tick > upper_tick:
range_status = "above_preferred_range"
else:
range_status = "inside_preferred_range"
return {
"live": True,
"poolAddress": pool_address,
"token0": token0,
"token1": token1,
"fee": fee,
"sqrtPriceX96": str(sqrt_price_x96),
"tick": tick,
"targetTick": target_tick,
"preferredLowerTick": lower_tick,
"preferredUpperTick": upper_tick,
"rangeStatus": range_status,
"activeLiquidity": str(liquidity),
"token0BalanceRaw": str(token0_balance),
"token1BalanceRaw": str(token1_balance),
"token0BalanceUnits": str(normalize_units(token0_balance, decimals0)),
"token1BalanceUnits": str(normalize_units(token1_balance, decimals1)),
}
def query_quote_defense_surface(rpc_url: str, surface: dict, base_address: str, quote_address: str) -> dict:
    """Route a quote-defense surface to its venue-specific health probe.

    Uniswap v2 pairs are enriched with the base/quote token addresses the pair
    reader expects; v3 pools and DODO PMM venues are probed as-is. Unknown
    venues yield a non-live result carrying an error message.
    """
    venue = surface.get("venue")
    if venue == "dodo_pmm":
        return query_dodo_health(rpc_url, surface)
    elif venue == "uniswap_v3_pool":
        return query_uniswap_v3_health(rpc_url, surface)
    elif venue == "uniswap_v2_pair":
        enriched = {**surface, "baseAddress": base_address, "quoteAddress": quote_address}
        return query_uniswap_pair_health(rpc_url, enriched)
    return {"live": False, "poolAddress": surface.get("poolAddress"), "error": f"unsupported venue {venue!r}"}
def choose_flash_amount(policy: dict, deviation_bps: Decimal) -> int:
for row in policy["managedCycle"]["quoteAmountByDeviationBps"]:
if deviation_bps >= Decimal(row["minDeviationBps"]):
@@ -282,6 +353,42 @@ def render_shell(result: dict) -> str:
return "\n".join(lines)
def build_quote_defense_decision(surfaces: list[dict], health_by_id: dict[str, dict]) -> dict:
    """Rank quote-defense surfaces by live health and pick a preferred lane.

    Parameters:
        surfaces: policy-declared surface dicts; each must carry ``id`` and
            should carry ``venue``.
        health_by_id: health snapshots keyed by surface id, as produced by
            ``query_quote_defense_surface``. A surface that is missing here or
            whose ``live`` flag is falsy becomes an "unreadable" blocker.

    Returns:
        A dict with ``preferredSurface`` (a candidate dict or None),
        ``candidates`` (all actionable surfaces in input order), and
        ``blockers`` (human-readable reasons a surface was rejected).
    """
    candidates: list[dict] = []
    blockers: list[str] = []
    for surface in surfaces:
        surface_id = surface["id"]
        health = health_by_id.get(surface_id, {})
        if not health.get("live"):
            blockers.append(f"{surface_id}: unreadable")
            continue
        venue = surface.get("venue")
        if venue == "uniswap_v3_pool":
            # Only an in-range pool with non-zero active liquidity can defend quotes directly.
            if health.get("rangeStatus") == "inside_preferred_range" and int(health.get("activeLiquidity", "0")) > 0:
                candidates.append({"surfaceId": surface_id, "action": "use_for_public_indexed_quote_defense"})
            else:
                candidates.append({"surfaceId": surface_id, "action": "rebalance_tick_before_use"})
        elif venue == "uniswap_v2_pair":
            quote_units = Decimal(health.get("quoteReserveUnits", "0"))
            if quote_units > 0:
                candidates.append({"surfaceId": surface_id, "action": "secondary_public_repair_or_activity_lane"})
            else:
                blockers.append(f"{surface_id}: zero quote reserve")
        elif venue == "dodo_pmm":
            # `live` was already verified above (the early continue), so a live
            # DODO surface is unconditionally a managed-lane candidate.
            candidates.append({"surfaceId": surface_id, "action": "managed_defended_lane_when_capital_and_quotes_pass"})
    # Prefer a directly usable v3 surface; otherwise fall back to one that only
    # needs a tick rebalance; otherwise no preferred surface.
    preferred = next(
        (row for row in candidates if row["action"] == "use_for_public_indexed_quote_defense"),
        next((row for row in candidates if row["action"] == "rebalance_tick_before_use"), None),
    )
    return {
        "preferredSurface": preferred,
        "candidates": candidates,
        "blockers": blockers,
    }
def main() -> int:
parser = argparse.ArgumentParser()
parser.add_argument("--shell", action="store_true", help="Emit shell-friendly KEY=VALUE lines.")
@@ -293,13 +400,21 @@ def main() -> int:
rpc_url = resolve_rpc_url(policy, env_values)
chain = deployment_status["chains"][str(policy["network"]["chainId"])]
base_address = chain["cwTokens"]["cWUSDC"]
quote_address = chain["anchorAddresses"]["USDC"]
public_pair = load_public_pair_from_policy(policy, deployment_status)
public_pair["baseAddress"] = chain["cwTokens"][public_pair["base"]]
public_pair["quoteAddress"] = chain["anchorAddresses"][public_pair["quote"]]
public_pair["baseAddress"] = base_address
public_pair["quoteAddress"] = quote_address
defended_venue = dict(policy["defendedVenue"])
public_health = query_uniswap_pair_health(rpc_url, public_pair)
defended_health = query_dodo_health(rpc_url, defended_venue)
quote_surfaces = policy.get("quoteDefenseSurfaces", [])
quote_surface_health = {
surface["id"]: query_quote_defense_surface(rpc_url, surface, base_address, quote_address)
for surface in quote_surfaces
}
quote_defense_decision = build_quote_defense_decision(quote_surfaces, quote_surface_health)
decision = build_decision(policy, public_health, defended_health)
if public_health.get("live"):
decision["publicDeviationBps"] = public_health["deviationBps"]
@@ -313,6 +428,9 @@ def main() -> int:
"publicPairHealth": public_health,
"defendedVenue": defended_venue,
"defendedVenueHealth": defended_health,
"quoteDefenseSurfaces": quote_surfaces,
"quoteDefenseSurfaceHealth": quote_surface_health,
"quoteDefenseDecision": quote_defense_decision,
"decision": decision,
}

View File

@@ -0,0 +1,356 @@
#!/usr/bin/env bash
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/load-project-env.sh
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh"
: "${ETHEREUM_MAINNET_RPC:?ETHEREUM_MAINNET_RPC is required}"
OUT_JSON="${OUT_JSON:-reports/status/mainnet-cwusdc-weth-liquidity-surfaces-latest.json}"
OUT_MD="${OUT_MD:-reports/status/mainnet-cwusdc-weth-liquidity-surfaces-latest.md}"
CWUSDC="${CWUSDC_MAINNET:-0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a}"
USDC="${USDC_MAINNET:-0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48}"
WETH="${WETH9_MAINNET:-0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2}"
UNIV2_FACTORY="${CHAIN_1_UNISWAP_V2_FACTORY:-0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f}"
UNIV2_ROUTER="${CHAIN_1_UNISWAP_V2_ROUTER:-0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D}"
UNIV3_FACTORY="${CHAIN_1_UNISWAP_V3_FACTORY:-0x1F98431c8aD98523631AE4a59f267346ea31F984}"
DODO_INTEGRATION="${DODO_PMM_INTEGRATION_MAINNET:-${CHAIN_1_DODO_PMM_INTEGRATION:-}}"
DODO_VENDING_MACHINE="${MAINNET_DODO_VENDING_MACHINE_ADDRESS:-${ETHEREUM_DODO_VENDING_MACHINE_ADDRESS:-}}"
MAINNET_CCIP_WETH9_BRIDGE="${MAINNET_CCIP_WETH9_BRIDGE:-}"
GAS_RESERVE_WEI="${ENGINE_X_WETH_POOL_GAS_RESERVE_WEI:-5000000000000000}"
MAX_ETH_WRAP_WEI="${ENGINE_X_WETH_POOL_MAX_WRAP_WEI:-0}"
PEG_TEST_AMOUNTS_USD="${ENGINE_X_WETH_PEG_TEST_AMOUNTS_USD:-0.005,0.01,0.025}"
if [[ -n "${PRIVATE_KEY:-}" ]]; then
DEPLOYER="$(cast wallet address --private-key "${PRIVATE_KEY}")"
else
DEPLOYER="${DEPLOYER_ADDRESS:-}"
fi
if [[ -z "${DEPLOYER}" ]]; then
echo "Set PRIVATE_KEY or DEPLOYER_ADDRESS" >&2
exit 1
fi
BLOCK_NUMBER="$(cast block-number --rpc-url "${ETHEREUM_MAINNET_RPC}")"
GAS_PRICE_WEI="$(cast gas-price --rpc-url "${ETHEREUM_MAINNET_RPC}")"
ETH_WEI="$(cast balance "${DEPLOYER}" --rpc-url "${ETHEREUM_MAINNET_RPC}")"
CWUSDC_RAW="$(cast call "${CWUSDC}" 'balanceOf(address)(uint256)' "${DEPLOYER}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | awk '{print $1}')"
USDC_RAW="$(cast call "${USDC}" 'balanceOf(address)(uint256)' "${DEPLOYER}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | awk '{print $1}')"
WETH_RAW="$(cast call "${WETH}" 'balanceOf(address)(uint256)' "${DEPLOYER}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | awk '{print $1}')"
UNIV2_PAIR="$(cast call "${UNIV2_FACTORY}" 'getPair(address,address)(address)' "${CWUSDC}" "${WETH}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | grep -oE '0x[a-fA-F0-9]{40}' | head -1)"
UNIV2_RESERVES=""
if [[ "${UNIV2_PAIR}" != "0x0000000000000000000000000000000000000000" ]]; then
UNIV2_RESERVES="$(cast call "${UNIV2_PAIR}" 'getReserves()(uint112,uint112,uint32)' --rpc-url "${ETHEREUM_MAINNET_RPC}" || true)"
fi
TOKEN0="$(printf '%s\n%s\n' "${CWUSDC}" "${WETH}" | tr '[:upper:]' '[:lower:]' | sort | sed -n '1p')"
TOKEN1="$(printf '%s\n%s\n' "${CWUSDC}" "${WETH}" | tr '[:upper:]' '[:lower:]' | sort | sed -n '2p')"
V3_POOLS_JSON="$(
for fee in 100 500 3000 10000; do
pool="$(cast call "${UNIV3_FACTORY}" 'getPool(address,address,uint24)(address)' "${TOKEN0}" "${TOKEN1}" "${fee}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | grep -oE '0x[a-fA-F0-9]{40}' | head -1)"
slot0=""
liquidity="0"
if [[ "${pool}" != "0x0000000000000000000000000000000000000000" ]]; then
slot0="$(cast call "${pool}" 'slot0()(uint160,int24,uint16,uint16,uint16,uint8,bool)' --rpc-url "${ETHEREUM_MAINNET_RPC}" 2>/dev/null || true)"
liquidity="$(cast call "${pool}" 'liquidity()(uint128)' --rpc-url "${ETHEREUM_MAINNET_RPC}" 2>/dev/null | awk '{print $1}' || echo 0)"
fi
python3 - "${fee}" "${pool}" "${liquidity}" "${slot0}" <<'PY'
import json, sys
fee, pool, liquidity, slot0 = sys.argv[1:]
print(json.dumps({"fee": int(fee), "pool": pool, "liquidity": liquidity, "slot0": slot0 or None}))
PY
done | python3 -c 'import json,sys; print(json.dumps([json.loads(line) for line in sys.stdin if line.strip()]))'
)"
WETH_QUOTES_JSON="$(
for amt in 100000000000000 200000000000000 500000000000000 1000000000000000 2000000000000000 5000000000000000; do
raw="$(cast call "${UNIV2_ROUTER}" 'getAmountsOut(uint256,address[])(uint256[])' "${amt}" "[${WETH},${USDC}]" --rpc-url "${ETHEREUM_MAINNET_RPC}" 2>/dev/null || true)"
python3 - "${amt}" "${raw}" <<'PY'
import json, re, sys
amt = int(sys.argv[1])
raw = sys.argv[2]
parts = raw.split(",", 1)
out = 0
if len(parts) == 2:
match = re.search(r"(\d+)", parts[1])
out = int(match.group(1)) if match else 0
print(json.dumps({"wethInRaw": str(amt), "usdcOutRaw": str(out)}))
PY
done | python3 -c 'import json,sys; print(json.dumps([json.loads(line) for line in sys.stdin if line.strip()]))'
)"
DODO_HAS_MANAGER="false"
DODO_CODE_LEN="0"
DODO_VENDING_CODE_LEN="0"
if [[ -n "${DODO_INTEGRATION}" ]]; then
DODO_CODE_LEN="$(cast code "${DODO_INTEGRATION}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | wc -c | tr -d ' ')"
POOL_MANAGER_ROLE="$(cast keccak "POOL_MANAGER_ROLE")"
DODO_HAS_MANAGER="$(cast call "${DODO_INTEGRATION}" 'hasRole(bytes32,address)(bool)' "${POOL_MANAGER_ROLE}" "${DEPLOYER}" --rpc-url "${ETHEREUM_MAINNET_RPC}" 2>/dev/null | tr -d '[:space:]' || echo false)"
fi
if [[ -n "${DODO_VENDING_MACHINE}" ]]; then
DODO_VENDING_CODE_LEN="$(cast code "${DODO_VENDING_MACHINE}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | wc -c | tr -d ' ')"
fi
mkdir -p "$(dirname "${OUT_JSON}")"
python3 - "${OUT_JSON}" "${OUT_MD}" \
"${BLOCK_NUMBER}" "${GAS_PRICE_WEI}" "${DEPLOYER}" "${CWUSDC}" "${WETH}" "${USDC}" "${MAINNET_CCIP_WETH9_BRIDGE}" \
"${ETH_WEI}" "${CWUSDC_RAW}" "${USDC_RAW}" "${WETH_RAW}" "${GAS_RESERVE_WEI}" "${MAX_ETH_WRAP_WEI}" \
"${UNIV2_PAIR}" "${UNIV2_RESERVES}" "${V3_POOLS_JSON}" "${WETH_QUOTES_JSON}" \
"${DODO_INTEGRATION}" "${DODO_VENDING_MACHINE}" "${DODO_HAS_MANAGER}" "${DODO_CODE_LEN}" "${DODO_VENDING_CODE_LEN}" \
"${PEG_TEST_AMOUNTS_USD}" <<'PY'
from decimal import Decimal, getcontext
from datetime import datetime, timezone
from pathlib import Path
import json
import re
import sys
getcontext().prec = 80
(
out_json, out_md, block_number, gas_price, deployer, cwusdc, weth, usdc, ccip_bridge,
eth_wei, cw_raw, usdc_raw, weth_raw, gas_reserve, max_wrap, univ2_pair, univ2_reserves,
v3_pools_json, weth_quotes_json, dodo_integration, dodo_vm, dodo_has_manager, dodo_code_len, dodo_vm_code_len,
peg_test_amounts_usd,
) = sys.argv[1:]
eth_wei_i = int(eth_wei)
weth_raw_i = int(weth_raw)
cw_raw_i = int(cw_raw)
gas_reserve_i = int(gas_reserve)
max_wrap_i = int(max_wrap)
usable_eth_for_wrap = max(eth_wei_i - gas_reserve_i, 0)
if max_wrap_i:
usable_eth_for_wrap = min(usable_eth_for_wrap, max_wrap_i)
usable_weth_raw = weth_raw_i + usable_eth_for_wrap
quotes = json.loads(weth_quotes_json)
price = Decimal(0)
for q in sorted(quotes, key=lambda x: int(x["wethInRaw"]), reverse=True):
if int(q["usdcOutRaw"]) > 0:
price = (Decimal(int(q["usdcOutRaw"])) / Decimal(10**6)) / (Decimal(int(q["wethInRaw"])) / Decimal(10**18))
break
cw_needed_for_wallet_weth_raw = int((Decimal(usable_weth_raw) / Decimal(10**18) * price * Decimal(10**6)).to_integral_value()) if price else 0
usable_pair_cw_raw = min(cw_raw_i, cw_needed_for_wallet_weth_raw)
usable_pair_weth_raw = usable_weth_raw if cw_needed_for_wallet_weth_raw <= cw_raw_i else int((Decimal(cw_raw_i) / Decimal(10**6) / price * Decimal(10**18)).to_integral_value()) if price else 0
v3_pools = json.loads(v3_pools_json)
v3_existing = [p for p in v3_pools if p["pool"].lower() != "0x0000000000000000000000000000000000000000"]
v2_exists = univ2_pair.lower() != "0x0000000000000000000000000000000000000000"
def fmt_decimal(value: Decimal, places: int = 18) -> str:
    """Render a Decimal at fixed precision, then trim trailing zeros and a bare dot."""
    rendered = f"{value:.{places}f}"
    if "." not in rendered:
        return rendered
    return rendered.rstrip("0").rstrip(".")
def cp_out(amount_in: Decimal, reserve_in: Decimal, reserve_out: Decimal, fee_bps: Decimal = Decimal(30)) -> Decimal:
    """Constant-product swap output after deducting a bps fee from the input.

    Returns Decimal(0) when the input amount or either reserve is non-positive.
    """
    if min(amount_in, reserve_in, reserve_out) <= 0:
        return Decimal(0)
    effective_in = amount_in * (Decimal(10000) - fee_bps) / Decimal(10000)
    return (effective_in * reserve_out) / (reserve_in + effective_in)
fee_scenarios_bps = [
("uniswapV3_1bp", Decimal(1)),
("uniswapV3_5bp", Decimal(5)),
("uniswapV2_30bp", Decimal(30)),
]
peg_tests = []
for raw_amount in [a.strip() for a in peg_test_amounts_usd.split(",") if a.strip()]:
amount_usd = Decimal(raw_amount)
cw_in = amount_usd
ideal_weth = (amount_usd / price) if price else Decimal(0)
fee_models = {}
for scenario_name, fee_bps in fee_scenarios_bps:
seeded_weth_out = cp_out(
cw_in,
Decimal(usable_pair_cw_raw) / Decimal(10**6),
Decimal(usable_pair_weth_raw) / Decimal(10**18),
fee_bps,
)
effective_usd_out = seeded_weth_out * price if price else Decimal(0)
loss_pct = ((amount_usd - effective_usd_out) / amount_usd * Decimal(100)) if amount_usd > 0 and effective_usd_out else Decimal(0)
reverse_cw_out = cp_out(
ideal_weth,
Decimal(usable_pair_weth_raw) / Decimal(10**18),
Decimal(usable_pair_cw_raw) / Decimal(10**6),
fee_bps,
)
reverse_loss_pct = ((amount_usd - reverse_cw_out) / amount_usd * Decimal(100)) if amount_usd > 0 and reverse_cw_out else Decimal(0)
fee_models[scenario_name] = {
"feeBps": str(fee_bps),
"cwusdcToWethOut": fmt_decimal(seeded_weth_out),
"cwusdcToWethUsdOut": fmt_decimal(effective_usd_out, 12),
"cwusdcToWethLossPct": fmt_decimal(loss_pct, 8),
"wethToCwusdcOut": fmt_decimal(reverse_cw_out, 12),
"wethToCwusdcLossPct": fmt_decimal(reverse_loss_pct, 8),
}
peg_tests.append({
"usdAmount": str(amount_usd),
"cwusdcRaw": str(int((amount_usd * Decimal(10**6)).to_integral_value())),
"idealWethRaw": str(int((ideal_weth * Decimal(10**18)).to_integral_value())) if price else "0",
"idealWeth": fmt_decimal(ideal_weth),
"walletSeededModels": fee_models,
"lossAccounting": "For cWUSDC->WETH9 canaries, loss is paid by spending slightly more cWUSDC value for the same WETH/USD reference value. WETH/USDC inventory is protected by exact-output/min-out guards.",
})
native_boundary = "Native ETH execution wraps to WETH9; DODO and UniV3 require ERC-20 WETH9, while UniV2 addLiquidityETH still creates a WETH9 pair."
usd_support_model = (
"cWUSDC/WETH9 pools support the USD peg as an indirect public price anchor through deep WETH/USD markets. "
"They do not replace direct cWUSDC/USDC quote liquidity for redemption-style proof, but they can provide indexable "
"on-chain evidence that 1 cWUSDC is priced near 1 USD when the implied cWUSDC/WETH price matches the live WETH/USD reference."
)
blockers = []
if int(usdc_raw) == 0:
blockers.append("deployer has 0 USDC; this is fine for WETH pools but means no cWUSDC/USDC repair can accompany them")
if usable_weth_raw == 0:
blockers.append("no WETH or spare ETH available after gas reserve")
if not dodo_integration:
blockers.append("DODO_PMM_INTEGRATION_MAINNET is not configured")
elif dodo_has_manager != "true":
blockers.append("deployer lacks DODO POOL_MANAGER_ROLE on Mainnet integration")
if dodo_vm and int(dodo_vm_code_len) <= 3:
blockers.append("configured DODO vending machine has no code")
dodo_single_sided = {
"requestedBaseToken": cwusdc,
"supportedAsEngineXInventory": cw_raw_i > 0,
"executableThroughCurrentIntegration": False,
"reason": "DODOPMMIntegration.addLiquidity requires both baseAmount > 0 and quoteAmount > 0; cWUSDC-only deposits are inventory/accounting support until a wrapper or quote-side seed is available.",
"currentCwusdcAvailable": str(Decimal(cw_raw_i) / Decimal(10**6)),
"recommendedMode": "record cWUSDC-only inventory inside Engine X first; promote to executable DODO PMM only after adding WETH9/USDC quote inventory or deploying a single-sided wrapper that controls solvency and min-out proofs",
}
payload = {
"schema": "mainnet-cwusdc-weth-liquidity-surfaces/v1",
"generatedAt": datetime.now(timezone.utc).isoformat(),
"blockNumber": int(block_number),
"gasPriceWei": gas_price,
"addresses": {
"deployer": deployer,
"cWUSDC": cwusdc,
"WETH9": weth,
"USDC": usdc,
"mainnetCcipWeth9Bridge": ccip_bridge or None,
},
"balances": {
"ethWei": eth_wei,
"eth": str(Decimal(int(eth_wei)) / Decimal(10**18)),
"wethRaw": weth_raw,
"weth": str(Decimal(weth_raw_i) / Decimal(10**18)),
"cwusdcRaw": cw_raw,
"cwusdc": str(Decimal(cw_raw_i) / Decimal(10**6)),
"usdcRaw": usdc_raw,
"usdc": str(Decimal(int(usdc_raw)) / Decimal(10**6)),
},
"marketReference": {
"wethUsdcPriceFromUniV2": str(price),
"wethQuotes": quotes,
},
"availableSeed": {
"gasReserveWei": str(gas_reserve_i),
"ethUsableForWrapWei": str(usable_eth_for_wrap),
"totalUsableWethRaw": str(usable_weth_raw),
"totalUsableWeth": str(Decimal(usable_weth_raw) / Decimal(10**18)),
"cwusdcNeededForAllUsableWethRaw": str(cw_needed_for_wallet_weth_raw),
"cwusdcNeededForAllUsableWeth": str(Decimal(cw_needed_for_wallet_weth_raw) / Decimal(10**6)),
"recommendedPairCwusdcRaw": str(usable_pair_cw_raw),
"recommendedPairCwusdc": str(Decimal(usable_pair_cw_raw) / Decimal(10**6)),
"recommendedPairWethRaw": str(usable_pair_weth_raw),
"recommendedPairWeth": str(Decimal(usable_pair_weth_raw) / Decimal(10**18)),
},
"pegTestAmounts": peg_tests,
"surfaces": {
"uniswapV2": {"pair": univ2_pair, "exists": v2_exists, "reservesRawText": univ2_reserves or None},
"uniswapV3": {"token0": min(cwusdc.lower(), weth.lower()), "token1": max(cwusdc.lower(), weth.lower()), "pools": v3_pools, "existingPools": v3_existing},
"dodo": {
"integration": dodo_integration or None,
"vendingMachine": dodo_vm or None,
"integrationCodeLength": int(dodo_code_len),
"vendingMachineCodeLength": int(dodo_vm_code_len),
"deployerHasPoolManagerRole": dodo_has_manager == "true",
},
},
"singleSidedDodoCwusdc": dodo_single_sided,
"boundary": native_boundary,
"usdPegSupportModel": usd_support_model,
"blockers": blockers,
"recommendation": [
"Treat cWUSDC/ETH and cWUSDC/WETH as the same WETH9-backed public market for indexers.",
"Use WETH-backed pools as an indirect USD peg support surface by comparing cWUSDC/WETH9 pool price against live WETH/USDC reference markets.",
"Create at most one canonical UniV3 cWUSDC/WETH9 pool first, using private/protected execution and a wider tick range than the cWUSDC/USDC attempt.",
"Use UniV2 addLiquidityETH only as a secondary indexable surface; it creates the same WETH9 pair.",
"Use DODO PMM only after confirming createPool against the configured vending machine and seeding with ERC-20 WETH9, not native ETH.",
],
}
Path(out_json).write_text(json.dumps(payload, indent=2) + "\n")
lines = [
"# Mainnet cWUSDC/WETH9 Liquidity Surface Evaluation",
"",
f"- Generated: `{payload['generatedAt']}`",
f"- Block: `{payload['blockNumber']}`",
f"- cWUSDC: `{cwusdc}`",
f"- WETH9 Cc2: `{weth}`",
f"- Native ETH boundary: {native_boundary}",
f"- USD peg support model: {usd_support_model}",
"",
"## Live Balances",
"",
f"- ETH: `{payload['balances']['eth']}`",
f"- WETH9: `{payload['balances']['weth']}`",
f"- cWUSDC: `{payload['balances']['cwusdc']}`",
f"- USDC: `{payload['balances']['usdc']}`",
"",
"## Existing Pools",
"",
f"- UniV2 cWUSDC/WETH9: `{univ2_pair}`",
]
for pool in v3_pools:
lines.append(f"- UniV3 fee `{pool['fee']}`: `{pool['pool']}` liquidity `{pool['liquidity']}`")
lines.extend([
f"- DODO integration: `{dodo_integration or 'not configured'}`",
f"- DODO deployer pool-manager role: `{dodo_has_manager == 'true'}`",
"",
"## Seed Capacity",
"",
f"- WETH/USDC reference price: `{price}`",
f"- Total usable WETH after gas reserve: `{payload['availableSeed']['totalUsableWeth']}`",
f"- cWUSDC needed to pair all usable WETH: `{payload['availableSeed']['cwusdcNeededForAllUsableWeth']}`",
f"- Recommended max seed from current wallet: `{payload['availableSeed']['recommendedPairCwusdc']} cWUSDC + {payload['availableSeed']['recommendedPairWeth']} WETH9`",
"",
"## Single-Sided DODO cWUSDC",
"",
f"- Supported as Engine X inventory: `{dodo_single_sided['supportedAsEngineXInventory']}`",
f"- Executable through current DODOPMMIntegration: `{dodo_single_sided['executableThroughCurrentIntegration']}`",
f"- Reason: {dodo_single_sided['reason']}",
f"- Recommended mode: {dodo_single_sided['recommendedMode']}",
"",
"## Peg Test Amounts",
"",
"| USD amount | cWUSDC in | Ideal WETH value | Modeled cWUSDC->WETH USD out | Modeled loss | Reverse WETH->cWUSDC out | Reverse loss |",
"|---:|---:|---:|---:|---:|---:|---:|",
])
for test in peg_tests:
for scenario_name, model in test["walletSeededModels"].items():
lines.append(
f"| `{test['usdAmount']} ({scenario_name})` | `{Decimal(test['cwusdcRaw']) / Decimal(10**6)}` | `{test['idealWeth']}` | "
f"`{model['cwusdcToWethUsdOut']}` | `{model['cwusdcToWethLossPct']}%` | "
f"`{model['wethToCwusdcOut']}` | `{model['wethToCwusdcLossPct']}%` |"
)
lines.extend([
"",
"Loss accounting: for cWUSDC->WETH9 canaries, loss is paid by spending slightly more cWUSDC value for the same WETH/USD reference value. Use exact-output or strict min-out guards so WETH, USDC, and lender inventory are not silently depleted.",
"",
"## Blockers",
])
lines.extend([f"- {b}" for b in blockers] if blockers else ["- none for a tiny WETH-backed pool seed"])
lines.extend(["", "## Recommendation"])
lines.extend([f"- {r}" for r in payload["recommendation"]])
Path(out_md).write_text("\n".join(lines) + "\n")
print(out_json)
print(out_md)
PY

View File

@@ -0,0 +1,269 @@
#!/usr/bin/env python3
"""Generate a current cWUSDC supply and circulating-supply attestation.
The output is intentionally tracker-facing: it separates on-chain total supply
from proposed circulating-supply methodology and does not silently exclude
operator or protocol balances unless explicitly requested.
"""
from __future__ import annotations
import argparse
import datetime as dt
import json
import os
import re
import time
import urllib.parse
import urllib.request
from decimal import Decimal, getcontext
from pathlib import Path
from typing import Any
getcontext().prec = 80
ROOT = Path(__file__).resolve().parents[2]
REPORT_JSON = ROOT / "reports" / "status" / "cwusdc-supply-circulating-attestation-latest.json"
REPORT_MD = ROOT / "reports" / "status" / "cwusdc-supply-circulating-attestation-latest.md"
ETHERSCAN_API = "https://api.etherscan.io/v2/api"
ETHERSCAN_PAGE = "https://etherscan.io/token/{address}"
CWUSDC = "0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a"
DECIMALS = 6
KNOWN_BALANCES = {
"operator": "0x4A666F96fC8764181194447A7dFdb7d471b301C8",
"engineXVirtualBatchVault": "0xf108586d1FC330EA1D4EA4ff8fd983cde94279B1",
"uniswapV3CwusdcUsdcPool": "0x1Cf2e685682C7F7beF508F0Af15Dfb5CDda01ee3",
"uniswapV2CwusdcUsdcPair": "0xC28706F899266b36BC43cc072b3a921BDf2C48D9",
}
def load_dotenv(path: Path) -> None:
    """Best-effort .env loader that fills os.environ without overriding existing keys.

    A missing file is a silent no-op. Blank lines, comment lines, and lines
    without '=' are skipped; values have surrounding single/double quotes stripped.
    """
    if not path.exists():
        return
    for raw_line in path.read_text().splitlines():
        entry = raw_line.strip()
        if not entry or entry.startswith("#") or "=" not in entry:
            continue
        name, _, raw_value = entry.partition("=")
        name = name.strip()
        cleaned = raw_value.strip().strip('"').strip("'")
        if name and name not in os.environ:
            os.environ[name] = cleaned
def fetch_json(url: str, timeout: int = 30) -> Any:
    """GET ``url`` with the tool's User-Agent header and decode the JSON body."""
    request = urllib.request.Request(
        url,
        headers={"User-Agent": "dbis-cwusdc-supply-attestation/1.0"},
    )
    with urllib.request.urlopen(request, timeout=timeout) as resp:
        body = resp.read()
    return json.loads(body.decode("utf-8"))
def etherscan_call(params: dict[str, str], api_key: str) -> Any:
"""Call the Etherscan v2 API (chain id 1) and return its ``result`` payload.

Retries up to 6 times with growing sleeps when the response looks
rate-limited; raises RuntimeError for any other API error or when the
retries are exhausted. Successful paths sleep ~0.35s before returning as a
crude request-rate throttle.
"""
query = {"chainid": "1", **params, "apikey": api_key}
url = f"{ETHERSCAN_API}?{urllib.parse.urlencode(query)}"
last_payload: Any = None
for attempt in range(6):
payload = fetch_json(url)
last_payload = payload
message = str(payload.get("message", ""))
result = payload.get("result")
# status != "0" counts as success (proxy-module responses without a status
# field also land here); "No transactions found" is a benign empty result.
if str(payload.get("status")) != "0" or message.lower() == "no transactions found":
time.sleep(0.35)
return result
# Even with status "0", accept a 0x-prefixed hex string as a usable result.
if isinstance(result, str) and result.startswith("0x"):
time.sleep(0.35)
return result
# Linear backoff on rate limiting, then retry the same URL.
if "rate limit" in str(result).lower() or "rate limit" in message.lower():
time.sleep(1.25 + attempt * 0.5)
continue
raise RuntimeError(f"Etherscan API error: {payload.get('message')} {payload.get('result')}")
raise RuntimeError(f"Etherscan API error after retries: {last_payload}")
def human(raw: int, decimals: int = DECIMALS) -> str:
    """Format a raw integer token amount as a plain fixed-point decimal string."""
    return f"{Decimal(raw) / (Decimal(10) ** decimals):f}"
def parse_int(value: Any) -> int:
    """Coerce an Etherscan numeric result (decimal string, 0x-hex string, or number) to int."""
    text = value if isinstance(value, str) else str(value)
    base = 16 if text.startswith("0x") else 10
    return int(text, base)
def fetch_etherscan_page_stats(address: str) -> dict[str, Any]:
"""Scrape the public Etherscan token page for holders/supply/market-cap hints.

NOTE(review): the regexes target Etherscan's current HTML markup
(ContentPlaceHolder1_* element ids) and are brittle by nature — confirm
against the live page if fields start coming back None/False.
"""
url = ETHERSCAN_PAGE.format(address=address)
# Browser-like User-Agent; the page endpoint is HTML, not the JSON API.
req = urllib.request.Request(url, headers={"User-Agent": "Mozilla/5.0 DBIS-cwusdc-attestation/1.0"})
with urllib.request.urlopen(req, timeout=30) as response:
html = response.read().decode("utf-8", errors="replace")
holders_match = re.search(r"<h4[^>]*>\s*Holders\s*</h4>\s*<div[^>]*>\s*<div>\s*([0-9,]+)", html, re.I)
total_supply_match = re.search(r'id="ContentPlaceHolder1_hdnTotalSupply" value="([^"]+)"', html)
# "Missing" means the row exists on the page but renders a bare "-" placeholder.
onchain_marketcap_missing = "id=\"ContentPlaceHolder1_tr_marketcap\"" in html and re.search(
r"id=\"ContentPlaceHolder1_tr_marketcap\".*?<div>\s*-\s*</div>", html, re.I | re.S
)
circulating_marketcap_missing = "id=\"ContentPlaceHolder1_tr_circulatingmarketcap\"" in html and re.search(
r"id=\"ContentPlaceHolder1_tr_circulatingmarketcap\".*?<div>\s*-\s*</div>", html, re.I | re.S
)
return {
"url": url,
"holdersText": holders_match.group(1) if holders_match else None,
"totalSupplyText": total_supply_match.group(1) if total_supply_match else None,
"onchainMarketCapMissing": bool(onchain_marketcap_missing),
"circulatingMarketCapMissing": bool(circulating_marketcap_missing),
}
def build(args: argparse.Namespace) -> dict[str, Any]:
"""Assemble the full supply/circulating-supply attestation payload.

Loads .env for the API key, reads total supply and the known protocol
balances from the Etherscan API, subtracts only the balances explicitly
named via --exclude-known, and attaches a scrape of the public token page.
Raises SystemExit when no Etherscan API key is available.
"""
load_dotenv(ROOT / ".env")
api_key = args.etherscan_api_key or os.environ.get("ETHERSCAN_API_KEY", "")
if not api_key:
raise SystemExit("ETHERSCAN_API_KEY is required")
# Pin a reference block so the attestation is reproducible in review.
latest_block_raw = etherscan_call({"module": "proxy", "action": "eth_blockNumber"}, api_key)
latest_block = parse_int(latest_block_raw)
total_supply_raw = parse_int(
etherscan_call(
{"module": "stats", "action": "tokensupply", "contractaddress": args.token},
api_key,
)
)
known: dict[str, Any] = {}
excluded_raw = 0
# Only labels the operator explicitly passed are excluded from circulating supply.
exclude_set = set(args.exclude_known or [])
for label, address in KNOWN_BALANCES.items():
raw = parse_int(
etherscan_call(
{
"module": "account",
"action": "tokenbalance",
"contractaddress": args.token,
"address": address,
"tag": "latest",
},
api_key,
)
)
excluded = label in exclude_set
if excluded:
excluded_raw += raw
known[label] = {
"address": address,
"balanceRaw": str(raw),
"balanceUnits": human(raw),
"excludedFromCirculatingSupply": excluded,
}
circulating_raw = total_supply_raw - excluded_raw
page_stats = fetch_etherscan_page_stats(args.token)
return {
"schema": "cwusdc-supply-circulating-attestation/v1",
"generatedAt": dt.datetime.now(dt.UTC).isoformat().replace("+00:00", "Z"),
"purpose": "Tracker-facing supply and circulating-supply attestation for Etherscan Value propagation.",
"network": {"chainId": 1, "name": "Ethereum Mainnet", "referenceBlock": latest_block},
"token": {
"address": args.token,
"caip19": f"eip155:1/erc20:{args.token}",
"name": "Wrapped cUSDC",
"symbol": "cWUSDC",
"decimals": DECIMALS,
"etherscan": ETHERSCAN_PAGE.format(address=args.token),
},
"supply": {
"totalSupplyRaw": str(total_supply_raw),
"totalSupplyUnits": human(total_supply_raw),
"excludedProtocolControlledRaw": str(excluded_raw),
"excludedProtocolControlledUnits": human(excluded_raw),
"circulatingSupplyRaw": str(circulating_raw),
"circulatingSupplyUnits": human(circulating_raw),
"formula": "circulatingSupply = totalSupply - explicitlyExcludedProtocolControlledNonCirculatingBalances",
"defaultPolicy": "No known balance is excluded unless the operator passes --exclude-known for that label or a tracker requests a specific exclusion methodology.",
},
"knownBalances": known,
"etherscanPageObservation": page_stats,
"submissionPosition": {
"readyForTrackerReview": True,
"requestedProviderAction": "Accept total/circulating supply for the exact Mainnet cWUSDC contract and use it with accepted USD price data to populate market cap/value surfaces.",
"caveats": [
"This is an on-chain supply attestation, not third-party listing approval.",
"Chain 138 cUSDC source-asset activity must not be counted as Ethereum Mainnet cWUSDC transfer activity.",
"If a tracker requires treasury, bridge, operator, or pool exclusions, regenerate with explicit --exclude-known labels and attach the requested signed inventory.",
],
},
}
def write_md(payload: dict[str, Any], path: Path) -> None:
    """Render the cWUSDC supply attestation payload as a Markdown report at ``path``."""
    supply = payload["supply"]
    token = payload["token"]
    page = payload["etherscanPageObservation"]
    # Header, supply table, and the known-balances table header.
    header = [
        "# cWUSDC Supply and Circulating-Supply Attestation",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Reference block: `{payload['network']['referenceBlock']}`",
        f"- Token: `{token['address']}`",
        f"- CAIP-19: `{token['caip19']}`",
        "",
        "## Supply",
        "",
        "| Field | Value |",
        "|---|---:|",
        f"| Total supply | `{supply['totalSupplyUnits']}` |",
        f"| Explicitly excluded protocol-controlled balances | `{supply['excludedProtocolControlledUnits']}` |",
        f"| Circulating supply | `{supply['circulatingSupplyUnits']}` |",
        "",
        f"Formula: `{supply['formula']}`",
        "",
        "## Known Balances",
        "",
        "| Label | Address | Balance | Excluded |",
        "|---|---|---:|---:|",
    ]
    # One table row per known balance, in payload order.
    balance_rows = [
        f"| `{label}` | `{item['address']}` | `{item['balanceUnits']}` | `{item['excludedFromCirculatingSupply']}` |"
        for label, item in payload["knownBalances"].items()
    ]
    observation = [
        "",
        "## Etherscan Observation",
        "",
        f"- URL: `{page['url']}`",
        f"- Holders text: `{page['holdersText']}`",
        f"- Total supply text: `{page['totalSupplyText']}`",
        f"- Onchain market cap missing: `{page['onchainMarketCapMissing']}`",
        f"- Circulating market cap missing: `{page['circulatingMarketCapMissing']}`",
        "",
        "## Caveats",
        "",
    ]
    caveats = [f"- {caveat}" for caveat in payload["submissionPosition"]["caveats"]]
    document = "\n".join(header + balance_rows + observation + caveats) + "\n"
    path.write_text(document)
def main() -> int:
    """CLI entry point: build the supply attestation and write JSON + Markdown reports.

    Returns:
        0 on success; build/network failures propagate as exceptions.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--token", default=CWUSDC)
    parser.add_argument("--etherscan-api-key", default="")
    parser.add_argument("--exclude-known", action="append", choices=sorted(KNOWN_BALANCES), help="Known balance label to exclude from circulating supply. Repeatable.")
    parser.add_argument("--json-out", type=Path, default=REPORT_JSON)
    parser.add_argument("--md-out", type=Path, default=REPORT_MD)
    args = parser.parse_args()
    payload = build(args)
    args.json_out.parent.mkdir(parents=True, exist_ok=True)
    args.json_out.write_text(json.dumps(payload, indent=2) + "\n")
    write_md(payload, args.md_out)

    def display(path: Path) -> Path:
        # BUG FIX: Path.relative_to raises ValueError when the operator passes an
        # output path outside the repository root (e.g. --json-out /tmp/x.json);
        # fall back to printing the path as given.
        try:
            return path.relative_to(ROOT)
        except ValueError:
            return path

    print(f"Wrote {display(args.json_out)}")
    print(f"Wrote {display(args.md_out)}")
    print(f"circulatingSupply={payload['supply']['circulatingSupplyUnits']}")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,206 @@
#!/usr/bin/env python3
"""Generate a global cUSDC/cWUSDC family supply proof from report APIs.
This is deliberately NOT the Ethereum Mainnet cWUSDC Etherscan supply proof.
It is a cross-chain family inventory. Entries without supply proof are listed
but excluded from aggregate totals.
"""
from __future__ import annotations
import argparse
import datetime as dt
import json
import urllib.request
from decimal import Decimal, getcontext
from pathlib import Path
from typing import Any
getcontext().prec = 80
ROOT = Path(__file__).resolve().parents[2]
DEFAULT_API = "https://explorer.d-bis.org/api/v1/report/all"
DEFAULT_JSON = ROOT / "reports" / "status" / "global-cusdc-cwusdc-family-supply-proof-latest.json"
DEFAULT_MD = ROOT / "reports" / "status" / "global-cusdc-cwusdc-family-supply-proof-latest.md"
ETH_MAINNET_CWUSDC = "0x2de5f116bfce3d0f922d9c8351e0c5fc24b9284a"
def fetch_json(url: str) -> Any:
    """GET ``url`` with the proof user agent and decode the body as JSON."""
    request = urllib.request.Request(
        url,
        headers={"User-Agent": "dbis-global-cusdc-cwusdc-proof/1.0"},
    )
    with urllib.request.urlopen(request, timeout=30) as resp:
        body = resp.read()
    return json.loads(body.decode("utf-8"))
def decimal_or_none(value: Any) -> Decimal | None:
if value is None:
return None
try:
return Decimal(str(value))
except Exception: # noqa: BLE001 - proof should keep malformed entries out of totals
return None
def fmt(value: Decimal) -> str:
    """Format a Decimal in plain fixed-point notation (no scientific exponent)."""
    return format(value, "f")
def iter_family_tokens(report: dict[str, Any]) -> list[dict[str, Any]]:
    """Extract every cUSDC/cWUSDC row from the report payload.

    Returns rows sorted by (chainId, symbol, address). Each row records supply
    strings, proof status, and whether it is the Ethereum Mainnet cWUSDC token.
    """
    family = {"cUSDC", "cWUSDC"}
    rows: list[dict[str, Any]] = []
    for chain_id, tokens in (report.get("tokens") or {}).items():
        if not isinstance(tokens, list):
            continue  # defensively skip malformed chain entries
        for token in tokens:
            symbol = token.get("symbol")
            if symbol not in family:
                continue
            total = decimal_or_none(token.get("totalSupply"))
            circulating = decimal_or_none(token.get("circulatingSupply"))
            provenance = token.get("supplyProofProvenance") or {}
            status_text = str(provenance.get("status", "")).lower()
            # A row only feeds aggregates when it has a parsable total supply and
            # its provenance is not still marked proof_required.
            proved = total is not None and status_text != "proof_required"
            address = str(token.get("address") or "").lower()
            resolved_chain = int(token.get("chainId") or chain_id)
            rows.append(
                {
                    "chainId": resolved_chain,
                    "address": address,
                    "symbol": symbol,
                    "name": token.get("name"),
                    "type": token.get("type"),
                    "decimals": token.get("decimals"),
                    "totalSupply": str(total) if total is not None else None,
                    "circulatingSupply": str(circulating) if circulating is not None else None,
                    "provedForAggregate": proved,
                    "isEthereumMainnetCwusdc": resolved_chain == 1 and address == ETH_MAINNET_CWUSDC,
                    "supplyProofProvenance": provenance,
                    "trackerCaveats": token.get("trackerCaveats") or [],
                }
            )
    rows.sort(key=lambda row: (row["chainId"], row["symbol"], row["address"]))
    return rows
def build(api_url: str) -> dict[str, Any]:
    """Assemble the cross-chain cUSDC/cWUSDC family supply-proof payload.

    Fetches the aggregate report from ``api_url``, partitions family token rows
    into proved vs proof-required, and sums supply totals over proved rows only.
    Returns the attestation dict that is later serialized to JSON and Markdown.
    """
    report = fetch_json(api_url)
    rows = iter_family_tokens(report)
    # Only rows with a parsable, proved supply contribute to aggregate totals;
    # proof_required rows are listed but excluded.
    proved = [row for row in rows if row["provedForAggregate"]]
    unproved = [row for row in rows if not row["provedForAggregate"]]
    totals: dict[str, Decimal] = {
        "globalFamilyTotalSupply": Decimal(0),
        "globalFamilyCirculatingSupply": Decimal(0),
        "baseCusdcTotalSupply": Decimal(0),
        "baseCusdcCirculatingSupply": Decimal(0),
        "wrappedCwusdcTotalSupply": Decimal(0),
        "wrappedCwusdcCirculatingSupply": Decimal(0),
    }
    for row in proved:
        total = Decimal(row["totalSupply"])
        # A proved row without an explicit circulating supply counts its full total.
        circulating = Decimal(row["circulatingSupply"] or row["totalSupply"])
        totals["globalFamilyTotalSupply"] += total
        totals["globalFamilyCirculatingSupply"] += circulating
        # Split the family aggregate into base (cUSDC) and wrapped (cWUSDC) buckets.
        if row["symbol"] == "cUSDC":
            totals["baseCusdcTotalSupply"] += total
            totals["baseCusdcCirculatingSupply"] += circulating
        elif row["symbol"] == "cWUSDC":
            totals["wrappedCwusdcTotalSupply"] += total
            totals["wrappedCwusdcCirculatingSupply"] += circulating
    # The Ethereum Mainnet cWUSDC entry is surfaced separately because Etherscan
    # submissions must use it alone, never the global family totals.
    eth_mainnet = next((row for row in rows if row["isEthereumMainnetCwusdc"]), None)
    return {
        "schema": "global-cusdc-cwusdc-family-supply-proof/v1",
        "generatedAt": dt.datetime.now(dt.UTC).isoformat().replace("+00:00", "Z"),
        "source": {
            "api": api_url,
            "reportGeneratedAt": report.get("generatedAt"),
        },
        "scope": {
            "description": "Cross-chain cUSDC/cWUSDC family supply inventory across report API chains.",
            "notForEtherscanEthereumTokenPage": True,
            "etherscanEthereumOnlyToken": {
                "chainId": 1,
                "address": ETH_MAINNET_CWUSDC,
                "note": "Use the Ethereum-only cWUSDC supply attestation for Etherscan Value submissions, not this global family total.",
                "entry": eth_mainnet,
            },
        },
        "summary": {
            "familyEntryCount": len(rows),
            "provedAggregateEntryCount": len(proved),
            "proofRequiredEntryCount": len(unproved),
            # Totals are flattened into the summary as plain fixed-point strings.
            **{key: fmt(value) for key, value in totals.items()},
        },
        "entries": rows,
        "proofRequiredEntries": unproved,
        "caveats": [
            "This is a global cross-chain family inventory, not a circulating-supply claim for any single chain explorer.",
            "Entries marked proof_required are excluded from aggregate totals.",
            "Ethereum Etherscan Value for cWUSDC must use only the Ethereum Mainnet cWUSDC contract supply.",
            "cUSDC source assets and cWUSDC wrapped assets may represent related economic rails; global totals should not be used as a market-cap input without a tracker-approved methodology that prevents double counting.",
        ],
    }
def write_md(payload: dict[str, Any], path: Path) -> None:
    """Render the global family supply proof payload as a Markdown report at ``path``."""
    summary = payload["summary"]
    lines = [
        "# Global cUSDC/cWUSDC Family Supply Proof",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Source API: `{payload['source']['api']}`",
        f"- Source report generated: `{payload['source']['reportGeneratedAt']}`",
        f"- Not for Ethereum Etherscan token page: `{payload['scope']['notForEtherscanEthereumTokenPage']}`",
        "",
        "## Aggregate Totals",
        "",
        "| Field | Value |",
        "|---|---:|",
        f"| Family entries | `{summary['familyEntryCount']}` |",
        f"| Proved aggregate entries | `{summary['provedAggregateEntryCount']}` |",
        f"| Proof-required entries | `{summary['proofRequiredEntryCount']}` |",
        f"| Global family total supply, proved only | `{summary['globalFamilyTotalSupply']}` |",
        f"| Global family circulating supply, proved only | `{summary['globalFamilyCirculatingSupply']}` |",
        f"| Base cUSDC total supply, proved only | `{summary['baseCusdcTotalSupply']}` |",
        f"| Wrapped cWUSDC total supply, proved only | `{summary['wrappedCwusdcTotalSupply']}` |",
        "",
        "## Ethereum Mainnet cWUSDC Reminder",
        "",
        "For Etherscan Value, use only:",
        "",
        f"`{payload['scope']['etherscanEthereumOnlyToken']['address']}`",
        "",
        "Do not use the global family total for the Ethereum token page.",
        "",
        "## Entries",
        "",
        "| Chain | Symbol | Type | Address | Total supply | Circulating supply | Proved |",
        "|---:|---|---|---|---:|---:|---:|",
    ]
    # One table row per family entry, in the payload's sorted order.
    for row in payload["entries"]:
        lines.append(
            f"| `{row['chainId']}` | `{row['symbol']}` | `{row['type']}` | `{row['address']}` | `{row['totalSupply']}` | `{row['circulatingSupply']}` | `{row['provedForAggregate']}` |"
        )
    lines.extend(["", "## Caveats", ""])
    for caveat in payload["caveats"]:
        lines.append(f"- {caveat}")
    path.write_text("\n".join(lines) + "\n")
def main() -> int:
    """CLI entry point: build the global family supply proof and write JSON + Markdown.

    Returns:
        0 on success; network or filesystem failures propagate as exceptions.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--api-url", default=DEFAULT_API)
    parser.add_argument("--json-out", type=Path, default=DEFAULT_JSON)
    parser.add_argument("--md-out", type=Path, default=DEFAULT_MD)
    args = parser.parse_args()
    payload = build(args.api_url)
    args.json_out.parent.mkdir(parents=True, exist_ok=True)
    args.json_out.write_text(json.dumps(payload, indent=2) + "\n")
    write_md(payload, args.md_out)

    def display(path: Path) -> Path:
        # BUG FIX: Path.relative_to raises ValueError for output paths outside the
        # repository root (e.g. --json-out /tmp/x.json); print the path as given.
        try:
            return path.relative_to(ROOT)
        except ValueError:
            return path

    print(f"Wrote {display(args.json_out)}")
    print(f"Wrote {display(args.md_out)}")
    print(f"globalFamilyTotalSupply={payload['summary']['globalFamilyTotalSupply']}")
    print(f"proofRequiredEntryCount={payload['summary']['proofRequiredEntryCount']}")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,361 @@
#!/usr/bin/env python3
"""Monitor whether cWUSDC USD value has propagated to Etherscan and upstream feeds."""
from __future__ import annotations
import argparse
import datetime as dt
import json
import os
import re
import time
import urllib.error
import urllib.parse
import urllib.request
from pathlib import Path
from typing import Any
ROOT = Path(__file__).resolve().parents[2]
REPORT_JSON = ROOT / "reports" / "status" / "cwusdc-etherscan-value-propagation-latest.json"
REPORT_MD = ROOT / "reports" / "status" / "cwusdc-etherscan-value-propagation-latest.md"
CWUSDC = "0x2de5f116bfce3d0f922d9c8351e0c5fc24b9284a"
ETHERSCAN_API = "https://api.etherscan.io/v2/api"
ETHERSCAN_PAGE = f"https://etherscan.io/token/{CWUSDC}"
COINGECKO_PRICE = (
"https://api.coingecko.com/api/v3/simple/token_price/ethereum?"
f"contract_addresses={CWUSDC}&vs_currencies=usd&include_market_cap=true&include_24hr_vol=true&include_last_updated_at=true"
)
DEXSCREENER_TOKEN_PAIRS = f"https://api.dexscreener.com/token-pairs/v1/ethereum/{CWUSDC}"
GECKOTERMINAL_POOLS = [
"https://api.geckoterminal.com/api/v2/networks/eth/pools/0x1cf2e685682c7f7bef508f0af15dfb5cdda01ee3",
"https://api.geckoterminal.com/api/v2/networks/eth/pools/0xc28706f899266b36bc43cc072b3a921bdf2c48d9",
]
def load_dotenv(path: Path) -> None:
    """Load KEY=VALUE pairs from ``path`` into os.environ without overriding existing vars.

    Blank lines, '#' comments, and lines without '=' are ignored; surrounding
    double and single quotes are stripped from values.
    """
    if not path.exists():
        return
    for raw_line in path.read_text().splitlines():
        entry = raw_line.strip()
        if not entry or entry.startswith("#") or "=" not in entry:
            continue
        name, _, raw_value = entry.partition("=")
        name = name.strip()
        cleaned = raw_value.strip().strip('"').strip("'")
        # First definition wins: never clobber a variable that is already set.
        if name and name not in os.environ:
            os.environ[name] = cleaned
def fetch_text(url: str, timeout: int = 30) -> tuple[int | None, str, str]:
    """GET ``url``; return (status, content-type, body). Failures are captured, not raised.

    HTTP error responses return their status and body; any other exception is
    reported as (None, "", str(exc)) so monitor evidence records the failure.
    """
    headers = {
        "User-Agent": "Mozilla/5.0 DBIS-cwusdc-value-monitor/1.0",
        "Accept": "application/json,text/html;q=0.9,*/*;q=0.8",
    }
    request = urllib.request.Request(url, headers=headers)
    try:
        with urllib.request.urlopen(request, timeout=timeout) as resp:
            body = resp.read().decode("utf-8", errors="replace")
            return resp.status, resp.headers.get("content-type", ""), body
    except urllib.error.HTTPError as exc:
        payload = exc.read().decode("utf-8", errors="replace") if exc.fp else ""
        ctype = exc.headers.get("content-type", "") if exc.headers else ""
        return exc.code, ctype, payload
    except Exception as exc:  # noqa: BLE001 - monitor evidence should capture transient failures
        return None, "", str(exc)
def fetch_json(url: str, timeout: int = 30) -> tuple[int | None, str, Any, str]:
    """Like fetch_text, but JSON-decode the body; decode failures land in the 4th slot."""
    status, content_type, text = fetch_text(url, timeout)
    try:
        parsed = json.loads(text)
    except json.JSONDecodeError as exc:
        return status, content_type, None, str(exc)
    return status, content_type, parsed, ""
def fetch_etherscan_api(params: dict[str, str], api_key: str) -> tuple[int | None, str, Any, str]:
    """Call the Etherscan v2 API with rate-limit retries.

    Args:
        params: module/action/query parameters (chainid=1 is always added).
        api_key: Etherscan API key appended to the query string.

    Returns:
        The last (status, content-type, decoded-json, parse-error) tuple observed.
    """
    query = {"chainid": "1", **params, "apikey": api_key}
    url = f"{ETHERSCAN_API}?{urllib.parse.urlencode(query)}"
    last: tuple[int | None, str, Any, str] = (None, "", None, "")
    # Up to 5 attempts; only rate-limited responses trigger another attempt.
    for attempt in range(5):
        status, content_type, data, error = fetch_json(url)
        last = (status, content_type, data, error)
        if error:
            # Body was not valid JSON - nothing to retry on.
            return last
        if not isinstance(data, dict):
            return last
        message = str(data.get("message", ""))
        result = data.get("result")
        # NOTE(review): a non-"0" API status is treated here as a terminal
        # (non-error) response; the short sleep presumably throttles subsequent
        # calls against the API - confirm against Etherscan's status convention.
        if str(data.get("status")) != "0":
            time.sleep(0.25)
            return last
        # status == "0": retry only when the message/result mentions a rate
        # limit, with a linearly growing backoff; otherwise return the error.
        if "rate limit" in message.lower() or "rate limit" in str(result).lower():
            time.sleep(1.25 + attempt * 0.5)
            continue
        return last
    return last
def extract_div_missing(html: str, element_id: str) -> bool:
    """True when the element with ``element_id`` renders a bare '-' placeholder div."""
    needle = rf'id="{re.escape(element_id)}".*?<div>\s*-\s*</div>'
    return re.search(needle, html, flags=re.I | re.S) is not None
def parse_etherscan() -> dict[str, Any]:
    """Scrape the cWUSDC Etherscan token page for profile and market-cap readiness signals."""
    status, content_type, html = fetch_text(ETHERSCAN_PAGE)
    profile_present = "Wrapped cUSDC" in html and "cWUSDC" in html
    supply_match = re.search(r'id="ContentPlaceHolder1_hdnTotalSupply" value="([^"]+)"', html)
    holders = re.search(r"<h4[^>]*>\s*Holders\s*</h4>\s*<div[^>]*>\s*<div>\s*([0-9,]+)", html, re.I)
    onchain_missing = extract_div_missing(html, "ContentPlaceHolder1_tr_marketcap")
    circulating_missing = extract_div_missing(html, "ContentPlaceHolder1_tr_circulatingmarketcap")
    # "Value ready" means: page fetched OK, profile text present, and neither
    # market-cap cell is still the '-' placeholder.
    ok_status = bool(status and 200 <= status < 300)
    ready = ok_status and profile_present and not onchain_missing and not circulating_missing
    return {
        "id": "etherscan_token_page",
        "url": ETHERSCAN_PAGE,
        "status": status,
        "contentType": content_type,
        "profileDetected": profile_present,
        "holdersText": holders.group(1) if holders else None,
        "totalSupplyText": supply_match.group(1) if supply_match else None,
        "onchainMarketCapMissing": onchain_missing,
        "circulatingMarketCapMissing": circulating_missing,
        "valueReady": ready,
    }
def parse_etherscan_tokeninfo(api_key: str) -> dict[str, Any]:
    """Query the Etherscan ``token.tokeninfo`` API and summarize metadata/price readiness.

    Args:
        api_key: Etherscan API key; when empty the check is recorded as skipped so
            the monitor can still run without credentials.

    Returns:
        A check dict with raw API fields plus derived readiness flags.
    """
    if not api_key:
        return {
            "id": "etherscan_tokeninfo_api",
            "url": ETHERSCAN_API,
            "status": None,
            "contentType": "",
            "parseError": "",
            "skipped": True,
            "skipReason": "ETHERSCAN_API_KEY is not set.",
            "metadataReady": False,
            "priceReady": False,
        }
    status, content_type, data, error = fetch_etherscan_api(
        {"module": "token", "action": "tokeninfo", "contractaddress": CWUSDC},
        api_key,
    )
    # The API returns a list of result entries; only the first dict entry is used.
    result = data.get("result") if isinstance(data, dict) else None
    entry = result[0] if isinstance(result, list) and result and isinstance(result[0], dict) else None
    token_price_raw = entry.get("tokenPriceUSD") if isinstance(entry, dict) else None
    try:
        token_price = float(token_price_raw) if token_price_raw not in (None, "") else 0.0
    except (TypeError, ValueError):
        token_price = 0.0
    # BUG FIX: the API may return "contractAddress": null; the previous
    # entry.get("contractAddress", "").lower() raised AttributeError on None
    # because the default only applies when the key is absent entirely.
    metadata_ready = bool(
        isinstance(entry, dict)
        and str(entry.get("contractAddress") or "").lower() == CWUSDC
        and entry.get("symbol") == "cWUSDC"
        and entry.get("tokenName")
    )
    profile_enriched = bool(
        isinstance(entry, dict)
        and (entry.get("image") or entry.get("website") or entry.get("description") or entry.get("twitter"))
    )
    return {
        "id": "etherscan_tokeninfo_api",
        "url": ETHERSCAN_API,
        "status": status,
        "contentType": content_type,
        "parseError": error,
        "skipped": False,
        "apiStatus": data.get("status") if isinstance(data, dict) else None,
        "apiMessage": data.get("message") if isinstance(data, dict) else None,
        "apiResultPreview": result,
        "metadataReady": metadata_ready,
        "profileEnriched": profile_enriched,
        "priceReady": token_price > 0,
        "tokenPriceUSD": token_price_raw,
        "tokenName": entry.get("tokenName") if isinstance(entry, dict) else None,
        "symbol": entry.get("symbol") if isinstance(entry, dict) else None,
        "divisor": entry.get("divisor") if isinstance(entry, dict) else None,
        "tokenType": entry.get("tokenType") if isinstance(entry, dict) else None,
        "totalSupply": entry.get("totalSupply") if isinstance(entry, dict) else None,
        "blueCheckmark": entry.get("blueCheckmark") if isinstance(entry, dict) else None,
        "image": entry.get("image") if isinstance(entry, dict) else None,
        "website": entry.get("website") if isinstance(entry, dict) else None,
        "descriptionPresent": bool(entry.get("description")) if isinstance(entry, dict) else False,
    }
def parse_coingecko() -> dict[str, Any]:
    """Check CoinGecko's by-contract price endpoint for a positive cWUSDC USD price."""
    status, content_type, data, error = fetch_json(COINGECKO_PRICE)
    entry = data.get(CWUSDC) if isinstance(data, dict) else None
    is_listed = isinstance(entry, dict)
    usd = entry.get("usd") if is_listed else None
    return {
        "id": "coingecko_token_price",
        "url": COINGECKO_PRICE,
        "status": status,
        "contentType": content_type,
        "parseError": error,
        "listedByContract": is_listed,
        "usd": usd,
        "marketCapUsd": entry.get("usd_market_cap") if is_listed else None,
        "volume24hUsd": entry.get("usd_24h_vol") if is_listed else None,
        "lastUpdatedAt": entry.get("last_updated_at") if is_listed else None,
        "priceReady": isinstance(usd, (int, float)) and usd > 0,
        "jsonPreview": data,
    }
def parse_dexscreener() -> dict[str, Any]:
    """Check whether DexScreener indexes any Ethereum pairs for cWUSDC."""
    status, content_type, data, error = fetch_json(DEXSCREENER_TOKEN_PAIRS)
    is_list = isinstance(data, list)
    pair_count = len(data) if is_list else 0
    return {
        "id": "dexscreener_token_pairs",
        "url": DEXSCREENER_TOKEN_PAIRS,
        "status": status,
        "contentType": content_type,
        "parseError": error,
        "pairCount": pair_count,
        "indexed": pair_count > 0,
        # Only the first three pairs are kept as evidence to bound report size.
        "jsonPreview": data[:3] if is_list else data,
    }
def parse_geckoterminal() -> list[dict[str, Any]]:
    """Probe the known GeckoTerminal pool endpoints and record indexing/liquidity fields."""
    results: list[dict[str, Any]] = []
    for url in GECKOTERMINAL_POOLS:
        status, content_type, data, error = fetch_json(url)
        if isinstance(data, dict):
            attrs = ((data or {}).get("data") or {}).get("attributes")
        else:
            attrs = None
        indexed = isinstance(attrs, dict)
        volume = (attrs.get("volume_usd") or {}).get("h24") if indexed else None
        results.append(
            {
                "id": "geckoterminal_pool",
                "url": url,
                "status": status,
                "contentType": content_type,
                "parseError": error,
                "indexed": indexed,
                "reserveUsd": attrs.get("reserve_in_usd") if indexed else None,
                "volume24hUsd": volume,
            }
        )
    return results
def build() -> dict[str, Any]:
    """Run every propagation check and assemble the monitor payload.

    Loads the repo .env for ETHERSCAN_API_KEY, probes Etherscan (page + API),
    CoinGecko, DexScreener, and GeckoTerminal, then derives a blocker list and
    readiness summary from the individual check results.
    """
    load_dotenv(ROOT / ".env")
    etherscan_api_key = os.environ.get("ETHERSCAN_API_KEY", "")
    etherscan = parse_etherscan()
    etherscan_tokeninfo = parse_etherscan_tokeninfo(etherscan_api_key)
    coingecko = parse_coingecko()
    dexscreener = parse_dexscreener()
    gecko = parse_geckoterminal()
    # Each unmet readiness condition adds one human-readable blocker string.
    blockers: list[str] = []
    if not etherscan["profileDetected"]:
        blockers.append("Etherscan token profile text was not detected.")
    if etherscan["onchainMarketCapMissing"]:
        blockers.append("Etherscan Onchain Market Cap is still blank.")
    if etherscan["circulatingMarketCapMissing"]:
        blockers.append("Etherscan Circulating Supply Market Cap is still blank.")
    # The tokeninfo endpoint may be gated behind Etherscan API Pro; when the
    # response preview says so, report that instead of a metadata/price failure.
    tokeninfo_preview = str(etherscan_tokeninfo.get("apiResultPreview", ""))
    if "API Pro endpoint" in tokeninfo_preview:
        blockers.append("Etherscan tokeninfo API is an API Pro endpoint for the current key; tokeninfo propagation cannot be monitored with the current plan.")
    elif not etherscan_tokeninfo["metadataReady"]:
        blockers.append("Etherscan tokeninfo API does not return accepted token metadata for cWUSDC.")
    if "API Pro endpoint" not in tokeninfo_preview and not etherscan_tokeninfo["priceReady"]:
        blockers.append("Etherscan tokeninfo API does not return a positive USD token price.")
    if not coingecko["priceReady"]:
        blockers.append("CoinGecko contract price API does not return a positive USD price.")
    if not dexscreener["indexed"]:
        blockers.append("DexScreener token-pairs API still does not index cWUSDC pairs.")
    return {
        "schema": "cwusdc-etherscan-value-propagation/v1",
        "generatedAt": dt.datetime.now(dt.UTC).isoformat().replace("+00:00", "Z"),
        "token": {
            "chainId": 1,
            "address": CWUSDC,
            "caip19": f"eip155:1/erc20:{CWUSDC}",
            "symbol": "cWUSDC",
        },
        "summary": {
            "etherscanValueReady": etherscan["valueReady"],
            "etherscanTokenInfoMetadataReady": etherscan_tokeninfo["metadataReady"],
            "etherscanTokenInfoPriceReady": etherscan_tokeninfo["priceReady"],
            "coingeckoPriceReady": coingecko["priceReady"],
            # Either surface showing a value is enough to consider propagation ready.
            "readyForEtherscanValuePropagation": etherscan["valueReady"] or coingecko["priceReady"],
            "blockers": blockers,
        },
        "checks": {
            "etherscan": etherscan,
            "etherscanTokenInfo": etherscan_tokeninfo,
            "coingecko": coingecko,
            "dexscreener": dexscreener,
            "geckoterminal": gecko,
        },
    }
def write_md(payload: dict[str, Any], path: Path) -> None:
    """Render the propagation-monitor payload as a Markdown report at ``path``."""
    summary = payload["summary"]
    checks = payload["checks"]
    lines = [
        "# cWUSDC Etherscan Value Propagation Monitor",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Token: `{payload['token']['address']}`",
        f"- CAIP-19: `{payload['token']['caip19']}`",
        f"- Etherscan value ready: `{summary['etherscanValueReady']}`",
        f"- Etherscan tokeninfo metadata ready: `{summary['etherscanTokenInfoMetadataReady']}`",
        f"- Etherscan tokeninfo price ready: `{summary['etherscanTokenInfoPriceReady']}`",
        f"- CoinGecko price ready: `{summary['coingeckoPriceReady']}`",
        "",
        "## Blockers",
        "",
    ]
    if summary["blockers"]:
        lines.extend(f"- {item}" for item in summary["blockers"])
    else:
        lines.append("- None detected by this monitor.")
    # One table row per fixed surface, then one per GeckoTerminal pool probed.
    lines.extend(
        [
            "",
            "## Checks",
            "",
            "| Surface | Status | Ready / indexed | Key fields |",
            "|---|---:|---:|---|",
            f"| Etherscan | `{checks['etherscan']['status']}` | `{checks['etherscan']['valueReady']}` | marketCapMissing={checks['etherscan']['onchainMarketCapMissing']}; circulatingMarketCapMissing={checks['etherscan']['circulatingMarketCapMissing']}; holders={checks['etherscan']['holdersText']} |",
            f"| Etherscan tokeninfo API | `{checks['etherscanTokenInfo']['status']}` | `{checks['etherscanTokenInfo']['metadataReady']}` / price `{checks['etherscanTokenInfo']['priceReady']}` | symbol={checks['etherscanTokenInfo']['symbol']}; price={checks['etherscanTokenInfo']['tokenPriceUSD']}; image={checks['etherscanTokenInfo']['image']}; website={checks['etherscanTokenInfo']['website']} |",
            f"| CoinGecko contract price | `{checks['coingecko']['status']}` | `{checks['coingecko']['priceReady']}` | usd={checks['coingecko']['usd']}; marketCap={checks['coingecko']['marketCapUsd']}; lastUpdated={checks['coingecko']['lastUpdatedAt']} |",
            f"| DexScreener token pairs | `{checks['dexscreener']['status']}` | `{checks['dexscreener']['indexed']}` | pairCount={checks['dexscreener']['pairCount']} |",
        ]
    )
    for item in checks["geckoterminal"]:
        lines.append(
            f"| GeckoTerminal pool | `{item['status']}` | `{item['indexed']}` | reserveUsd={item['reserveUsd']}; volume24hUsd={item['volume24hUsd']}; url={item['url']} |"
        )
    path.write_text("\n".join(lines) + "\n")
def main() -> int:
    """CLI entry point: write the propagation report.

    Returns:
        1 when --strict is set and Etherscan value is not yet ready; 0 otherwise.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--json-out", type=Path, default=REPORT_JSON)
    parser.add_argument("--md-out", type=Path, default=REPORT_MD)
    parser.add_argument("--strict", action="store_true")
    args = parser.parse_args()
    payload = build()
    args.json_out.parent.mkdir(parents=True, exist_ok=True)
    args.json_out.write_text(json.dumps(payload, indent=2) + "\n")
    write_md(payload, args.md_out)

    def display(path: Path) -> Path:
        # BUG FIX: Path.relative_to raises ValueError for output paths outside the
        # repository root (e.g. --json-out /tmp/report.json); print the path as given.
        try:
            return path.relative_to(ROOT)
        except ValueError:
            return path

    print(f"Wrote {display(args.json_out)}")
    print(f"Wrote {display(args.md_out)}")
    print(f"etherscanValueReady={payload['summary']['etherscanValueReady']}")
    if payload["summary"]["blockers"]:
        print("Blockers: " + "; ".join(payload["summary"]["blockers"]))
    if args.strict and not payload["summary"]["etherscanValueReady"]:
        return 1
    return 0


if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,303 @@
#!/usr/bin/env python3
from __future__ import annotations
import json
import os
import subprocess
from datetime import datetime, timezone
from decimal import Decimal, ROUND_FLOOR, getcontext
from pathlib import Path
from typing import Any
getcontext().prec = 80
ROOT = Path(__file__).resolve().parents[2]
POLICY_PATH = ROOT / "config/engine-x/automation-policy.json"
OUT_JSON = ROOT / "reports/status/engine-x-automated-liquidity-advisor-latest.json"
OUT_MD = ROOT / "reports/status/engine-x-automated-liquidity-advisor-latest.md"
XAUT_MAINNET = "0x68749665FF8D2d112Fa859AA293F07A622782F38"
def read_json(path: Path) -> dict[str, Any] | None:
try:
return json.loads(path.read_text())
except FileNotFoundError:
return None
except json.JSONDecodeError as exc:
return {"_error": f"invalid json: {exc}"}
def env_decimal(name: str, default: str | None = None) -> Decimal | None:
value = os.environ.get(name, default)
if value is None or value == "":
return None
return Decimal(value)
def env_int(name: str, default: str | None = None) -> int | None:
value = os.environ.get(name, default)
if value is None or value == "":
return None
return int(Decimal(value))
def decstr(value: Decimal, places: int = 6) -> str:
    """Format ``value`` quantized to ``places`` decimal places using context rounding."""
    quantum = Decimal(10) ** -places
    return str(value.quantize(quantum))
def raw6(units: Decimal) -> int:
    """Convert token units into a 6-decimal raw integer, flooring toward negative infinity."""
    scaled = units * Decimal(1_000_000)
    return int(scaled.to_integral_value(rounding=ROUND_FLOOR))
def units_from_raw(raw: int, decimals: int) -> Decimal:
    """Convert a raw on-chain integer amount into human-readable token units."""
    divisor = Decimal(10) ** decimals
    return Decimal(raw) / divisor
def get_path(payload: dict[str, Any] | None, *parts: str, default: Any = None) -> Any:
cur: Any = payload
for part in parts:
if not isinstance(cur, dict) or part not in cur:
return default
cur = cur[part]
return cur
def cast_call_raw(token: str, owner: str, rpc: str) -> int | None:
try:
result = subprocess.run(
["cast", "call", token, "balanceOf(address)(uint256)", owner, "--rpc-url", rpc],
check=True,
capture_output=True,
text=True,
timeout=30,
)
except (OSError, subprocess.CalledProcessError, subprocess.TimeoutExpired):
return None
first = result.stdout.strip().split()[0] if result.stdout.strip() else ""
return int(first) if first.isdigit() else None
def cast_decimals(token: str, rpc: str, fallback: int) -> int:
    """Query a token's decimals() via the foundry ``cast`` CLI; ``fallback`` on failure."""
    command = ["cast", "call", token, "decimals()(uint8)", "--rpc-url", rpc]
    try:
        completed = subprocess.run(
            command,
            check=True,
            capture_output=True,
            text=True,
            timeout=30,
        )
    except (OSError, subprocess.CalledProcessError, subprocess.TimeoutExpired):
        return fallback
    output = completed.stdout.strip()
    token_value = output.split()[0] if output else ""
    return int(token_value) if token_value.isdigit() else fallback
def main() -> int:
    """Assemble the Engine X automated liquidity advisor report.

    Combines the advisor policy with the latest readiness/health reports,
    computes a conservative XAUt-backed USDC borrow capacity, decides whether
    the requested cW* output can be satisfied, and emits:
      - OUT_JSON: full machine-readable report,
      - OUT_MD: operator-facing Markdown summary,
      - stdout: a short {ready, blockers, warnings} JSON summary.
    Always returns 0; readiness is expressed in the report, not the exit code.
    """
    # Advisor policy plus upstream status reports (each may be None when the
    # corresponding file is missing).
    policy = read_json(POLICY_PATH) or {}
    reports = {
        "supportHealth": read_json(ROOT / "reports/status/mainnet-cwusdc-usdc-support-health-latest.json"),
        "publicIndexedReadiness": read_json(ROOT / "reports/status/engine-x-public-indexed-readiness-latest.json"),
        "mevDefenseReadiness": read_json(ROOT / "reports/status/engine-x-mev-defense-readiness-latest.json"),
        "wethSupport": read_json(ROOT / "reports/status/mainnet-cwusdc-weth-liquidity-surfaces-latest.json"),
    }
    # Requested output, kept in sync between 6-decimal raw and unit forms;
    # defaults to 0.01 cWUSDC when neither env override is present.
    requested_symbol = os.environ.get("ENGINE_X_REQUESTED_CW_SYMBOL", "cWUSDC")
    requested_units = env_decimal("ENGINE_X_REQUESTED_OUTPUT_UNITS")
    requested_raw = env_int("ENGINE_X_REQUESTED_OUTPUT_RAW")
    if requested_raw is None and requested_units is not None:
        requested_raw = raw6(requested_units)
    if requested_units is None and requested_raw is not None:
        requested_units = Decimal(requested_raw) / Decimal(10**6)
    if requested_raw is None:
        requested_raw = 10_000
        requested_units = Decimal("0.01")
    # XAUt collateral token: decimals default to 6 but are probed live when a
    # mainnet RPC is configured.
    xaut_token = os.environ.get("XAUT_MAINNET", XAUT_MAINNET)
    xaut_decimals = 6
    if os.environ.get("ETHEREUM_MAINNET_RPC"):
        xaut_decimals = cast_decimals(xaut_token, os.environ["ETHEREUM_MAINNET_RPC"], xaut_decimals)
    xaut_available_raw = env_int("ENGINE_X_XAUT_AVAILABLE_RAW")
    if xaut_available_raw is None:
        xaut_units_env = env_decimal("ENGINE_X_XAUT_AVAILABLE_UNITS")
        xaut_available_raw = int((xaut_units_env * Decimal(10**xaut_decimals)).to_integral_value(rounding=ROUND_FLOOR)) if xaut_units_env is not None else 0
    xaut_available_units = units_from_raw(xaut_available_raw, xaut_decimals)
    # Risk parameters: env overrides win, then policy values, then hard-coded
    # defaults. NOTE(review): an env var explicitly set to "" makes env_int
    # return None here, and Decimal(None)/str(None) would then misbehave —
    # confirm these variables are either unset or non-empty.
    xaut_usd_price6 = Decimal(env_int("ENGINE_X_XAUT_USD_PRICE6", str(get_path(policy, "inputs", "collateral", "defaultUsdPrice6", default="3226640000"))))
    xaut_usd_price = xaut_usd_price6 / Decimal(10**6)
    ltv_bps = Decimal(env_int("ENGINE_X_BORROW_LTV_BPS", str(get_path(policy, "inputs", "risk", "defaultLtvBps", default=7500))))
    hf_bps = Decimal(env_int("ENGINE_X_BORROW_MIN_HEALTH_FACTOR_BPS", str(get_path(policy, "inputs", "risk", "defaultMinHealthFactorBps", default=11000))))
    max_round_trip_loss_bps = Decimal(env_int("ENGINE_X_MAX_ROUND_TRIP_LOSS_BPS", str(get_path(policy, "inputs", "risk", "defaultMaxRoundTripLossBps", default=100))))
    min_gas_reserve_wei = env_int("ENGINE_X_MIN_GAS_RESERVE_WEI", str(get_path(policy, "inputs", "risk", "defaultMinGasReserveWei", default="5000000000000000")))
    # Deployer wallet balances as reported by the readiness reports (string
    # decimals; missing data falls back to "0").
    deployer = get_path(reports["publicIndexedReadiness"], "deployer", default={}) or {}
    deployer_address = str(deployer.get("address") or os.environ.get("DEPLOYER_ADDRESS") or "")
    wallet_usdc = Decimal(str(deployer.get("usdc", "0")))
    wallet_cwusdc = Decimal(str(deployer.get("cwusdc", "0")))
    wallet_eth = Decimal(str(get_path(reports["wethSupport"], "balances", "eth", default="0")))
    # If no XAUt amount was supplied via env, fall back to the live on-chain
    # wallet balance (requires deployer address and a mainnet RPC).
    xaut_balance_source = "env"
    if xaut_available_raw == 0 and deployer_address and os.environ.get("ETHEREUM_MAINNET_RPC"):
        live_xaut = cast_call_raw(
            xaut_token,
            deployer_address,
            os.environ["ETHEREUM_MAINNET_RPC"],
        )
        if live_xaut is not None:
            xaut_available_raw = live_xaut
            xaut_available_units = units_from_raw(xaut_available_raw, xaut_decimals)
            xaut_balance_source = "live_wallet_balance"
    # Borrow sizing: LTV cap and health-factor cap; the conservative capacity
    # is the smaller of the two.
    collateral_usd = xaut_available_units * xaut_usd_price
    ltv_borrow_capacity = collateral_usd * ltv_bps / Decimal(10_000)
    # Conservative single-asset health-factor capacity: debt <= collateral_ltv_value / target_hf.
    hf_borrow_capacity = ltv_borrow_capacity * Decimal(10_000) / hf_bps if hf_bps > 0 else Decimal(0)
    borrow_capacity = min(ltv_borrow_capacity, hf_borrow_capacity)
    # Upstream readiness flags and quote-defense surface state.
    mev_ready = bool(get_path(reports["mevDefenseReadiness"], "ready", default=False))
    public_ready = bool(get_path(reports["publicIndexedReadiness"], "summary", "readyForPublicIndexedProof", default=False))
    support_preferred = get_path(reports["supportHealth"], "quoteDefenseDecision", "preferredSurface", default={}) or {}
    support_blockers = get_path(reports["supportHealth"], "quoteDefenseDecision", "blockers", default=[]) or []
    v3_tick = get_path(reports["supportHealth"], "quoteDefenseSurfaceHealth", "mainnet-cwusdc-usdc-univ3-100", "tick")
    v3_range_status = get_path(reports["supportHealth"], "quoteDefenseSurfaceHealth", "mainnet-cwusdc-usdc-univ3-100", "rangeStatus")
    # Fill check: the request is limited by both USDC-side capacity
    # (wallet + conservative borrow) and on-hand cWUSDC.
    requested_usd = requested_units or Decimal(0)
    available_loop_usd = min(wallet_usdc + borrow_capacity, wallet_cwusdc, requested_usd)
    can_satisfy_requested = available_loop_usd >= requested_usd and requested_usd > 0
    # Gate evaluation: blockers stop live execution; warnings only call for
    # operator review.
    blockers: list[str] = []
    warnings: list[str] = []
    if requested_symbol not in get_path(policy, "inputs", "requestedOutput", "supportedInitialSymbols", default=["cWUSDC"]):
        blockers.append(f"requested output symbol is not yet supported by this advisor: {requested_symbol}")
    if not public_ready:
        blockers.append("public indexed readiness is not passing")
    if not mev_ready:
        blockers.append("MEV/protected broadcast readiness is not passing for live automation")
    if support_preferred.get("action") not in {"ready_for_tiny_canary", "use_for_tiny_public_canary"}:
        warnings.append(f"preferred quote-defense surface action is {support_preferred.get('action', 'unknown')}; rebalance or operator review may be needed")
    if support_blockers:
        blockers.extend(f"support health blocker: {item}" for item in support_blockers)
    # wallet_eth is in ETH units; scale to wei before comparing to the reserve.
    if wallet_eth * Decimal(10**18) < Decimal(min_gas_reserve_wei or 0):
        blockers.append("wallet ETH is below configured gas reserve")
    if not can_satisfy_requested:
        blockers.append("requested output exceeds current wallet plus XAUt-backed conservative USDC capacity")
    live_execution_ready = not blockers and can_satisfy_requested
    endpoint_publication_ready = live_execution_ready and public_ready
    # Annotate each policy-defined automation phase with a computed status.
    phases = []
    for phase in get_path(policy, "automationPhases", default=[]):
        status = "ready"
        if phase["id"] == "phase_1_canary" and not live_execution_ready:
            status = "blocked"
        elif phase["id"] == "phase_2_liquidity_defense" and (not live_execution_ready or warnings):
            status = "operator_review"
        elif phase["id"] == "phase_3_endpoint_publication" and not endpoint_publication_ready:
            status = "blocked"
        elif phase["id"] == "phase_4_multi_asset_forex_crypto":
            status = "design_required"
        phases.append({**phase, "status": status})
    # Machine-readable report payload (written to OUT_JSON).
    payload = {
        "schema": "engine-x-automated-liquidity-advisor/v1",
        "generatedAt": datetime.now(timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z"),
        "policyPath": str(POLICY_PATH.relative_to(ROOT)),
        "request": {
            "outputSymbol": requested_symbol,
            "outputRaw": str(requested_raw),
            "outputUnits": decstr(requested_units or Decimal(0), 6),
        },
        "inputs": {
            "wallet": {
                "usdc": decstr(wallet_usdc, 6),
                "cwusdc": decstr(wallet_cwusdc, 6),
                "eth": str(wallet_eth),
            },
            "xautCollateral": {
                "availableRaw": str(xaut_available_raw),
                "availableUnits": decstr(xaut_available_units, min(xaut_decimals, 8)),
                "decimals": xaut_decimals,
                "token": xaut_token,
                "source": xaut_balance_source,
                "usdPrice": decstr(xaut_usd_price, 6),
                "collateralUsd": decstr(collateral_usd, 6),
            },
            "risk": {
                "ltvBps": str(ltv_bps),
                "minHealthFactorBps": str(hf_bps),
                "maxRoundTripLossBps": str(max_round_trip_loss_bps),
                "minGasReserveWei": str(min_gas_reserve_wei),
            },
        },
        "calculator": {
            "ltvBorrowCapacityUsdc": decstr(ltv_borrow_capacity, 6),
            "healthFactorBorrowCapacityUsdc": decstr(hf_borrow_capacity, 6),
            "conservativeBorrowCapacityUsdc": decstr(borrow_capacity, 6),
            "walletPlusBorrowUsdcCapacity": decstr(wallet_usdc + borrow_capacity, 6),
            "maxCurrentRequestFillUnits": decstr(available_loop_usd, 6),
            "canSatisfyRequestedOutput": can_satisfy_requested,
            "debtNeutralLoopInvariant": "borrow USDC, swap cWUSDC->USDC to repay debt, then swap borrowed USDC->cWUSDC; ending USDC debt must be zero before XAUt withdrawal",
        },
        "feedStatus": {
            "publicIndexedReady": public_ready,
            "mevDefenseReady": mev_ready,
            "preferredQuoteDefenseSurface": support_preferred,
            "uniV3Tick": v3_tick,
            "uniV3RangeStatus": v3_range_status,
        },
        "recommendedPhases": phases,
        "endpointReporting": {
            "ready": endpoint_publication_ready,
            "targets": get_path(policy, "feeds", "externalPublicationTargets", default=[]),
            "requiredEvidence": get_path(policy, "decisionGates", "publication", default=[]),
        },
        "blockers": blockers,
        "warnings": warnings,
        "operatorCommands": {
            "regenerateFeeds": get_path(policy, "feeds", "onChainReadinessCommands", default=[]),
            "dryRunAdvisor": "pnpm engine-x:automation-advisor",
            "liveExecution": "blocked unless this report has no blockers and operator-approved scripts are run with protected RPC",
        },
    }
    OUT_JSON.parent.mkdir(parents=True, exist_ok=True)
    OUT_JSON.write_text(json.dumps(payload, indent=2) + "\n")
    # Operator-facing Markdown summary (written to OUT_MD).
    lines = [
        "# Engine X Automated Liquidity Advisor",
        "",
        f"- generatedAt: `{payload['generatedAt']}`",
        f"- request: `{payload['request']['outputUnits']} {requested_symbol}`",
        f"- max current request fill: `{payload['calculator']['maxCurrentRequestFillUnits']} {requested_symbol}`",
        f"- conservative XAUt-backed USDC capacity: `{payload['calculator']['conservativeBorrowCapacityUsdc']} USDC`",
        f"- public indexed ready: `{str(public_ready).lower()}`",
        f"- MEV defense ready: `{str(mev_ready).lower()}`",
        f"- endpoint publication ready: `{str(endpoint_publication_ready).lower()}`",
        "",
        "## Calculator",
        "",
        f"- wallet USDC: `{payload['inputs']['wallet']['usdc']}`",
        f"- wallet cWUSDC: `{payload['inputs']['wallet']['cwusdc']}`",
        f"- XAUt available: `{payload['inputs']['xautCollateral']['availableUnits']}`",
        f"- XAUt collateral USD value: `{payload['inputs']['xautCollateral']['collateralUsd']}`",
        f"- wallet plus conservative borrow capacity: `{payload['calculator']['walletPlusBorrowUsdcCapacity']} USDC`",
        "",
        "## Phases",
    ]
    for phase in phases:
        lines.append(f"- `{phase['id']}`: `{phase['status']}` - {phase['name']}")
    lines.extend(["", "## Blockers"])
    lines.extend([f"- {item}" for item in blockers] or ["- none"])
    lines.extend(["", "## Warnings"])
    lines.extend([f"- {item}" for item in warnings] or ["- none"])
    lines.extend(["", "## Reporting Targets"])
    lines.extend([f"- {item}" for item in payload["endpointReporting"]["targets"]])
    OUT_MD.write_text("\n".join(lines) + "\n")
    # Short stdout summary for CLI/pipeline consumers.
    print(json.dumps({"ready": live_execution_ready, "blockers": blockers, "warnings": warnings}, indent=2))
    return 0
if __name__ == "__main__":
    # Propagate main()'s return code as the process exit status.
    raise SystemExit(main())

View File

@@ -0,0 +1,10 @@
#!/usr/bin/env bash
# Wrapper: load the project environment, then run the Engine X automated
# liquidity advisor planner. Fails fast on any error, unset variable, or
# pipeline failure.
set -euo pipefail
# Resolve paths relative to this script so the wrapper works from any CWD.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/load-project-env.sh
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh"
python3 "${PROJECT_ROOT}/scripts/verify/plan-engine-x-automated-liquidity-advisor.py"

View File

@@ -0,0 +1,932 @@
#!/usr/bin/env node
/**
* Read-only funding planner for token-aggregation adoption-readiness liquidity gaps.
*
* It does not broadcast transactions. It checks the deployer wallet's native and ERC-20 balances
* for every current liquidityMissingDetails row and classifies each row as:
* - fundable_token_balance_present
* - gas_gated
* - token_balance_gated
* - pool_binding_gated
*/
import { createHash } from "node:crypto";
import { mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { resolve } from "node:path";
// Repository root, resolved relative to this script's own location.
const repoRoot = resolve(new URL("../..", import.meta.url).pathname);
// Input readiness/health reports consumed read-only by this planner.
const readinessPath = resolve(repoRoot, "reports/status/token-aggregation-adoption-readiness-live-20260509.json");
const nonEvmHealthPath = resolve(repoRoot, "reports/status/non-evm-network-health-latest.json");
const nonEvmLaneStatusPath = resolve(repoRoot, "reports/status/non-evm-lane-status-latest.json");
// Output artifacts written by this planner.
const jsonOut = resolve(repoRoot, "reports/status/token-aggregation-liquidity-gap-funding-plan-latest.json");
const mdOut = resolve(repoRoot, "reports/status/token-aggregation-liquidity-gap-funding-plan-latest.md");
// EVM deployer wallet; env-overridable, with the repo default as fallback.
const deployer = (process.env.DEPLOYER_ADDRESS || process.env.DEPLOYER || "0x4A666F96fC8764181194447A7dFdb7d471b301C8").trim();
// .env files scanned by readEnvValue() when process.env lacks a key.
const envFiles = [resolve(repoRoot, ".env"), resolve(repoRoot, "smom-dbis-138/.env")];
// Gas budgeting knobs: number of upkeep cycles to pre-fund, and a safety
// multiplier in basis points (15000 = 1.5x) applied to the gas estimate.
const stabilityCycles = Number(process.env.TOKEN_AGGREGATION_STABILITY_CYCLES || "30");
const gasSafetyBps = BigInt(process.env.TOKEN_AGGREGATION_GAS_SAFETY_BPS || "15000");
// Discretionary small-row funding allowance in USD.
const coffeeMoneyUsdAvailable = Number(process.env.DEPLOYER_COFFEE_MONEY_USD || "48");
const coffeeMoneyLiquidityUsdPerRow = Number(process.env.COFFEE_MONEY_LIQUIDITY_USD_PER_ROW || "1");
// Chain ids where a bridge/destination-mint funding path is considered
// available, and chains treated as Protocolink swap-route candidates.
const bridgeCapableChains = new Set([1, 10, 25, 56, 100, 137, 42161, 42220, 43114, 8453]);
const protocolinkCandidateChains = new Set([1, 10, 56, 100, 137, 42161, 42220, 43114, 8453]);
// Canonical quote assets keyed as "<chainId>:<lowercase token address>".
// Rows matching a key here are classified as official quote capital rather
// than repo/wrapped assets (see classifyFundingPath).
const officialQuoteAssets = new Set([
  "1:0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48",
  "1:0xdac17f958d2ee523a2206206994597c13d831ec7",
  "10:0x0b2c639c533813f4aa9d7837caf62653d097ff85",
  "10:0x94b008aa00579c1307b0ef2c499ad98a8ce58e58",
  "25:0xc21223249ca28397b4b6541dffaecc539bff0c59",
  "25:0x66e428c3f67a68878562e79a0234c1f83c208770",
  "56:0x8ac76a51cc950d9822d68b83fe1ad97b32cd580d",
  "56:0x55d398326f99059ff775485246999027b3197955",
  "100:0xddafbb505ad214d7b80b1f830fccc89b60fb7a83",
  "100:0x4ecaba5870353805a9f068101a40e0f32ed605c6",
  "137:0x3c499c542cef5e3811e1192ce70d8cc03d5c3359",
  "137:0xc2132d05d31c914a87c6611c10748aeb04b58e8f",
  "8453:0x833589fcd6edb6e08f4c7c32d4f71b54bda02913",
  "42161:0xaf88d065e77c8cc2239327c5edb3a432268e5831",
  "42161:0xfd086bc7cd5c481dcc9c85ebe478a1c0b69fcbb9",
  "42220:0x765de816845861e75a25fca122bb6898b8b1282a",
  "42220:0x48065fbbe25f71c9282ddf5e1cd6d6a887483d5e",
  "43114:0xb97ef9ef8734c71904d8002f8b6bc66dd9c48a6e",
  "43114:0x9702230a8ea53601f5cd2dc00fdbc13d4df4a8c7",
]);
// Deployer-held Ethereum mainnet source inventory checked by
// buildEthereumSourceInventory(); `role` labels how each balance may be used
// (official quote capital, quote-before-use, wrapped gas source, evidence).
const ethereumSourceTokens = [
  { symbol: "USDC", address: "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", decimals: 6, role: "official_quote_capital" },
  { symbol: "USDT", address: "0xdAC17F958D2ee523a2206206994597C13D831ec7", decimals: 6, role: "official_quote_capital" },
  { symbol: "LINK", address: "0x514910771AF9Ca656af840dff83E8264EcF986CA", decimals: 18, role: "route_quote_before_use" },
  { symbol: "WETH", address: "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2", decimals: 18, role: "wrapped_native_gas_source" },
  { symbol: "XAUt", address: "0x68749665FF8D2d112Fa859AA293F07A622782F38", decimals: 6, role: "route_quote_before_use" },
  { symbol: "cWUSDC", address: "0x66a3c2fa3e467aa586e90912f977e648589cabaf", decimals: 6, role: "evidence_or_pair_side_not_native_gas" },
];
// Per-chain JSON-RPC endpoints: first matching env var wins, otherwise a
// public fallback (chain 138 falls back to a LAN node address).
const rpcByChain = {
  1: process.env.ETHEREUM_MAINNET_RPC || process.env.RPC_URL_1 || "https://ethereum.publicnode.com",
  10: process.env.OPTIMISM_MAINNET_RPC || process.env.OPTIMISM_RPC_URL || process.env.RPC_URL_10 || "https://optimism.publicnode.com",
  25: process.env.CRONOS_RPC_URL || process.env.CRONOS_MAINNET_RPC || process.env.RPC_URL_25 || "https://cronos-evm-rpc.publicnode.com",
  56: process.env.BSC_RPC_URL || process.env.BSC_MAINNET_RPC || process.env.RPC_URL_56 || "https://bsc-rpc.publicnode.com",
  100: process.env.GNOSIS_MAINNET_RPC || process.env.GNOSIS_RPC_URL || process.env.GNOSIS_RPC || process.env.RPC_URL_100 || "https://gnosis.publicnode.com",
  137: process.env.POLYGON_MAINNET_RPC || process.env.POLYGON_RPC_URL || process.env.RPC_URL_137 || "https://polygon-bor-rpc.publicnode.com",
  138: process.env.RPC_URL_138_PUBLIC || process.env.RPC_URL_138 || process.env.CHAIN138_RPC_URL || "http://192.168.11.221:8545",
  1111: process.env.WEMIX_MAINNET_RPC || process.env.WEMIX_RPC || process.env.RPC_URL_1111 || "https://api.wemix.com",
  8453: process.env.BASE_MAINNET_RPC || process.env.BASE_RPC_URL || process.env.RPC_URL_8453 || "https://base-rpc.publicnode.com",
  42161: process.env.ARBITRUM_MAINNET_RPC || process.env.ARBITRUM_RPC_URL || process.env.RPC_URL_42161 || "https://arbitrum-one-rpc.publicnode.com",
  42220: process.env.CELO_MAINNET_RPC || process.env.CELO_RPC_URL || process.env.CELO_RPC || process.env.RPC_URL_42220 || "https://celo-rpc.publicnode.com",
  43114: process.env.AVALANCHE_RPC_URL || process.env.AVALANCHE_MAINNET_RPC || process.env.RPC_URL_43114 || "https://avalanche-c-chain-rpc.publicnode.com",
  651940: process.env.CHAIN_651940_RPC_URL || process.env.ALL_MAINNET_RPC || "https://mainnet-rpc.alltra.global",
};
// Native gas-asset ticker per chain id (used for report labels).
const nativeSymbolsByChain = {
  1: "ETH",
  10: "ETH",
  25: "CRO",
  56: "BNB",
  100: "xDAI",
  137: "POL",
  138: "DBIS",
  1111: "WEMIX",
  8453: "ETH",
  42161: "ETH",
  42220: "CELO",
  43114: "AVAX",
  651940: "ALL",
};
// Process-lifetime cache for gasPrice() results, keyed by chain id.
const gasPriceCache = new Map();
// ABI-encode an address argument: strip the 0x prefix (original case is
// preserved) and left-pad with zeroes to a full 32-byte word.
function padAddress(address) {
  const stripped = String(address).replace(/^0x/i, "");
  return stripped.padStart(64, "0");
}
/**
 * Minimal JSON-RPC 2.0 POST helper with a 12-second abort timeout.
 * Resolves to `{ ok: true, result }` or `{ ok: false, error }` — never throws.
 * NOTE(review): the HTTP status is not checked; a non-2xx JSON body without
 * an `error` field would surface as success — confirm acceptable for these RPCs.
 */
async function rpcCall(rpcUrl, method, params) {
  const controller = new AbortController();
  const timeout = setTimeout(() => controller.abort(), 12_000);
  try {
    const response = await fetch(rpcUrl, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({ jsonrpc: "2.0", method, params, id: 1 }),
      signal: controller.signal,
    });
    const json = await response.json();
    if (json.error) return { ok: false, error: json.error.message || JSON.stringify(json.error) };
    return { ok: true, result: json.result };
  } catch (error) {
    return { ok: false, error: error.message };
  } finally {
    clearTimeout(timeout);
  }
}
// Parse a hex quantity into a BigInt, treating missing/empty ("0x") values
// as zero instead of throwing.
function bigintFromHex(hex) {
  return hex && hex !== "0x" ? BigInt(hex) : 0n;
}
// Render a bigint base-unit amount as a decimal string with trailing zeroes
// trimmed from the fractional part (e.g. 1500000n @ 6 decimals -> "1.5").
function decimalUnits(raw, decimals) {
  const divisor = 10n ** BigInt(decimals);
  const integerPart = (raw / divisor).toString();
  const fractionPart = (raw % divisor)
    .toString()
    .padStart(decimals, "0")
    .replace(/0+$/, "");
  return fractionPart.length > 0 ? `${integerPart}.${fractionPart}` : integerPart;
}
/**
 * Fetch an ERC-20 balance via raw eth_call: balanceOf(holder) (selector
 * 0x70a08231) and decimals() (selector 0x313ce567) are queried in parallel.
 * On failure the balance defaults to 0 and decimals to 18; `ok`/`error`
 * reflect only the balance call, not the decimals call.
 */
async function erc20Balance(rpcUrl, token, holder) {
  const [balance, decimals] = await Promise.all([
    rpcCall(rpcUrl, "eth_call", [{ to: token, data: `0x70a08231${padAddress(holder)}` }, "latest"]),
    rpcCall(rpcUrl, "eth_call", [{ to: token, data: "0x313ce567" }, "latest"]),
  ]);
  const raw = balance.ok ? bigintFromHex(balance.result) : 0n;
  const dec = decimals.ok ? Number(bigintFromHex(decimals.result)) : 18;
  return {
    ok: balance.ok,
    raw: raw.toString(),
    units: decimalUnits(raw, Number.isFinite(dec) ? dec : 18),
    decimals: Number.isFinite(dec) ? dec : 18,
    error: balance.ok ? undefined : balance.error,
  };
}
/**
 * Fetch the EVM native-coin balance via eth_getBalance, rendered with 18
 * decimals. Failures report raw "0" with `ok: false` and the RPC error.
 */
async function nativeBalance(rpcUrl, holder) {
  const balance = await rpcCall(rpcUrl, "eth_getBalance", [holder, "latest"]);
  const raw = balance.ok ? bigintFromHex(balance.result) : 0n;
  return {
    ok: balance.ok,
    raw: raw.toString(),
    units: decimalUnits(raw, 18),
    error: balance.ok ? undefined : balance.error,
  };
}
/**
 * Snapshot the deployer's Ethereum mainnet source inventory: native ETH plus
 * each token in ethereumSourceTokens, fetched via the chain-1 RPC. Each token
 * row carries its configured role plus balance and a present/zero status;
 * the `interpretation` notes explain how the balances may be used.
 */
async function buildEthereumSourceInventory() {
  const rpcUrl = rpcByChain[1];
  const native = await nativeBalance(rpcUrl, deployer);
  const tokens = await Promise.all(ethereumSourceTokens.map(async (token) => {
    const balance = await erc20Balance(rpcUrl, token.address, deployer);
    return {
      ...token,
      balance: balance.units,
      balanceRaw: balance.raw,
      balanceStatus: BigInt(balance.raw || "0") > 0n ? "present" : "zero",
      error: balance.error,
    };
  }));
  return {
    chainId: 1,
    network: "Ethereum Mainnet",
    deployer,
    native: {
      symbol: "ETH",
      balance: native.units,
      balanceRaw: native.raw,
      role: "mainnet_transaction_gas_do_not_fully_drain",
    },
    tokens,
    interpretation: [
      "Ethereum portfolio value is not the same as immediately spendable cross-chain gas.",
      "Keep enough ETH for Mainnet approvals, swaps, and liquidity/stability events.",
      "Use USDC/USDT first as official quote capital; use LINK/XAUt only after a live route quote proves acceptable output.",
      "Treat cWUSDC as pair-side/evidence inventory unless a real public route converts it into the exact official token needed.",
    ],
  };
}
/**
 * Cached eth_gasPrice lookup per chain. Results — including failures — are
 * memoized in gasPriceCache for the lifetime of the process (a transient RPC
 * failure therefore sticks; acceptable for a one-shot planner run).
 * Returns { ok, raw (wei string), gwei, error? }.
 */
async function gasPrice(rpcUrl, chainId) {
  // Fix: the missing-RPC shape previously lacked `gwei`, so downstream
  // consumers (buildGasPlan reads gas.gwei) saw undefined. `units` is kept
  // for backward compatibility with any existing readers of this shape.
  if (!rpcUrl) return { ok: false, raw: "0", units: "0", gwei: "0", error: "missing_rpc" };
  if (gasPriceCache.has(chainId)) return gasPriceCache.get(chainId);
  const result = await rpcCall(rpcUrl, "eth_gasPrice", []);
  const raw = result.ok ? bigintFromHex(result.result) : 0n;
  const payload = {
    ok: result.ok,
    raw: raw.toString(),
    gwei: decimalUnits(raw, 9),
    error: result.ok ? undefined : result.error,
  };
  gasPriceCache.set(chainId, payload);
  return payload;
}
// Render a GitHub-flavored Markdown table. Pipe characters inside row cells
// are escaped; header cells are emitted as-is (matching historical output).
function table(headers, rows) {
  const escapeCell = (cell) => String(cell ?? "").replace(/\|/g, "\\|");
  const lines = [];
  lines.push(`| ${headers.join(" | ")} |`);
  lines.push(`| ${headers.map(() => "---").join(" | ")} |`);
  for (const row of rows) {
    lines.push(`| ${row.map(escapeCell).join(" | ")} |`);
  }
  return lines.join("\n");
}
// Best-effort JSON load: any read or parse failure yields the fallback
// (null by default) instead of throwing.
function readJsonIfExists(path, fallback = null) {
  let text;
  try {
    text = readFileSync(path, "utf8");
  } catch {
    return fallback;
  }
  try {
    return JSON.parse(text);
  } catch {
    return fallback;
  }
}
// Resolve a config value by trying each key in order: process.env wins;
// otherwise the known .env files are scanned for a `KEY=value` line (the
// value is cut at '#' or newline, then trimmed and unquoted). Returns "".
function readEnvValue(...keys) {
  for (const key of keys) {
    const fromEnv = process.env[key];
    if (fromEnv) return fromEnv.trim();
  }
  for (const file of envFiles) {
    let text;
    try {
      text = readFileSync(file, "utf8");
    } catch {
      continue;
    }
    for (const key of keys) {
      const match = text.match(new RegExp(`^${key}=([^\\n#]*)`, "m"));
      if (match?.[1]) return match[1].trim().replace(/^['"]|['"]$/g, "");
    }
  }
  return "";
}
// Bitcoin-style base58 alphabet (omits 0, O, I, l).
const base58Alphabet = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz";
/**
 * Encode a byte buffer as base58: big-endian integer conversion, with one
 * leading "1" emitted per leading zero byte of the input.
 */
function base58Encode(bytes) {
  const hex = Buffer.from(bytes).toString("hex");
  let value = BigInt(`0x${hex || "0"}`);
  let output = "";
  while (value > 0n) {
    const remainder = Number(value % 58n);
    output = `${base58Alphabet[remainder]}${output}`;
    value /= 58n;
  }
  let leadingZeroes = 0;
  for (const byte of bytes) {
    if (byte !== 0) break;
    leadingZeroes += 1;
  }
  return `${"1".repeat(leadingZeroes)}${output}`;
}
/**
 * Decode a base58 string back into a Buffer (inverse of base58Encode).
 * Throws on characters outside the base58 alphabet.
 */
function base58Decode(text) {
  let value = 0n;
  for (const char of text) {
    const index = base58Alphabet.indexOf(char);
    if (index < 0) throw new Error(`invalid_base58_char_${char}`);
    value = value * 58n + BigInt(index);
  }
  // Fix: (0n).toString(16) is "0", which the old code padded to "00" and
  // decoded as a spurious 0x00 byte, breaking round-trips for "" and
  // zero-only inputs ("1" decoded to two zero bytes). A zero value must
  // contribute no body bytes — leading zeroes are restored from the "1"s.
  let hex = value > 0n ? value.toString(16) : "";
  if (hex.length % 2) hex = `0${hex}`;
  const decoded = hex ? Buffer.from(hex, "hex") : Buffer.alloc(0);
  const leading = [...text].findIndex((char) => char !== "1");
  const zeroCount = leading < 0 ? text.length : leading;
  return Buffer.concat([Buffer.alloc(zeroCount), decoded]);
}
/**
 * Resolve the Solana deployer public key, in preference order:
 *   1) an explicit env public key,
 *   2) the public half (bytes 32..64) of a 64-byte JSON keypair file,
 *   3) the public half of a base58-encoded private key.
 * Returns { address, source }; address is "" when nothing is configured
 * (e.g. a seed-only private key that cannot yield a public key).
 */
function solanaWalletFromConfig() {
  const explicit = readEnvValue("SOLANA_DEPLOYER_ADDRESS", "SOLANA_WALLET_ADDRESS", "SOLANA_PUBLIC_KEY");
  if (explicit) return { address: explicit, source: "env_public_key" };
  const keypairPath = readEnvValue("SOLANA_KEYPAIR_PATH");
  if (keypairPath) {
    try {
      const keypair = JSON.parse(readFileSync(keypairPath, "utf8"));
      if (Array.isArray(keypair) && keypair.length >= 64) {
        return { address: base58Encode(Buffer.from(keypair.slice(32, 64))), source: "SOLANA_KEYPAIR_PATH_public_key" };
      }
    } catch {
      // Fall through to private-key decode if present.
    }
  }
  const privateKey = readEnvValue("PRIVATE_KEY_SOLANA_DEPLOYER", "SOLANA_PRIVATE_KEY");
  if (privateKey) {
    try {
      const decoded = base58Decode(privateKey);
      if (decoded.length >= 64) return { address: base58Encode(decoded.subarray(32, 64)), source: "solana_private_key_public_half" };
    } catch {
      // Keep address gated if the value is a seed-only key.
    }
  }
  return { address: "", source: "missing" };
}
/**
 * Resolve the Tron deployer address: an explicit env address wins; otherwise
 * derive the base58check form of the EVM deployer (0x41-prefixed payload
 * plus a double-SHA-256 4-byte checksum).
 * NOTE(review): the derivation assumes the same key controls both chains —
 * downstream reporting flags this for operator confirmation.
 */
function tronWalletFromConfig() {
  const explicit = readEnvValue("TRON_DEPLOYER_ADDRESS", "TRON_WALLET_ADDRESS", "TRON_PUBLIC_ADDRESS", "TRON_ACCOUNT_ADDRESS");
  if (explicit) return { address: explicit, source: "env_tron_address" };
  const ethAddress = deployer.replace(/^0x/i, "");
  if (/^[0-9a-fA-F]{40}$/.test(ethAddress)) {
    const payload = Buffer.from(`41${ethAddress}`, "hex");
    const checksum = createHash("sha256").update(createHash("sha256").update(payload).digest()).digest().subarray(0, 4);
    return { address: base58Encode(Buffer.concat([payload, checksum])), source: "derived_from_evm_deployer_address" };
  }
  return { address: "", source: "missing" };
}
// Resolve the XRPL deployer account. There is no derivation path from the
// EVM deployer here: the address must be provided explicitly via env/.env.
function xrplWalletFromConfig() {
  const explicit = readEnvValue("XRPL_DEPLOYER_ADDRESS", "XRP_DEPLOYER_ADDRESS", "XRPL_WALLET_ADDRESS", "XRP_WALLET_ADDRESS", "XRPL_ACCOUNT");
  if (explicit) {
    return { address: explicit, source: "env_xrpl_address" };
  }
  return { address: "", source: "missing" };
}
/**
 * Query the native SOL balance (lamports, 9 decimals) via the Solana
 * `getBalance` JSON-RPC method on the configured or public RPC. A missing
 * address short-circuits with units "address_required".
 */
async function solanaNativeBalance(address) {
  if (!address) return { ok: false, units: "address_required", raw: "0", error: "missing_solana_address" };
  const rpcUrl = readEnvValue("SOLANA_RPC_URL") || "https://solana-rpc.publicnode.com";
  const result = await rpcCall(rpcUrl, "getBalance", [address]);
  const lamports = result.ok ? BigInt(result.result?.value ?? 0) : 0n;
  return {
    ok: result.ok,
    raw: lamports.toString(),
    units: decimalUnits(lamports, 9),
    error: result.ok ? undefined : result.error,
  };
}
/**
 * Query the native TRX balance (sun, 6 decimals) via TronGrid's
 * wallet/getaccount endpoint, with a 12-second abort timeout and an optional
 * TRON-PRO-API-KEY header. A missing address short-circuits with units
 * "address_required"; HTTP failures return the response body as the error.
 */
async function tronNativeBalance(address) {
  if (!address) return { ok: false, units: "address_required", raw: "0", error: "missing_tron_address" };
  const controller = new AbortController();
  const timeout = setTimeout(() => controller.abort(), 12_000);
  try {
    const headers = { "content-type": "application/json" };
    const apiKey = readEnvValue("TRONGRID_API_KEY");
    if (apiKey) headers["TRON-PRO-API-KEY"] = apiKey;
    const response = await fetch("https://api.trongrid.io/wallet/getaccount", {
      method: "POST",
      headers,
      body: JSON.stringify({ address, visible: true }),
      signal: controller.signal,
    });
    const json = await response.json();
    const sun = BigInt(json.balance ?? 0);
    return { ok: response.ok, raw: sun.toString(), units: decimalUnits(sun, 6), error: response.ok ? undefined : JSON.stringify(json) };
  } catch (error) {
    return { ok: false, raw: "0", units: "0", error: error.message };
  } finally {
    clearTimeout(timeout);
  }
}
/**
 * Query the native XRP balance (drops, 6 decimals) via XRPL `account_info`
 * against the validated ledger. A missing address short-circuits with units
 * "address_required".
 */
async function xrplNativeBalance(address) {
  if (!address) return { ok: false, units: "address_required", raw: "0", error: "missing_xrpl_address" };
  const result = await rpcCall(readEnvValue("XRPL_RPC_URL") || "https://xrplcluster.com", "account_info", [{ account: address, ledger_index: "validated" }]);
  const drops = result.ok ? BigInt(result.result?.account_data?.Balance ?? 0) : 0n;
  return {
    ok: result.ok,
    raw: drops.toString(),
    units: decimalUnits(drops, 6),
    error: result.ok ? undefined : result.error,
  };
}
// Pull the matching per-network row out of a health report; a missing report
// or missing `checks` array yields null.
function networkHealth(health, network) {
  const checks = health?.checks ?? [];
  const match = checks.find((row) => row.network === network);
  return match ?? null;
}
/**
 * Classify how a liquidity-gap row can be funded, given the deployer's token
 * balance and native-gas balance on that chain.
 *
 * Decision order:
 *   1) token held + visible zero-TVL pool  -> seed directly;
 *   2) token held + no visible pool        -> bind/create a pool first;
 *   3) official quote asset not held       -> Protocolink swap candidate on
 *      supported chains, else external funding;
 *   4) bridge-capable chain                -> bridge or destination-mint;
 *   5) otherwise                           -> manual asset source required.
 * Each status also encodes whether a native-gas top-up is needed.
 * The returned strings are consumed verbatim by downstream reports.
 */
function classifyFundingPath(detail, token, native) {
  const chainId = Number(detail.chainId);
  const addressKey = `${chainId}:${String(detail.address ?? "").toLowerCase()}`;
  const hasToken = BigInt(token.raw || "0") > 0n;
  const hasGas = BigInt(native.raw || "0") > 0n;
  const isOfficialQuoteAsset = officialQuoteAssets.has(addressKey);
  const canBridge = bridgeCapableChains.has(chainId);
  const canProtocolink = protocolinkCandidateChains.has(chainId);
  if (hasToken && detail.category === "configured_or_indexed_pools_zero_tvl") {
    return {
      fundingPath: "seed_existing_visible_pool_from_deployer_balance",
      fundingPathStatus: hasGas ? "executable_after_operator_approval" : "native_gas_topup_required",
      assetClass: isOfficialQuoteAsset ? "official_quote_asset" : "repo_or_wrapped_asset",
      protocolinkUse: "not_required_for_seed; optional for pre-seed rebalance quote",
    };
  }
  if (hasToken && detail.category === "no_visible_pool_binding") {
    return {
      fundingPath: "create_or_bind_pool_then_seed_from_deployer_balance",
      fundingPathStatus: hasGas ? "pool_binding_required_before_funding" : "native_gas_and_pool_binding_required",
      assetClass: isOfficialQuoteAsset ? "official_quote_asset" : "repo_or_wrapped_asset",
      protocolinkUse: "not_required_until pool exists; optional to rebalance paired side",
    };
  }
  if (isOfficialQuoteAsset) {
    return {
      fundingPath: canProtocolink ? "protocolink_swap_candidate_for_non_mintable_quote_asset" : "external_quote_asset_required",
      fundingPathStatus: canProtocolink ? "requires_live_route_quote_source_asset_and_min_out" : "external_funding_required",
      assetClass: "official_quote_asset",
      protocolinkUse: canProtocolink
        ? "use only after live quote proves deployer-held source asset converts into this exact token"
        : "unsupported_by_current_protocolink_candidate_set",
    };
  }
  if (canBridge) {
    return {
      fundingPath: "bridge_or_destination_mint_repo_asset_then_seed",
      fundingPathStatus: hasGas ? "bridge_or_mint_route_required" : "native_gas_topup_then_bridge_or_mint",
      assetClass: "repo_or_wrapped_asset",
      protocolinkUse: "optional only if a public swap route beats bridge/mint for the needed asset",
    };
  }
  return {
    fundingPath: "manual_asset_source_required",
    fundingPathStatus: hasGas ? "asset_source_required" : "native_gas_and_asset_source_required",
    assetClass: "unknown_or_unclassified_asset",
    protocolinkUse: "route support not classified",
  };
}
// Per-funding-path gas-unit budgets: a one-time setup cost plus a per-cycle
// stability allowance. Unknown paths fall back to the default seed budget.
function gasUnitsForFundingPath(fundingPath) {
  const fallback = {
    oneTimeGasUnits: 650_000,
    stabilityGasUnitsPerCycle: 260_000,
    rationale: "default seed/rebalance budget",
  };
  const budgets = {
    seed_existing_visible_pool_from_deployer_balance: {
      oneTimeGasUnits: 650_000,
      stabilityGasUnitsPerCycle: 260_000,
      rationale: "approve plus add/sync/validation transaction budget for already visible pools",
    },
    create_or_bind_pool_then_seed_from_deployer_balance: {
      oneTimeGasUnits: 1_350_000,
      stabilityGasUnitsPerCycle: 320_000,
      rationale: "factory create/bind plus seed transaction budget",
    },
    bridge_or_destination_mint_repo_asset_then_seed: {
      oneTimeGasUnits: 1_150_000,
      stabilityGasUnitsPerCycle: 300_000,
      rationale: "bridge-or-mint plus destination seed transaction budget",
    },
    protocolink_swap_candidate_for_non_mintable_quote_asset: {
      oneTimeGasUnits: 900_000,
      stabilityGasUnitsPerCycle: 360_000,
      rationale: "Protocolink route execution plus seed/rebalance budget after live quote",
    },
    external_quote_asset_required: {
      oneTimeGasUnits: 450_000,
      stabilityGasUnitsPerCycle: 260_000,
      rationale: "post-funding seed/rebalance budget; asset funding is out of band",
    },
    manual_asset_source_required: {
      oneTimeGasUnits: 650_000,
      stabilityGasUnitsPerCycle: 260_000,
      rationale: "manual source then seed/rebalance budget",
    },
  };
  return budgets[fundingPath] ?? fallback;
}
// Turn a per-path gas-unit budget into a concrete native-token requirement:
//   required = ceil((oneTime + perCycle * stabilityCycles) * gasPrice * safetyBps / 10000)
// then report the shortfall/surplus against the wallet's current balance.
function buildGasPlan({ chainId, native, gas, fundingPath }) {
  const budget = gasUnitsForFundingPath(fundingPath);
  const priceWei = BigInt(gas.raw || "0");
  const oneTimeWei = BigInt(budget.oneTimeGasUnits) * priceWei;
  const stabilityWei = BigInt(budget.stabilityGasUnitsPerCycle) * BigInt(stabilityCycles) * priceWei;
  const baseWei = oneTimeWei + stabilityWei;
  // Add-then-floor division implements ceiling, keeping the margin conservative.
  const requiredWei = (baseWei * gasSafetyBps + 9_999n) / 10_000n;
  const haveWei = BigInt(native.raw || "0");
  const shortfallWei = requiredWei > haveWei ? requiredWei - haveWei : 0n;
  const surplusWei = haveWei > requiredWei ? haveWei - requiredWei : 0n;
  return {
    nativeSymbol: nativeSymbolsByChain[chainId] ?? "native",
    gasPriceWei: gas.raw,
    gasPriceGwei: gas.gwei,
    oneTimeGasUnits: budget.oneTimeGasUnits,
    stabilityCycles,
    stabilityGasUnitsPerCycle: budget.stabilityGasUnitsPerCycle,
    safetyBps: Number(gasSafetyBps),
    requiredNativeRaw: requiredWei.toString(),
    requiredNative: decimalUnits(requiredWei, 18),
    oneTimeNative: decimalUnits(oneTimeWei, 18),
    stabilityNative: decimalUnits(stabilityWei, 18),
    nativeBalanceRaw: native.raw,
    nativeBalance: native.units,
    shortfallNativeRaw: shortfallWei.toString(),
    shortfallNative: decimalUnits(shortfallWei, 18),
    surplusNativeRaw: surplusWei.toString(),
    surplusNative: decimalUnits(surplusWei, 18),
    status: shortfallWei === 0n ? "gas_budget_satisfied" : "gas_budget_shortfall",
    rationale: budget.rationale,
    gasPriceError: gas.error,
  };
}
// Builds the non-EVM funding-requirement rows (Solana, Tron, XRPL, plus a
// placeholder row for other non-EVM majors). Read-only: resolves repo-bound
// wallets, probes native balances where a checker exists, and leaves
// requiredFunding as "TBD" until asset inventories and minimum venue targets
// are bound in config. Returns the array consumed by the report payload.
async function buildNonEvmFundingRequirements() {
// Optional cached health/lane snapshots; missing files degrade to null/{}.
const health = readJsonIfExists(nonEvmHealthPath, null);
const laneStatus = readJsonIfExists(nonEvmLaneStatusPath, null);
const lanes = laneStatus?.lanes ?? {};
const solanaWallet = solanaWalletFromConfig();
const tronWallet = tronWalletFromConfig();
const xrplWallet = xrplWalletFromConfig();
// The three balance probes are independent, so run them concurrently.
const [solanaBalance, tronBalance, xrplBalance] = await Promise.all([
solanaNativeBalance(solanaWallet.address),
tronNativeBalance(tronWallet.address),
xrplNativeBalance(xrplWallet.address),
]);
return [
// Solana: wallet may be bound, but SPL mint inventory and funding targets
// are still required before liquidity claims.
{
network: "Solana",
target: "mainnet-beta",
includedInFundingScope: true,
walletAddress: solanaWallet.address || "missing",
walletSource: solanaWallet.source,
// On probe failure, `units` carries the error label instead of a number.
currentBalanceStatus: solanaBalance.ok ? `${solanaBalance.units} SOL` : solanaBalance.units,
currentBalanceRaw: solanaBalance.raw,
nativeGasAsset: "SOL",
bridgeOrWrappedAsset: lanes.solana?.destinationAsset?.symbol ?? "cWAUSDT",
requiredFunding: "TBD",
status: solanaWallet.address ? "spl_mint_inventory_and_minimum_funding_targets_required" : "wallet_and_spl_mint_inventory_required",
networkHealth: networkHealth(health, "Solana"),
requirements: [
solanaWallet.address ? "Canonical Solana deployer public key is bound for native SOL checks." : "Bind canonical Solana custody wallet/public key for funding checks.",
"Populate SPL mint addresses in config/solana-gru-bridge-lineup.json.",
"Check SOL gas/rent balance and SPL token balances for each promoted cW* mint.",
"Set minimum pool/rent/execution funding targets per Solana venue before declaring positive liquidity.",
],
},
// Tron: the wallet may be derived from the EVM deployer address, which
// still needs explicit operator confirmation.
{
network: "Tron",
target: "mainnet",
includedInFundingScope: true,
walletAddress: tronWallet.address || "missing",
walletSource: tronWallet.source,
currentBalanceStatus: tronBalance.ok ? `${tronBalance.units} TRX` : tronBalance.units,
currentBalanceRaw: tronBalance.raw,
nativeGasAsset: "TRX",
bridgeOrWrappedAsset: "TronAdapter relay inventory",
requiredFunding: "TBD",
status: tronWallet.source === "derived_from_evm_deployer_address" ? "derived_tron_wallet_needs_operator_confirmation_and_asset_inventory" : "native_tron_wallet_and_asset_inventory_required",
networkHealth: networkHealth(health, "Tron"),
requirements: [
tronWallet.source === "derived_from_evm_deployer_address" ? "Confirm whether the EVM deployer-derived Tron address is the canonical native Tron deployer." : "Bind canonical Tron custody wallet address.",
"Check TRX energy/bandwidth funding and any native TRC-20 inventory needed for relay settlement.",
"Promote or document native Tron-side contracts/assets before treating Tron as liquidity-ready.",
"Close Chain 138 TronAdapter source/publication evidence separately from native Tron funding.",
],
},
// XRPL: reserve/trustline/bridge-inventory checks remain outstanding even
// when the account is bound.
{
network: "XRPL",
target: "mainnet",
includedInFundingScope: true,
walletAddress: xrplWallet.address || "missing",
walletSource: xrplWallet.source,
currentBalanceStatus: xrplBalance.ok ? `${xrplBalance.units} XRP` : xrplBalance.units,
currentBalanceRaw: xrplBalance.raw,
nativeGasAsset: "XRP",
bridgeOrWrappedAsset: lanes.xrpl?.wrappedAsset?.address ? `wXRP ${lanes.xrpl.wrappedAsset.address}` : "wXRP",
requiredFunding: "TBD",
status: xrplWallet.address ? "xrpl_reserve_trustline_and_bridge_inventory_required" : "xrpl_wallet_reserve_and_bridge_inventory_required",
networkHealth: networkHealth(health, "XRPL"),
requirements: [
xrplWallet.address ? "Canonical XRPL account is bound for native XRP checks." : "Bind canonical XRPL account and optional destination tag policy.",
"Check XRP reserve, transfer-fee cushion, and any trustline/issuer requirements.",
"Check Chain 138 wXRP inventory and MintBurnController authorization readiness.",
"Close Chain 138 XRPLAdapter/wXRP/MintBurnController source-publication evidence separately from XRPL funding.",
],
},
// Catch-all row for networks the balance planner cannot probe yet.
{
network: "Other non-EVM majors",
target: "BTC/SOL/XRP/ADA/XLM/HBAR/SUI/TON class expansion",
includedInFundingScope: true,
walletAddress: "per-network wallet not bound",
walletSource: "missing",
currentBalanceStatus: "not_supported_by_current_balance_planner",
nativeGasAsset: "varies",
bridgeOrWrappedAsset: "not bound",
requiredFunding: "TBD",
status: "adapter_wallet_asset_and_venue_requirements_not_yet_bound",
networkHealth: null,
requirements: [
"Create per-network custody wallet and balance checker.",
"Bind asset IDs/mints/trustlines/program IDs in repo config.",
"Define minimum native gas/rent/reserve and liquidity targets per network.",
"Add lane evidence before including the network in tracker-facing liquidity claims.",
],
},
];
}
// Load the live readiness report and build one planner row per liquidity gap.
// Read-only: RPC balance and gas-price probes only, no broadcast.
const readiness = JSON.parse(readFileSync(readinessPath, "utf8"));
const details = readiness.blockerInventory?.liquidityMissingDetails ?? [];
// Gate label for a row before gas gating is appended. Categories are a single
// value per detail, so the branches are mutually exclusive.
const baseStatusFor = (category, tokenPresent) => {
  if (category === "no_visible_pool_binding") {
    return tokenPresent ? "pool_binding_gated" : "pool_binding_and_token_balance_gated";
  }
  if (category === "configured_or_indexed_pools_zero_tvl" && tokenPresent) {
    return "fundable_token_balance_present";
  }
  return "token_balance_gated";
};
const rows = [];
for (const detail of details) {
  const chainId = Number(detail.chainId);
  const rpcUrl = rpcByChain[chainId];
  // Without an RPC endpoint we record a synthetic zero balance with an error.
  const native = rpcUrl
    ? await nativeBalance(rpcUrl, deployer)
    : { ok: false, raw: "0", units: "0", error: "missing_rpc" };
  const canQueryToken = rpcUrl && detail.address?.startsWith("0x");
  const token = canQueryToken
    ? await erc20Balance(rpcUrl, detail.address, deployer)
    : { ok: false, raw: "0", units: "0", decimals: 18, error: "missing_token_or_rpc" };
  const hasGas = BigInt(native.raw || "0") > 0n;
  const hasToken = BigInt(token.raw || "0") > 0n;
  const funding = classifyFundingPath(detail, token, native);
  const gas = await gasPrice(rpcUrl, chainId);
  const gasPlan = buildGasPlan({ chainId, native, gas, fundingPath: funding.fundingPath });
  const gateLabel = baseStatusFor(detail.category, hasToken);
  const status = hasGas ? gateLabel : `${gateLabel}+gas_gated`;
  rows.push({
    chainId,
    symbol: detail.symbol,
    address: detail.address,
    category: detail.category,
    poolCount: detail.poolCount,
    zeroTvlPoolCount: detail.zeroTvlPoolCount,
    nativeBalance: native.units,
    tokenBalance: token.units,
    tokenBalanceRaw: token.raw,
    status,
    ...funding,
    gasPlan,
    rpcError: native.error || token.error,
  });
}
// Flat per-row gas summary used by the Markdown report tables.
const gasBudgetRows = rows.map(({ chainId, symbol, fundingPath, gasPlan }) => ({
  chainId,
  symbol,
  nativeSymbol: gasPlan.nativeSymbol,
  fundingPath,
  requiredNative: gasPlan.requiredNative,
  nativeBalance: gasPlan.nativeBalance,
  shortfallNative: gasPlan.shortfallNative,
  status: gasPlan.status,
}));
// Aggregate row budgets per chain: a single deployer native balance pays for
// every planned action on that chain, so shortfall is judged chain-wide.
const chainGasBudgetMap = new Map();
for (const row of rows) {
  let bucket = chainGasBudgetMap.get(row.chainId);
  if (!bucket) {
    bucket = {
      chainId: row.chainId,
      nativeSymbol: row.gasPlan.nativeSymbol,
      gasPriceGwei: row.gasPlan.gasPriceGwei,
      requiredNativeRaw: 0n,
      // Same deployer balance for every row on a chain; capture it once.
      nativeBalanceRaw: BigInt(row.gasPlan.nativeBalanceRaw || "0"),
      rows: 0,
      symbols: [],
    };
    chainGasBudgetMap.set(row.chainId, bucket);
  }
  bucket.requiredNativeRaw += BigInt(row.gasPlan.requiredNativeRaw || "0");
  bucket.rows += 1;
  bucket.symbols.push(row.symbol);
}
const chainGasBudgetRows = [...chainGasBudgetMap.values()]
  .sort((left, right) => Number(left.chainId) - Number(right.chainId))
  .map((bucket) => {
    const deficitRaw = bucket.requiredNativeRaw > bucket.nativeBalanceRaw
      ? bucket.requiredNativeRaw - bucket.nativeBalanceRaw
      : 0n;
    const excessRaw = bucket.nativeBalanceRaw > bucket.requiredNativeRaw
      ? bucket.nativeBalanceRaw - bucket.requiredNativeRaw
      : 0n;
    return {
      chainId: bucket.chainId,
      nativeSymbol: bucket.nativeSymbol,
      gasPriceGwei: bucket.gasPriceGwei,
      rows: bucket.rows,
      symbols: [...new Set(bucket.symbols)].join(", "),
      requiredNativeRaw: bucket.requiredNativeRaw.toString(),
      requiredNative: decimalUnits(bucket.requiredNativeRaw, 18),
      nativeBalanceRaw: bucket.nativeBalanceRaw.toString(),
      nativeBalance: decimalUnits(bucket.nativeBalanceRaw, 18),
      shortfallNativeRaw: deficitRaw.toString(),
      shortfallNative: decimalUnits(deficitRaw, 18),
      surplusNativeRaw: excessRaw.toString(),
      surplusNative: decimalUnits(excessRaw, 18),
      status: deficitRaw === 0n ? "chain_gas_budget_satisfied" : "chain_gas_budget_shortfall",
    };
  });
// Documents the boundary between off-chain indexing (Etherscan/trackers) and
// the on-chain facts this gas budget can actually fund. Mostly static text;
// only readinessStatus is computed, from the chain-level gas shortfalls.
const etherscanStability = {
purpose: "off_chain_indexing_stability_for_token_trackers",
boundary: "Etherscan/token trackers index public on-chain facts; gas only funds the transactions that create and refresh those facts.",
requiredOnChainFacts: [
"Verified token contract and correct metadata/logoURI publication path.",
"Visible/indexable pool contract for each promoted token pair.",
"Positive, non-dust liquidity on the visible pool.",
"Recent real swap or liquidity-change events when tracker freshness is required.",
"Official quote-token evidence when claiming cW*/USDC or c*/USDC peg support.",
],
gasBudgetRole: [
"Create or bind missing pools.",
"Approve and seed liquidity.",
"Execute Protocolink/bridge/mint/swap actions when needed.",
"Run recurring stability/rebalance transactions so public indexers observe fresh state.",
],
cannotBeSolvedByGasAlone: [
"A missing verified-source listing.",
"A token logo or page-info package that is not published at the expected endpoint.",
"A pool that exists only in internal config but is not visible/indexable on the public chain.",
"A c* balance that has not been bridged or swapped into the exact official quote asset required by the tracker claim.",
],
// Any single chain-level shortfall flips the overall readiness status.
readinessStatus: chainGasBudgetRows.some((row) => row.status === "chain_gas_budget_shortfall")
? "on_chain_stability_transactions_gas_shortfall"
: "on_chain_stability_transactions_gas_budget_satisfied",
};
// Rows that can be acted on immediately: token balance already held, only a
// seed or pool-create/bind step remains.
const coffeeMoneyExecutableRows = rows.filter((row) => [
"seed_existing_visible_pool_from_deployer_balance",
"create_or_bind_pool_then_seed_from_deployer_balance",
].includes(row.fundingPath));
const coffeeMoneyGasShortfallChains = chainGasBudgetRows.filter((row) => row.status === "chain_gas_budget_shortfall");
// Small-budget ("coffee money") startup plan: what the operator-observed USD
// can realistically start, and in what order. Planning only, no execution.
const coffeeMoneyPlan = {
purpose: "start_visible_indexable_liquidity_with_coffee_money",
operatorObservedUsdAvailable: coffeeMoneyUsdAvailable,
liquidityDustUsdPerRow: coffeeMoneyLiquidityUsdPerRow,
immediatelyUsefulRows: coffeeMoneyExecutableRows.length,
// USD estimate for seeding dust liquidity into every executable row.
estimatedLiquidityDustUsd: Number((coffeeMoneyExecutableRows.length * coffeeMoneyLiquidityUsdPerRow).toFixed(2)),
gasShortfallChains: coffeeMoneyGasShortfallChains.map((row) => ({
chainId: row.chainId,
nativeSymbol: row.nativeSymbol,
shortfallNative: row.shortfallNative,
symbols: row.symbols,
})),
// $35 threshold is the planning minimum for the full coffee-money set.
assessment: coffeeMoneyUsdAvailable >= 35
? "enough_to_start_coffee_money_liquidity_if_routed_into_missing_native_gas"
: "not_enough_for_full_coffee_money_set",
recommendedOrder: [
"Top up native gas on shortfall chains first: Optimism, BSC, Polygon, Arbitrum.",
"Seed existing visible pools that already have deployer token balance.",
"Create or bind missing visible pools for rows that already have deployer token balance.",
"Run tiny real swaps/liquidity events so Etherscan/tracker indexers see fresh public facts.",
"Leave Protocolink-only official quote-asset rows for last unless a live quote proves conversion from current deployer assets.",
],
boundary: "This starts indexable public liquidity evidence; it does not create deep market depth or a large 1:1 peg reserve.",
};
// Histogram of row statuses for the summary table.
const counts = rows.reduce((acc, row) => {
acc[row.status] = (acc[row.status] || 0) + 1;
return acc;
}, {});
const nonEvmFundingRequirements = await buildNonEvmFundingRequirements();
const ethereumSourceInventory = await buildEthereumSourceInventory();
// JSON report payload; the Markdown report below is rendered from this.
const payload = {
generatedAt: new Date().toISOString(),
mode: "read_only_no_broadcast",
deployer,
sourceReadiness: "reports/status/token-aggregation-adoption-readiness-live-20260509.json",
summary: {
rows: rows.length,
nonEvmFundingRequirementRows: nonEvmFundingRequirements.length,
// startsWith: counts rows whose label may carry a "+gas_gated" suffix.
fundableTokenBalancePresent: rows.filter((row) => row.status.startsWith("fundable_token_balance_present")).length,
poolBindingGated: rows.filter((row) => row.status.includes("pool_binding")).length,
gasGated: rows.filter((row) => row.status.includes("gas_gated")).length,
protocolinkSwapCandidates: rows.filter((row) => row.fundingPath === "protocolink_swap_candidate_for_non_mintable_quote_asset").length,
bridgeOrMintCandidates: rows.filter((row) => row.fundingPath === "bridge_or_destination_mint_repo_asset_then_seed").length,
poolCreateOrBindFirst: rows.filter((row) => row.fundingPath === "create_or_bind_pool_then_seed_from_deployer_balance").length,
seedExistingVisiblePoolNow: rows.filter((row) => row.fundingPath === "seed_existing_visible_pool_from_deployer_balance").length,
gasBudgetSatisfied: rows.filter((row) => row.gasPlan.status === "gas_budget_satisfied").length,
gasBudgetShortfall: rows.filter((row) => row.gasPlan.status === "gas_budget_shortfall").length,
chainGasBudgetSatisfied: chainGasBudgetRows.filter((row) => row.status === "chain_gas_budget_satisfied").length,
chainGasBudgetShortfall: chainGasBudgetRows.filter((row) => row.status === "chain_gas_budget_shortfall").length,
gasPolicy: {
stabilityCycles,
gasSafetyBps: Number(gasSafetyBps),
},
statusCounts: counts,
},
rows,
gasBudgetRows,
chainGasBudgetRows,
etherscanStability,
coffeeMoneyPlan,
ethereumSourceInventory,
nonEvmFundingRequirements,
};
// Markdown companion report, rendered from the payload above. Each array
// element is one output line; `table(headers, rows)` renders a Markdown table.
const md = [
"# Token-Aggregation Liquidity Gap Funding Plan",
"",
`- Generated: \`${payload.generatedAt}\``,
`- Mode: \`${payload.mode}\``,
`- Deployer: \`${deployer}\``,
"",
// Summary metrics; nested objects (gasPolicy/statusCounts) are JSON-encoded.
table(["Metric", "Count"], Object.entries(payload.summary).map(([key, value]) => [key, typeof value === "object" ? JSON.stringify(value) : value])),
"",
"## Rows",
"",
table(
["Chain", "Symbol", "Category", "Pools", "Native", "Token balance", "Status", "Funding path", "Gas shortfall"],
rows.map((row) => [row.chainId, row.symbol, row.category, row.poolCount, row.nativeBalance, row.tokenBalance, row.status, row.fundingPath, `${row.gasPlan.shortfallNative} ${row.gasPlan.nativeSymbol}`]),
),
"",
"### Chain-Level Gas Budget",
"",
"This aggregates all planned row actions by network because the same deployer native balance pays every deployment, seed, swap, bridge, and stability transaction on that chain.",
"",
table(
["Chain", "Symbols", "Native", "Rows", "Gas price gwei", "Required", "Balance", "Shortfall", "Status"],
chainGasBudgetRows.map((row) => [
row.chainId,
row.symbols,
row.nativeSymbol,
row.rows,
row.gasPriceGwei,
`${row.requiredNative} ${row.nativeSymbol}`,
`${row.nativeBalance} ${row.nativeSymbol}`,
`${row.shortfallNative} ${row.nativeSymbol}`,
row.status,
]),
),
"",
"## Gas Budget",
"",
`Gas is budgeted for one deployment/seed action plus \`${stabilityCycles}\` continual stability cycles, with a \`${Number(gasSafetyBps) / 100}%\` safety multiplier. Etherscan/token-tracker stability itself is off-chain indexing; gas only funds the on-chain facts that Etherscan can index.`,
"",
// Per-row gas budget detail.
table(
["Chain", "Symbol", "Native", "Gas price gwei", "One-time gas", "Stability gas/cycle", "Required", "Balance", "Shortfall", "Status"],
rows.map((row) => [
row.chainId,
row.symbol,
row.gasPlan.nativeSymbol,
row.gasPlan.gasPriceGwei,
row.gasPlan.oneTimeGasUnits,
row.gasPlan.stabilityGasUnitsPerCycle,
`${row.gasPlan.requiredNative} ${row.gasPlan.nativeSymbol}`,
`${row.gasPlan.nativeBalance} ${row.gasPlan.nativeSymbol}`,
`${row.gasPlan.shortfallNative} ${row.gasPlan.nativeSymbol}`,
row.gasPlan.status,
]),
),
"",
"## Etherscan Stability Boundary",
"",
`- Purpose: \`${etherscanStability.purpose}\``,
`- Status: \`${etherscanStability.readinessStatus}\``,
`- Boundary: ${etherscanStability.boundary}`,
"",
"Required on-chain facts for Etherscan/tracker stability:",
"",
...etherscanStability.requiredOnChainFacts.map((item) => `- ${item}`),
"",
"Gas budget role:",
"",
...etherscanStability.gasBudgetRole.map((item) => `- ${item}`),
"",
"Cannot be solved by gas alone:",
"",
...etherscanStability.cannotBeSolvedByGasAlone.map((item) => `- ${item}`),
"",
"## Coffee-Money Start Plan",
"",
`- Operator-observed deployer value available: \`$${coffeeMoneyPlan.operatorObservedUsdAvailable}\``,
`- Assessment: \`${coffeeMoneyPlan.assessment}\``,
`- Immediately useful rows: \`${coffeeMoneyPlan.immediatelyUsefulRows}\``,
`- Planning dust liquidity: \`$${coffeeMoneyPlan.liquidityDustUsdPerRow}\` per row`,
`- Estimated dust liquidity: \`$${coffeeMoneyPlan.estimatedLiquidityDustUsd}\``,
`- Boundary: ${coffeeMoneyPlan.boundary}`,
"",
"Native gas shortfall chains to fill first:",
"",
table(
["Chain", "Symbols", "Shortfall"],
coffeeMoneyPlan.gasShortfallChains.map((row) => [row.chainId, row.symbols, `${row.shortfallNative} ${row.nativeSymbol}`]),
),
"",
"Recommended order:",
"",
...coffeeMoneyPlan.recommendedOrder.map((item) => `- ${item}`),
"",
"### Ethereum Source Inventory",
"",
`- Native ETH: \`${ethereumSourceInventory.native.balance}\``,
"",
table(
["Token", "Balance", "Role", "Status"],
ethereumSourceInventory.tokens.map((token) => [token.symbol, token.balance, token.role, token.balanceStatus]),
),
"",
"Interpretation:",
"",
...ethereumSourceInventory.interpretation.map((item) => `- ${item}`),
"",
"## Funding Path Interpretation",
"",
"- `seed_existing_visible_pool_from_deployer_balance`: token and gas are present; only operator approval and pool-specific seeding rules remain.",
"- `create_or_bind_pool_then_seed_from_deployer_balance`: token and gas are present, but no visible/indexable pool binding exists yet.",
"- `bridge_or_destination_mint_repo_asset_then_seed`: repo-controlled c*/cW* inventory can be moved or minted once the bridge/mint path and destination gas are ready.",
"- `protocolink_swap_candidate_for_non_mintable_quote_asset`: the needed asset is an official/non-mintable quote asset; Protocolink can help only after a live quote proves a deployer-held source asset converts into the exact target token with acceptable minOut.",
"- `external_quote_asset_required`: neither bridge nor Protocolink coverage is classified for that exact non-mintable quote asset.",
"",
"## Non-EVM Funding Requirements",
"",
"These networks are now part of funding scope. The planner resolves non-EVM deployer wallets where the repo can prove them, checks native gas balances where possible, and leaves funding amounts `TBD` until asset IDs and minimum venue targets are bound.",
"",
table(
["Network", "Target", "Wallet", "Source", "Native gas", "Current balance", "Required funding", "Status"],
payload.nonEvmFundingRequirements.map((row) => [
row.network,
row.target,
row.walletAddress,
row.walletSource,
row.nativeGasAsset,
row.currentBalanceStatus,
row.requiredFunding,
row.status,
]),
),
"",
"### Non-EVM Requirement Details",
"",
// One "####" subsection per non-EVM network with its requirement bullets.
...payload.nonEvmFundingRequirements.flatMap((row) => [
`#### ${row.network}`,
"",
...row.requirements.map((requirement) => `- ${requirement}`),
"",
]),
"",
"## Execution Boundary",
"",
"This planner is read-only. It proves whether the deployer currently holds token and gas inventory for each liquidity gap. It does not create pools, add liquidity, approve tokens, bridge assets, or broadcast transactions.",
].join("\n");
// Persist both report artifacts; ensure the status directory exists first.
mkdirSync(resolve(repoRoot, "reports/status"), { recursive: true });
writeFileSync(jsonOut, JSON.stringify(payload, null, 2) + "\n");
writeFileSync(mdOut, md + "\n");
// Print the JSON artifact path for callers/scripts to pick up.
console.log(jsonOut);

View File

@@ -32,7 +32,8 @@ FEE_COLLECTOR=0xF78246eB94c6CB14018E507E60661314E5f4C53f
DEBT_REGISTRY=0x95BC4A997c0670d5DAC64d55cDf3769B53B63C28
POLICY_MANAGER=0x0C4FD27018130A00762a802f91a72D6a64a60F14
TOKEN_IMPLEMENTATION=0x0059e237973179146237aB49f1322E8197c22b21
CCIPWETH9_BRIDGE_CHAIN138=0x9cba0D04Ae5f6f16e3C599025aB97a05c4A593d5
# Alternate legacy deployment (do not use for ops): 0x9cba0D04Ae5f6f16e3C599025aB97a05c4A593d5
CCIPWETH9_BRIDGE_CHAIN138=0xcacfd227A040002e49e2e01626363071324f820a
CCIPWETH10_BRIDGE_CHAIN138=0xe0E93247376aa097dB308B92e6Ba36bA015535D0
LINK_TOKEN=0xb7721dD53A8c629d9f1Ba31a5819AFe250002b03
CCIP_FEE_TOKEN=0xb7721dD53A8c629d9f1Ba31a5819AFe250002b03

View File

@@ -3,6 +3,9 @@
# Use for CI or pre-deploy: dependencies, config files, optional genesis.
# Usage: bash scripts/verify/run-all-validation.sh [--skip-genesis]
# --skip-genesis: do not run validate-genesis.sh (default: run if smom-dbis-138 present).
# Optional: EI_MATRIX_ONCHAIN_AUDIT_CI=1 runs scripts/lib/ei_matrix_onchain_readiness_audit.py (needs
# ETHEREUM_MAINNET_RPC + RPC_URL_138 in .env). EI_MATRIX_ONCHAIN_AUDIT_CI_LIMIT default 120; 0 = full grid.
# Strict gate: set EI_MATRIX_AUDIT_MIN_MAINNET_RAW_CI / EI_MATRIX_AUDIT_MIN_138_RAW_CI.
# Steps: dependencies, config files, cW* mesh matrix (if pair-discovery JSON exists), optional advisory non-EVM public health, genesis.
set -euo pipefail
@@ -170,6 +173,56 @@ run_summary_record_step "3d" "Non-EVM public network health" "success" "$((SECON
step_done "$STEP_STARTED"
echo ""
echo "3d1. d-bis.org CWUSDC Etherscan profile prereq URLs (advisory)..."
STEP_STARTED=$SECONDS
CWU_URLS="$SCRIPT_DIR/check-cwusdc-etherscan-prereq-urls.sh"
CWU_STATUS="skipped"
if [[ -x "$CWU_URLS" ]] && command -v curl &>/dev/null; then
if bash "$CWU_URLS"; then
log_ok "d-bis.org token-profile URLs OK (CWUSDC E2E prereq)"
CWU_STATUS="success"
else
echo " (advisory: one or more URLs not HTTP 200 — fix site or network; see CWUSDC_ETHERSCAN_E2E_RECOMMENDATIONS.md)"
CWU_STATUS="advisory_fail"
fi
else
echo " (skip: curl or $CWU_URLS missing)"
fi
run_summary_record_step "3d1" "CWUSDC d-bis URL prereqs" "$CWU_STATUS" "$((SECONDS - STEP_STARTED))"
step_done "$STEP_STARTED"
echo ""
echo "3e. EI matrix on-chain readiness audit (optional)..."
STEP_STARTED=$SECONDS
EIM_STEP_STATUS="skipped"
if [[ "${EI_MATRIX_ONCHAIN_AUDIT_CI:-}" == "1" ]]; then
# shellcheck disable=SC1091
source "$PROJECT_ROOT/scripts/lib/load-project-env.sh"
if [[ -z "${ETHEREUM_MAINNET_RPC:-}" || -z "${RPC_URL_138:-}" ]]; then
echo " (skip: ETHEREUM_MAINNET_RPC or RPC_URL_138 unset — set RPCs in .env for CI gate)"
else
CI_LIMIT="${EI_MATRIX_ONCHAIN_AUDIT_CI_LIMIT:-120}"
CI_SHARD="${EI_MATRIX_ONCHAIN_AUDIT_CI_SHARD:-200}"
CI_WORKERS="${EI_MATRIX_ONCHAIN_AUDIT_CI_WORKERS:-2}"
MIN_M="${EI_MATRIX_AUDIT_MIN_MAINNET_RAW_CI:-0}"
MIN_138="${EI_MATRIX_AUDIT_MIN_138_RAW_CI:-0}"
JSON_CI="${EI_MATRIX_ONCHAIN_AUDIT_JSON_CI:-reports/status/ei-matrix-readiness-audit-ci.json}"
AUDIT_PY="$PROJECT_ROOT/scripts/lib/ei_matrix_onchain_readiness_audit.py"
EXTRA=(--shard-size "$CI_SHARD" --workers "$CI_WORKERS" --both --min-mainnet-raw "$MIN_M" --min-138-raw "$MIN_138" --json-out "$JSON_CI")
if [[ "$CI_LIMIT" != "0" ]]; then
EXTRA+=(--limit "$CI_LIMIT")
fi
python3 "$AUDIT_PY" "${EXTRA[@]}" || log_err "EI matrix on-chain audit failed (thresholds or RPC)"
log_ok "EI matrix on-chain audit OK ($JSON_CI)"
EIM_STEP_STATUS="success"
fi
else
echo " (skip: set EI_MATRIX_ONCHAIN_AUDIT_CI=1 to run; optional EI_MATRIX_ONCHAIN_AUDIT_CI_LIMIT=0 for full grid)"
fi
run_summary_record_step "3e" "EI matrix on-chain audit" "$EIM_STEP_STATUS" "$((SECONDS - STEP_STARTED))"
step_done "$STEP_STARTED"
echo ""
if [[ "$SKIP_GENESIS" == true ]]; then
echo "4. Genesis — skipped (--skip-genesis)"
run_summary_record_step "4" "Genesis (smom-dbis-138)" "skipped" "0"

View File

@@ -0,0 +1,76 @@
#!/usr/bin/env bash
# Read-only cWUSDC provider propagation monitor.
# Refreshes every upstream cWUSDC artifact via pnpm tasks, then summarizes
# them into a single JSON + Markdown monitoring snapshot under reports/status/.
set -euo pipefail
PROJECT_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$PROJECT_ROOT"
TS="$(date -u +%Y-%m-%dT%H:%M:%SZ)"
JSON_OUT="reports/status/cwusdc-provider-monitoring-snapshot-latest.json"
MD_OUT="reports/status/cwusdc-provider-monitoring-snapshot-latest.md"
# Evidence-bundle date stamp; override with CWUSDC_EVIDENCE_BUNDLE_DATE.
BUNDLE_DATE="${CWUSDC_EVIDENCE_BUNDLE_DATE:-$(date -u +%Y%m%d)}"
# Regenerate all upstream artifacts before summarizing (order matters only in
# that the summary below reads the "-latest" files these tasks write).
pnpm cwusdc:etherscan-dossier
pnpm cwusdc:role-audit
pnpm cwusdc:provider-ci
pnpm cwusdc:doc-links
pnpm cwusdc:role-appendix
pnpm cwusdc:submission-prefill
CWUSDC_EVIDENCE_BUNDLE_DATE="$BUNDLE_DATE" pnpm cwusdc:evidence-bundle
# Summarize the freshly written artifacts into the snapshot payload.
python3 - "$TS" "$JSON_OUT" "$MD_OUT" "$BUNDLE_DATE" <<'PY'
import json
import sys
from pathlib import Path
# argv: timestamp, JSON output path, Markdown output path, bundle date stamp.
ts = sys.argv[1]
json_out = Path(sys.argv[2])
md_out = Path(sys.argv[3])
bundle_date = sys.argv[4]
def load(path):
# Missing artifact files degrade to an empty dict rather than failing.
p = Path(path)
return json.loads(p.read_text()) if p.exists() else {}
dossier = load("reports/status/cwusdc-etherscan-value-dossier-latest.json")
# Older dossiers keep readiness under "summary"; accept either key.
dossier_readiness = dossier.get("readiness") or dossier.get("summary") or {}
provider_ci = load("reports/status/cwusdc-provider-readiness-ci-latest.json")
links = load("reports/status/cwusdc-institutional-doc-link-check-latest.json")
role_appendix = load("reports/status/cwusdc-role-deployment-appendix-latest.json")
payload = {
"schema": "cwusdc-provider-monitoring-snapshot/v1",
"generatedAt": ts,
# "success" only when repo-controlled prereqs pass AND doc links pass.
"status": "success" if provider_ci.get("repoControlledPrereqsPassed") and links.get("status") == "pass" else "attention",
"readyForExternalSubmission": dossier_readiness.get("readyForExternalSubmission"),
"etherscanValueReady": dossier_readiness.get("etherscanValueReady"),
"coinGeckoPriceReady": dossier_readiness.get("coinGeckoPriceReady"),
"repoControlledPrereqsPassed": provider_ci.get("repoControlledPrereqsPassed"),
"externalBlockersAdvisory": provider_ci.get("externalBlockersAdvisory", []),
"docLinkStatus": links.get("status"),
"roleEventCount": role_appendix.get("eventCount"),
"artifacts": {
"dossier": "reports/status/cwusdc-etherscan-value-dossier-latest.json",
"providerCi": "reports/status/cwusdc-provider-readiness-ci-latest.json",
"docLinks": "reports/status/cwusdc-institutional-doc-link-check-latest.json",
"roleAppendix": "reports/status/cwusdc-role-deployment-appendix-latest.json",
"evidenceBundleSha256": f"reports/status/cwusdc-institutional-evidence-bundle-{bundle_date}.sha256",
},
}
json_out.write_text(json.dumps(payload, indent=2) + "\n")
lines = [
"# cWUSDC Provider Monitoring Snapshot",
"",
f"- Generated: `{payload['generatedAt']}`",
f"- Status: `{payload['status']}`",
f"- Ready for external submission: `{payload['readyForExternalSubmission']}`",
f"- Etherscan Value ready: `{payload['etherscanValueReady']}`",
f"- CoinGecko price ready: `{payload['coinGeckoPriceReady']}`",
f"- Repo-controlled prerequisites passed: `{payload['repoControlledPrereqsPassed']}`",
f"- Institutional doc link status: `{payload['docLinkStatus']}`",
f"- Role event count: `{payload['roleEventCount']}`",
f"- External advisory blockers: `{len(payload['externalBlockersAdvisory'])}`",
]
md_out.write_text("\n".join(lines) + "\n")
print(f"Wrote {json_out}")
print(f"Wrote {md_out}")
PY

View File

@@ -0,0 +1,79 @@
#!/usr/bin/env bash
# Run all non-manual cWUSDC provider checks and build a handoff report.
# This script is public/read-only except for report files under reports/status/.
set -euo pipefail
# NOTE: the --help case prints file lines 1-4 via sed, so the header block
# above must stay exactly four lines.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT"
# --strict-repo promotes repo-controlled URL failures to a non-zero exit.
STRICT_REPO=false
while [[ $# -gt 0 ]]; do
case "$1" in
--strict-repo)
STRICT_REPO=true
shift
;;
-h|--help)
sed -n '1,4p' "$0"
echo " --strict-repo Exit non-zero if repo-controlled URL prerequisites fail."
exit 0
;;
*)
echo "Unknown argument: $1" >&2
exit 1
;;
esac
done
# Report artifact paths (all under reports/status/).
PREREQ_JSON="reports/status/cwusdc-etherscan-prereq-urls-latest.json"
PREREQ_MD="reports/status/cwusdc-etherscan-prereq-urls-latest.md"
TRACKERS_JSON="reports/status/cwusdc-external-trackers-live-latest.json"
TRACKERS_MD="reports/status/cwusdc-external-trackers-live-latest.md"
LIQUIDITY_JSON="reports/status/token-aggregation-liquidity-gap-funding-plan-latest.json"
CMC_SANITY_JSON="reports/status/cmc-provider-report-sanity-latest.json"
HANDOFF_JSON="reports/status/cwusdc-provider-handoff-latest.json"
HANDOFF_MD="reports/status/cwusdc-provider-handoff-latest.md"
echo "=== cWUSDC provider non-manual checks ==="
echo "Mode: public/read-only, report writes only"
echo ""
REPO_STATUS=0
# Step 1 is the only gating step; its failure is remembered in REPO_STATUS
# and enforced at exit only under --strict-repo.
echo "1. Repo-controlled Etherscan prerequisite URLs..."
if ! bash "$SCRIPT_DIR/check-cwusdc-etherscan-prereq-urls.sh" --json-out "$PREREQ_JSON" --md-out "$PREREQ_MD"; then
REPO_STATUS=1
echo "   Repo-controlled URL prerequisites failed."
fi
echo ""
# Steps 2 and 3b are advisory: "|| true" keeps the run going on failure.
echo "2. External tracker/indexing probes (advisory)..."
bash "$SCRIPT_DIR/check-cwusdc-external-trackers-live.sh" --json-out "$TRACKERS_JSON" --md-out "$TRACKERS_MD" || true
echo ""
echo "3. Liquidity-gap funding planner (read-only)..."
node "$SCRIPT_DIR/plan-token-aggregation-liquidity-gap-funding.mjs"
echo ""
echo "3b. CMC-shaped report sanity (advisory)..."
python3 "$SCRIPT_DIR/check-cmc-provider-report-sanity.py" || true
echo ""
# Step 4 folds every artifact above into the final handoff report.
echo "4. Provider handoff report..."
python3 "$SCRIPT_DIR/build-cwusdc-provider-handoff-report.py" \
--prereq-json "$PREREQ_JSON" \
--trackers-json "$TRACKERS_JSON" \
--liquidity-json "$LIQUIDITY_JSON" \
--cmc-sanity-json "$CMC_SANITY_JSON" \
--json-out "$HANDOFF_JSON" \
--md-out "$HANDOFF_MD"
echo ""
echo "Handoff: $HANDOFF_MD"
if [[ "$STRICT_REPO" == "true" && "$REPO_STATUS" -ne 0 ]]; then
exit "$REPO_STATUS"
fi
exit 0

View File

@@ -0,0 +1,44 @@
#!/usr/bin/env bash
# Full-grid EI matrix on-chain readiness: mainnet cWUSDC + Chain 138 cUSDC (sharded RPC).
# Sources scripts/lib/load-project-env.sh for RPCs and token defaults.
#
# Environment (optional):
#   EI_MATRIX_AUDIT_SHARD_SIZE       default 400
#   EI_MATRIX_AUDIT_WORKERS          default 3
#   EI_MATRIX_AUDIT_MIN_MAINNET_RAW  default 12000000 (12 USDC units, 6 decimals)
#   EI_MATRIX_AUDIT_MIN_138_RAW      default 0 (set >0 to require 138 cUSDC everywhere)
#   EI_MATRIX_AUDIT_JSON_OUT         default reports/status/ei-matrix-readiness-audit-latest.json
#   EI_MATRIX_AUDIT_GAPS_MAINNET     default reports/status/ei-matrix-readiness-gaps-mainnet-indices.txt
#   EI_MATRIX_AUDIT_GAPS_138         default reports/status/ei-matrix-readiness-gaps-138-indices.txt
#
# Pass-through: any extra args after an optional -- are forwarded to
# ei_matrix_onchain_readiness_audit.py, e.g.:
#   ./scripts/verify/run-ei-matrix-full-readiness-audit.sh -- --offset 0 --limit 500
#
# Exits 1 if any wallet is below configured minima (operator gate).
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT"
# shellcheck disable=SC1091
source "$PROJECT_ROOT/scripts/lib/load-project-env.sh"
# Drop the optional "--" separator before forwarding pass-through args.
[[ "${1:-}" == "--" ]] && shift
# Assemble the audit invocation; environment overrides fall back to defaults.
AUDIT_ARGS=(
  --shard-size "${EI_MATRIX_AUDIT_SHARD_SIZE:-400}"
  --workers "${EI_MATRIX_AUDIT_WORKERS:-3}"
  --both
  --min-mainnet-raw "${EI_MATRIX_AUDIT_MIN_MAINNET_RAW:-12000000}"
  --min-138-raw "${EI_MATRIX_AUDIT_MIN_138_RAW:-0}"
  --report-by-class
  --json-out "${EI_MATRIX_AUDIT_JSON_OUT:-reports/status/ei-matrix-readiness-audit-latest.json}"
  --gaps-mainnet-out "${EI_MATRIX_AUDIT_GAPS_MAINNET:-reports/status/ei-matrix-readiness-gaps-mainnet-indices.txt}"
  --gaps-138-out "${EI_MATRIX_AUDIT_GAPS_138:-reports/status/ei-matrix-readiness-gaps-138-indices.txt}"
)
# Replace this shell with the auditor so its exit code is the script's.
exec python3 "$PROJECT_ROOT/scripts/lib/ei_matrix_onchain_readiness_audit.py" "${AUDIT_ARGS[@]}" "$@"

View File

@@ -78,7 +78,13 @@ def parse_uint(value: str) -> int:
def parse_uints(value: str, count: int) -> list[int]:
    """Extract at least ``count`` unsigned integers from ``value``.

    Prefers a line-wise scan that takes only the first integer found on each
    line (so a line like ``"balance 123 (0x7b)"`` contributes one number).
    If that pass yields fewer than ``count`` values, falls back to collecting
    every integer in the raw text.

    Args:
        value: Text output to scan with the module-level ``UINT_RE`` pattern.
        count: Minimum number of integers required.

    Returns:
        The first ``count`` integers found.

    Raises:
        ValueError: If fewer than ``count`` integers can be found at all.
    """
    # Bug fix: dropped a leading `matches = [...]` assignment that was
    # immediately overwritten by the empty-list initialization below — dead
    # code that also wasted a full regex scan of the input.
    matches: list[int] = []
    for line in value.splitlines():
        line_matches = UINT_RE.findall(line)
        if line_matches:
            matches.append(int(line_matches[0]))
    if len(matches) < count:
        # Fall back to a flat scan of the whole payload.
        matches = [int(match) for match in UINT_RE.findall(value)]
    if len(matches) < count:
        raise ValueError(f"expected at least {count} integers from {value!r}")
    return matches[:count]