chore(repo): sync operator workspace (config, scripts, docs, multi-chain)
Add optional Cosmos/Engine-X/act-runner templates, CWUSDC/EI-matrix tooling, a non-EVM route planner in multi-chain-execution (tests passing), token-list and extraction updates, and documentation (MetaMask matrix, GRU/CWUSDC packets). Ignore institutional evidence tarballs/sha256 under reports/status.

Validated with: bash scripts/verify/run-all-validation.sh --skip-genesis

Co-authored-by: Cursor <cursoragent@cursor.com>
This commit is contained in:
@@ -27,6 +27,10 @@ One-line install (Debian/Ubuntu): `sudo apt install -y sshpass rsync dnsutils ip
|
||||
|
||||
## Scripts
|
||||
|
||||
- `run-cwusdc-provider-nonmanual-checks.sh` - Run all public/read-only cWUSDC provider checks and write `reports/status/cwusdc-provider-handoff-latest.{json,md}`. Does not submit forms, approve tokens, add liquidity, swap, bridge, or broadcast transactions.
|
||||
- `check-cwusdc-provider-readiness-ci.sh` - CI-safe cWUSDC provider gate: fails only when repo-controlled URL prerequisites fail; reports external provider blockers as advisory.
|
||||
- `build-cwusdc-provider-handoff-report.py` - Build a concise cWUSDC provider handoff report from latest JSON probe outputs.
|
||||
- `check-cwusdc-etherscan-prereq-urls.sh` - Refresh public URL prerequisite evidence for Etherscan profile submission; supports `--json-out`, `--md-out`, `--timeout`, and `--retries` (or env `CWUSDC_PROVIDER_URL_TIMEOUT` / `CWUSDC_PROVIDER_URL_RETRIES`).
|
||||
- `backup-npmplus.sh` - Full NPMplus backup (database, API exports, certificates)
|
||||
- `check-contracts-on-chain-138.sh` - Check that Chain 138 deployed contracts have bytecode on-chain (`cast code` for 31 addresses; requires `cast` and RPC access). Use `[RPC_URL]` or env `RPC_URL_138`; `--dry-run` lists addresses only (no RPC calls); `SKIP_EXIT=1` to exit 0 when RPC unreachable.
|
||||
- `check-non-evm-network-health.sh` - Read-only live check for the public Solana, Tron, and XRPL endpoints used in repo docs. Prints a concise status table and can also write `reports/status/non-evm-network-health-latest.json`.
|
||||
|
||||
310
scripts/verify/audit-cusdc-cwusdc-etherscan-feeds.py
Executable file
310
scripts/verify/audit-cusdc-cwusdc-etherscan-feeds.py
Executable file
@@ -0,0 +1,310 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Audit Chain 138 cUSDC and Ethereum cWUSDC explorer feeds.
|
||||
|
||||
This produces an evidence packet for Etherscan/listing submissions. It does not
|
||||
ask Etherscan to merge Chain 138 traffic into the Ethereum token tracker; rather,
|
||||
it documents that Ethereum Mainnet cWUSDC is the wrapped public-network transport
|
||||
representation of canonical Chain 138 cUSDC and summarizes both API feeds.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import datetime as dt
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
CHAIN138_CUSDC = "0xf22258f57794CC8E06237084b353Ab30fFfa640b"
|
||||
MAINNET_CWUSDC = "0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a"
|
||||
CHAIN138_EXPLORER_API = "https://explorer.d-bis.org/api/v2"
|
||||
ETHERSCAN_V2_API = "https://api.etherscan.io/v2/api"
|
||||
REPORT_BASE = Path("reports/status/cusdc-cwusdc-etherscan-feed-audit-latest")
|
||||
|
||||
|
||||
def fetch_json(url: str, timeout: int = 30) -> Any:
    """GET *url* and decode the response body as JSON.

    A fixed User-Agent is sent so explorer operators can identify this
    audit tool in their access logs.
    """
    request = urllib.request.Request(
        url,
        headers={"User-Agent": "dbis-cusdc-cwusdc-feed-audit/1.0"},
    )
    with urllib.request.urlopen(request, timeout=timeout) as resp:
        body = resp.read()
    return json.loads(body.decode("utf-8"))
|
||||
|
||||
|
||||
def human_units(raw: int, decimals: int) -> str:
    """Render a raw integer token amount as a human-readable decimal string.

    The whole part is comma-grouped; trailing zeros of the fractional part
    are dropped, and the fraction is omitted entirely when it is zero.
    """
    negative = raw < 0
    scale = 10 ** decimals
    whole, remainder = divmod(abs(raw), scale)
    text = f"{whole:,}"
    fraction = str(remainder).rjust(decimals, "0").rstrip("0")
    if fraction:
        text = f"{text}.{fraction}"
    return f"-{text}" if negative else text
|
||||
|
||||
|
||||
def addresses_from_transfer(item: dict[str, Any], style: str) -> set[str]:
    """Extract the lowercase from/to addresses from one transfer record.

    Args:
        item: A single transfer entry from either feed.
        style: "blockscout" for the nested ``{"hash": ...}`` participant
            shape; anything else is treated as the flat Etherscan shape.

    Returns:
        Set of lowercase address strings; missing/null sides are skipped.
    """
    if style == "blockscout":
        # Blockscout nests each participant as {"hash": "0x..."} and may emit
        # an explicit null for one side (e.g. mint/burn), so guard the inner
        # lookup instead of defaulting only when the key is absent.
        values = [
            (item.get("from") or {}).get("hash"),
            (item.get("to") or {}).get("hash"),
        ]
    else:
        values = [item.get("from"), item.get("to")]
    return {str(v).lower() for v in values if v}
|
||||
|
||||
|
||||
def summarize_blockscout_transfers(items: list[dict[str, Any]], decimals: int) -> dict[str, Any]:
    """Summarize a Blockscout transfer sample (volume, addresses, methods).

    Args:
        items: Transfer records, newest first (items[0] is the latest).
        decimals: Token decimals used to render human-readable amounts.

    Returns:
        Dict with sample counts/volume, per-method counts, a latest-transfer
        snapshot (or None for an empty sample), and the sorted address list.
    """
    total_raw = 0
    addresses: set[str] = set()
    methods: dict[str, int] = {}
    latest = items[0] if items else None
    for item in items:
        # "total" may be an explicit null in the feed; guard the inner lookup.
        value = (item.get("total") or {}).get("value", "0")
        try:
            total_raw += int(value)
        except (TypeError, ValueError):
            pass  # skip malformed values rather than abort the whole audit
        addresses.update(addresses_from_transfer(item, "blockscout"))
        method = item.get("method") or "unknown"
        methods[method] = methods.get(method, 0) + 1

    latest_summary = None
    if latest is not None:
        latest_total = (latest.get("total") or {}).get("value")
        try:
            latest_units = human_units(int(latest_total or 0), decimals)
        except (TypeError, ValueError):
            # Keep the packet writable even when the latest value is garbled.
            latest_units = None
        latest_summary = {
            "hash": latest.get("transaction_hash"),
            "timestamp": latest.get("timestamp"),
            "from": (latest.get("from") or {}).get("hash"),
            "to": (latest.get("to") or {}).get("hash"),
            "value_raw": latest_total,
            "value_units": latest_units,
            "method": latest.get("method"),
        }
    return {
        "sample_count": len(items),
        "sample_volume_raw": str(total_raw),
        "sample_volume_units": human_units(total_raw, decimals),
        "unique_addresses_in_sample": len(addresses),
        "method_counts": methods,
        "latest_transfer": latest_summary,
        "addresses": sorted(addresses),
    }
|
||||
|
||||
|
||||
def summarize_etherscan_transfers(items: list[dict[str, Any]], decimals: int) -> dict[str, Any]:
    """Summarize an Etherscan ``tokentx`` transfer sample.

    Args:
        items: Transfer records, newest first (items[0] is the latest).
        decimals: Token decimals used to render human-readable amounts.

    Returns:
        Dict mirroring summarize_blockscout_transfers, with Etherscan-style
        latest-transfer fields (blockNumber/timeStamp/methodId/functionName).
    """
    total_raw = 0
    addresses: set[str] = set()
    methods: dict[str, int] = {}
    latest = items[0] if items else None
    for item in items:
        try:
            total_raw += int(item.get("value", "0"))
        except (TypeError, ValueError):
            pass  # skip malformed values rather than abort the whole audit
        addresses.update(addresses_from_transfer(item, "etherscan"))
        # Prefer the raw selector; fall back to the decoded name if present.
        method = item.get("methodId") or item.get("functionName") or "unknown"
        methods[method] = methods.get(method, 0) + 1

    latest_summary = None
    if latest is not None:
        try:
            # Guarded: an empty/None "value" must not crash report generation.
            latest_units = human_units(int(latest.get("value") or 0), decimals)
        except (TypeError, ValueError):
            latest_units = None
        latest_summary = {
            "hash": latest.get("hash"),
            "blockNumber": latest.get("blockNumber"),
            "timeStamp": latest.get("timeStamp"),
            "from": latest.get("from"),
            "to": latest.get("to"),
            "value_raw": latest.get("value"),
            "value_units": latest_units,
            "methodId": latest.get("methodId"),
            "functionName": latest.get("functionName"),
        }
    return {
        "sample_count": len(items),
        "sample_volume_raw": str(total_raw),
        "sample_volume_units": human_units(total_raw, decimals),
        "unique_addresses_in_sample": len(addresses),
        "method_counts": methods,
        "latest_transfer": latest_summary,
        "addresses": sorted(addresses),
    }
|
||||
|
||||
|
||||
def blockscout_token_metadata(address: str) -> dict[str, Any]:
    """Fetch Blockscout token metadata for *address* on Chain 138."""
    url = f"{CHAIN138_EXPLORER_API}/tokens/{address}"
    return fetch_json(url)
|
||||
|
||||
|
||||
def blockscout_transfers(address: str, pages: int) -> list[dict[str, Any]]:
    """Collect up to *pages* pages of Blockscout transfers for *address*.

    Follows Blockscout keyset pagination via ``next_page_params`` and stops
    early when the feed reports no further page.
    """
    collected: list[dict[str, Any]] = []
    next_params: dict[str, Any] | None = None
    base_url = f"{CHAIN138_EXPLORER_API}/tokens/{address}/transfers"
    for _ in range(pages):
        if next_params:
            page = fetch_json(base_url + "?" + urllib.parse.urlencode(next_params))
        else:
            page = fetch_json(base_url)
        collected.extend(page.get("items", []))
        next_params = page.get("next_page_params")
        if not next_params:
            break
    return collected
|
||||
|
||||
|
||||
def etherscan_call(params: dict[str, str], api_key: str) -> Any:
    """Call the Etherscan v2 API for chain id 1 and return its ``result``.

    Raises:
        RuntimeError: on a status-"0" response, except the benign
            "No transactions found" case which is passed through.
    """
    query = {"chainid": "1", **params, "apikey": api_key}
    url = f"{ETHERSCAN_V2_API}?{urllib.parse.urlencode(query)}"
    payload = fetch_json(url)
    failed = payload.get("status") == "0"
    if failed and "No transactions found" not in str(payload.get("message")):
        raise RuntimeError(f"Etherscan API error: {payload.get('message')} {payload.get('result')}")
    return payload.get("result", [])
|
||||
|
||||
|
||||
def build_report(args: argparse.Namespace) -> dict[str, Any]:
    """Build the combined cUSDC/cWUSDC feed-audit report payload.

    Pulls Chain 138 cUSDC metadata and a transfer sample from Blockscout,
    and Ethereum cWUSDC supply plus a recent transfer sample from the
    Etherscan v2 API, then merges both into a single evidence-packet dict.

    Raises:
        SystemExit: when no Etherscan API key is supplied via the CLI flag
            or the ETHERSCAN_API_KEY environment variable.
    """
    api_key = args.etherscan_api_key or os.environ.get("ETHERSCAN_API_KEY", "")
    if not api_key:
        raise SystemExit("ETHERSCAN_API_KEY is required for Ethereum cWUSDC Etherscan API checks")

    # Chain 138 side: token metadata plus a multi-page transfer sample.
    c138_meta = blockscout_token_metadata(args.chain138_cusdc)
    c138_decimals = int(c138_meta.get("decimals") or 6)  # fall back to 6 when the feed omits decimals
    c138_transfers = blockscout_transfers(args.chain138_cusdc, args.chain138_pages)

    # Ethereum side: total supply, then the newest transfers (desc order).
    cw_supply_raw = etherscan_call(
        {
            "module": "stats",
            "action": "tokensupply",
            "contractaddress": args.mainnet_cwusdc,
        },
        api_key,
    )
    cw_transfers = etherscan_call(
        {
            "module": "account",
            "action": "tokentx",
            "contractaddress": args.mainnet_cwusdc,
            "page": "1",
            "offset": str(args.etherscan_offset),
            "sort": "desc",
        },
        api_key,
    )
    if not isinstance(cw_transfers, list):
        # e.g. "No transactions found" yields a non-list result; normalize.
        cw_transfers = []

    c138_summary = summarize_blockscout_transfers(c138_transfers, c138_decimals)
    cw_summary = summarize_etherscan_transfers(cw_transfers, 6)
    common_addresses = sorted(set(c138_summary["addresses"]) & set(cw_summary["addresses"]))

    # Published packet keeps only counts; full address lists stay internal.
    c138_summary_public = {k: v for k, v in c138_summary.items() if k != "addresses"}
    cw_summary_public = {k: v for k, v in cw_summary.items() if k != "addresses"}

    return {
        "generatedAt": dt.datetime.now(dt.UTC).isoformat().replace("+00:00", "Z"),
        "purpose": "Evidence packet for Etherscan/listing feeds: Chain 138 cUSDC is the canonical source asset; Ethereum cWUSDC is the wrapped transport representation.",
        "canonicalRelationship": {
            "sourceChainId": 138,
            "sourceToken": {
                "symbol": "cUSDC",
                "name": "USD Coin (Compliant)",
                "address": args.chain138_cusdc,
                "explorer": f"https://explorer.d-bis.org/token/{args.chain138_cusdc}",
                "api": f"{CHAIN138_EXPLORER_API}/tokens/{args.chain138_cusdc}",
            },
            "wrappedChainId": 1,
            "wrappedToken": {
                "symbol": "cWUSDC",
                "name": "Wrapped cUSDC",
                "address": args.mainnet_cwusdc,
                "explorer": f"https://etherscan.io/token/{args.mainnet_cwusdc}",
                "api": ETHERSCAN_V2_API,
            },
            "mappingSource": "config/token-mapping-multichain.json: 138 cUSDC -> Ethereum Mainnet cWUSDC",
            "trackerLanguage": "cWUSDC is the Ethereum Mainnet compliant wrapped transport representation of canonical Chain 138 cUSDC. It is not Circle-issued USDC.",
        },
        "chain138Cusdc": {
            "metadata": {
                "name": c138_meta.get("name"),
                "symbol": c138_meta.get("symbol"),
                "decimals": c138_meta.get("decimals"),
                "holders": c138_meta.get("holders"),
                "totalSupplyRaw": c138_meta.get("total_supply"),
                "totalSupplyUnits": human_units(int(c138_meta.get("total_supply") or 0), c138_decimals),
            },
            "transferFeed": c138_summary_public,
        },
        "mainnetCwusdc": {
            "metadata": {
                # cWUSDC decimals are fixed at 6 to mirror the source asset.
                "name": "Wrapped cUSDC",
                "symbol": "cWUSDC",
                "decimals": "6",
                "totalSupplyRaw": str(cw_supply_raw),
                "totalSupplyUnits": human_units(int(cw_supply_raw or 0), 6),
            },
            "transferFeed": cw_summary_public,
        },
        "crossFeedSignals": {
            "commonAddressesInRecentSamples": common_addresses,
            "commonAddressCount": len(common_addresses),
            "interpretation": "Common addresses are supporting evidence only. Canonical linkage is established by the token mapping, metadata registry, and bridge/listing documentation; Etherscan itself will only index Ethereum Mainnet cWUSDC traffic for the token page.",
        },
        "etherscanSubmissionNote": "Ask Etherscan to list the Ethereum token as Wrapped cUSDC (cWUSDC), with Chain 138 cUSDC identified as the canonical source asset in the description/supporting links. Do not ask Etherscan to add Chain 138 transfer counts to the Ethereum token tracker totals.",
    }
|
||||
|
||||
|
||||
def write_markdown(report: dict[str, Any], path: Path) -> None:
    """Render the audit *report* as the Markdown evidence artifact at *path*.

    The layout (headings, table columns, fenced values) is part of the
    published handoff format; content is taken verbatim from the report.
    """
    rel = report["canonicalRelationship"]
    c138 = report["chain138Cusdc"]
    cw = report["mainnetCwusdc"]
    signals = report["crossFeedSignals"]
    lines = [
        "# cUSDC / cWUSDC Etherscan Feed Audit",
        "",
        f"Generated: `{report['generatedAt']}`",
        "",
        "## Relationship",
        "",
        f"- Source asset: Chain 138 `cUSDC` at `{rel['sourceToken']['address']}`",
        f"- Wrapped transport asset: Ethereum Mainnet `cWUSDC` at `{rel['wrappedToken']['address']}`",
        f"- Mapping source: `{rel['mappingSource']}`",
        f"- Tracker language: {rel['trackerLanguage']}",
        "",
        "## API Feed Summary",
        "",
        "| Feed | Supply | Recent sample transfers | Recent sample volume | Unique addresses in sample |",
        "|---|---:|---:|---:|---:|",
        f"| Chain 138 cUSDC Blockscout | {c138['metadata']['totalSupplyUnits']} | {c138['transferFeed']['sample_count']} | {c138['transferFeed']['sample_volume_units']} | {c138['transferFeed']['unique_addresses_in_sample']} |",
        f"| Ethereum cWUSDC Etherscan | {cw['metadata']['totalSupplyUnits']} | {cw['transferFeed']['sample_count']} | {cw['transferFeed']['sample_volume_units']} | {cw['transferFeed']['unique_addresses_in_sample']} |",
        "",
        "## Latest Transfers",
        "",
        # latest_transfer may be None for an empty sample; the `or {}` keeps
        # the rendered line valid (shows `None`) instead of raising.
        f"- Chain 138 cUSDC latest: `{(c138['transferFeed']['latest_transfer'] or {}).get('hash')}`",
        f"- Ethereum cWUSDC latest: `{(cw['transferFeed']['latest_transfer'] or {}).get('hash')}`",
        "",
        "## Cross-Feed Signal",
        "",
        f"- Common addresses in recent API samples: `{signals['commonAddressCount']}`",
        f"- Interpretation: {signals['interpretation']}",
        "",
        "## Etherscan Submission Note",
        "",
        report["etherscanSubmissionNote"],
        "",
    ]
    path.write_text("\n".join(lines), encoding="utf-8")
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: build the feed-audit report, write JSON and Markdown."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--chain138-cusdc", default=CHAIN138_CUSDC)
    parser.add_argument("--mainnet-cwusdc", default=MAINNET_CWUSDC)
    parser.add_argument("--etherscan-api-key", default="")
    parser.add_argument("--chain138-pages", type=int, default=3)
    parser.add_argument("--etherscan-offset", type=int, default=150)
    parser.add_argument("--json-out", default=f"{REPORT_BASE}.json")
    parser.add_argument("--md-out", default=f"{REPORT_BASE}.md")
    args = parser.parse_args()

    report = build_report(args)
    json_path = Path(args.json_out)
    md_path = Path(args.md_out)
    json_path.parent.mkdir(parents=True, exist_ok=True)
    json_path.write_text(json.dumps(report, indent=2) + "\n", encoding="utf-8")
    write_markdown(report, md_path)
    for written in (json_path, md_path):
        print(f"Wrote {written}")
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
245
scripts/verify/audit-cwusdc-mainnet-roles.py
Normal file
245
scripts/verify/audit-cwusdc-mainnet-roles.py
Normal file
@@ -0,0 +1,245 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Read-only role/control audit for Ethereum Mainnet cWUSDC."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import datetime as dt
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
REPORT_JSON = ROOT / "reports" / "status" / "cwusdc-mainnet-role-audit-latest.json"
|
||||
REPORT_MD = ROOT / "reports" / "status" / "cwusdc-mainnet-role-audit-latest.md"
|
||||
CWUSDC = "0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a"
|
||||
DEPLOYER = "0x4A666F96fC8764181194447A7dFdb7d471b301C8"
|
||||
CW_BRIDGE_MAINNET_FALLBACK = "0x2bF74583206A49Be07E0E8A94197C12987AbD7B5"
|
||||
ETHERSCAN_V2_API = "https://api.etherscan.io/v2/api"
|
||||
|
||||
|
||||
def load_dotenv(path: Path) -> None:
    """Load KEY=VALUE pairs from *path* into os.environ.

    Existing environment variables are never overridden; comments, blank
    lines, and lines without '=' are skipped. Surrounding double then
    single quotes are stripped from values. Missing files are a no-op.
    """
    if not path.exists():
        return
    for raw_line in path.read_text().splitlines():
        entry = raw_line.strip()
        if not entry or entry.startswith("#") or "=" not in entry:
            continue
        name, _, raw_value = entry.partition("=")
        name = name.strip()
        cleaned = raw_value.strip().strip('"').strip("'")
        if name and name not in os.environ:
            os.environ[name] = cleaned
|
||||
|
||||
|
||||
def cast_call(contract: str, signature: str, *args: str, rpc_url: str) -> str:
    """Run a read-only ``cast call`` against *contract* and return trimmed stdout.

    Raises:
        RuntimeError: when the cast process exits non-zero; the message is
            stderr when available, otherwise stdout.
    """
    command = ["cast", "call", contract, signature, *args, "--rpc-url", rpc_url]
    proc = subprocess.run(
        command,
        cwd=ROOT,
        text=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=False,
    )
    if proc.returncode != 0:
        message = proc.stderr.strip() or proc.stdout.strip()
        raise RuntimeError(message)
    return proc.stdout.strip()
|
||||
|
||||
|
||||
def cast_keccak(signature: str) -> str:
    """Return the keccak256 hash of *signature* via the ``cast keccak`` CLI.

    Raises:
        RuntimeError: when the cast process exits non-zero.
    """
    proc = subprocess.run(
        ["cast", "keccak", signature],
        cwd=ROOT,
        text=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=False,
    )
    if proc.returncode != 0:
        message = proc.stderr.strip() or proc.stdout.strip()
        raise RuntimeError(message)
    return proc.stdout.strip()
|
||||
|
||||
|
||||
def fetch_json(url: str) -> Any:
    """GET *url* (30s timeout) and decode the response body as JSON."""
    request = urllib.request.Request(
        url,
        headers={"User-Agent": "dbis-cwusdc-role-audit/1.0"},
    )
    with urllib.request.urlopen(request, timeout=30) as resp:
        body = resp.read().decode("utf-8")
    return json.loads(body)
|
||||
|
||||
|
||||
def etherscan_logs(api_key: str, address: str, topic0: str) -> list[dict[str, Any]]:
    """Fetch all Ethereum Mainnet logs for *address* filtered by *topic0*.

    Returns an empty list when no API key is provided or when the response
    does not carry a list-shaped ``result``.
    """
    if not api_key:
        return []
    query = urllib.parse.urlencode(
        {
            "chainid": "1",
            "module": "logs",
            "action": "getLogs",
            "fromBlock": "0",
            "toBlock": "latest",
            "address": address,
            "topic0": topic0,
            "apikey": api_key,
        }
    )
    payload = fetch_json(f"{ETHERSCAN_V2_API}?{query}")
    if not isinstance(payload, dict):
        return []
    result = payload.get("result")
    return result if isinstance(result, list) else []
|
||||
|
||||
|
||||
def topic_to_address(topic: str) -> str:
    """Convert a 32-byte indexed log topic into a 0x-prefixed 20-byte address.

    Takes the last 40 hex characters (the address is right-aligned in the
    padded topic word).
    """
    return f"0x{topic[-40:]}"
|
||||
|
||||
|
||||
def bool_from_cast(value: str) -> bool:
    """Interpret `cast call` boolean output: 'true' or '1' (case-insensitive)."""
    normalized = value.strip().lower()
    return normalized == "true" or normalized == "1"
|
||||
|
||||
|
||||
def candidate_addresses() -> dict[str, str]:
    """Assemble the labeled candidate addresses to check for cWUSDC roles.

    Sources are environment variables (with repo fallbacks for the deployer
    and the mainnet bridge) plus known venue/vault addresses. Entries that
    are empty or not well-formed 0x addresses are dropped.
    """
    raw = {
        "deployer": os.environ.get("DEPLOYER_ADDRESS") or DEPLOYER,
        "cwBridgeMainnet": os.environ.get("CW_BRIDGE_MAINNET", "") or CW_BRIDGE_MAINNET_FALLBACK,
        "ccipRelayBridgeMainnet": os.environ.get("CCIP_RELAY_BRIDGE_MAINNET", ""),
        "mainnetCcipWeth9Bridge": os.environ.get("MAINNET_CCIP_WETH9_BRIDGE", ""),
        "mainnetCcipWeth10Bridge": os.environ.get("MAINNET_CCIP_WETH10_BRIDGE", ""),
        "ccipEthRouter": os.environ.get("CCIP_ETH_ROUTER", ""),
        "uniswapV3CwusdcUsdcPool": "0x1Cf2e685682C7F7beF508F0Af15Dfb5CDda01ee3",
        "uniswapV2CwusdcUsdcPair": "0xC28706F899266b36BC43cc072b3a921BDf2C48D9",
        "engineXVirtualBatchVault": "0xf108586d1FC330EA1D4EA4ff8fd983cde94279B1",
    }

    def _looks_like_address(value: str) -> bool:
        # Shape check only (0x + 40 hex chars); no checksum validation.
        return bool(value) and value.startswith("0x") and len(value) == 42

    return {label: addr for label, addr in raw.items() if _looks_like_address(addr)}
|
||||
|
||||
|
||||
def build(args: argparse.Namespace) -> dict[str, Any]:
    """Assemble the read-only role/control audit payload for mainnet cWUSDC.

    Resolves role IDs and admins on-chain via `cast call`, checks each known
    candidate address with hasRole(), and — when an Etherscan API key is
    available — reconstructs effective role membership from RoleGranted /
    RoleRevoked event logs.

    Raises:
        SystemExit: when no mainnet RPC URL is supplied or found in env.
    """
    # Load env files without overriding the live environment.
    load_dotenv(ROOT / ".env")
    load_dotenv(ROOT / "smom-dbis-138" / ".env")
    rpc_url = args.rpc_url or os.environ.get("ETHEREUM_MAINNET_RPC") or os.environ.get("MAINNET_RPC_URL")
    if not rpc_url:
        raise SystemExit("ETHEREUM_MAINNET_RPC or --rpc-url is required")

    # Resolve the token's role identifiers, then each role's admin role.
    roles = {
        "DEFAULT_ADMIN_ROLE": cast_call(args.token, "DEFAULT_ADMIN_ROLE()(bytes32)", rpc_url=rpc_url),
        "MINTER_ROLE": cast_call(args.token, "MINTER_ROLE()(bytes32)", rpc_url=rpc_url),
        "BURNER_ROLE": cast_call(args.token, "BURNER_ROLE()(bytes32)", rpc_url=rpc_url),
    }
    role_admins = {
        role_name: cast_call(args.token, "getRoleAdmin(bytes32)(bytes32)", role_id, rpc_url=rpc_url)
        for role_name, role_id in roles.items()
    }

    # hasRole() check for every candidate address against every role.
    candidates = candidate_addresses()
    checks: dict[str, Any] = {}
    for label, address in candidates.items():
        checks[label] = {"address": address, "roles": {}}
        for role_name, role_id in roles.items():
            checks[label]["roles"][role_name] = bool_from_cast(
                cast_call(args.token, "hasRole(bytes32,address)(bool)", role_id, address, rpc_url=rpc_url)
            )

    # Flatten to the candidates that hold at least one role.
    privileged = [
        {
            "label": label,
            "address": data["address"],
            "roles": [role for role, has_role in data["roles"].items() if has_role],
        }
        for label, data in checks.items()
        if any(data["roles"].values())
    ]

    # Optional: event-log reconstruction of role membership (best effort).
    api_key = os.environ.get("ETHERSCAN_API_KEY", "")
    event_topics = {
        "RoleGranted": cast_keccak("RoleGranted(bytes32,address,address)"),
        "RoleRevoked": cast_keccak("RoleRevoked(bytes32,address,address)"),
    }
    events: list[dict[str, Any]] = []
    if api_key:
        for event_name, topic0 in event_topics.items():
            for item in etherscan_logs(api_key, args.token, topic0):
                topics = item.get("topics") or []
                if len(topics) < 4:
                    # Both events carry signature + 3 indexed params; skip malformed rows.
                    continue
                role_id = topics[1]
                account = topic_to_address(topics[2])
                sender = topic_to_address(topics[3])
                # Map the raw role id back to a known role name when possible.
                role_name = next((name for name, value in roles.items() if value.lower() == role_id.lower()), role_id)
                events.append(
                    {
                        "event": event_name,
                        "role": role_name,
                        "roleId": role_id,
                        "account": account,
                        "sender": sender,
                        # blockNumber may arrive hex-encoded; normalize to int in that case.
                        "blockNumber": int(str(item.get("blockNumber", "0")), 16) if str(item.get("blockNumber", "")).startswith("0x") else item.get("blockNumber"),
                        "transactionHash": item.get("transactionHash"),
                        "logIndex": item.get("logIndex"),
                    }
                )
    # Replay grant/revoke events in (block, logIndex) order to derive the
    # effective member set per known role.
    effective_from_events: dict[str, set[str]] = {role: set() for role in roles}
    for item in sorted(events, key=lambda row: (int(row.get("blockNumber") or 0), int(str(row.get("logIndex") or "0x0"), 16) if str(row.get("logIndex", "")).startswith("0x") else 0)):
        role = item["role"]
        if role not in effective_from_events:
            continue  # unknown role id (kept in events, excluded from replay)
        if item["event"] == "RoleGranted":
            effective_from_events[role].add(item["account"])
        elif item["event"] == "RoleRevoked":
            effective_from_events[role].discard(item["account"])

    return {
        "schema": "cwusdc-mainnet-role-audit/v1",
        "generatedAt": dt.datetime.now(dt.UTC).isoformat().replace("+00:00", "Z"),
        "network": {"chainId": 1, "name": "Ethereum Mainnet"},
        "token": {"address": args.token, "symbol": "cWUSDC", "name": "Wrapped cUSDC"},
        "roles": roles,
        "roleAdmins": role_admins,
        "candidateChecks": checks,
        "privilegedCandidates": privileged,
        "eventLogReview": {
            "checked": bool(api_key),
            "topics": event_topics,
            "eventCount": len(events),
            "events": events,
            "effectiveMembersFromEvents": {role: sorted(values) for role, values in effective_from_events.items()},
        },
        "limitations": [
            "This audit checks known candidate addresses only.",
            "Event-log reconstruction is included when ETHERSCAN_API_KEY is available, but provider log limits or pruned responses can still require manual verification.",
            "This is a read-only control snapshot, not a formal third-party audit.",
        ],
    }
|
||||
|
||||
|
||||
def write_md(payload: dict[str, Any], path: Path) -> None:
    """Render the role-audit *payload* as the Markdown report at *path*.

    Emits the role-ID table, per-candidate role checks, the event-log
    membership reconstruction, and the limitations list. The layout is
    part of the published report format.
    """
    lines = [
        "# cWUSDC Mainnet Role Audit",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Token: `{payload['token']['address']}`",
        "",
        "## Role IDs",
        "",
        "| Role | ID | Admin role ID |",
        "|---|---|---|",
    ]
    for role, role_id in payload["roles"].items():
        lines.append(f"| `{role}` | `{role_id}` | `{payload['roleAdmins'][role]}` |")
    lines.extend(["", "## Candidate Role Checks", "", "| Label | Address | Admin | Minter | Burner |", "|---|---|---:|---:|---:|"])
    for label, data in payload["candidateChecks"].items():
        roles = data["roles"]
        lines.append(
            f"| `{label}` | `{data['address']}` | `{roles['DEFAULT_ADMIN_ROLE']}` | `{roles['MINTER_ROLE']}` | `{roles['BURNER_ROLE']}` |"
        )
    lines.extend(["", "## Event-Log Role Reconstruction", "", f"- Checked: `{payload['eventLogReview']['checked']}`", f"- Event count: `{payload['eventLogReview']['eventCount']}`", "", "| Role | Effective members from events |", "|---|---|"])
    for role, members in payload["eventLogReview"]["effectiveMembersFromEvents"].items():
        lines.append(f"| `{role}` | `{', '.join(members) if members else 'none observed'}` |")
    lines.extend(["", "## Limitations", ""])
    lines.extend(f"- {item}" for item in payload["limitations"])
    path.write_text("\n".join(lines) + "\n")
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point for the role audit: build the payload, write JSON + MD."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--token", default=CWUSDC)
    parser.add_argument("--rpc-url", default="")
    parser.add_argument("--json-out", type=Path, default=REPORT_JSON)
    parser.add_argument("--md-out", type=Path, default=REPORT_MD)
    args = parser.parse_args()

    payload = build(args)
    args.json_out.parent.mkdir(parents=True, exist_ok=True)
    args.json_out.write_text(json.dumps(payload, indent=2) + "\n")
    write_md(payload, args.md_out)
    for target in (args.json_out, args.md_out):
        print(f"Wrote {target.relative_to(ROOT)}")
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
17
scripts/verify/audit-ei-matrix-onchain-readiness.sh
Executable file
17
scripts/verify/audit-ei-matrix-onchain-readiness.sh
Executable file
@@ -0,0 +1,17 @@
|
||||
#!/usr/bin/env bash
# On-chain readiness audit: EI matrix vs mainnet cWUSDC and/or Chain 138 cUSDC.
# Thin wrapper: loads the project env, then delegates all work to the Python
# auditor in scripts/lib/. Loads scripts/lib/load-project-env.sh for RPCs and
# token defaults.
#
# Usage:
#   ./scripts/verify/audit-ei-matrix-onchain-readiness.sh --mainnet-only --min-mainnet-raw 12000000
#   ./scripts/verify/audit-ei-matrix-onchain-readiness.sh --both --min-mainnet-raw 1 --min-138-raw 1 --workers 6 --report-by-class
# Optional leading "--" is stripped (for shells that pass it through).
# Exit 1 if any wallet is below configured minima (CI gate). Use min 0 to only report.
set -euo pipefail
# Resolve the repo root relative to this script so it works from any CWD.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT"
# shellcheck disable=SC1091
source "$PROJECT_ROOT/scripts/lib/load-project-env.sh"
# Drop a leading "--" that some wrappers insert before forwarded arguments.
[[ "${1:-}" == "--" ]] && shift
# exec replaces this shell with the Python auditor; its exit code is ours.
exec python3 "$PROJECT_ROOT/scripts/lib/ei_matrix_onchain_readiness_audit.py" "$@"
|
||||
226
scripts/verify/build-cmc-top10-ecosystem-coverage.py
Executable file
226
scripts/verify/build-cmc-top10-ecosystem-coverage.py
Executable file
@@ -0,0 +1,226 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Build a repo-side CMC top-10 ecosystem coverage matrix."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
OUT_JSON = ROOT / "reports/status/cmc-top10-ecosystem-coverage-latest.json"
|
||||
OUT_MD = ROOT / "docs/04-configuration/CMC_TOP10_ECOSYSTEM_ACCESSIBILITY_MATRIX.md"
|
||||
|
||||
SOURCE = {
|
||||
"name": "CoinMarketCap coins page",
|
||||
"url": "https://coinmarketcap.com/coins/",
|
||||
"observedAt": "2026-05-11",
|
||||
"note": "Ranks are volatile; rerun or update this snapshot before external outreach.",
|
||||
}
|
||||
|
||||
TOKENS: list[dict[str, Any]] = [
|
||||
{
|
||||
"rank": 1,
|
||||
"symbol": "BTC",
|
||||
"name": "Bitcoin",
|
||||
"accessibility": "non_evm_wrapped_lane_required",
|
||||
"dbisTouchpoint": "cWBTC / BTC reserve-or-wrapper evidence lane",
|
||||
"timeframe": "1-2 weeks repo evidence after wallet/venue binding",
|
||||
"repoDoableNext": [
|
||||
"Bind canonical BTC custody/address evidence fields.",
|
||||
"Add BTC venue and wrapped-asset evidence placeholders.",
|
||||
"Keep claims as provenance-only until custody and liquidity are independently evidenced.",
|
||||
],
|
||||
"requiresExternalHuman": False,
|
||||
},
|
||||
{
|
||||
"rank": 2,
|
||||
"symbol": "ETH",
|
||||
"name": "Ethereum",
|
||||
"accessibility": "native_evm_core_surface",
|
||||
"dbisTouchpoint": "WETH/cWETH, Ethereum Mainnet cWUSDC, gas/quote evidence",
|
||||
"timeframe": "1-3 days repo hardening",
|
||||
"repoDoableNext": [
|
||||
"Refresh Ethereum pool and quote-side evidence.",
|
||||
"Add CMC/Dex/Gecko sanity checks for ETH-paired surfaces.",
|
||||
],
|
||||
"requiresExternalHuman": False,
|
||||
},
|
||||
{
|
||||
"rank": 3,
|
||||
"symbol": "USDT",
|
||||
"name": "Tether USDt",
|
||||
"accessibility": "evm_quote_asset_and_wrapped_transport",
|
||||
"dbisTouchpoint": "cUSDT / cWUSDT",
|
||||
"timeframe": "1-3 days repo-side; provider acceptance external",
|
||||
"repoDoableNext": [
|
||||
"Refresh cUSDT/cWUSDT provider packet fields.",
|
||||
"Validate official USDT quote addresses per chain.",
|
||||
],
|
||||
"requiresExternalHuman": False,
|
||||
},
|
||||
{
|
||||
"rank": 4,
|
||||
"symbol": "XRP",
|
||||
"name": "XRP",
|
||||
"accessibility": "xrpl_lane_required",
|
||||
"dbisTouchpoint": "XRPLAdapter / wXRP / MintBurnController",
|
||||
"timeframe": "1-2 weeks after wallet/trustline binding",
|
||||
"repoDoableNext": [
|
||||
"Bind XRPL account and destination tag policy placeholders.",
|
||||
"Document XRP reserve, trustline, and issuer requirements.",
|
||||
],
|
||||
"requiresExternalHuman": False,
|
||||
},
|
||||
{
|
||||
"rank": 5,
|
||||
"symbol": "BNB",
|
||||
"name": "BNB",
|
||||
"accessibility": "evm_compatible_bsc_lane",
|
||||
"dbisTouchpoint": "BSC cW* routing and gas surface",
|
||||
"timeframe": "2-5 days repo-side",
|
||||
"repoDoableNext": [
|
||||
"Refresh BSC cW* pool and official quote evidence.",
|
||||
"Check BNB gas budget and CMC report values.",
|
||||
],
|
||||
"requiresExternalHuman": False,
|
||||
},
|
||||
{
|
||||
"rank": 6,
|
||||
"symbol": "USDC",
|
||||
"name": "USD Coin",
|
||||
"accessibility": "primary_focus_ready_for_submission",
|
||||
"dbisTouchpoint": "cUSDC / cWUSDC",
|
||||
"timeframe": "submission-ready now; price/listing acceptance external",
|
||||
"repoDoableNext": [
|
||||
"Keep Etherscan/CoinGecko/CMC/DexScreener packets current.",
|
||||
"Maintain exact CAIP-19 discipline for Mainnet cWUSDC.",
|
||||
],
|
||||
"requiresExternalHuman": False,
|
||||
},
|
||||
{
|
||||
"rank": 7,
|
||||
"symbol": "SOL",
|
||||
"name": "Solana",
|
||||
"accessibility": "solana_spl_lane_required",
|
||||
"dbisTouchpoint": "SolanaAdapter and config/solana-gru-bridge-lineup.json",
|
||||
"timeframe": "3-7 days repo-side if mints are bound",
|
||||
"repoDoableNext": [
|
||||
"Bind SPL mint placeholders and minimum rent/gas targets.",
|
||||
"Separate confirmed Chain 138 adapter evidence from native Solana liquidity claims.",
|
||||
],
|
||||
"requiresExternalHuman": False,
|
||||
},
|
||||
{
|
||||
"rank": 8,
|
||||
"symbol": "TRX",
|
||||
"name": "TRON",
|
||||
"accessibility": "tron_wallet_and_energy_lane_required",
|
||||
"dbisTouchpoint": "TronAdapter and derived/canonical Tron wallet evidence",
|
||||
"timeframe": "3-7 days after address confirmation",
|
||||
"repoDoableNext": [
|
||||
"Bind canonical Tron address policy placeholder.",
|
||||
"Document TRX energy/bandwidth and TRC-20 inventory requirements.",
|
||||
],
|
||||
"requiresExternalHuman": False,
|
||||
},
|
||||
{
|
||||
"rank": 9,
|
||||
"symbol": "DOGE",
|
||||
"name": "Dogecoin",
|
||||
"accessibility": "new_non_evm_adapter_or_custody_lane",
|
||||
"dbisTouchpoint": "future DOGE wrapper/custody evidence lane",
|
||||
"timeframe": "1-3 weeks for serious repo evidence",
|
||||
"repoDoableNext": [
|
||||
"Create DOGE custody and bridge evidence stub.",
|
||||
"Keep DOGE out of provider claims until wallet, reserve, and venue evidence exist.",
|
||||
],
|
||||
"requiresExternalHuman": False,
|
||||
},
|
||||
{
|
||||
"rank": 10,
|
||||
"symbol": "HYPE",
|
||||
"name": "Hyperliquid",
|
||||
"accessibility": "new_chain_or_venue_research_required",
|
||||
"dbisTouchpoint": "future Hyperliquid venue/asset touchpoint",
|
||||
"timeframe": "1-3 weeks for discovery/evidence",
|
||||
"repoDoableNext": [
|
||||
"Open a research stub for chain/asset identifiers and supported custody paths.",
|
||||
"Do not include HYPE in liquidity or settlement claims until identifiers are bound.",
|
||||
],
|
||||
"requiresExternalHuman": False,
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
def table(headers: list[str], rows: list[list[Any]]) -> str:
    """Render a Markdown table from *headers* and *rows*.

    List-valued cells are joined with ``<br>``; pipes and newlines inside any
    cell are escaped so they cannot break the table layout.
    """

    def render_cell(raw: Any) -> str:
        text = "<br>".join(str(part) for part in raw) if isinstance(raw, list) else str(raw)
        return text.replace("|", "\\|").replace("\n", "<br>")

    def render_row(cells: list[Any]) -> str:
        return f"| {' | '.join(render_cell(cell) for cell in cells)} |"

    separator = f"| {' | '.join(['---'] * len(headers))} |"
    rendered = [render_row(list(headers)), separator]
    rendered.extend(render_row(row) for row in rows)
    return "\n".join(rendered)
|
||||
|
||||
|
||||
def main() -> int:
    """Generate the CMC top-10 ecosystem coverage report (JSON + Markdown).

    Writes the payload to ``OUT_JSON`` and a Markdown matrix to ``OUT_MD``,
    then returns 0 as the process exit status. Relies on module-level
    ``SOURCE``, ``TOKENS``, ``OUT_JSON``, ``OUT_MD``, ``ROOT`` and ``table``.
    """
    payload = {
        "schema": "cmc-top10-ecosystem-coverage/v1",
        "generatedAt": datetime.now(timezone.utc).isoformat(),
        "source": SOURCE,
        "summary": {
            "tokenCount": len(TOKENS),
            # Tokens whose next steps are entirely repo-side (no external human gate).
            "repoDoableWithoutOutsideHumanCount": sum(1 for token in TOKENS if not token["requiresExternalHuman"]),
            "externalAcceptanceStillRequired": [
                "CMC/CoinGecko/DexScreener/Etherscan listing and price acceptance",
                "Any custody, bank, exchange, or provider-side manual review",
            ],
        },
        "tokens": TOKENS,
    }
    OUT_JSON.parent.mkdir(parents=True, exist_ok=True)
    OUT_JSON.write_text(json.dumps(payload, indent=2) + "\n")

    # Markdown companion: header metadata, one table row per token, then the
    # operating-rule boundary statement.
    lines = [
        "# CMC Top 10 Ecosystem Accessibility Matrix",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Source: [{SOURCE['name']}]({SOURCE['url']})",
        f"- Observed: `{SOURCE['observedAt']}`",
        f"- Boundary: {SOURCE['note']}",
        "",
        table(
            ["Rank", "Token", "Accessibility", "DBIS touchpoint", "Repo-side timeframe", "Repo-doable next work"],
            [
                [
                    token["rank"],
                    f"{token['symbol']} ({token['name']})",
                    token["accessibility"],
                    token["dbisTouchpoint"],
                    token["timeframe"],
                    token["repoDoableNext"],
                ]
                for token in TOKENS
            ],
        ),
        "",
        "## Operating Rule",
        "",
        "This matrix is a repo-side planning artifact. It improves DBIS coverage discipline, but it does not imply that any external tracker, wallet, exchange, custodian, or market-data provider has accepted a token.",
    ]
    OUT_MD.parent.mkdir(parents=True, exist_ok=True)
    OUT_MD.write_text("\n".join(lines) + "\n")
    print(f"Wrote {OUT_JSON.relative_to(ROOT)}")
    print(f"Wrote {OUT_MD.relative_to(ROOT)}")
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
528
scripts/verify/build-cwusdc-etherscan-value-dossier.py
Executable file
528
scripts/verify/build-cwusdc-etherscan-value-dossier.py
Executable file
@@ -0,0 +1,528 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Build a submission-ready cWUSDC Etherscan Value dossier.
|
||||
|
||||
The dossier intentionally separates Ethereum Mainnet cWUSDC evidence from
|
||||
global cUSDC/cWUSDC family context. It is read-only: it runs monitors and proof
|
||||
generators, then summarizes what can be submitted and what remains externally
|
||||
blocked.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import datetime as dt
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
REPORT_JSON = ROOT / "reports" / "status" / "cwusdc-etherscan-value-dossier-latest.json"
|
||||
REPORT_MD = ROOT / "reports" / "status" / "cwusdc-etherscan-value-dossier-latest.md"
|
||||
|
||||
CWUSDC = "0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a"
|
||||
ETHERSCAN_CHAINLIST_URL = "https://api.etherscan.io/v2/chainlist"
|
||||
ETHERSCAN_V2_API = "https://api.etherscan.io/v2/api"
|
||||
DEPLOYER_FALLBACK = "0x4A666F96fC8764181194447A7dFdb7d471b301C8"
|
||||
L2_DEPOSIT_CHAINS = {
|
||||
"10": "OP Mainnet",
|
||||
"42161": "Arbitrum One Mainnet",
|
||||
}
|
||||
ARTIFACTS = {
|
||||
"mainnetSupply": ROOT / "reports" / "status" / "cwusdc-supply-circulating-attestation-latest.json",
|
||||
"globalFamilySupply": ROOT / "reports" / "status" / "global-cusdc-cwusdc-family-supply-proof-latest.json",
|
||||
"feedAudit": ROOT / "reports" / "status" / "cusdc-cwusdc-etherscan-feed-audit-latest.json",
|
||||
"propagation": ROOT / "reports" / "status" / "cwusdc-etherscan-value-propagation-latest.json",
|
||||
}
|
||||
DOCS = {
|
||||
"executionPlan": ROOT / "docs" / "04-configuration" / "etherscan" / "CWUSDC_ETHERSCAN_VALUE_EXECUTION_PLAN.md",
|
||||
"bridgeLayerMap": ROOT / "docs" / "04-configuration" / "etherscan" / "CWUSDC_ETHERSCAN_BRIDGE_CROSSCHAIN_LAYER_MAP.md",
|
||||
"profilePacket": ROOT / "docs" / "04-configuration" / "etherscan" / "CWUSDC_MAINNET_ETHERSCAN_PROFILE_PACKET.md",
|
||||
"e2eRecommendations": ROOT / "docs" / "04-configuration" / "etherscan" / "CWUSDC_ETHERSCAN_E2E_RECOMMENDATIONS.md",
|
||||
"trackerPacket": ROOT / "docs" / "04-configuration" / "coingecko" / "CWUSDC_MAINNET_TRACKER_SUBMISSION_PACKET.md",
|
||||
}
|
||||
|
||||
|
||||
def load_dotenv(path: Path, env: dict[str, str]) -> dict[str, str]:
    """Overlay ``KEY=VALUE`` pairs from *path* onto *env* without overriding it.

    Blank lines, ``#`` comments, and lines with no ``=`` are ignored; single
    or double quotes wrapping a value are stripped. Keys already present in
    *env* always win. Returns *env* itself when the file does not exist,
    otherwise a merged copy.
    """
    if not path.exists():
        return env
    combined = dict(env)
    for raw in path.read_text().splitlines():
        entry = raw.strip()
        if not entry or entry.startswith("#") or "=" not in entry:
            continue
        name, _, raw_value = entry.partition("=")
        name = name.strip()
        cleaned = raw_value.strip().strip('"').strip("'")
        if name and name not in combined:
            combined[name] = cleaned
    return combined
|
||||
|
||||
|
||||
def run_command(command: list[str], env: dict[str, str]) -> dict[str, Any]:
    """Run *command* from the repo root and capture its outcome as a dict.

    Never raises on a non-zero exit status; callers inspect ``ok`` and
    ``returncode`` instead. Both output streams are captured and stripped.
    """
    completed = subprocess.run(
        command,
        cwd=ROOT,
        env=env,
        text=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=False,
    )
    rc = completed.returncode
    return {
        "command": command,
        "returncode": rc,
        "stdout": completed.stdout.strip(),
        "stderr": completed.stderr.strip(),
        "ok": rc == 0,
    }
|
||||
|
||||
|
||||
def read_json(path: Path) -> Any | None:
|
||||
if not path.exists():
|
||||
return None
|
||||
return json.loads(path.read_text())
|
||||
|
||||
|
||||
def fetch_json_url(url: str, timeout: int = 30) -> Any:
    """GET *url* with the repo-specific User-Agent and decode the JSON body."""
    request = urllib.request.Request(url, headers={"User-Agent": "dbis-cwusdc-etherscan-dossier/1.0"})
    with urllib.request.urlopen(request, timeout=timeout) as response:
        body = response.read()
    return json.loads(body.decode("utf-8"))
|
||||
|
||||
|
||||
def load_etherscan_chainlist() -> dict[str, Any]:
    """Fetch the Etherscan V2 chainlist and index it by chain id.

    Network failures are captured in the returned dict (``available: False``)
    rather than raised, so the dossier can still be assembled offline.
    """
    try:
        payload = fetch_json_url(ETHERSCAN_CHAINLIST_URL)
    except Exception as exc:  # noqa: BLE001 - dossier should capture diagnostics instead of crashing
        return {
            "url": ETHERSCAN_CHAINLIST_URL,
            "available": False,
            "error": str(exc),
            "totalcount": None,
            "supportedChainIds": [],
            "statusByChainId": {},
        }

    is_dict = isinstance(payload, dict)
    raw_result = payload.get("result") if is_dict else None
    entries = raw_result if isinstance(raw_result, list) else []
    indexed: dict[str, Any] = {}
    for entry in entries:
        if not isinstance(entry, dict) or entry.get("chainid") is None:
            continue
        indexed[str(entry.get("chainid"))] = {
            "chainname": entry.get("chainname"),
            "blockexplorer": entry.get("blockexplorer"),
            "apiurl": entry.get("apiurl"),
            "status": entry.get("status"),
            "comment": entry.get("comment"),
        }
    return {
        "url": ETHERSCAN_CHAINLIST_URL,
        "available": True,
        "comments": payload.get("comments") if is_dict else None,
        "totalcount": payload.get("totalcount") if is_dict else len(entries),
        # Numeric sort where the id is all digits, lexical otherwise.
        "supportedChainIds": sorted(indexed, key=lambda cid: int(cid) if cid.isdigit() else cid),
        "statusByChainId": indexed,
    }
|
||||
|
||||
|
||||
def human_token_value(raw: Any, token_address: str | None) -> str | None:
|
||||
try:
|
||||
raw_int = int(str(raw))
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
decimals = 18 if token_address == "ETH" else 6 if token_address and token_address.lower() == CWUSDC.lower() else 18
|
||||
whole = raw_int // (10**decimals)
|
||||
frac = str(raw_int % (10**decimals)).rjust(decimals, "0").rstrip("0")
|
||||
return f"{whole}" + (f".{frac}" if frac else "")
|
||||
|
||||
|
||||
def human_wei(raw: Any) -> str | None:
|
||||
try:
|
||||
raw_int = int(str(raw))
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
whole = raw_int // (10**18)
|
||||
frac = str(raw_int % (10**18)).rjust(18, "0").rstrip("0")
|
||||
return f"{whole}" + (f".{frac}" if frac else "")
|
||||
|
||||
|
||||
def normalize_deposit_row(row: dict[str, Any]) -> dict[str, Any]:
    """Project an Etherscan deposit-tx row onto the dossier's stable schema.

    Adds human-readable ETH/token amounts next to the raw integer strings;
    missing fields come through as None.
    """
    token = row.get("tokenAddress")
    native_raw = row.get("value")
    token_raw = row.get("tokenValue")
    return {
        "hash": row.get("hash"),
        "l1TransactionHash": row.get("L1transactionhash"),
        "timeStamp": row.get("timeStamp"),
        "from": row.get("from"),
        "to": row.get("to"),
        "valueRaw": native_raw,
        "valueEth": human_wei(native_raw),
        "tokenAddress": token,
        "tokenValueRaw": token_raw,
        "tokenValueUnits": human_token_value(token_raw, token),
        "txreceiptStatus": row.get("txreceipt_status"),
        "isError": row.get("isError"),
    }
|
||||
|
||||
|
||||
def etherscan_v2_call(params: dict[str, str], api_key: str) -> dict[str, Any]:
    """Call the Etherscan V2 API and normalize the response envelope.

    The real API key is used for the request but redacted in the URL that is
    recorded in the returned evidence. A string "No transactions found" result
    is treated as success with an empty list; transport errors are captured in
    ``error`` instead of raised.
    """
    real_url = f"{ETHERSCAN_V2_API}?{urllib.parse.urlencode({**params, 'apikey': api_key})}"
    safe_url = f"{ETHERSCAN_V2_API}?{urllib.parse.urlencode({**params, 'apikey': 'REDACTED'})}"
    try:
        payload = fetch_json_url(real_url)
    except Exception as exc:  # noqa: BLE001 - capture diagnostics instead of crashing the dossier
        return {"url": safe_url, "ok": False, "error": str(exc), "status": None, "message": None, "result": None}
    if isinstance(payload, dict):
        status = str(payload.get("status"))
        message = payload.get("message")
        result = payload.get("result")
    else:
        status = message = result = None
    empty_result = isinstance(result, str) and "No transactions found" in result
    return {
        "url": safe_url,
        "ok": status == "1" or empty_result,
        "status": status,
        "message": message,
        "result": [] if empty_result else result,
        "error": None,
    }
|
||||
|
||||
|
||||
def load_l2_deposit_evidence(api_key: str, chainlist: dict[str, Any], address: str) -> dict[str, Any]:
    """Collect Etherscan-indexed L2 deposit evidence for *address*.

    For each chain in ``L2_DEPOSIT_CHAINS`` that appears in the Etherscan V2
    chainlist, fetches up to 10 recent deposit transactions and keeps a
    normalized sample of the newest one. Returns an unchecked stub when no
    API key is configured.
    """
    if not api_key:
        return {
            "checked": False,
            "reason": "ETHERSCAN_API_KEY is not set.",
            "address": address,
            "chains": {},
        }

    supported = set(chainlist.get("statusByChainId", {}))
    chains: dict[str, Any] = {}
    for chain_id, chain_name in L2_DEPOSIT_CHAINS.items():
        # Only query chains Etherscan V2 actually serves; record the boundary
        # for the rest instead of issuing a doomed API call.
        if chain_id not in supported:
            chains[chain_id] = {
                "chainName": chain_name,
                "checked": False,
                "reason": "chain is not present in Etherscan V2 chainlist",
            }
            continue
        response = etherscan_v2_call(
            {
                "chainid": chain_id,
                "module": "account",
                "action": "getdeposittxs",
                "address": address,
                "page": "1",
                "offset": "10",
                "sort": "desc",
            },
            api_key,
        )
        result = response.get("result")
        rows = result if isinstance(result, list) else []
        chains[chain_id] = {
            "chainName": chain_name,
            "checked": True,
            "ok": response.get("ok"),
            "status": response.get("status"),
            "message": response.get("message"),
            "sampleCount": len(rows),
            # Only the newest row is kept, normalized for the report.
            "latest": normalize_deposit_row(rows[0]) if rows else None,
            "url": response.get("url"),
            "error": response.get("error"),
        }

    return {
        "checked": True,
        "address": address,
        "scope": "Etherscan-indexed L2 deposits by address. This is bridge provenance only and does not set Mainnet cWUSDC USD Value.",
        "rawUnitNote": "tokenValue is returned as raw token units. ETH uses 18 decimals; ERC-20 rows must be normalized with that token contract's decimals.",
        "chains": chains,
    }
|
||||
|
||||
|
||||
def load_contract_source_verification(api_key: str, address: str) -> dict[str, Any]:
    """Check whether the Mainnet contract at *address* has verified source.

    Queries Etherscan V2 ``contract.getsourcecode`` on chain 1 and summarizes
    the first result entry. Returns an unchecked stub when no API key is set.
    """
    if not api_key:
        return {
            "checked": False,
            "reason": "ETHERSCAN_API_KEY is not set.",
            "address": address,
            "verified": False,
        }
    response = etherscan_v2_call(
        {
            "chainid": "1",
            "module": "contract",
            "action": "getsourcecode",
            "address": address,
        },
        api_key,
    )
    result = response.get("result")
    # getsourcecode returns a one-element list of dicts on success.
    entry = result[0] if isinstance(result, list) and result and isinstance(result[0], dict) else {}
    source_code = str(entry.get("SourceCode") or "")
    abi = str(entry.get("ABI") or "")
    contract_name = str(entry.get("ContractName") or "")
    return {
        "checked": True,
        "address": address,
        "ok": response.get("ok"),
        "status": response.get("status"),
        "message": response.get("message"),
        # Etherscan signals "not verified" via a sentinel string in the ABI field.
        "verified": bool(source_code and contract_name and abi and abi != "Contract source code not verified"),
        "contractName": contract_name or None,
        "compilerVersion": entry.get("CompilerVersion") or None,
        "optimizationUsed": entry.get("OptimizationUsed") or None,
        "runs": entry.get("Runs") or None,
        "constructorArgumentsPresent": bool(entry.get("ConstructorArguments")),
        "evmVersion": entry.get("EVMVersion") or None,
        "licenseType": entry.get("LicenseType") or None,
        "proxy": entry.get("Proxy") or None,
        "implementation": entry.get("Implementation") or None,
        "sourceCodeBytes": len(source_code),
        "abiAvailable": bool(abi and abi != "Contract source code not verified"),
        "url": response.get("url"),
        "error": response.get("error"),
    }
|
||||
|
||||
|
||||
def rel(path: Path) -> str:
    """Render *path* relative to the repo root (raises ValueError if outside ROOT)."""
    relative = path.relative_to(ROOT)
    return str(relative)
|
||||
|
||||
|
||||
def build(args: argparse.Namespace) -> dict[str, Any]:
    """Assemble the full dossier payload.

    When ``args.refresh`` is set, first re-runs the read-only monitor/proof
    generators; then aggregates their JSON artifacts plus live Etherscan
    chainlist, L2 deposit, and contract-source evidence into one
    submission-ready structure.
    """
    env = load_dotenv(ROOT / ".env", dict(os.environ))
    etherscan_api_key = env.get("ETHERSCAN_API_KEY", "")
    # CLI flag wins, then .env/process env, then the committed fallback address.
    l2_deposit_address = args.l2_deposit_address or env.get("DEPLOYER_ADDRESS") or DEPLOYER_FALLBACK
    commands: list[dict[str, Any]] = []
    if args.refresh:
        # Read-only generators/monitors; failures are recorded, not raised.
        commands = [
            run_command(["python3", "scripts/verify/generate-cwusdc-supply-circulating-attestation.py"], env),
            run_command(["python3", "scripts/verify/generate-global-cusdc-cwusdc-family-supply-proof.py"], env),
            run_command(["python3", "scripts/verify/audit-cusdc-cwusdc-etherscan-feeds.py"], env),
            run_command(["python3", "scripts/verify/monitor-cwusdc-etherscan-value-propagation.py"], env),
            run_command(["bash", "scripts/verify/check-cwusdc-etherscan-prereq-urls.sh"], env),
        ]

    artifacts = {key: read_json(path) for key, path in ARTIFACTS.items()}
    propagation = artifacts["propagation"] or {}
    supply = artifacts["mainnetSupply"] or {}
    global_family = artifacts["globalFamilySupply"] or {}
    feed_audit = artifacts["feedAudit"] or {}
    chainlist = load_etherscan_chainlist()
    l2_deposits = load_l2_deposit_evidence(etherscan_api_key, chainlist, l2_deposit_address)
    contract_source = load_contract_source_verification(etherscan_api_key, CWUSDC)
    # Chain ids present in the global family proof, numerically sorted where possible.
    family_chain_ids = sorted(
        {str(item.get("chainId")) for item in global_family.get("entries", []) if isinstance(item, dict) and item.get("chainId") is not None},
        key=lambda value: int(value) if value.isdigit() else value,
    )
    supported_family_chain_ids = [chain_id for chain_id in family_chain_ids if chain_id in chainlist.get("statusByChainId", {})]
    unsupported_family_chain_ids = [chain_id for chain_id in family_chain_ids if chain_id not in chainlist.get("statusByChainId", {})]

    # Start from the propagation monitor's blockers; append local command failures.
    blockers = list(((propagation.get("summary") or {}).get("blockers") or []))
    command_failures = [item for item in commands if not item["ok"]]
    for item in command_failures:
        blockers.append("Command failed: " + " ".join(item["command"]))

    ready_evidence = {
        "mainnetSupplyAttestation": bool(supply.get("supply")),
        "globalFamilySupplyContext": bool(global_family.get("summary")),
        "chain138MainnetFeedAudit": bool(feed_audit.get("canonicalRelationship")),
        "mainnetContractSourceVerified": bool(contract_source.get("verified")),
        "propagationMonitor": bool(propagation.get("checks")),
        # None when commands were not run this invocation (--no-refresh).
        "publicPrereqUrls": not any(
            item["command"] == ["bash", "scripts/verify/check-cwusdc-etherscan-prereq-urls.sh"] and not item["ok"]
            for item in commands
        )
        if commands
        else None,
        "documentationPacket": {key: path.exists() for key, path in DOCS.items()},
    }

    next_actions = []
    if blockers:
        next_actions.extend(
            [
                "Submit/update Etherscan token profile for the exact Ethereum Mainnet cWUSDC contract.",
                "Submit/update CoinGecko and CoinMarketCap listings with Mainnet supply proof, DEX evidence, and bridge-family context.",
                "Use the global family supply proof only as context; use the Ethereum Mainnet cWUSDC attestation as the token-page supply basis.",
                "Re-run this dossier after each external approval or tracker response.",
            ]
        )
    else:
        next_actions.append("No blockers detected by local monitors; capture Etherscan screenshots and continue propagation monitoring.")

    return {
        "schema": "cwusdc-etherscan-value-dossier/v1",
        # dt.UTC requires Python 3.11+.
        "generatedAt": dt.datetime.now(dt.UTC).isoformat().replace("+00:00", "Z"),
        "purpose": "Single submission and monitoring packet for making Etherscan show USD value for Ethereum Mainnet cWUSDC.",
        "target": {
            "network": "Ethereum Mainnet",
            "chainId": 1,
            "contract": CWUSDC,
            "caip19": f"eip155:1/erc20:{CWUSDC}",
            "name": "Wrapped cUSDC",
            "symbol": "cWUSDC",
            "decimals": 6,
        },
        "readiness": {
            "readyForExternalSubmission": ready_evidence["mainnetSupplyAttestation"]
            and ready_evidence["chain138MainnetFeedAudit"]
            and ready_evidence["mainnetContractSourceVerified"]
            and ready_evidence["propagationMonitor"],
            "etherscanValueReady": (propagation.get("summary") or {}).get("etherscanValueReady"),
            "coinGeckoPriceReady": (propagation.get("summary") or {}).get("coingeckoPriceReady"),
            "blockers": blockers,
        },
        "evidence": {
            "artifacts": {key: rel(path) for key, path in ARTIFACTS.items()},
            "docs": {key: rel(path) for key, path in DOCS.items()},
            "readyEvidence": ready_evidence,
            "mainnetContractSourceVerification": contract_source,
            "mainnetSupply": supply.get("supply"),
            # Fall back to a built-in caveat when the proof artifact supplies none.
            "globalFamilyWarning": (global_family.get("caveats") or ["Global family supply is context only; do not use it as Ethereum Etherscan token-page supply."])[0],
            "globalFamilySummary": global_family.get("summary"),
            "feedRelationship": feed_audit.get("canonicalRelationship"),
            "etherscanChainlist": {
                **chainlist,
                "familyChainIds": family_chain_ids,
                "etherscanSupportedFamilyChainIds": supported_family_chain_ids,
                "notEtherscanSupportedFamilyChainIds": unsupported_family_chain_ids,
                "chain138SupportedByEtherscanV2": "138" in chainlist.get("statusByChainId", {}),
                "interpretation": "Only chains present in Etherscan V2 chainlist should be described as first-class Etherscan-family API evidence. Chain 138 remains provenance/context evidence unless Etherscan adds chainid 138.",
            },
            "l2DepositTransactions": l2_deposits,
        },
        "commands": commands,
        "nextActions": next_actions,
    }
|
||||
|
||||
|
||||
def write_md(payload: dict[str, Any], path: Path) -> None:
    """Render the dossier payload as the Markdown companion report at *path*."""
    readiness = payload["readiness"]
    evidence = payload["evidence"]
    target = payload["target"]
    lines = [
        "# cWUSDC Etherscan Value Dossier",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Target: `{target['contract']}`",
        f"- CAIP-19: `{target['caip19']}`",
        f"- Ready for external submission: `{readiness['readyForExternalSubmission']}`",
        f"- Etherscan value ready: `{readiness['etherscanValueReady']}`",
        f"- CoinGecko price ready: `{readiness['coinGeckoPriceReady']}`",
        "",
        "## Blockers",
        "",
    ]
    if readiness["blockers"]:
        lines.extend(f"- {item}" for item in readiness["blockers"])
    else:
        lines.append("- None detected by this dossier.")

    lines.extend(
        [
            "",
            "## Evidence Artifacts",
            "",
            "| Artifact | Path |",
            "|---|---|",
        ]
    )
    for key, value in evidence["artifacts"].items():
        lines.append(f"| `{key}` | `{value}` |")

    lines.extend(["", "## Documentation Packet", "", "| Document | Path |", "|---|---|"])
    for key, value in evidence["docs"].items():
        lines.append(f"| `{key}` | `{value}` |")

    # Verification, supply, chainlist, and L2-deposit boundary sections;
    # missing sub-dicts degrade to `None` cells via the `or {}` guards.
    lines.extend(
        [
            "",
            "## Mainnet Contract Verification",
            "",
            f"- Checked: `{(evidence['mainnetContractSourceVerification'] or {}).get('checked')}`",
            f"- Verified: `{(evidence['mainnetContractSourceVerification'] or {}).get('verified')}`",
            f"- Contract name: `{(evidence['mainnetContractSourceVerification'] or {}).get('contractName')}`",
            f"- Compiler: `{(evidence['mainnetContractSourceVerification'] or {}).get('compilerVersion')}`",
            f"- License: `{(evidence['mainnetContractSourceVerification'] or {}).get('licenseType')}`",
            f"- Proxy: `{(evidence['mainnetContractSourceVerification'] or {}).get('proxy')}`",
            "",
            "## Supply Boundary",
            "",
            f"- Ethereum Mainnet cWUSDC supply basis: `{(evidence['mainnetSupply'] or {}).get('totalSupplyUnits')}`",
            f"- Circulating supply basis: `{(evidence['mainnetSupply'] or {}).get('circulatingSupplyUnits')}`",
            f"- Global family warning: {evidence['globalFamilyWarning']}",
            "",
            "## Etherscan Chainlist Boundary",
            "",
            f"- Etherscan V2 chainlist total: `{(evidence['etherscanChainlist'] or {}).get('totalcount')}`",
            f"- Family chain IDs: `{', '.join((evidence['etherscanChainlist'] or {}).get('familyChainIds') or [])}`",
            f"- Etherscan-supported family chain IDs: `{', '.join((evidence['etherscanChainlist'] or {}).get('etherscanSupportedFamilyChainIds') or [])}`",
            f"- Not Etherscan-supported family chain IDs: `{', '.join((evidence['etherscanChainlist'] or {}).get('notEtherscanSupportedFamilyChainIds') or [])}`",
            f"- Chain 138 supported by Etherscan V2: `{(evidence['etherscanChainlist'] or {}).get('chain138SupportedByEtherscanV2')}`",
            "",
            "## L2 Deposit Transaction Boundary",
            "",
            f"- Address checked: `{(evidence['l2DepositTransactions'] or {}).get('address')}`",
            f"- Checked: `{(evidence['l2DepositTransactions'] or {}).get('checked')}`",
            "- Scope: Etherscan-indexed OP/Arbitrum deposit provenance only; it does not set Mainnet cWUSDC USD Value.",
            "- Unit note: `value` is raw wei; `tokenValue` is raw token units. `1195403000000000` in `value` is `0.001195403 ETH`; `598200000000000` with `tokenAddress=ETH` is `0.0005982 ETH`.",
            "",
            "| Chain | Checked | Sample deposits | Latest tx | Native value | Token value |",
            "|---|---:|---:|---|---:|---:|",
        ]
    )
    for chain_id, item in (evidence["l2DepositTransactions"].get("chains") or {}).items():
        latest = item.get("latest") or {}
        lines.append(
            f"| `{chain_id}` {item.get('chainName')} | `{item.get('checked')}` | `{item.get('sampleCount', 0)}` | `{latest.get('hash')}` | `{latest.get('valueEth')}` ETH | `{latest.get('tokenValueUnits')}` {latest.get('tokenAddress') or ''} |"
        )
    lines.extend(
        [
            "",
            "## Next Actions",
            "",
        ]
    )
    lines.extend(f"- {item}" for item in payload["nextActions"])

    # Only emitted when this invocation actually re-ran the generators.
    if payload["commands"]:
        lines.extend(["", "## Command Results", "", "| Command | Exit |", "|---|---:|"])
        for item in payload["commands"]:
            lines.append(f"| `{' '.join(item['command'])}` | `{item['returncode']}` |")

    path.write_text("\n".join(lines) + "\n")
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: build the dossier, write JSON + Markdown, report status.

    Returns 1 only under ``--strict`` when blockers remain; otherwise 0.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--no-refresh", action="store_true", help="Only aggregate existing reports; do not rerun checks.")
    parser.add_argument("--json-out", type=Path, default=REPORT_JSON)
    parser.add_argument("--md-out", type=Path, default=REPORT_MD)
    parser.add_argument("--l2-deposit-address", default="", help="Address to check with Etherscan getdeposittxs.")
    parser.add_argument("--strict", action="store_true")
    args = parser.parse_args()
    # build() consumes a positive "refresh" flag rather than the CLI negation.
    args.refresh = not args.no_refresh

    payload = build(args)
    json_path, md_path = args.json_out, args.md_out
    json_path.parent.mkdir(parents=True, exist_ok=True)
    json_path.write_text(json.dumps(payload, indent=2) + "\n")
    write_md(payload, md_path)
    print(f"Wrote {json_path.relative_to(ROOT)}")
    print(f"Wrote {md_path.relative_to(ROOT)}")

    readiness = payload["readiness"]
    print(f"readyForExternalSubmission={readiness['readyForExternalSubmission']}")
    blockers = readiness["blockers"]
    if blockers:
        print("Blockers: " + "; ".join(blockers))
        if args.strict:
            return 1
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
86
scripts/verify/build-cwusdc-institutional-evidence-bundle.sh
Executable file
86
scripts/verify/build-cwusdc-institutional-evidence-bundle.sh
Executable file
@@ -0,0 +1,86 @@
|
||||
#!/usr/bin/env bash
# Build the cWUSDC institutional evidence bundle: tar up the required (and any
# present optional) evidence files under reports/status, plus a sha256 file.
# Fails with a listing on stderr when any required file is missing.
set -euo pipefail

# Run from the repo root regardless of where the script was invoked.
PROJECT_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$PROJECT_ROOT"

# Bundle date tag is overridable for reproducible re-builds.
DATE_TAG="${CWUSDC_EVIDENCE_BUNDLE_DATE:-$(date -u +%Y%m%d)}"
OUT_DIR="reports/status"
BUNDLE="${OUT_DIR}/cwusdc-institutional-evidence-bundle-${DATE_TAG}.tar.gz"
CHECKSUM="${OUT_DIR}/cwusdc-institutional-evidence-bundle-${DATE_TAG}.sha256"

# Required evidence: docs packets plus the latest generated status reports.
FILES=(
  "docs/04-configuration/etherscan/CWUSDC_EVIDENCE_BUNDLE_INDEX.md"
  "docs/04-configuration/etherscan/CWUSDC_SUPPLY_AND_CIRCULATING_METHODOLOGY.md"
  "docs/04-configuration/etherscan/CWUSDC_SECURITY_AND_AUDIT_DISCLOSURE.md"
  "docs/04-configuration/etherscan/CWUSDC_PROVIDER_RESPONSE_TRACKER.md"
  "docs/04-configuration/etherscan/CWUSDC_LIQUIDITY_READINESS_NO_BROADCAST_PLAN.md"
  "docs/04-configuration/CWUSDC_PROVIDER_SUBMISSION_PACKET.md"
  "docs/04-configuration/etherscan/CWUSDC_MAINNET_ETHERSCAN_PROFILE_PACKET.md"
  "docs/04-configuration/etherscan/CWUSDC_ETHERSCAN_E2E_RECOMMENDATIONS.md"
  "docs/04-configuration/etherscan/CWUSDC_ETHERSCAN_VALUE_EXECUTION_PLAN.md"
  "docs/04-configuration/etherscan/CWUSDC_ETHERSCAN_BRIDGE_CROSSCHAIN_LAYER_MAP.md"
  "docs/04-configuration/coingecko/CWUSDC_MAINNET_TRACKER_SUBMISSION_PACKET.md"
  "docs/04-configuration/dexscreener/CWUSDC_DEXSCREENER_INDEXING_AND_PROFILE_PACKET_20260509.md"
  "docs/04-configuration/metamask/METAMASK_ASSET_PRICE_PROVIDER_SUBMISSION_MATRIX.md"
  "reports/status/cwusdc-etherscan-value-dossier-latest.md"
  "reports/status/cwusdc-etherscan-value-dossier-latest.json"
  "reports/status/cwusdc-supply-circulating-attestation-latest.md"
  "reports/status/cwusdc-supply-circulating-attestation-latest.json"
  "reports/status/global-cusdc-cwusdc-family-supply-proof-latest.md"
  "reports/status/global-cusdc-cwusdc-family-supply-proof-latest.json"
  "reports/status/cusdc-cwusdc-etherscan-feed-audit-latest.md"
  "reports/status/cusdc-cwusdc-etherscan-feed-audit-latest.json"
  "reports/status/cwusdc-mainnet-role-audit-latest.md"
  "reports/status/cwusdc-mainnet-role-audit-latest.json"
  "reports/status/cwusdc-role-deployment-appendix-latest.md"
  "reports/status/cwusdc-role-deployment-appendix-latest.json"
  "reports/status/cwusdc-institutional-doc-link-check-latest.md"
  "reports/status/cwusdc-institutional-doc-link-check-latest.json"
  "reports/status/cwusdc-provider-submission-prefill-latest.md"
  "reports/status/cwusdc-provider-submission-prefill-latest.json"
  "reports/status/cwusdc-provider-handoff-latest.md"
  "reports/status/cwusdc-provider-handoff-latest.json"
  "reports/status/cwusdc-external-trackers-live-latest.md"
  "reports/status/cwusdc-external-trackers-live-latest.json"
  "reports/status/cwusdc-institutional-readiness-review-20260511.md"
  "reports/status/cwusdc-institutional-hardening-completion-20260511.md"
)

# Optional evidence: included when present, never a failure when absent.
OPTIONAL_FILES=(
  "reports/status/cwusdc-provider-monitoring-snapshot-latest.md"
  "reports/status/cwusdc-provider-monitoring-snapshot-latest.json"
  "reports/status/screenshots/cwusdc-etherscan-token-page.png"
  "reports/status/screenshots/cwusdc-dbis-token-directory.png"
  "reports/status/screenshots/cwusdc-logo-url.png"
  "reports/status/screenshots/cwusdc-geckoterminal-univ3-pool.png"
)

missing=()
present=()
for file in "${FILES[@]}"; do
  if [[ -f "$file" ]]; then
    present+=("$file")
  else
    missing+=("$file")
  fi
done
for file in "${OPTIONAL_FILES[@]}"; do
  if [[ -f "$file" ]]; then
    present+=("$file")
  fi
done

# Any missing required file aborts the build before anything is written.
if ((${#missing[@]} > 0)); then
  printf 'Missing required evidence files:\n' >&2
  printf ' - %s\n' "${missing[@]}" >&2
  exit 1
fi

mkdir -p "$OUT_DIR"
tar -czf "$BUNDLE" "${present[@]}"
sha256sum "$BUNDLE" > "$CHECKSUM"

printf 'Wrote %s\n' "$BUNDLE"
printf 'Wrote %s\n' "$CHECKSUM"
cat "$CHECKSUM"
|
||||
266
scripts/verify/build-cwusdc-provider-handoff-report.py
Executable file
266
scripts/verify/build-cwusdc-provider-handoff-report.py
Executable file
@@ -0,0 +1,266 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Build a concise cWUSDC provider handoff report from latest probe JSON."""
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
DEFAULT_PREREQ_JSON = ROOT / "reports/status/cwusdc-etherscan-prereq-urls-latest.json"
|
||||
DEFAULT_TRACKERS_JSON = ROOT / "reports/status/cwusdc-external-trackers-live-latest.json"
|
||||
DEFAULT_LIQUIDITY_JSON = ROOT / "reports/status/token-aggregation-liquidity-gap-funding-plan-latest.json"
|
||||
DEFAULT_CMC_SANITY_JSON = ROOT / "reports/status/cmc-provider-report-sanity-latest.json"
|
||||
DEFAULT_MD = ROOT / "reports/status/cwusdc-provider-handoff-latest.md"
|
||||
DEFAULT_JSON = ROOT / "reports/status/cwusdc-provider-handoff-latest.json"
|
||||
|
||||
|
||||
def read_json(path: Path) -> Any | None:
|
||||
if not path.exists():
|
||||
return None
|
||||
return json.loads(path.read_text())
|
||||
|
||||
|
||||
def rel(path: Path) -> str:
    """Best-effort repo-relative rendering of *path*; falls back to the path as given."""
    if path.is_relative_to(ROOT):
        return str(path.relative_to(ROOT))
    return str(path)
|
||||
|
||||
|
||||
def first(obj: dict[str, Any] | None, path: list[str], default: Any = None) -> Any:
|
||||
cur: Any = obj
|
||||
for part in path:
|
||||
if not isinstance(cur, dict):
|
||||
return default
|
||||
cur = cur.get(part)
|
||||
return cur if cur is not None else default
|
||||
|
||||
|
||||
def table(headers: list[str], rows: list[list[Any]]) -> str:
    """Render *headers* and *rows* as a Markdown table string."""

    def cell(value: Any) -> str:
        # Structured values serialize deterministically; Markdown control
        # characters are escaped so a cell cannot break the table layout.
        if isinstance(value, (dict, list)):
            value = json.dumps(value, sort_keys=True)
        text = str(value)
        return text.replace("|", "\\|").replace("\n", "<br>")

    header_line = f"| {' | '.join(cell(header) for header in headers)} |"
    separator_line = f"| {' | '.join('---' for _ in headers)} |"
    data_lines = [f"| {' | '.join(cell(value) for value in row)} |" for row in rows]
    return "\n".join([header_line, separator_line, *data_lines])
|
||||
|
||||
|
||||
def build_payload(
    prereq: Any,
    trackers: Any,
    liquidity: Any,
    cmc_sanity: Any,
    prereq_path: Path,
    trackers_path: Path,
    liquidity_path: Path,
    cmc_sanity_path: Path,
) -> dict[str, Any]:
    """Assemble the handoff payload (inputs, summary, blockers) from probe JSONs.

    Any of the four evidence inputs may be ``None`` (file missing); each missing
    input is reported as a repo-controlled blocker instead of raising.
    """
    tracker_summary = first(trackers, ["summary"], {})
    failed_required = tracker_summary.get("failedRequiredIds") if isinstance(tracker_summary, dict) else []
    blockers = []

    # Missing evidence files: each maps to a concrete, rerunnable repo-side action.
    if prereq is None:
        blockers.append({
            "id": "missing_prereq_url_evidence",
            "type": "repo_controlled",
            "status": "blocked",
            "nextAction": "Run check-cwusdc-etherscan-prereq-urls.sh with JSON output.",
        })
    if trackers is None:
        blockers.append({
            "id": "missing_external_tracker_evidence",
            "type": "repo_controlled",
            "status": "blocked",
            "nextAction": "Run check-cwusdc-external-trackers-live.sh with JSON output.",
        })
    if liquidity is None:
        blockers.append({
            "id": "missing_liquidity_planner_evidence",
            "type": "repo_controlled",
            "status": "blocked",
            "nextAction": "Run plan-token-aggregation-liquidity-gap-funding.mjs.",
        })
    if cmc_sanity is None:
        blockers.append({
            "id": "missing_cmc_report_sanity_evidence",
            "type": "repo_controlled",
            "status": "blocked",
            "nextAction": "Run check-cmc-provider-report-sanity.py.",
        })

    # Evidence present but repo-controlled URL prerequisites failing.
    if prereq is not None and not first(prereq, ["summary", "allPassed"], False):
        blockers.append({
            "id": "repo_public_urls",
            "type": "repo_controlled",
            "status": "blocked",
            "nextAction": "Fix failing d-bis.org prerequisite URLs before external profile submission.",
        })

    # Each failed required external tracker surfaces as its own advisory blocker.
    for failed in failed_required or []:
        blockers.append({
            "id": failed,
            "type": "external_provider",
            "status": "blocked",
            "nextAction": "Submit/update provider packet or wait for provider indexing, then rerun tracker probe.",
        })

    if first(liquidity, ["summary", "nonEvmFundingRequirementRows"], 0):
        blockers.append({
            "id": "non_evm_funding_requirements",
            "type": "operator_bound",
            "status": "open",
            "nextAction": "Bind non-EVM wallets, asset IDs, and minimum funding targets before making non-EVM liquidity claims.",
        })
    if first(cmc_sanity, ["summary", "warningCount"], 0):
        blockers.append({
            "id": "cmc_report_sanity_warnings",
            "type": "repo_advisory",
            "status": "open",
            "nextAction": "Review CMC-shaped report warnings before using CMC fields as listing-quality liquidity or quote-asset evidence.",
        })

    repo_ready = bool(first(prereq, ["summary", "allPassed"], False))
    ready_for_etherscan_value = bool(first(trackers, ["summary", "readyForEtherscanUsdValue"], False))

    return {
        "schema": "cwusdc-provider-handoff/v1",
        "generatedAt": datetime.now(timezone.utc).isoformat(),
        "inputs": {
            "prereq": rel(prereq_path),
            "trackers": rel(trackers_path),
            "liquidity": rel(liquidity_path),
            "cmcSanity": rel(cmc_sanity_path),
        },
        "summary": {
            "repoControlledPrereqsPassed": repo_ready,
            "externalTrackersAllLive": bool(first(trackers, ["summary", "allTrackersLive"], False)),
            "readyForEtherscanUsdValue": ready_for_etherscan_value,
            "externalRequiredPassed": first(trackers, ["summary", "requiredPassedCount"], None),
            "externalRequiredCount": first(trackers, ["summary", "requiredCount"], None),
            "liquidityRows": first(liquidity, ["summary", "rows"], None),
            "nonEvmFundingRequirementRows": first(liquidity, ["summary", "nonEvmFundingRequirementRows"], None),
            "cmcSanityWarningCount": first(cmc_sanity, ["summary", "warningCount"], None),
            "cmcPromotedTokenCount": first(cmc_sanity, ["summary", "promotedTokenCount"], None),
            "blockerCount": len(blockers),
        },
        "blockers": blockers,
    }
|
||||
|
||||
|
||||
def write_markdown(payload: dict[str, Any], prereq: Any, trackers: Any, liquidity: Any, cmc_sanity: Any, path: Path) -> None:
    """Render the handoff payload plus raw probe details as Markdown at *path*.

    Each probe section degrades to a fixed placeholder string when its source
    JSON was missing, so the report is always complete and well-formed.
    """
    prereq_checks = first(prereq, ["checks"], []) or []
    tracker_checks = first(trackers, ["checks"], []) or []
    liquidity_summary = first(liquidity, ["summary"], {}) or {}
    cmc_summary = first(cmc_sanity, ["summary"], {}) or {}
    cmc_warnings = first(cmc_sanity, ["warnings"], []) or []

    lines = [
        "# cWUSDC Provider Handoff Report",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Repo-controlled prerequisites passed: `{payload['summary']['repoControlledPrereqsPassed']}`",
        f"- External trackers all live: `{payload['summary']['externalTrackersAllLive']}`",
        f"- Ready for Etherscan USD Value path: `{payload['summary']['readyForEtherscanUsdValue']}`",
        "",
        "## Inputs",
        "",
        table(["Input", "Path"], [[key, value] for key, value in payload["inputs"].items()]),
        "",
        "## Repo-Controlled URL Prerequisites",
        "",
        table(
            ["URL", "Passed", "HTTP", "Attempts", "curl status"],
            [[
                c.get("url"),
                f"`{c.get('passed')}`",
                f"`{c.get('status')}`",
                f"`{c.get('attempts', '-')}`",
                f"`{c.get('curlStatus', '-')}`",
            ] for c in prereq_checks],
        ) if prereq_checks else "No prerequisite URL JSON found.",
        "",
        "## External Tracker State",
        "",
        table(
            ["Surface", "Passed", "HTTP", "Details"],
            [[
                c.get("id"),
                f"`{c.get('passed')}`",
                f"`{c.get('status')}`",
                # Prefer detail strings, then the probe error, then a dash.
                "; ".join(c.get("details") or []) or c.get("error") or "-",
            ] for c in tracker_checks],
        ) if tracker_checks else "No external tracker JSON found.",
        "",
        "## Liquidity Planner Summary",
        "",
        table(["Metric", "Value"], [[key, value] for key, value in liquidity_summary.items()]),
        "",
        "## CMC Report Sanity",
        "",
        table(["Metric", "Value"], [[key, value] for key, value in cmc_summary.items()]) if cmc_summary else "No CMC sanity JSON found.",
        "",
        table(
            ["ID", "Symbol", "Severity", "Message"],
            [[w.get("id"), w.get("symbol", "-"), w.get("severity"), w.get("message")] for w in cmc_warnings],
        ) if cmc_warnings else "No CMC sanity warnings.",
        "",
        "## Blockers",
        "",
        table(
            ["ID", "Type", "Status", "Next action"],
            [[b["id"], b["type"], b["status"], b["nextAction"]] for b in payload["blockers"]],
        ) if payload["blockers"] else "No current blockers detected.",
        "",
        "## Submission Boundary",
        "",
        "This report is generated from public/read-only repo checks. It does not submit forms, approve tokens, add liquidity, swap, bridge, or broadcast transactions.",
    ]
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text("\n".join(lines) + "\n")
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: load the four probe JSONs, write JSON + Markdown handoff reports."""
    parser = argparse.ArgumentParser(description=__doc__)
    # Every input/output path can be overridden; defaults point at reports/status/.
    parser.add_argument("--prereq-json", type=Path, default=DEFAULT_PREREQ_JSON)
    parser.add_argument("--trackers-json", type=Path, default=DEFAULT_TRACKERS_JSON)
    parser.add_argument("--liquidity-json", type=Path, default=DEFAULT_LIQUIDITY_JSON)
    parser.add_argument("--cmc-sanity-json", type=Path, default=DEFAULT_CMC_SANITY_JSON)
    parser.add_argument("--json-out", type=Path, default=DEFAULT_JSON)
    parser.add_argument("--md-out", type=Path, default=DEFAULT_MD)
    args = parser.parse_args()

    # Missing files load as None; build_payload turns those into blockers.
    prereq = read_json(args.prereq_json)
    trackers = read_json(args.trackers_json)
    liquidity = read_json(args.liquidity_json)
    cmc_sanity = read_json(args.cmc_sanity_json)
    payload = build_payload(
        prereq,
        trackers,
        liquidity,
        cmc_sanity,
        args.prereq_json,
        args.trackers_json,
        args.liquidity_json,
        args.cmc_sanity_json,
    )

    args.json_out.parent.mkdir(parents=True, exist_ok=True)
    args.json_out.write_text(json.dumps(payload, indent=2) + "\n")
    write_markdown(payload, prereq, trackers, liquidity, cmc_sanity, args.md_out)
    print(f"Wrote {rel(args.json_out)}")
    print(f"Wrote {rel(args.md_out)}")
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
207
scripts/verify/build-cwusdc-provider-submission-prefill.py
Normal file
207
scripts/verify/build-cwusdc-provider-submission-prefill.py
Normal file
@@ -0,0 +1,207 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Generate prefilled provider submission packets and screenshot checklist for cWUSDC."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import datetime as dt
|
||||
import json
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
REPORT_JSON = ROOT / "reports" / "status" / "cwusdc-provider-submission-prefill-latest.json"
|
||||
REPORT_MD = ROOT / "reports" / "status" / "cwusdc-provider-submission-prefill-latest.md"
|
||||
|
||||
ASSET = {
|
||||
"network": "Ethereum Mainnet",
|
||||
"chainId": 1,
|
||||
"caip19": "eip155:1/erc20:0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a",
|
||||
"contract": "0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a",
|
||||
"name": "Wrapped cUSDC",
|
||||
"symbol": "cWUSDC",
|
||||
"decimals": 6,
|
||||
"website": "https://d-bis.org/",
|
||||
"tokenDirectory": "https://d-bis.org/gru/tokens",
|
||||
"logo": "https://d-bis.org/tokens/cwusdc.svg",
|
||||
"contactEmail": "submissions@d-bis.org",
|
||||
"supportEmail": "support@d-bis.org",
|
||||
"securityUrl": "https://d-bis.org/security",
|
||||
"nonAffiliation": "cWUSDC is not Circle-issued USDC and should not be represented as an official Circle asset.",
|
||||
}
|
||||
|
||||
DESCRIPTION = (
|
||||
"cWUSDC is the Ethereum Mainnet compliant wrapped transport representation of Chain 138 cUSDC "
|
||||
"in the DBIS GRU asset family. It is used for public-network mirrored settlement, proof, and "
|
||||
"interoperability workflows. cWUSDC is a DBIS/GRU transport asset and is not Circle-issued USDC."
|
||||
)
|
||||
|
||||
PROVIDERS = {
|
||||
"etherscan": {
|
||||
"objective": "Token profile/logo/value evidence submission",
|
||||
"fields": {
|
||||
"contract": ASSET["contract"],
|
||||
"website": ASSET["website"],
|
||||
"email": ASSET["contactEmail"],
|
||||
"logo": ASSET["logo"],
|
||||
"description": DESCRIPTION,
|
||||
},
|
||||
"attachment": "docs/04-configuration/etherscan/CWUSDC_MAINNET_ETHERSCAN_PROFILE_PACKET.md",
|
||||
},
|
||||
"coingecko": {
|
||||
"objective": "Token listing/update with supply and liquidity caveats",
|
||||
"fields": {
|
||||
"chain": ASSET["network"],
|
||||
"contract": ASSET["contract"],
|
||||
"symbol": ASSET["symbol"],
|
||||
"website": ASSET["website"],
|
||||
"logo": ASSET["logo"],
|
||||
"description": DESCRIPTION,
|
||||
},
|
||||
"attachment": "docs/04-configuration/coingecko/CWUSDC_MAINNET_TRACKER_SUBMISSION_PACKET.md",
|
||||
},
|
||||
"coinmarketcap": {
|
||||
"objective": "Token listing/update with DEX discoverability and supply proof",
|
||||
"fields": {
|
||||
"chain": ASSET["network"],
|
||||
"contract": ASSET["contract"],
|
||||
"symbol": ASSET["symbol"],
|
||||
"website": ASSET["website"],
|
||||
"logo": ASSET["logo"],
|
||||
"description": DESCRIPTION,
|
||||
},
|
||||
"attachment": "docs/04-configuration/coingecko/CWUSDC_MAINNET_TRACKER_SUBMISSION_PACKET.md",
|
||||
},
|
||||
"dexscreener": {
|
||||
"objective": "Token profile/indexing support packet",
|
||||
"fields": {
|
||||
"chain": "ethereum",
|
||||
"tokenAddress": ASSET["contract"],
|
||||
"website": ASSET["website"],
|
||||
"logo": ASSET["logo"],
|
||||
"description": DESCRIPTION,
|
||||
},
|
||||
"attachment": "docs/04-configuration/dexscreener/CWUSDC_DEXSCREENER_INDEXING_AND_PROFILE_PACKET_20260509.md",
|
||||
},
|
||||
"metamask": {
|
||||
"objective": "Wallet metadata/price-provider support evidence",
|
||||
"fields": {
|
||||
"assetId": ASSET["caip19"],
|
||||
"chainId": ASSET["chainId"],
|
||||
"address": ASSET["contract"],
|
||||
"symbol": ASSET["symbol"],
|
||||
"logoURI": ASSET["logo"],
|
||||
"description": DESCRIPTION,
|
||||
},
|
||||
"attachment": "docs/04-configuration/metamask/METAMASK_ASSET_PRICE_PROVIDER_SUBMISSION_MATRIX.md",
|
||||
},
|
||||
}
|
||||
|
||||
SCREENSHOTS = [
|
||||
{
|
||||
"id": "etherscan-token-page",
|
||||
"url": f"https://etherscan.io/token/{ASSET['contract']}",
|
||||
"target": "reports/status/screenshots/cwusdc-etherscan-token-page.png",
|
||||
"reason": "Shows verified Mainnet token page and current value/market-cap state.",
|
||||
},
|
||||
{
|
||||
"id": "dbis-token-directory",
|
||||
"url": ASSET["tokenDirectory"],
|
||||
"target": "reports/status/screenshots/cwusdc-dbis-token-directory.png",
|
||||
"reason": "Shows official website token context.",
|
||||
},
|
||||
{
|
||||
"id": "dbis-logo-url",
|
||||
"url": ASSET["logo"],
|
||||
"target": "reports/status/screenshots/cwusdc-logo-url.png",
|
||||
"reason": "Shows hosted token logo asset.",
|
||||
},
|
||||
{
|
||||
"id": "geckoterminal-univ3-pool",
|
||||
"url": "https://www.geckoterminal.com/eth/pools/0x1cf2e685682c7f7bef508f0af15dfb5cdda01ee3",
|
||||
"target": "reports/status/screenshots/cwusdc-geckoterminal-univ3-pool.png",
|
||||
"reason": "Shows indexed public cWUSDC/USDC pool evidence.",
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
def load_optional(path: str) -> Any:
    """Load a repo-relative evidence file; ``.json`` files are parsed, others returned as text.

    Returns ``None`` when the file does not exist so callers can embed an
    "evidence missing" state instead of failing.
    """
    full = ROOT / path
    if not full.exists():
        return None
    # Explicit encoding: evidence files are produced as UTF-8 by sibling
    # tooling; relying on the platform default encoding is not portable.
    if full.suffix == ".json":
        return json.loads(full.read_text(encoding="utf-8"))
    return full.read_text(encoding="utf-8")
|
||||
|
||||
|
||||
def build() -> dict[str, Any]:
    """Assemble the prefill payload: asset facts, provider fields, screenshot status."""
    screenshots = []
    for item in SCREENSHOTS:
        entry = dict(item)  # shallow copy so the module-level template stays untouched
        target = ROOT / entry["target"]
        entry["captured"] = target.exists()
        entry["sizeBytes"] = target.stat().st_size if target.exists() else 0
        screenshots.append(entry)
    return {
        "schema": "cwusdc-provider-submission-prefill/v1",
        "generatedAt": dt.datetime.now(dt.UTC).isoformat().replace("+00:00", "Z"),
        "asset": ASSET,
        "description": DESCRIPTION,
        "providers": PROVIDERS,
        "screenshotChecklist": screenshots,
        "currentReadiness": {
            # Optional readiness evidence; each value is None when not yet generated.
            "dossier": load_optional("reports/status/cwusdc-etherscan-value-dossier-latest.json"),
            "providerCi": load_optional("reports/status/cwusdc-provider-readiness-ci-latest.json"),
            "supply": load_optional("reports/status/cwusdc-supply-circulating-attestation-latest.json"),
        },
        "submissionBoundary": [
            "This packet is prefilled evidence only; it does not prove provider acceptance.",
            "Screenshots should be captured immediately before submission and after any provider response.",
            "Do not remove the non-Circle disclosure from provider forms.",
        ],
    }
|
||||
|
||||
|
||||
def write_md(payload: dict[str, Any], path: Path) -> None:
    """Render the prefill payload as a Markdown document at *path*."""
    lines = [
        "# cWUSDC Provider Submission Prefill",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Contract: `{ASSET['contract']}`",
        f"- CAIP-19: `{ASSET['caip19']}`",
        "",
        "## Description",
        "",
        DESCRIPTION,
        "",
        "## Provider Prefill",
        "",
    ]
    # One section per provider: objective, attachment path, then a field table.
    for provider, data in payload["providers"].items():
        lines.extend([f"### {provider}", "", f"- Objective: {data['objective']}", f"- Attachment: `{data['attachment']}`", "", "| Field | Value |", "|---|---|"])
        for key, value in data["fields"].items():
            lines.append(f"| `{key}` | `{value}` |")
        lines.append("")
    lines.extend(["## Screenshot Checklist", "", "| ID | URL | Target | Captured | Reason |", "|---|---|---|---:|---|"])
    for item in payload["screenshotChecklist"]:
        lines.append(f"| `{item['id']}` | {item['url']} | `{item['target']}` | `{item['captured']}` | {item['reason']} |")
    # Ready-to-run capture commands, one per checklist entry.
    lines.extend(["", "## Screenshot Capture Commands", "", "```bash"])
    for item in payload["screenshotChecklist"]:
        lines.append(f"pnpm exec playwright screenshot --timeout=60000 {item['url']} {item['target']}")
    lines.extend(["```", "", "## Boundaries", ""])
    lines.extend(f"- {item}" for item in payload["submissionBoundary"])
    path.write_text("\n".join(lines) + "\n")
|
||||
|
||||
|
||||
def main() -> int:
    """Build the prefill payload and write the JSON and Markdown reports."""
    payload = build()
    REPORT_JSON.parent.mkdir(parents=True, exist_ok=True)
    REPORT_JSON.write_text(json.dumps(payload, indent=2) + "\n")
    write_md(payload, REPORT_MD)
    print(f"Wrote {REPORT_JSON.relative_to(ROOT)}")
    print(f"Wrote {REPORT_MD.relative_to(ROOT)}")
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
160
scripts/verify/build-cwusdc-role-deployment-appendix.py
Normal file
160
scripts/verify/build-cwusdc-role-deployment-appendix.py
Normal file
@@ -0,0 +1,160 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Build a formal role-event/deployment-record appendix for Mainnet cWUSDC."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import datetime as dt
|
||||
import json
|
||||
import re
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
ROLE_AUDIT = ROOT / "reports" / "status" / "cwusdc-mainnet-role-audit-latest.json"
|
||||
REPORT_JSON = ROOT / "reports" / "status" / "cwusdc-role-deployment-appendix-latest.json"
|
||||
REPORT_MD = ROOT / "reports" / "status" / "cwusdc-role-deployment-appendix-latest.md"
|
||||
|
||||
SEARCH_ROOTS = [
|
||||
ROOT / "docs",
|
||||
ROOT / "reports" / "status",
|
||||
ROOT / "config",
|
||||
ROOT / "scripts",
|
||||
]
|
||||
|
||||
|
||||
def load_role_audit() -> dict[str, Any]:
    """Parse the required role-audit JSON, aborting with a clear message if absent."""
    if ROLE_AUDIT.exists():
        return json.loads(ROLE_AUDIT.read_text())
    # Hard requirement: the appendix cannot be built without the audit input.
    raise SystemExit(f"Missing role audit: {ROLE_AUDIT.relative_to(ROOT)}")
|
||||
|
||||
|
||||
def read_text(path: Path) -> str:
    """Best-effort text read: undecodable bytes are dropped, any failure yields ""."""
    try:
        content = path.read_text(errors="ignore")
    except Exception:
        # Unreadable entries (missing files, permissions, directories) count as empty.
        return ""
    return content
|
||||
|
||||
|
||||
def candidate_files() -> list[Path]:
    """Collect text-like files under SEARCH_ROOTS worth scanning for mentions."""
    files: list[Path] = []
    for root in SEARCH_ROOTS:
        if not root.exists():
            continue
        for path in root.rglob("*"):
            # Only extensions that plausibly hold textual evidence, config, or scripts.
            if path.is_file() and path.suffix.lower() in {".md", ".json", ".jsonl", ".sh", ".py", ".env", ".txt"}:
                files.append(path)
    return files
|
||||
|
||||
|
||||
def find_mentions(needles: list[str]) -> list[dict[str, Any]]:
    """Case-insensitively search candidate files for any of *needles*.

    Returns one finding per matching file, carrying the matched needles and up
    to eight sample lines (each truncated to 240 chars), sorted by path.
    """
    lowered = [(needle, needle.lower()) for needle in needles if needle]
    findings: list[dict[str, Any]] = []
    for path in candidate_files():
        rel = str(path.relative_to(ROOT))
        # Skip this tool's own JSON output to avoid self-referential matches.
        if rel.endswith("cwusdc-role-deployment-appendix-latest.json"):
            continue
        text = read_text(path)
        if not text:
            continue
        text_lower = text.lower()
        matches = [needle for needle, low in lowered if low in text_lower]
        if not matches:
            continue
        lines = []
        for index, line in enumerate(text.splitlines(), start=1):
            low_line = line.lower()
            if any(low in low_line for _, low in lowered):
                # Sample lines are capped to keep findings compact.
                lines.append({"line": index, "text": line[:240]})
                if len(lines) >= 8:
                    break
        findings.append({"path": rel, "matches": sorted(set(matches)), "sampleLines": lines})
    return sorted(findings, key=lambda item: item["path"])
|
||||
|
||||
|
||||
def tx_url(tx_hash: str | None) -> str:
|
||||
return f"https://etherscan.io/tx/{tx_hash}" if tx_hash else ""
|
||||
|
||||
|
||||
def build() -> dict[str, Any]:
    """Reconcile on-chain role events with repository records into the appendix payload."""
    role_audit = load_role_audit()
    events = role_audit.get("eventLogReview", {}).get("events", [])
    tx_hashes = sorted({event.get("transactionHash") for event in events if event.get("transactionHash")})
    # Union of token address, candidate-check addresses, and event participants.
    addresses = sorted(
        {
            role_audit.get("token", {}).get("address", ""),
            *[candidate.get("address", "") for candidate in role_audit.get("candidateChecks", {}).values()],
            *[event.get("account", "") for event in events],
            *[event.get("sender", "") for event in events],
        }
    )
    # Keep only well-formed 20-byte hex addresses.
    addresses = [address for address in addresses if re.fullmatch(r"0x[a-fA-F0-9]{40}", address or "")]
    needles = [role_audit.get("token", {}).get("address", ""), *tx_hashes, *addresses]
    mentions = find_mentions(needles)
    # Heuristic: paths whose names suggest deployment/record documents.
    deployment_record_candidates = [
        item
        for item in mentions
        if any(token in item["path"].lower() for token in ["deploy", "tracker", "technical", "completion", "readiness", "runbook", "bridge"])
    ]
    return {
        "schema": "cwusdc-role-deployment-appendix/v1",
        "generatedAt": dt.datetime.now(dt.UTC).isoformat().replace("+00:00", "Z"),
        "roleAudit": str(ROLE_AUDIT.relative_to(ROOT)),
        "token": role_audit.get("token", {}),
        "eventCount": len(events),
        "transactionHashes": tx_hashes,
        "effectiveMembersFromEvents": role_audit.get("eventLogReview", {}).get("effectiveMembersFromEvents", {}),
        "privilegedCandidates": role_audit.get("privilegedCandidates", []),
        "deploymentRecordCandidates": deployment_record_candidates,
        "allMentions": mentions,
        "limitations": [
            "This appendix reconciles on-chain role events with repository records discoverable by local text search.",
            "It is not a substitute for a signed third-party audit or a provider-side ownership verification flow.",
            "Operator notebooks, private emails, and provider form submissions are outside this local repository scan unless committed as evidence reports.",
        ],
    }
|
||||
|
||||
|
||||
def write_md(payload: dict[str, Any], path: Path) -> None:
    """Render the role-deployment appendix payload as Markdown at *path*."""
    lines = [
        "# cWUSDC Role Deployment Appendix",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Token: `{payload['token'].get('address')}`",
        f"- Source role audit: `{payload['roleAudit']}`",
        f"- On-chain role event count: `{payload['eventCount']}`",
        "",
        "## Effective Members From Events",
        "",
        "| Role | Members |",
        "|---|---|",
    ]
    for role, members in payload["effectiveMembersFromEvents"].items():
        lines.append(f"| `{role}` | `{', '.join(members) if members else 'none observed'}` |")
    lines.extend(["", "## Privileged Candidates", "", "| Label | Address | Roles |", "|---|---|---|"])
    for item in payload["privilegedCandidates"]:
        lines.append(f"| `{item['label']}` | `{item['address']}` | `{', '.join(item['roles'])}` |")
    lines.extend(["", "## Role Event Transactions", "", "| Transaction | Etherscan |", "|---|---|"])
    for tx_hash in payload["transactionHashes"]:
        lines.append(f"| `{tx_hash}` | {tx_url(tx_hash)} |")
    lines.extend(["", "## Deployment Record Candidates", "", "| Path | Matches | Sample |", "|---|---|---|"])
    for item in payload["deploymentRecordCandidates"]:
        # At most three sample lines and four matched needles keep rows readable.
        sample = "; ".join(f"L{line['line']}: {line['text']}" for line in item["sampleLines"][:3])
        lines.append(f"| `{item['path']}` | `{', '.join(item['matches'][:4])}` | {sample} |")
    lines.extend(["", "## Limitations", ""])
    lines.extend(f"- {item}" for item in payload["limitations"])
    path.write_text("\n".join(lines) + "\n")
|
||||
|
||||
|
||||
def main() -> int:
    """Build the appendix payload and write the JSON and Markdown reports."""
    payload = build()
    REPORT_JSON.parent.mkdir(parents=True, exist_ok=True)
    REPORT_JSON.write_text(json.dumps(payload, indent=2) + "\n")
    write_md(payload, REPORT_MD)
    print(f"Wrote {REPORT_JSON.relative_to(ROOT)}")
    print(f"Wrote {REPORT_MD.relative_to(ROOT)}")
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
37
scripts/verify/build-ei-matrix-cwusdc-topup-tsv-from-audit-json.sh
Executable file
37
scripts/verify/build-ei-matrix-cwusdc-topup-tsv-from-audit-json.sh
Executable file
@@ -0,0 +1,37 @@
|
||||
#!/usr/bin/env bash
# Build ei-matrix-cwusdc-topup-indices.txt + ei-matrix-cwusdc-topup-amounts.tsv from
# reports/status/ei-matrix-readiness-audit-latest.json (rows with mainnetCwusdcRaw < TARGET).
#
# Usage (repo root):
# EI_MATRIX_TOPUP_TARGET_RAW=12000000 ./scripts/verify/build-ei-matrix-cwusdc-topup-tsv-from-audit-json.sh
# ./scripts/verify/build-ei-matrix-cwusdc-topup-tsv-from-audit-json.sh /path/to/audit.json
set -euo pipefail
# Resolve the repository root relative to this script so it works from any CWD.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# Optional positional argument overrides the default audit JSON location.
JSON="${1:-$PROJECT_ROOT/reports/status/ei-matrix-readiness-audit-latest.json}"
# Per-wallet top-up target in raw token units (override via env).
TARGET="${EI_MATRIX_TOPUP_TARGET_RAW:-12000000}"
OUT_IDX="$PROJECT_ROOT/reports/status/ei-matrix-cwusdc-topup-indices.txt"
OUT_TSV="$PROJECT_ROOT/reports/status/ei-matrix-cwusdc-topup-amounts.tsv"
[[ -f "$JSON" ]] || { echo "Missing $JSON" >&2; exit 1; }
# Delegate the JSON math to an inline Python heredoc (argv: json, target, idx-out, tsv-out).
python3 - "$JSON" "$TARGET" "$OUT_IDX" "$OUT_TSV" <<'PY'
import json
import sys
from pathlib import Path

p, target, out_idx, out_tsv = Path(sys.argv[1]), int(sys.argv[2]), Path(sys.argv[3]), Path(sys.argv[4])
data = json.loads(p.read_text(encoding="utf-8"))
rows = data["rows"]
gaps = []
total = 0
for r in rows:
    cur = int(r.get("mainnetCwusdcRaw") or 0)
    if cur < target:
        need = target - cur
        idx = int(r["linearIndex"])
        gaps.append((idx, need))
        total += need
out_idx.parent.mkdir(parents=True, exist_ok=True)
out_idx.write_text("\n".join(str(i) for i, _ in gaps) + "\n", encoding="utf-8")
out_tsv.write_text("\n".join(f"{i}\t{n}" for i, n in gaps) + "\n", encoding="utf-8")
print(f"wrote {out_idx} + {out_tsv} gap_wallets={len(gaps)} total_topup_raw={total}")
PY
|
||||
157
scripts/verify/build-external-submission-packet-index.py
Executable file
157
scripts/verify/build-external-submission-packet-index.py
Executable file
@@ -0,0 +1,157 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Build an index of external submission packets and current probe artifacts."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
OUT_JSON = ROOT / "reports/status/external-submission-packet-index-latest.json"
|
||||
OUT_MD = ROOT / "docs/04-configuration/EXTERNAL_SUBMISSION_PACKET_INDEX.md"
|
||||
|
||||
PACKETS: list[dict[str, Any]] = [
|
||||
{
|
||||
"provider": "Etherscan",
|
||||
"status": "repo_ready_external_acceptance_pending",
|
||||
"primaryPacket": "docs/04-configuration/etherscan/CWUSDC_MAINNET_ETHERSCAN_PROFILE_PACKET.md",
|
||||
"supporting": [
|
||||
"docs/04-configuration/etherscan/CWUSDC_ETHERSCAN_E2E_RECOMMENDATIONS.md",
|
||||
"docs/04-configuration/etherscan/CWUSDC_ETHERSCAN_VALUE_EXECUTION_PLAN.md",
|
||||
"reports/status/cwusdc-etherscan-value-dossier-latest.json",
|
||||
],
|
||||
"nextRepoAction": "Refresh dossier and capture post-submit response evidence.",
|
||||
},
|
||||
{
|
||||
"provider": "CoinGecko",
|
||||
"status": "repo_ready_external_price_entry_missing",
|
||||
"primaryPacket": "docs/04-configuration/coingecko/CWUSDC_MAINNET_TRACKER_SUBMISSION_PACKET.md",
|
||||
"supporting": [
|
||||
"docs/04-configuration/coingecko/CWUSDC_MAINNET_EXTERNAL_SUBMISSION_CHECKLIST.md",
|
||||
"docs/04-configuration/coingecko/submissions/cwusdc-coingecko-listing-request-20260509.json",
|
||||
"reports/status/cwusdc-external-trackers-live-latest.json",
|
||||
],
|
||||
"nextRepoAction": "Keep token-price API blocker visible and attach current supply/liquidity caveats.",
|
||||
},
|
||||
{
|
||||
"provider": "CoinMarketCap",
|
||||
"status": "dex_page_visible_full_value_acceptance_pending",
|
||||
"primaryPacket": "docs/04-configuration/coingecko/CWUSDC_MAINNET_TRACKER_SUBMISSION_PACKET.md",
|
||||
"supporting": [
|
||||
"reports/status/token-aggregation-cmc-report-chain1-latest.json",
|
||||
"reports/status/cmc-provider-report-sanity-latest.json",
|
||||
"reports/status/cmc-top10-ecosystem-coverage-latest.json",
|
||||
],
|
||||
"nextRepoAction": "Use CMC sanity report to avoid overclaiming liquidity or quote-asset identity.",
|
||||
},
|
||||
{
|
||||
"provider": "DexScreener",
|
||||
"status": "api_not_indexing_pairs",
|
||||
"primaryPacket": "docs/04-configuration/dexscreener/CWUSDC_DEXSCREENER_INDEXING_AND_PROFILE_PACKET_20260509.md",
|
||||
"supporting": [
|
||||
"reports/status/cwusdc-external-trackers-live-latest.json",
|
||||
"reports/status/cwusdc-provider-handoff-latest.md",
|
||||
],
|
||||
"nextRepoAction": "Keep pair/profile request evidence updated after fresh public swap/liquidity events.",
|
||||
},
|
||||
{
|
||||
"provider": "GeckoTerminal",
|
||||
"status": "pool_api_visible_low_reserve",
|
||||
"primaryPacket": "docs/04-configuration/coingecko/CWUSDC_MAINNET_TRACKER_SUBMISSION_PACKET.md",
|
||||
"supporting": [
|
||||
"reports/status/cwusdc-external-trackers-live-latest.json",
|
||||
"reports/status/cmc-provider-report-sanity-latest.json",
|
||||
],
|
||||
"nextRepoAction": "Track reserve USD and 24h volume separately from listing acceptance.",
|
||||
},
|
||||
{
|
||||
"provider": "MetaMask",
|
||||
"status": "metadata_path_ready_price_provider_external",
|
||||
"primaryPacket": "docs/04-configuration/metamask/METAMASK_ASSET_PRICE_PROVIDER_SUBMISSION_MATRIX.md",
|
||||
"supporting": [
|
||||
"docs/04-configuration/metamask/METAMASK_EIP747_CONTRACT_METADATA_REFERENCE_PACKET.md",
|
||||
"docs/04-configuration/metamask/METAMASK_CWUSDC_API_FEED_SPIDER_WEB_RESEARCH.md",
|
||||
"reports/status/cwusdc-provider-readiness-ci-latest.json",
|
||||
],
|
||||
"nextRepoAction": "Keep CAIP-19, EIP-747, logo URL, and external price-provider blockers aligned.",
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
def exists(path: str) -> bool:
    """Report whether the repository-relative *path* is present on disk."""
    candidate = ROOT / path
    return candidate.exists()
|
||||
|
||||
|
||||
def table(headers: list[str], rows: list[list[Any]]) -> str:
    """Render *headers* and *rows* as a Markdown table string."""

    def cell(value: Any) -> str:
        # Lists collapse into one multi-line cell; pipes/newlines are escaped
        # so a value cannot break the table layout.
        if isinstance(value, list):
            value = "<br>".join(str(item) for item in value)
        return str(value).replace("|", "\\|").replace("\n", "<br>")

    header_row = f"| {' | '.join(cell(header) for header in headers)} |"
    divider_row = f"| {' | '.join('---' for _ in headers)} |"
    body_rows = [f"| {' | '.join(cell(value) for value in row)} |" for row in rows]
    return "\n".join([header_row, divider_row, *body_rows])
|
||||
|
||||
|
||||
def main() -> int:
    """Build the external submission packet index (JSON + Markdown).

    For each provider entry in PACKETS, checks whether the primary packet and
    every supporting artifact exist on disk, then writes the aggregated
    payload to OUT_JSON and a human-readable table to OUT_MD.

    Returns:
        0 always; artifact gaps are reported inside the payload, not via the
        exit code.
    """
    generated_at = datetime.now(timezone.utc).isoformat()
    packets = []
    for packet in PACKETS:
        # Shallow-copy so the module-level PACKETS template is never mutated.
        row = dict(packet)
        row["primaryExists"] = exists(row["primaryPacket"])
        row["supportingExists"] = [{"path": path, "exists": exists(path)} for path in row["supporting"]]
        row["allArtifactsPresent"] = row["primaryExists"] and all(item["exists"] for item in row["supportingExists"])
        packets.append(row)
    payload = {
        "schema": "external-submission-packet-index/v1",
        "generatedAt": generated_at,
        "summary": {
            "providerCount": len(packets),
            "allArtifactsPresent": all(row["allArtifactsPresent"] for row in packets),
            "missingArtifactCount": sum(1 for row in packets if not row["allArtifactsPresent"]),
        },
        "packets": packets,
    }
    OUT_JSON.parent.mkdir(parents=True, exist_ok=True)
    OUT_JSON.write_text(json.dumps(payload, indent=2) + "\n")

    lines = [
        "# External Submission Packet Index",
        "",
        f"- Generated: `{generated_at}`",
        f"- All artifacts present: `{payload['summary']['allArtifactsPresent']}`",
        "",
        table(
            ["Provider", "Status", "Primary packet", "Supporting artifacts", "Next repo action"],
            [
                [
                    row["provider"],
                    row["status"],
                    f"`{row['primaryPacket']}` ({row['primaryExists']})",
                    [f"`{item['path']}` ({item['exists']})" for item in row["supportingExists"]],
                    row["nextRepoAction"],
                ]
                for row in packets
            ],
        ),
        "",
        "## Boundary",
        "",
        "This index tracks repo-side evidence availability only. Provider submission, review, acceptance, and price propagation remain external states.",
    ]
    OUT_MD.parent.mkdir(parents=True, exist_ok=True)
    OUT_MD.write_text("\n".join(lines) + "\n")
    print(f"Wrote {OUT_JSON.relative_to(ROOT)}")
    print(f"Wrote {OUT_MD.relative_to(ROOT)}")
    return 0
|
||||
|
||||
|
||||
# Script entry point: propagate main()'s return code as the process exit status.
if __name__ == "__main__":
    raise SystemExit(main())
|
||||
136
scripts/verify/build-non-evm-requirement-stubs.py
Executable file
136
scripts/verify/build-non-evm-requirement-stubs.py
Executable file
@@ -0,0 +1,136 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Write repo-side non-EVM funding and identity requirement stubs."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]  # repo root (scripts/verify/ -> repo)
# Machine-readable requirement stubs consumed as a config source.
CONFIG_OUT = ROOT / "config/non-evm-lane-requirements.json"
# Latest-report mirrors written alongside the other status artifacts.
REPORT_JSON = ROOT / "reports/status/non-evm-lane-requirements-latest.json"
REPORT_MD = ROOT / "reports/status/non-evm-lane-requirements-latest.md"
|
||||
|
||||
# One stub per candidate non-EVM lane. `minimumFundingTarget` stays "TBD" until
# bound, and `claimBoundary` states what must NOT be claimed before the listed
# `requiredBindings` are closed. `canonicalWallet` is None when no wallet is
# bound yet.
LANES: list[dict[str, Any]] = [
    {
        "network": "solana",
        "nativeAsset": "SOL",
        "walletStatus": "bound_from_SOLANA_KEYPAIR_PATH_public_key",
        "canonicalWallet": "9b4ebHVimuhMqbiCh6tUMMY2S48VyEHpqg5nxMMFe5Pf",
        "requiredBindings": ["splMintAddresses", "rentReserveTarget", "venueMinimumLiquidity"],
        "minimumFundingTarget": "TBD",
        "claimBoundary": "Do not claim native Solana liquidity until SPL mints, rent/gas, and venue inventory are bound.",
    },
    {
        "network": "tron",
        "nativeAsset": "TRX",
        "walletStatus": "derived_wallet_needs_canonical_confirmation",
        "canonicalWallet": "TGkbidE5LfVJZ3QGj6DaPqzCTcTe9tJDxm",
        "requiredBindings": ["canonicalWalletApproval", "energyBandwidthTarget", "trc20Inventory"],
        "minimumFundingTarget": "TBD",
        "claimBoundary": "Do not claim native Tron liquidity until the canonical wallet and TRC-20 inventory are confirmed.",
    },
    {
        "network": "xrpl",
        "nativeAsset": "XRP",
        "walletStatus": "missing",
        "canonicalWallet": None,
        "requiredBindings": ["xrplAccount", "destinationTagPolicy", "trustlineIssuerPolicy", "xrpReserveTarget"],
        "minimumFundingTarget": "TBD",
        "claimBoundary": "Do not claim XRPL corridor readiness until account reserve, tags, trustlines, and wXRP controller evidence are closed.",
    },
    {
        "network": "bitcoin",
        "nativeAsset": "BTC",
        "walletStatus": "missing",
        "canonicalWallet": None,
        "requiredBindings": ["btcCustodyAddress", "proofOfReservesPolicy", "wrappedAssetMapping", "venueTarget"],
        "minimumFundingTarget": "TBD",
        "claimBoundary": "Use BTC as a planning lane only until custody/reserve evidence and wrapping policy are bound.",
    },
    {
        "network": "dogecoin",
        "nativeAsset": "DOGE",
        "walletStatus": "missing",
        "canonicalWallet": None,
        "requiredBindings": ["dogeCustodyAddress", "bridgeOrCustodyModel", "venueTarget"],
        "minimumFundingTarget": "TBD",
        "claimBoundary": "Use DOGE as a planning lane only until native custody and bridge model are bound.",
    },
    {
        "network": "hyperliquid",
        "nativeAsset": "HYPE",
        "walletStatus": "research_required",
        "canonicalWallet": None,
        "requiredBindings": ["chainIdentifier", "assetIdentifier", "custodyPath", "venueOrApiEvidence"],
        "minimumFundingTarget": "TBD",
        "claimBoundary": "Use HYPE only as a market-cap watch item until identifiers and custody path are verified.",
    },
]
|
||||
|
||||
|
||||
def table(headers: list[str], rows: list[list[Any]]) -> str:
    """Render a Markdown table; ``None`` cells display as ``TBD``.

    List cells are joined with ``<br>``; pipe and newline characters are
    escaped so cell content cannot break the table layout.
    """

    def render(value: Any) -> str:
        if isinstance(value, list):
            value = "<br>".join(str(item) for item in value)
        if value is None:
            value = "TBD"
        return str(value).replace("|", "\\|").replace("\n", "<br>")

    header_line = "| " + " | ".join(render(header) for header in headers) + " |"
    divider = "| " + " | ".join(["---"] * len(headers)) + " |"
    body = ["| " + " | ".join(render(value) for value in row) + " |" for row in rows]
    return "\n".join([header_line, divider, *body])
|
||||
|
||||
|
||||
def main() -> int:
    """Write the non-EVM lane requirement stubs (config + status reports).

    Emits the same JSON payload to CONFIG_OUT and REPORT_JSON, plus a Markdown
    summary table to REPORT_MD.

    Returns:
        0 always; lane gaps are encoded in the payload, not the exit code.
    """
    generated_at = datetime.now(timezone.utc).isoformat()
    payload = {
        "schema": "non-evm-lane-requirements/v1",
        "generatedAt": generated_at,
        "status": "stubs_bound_repo_side",
        "lanes": LANES,
        "validationRule": "A lane is claimable only after canonicalWallet, asset IDs, native gas/reserve target, venue target, and evidence source are non-TBD.",
    }
    # The config stub and the JSON status report are intentionally identical.
    for path in (CONFIG_OUT, REPORT_JSON):
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_text(json.dumps(payload, indent=2) + "\n")

    lines = [
        "# Non-EVM Lane Requirement Stubs",
        "",
        f"- Generated: `{generated_at}`",
        f"- Config source: `{CONFIG_OUT.relative_to(ROOT)}`",
        "",
        table(
            ["Network", "Native", "Wallet status", "Canonical wallet", "Required bindings", "Minimum target", "Claim boundary"],
            [
                [
                    lane["network"],
                    lane["nativeAsset"],
                    lane["walletStatus"],
                    lane["canonicalWallet"],
                    lane["requiredBindings"],
                    lane["minimumFundingTarget"],
                    lane["claimBoundary"],
                ]
                for lane in LANES
            ],
        ),
    ]
    REPORT_MD.parent.mkdir(parents=True, exist_ok=True)
    REPORT_MD.write_text("\n".join(lines) + "\n")
    print(f"Wrote {CONFIG_OUT.relative_to(ROOT)}")
    print(f"Wrote {REPORT_JSON.relative_to(ROOT)}")
    print(f"Wrote {REPORT_MD.relative_to(ROOT)}")
    return 0
|
||||
|
||||
|
||||
# Script entry point: propagate main()'s return code as the process exit status.
if __name__ == "__main__":
    raise SystemExit(main())
|
||||
165
scripts/verify/check-cmc-provider-report-sanity.py
Executable file
165
scripts/verify/check-cmc-provider-report-sanity.py
Executable file
@@ -0,0 +1,165 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Advisory sanity checks for the repo CMC-shaped Mainnet report."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from decimal import Decimal, InvalidOperation
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]  # repository root
# Inputs: repo-generated CMC-shaped token report and live tracker probe output.
CMC_REPORT = ROOT / "reports/status/token-aggregation-cmc-report-chain1-latest.json"
TRACKERS = ROOT / "reports/status/cwusdc-external-trackers-live-latest.json"
# Outputs: advisory sanity report (JSON + Markdown).
OUT_JSON = ROOT / "reports/status/cmc-provider-report-sanity-latest.json"
OUT_MD = ROOT / "reports/status/cmc-provider-report-sanity-latest.md"

# Lower-cased contract addresses mapped to their official symbols; reports that
# alias these addresses under another symbol are flagged as warnings.
OFFICIAL_QUOTES = {
    "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48": "USDC",
    "0xdac17f958d2ee523a2206206994597c13d831ec7": "USDT",
}
# Promoted wrapped/compliant symbols summarized in the report table.
PROMOTED = {"cWUSDC", "cWUSDT", "cWXAUC", "cWXAUT", "cWBTC", "cWETH"}
|
||||
|
||||
|
||||
def load(path: Path) -> dict[str, Any]:
    """Parse *path* as JSON, or return an empty dict when the file is absent."""
    if not path.exists():
        return {}
    return json.loads(path.read_text())
|
||||
|
||||
|
||||
def dec(value: Any) -> Decimal:
    """Best-effort conversion of *value* to Decimal; falsy or bad input -> 0."""
    text = str(value or "0")
    try:
        return Decimal(text)
    except (InvalidOperation, ValueError):
        return Decimal(0)
|
||||
|
||||
|
||||
def table(headers: list[str], rows: list[list[Any]]) -> str:
    """Format *headers*/*rows* as a Markdown table with escaped cell text."""

    def fmt(value: Any) -> str:
        if isinstance(value, list):
            value = "<br>".join(str(entry) for entry in value)
        return str(value).replace("|", "\\|").replace("\n", "<br>")

    out = ["| " + " | ".join(fmt(header) for header in headers) + " |"]
    out.append("| " + " | ".join(["---"] * len(headers)) + " |")
    for row in rows:
        out.append("| " + " | ".join(fmt(value) for value in row) + " |")
    return "\n".join(out)
|
||||
|
||||
|
||||
def main() -> int:
    """Run advisory sanity checks over the CMC-shaped Mainnet token report.

    Cross-references the repo CMC-shaped report with GeckoTerminal tracker
    reserve evidence, then writes OUT_JSON and OUT_MD. All findings are
    advisory warnings; the exit code is always 0.
    """
    cmc = load(CMC_REPORT)
    trackers = load(TRACKERS)
    tokens = cmc.get("tokens", [])
    warnings: list[dict[str, Any]] = []
    promoted_rows = []

    for token in tokens:
        symbol = token.get("symbol")
        address = str(token.get("contract_address", "")).lower()
        liquidity = dec(token.get("liquidity_usd"))
        volume = dec(token.get("volume_24h"))
        pairs = token.get("pairs", [])
        # Official USDC/USDT contracts presented under a different symbol are
        # flagged so provider packets stay explicit about symbol aliasing.
        if address in OFFICIAL_QUOTES and symbol != OFFICIAL_QUOTES[address]:
            warnings.append(
                {
                    "id": "official_quote_symbol_alias",
                    "symbol": symbol,
                    "address": address,
                    "severity": "warning",
                    "message": f"Official {OFFICIAL_QUOTES[address]} address is presented with symbol {symbol}; keep provider packets explicit about official quote vs DBIS wrapped/compliant symbols.",
                }
            )
        if symbol in PROMOTED:
            promoted_rows.append(
                {
                    "symbol": symbol,
                    "address": address,
                    "liquidityUsd": str(liquidity),
                    "volume24hUsd": str(volume),
                    "pairCount": len(pairs),
                }
            )
            # NOTE(review): the zero-liquidity warning is scoped to promoted
            # symbols here — confirm it was not intended for every token.
            if liquidity <= 0:
                warnings.append(
                    {
                        "id": "zero_reported_liquidity",
                        "symbol": symbol,
                        "address": address,
                        "severity": "warning",
                        "message": "CMC-shaped report shows zero liquidity_usd; do not use it as listing-quality liquidity evidence.",
                    }
                )

    # Collect GeckoTerminal reserve/volume evidence from the tracker probe.
    gecko_reserves = []
    for check in trackers.get("checks", []):
        if not str(check.get("id", "")).startswith("geckoterminal"):
            continue
        attrs = (((check.get("jsonPreview") or {}).get("data") or {}).get("attributes") or {})
        gecko_reserves.append(
            {
                "id": check.get("id"),
                "pool": attrs.get("address"),
                "name": attrs.get("name"),
                "reserveUsd": attrs.get("reserve_in_usd"),
                "volume24hUsd": (attrs.get("volume_usd") or {}).get("h24"),
            }
        )

    payload = {
        "schema": "cmc-provider-report-sanity/v1",
        "generatedAt": datetime.now(timezone.utc).isoformat(),
        "inputs": {
            "cmcReport": str(CMC_REPORT.relative_to(ROOT)),
            "trackerReport": str(TRACKERS.relative_to(ROOT)) if TRACKERS.exists() else None,
        },
        "summary": {
            "tokenCount": len(tokens),
            "promotedTokenCount": len(promoted_rows),
            "warningCount": len(warnings),
            "geckoReserveEvidenceCount": len(gecko_reserves),
        },
        "promotedTokens": promoted_rows,
        "geckoReserveEvidence": gecko_reserves,
        "warnings": warnings,
    }
    OUT_JSON.parent.mkdir(parents=True, exist_ok=True)
    OUT_JSON.write_text(json.dumps(payload, indent=2) + "\n")

    lines = [
        "# CMC Provider Report Sanity",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Warnings: `{len(warnings)}`",
        "",
        "## Promoted Mainnet Rows",
        "",
        table(
            ["Symbol", "Address", "Liquidity USD", "24h volume USD", "Pairs"],
            [[row["symbol"], row["address"], row["liquidityUsd"], row["volume24hUsd"], row["pairCount"]] for row in promoted_rows],
        ),
        "",
        "## GeckoTerminal Reserve Cross-Check",
        "",
        table(
            ["Check", "Pool", "Name", "Reserve USD", "24h volume USD"],
            [[row["id"], row["pool"], row["name"], row["reserveUsd"], row["volume24hUsd"]] for row in gecko_reserves],
        ) if gecko_reserves else "No GeckoTerminal tracker reserve evidence found.",
        "",
        "## Advisory Warnings",
        "",
        table(
            ["ID", "Symbol", "Address", "Severity", "Message"],
            [[w["id"], w.get("symbol", "-"), w.get("address", "-"), w["severity"], w["message"]] for w in warnings],
        ) if warnings else "No warnings.",
    ]
    # OUT_MD shares OUT_JSON's directory, which was created above.
    OUT_MD.write_text("\n".join(lines) + "\n")
    print(f"Wrote {OUT_JSON.relative_to(ROOT)}")
    print(f"Wrote {OUT_MD.relative_to(ROOT)}")
    return 0
|
||||
|
||||
|
||||
# Script entry point: propagate main()'s return code as the process exit status.
if __name__ == "__main__":
    raise SystemExit(main())
|
||||
@@ -157,7 +157,7 @@ for addr in "${ADDRESSES[@]}"; do
|
||||
done
|
||||
|
||||
echo ""
|
||||
echo "Total: $OK present, $MISS missing/empty (${#ADDRESSES[@]} addresses). Explorer: https://explorer.d-bis.org/address/<ADDR>"
|
||||
echo "Total: $OK present, $MISS missing/empty (${#ADDRESSES[@]} addresses). Explorer: https://explorer.d-bis.org/addresses/<ADDR>"
|
||||
if [[ $MISS -gt 0 && -z "$rpc_reachable" ]]; then
|
||||
echo " → RPC was unreachable from this host; see WARN above. Run from LAN/VPN or pass a reachable RPC URL." >&2
|
||||
fi
|
||||
|
||||
536
scripts/verify/check-cw-full-operational-readiness.py
Executable file
536
scripts/verify/check-cw-full-operational-readiness.py
Executable file
@@ -0,0 +1,536 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Build a read-only cW mesh operational readiness report.
|
||||
|
||||
This checker intentionally does not call RPC. It validates the source-of-truth
|
||||
files that the deployment, bridge, liquidity, token aggregation, and tracker
|
||||
runbooks consume, then records the remaining live/operator and external gates.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]  # repository root

# Source-of-truth inputs consumed by this checker (repo files only, no RPC).
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
MESH_MATRIX = ROOT / "reports" / "status" / "cw-mesh-deployment-matrix-latest.json"
TOKEN_MAPPING = ROOT / "config" / "token-mapping-multichain.json"
CANONICAL_TOKENS = ROOT / "smom-dbis-138" / "services" / "token-aggregation" / "src" / "config" / "canonical-tokens.ts"
ENGINE_X_READINESS = ROOT / "reports" / "status" / "engine-x-public-indexed-readiness-latest.json"
ETHERSCAN_PACKET = ROOT / "docs" / "04-configuration" / "etherscan" / "CWUSDC_MAINNET_ETHERSCAN_PROFILE_PACKET.md"
COINGECKO_CHECKLIST = ROOT / "docs" / "04-configuration" / "coingecko" / "CWUSDC_MAINNET_EXTERNAL_SUBMISSION_CHECKLIST.md"
# Bridge contracts and deployment/bootstrap scripts whose presence is gated.
CW_L1_CONTRACT = ROOT / "smom-dbis-138" / "contracts" / "bridge" / "CWMultiTokenBridgeL1.sol"
CW_L2_CONTRACT = ROOT / "smom-dbis-138" / "contracts" / "bridge" / "CWMultiTokenBridgeL2.sol"
CW_L1_DEPLOY = ROOT / "smom-dbis-138" / "script" / "DeployCWMultiTokenBridgeL1.s.sol"
CW_L2_DEPLOY = ROOT / "smom-dbis-138" / "script" / "DeployCWMultiTokenBridgeL2.s.sol"
CW_ROUTE_BOOTSTRAP = ROOT / "smom-dbis-138" / "scripts" / "deployment" / "cw-l1-bootstrap-gru-v2-ccip-routes.sh"

# Evidence files examined in priority order; the first existing one decides.
BRIDGE_EVIDENCE_CANDIDATES = [
    ROOT / "reports" / "status" / "cw-multitoken-bridge-e2e-latest.json",
    ROOT / "reports" / "status" / "cw-bridge-live-e2e-latest.json",
]
TRACKER_EVIDENCE_CANDIDATES = [
    ROOT / "reports" / "status" / "cwusdc-tracker-profile-approval-latest.json",
    ROOT / "reports" / "status" / "cwusdc-external-trackers-live-latest.json",
]

# Public mesh chains that must carry the full cW deployment, vs. chains that
# are planned or outside the cW mesh and only reported as advisory scope.
ACTIVE_CHAIN_IDS = [1, 10, 56, 100, 137, 8453, 42161, 42220, 43114]
PLANNED_OR_NON_CW_CHAIN_IDS = [1111, 651940]

# cW symbols required on every active chain; cWBTC is a soft add-on below.
REQUIRED_CW_SYMBOLS = [
    "cWUSDT",
    "cWUSDC",
    "cWEURC",
    "cWEURT",
    "cWGBPC",
    "cWGBPT",
    "cWAUDC",
    "cWJPYC",
    "cWCHFC",
    "cWCADC",
    "cWXAUC",
    "cWXAUT",
]

OPTIONAL_ADDON_SYMBOLS = ["cWBTC"]

# cW symbol -> token key used in token-mapping-multichain.json pair entries.
TOKEN_MAPPING_KEYS = {
    "cWUSDT": "Compliant_USDT_cW",
    "cWUSDC": "Compliant_USDC_cW",
    "cWEURC": "Compliant_EURC_cW",
    "cWEURT": "Compliant_EURT_cW",
    "cWGBPC": "Compliant_GBPC_cW",
    "cWGBPT": "Compliant_GBPT_cW",
    "cWAUDC": "Compliant_AUDC_cW",
    "cWJPYC": "Compliant_JPYC_cW",
    "cWCHFC": "Compliant_CHFC_cW",
    "cWCADC": "Compliant_CADC_cW",
    "cWXAUC": "Compliant_XAUC_cW",
    "cWXAUT": "Compliant_XAUT_cW",
}
|
||||
|
||||
|
||||
def load_json(path: Path, default: Any = None) -> Any:
    """Parse *path* as JSON, returning *default* when the file is missing."""
    if path.exists():
        return json.loads(path.read_text())
    return default
|
||||
|
||||
|
||||
def rel(path: Path) -> str:
    """Return *path* relative to ROOT when possible, else its original form."""
    try:
        relative = path.relative_to(ROOT)
    except ValueError:
        # Path lies outside the repository root; report it untouched.
        return str(path)
    return str(relative)
|
||||
|
||||
|
||||
def is_zero_address(value: str | None) -> bool:
    """Treat a missing/empty value or the canonical zero address as 'no address'."""
    if value is None or value == "":
        return True
    return value.lower() == "0x0000000000000000000000000000000000000000"
|
||||
|
||||
|
||||
def ok_gate(gate_id: str, title: str, details: list[str] | None = None, evidence: list[str] | None = None) -> dict[str, Any]:
    """Build a passing gate record for the readiness report."""
    record: dict[str, Any] = {"id": gate_id, "title": title, "status": "pass"}
    record["details"] = details or []
    record["evidence"] = evidence or []
    return record
|
||||
|
||||
|
||||
def warn_gate(gate_id: str, title: str, details: list[str], evidence: list[str] | None = None) -> dict[str, Any]:
    """Build a warning (non-blocking) gate record for the readiness report."""
    record: dict[str, Any] = {"id": gate_id, "title": title, "status": "warn", "details": details}
    record["evidence"] = evidence or []
    return record
|
||||
|
||||
|
||||
def fail_gate(gate_id: str, title: str, details: list[str], evidence: list[str] | None = None) -> dict[str, Any]:
    """Build a blocked (failing) gate record for the readiness report."""
    record: dict[str, Any] = {"id": gate_id, "title": title, "status": "blocked", "details": details}
    record["evidence"] = evidence or []
    return record
|
||||
|
||||
|
||||
def check_source_of_truth(dep: dict[str, Any]) -> dict[str, Any]:
    """Gate: deployment-status must declare Chain 138 home plus all active chains.

    Args:
        dep: Parsed deployment-status.json payload.

    Returns:
        A pass/blocked gate record (see ok_gate/fail_gate).
    """
    issues: list[str] = []
    if dep.get("homeChainId") != 138:
        issues.append(f"deployment-status homeChainId is `{dep.get('homeChainId')}`, expected `138`.")
    if "138" not in (dep.get("chains") or {}):
        issues.append("deployment-status is missing Chain 138.")
    for chain_id in ACTIVE_CHAIN_IDS:
        # Chain entries are keyed by string (JSON object keys).
        if str(chain_id) not in (dep.get("chains") or {}):
            issues.append(f"deployment-status is missing active chain `{chain_id}`.")

    evidence = [rel(DEPLOYMENT_STATUS)]
    if issues:
        return fail_gate("source_of_truth_chain_138", "Chain 138 source of truth", issues, evidence)
    return ok_gate(
        "source_of_truth_chain_138",
        "Chain 138 source of truth",
        ["Chain 138 is the home chain and all active public mesh chains are represented."],
        evidence,
    )
|
||||
|
||||
|
||||
def check_deployment_coverage(dep: dict[str, Any]) -> dict[str, Any]:
    """Gate: every active chain must carry all required cW token deployments.

    Missing required symbols block the gate; a missing optional add-on
    (cWBTC) only downgrades the result to a warning.
    """
    chains = dep.get("chains") or {}
    issues: list[str] = []
    optional_missing: list[str] = []

    for chain_id in ACTIVE_CHAIN_IDS:
        info = chains.get(str(chain_id)) or {}
        cw_tokens = info.get("cwTokens") or {}
        for symbol in REQUIRED_CW_SYMBOLS:
            # A zero/absent address means the token is not deployed there.
            if is_zero_address(cw_tokens.get(symbol)):
                issues.append(f"chain `{chain_id}` missing `{symbol}` in deployment-status cwTokens.")
        for symbol in OPTIONAL_ADDON_SYMBOLS:
            if is_zero_address(cw_tokens.get(symbol)):
                optional_missing.append(f"chain `{chain_id}` missing optional add-on `{symbol}`.")

    evidence = [rel(DEPLOYMENT_STATUS)]
    if issues:
        return fail_gate("active_cw_token_coverage", "Active cW token coverage", issues, evidence)
    if optional_missing:
        return warn_gate(
            "active_cw_token_coverage",
            "Active cW token coverage",
            ["All required cW fiat/commodity symbols are present.", *optional_missing],
            evidence,
        )
    return ok_gate(
        "active_cw_token_coverage",
        "Active cW token coverage",
        ["All required cW fiat/commodity symbols and optional cWBTC add-on are present on active mesh chains."],
        evidence,
    )
|
||||
|
||||
|
||||
def check_token_mapping(dep: dict[str, Any], mapping: dict[str, Any]) -> dict[str, Any]:
    """Gate: 138 -> active-chain mapping entries exist and match deployment-status."""
    chains = dep.get("chains") or {}
    pairs = mapping.get("pairs") or []
    # Index the 138 -> X pairs by destination chain id.
    pair_by_to = {int(p.get("toChainId")): p for p in pairs if p.get("fromChainId") == 138 and p.get("toChainId") is not None}
    issues: list[str] = []

    for chain_id in ACTIVE_CHAIN_IDS:
        pair = pair_by_to.get(chain_id)
        if not pair:
            issues.append(f"token-mapping missing 138 -> `{chain_id}` pair.")
            continue
        tokens = {t.get("key"): t.get("addressTo") for t in pair.get("tokens") or []}
        cw_tokens = (chains.get(str(chain_id)) or {}).get("cwTokens") or {}
        for symbol, key in TOKEN_MAPPING_KEYS.items():
            mapped = tokens.get(key)
            expected = cw_tokens.get(symbol)
            if is_zero_address(mapped):
                issues.append(f"138 -> `{chain_id}` token-mapping has empty `{key}`.")
            elif expected and mapped.lower() != expected.lower():
                # Case-insensitive compare: addresses may differ only in checksum casing.
                issues.append(f"138 -> `{chain_id}` `{key}` is `{mapped}`, deployment-status has `{expected}`.")

    evidence = [rel(TOKEN_MAPPING), rel(DEPLOYMENT_STATUS)]
    if issues:
        return fail_gate("token_mapping_mesh", "138 to public-chain token mapping", issues, evidence)
    return ok_gate(
        "token_mapping_mesh",
        "138 to public-chain token mapping",
        ["All required cW mapping entries are non-zero and match deployment-status for active mesh chains."],
        evidence,
    )
|
||||
|
||||
|
||||
def parse_gru_chain_ids(text: str) -> list[int]:
    """Extract integer chain IDs from a `const GRU_CW_CHAIN_IDS = [...]` literal."""
    match = re.search(r"const\s+GRU_CW_CHAIN_IDS\s*=\s*\[([^\]]+)\]", text)
    if match is None:
        return []
    body = match.group(1)
    return [int(token) for token in re.findall(r"\d+", body)]
|
||||
|
||||
|
||||
def fallback_symbol_has_chain(text: str, symbol: str, chain_id: int) -> bool:
    """Return True when the symbol's fallback object maps *chain_id* to a hex address.

    Locates an indented ``symbol: { ... },`` object literal in the TypeScript
    source, then looks for a ``[chainId]: '0x<40 hex>'`` entry in its body.
    """
    block_pattern = rf"\n\s+{re.escape(symbol)}:\s*\{{(?P<body>.*?)\n\s+\}},"
    block = re.search(block_pattern, text, flags=re.S)
    if block is None:
        return False
    entry_pattern = rf"\[\s*{chain_id}\s*\]\s*:\s*['\"]0x[a-fA-F0-9]{{40}}['\"]"
    return bool(re.search(entry_pattern, block.group("body")))
|
||||
|
||||
|
||||
def check_token_aggregation() -> dict[str, Any]:
    """Gate: the canonical token registry covers all active chains and cW symbols.

    Parses canonical-tokens.ts textually (no TypeScript tooling): the
    GRU_CW_CHAIN_IDS constant, first-class `symbol:` entries, and per-chain
    FALLBACK_ADDRESSES entries.
    """
    if not CANONICAL_TOKENS.exists():
        return fail_gate("token_aggregation_registry", "Token aggregation canonical registry", ["canonical-tokens.ts is missing."], [rel(CANONICAL_TOKENS)])

    text = CANONICAL_TOKENS.read_text()
    issues: list[str] = []
    chain_ids = parse_gru_chain_ids(text)
    for chain_id in ACTIVE_CHAIN_IDS:
        if chain_id not in chain_ids:
            issues.append(f"GRU_CW_CHAIN_IDS missing active chain `{chain_id}`.")
    for symbol in REQUIRED_CW_SYMBOLS:
        # Accept either quote style used in the TS source.
        if f"symbol: '{symbol}'" not in text and f'symbol: "{symbol}"' not in text:
            issues.append(f"CANONICAL_TOKENS missing first-class `{symbol}` entry.")
        for chain_id in ACTIVE_CHAIN_IDS:
            if not fallback_symbol_has_chain(text, symbol, chain_id):
                issues.append(f"FALLBACK_ADDRESSES `{symbol}` missing chain `{chain_id}`.")

    evidence = [rel(CANONICAL_TOKENS), "smom-dbis-138/services/token-aggregation/src/config/canonical-tokens.test.ts"]
    if issues:
        return fail_gate("token_aggregation_registry", "Token aggregation canonical registry", issues, evidence)
    return ok_gate(
        "token_aggregation_registry",
        "Token aggregation canonical registry",
        ["Canonical token aggregation includes the active nine-chain promoted cW mesh and the full required wrapped family."],
        evidence,
    )
|
||||
|
||||
|
||||
def check_liquidity_and_indexing(matrix: dict[str, Any], dep: dict[str, Any]) -> dict[str, Any]:
    """Gate: cWUSDT/cWUSDC pair indexing must be live and healthy per chain.

    Blocking issues: a missing matrix row, or the UniV2 pair not live/healthy.
    Advisory warnings only: missing stable-settlement PMM rails and pools
    still marked `unseeded_pending` in deployment-status.
    """
    rows = {int(r.get("chainId")): r for r in matrix.get("rows") or []}
    chains = dep.get("chains") or {}
    issues: list[str] = []
    missing_rails: list[str] = []
    unseeded_by_chain: dict[int, int] = {}
    unseeded_examples: dict[int, list[str]] = {}

    for chain_id in ACTIVE_CHAIN_IDS:
        row = rows.get(chain_id)
        if not row:
            issues.append(f"mesh matrix missing chain `{chain_id}`.")
            continue
        # `is not True` treats missing/None/non-bool values uniformly as failure.
        if row.get("uniswapV2CWUSDTvsCWUSDCLive") is not True:
            issues.append(f"chain `{chain_id}` cWUSDT/cWUSDC UniV2 pair is not live in latest matrix.")
        if row.get("uniswapV2CWUSDTvsCWUSDCHealthy") is not True:
            issues.append(f"chain `{chain_id}` cWUSDT/cWUSDC UniV2 pair is not healthy in latest matrix.")
        rails = set(row.get("pmmSettlementRails") or [])
        # Either stable quote (USDC or USDT) satisfies each settlement rail.
        if not any(x in rails for x in ("cWUSDC/USDC", "cWUSDC/USDT")):
            missing_rails.append(f"chain `{chain_id}` lacks cWUSDC stable settlement PMM rail")
        if not any(x in rails for x in ("cWUSDT/USDT", "cWUSDT/USDC")):
            missing_rails.append(f"chain `{chain_id}` lacks cWUSDT stable settlement PMM rail")

        for pool in (chains.get(str(chain_id)) or {}).get("pmmPools") or []:
            notes = pool.get("notes") or []
            if any("unseeded_pending" in str(note) for note in notes):
                unseeded_by_chain[chain_id] = unseeded_by_chain.get(chain_id, 0) + 1
                unseeded_examples.setdefault(chain_id, [])
                # Cap examples at three per chain to keep the report short.
                if len(unseeded_examples[chain_id]) < 3:
                    unseeded_examples[chain_id].append(f"{pool.get('base')}/{pool.get('quote')} {pool.get('poolAddress')}")

    evidence = [rel(MESH_MATRIX), rel(DEPLOYMENT_STATUS)]
    warnings: list[str] = []
    if missing_rails:
        warnings.append(
            "Stable settlement PMM rail gaps: "
            + "; ".join(missing_rails)
            + ". Core cWUSDT/cWUSDC pair indexing remains healthy."
        )
    if unseeded_by_chain:
        summary = ", ".join(f"{chain_id}: {count}" for chain_id, count in sorted(unseeded_by_chain.items()))
        warnings.append(f"Unseeded pending PMM pools by chain: {summary}.")
        for chain_id in sorted(unseeded_examples):
            warnings.append(f"chain `{chain_id}` examples: " + "; ".join(unseeded_examples[chain_id]))
    if issues:
        # Warnings are appended to blocking details so context is not lost.
        return fail_gate("liquidity_and_indexing", "LP indexing and settlement rails", issues + warnings, evidence)
    if warnings:
        return warn_gate(
            "liquidity_and_indexing",
            "LP indexing and settlement rails",
            ["Core cWUSDT/cWUSDC pair indexing is present and healthy.", *warnings],
            evidence,
        )
    return ok_gate(
        "liquidity_and_indexing",
        "LP indexing and settlement rails",
        ["Core cWUSDT/cWUSDC pair indexing and stable settlement rails are present with no unseeded PMM warnings."],
        evidence,
    )
|
||||
|
||||
|
||||
def check_bridge_implementation() -> dict[str, Any]:
    """Gate: the cW mint/burn bridge contracts and deploy scripts exist in-repo."""
    required = [CW_L1_CONTRACT, CW_L2_CONTRACT, CW_L1_DEPLOY, CW_L2_DEPLOY, CW_ROUTE_BOOTSTRAP]
    evidence = [rel(path) for path in required]
    missing = [rel(path) for path in required if not path.exists()]
    if missing:
        details = [f"missing `{path}`" for path in missing]
        return fail_gate("cw_mint_burn_bridge_implementation", "cW mint/burn bridge implementation", details, evidence)
    return ok_gate(
        "cw_mint_burn_bridge_implementation",
        "cW mint/burn bridge implementation",
        ["CWMultiTokenBridgeL1/L2 contracts and deployment/bootstrap scripts are present."],
        evidence,
    )
|
||||
|
||||
|
||||
def evidence_file_status(paths: list[Path], success_keys: list[str]) -> tuple[bool, str | None, list[str]]:
    """Inspect the first existing evidence file for a truthy success key.

    Args:
        paths: Candidate evidence files, in priority order.
        success_keys: Dotted key paths (e.g. ``summary.allTrackersLive``); the
            first one that resolves to ``True`` marks the evidence as passing.

    Returns:
        ``(ok, evidence_path, details)`` where ``evidence_path`` is the
        repo-relative path of the file that was examined (None when no
        candidate parses as a dict) and ``details`` holds diagnostics when
        the evidence fails.
    """
    for path in paths:
        if not path.exists():
            continue
        data = load_json(path, {})
        if not isinstance(data, dict):
            continue
        for key in success_keys:
            # Walk the dotted path; any non-dict along the way yields None.
            value: Any = data
            for part in key.split("."):
                value = value.get(part) if isinstance(value, dict) else None
            if value is True:
                return True, rel(path), []
        # The first existing dict-shaped candidate decides the outcome;
        # summarize why it failed instead of trying later candidates.
        details = [f"`{rel(path)}` exists but does not expose a true success key: {', '.join(success_keys)}."]
        summary = data.get("summary") if isinstance(data, dict) else None
        if isinstance(summary, dict):
            for key in ("failedChainIds", "failedRequiredIds", "blockedGateIds"):
                values = summary.get(key)
                if values:
                    details.append(f"{key}: {', '.join(str(x) for x in values)}.")
            for key in ("passedChainCount", "activeChainCount", "requiredPassedCount", "requiredCount"):
                if key in summary:
                    details.append(f"{key}: {summary[key]}.")
        return False, rel(path), details
    return False, None, [f"No evidence file found. Expected one of: {', '.join(rel(p) for p in paths)}."]
|
||||
|
||||
|
||||
def check_bridge_live_e2e() -> dict[str, Any]:
    """Gate: live cW mint/burn bridge E2E evidence must report all chains passed."""
    ok, evidence, details = evidence_file_status(
        BRIDGE_EVIDENCE_CANDIDATES,
        ["summary.allActiveChainsPassed", "summary.readyForProduction", "allActiveChainsPassed"],
    )
    # Cite the examined file when one exists; otherwise list every candidate.
    ev = [evidence] if evidence else [rel(path) for path in BRIDGE_EVIDENCE_CANDIDATES]
    if ok:
        return ok_gate("cw_mint_burn_bridge_live_e2e", "cW mint/burn bridge live E2E", ["Live E2E evidence reports all active chains passed."], ev)
    return fail_gate(
        "cw_mint_burn_bridge_live_e2e",
        "cW mint/burn bridge live E2E",
        [
            "Dedicated cW mint/burn bridge code is implemented, but live receiver deployment, role wiring, route bootstrap, and E2E bridge proof evidence are still required.",
            *details,
        ],
        ev,
    )
|
||||
|
||||
|
||||
def check_engine_x_readiness() -> dict[str, Any]:
    """Gate: the Engine X public-indexed readiness report must be green."""
    data = load_json(ENGINE_X_READINESS, {})
    summary = (data or {}).get("summary") or {}
    ready = summary.get("readyForPublicIndexedProof") is True
    blockers = summary.get("blockers") or []
    evidence = [rel(ENGINE_X_READINESS)]
    if ready and not blockers:
        return ok_gate(
            "engine_x_public_indexed_readiness",
            "Engine X cWUSDC public indexed readiness",
            ["Engine X readiness report is green."],
            evidence,
        )
    details = ["Engine X readiness report is not green."]
    details.extend(str(blocker) for blocker in blockers)
    return fail_gate("engine_x_public_indexed_readiness", "Engine X cWUSDC public indexed readiness", details, evidence)
|
||||
|
||||
|
||||
def check_external_tracker_packet() -> dict[str, Any]:
    """Gate on the Etherscan/CoinGecko submission packet files existing."""
    required_files = [ETHERSCAN_PACKET, COINGECKO_CHECKLIST]
    evidence_paths = [rel(candidate) for candidate in required_files]
    missing = [rel(candidate) for candidate in required_files if not candidate.exists()]
    if missing:
        return fail_gate("external_tracker_submission_packet", "External tracker submission packet", [f"missing `{path}`" for path in missing], evidence_paths)
    return ok_gate(
        "external_tracker_submission_packet",
        "External tracker submission packet",
        ["Etherscan profile packet and CoinGecko/external tracker checklist are present."],
        evidence_paths,
    )
|
||||
|
||||
|
||||
def check_external_tracker_live() -> dict[str, Any]:
    """Gate on live external tracker/indexer evidence files being green."""
    passed, found, notes = evidence_file_status(
        TRACKER_EVIDENCE_CANDIDATES,
        ["summary.allTrackersLive", "summary.readyForEtherscanUsdValue", "allTrackersLive"],
    )
    # Point at the evidence file we actually read, or at every candidate
    # location when none exists yet.
    if found:
        evidence_paths = [found]
    else:
        evidence_paths = [rel(candidate) for candidate in TRACKER_EVIDENCE_CANDIDATES]
    if not passed:
        failure_details = [
            "Explorer/tracker packet is prepared, but Etherscan/CoinGecko/CMC/DexScreener approval or live tracker evidence is external and still pending.",
        ]
        failure_details.extend(notes)
        return fail_gate(
            "external_trackers_live",
            "External trackers live",
            failure_details,
            evidence_paths,
        )
    return ok_gate("external_trackers_live", "External trackers live", ["External tracker evidence reports live indexing/profile approval."], evidence_paths)
|
||||
|
||||
|
||||
def check_planned_chains(dep: dict[str, Any]) -> dict[str, Any]:
    """Warn-level gate listing chains that are planned or outside cW mesh scope."""
    chain_map = dep.get("chains") or {}
    notes: list[str] = []
    for planned_id in PLANNED_OR_NON_CW_CHAIN_IDS:
        entry = chain_map.get(str(planned_id)) or {}
        # Missing activationState defaults to the most conservative label.
        activation = entry.get("activationState") or "not_active_cw_mesh"
        notes.append(f"chain `{planned_id}` `{entry.get('name', '')}` is `{activation}` and is not counted as active cW mesh coverage.")
    return warn_gate("planned_or_non_cw_chains", "Planned or non-cW chain scope", notes, [rel(DEPLOYMENT_STATUS)])
|
||||
|
||||
|
||||
def build_report() -> dict[str, Any]:
    """Run every readiness gate and assemble the JSON-serializable report.

    Returns a dict with a schema tag, timestamp, per-gate results, summary
    counters, and a static list of operator next actions.
    """
    dep = load_json(DEPLOYMENT_STATUS, {})
    matrix = load_json(MESH_MATRIX, {"rows": []})
    mapping = load_json(TOKEN_MAPPING, {})

    # Gate order only affects report readability; each gate runs independently.
    gates = [
        check_source_of_truth(dep),
        check_deployment_coverage(dep),
        check_token_mapping(dep, mapping),
        check_token_aggregation(),
        check_liquidity_and_indexing(matrix, dep),
        check_bridge_implementation(),
        check_bridge_live_e2e(),
        check_engine_x_readiness(),
        check_external_tracker_packet(),
        check_external_tracker_live(),
        check_planned_chains(dep),
    ]
    blocked = [g for g in gates if g["status"] == "blocked"]
    warnings = [g for g in gates if g["status"] == "warn"]
    # The live-E2E and tracker gates depend on external parties, so they are
    # excluded when judging whether the in-repo configuration is complete.
    in_repo_blockers = [
        g for g in blocked
        if g["id"] not in {"cw_mint_burn_bridge_live_e2e", "external_trackers_live"}
    ]

    return {
        "schema": "cw-full-operational-readiness/v1",
        "generatedAt": datetime.now(timezone.utc).isoformat(),
        "activeChainIds": ACTIVE_CHAIN_IDS,
        "requiredCwSymbols": REQUIRED_CW_SYMBOLS,
        "summary": {
            "inRepoSourceOfTruthConfigured": len(in_repo_blockers) == 0,
            "fullyOperationalAndLive": len(blocked) == 0,
            "blockedGateCount": len(blocked),
            "warningGateCount": len(warnings),
            "blockedGateIds": [g["id"] for g in blocked],
            "warningGateIds": [g["id"] for g in warnings],
        },
        "gates": gates,
        "nextActions": [
            "Deploy and configure CWMultiTokenBridgeL1 on Chain 138 and CWMultiTokenBridgeL2 on each active public mesh chain.",
            "Grant MINTER_ROLE and BURNER_ROLE for each cW* token to the active CWMultiTokenBridgeL2 receiver on its chain.",
            "Run cW route bootstrap/configuration and capture live E2E evidence to reports/status/cw-multitoken-bridge-e2e-latest.json.",
            "Submit/confirm Etherscan, CoinGecko, CMC, DexScreener tracker approvals and capture live evidence.",
            "Seed any PMM pools still marked unseeded_pending in deployment-status, prioritizing stable quote rails with tracker-visible volume.",
        ],
    }
|
||||
|
||||
|
||||
def write_markdown(report: dict[str, Any], path: Path) -> None:
    """Render the readiness *report* as a Markdown summary file at *path*."""
    summary = report["summary"]
    out: list[str] = [
        "# cW Full Operational Readiness",
        "",
        f"- Generated: `{report['generatedAt']}`",
        f"- Active chains: `{', '.join(str(x) for x in report['activeChainIds'])}`",
        f"- In-repo source of truth configured: `{summary['inRepoSourceOfTruthConfigured']}`",
        f"- Fully operational and live: `{summary['fullyOperationalAndLive']}`",
        f"- Blocked gates: `{summary['blockedGateCount']}`",
        f"- Warning gates: `{summary['warningGateCount']}`",
        "",
        "## Gates",
        "",
        "| Gate | Status | Details | Evidence |",
        "|---|---:|---|---|",
    ]
    for gate in report["gates"]:
        gate_details = "<br>".join(gate["details"]) if gate["details"] else "-"
        gate_evidence = "<br>".join(f"`{x}`" for x in gate["evidence"]) if gate["evidence"] else "-"
        out.append(f"| {gate['title']} | `{gate['status']}` | {gate_details} | {gate_evidence} |")
    out.extend(["", "## Next Actions", ""])
    for action in report["nextActions"]:
        out.append(f"- {action}")
    out.append("")
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text("\n".join(out) + "\n")
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: build the report, write JSON + Markdown, print a summary.

    Returns a process exit code; non-zero only when --strict is set and at
    least one gate is blocked.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--json-out", type=Path, default=ROOT / "reports" / "status" / "cw-full-operational-readiness-latest.json")
    parser.add_argument("--md-out", type=Path, default=ROOT / "reports" / "status" / "cw-full-operational-readiness-latest.md")
    parser.add_argument("--strict", action="store_true", help="Exit non-zero if any gate is blocked.")
    args = parser.parse_args()

    report = build_report()
    args.json_out.parent.mkdir(parents=True, exist_ok=True)
    args.json_out.write_text(json.dumps(report, indent=2) + "\n")
    write_markdown(report, args.md_out)

    print(f"Wrote {rel(args.json_out)}")
    print(f"Wrote {rel(args.md_out)}")
    print(f"In-repo configured: {report['summary']['inRepoSourceOfTruthConfigured']}")
    print(f"Fully operational/live: {report['summary']['fullyOperationalAndLive']}")
    if report["summary"]["blockedGateIds"]:
        print("Blocked gates: " + ", ".join(report["summary"]["blockedGateIds"]))

    # --strict makes blocked gates fail the process for CI use.
    if args.strict and report["summary"]["blockedGateCount"]:
        return 1
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
8
scripts/verify/check-cw-full-operational-readiness.sh
Executable file
8
scripts/verify/check-cw-full-operational-readiness.sh
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/usr/bin/env bash
# Read-only readiness gate for cW source-of-truth, liquidity/indexing, bridge, and tracker status.
set -euo pipefail

# Resolve the repository root (two levels above this script) so relative
# report paths written by the Python checker land in the right place.
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$ROOT"

# Replace this shell with the Python implementation; all CLI args pass through.
exec python3 scripts/verify/check-cw-full-operational-readiness.py "$@"
|
||||
364
scripts/verify/check-cw-multitoken-bridge-e2e-readiness.py
Executable file
364
scripts/verify/check-cw-multitoken-bridge-e2e-readiness.py
Executable file
@@ -0,0 +1,364 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Read-only CWMultiToken bridge readiness evidence.
|
||||
|
||||
The script checks deployed bridge contracts, configured routes, canonical-to-
|
||||
mirrored token mappings, and cW token MINTER/BURNER roles. It does not send
|
||||
transactions or trigger bridge transfers.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
TOKEN_MAPPING = ROOT / "config" / "token-mapping-multichain.json"
|
||||
DEFAULT_JSON = ROOT / "reports" / "status" / "cw-multitoken-bridge-e2e-latest.json"
|
||||
DEFAULT_MD = ROOT / "reports" / "status" / "cw-multitoken-bridge-e2e-latest.md"
|
||||
|
||||
# Active destination chains, as tuples of:
# (chainId, display name, env-var suffix used for CW_BRIDGE_<suffix>,
#  chain selector as a decimal string, passed to destinations() lookups).
ACTIVE_CHAINS = [
    (1, "Ethereum Mainnet", "MAINNET", "5009297550715157269"),
    (10, "Optimism", "OPTIMISM", "3734403246176062136"),
    (25, "Cronos", "CRONOS", "1456215246176062136"),
    (56, "BSC", "BSC", "11344663589394136015"),
    (100, "Gnosis", "GNOSIS", "465200170687744372"),
    (137, "Polygon", "POLYGON", "4051577828743386545"),
    (8453, "Base", "BASE", "15971525489660198786"),
    (42161, "Arbitrum", "ARBITRUM", "4949039107694359620"),
    (42220, "Celo", "CELO", "1346049177634351622"),
    (43114, "Avalanche", "AVALANCHE", "6433500567565415381"),
]

# Ordered environment-variable candidates for each chain's RPC URL; the first
# non-empty value wins (see env_first()).
RPC_CANDIDATES = {
    1: ["ETHEREUM_MAINNET_RPC", "ETH_MAINNET_RPC_URL", "MAINNET_RPC_URL", "RPC_URL_MAINNET"],
    10: ["OPTIMISM_RPC_URL", "OPTIMISM_MAINNET_RPC"],
    25: ["CRONOS_RPC_URL", "CRONOS_MAINNET_RPC", "CRONOS_RPC"],
    56: ["BSC_RPC_URL", "BSC_MAINNET_RPC"],
    100: ["GNOSIS_RPC_URL", "GNOSIS_MAINNET_RPC", "GNOSIS_RPC"],
    137: ["POLYGON_RPC_URL", "POLYGON_MAINNET_RPC"],
    8453: ["BASE_RPC_URL", "BASE_MAINNET_RPC"],
    42161: ["ARBITRUM_RPC_URL", "ARBITRUM_MAINNET_RPC"],
    42220: ["CELO_RPC_URL", "CELO_MAINNET_RPC", "CELO_RPC"],
    43114: ["AVALANCHE_RPC_URL", "AVALANCHE_MAINNET_RPC", "AVALANCHE_RPC"],
}

# Canary token keys that are always checked, even without --full-family
# (see load_token_rows()).
CORE_KEYS = {"Compliant_USDT_cW", "Compliant_USDC_cW"}
|
||||
|
||||
|
||||
def rel(path: Path) -> str:
    """Return *path* relative to the repository root, or unchanged if outside it."""
    if path.is_relative_to(ROOT):
        return str(path.relative_to(ROOT))
    return str(path)
|
||||
|
||||
|
||||
def env_first(keys: list[str]) -> str:
    """Return the first non-blank (stripped) environment value among *keys*, else ''."""
    candidates = (os.environ.get(name, "").strip() for name in keys)
    return next((value for value in candidates if value), "")
|
||||
|
||||
|
||||
def is_address(value: str) -> bool:
    """Return True for a well-formed 0x-prefixed 20-byte hex address.

    The previous check only validated the "0x" prefix and a length of 42,
    so any 42-character string starting with "0x" passed (including cast
    error text). The 40 trailing characters must now be hex digits.
    """
    if not (value.startswith("0x") and len(value) == 42):
        return False
    try:
        int(value, 16)  # accepts the 0x prefix and either letter case
    except ValueError:
        return False
    return True
|
||||
|
||||
|
||||
def run_cast(args: list[str], timeout: int = 18) -> tuple[bool, str]:
    """Run `cast <args>` and return (success, stripped output text).

    Never raises: spawn failures and timeouts are returned as (False, message)
    so callers can record them as evidence instead of aborting the run.
    """
    try:
        proc = subprocess.run(["cast", *args], cwd=ROOT, text=True, capture_output=True, timeout=timeout, check=False)
    except Exception as exc:  # noqa: BLE001 - evidence should capture any runner failure
        return False, str(exc)
    # Prefer stdout; fall back to stderr so error text is still surfaced.
    out = (proc.stdout or proc.stderr or "").strip()
    return proc.returncode == 0, out
|
||||
|
||||
|
||||
def cast_call(address: str, signature: str, params: list[str], rpc: str) -> tuple[bool, str]:
    """Read-only `cast call` of *signature* on *address* through *rpc*."""
    return run_cast(["call", address, signature, *params, "--rpc-url", rpc])
|
||||
|
||||
|
||||
def cast_code(address: str, rpc: str) -> tuple[bool, str]:
    """Fetch deployed bytecode for *address* via `cast code`."""
    return run_cast(["code", address, "--rpc-url", rpc])
|
||||
|
||||
|
||||
def cast_keccak(value: str) -> str:
    """Return keccak256(*value*) via `cast keccak`.

    Raises:
        RuntimeError: if `cast` is unavailable or the command fails, since
        role hashes are required for all downstream role checks.
    """
    ok, out = run_cast(["keccak", value], timeout=5)
    if not ok:
        raise RuntimeError(f"cast keccak failed for {value}: {out}")
    # The hash is the first whitespace-separated token of the output.
    return out.split()[0]
|
||||
|
||||
|
||||
def bool_from_cast(value: str) -> bool:
    """Interpret cast output as a boolean: the last line must equal 'true'."""
    lines = value.strip().lower().splitlines()
    return bool(lines) and lines[-1] == "true"
|
||||
|
||||
|
||||
def address_in_cast(value: str, expected: str) -> bool:
    """Case-insensitive substring check for *expected* inside cast output."""
    return value.lower().find(expected.lower()) != -1
|
||||
|
||||
|
||||
def load_token_rows(full_family: bool) -> dict[int, list[dict[str, str]]]:
    """Load canonical->mirrored cW token rows, keyed by destination chain id.

    Reads config/token-mapping-multichain.json and keeps only pairs whose
    source chain is 138. When *full_family* is False, only the CORE_KEYS
    canary tokens are kept; otherwise every "_cW"-suffixed key is included.

    Returns:
        {toChainId: [{key, name, canonical, mirrored}, ...]}; chains with no
        usable rows are omitted entirely.
    """
    data = json.loads(TOKEN_MAPPING.read_text())
    out: dict[int, list[dict[str, str]]] = {}
    for pair in data.get("pairs") or []:
        # Only routes originating from the canonical Chain 138 deployment.
        if pair.get("fromChainId") != 138:
            continue
        chain_id = int(pair.get("toChainId"))
        rows: list[dict[str, str]] = []
        for token in pair.get("tokens") or []:
            key = token.get("key", "")
            if not key.endswith("_cW") and key not in CORE_KEYS:
                continue
            if not full_family and key not in CORE_KEYS:
                continue
            address_from = token.get("addressFrom", "")
            address_to = token.get("addressTo", "")
            # Require two well-formed addresses and a non-zero (i.e. not a
            # placeholder) destination address.
            if is_address(address_from) and is_address(address_to) and int(address_to, 16) != 0:
                rows.append(
                    {
                        "key": key,
                        "name": token.get("name", key),
                        "canonical": address_from,
                        "mirrored": address_to,
                    }
                )
        if rows:
            out[chain_id] = rows
    return out
|
||||
|
||||
|
||||
def check_l1(l1_bridge: str, rpc: str, token_rows: dict[int, list[dict[str, str]]]) -> dict[str, Any]:
    """Read-only checks for the Chain 138 L1 bridge: bytecode, routers, destinations.

    Args:
        l1_bridge: CWMultiTokenBridgeL1 address on Chain 138 (may be unset).
        rpc: Chain 138 RPC URL ("" when unset).
        token_rows: per-destination-chain token rows from load_token_rows().

    Returns:
        Evidence dict; "passed" is True only when the bridge has bytecode,
        both routers read back as addresses, and every (token, selector)
        destination reports configured.
    """
    result: dict[str, Any] = {
        "address": l1_bridge,
        "rpcConfigured": bool(rpc),
        "hasCode": False,
        "sendRouterReadable": False,
        "receiveRouterReadable": False,
        "destinationChecks": [],
        "passed": False,
        "errors": [],
    }
    if not is_address(l1_bridge):
        result["errors"].append("CW_L1_BRIDGE_CHAIN138 is unset or invalid.")
        return result
    if not rpc:
        result["errors"].append("RPC_URL_138/CHAIN138_RPC is unset.")
        return result

    ok, code = cast_code(l1_bridge, rpc)
    result["hasCode"] = ok and code not in ("", "0x")
    for field in ["sendRouter", "receiveRouter"]:
        ok, out = cast_call(l1_bridge, f"{field}()(address)", [], rpc)
        # Bugfix: compute the last output line with an empty-output guard.
        # Previously `out.splitlines()[-1]` raised IndexError when the call
        # succeeded (ok=True) but produced no text.
        last_line = out.splitlines()[-1].strip() if out else ""
        result[f"{field}Readable"] = ok and is_address(last_line)
        result[field] = last_line if ok else ""

    # One destinations() probe per (destination chain, token) pair.
    for chain_id, _, _, selector in ACTIVE_CHAINS:
        rows = token_rows.get(chain_id) or []
        for token in rows:
            ok, out = cast_call(
                l1_bridge,
                "destinations(address,uint64)((address,bool))",
                [token["canonical"], selector],
                rpc,
            )
            result["destinationChecks"].append(
                {
                    "chainId": chain_id,
                    "selector": selector,
                    "token": token["key"],
                    "canonical": token["canonical"],
                    "raw": out,
                    "configured": ok and "true" in out.lower(),
                }
            )

    result["passed"] = (
        result["hasCode"]
        and result["sendRouterReadable"]
        and result["receiveRouterReadable"]
        and all(x["configured"] for x in result["destinationChecks"])
    )
    return result
|
||||
|
||||
|
||||
def check_chain(
    chain_id: int,
    name: str,
    suffix: str,
    selector: str,
    token_rows: list[dict[str, str]],
    minter_role: str,
    burner_role: str,
) -> dict[str, Any]:
    """Read-only checks for one destination chain's L2 bridge.

    Verifies bridge bytecode, the sendRouter/receiveRouter/feeToken getters,
    the return route to Chain 138, canonical->mirrored token mappings, and
    that the bridge holds MINTER_ROLE/BURNER_ROLE on every mirrored token.

    Args:
        chain_id, name, suffix, selector: one ACTIVE_CHAINS row.
        token_rows: token rows for this chain from load_token_rows().
        minter_role, burner_role: precomputed role hashes (cast_keccak).
    """
    rpc = env_first(RPC_CANDIDATES[chain_id])
    bridge = os.environ.get(f"CW_BRIDGE_{suffix}", "").strip()
    result: dict[str, Any] = {
        "chainId": chain_id,
        "network": name,
        "selector": selector,
        "bridge": bridge,
        "rpcConfigured": bool(rpc),
        "hasCode": False,
        "sendRouterReadable": False,
        "receiveRouterReadable": False,
        "feeTokenReadable": False,
        "tokenPairChecks": [],
        "roleChecks": [],
        "destination138": {},
        "passed": False,
        "errors": [],
    }
    if not rpc:
        result["errors"].append("RPC unset.")
        return result
    if not is_address(bridge):
        result["errors"].append(f"CW_BRIDGE_{suffix} is unset or invalid.")
        return result

    ok, code = cast_code(bridge, rpc)
    result["hasCode"] = ok and code not in ("", "0x")
    for field in ["sendRouter", "receiveRouter", "feeToken"]:
        ok, out = cast_call(bridge, f"{field}()(address)", [], rpc)
        # Bugfix: compute the last output line with an empty-output guard.
        # Previously `out.splitlines()[-1]` raised IndexError when the call
        # succeeded (ok=True) but produced no text.
        last_line = out.splitlines()[-1].strip() if out else ""
        result[f"{field}Readable"] = ok and is_address(last_line)
        result[field] = last_line if ok else ""

    # The return route back to Chain 138 must be configured on the L2 side.
    ok, out = cast_call(bridge, "destinations(uint64)((address,bool))", ["138"], rpc)
    result["destination138"] = {"raw": out, "configured": ok and "true" in out.lower()}

    for token in token_rows:
        ok, out = cast_call(bridge, "canonicalToMirrored(address)(address)", [token["canonical"]], rpc)
        mapped = ok and address_in_cast(out, token["mirrored"])
        result["tokenPairChecks"].append(
            {
                "token": token["key"],
                "canonical": token["canonical"],
                "expectedMirrored": token["mirrored"],
                "raw": out,
                "configured": mapped,
            }
        )
        # The bridge must be able to mint and burn each mirrored token.
        for role_name, role in [("MINTER_ROLE", minter_role), ("BURNER_ROLE", burner_role)]:
            ok_role, out_role = cast_call(
                token["mirrored"],
                "hasRole(bytes32,address)(bool)",
                [role, bridge],
                rpc,
            )
            result["roleChecks"].append(
                {
                    "token": token["key"],
                    "mirrored": token["mirrored"],
                    "role": role_name,
                    "holder": bridge,
                    "granted": ok_role and bool_from_cast(out_role),
                    "raw": out_role,
                }
            )

    result["passed"] = (
        result["hasCode"]
        and result["sendRouterReadable"]
        and result["receiveRouterReadable"]
        and result["destination138"]["configured"]
        and all(x["configured"] for x in result["tokenPairChecks"])
        and all(x["granted"] for x in result["roleChecks"])
    )
    return result
|
||||
|
||||
|
||||
def write_markdown(payload: dict[str, Any], path: Path) -> None:
    """Render the bridge-readiness *payload* as a Markdown report at *path*."""
    lines = [
        "# cW MultiToken Bridge E2E Readiness",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Scope: `{payload['scope']}`",
        f"- All active chains passed: `{payload['summary']['allActiveChainsPassed']}`",
        f"- L1 passed: `{payload['summary']['l1Passed']}`",
        f"- Chain pass count: `{payload['summary']['passedChainCount']} / {payload['summary']['activeChainCount']}`",
        "",
        "## Chain Status",
        "",
        "| Chain | Network | Passed | Bridge | Notes |",
        "|---:|---|---:|---|---|",
    ]
    for row in payload["chains"]:
        notes = []
        if row["errors"]:
            notes.extend(row["errors"])
        # Summarize failures compactly; lists are capped so one badly
        # configured chain cannot flood the table.
        failed_pairs = [x["token"] for x in row["tokenPairChecks"] if not x["configured"]]
        failed_roles = [f"{x['token']} {x['role']}" for x in row["roleChecks"] if not x["granted"]]
        if failed_pairs:
            notes.append("missing token pairs: " + ", ".join(failed_pairs[:6]))
        if failed_roles:
            notes.append("missing roles: " + ", ".join(failed_roles[:6]))
        if not row["destination138"].get("configured"):
            notes.append("destination 138 not configured")
        lines.append(
            f"| {row['chainId']} | {row['network']} | `{row['passed']}` | `{row['bridge'] or '<unset>'}` | {'; '.join(notes) or 'ok'} |"
        )
    lines.extend(["", "## L1", ""])
    l1 = payload["l1"]
    lines.append(f"- Bridge: `{l1['address']}`")
    lines.append(f"- Passed: `{l1['passed']}`")
    if l1["errors"]:
        lines.append(f"- Errors: `{'; '.join(l1['errors'])}`")
    missing = [f"{x['chainId']} {x['token']}" for x in l1["destinationChecks"] if not x["configured"]]
    lines.append(f"- Missing destination checks: `{', '.join(missing[:30]) if missing else 'none'}`")
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text("\n".join(lines) + "\n")
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: probe L1 and every active chain, write JSON + Markdown.

    Returns a process exit code; non-zero only with --strict when not all
    active chains (and L1) passed.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--full-family", action="store_true", help="Check every cW mapping, not only cWUSDT/cWUSDC canary routes.")
    parser.add_argument("--json-out", type=Path, default=DEFAULT_JSON)
    parser.add_argument("--md-out", type=Path, default=DEFAULT_MD)
    parser.add_argument("--strict", action="store_true")
    args = parser.parse_args()

    token_rows = load_token_rows(args.full_family)
    # Role hashes are chain-independent, so compute them once up front.
    minter_role = cast_keccak("MINTER_ROLE")
    burner_role = cast_keccak("BURNER_ROLE")
    l1_bridge = os.environ.get("CW_L1_BRIDGE_CHAIN138", "").strip()
    rpc_138 = env_first(["RPC_URL_138", "CHAIN138_RPC", "CHAIN138_RPC_URL", "RPC_URL"])
    l1 = check_l1(l1_bridge, rpc_138, token_rows)
    chains = [
        check_chain(chain_id, name, suffix, selector, token_rows.get(chain_id) or [], minter_role, burner_role)
        for chain_id, name, suffix, selector in ACTIVE_CHAINS
    ]
    passed = [row for row in chains if row["passed"]]
    payload = {
        "schema": "cw-multitoken-bridge-e2e-readiness/v1",
        "generatedAt": datetime.now(timezone.utc).isoformat(),
        "scope": "full-family" if args.full_family else "core-cwusdt-cwusdc",
        "summary": {
            # Both readiness flags currently require L1 plus every active
            # chain to pass.
            "readyForProduction": l1["passed"] and len(passed) == len(chains),
            "allActiveChainsPassed": l1["passed"] and len(passed) == len(chains),
            "l1Passed": l1["passed"],
            "activeChainCount": len(chains),
            "passedChainCount": len(passed),
            "failedChainIds": [row["chainId"] for row in chains if not row["passed"]],
        },
        "roles": {"MINTER_ROLE": minter_role, "BURNER_ROLE": burner_role},
        "l1": l1,
        "chains": chains,
    }
    args.json_out.parent.mkdir(parents=True, exist_ok=True)
    args.json_out.write_text(json.dumps(payload, indent=2) + "\n")
    write_markdown(payload, args.md_out)
    print(f"Wrote {rel(args.json_out)}")
    print(f"Wrote {rel(args.md_out)}")
    print(f"All active chains passed: {payload['summary']['allActiveChainsPassed']}")
    if payload["summary"]["failedChainIds"]:
        print("Failed chains: " + ", ".join(str(x) for x in payload["summary"]["failedChainIds"]))
    # --strict makes incomplete readiness fail the process for CI use.
    if args.strict and not payload["summary"]["allActiveChainsPassed"]:
        return 1
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
11
scripts/verify/check-cw-multitoken-bridge-e2e-readiness.sh
Executable file
11
scripts/verify/check-cw-multitoken-bridge-e2e-readiness.sh
Executable file
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env bash
# Read-only evidence producer for CWMultiTokenBridgeL1/L2 route, role, and config readiness.
set -euo pipefail

# Repository root is two directories above this script.
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$ROOT"

# Best-effort: load bridge/RPC env vars from the project env loader when it
# exists; failures are ignored so the checker can still report "unset".
# shellcheck disable=SC1091
source scripts/lib/load-project-env.sh >/dev/null 2>&1 || true

# Replace this shell with the Python implementation; all CLI args pass through.
exec python3 scripts/verify/check-cw-multitoken-bridge-e2e-readiness.py "$@"
|
||||
145
scripts/verify/check-cwusdc-etherscan-prereq-urls.sh
Executable file
145
scripts/verify/check-cwusdc-etherscan-prereq-urls.sh
Executable file
@@ -0,0 +1,145 @@
|
||||
#!/usr/bin/env bash
# Public URL prereq checks for cWUSDC Etherscan token profile (d-bis.org surfaces).
# See: docs/04-configuration/etherscan/CWUSDC_ETHERSCAN_E2E_RECOMMENDATIONS.md
# Usage: bash scripts/verify/check-cwusdc-etherscan-prereq-urls.sh [--json-out PATH] [--md-out PATH] [--timeout SEC] [--retries N]
# Exit: 0 if every URL returns HTTP 200; 1 otherwise.

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT"

# CLI flags override the CWUSDC_PROVIDER_URL_* environment defaults.
JSON_OUT=""
MD_OUT=""
TIMEOUT="${CWUSDC_PROVIDER_URL_TIMEOUT:-15}"
RETRIES="${CWUSDC_PROVIDER_URL_RETRIES:-1}"
while [[ $# -gt 0 ]]; do
  case "$1" in
    --json-out)
      [[ $# -ge 2 ]] || { echo "Missing value for --json-out" >&2; exit 1; }
      JSON_OUT="$2"
      shift 2
      ;;
    --md-out)
      [[ $# -ge 2 ]] || { echo "Missing value for --md-out" >&2; exit 1; }
      MD_OUT="$2"
      shift 2
      ;;
    --timeout)
      [[ $# -ge 2 ]] || { echo "Missing value for --timeout" >&2; exit 1; }
      TIMEOUT="$2"
      shift 2
      ;;
    --retries)
      [[ $# -ge 2 ]] || { echo "Missing value for --retries" >&2; exit 1; }
      RETRIES="$2"
      shift 2
      ;;
    -h|--help)
      # The usage text is the first five comment lines of this file.
      sed -n '1,5p' "$0"
      exit 0
      ;;
    *)
      echo "Unknown argument: $1" >&2
      exit 1
      ;;
  esac
done

# Per-URL results are buffered as TSV and post-processed by the inline
# Python block below when --json-out/--md-out are requested.
TMP_TSV="$(mktemp)"
trap 'rm -f "$TMP_TSV"' EXIT

FAIL=0
while IFS= read -r url; do
  # Skip blank lines and comments in the heredoc URL list.
  [[ -z "$url" || "$url" =~ ^# ]] && continue
  code="000"
  curl_status=0
  attempts=0
  max_attempts=$((RETRIES + 1))
  while [[ "$attempts" -lt "$max_attempts" ]]; do
    attempts=$((attempts + 1))
    curl_status=0
    # -L follows redirects; only the final HTTP status code is recorded.
    code=$(curl -L --max-time "$TIMEOUT" -o /dev/null -s -w "%{http_code}" "$url") || curl_status=$?
    [[ -n "$code" ]] || code="000"
    [[ "$code" == "200" ]] && break
    [[ "$attempts" -lt "$max_attempts" ]] && sleep 1
  done
  if [[ "$code" != "200" ]]; then
    echo "FAIL $code $url (attempts=$attempts curl_status=$curl_status)" >&2
    FAIL=1
    printf '%s\t%s\t%s\t%s\t%s\n' "$url" "$code" "false" "$attempts" "$curl_status" >> "$TMP_TSV"
  else
    echo "OK 200 $url (attempts=$attempts)"
    printf '%s\t%s\t%s\t%s\t%s\n' "$url" "$code" "true" "$attempts" "$curl_status" >> "$TMP_TSV"
  fi
done <<'URLS'
https://d-bis.org/
https://d-bis.org/contact
https://d-bis.org/leadership
https://d-bis.org/gru/tokens
https://d-bis.org/security
https://d-bis.org/.well-known/trust.json
https://d-bis.org/brand-assets
https://d-bis.org/tokens/cwusdc.svg
URLS

if [[ -n "$JSON_OUT" || -n "$MD_OUT" ]]; then
  python3 - "$TMP_TSV" "$JSON_OUT" "$MD_OUT" <<'PY'
import json
import sys
from datetime import datetime, timezone
from pathlib import Path

# argv: TSV path, JSON output path ("" to skip), Markdown output path ("" to skip).
tsv = Path(sys.argv[1])
json_out = Path(sys.argv[2]) if sys.argv[2] else None
md_out = Path(sys.argv[3]) if sys.argv[3] else None

checks = []
for line in tsv.read_text().splitlines():
    url, status, passed, attempts, curl_status = line.split("\t")
    checks.append({
        "url": url,
        # Keep non-numeric statuses (e.g. curl failures) as raw strings.
        "status": int(status) if status.isdigit() else status,
        "passed": passed == "true",
        "attempts": int(attempts),
        "curlStatus": int(curl_status),
    })

payload = {
    "schema": "cwusdc-etherscan-prereq-urls/v1",
    "generatedAt": datetime.now(timezone.utc).isoformat(),
    "summary": {
        "allPassed": all(check["passed"] for check in checks),
        "requiredCount": len(checks),
        "passedCount": sum(1 for check in checks if check["passed"]),
        "failedUrls": [check["url"] for check in checks if not check["passed"]],
    },
    "checks": checks,
}

if json_out:
    json_out.parent.mkdir(parents=True, exist_ok=True)
    json_out.write_text(json.dumps(payload, indent=2) + "\n")
    print(f"Wrote {json_out}")

if md_out:
    md_out.parent.mkdir(parents=True, exist_ok=True)
    lines = [
        "# cWUSDC Etherscan Prerequisite URL Evidence",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- All passed: `{payload['summary']['allPassed']}`",
        f"- Passed: `{payload['summary']['passedCount']} / {payload['summary']['requiredCount']}`",
        "",
        "| URL | Passed | HTTP | Attempts | curl status |",
        "|---|---:|---:|---:|---:|",
    ]
    for check in checks:
        lines.append(f"| {check['url']} | `{check['passed']}` | `{check['status']}` | `{check['attempts']}` | `{check['curlStatus']}` |")
    md_out.write_text("\n".join(lines) + "\n")
    print(f"Wrote {md_out}")
PY
fi

exit "$FAIL"
|
||||
265
scripts/verify/check-cwusdc-external-trackers-live.py
Executable file
265
scripts/verify/check-cwusdc-external-trackers-live.py
Executable file
@@ -0,0 +1,265 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Probe cWUSDC public tracker/indexing surfaces and write evidence JSON."""
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import re
|
||||
import time
|
||||
import urllib.error
|
||||
import urllib.request
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
DEFAULT_JSON = ROOT / "reports" / "status" / "cwusdc-external-trackers-live-latest.json"
|
||||
DEFAULT_MD = ROOT / "reports" / "status" / "cwusdc-external-trackers-live-latest.md"
|
||||
|
||||
# cWUSDC token contract on Ethereum mainnet (lowercase hex).
CWUSDC = "0x2de5f116bfce3d0f922d9c8351e0c5fc24b9284a"
# DEX pool addresses used as probes; per the probe ids below, index 0 is the
# "v3" pool and index 1 the "v2" pool.
POOLS = [
    "0x1cf2e685682c7f7bef508f0af15dfb5cdda01ee3",
    "0xc28706f899266b36bc43cc072b3a921bdf2c48d9",
]

# Probe specs. Each entry carries:
#   id / kind / url - identity and grouping for the evidence report
#   required        - whether a failure should count against the gate
# plus one validation hint: mustContain (substrings), jsonTokenKey (key in a
# JSON object), jsonRootMinLength (minimum JSON array length), or
# jsonPathPresent (nested key path; presumably consumed by evaluate() via
# json_path_present — confirm against the evaluator below this block).
URLS = [
    {
        "id": "etherscan_token_page",
        "kind": "explorer",
        "url": f"https://etherscan.io/token/{CWUSDC}",
        "required": True,
        "mustContain": ["cWUSDC", "Contract"],
    },
    {
        "id": "coingecko_token_price_api",
        "kind": "listing_api",
        "url": f"https://api.coingecko.com/api/v3/simple/token_price/ethereum?contract_addresses={CWUSDC}&vs_currencies=usd&include_market_cap=true&include_24hr_vol=true",
        "required": True,
        "jsonTokenKey": CWUSDC,
    },
    {
        "id": "coinmarketcap_dex_token",
        "kind": "dex_index",
        "url": f"https://dex.coinmarketcap.com/token/ethereum/{CWUSDC}/",
        "required": True,
        "mustContain": ["cWUSDC", "ethereum"],
    },
    {
        "id": "dexscreener_token_pairs_v1",
        "kind": "dex_index",
        "url": f"https://api.dexscreener.com/token-pairs/v1/ethereum/{CWUSDC}",
        "required": True,
        "jsonRootMinLength": 1,
    },
    {
        "id": "dexscreener_tokens_v1",
        "kind": "dex_index",
        "url": f"https://api.dexscreener.com/tokens/v1/ethereum/{CWUSDC}",
        "required": True,
        "jsonRootMinLength": 1,
    },
    # Legacy pair endpoints and the orders profile are advisory only
    # (required=False): useful signal, but absence does not block the gate.
    {
        "id": "dexscreener_v3_pair_api_legacy",
        "kind": "dex_index",
        "url": f"https://api.dexscreener.com/latest/dex/pairs/ethereum/{POOLS[0]}",
        "required": False,
        "jsonPathPresent": ["pairs"],
    },
    {
        "id": "dexscreener_v2_pair_api_legacy",
        "kind": "dex_index",
        "url": f"https://api.dexscreener.com/latest/dex/pairs/ethereum/{POOLS[1]}",
        "required": False,
        "jsonPathPresent": ["pairs"],
    },
    {
        "id": "dexscreener_orders_profile",
        "kind": "dex_profile",
        "url": f"https://api.dexscreener.com/orders/v1/ethereum/{CWUSDC}",
        "required": False,
        "jsonPathPresent": ["orders"],
    },
    {
        "id": "geckoterminal_v3_pool",
        "kind": "dex_index",
        "url": f"https://api.geckoterminal.com/api/v2/networks/eth/pools/{POOLS[0]}",
        "required": True,
        "jsonPathPresent": ["data"],
    },
    {
        "id": "geckoterminal_v2_pool",
        "kind": "dex_index",
        "url": f"https://api.geckoterminal.com/api/v2/networks/eth/pools/{POOLS[1]}",
        "required": True,
        "jsonPathPresent": ["data"],
    },
]
|
||||
|
||||
|
||||
def rel(path: Path) -> str:
    """Return *path* as a repo-root-relative string when possible, else verbatim."""
    if path.is_relative_to(ROOT):
        return str(path.relative_to(ROOT))
    return str(path)
|
||||
|
||||
|
||||
def fetch(url: str, timeout: int) -> dict[str, Any]:
    """GET *url* and return a normalized evidence record.

    Never raises: HTTP errors and network failures are folded into the returned
    dict (keys: ok/status/elapsedMs/contentType/text/error) so callers can
    record them as evidence.
    """
    request = urllib.request.Request(
        url,
        headers={
            "User-Agent": "Mozilla/5.0 DBIS-readiness-check/1.0",
            "Accept": "application/json,text/html;q=0.9,*/*;q=0.8",
        },
    )
    t0 = time.time()

    def _elapsed_ms() -> int:
        # Wall-clock latency of the whole attempt, in milliseconds.
        return int((time.time() - t0) * 1000)

    try:
        with urllib.request.urlopen(request, timeout=timeout) as response:
            # Cap the body at 512 KB so a huge page cannot bloat the evidence file.
            payload = response.read(512_000).decode("utf-8", errors="replace")
            return {
                "ok": 200 <= response.status < 300,
                "status": response.status,
                "elapsedMs": _elapsed_ms(),
                "contentType": response.headers.get("content-type", ""),
                "text": payload,
                "error": "",
            }
    except urllib.error.HTTPError as exc:
        # HTTP error responses may still carry a useful body; keep a truncated copy.
        body = exc.read(64_000).decode("utf-8", errors="replace") if exc.fp else ""
        return {
            "ok": False,
            "status": exc.code,
            "elapsedMs": _elapsed_ms(),
            "contentType": exc.headers.get("content-type", "") if exc.headers else "",
            "text": body,
            "error": str(exc),
        }
    except Exception as exc:  # noqa: BLE001 - evidence should capture network errors
        return {
            "ok": False,
            "status": None,
            "elapsedMs": _elapsed_ms(),
            "contentType": "",
            "text": "",
            "error": str(exc),
        }
|
||||
|
||||
|
||||
def json_path_present(data: Any, path: list[str]) -> bool:
    """Walk *path* through nested dicts; a non-empty list or non-None terminal counts as present."""
    node = data
    for key in path:
        if not isinstance(node, dict):
            # Cannot descend further: the path is absent.
            return False
        node = node.get(key)
    if isinstance(node, list):
        return bool(node)
    return node is not None
|
||||
|
||||
|
||||
def evaluate(spec: dict[str, Any], timeout: int) -> dict[str, Any]:
    """Fetch one tracker URL and apply the spec's content checks.

    Returns an evidence dict describing the HTTP result plus a `passed` verdict.
    `passed` starts from HTTP success and is AND-ed with every configured check
    (mustContain / jsonTokenKey / jsonPathPresent / jsonRootMinLength); each
    check appends a human-readable line to `details`.
    """
    raw = fetch(spec["url"], timeout)
    # The body is removed from the raw record so it is not persisted verbatim.
    text = raw.pop("text")
    evidence: dict[str, Any] = {
        "id": spec["id"],
        "kind": spec["kind"],
        "url": spec["url"],
        "required": spec["required"],
        "httpOk": raw["ok"],
        "status": raw["status"],
        "elapsedMs": raw["elapsedMs"],
        "contentType": raw["contentType"],
        "passed": False,
        "error": raw["error"],
        "details": [],
    }
    data: Any = None
    # Parse JSON when the content type says so, or the body looks like JSON.
    if "json" in raw["contentType"] or text.strip().startswith(("{", "[")):
        try:
            data = json.loads(text)
            # Only embed a preview for small payloads to keep the report compact.
            evidence["jsonPreview"] = data if len(text) < 5000 else "json-too-large"
        except json.JSONDecodeError as exc:
            evidence["details"].append(f"JSON parse failed: {exc}")

    passed = raw["ok"]
    # Case-insensitive, literal substring checks against the raw body.
    for needle in spec.get("mustContain") or []:
        found = re.search(re.escape(needle), text, flags=re.I) is not None
        evidence["details"].append(f"contains `{needle}`: {found}")
        passed = passed and found
    if spec.get("jsonTokenKey"):
        # Key membership is checked case-insensitively against the top-level object.
        token_key = spec["jsonTokenKey"].lower()
        found = isinstance(data, dict) and token_key in {str(k).lower(): v for k, v in data.items()}
        evidence["details"].append(f"json token key `{token_key}` present: {found}")
        passed = passed and found
    for path in spec.get("jsonPathPresent") or []:
        # A bare string is treated as a single-segment path.
        present = json_path_present(data, path if isinstance(path, list) else [path])
        evidence["details"].append(f"json path `{'.'.join(path) if isinstance(path, list) else path}` present: {present}")
        passed = passed and present
    if spec.get("jsonRootMinLength") is not None:
        # Requires the JSON root to be an array of at least min_len entries.
        min_len = int(spec["jsonRootMinLength"])
        found = isinstance(data, list) and len(data) >= min_len
        evidence["details"].append(f"json root array length >= {min_len}: {found}")
        passed = passed and found

    evidence["passed"] = bool(passed)
    return evidence
|
||||
|
||||
|
||||
def write_markdown(payload: dict[str, Any], path: Path) -> None:
    """Render the tracker-evidence payload as a Markdown summary table at *path*."""
    summary = payload["summary"]
    header = [
        "# cWUSDC External Trackers Live Evidence",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- All trackers live: `{summary['allTrackersLive']}`",
        f"- Required passed: `{summary['requiredPassedCount']} / {summary['requiredCount']}`",
        "",
        "| Surface | Passed | HTTP | URL | Details |",
        "|---|---:|---:|---|---|",
    ]
    rows = []
    for check in payload["checks"]:
        # Fall back to the error message (or a dash) when no detail lines exist.
        detail_text = "; ".join(check["details"]) or check["error"] or "-"
        rows.append(
            f"| {check['id']} | `{check['passed']}` | `{check['status']}` | {check['url']} | {detail_text} |"
        )
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text("\n".join(header + rows) + "\n")
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: probe every tracker URL in URLS and write JSON + Markdown evidence.

    Returns 0 normally; returns 1 only under --strict when a required tracker
    check failed.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    # Output paths default to the module-level report locations.
    parser.add_argument("--json-out", type=Path, default=DEFAULT_JSON)
    parser.add_argument("--md-out", type=Path, default=DEFAULT_MD)
    parser.add_argument("--timeout", type=int, default=20)
    parser.add_argument("--strict", action="store_true")
    args = parser.parse_args()

    # Probe every configured surface; each result carries its own pass verdict.
    checks = [evaluate(spec, args.timeout) for spec in URLS]
    required = [c for c in checks if c["required"]]
    required_passed = [c for c in required if c["passed"]]
    payload = {
        "schema": "cwusdc-external-trackers-live/v1",
        "generatedAt": datetime.now(timezone.utc).isoformat(),
        "token": {"chainId": 1, "network": "ethereum", "address": CWUSDC, "symbol": "cWUSDC"},
        "summary": {
            # Only required checks gate the overall verdict; optional ones are advisory.
            "allTrackersLive": len(required_passed) == len(required),
            "readyForEtherscanUsdValue": len(required_passed) == len(required),
            "requiredCount": len(required),
            "requiredPassedCount": len(required_passed),
            "failedRequiredIds": [c["id"] for c in required if not c["passed"]],
        },
        "checks": checks,
    }
    args.json_out.parent.mkdir(parents=True, exist_ok=True)
    args.json_out.write_text(json.dumps(payload, indent=2) + "\n")
    write_markdown(payload, args.md_out)
    print(f"Wrote {rel(args.json_out)}")
    print(f"Wrote {rel(args.md_out)}")
    print(f"All trackers live: {payload['summary']['allTrackersLive']}")
    if payload["summary"]["failedRequiredIds"]:
        print("Failed required trackers: " + ", ".join(payload["summary"]["failedRequiredIds"]))
    # Non-strict runs always exit 0 so evidence collection never breaks pipelines.
    if args.strict and not payload["summary"]["allTrackersLive"]:
        return 1
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
8
scripts/verify/check-cwusdc-external-trackers-live.sh
Executable file
8
scripts/verify/check-cwusdc-external-trackers-live.sh
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/usr/bin/env bash
# Probe public cWUSDC tracker/indexing surfaces and write readiness evidence.
set -euo pipefail

# Resolve the repo root (two levels above this script) and run from there so the
# Python tool's relative report paths resolve consistently.
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$ROOT"

# Thin wrapper: delegate to the Python implementation, forwarding all CLI args.
exec python3 scripts/verify/check-cwusdc-external-trackers-live.py "$@"
|
||||
162
scripts/verify/check-cwusdc-institutional-doc-links.py
Normal file
162
scripts/verify/check-cwusdc-institutional-doc-links.py
Normal file
@@ -0,0 +1,162 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Lightweight link check for the cWUSDC institutional evidence packet."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import datetime as dt
|
||||
import json
|
||||
import re
|
||||
import urllib.request
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
REPORT_JSON = ROOT / "reports" / "status" / "cwusdc-institutional-doc-link-check-latest.json"
|
||||
REPORT_MD = ROOT / "reports" / "status" / "cwusdc-institutional-doc-link-check-latest.md"
|
||||
|
||||
DOCS = [
|
||||
"docs/04-configuration/CWUSDC_PROVIDER_SUBMISSION_PACKET.md",
|
||||
"docs/04-configuration/etherscan/CWUSDC_EVIDENCE_BUNDLE_INDEX.md",
|
||||
"docs/04-configuration/etherscan/CWUSDC_SUPPLY_AND_CIRCULATING_METHODOLOGY.md",
|
||||
"docs/04-configuration/etherscan/CWUSDC_SECURITY_AND_AUDIT_DISCLOSURE.md",
|
||||
"docs/04-configuration/etherscan/CWUSDC_PROVIDER_RESPONSE_TRACKER.md",
|
||||
"docs/04-configuration/etherscan/CWUSDC_LIQUIDITY_READINESS_NO_BROADCAST_PLAN.md",
|
||||
"docs/04-configuration/etherscan/CWUSDC_MAINNET_ETHERSCAN_PROFILE_PACKET.md",
|
||||
"docs/04-configuration/etherscan/CWUSDC_ETHERSCAN_E2E_RECOMMENDATIONS.md",
|
||||
"docs/04-configuration/etherscan/CWUSDC_ETHERSCAN_VALUE_EXECUTION_PLAN.md",
|
||||
"docs/04-configuration/etherscan/CWUSDC_ETHERSCAN_BRIDGE_CROSSCHAIN_LAYER_MAP.md",
|
||||
"docs/04-configuration/coingecko/CWUSDC_MAINNET_TRACKER_SUBMISSION_PACKET.md",
|
||||
"docs/04-configuration/dexscreener/CWUSDC_DEXSCREENER_INDEXING_AND_PROFILE_PACKET_20260509.md",
|
||||
"docs/04-configuration/metamask/METAMASK_ASSET_PRICE_PROVIDER_SUBMISSION_MATRIX.md",
|
||||
]
|
||||
|
||||
LINK_RE = re.compile(r"(?<!!)\[[^\]]+\]\(([^)]+)\)")
|
||||
BARE_URL_RE = re.compile(r"(?<![<`])https?://[^\s)`]+")
|
||||
|
||||
|
||||
def strip_fragment(target: str) -> str:
    """Drop a trailing ``#fragment`` from a link target, if any."""
    base, _sep, _fragment = target.partition("#")
    return base
|
||||
|
||||
|
||||
def is_skipped(target: str) -> bool:
    """True for link targets that should not be checked (empty, anchors, mailto, app/plugin URIs)."""
    if not target:
        return True
    return target.startswith(("#", "mailto:", "app://", "plugin://"))
|
||||
|
||||
|
||||
def resolve_local(source: Path, target: str) -> Path:
    """Resolve a doc link *target* against *source*, pinned under the repo root.

    Absolute targets are treated as repo-rooted. Relative targets resolve
    against the source document's directory; when the resolved path escapes the
    repo root, the fallback is ROOT joined with the raw target.
    """
    cleaned = urllib.request.url2pathname(strip_fragment(target))
    if cleaned.startswith("/"):
        return ROOT / cleaned.lstrip("/")
    resolved = (source.parent / cleaned).resolve()
    if resolved.is_relative_to(ROOT):
        return resolved
    return ROOT / cleaned
|
||||
|
||||
|
||||
def http_status(url: str, timeout: int = 15) -> dict[str, Any]:
    """Probe *url* with a GET and report reachability without raising.

    Redirect-range statuses (<400) count as ok; any exception is captured in
    the returned dict instead of propagating.
    """
    probe = urllib.request.Request(url, method="GET", headers={"User-Agent": "dbis-cwusdc-link-check/1.0"})
    try:
        with urllib.request.urlopen(probe, timeout=timeout) as resp:
            return {"ok": 200 <= resp.status < 400, "status": resp.status}
    except Exception as exc:  # noqa: BLE001 - report exact probe failure.
        return {"ok": False, "error": str(exc)}
|
||||
|
||||
|
||||
def collect_links(path: Path) -> list[str]:
    """Extract markdown link targets and bare URLs from *path*, deduplicated and sorted."""
    content = path.read_text()
    found = {match.group(1).strip() for match in LINK_RE.finditer(content)}
    # Bare URLs may end a sentence; strip trailing punctuation.
    found.update(match.group(0).strip(".,") for match in BARE_URL_RE.finditer(content))
    return sorted(found)
|
||||
|
||||
|
||||
def build(args: argparse.Namespace) -> dict[str, Any]:
    """Collect and validate every link in the DOCS packet; return the report payload.

    Local links are checked for existence on disk. HTTP(S) links default to
    ok=True and are only probed when --check-http was passed. A missing source
    document is itself recorded as a failure.
    """
    records: list[dict[str, Any]] = []
    for doc in DOCS:
        source = ROOT / doc
        if not source.exists():
            # The packet document itself is gone: record and move on.
            records.append({"source": doc, "target": doc, "type": "source", "ok": False, "error": "source missing"})
            continue
        for target in collect_links(source):
            if is_skipped(target):
                continue
            if target.startswith("http://") or target.startswith("https://"):
                # External links pass by default; probing is opt-in via --check-http.
                result = {"source": doc, "target": target, "type": "http", "ok": True, "checked": False}
                if args.check_http:
                    result.update(http_status(target, timeout=args.timeout))
                    result["checked"] = True
                records.append(result)
            else:
                resolved = resolve_local(source, target)
                records.append(
                    {
                        "source": doc,
                        "target": target,
                        "type": "local",
                        # NOTE(review): relative_to assumes an existing resolved path is
                        # under ROOT; resolve_local's fallback can in principle escape via
                        # a "../" target — confirm targets stay repo-relative.
                        "resolved": str(resolved.relative_to(ROOT)) if resolved.exists() else str(resolved),
                        "ok": resolved.exists(),
                    }
                )
    failures = [record for record in records if not record.get("ok")]
    return {
        "schema": "cwusdc-institutional-doc-link-check/v1",
        # dt.UTC requires Python 3.11+.
        "generatedAt": dt.datetime.now(dt.UTC).isoformat().replace("+00:00", "Z"),
        "checkedHttp": args.check_http,
        "sourceCount": len(DOCS),
        "linkCount": len(records),
        "failureCount": len(failures),
        "status": "pass" if not failures else "fail",
        "failures": failures,
        "records": records,
    }
|
||||
|
||||
|
||||
def write_md(payload: dict[str, Any], path: Path) -> None:
    """Render the link-check payload as a Markdown report at *path*."""
    report = [
        "# cWUSDC Institutional Doc Link Check",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Status: `{payload['status']}`",
        f"- Sources: `{payload['sourceCount']}`",
        f"- Links checked: `{payload['linkCount']}`",
        f"- HTTP checked: `{payload['checkedHttp']}`",
        f"- Failures: `{payload['failureCount']}`",
        "",
    ]
    failures = payload["failures"]
    if not failures:
        report.append("No broken institutional packet links were found.")
    else:
        report.extend(["## Failures", "", "| Source | Target | Error |", "|---|---|---|"])
        for entry in failures:
            # Local-link failures carry `resolved`; HTTP failures carry `error`.
            reason = entry.get("error", entry.get("resolved", "missing"))
            report.append(f"| `{entry.get('source')}` | `{entry.get('target')}` | `{reason}` |")
    path.write_text("\n".join(report) + "\n")
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: run the packet link check and write JSON + Markdown reports.

    Exit status is 0 when the payload status is "pass", 1 otherwise.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--check-http", action="store_true", help="Probe public HTTP(S) links too.")
    parser.add_argument("--timeout", type=int, default=15)
    # Defaults are the module-level report paths under reports/status.
    parser.add_argument("--json-out", type=Path, default=REPORT_JSON)
    parser.add_argument("--md-out", type=Path, default=REPORT_MD)
    args = parser.parse_args()
    payload = build(args)
    args.json_out.parent.mkdir(parents=True, exist_ok=True)
    args.json_out.write_text(json.dumps(payload, indent=2) + "\n")
    write_md(payload, args.md_out)
    # NOTE(review): relative_to raises ValueError if a custom --json-out/--md-out
    # points outside the repo root — confirm callers only pass repo paths.
    print(f"Wrote {args.json_out.relative_to(ROOT)}")
    print(f"Wrote {args.md_out.relative_to(ROOT)}")
    return 0 if payload["status"] == "pass" else 1


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
71
scripts/verify/check-cwusdc-provider-readiness-ci.sh
Executable file
71
scripts/verify/check-cwusdc-provider-readiness-ci.sh
Executable file
@@ -0,0 +1,71 @@
|
||||
#!/usr/bin/env bash
# CI-safe cWUSDC provider readiness gate.
# Fails only on repo-controlled prerequisites. External provider blockers are reported, not gated.

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT"

JSON_OUT="reports/status/cwusdc-provider-readiness-ci-latest.json"
MD_OUT="reports/status/cwusdc-provider-readiness-ci-latest.md"
HANDOFF_JSON="reports/status/cwusdc-provider-handoff-latest.json"
DOC_LINK_JSON="reports/status/cwusdc-institutional-doc-link-check-latest.json"

# Capture the wrapper's exit status instead of aborting so the gate below can
# still produce its payload; the status is re-raised at the end.
WRAPPER_STATUS=0
bash "$SCRIPT_DIR/run-cwusdc-provider-nonmanual-checks.sh" --strict-repo || WRAPPER_STATUS=$?
# Bug fix: under `set -e` a failing doc-link check used to abort the script
# here, before the CI payload/report was ever written. The checker writes its
# JSON report before exiting non-zero, and the embedded gate below re-checks
# that report's status and fails the job — so tolerate the exit status here.
python3 "$SCRIPT_DIR/check-cwusdc-institutional-doc-links.py" || true

# Merge the handoff summary and the doc-link report into one CI verdict.
python3 - "$HANDOFF_JSON" "$DOC_LINK_JSON" "$JSON_OUT" "$MD_OUT" <<'PY'
import json
import sys
from datetime import datetime, timezone
from pathlib import Path

handoff_path = Path(sys.argv[1])
doc_link_path = Path(sys.argv[2])
json_out = Path(sys.argv[3])
md_out = Path(sys.argv[4])
handoff = json.loads(handoff_path.read_text())
doc_links = json.loads(doc_link_path.read_text())
repo_ok = bool(handoff["summary"]["repoControlledPrereqsPassed"])
doc_links_ok = doc_links.get("status") == "pass"
# Only repo-controlled blockers gate CI; everything else is advisory.
external_blockers = [b for b in handoff.get("blockers", []) if b.get("type") != "repo_controlled"]
payload = {
    "schema": "cwusdc-provider-readiness-ci/v1",
    "generatedAt": datetime.now(timezone.utc).isoformat(),
    "status": "success" if repo_ok and doc_links_ok else "failed",
    "repoControlledPrereqsPassed": repo_ok and doc_links_ok,
    "baseRepoControlledPrereqsPassed": repo_ok,
    "institutionalDocLinksPassed": doc_links_ok,
    "institutionalDocLinksReport": str(doc_link_path),
    "externalBlockersAdvisoryCount": len(external_blockers),
    "externalBlockersAdvisory": external_blockers,
    "handoffReport": str(handoff_path),
}
json_out.parent.mkdir(parents=True, exist_ok=True)
json_out.write_text(json.dumps(payload, indent=2) + "\n")
lines = [
    "# cWUSDC Provider Readiness CI",
    "",
    f"- Generated: `{payload['generatedAt']}`",
    f"- Status: `{payload['status']}`",
    f"- Repo-controlled prerequisites passed: `{payload['repoControlledPrereqsPassed']}`",
    f"- Base provider prerequisites passed: `{repo_ok}`",
    f"- Institutional doc links passed: `{doc_links_ok}`",
    f"- External blockers advisory count: `{len(external_blockers)}`",
    f"- Handoff report: `{handoff_path}`",
    "",
    "External provider blockers are advisory in this CI gate. They require provider acceptance or operator action and should not fail repo-controlled CI.",
]
md_out.write_text("\n".join(lines) + "\n")
print(f"Wrote {json_out}")
print(f"Wrote {md_out}")
# Gate: fail only on repo-controlled prerequisites or broken packet links.
if not repo_ok:
    raise SystemExit(1)
if not doc_links_ok:
    raise SystemExit(1)
PY

# Propagate the non-manual-checks wrapper status last.
exit "$WRAPPER_STATUS"
|
||||
116
scripts/verify/check-engine-x-mev-defense-readiness.sh
Executable file
116
scripts/verify/check-engine-x-mev-defense-readiness.sh
Executable file
@@ -0,0 +1,116 @@
|
||||
#!/usr/bin/env bash
# Report whether Engine X sensitive broadcasts can use a private/protected
# transaction RPC, and write JSON + Markdown readiness evidence.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"

# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/load-project-env.sh
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh"
# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/mev-protection.sh
source "${PROJECT_ROOT}/scripts/lib/mev-protection.sh"

# All paths below are overridable via environment for testing/CI.
POLICY_PATH="${POLICY_PATH:-config/extraction/mainnet-cwusdc-usdc-support-policy.json}"
OUT_JSON="${OUT_JSON:-reports/status/engine-x-mev-defense-readiness-latest.json}"
OUT_MD="${OUT_MD:-reports/status/engine-x-mev-defense-readiness-latest.md}"
mkdir -p "$(dirname "${OUT_JSON}")"

# mev_* helpers are provided by scripts/lib/mev-protection.sh (sourced above).
RPC_LABEL="$(mev_write_rpc_label)"
PRIVATE_KEY_NAME="$(mev_private_rpc_key 2>/dev/null || true)"
HAS_PRIVATE_RPC=0
[[ -n "${PRIVATE_KEY_NAME}" ]] && HAS_PRIVATE_RPC=1
ALLOW_PUBLIC="${ENGINE_X_ALLOW_PUBLIC_BROADCAST:-0}"
MEV_ENABLED="${ENGINE_X_MEV_PROTECTION:-1}"
READY=0
STATUS="blocked"
# Readiness decision, in precedence order:
#   guard disabled -> private RPC configured (only "ready" state) -> explicit
#   public-broadcast override -> otherwise blocked.
if [[ "${MEV_ENABLED}" != "1" ]]; then
    STATUS="disabled_by_operator"
elif [[ "${HAS_PRIVATE_RPC}" == "1" ]]; then
    READY=1
    STATUS="ready"
elif [[ "${ALLOW_PUBLIC}" == "1" ]]; then
    STATUS="public_broadcast_override"
fi

# Hand the decision plus policy context to Python to render the reports.
python3 - "${POLICY_PATH}" "${OUT_JSON}" "${OUT_MD}" "${READY}" "${STATUS}" "${RPC_LABEL}" "${PRIVATE_KEY_NAME:-}" "${MEV_ENABLED}" "${ALLOW_PUBLIC}" <<'PY'
import json
import sys
from datetime import datetime, timezone
from pathlib import Path

# All nine positional args arrive as strings; "1"/"0" flags are compared below.
policy_path, out_json, out_md, ready, status, rpc_label, private_key_name, mev_enabled, allow_public = sys.argv[1:]
policy = json.loads(Path(policy_path).read_text())
mev_policy = policy.get("mevDefense", {})
surfaces = policy.get("quoteDefenseSurfaces", [])
payload = {
    "schema": "engine-x-mev-defense-readiness/v1",
    "generatedAt": datetime.now(timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z"),
    "ready": ready == "1",
    "status": status,
    "policyPath": policy_path,
    "configuredWriteRpcLabel": rpc_label,
    "configuredPrivateRpcEnvKey": private_key_name or None,
    "mevProtectionEnabled": mev_enabled == "1",
    "publicBroadcastOverride": allow_public == "1",
    # A surface is "sensitive" when its role mentions quoting or its venue is
    # one of the known on-chain pool types.
    "sensitiveSurfaces": [
        {
            "id": surface.get("id"),
            "venue": surface.get("venue"),
            "role": surface.get("role"),
            "poolAddress": surface.get("poolAddress"),
            "defenseMode": surface.get("defenseMode"),
        }
        for surface in surfaces
        if "quote" in (surface.get("role") or "") or surface.get("venue") in {"uniswap_v2_pair", "uniswap_v3_pool", "dodo_pmm"}
    ],
    "policy": mev_policy,
    "blockers": [],
    "operatorEnvironment": {
        "acceptedPrivateRpcEnvKeys": [
            "ENGINE_X_PRIVATE_TX_RPC",
            "MEV_BLOCKER_RPC_URL",
            "FLASHBOTS_RPC_URL",
            "BLOXROUTE_RPC_URL",
            "BLINK_RPC_URL",
        ],
        "publicOverrideEnvKey": "ENGINE_X_ALLOW_PUBLIC_BROADCAST",
        "disableGuardEnvKey": "ENGINE_X_MEV_PROTECTION",
    },
}
# Translate the non-ready status into a single human-readable blocker line.
if not payload["ready"]:
    if payload["status"] == "blocked":
        payload["blockers"].append("No private/protected transaction RPC is configured for sensitive Engine X broadcasts.")
    elif payload["status"] == "disabled_by_operator":
        payload["blockers"].append("MEV guard is disabled by operator environment.")
    elif payload["status"] == "public_broadcast_override":
        payload["blockers"].append("Public broadcast override is enabled; do not use for adversarially sensitive loops.")

Path(out_json).write_text(json.dumps(payload, indent=2) + "\n")

lines = [
    "# Engine X MEV Defense Readiness",
    "",
    f"- generatedAt: `{payload['generatedAt']}`",
    f"- ready: `{str(payload['ready']).lower()}`",
    f"- status: `{payload['status']}`",
    f"- write RPC label: `{payload['configuredWriteRpcLabel']}`",
    f"- private RPC env key: `{payload['configuredPrivateRpcEnvKey'] or 'none'}`",
    f"- public override: `{str(payload['publicBroadcastOverride']).lower()}`",
    "",
    "## Sensitive Surfaces",
]
for surface in payload["sensitiveSurfaces"]:
    lines.append(f"- `{surface['id']}` / `{surface['venue']}` / `{surface['role']}` / `{surface['poolAddress']}`")
lines.extend(["", "## Blockers"])
if payload["blockers"]:
    lines.extend(f"- {blocker}" for blocker in payload["blockers"])
else:
    lines.append("- none")
lines.extend([
    "",
    "## Operator Rule",
    "",
    "Sensitive Engine X swaps, LP migrations, and quote-defense repairs must use `mev_cast_send`; scripts fail closed unless a protected RPC is configured or the operator explicitly enables the public broadcast override.",
])
Path(out_md).write_text("\n".join(lines) + "\n")
# Echo a compact machine-readable summary for calling scripts/logs.
print(json.dumps({"ready": payload["ready"], "status": payload["status"], "writeRpcLabel": payload["configuredWriteRpcLabel"]}, indent=2))
PY
|
||||
@@ -90,7 +90,13 @@ from datetime import datetime, timezone
|
||||
def units(raw):
    """Convert a raw 6-decimal integer amount (USDC-style base units) to a decimal string."""
    amount = Decimal(int(raw or 0))
    return str(amount / Decimal(10**6))
|
||||
|
||||
v2_nums = [int(x) for x in re.findall(r"\b\d+\b", v2_reserves or "")]
|
||||
v2_nums = []
|
||||
for line in (v2_reserves or "").splitlines():
|
||||
match = re.search(r"\b\d+\b", line)
|
||||
if match:
|
||||
v2_nums.append(int(match.group(0)))
|
||||
if len(v2_nums) < 2:
|
||||
v2_nums = [int(x) for x in re.findall(r"\b\d+\b", v2_reserves or "")]
|
||||
blockers = []
|
||||
if accounting_aware != "1":
|
||||
blockers.append("configured Engine X vault is not accounting-aware")
|
||||
|
||||
@@ -75,7 +75,13 @@ def parse_uint(value: str) -> int:
|
||||
|
||||
|
||||
def parse_uints(value: str, count: int) -> list[int]:
|
||||
matches = [int(match) for match in UINT_RE.findall(value)]
|
||||
matches: list[int] = []
|
||||
for line in value.splitlines():
|
||||
line_matches = UINT_RE.findall(line)
|
||||
if line_matches:
|
||||
matches.append(int(line_matches[0]))
|
||||
if len(matches) < count:
|
||||
matches = [int(match) for match in UINT_RE.findall(value)]
|
||||
if len(matches) < count:
|
||||
raise ValueError(f"expected at least {count} integers, got {matches!r}")
|
||||
return matches[:count]
|
||||
@@ -198,6 +204,71 @@ def query_dodo_health(rpc_url: str, defended_venue: dict) -> dict:
|
||||
}
|
||||
|
||||
|
||||
def query_uniswap_v3_health(rpc_url: str, surface: dict) -> dict:
    """Read live Uniswap v3 pool state for a quote-defense surface via `cast` calls.

    Returns ``{"live": False, ...}`` with the error string when any on-chain read
    fails; otherwise tokens, fee, price/tick, active liquidity, token balances,
    and a classification of the current tick against the surface's preferred
    range from ``surface["activeRange"]``.
    """
    pool_address = surface["poolAddress"]
    try:
        # Static pool metadata.
        token0 = parse_address(cast_call(rpc_url, pool_address, "token0()(address)"))
        token1 = parse_address(cast_call(rpc_url, pool_address, "token1()(address)"))
        fee = parse_uint(cast_call(rpc_url, pool_address, "fee()(uint24)"))
        # slot0 packs price/tick/oracle fields; only the first two are used here.
        slot0 = cast_call(rpc_url, pool_address, "slot0()(uint160,int24,uint16,uint16,uint16,uint8,bool)")
        slot_values = parse_uints(slot0, 2)
        sqrt_price_x96 = slot_values[0]
        # The tick may be signed; parse it directly from the second line before falling back.
        slot_lines = [line.strip().split()[0] for line in slot0.splitlines() if line.strip()]
        tick = int(slot_lines[1]) if len(slot_lines) > 1 else int(slot_values[1])
        liquidity = parse_uint(cast_call(rpc_url, pool_address, "liquidity()(uint128)"))
        # Pool-held token balances, normalized by each token's own decimals.
        token0_balance = parse_uint(cast_call(rpc_url, token0, "balanceOf(address)(uint256)", pool_address))
        token1_balance = parse_uint(cast_call(rpc_url, token1, "balanceOf(address)(uint256)", pool_address))
        decimals0 = parse_uint(cast_call(rpc_url, token0, "decimals()(uint8)"))
        decimals1 = parse_uint(cast_call(rpc_url, token1, "decimals()(uint8)"))
    except Exception as exc:
        # Any failed read marks the whole surface unreadable rather than raising.
        return {"live": False, "poolAddress": pool_address, "error": str(exc)}

    # Classify the live tick against the policy's preferred range; missing range
    # bounds default to the current tick, which yields "inside_preferred_range".
    preferred = surface.get("activeRange", {})
    lower_tick = int(preferred.get("preferredLowerTick", tick))
    upper_tick = int(preferred.get("preferredUpperTick", tick))
    target_tick = int(preferred.get("targetTick", 0))
    if tick < lower_tick:
        range_status = "below_preferred_range"
    elif tick > upper_tick:
        range_status = "above_preferred_range"
    else:
        range_status = "inside_preferred_range"

    # Large integers are serialized as strings to stay JSON-safe.
    return {
        "live": True,
        "poolAddress": pool_address,
        "token0": token0,
        "token1": token1,
        "fee": fee,
        "sqrtPriceX96": str(sqrt_price_x96),
        "tick": tick,
        "targetTick": target_tick,
        "preferredLowerTick": lower_tick,
        "preferredUpperTick": upper_tick,
        "rangeStatus": range_status,
        "activeLiquidity": str(liquidity),
        "token0BalanceRaw": str(token0_balance),
        "token1BalanceRaw": str(token1_balance),
        "token0BalanceUnits": str(normalize_units(token0_balance, decimals0)),
        "token1BalanceUnits": str(normalize_units(token1_balance, decimals1)),
    }
|
||||
|
||||
|
||||
def query_quote_defense_surface(rpc_url: str, surface: dict, base_address: str, quote_address: str) -> dict:
    """Route a quote-defense surface health probe to the venue-specific reader."""
    kind = surface.get("venue")
    if kind == "uniswap_v3_pool":
        return query_uniswap_v3_health(rpc_url, surface)
    if kind == "dodo_pmm":
        return query_dodo_health(rpc_url, surface)
    if kind == "uniswap_v2_pair":
        # The v2 reader expects explicit base/quote token addresses on the pair spec.
        enriched = {**surface, "baseAddress": base_address, "quoteAddress": quote_address}
        return query_uniswap_pair_health(rpc_url, enriched)
    return {"live": False, "poolAddress": surface.get("poolAddress"), "error": f"unsupported venue {kind!r}"}
|
||||
|
||||
|
||||
def choose_flash_amount(policy: dict, deviation_bps: Decimal) -> int:
|
||||
for row in policy["managedCycle"]["quoteAmountByDeviationBps"]:
|
||||
if deviation_bps >= Decimal(row["minDeviationBps"]):
|
||||
@@ -282,6 +353,42 @@ def render_shell(result: dict) -> str:
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def build_quote_defense_decision(surfaces: list[dict], health_by_id: dict[str, dict]) -> dict:
|
||||
candidates: list[dict] = []
|
||||
blockers: list[str] = []
|
||||
for surface in surfaces:
|
||||
surface_id = surface["id"]
|
||||
health = health_by_id.get(surface_id, {})
|
||||
if not health.get("live"):
|
||||
blockers.append(f"{surface_id}: unreadable")
|
||||
continue
|
||||
venue = surface.get("venue")
|
||||
if venue == "uniswap_v3_pool":
|
||||
if health.get("rangeStatus") == "inside_preferred_range" and int(health.get("activeLiquidity", "0")) > 0:
|
||||
candidates.append({"surfaceId": surface_id, "action": "use_for_public_indexed_quote_defense"})
|
||||
else:
|
||||
candidates.append({"surfaceId": surface_id, "action": "rebalance_tick_before_use"})
|
||||
elif venue == "uniswap_v2_pair":
|
||||
quote_units = Decimal(health.get("quoteReserveUnits", "0"))
|
||||
if quote_units > 0:
|
||||
candidates.append({"surfaceId": surface_id, "action": "secondary_public_repair_or_activity_lane"})
|
||||
else:
|
||||
blockers.append(f"{surface_id}: zero quote reserve")
|
||||
elif venue == "dodo_pmm":
|
||||
if health.get("live"):
|
||||
candidates.append({"surfaceId": surface_id, "action": "managed_defended_lane_when_capital_and_quotes_pass"})
|
||||
|
||||
preferred = next(
|
||||
(row for row in candidates if row["action"] == "use_for_public_indexed_quote_defense"),
|
||||
next((row for row in candidates if row["action"] == "rebalance_tick_before_use"), None),
|
||||
)
|
||||
return {
|
||||
"preferredSurface": preferred,
|
||||
"candidates": candidates,
|
||||
"blockers": blockers,
|
||||
}
|
||||
|
||||
|
||||
def main() -> int:
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("--shell", action="store_true", help="Emit shell-friendly KEY=VALUE lines.")
|
||||
@@ -293,13 +400,21 @@ def main() -> int:
|
||||
rpc_url = resolve_rpc_url(policy, env_values)
|
||||
|
||||
chain = deployment_status["chains"][str(policy["network"]["chainId"])]
|
||||
base_address = chain["cwTokens"]["cWUSDC"]
|
||||
quote_address = chain["anchorAddresses"]["USDC"]
|
||||
public_pair = load_public_pair_from_policy(policy, deployment_status)
|
||||
public_pair["baseAddress"] = chain["cwTokens"][public_pair["base"]]
|
||||
public_pair["quoteAddress"] = chain["anchorAddresses"][public_pair["quote"]]
|
||||
public_pair["baseAddress"] = base_address
|
||||
public_pair["quoteAddress"] = quote_address
|
||||
|
||||
defended_venue = dict(policy["defendedVenue"])
|
||||
public_health = query_uniswap_pair_health(rpc_url, public_pair)
|
||||
defended_health = query_dodo_health(rpc_url, defended_venue)
|
||||
quote_surfaces = policy.get("quoteDefenseSurfaces", [])
|
||||
quote_surface_health = {
|
||||
surface["id"]: query_quote_defense_surface(rpc_url, surface, base_address, quote_address)
|
||||
for surface in quote_surfaces
|
||||
}
|
||||
quote_defense_decision = build_quote_defense_decision(quote_surfaces, quote_surface_health)
|
||||
decision = build_decision(policy, public_health, defended_health)
|
||||
if public_health.get("live"):
|
||||
decision["publicDeviationBps"] = public_health["deviationBps"]
|
||||
@@ -313,6 +428,9 @@ def main() -> int:
|
||||
"publicPairHealth": public_health,
|
||||
"defendedVenue": defended_venue,
|
||||
"defendedVenueHealth": defended_health,
|
||||
"quoteDefenseSurfaces": quote_surfaces,
|
||||
"quoteDefenseSurfaceHealth": quote_surface_health,
|
||||
"quoteDefenseDecision": quote_defense_decision,
|
||||
"decision": decision,
|
||||
}
|
||||
|
||||
|
||||
356
scripts/verify/evaluate-mainnet-cwusdc-weth-liquidity-surfaces.sh
Executable file
356
scripts/verify/evaluate-mainnet-cwusdc-weth-liquidity-surfaces.sh
Executable file
@@ -0,0 +1,356 @@
|
||||
#!/usr/bin/env bash
# Read-only evaluation of Mainnet cWUSDC/WETH9 liquidity surfaces.
# Collects wallet balances, Uniswap V2/V3 pool state, and DODO integration
# status, then writes JSON + Markdown reports. Requires Foundry's `cast` and
# an Ethereum Mainnet RPC endpoint; broadcasts no transactions.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"

# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/load-project-env.sh
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh"

# Hard prerequisite: fail fast when no RPC endpoint is configured.
: "${ETHEREUM_MAINNET_RPC:?ETHEREUM_MAINNET_RPC is required}"

# Report targets and on-chain addresses. Every value is env-overridable;
# the defaults are the canonical Mainnet deployments.
OUT_JSON="${OUT_JSON:-reports/status/mainnet-cwusdc-weth-liquidity-surfaces-latest.json}"
OUT_MD="${OUT_MD:-reports/status/mainnet-cwusdc-weth-liquidity-surfaces-latest.md}"
CWUSDC="${CWUSDC_MAINNET:-0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a}"
USDC="${USDC_MAINNET:-0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48}"
WETH="${WETH9_MAINNET:-0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2}"
UNIV2_FACTORY="${CHAIN_1_UNISWAP_V2_FACTORY:-0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f}"
UNIV2_ROUTER="${CHAIN_1_UNISWAP_V2_ROUTER:-0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D}"
UNIV3_FACTORY="${CHAIN_1_UNISWAP_V3_FACTORY:-0x1F98431c8aD98523631AE4a59f267346ea31F984}"
# DODO addresses are optional; empty values downgrade DODO checks to "not configured".
DODO_INTEGRATION="${DODO_PMM_INTEGRATION_MAINNET:-${CHAIN_1_DODO_PMM_INTEGRATION:-}}"
DODO_VENDING_MACHINE="${MAINNET_DODO_VENDING_MACHINE_ADDRESS:-${ETHEREUM_DODO_VENDING_MACHINE_ADDRESS:-}}"
MAINNET_CCIP_WETH9_BRIDGE="${MAINNET_CCIP_WETH9_BRIDGE:-}"
# ETH kept back for gas (wei); ETH above this may be modeled as wrappable to WETH9.
GAS_RESERVE_WEI="${ENGINE_X_WETH_POOL_GAS_RESERVE_WEI:-5000000000000000}"
# Optional hard cap on modeled ETH->WETH wrapping; 0 means "no cap".
MAX_ETH_WRAP_WEI="${ENGINE_X_WETH_POOL_MAX_WRAP_WEI:-0}"
# Comma-separated USD canary sizes for the peg-loss model.
PEG_TEST_AMOUNTS_USD="${ENGINE_X_WETH_PEG_TEST_AMOUNTS_USD:-0.005,0.01,0.025}"

# Resolve the deployer: derive from PRIVATE_KEY when present, otherwise use
# an explicit DEPLOYER_ADDRESS. One of the two is mandatory.
if [[ -n "${PRIVATE_KEY:-}" ]]; then
  DEPLOYER="$(cast wallet address --private-key "${PRIVATE_KEY}")"
else
  DEPLOYER="${DEPLOYER_ADDRESS:-}"
fi
if [[ -z "${DEPLOYER}" ]]; then
  echo "Set PRIVATE_KEY or DEPLOYER_ADDRESS" >&2
  exit 1
fi
|
||||
|
||||
# --- Live chain snapshot: block, gas, and deployer token balances -----------
BLOCK_NUMBER="$(cast block-number --rpc-url "${ETHEREUM_MAINNET_RPC}")"
GAS_PRICE_WEI="$(cast gas-price --rpc-url "${ETHEREUM_MAINNET_RPC}")"
ETH_WEI="$(cast balance "${DEPLOYER}" --rpc-url "${ETHEREUM_MAINNET_RPC}")"
# `awk '{print $1}'` strips cast's trailing decoration so only the integer remains.
CWUSDC_RAW="$(cast call "${CWUSDC}" 'balanceOf(address)(uint256)' "${DEPLOYER}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | awk '{print $1}')"
USDC_RAW="$(cast call "${USDC}" 'balanceOf(address)(uint256)' "${DEPLOYER}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | awk '{print $1}')"
WETH_RAW="$(cast call "${WETH}" 'balanceOf(address)(uint256)' "${DEPLOYER}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | awk '{print $1}')"

# --- Uniswap V2 cWUSDC/WETH pair (zero address means "no pair") -------------
UNIV2_PAIR="$(cast call "${UNIV2_FACTORY}" 'getPair(address,address)(address)' "${CWUSDC}" "${WETH}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | grep -oE '0x[a-fA-F0-9]{40}' | head -1)"
UNIV2_RESERVES=""
if [[ "${UNIV2_PAIR}" != "0x0000000000000000000000000000000000000000" ]]; then
  # Best-effort: `|| true` keeps the script alive if the reserves call fails.
  UNIV2_RESERVES="$(cast call "${UNIV2_PAIR}" 'getReserves()(uint112,uint112,uint32)' --rpc-url "${ETHEREUM_MAINNET_RPC}" || true)"
fi

# Uniswap orders a pair's tokens by lowercase address; compute token0/token1.
TOKEN0="$(printf '%s\n%s\n' "${CWUSDC}" "${WETH}" | tr '[:upper:]' '[:lower:]' | sort | sed -n '1p')"
TOKEN1="$(printf '%s\n%s\n' "${CWUSDC}" "${WETH}" | tr '[:upper:]' '[:lower:]' | sort | sed -n '2p')"

# --- Uniswap V3 pools across the four standard fee tiers, as a JSON array ---
V3_POOLS_JSON="$(
  for fee in 100 500 3000 10000; do
    pool="$(cast call "${UNIV3_FACTORY}" 'getPool(address,address,uint24)(address)' "${TOKEN0}" "${TOKEN1}" "${fee}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | grep -oE '0x[a-fA-F0-9]{40}' | head -1)"
    slot0=""
    liquidity="0"
    if [[ "${pool}" != "0x0000000000000000000000000000000000000000" ]]; then
      slot0="$(cast call "${pool}" 'slot0()(uint160,int24,uint16,uint16,uint16,uint8,bool)' --rpc-url "${ETHEREUM_MAINNET_RPC}" 2>/dev/null || true)"
      liquidity="$(cast call "${pool}" 'liquidity()(uint128)' --rpc-url "${ETHEREUM_MAINNET_RPC}" 2>/dev/null | awk '{print $1}' || echo 0)"
    fi
    # Emit one JSON object per fee tier; the trailing python3 -c folds them into an array.
    python3 - "${fee}" "${pool}" "${liquidity}" "${slot0}" <<'PY'
import json, sys
fee, pool, liquidity, slot0 = sys.argv[1:]
print(json.dumps({"fee": int(fee), "pool": pool, "liquidity": liquidity, "slot0": slot0 or None}))
PY
  done | python3 -c 'import json,sys; print(json.dumps([json.loads(line) for line in sys.stdin if line.strip()]))'
)"

# --- WETH->USDC router quotes at several sizes (JSON array) -----------------
# Used later as the WETH/USD price reference; largest successful quote wins.
WETH_QUOTES_JSON="$(
  for amt in 100000000000000 200000000000000 500000000000000 1000000000000000 2000000000000000 5000000000000000; do
    raw="$(cast call "${UNIV2_ROUTER}" 'getAmountsOut(uint256,address[])(uint256[])' "${amt}" "[${WETH},${USDC}]" --rpc-url "${ETHEREUM_MAINNET_RPC}" 2>/dev/null || true)"
    python3 - "${amt}" "${raw}" <<'PY'
import json, re, sys
amt = int(sys.argv[1])
raw = sys.argv[2]
# The router returns "[amountIn, amountOut]"; take the first integer after the comma.
parts = raw.split(",", 1)
out = 0
if len(parts) == 2:
    match = re.search(r"(\d+)", parts[1])
    out = int(match.group(1)) if match else 0
print(json.dumps({"wethInRaw": str(amt), "usdcOutRaw": str(out)}))
PY
  done | python3 -c 'import json,sys; print(json.dumps([json.loads(line) for line in sys.stdin if line.strip()]))'
)"

# --- DODO integration status (optional surfaces) ----------------------------
DODO_HAS_MANAGER="false"
DODO_CODE_LEN="0"
DODO_VENDING_CODE_LEN="0"
if [[ -n "${DODO_INTEGRATION}" ]]; then
  # Code length (in hex chars) distinguishes a deployed contract from an EOA/empty address.
  DODO_CODE_LEN="$(cast code "${DODO_INTEGRATION}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | wc -c | tr -d ' ')"
  POOL_MANAGER_ROLE="$(cast keccak "POOL_MANAGER_ROLE")"
  DODO_HAS_MANAGER="$(cast call "${DODO_INTEGRATION}" 'hasRole(bytes32,address)(bool)' "${POOL_MANAGER_ROLE}" "${DEPLOYER}" --rpc-url "${ETHEREUM_MAINNET_RPC}" 2>/dev/null | tr -d '[:space:]' || echo false)"
fi
if [[ -n "${DODO_VENDING_MACHINE}" ]]; then
  DODO_VENDING_CODE_LEN="$(cast code "${DODO_VENDING_MACHINE}" --rpc-url "${ETHEREUM_MAINNET_RPC}" | wc -c | tr -d ' ')"
fi
|
||||
|
||||
# --- Build the JSON + Markdown reports from everything collected above ------
mkdir -p "$(dirname "${OUT_JSON}")"
# All probe results are passed positionally; the argv unpack inside the
# heredoc must stay in exactly this order.
python3 - "${OUT_JSON}" "${OUT_MD}" \
  "${BLOCK_NUMBER}" "${GAS_PRICE_WEI}" "${DEPLOYER}" "${CWUSDC}" "${WETH}" "${USDC}" "${MAINNET_CCIP_WETH9_BRIDGE}" \
  "${ETH_WEI}" "${CWUSDC_RAW}" "${USDC_RAW}" "${WETH_RAW}" "${GAS_RESERVE_WEI}" "${MAX_ETH_WRAP_WEI}" \
  "${UNIV2_PAIR}" "${UNIV2_RESERVES}" "${V3_POOLS_JSON}" "${WETH_QUOTES_JSON}" \
  "${DODO_INTEGRATION}" "${DODO_VENDING_MACHINE}" "${DODO_HAS_MANAGER}" "${DODO_CODE_LEN}" "${DODO_VENDING_CODE_LEN}" \
  "${PEG_TEST_AMOUNTS_USD}" <<'PY'
from decimal import Decimal, getcontext
from datetime import datetime, timezone
from pathlib import Path
import json
import re
import sys

getcontext().prec = 80
# Positional unpack; order mirrors the shell invocation above exactly.
(
    out_json, out_md, block_number, gas_price, deployer, cwusdc, weth, usdc, ccip_bridge,
    eth_wei, cw_raw, usdc_raw, weth_raw, gas_reserve, max_wrap, univ2_pair, univ2_reserves,
    v3_pools_json, weth_quotes_json, dodo_integration, dodo_vm, dodo_has_manager, dodo_code_len, dodo_vm_code_len,
    peg_test_amounts_usd,
) = sys.argv[1:]

eth_wei_i = int(eth_wei)
weth_raw_i = int(weth_raw)
cw_raw_i = int(cw_raw)
gas_reserve_i = int(gas_reserve)
max_wrap_i = int(max_wrap)
# ETH beyond the gas reserve is modeled as wrappable into WETH9.
usable_eth_for_wrap = max(eth_wei_i - gas_reserve_i, 0)
if max_wrap_i:
    usable_eth_for_wrap = min(usable_eth_for_wrap, max_wrap_i)
usable_weth_raw = weth_raw_i + usable_eth_for_wrap

# WETH/USDC reference price: take the largest router quote that succeeded.
quotes = json.loads(weth_quotes_json)
price = Decimal(0)
for q in sorted(quotes, key=lambda x: int(x["wethInRaw"]), reverse=True):
    if int(q["usdcOutRaw"]) > 0:
        price = (Decimal(int(q["usdcOutRaw"])) / Decimal(10**6)) / (Decimal(int(q["wethInRaw"])) / Decimal(10**18))
        break

# Size a balanced seed: how much cWUSDC pairs against all usable WETH, and
# which side is the binding constraint for the recommended deposit.
cw_needed_for_wallet_weth_raw = int((Decimal(usable_weth_raw) / Decimal(10**18) * price * Decimal(10**6)).to_integral_value()) if price else 0
usable_pair_cw_raw = min(cw_raw_i, cw_needed_for_wallet_weth_raw)
usable_pair_weth_raw = usable_weth_raw if cw_needed_for_wallet_weth_raw <= cw_raw_i else int((Decimal(cw_raw_i) / Decimal(10**6) / price * Decimal(10**18)).to_integral_value()) if price else 0

v3_pools = json.loads(v3_pools_json)
v3_existing = [p for p in v3_pools if p["pool"].lower() != "0x0000000000000000000000000000000000000000"]
v2_exists = univ2_pair.lower() != "0x0000000000000000000000000000000000000000"

def fmt_decimal(value: Decimal, places: int = 18) -> str:
    # Fixed-point render with trailing zeros (and a bare dot) trimmed.
    text = f"{value:.{places}f}"
    return text.rstrip("0").rstrip(".") if "." in text else text

def cp_out(amount_in: Decimal, reserve_in: Decimal, reserve_out: Decimal, fee_bps: Decimal = Decimal(30)) -> Decimal:
    # Constant-product (x*y=k) swap output after a bps fee on the input side.
    if amount_in <= 0 or reserve_in <= 0 or reserve_out <= 0:
        return Decimal(0)
    amount_in_after_fee = amount_in * (Decimal(10000) - fee_bps) / Decimal(10000)
    return (amount_in_after_fee * reserve_out) / (reserve_in + amount_in_after_fee)

# Model round-trip peg losses at each canary size across standard fee tiers.
fee_scenarios_bps = [
    ("uniswapV3_1bp", Decimal(1)),
    ("uniswapV3_5bp", Decimal(5)),
    ("uniswapV2_30bp", Decimal(30)),
]
peg_tests = []
for raw_amount in [a.strip() for a in peg_test_amounts_usd.split(",") if a.strip()]:
    amount_usd = Decimal(raw_amount)
    cw_in = amount_usd
    ideal_weth = (amount_usd / price) if price else Decimal(0)
    fee_models = {}
    for scenario_name, fee_bps in fee_scenarios_bps:
        seeded_weth_out = cp_out(
            cw_in,
            Decimal(usable_pair_cw_raw) / Decimal(10**6),
            Decimal(usable_pair_weth_raw) / Decimal(10**18),
            fee_bps,
        )
        effective_usd_out = seeded_weth_out * price if price else Decimal(0)
        loss_pct = ((amount_usd - effective_usd_out) / amount_usd * Decimal(100)) if amount_usd > 0 and effective_usd_out else Decimal(0)
        reverse_cw_out = cp_out(
            ideal_weth,
            Decimal(usable_pair_weth_raw) / Decimal(10**18),
            Decimal(usable_pair_cw_raw) / Decimal(10**6),
            fee_bps,
        )
        reverse_loss_pct = ((amount_usd - reverse_cw_out) / amount_usd * Decimal(100)) if amount_usd > 0 and reverse_cw_out else Decimal(0)
        fee_models[scenario_name] = {
            "feeBps": str(fee_bps),
            "cwusdcToWethOut": fmt_decimal(seeded_weth_out),
            "cwusdcToWethUsdOut": fmt_decimal(effective_usd_out, 12),
            "cwusdcToWethLossPct": fmt_decimal(loss_pct, 8),
            "wethToCwusdcOut": fmt_decimal(reverse_cw_out, 12),
            "wethToCwusdcLossPct": fmt_decimal(reverse_loss_pct, 8),
        }
    peg_tests.append({
        "usdAmount": str(amount_usd),
        "cwusdcRaw": str(int((amount_usd * Decimal(10**6)).to_integral_value())),
        "idealWethRaw": str(int((ideal_weth * Decimal(10**18)).to_integral_value())) if price else "0",
        "idealWeth": fmt_decimal(ideal_weth),
        "walletSeededModels": fee_models,
        "lossAccounting": "For cWUSDC->WETH9 canaries, loss is paid by spending slightly more cWUSDC value for the same WETH/USD reference value. WETH/USDC inventory is protected by exact-output/min-out guards.",
    })

native_boundary = "Native ETH execution wraps to WETH9; DODO and UniV3 require ERC-20 WETH9, while UniV2 addLiquidityETH still creates a WETH9 pair."
usd_support_model = (
    "cWUSDC/WETH9 pools support the USD peg as an indirect public price anchor through deep WETH/USD markets. "
    "They do not replace direct cWUSDC/USDC quote liquidity for redemption-style proof, but they can provide indexable "
    "on-chain evidence that 1 cWUSDC is priced near 1 USD when the implied cWUSDC/WETH price matches the live WETH/USD reference."
)
# Advisory blockers: collected, never fatal — the report always gets written.
blockers = []
if int(usdc_raw) == 0:
    blockers.append("deployer has 0 USDC; this is fine for WETH pools but means no cWUSDC/USDC repair can accompany them")
if usable_weth_raw == 0:
    blockers.append("no WETH or spare ETH available after gas reserve")
if not dodo_integration:
    blockers.append("DODO_PMM_INTEGRATION_MAINNET is not configured")
elif dodo_has_manager != "true":
    blockers.append("deployer lacks DODO POOL_MANAGER_ROLE on Mainnet integration")
if dodo_vm and int(dodo_vm_code_len) <= 3:
    blockers.append("configured DODO vending machine has no code")

dodo_single_sided = {
    "requestedBaseToken": cwusdc,
    "supportedAsEngineXInventory": cw_raw_i > 0,
    "executableThroughCurrentIntegration": False,
    "reason": "DODOPMMIntegration.addLiquidity requires both baseAmount > 0 and quoteAmount > 0; cWUSDC-only deposits are inventory/accounting support until a wrapper or quote-side seed is available.",
    "currentCwusdcAvailable": str(Decimal(cw_raw_i) / Decimal(10**6)),
    "recommendedMode": "record cWUSDC-only inventory inside Engine X first; promote to executable DODO PMM only after adding WETH9/USDC quote inventory or deploying a single-sided wrapper that controls solvency and min-out proofs",
}

payload = {
    "schema": "mainnet-cwusdc-weth-liquidity-surfaces/v1",
    "generatedAt": datetime.now(timezone.utc).isoformat(),
    "blockNumber": int(block_number),
    "gasPriceWei": gas_price,
    "addresses": {
        "deployer": deployer,
        "cWUSDC": cwusdc,
        "WETH9": weth,
        "USDC": usdc,
        "mainnetCcipWeth9Bridge": ccip_bridge or None,
    },
    "balances": {
        "ethWei": eth_wei,
        "eth": str(Decimal(int(eth_wei)) / Decimal(10**18)),
        "wethRaw": weth_raw,
        "weth": str(Decimal(weth_raw_i) / Decimal(10**18)),
        "cwusdcRaw": cw_raw,
        "cwusdc": str(Decimal(cw_raw_i) / Decimal(10**6)),
        "usdcRaw": usdc_raw,
        "usdc": str(Decimal(int(usdc_raw)) / Decimal(10**6)),
    },
    "marketReference": {
        "wethUsdcPriceFromUniV2": str(price),
        "wethQuotes": quotes,
    },
    "availableSeed": {
        "gasReserveWei": str(gas_reserve_i),
        "ethUsableForWrapWei": str(usable_eth_for_wrap),
        "totalUsableWethRaw": str(usable_weth_raw),
        "totalUsableWeth": str(Decimal(usable_weth_raw) / Decimal(10**18)),
        "cwusdcNeededForAllUsableWethRaw": str(cw_needed_for_wallet_weth_raw),
        "cwusdcNeededForAllUsableWeth": str(Decimal(cw_needed_for_wallet_weth_raw) / Decimal(10**6)),
        "recommendedPairCwusdcRaw": str(usable_pair_cw_raw),
        "recommendedPairCwusdc": str(Decimal(usable_pair_cw_raw) / Decimal(10**6)),
        "recommendedPairWethRaw": str(usable_pair_weth_raw),
        "recommendedPairWeth": str(Decimal(usable_pair_weth_raw) / Decimal(10**18)),
    },
    "pegTestAmounts": peg_tests,
    "surfaces": {
        "uniswapV2": {"pair": univ2_pair, "exists": v2_exists, "reservesRawText": univ2_reserves or None},
        "uniswapV3": {"token0": min(cwusdc.lower(), weth.lower()), "token1": max(cwusdc.lower(), weth.lower()), "pools": v3_pools, "existingPools": v3_existing},
        "dodo": {
            "integration": dodo_integration or None,
            "vendingMachine": dodo_vm or None,
            "integrationCodeLength": int(dodo_code_len),
            "vendingMachineCodeLength": int(dodo_vm_code_len),
            "deployerHasPoolManagerRole": dodo_has_manager == "true",
        },
    },
    "singleSidedDodoCwusdc": dodo_single_sided,
    "boundary": native_boundary,
    "usdPegSupportModel": usd_support_model,
    "blockers": blockers,
    "recommendation": [
        "Treat cWUSDC/ETH and cWUSDC/WETH as the same WETH9-backed public market for indexers.",
        "Use WETH-backed pools as an indirect USD peg support surface by comparing cWUSDC/WETH9 pool price against live WETH/USDC reference markets.",
        "Create at most one canonical UniV3 cWUSDC/WETH9 pool first, using private/protected execution and a wider tick range than the cWUSDC/USDC attempt.",
        "Use UniV2 addLiquidityETH only as a secondary indexable surface; it creates the same WETH9 pair.",
        "Use DODO PMM only after confirming createPool against the configured vending machine and seeding with ERC-20 WETH9, not native ETH.",
    ],
}
Path(out_json).write_text(json.dumps(payload, indent=2) + "\n")

# Markdown rendering of the same payload.
lines = [
    "# Mainnet cWUSDC/WETH9 Liquidity Surface Evaluation",
    "",
    f"- Generated: `{payload['generatedAt']}`",
    f"- Block: `{payload['blockNumber']}`",
    f"- cWUSDC: `{cwusdc}`",
    # NOTE(review): the "WETH9 Cc2" label below looks garbled — confirm intended text.
    f"- WETH9 Cc2: `{weth}`",
    f"- Native ETH boundary: {native_boundary}",
    f"- USD peg support model: {usd_support_model}",
    "",
    "## Live Balances",
    "",
    f"- ETH: `{payload['balances']['eth']}`",
    f"- WETH9: `{payload['balances']['weth']}`",
    f"- cWUSDC: `{payload['balances']['cwusdc']}`",
    f"- USDC: `{payload['balances']['usdc']}`",
    "",
    "## Existing Pools",
    "",
    f"- UniV2 cWUSDC/WETH9: `{univ2_pair}`",
]
for pool in v3_pools:
    lines.append(f"- UniV3 fee `{pool['fee']}`: `{pool['pool']}` liquidity `{pool['liquidity']}`")
lines.extend([
    f"- DODO integration: `{dodo_integration or 'not configured'}`",
    f"- DODO deployer pool-manager role: `{dodo_has_manager == 'true'}`",
    "",
    "## Seed Capacity",
    "",
    f"- WETH/USDC reference price: `{price}`",
    f"- Total usable WETH after gas reserve: `{payload['availableSeed']['totalUsableWeth']}`",
    f"- cWUSDC needed to pair all usable WETH: `{payload['availableSeed']['cwusdcNeededForAllUsableWeth']}`",
    f"- Recommended max seed from current wallet: `{payload['availableSeed']['recommendedPairCwusdc']} cWUSDC + {payload['availableSeed']['recommendedPairWeth']} WETH9`",
    "",
    "## Single-Sided DODO cWUSDC",
    "",
    f"- Supported as Engine X inventory: `{dodo_single_sided['supportedAsEngineXInventory']}`",
    f"- Executable through current DODOPMMIntegration: `{dodo_single_sided['executableThroughCurrentIntegration']}`",
    f"- Reason: {dodo_single_sided['reason']}",
    f"- Recommended mode: {dodo_single_sided['recommendedMode']}",
    "",
    "## Peg Test Amounts",
    "",
    "| USD amount | cWUSDC in | Ideal WETH value | Modeled cWUSDC->WETH USD out | Modeled loss | Reverse WETH->cWUSDC out | Reverse loss |",
    "|---:|---:|---:|---:|---:|---:|---:|",
])
for test in peg_tests:
    for scenario_name, model in test["walletSeededModels"].items():
        lines.append(
            f"| `{test['usdAmount']} ({scenario_name})` | `{Decimal(test['cwusdcRaw']) / Decimal(10**6)}` | `{test['idealWeth']}` | "
            f"`{model['cwusdcToWethUsdOut']}` | `{model['cwusdcToWethLossPct']}%` | "
            f"`{model['wethToCwusdcOut']}` | `{model['wethToCwusdcLossPct']}%` |"
        )
lines.extend([
    "",
    "Loss accounting: for cWUSDC->WETH9 canaries, loss is paid by spending slightly more cWUSDC value for the same WETH/USD reference value. Use exact-output or strict min-out guards so WETH, USDC, and lender inventory are not silently depleted.",
    "",
    "## Blockers",
])
lines.extend([f"- {b}" for b in blockers] if blockers else ["- none for a tiny WETH-backed pool seed"])
lines.extend(["", "## Recommendation"])
lines.extend([f"- {r}" for r in payload["recommendation"]])
Path(out_md).write_text("\n".join(lines) + "\n")
print(out_json)
print(out_md)
PY
|
||||
269
scripts/verify/generate-cwusdc-supply-circulating-attestation.py
Normal file
269
scripts/verify/generate-cwusdc-supply-circulating-attestation.py
Normal file
@@ -0,0 +1,269 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Generate a current cWUSDC supply and circulating-supply attestation.
|
||||
|
||||
The output is intentionally tracker-facing: it separates on-chain total supply
|
||||
from proposed circulating-supply methodology and does not silently exclude
|
||||
operator or protocol balances unless explicitly requested.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import datetime as dt
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from decimal import Decimal, getcontext
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
# High precision so large raw token amounts never round during unit conversion.
getcontext().prec = 80

# Repo root (this file lives two directories below it) and default report paths.
ROOT = Path(__file__).resolve().parents[2]
REPORT_JSON = ROOT / "reports" / "status" / "cwusdc-supply-circulating-attestation-latest.json"
REPORT_MD = ROOT / "reports" / "status" / "cwusdc-supply-circulating-attestation-latest.md"
# Etherscan v2 API base and the public token page template.
ETHERSCAN_API = "https://api.etherscan.io/v2/api"
ETHERSCAN_PAGE = "https://etherscan.io/token/{address}"
# Canonical Mainnet cWUSDC contract and its ERC-20 decimals.
CWUSDC = "0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a"
DECIMALS = 6

# Labeled addresses whose cWUSDC balances are always reported; each may be
# excluded from circulating supply via --exclude-known <label>.
KNOWN_BALANCES = {
    "operator": "0x4A666F96fC8764181194447A7dFdb7d471b301C8",
    "engineXVirtualBatchVault": "0xf108586d1FC330EA1D4EA4ff8fd983cde94279B1",
    "uniswapV3CwusdcUsdcPool": "0x1Cf2e685682C7F7beF508F0Af15Dfb5CDda01ee3",
    "uniswapV2CwusdcUsdcPair": "0xC28706F899266b36BC43cc072b3a921BDf2C48D9",
}
|
||||
|
||||
|
||||
def load_dotenv(path: Path) -> None:
    """Load KEY=VALUE pairs from *path* into ``os.environ``.

    Missing files are ignored; blank lines, ``#`` comments, and lines without
    ``=`` are skipped. Existing environment variables are never overridden.
    """
    if not path.exists():
        return
    for raw_line in path.read_text().splitlines():
        entry = raw_line.strip()
        if not entry or entry.startswith("#") or "=" not in entry:
            continue
        name, _, raw_value = entry.partition("=")
        name = name.strip()
        # Trim surrounding single/double quotes, matching common .env style.
        value = raw_value.strip().strip('"').strip("'")
        if name and name not in os.environ:
            os.environ[name] = value
|
||||
|
||||
|
||||
def fetch_json(url: str, timeout: int = 30) -> Any:
    """GET *url* with a project User-Agent and decode the JSON response body."""
    request = urllib.request.Request(
        url,
        headers={"User-Agent": "dbis-cwusdc-supply-attestation/1.0"},
    )
    with urllib.request.urlopen(request, timeout=timeout) as resp:
        body = resp.read()
    return json.loads(body.decode("utf-8"))
|
||||
|
||||
|
||||
def etherscan_call(params: dict[str, str], api_key: str) -> Any:
    """Call the Etherscan v2 API (chain 1) and return the ``result`` field.

    Retries up to 6 times on rate-limit responses with a growing backoff, and
    sleeps briefly after each successful call to throttle request rate.
    Raises RuntimeError on a non-retryable error payload or once retries are
    exhausted.
    """
    query_string = urllib.parse.urlencode({"chainid": "1", **params, "apikey": api_key})
    url = f"{ETHERSCAN_API}?{query_string}"
    last_payload: Any = None
    for attempt in range(6):
        payload = fetch_json(url)
        last_payload = payload
        message = str(payload.get("message", ""))
        result = payload.get("result")
        # status "0" marks failure, except the benign "no transactions found".
        status_ok = str(payload.get("status")) != "0"
        if status_ok or message.lower() == "no transactions found":
            time.sleep(0.35)
            return result
        # proxy-module calls report status "0" but carry a valid hex result.
        if isinstance(result, str) and result.startswith("0x"):
            time.sleep(0.35)
            return result
        rate_limited = (
            "rate limit" in str(result).lower() or "rate limit" in message.lower()
        )
        if rate_limited:
            time.sleep(1.25 + attempt * 0.5)
            continue
        raise RuntimeError(f"Etherscan API error: {payload.get('message')} {payload.get('result')}")
    raise RuntimeError(f"Etherscan API error after retries: {last_payload}")
|
||||
|
||||
|
||||
def human(raw: int, decimals: int = DECIMALS) -> str:
    """Render a raw integer token amount in whole-token units (fixed-point)."""
    divisor = Decimal(10) ** decimals
    return f"{Decimal(raw) / divisor:f}"
|
||||
|
||||
|
||||
def parse_int(value: Any) -> int:
    """Coerce *value* to int; ``0x``-prefixed strings parse as hexadecimal."""
    text = str(value)
    if isinstance(value, str) and text.startswith("0x"):
        return int(text, 16)
    return int(text)
|
||||
|
||||
|
||||
def fetch_etherscan_page_stats(address: str) -> dict[str, Any]:
    """Scrape public stats from the Etherscan token page for *address*.

    Returns the page URL plus best-effort text extractions: holders count,
    the hidden total-supply field, and whether the on-chain / circulating
    market-cap rows render as a dash (i.e. no value propagated yet).
    NOTE(review): regexes target Etherscan's current HTML layout — they will
    silently return None/False if the markup changes.
    """
    url = ETHERSCAN_PAGE.format(address=address)
    # Browser-like UA: the public page may reject obviously non-browser clients.
    req = urllib.request.Request(url, headers={"User-Agent": "Mozilla/5.0 DBIS-cwusdc-attestation/1.0"})
    with urllib.request.urlopen(req, timeout=30) as response:
        html = response.read().decode("utf-8", errors="replace")

    # Holders count appears as a comma-grouped number just after an <h4>Holders</h4>.
    holders_match = re.search(r"<h4[^>]*>\s*Holders\s*</h4>\s*<div[^>]*>\s*<div>\s*([0-9,]+)", html, re.I)
    # Total supply is carried in a hidden input field.
    total_supply_match = re.search(r'id="ContentPlaceHolder1_hdnTotalSupply" value="([^"]+)"', html)
    # A market-cap row whose value renders as "-" means the value is missing;
    # the containment check avoids a false positive when the row is absent.
    onchain_marketcap_missing = "id=\"ContentPlaceHolder1_tr_marketcap\"" in html and re.search(
        r"id=\"ContentPlaceHolder1_tr_marketcap\".*?<div>\s*-\s*</div>", html, re.I | re.S
    )
    circulating_marketcap_missing = "id=\"ContentPlaceHolder1_tr_circulatingmarketcap\"" in html and re.search(
        r"id=\"ContentPlaceHolder1_tr_circulatingmarketcap\".*?<div>\s*-\s*</div>", html, re.I | re.S
    )
    return {
        "url": url,
        "holdersText": holders_match.group(1) if holders_match else None,
        "totalSupplyText": total_supply_match.group(1) if total_supply_match else None,
        "onchainMarketCapMissing": bool(onchain_marketcap_missing),
        "circulatingMarketCapMissing": bool(circulating_marketcap_missing),
    }
|
||||
|
||||
|
||||
def build(args: argparse.Namespace) -> dict[str, Any]:
    """Assemble the full attestation payload from live Etherscan data.

    Reads the API key from --etherscan-api-key or ETHERSCAN_API_KEY (after
    loading the repo .env), queries total supply and each KNOWN_BALANCES
    address, subtracts only explicitly excluded labels to derive circulating
    supply, and attaches a scrape of the public token page.
    Raises SystemExit when no API key is available.
    """
    load_dotenv(ROOT / ".env")
    api_key = args.etherscan_api_key or os.environ.get("ETHERSCAN_API_KEY", "")
    if not api_key:
        raise SystemExit("ETHERSCAN_API_KEY is required")

    # Reference block pins the observation to a point in time.
    latest_block_raw = etherscan_call({"module": "proxy", "action": "eth_blockNumber"}, api_key)
    latest_block = parse_int(latest_block_raw)
    total_supply_raw = parse_int(
        etherscan_call(
            {"module": "stats", "action": "tokensupply", "contractaddress": args.token},
            api_key,
        )
    )

    # Always report every known balance; only subtract the explicitly
    # excluded labels from circulating supply.
    known: dict[str, Any] = {}
    excluded_raw = 0
    exclude_set = set(args.exclude_known or [])
    for label, address in KNOWN_BALANCES.items():
        raw = parse_int(
            etherscan_call(
                {
                    "module": "account",
                    "action": "tokenbalance",
                    "contractaddress": args.token,
                    "address": address,
                    "tag": "latest",
                },
                api_key,
            )
        )
        excluded = label in exclude_set
        if excluded:
            excluded_raw += raw
        known[label] = {
            "address": address,
            "balanceRaw": str(raw),
            "balanceUnits": human(raw),
            "excludedFromCirculatingSupply": excluded,
        }

    circulating_raw = total_supply_raw - excluded_raw
    page_stats = fetch_etherscan_page_stats(args.token)

    return {
        "schema": "cwusdc-supply-circulating-attestation/v1",
        # dt.UTC requires Python 3.11+.
        "generatedAt": dt.datetime.now(dt.UTC).isoformat().replace("+00:00", "Z"),
        "purpose": "Tracker-facing supply and circulating-supply attestation for Etherscan Value propagation.",
        "network": {"chainId": 1, "name": "Ethereum Mainnet", "referenceBlock": latest_block},
        "token": {
            "address": args.token,
            "caip19": f"eip155:1/erc20:{args.token}",
            "name": "Wrapped cUSDC",
            "symbol": "cWUSDC",
            "decimals": DECIMALS,
            "etherscan": ETHERSCAN_PAGE.format(address=args.token),
        },
        "supply": {
            "totalSupplyRaw": str(total_supply_raw),
            "totalSupplyUnits": human(total_supply_raw),
            "excludedProtocolControlledRaw": str(excluded_raw),
            "excludedProtocolControlledUnits": human(excluded_raw),
            "circulatingSupplyRaw": str(circulating_raw),
            "circulatingSupplyUnits": human(circulating_raw),
            "formula": "circulatingSupply = totalSupply - explicitlyExcludedProtocolControlledNonCirculatingBalances",
            "defaultPolicy": "No known balance is excluded unless the operator passes --exclude-known for that label or a tracker requests a specific exclusion methodology.",
        },
        "knownBalances": known,
        "etherscanPageObservation": page_stats,
        "submissionPosition": {
            "readyForTrackerReview": True,
            "requestedProviderAction": "Accept total/circulating supply for the exact Mainnet cWUSDC contract and use it with accepted USD price data to populate market cap/value surfaces.",
            "caveats": [
                "This is an on-chain supply attestation, not third-party listing approval.",
                "Chain 138 cUSDC source-asset activity must not be counted as Ethereum Mainnet cWUSDC transfer activity.",
                "If a tracker requires treasury, bridge, operator, or pool exclusions, regenerate with explicit --exclude-known labels and attach the requested signed inventory.",
            ],
        },
    }
|
||||
|
||||
|
||||
def write_md(payload: dict[str, Any], path: Path) -> None:
    """Render the attestation *payload* as a Markdown report at *path*."""
    supply = payload["supply"]
    token = payload["token"]
    rows: list[str] = []
    # Header and supply summary table.
    rows += [
        "# cWUSDC Supply and Circulating-Supply Attestation",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Reference block: `{payload['network']['referenceBlock']}`",
        f"- Token: `{token['address']}`",
        f"- CAIP-19: `{token['caip19']}`",
        "",
        "## Supply",
        "",
        "| Field | Value |",
        "|---|---:|",
        f"| Total supply | `{supply['totalSupplyUnits']}` |",
        f"| Explicitly excluded protocol-controlled balances | `{supply['excludedProtocolControlledUnits']}` |",
        f"| Circulating supply | `{supply['circulatingSupplyUnits']}` |",
        "",
        f"Formula: `{supply['formula']}`",
        "",
        "## Known Balances",
        "",
        "| Label | Address | Balance | Excluded |",
        "|---|---|---:|---:|",
    ]
    # One table row per known labeled balance.
    for label, entry in payload["knownBalances"].items():
        rows.append(
            f"| `{label}` | `{entry['address']}` | `{entry['balanceUnits']}` | `{entry['excludedFromCirculatingSupply']}` |"
        )
    observation = payload["etherscanPageObservation"]
    rows += [
        "",
        "## Etherscan Observation",
        "",
        f"- URL: `{observation['url']}`",
        f"- Holders text: `{observation['holdersText']}`",
        f"- Total supply text: `{observation['totalSupplyText']}`",
        f"- Onchain market cap missing: `{observation['onchainMarketCapMissing']}`",
        f"- Circulating market cap missing: `{observation['circulatingMarketCapMissing']}`",
        "",
        "## Caveats",
        "",
    ]
    rows += [f"- {caveat}" for caveat in payload["submissionPosition"]["caveats"]]
    path.write_text("\n".join(rows) + "\n")
|
||||
|
||||
|
||||
def _display_path(path: Path) -> Path:
    """Return *path* relative to ROOT when possible, else the path itself.

    ``Path.relative_to`` raises ValueError for paths outside ROOT, which
    previously crashed the final status prints when --json-out/--md-out
    pointed outside the repository.
    """
    try:
        return path.relative_to(ROOT)
    except ValueError:
        return path


def main() -> int:
    """CLI entry point: build the attestation, write JSON + Markdown reports.

    Returns 0 on success; ``build`` raises SystemExit when no API key is set.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--token", default=CWUSDC)
    parser.add_argument("--etherscan-api-key", default="")
    parser.add_argument("--exclude-known", action="append", choices=sorted(KNOWN_BALANCES), help="Known balance label to exclude from circulating supply. Repeatable.")
    parser.add_argument("--json-out", type=Path, default=REPORT_JSON)
    parser.add_argument("--md-out", type=Path, default=REPORT_MD)
    args = parser.parse_args()

    payload = build(args)
    args.json_out.parent.mkdir(parents=True, exist_ok=True)
    args.json_out.write_text(json.dumps(payload, indent=2) + "\n")
    write_md(payload, args.md_out)
    print(f"Wrote {_display_path(args.json_out)}")
    print(f"Wrote {_display_path(args.md_out)}")
    print(f"circulatingSupply={payload['supply']['circulatingSupplyUnits']}")
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
@@ -0,0 +1,206 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Generate a global cUSDC/cWUSDC family supply proof from report APIs.
|
||||
|
||||
This is deliberately NOT the Ethereum Mainnet cWUSDC Etherscan supply proof.
|
||||
It is a cross-chain family inventory. Entries without supply proof are listed
|
||||
but excluded from aggregate totals.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import datetime as dt
|
||||
import json
|
||||
import urllib.request
|
||||
from decimal import Decimal, getcontext
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
# High precision so large raw supplies never round during aggregation.
getcontext().prec = 80

# Repo root (this file lives two directories below it), report API source,
# and default output report paths.
ROOT = Path(__file__).resolve().parents[2]
DEFAULT_API = "https://explorer.d-bis.org/api/v1/report/all"
DEFAULT_JSON = ROOT / "reports" / "status" / "global-cusdc-cwusdc-family-supply-proof-latest.json"
DEFAULT_MD = ROOT / "reports" / "status" / "global-cusdc-cwusdc-family-supply-proof-latest.md"
# Lowercased Mainnet cWUSDC address, used to flag the canonical entry.
ETH_MAINNET_CWUSDC = "0x2de5f116bfce3d0f922d9c8351e0c5fc24b9284a"
|
||||
|
||||
|
||||
def fetch_json(url: str) -> Any:
    """GET *url* and decode the response body as JSON.

    Sends an identifying User-Agent so the report API can attribute traffic
    to this tool. Raises on network/HTTP errors (callers treat the proof as
    unavailable in that case).
    """
    headers = {"User-Agent": "dbis-global-cusdc-cwusdc-proof/1.0"}
    request = urllib.request.Request(url, headers=headers)
    with urllib.request.urlopen(request, timeout=30) as resp:
        body = resp.read()
    return json.loads(body.decode("utf-8"))
|
||||
|
||||
|
||||
def decimal_or_none(value: Any) -> Decimal | None:
    """Coerce *value* to a Decimal via its string form.

    Returns None for None or anything Decimal cannot parse, so malformed
    report entries simply drop out of the aggregate totals.
    """
    if value is None:
        return None
    try:
        parsed = Decimal(str(value))
    except Exception:  # noqa: BLE001 - proof should keep malformed entries out of totals
        return None
    return parsed
|
||||
|
||||
|
||||
def fmt(value: Decimal) -> str:
    """Render a Decimal in plain fixed-point notation (never scientific)."""
    return format(value, "f")
|
||||
|
||||
|
||||
def iter_family_tokens(report: dict[str, Any]) -> list[dict[str, Any]]:
    """Collect normalized cUSDC/cWUSDC rows from the per-chain token report.

    A row is ``provedForAggregate`` only when its total supply parsed and its
    provenance status is not ``proof_required``; unproved rows are still
    listed so the report can show what is missing. Result is sorted by
    (chainId, symbol, address).
    """
    collected: list[dict[str, Any]] = []
    for chain_key, chain_tokens in (report.get("tokens") or {}).items():
        if not isinstance(chain_tokens, list):
            continue
        for entry in chain_tokens:
            symbol = entry.get("symbol")
            if symbol not in {"cUSDC", "cWUSDC"}:
                continue
            total_supply = decimal_or_none(entry.get("totalSupply"))
            circ_supply = decimal_or_none(entry.get("circulatingSupply"))
            provenance = entry.get("supplyProofProvenance") or {}
            proof_pending = str(provenance.get("status", "")).lower() == "proof_required"
            addr = str(entry.get("address") or "").lower()
            # Prefer the token's own chainId; fall back to the report's chain key.
            chain_id = int(entry.get("chainId") or chain_key)
            collected.append(
                {
                    "chainId": chain_id,
                    "address": addr,
                    "symbol": symbol,
                    "name": entry.get("name"),
                    "type": entry.get("type"),
                    "decimals": entry.get("decimals"),
                    "totalSupply": None if total_supply is None else str(total_supply),
                    "circulatingSupply": None if circ_supply is None else str(circ_supply),
                    "provedForAggregate": total_supply is not None and not proof_pending,
                    "isEthereumMainnetCwusdc": chain_id == 1 and addr == ETH_MAINNET_CWUSDC,
                    "supplyProofProvenance": provenance,
                    "trackerCaveats": entry.get("trackerCaveats") or [],
                }
            )
    collected.sort(key=lambda row: (row["chainId"], row["symbol"], row["address"]))
    return collected
|
||||
|
||||
|
||||
def build(api_url: str) -> dict[str, Any]:
    """Fetch the report API and assemble the family supply-proof payload.

    Only rows with ``provedForAggregate`` contribute to the totals;
    ``proof_required`` entries are listed but excluded from aggregates.
    """
    report = fetch_json(api_url)
    rows = iter_family_tokens(report)
    proved = [row for row in rows if row["provedForAggregate"]]
    unproved = [row for row in rows if not row["provedForAggregate"]]

    # Running totals over proved rows only, split by base vs wrapped symbol.
    totals: dict[str, Decimal] = {
        "globalFamilyTotalSupply": Decimal(0),
        "globalFamilyCirculatingSupply": Decimal(0),
        "baseCusdcTotalSupply": Decimal(0),
        "baseCusdcCirculatingSupply": Decimal(0),
        "wrappedCwusdcTotalSupply": Decimal(0),
        "wrappedCwusdcCirculatingSupply": Decimal(0),
    }
    for row in proved:
        total = Decimal(row["totalSupply"])
        # Fall back to total supply when no circulating figure was reported.
        circulating = Decimal(row["circulatingSupply"] or row["totalSupply"])
        totals["globalFamilyTotalSupply"] += total
        totals["globalFamilyCirculatingSupply"] += circulating
        if row["symbol"] == "cUSDC":
            totals["baseCusdcTotalSupply"] += total
            totals["baseCusdcCirculatingSupply"] += circulating
        elif row["symbol"] == "cWUSDC":
            totals["wrappedCwusdcTotalSupply"] += total
            totals["wrappedCwusdcCirculatingSupply"] += circulating

    # Surface the single Ethereum Mainnet cWUSDC row separately: Etherscan
    # submissions must use that contract's supply, never family totals.
    eth_mainnet = next((row for row in rows if row["isEthereumMainnetCwusdc"]), None)
    return {
        "schema": "global-cusdc-cwusdc-family-supply-proof/v1",
        "generatedAt": dt.datetime.now(dt.UTC).isoformat().replace("+00:00", "Z"),
        "source": {
            "api": api_url,
            "reportGeneratedAt": report.get("generatedAt"),
        },
        "scope": {
            "description": "Cross-chain cUSDC/cWUSDC family supply inventory across report API chains.",
            "notForEtherscanEthereumTokenPage": True,
            "etherscanEthereumOnlyToken": {
                "chainId": 1,
                "address": ETH_MAINNET_CWUSDC,
                "note": "Use the Ethereum-only cWUSDC supply attestation for Etherscan Value submissions, not this global family total.",
                "entry": eth_mainnet,
            },
        },
        "summary": {
            "familyEntryCount": len(rows),
            "provedAggregateEntryCount": len(proved),
            "proofRequiredEntryCount": len(unproved),
            # Totals are serialized as plain fixed-point strings.
            **{key: fmt(value) for key, value in totals.items()},
        },
        "entries": rows,
        "proofRequiredEntries": unproved,
        "caveats": [
            "This is a global cross-chain family inventory, not a circulating-supply claim for any single chain explorer.",
            "Entries marked proof_required are excluded from aggregate totals.",
            "Ethereum Etherscan Value for cWUSDC must use only the Ethereum Mainnet cWUSDC contract supply.",
            "cUSDC source assets and cWUSDC wrapped assets may represent related economic rails; global totals should not be used as a market-cap input without a tracker-approved methodology that prevents double counting.",
        ],
    }
|
||||
|
||||
|
||||
def write_md(payload: dict[str, Any], path: Path) -> None:
    """Render the supply-proof *payload* as a Markdown report at *path*."""
    summary = payload["summary"]
    lines = [
        "# Global cUSDC/cWUSDC Family Supply Proof",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Source API: `{payload['source']['api']}`",
        f"- Source report generated: `{payload['source']['reportGeneratedAt']}`",
        f"- Not for Ethereum Etherscan token page: `{payload['scope']['notForEtherscanEthereumTokenPage']}`",
        "",
        "## Aggregate Totals",
        "",
        "| Field | Value |",
        "|---|---:|",
        f"| Family entries | `{summary['familyEntryCount']}` |",
        f"| Proved aggregate entries | `{summary['provedAggregateEntryCount']}` |",
        f"| Proof-required entries | `{summary['proofRequiredEntryCount']}` |",
        f"| Global family total supply, proved only | `{summary['globalFamilyTotalSupply']}` |",
        f"| Global family circulating supply, proved only | `{summary['globalFamilyCirculatingSupply']}` |",
        f"| Base cUSDC total supply, proved only | `{summary['baseCusdcTotalSupply']}` |",
        f"| Wrapped cWUSDC total supply, proved only | `{summary['wrappedCwusdcTotalSupply']}` |",
        "",
        "## Ethereum Mainnet cWUSDC Reminder",
        "",
        "For Etherscan Value, use only:",
        "",
        f"`{payload['scope']['etherscanEthereumOnlyToken']['address']}`",
        "",
        "Do not use the global family total for the Ethereum token page.",
        "",
        "## Entries",
        "",
        "| Chain | Symbol | Type | Address | Total supply | Circulating supply | Proved |",
        "|---:|---|---|---|---:|---:|---:|",
    ]
    # One table row per family entry, proved or not.
    for row in payload["entries"]:
        lines.append(
            f"| `{row['chainId']}` | `{row['symbol']}` | `{row['type']}` | `{row['address']}` | `{row['totalSupply']}` | `{row['circulatingSupply']}` | `{row['provedForAggregate']}` |"
        )
    lines.extend(["", "## Caveats", ""])
    for caveat in payload["caveats"]:
        lines.append(f"- {caveat}")
    path.write_text("\n".join(lines) + "\n")
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: build the payload and write JSON + Markdown reports."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--api-url", default=DEFAULT_API)
    parser.add_argument("--json-out", type=Path, default=DEFAULT_JSON)
    parser.add_argument("--md-out", type=Path, default=DEFAULT_MD)
    args = parser.parse_args()

    payload = build(args.api_url)
    args.json_out.parent.mkdir(parents=True, exist_ok=True)
    args.json_out.write_text(json.dumps(payload, indent=2) + "\n")
    write_md(payload, args.md_out)
    # Paths are printed repo-relative so logs stay stable across checkouts.
    print(f"Wrote {args.json_out.relative_to(ROOT)}")
    print(f"Wrote {args.md_out.relative_to(ROOT)}")
    print(f"globalFamilyTotalSupply={payload['summary']['globalFamilyTotalSupply']}")
    print(f"proofRequiredEntryCount={payload['summary']['proofRequiredEntryCount']}")
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate main()'s return code as the process exit status.
    raise SystemExit(main())
|
||||
361
scripts/verify/monitor-cwusdc-etherscan-value-propagation.py
Normal file
361
scripts/verify/monitor-cwusdc-etherscan-value-propagation.py
Normal file
@@ -0,0 +1,361 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Monitor whether cWUSDC USD value has propagated to Etherscan and upstream feeds."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import datetime as dt
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
REPORT_JSON = ROOT / "reports" / "status" / "cwusdc-etherscan-value-propagation-latest.json"
|
||||
REPORT_MD = ROOT / "reports" / "status" / "cwusdc-etherscan-value-propagation-latest.md"
|
||||
CWUSDC = "0x2de5f116bfce3d0f922d9c8351e0c5fc24b9284a"
|
||||
ETHERSCAN_API = "https://api.etherscan.io/v2/api"
|
||||
ETHERSCAN_PAGE = f"https://etherscan.io/token/{CWUSDC}"
|
||||
COINGECKO_PRICE = (
|
||||
"https://api.coingecko.com/api/v3/simple/token_price/ethereum?"
|
||||
f"contract_addresses={CWUSDC}&vs_currencies=usd&include_market_cap=true&include_24hr_vol=true&include_last_updated_at=true"
|
||||
)
|
||||
DEXSCREENER_TOKEN_PAIRS = f"https://api.dexscreener.com/token-pairs/v1/ethereum/{CWUSDC}"
|
||||
GECKOTERMINAL_POOLS = [
|
||||
"https://api.geckoterminal.com/api/v2/networks/eth/pools/0x1cf2e685682c7f7bef508f0af15dfb5cdda01ee3",
|
||||
"https://api.geckoterminal.com/api/v2/networks/eth/pools/0xc28706f899266b36bc43cc072b3a921bdf2c48d9",
|
||||
]
|
||||
|
||||
|
||||
def load_dotenv(path: Path) -> None:
    """Load KEY=VALUE pairs from *path* into os.environ.

    Already-set environment variables win. Blank lines, '#' comments, and
    lines without '=' are skipped; surrounding double then single quotes are
    stripped from the value. A missing file is a silent no-op.
    """
    if not path.exists():
        return
    for raw_line in path.read_text().splitlines():
        stripped = raw_line.strip()
        if not stripped or stripped.startswith("#"):
            continue
        if "=" not in stripped:
            continue
        name, _, raw_value = stripped.partition("=")
        name = name.strip()
        cleaned = raw_value.strip().strip('"').strip("'")
        if name and name not in os.environ:
            os.environ[name] = cleaned
|
||||
|
||||
|
||||
def fetch_text(url: str, timeout: int = 30) -> tuple[int | None, str, str]:
    """GET *url* and return ``(status, content_type, body_text)``.

    HTTP error responses still yield their status and body; any other
    failure yields ``(None, "", str(exception))`` so the monitor records
    evidence instead of crashing.
    """
    req = urllib.request.Request(
        url,
        headers={
            # Browser-like UA/Accept: some probed surfaces reject bare clients.
            "User-Agent": "Mozilla/5.0 DBIS-cwusdc-value-monitor/1.0",
            "Accept": "application/json,text/html;q=0.9,*/*;q=0.8",
        },
    )
    try:
        with urllib.request.urlopen(req, timeout=timeout) as response:
            return response.status, response.headers.get("content-type", ""), response.read().decode("utf-8", errors="replace")
    except urllib.error.HTTPError as exc:
        # Keep the error body: these APIs put useful JSON in error responses.
        body = exc.read().decode("utf-8", errors="replace") if exc.fp else ""
        return exc.code, exc.headers.get("content-type", "") if exc.headers else "", body
    except Exception as exc:  # noqa: BLE001 - monitor evidence should capture transient failures
        return None, "", str(exc)
|
||||
|
||||
|
||||
def fetch_json(url: str, timeout: int = 30) -> tuple[int | None, str, Any, str]:
    """Fetch *url* and parse its body as JSON.

    Returns ``(status, content_type, parsed_or_None, parse_error)``; the
    error string is empty when parsing succeeded.
    """
    status, ctype, body = fetch_text(url, timeout)
    try:
        parsed = json.loads(body)
    except json.JSONDecodeError as exc:
        return status, ctype, None, str(exc)
    return status, ctype, parsed, ""
|
||||
|
||||
|
||||
def fetch_etherscan_api(params: dict[str, str], api_key: str) -> tuple[int | None, str, Any, str]:
    """Call the Etherscan V2 API (chainid=1), retrying on rate limits.

    Returns the same 4-tuple as fetch_json. Up to 5 attempts; only a
    rate-limit response triggers a retry with increasing back-off.
    """
    query = {"chainid": "1", **params, "apikey": api_key}
    url = f"{ETHERSCAN_API}?{urllib.parse.urlencode(query)}"
    last: tuple[int | None, str, Any, str] = (None, "", None, "")
    for attempt in range(5):
        status, content_type, data, error = fetch_json(url)
        last = (status, content_type, data, error)
        if error:
            return last
        if not isinstance(data, dict):
            return last
        message = str(data.get("message", ""))
        result = data.get("result")
        # Etherscan signals errors (including rate limits) with status == "0".
        if str(data.get("status")) != "0":
            # Brief pause before returning on success — presumably a courtesy
            # delay between successive API calls; confirm intent with author.
            time.sleep(0.25)
            return last
        if "rate limit" in message.lower() or "rate limit" in str(result).lower():
            # Back off progressively and retry.
            time.sleep(1.25 + attempt * 0.5)
            continue
        # status "0" for a non-rate-limit reason: return the error payload.
        return last
    return last
|
||||
|
||||
|
||||
def extract_div_missing(html: str, element_id: str) -> bool:
    """Return True when the element with *element_id* renders a bare '-' div.

    Etherscan shows '-' for metrics (e.g. market cap) that have no value yet.
    """
    needle = rf'id="{re.escape(element_id)}".*?<div>\s*-\s*</div>'
    found = re.search(needle, html, re.I | re.S)
    return found is not None
|
||||
|
||||
|
||||
def parse_etherscan() -> dict[str, Any]:
    """Scrape the Etherscan token page for profile/value propagation signals.

    ``valueReady`` means: page loaded with 2xx, the profile text is present,
    and neither market-cap row still renders the blank '-' placeholder.
    """
    status, content_type, html = fetch_text(ETHERSCAN_PAGE)
    # Both the long name and the symbol must appear for the profile to count.
    has_profile = "Wrapped cUSDC" in html and "cWUSDC" in html
    # Hidden form field carrying the raw total supply, if present.
    total_supply_match = re.search(r'id="ContentPlaceHolder1_hdnTotalSupply" value="([^"]+)"', html)
    holders_match = re.search(r"<h4[^>]*>\s*Holders\s*</h4>\s*<div[^>]*>\s*<div>\s*([0-9,]+)", html, re.I)
    market_missing = extract_div_missing(html, "ContentPlaceHolder1_tr_marketcap")
    circ_market_missing = extract_div_missing(html, "ContentPlaceHolder1_tr_circulatingmarketcap")
    value_ready = bool(status and 200 <= status < 300 and has_profile and not market_missing and not circ_market_missing)
    return {
        "id": "etherscan_token_page",
        "url": ETHERSCAN_PAGE,
        "status": status,
        "contentType": content_type,
        "profileDetected": has_profile,
        "holdersText": holders_match.group(1) if holders_match else None,
        "totalSupplyText": total_supply_match.group(1) if total_supply_match else None,
        "onchainMarketCapMissing": market_missing,
        "circulatingMarketCapMissing": circ_market_missing,
        "valueReady": value_ready,
    }
|
||||
|
||||
|
||||
def parse_etherscan_tokeninfo(api_key: str) -> dict[str, Any]:
    """Query the Etherscan token/tokeninfo API for cWUSDC metadata and price.

    Returns a ``skipped`` stub when no API key is configured.
    ``metadataReady`` requires matching address/symbol and a non-empty name;
    ``priceReady`` requires a positive ``tokenPriceUSD``.
    """
    if not api_key:
        # NOTE: this stub intentionally carries only the readiness flags; it
        # lacks the detail fields (symbol, tokenPriceUSD, image, website).
        return {
            "id": "etherscan_tokeninfo_api",
            "url": ETHERSCAN_API,
            "status": None,
            "contentType": "",
            "parseError": "",
            "skipped": True,
            "skipReason": "ETHERSCAN_API_KEY is not set.",
            "metadataReady": False,
            "priceReady": False,
        }

    status, content_type, data, error = fetch_etherscan_api(
        {"module": "token", "action": "tokeninfo", "contractaddress": CWUSDC},
        api_key,
    )
    result = data.get("result") if isinstance(data, dict) else None
    # The API returns a one-element list on success; anything else -> None.
    entry = result[0] if isinstance(result, list) and result and isinstance(result[0], dict) else None
    token_price_raw = entry.get("tokenPriceUSD") if isinstance(entry, dict) else None
    try:
        token_price = float(token_price_raw) if token_price_raw not in (None, "") else 0.0
    except (TypeError, ValueError):
        token_price = 0.0
    # NOTE(review): entry.get("contractAddress", "") returns None if the key
    # is present with a null value, making .lower() raise — confirm the API
    # never returns a null contractAddress.
    metadata_ready = bool(
        isinstance(entry, dict)
        and entry.get("contractAddress", "").lower() == CWUSDC
        and entry.get("symbol") == "cWUSDC"
        and entry.get("tokenName")
    )
    # Any of the optional profile fields counts as an enriched profile.
    profile_enriched = bool(
        isinstance(entry, dict)
        and (entry.get("image") or entry.get("website") or entry.get("description") or entry.get("twitter"))
    )
    return {
        "id": "etherscan_tokeninfo_api",
        "url": ETHERSCAN_API,
        "status": status,
        "contentType": content_type,
        "parseError": error,
        "skipped": False,
        "apiStatus": data.get("status") if isinstance(data, dict) else None,
        "apiMessage": data.get("message") if isinstance(data, dict) else None,
        "apiResultPreview": result,
        "metadataReady": metadata_ready,
        "profileEnriched": profile_enriched,
        "priceReady": token_price > 0,
        "tokenPriceUSD": token_price_raw,
        "tokenName": entry.get("tokenName") if isinstance(entry, dict) else None,
        "symbol": entry.get("symbol") if isinstance(entry, dict) else None,
        "divisor": entry.get("divisor") if isinstance(entry, dict) else None,
        "tokenType": entry.get("tokenType") if isinstance(entry, dict) else None,
        "totalSupply": entry.get("totalSupply") if isinstance(entry, dict) else None,
        "blueCheckmark": entry.get("blueCheckmark") if isinstance(entry, dict) else None,
        "image": entry.get("image") if isinstance(entry, dict) else None,
        "website": entry.get("website") if isinstance(entry, dict) else None,
        "descriptionPresent": bool(entry.get("description")) if isinstance(entry, dict) else False,
    }
|
||||
|
||||
|
||||
def parse_coingecko() -> dict[str, Any]:
    """Check the CoinGecko contract-price endpoint for a positive cWUSDC price."""
    status, content_type, data, error = fetch_json(COINGECKO_PRICE)
    entry = None
    if isinstance(data, dict):
        # CoinGecko keys the response by the (lowercase) contract address.
        entry = data.get(CWUSDC)
    usd = entry.get("usd") if isinstance(entry, dict) else None
    return {
        "id": "coingecko_token_price",
        "url": COINGECKO_PRICE,
        "status": status,
        "contentType": content_type,
        "parseError": error,
        "listedByContract": isinstance(entry, dict),
        "usd": usd,
        "marketCapUsd": entry.get("usd_market_cap") if isinstance(entry, dict) else None,
        "volume24hUsd": entry.get("usd_24h_vol") if isinstance(entry, dict) else None,
        "lastUpdatedAt": entry.get("last_updated_at") if isinstance(entry, dict) else None,
        "priceReady": isinstance(usd, (int, float)) and usd > 0,
        "jsonPreview": data,
    }
|
||||
|
||||
|
||||
def parse_dexscreener() -> dict[str, Any]:
    """Check whether DexScreener indexes any Ethereum pairs for cWUSDC."""
    status, ctype, payload, parse_error = fetch_json(DEXSCREENER_TOKEN_PAIRS)
    is_pair_list = isinstance(payload, list)
    pairs = len(payload) if is_pair_list else 0
    preview = payload[:3] if is_pair_list else payload
    return {
        "id": "dexscreener_token_pairs",
        "url": DEXSCREENER_TOKEN_PAIRS,
        "status": status,
        "contentType": ctype,
        "parseError": parse_error,
        "pairCount": pairs,
        "indexed": pairs > 0,
        "jsonPreview": preview,
    }
|
||||
|
||||
|
||||
def parse_geckoterminal() -> list[dict[str, Any]]:
    """Probe each known GeckoTerminal pool endpoint; one check dict per pool."""
    checks: list[dict[str, Any]] = []
    for url in GECKOTERMINAL_POOLS:
        status, content_type, data, error = fetch_json(url)
        # JSON:API shape: {"data": {"attributes": {...}}}; missing -> not indexed.
        attrs = ((data or {}).get("data") or {}).get("attributes") if isinstance(data, dict) else None
        checks.append(
            {
                "id": "geckoterminal_pool",
                "url": url,
                "status": status,
                "contentType": content_type,
                "parseError": error,
                "indexed": isinstance(attrs, dict),
                "reserveUsd": attrs.get("reserve_in_usd") if isinstance(attrs, dict) else None,
                "volume24hUsd": ((attrs.get("volume_usd") or {}).get("h24") if isinstance(attrs, dict) else None),
            }
        )
    return checks
|
||||
|
||||
|
||||
def build() -> dict[str, Any]:
    """Run every propagation probe and assemble the monitor payload.

    Blockers are human-readable strings describing which surfaces still
    lack cWUSDC value/metadata; an empty list means nothing is blocking.
    """
    load_dotenv(ROOT / ".env")
    etherscan_api_key = os.environ.get("ETHERSCAN_API_KEY", "")
    etherscan = parse_etherscan()
    etherscan_tokeninfo = parse_etherscan_tokeninfo(etherscan_api_key)
    coingecko = parse_coingecko()
    dexscreener = parse_dexscreener()
    gecko = parse_geckoterminal()
    blockers: list[str] = []
    if not etherscan["profileDetected"]:
        blockers.append("Etherscan token profile text was not detected.")
    if etherscan["onchainMarketCapMissing"]:
        blockers.append("Etherscan Onchain Market Cap is still blank.")
    if etherscan["circulatingMarketCapMissing"]:
        blockers.append("Etherscan Circulating Supply Market Cap is still blank.")
    # A free-tier key gets an "API Pro endpoint" error from tokeninfo; in that
    # case the metadata/price checks are unmonitorable rather than failing.
    tokeninfo_preview = str(etherscan_tokeninfo.get("apiResultPreview", ""))
    if "API Pro endpoint" in tokeninfo_preview:
        blockers.append("Etherscan tokeninfo API is an API Pro endpoint for the current key; tokeninfo propagation cannot be monitored with the current plan.")
    elif not etherscan_tokeninfo["metadataReady"]:
        blockers.append("Etherscan tokeninfo API does not return accepted token metadata for cWUSDC.")
    if "API Pro endpoint" not in tokeninfo_preview and not etherscan_tokeninfo["priceReady"]:
        blockers.append("Etherscan tokeninfo API does not return a positive USD token price.")
    if not coingecko["priceReady"]:
        blockers.append("CoinGecko contract price API does not return a positive USD price.")
    if not dexscreener["indexed"]:
        blockers.append("DexScreener token-pairs API still does not index cWUSDC pairs.")

    return {
        "schema": "cwusdc-etherscan-value-propagation/v1",
        "generatedAt": dt.datetime.now(dt.UTC).isoformat().replace("+00:00", "Z"),
        "token": {
            "chainId": 1,
            "address": CWUSDC,
            "caip19": f"eip155:1/erc20:{CWUSDC}",
            "symbol": "cWUSDC",
        },
        "summary": {
            "etherscanValueReady": etherscan["valueReady"],
            "etherscanTokenInfoMetadataReady": etherscan_tokeninfo["metadataReady"],
            "etherscanTokenInfoPriceReady": etherscan_tokeninfo["priceReady"],
            "coingeckoPriceReady": coingecko["priceReady"],
            # Either surface showing a value counts as propagation progress.
            "readyForEtherscanValuePropagation": etherscan["valueReady"] or coingecko["priceReady"],
            "blockers": blockers,
        },
        "checks": {
            "etherscan": etherscan,
            "etherscanTokenInfo": etherscan_tokeninfo,
            "coingecko": coingecko,
            "dexscreener": dexscreener,
            "geckoterminal": gecko,
        },
    }
|
||||
|
||||
|
||||
def write_md(payload: dict[str, Any], path: Path) -> None:
    """Render the propagation-monitor *payload* as a Markdown report at *path*.

    Fix: the tokeninfo check may be the "skipped" stub (no ETHERSCAN_API_KEY),
    which omits the detail keys symbol/tokenPriceUSD/image/website — direct
    indexing raised KeyError. Those fields are now read with .get() so a
    skipped check renders as `None` instead of crashing the monitor.
    """
    summary = payload["summary"]
    checks = payload["checks"]
    lines = [
        "# cWUSDC Etherscan Value Propagation Monitor",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Token: `{payload['token']['address']}`",
        f"- CAIP-19: `{payload['token']['caip19']}`",
        f"- Etherscan value ready: `{summary['etherscanValueReady']}`",
        f"- Etherscan tokeninfo metadata ready: `{summary['etherscanTokenInfoMetadataReady']}`",
        f"- Etherscan tokeninfo price ready: `{summary['etherscanTokenInfoPriceReady']}`",
        f"- CoinGecko price ready: `{summary['coingeckoPriceReady']}`",
        "",
        "## Blockers",
        "",
    ]
    if summary["blockers"]:
        lines.extend(f"- {item}" for item in summary["blockers"])
    else:
        lines.append("- None detected by this monitor.")
    # tokeninfo detail fields are optional (absent when the check was skipped).
    tokeninfo = checks["etherscanTokenInfo"]
    lines.extend(
        [
            "",
            "## Checks",
            "",
            "| Surface | Status | Ready / indexed | Key fields |",
            "|---|---:|---:|---|",
            f"| Etherscan | `{checks['etherscan']['status']}` | `{checks['etherscan']['valueReady']}` | marketCapMissing={checks['etherscan']['onchainMarketCapMissing']}; circulatingMarketCapMissing={checks['etherscan']['circulatingMarketCapMissing']}; holders={checks['etherscan']['holdersText']} |",
            f"| Etherscan tokeninfo API | `{tokeninfo['status']}` | `{tokeninfo['metadataReady']}` / price `{tokeninfo['priceReady']}` | symbol={tokeninfo.get('symbol')}; price={tokeninfo.get('tokenPriceUSD')}; image={tokeninfo.get('image')}; website={tokeninfo.get('website')} |",
            f"| CoinGecko contract price | `{checks['coingecko']['status']}` | `{checks['coingecko']['priceReady']}` | usd={checks['coingecko']['usd']}; marketCap={checks['coingecko']['marketCapUsd']}; lastUpdated={checks['coingecko']['lastUpdatedAt']} |",
            f"| DexScreener token pairs | `{checks['dexscreener']['status']}` | `{checks['dexscreener']['indexed']}` | pairCount={checks['dexscreener']['pairCount']} |",
        ]
    )
    for item in checks["geckoterminal"]:
        lines.append(
            f"| GeckoTerminal pool | `{item['status']}` | `{item['indexed']}` | reserveUsd={item['reserveUsd']}; volume24hUsd={item['volume24hUsd']}; url={item['url']} |"
        )
    path.write_text("\n".join(lines) + "\n")
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: run the monitor and write JSON + Markdown reports.

    With --strict, exits non-zero while the Etherscan value is not yet ready.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--json-out", type=Path, default=REPORT_JSON)
    parser.add_argument("--md-out", type=Path, default=REPORT_MD)
    parser.add_argument("--strict", action="store_true")
    args = parser.parse_args()

    payload = build()
    args.json_out.parent.mkdir(parents=True, exist_ok=True)
    args.json_out.write_text(json.dumps(payload, indent=2) + "\n")
    write_md(payload, args.md_out)
    # Paths are printed repo-relative so logs stay stable across checkouts.
    print(f"Wrote {args.json_out.relative_to(ROOT)}")
    print(f"Wrote {args.md_out.relative_to(ROOT)}")
    print(f"etherscanValueReady={payload['summary']['etherscanValueReady']}")
    if payload["summary"]["blockers"]:
        print("Blockers: " + "; ".join(payload["summary"]["blockers"]))
    if args.strict and not payload["summary"]["etherscanValueReady"]:
        return 1
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate main()'s return code as the process exit status.
    raise SystemExit(main())
|
||||
303
scripts/verify/plan-engine-x-automated-liquidity-advisor.py
Executable file
303
scripts/verify/plan-engine-x-automated-liquidity-advisor.py
Executable file
@@ -0,0 +1,303 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
from datetime import datetime, timezone
|
||||
from decimal import Decimal, ROUND_FLOOR, getcontext
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
getcontext().prec = 80
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
POLICY_PATH = ROOT / "config/engine-x/automation-policy.json"
|
||||
OUT_JSON = ROOT / "reports/status/engine-x-automated-liquidity-advisor-latest.json"
|
||||
OUT_MD = ROOT / "reports/status/engine-x-automated-liquidity-advisor-latest.md"
|
||||
XAUT_MAINNET = "0x68749665FF8D2d112Fa859AA293F07A622782F38"
|
||||
|
||||
|
||||
def read_json(path: Path) -> dict[str, Any] | None:
    """Load JSON from *path*.

    Returns None when the file is absent, and an ``{"_error": ...}`` marker
    dict when the file exists but is not valid JSON.
    """
    try:
        text = path.read_text()
    except FileNotFoundError:
        return None
    try:
        return json.loads(text)
    except json.JSONDecodeError as exc:
        return {"_error": f"invalid json: {exc}"}
|
||||
|
||||
|
||||
def env_decimal(name: str, default: str | None = None) -> Decimal | None:
    """Read environment variable *name* as a Decimal; None when unset/empty."""
    raw = os.environ.get(name, default)
    if not raw:
        return None
    return Decimal(raw)
|
||||
|
||||
|
||||
def env_int(name: str, default: str | None = None) -> int | None:
    """Read env var *name* as an int (via Decimal, so "1e6"/"2.9" parse).

    Returns None when unset or empty; fractional values truncate toward zero.
    """
    raw = os.environ.get(name, default)
    if not raw:
        return None
    return int(Decimal(raw))
|
||||
|
||||
|
||||
def decstr(value: Decimal, places: int = 6) -> str:
    """Format *value* quantized to *places* decimal places (banker's rounding)."""
    quantum = Decimal(10) ** -places
    return str(value.quantize(quantum))
|
||||
|
||||
|
||||
def raw6(units: Decimal) -> int:
    """Convert whole-token units to a 6-decimal raw integer, flooring down."""
    scaled = units * Decimal(1_000_000)
    return int(scaled.to_integral_value(rounding=ROUND_FLOOR))
|
||||
|
||||
|
||||
def units_from_raw(raw: int, decimals: int) -> Decimal:
    """Convert a raw integer token amount to whole-token units."""
    divisor = Decimal(10**decimals)
    return Decimal(raw) / divisor
|
||||
|
||||
|
||||
def get_path(payload: dict[str, Any] | None, *parts: str, default: Any = None) -> Any:
    """Walk nested dicts by *parts*; return *default* on any missing step."""
    node: Any = payload
    for key in parts:
        if isinstance(node, dict) and key in node:
            node = node[key]
        else:
            return default
    return node
|
||||
|
||||
|
||||
def cast_call_raw(token: str, owner: str, rpc: str) -> int | None:
    """Read ``balanceOf(owner)`` on *token* via ``cast call``.

    Returns the raw integer balance, or None on any failure (missing `cast`
    binary, non-zero exit, timeout, or unparseable output). Requires the
    Foundry ``cast`` tool on PATH and a reachable RPC endpoint.
    """
    try:
        result = subprocess.run(
            ["cast", "call", token, "balanceOf(address)(uint256)", owner, "--rpc-url", rpc],
            check=True,
            capture_output=True,
            text=True,
            timeout=30,
        )
    except (OSError, subprocess.CalledProcessError, subprocess.TimeoutExpired):
        return None
    # Keep only the first whitespace-separated token — presumably to drop
    # cast's trailing annotations (e.g. "1000000 [1e6]"); confirm with the
    # cast version in use.
    first = result.stdout.strip().split()[0] if result.stdout.strip() else ""
    return int(first) if first.isdigit() else None
|
||||
|
||||
|
||||
def cast_decimals(token: str, rpc: str, fallback: int) -> int:
    """Read ``decimals()`` on *token* via ``cast call``.

    Returns *fallback* on any failure (missing `cast` binary, non-zero exit,
    timeout, or unparseable output), so callers always get a usable value.
    """
    try:
        result = subprocess.run(
            ["cast", "call", token, "decimals()(uint8)", "--rpc-url", rpc],
            check=True,
            capture_output=True,
            text=True,
            timeout=30,
        )
    except (OSError, subprocess.CalledProcessError, subprocess.TimeoutExpired):
        return fallback
    # First whitespace-separated token only; non-numeric output -> fallback.
    first = result.stdout.strip().split()[0] if result.stdout.strip() else ""
    return int(first) if first.isdigit() else fallback
|
||||
|
||||
|
||||
def main() -> int:
    """Build the Engine X automated-liquidity advisor report.

    Reads the advisor policy plus the latest readiness/health JSON reports,
    computes a conservative XAUt-backed USDC borrow capacity, classifies
    blockers and warnings, annotates automation phases, and writes both a
    JSON payload and a Markdown summary. Always returns 0; the readiness
    verdict lives in the emitted payload, not the exit code.
    """
    # Policy file plus latest generated status reports (any may be absent; read_json
    # presumably returns None for a missing file — the get_path defaults cover that).
    policy = read_json(POLICY_PATH) or {}
    reports = {
        "supportHealth": read_json(ROOT / "reports/status/mainnet-cwusdc-usdc-support-health-latest.json"),
        "publicIndexedReadiness": read_json(ROOT / "reports/status/engine-x-public-indexed-readiness-latest.json"),
        "mevDefenseReadiness": read_json(ROOT / "reports/status/engine-x-mev-defense-readiness-latest.json"),
        "wethSupport": read_json(ROOT / "reports/status/mainnet-cwusdc-weth-liquidity-surfaces-latest.json"),
    }

    # Requested output: accept units and/or raw (6-decimal) from env, derive the
    # missing one, then fall back to a 0.01 cWUSDC default request.
    requested_symbol = os.environ.get("ENGINE_X_REQUESTED_CW_SYMBOL", "cWUSDC")
    requested_units = env_decimal("ENGINE_X_REQUESTED_OUTPUT_UNITS")
    requested_raw = env_int("ENGINE_X_REQUESTED_OUTPUT_RAW")
    if requested_raw is None and requested_units is not None:
        requested_raw = raw6(requested_units)
    if requested_units is None and requested_raw is not None:
        requested_units = Decimal(requested_raw) / Decimal(10**6)
    if requested_raw is None:
        requested_raw = 10_000
        requested_units = Decimal("0.01")

    # XAUt collateral token: assume 6 decimals, refine on-chain when an RPC is set.
    xaut_token = os.environ.get("XAUT_MAINNET", XAUT_MAINNET)
    xaut_decimals = 6
    if os.environ.get("ETHEREUM_MAINNET_RPC"):
        xaut_decimals = cast_decimals(xaut_token, os.environ["ETHEREUM_MAINNET_RPC"], xaut_decimals)

    # Available XAUt: raw from env, else derived from a units env var, else 0.
    xaut_available_raw = env_int("ENGINE_X_XAUT_AVAILABLE_RAW")
    if xaut_available_raw is None:
        xaut_units_env = env_decimal("ENGINE_X_XAUT_AVAILABLE_UNITS")
        xaut_available_raw = int((xaut_units_env * Decimal(10**xaut_decimals)).to_integral_value(rounding=ROUND_FLOOR)) if xaut_units_env is not None else 0
    xaut_available_units = units_from_raw(xaut_available_raw, xaut_decimals)

    # Risk inputs: env override first, then the policy file, then hard defaults.
    # Prices are fixed-point with 6 implied decimals; ratios are basis points.
    xaut_usd_price6 = Decimal(env_int("ENGINE_X_XAUT_USD_PRICE6", str(get_path(policy, "inputs", "collateral", "defaultUsdPrice6", default="3226640000"))))
    xaut_usd_price = xaut_usd_price6 / Decimal(10**6)
    ltv_bps = Decimal(env_int("ENGINE_X_BORROW_LTV_BPS", str(get_path(policy, "inputs", "risk", "defaultLtvBps", default=7500))))
    hf_bps = Decimal(env_int("ENGINE_X_BORROW_MIN_HEALTH_FACTOR_BPS", str(get_path(policy, "inputs", "risk", "defaultMinHealthFactorBps", default=11000))))
    max_round_trip_loss_bps = Decimal(env_int("ENGINE_X_MAX_ROUND_TRIP_LOSS_BPS", str(get_path(policy, "inputs", "risk", "defaultMaxRoundTripLossBps", default=100))))
    min_gas_reserve_wei = env_int("ENGINE_X_MIN_GAS_RESERVE_WEI", str(get_path(policy, "inputs", "risk", "defaultMinGasReserveWei", default="5000000000000000")))

    # Deployer wallet balances come from the public-indexed readiness report.
    deployer = get_path(reports["publicIndexedReadiness"], "deployer", default={}) or {}
    deployer_address = str(deployer.get("address") or os.environ.get("DEPLOYER_ADDRESS") or "")
    wallet_usdc = Decimal(str(deployer.get("usdc", "0")))
    wallet_cwusdc = Decimal(str(deployer.get("cwusdc", "0")))
    wallet_eth = Decimal(str(get_path(reports["wethSupport"], "balances", "eth", default="0")))

    # When no XAUt amount was provided, try a live balanceOf() read via `cast`.
    xaut_balance_source = "env"
    if xaut_available_raw == 0 and deployer_address and os.environ.get("ETHEREUM_MAINNET_RPC"):
        live_xaut = cast_call_raw(
            xaut_token,
            deployer_address,
            os.environ["ETHEREUM_MAINNET_RPC"],
        )
        if live_xaut is not None:
            xaut_available_raw = live_xaut
            xaut_available_units = units_from_raw(xaut_available_raw, xaut_decimals)
            xaut_balance_source = "live_wallet_balance"

    # Borrow capacity: min of LTV-limited and health-factor-limited amounts.
    collateral_usd = xaut_available_units * xaut_usd_price
    ltv_borrow_capacity = collateral_usd * ltv_bps / Decimal(10_000)
    # Conservative single-asset health-factor capacity: debt <= collateral_ltv_value / target_hf.
    hf_borrow_capacity = ltv_borrow_capacity * Decimal(10_000) / hf_bps if hf_bps > 0 else Decimal(0)
    borrow_capacity = min(ltv_borrow_capacity, hf_borrow_capacity)

    # Readiness signals and quote-defense surface health from the feed reports.
    mev_ready = bool(get_path(reports["mevDefenseReadiness"], "ready", default=False))
    public_ready = bool(get_path(reports["publicIndexedReadiness"], "summary", "readyForPublicIndexedProof", default=False))
    support_preferred = get_path(reports["supportHealth"], "quoteDefenseDecision", "preferredSurface", default={}) or {}
    support_blockers = get_path(reports["supportHealth"], "quoteDefenseDecision", "blockers", default=[]) or []
    v3_tick = get_path(reports["supportHealth"], "quoteDefenseSurfaceHealth", "mainnet-cwusdc-usdc-univ3-100", "tick")
    v3_range_status = get_path(reports["supportHealth"], "quoteDefenseSurfaceHealth", "mainnet-cwusdc-usdc-univ3-100", "rangeStatus")

    # The fillable amount is capped by wallet+borrow USDC, wallet cWUSDC, and the request.
    requested_usd = requested_units or Decimal(0)
    available_loop_usd = min(wallet_usdc + borrow_capacity, wallet_cwusdc, requested_usd)
    can_satisfy_requested = available_loop_usd >= requested_usd and requested_usd > 0

    # Gate evaluation: blockers stop live execution, warnings request operator review.
    blockers: list[str] = []
    warnings: list[str] = []
    if requested_symbol not in get_path(policy, "inputs", "requestedOutput", "supportedInitialSymbols", default=["cWUSDC"]):
        blockers.append(f"requested output symbol is not yet supported by this advisor: {requested_symbol}")
    if not public_ready:
        blockers.append("public indexed readiness is not passing")
    if not mev_ready:
        blockers.append("MEV/protected broadcast readiness is not passing for live automation")
    if support_preferred.get("action") not in {"ready_for_tiny_canary", "use_for_tiny_public_canary"}:
        warnings.append(f"preferred quote-defense surface action is {support_preferred.get('action', 'unknown')}; rebalance or operator review may be needed")
    if support_blockers:
        blockers.extend(f"support health blocker: {item}" for item in support_blockers)
    # wallet_eth is in whole ETH; scale to wei before comparing to the reserve.
    if wallet_eth * Decimal(10**18) < Decimal(min_gas_reserve_wei or 0):
        blockers.append("wallet ETH is below configured gas reserve")
    if not can_satisfy_requested:
        blockers.append("requested output exceeds current wallet plus XAUt-backed conservative USDC capacity")

    live_execution_ready = not blockers and can_satisfy_requested
    endpoint_publication_ready = live_execution_ready and public_ready

    # Annotate each policy automation phase with a status derived from the gates above.
    phases = []
    for phase in get_path(policy, "automationPhases", default=[]):
        status = "ready"
        if phase["id"] == "phase_1_canary" and not live_execution_ready:
            status = "blocked"
        elif phase["id"] == "phase_2_liquidity_defense" and (not live_execution_ready or warnings):
            status = "operator_review"
        elif phase["id"] == "phase_3_endpoint_publication" and not endpoint_publication_ready:
            status = "blocked"
        elif phase["id"] == "phase_4_multi_asset_forex_crypto":
            status = "design_required"
        phases.append({**phase, "status": status})

    # Machine-readable report payload (all Decimals serialized as strings).
    payload = {
        "schema": "engine-x-automated-liquidity-advisor/v1",
        "generatedAt": datetime.now(timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z"),
        "policyPath": str(POLICY_PATH.relative_to(ROOT)),
        "request": {
            "outputSymbol": requested_symbol,
            "outputRaw": str(requested_raw),
            "outputUnits": decstr(requested_units or Decimal(0), 6),
        },
        "inputs": {
            "wallet": {
                "usdc": decstr(wallet_usdc, 6),
                "cwusdc": decstr(wallet_cwusdc, 6),
                "eth": str(wallet_eth),
            },
            "xautCollateral": {
                "availableRaw": str(xaut_available_raw),
                "availableUnits": decstr(xaut_available_units, min(xaut_decimals, 8)),
                "decimals": xaut_decimals,
                "token": xaut_token,
                "source": xaut_balance_source,
                "usdPrice": decstr(xaut_usd_price, 6),
                "collateralUsd": decstr(collateral_usd, 6),
            },
            "risk": {
                "ltvBps": str(ltv_bps),
                "minHealthFactorBps": str(hf_bps),
                "maxRoundTripLossBps": str(max_round_trip_loss_bps),
                "minGasReserveWei": str(min_gas_reserve_wei),
            },
        },
        "calculator": {
            "ltvBorrowCapacityUsdc": decstr(ltv_borrow_capacity, 6),
            "healthFactorBorrowCapacityUsdc": decstr(hf_borrow_capacity, 6),
            "conservativeBorrowCapacityUsdc": decstr(borrow_capacity, 6),
            "walletPlusBorrowUsdcCapacity": decstr(wallet_usdc + borrow_capacity, 6),
            "maxCurrentRequestFillUnits": decstr(available_loop_usd, 6),
            "canSatisfyRequestedOutput": can_satisfy_requested,
            "debtNeutralLoopInvariant": "borrow USDC, swap cWUSDC->USDC to repay debt, then swap borrowed USDC->cWUSDC; ending USDC debt must be zero before XAUt withdrawal",
        },
        "feedStatus": {
            "publicIndexedReady": public_ready,
            "mevDefenseReady": mev_ready,
            "preferredQuoteDefenseSurface": support_preferred,
            "uniV3Tick": v3_tick,
            "uniV3RangeStatus": v3_range_status,
        },
        "recommendedPhases": phases,
        "endpointReporting": {
            "ready": endpoint_publication_ready,
            "targets": get_path(policy, "feeds", "externalPublicationTargets", default=[]),
            "requiredEvidence": get_path(policy, "decisionGates", "publication", default=[]),
        },
        "blockers": blockers,
        "warnings": warnings,
        "operatorCommands": {
            "regenerateFeeds": get_path(policy, "feeds", "onChainReadinessCommands", default=[]),
            "dryRunAdvisor": "pnpm engine-x:automation-advisor",
            "liveExecution": "blocked unless this report has no blockers and operator-approved scripts are run with protected RPC",
        },
    }

    OUT_JSON.parent.mkdir(parents=True, exist_ok=True)
    OUT_JSON.write_text(json.dumps(payload, indent=2) + "\n")

    # Human-readable Markdown summary mirroring the JSON payload.
    lines = [
        "# Engine X Automated Liquidity Advisor",
        "",
        f"- generatedAt: `{payload['generatedAt']}`",
        f"- request: `{payload['request']['outputUnits']} {requested_symbol}`",
        f"- max current request fill: `{payload['calculator']['maxCurrentRequestFillUnits']} {requested_symbol}`",
        f"- conservative XAUt-backed USDC capacity: `{payload['calculator']['conservativeBorrowCapacityUsdc']} USDC`",
        f"- public indexed ready: `{str(public_ready).lower()}`",
        f"- MEV defense ready: `{str(mev_ready).lower()}`",
        f"- endpoint publication ready: `{str(endpoint_publication_ready).lower()}`",
        "",
        "## Calculator",
        "",
        f"- wallet USDC: `{payload['inputs']['wallet']['usdc']}`",
        f"- wallet cWUSDC: `{payload['inputs']['wallet']['cwusdc']}`",
        f"- XAUt available: `{payload['inputs']['xautCollateral']['availableUnits']}`",
        f"- XAUt collateral USD value: `{payload['inputs']['xautCollateral']['collateralUsd']}`",
        f"- wallet plus conservative borrow capacity: `{payload['calculator']['walletPlusBorrowUsdcCapacity']} USDC`",
        "",
        "## Phases",
    ]
    for phase in phases:
        lines.append(f"- `{phase['id']}`: `{phase['status']}` - {phase['name']}")
    lines.extend(["", "## Blockers"])
    lines.extend([f"- {item}" for item in blockers] or ["- none"])
    lines.extend(["", "## Warnings"])
    lines.extend([f"- {item}" for item in warnings] or ["- none"])
    lines.extend(["", "## Reporting Targets"])
    lines.extend([f"- {item}" for item in payload["endpointReporting"]["targets"]])
    OUT_MD.write_text("\n".join(lines) + "\n")

    # Concise stdout verdict for CI/operators.
    print(json.dumps({"ready": live_execution_ready, "blockers": blockers, "warnings": warnings}, indent=2))
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point: propagate main()'s status code as the process exit code.
    raise SystemExit(main())
|
||||
10
scripts/verify/plan-engine-x-automated-liquidity-advisor.sh
Executable file
10
scripts/verify/plan-engine-x-automated-liquidity-advisor.sh
Executable file
@@ -0,0 +1,10 @@
|
||||
#!/usr/bin/env bash
# Thin wrapper: resolve the repository root, load the shared project env,
# then run the Engine X automated liquidity advisor planner (Python).
set -euo pipefail

# Repo root is two directories above this script (scripts/verify/..).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"

# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/load-project-env.sh
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh"

python3 "${PROJECT_ROOT}/scripts/verify/plan-engine-x-automated-liquidity-advisor.py"
|
||||
932
scripts/verify/plan-token-aggregation-liquidity-gap-funding.mjs
Normal file
932
scripts/verify/plan-token-aggregation-liquidity-gap-funding.mjs
Normal file
@@ -0,0 +1,932 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Read-only funding planner for token-aggregation adoption-readiness liquidity gaps.
|
||||
*
|
||||
* It does not broadcast transactions. It checks the deployer wallet's native and ERC-20 balances
|
||||
* for every current liquidityMissingDetails row and classifies each row as:
|
||||
* - fundable_token_balance_present
|
||||
* - gas_gated
|
||||
* - token_balance_gated
|
||||
* - pool_binding_gated
|
||||
*/
|
||||
|
||||
import { createHash } from "node:crypto";
|
||||
import { mkdirSync, readFileSync, writeFileSync } from "node:fs";
|
||||
import { resolve } from "node:path";
|
||||
|
||||
// Repo-relative input reports (read-only) and output report paths.
const repoRoot = resolve(new URL("../..", import.meta.url).pathname);
const readinessPath = resolve(repoRoot, "reports/status/token-aggregation-adoption-readiness-live-20260509.json");
const nonEvmHealthPath = resolve(repoRoot, "reports/status/non-evm-network-health-latest.json");
const nonEvmLaneStatusPath = resolve(repoRoot, "reports/status/non-evm-lane-status-latest.json");
const jsonOut = resolve(repoRoot, "reports/status/token-aggregation-liquidity-gap-funding-plan-latest.json");
const mdOut = resolve(repoRoot, "reports/status/token-aggregation-liquidity-gap-funding-plan-latest.md");
// Deployer wallet checked on every chain: env override first, hard-coded default last.
const deployer = (process.env.DEPLOYER_ADDRESS || process.env.DEPLOYER || "0x4A666F96fC8764181194447A7dFdb7d471b301C8").trim();
// .env files consulted by readEnvValue() when a key is absent from process.env.
const envFiles = [resolve(repoRoot, ".env"), resolve(repoRoot, "smom-dbis-138/.env")];
// Planning knobs (env-overridable): budgeted stability cycles, gas safety margin
// in basis points, and the discretionary "coffee money" USD budget per row.
const stabilityCycles = Number(process.env.TOKEN_AGGREGATION_STABILITY_CYCLES || "30");
const gasSafetyBps = BigInt(process.env.TOKEN_AGGREGATION_GAS_SAFETY_BPS || "15000");
const coffeeMoneyUsdAvailable = Number(process.env.DEPLOYER_COFFEE_MONEY_USD || "48");
const coffeeMoneyLiquidityUsdPerRow = Number(process.env.COFFEE_MONEY_LIQUIDITY_USD_PER_ROW || "1");
// Chain IDs this plan treats as bridgeable, and as Protocolink swap candidates.
const bridgeCapableChains = new Set([1, 10, 25, 56, 100, 137, 42161, 42220, 43114, 8453]);
const protocolinkCandidateChains = new Set([1, 10, 56, 100, 137, 42161, 42220, 43114, 8453]);
|
||||
// Official quote assets, keyed as `${chainId}:${lowercase token address}`.
// Membership here drives the "official_quote_asset" classification in
// classifyFundingPath(); two entries per chain where both variants are listed.
const officialQuoteAssets = new Set([
  "1:0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48",
  "1:0xdac17f958d2ee523a2206206994597c13d831ec7",
  "10:0x0b2c639c533813f4aa9d7837caf62653d097ff85",
  "10:0x94b008aa00579c1307b0ef2c499ad98a8ce58e58",
  "25:0xc21223249ca28397b4b6541dffaecc539bff0c59",
  "25:0x66e428c3f67a68878562e79a0234c1f83c208770",
  "56:0x8ac76a51cc950d9822d68b83fe1ad97b32cd580d",
  "56:0x55d398326f99059ff775485246999027b3197955",
  "100:0xddafbb505ad214d7b80b1f830fccc89b60fb7a83",
  "100:0x4ecaba5870353805a9f068101a40e0f32ed605c6",
  "137:0x3c499c542cef5e3811e1192ce70d8cc03d5c3359",
  "137:0xc2132d05d31c914a87c6611c10748aeb04b58e8f",
  "8453:0x833589fcd6edb6e08f4c7c32d4f71b54bda02913",
  "42161:0xaf88d065e77c8cc2239327c5edb3a432268e5831",
  "42161:0xfd086bc7cd5c481dcc9c85ebe478a1c0b69fcbb9",
  "42220:0x765de816845861e75a25fca122bb6898b8b1282a",
  "42220:0x48065fbbe25f71c9282ddf5e1cd6d6a887483d5e",
  "43114:0xb97ef9ef8734c71904d8002f8b6bc66dd9c48a6e",
  "43114:0x9702230a8ea53601f5cd2dc00fdbc13d4df4a8c7",
]);
|
||||
// Ethereum-mainnet tokens inventoried for the deployer wallet, each tagged with
// the role the plan assigns it (see buildEthereumSourceInventory()).
const ethereumSourceTokens = [
  { symbol: "USDC", address: "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", decimals: 6, role: "official_quote_capital" },
  { symbol: "USDT", address: "0xdAC17F958D2ee523a2206206994597C13D831ec7", decimals: 6, role: "official_quote_capital" },
  { symbol: "LINK", address: "0x514910771AF9Ca656af840dff83E8264EcF986CA", decimals: 18, role: "route_quote_before_use" },
  { symbol: "WETH", address: "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2", decimals: 18, role: "wrapped_native_gas_source" },
  { symbol: "XAUt", address: "0x68749665FF8D2d112Fa859AA293F07A622782F38", decimals: 6, role: "route_quote_before_use" },
  { symbol: "cWUSDC", address: "0x66a3c2fa3e467aa586e90912f977e648589cabaf", decimals: 6, role: "evidence_or_pair_side_not_native_gas" },
];
|
||||
|
||||
// JSON-RPC endpoint per chain ID: env overrides (several aliases each) with a
// public fallback. Chain 138 falls back to a LAN endpoint.
const rpcByChain = {
  1: process.env.ETHEREUM_MAINNET_RPC || process.env.RPC_URL_1 || "https://ethereum.publicnode.com",
  10: process.env.OPTIMISM_MAINNET_RPC || process.env.OPTIMISM_RPC_URL || process.env.RPC_URL_10 || "https://optimism.publicnode.com",
  25: process.env.CRONOS_RPC_URL || process.env.CRONOS_MAINNET_RPC || process.env.RPC_URL_25 || "https://cronos-evm-rpc.publicnode.com",
  56: process.env.BSC_RPC_URL || process.env.BSC_MAINNET_RPC || process.env.RPC_URL_56 || "https://bsc-rpc.publicnode.com",
  100: process.env.GNOSIS_MAINNET_RPC || process.env.GNOSIS_RPC_URL || process.env.GNOSIS_RPC || process.env.RPC_URL_100 || "https://gnosis.publicnode.com",
  137: process.env.POLYGON_MAINNET_RPC || process.env.POLYGON_RPC_URL || process.env.RPC_URL_137 || "https://polygon-bor-rpc.publicnode.com",
  138: process.env.RPC_URL_138_PUBLIC || process.env.RPC_URL_138 || process.env.CHAIN138_RPC_URL || "http://192.168.11.221:8545",
  1111: process.env.WEMIX_MAINNET_RPC || process.env.WEMIX_RPC || process.env.RPC_URL_1111 || "https://api.wemix.com",
  8453: process.env.BASE_MAINNET_RPC || process.env.BASE_RPC_URL || process.env.RPC_URL_8453 || "https://base-rpc.publicnode.com",
  42161: process.env.ARBITRUM_MAINNET_RPC || process.env.ARBITRUM_RPC_URL || process.env.RPC_URL_42161 || "https://arbitrum-one-rpc.publicnode.com",
  42220: process.env.CELO_MAINNET_RPC || process.env.CELO_RPC_URL || process.env.CELO_RPC || process.env.RPC_URL_42220 || "https://celo-rpc.publicnode.com",
  43114: process.env.AVALANCHE_RPC_URL || process.env.AVALANCHE_MAINNET_RPC || process.env.RPC_URL_43114 || "https://avalanche-c-chain-rpc.publicnode.com",
  651940: process.env.CHAIN_651940_RPC_URL || process.env.ALL_MAINNET_RPC || "https://mainnet-rpc.alltra.global",
};
|
||||
// Native gas-coin ticker per chain ID (display only).
const nativeSymbolsByChain = {
  1: "ETH",
  10: "ETH",
  25: "CRO",
  56: "BNB",
  100: "xDAI",
  137: "POL",
  138: "DBIS",
  1111: "WEMIX",
  8453: "ETH",
  42161: "ETH",
  42220: "CELO",
  43114: "AVAX",
  651940: "ALL",
};
// Per-run memo of eth_gasPrice results, keyed by chain ID (see gasPrice()).
const gasPriceCache = new Map();
|
||||
|
||||
function padAddress(address) {
|
||||
return String(address).replace(/^0x/i, "").padStart(64, "0");
|
||||
}
|
||||
|
||||
async function rpcCall(rpcUrl, method, params) {
  // Single JSON-RPC 2.0 POST with a 12-second abort timeout.
  // Never throws: both transport and RPC-level errors come back as
  // { ok: false, error }; success as { ok: true, result }.
  const aborter = new AbortController();
  const timer = setTimeout(() => aborter.abort(), 12_000);
  try {
    const body = JSON.stringify({ jsonrpc: "2.0", method, params, id: 1 });
    const response = await fetch(rpcUrl, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body,
      signal: aborter.signal,
    });
    const json = await response.json();
    if (json.error) {
      return { ok: false, error: json.error.message || JSON.stringify(json.error) };
    }
    return { ok: true, result: json.result };
  } catch (error) {
    return { ok: false, error: error.message };
  } finally {
    clearTimeout(timer);
  }
}
|
||||
|
||||
function bigintFromHex(hex) {
|
||||
if (!hex || hex === "0x") return 0n;
|
||||
return BigInt(hex);
|
||||
}
|
||||
|
||||
function decimalUnits(raw, decimals) {
|
||||
const scale = 10n ** BigInt(decimals);
|
||||
const whole = raw / scale;
|
||||
const frac = raw % scale;
|
||||
const fracText = frac.toString().padStart(decimals, "0").replace(/0+$/, "");
|
||||
return fracText ? `${whole}.${fracText}` : whole.toString();
|
||||
}
|
||||
|
||||
async function erc20Balance(rpcUrl, token, holder) {
  // Fetch balanceOf (selector 0x70a08231) and decimals (0x313ce567) in
  // parallel; when decimals() does not answer, assume 18.
  const balanceCall = rpcCall(rpcUrl, "eth_call", [{ to: token, data: `0x70a08231${padAddress(holder)}` }, "latest"]);
  const decimalsCall = rpcCall(rpcUrl, "eth_call", [{ to: token, data: "0x313ce567" }, "latest"]);
  const [balance, decimals] = await Promise.all([balanceCall, decimalsCall]);
  const raw = balance.ok ? bigintFromHex(balance.result) : 0n;
  const reported = decimals.ok ? Number(bigintFromHex(decimals.result)) : 18;
  const dec = Number.isFinite(reported) ? reported : 18;
  return {
    ok: balance.ok,
    raw: raw.toString(),
    units: decimalUnits(raw, dec),
    decimals: dec,
    error: balance.ok ? undefined : balance.error,
  };
}
|
||||
|
||||
async function nativeBalance(rpcUrl, holder) {
  // Native-coin balance via eth_getBalance, rendered at 18 decimals.
  const response = await rpcCall(rpcUrl, "eth_getBalance", [holder, "latest"]);
  const raw = response.ok ? bigintFromHex(response.result) : 0n;
  return {
    ok: response.ok,
    raw: raw.toString(),
    units: decimalUnits(raw, 18),
    error: response.ok ? undefined : response.error,
  };
}
|
||||
|
||||
async function buildEthereumSourceInventory() {
  // Snapshot the deployer's Ethereum-mainnet holdings: native ETH plus every
  // token in ethereumSourceTokens, with a present/zero flag per token and
  // operator-facing interpretation notes. Read-only; failures surface as
  // per-entry `error` fields rather than throwing.
  const rpcUrl = rpcByChain[1];
  const native = await nativeBalance(rpcUrl, deployer);
  // One balanceOf per configured token, queried concurrently.
  const tokens = await Promise.all(ethereumSourceTokens.map(async (token) => {
    const balance = await erc20Balance(rpcUrl, token.address, deployer);
    return {
      ...token,
      balance: balance.units,
      balanceRaw: balance.raw,
      balanceStatus: BigInt(balance.raw || "0") > 0n ? "present" : "zero",
      error: balance.error,
    };
  }));
  return {
    chainId: 1,
    network: "Ethereum Mainnet",
    deployer,
    native: {
      symbol: "ETH",
      balance: native.units,
      balanceRaw: native.raw,
      role: "mainnet_transaction_gas_do_not_fully_drain",
    },
    tokens,
    // Operator guidance embedded in the report alongside the raw numbers.
    interpretation: [
      "Ethereum portfolio value is not the same as immediately spendable cross-chain gas.",
      "Keep enough ETH for Mainnet approvals, swaps, and liquidity/stability events.",
      "Use USDC/USDT first as official quote capital; use LINK/XAUt only after a live route quote proves acceptable output.",
      "Treat cWUSDC as pair-side/evidence inventory unless a real public route converts it into the exact official token needed.",
    ],
  };
}
|
||||
|
||||
async function gasPrice(rpcUrl, chainId) {
|
||||
if (!rpcUrl) return { ok: false, raw: "0", units: "0", error: "missing_rpc" };
|
||||
if (gasPriceCache.has(chainId)) return gasPriceCache.get(chainId);
|
||||
const result = await rpcCall(rpcUrl, "eth_gasPrice", []);
|
||||
const raw = result.ok ? bigintFromHex(result.result) : 0n;
|
||||
const payload = {
|
||||
ok: result.ok,
|
||||
raw: raw.toString(),
|
||||
gwei: decimalUnits(raw, 9),
|
||||
error: result.ok ? undefined : result.error,
|
||||
};
|
||||
gasPriceCache.set(chainId, payload);
|
||||
return payload;
|
||||
}
|
||||
|
||||
function table(headers, rows) {
|
||||
return [
|
||||
`| ${headers.join(" | ")} |`,
|
||||
`| ${headers.map(() => "---").join(" | ")} |`,
|
||||
...rows.map((row) => `| ${row.map((cell) => String(cell ?? "").replace(/\|/g, "\\|")).join(" | ")} |`),
|
||||
].join("\n");
|
||||
}
|
||||
|
||||
function readJsonIfExists(path, fallback = null) {
|
||||
try {
|
||||
return JSON.parse(readFileSync(path, "utf8"));
|
||||
} catch {
|
||||
return fallback;
|
||||
}
|
||||
}
|
||||
|
||||
function readEnvValue(...keys) {
  // Resolve the first non-empty value among *keys*: process.env wins, then
  // each configured .env file is scanned line-by-line. Returns "" on no match.
  for (const key of keys) {
    const fromEnv = process.env[key];
    if (fromEnv) return fromEnv.trim();
  }
  for (const file of envFiles) {
    let text;
    try {
      text = readFileSync(file, "utf8");
    } catch {
      continue; // file absent/unreadable — try the next one
    }
    for (const key of keys) {
      // KEY=value up to a newline or inline "#" comment.
      const match = text.match(new RegExp(`^${key}=([^\\n#]*)`, "m"));
      if (match?.[1]) return match[1].trim().replace(/^['"]|['"]$/g, "");
    }
  }
  return "";
}
|
||||
|
||||
const base58Alphabet = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz";
|
||||
|
||||
function base58Encode(bytes) {
|
||||
let value = BigInt(`0x${Buffer.from(bytes).toString("hex") || "0"}`);
|
||||
let output = "";
|
||||
while (value > 0n) {
|
||||
const remainder = Number(value % 58n);
|
||||
output = `${base58Alphabet[remainder]}${output}`;
|
||||
value /= 58n;
|
||||
}
|
||||
let leadingZeroes = 0;
|
||||
for (const byte of bytes) {
|
||||
if (byte !== 0) break;
|
||||
leadingZeroes += 1;
|
||||
}
|
||||
return `${"1".repeat(leadingZeroes)}${output || ""}`;
|
||||
}
|
||||
|
||||
function base58Decode(text) {
|
||||
let value = 0n;
|
||||
for (const char of text) {
|
||||
const index = base58Alphabet.indexOf(char);
|
||||
if (index < 0) throw new Error(`invalid_base58_char_${char}`);
|
||||
value = value * 58n + BigInt(index);
|
||||
}
|
||||
let hex = value.toString(16);
|
||||
if (hex.length % 2) hex = `0${hex}`;
|
||||
const decoded = hex ? Buffer.from(hex, "hex") : Buffer.alloc(0);
|
||||
const leading = [...text].findIndex((char) => char !== "1");
|
||||
const zeroCount = leading < 0 ? text.length : leading;
|
||||
return Buffer.concat([Buffer.alloc(zeroCount), decoded]);
|
||||
}
|
||||
|
||||
function solanaWalletFromConfig() {
  // Resolve the Solana wallet address, in priority order:
  // 1) an explicit public key from env/.env,
  // 2) the public half (bytes 32..64) of a JSON keypair file,
  // 3) the public half of a base58-encoded 64-byte private key.
  // Returns { address: "", source: "missing" } when nothing resolves; only
  // public-key material ever leaves this function.
  const explicit = readEnvValue("SOLANA_DEPLOYER_ADDRESS", "SOLANA_WALLET_ADDRESS", "SOLANA_PUBLIC_KEY");
  if (explicit) return { address: explicit, source: "env_public_key" };

  const keypairPath = readEnvValue("SOLANA_KEYPAIR_PATH");
  if (keypairPath) {
    try {
      const keypair = JSON.parse(readFileSync(keypairPath, "utf8"));
      // A Solana JSON keypair is a 64-byte array: secret key then public key.
      if (Array.isArray(keypair) && keypair.length >= 64) {
        return { address: base58Encode(Buffer.from(keypair.slice(32, 64))), source: "SOLANA_KEYPAIR_PATH_public_key" };
      }
    } catch {
      // Fall through to private-key decode if present.
    }
  }

  const privateKey = readEnvValue("PRIVATE_KEY_SOLANA_DEPLOYER", "SOLANA_PRIVATE_KEY");
  if (privateKey) {
    try {
      const decoded = base58Decode(privateKey);
      if (decoded.length >= 64) return { address: base58Encode(decoded.subarray(32, 64)), source: "solana_private_key_public_half" };
    } catch {
      // Keep address gated if the value is a seed-only key.
    }
  }
  return { address: "", source: "missing" };
}
|
||||
|
||||
function tronWalletFromConfig() {
  // Resolve the Tron wallet address: explicit env/.env value first; otherwise
  // derive the base58check form from the EVM deployer address (0x41 prefix +
  // 20-byte address + first 4 bytes of double-SHA256 as checksum).
  const explicit = readEnvValue("TRON_DEPLOYER_ADDRESS", "TRON_WALLET_ADDRESS", "TRON_PUBLIC_ADDRESS", "TRON_ACCOUNT_ADDRESS");
  if (explicit) return { address: explicit, source: "env_tron_address" };
  const ethAddress = deployer.replace(/^0x/i, "");
  if (/^[0-9a-fA-F]{40}$/.test(ethAddress)) {
    const payload = Buffer.from(`41${ethAddress}`, "hex");
    const checksum = createHash("sha256").update(createHash("sha256").update(payload).digest()).digest().subarray(0, 4);
    return { address: base58Encode(Buffer.concat([payload, checksum])), source: "derived_from_evm_deployer_address" };
  }
  return { address: "", source: "missing" };
}
|
||||
|
||||
function xrplWalletFromConfig() {
  // XRPL addresses cannot be derived from the EVM deployer key here; only an
  // explicit env/.env value is accepted.
  const address = readEnvValue("XRPL_DEPLOYER_ADDRESS", "XRP_DEPLOYER_ADDRESS", "XRPL_WALLET_ADDRESS", "XRP_WALLET_ADDRESS", "XRPL_ACCOUNT");
  if (address) return { address, source: "env_xrpl_address" };
  return { address: "", source: "missing" };
}
|
||||
|
||||
async function solanaNativeBalance(address) {
  // Lamport balance via Solana JSON-RPC getBalance, rendered as SOL (9 dp).
  if (!address) return { ok: false, units: "address_required", raw: "0", error: "missing_solana_address" };
  const endpoint = readEnvValue("SOLANA_RPC_URL") || "https://solana-rpc.publicnode.com";
  const response = await rpcCall(endpoint, "getBalance", [address]);
  const lamports = response.ok ? BigInt(response.result?.value ?? 0) : 0n;
  return {
    ok: response.ok,
    raw: lamports.toString(),
    units: decimalUnits(lamports, 9),
    error: response.ok ? undefined : response.error,
  };
}
|
||||
|
||||
async function tronNativeBalance(address) {
  // TRX balance (in sun, 6 dp) via TronGrid's getaccount HTTP API, with a
  // 12-second abort timeout. Never throws; failures return { ok: false }.
  if (!address) return { ok: false, units: "address_required", raw: "0", error: "missing_tron_address" };
  const controller = new AbortController();
  const timeout = setTimeout(() => controller.abort(), 12_000);
  try {
    const headers = { "content-type": "application/json" };
    // API key is optional; TronGrid rate-limits harder without it.
    const apiKey = readEnvValue("TRONGRID_API_KEY");
    if (apiKey) headers["TRON-PRO-API-KEY"] = apiKey;
    const response = await fetch("https://api.trongrid.io/wallet/getaccount", {
      method: "POST",
      headers,
      // visible:true lets us pass the base58 (T...) address form directly.
      body: JSON.stringify({ address, visible: true }),
      signal: controller.signal,
    });
    const json = await response.json();
    // Accounts with no TRX omit the balance field entirely.
    const sun = BigInt(json.balance ?? 0);
    return { ok: response.ok, raw: sun.toString(), units: decimalUnits(sun, 6), error: response.ok ? undefined : JSON.stringify(json) };
  } catch (error) {
    return { ok: false, raw: "0", units: "0", error: error.message };
  } finally {
    clearTimeout(timeout);
  }
}
|
||||
|
||||
async function xrplNativeBalance(address) {
  // XRP balance in drops (6 dp) via account_info against a validated ledger.
  if (!address) return { ok: false, units: "address_required", raw: "0", error: "missing_xrpl_address" };
  const endpoint = readEnvValue("XRPL_RPC_URL") || "https://xrplcluster.com";
  const response = await rpcCall(endpoint, "account_info", [{ account: address, ledger_index: "validated" }]);
  const drops = response.ok ? BigInt(response.result?.account_data?.Balance ?? 0) : 0n;
  return {
    ok: response.ok,
    raw: drops.toString(),
    units: decimalUnits(drops, 6),
    error: response.ok ? undefined : response.error,
  };
}
|
||||
|
||||
function networkHealth(health, network) {
|
||||
return (health?.checks ?? []).find((row) => row.network === network) ?? null;
|
||||
}
|
||||
|
||||
function classifyFundingPath(detail, token, native) {
  // Classify one liquidity-gap row into a funding path + status, based on
  // what the deployer already holds (token balance, native gas) and the
  // chain's capabilities (official quote asset, bridgeable, Protocolink).
  // Pure function of its inputs and the module-level chain sets; read-only.
  const chainId = Number(detail.chainId);
  // Key format matches officialQuoteAssets: `${chainId}:${lowercase address}`.
  const addressKey = `${chainId}:${String(detail.address ?? "").toLowerCase()}`;
  const hasToken = BigInt(token.raw || "0") > 0n;
  const hasGas = BigInt(native.raw || "0") > 0n;
  const isOfficialQuoteAsset = officialQuoteAssets.has(addressKey);
  const canBridge = bridgeCapableChains.has(chainId);
  const canProtocolink = protocolinkCandidateChains.has(chainId);

  // Case 1: a pool is already visible but empty, and we hold the token —
  // seed it directly from the deployer balance.
  if (hasToken && detail.category === "configured_or_indexed_pools_zero_tvl") {
    return {
      fundingPath: "seed_existing_visible_pool_from_deployer_balance",
      fundingPathStatus: hasGas ? "executable_after_operator_approval" : "native_gas_topup_required",
      assetClass: isOfficialQuoteAsset ? "official_quote_asset" : "repo_or_wrapped_asset",
      protocolinkUse: "not_required_for_seed; optional for pre-seed rebalance quote",
    };
  }

  // Case 2: we hold the token but no pool is bound yet — pool creation/binding
  // must precede any funding.
  if (hasToken && detail.category === "no_visible_pool_binding") {
    return {
      fundingPath: "create_or_bind_pool_then_seed_from_deployer_balance",
      fundingPathStatus: hasGas ? "pool_binding_required_before_funding" : "native_gas_and_pool_binding_required",
      assetClass: isOfficialQuoteAsset ? "official_quote_asset" : "repo_or_wrapped_asset",
      protocolinkUse: "not_required_until pool exists; optional to rebalance paired side",
    };
  }

  // Case 3: no balance, but the token is an official quote asset we cannot
  // mint — a swap route (Protocolink where supported) or external funding.
  if (isOfficialQuoteAsset) {
    return {
      fundingPath: canProtocolink ? "protocolink_swap_candidate_for_non_mintable_quote_asset" : "external_quote_asset_required",
      fundingPathStatus: canProtocolink ? "requires_live_route_quote_source_asset_and_min_out" : "external_funding_required",
      assetClass: "official_quote_asset",
      protocolinkUse: canProtocolink
        ? "use only after live quote proves deployer-held source asset converts into this exact token"
        : "unsupported_by_current_protocolink_candidate_set",
    };
  }

  // Case 4: no balance, repo/wrapped asset on a bridge-capable chain —
  // bridge or destination-mint, then seed.
  if (canBridge) {
    return {
      fundingPath: "bridge_or_destination_mint_repo_asset_then_seed",
      fundingPathStatus: hasGas ? "bridge_or_mint_route_required" : "native_gas_topup_then_bridge_or_mint",
      assetClass: "repo_or_wrapped_asset",
      protocolinkUse: "optional only if a public swap route beats bridge/mint for the needed asset",
    };
  }

  // Fallback: nothing automated applies; an operator must source the asset.
  return {
    fundingPath: "manual_asset_source_required",
    fundingPathStatus: hasGas ? "asset_source_required" : "native_gas_and_asset_source_required",
    assetClass: "unknown_or_unclassified_asset",
    protocolinkUse: "route support not classified",
  };
}
|
||||
|
||||
// Return the gas-unit policy (one-time units, per-stability-cycle units, and a
// human-readable rationale) for a funding path produced by classifyFundingPath.
// Unknown paths fall back to the default seed/rebalance budget.
function gasUnitsForFundingPath(fundingPath) {
  // Small builder so each policy row reads as a single line.
  const budget = (oneTimeGasUnits, stabilityGasUnitsPerCycle, rationale) => ({
    oneTimeGasUnits,
    stabilityGasUnitsPerCycle,
    rationale,
  });
  const policy = {
    seed_existing_visible_pool_from_deployer_balance: budget(
      650_000,
      260_000,
      "approve plus add/sync/validation transaction budget for already visible pools",
    ),
    create_or_bind_pool_then_seed_from_deployer_balance: budget(
      1_350_000,
      320_000,
      "factory create/bind plus seed transaction budget",
    ),
    bridge_or_destination_mint_repo_asset_then_seed: budget(
      1_150_000,
      300_000,
      "bridge-or-mint plus destination seed transaction budget",
    ),
    protocolink_swap_candidate_for_non_mintable_quote_asset: budget(
      900_000,
      360_000,
      "Protocolink route execution plus seed/rebalance budget after live quote",
    ),
    external_quote_asset_required: budget(
      450_000,
      260_000,
      "post-funding seed/rebalance budget; asset funding is out of band",
    ),
    manual_asset_source_required: budget(
      650_000,
      260_000,
      "manual source then seed/rebalance budget",
    ),
  };
  // Fresh object per call, matching the original's per-call literal fallback.
  return policy[fundingPath] ?? budget(650_000, 260_000, "default seed/rebalance budget");
}
|
||||
|
||||
// Compute the native-gas plan for one liquidity-gap row.
//
// Combines the per-funding-path gas-unit policy with the chain's sampled gas
// price, adds `stabilityCycles` recurring-cycle budgets, applies the
// `gasSafetyBps` multiplier with ceiling division (so the margin always rounds
// up to a whole wei), and compares the requirement against the deployer's
// current native balance.
//
// Params: { chainId, native: {raw, units, error?}, gas: {raw, gwei, error?}, fundingPath }.
// Returns a plain object; key order is preserved because it feeds JSON output.
function buildGasPlan({ chainId, native, gas, fundingPath }) {
  const policy = gasUnitsForFundingPath(fundingPath);
  const priceWei = BigInt(gas.raw || "0");
  const oneTimeCostWei = BigInt(policy.oneTimeGasUnits) * priceWei;
  const stabilityCostWei = BigInt(policy.stabilityGasUnitsPerCycle) * BigInt(stabilityCycles) * priceWei;
  // Ceiling division: ceil(subtotal * bps / 10_000).
  const requiredWei = ((oneTimeCostWei + stabilityCostWei) * gasSafetyBps + 9_999n) / 10_000n;
  const balanceWei = BigInt(native.raw || "0");
  const deltaWei = requiredWei - balanceWei;
  const shortfallWei = deltaWei > 0n ? deltaWei : 0n;
  const surplusWei = deltaWei < 0n ? -deltaWei : 0n;
  return {
    nativeSymbol: nativeSymbolsByChain[chainId] ?? "native",
    gasPriceWei: gas.raw,
    gasPriceGwei: gas.gwei,
    oneTimeGasUnits: policy.oneTimeGasUnits,
    stabilityCycles,
    stabilityGasUnitsPerCycle: policy.stabilityGasUnitsPerCycle,
    safetyBps: Number(gasSafetyBps),
    requiredNativeRaw: requiredWei.toString(),
    requiredNative: decimalUnits(requiredWei, 18),
    oneTimeNative: decimalUnits(oneTimeCostWei, 18),
    stabilityNative: decimalUnits(stabilityCostWei, 18),
    nativeBalanceRaw: native.raw,
    nativeBalance: native.units,
    shortfallNativeRaw: shortfallWei.toString(),
    shortfallNative: decimalUnits(shortfallWei, 18),
    surplusNativeRaw: surplusWei.toString(),
    surplusNative: decimalUnits(surplusWei, 18),
    status: shortfallWei === 0n ? "gas_budget_satisfied" : "gas_budget_shortfall",
    rationale: policy.rationale,
    gasPriceError: gas.error,
  };
}
|
||||
|
||||
// Build the non-EVM (Solana / Tron / XRPL / other-majors) funding-requirement
// rows for the report payload. Read-only: resolves wallets from repo config,
// reads cached health/lane-status JSON, and queries the three native balances
// in parallel. Funding amounts stay "TBD" until per-network targets are bound.
async function buildNonEvmFundingRequirements() {
  const healthDoc = readJsonIfExists(nonEvmHealthPath, null);
  const laneDoc = readJsonIfExists(nonEvmLaneStatusPath, null);
  const lanes = laneDoc?.lanes ?? {};
  const sol = solanaWalletFromConfig();
  const tron = tronWalletFromConfig();
  const xrpl = xrplWalletFromConfig();
  const [solBal, tronBal, xrplBal] = await Promise.all([
    solanaNativeBalance(sol.address),
    tronNativeBalance(tron.address),
    xrplNativeBalance(xrpl.address),
  ]);
  // "<units> <SYMBOL>" on success; otherwise the placeholder/error text in .units.
  const balanceLabel = (bal, symbol) => (bal.ok ? `${bal.units} ${symbol}` : bal.units);
  const tronDerived = tron.source === "derived_from_evm_deployer_address";
  return [
    {
      network: "Solana",
      target: "mainnet-beta",
      includedInFundingScope: true,
      walletAddress: sol.address || "missing",
      walletSource: sol.source,
      currentBalanceStatus: balanceLabel(solBal, "SOL"),
      currentBalanceRaw: solBal.raw,
      nativeGasAsset: "SOL",
      bridgeOrWrappedAsset: lanes.solana?.destinationAsset?.symbol ?? "cWAUSDT",
      requiredFunding: "TBD",
      status: sol.address ? "spl_mint_inventory_and_minimum_funding_targets_required" : "wallet_and_spl_mint_inventory_required",
      networkHealth: networkHealth(healthDoc, "Solana"),
      requirements: [
        sol.address ? "Canonical Solana deployer public key is bound for native SOL checks." : "Bind canonical Solana custody wallet/public key for funding checks.",
        "Populate SPL mint addresses in config/solana-gru-bridge-lineup.json.",
        "Check SOL gas/rent balance and SPL token balances for each promoted cW* mint.",
        "Set minimum pool/rent/execution funding targets per Solana venue before declaring positive liquidity.",
      ],
    },
    {
      network: "Tron",
      target: "mainnet",
      includedInFundingScope: true,
      walletAddress: tron.address || "missing",
      walletSource: tron.source,
      currentBalanceStatus: balanceLabel(tronBal, "TRX"),
      currentBalanceRaw: tronBal.raw,
      nativeGasAsset: "TRX",
      bridgeOrWrappedAsset: "TronAdapter relay inventory",
      requiredFunding: "TBD",
      status: tronDerived ? "derived_tron_wallet_needs_operator_confirmation_and_asset_inventory" : "native_tron_wallet_and_asset_inventory_required",
      networkHealth: networkHealth(healthDoc, "Tron"),
      requirements: [
        tronDerived ? "Confirm whether the EVM deployer-derived Tron address is the canonical native Tron deployer." : "Bind canonical Tron custody wallet address.",
        "Check TRX energy/bandwidth funding and any native TRC-20 inventory needed for relay settlement.",
        "Promote or document native Tron-side contracts/assets before treating Tron as liquidity-ready.",
        "Close Chain 138 TronAdapter source/publication evidence separately from native Tron funding.",
      ],
    },
    {
      network: "XRPL",
      target: "mainnet",
      includedInFundingScope: true,
      walletAddress: xrpl.address || "missing",
      walletSource: xrpl.source,
      currentBalanceStatus: balanceLabel(xrplBal, "XRP"),
      currentBalanceRaw: xrplBal.raw,
      nativeGasAsset: "XRP",
      bridgeOrWrappedAsset: lanes.xrpl?.wrappedAsset?.address ? `wXRP ${lanes.xrpl.wrappedAsset.address}` : "wXRP",
      requiredFunding: "TBD",
      status: xrpl.address ? "xrpl_reserve_trustline_and_bridge_inventory_required" : "xrpl_wallet_reserve_and_bridge_inventory_required",
      networkHealth: networkHealth(healthDoc, "XRPL"),
      requirements: [
        xrpl.address ? "Canonical XRPL account is bound for native XRP checks." : "Bind canonical XRPL account and optional destination tag policy.",
        "Check XRP reserve, transfer-fee cushion, and any trustline/issuer requirements.",
        "Check Chain 138 wXRP inventory and MintBurnController authorization readiness.",
        "Close Chain 138 XRPLAdapter/wXRP/MintBurnController source-publication evidence separately from XRPL funding.",
      ],
    },
    {
      // Placeholder row: no per-network wallets/balances are bound yet, so no
      // currentBalanceRaw field (matches downstream consumers' expectations).
      network: "Other non-EVM majors",
      target: "BTC/SOL/XRP/ADA/XLM/HBAR/SUI/TON class expansion",
      includedInFundingScope: true,
      walletAddress: "per-network wallet not bound",
      walletSource: "missing",
      currentBalanceStatus: "not_supported_by_current_balance_planner",
      nativeGasAsset: "varies",
      bridgeOrWrappedAsset: "not bound",
      requiredFunding: "TBD",
      status: "adapter_wallet_asset_and_venue_requirements_not_yet_bound",
      networkHealth: null,
      requirements: [
        "Create per-network custody wallet and balance checker.",
        "Bind asset IDs/mints/trustlines/program IDs in repo config.",
        "Define minimum native gas/rent/reserve and liquidity targets per network.",
        "Add lane evidence before including the network in tracker-facing liquidity claims.",
      ],
    },
  ];
}
|
||||
|
||||
// Load the live readiness report and probe each liquidity-gap row:
// native balance, token balance, gas price, funding-path classification,
// and a per-row gas plan. Read-only — RPC queries only, no broadcasts.
const readiness = JSON.parse(readFileSync(readinessPath, "utf8"));
const details = readiness.blockerInventory?.liquidityMissingDetails ?? [];
const rows = [];

for (const gap of details) {
  const chainId = Number(gap.chainId);
  const rpcUrl = rpcByChain[chainId];
  const native = rpcUrl
    ? await nativeBalance(rpcUrl, deployer)
    : { ok: false, raw: "0", units: "0", error: "missing_rpc" };
  const token = rpcUrl && gap.address?.startsWith("0x")
    ? await erc20Balance(rpcUrl, gap.address, deployer)
    : { ok: false, raw: "0", units: "0", decimals: 18, error: "missing_token_or_rpc" };
  const hasGas = BigInt(native.raw || "0") > 0n;
  const hasToken = BigInt(token.raw || "0") > 0n;
  const funding = classifyFundingPath(gap, token, native);
  const gas = await gasPrice(rpcUrl, chainId);
  const gasPlan = buildGasPlan({ chainId, native, gas, fundingPath: funding.fundingPath });

  // Row status: pool-binding gaps override plain token gating; a zero-TVL pool
  // with token already on hand is immediately fundable; missing gas is appended.
  let status = "token_balance_gated";
  if (gap.category === "no_visible_pool_binding") {
    status = hasToken ? "pool_binding_gated" : "pool_binding_and_token_balance_gated";
  }
  if (gap.category === "configured_or_indexed_pools_zero_tvl" && hasToken) {
    status = "fundable_token_balance_present";
  }
  if (!hasGas) status = `${status}+gas_gated`;

  rows.push({
    chainId,
    symbol: gap.symbol,
    address: gap.address,
    category: gap.category,
    poolCount: gap.poolCount,
    zeroTvlPoolCount: gap.zeroTvlPoolCount,
    nativeBalance: native.units,
    tokenBalance: token.units,
    tokenBalanceRaw: token.raw,
    status,
    ...funding,
    gasPlan,
    rpcError: native.error || token.error,
  });
}
|
||||
|
||||
// Per-row gas view, flattened for the markdown/JSON gas-budget tables.
const gasBudgetRows = rows.map((r) => ({
  chainId: r.chainId,
  symbol: r.symbol,
  nativeSymbol: r.gasPlan.nativeSymbol,
  fundingPath: r.fundingPath,
  requiredNative: r.gasPlan.requiredNative,
  nativeBalance: r.gasPlan.nativeBalance,
  shortfallNative: r.gasPlan.shortfallNative,
  status: r.gasPlan.status,
}));

// Aggregate row-level gas requirements per chain: the same deployer native
// balance pays for every planned action on that chain, so requirements sum
// while the balance is counted once.
const chainGasBudgetMap = new Map();
for (const r of rows) {
  const bucket = chainGasBudgetMap.get(r.chainId) ?? {
    chainId: r.chainId,
    nativeSymbol: r.gasPlan.nativeSymbol,
    gasPriceGwei: r.gasPlan.gasPriceGwei,
    requiredNativeRaw: 0n,
    nativeBalanceRaw: BigInt(r.gasPlan.nativeBalanceRaw || "0"),
    rows: 0,
    symbols: [],
  };
  bucket.requiredNativeRaw += BigInt(r.gasPlan.requiredNativeRaw || "0");
  bucket.rows += 1;
  bucket.symbols.push(r.symbol);
  chainGasBudgetMap.set(r.chainId, bucket);
}

// Sorted, serialized per-chain budget rows with shortfall/surplus clamped at 0.
const chainGasBudgetRows = [...chainGasBudgetMap.values()]
  .sort((a, b) => Number(a.chainId) - Number(b.chainId))
  .map((bucket) => {
    const deltaRaw = bucket.requiredNativeRaw - bucket.nativeBalanceRaw;
    const shortfallRaw = deltaRaw > 0n ? deltaRaw : 0n;
    const surplusRaw = deltaRaw < 0n ? -deltaRaw : 0n;
    return {
      chainId: bucket.chainId,
      nativeSymbol: bucket.nativeSymbol,
      gasPriceGwei: bucket.gasPriceGwei,
      rows: bucket.rows,
      symbols: [...new Set(bucket.symbols)].join(", "),
      requiredNativeRaw: bucket.requiredNativeRaw.toString(),
      requiredNative: decimalUnits(bucket.requiredNativeRaw, 18),
      nativeBalanceRaw: bucket.nativeBalanceRaw.toString(),
      nativeBalance: decimalUnits(bucket.nativeBalanceRaw, 18),
      shortfallNativeRaw: shortfallRaw.toString(),
      shortfallNative: decimalUnits(shortfallRaw, 18),
      surplusNativeRaw: surplusRaw.toString(),
      surplusNative: decimalUnits(surplusRaw, 18),
      status: shortfallRaw === 0n ? "chain_gas_budget_satisfied" : "chain_gas_budget_shortfall",
    };
  });
|
||||
|
||||
// Narrative block documenting what gas can and cannot buy for tracker
// stability; only readinessStatus is computed, everything else is static text.
const etherscanStability = {
  purpose: "off_chain_indexing_stability_for_token_trackers",
  boundary: "Etherscan/token trackers index public on-chain facts; gas only funds the transactions that create and refresh those facts.",
  requiredOnChainFacts: [
    "Verified token contract and correct metadata/logoURI publication path.",
    "Visible/indexable pool contract for each promoted token pair.",
    "Positive, non-dust liquidity on the visible pool.",
    "Recent real swap or liquidity-change events when tracker freshness is required.",
    "Official quote-token evidence when claiming cW*/USDC or c*/USDC peg support.",
  ],
  gasBudgetRole: [
    "Create or bind missing pools.",
    "Approve and seed liquidity.",
    "Execute Protocolink/bridge/mint/swap actions when needed.",
    "Run recurring stability/rebalance transactions so public indexers observe fresh state.",
  ],
  cannotBeSolvedByGasAlone: [
    "A missing verified-source listing.",
    "A token logo or page-info package that is not published at the expected endpoint.",
    "A pool that exists only in internal config but is not visible/indexable on the public chain.",
    "A c* balance that has not been bridged or swapped into the exact official quote asset required by the tracker claim.",
  ],
  // Shortfall on any single chain flips the whole readiness status.
  readinessStatus: chainGasBudgetRows.some((r) => r.status === "chain_gas_budget_shortfall")
    ? "on_chain_stability_transactions_gas_shortfall"
    : "on_chain_stability_transactions_gas_budget_satisfied",
};
|
||||
|
||||
// "Coffee money" plan: what a small operator-observed USD amount can start.
// Only rows whose funding path needs nothing but deployer-held token + gas
// count as immediately executable.
const coffeeMoneySeedablePaths = new Set([
  "seed_existing_visible_pool_from_deployer_balance",
  "create_or_bind_pool_then_seed_from_deployer_balance",
]);
const coffeeMoneyExecutableRows = rows.filter((r) => coffeeMoneySeedablePaths.has(r.fundingPath));
const coffeeMoneyGasShortfallChains = chainGasBudgetRows.filter((r) => r.status === "chain_gas_budget_shortfall");
const coffeeMoneyPlan = {
  purpose: "start_visible_indexable_liquidity_with_coffee_money",
  operatorObservedUsdAvailable: coffeeMoneyUsdAvailable,
  liquidityDustUsdPerRow: coffeeMoneyLiquidityUsdPerRow,
  immediatelyUsefulRows: coffeeMoneyExecutableRows.length,
  estimatedLiquidityDustUsd: Number((coffeeMoneyExecutableRows.length * coffeeMoneyLiquidityUsdPerRow).toFixed(2)),
  gasShortfallChains: coffeeMoneyGasShortfallChains.map((r) => ({
    chainId: r.chainId,
    nativeSymbol: r.nativeSymbol,
    shortfallNative: r.shortfallNative,
    symbols: r.symbols,
  })),
  // $35 threshold is the operator-chosen minimum for the full starter set.
  assessment: coffeeMoneyUsdAvailable >= 35
    ? "enough_to_start_coffee_money_liquidity_if_routed_into_missing_native_gas"
    : "not_enough_for_full_coffee_money_set",
  recommendedOrder: [
    "Top up native gas on shortfall chains first: Optimism, BSC, Polygon, Arbitrum.",
    "Seed existing visible pools that already have deployer token balance.",
    "Create or bind missing visible pools for rows that already have deployer token balance.",
    "Run tiny real swaps/liquidity events so Etherscan/tracker indexers see fresh public facts.",
    "Leave Protocolink-only official quote-asset rows for last unless a live quote proves conversion from current deployer assets.",
  ],
  boundary: "This starts indexable public liquidity evidence; it does not create deep market depth or a large 1:1 peg reserve.",
};
|
||||
|
||||
// Tally rows by final status string.
const counts = {};
for (const r of rows) {
  counts[r.status] = (counts[r.status] || 0) + 1;
}

const nonEvmFundingRequirements = await buildNonEvmFundingRequirements();
const ethereumSourceInventory = await buildEthereumSourceInventory();

// Count helper for the summary block below.
const countMatching = (pred) => rows.filter(pred).length;

// Final JSON payload; key order is intentional (serialized with JSON.stringify).
const payload = {
  generatedAt: new Date().toISOString(),
  mode: "read_only_no_broadcast",
  deployer,
  sourceReadiness: "reports/status/token-aggregation-adoption-readiness-live-20260509.json",
  summary: {
    rows: rows.length,
    nonEvmFundingRequirementRows: nonEvmFundingRequirements.length,
    fundableTokenBalancePresent: countMatching((r) => r.status.startsWith("fundable_token_balance_present")),
    poolBindingGated: countMatching((r) => r.status.includes("pool_binding")),
    gasGated: countMatching((r) => r.status.includes("gas_gated")),
    protocolinkSwapCandidates: countMatching((r) => r.fundingPath === "protocolink_swap_candidate_for_non_mintable_quote_asset"),
    bridgeOrMintCandidates: countMatching((r) => r.fundingPath === "bridge_or_destination_mint_repo_asset_then_seed"),
    poolCreateOrBindFirst: countMatching((r) => r.fundingPath === "create_or_bind_pool_then_seed_from_deployer_balance"),
    seedExistingVisiblePoolNow: countMatching((r) => r.fundingPath === "seed_existing_visible_pool_from_deployer_balance"),
    gasBudgetSatisfied: countMatching((r) => r.gasPlan.status === "gas_budget_satisfied"),
    gasBudgetShortfall: countMatching((r) => r.gasPlan.status === "gas_budget_shortfall"),
    chainGasBudgetSatisfied: chainGasBudgetRows.filter((r) => r.status === "chain_gas_budget_satisfied").length,
    chainGasBudgetShortfall: chainGasBudgetRows.filter((r) => r.status === "chain_gas_budget_shortfall").length,
    gasPolicy: {
      stabilityCycles,
      gasSafetyBps: Number(gasSafetyBps),
    },
    statusCounts: counts,
  },
  rows,
  gasBudgetRows,
  chainGasBudgetRows,
  etherscanStability,
  coffeeMoneyPlan,
  ethereumSourceInventory,
  nonEvmFundingRequirements,
};
|
||||
|
||||
// Markdown report body, assembled as an array of lines then joined once.
// Bullet-list helper shared by every list section below.
const mdBullet = (text) => `- ${text}`;
const md = [
  "# Token-Aggregation Liquidity Gap Funding Plan",
  "",
  `- Generated: \`${payload.generatedAt}\``,
  `- Mode: \`${payload.mode}\``,
  `- Deployer: \`${deployer}\``,
  "",
  table(["Metric", "Count"], Object.entries(payload.summary).map(([key, value]) => [key, typeof value === "object" ? JSON.stringify(value) : value])),
  "",
  "## Rows",
  "",
  table(
    ["Chain", "Symbol", "Category", "Pools", "Native", "Token balance", "Status", "Funding path", "Gas shortfall"],
    rows.map((r) => [r.chainId, r.symbol, r.category, r.poolCount, r.nativeBalance, r.tokenBalance, r.status, r.fundingPath, `${r.gasPlan.shortfallNative} ${r.gasPlan.nativeSymbol}`]),
  ),
  "",
  "### Chain-Level Gas Budget",
  "",
  "This aggregates all planned row actions by network because the same deployer native balance pays every deployment, seed, swap, bridge, and stability transaction on that chain.",
  "",
  table(
    ["Chain", "Symbols", "Native", "Rows", "Gas price gwei", "Required", "Balance", "Shortfall", "Status"],
    chainGasBudgetRows.map((r) => [
      r.chainId,
      r.symbols,
      r.nativeSymbol,
      r.rows,
      r.gasPriceGwei,
      `${r.requiredNative} ${r.nativeSymbol}`,
      `${r.nativeBalance} ${r.nativeSymbol}`,
      `${r.shortfallNative} ${r.nativeSymbol}`,
      r.status,
    ]),
  ),
  "",
  "## Gas Budget",
  "",
  `Gas is budgeted for one deployment/seed action plus \`${stabilityCycles}\` continual stability cycles, with a \`${Number(gasSafetyBps) / 100}%\` safety multiplier. Etherscan/token-tracker stability itself is off-chain indexing; gas only funds the on-chain facts that Etherscan can index.`,
  "",
  table(
    ["Chain", "Symbol", "Native", "Gas price gwei", "One-time gas", "Stability gas/cycle", "Required", "Balance", "Shortfall", "Status"],
    rows.map((r) => [
      r.chainId,
      r.symbol,
      r.gasPlan.nativeSymbol,
      r.gasPlan.gasPriceGwei,
      r.gasPlan.oneTimeGasUnits,
      r.gasPlan.stabilityGasUnitsPerCycle,
      `${r.gasPlan.requiredNative} ${r.gasPlan.nativeSymbol}`,
      `${r.gasPlan.nativeBalance} ${r.gasPlan.nativeSymbol}`,
      `${r.gasPlan.shortfallNative} ${r.gasPlan.nativeSymbol}`,
      r.gasPlan.status,
    ]),
  ),
  "",
  "## Etherscan Stability Boundary",
  "",
  `- Purpose: \`${etherscanStability.purpose}\``,
  `- Status: \`${etherscanStability.readinessStatus}\``,
  `- Boundary: ${etherscanStability.boundary}`,
  "",
  "Required on-chain facts for Etherscan/tracker stability:",
  "",
  ...etherscanStability.requiredOnChainFacts.map(mdBullet),
  "",
  "Gas budget role:",
  "",
  ...etherscanStability.gasBudgetRole.map(mdBullet),
  "",
  "Cannot be solved by gas alone:",
  "",
  ...etherscanStability.cannotBeSolvedByGasAlone.map(mdBullet),
  "",
  "## Coffee-Money Start Plan",
  "",
  `- Operator-observed deployer value available: \`$${coffeeMoneyPlan.operatorObservedUsdAvailable}\``,
  `- Assessment: \`${coffeeMoneyPlan.assessment}\``,
  `- Immediately useful rows: \`${coffeeMoneyPlan.immediatelyUsefulRows}\``,
  `- Planning dust liquidity: \`$${coffeeMoneyPlan.liquidityDustUsdPerRow}\` per row`,
  `- Estimated dust liquidity: \`$${coffeeMoneyPlan.estimatedLiquidityDustUsd}\``,
  `- Boundary: ${coffeeMoneyPlan.boundary}`,
  "",
  "Native gas shortfall chains to fill first:",
  "",
  table(
    ["Chain", "Symbols", "Shortfall"],
    coffeeMoneyPlan.gasShortfallChains.map((r) => [r.chainId, r.symbols, `${r.shortfallNative} ${r.nativeSymbol}`]),
  ),
  "",
  "Recommended order:",
  "",
  ...coffeeMoneyPlan.recommendedOrder.map(mdBullet),
  "",
  "### Ethereum Source Inventory",
  "",
  `- Native ETH: \`${ethereumSourceInventory.native.balance}\``,
  "",
  table(
    ["Token", "Balance", "Role", "Status"],
    ethereumSourceInventory.tokens.map((tok) => [tok.symbol, tok.balance, tok.role, tok.balanceStatus]),
  ),
  "",
  "Interpretation:",
  "",
  ...ethereumSourceInventory.interpretation.map(mdBullet),
  "",
  "## Funding Path Interpretation",
  "",
  "- `seed_existing_visible_pool_from_deployer_balance`: token and gas are present; only operator approval and pool-specific seeding rules remain.",
  "- `create_or_bind_pool_then_seed_from_deployer_balance`: token and gas are present, but no visible/indexable pool binding exists yet.",
  "- `bridge_or_destination_mint_repo_asset_then_seed`: repo-controlled c*/cW* inventory can be moved or minted once the bridge/mint path and destination gas are ready.",
  "- `protocolink_swap_candidate_for_non_mintable_quote_asset`: the needed asset is an official/non-mintable quote asset; Protocolink can help only after a live quote proves a deployer-held source asset converts into the exact target token with acceptable minOut.",
  "- `external_quote_asset_required`: neither bridge nor Protocolink coverage is classified for that exact non-mintable quote asset.",
  "",
  "## Non-EVM Funding Requirements",
  "",
  "These networks are now part of funding scope. The planner resolves non-EVM deployer wallets where the repo can prove them, checks native gas balances where possible, and leaves funding amounts `TBD` until asset IDs and minimum venue targets are bound.",
  "",
  table(
    ["Network", "Target", "Wallet", "Source", "Native gas", "Current balance", "Required funding", "Status"],
    payload.nonEvmFundingRequirements.map((r) => [
      r.network,
      r.target,
      r.walletAddress,
      r.walletSource,
      r.nativeGasAsset,
      r.currentBalanceStatus,
      r.requiredFunding,
      r.status,
    ]),
  ),
  "",
  "### Non-EVM Requirement Details",
  "",
  ...payload.nonEvmFundingRequirements.flatMap((r) => [
    `#### ${r.network}`,
    "",
    ...r.requirements.map(mdBullet),
    "",
  ]),
  "",
  "## Execution Boundary",
  "",
  "This planner is read-only. It proves whether the deployer currently holds token and gas inventory for each liquidity gap. It does not create pools, add liquidity, approve tokens, bridge assets, or broadcast transactions.",
].join("\n");
|
||||
|
||||
// Persist the JSON payload and markdown report, then print the JSON path so
// callers/CI can pick up the artifact location from stdout.
const statusDir = resolve(repoRoot, "reports/status");
mkdirSync(statusDir, { recursive: true });
writeFileSync(jsonOut, `${JSON.stringify(payload, null, 2)}\n`);
writeFileSync(mdOut, `${md}\n`);
console.log(jsonOut);
|
||||
@@ -32,7 +32,8 @@ FEE_COLLECTOR=0xF78246eB94c6CB14018E507E60661314E5f4C53f
|
||||
DEBT_REGISTRY=0x95BC4A997c0670d5DAC64d55cDf3769B53B63C28
|
||||
POLICY_MANAGER=0x0C4FD27018130A00762a802f91a72D6a64a60F14
|
||||
TOKEN_IMPLEMENTATION=0x0059e237973179146237aB49f1322E8197c22b21
|
||||
CCIPWETH9_BRIDGE_CHAIN138=0x9cba0D04Ae5f6f16e3C599025aB97a05c4A593d5
|
||||
# Alternate legacy deployment (do not use for ops): 0x9cba0D04Ae5f6f16e3C599025aB97a05c4A593d5
|
||||
CCIPWETH9_BRIDGE_CHAIN138=0xcacfd227A040002e49e2e01626363071324f820a
|
||||
CCIPWETH10_BRIDGE_CHAIN138=0xe0E93247376aa097dB308B92e6Ba36bA015535D0
|
||||
LINK_TOKEN=0xb7721dD53A8c629d9f1Ba31a5819AFe250002b03
|
||||
CCIP_FEE_TOKEN=0xb7721dD53A8c629d9f1Ba31a5819AFe250002b03
|
||||
|
||||
@@ -3,6 +3,9 @@
|
||||
# Use for CI or pre-deploy: dependencies, config files, optional genesis.
|
||||
# Usage: bash scripts/verify/run-all-validation.sh [--skip-genesis]
|
||||
# --skip-genesis: do not run validate-genesis.sh (default: run if smom-dbis-138 present).
|
||||
# Optional: EI_MATRIX_ONCHAIN_AUDIT_CI=1 runs scripts/lib/ei_matrix_onchain_readiness_audit.py (needs
|
||||
# ETHEREUM_MAINNET_RPC + RPC_URL_138 in .env). EI_MATRIX_ONCHAIN_AUDIT_CI_LIMIT default 120; 0 = full grid.
|
||||
# Strict gate: set EI_MATRIX_AUDIT_MIN_MAINNET_RAW_CI / EI_MATRIX_AUDIT_MIN_138_RAW_CI.
|
||||
# Steps: dependencies, config files, cW* mesh matrix (if pair-discovery JSON exists), optional advisory non-EVM public health, genesis.
|
||||
|
||||
set -euo pipefail
|
||||
@@ -170,6 +173,56 @@ run_summary_record_step "3d" "Non-EVM public network health" "success" "$((SECON
|
||||
step_done "$STEP_STARTED"
|
||||
echo ""
|
||||
|
||||
# Step 3d1 — advisory check of the d-bis.org CWUSDC Etherscan-profile
# prerequisite URLs. Never fails the run: a non-200 URL only records
# "advisory_fail" in the step summary.
echo "3d1. d-bis.org CWUSDC Etherscan profile prereq URLs (advisory)..."
STEP_STARTED=$SECONDS
CWU_URLS="$SCRIPT_DIR/check-cwusdc-etherscan-prereq-urls.sh"
CWU_STATUS="skipped"
# Requires both curl and the (executable) checker script; otherwise record "skipped".
if command -v curl >/dev/null 2>&1 && [[ -x "$CWU_URLS" ]]; then
  if bash "$CWU_URLS"; then
    log_ok "d-bis.org token-profile URLs OK (CWUSDC E2E prereq)"
    CWU_STATUS="success"
  else
    echo " (advisory: one or more URLs not HTTP 200 — fix site or network; see CWUSDC_ETHERSCAN_E2E_RECOMMENDATIONS.md)"
    CWU_STATUS="advisory_fail"
  fi
else
  echo " (skip: curl or $CWU_URLS missing)"
fi
run_summary_record_step "3d1" "CWUSDC d-bis URL prereqs" "$CWU_STATUS" "$((SECONDS - STEP_STARTED))"
step_done "$STEP_STARTED"
echo ""
|
||||
|
||||
# Step 3e — optional EI matrix on-chain readiness audit, gated behind
# EI_MATRIX_ONCHAIN_AUDIT_CI=1 and both RPC env vars.
#
# Fix: the previous form `python3 ... || log_err ...` logged the failure but
# then fell through to log_ok and EIM_STEP_STATUS="success", so a failed audit
# was still recorded as successful in the run summary (unless log_err itself
# aborts the script — NOTE(review): log_err is defined elsewhere; confirm it
# does not exit). The success path is now taken only when the audit passes.
echo "3e. EI matrix on-chain readiness audit (optional)..."
STEP_STARTED=$SECONDS
EIM_STEP_STATUS="skipped"
if [[ "${EI_MATRIX_ONCHAIN_AUDIT_CI:-}" == "1" ]]; then
  # shellcheck disable=SC1091
  source "$PROJECT_ROOT/scripts/lib/load-project-env.sh"
  if [[ -z "${ETHEREUM_MAINNET_RPC:-}" || -z "${RPC_URL_138:-}" ]]; then
    echo " (skip: ETHEREUM_MAINNET_RPC or RPC_URL_138 unset — set RPCs in .env for CI gate)"
  else
    CI_LIMIT="${EI_MATRIX_ONCHAIN_AUDIT_CI_LIMIT:-120}"
    CI_SHARD="${EI_MATRIX_ONCHAIN_AUDIT_CI_SHARD:-200}"
    CI_WORKERS="${EI_MATRIX_ONCHAIN_AUDIT_CI_WORKERS:-2}"
    MIN_M="${EI_MATRIX_AUDIT_MIN_MAINNET_RAW_CI:-0}"
    MIN_138="${EI_MATRIX_AUDIT_MIN_138_RAW_CI:-0}"
    JSON_CI="${EI_MATRIX_ONCHAIN_AUDIT_JSON_CI:-reports/status/ei-matrix-readiness-audit-ci.json}"
    AUDIT_PY="$PROJECT_ROOT/scripts/lib/ei_matrix_onchain_readiness_audit.py"
    EXTRA=(--shard-size "$CI_SHARD" --workers "$CI_WORKERS" --both --min-mainnet-raw "$MIN_M" --min-138-raw "$MIN_138" --json-out "$JSON_CI")
    # --limit 0 means "full grid": omit the flag entirely in that case.
    if [[ "$CI_LIMIT" != "0" ]]; then
      EXTRA+=(--limit "$CI_LIMIT")
    fi
    if python3 "$AUDIT_PY" "${EXTRA[@]}"; then
      log_ok "EI matrix on-chain audit OK ($JSON_CI)"
      EIM_STEP_STATUS="success"
    else
      log_err "EI matrix on-chain audit failed (thresholds or RPC)"
      EIM_STEP_STATUS="failed"
    fi
  fi
else
  echo " (skip: set EI_MATRIX_ONCHAIN_AUDIT_CI=1 to run; optional EI_MATRIX_ONCHAIN_AUDIT_CI_LIMIT=0 for full grid)"
fi
run_summary_record_step "3e" "EI matrix on-chain audit" "$EIM_STEP_STATUS" "$((SECONDS - STEP_STARTED))"
step_done "$STEP_STARTED"
echo ""
|
||||
|
||||
if [[ "$SKIP_GENESIS" == true ]]; then
|
||||
echo "4. Genesis — skipped (--skip-genesis)"
|
||||
run_summary_record_step "4" "Genesis (smom-dbis-138)" "skipped" "0"
|
||||
|
||||
76
scripts/verify/run-cwusdc-provider-monitoring-snapshot.sh
Executable file
76
scripts/verify/run-cwusdc-provider-monitoring-snapshot.sh
Executable file
@@ -0,0 +1,76 @@
|
||||
#!/usr/bin/env bash
# Read-only cWUSDC provider propagation monitor.
#
# Re-runs the pnpm cwusdc:* evidence generators, then condenses their JSON
# outputs into a single monitoring snapshot:
#   reports/status/cwusdc-provider-monitoring-snapshot-latest.{json,md}
# No on-chain writes; only report files under reports/status/ are touched.
set -euo pipefail

PROJECT_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$PROJECT_ROOT"

TS="$(date -u +%Y-%m-%dT%H:%M:%SZ)"
JSON_OUT="reports/status/cwusdc-provider-monitoring-snapshot-latest.json"
MD_OUT="reports/status/cwusdc-provider-monitoring-snapshot-latest.md"
# Evidence bundle date is overridable so a re-run can target an existing bundle.
BUNDLE_DATE="${CWUSDC_EVIDENCE_BUNDLE_DATE:-$(date -u +%Y%m%d)}"

# Fix: the Python heredoc below fails with FileNotFoundError on a fresh
# checkout if the report directory does not exist yet.
mkdir -p "$(dirname "$JSON_OUT")"

# Refresh every upstream evidence artifact before summarizing.
pnpm cwusdc:etherscan-dossier
pnpm cwusdc:role-audit
pnpm cwusdc:provider-ci
pnpm cwusdc:doc-links
pnpm cwusdc:role-appendix
pnpm cwusdc:submission-prefill
CWUSDC_EVIDENCE_BUNDLE_DATE="$BUNDLE_DATE" pnpm cwusdc:evidence-bundle

# Summarize the freshly written JSON artifacts into one snapshot.
python3 - "$TS" "$JSON_OUT" "$MD_OUT" "$BUNDLE_DATE" <<'PY'
import json
import sys
from pathlib import Path

ts = sys.argv[1]
json_out = Path(sys.argv[2])
md_out = Path(sys.argv[3])
bundle_date = sys.argv[4]


def load(path):
    """Return the parsed JSON at *path*, or {} if the file is absent."""
    p = Path(path)
    return json.loads(p.read_text()) if p.exists() else {}


dossier = load("reports/status/cwusdc-etherscan-value-dossier-latest.json")
# Older dossier versions keyed readiness flags under "summary".
dossier_readiness = dossier.get("readiness") or dossier.get("summary") or {}
provider_ci = load("reports/status/cwusdc-provider-readiness-ci-latest.json")
links = load("reports/status/cwusdc-institutional-doc-link-check-latest.json")
role_appendix = load("reports/status/cwusdc-role-deployment-appendix-latest.json")
payload = {
    "schema": "cwusdc-provider-monitoring-snapshot/v1",
    "generatedAt": ts,
    # "attention" whenever repo-controlled prerequisites or doc links fail.
    "status": "success" if provider_ci.get("repoControlledPrereqsPassed") and links.get("status") == "pass" else "attention",
    "readyForExternalSubmission": dossier_readiness.get("readyForExternalSubmission"),
    "etherscanValueReady": dossier_readiness.get("etherscanValueReady"),
    "coinGeckoPriceReady": dossier_readiness.get("coinGeckoPriceReady"),
    "repoControlledPrereqsPassed": provider_ci.get("repoControlledPrereqsPassed"),
    "externalBlockersAdvisory": provider_ci.get("externalBlockersAdvisory", []),
    "docLinkStatus": links.get("status"),
    "roleEventCount": role_appendix.get("eventCount"),
    "artifacts": {
        "dossier": "reports/status/cwusdc-etherscan-value-dossier-latest.json",
        "providerCi": "reports/status/cwusdc-provider-readiness-ci-latest.json",
        "docLinks": "reports/status/cwusdc-institutional-doc-link-check-latest.json",
        "roleAppendix": "reports/status/cwusdc-role-deployment-appendix-latest.json",
        "evidenceBundleSha256": f"reports/status/cwusdc-institutional-evidence-bundle-{bundle_date}.sha256",
    },
}
json_out.write_text(json.dumps(payload, indent=2) + "\n")
lines = [
    "# cWUSDC Provider Monitoring Snapshot",
    "",
    f"- Generated: `{payload['generatedAt']}`",
    f"- Status: `{payload['status']}`",
    f"- Ready for external submission: `{payload['readyForExternalSubmission']}`",
    f"- Etherscan Value ready: `{payload['etherscanValueReady']}`",
    f"- CoinGecko price ready: `{payload['coinGeckoPriceReady']}`",
    f"- Repo-controlled prerequisites passed: `{payload['repoControlledPrereqsPassed']}`",
    f"- Institutional doc link status: `{payload['docLinkStatus']}`",
    f"- Role event count: `{payload['roleEventCount']}`",
    f"- External advisory blockers: `{len(payload['externalBlockersAdvisory'])}`",
]
md_out.write_text("\n".join(lines) + "\n")
print(f"Wrote {json_out}")
print(f"Wrote {md_out}")
PY
|
||||
79
scripts/verify/run-cwusdc-provider-nonmanual-checks.sh
Executable file
79
scripts/verify/run-cwusdc-provider-nonmanual-checks.sh
Executable file
@@ -0,0 +1,79 @@
|
||||
#!/usr/bin/env bash
# Run all non-manual cWUSDC provider checks and build a handoff report.
# This script is public/read-only except for report files under reports/status/.

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT"

STRICT_REPO=false
while [[ $# -gt 0 ]]; do
  case "$1" in
    --strict-repo)
      STRICT_REPO=true
      shift
      ;;
    -h|--help)
      # Fix: start at line 2 so the help banner skips the shebang line.
      sed -n '2,3p' "$0"
      echo "  --strict-repo   Exit non-zero if repo-controlled URL prerequisites fail."
      exit 0
      ;;
    *)
      echo "Unknown argument: $1" >&2
      exit 1
      ;;
  esac
done

# Report artifact paths (all writes are confined to reports/status/).
PREREQ_JSON="reports/status/cwusdc-etherscan-prereq-urls-latest.json"
PREREQ_MD="reports/status/cwusdc-etherscan-prereq-urls-latest.md"
TRACKERS_JSON="reports/status/cwusdc-external-trackers-live-latest.json"
TRACKERS_MD="reports/status/cwusdc-external-trackers-live-latest.md"
LIQUIDITY_JSON="reports/status/token-aggregation-liquidity-gap-funding-plan-latest.json"
CMC_SANITY_JSON="reports/status/cmc-provider-report-sanity-latest.json"
HANDOFF_JSON="reports/status/cwusdc-provider-handoff-latest.json"
HANDOFF_MD="reports/status/cwusdc-provider-handoff-latest.md"

# Fix: ensure the report directory exists before any sub-check writes into it.
mkdir -p reports/status

echo "=== cWUSDC provider non-manual checks ==="
echo "Mode: public/read-only, report writes only"
echo ""

# Repo-controlled prerequisites are the only gating check; everything else
# below is advisory and must not abort the run (hence the `|| true` guards).
REPO_STATUS=0
echo "1. Repo-controlled Etherscan prerequisite URLs..."
if ! bash "$SCRIPT_DIR/check-cwusdc-etherscan-prereq-urls.sh" --json-out "$PREREQ_JSON" --md-out "$PREREQ_MD"; then
  REPO_STATUS=1
  echo "   Repo-controlled URL prerequisites failed."
fi
echo ""

echo "2. External tracker/indexing probes (advisory)..."
bash "$SCRIPT_DIR/check-cwusdc-external-trackers-live.sh" --json-out "$TRACKERS_JSON" --md-out "$TRACKERS_MD" || true
echo ""

echo "3. Liquidity-gap funding planner (read-only)..."
node "$SCRIPT_DIR/plan-token-aggregation-liquidity-gap-funding.mjs"
echo ""

echo "3b. CMC-shaped report sanity (advisory)..."
python3 "$SCRIPT_DIR/check-cmc-provider-report-sanity.py" || true
echo ""

echo "4. Provider handoff report..."
python3 "$SCRIPT_DIR/build-cwusdc-provider-handoff-report.py" \
  --prereq-json "$PREREQ_JSON" \
  --trackers-json "$TRACKERS_JSON" \
  --liquidity-json "$LIQUIDITY_JSON" \
  --cmc-sanity-json "$CMC_SANITY_JSON" \
  --json-out "$HANDOFF_JSON" \
  --md-out "$HANDOFF_MD"
echo ""

echo "Handoff: $HANDOFF_MD"

# Default mode always exits 0 so CI can treat external failures as advisory;
# --strict-repo promotes repo-controlled failures to a hard exit.
if [[ "$STRICT_REPO" == "true" && "$REPO_STATUS" -ne 0 ]]; then
  exit "$REPO_STATUS"
fi

exit 0
|
||||
44
scripts/verify/run-ei-matrix-full-readiness-audit.sh
Executable file
44
scripts/verify/run-ei-matrix-full-readiness-audit.sh
Executable file
@@ -0,0 +1,44 @@
|
||||
#!/usr/bin/env bash
# Full-grid EI matrix on-chain readiness: mainnet cWUSDC + Chain 138 cUSDC (sharded RPC).
# Sources scripts/lib/load-project-env.sh for RPCs and token defaults.
#
# Environment (optional):
#   EI_MATRIX_AUDIT_SHARD_SIZE        default 400
#   EI_MATRIX_AUDIT_WORKERS           default 3
#   EI_MATRIX_AUDIT_MIN_MAINNET_RAW   default 12000000 (12 USDC units, 6 decimals)
#   EI_MATRIX_AUDIT_MIN_138_RAW       default 0 (set >0 to require 138 cUSDC everywhere)
#   EI_MATRIX_AUDIT_JSON_OUT          default reports/status/ei-matrix-readiness-audit-latest.json
#   EI_MATRIX_AUDIT_GAPS_MAINNET      default reports/status/ei-matrix-readiness-gaps-mainnet-indices.txt
#   EI_MATRIX_AUDIT_GAPS_138          default reports/status/ei-matrix-readiness-gaps-138-indices.txt
#
# Pass-through: any extra args after optional -- are forwarded to ei_matrix_onchain_readiness_audit.py
#   ./scripts/verify/run-ei-matrix-full-readiness-audit.sh -- --offset 0 --limit 500
#
# Exit 1 if any wallet is below configured minima (operator gate).
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT"

# shellcheck disable=SC1091
source "$PROJECT_ROOT/scripts/lib/load-project-env.sh"

# Accept (and discard) a conventional `--` separator before pass-through args.
[[ "${1:-}" == "--" ]] && shift

# Fill in documented defaults for any tunable the caller left unset.
: "${EI_MATRIX_AUDIT_SHARD_SIZE:=400}"
: "${EI_MATRIX_AUDIT_WORKERS:=3}"
: "${EI_MATRIX_AUDIT_MIN_MAINNET_RAW:=12000000}"
: "${EI_MATRIX_AUDIT_MIN_138_RAW:=0}"
: "${EI_MATRIX_AUDIT_JSON_OUT:=reports/status/ei-matrix-readiness-audit-latest.json}"
: "${EI_MATRIX_AUDIT_GAPS_MAINNET:=reports/status/ei-matrix-readiness-gaps-mainnet-indices.txt}"
: "${EI_MATRIX_AUDIT_GAPS_138:=reports/status/ei-matrix-readiness-gaps-138-indices.txt}"

# Assemble the auditor command line once, then append caller pass-through args.
audit_args=(
  --shard-size "$EI_MATRIX_AUDIT_SHARD_SIZE"
  --workers "$EI_MATRIX_AUDIT_WORKERS"
  --both
  --min-mainnet-raw "$EI_MATRIX_AUDIT_MIN_MAINNET_RAW"
  --min-138-raw "$EI_MATRIX_AUDIT_MIN_138_RAW"
  --report-by-class
  --json-out "$EI_MATRIX_AUDIT_JSON_OUT"
  --gaps-mainnet-out "$EI_MATRIX_AUDIT_GAPS_MAINNET"
  --gaps-138-out "$EI_MATRIX_AUDIT_GAPS_138"
)

# Replace the shell with the Python auditor so its exit code propagates as-is.
exec python3 "$PROJECT_ROOT/scripts/lib/ei_matrix_onchain_readiness_audit.py" "${audit_args[@]}" "$@"
|
||||
@@ -78,7 +78,13 @@ def parse_uint(value: str) -> int:
|
||||
|
||||
|
||||
def parse_uints(value: str, count: int) -> list[int]:
    """Extract the first *count* unsigned integers from *value*.

    Parsing strategy:
      1. Prefer one integer per line — the first ``UINT_RE`` match on each
         line of *value* (robust when each line is a labelled record).
      2. If that yields fewer than *count* values, fall back to scanning the
         whole string for every match in order (handles values packed onto a
         single line).

    Args:
        value: Text to scan; ``UINT_RE`` (module-level pattern) defines what
            counts as an unsigned integer token.
        count: Number of integers required.

    Returns:
        The first *count* parsed integers.

    Raises:
        ValueError: If fewer than *count* integers are found by either pass.
    """
    # Removed a dead initial `matches = [...]` assignment that was
    # immediately overwritten by the per-line pass below.
    matches: list[int] = []
    for line in value.splitlines():
        line_matches = UINT_RE.findall(line)
        if line_matches:
            matches.append(int(line_matches[0]))
    if len(matches) < count:
        # Fallback: treat the whole input as one stream of integers.
        matches = [int(match) for match in UINT_RE.findall(value)]
    if len(matches) < count:
        raise ValueError(f"expected at least {count} integers from {value!r}")
    return matches[:count]
|
||||
|
||||
Reference in New Issue
Block a user