Add optional Cosmos/Engine-X/act-runner templates, CWUSDC/EI-matrix tooling, a non-EVM route planner in multi-chain-execution (tests passing), token-list and extraction updates, and documentation (MetaMask matrix, GRU/CWUSDC packets). Ignore institutional evidence tarballs/sha256 under reports/status. Validated with: `bash scripts/verify/run-all-validation.sh --skip-genesis`. Co-authored-by: Cursor <cursoragent@cursor.com>
161 lines · 6.4 KiB · Python
#!/usr/bin/env python3
|
|
"""Build a formal role-event/deployment-record appendix for Mainnet cWUSDC."""
|
|
|
|
from __future__ import annotations
|
|
|
|
import datetime as dt
|
|
import json
|
|
import re
|
|
from pathlib import Path
|
|
from typing import Any
|
|
|
|
|
|
# Repository root: two directories above this script's location.
ROOT = Path(__file__).resolve().parents[2]

# Input: latest on-chain role audit produced by the role-audit tooling.
ROLE_AUDIT = ROOT / "reports" / "status" / "cwusdc-mainnet-role-audit-latest.json"

# Outputs: machine-readable appendix plus its Markdown rendering.
REPORT_JSON = ROOT / "reports" / "status" / "cwusdc-role-deployment-appendix-latest.json"
REPORT_MD = ROOT / "reports" / "status" / "cwusdc-role-deployment-appendix-latest.md"

# Directories scanned for textual mentions of addresses / transaction hashes.
SEARCH_ROOTS = [
    ROOT / "docs",
    ROOT / "reports" / "status",
    ROOT / "config",
    ROOT / "scripts",
]
|
|
|
|
|
|
def load_role_audit() -> dict[str, Any]:
    """Load the latest mainnet role-audit JSON, aborting the run if it is absent."""
    if ROLE_AUDIT.exists():
        return json.loads(ROLE_AUDIT.read_text())
    raise SystemExit(f"Missing role audit: {ROLE_AUDIT.relative_to(ROOT)}")
|
|
|
|
|
|
def read_text(path: Path) -> str:
    """Best-effort read of *path* as text; any failure yields an empty string."""
    try:
        contents = path.read_text(errors="ignore")
    except Exception:
        # Unreadable / missing files are simply skipped by callers.
        return ""
    return contents
|
|
|
|
|
|
def candidate_files() -> list[Path]:
    """Collect scannable text-like files under every existing search root."""
    allowed_suffixes = {".md", ".json", ".jsonl", ".sh", ".py", ".env", ".txt"}
    return [
        path
        for root in SEARCH_ROOTS
        if root.exists()
        for path in root.rglob("*")
        if path.is_file() and path.suffix.lower() in allowed_suffixes
    ]
|
|
|
|
|
|
def find_mentions(needles: list[str]) -> list[dict[str, Any]]:
    """Scan candidate files for case-insensitive occurrences of *needles*.

    Returns one record per matching file, sorted by path, carrying the matched
    needles (original casing, deduplicated) and up to eight sample lines, each
    truncated to 240 characters.
    """
    # Pair each non-empty needle with its lowercase form for fast scanning.
    pairs = [(needle, needle.lower()) for needle in needles if needle]
    results: list[dict[str, Any]] = []
    for candidate in candidate_files():
        rel_path = str(candidate.relative_to(ROOT))
        # Never report the appendix file this script is about to (re)write.
        if rel_path.endswith("cwusdc-role-deployment-appendix-latest.json"):
            continue
        contents = read_text(candidate)
        if not contents:
            continue
        haystack = contents.lower()
        hits = [original for original, low in pairs if low in haystack]
        if not hits:
            continue
        samples: list[dict[str, Any]] = []
        for line_no, raw_line in enumerate(contents.splitlines(), start=1):
            folded = raw_line.lower()
            if any(low in folded for _, low in pairs):
                samples.append({"line": line_no, "text": raw_line[:240]})
            if len(samples) >= 8:
                break
        results.append({"path": rel_path, "matches": sorted(set(hits)), "sampleLines": samples})
    return sorted(results, key=lambda record: record["path"])
|
|
|
|
|
|
def tx_url(tx_hash: str | None) -> str:
    """Return the Etherscan link for *tx_hash*, or an empty string for falsy input."""
    if not tx_hash:
        return ""
    return f"https://etherscan.io/tx/{tx_hash}"
|
|
|
|
|
|
def build() -> dict[str, Any]:
    """Assemble the appendix payload from the role audit plus repository-wide mentions."""
    role_audit = load_role_audit()
    token = role_audit.get("token", {})
    event_review = role_audit.get("eventLogReview", {})
    events = event_review.get("events", [])

    # Unique transaction hashes observed across all role events.
    tx_hashes = sorted({event.get("transactionHash") for event in events if event.get("transactionHash")})

    # Every address touched by the audit: token, candidate checks, event parties.
    raw_addresses: set[str] = {token.get("address", "")}
    raw_addresses.update(candidate.get("address", "") for candidate in role_audit.get("candidateChecks", {}).values())
    raw_addresses.update(event.get("account", "") for event in events)
    raw_addresses.update(event.get("sender", "") for event in events)
    # Keep only well-formed 20-byte hex addresses.
    addresses = [address for address in sorted(raw_addresses) if re.fullmatch(r"0x[a-fA-F0-9]{40}", address or "")]

    mentions = find_mentions([token.get("address", ""), *tx_hashes, *addresses])
    record_markers = ["deploy", "tracker", "technical", "completion", "readiness", "runbook", "bridge"]
    deployment_record_candidates = [
        item for item in mentions if any(marker in item["path"].lower() for marker in record_markers)
    ]
    return {
        "schema": "cwusdc-role-deployment-appendix/v1",
        "generatedAt": dt.datetime.now(dt.UTC).isoformat().replace("+00:00", "Z"),
        "roleAudit": str(ROLE_AUDIT.relative_to(ROOT)),
        "token": token,
        "eventCount": len(events),
        "transactionHashes": tx_hashes,
        "effectiveMembersFromEvents": event_review.get("effectiveMembersFromEvents", {}),
        "privilegedCandidates": role_audit.get("privilegedCandidates", []),
        "deploymentRecordCandidates": deployment_record_candidates,
        "allMentions": mentions,
        "limitations": [
            "This appendix reconciles on-chain role events with repository records discoverable by local text search.",
            "It is not a substitute for a signed third-party audit or a provider-side ownership verification flow.",
            "Operator notebooks, private emails, and provider form submissions are outside this local repository scan unless committed as evidence reports.",
        ],
    }
|
|
|
|
|
|
def write_md(payload: dict[str, Any], path: Path) -> None:
    """Render *payload* as the Markdown appendix and write it to *path*."""
    out: list[str] = [
        "# cWUSDC Role Deployment Appendix",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Token: `{payload['token'].get('address')}`",
        f"- Source role audit: `{payload['roleAudit']}`",
        f"- On-chain role event count: `{payload['eventCount']}`",
        "",
        "## Effective Members From Events",
        "",
        "| Role | Members |",
        "|---|---|",
    ]
    for role, members in payload["effectiveMembersFromEvents"].items():
        rendered = ", ".join(members) if members else "none observed"
        out.append(f"| `{role}` | `{rendered}` |")
    out += ["", "## Privileged Candidates", "", "| Label | Address | Roles |", "|---|---|---|"]
    for candidate in payload["privilegedCandidates"]:
        out.append(f"| `{candidate['label']}` | `{candidate['address']}` | `{', '.join(candidate['roles'])}` |")
    out += ["", "## Role Event Transactions", "", "| Transaction | Etherscan |", "|---|---|"]
    for tx_hash in payload["transactionHashes"]:
        out.append(f"| `{tx_hash}` | {tx_url(tx_hash)} |")
    out += ["", "## Deployment Record Candidates", "", "| Path | Matches | Sample |", "|---|---|---|"]
    for record in payload["deploymentRecordCandidates"]:
        sample = "; ".join(f"L{entry['line']}: {entry['text']}" for entry in record["sampleLines"][:3])
        out.append(f"| `{record['path']}` | `{', '.join(record['matches'][:4])}` | {sample} |")
    out += ["", "## Limitations", ""]
    out.extend(f"- {item}" for item in payload["limitations"])
    path.write_text("\n".join(out) + "\n")
|
|
|
|
|
|
def main() -> int:
    """Generate both appendix artefacts and report where they were written."""
    payload = build()
    REPORT_JSON.parent.mkdir(parents=True, exist_ok=True)
    REPORT_JSON.write_text(json.dumps(payload, indent=2) + "\n")
    write_md(payload, REPORT_MD)
    for written in (REPORT_JSON, REPORT_MD):
        print(f"Wrote {written.relative_to(ROOT)}")
    return 0
|
|
|
|
|
|
if __name__ == "__main__":
    # Propagate main()'s integer exit status to the shell.
    raise SystemExit(main())
|