Compare commits
1 Commits
docs/explo
...
feat/gru-v
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c840c0f777 |
133
config/gru-v2-full-mesh-master-matrix.json
Normal file
133
config/gru-v2-full-mesh-master-matrix.json
Normal file
@@ -0,0 +1,133 @@
|
||||
{
|
||||
"statusDate": "2026-04-14",
|
||||
"namespaces": {
|
||||
"chain138": "c* V2",
|
||||
"allMainnet651940": "cA*",
|
||||
"publicConnectedNetworks": "cW*"
|
||||
},
|
||||
"executionPhases": [
|
||||
{
|
||||
"id": "P0",
|
||||
"namespace": "c* V2",
|
||||
"scope": "Chain 138 canonical hub and Wave 1"
|
||||
},
|
||||
{
|
||||
"id": "P1",
|
||||
"namespace": "c* V2",
|
||||
"scope": "Chain 138 cross-links and gas-native hubs"
|
||||
},
|
||||
{
|
||||
"id": "P2",
|
||||
"namespace": "cA*",
|
||||
"scope": "ALL Mainnet canonical hub and Wave 1"
|
||||
},
|
||||
{
|
||||
"id": "P3",
|
||||
"namespace": "cA*",
|
||||
"scope": "ALL Mainnet cross-links and gas-native hubs"
|
||||
},
|
||||
{
|
||||
"id": "P4",
|
||||
"namespace": "cW*",
|
||||
"scope": "Public cW stable hub, Wave 1, and gas-native mesh"
|
||||
},
|
||||
{
|
||||
"id": "P5",
|
||||
"namespace": "all",
|
||||
"scope": "Spot venue protocol completion"
|
||||
},
|
||||
{
|
||||
"id": "P6",
|
||||
"namespace": "all",
|
||||
"scope": "Aggregator, reserve, and market protocol completion"
|
||||
},
|
||||
{
|
||||
"id": "P7",
|
||||
"namespace": "all",
|
||||
"scope": "MEV completion"
|
||||
}
|
||||
],
|
||||
"protocolsRequired": [
|
||||
"DODO",
|
||||
"Uniswap v3",
|
||||
"Uniswap v2",
|
||||
"SushiSwap",
|
||||
"Curve",
|
||||
"Balancer",
|
||||
"1Inch",
|
||||
"Aave",
|
||||
"GMX",
|
||||
"dYdX"
|
||||
],
|
||||
"chain138CanonicalPools": [
|
||||
"cUSDT V2 / cUSDC V2",
|
||||
"cUSDT V2 / USDT",
|
||||
"cUSDC V2 / USDC",
|
||||
"cEURC V2 / cUSDC V2",
|
||||
"cEURT V2 / cUSDC V2",
|
||||
"cGBPC V2 / cUSDC V2",
|
||||
"cGBPT V2 / cUSDC V2",
|
||||
"cAUDC V2 / cUSDC V2",
|
||||
"cJPYC V2 / cUSDC V2",
|
||||
"cCHFC V2 / cUSDC V2",
|
||||
"cCADC V2 / cUSDC V2",
|
||||
"cXAUC V2 / cUSDC V2",
|
||||
"cXAUT V2 / cUSDC V2",
|
||||
"cEURC V2 / cEURT V2",
|
||||
"cGBPC V2 / cGBPT V2",
|
||||
"cXAUC V2 / cXAUT V2",
|
||||
"cETH / WETH",
|
||||
"cETH / cUSDC V2",
|
||||
"cETHL2 / cUSDC V2",
|
||||
"cBNB / cUSDC V2",
|
||||
"cPOL / cUSDC V2",
|
||||
"cAVAX / cUSDC V2",
|
||||
"cCRO / cUSDC V2",
|
||||
"cXDAI / cUSDC V2",
|
||||
"cCELO / cUSDC V2",
|
||||
"cWEMIX / cUSDC V2"
|
||||
],
|
||||
"allMainnetCanonicalPools": [
|
||||
"cAUSDT / cAUSDC",
|
||||
"cAUSDT / AUSDT",
|
||||
"cAUSDC / USDC",
|
||||
"cAEURC / cAUSDC",
|
||||
"cAEURT / cAUSDC",
|
||||
"cAGBPC / cAUSDC",
|
||||
"cAGBPT / cAUSDC",
|
||||
"cAAUDC / cAUSDC",
|
||||
"cAJPYC / cAUSDC",
|
||||
"cACHFC / cAUSDC",
|
||||
"cACADC / cAUSDC",
|
||||
"cAXAUC / cAUSDC",
|
||||
"cAXAUT / cAUSDC",
|
||||
"cAEURC / cAEURT",
|
||||
"cAGBPC / cAGBPT",
|
||||
"cAXAUC / cAXAUT",
|
||||
"cAETH / WETH",
|
||||
"cAETH / cAUSDC",
|
||||
"cAWALL / WALL",
|
||||
"cAWALL / cAUSDC"
|
||||
],
|
||||
"publicMeshTemplate": {
|
||||
"stableHub": [
|
||||
"cWUSDT / USDC",
|
||||
"cWUSDC / USDC",
|
||||
"cWUSDT / USDT",
|
||||
"cWUSDC / USDT",
|
||||
"cWUSDT / cWUSDC"
|
||||
],
|
||||
"wave1VsUsdc": [
|
||||
"cWEURC / USDC",
|
||||
"cWEURT / USDC",
|
||||
"cWGBPC / USDC",
|
||||
"cWGBPT / USDC",
|
||||
"cWAUDC / USDC",
|
||||
"cWJPYC / USDC",
|
||||
"cWCHFC / USDC",
|
||||
"cWCADC / USDC",
|
||||
"cWXAUC / USDC",
|
||||
"cWXAUT / USDC"
|
||||
]
|
||||
}
|
||||
}
|
||||
82
config/gru-v2-full-mesh-pool-tracker.schema.json
Normal file
82
config/gru-v2-full-mesh-pool-tracker.schema.json
Normal file
@@ -0,0 +1,82 @@
|
||||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://d-bis.org/schemas/gru-v2-full-mesh-pool-tracker.json",
|
||||
"title": "GRU v2 Full Mesh Pool Tracker",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"statusDate",
|
||||
"defaultFields",
|
||||
"chain138",
|
||||
"allMainnet651940",
|
||||
"publicMesh"
|
||||
],
|
||||
"properties": {
|
||||
"statusDate": {
|
||||
"type": "string",
|
||||
"pattern": "^\\d{4}-\\d{2}-\\d{2}$"
|
||||
},
|
||||
"defaultFields": {
|
||||
"type": "object",
|
||||
"required": ["status", "deployed", "seeded", "validated", "live", "mevReady"],
|
||||
"properties": {
|
||||
"status": {
|
||||
"type": "string",
|
||||
"enum": ["todo", "in_progress", "blocked", "done"]
|
||||
},
|
||||
"deployed": { "type": "boolean" },
|
||||
"seeded": { "type": "boolean" },
|
||||
"validated": { "type": "boolean" },
|
||||
"live": { "type": "boolean" },
|
||||
"mevReady": { "type": "boolean" }
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"chain138": { "$ref": "#/$defs/namedBucket" },
|
||||
"allMainnet651940": { "$ref": "#/$defs/namedBucket" },
|
||||
"publicMesh": {
|
||||
"type": "object",
|
||||
"minProperties": 1,
|
||||
"additionalProperties": { "$ref": "#/$defs/meshBucket" }
|
||||
}
|
||||
},
|
||||
"$defs": {
|
||||
"pairEntry": {
|
||||
"type": "object",
|
||||
"required": ["pair"],
|
||||
"properties": {
|
||||
"pair": { "type": "string", "minLength": 3 },
|
||||
"priority": { "type": "string", "minLength": 2 }
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"namedBucket": {
|
||||
"type": "object",
|
||||
"required": ["namespace", "entries"],
|
||||
"properties": {
|
||||
"namespace": { "type": "string", "minLength": 2 },
|
||||
"entries": {
|
||||
"type": "array",
|
||||
"items": { "$ref": "#/$defs/pairEntry" }
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"meshBucket": {
|
||||
"type": "object",
|
||||
"required": ["namespace", "entries"],
|
||||
"properties": {
|
||||
"namespace": { "type": "string", "minLength": 2 },
|
||||
"entries": {
|
||||
"type": "array",
|
||||
"items": { "type": "string", "minLength": 3 }
|
||||
},
|
||||
"statusOverride": {
|
||||
"type": "string",
|
||||
"enum": ["planned", "todo", "in_progress", "blocked", "done"]
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
@@ -59,32 +59,6 @@ pct exec 5000 -- bash -c 'cd /opt/blockscout && docker-compose up -d blockscout'
|
||||
|
||||
---
|
||||
|
||||
## Fix: Smart-Contract Verifier Sidecar Missing
|
||||
|
||||
**Symptom:** verification endpoints exist and submissions appear accepted, but deployed contracts remain `bytecode-only` and never promote into full source metadata.
|
||||
|
||||
**Root cause:** CT `5000` is running only `blockscout` and `postgres`, without the upstream `smart-contract-verifier` sidecar and without the required verifier wiring env:
|
||||
|
||||
- `MICROSERVICE_SC_VERIFIER_ENABLED=true`
|
||||
- `MICROSERVICE_SC_VERIFIER_TYPE=sc_verifier`
|
||||
- `MICROSERVICE_SC_VERIFIER_URL=http://smart-contract-verifier:8050/`
|
||||
|
||||
**Recommended fix:**
|
||||
|
||||
```bash
|
||||
bash scripts/deployment/ensure-blockscout-smart-contract-verifier-5000.sh --dry-run
|
||||
bash scripts/deployment/ensure-blockscout-smart-contract-verifier-5000.sh --apply
|
||||
```
|
||||
|
||||
The script:
|
||||
1. Backs up `/opt/blockscout/docker-compose.yml`
|
||||
2. Adds the upstream `smart-contract-verifier` sidecar
|
||||
3. Wires Blockscout to the sidecar with `MICROSERVICE_SC_VERIFIER_*`
|
||||
4. Restarts the stack cleanly
|
||||
5. Verifies `/api/v2/smart-contracts/verification/config`
|
||||
|
||||
---
|
||||
|
||||
## Fix: Migrate VM 5000 to thin5 (has free space)
|
||||
|
||||
**Run on Proxmox host r630-02 (192.168.11.12):**
|
||||
@@ -135,8 +109,6 @@ source smom-dbis-138/.env 2>/dev/null
|
||||
./scripts/verify/run-contract-verification-with-proxy.sh
|
||||
```
|
||||
|
||||
**Important:** native `Uniswap v2` / `SushiSwap` verification should pin the exact historical compiler version. The repo submitter now does that explicitly so Forge defaults do not silently downgrade the verification attempt.
|
||||
|
||||
---
|
||||
|
||||
## Forge Verification Compatibility
|
||||
|
||||
@@ -10,9 +10,26 @@ The full plan is only partially deployable today.
|
||||
|
||||
- Chain `138` canonical non-gas DODO PMM mesh: `script-backed` and live
|
||||
- Chain `138` pilot `Uniswap v3`, `Balancer`, `Curve`, and `1Inch` venues: `script-backed` and live
|
||||
- Chain `138` native `Uniswap v2` and `SushiSwap`: `script-backed`, deployed, seeded, and verified
|
||||
- Chain `138` deployed smart-contract publication now has a repo-backed orchestration lane:
|
||||
- targeted Blockscout submission wrappers for native `Uniswap v2` / `SushiSwap`
|
||||
- targeted Blockscout submission wrappers for the route execution stack and pilot venues
|
||||
- a generated publication report at [CHAIN138_DEPLOYED_SMART_CONTRACT_VERIFICATION_STATUS.md](/home/intlc/projects/proxmox/docs/04-configuration/CHAIN138_DEPLOYED_SMART_CONTRACT_VERIFICATION_STATUS.md:1)
|
||||
- current live publication status is explicit rather than implied:
|
||||
- `D3Oracle`, `D3Vault`, `DODOApprove`, and `DODOApproveProxy` are Blockscout-verified
|
||||
- `D3MMFactory` and `D3Proxy` still show bytecode-only metadata
|
||||
- the flash trio, native `Uniswap v2` / `SushiSwap`, and the route execution stack now have Blockscout verification submissions accepted, but the explorer API still exposes them as bytecode-only as of the latest report
|
||||
- repeated internal Blockscout polling after submission did not materialize source metadata yet, so the remaining work is now explorer-side verification materialization or manual explorer intervention rather than missing repo automation
|
||||
- Chain `138` supported spot / routing protocol publication is now live end to end:
|
||||
- token-aggregation planner capabilities expose `DODO`, `Uniswap v3`, `Uniswap v2`, `SushiSwap`, `Balancer`, `Curve`, and `1Inch`
|
||||
- MEV venue coverage exposes native `curve`, `dodo_d3mm`, `dodo_pmm`, `sushiswap`, `uniswap_v2`, and `uniswap_v3`
|
||||
- token-aggregation persistence for Chain `138` V2/Sushi pools is wired to the DBIS primary and writing into `liquidity_pools`
|
||||
- Chain `138` `Aave`: repo-backed deployment surface plus imported upstream native source now exist, but rollout remains blocked on real Chain `138` market deployment and canonical live addresses
|
||||
- Chain `138` `GMX`: imported upstream native source now exists, but rollout remains blocked on Chain `138` deployment/configuration work and canonical live addresses
|
||||
- Chain `138` `dYdX`: canonical inventory surface exists, but it remains blocked on a native protocol stack and live Chain `138` addresses
|
||||
- public `cW*` token and partial PMM rollout: `script-backed` in parts
|
||||
- ALL Mainnet `651940` full `cA*` mesh: `inventory-backed`, not fully deployer-backed
|
||||
- full protocol completion across `DODO`, `Uniswap v2`, `Uniswap v3`, `SushiSwap`, `Curve`, `Balancer`, `1Inch`, `Aave`, `GMX`, and `dYdX`: not fully deployer-backed
|
||||
- full protocol completion across `DODO`, `Uniswap v2`, `Uniswap v3`, `SushiSwap`, `Curve`, `Balancer`, `1Inch`, `Aave`, `GMX`, and `dYdX`: Chain `138` supported spot/routing set is complete; `Aave` and `GMX` now have imported upstream native source, while `dYdX` still remains an external native-stack gap
|
||||
|
||||
## Script-backed now
|
||||
|
||||
@@ -22,6 +39,25 @@ The full plan is only partially deployable today.
|
||||
| Chain `138` rollout wrapper | [scripts/deployment/run-all-next-steps-chain138.sh](/home/intlc/projects/proxmox/scripts/deployment/run-all-next-steps-chain138.sh:1) |
|
||||
| Chain `138` readiness validation | [scripts/verify/check-gru-v2-chain138-readiness.sh](/home/intlc/projects/proxmox/scripts/verify/check-gru-v2-chain138-readiness.sh:1) |
|
||||
| Chain `138` protocol venue deployer | [scripts/deployment/deploy-chain138-pilot-protocol-venues.sh](/home/intlc/projects/proxmox/scripts/deployment/deploy-chain138-pilot-protocol-venues.sh:1) |
|
||||
| Chain `138` native `Uniswap v2` deployer | [scripts/deployment/deploy-chain138-uniswap-v2-native.sh](/home/intlc/projects/proxmox/scripts/deployment/deploy-chain138-uniswap-v2-native.sh:1) |
|
||||
| Chain `138` native `SushiSwap` deployer | [scripts/deployment/deploy-chain138-sushiswap-native.sh](/home/intlc/projects/proxmox/scripts/deployment/deploy-chain138-sushiswap-native.sh:1) |
|
||||
| Chain `138` native V2 venue verification | [scripts/verify/check-chain138-native-v2-venues.sh](/home/intlc/projects/proxmox/scripts/verify/check-chain138-native-v2-venues.sh:1) |
|
||||
| Chain `138` native V2 Blockscout publication | [scripts/verify/verify-chain138-native-v2-blockscout.sh](/home/intlc/projects/proxmox/scripts/verify/verify-chain138-native-v2-blockscout.sh:1) |
|
||||
| Chain `138` route execution stack Blockscout publication | [scripts/verify/verify-chain138-route-execution-stack-blockscout.sh](/home/intlc/projects/proxmox/scripts/verify/verify-chain138-route-execution-stack-blockscout.sh:1) |
|
||||
| Chain `138` deployed-contract publication report | [scripts/verify/check-chain138-deployed-contract-publication.py](/home/intlc/projects/proxmox/scripts/verify/check-chain138-deployed-contract-publication.py:1) |
|
||||
| Chain `138` publication orchestrator | [scripts/deployment/publish-chain138-deployed-smart-contracts.sh](/home/intlc/projects/proxmox/scripts/deployment/publish-chain138-deployed-smart-contracts.sh:1) |
|
||||
| Chain `138` Aave execution stack deployer | [scripts/deployment/deploy-chain138-aave-v3-execution-stack.sh](/home/intlc/projects/proxmox/scripts/deployment/deploy-chain138-aave-v3-execution-stack.sh:1) |
|
||||
| Chain `138` Aave quote-push receiver deployer | [scripts/deployment/deploy-chain138-aave-quote-push-receiver.sh](/home/intlc/projects/proxmox/scripts/deployment/deploy-chain138-aave-quote-push-receiver.sh:1) |
|
||||
| Chain `138` remaining protocol env verifier | [scripts/verify/check-chain138-remaining-protocol-env.sh](/home/intlc/projects/proxmox/scripts/verify/check-chain138-remaining-protocol-env.sh:1) |
|
||||
| Chain `138` Aave rollout readiness verifier | [scripts/verify/check-chain138-aave-rollout-readiness.sh](/home/intlc/projects/proxmox/scripts/verify/check-chain138-aave-rollout-readiness.sh:1) |
|
||||
| Chain `138` Aave runtime publication helper | [scripts/deployment/publish-chain138-aave-runtime-from-artifacts.sh](/home/intlc/projects/proxmox/scripts/deployment/publish-chain138-aave-runtime-from-artifacts.sh:1) |
|
||||
| Chain `138` Aave blocker-removal worksheet | [CHAIN138_AAVE_BLOCKER_REMOVAL_WORKSHEET.md](/home/intlc/projects/proxmox/docs/04-configuration/CHAIN138_AAVE_BLOCKER_REMOVAL_WORKSHEET.md:1) |
|
||||
| Chain `138` Aave rollout manifest template | [chain138-aave-rollout-manifest.example.json](/home/intlc/projects/proxmox/config/chain138-aave-rollout-manifest.example.json:1) |
|
||||
| Chain `138` Aave manifest apply helper | [scripts/deployment/apply-chain138-aave-manifest.sh](/home/intlc/projects/proxmox/scripts/deployment/apply-chain138-aave-manifest.sh:1) |
|
||||
| Imported upstream native Aave source | [vendor/chain138-protocols/aave-v3-origin](</home/intlc/projects/proxmox/vendor/chain138-protocols/aave-v3-origin>) |
|
||||
| Imported upstream native GMX source | [vendor/chain138-protocols/gmx-synthetics](</home/intlc/projects/proxmox/vendor/chain138-protocols/gmx-synthetics>) |
|
||||
| Chain `138` native Aave V3 Origin scaffold | [deploy-chain138-aave-v3-origin-market.sh](/home/intlc/projects/proxmox/scripts/deployment/deploy-chain138-aave-v3-origin-market.sh:1) |
|
||||
| Chain `138` native GMX synthetics scaffold | [deploy-chain138-gmx-synthetics-core.sh](/home/intlc/projects/proxmox/scripts/deployment/deploy-chain138-gmx-synthetics-core.sh:1) |
|
||||
| Chain `138` canonical PMM pool seeding | [smom-dbis-138/scripts/deployment/seed-chain138-canonical-pmm-pools.sh](/home/intlc/projects/proxmox/smom-dbis-138/scripts/deployment/seed-chain138-canonical-pmm-pools.sh:1) |
|
||||
| GRU mesh planning and live reconciliation | [scripts/verify/reconcile-gru-v2-full-mesh-status.py](/home/intlc/projects/proxmox/scripts/verify/reconcile-gru-v2-full-mesh-status.py:1) |
|
||||
| ALL Mainnet `cA*` token deployment wrapper | [scripts/deployment/deploy-allmainnet-ca-tokens.sh](/home/intlc/projects/proxmox/scripts/deployment/deploy-allmainnet-ca-tokens.sh:1) |
|
||||
@@ -33,6 +69,9 @@ The full plan is only partially deployable today.
|
||||
|---|---|---|
|
||||
| ALL Mainnet token inventory | [docs/11-references/ALL_MAINNET_TOKEN_ADDRESSES.md](/home/intlc/projects/proxmox/docs/11-references/ALL_MAINNET_TOKEN_ADDRESSES.md:1) | still needs final deployed `cA*` addresses to complete live inventory |
|
||||
| `651940` planned full mesh | [config/gru-v2-full-mesh-pool-tracker.json](/home/intlc/projects/proxmox/config/gru-v2-full-mesh-pool-tracker.json:1) | still needs final live pool addresses and liquidity |
|
||||
| Chain `138` remaining native protocol inventory | [config/chain138-remaining-protocol-surface.json](/home/intlc/projects/proxmox/config/chain138-remaining-protocol-surface.json:1) | `Aave` and `GMX` are now source-backed, but still need live Chain `138` deployment outputs and canonical addresses; `dYdX` still needs both source and live addresses |
|
||||
| Chain `138` remaining protocol discovery evidence | [CHAIN138_REMAINING_PROTOCOL_DISCOVERY_REPORT.md](/home/intlc/projects/proxmox/docs/04-configuration/CHAIN138_REMAINING_PROTOCOL_DISCOVERY_REPORT.md:1) | evidence pass found no discoverable canonical live addresses for Aave / GMX / dYdX on Chain `138` |
|
||||
| Chain `138` native protocol stack gap report | [CHAIN138_NATIVE_PROTOCOL_STACK_GAP_REPORT.md](/home/intlc/projects/proxmox/docs/04-configuration/CHAIN138_NATIVE_PROTOCOL_STACK_GAP_REPORT.md:1) | confirms the repo does not include full native Aave / GMX / dYdX deployment stacks |
|
||||
| public/non-public protocol target state | [docs/04-configuration/GRU_V2_PROTOCOL_COMPLETION_MATRIX.md](/home/intlc/projects/proxmox/docs/04-configuration/GRU_V2_PROTOCOL_COMPLETION_MATRIX.md:1) | no end-to-end deployer coverage for all protocol cells |
|
||||
|
||||
## External blockers
|
||||
@@ -42,6 +81,7 @@ The full plan is only partially deployable today.
|
||||
| Missing live `651940` venue addresses and integrations for the non-DODO protocol set | canonical env surface now exists, but the live addresses still need to be supplied |
|
||||
| Live liquidity and partner venue dependencies | even with scripts, final pool rows cannot be marked `live` without real seeding and venue support |
|
||||
| Chain `138` gas-native runtime verifier / vault wiring | the gas family rows remain blocked until real `CW_GAS_*_CHAIN138` addresses are supplied from deployed contracts |
|
||||
| Native `Aave`, `GMX`, and `dYdX` protocol programs on Chain `138` | `Aave` now has repo-backed deployment wrappers plus imported upstream source but still needs real Chain `138` market deployment outputs; `GMX` now has imported upstream source but still needs a Chain `138` deployment program and live addresses; `dYdX` still needs both a native stack and canonical live addresses |
|
||||
|
||||
## New operator entrypoints
|
||||
|
||||
@@ -51,3 +91,5 @@ The full plan is only partially deployable today.
|
||||
| [scripts/verify/check-gru-v2-full-deployment-implementation.py](/home/intlc/projects/proxmox/scripts/verify/check-gru-v2-full-deployment-implementation.py:1) | verify which plan segments are actually implemented in-repo |
|
||||
| [scripts/verify/check-gru-v2-core-protocol-blockers.sh](/home/intlc/projects/proxmox/scripts/verify/check-gru-v2-core-protocol-blockers.sh:1) | verify the repo-side blockers are closed and isolate only the remaining external dependencies |
|
||||
| [scripts/verify/check-allmainnet-protocol-env.sh](/home/intlc/projects/proxmox/scripts/verify/check-allmainnet-protocol-env.sh:1) | inventory the remaining ALL Mainnet protocol env gaps so missing venue coverage is explicit |
|
||||
| [scripts/verify/check-chain138-remaining-protocol-env.sh](/home/intlc/projects/proxmox/scripts/verify/check-chain138-remaining-protocol-env.sh:1) | inventory the remaining Chain `138` Aave / GMX / dYdX protocol env gaps and verify bytecode when addresses are supplied |
|
||||
| [scripts/verify/check-chain138-native-protocol-stack-source.sh](/home/intlc/projects/proxmox/scripts/verify/check-chain138-native-protocol-stack-source.sh:1) | prove whether the repo actually contains the native source families needed to deploy Aave / GMX / dYdX on Chain `138` |
|
||||
|
||||
106
docs/04-configuration/GRU_V2_FULL_MESH_EXECUTION_CHECKLIST.md
Normal file
106
docs/04-configuration/GRU_V2_FULL_MESH_EXECUTION_CHECKLIST.md
Normal file
@@ -0,0 +1,106 @@
|
||||
# GRU v2 Full Mesh Execution Checklist
|
||||
|
||||
This checklist turns the master matrix into execution order.
|
||||
|
||||
Status values:
|
||||
|
||||
- `todo`
|
||||
- `in_progress`
|
||||
- `blocked`
|
||||
- `done`
|
||||
|
||||
Execution classes:
|
||||
|
||||
- `script-backed`
|
||||
- `inventory-backed`
|
||||
- `external-blocked`
|
||||
|
||||
Current truth:
|
||||
|
||||
- `138` DODO PMM work is `script-backed`
|
||||
- `651940` full `cA*` deployment is currently `inventory-backed` and `external-blocked`
|
||||
- protocol-complete rollout across both namespaces is not fully deployer-backed in this repo yet
|
||||
|
||||
## 1. Chain 138 Canonical Pools
|
||||
|
||||
| Status | Namespace | Pair / Venue | Priority | Notes |
|
||||
|---|---|---|---|---|
|
||||
| `todo` | `c* V2` | `cUSDT V2 / cUSDC V2` | `P0` | canonical USD hub |
|
||||
| `todo` | `c* V2` | `cUSDT V2 / USDT` | `P0` | native bridge rail |
|
||||
| `todo` | `c* V2` | `cUSDC V2 / USDC` | `P0` | native bridge rail |
|
||||
| `todo` | `c* V2` | `cEURC V2 / cUSDC V2` | `P0` | Wave 1 |
|
||||
| `todo` | `c* V2` | `cEURT V2 / cUSDC V2` | `P0` | Wave 1 |
|
||||
| `todo` | `c* V2` | `cGBPC V2 / cUSDC V2` | `P0` | Wave 1 |
|
||||
| `todo` | `c* V2` | `cGBPT V2 / cUSDC V2` | `P0` | Wave 1 |
|
||||
| `todo` | `c* V2` | `cAUDC V2 / cUSDC V2` | `P0` | Wave 1 |
|
||||
| `todo` | `c* V2` | `cJPYC V2 / cUSDC V2` | `P0` | Wave 1 |
|
||||
| `todo` | `c* V2` | `cCHFC V2 / cUSDC V2` | `P0` | Wave 1 |
|
||||
| `todo` | `c* V2` | `cCADC V2 / cUSDC V2` | `P0` | Wave 1 |
|
||||
| `todo` | `c* V2` | `cXAUC V2 / cUSDC V2` | `P0` | commodity |
|
||||
| `todo` | `c* V2` | `cXAUT V2 / cUSDC V2` | `P0` | commodity |
|
||||
| `todo` | `c* V2` | `cEURC V2 / cEURT V2` | `P1` | cross-link |
|
||||
| `todo` | `c* V2` | `cGBPC V2 / cGBPT V2` | `P1` | cross-link |
|
||||
| `todo` | `c* V2` | `cXAUC V2 / cXAUT V2` | `P1` | cross-link |
|
||||
|
||||
## 2. ALL Mainnet Canonical Pools
|
||||
|
||||
| Status | Namespace | Pair / Venue | Priority | Notes |
|
||||
|---|---|---|---|---|
|
||||
| `todo` | `cA*` | `cAUSDT / cAUSDC` | `P0` | canonical ALL USD hub |
|
||||
| `todo` | `cA*` | `cAUSDT / AUSDT` | `P0` | native ALL rail |
|
||||
| `todo` | `cA*` | `cAUSDC / USDC` | `P0` | native ALL rail |
|
||||
| `todo` | `cA*` | `cAEURC / cAUSDC` | `P0` | Wave 1 |
|
||||
| `todo` | `cA*` | `cAEURT / cAUSDC` | `P0` | Wave 1 |
|
||||
| `todo` | `cA*` | `cAGBPC / cAUSDC` | `P0` | Wave 1 |
|
||||
| `todo` | `cA*` | `cAGBPT / cAUSDC` | `P0` | Wave 1 |
|
||||
| `todo` | `cA*` | `cAAUDC / cAUSDC` | `P0` | Wave 1 |
|
||||
| `todo` | `cA*` | `cAJPYC / cAUSDC` | `P0` | Wave 1 |
|
||||
| `todo` | `cA*` | `cACHFC / cAUSDC` | `P0` | Wave 1 |
|
||||
| `todo` | `cA*` | `cACADC / cAUSDC` | `P0` | Wave 1 |
|
||||
| `todo` | `cA*` | `cAXAUC / cAUSDC` | `P0` | commodity |
|
||||
| `todo` | `cA*` | `cAXAUT / cAUSDC` | `P0` | commodity |
|
||||
| `todo` | `cA*` | `cAEURC / cAEURT` | `P1` | cross-link |
|
||||
| `todo` | `cA*` | `cAGBPC / cAGBPT` | `P1` | cross-link |
|
||||
| `todo` | `cA*` | `cAXAUC / cAXAUT` | `P1` | cross-link |
|
||||
|
||||
## 3. Public cW Mesh
|
||||
|
||||
| Status | Chain | Required work | Priority | Notes |
|
||||
|---|---|---|---|---|
|
||||
| `todo` | `1` | full stable hub + Wave 1 + gas-native lanes | `P2` | first public reference mesh |
|
||||
| `todo` | `10` | full stable hub + Wave 1 + gas-native lanes | `P3` | ETH L2 |
|
||||
| `todo` | `8453` | full stable hub + Wave 1 + gas-native lanes | `P3` | ETH L2 |
|
||||
| `todo` | `42161` | full stable hub + Wave 1 + gas-native lanes | `P3` | ETH L2 |
|
||||
| `todo` | `137` | full stable hub + Wave 1 + gas-native lanes | `P4` | major public chain |
|
||||
| `todo` | `56` | full stable hub + Wave 1 + gas-native lanes | `P4` | major public chain |
|
||||
| `todo` | `100` | full stable hub + Wave 1 + gas-native lanes | `P4` | major public chain |
|
||||
| `todo` | `43114` | full stable hub + Wave 1 + gas-native lanes | `P4` | major public chain |
|
||||
| `todo` | `42220` | full stable hub + Wave 1 + gas-native lanes | `P4` | major public chain |
|
||||
| `todo` | `25` | full stable hub + Wave 1 + gas-native lanes | `P4` | major public chain |
|
||||
| `todo` | `1111` | publish mirrors, then deploy full mesh | `P5` | token family still incomplete |
|
||||
|
||||
## 4. Protocol Completion
|
||||
|
||||
| Status | Protocol | Namespace scope | Completion requirement |
|
||||
|---|---|---|---|
|
||||
| `todo` | `DODO` | all namespaces | primary PMM mesh complete |
|
||||
| `todo` | `Uniswap v3` | all namespaces | live reference and execution lanes |
|
||||
| `todo` | `Uniswap v2` | all namespaces | fallback spot lanes where applicable |
|
||||
| `todo` | `SushiSwap` | all namespaces | secondary AMM lanes |
|
||||
| `todo` | `Curve` | all namespaces | stable and basket lanes |
|
||||
| `todo` | `Balancer` | all namespaces | weighted and stable basket lanes |
|
||||
| `todo` | `1Inch` | all namespaces | routing/execution integration |
|
||||
| `todo` | `Aave` | all namespaces | reserve + flash-liquidity integration |
|
||||
| `todo` | `GMX` | all namespaces | market integration or unsupported-by-protocol closure |
|
||||
| `todo` | `dYdX` | all namespaces | market integration or unsupported-by-protocol closure |
|
||||
|
||||
## 5. MEV Completion
|
||||
|
||||
| Status | Requirement | Exit condition |
|
||||
|---|---|---|
|
||||
| `todo` | discovery | pools/venues visible in canonical MEV inventory |
|
||||
| `todo` | quoting | exact protocol quote path works |
|
||||
| `todo` | simulation | route simulation matches execution semantics |
|
||||
| `todo` | execution | execution adapter succeeds |
|
||||
| `todo` | settlement | settlement and attribution persist cleanly |
|
||||
| `todo` | observability | health / infra / freshness / coverage surfaces green |
|
||||
@@ -40,7 +40,7 @@ Implementation vocabulary:
|
||||
|
||||
| Namespace | DODO | Uni v3 | Uni v2 | Sushi | Curve | Balancer | 1Inch | Aave | GMX | dYdX |
|
||||
|---|---|---|---|---|---|---|---|---|---|---|
|
||||
| `138 c* V2` | `done`, `script-backed` | `done`, `script-backed` | `todo`, `external-blocked` | `todo`, `external-blocked` | `done`, `script-backed` | `done`, `script-backed` | `done`, `script-backed` | `todo`, `external-blocked` | `todo`, `external-blocked` | `todo`, `external-blocked` |
|
||||
| `138 c* V2` | `done`, `script-backed` | `done`, `script-backed` | `done`, `script-backed` | `done`, `script-backed` | `done`, `script-backed` | `done`, `script-backed` | `done`, `script-backed` | `blocked`, `script-backed` | `blocked`, `inventory-backed` | `blocked`, `inventory-backed` |
|
||||
| `651940 cA*` | `todo`, `inventory-backed` | `todo`, `external-blocked` | `todo`, `external-blocked` | `todo`, `external-blocked` | `todo`, `external-blocked` | `todo`, `external-blocked` | `todo`, `external-blocked` | `todo`, `external-blocked` | `todo`, `external-blocked` | `todo`, `external-blocked` |
|
||||
| public `cW*` | `in_progress`, `script-backed` | `in_progress`, `inventory-backed` | `in_progress`, `inventory-backed` | `todo`, `external-blocked` | `todo`, `external-blocked` | `todo`, `external-blocked` | `in_progress`, `inventory-backed` | `todo`, `external-blocked` | `todo`, `external-blocked` | `todo`, `external-blocked` |
|
||||
|
||||
@@ -54,4 +54,16 @@ For a protocol cell to move to `done`, all of the following should be true:
|
||||
4. `live`
|
||||
5. `MEV-ready` where the protocol participates in MEV routing/execution
|
||||
|
||||
For `Aave`, `GMX`, and `dYdX`, if the protocol does not support the family or chain natively, the cell should be closed as `unsupported_by_protocol` rather than left ambiguous.
|
||||
For `Aave`, `GMX`, and `dYdX`, close a cell as `unsupported_by_protocol` only when the namespace is intentionally out of scope. If the protocol remains a target but lacks live contracts or addresses, keep it `blocked`.
|
||||
|
||||
## 4. Chain 138 closure evidence
|
||||
|
||||
These `138 c* V2` cells are closed with explicit evidence rather than left as open `todo` rows:
|
||||
|
||||
| Protocol | Closed status | Evidence |
|
||||
|---|---|---|
|
||||
| `Uniswap v2` | `done`, `script-backed` | native Chain `138` factory `0x0C30F6e67Ab3667fCc2f5CEA8e274ef1FB920279`, router `0x3019A7fDc76ba7F64F18d78e66842760037ee638`, and seeded pairs (`WETH/USDT`, `WETH/USDC`, `cUSDT/cUSDC`) are now published in `config/smart-contracts-master.json`, `.env.master.example`, and verified by [check-chain138-native-v2-venues.sh](/home/intlc/projects/proxmox/scripts/verify/check-chain138-native-v2-venues.sh:1) |
|
||||
| `SushiSwap` | `done`, `script-backed` | native Chain `138` factory `0x2871207ff0d56089D70c0134d33f1291B6Fce0BE`, router `0xB37b93D38559f53b62ab020A14919f2630a1aE34`, and seeded pairs (`WETH/USDT`, `WETH/USDC`, `cUSDT/cUSDC`) are now published in `config/smart-contracts-master.json`, `.env.master.example`, and verified by [check-chain138-native-v2-venues.sh](/home/intlc/projects/proxmox/scripts/verify/check-chain138-native-v2-venues.sh:1) |
|
||||
| `Aave` | `blocked`, `source-backed`, `external-blocked` | the repo now contains a Chain `138` native surface inventory, a hard env/bytecode checker, a Chain `138` wrapper for the Aave-backed MEV execution stack, a Chain `138` quote-push receiver deployer, and imported upstream source from `aave-dao/aave-v3-origin`, but no canonical Chain `138` Aave pool/provider/data-provider addresses or native market rollout are published yet |
|
||||
| `GMX` | `blocked`, `source-backed`, `external-blocked` | a canonical Chain `138` inventory surface now exists and the official upstream source from `gmx-io/gmx-synthetics` is now imported, but no Chain `138` GMX deployment outputs, live addresses, registry wiring, planner capabilities, or MEV venue coverage are published yet |
|
||||
| `dYdX` | `blocked`, `inventory-backed`, `external-blocked` | a canonical Chain `138` inventory surface now exists, but no Chain `138` dYdX market / data-provider / exchange addresses or vendored native deployment stack exist in canonical env, registry, planner capabilities, or MEV venue coverage |
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Contract Verification And Publication Matrix (All Networks)
|
||||
|
||||
**Generated:** 2026-04-16T19:20:09.634Z
|
||||
**Generated:** 2026-04-11T21:10:30.092Z
|
||||
**Authoritative sources:** `config/smart-contracts-master.json`, `cross-chain-pmm-lps/config/deployment-status.json`
|
||||
|
||||
This matrix is the canonical repo-level inventory for **what still needs explorer verification and publication coverage across every network currently tracked in the workspace**.
|
||||
@@ -15,18 +15,18 @@ This matrix is the canonical repo-level inventory for **what still needs explore
|
||||
|
||||
| Chain ID | Chain | Total Entries | Canonical Contracts | cW / Gas Mirrors | PMM Pools | Explorer |
|
||||
| --- | --- | ---: | ---: | ---: | ---: | --- |
|
||||
| 1 | Ethereum Mainnet | 40 | 3 | 14 | 17 | https://etherscan.io |
|
||||
| 10 | Optimism | 31 | 0 | 14 | 12 | https://optimistic.etherscan.io |
|
||||
| 25 | Cronos | 29 | 0 | 14 | 12 | https://cronoscan.com |
|
||||
| 56 | BSC | 28 | 0 | 16 | 10 | https://bscscan.com |
|
||||
| 100 | Gnosis | 28 | 0 | 14 | 10 | https://gnosisscan.io |
|
||||
| 137 | Polygon | 32 | 0 | 15 | 12 | https://polygonscan.com |
|
||||
| 1 | Ethereum Mainnet | 36 | 3 | 14 | 13 | https://etherscan.io |
|
||||
| 10 | Optimism | 21 | 0 | 14 | 2 | https://optimistic.etherscan.io |
|
||||
| 25 | Cronos | 19 | 0 | 14 | 2 | https://cronoscan.com |
|
||||
| 56 | BSC | 18 | 0 | 16 | 0 | https://bscscan.com |
|
||||
| 100 | Gnosis | 18 | 0 | 14 | 0 | https://gnosisscan.io |
|
||||
| 137 | Polygon | 22 | 0 | 15 | 2 | https://polygonscan.com |
|
||||
| 138 | Chain 138 | 115 | 115 | 0 | 0 | https://blockscout.defi-oracle.io |
|
||||
| 1111 | Wemix | 4 | 0 | 2 | 0 | https://explorer.wemix.com |
|
||||
| 8453 | Base | 29 | 0 | 14 | 10 | https://basescan.org |
|
||||
| 42161 | Arbitrum | 29 | 0 | 14 | 10 | https://arbiscan.io |
|
||||
| 42220 | Celo | 29 | 0 | 16 | 10 | https://celoscan.io |
|
||||
| 43114 | Avalanche | 29 | 0 | 16 | 10 | https://snowtrace.io |
|
||||
| 8453 | Base | 19 | 0 | 14 | 0 | https://basescan.org |
|
||||
| 42161 | Arbitrum | 19 | 0 | 14 | 0 | https://arbiscan.io |
|
||||
| 42220 | Celo | 19 | 0 | 16 | 0 | https://celoscan.io |
|
||||
| 43114 | Avalanche | 19 | 0 | 16 | 0 | https://snowtrace.io |
|
||||
|
||||
## Required operator path
|
||||
|
||||
@@ -71,17 +71,13 @@ The JSON report in `reports/status/contract_verification_publish_matrix.json` co
|
||||
| 1 | Ethereum Mainnet | pmm_pool | cWCADC/USDC | `0xE0F35b5736FDd0a2F4B618621b0A08F8D8A3f92A` | etherscan | inventory-only | https://etherscan.io | pending | pending |
|
||||
| 1 | Ethereum Mainnet | pmm_pool | cWCHFC/USDC | `0x776Ca556deD3245984F504F4bef8Eeec55C50190` | etherscan | inventory-only | https://etherscan.io | pending | pending |
|
||||
| 1 | Ethereum Mainnet | pmm_pool | cWEURC/USDC | `0x0bC750F9c6DbDcd76B205695A356491b1B9ef098` | etherscan | inventory-only | https://etherscan.io | pending | pending |
|
||||
| 1 | Ethereum Mainnet | pmm_pool | cWEURT/USDC | `0x9cF3DeDAaC0984c530801b9b4881c8f99Bb329c3` | etherscan | inventory-only | https://etherscan.io | pending | pending |
|
||||
| 1 | Ethereum Mainnet | pmm_pool | cWGBPC/USDC | `0x5488042dF882893a3e7074453E2005CaDE4101b0` | etherscan | inventory-only | https://etherscan.io | pending | pending |
|
||||
| 1 | Ethereum Mainnet | pmm_pool | cWGBPT/USDC | `0xA42566bb730AD6D551Db32d50c0877132fc07c32` | etherscan | inventory-only | https://etherscan.io | pending | pending |
|
||||
| 1 | Ethereum Mainnet | pmm_pool | cWJPYC/USDC | `0x8A4187dF0A8FE855cC53A4F7B2D8346588Ee9794` | etherscan | inventory-only | https://etherscan.io | pending | pending |
|
||||
| 1 | Ethereum Mainnet | pmm_pool | cWUSDC/USDC | `0x69776fc607e9edA8042e320e7e43f54d06c68f0E` | etherscan | inventory-only | https://etherscan.io | pending | pending |
|
||||
| 1 | Ethereum Mainnet | pmm_pool | cWUSDC/USDT | `0xCC0fd27A40775c9AfcD2BBd3f7c902b0192c247A` | etherscan | inventory-only | https://etherscan.io | pending | pending |
|
||||
| 1 | Ethereum Mainnet | pmm_pool | cWUSDT/cWUSDC | `0xe944b7Cb012A0820c07f54D51e92f0e1C74168DB` | etherscan | inventory-only | https://etherscan.io | pending | pending |
|
||||
| 1 | Ethereum Mainnet | pmm_pool | cWUSDT/USDC | `0x27f3aE7EE71Be3d77bAf17d4435cF8B895DD25D2` | etherscan | inventory-only | https://etherscan.io | pending | pending |
|
||||
| 1 | Ethereum Mainnet | pmm_pool | cWUSDT/USDT | `0x79156F6B7bf71a1B72D78189B540A89A6C13F6FC` | etherscan | inventory-only | https://etherscan.io | pending | pending |
|
||||
| 1 | Ethereum Mainnet | pmm_pool | cWXAUC/USDC | `0xf6470219ce7749f8860dEABe9c347Ef2c1075E08` | etherscan | inventory-only | https://etherscan.io | pending | pending |
|
||||
| 1 | Ethereum Mainnet | pmm_pool | cWXAUT/USDC | `0x1D51a38C924382287d770AbB61deb9C39ACa96E9` | etherscan | inventory-only | https://etherscan.io | pending | pending |
|
||||
| 1 | Ethereum Mainnet | pmm_pool_volatile | cWUSDC/TRUU | `0x9A632F35078b6A4A9bf27806Bb7aFfAA2F16C846` | etherscan | inventory-only | https://etherscan.io | pending | pending |
|
||||
| 1 | Ethereum Mainnet | pmm_pool_volatile | cWUSDT/TRUU | `0x508E5e80B66204b8CD9869323Fdd3A289ea50993` | etherscan | inventory-only | https://etherscan.io | pending | pending |
|
||||
| 1 | Ethereum Mainnet | reference_venue | balancer:cWETH/USDC | `0xba11000000000000000000000000000000000001` | etherscan | inventory-only | https://etherscan.io | pending | pending |
|
||||
@@ -103,18 +99,8 @@ The JSON report in `reports/status/contract_verification_publish_matrix.json` co
|
||||
| 10 | Optimism | cw_token | cWXAUC | `0xddc4063f770f7c49d00b5a10fb552e922aa39b2c` | etherscan-family | inventory-only | https://optimistic.etherscan.io | pending | pending |
|
||||
| 10 | Optimism | cw_token | cWXAUT | `0x145e8e8c49b6a021969dd9d2c01c8fea44374f61` | etherscan-family | inventory-only | https://optimistic.etherscan.io | pending | pending |
|
||||
| 10 | Optimism | gas_mirror | cWETHL2 | `0x95007ec50d0766162f77848edf7bdc4eba147fb4` | etherscan-family | inventory-only | https://optimistic.etherscan.io | pending | pending |
|
||||
| 10 | Optimism | pmm_pool | cWAUDC/USDC | `0x4B452800f6cD50326F14a6f089f4bB04e8079250` | etherscan-family | inventory-only | https://optimistic.etherscan.io | pending | pending |
|
||||
| 10 | Optimism | pmm_pool | cWCADC/USDC | `0x19e1fdd037F1651AcEE11c5A5Aa246b85FA63f8e` | etherscan-family | inventory-only | https://optimistic.etherscan.io | pending | pending |
|
||||
| 10 | Optimism | pmm_pool | cWCHFC/USDC | `0xA97D7dfB93CBf0C10243931d93FFEda745222ec6` | etherscan-family | inventory-only | https://optimistic.etherscan.io | pending | pending |
|
||||
| 10 | Optimism | pmm_pool | cWEURC/USDC | `0x78C6aC6D7CbFcd85A3291D656F2154979a92c00B` | etherscan-family | inventory-only | https://optimistic.etherscan.io | pending | pending |
|
||||
| 10 | Optimism | pmm_pool | cWEURT/USDC | `0x631DfC86A03cB05319d7165198f8099dacF78e56` | etherscan-family | inventory-only | https://optimistic.etherscan.io | pending | pending |
|
||||
| 10 | Optimism | pmm_pool | cWGBPC/USDC | `0x79BE2b70A94E954d095f9F537FAf0741D15dfA31` | etherscan-family | inventory-only | https://optimistic.etherscan.io | pending | pending |
|
||||
| 10 | Optimism | pmm_pool | cWGBPT/USDC | `0x5D6b5d7CA165c39c350083255774DdBf1c858e12` | etherscan-family | inventory-only | https://optimistic.etherscan.io | pending | pending |
|
||||
| 10 | Optimism | pmm_pool | cWJPYC/USDC | `0x68C1c8a945ddCF3482b73aC09b6B5D4177D564AF` | etherscan-family | inventory-only | https://optimistic.etherscan.io | pending | pending |
|
||||
| 10 | Optimism | pmm_pool | cWUSDC/USDC | `0x8F1038dE06d799a30D16d8B0b0ADEe629e7d4547` | etherscan-family | inventory-only | https://optimistic.etherscan.io | pending | pending |
|
||||
| 10 | Optimism | pmm_pool | cWUSDT/USDT | `0xFCB0b0Ac36d67EDBA91100c75C27De945357CD62` | etherscan-family | inventory-only | https://optimistic.etherscan.io | pending | pending |
|
||||
| 10 | Optimism | pmm_pool | cWXAUC/USDC | `0xCE25c324e41049D75abfB81c23257984A2A97a79` | etherscan-family | inventory-only | https://optimistic.etherscan.io | pending | pending |
|
||||
| 10 | Optimism | pmm_pool | cWXAUT/USDC | `0xb3Ee650019d7F756ce0F79b69614Fa2761871775` | etherscan-family | inventory-only | https://optimistic.etherscan.io | pending | pending |
|
||||
| 10 | Optimism | reference_venue | balancer:cWETHL2/USDC | `0xba2100000000000000000000000000000000000a` | etherscan-family | inventory-only | https://optimistic.etherscan.io | pending | pending |
|
||||
| 10 | Optimism | reference_venue | curve:cWETHL2/USDC | `0xc72100000000000000000000000000000000000a` | etherscan-family | inventory-only | https://optimistic.etherscan.io | pending | pending |
|
||||
| 10 | Optimism | reference_venue | uniswap_v3:cWETHL2/WETH | `0x712100000000000000000000000000000000000a` | etherscan-family | inventory-only | https://optimistic.etherscan.io | pending | pending |
|
||||
@@ -127,6 +113,20 @@ The JSON report in `reports/status/contract_verification_publish_matrix.json` co
|
||||
| 25 | Cronos | cw_token | cWEURC | `0x7574d37F42528B47c88962931e48FC61608a4050` | etherscan-family | inventory-only | https://cronoscan.com | pending | pending |
|
||||
| 25 | Cronos | cw_token | cWEURT | `0x9f833b4f1012F52eb3317b09922a79c6EdFca77D` | etherscan-family | inventory-only | https://cronoscan.com | pending | pending |
|
||||
| 25 | Cronos | cw_token | cWGBPC | `0xe5c65A76A541368d3061fe9E7A2140cABB903dbF` | etherscan-family | inventory-only | https://cronoscan.com | pending | pending |
|
||||
| 25 | Cronos | cw_token | cWGBPT | `0xBb58fa16bAc8E789f09C14243adEE6480D8213A2` | etherscan-family | inventory-only | https://cronoscan.com | pending | pending |
|
||||
| 25 | Cronos | cw_token | cWJPYC | `0x52aD62B8bD01154e2A4E067F8Dc4144C9988d203` | etherscan-family | inventory-only | https://cronoscan.com | pending | pending |
|
||||
| 25 | Cronos | cw_token | cWUSDC | `0x932566E5bB6BEBF6B035B94f3DE1f75f126304Ec` | etherscan-family | inventory-only | https://cronoscan.com | pending | pending |
|
||||
| 25 | Cronos | cw_token | cWUSDT | `0x72948a7a813B60b37Cd0c920C4657DbFF54312b8` | etherscan-family | inventory-only | https://cronoscan.com | pending | pending |
|
||||
| 25 | Cronos | cw_token | cWXAUC | `0xf1B771c95573113E993374c0c7cB2dc1a7908B12` | etherscan-family | inventory-only | https://cronoscan.com | pending | pending |
|
||||
| 25 | Cronos | cw_token | cWXAUT | `0xD517C0cF7013f988946A468c880Cc9F8e2A4BCbE` | etherscan-family | inventory-only | https://cronoscan.com | pending | pending |
|
||||
| 25 | Cronos | gas_mirror | cWCRO | `0x9b10eb0f77c45322dbd1fcb07176fd9a7609c164` | etherscan-family | inventory-only | https://cronoscan.com | pending | pending |
|
||||
| 25 | Cronos | pmm_pool | cWUSDC/USDC | `0x8F1038dE06d799a30D16d8B0b0ADEe629e7d4547` | etherscan-family | inventory-only | https://cronoscan.com | pending | pending |
|
||||
| 25 | Cronos | pmm_pool | cWUSDT/USDT | `0xFCB0b0Ac36d67EDBA91100c75C27De945357CD62` | etherscan-family | inventory-only | https://cronoscan.com | pending | pending |
|
||||
| 25 | Cronos | reference_venue | uniswap_v3:cWCRO/WCRO | `0x7161000000000000000000000000000000000019` | etherscan-family | inventory-only | https://cronoscan.com | pending | pending |
|
||||
| 56 | BSC | anchor_token | USDT | `0x55d398326f99059fF775485246999027B3197955` | etherscan-family | reference-only | https://bscscan.com | pending | pending |
|
||||
| 56 | BSC | cw_token | cWAUDC | `0x7062f35567BBAb4d98dc33af03B0d14Df42294D5` | etherscan-family | inventory-only | https://bscscan.com | pending | pending |
|
||||
| 56 | BSC | cw_token | cWAUSDT | `0xe1a51Bc037a79AB36767561B147eb41780124934` | etherscan-family | inventory-only | https://bscscan.com | pending | pending |
|
||||
| 56 | BSC | cw_token | cWBTC | `0xcb7c000000000000000000000000000000000038` | etherscan-family | inventory-only | https://bscscan.com | pending | pending |
|
||||
|
||||
## Notes
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Explorer Token List Cross-Check
|
||||
|
||||
**Last Updated:** 2026-04-14
|
||||
**Last Updated:** 2026-04-02
|
||||
**Purpose:** Cross-check the token list shown at [https://explorer.d-bis.org/tokens](https://explorer.d-bis.org/tokens) against repo token lists, canonical addresses, and CONTRACT_ADDRESSES_REFERENCE.
|
||||
|
||||
---
|
||||
@@ -89,7 +89,6 @@ and use `dbis-138.tokenlist.json` as the curated source. See §9.
|
||||
| LINK (0xb7721d…) | Yes | Match |
|
||||
| cUSDT (0x93E6…) | Yes | Match |
|
||||
| cUSDC (0xf22258…) | Yes | Match |
|
||||
| cUSDT V2, cUSDC V2, mirror USDT/USDC | Yes | Match §5; list uses tag **`fwdcanon`** (Uniswap schema tag length); gas rows use **`gasnative`** |
|
||||
| cEURC (0x808596…) | Yes | Match |
|
||||
|
||||
**Historical gap (closed 2026-02-28):**
|
||||
@@ -97,7 +96,7 @@ cEURT, cGBPC, cGBPT, cAUDC, cJPYC, cCHFC, cCADC, cXAUC, cXAUT were initially mis
|
||||
|
||||
### 3.2 `explorer-monorepo/backend/config/metamask/DUAL_CHAIN_TOKEN_LIST.tokenlist.json`
|
||||
|
||||
Chain 138 entries were originally the same 7 as `dbis-138`, but the additional compliant tokens were added on 2026-02-28. **cUSDT/cUSDC V2** and **official mirror USDT/USDC** (D3) are now part of the canonical set in **§5** and appear in `DUAL_CHAIN_TOKEN_LIST`; keep `dbis-138.tokenlist.json` aligned with §5 for packaging parity. Keep this file focused on live Explorer cross-checks rather than the closed token-list gap.
|
||||
Chain 138 entries were originally the same 7 as `dbis-138`, but the additional compliant tokens were added on 2026-02-28. Keep this file focused on live Explorer cross-checks rather than the closed token-list gap.
|
||||
|
||||
### 3.3 `ADDRESS_MATRIX_AND_STATUS.md` / `CONTRACT_ADDRESSES_REFERENCE.md`
|
||||
|
||||
@@ -111,9 +110,9 @@ All **canonical** Chain 138 token addresses (WETH, WETH10, LINK, cUSDT, cUSDC, c
|
||||
|------|--------|----------------|
|
||||
| **Explorer token list source** | Confirmed: Blockscout `GET /api/v2/tokens` | No change; document only. |
|
||||
| **WETH9 on Blockscout** | First token has `decimals: "0"`, `name`/`symbol`: null | Verify WETH9 contract metadata on-chain; fix in contract or in Blockscout indexing if needed. |
|
||||
| **dbis-138.tokenlist.json** | Done (2026-02-28; mirrors 2026-04-14) | Added cEURT, cGBPC, cGBPT, cAUDC, cJPYC, cCHFC, cCADC, cXAUC, cXAUT; **official mirror USDT/USDC** per §5. |
|
||||
| **DUAL_CHAIN_TOKEN_LIST (MetaMask)** | Done (2026-02-28) | Added same 9 tokens to backend and api/rest copies; **V2 + mirrors** tracked in §5 / DUAL. |
|
||||
| **ADDRESS_MATRIX / docs** | Done | cEURT and **§1.1 / §5** token inventory; TransactionMirror 0x7131…; summary updated. |
|
||||
| **dbis-138.tokenlist.json** | Done (2026-02-28) | Added cEURT, cGBPC, cGBPT, cAUDC, cJPYC, cCHFC, cCADC, cXAUC, cXAUT. |
|
||||
| **DUAL_CHAIN_TOKEN_LIST (MetaMask)** | Done (2026-02-28) | Added same 9 tokens to backend and api/rest copies. |
|
||||
| **ADDRESS_MATRIX / docs** | Done | cEURT and all 16 tokens in §1.1; TransactionMirror 0x7131…; summary updated. |
|
||||
| **Extra LINK/cUSDT/cUSDC on Blockscout** | 6 additional contracts | Non-canonical; use §5 canonical addresses only. |
|
||||
|
||||
---
|
||||
@@ -129,10 +128,6 @@ Use this table to align token lists and docs with the Explorer (Blockscout) and
|
||||
| LINK | `0xb7721dD53A8c629d9f1Ba31a5819AFe250002b03` | 18 |
|
||||
| cUSDT | `0x93E66202A11B1772E55407B32B44e5Cd8eda7f22` | 6 |
|
||||
| cUSDC | `0xf22258f57794CC8E06237084b353Ab30fFfa640b` | 6 |
|
||||
| cUSDT V2 | `0x9FBfab33882Efe0038DAa608185718b772EE5660` | 6 |
|
||||
| cUSDC V2 | `0x219522c60e83dEe01FC5b0329d6fA8fD84b9D13d` | 6 |
|
||||
| USDT (official mirror, D3) | `0x004b63A7B5b0E06f6bB6adb4a5F9f590BF3182D1` | 6 |
|
||||
| USDC (official mirror, D3) | `0x71D6687F38b93CCad569Fa6352c876eea967201b` | 6 |
|
||||
| cEURC | `0x8085961F9cF02b4d800A3c6d386D31da4B34266a` | 6 |
|
||||
| cEURT | `0xdf4b71c61E5912712C1Bdd451416B9aC26949d72` | 6 |
|
||||
| cGBPC | `0x003960f16D9d34F2e98d62723B6721Fb92074aD2` | 6 |
|
||||
@@ -159,7 +154,7 @@ Use this table to align token lists and docs with the Explorer (Blockscout) and
|
||||
| `explorer-monorepo/backend/config/metamask/DUAL_CHAIN_TOKEN_LIST.tokenlist.json` | Multi-chain token list (138, 1, 651940, 25) for MetaMask |
|
||||
| `token-lists/lists/dbis-138.tokenlist.json` | Chain 138 curated token list (Uniswap-style) |
|
||||
| `smom-dbis-138/services/token-aggregation/src/config/canonical-tokens.ts` | Canonical addresses and env overrides for indexing/reporting |
|
||||
| `docs/11-references/ADDRESS_MATRIX_AND_STATUS.md` | Correlated address matrix; §1.1 aligned with **§5** canonical rows; TransactionMirror and summary updated |
|
||||
| `docs/11-references/ADDRESS_MATRIX_AND_STATUS.md` | Correlated address matrix; §1.1 includes all 16 tokens; TransactionMirror and summary updated |
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -76,7 +76,7 @@
|
||||
| **DODO Vending Machine / Adapter** | Deployed | `0xb6D9EF3575bc48De3f011C310DC24d87bEC6087C` — adapter used by `DODOPMMIntegration`. |
|
||||
| **PMM pools (current canonical stack)** | 3 created | Public pools are `0x9e89bAe009adf128782E19e8341996c596ac40dC` (cUSDT/cUSDC), `0x866Cb44b59303d8dc5f4F9E3E7A8e8b0bf238d66` (cUSDT/USDT), `0xc39B7D0F40838cbFb54649d327f49a6DAC964062` (cUSDC/USDC). |
|
||||
| **DODOPMMProvider** | **Deployed** | `0x3f729632E9553EBacCdE2e9b4c8F2B285b014F2e`; supports the three canonical stable pools above. |
|
||||
| **D3Oracle** | Deployed (private pilot) | `0xD7459aEa8bB53C83a1e90262777D730539A326F0`; `WETH10` uses keeper-synced **MockPriceFeed** `0x3e8725b8De386feF3eFE5678c92eA6aDB41992B2` (avoids managed-aggregator staleness on Besu). Legacy slot `0x99b3511a2d315a497c8112c1fdd8d508d4b1e506` remains for audits. Stables use managed USD feeds. |
|
||||
| **D3Oracle** | Deployed (private pilot) | `0xD7459aEa8bB53C83a1e90262777D730539A326F0`; `WETH10` now uses live `Oracle_Aggregator=0x99b3511a2d315a497c8112c1fdd8d508d4b1e506`, and the stable assets use dedicated managed USD feeds. |
|
||||
| **D3Vault / D3Proxy / D3MMFactory** | Deployed (private pilot) | `D3Vault=0x42b6867260Fb9eE6d09B7E0233A1fAD65D0133D1`, `D3Proxy=0xc9a11abB7C63d88546Be24D58a6d95e3762cB843`, `D3MMFactory=0x78470C7d2925B6738544E2DD4FE7c07CcA21AC31`. |
|
||||
| **D3MM pools** | 2 created | `0xE71Bc2cCb62dA5B18F88647db2b4a721Db416fc5` is a superseded bootstrap pool on the placeholder `WETH9` path. `0x6550A3a59070061a262a893A1D6F3F490afFDBDA` is the canonical private `WETH10` pilot pool. |
|
||||
| **EnhancedSwapRouterV2 + public venue layer** | Live | Router-v2 is deployed and the canonical upstream-native `Uniswap_v3` lane plus the funded pilot-compatible `Balancer`, `Curve_3`, and `1inch` venues are publicly exposed for the canonical Chain 138 routing asset lanes. |
|
||||
|
||||
@@ -17,25 +17,6 @@ import http from 'node:http';
|
||||
const PORT = parseInt(process.env.PORT || '3080', 10);
|
||||
const BLOCKSCOUT_URL = (process.env.BLOCKSCOUT_URL || 'http://192.168.11.140:4000').replace(/\/$/, '');
|
||||
|
||||
function parseOptimizationRuns(payload) {
|
||||
const raw = payload.runs ?? payload.optimization_runs ?? '200';
|
||||
return parseInt(raw, 10) || 200;
|
||||
}
|
||||
|
||||
function inferOptimizationEnabled(payload) {
|
||||
const explicit = payload.optimizationUsed ?? payload.optimization_used;
|
||||
if (explicit !== undefined && explicit !== null && explicit !== '') {
|
||||
return [true, '1', 1, 'true'].includes(explicit);
|
||||
}
|
||||
// Forge often supplies runs without optimizationUsed for legacy compiler paths.
|
||||
// When runs is positive, assume optimization was intentionally enabled.
|
||||
return parseOptimizationRuns(payload) > 0;
|
||||
}
|
||||
|
||||
function inferEvmVersion(payload) {
|
||||
return payload.evmversion || payload.evm_version || 'default';
|
||||
}
|
||||
|
||||
/** Parse body as JSON or application/x-www-form-urlencoded (Forge/Etherscan style). */
|
||||
function parseBody(req) {
|
||||
return new Promise((resolve, reject) => {
|
||||
@@ -142,9 +123,9 @@ async function forwardV2Flattened(payload) {
|
||||
compiler_version: payload.compilerversion || payload.compilerVersion || 'v0.8.20+commit.a1b79de6',
|
||||
contract_name: payload.contractname || payload.contractName || 'Contract',
|
||||
license_type: payload.licensetype || payload.licenseType || 'mit',
|
||||
is_optimization_enabled: inferOptimizationEnabled(payload),
|
||||
optimization_runs: parseOptimizationRuns(payload),
|
||||
evm_version: inferEvmVersion(payload),
|
||||
is_optimization_enabled: [true, '1', 1, 'true'].includes(payload.optimizationUsed ?? payload.optimization_used),
|
||||
optimization_runs: parseInt(payload.runs ?? payload.optimization_runs ?? '200', 10) || 200,
|
||||
evm_version: payload.evmversion || payload.evm_version || 'london',
|
||||
autodetect_constructor_args: payload.autodetectConstructorArguments !== false,
|
||||
source_code: typeof sourceCode === 'string' ? sourceCode : JSON.stringify(sourceCode),
|
||||
};
|
||||
@@ -225,9 +206,13 @@ async function forwardV2StandardInput(payload) {
|
||||
appendField('autodetect_constructor_args', String(payload.autodetectConstructorArguments !== false));
|
||||
appendField('license_type', licenseType);
|
||||
appendField('constructor_args', constructorArgs);
|
||||
appendField('evm_version', inferEvmVersion(payload));
|
||||
appendField('is_optimization_enabled', String(inferOptimizationEnabled(payload)));
|
||||
appendField('optimization_runs', String(parseOptimizationRuns(payload)));
|
||||
if (payload.evmversion || payload.evm_version) appendField('evm_version', payload.evmversion || payload.evm_version);
|
||||
if (payload.optimizationUsed !== undefined || payload.optimization_used !== undefined) {
|
||||
appendField('is_optimization_enabled', String([true, '1', 1, 'true'].includes(payload.optimizationUsed ?? payload.optimization_used)));
|
||||
}
|
||||
if (payload.runs !== undefined || payload.optimization_runs !== undefined) {
|
||||
appendField('optimization_runs', String(parseInt(payload.runs ?? payload.optimization_runs ?? '200', 10) || 200));
|
||||
}
|
||||
appendFile('files[0]', 'standard-input.json', standardJson, 'application/json');
|
||||
parts.push(Buffer.from(`--${boundary}--\r\n`));
|
||||
const body = Buffer.concat(parts);
|
||||
@@ -269,15 +254,7 @@ function toEtherscanResponse(result) {
|
||||
return { status: '1', message: data.message || 'OK', result: data.result ?? 'Verification submitted' };
|
||||
}
|
||||
if (status >= 200 && status < 300) {
|
||||
const successMessage = typeof data?.message === 'string' ? data.message : '';
|
||||
const successResult = typeof data?.result === 'string' ? data.result : '';
|
||||
if (
|
||||
/verification started/i.test(successMessage) ||
|
||||
/verification submitted/i.test(successMessage) ||
|
||||
/verification submitted/i.test(successResult)
|
||||
) {
|
||||
return { status: '1', message: successMessage || 'OK', result: successResult || 'Verification submitted' };
|
||||
}
|
||||
return { status: '1', message: 'OK', result: data?.result ?? 'Verification submitted' };
|
||||
}
|
||||
// Blockscout may return HTML (502/500) or invalid JSON when DB/migrations fail
|
||||
let msg = data?.message || data?.error;
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
1332
reports/status/gru_v2_full_mesh_status_report.json
Normal file
1332
reports/status/gru_v2_full_mesh_status_report.json
Normal file
File diff suppressed because it is too large
Load Diff
234
reports/status/gru_v2_full_mesh_tracker.csv
Normal file
234
reports/status/gru_v2_full_mesh_tracker.csv
Normal file
@@ -0,0 +1,234 @@
|
||||
namespace,chain,pair,priority,status
|
||||
c* V2,138,cUSDT V2 / cUSDC V2,P0,todo
|
||||
c* V2,138,cUSDT V2 / USDT,P0,todo
|
||||
c* V2,138,cUSDC V2 / USDC,P0,todo
|
||||
c* V2,138,cEURC V2 / cUSDC V2,P0,todo
|
||||
c* V2,138,cEURT V2 / cUSDC V2,P0,todo
|
||||
c* V2,138,cGBPC V2 / cUSDC V2,P0,todo
|
||||
c* V2,138,cGBPT V2 / cUSDC V2,P0,todo
|
||||
c* V2,138,cAUDC V2 / cUSDC V2,P0,todo
|
||||
c* V2,138,cJPYC V2 / cUSDC V2,P0,todo
|
||||
c* V2,138,cCHFC V2 / cUSDC V2,P0,todo
|
||||
c* V2,138,cCADC V2 / cUSDC V2,P0,todo
|
||||
c* V2,138,cXAUC V2 / cUSDC V2,P0,todo
|
||||
c* V2,138,cXAUT V2 / cUSDC V2,P0,todo
|
||||
c* V2,138,cEURC V2 / cEURT V2,P1,todo
|
||||
c* V2,138,cGBPC V2 / cGBPT V2,P1,todo
|
||||
c* V2,138,cXAUC V2 / cXAUT V2,P1,todo
|
||||
c* V2,138,cETH / WETH,P1,todo
|
||||
c* V2,138,cETH / cUSDC V2,P1,todo
|
||||
c* V2,138,cETHL2 / cUSDC V2,P2,todo
|
||||
c* V2,138,cBNB / cUSDC V2,P2,todo
|
||||
c* V2,138,cPOL / cUSDC V2,P2,todo
|
||||
c* V2,138,cAVAX / cUSDC V2,P2,todo
|
||||
c* V2,138,cCRO / cUSDC V2,P2,todo
|
||||
c* V2,138,cXDAI / cUSDC V2,P2,todo
|
||||
c* V2,138,cCELO / cUSDC V2,P2,todo
|
||||
c* V2,138,cWEMIX / cUSDC V2,P2,todo
|
||||
cA*,651940,cAUSDT / cAUSDC,P0,todo
|
||||
cA*,651940,cAUSDT / AUSDT,P0,todo
|
||||
cA*,651940,cAUSDC / USDC,P0,todo
|
||||
cA*,651940,cAEURC / cAUSDC,P0,todo
|
||||
cA*,651940,cAEURT / cAUSDC,P0,todo
|
||||
cA*,651940,cAGBPC / cAUSDC,P0,todo
|
||||
cA*,651940,cAGBPT / cAUSDC,P0,todo
|
||||
cA*,651940,cAAUDC / cAUSDC,P0,todo
|
||||
cA*,651940,cAJPYC / cAUSDC,P0,todo
|
||||
cA*,651940,cACHFC / cAUSDC,P0,todo
|
||||
cA*,651940,cACADC / cAUSDC,P0,todo
|
||||
cA*,651940,cAXAUC / cAUSDC,P0,todo
|
||||
cA*,651940,cAXAUT / cAUSDC,P0,todo
|
||||
cA*,651940,cAEURC / cAEURT,P1,todo
|
||||
cA*,651940,cAGBPC / cAGBPT,P1,todo
|
||||
cA*,651940,cAXAUC / cAXAUT,P1,todo
|
||||
cA*,651940,cAETH / WETH,P1,todo
|
||||
cA*,651940,cAETH / cAUSDC,P1,todo
|
||||
cA*,651940,cAWALL / WALL,P1,todo
|
||||
cA*,651940,cAWALL / cAUSDC,P1,todo
|
||||
cW*,1,cWUSDT / USDC,,todo
|
||||
cW*,1,cWUSDC / USDC,,todo
|
||||
cW*,1,cWUSDT / USDT,,todo
|
||||
cW*,1,cWUSDC / USDT,,todo
|
||||
cW*,1,cWUSDT / cWUSDC,,todo
|
||||
cW*,1,cWEURC / USDC,,todo
|
||||
cW*,1,cWEURT / USDC,,todo
|
||||
cW*,1,cWGBPC / USDC,,todo
|
||||
cW*,1,cWGBPT / USDC,,todo
|
||||
cW*,1,cWAUDC / USDC,,todo
|
||||
cW*,1,cWJPYC / USDC,,todo
|
||||
cW*,1,cWCHFC / USDC,,todo
|
||||
cW*,1,cWCADC / USDC,,todo
|
||||
cW*,1,cWXAUC / USDC,,todo
|
||||
cW*,1,cWXAUT / USDC,,todo
|
||||
cW*,1,cWETH / WETH,,todo
|
||||
cW*,1,cWETH / USDC,,todo
|
||||
cW*,10,cWUSDT / USDC,,todo
|
||||
cW*,10,cWUSDC / USDC,,todo
|
||||
cW*,10,cWUSDT / USDT,,todo
|
||||
cW*,10,cWUSDC / USDT,,todo
|
||||
cW*,10,cWUSDT / cWUSDC,,todo
|
||||
cW*,10,cWEURC / USDC,,todo
|
||||
cW*,10,cWEURT / USDC,,todo
|
||||
cW*,10,cWGBPC / USDC,,todo
|
||||
cW*,10,cWGBPT / USDC,,todo
|
||||
cW*,10,cWAUDC / USDC,,todo
|
||||
cW*,10,cWJPYC / USDC,,todo
|
||||
cW*,10,cWCHFC / USDC,,todo
|
||||
cW*,10,cWCADC / USDC,,todo
|
||||
cW*,10,cWXAUC / USDC,,todo
|
||||
cW*,10,cWXAUT / USDC,,todo
|
||||
cW*,10,cWETHL2 / WETH,,todo
|
||||
cW*,10,cWETHL2 / USDC,,todo
|
||||
cW*,25,cWUSDT / USDC,,todo
|
||||
cW*,25,cWUSDC / USDC,,todo
|
||||
cW*,25,cWUSDT / USDT,,todo
|
||||
cW*,25,cWUSDC / USDT,,todo
|
||||
cW*,25,cWUSDT / cWUSDC,,todo
|
||||
cW*,25,cWEURC / USDC,,todo
|
||||
cW*,25,cWEURT / USDC,,todo
|
||||
cW*,25,cWGBPC / USDC,,todo
|
||||
cW*,25,cWGBPT / USDC,,todo
|
||||
cW*,25,cWAUDC / USDC,,todo
|
||||
cW*,25,cWJPYC / USDC,,todo
|
||||
cW*,25,cWCHFC / USDC,,todo
|
||||
cW*,25,cWCADC / USDC,,todo
|
||||
cW*,25,cWXAUC / USDC,,todo
|
||||
cW*,25,cWXAUT / USDC,,todo
|
||||
cW*,25,cWCRO / WCRO,,todo
|
||||
cW*,25,cWCRO / USDT,,todo
|
||||
cW*,56,cWUSDT / USDC,,todo
|
||||
cW*,56,cWUSDC / USDC,,todo
|
||||
cW*,56,cWUSDT / USDT,,todo
|
||||
cW*,56,cWUSDC / USDT,,todo
|
||||
cW*,56,cWUSDT / cWUSDC,,todo
|
||||
cW*,56,cWEURC / USDC,,todo
|
||||
cW*,56,cWEURT / USDC,,todo
|
||||
cW*,56,cWGBPC / USDC,,todo
|
||||
cW*,56,cWGBPT / USDC,,todo
|
||||
cW*,56,cWAUDC / USDC,,todo
|
||||
cW*,56,cWJPYC / USDC,,todo
|
||||
cW*,56,cWCHFC / USDC,,todo
|
||||
cW*,56,cWCADC / USDC,,todo
|
||||
cW*,56,cWXAUC / USDC,,todo
|
||||
cW*,56,cWXAUT / USDC,,todo
|
||||
cW*,56,cWBNB / WBNB,,todo
|
||||
cW*,56,cWBNB / USDT,,todo
|
||||
cW*,100,cWUSDT / USDC,,todo
|
||||
cW*,100,cWUSDC / USDC,,todo
|
||||
cW*,100,cWUSDT / USDT,,todo
|
||||
cW*,100,cWUSDC / USDT,,todo
|
||||
cW*,100,cWUSDT / cWUSDC,,todo
|
||||
cW*,100,cWEURC / USDC,,todo
|
||||
cW*,100,cWEURT / USDC,,todo
|
||||
cW*,100,cWGBPC / USDC,,todo
|
||||
cW*,100,cWGBPT / USDC,,todo
|
||||
cW*,100,cWAUDC / USDC,,todo
|
||||
cW*,100,cWJPYC / USDC,,todo
|
||||
cW*,100,cWCHFC / USDC,,todo
|
||||
cW*,100,cWCADC / USDC,,todo
|
||||
cW*,100,cWXAUC / USDC,,todo
|
||||
cW*,100,cWXAUT / USDC,,todo
|
||||
cW*,100,cWXDAI / WXDAI,,todo
|
||||
cW*,100,cWXDAI / USDC,,todo
|
||||
cW*,137,cWUSDT / USDC,,todo
|
||||
cW*,137,cWUSDC / USDC,,todo
|
||||
cW*,137,cWUSDT / USDT,,todo
|
||||
cW*,137,cWUSDC / USDT,,todo
|
||||
cW*,137,cWUSDT / cWUSDC,,todo
|
||||
cW*,137,cWEURC / USDC,,todo
|
||||
cW*,137,cWEURT / USDC,,todo
|
||||
cW*,137,cWGBPC / USDC,,todo
|
||||
cW*,137,cWGBPT / USDC,,todo
|
||||
cW*,137,cWAUDC / USDC,,todo
|
||||
cW*,137,cWJPYC / USDC,,todo
|
||||
cW*,137,cWCHFC / USDC,,todo
|
||||
cW*,137,cWCADC / USDC,,todo
|
||||
cW*,137,cWXAUC / USDC,,todo
|
||||
cW*,137,cWXAUT / USDC,,todo
|
||||
cW*,137,cWPOL / WPOL,,todo
|
||||
cW*,137,cWPOL / USDC,,todo
|
||||
cW*,8453,cWUSDT / USDC,,todo
|
||||
cW*,8453,cWUSDC / USDC,,todo
|
||||
cW*,8453,cWUSDT / USDT,,todo
|
||||
cW*,8453,cWUSDC / USDT,,todo
|
||||
cW*,8453,cWUSDT / cWUSDC,,todo
|
||||
cW*,8453,cWEURC / USDC,,todo
|
||||
cW*,8453,cWEURT / USDC,,todo
|
||||
cW*,8453,cWGBPC / USDC,,todo
|
||||
cW*,8453,cWGBPT / USDC,,todo
|
||||
cW*,8453,cWAUDC / USDC,,todo
|
||||
cW*,8453,cWJPYC / USDC,,todo
|
||||
cW*,8453,cWCHFC / USDC,,todo
|
||||
cW*,8453,cWCADC / USDC,,todo
|
||||
cW*,8453,cWXAUC / USDC,,todo
|
||||
cW*,8453,cWXAUT / USDC,,todo
|
||||
cW*,8453,cWETHL2 / WETH,,todo
|
||||
cW*,8453,cWETHL2 / USDC,,todo
|
||||
cW*,42161,cWUSDT / USDC,,todo
|
||||
cW*,42161,cWUSDC / USDC,,todo
|
||||
cW*,42161,cWUSDT / USDT,,todo
|
||||
cW*,42161,cWUSDC / USDT,,todo
|
||||
cW*,42161,cWUSDT / cWUSDC,,todo
|
||||
cW*,42161,cWEURC / USDC,,todo
|
||||
cW*,42161,cWEURT / USDC,,todo
|
||||
cW*,42161,cWGBPC / USDC,,todo
|
||||
cW*,42161,cWGBPT / USDC,,todo
|
||||
cW*,42161,cWAUDC / USDC,,todo
|
||||
cW*,42161,cWJPYC / USDC,,todo
|
||||
cW*,42161,cWCHFC / USDC,,todo
|
||||
cW*,42161,cWCADC / USDC,,todo
|
||||
cW*,42161,cWXAUC / USDC,,todo
|
||||
cW*,42161,cWXAUT / USDC,,todo
|
||||
cW*,42161,cWETHL2 / WETH,,todo
|
||||
cW*,42161,cWETHL2 / USDC,,todo
|
||||
cW*,42220,cWUSDT / USDC,,todo
|
||||
cW*,42220,cWUSDC / USDC,,todo
|
||||
cW*,42220,cWUSDT / USDT,,todo
|
||||
cW*,42220,cWUSDC / USDT,,todo
|
||||
cW*,42220,cWUSDT / cWUSDC,,todo
|
||||
cW*,42220,cWEURC / USDC,,todo
|
||||
cW*,42220,cWEURT / USDC,,todo
|
||||
cW*,42220,cWGBPC / USDC,,todo
|
||||
cW*,42220,cWGBPT / USDC,,todo
|
||||
cW*,42220,cWAUDC / USDC,,todo
|
||||
cW*,42220,cWJPYC / USDC,,todo
|
||||
cW*,42220,cWCHFC / USDC,,todo
|
||||
cW*,42220,cWCADC / USDC,,todo
|
||||
cW*,42220,cWXAUC / USDC,,todo
|
||||
cW*,42220,cWXAUT / USDC,,todo
|
||||
cW*,42220,cWCELO / WCELO,,todo
|
||||
cW*,42220,cWCELO / USDC,,todo
|
||||
cW*,43114,cWUSDT / USDC,,todo
|
||||
cW*,43114,cWUSDC / USDC,,todo
|
||||
cW*,43114,cWUSDT / USDT,,todo
|
||||
cW*,43114,cWUSDC / USDT,,todo
|
||||
cW*,43114,cWUSDT / cWUSDC,,todo
|
||||
cW*,43114,cWEURC / USDC,,todo
|
||||
cW*,43114,cWEURT / USDC,,todo
|
||||
cW*,43114,cWGBPC / USDC,,todo
|
||||
cW*,43114,cWGBPT / USDC,,todo
|
||||
cW*,43114,cWAUDC / USDC,,todo
|
||||
cW*,43114,cWJPYC / USDC,,todo
|
||||
cW*,43114,cWCHFC / USDC,,todo
|
||||
cW*,43114,cWCADC / USDC,,todo
|
||||
cW*,43114,cWXAUC / USDC,,todo
|
||||
cW*,43114,cWXAUT / USDC,,todo
|
||||
cW*,43114,cWAVAX / WAVAX,,todo
|
||||
cW*,43114,cWAVAX / USDC,,todo
|
||||
cW*,1111,cWUSDT / USDC,,planned
|
||||
cW*,1111,cWUSDC / USDC,,planned
|
||||
cW*,1111,cWUSDT / USDT,,planned
|
||||
cW*,1111,cWUSDC / USDT,,planned
|
||||
cW*,1111,cWUSDT / cWUSDC,,planned
|
||||
cW*,1111,cWEURC / USDC,,planned
|
||||
cW*,1111,cWEURT / USDC,,planned
|
||||
cW*,1111,cWGBPC / USDC,,planned
|
||||
cW*,1111,cWGBPT / USDC,,planned
|
||||
cW*,1111,cWAUDC / USDC,,planned
|
||||
cW*,1111,cWJPYC / USDC,,planned
|
||||
cW*,1111,cWCHFC / USDC,,planned
|
||||
cW*,1111,cWCADC / USDC,,planned
|
||||
cW*,1111,cWXAUC / USDC,,planned
|
||||
cW*,1111,cWXAUT / USDC,,planned
|
||||
cW*,1111,cWWEMIX / WWEMIX,,planned
|
||||
cW*,1111,cWWEMIX / USDC,,planned
|
||||
|
128
scripts/deployment/configure-gru-v2-mainnet-bridge-parity.sh
Executable file
128
scripts/deployment/configure-gru-v2-mainnet-bridge-parity.sh
Executable file
@@ -0,0 +1,128 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
|
||||
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh" >/dev/null 2>&1 || true
|
||||
source "${PROJECT_ROOT}/smom-dbis-138/scripts/load-env.sh" >/dev/null 2>&1 || true
|
||||
|
||||
need_cmd() {
|
||||
command -v "$1" >/dev/null 2>&1 || { echo "[fail] missing required command: $1" >&2; exit 1; }
|
||||
}
|
||||
|
||||
need_cmd cast
|
||||
|
||||
L1_BRIDGE="${CW_MULTITOKEN_BRIDGE_L1_138:-${CW_L1_BRIDGE_CHAIN138:-${CHAIN138_L1_BRIDGE:-0x152ed3e9912161b76bdfd368d0c84b7c31c10de7}}}"
|
||||
L2_BRIDGE="${CW_MULTITOKEN_BRIDGE_L2_MAINNET:-${CW_BRIDGE_MAINNET:-0x2bF74583206A49Be07E0E8A94197C12987AbD7B5}}"
|
||||
SELECTOR="${ETH_MAINNET_SELECTOR:-5009297550715157269}"
|
||||
RPC138="${RPC_URL_138:-}"
|
||||
RPC1="${ETHEREUM_MAINNET_RPC:-${ETH_MAINNET_RPC_URL:-}}"
|
||||
|
||||
CUSDT_V2="${COMPLIANT_USDT_V2:-0x9FBfab33882Efe0038DAa608185718b772EE5660}"
|
||||
CUSDC_V2="${COMPLIANT_USDC_V2:-0x219522c60e83dEe01FC5b0329d6fA8fD84b9D13d}"
|
||||
CWUSDT="${CWUSDT_V2_MAINNET:-${CWUSDT_MAINNET:-0x7E8FF0DcC974F290a29968e9350800a6df674447}}"
|
||||
CWUSDC="${CWUSDC_V2_MAINNET:-${CWUSDC_MAINNET:-0x3398ff0Bc56Fe3597E12BE6b191Cc92f10Eae53c}}"
|
||||
|
||||
ASSET="ALL"
|
||||
EXECUTE=0
|
||||
|
||||
usage() {
|
||||
cat <<'EOF'
|
||||
Usage:
|
||||
bash scripts/deployment/configure-gru-v2-mainnet-bridge-parity.sh [--asset cUSDT_V2|cUSDC_V2] [--execute]
|
||||
|
||||
Dry-run by default. With --execute, broadcasts:
|
||||
1. L2 configureDestination(138, L1_BRIDGE, true)
|
||||
2. L2 configureTokenPair(V2 canonical, cW mirrored)
|
||||
3. L1 configureDestination(V2 canonical, MAINNET_SELECTOR, L2_BRIDGE, true)
|
||||
4. L1 configureSupportedCanonicalToken(V2 canonical, true) only when the deployed L1 bridge supports it
|
||||
EOF
|
||||
}
|
||||
|
||||
for arg in "$@"; do
|
||||
case "$arg" in
|
||||
--asset) shift ;;
|
||||
esac
|
||||
done
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--asset) ASSET="${2:-}"; shift 2 ;;
|
||||
--execute) EXECUTE=1; shift ;;
|
||||
--help|-h) usage; exit 0 ;;
|
||||
*) echo "[fail] unknown arg: $1" >&2; usage >&2; exit 2 ;;
|
||||
esac
|
||||
done
|
||||
|
||||
[[ -n "$RPC138" && -n "$RPC1" ]] || { echo "[fail] RPC_URL_138 and ETHEREUM_MAINNET_RPC are required" >&2; exit 1; }
|
||||
if (( EXECUTE == 1 )); then
|
||||
[[ -n "${PRIVATE_KEY:-}" ]] || { echo "[fail] PRIVATE_KEY is required for --execute" >&2; exit 1; }
|
||||
fi
|
||||
|
||||
send_cast() {
|
||||
local rpc="$1" to="$2" sig="$3"
|
||||
shift 3
|
||||
if (( EXECUTE == 1 )); then
|
||||
cast send "$to" "$sig" "$@" --rpc-url "$rpc" --private-key "$PRIVATE_KEY" --legacy
|
||||
else
|
||||
printf 'cast send %q %q' "$to" "$sig"
|
||||
for part in "$@"; do
|
||||
printf ' %q' "$part"
|
||||
done
|
||||
printf ' --rpc-url %q --private-key "$PRIVATE_KEY" --legacy\n' "$rpc"
|
||||
fi
|
||||
}
|
||||
|
||||
print_state() {
|
||||
local label="$1" canonical="$2"
|
||||
echo "=== $label ==="
|
||||
echo "canonical=$canonical"
|
||||
echo "l1_destination=$(cast call "$L1_BRIDGE" 'destinations(address,uint64)((address,bool))' "$canonical" "$SELECTOR" --rpc-url "$RPC138" 2>/dev/null | tr '\n' ' ' || true)"
|
||||
echo "l1_supported=$(cast call "$L1_BRIDGE" 'supportedCanonicalToken(address)(bool)' "$canonical" --rpc-url "$RPC138" 2>/dev/null | awk '{print $1}' || true)"
|
||||
echo "l2_pair=$(cast call "$L2_BRIDGE" 'canonicalToMirrored(address)(address)' "$canonical" --rpc-url "$RPC1" 2>/dev/null | awk '{print $1}' || true)"
|
||||
echo "l2_destination=$(cast call "$L2_BRIDGE" 'destinations(uint64)((address,bool))' 138 --rpc-url "$RPC1" 2>/dev/null | tr '\n' ' ' || true)"
|
||||
}
|
||||
|
||||
l1_supports_supported_canonical_fn() {
|
||||
local canonical="$1"
|
||||
cast call "$L1_BRIDGE" 'supportedCanonicalToken(address)(bool)' "$canonical" --rpc-url "$RPC138" >/dev/null 2>&1
|
||||
}
|
||||
|
||||
run_asset() {
|
||||
local label="$1" canonical="$2" mirrored="$3"
|
||||
print_state "$label" "$canonical"
|
||||
echo "plan_l2_destination:"
|
||||
send_cast "$RPC1" "$L2_BRIDGE" "configureDestination(uint64,address,bool)" 138 "$L1_BRIDGE" true
|
||||
echo "plan_l2_pair:"
|
||||
send_cast "$RPC1" "$L2_BRIDGE" "configureTokenPair(address,address)" "$canonical" "$mirrored"
|
||||
echo "plan_l1_destination:"
|
||||
send_cast "$RPC138" "$L1_BRIDGE" "configureDestination(address,uint64,address,bool)" "$canonical" "$SELECTOR" "$L2_BRIDGE" true
|
||||
if l1_supports_supported_canonical_fn "$canonical"; then
|
||||
echo "plan_l1_supported:"
|
||||
send_cast "$RPC138" "$L1_BRIDGE" "configureSupportedCanonicalToken(address,bool)" "$canonical" true
|
||||
else
|
||||
echo "plan_l1_supported: skipped (deployed L1 bridge does not expose supportedCanonicalToken(address)(bool); destination + fee-quote path is authoritative)"
|
||||
fi
|
||||
echo
|
||||
}
|
||||
|
||||
case "$ASSET" in
|
||||
ALL)
|
||||
run_asset "cUSDT_V2" "$CUSDT_V2" "$CWUSDT"
|
||||
run_asset "cUSDC_V2" "$CUSDC_V2" "$CWUSDC"
|
||||
;;
|
||||
cUSDT_V2)
|
||||
run_asset "cUSDT_V2" "$CUSDT_V2" "$CWUSDT"
|
||||
;;
|
||||
cUSDC_V2)
|
||||
run_asset "cUSDC_V2" "$CUSDC_V2" "$CWUSDC"
|
||||
;;
|
||||
*)
|
||||
echo "[fail] unsupported asset: $ASSET" >&2
|
||||
exit 2
|
||||
;;
|
||||
esac
|
||||
|
||||
if (( EXECUTE == 0 )); then
|
||||
echo "Dry-run only. Re-run with --execute to broadcast."
|
||||
fi
|
||||
@@ -1,171 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Enable the upstream Blockscout smart-contract verifier sidecar on CT 5000 and
|
||||
# wire Blockscout to use it.
|
||||
#
|
||||
# Usage:
|
||||
# bash scripts/deployment/ensure-blockscout-smart-contract-verifier-5000.sh --dry-run
|
||||
# bash scripts/deployment/ensure-blockscout-smart-contract-verifier-5000.sh --apply
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
|
||||
|
||||
if [[ -f "${PROJECT_ROOT}/scripts/lib/load-project-env.sh" ]]; then
|
||||
# shellcheck source=/dev/null
|
||||
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh"
|
||||
fi
|
||||
|
||||
HOST="${PROXMOX_HOST_R630_02:-192.168.11.12}"
|
||||
VMID="${BLOCKSCOUT_DB_CT_VMID:-5000}"
|
||||
APPLY=0
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--apply) APPLY=1; shift ;;
|
||||
--dry-run) APPLY=0; shift ;;
|
||||
*)
|
||||
echo "Unknown argument: $1" >&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
read -r -d '' REMOTE_SCRIPT <<'EOF_REMOTE' || true
|
||||
set -euo pipefail
|
||||
cd /opt/blockscout
|
||||
|
||||
stamp="$(date +%Y%m%d_%H%M%S)"
|
||||
cp docker-compose.yml "docker-compose.yml.bak.${stamp}.pre_verifier"
|
||||
|
||||
cat > smart-contract-verifier.env <<'EOF_VERIFIER'
|
||||
SMART_CONTRACT_VERIFIER__SERVER__HTTP__ENABLED=true
|
||||
SMART_CONTRACT_VERIFIER__SERVER__HTTP__ADDR=0.0.0.0:8050
|
||||
SMART_CONTRACT_VERIFIER__SERVER__HTTP__MAX_BODY_SIZE=8388608
|
||||
SMART_CONTRACT_VERIFIER__SERVER__GRPC__ENABLED=false
|
||||
SMART_CONTRACT_VERIFIER__SERVER__GRPC__ADDR=0.0.0.0:8051
|
||||
SMART_CONTRACT_VERIFIER__SOLIDITY__ENABLED=true
|
||||
SMART_CONTRACT_VERIFIER__SOLIDITY__COMPILERS_DIR=/tmp/solidity-compilers
|
||||
SMART_CONTRACT_VERIFIER__SOLIDITY__REFRESH_VERSIONS_SCHEDULE=0 0 * * * * *
|
||||
SMART_CONTRACT_VERIFIER__SOLIDITY__FETCHER__LIST__LIST_URL=https://binaries.soliditylang.org/linux-amd64/list.json
|
||||
SMART_CONTRACT_VERIFIER__VYPER__ENABLED=true
|
||||
SMART_CONTRACT_VERIFIER__VYPER__COMPILERS_DIR=/tmp/vyper-compilers
|
||||
SMART_CONTRACT_VERIFIER__VYPER__REFRESH_VERSIONS_SCHEDULE=0 0 * * * * *
|
||||
SMART_CONTRACT_VERIFIER__VYPER__FETCHER__LIST__LIST_URL=https://raw.githubusercontent.com/blockscout/solc-bin/main/vyper.list.json
|
||||
SMART_CONTRACT_VERIFIER__SOURCIFY__ENABLED=true
|
||||
SMART_CONTRACT_VERIFIER__SOURCIFY__API_URL=https://sourcify.dev/server/
|
||||
SMART_CONTRACT_VERIFIER__SOURCIFY__VERIFICATION_ATTEMPTS=3
|
||||
SMART_CONTRACT_VERIFIER__SOURCIFY__REQUEST_TIMEOUT=10
|
||||
SMART_CONTRACT_VERIFIER__METRICS__ENABLED=false
|
||||
SMART_CONTRACT_VERIFIER__JAEGER__ENABLED=false
|
||||
EOF_VERIFIER
|
||||
|
||||
cat > docker-compose.yml <<'EOF_COMPOSE'
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:15-alpine
|
||||
container_name: blockscout-postgres
|
||||
environment:
|
||||
POSTGRES_USER: blockscout
|
||||
POSTGRES_PASSWORD: blockscout
|
||||
POSTGRES_DB: blockscout
|
||||
volumes:
|
||||
- postgres-data:/var/lib/postgresql/data
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- blockscout-network
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U blockscout"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
smart-contract-verifier:
|
||||
image: ghcr.io/blockscout/smart-contract-verifier:latest
|
||||
container_name: smart-contract-verifier
|
||||
env_file:
|
||||
- ./smart-contract-verifier.env
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- blockscout-network
|
||||
|
||||
blockscout:
|
||||
image: blockscout/blockscout:latest
|
||||
container_name: blockscout
|
||||
command: bin/blockscout start
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
smart-contract-verifier:
|
||||
condition: service_started
|
||||
environment:
|
||||
- DISABLE_WEBAPP=false
|
||||
- DISABLE_INDEXER=false
|
||||
- INDEXER_DISABLE_PENDING_TRANSACTIONS_FETCHER=true
|
||||
- DATABASE_URL=postgresql://blockscout:blockscout@postgres:5432/blockscout?sslmode=disable
|
||||
- ETHEREUM_JSONRPC_HTTP_URL=http://192.168.11.221:8545
|
||||
- ETHEREUM_JSONRPC_WS_URL=ws://192.168.11.221:8546
|
||||
- ETHEREUM_JSONRPC_TRACE_URL=http://192.168.11.221:8545
|
||||
- ETHEREUM_JSONRPC_VARIANT=besu
|
||||
- CHAIN_ID=138
|
||||
- COIN=ETH
|
||||
- BLOCKSCOUT_HOST=explorer.d-bis.org
|
||||
- BLOCKSCOUT_PROTOCOL=https
|
||||
- SECRET_KEY_BASE=73159c7d10b9a5a75ddf10710773078c078bf02124d35b72fa2a841b30b4f88c7c43e5caaf7f9f7f87d16dd66e7870931ae11039c428d1dedae187af762531fa
|
||||
- POOL_SIZE=50
|
||||
- POOL_SIZE_API=50
|
||||
- DATABASE_QUEUE_TARGET=5s
|
||||
- ECTO_USE_SSL=false
|
||||
- MICROSERVICE_SC_VERIFIER_ENABLED=true
|
||||
- MICROSERVICE_SC_VERIFIER_TYPE=sc_verifier
|
||||
- MICROSERVICE_SC_VERIFIER_URL=http://smart-contract-verifier:8050/
|
||||
ports:
|
||||
- "4000:4000"
|
||||
volumes:
|
||||
- blockscout-data:/app/apps/explorer/priv/static
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- blockscout-network
|
||||
|
||||
volumes:
|
||||
postgres-data:
|
||||
blockscout-data:
|
||||
|
||||
networks:
|
||||
blockscout-network:
|
||||
driver: bridge
|
||||
EOF_COMPOSE
|
||||
|
||||
docker pull ghcr.io/blockscout/smart-contract-verifier:latest
|
||||
docker rm -f blockscout 2>/dev/null || true
|
||||
docker rm -f smart-contract-verifier 2>/dev/null || true
|
||||
# Older docker-compose v1 can leave an orphaned auto-generated verifier container
|
||||
# that breaks recreation with a `ContainerConfig` KeyError. Clear it first.
|
||||
docker ps -a --format '{{.Names}}' | grep -E 'smart-contract-verifier$' | xargs -r docker rm -f
|
||||
|
||||
if command -v docker-compose >/dev/null 2>&1; then
|
||||
docker-compose -f docker-compose.yml up -d
|
||||
else
|
||||
docker compose -f docker-compose.yml up -d
|
||||
fi
|
||||
|
||||
sleep 10
|
||||
docker ps --format "table {{.Names}}\t{{.Status}}"
|
||||
curl -fsS http://127.0.0.1:4000/api/v2/smart-contracts/verification/config >/dev/null
|
||||
EOF_REMOTE
|
||||
|
||||
echo "Blockscout smart-contract verifier enablement"
|
||||
echo "Host: ${HOST}"
|
||||
echo "VMID: ${VMID}"
|
||||
echo
|
||||
|
||||
if (( APPLY == 0 )); then
|
||||
echo "[dry-run] Would patch /opt/blockscout/docker-compose.yml on CT ${VMID}, add smart-contract-verifier, and restart Blockscout."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
ssh root@"${HOST}" "pct exec ${VMID} -- bash -lc $(printf '%q' "${REMOTE_SCRIPT}")"
|
||||
echo
|
||||
echo "[ok] Blockscout verifier sidecar enabled on CT ${VMID}."
|
||||
335
scripts/deployment/plan-gru-v2-wave1-public-pools.sh
Executable file
335
scripts/deployment/plan-gru-v2-wave1-public-pools.sh
Executable file
@@ -0,0 +1,335 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
ROOT_DIR="$(cd "${SCRIPT_DIR}/../.." && pwd)"
|
||||
OUTPUT_PATH="${ROOT_DIR}/reports/extraction/gru-v2-wave1-public-deploy-plan-latest.json"
|
||||
POLICY_PATH="${ROOT_DIR}/config/extraction/gru-v2-wave1-public-seed-policy.json"
|
||||
GAP_REPORT_PATH="${ROOT_DIR}/reports/extraction/gru-v2-wave1-public-gap-report-latest.json"
|
||||
|
||||
source "${ROOT_DIR}/scripts/lib/load-project-env.sh" >/dev/null 2>&1 || true
|
||||
if [[ -f "${ROOT_DIR}/smom-dbis-138/scripts/lib/deployment/dotenv.sh" ]]; then
|
||||
# shellcheck disable=SC1090
|
||||
source "${ROOT_DIR}/smom-dbis-138/scripts/lib/deployment/dotenv.sh" >/dev/null 2>&1 || true
|
||||
load_deployment_env --repo-root "${ROOT_DIR}/smom-dbis-138" >/dev/null 2>&1 || true
|
||||
export PROJECT_ROOT="${ROOT_DIR}"
|
||||
fi
|
||||
|
||||
mkdir -p "$(dirname "$OUTPUT_PATH")"
|
||||
|
||||
python3 - <<'PY' "$ROOT_DIR" "$OUTPUT_PATH" "$POLICY_PATH" "$GAP_REPORT_PATH"
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from collections import Counter
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
project_root = Path(sys.argv[1])
|
||||
output_path = Path(sys.argv[2])
|
||||
policy_path = Path(sys.argv[3])
|
||||
gap_report_path = Path(sys.argv[4])
|
||||
|
||||
if not gap_report_path.exists():
|
||||
subprocess.check_call(
|
||||
["bash", "scripts/verify/build-gru-v2-wave1-public-gap-report.sh"],
|
||||
cwd=project_root,
|
||||
)
|
||||
|
||||
deployment_status = json.loads((project_root / "cross-chain-pmm-lps/config/deployment-status.json").read_text())
|
||||
policy = json.loads(policy_path.read_text())
|
||||
gap_report = json.loads(gap_report_path.read_text())
|
||||
|
||||
chain_suffix = {
|
||||
1: "MAINNET",
|
||||
10: "OPTIMISM",
|
||||
25: "CRONOS",
|
||||
56: "BSC",
|
||||
100: "GNOSIS",
|
||||
137: "POLYGON",
|
||||
1111: "WEMIX",
|
||||
8453: "BASE",
|
||||
42161: "ARBITRUM",
|
||||
42220: "CELO",
|
||||
43114: "AVALANCHE",
|
||||
}
|
||||
|
||||
rpc_env_key = {
|
||||
1: "ETHEREUM_MAINNET_RPC",
|
||||
10: "OPTIMISM_RPC_URL",
|
||||
25: "CRONOS_RPC_URL",
|
||||
56: "BSC_RPC_URL",
|
||||
100: "GNOSIS_MAINNET_RPC",
|
||||
137: "POLYGON_RPC_URL",
|
||||
1111: "WEMIX_RPC",
|
||||
8453: "BASE_RPC_URL",
|
||||
42161: "ARBITRUM_RPC_URL",
|
||||
42220: "CELO_RPC_URL",
|
||||
43114: "AVALANCHE_RPC_URL",
|
||||
}
|
||||
|
||||
integration_env_key = {
|
||||
1: "DODO_PMM_INTEGRATION_MAINNET",
|
||||
10: "DODO_PMM_INTEGRATION_OPTIMISM",
|
||||
25: "DODO_PMM_INTEGRATION_CRONOS",
|
||||
56: "DODO_PMM_INTEGRATION_BSC",
|
||||
100: "DODO_PMM_INTEGRATION_GNOSIS",
|
||||
137: "DODO_PMM_INTEGRATION_POLYGON",
|
||||
1111: "DODO_PMM_INTEGRATION_WEMIX",
|
||||
8453: "DODO_PMM_INTEGRATION_BASE",
|
||||
42161: "DODO_PMM_INTEGRATION_ARBITRUM",
|
||||
42220: "DODO_PMM_INTEGRATION_CELO",
|
||||
43114: "DODO_PMM_INTEGRATION_AVALANCHE",
|
||||
}
|
||||
|
||||
private_key = os.environ.get("PRIVATE_KEY", "")
|
||||
live_checks = os.environ.get("GRU_WAVE1_PLAN_LIVE_CHECKS", "").strip().lower() in {"1", "true", "yes", "on"}
|
||||
include_allowances = os.environ.get("GRU_WAVE1_PLAN_INCLUDE_ALLOWANCES", "").strip().lower() in {"1", "true", "yes", "on"}
|
||||
call_timeout = max(1, int(os.environ.get("GRU_WAVE1_CALL_TIMEOUT_SEC", "2")))
|
||||
estimate_timeout = max(1, int(os.environ.get("GRU_WAVE1_ESTIMATE_TIMEOUT_SEC", "2")))
|
||||
deployer = ""
|
||||
if private_key and live_checks:
|
||||
try:
|
||||
deployer = subprocess.check_output(
|
||||
["cast", "wallet", "address", "--private-key", private_key],
|
||||
text=True,
|
||||
timeout=call_timeout,
|
||||
).strip()
|
||||
except Exception:
|
||||
deployer = ""
|
||||
|
||||
defaults = policy.get("defaults", {})
|
||||
symbol_defaults = policy.get("symbol_defaults", {})
|
||||
pair_overrides = policy.get("pair_overrides", {})
|
||||
|
||||
rows = []
|
||||
|
||||
def merge_policy(base_symbol: str, pair_key: str):
|
||||
merged = dict(defaults)
|
||||
merged.update(symbol_defaults.get(base_symbol, {}))
|
||||
merged.update(pair_overrides.get(pair_key, {}))
|
||||
return merged
|
||||
|
||||
def compute_quote_amount(base_raw: str, price_e18: str):
|
||||
if not base_raw or not price_e18:
|
||||
return None
|
||||
return str((int(base_raw) * int(price_e18)) // 10**18)
|
||||
|
||||
def call_single(rpc_url: str, to: str, sig: str, *args):
|
||||
try:
|
||||
out = subprocess.check_output(
|
||||
["cast", "call", to, sig, *args, "--rpc-url", rpc_url],
|
||||
text=True,
|
||||
stderr=subprocess.DEVNULL,
|
||||
timeout=call_timeout,
|
||||
).strip()
|
||||
return out.split()[0] if out else ""
|
||||
except Exception:
|
||||
return ""
|
||||
|
||||
def estimate_single(rpc_url: str, to: str, sig: str, *args):
|
||||
try:
|
||||
out = subprocess.check_output(
|
||||
["cast", "estimate", to, sig, *args, "--rpc-url", rpc_url],
|
||||
text=True,
|
||||
stderr=subprocess.DEVNULL,
|
||||
timeout=estimate_timeout,
|
||||
).strip()
|
||||
return out.split()[0] if out else ""
|
||||
except Exception:
|
||||
return ""
|
||||
|
||||
for missing in gap_report["missing_first_tier_wave1_pools"]:
|
||||
chain_id = missing["chain_id"]
|
||||
network = missing["network"]
|
||||
pair = missing["pair"]
|
||||
base_symbol, quote_symbol = [part.strip() for part in pair.split("/")]
|
||||
suffix = chain_suffix.get(chain_id, "")
|
||||
rpc_key = rpc_env_key.get(chain_id, "")
|
||||
integration_key = integration_env_key.get(chain_id, "")
|
||||
base_env_key = f"{base_symbol.upper()}_{suffix}" if suffix else ""
|
||||
pair_key = pair.lower().replace("/", "-").replace(" ", "")
|
||||
|
||||
chain_state = deployment_status["chains"].get(str(chain_id), {})
|
||||
quote_address = (chain_state.get("anchorAddresses") or {}).get(quote_symbol, "")
|
||||
rpc_url = os.environ.get(rpc_key, "")
|
||||
integration = os.environ.get(integration_key, "")
|
||||
base_address = os.environ.get(base_env_key, "")
|
||||
|
||||
cfg = merge_policy(base_symbol, pair_key)
|
||||
initial_price = cfg.get("initial_price_e18")
|
||||
base_amount = str(cfg.get("base_amount_raw")) if cfg.get("base_amount_raw") is not None else None
|
||||
quote_amount = str(cfg.get("quote_amount_raw")) if cfg.get("quote_amount_raw") is not None else None
|
||||
if quote_amount is None and base_amount and initial_price:
|
||||
quote_amount = compute_quote_amount(base_amount, initial_price)
|
||||
mint_amount = str(cfg.get("mint_base_amount_raw")) if cfg.get("mint_base_amount_raw") is not None else None
|
||||
fee_bps = str(cfg.get("fee_bps", 3))
|
||||
k_value = str(cfg.get("k", "500000000000000000"))
|
||||
open_twap = bool(cfg.get("open_twap", False))
|
||||
price_mode = cfg.get("price_mode", "unspecified")
|
||||
|
||||
blockers = []
|
||||
if not rpc_url:
|
||||
blockers.append(f"missing_rpc_env:{rpc_key}")
|
||||
if not integration:
|
||||
blockers.append(f"missing_integration_env:{integration_key}")
|
||||
if not base_address:
|
||||
blockers.append(f"missing_base_token_env:{base_env_key}")
|
||||
if not quote_address:
|
||||
blockers.append(f"missing_quote_anchor:{quote_symbol}")
|
||||
if not initial_price:
|
||||
blockers.append("missing_initial_price_e18")
|
||||
if not base_amount:
|
||||
blockers.append("missing_base_amount_raw")
|
||||
if not quote_amount:
|
||||
blockers.append("missing_quote_amount_raw")
|
||||
if price_mode == "bootstrap_reference":
|
||||
blockers.append("bootstrap_price_requires_operator_review")
|
||||
if not private_key:
|
||||
blockers.append("missing_private_key")
|
||||
|
||||
existing_pool = ""
|
||||
base_balance = ""
|
||||
quote_balance = ""
|
||||
base_allowance = ""
|
||||
quote_allowance = ""
|
||||
mintable_base = False
|
||||
base_supply_mode = "unknown"
|
||||
if live_checks and rpc_url and integration and base_address and quote_address:
|
||||
existing_pool = call_single(rpc_url, integration, "pools(address,address)(address)", base_address, quote_address)
|
||||
if deployer:
|
||||
base_balance = call_single(rpc_url, base_address, "balanceOf(address)(uint256)", deployer)
|
||||
quote_balance = call_single(rpc_url, quote_address, "balanceOf(address)(uint256)", deployer)
|
||||
if include_allowances:
|
||||
base_allowance = call_single(rpc_url, base_address, "allowance(address,address)(uint256)", deployer, integration)
|
||||
quote_allowance = call_single(rpc_url, quote_address, "allowance(address,address)(uint256)", deployer, integration)
|
||||
|
||||
live_missing = existing_pool in ("", "0x0000000000000000000000000000000000000000")
|
||||
if not live_missing:
|
||||
blockers = [b for b in blockers if b not in {"missing_base_amount_raw", "missing_quote_amount_raw"}]
|
||||
|
||||
has_seed_amounts = bool(base_amount and quote_amount)
|
||||
create_ready = all(
|
||||
token not in blockers
|
||||
for token in [
|
||||
f"missing_rpc_env:{rpc_key}",
|
||||
f"missing_integration_env:{integration_key}",
|
||||
f"missing_base_token_env:{base_env_key}",
|
||||
f"missing_quote_anchor:{quote_symbol}",
|
||||
"missing_initial_price_e18",
|
||||
"missing_private_key",
|
||||
]
|
||||
) and live_missing
|
||||
|
||||
create_ready_with_bootstrap_price = create_ready
|
||||
create_ready = create_ready and price_mode != "bootstrap_reference"
|
||||
|
||||
seed_ready = create_ready_with_bootstrap_price and has_seed_amounts
|
||||
if base_balance and base_amount:
|
||||
if int(base_balance) >= int(base_amount):
|
||||
base_supply_mode = "wallet_balance"
|
||||
else:
|
||||
gap = int(base_amount) - int(base_balance)
|
||||
if live_checks and deployer:
|
||||
mintable_base = bool(estimate_single(rpc_url, base_address, "mint(address,uint256)", deployer, str(max(gap, 1))))
|
||||
if mintable_base and mint_amount and int(base_balance) + int(mint_amount) >= int(base_amount):
|
||||
base_supply_mode = "mintable_gap"
|
||||
else:
|
||||
base_supply_mode = "insufficient"
|
||||
blockers.append("insufficient_base_balance")
|
||||
if quote_balance and quote_amount and int(quote_balance) < int(quote_amount):
|
||||
blockers.append("insufficient_quote_balance")
|
||||
|
||||
create_cmd = (
|
||||
f"cast send {integration} "
|
||||
f"'createPool(address,address,uint256,uint256,uint256,bool)(address)' "
|
||||
f"{base_address} {quote_address} {fee_bps} {initial_price or '<initial_price_e18>'} {k_value} "
|
||||
f"{str(open_twap).lower()} --rpc-url {rpc_url} --private-key $PRIVATE_KEY"
|
||||
if integration and base_address and quote_address and rpc_url else ""
|
||||
)
|
||||
seed_cmd = (
|
||||
f"cast send {integration} "
|
||||
f"'addLiquidity(address,uint256,uint256)(uint256,uint256,uint256)' "
|
||||
f"<pool_address> {base_amount or '<base_amount_raw>'} {quote_amount or '<quote_amount_raw>'} "
|
||||
f"--rpc-url {rpc_url} --private-key $PRIVATE_KEY"
|
||||
if integration and rpc_url else ""
|
||||
)
|
||||
|
||||
rows.append({
|
||||
"chain_id": chain_id,
|
||||
"network": network,
|
||||
"pair": pair,
|
||||
"base_symbol": base_symbol,
|
||||
"quote_symbol": quote_symbol,
|
||||
"hub_stable": missing["hub_stable"],
|
||||
"rpc_env_key": rpc_key,
|
||||
"integration_env_key": integration_key,
|
||||
"base_env_key": base_env_key,
|
||||
"quote_anchor_source": "deployment-status.json",
|
||||
"rpc_url_present": bool(rpc_url),
|
||||
"integration_present": bool(integration),
|
||||
"base_token_present": bool(base_address),
|
||||
"quote_anchor_present": bool(quote_address),
|
||||
"base_address": base_address or None,
|
||||
"quote_address": quote_address or None,
|
||||
"integration_address": integration or None,
|
||||
"deployer": deployer or None,
|
||||
"live_checks_enabled": live_checks,
|
||||
"existing_pool_address": existing_pool or None,
|
||||
"initial_price_e18": initial_price,
|
||||
"price_mode": price_mode,
|
||||
"fee_bps": fee_bps,
|
||||
"k": k_value,
|
||||
"open_twap": open_twap,
|
||||
"base_amount_raw": base_amount,
|
||||
"quote_amount_raw": quote_amount,
|
||||
"mint_base_amount_raw": mint_amount,
|
||||
"wallet_base_balance_raw": base_balance or None,
|
||||
"wallet_quote_balance_raw": quote_balance or None,
|
||||
"wallet_base_allowance_raw": base_allowance or None,
|
||||
"wallet_quote_allowance_raw": quote_allowance or None,
|
||||
"mintable_base": mintable_base,
|
||||
"base_supply_mode": base_supply_mode,
|
||||
"ready_to_create": create_ready,
|
||||
"ready_to_create_with_bootstrap_price": create_ready_with_bootstrap_price,
|
||||
"ready_to_seed": seed_ready and "insufficient_base_balance" not in blockers and "insufficient_quote_balance" not in blockers,
|
||||
"blockers": sorted(set(blockers)),
|
||||
"create_command": create_cmd,
|
||||
"seed_command": seed_cmd,
|
||||
"next_step": (
|
||||
"deploy_or_seed_now" if seed_ready and "insufficient_base_balance" not in blockers and "insufficient_quote_balance" not in blockers
|
||||
else "operator_review_bootstrap_price" if create_ready_with_bootstrap_price
|
||||
else "resolve_blockers"
|
||||
),
|
||||
})
|
||||
|
||||
summary = {
|
||||
"planned_missing_rows": len(rows),
|
||||
"ready_to_create_strict": sum(1 for row in rows if row["ready_to_create"]),
|
||||
"ready_to_create_with_bootstrap_price": sum(1 for row in rows if row["ready_to_create_with_bootstrap_price"]),
|
||||
"ready_to_seed": sum(1 for row in rows if row["ready_to_seed"]),
|
||||
"existing_pool_rows_found_onchain": sum(1 for row in rows if row["existing_pool_address"] and row["existing_pool_address"] != "0x0000000000000000000000000000000000000000"),
|
||||
"counts_by_network": dict(sorted(Counter(row["network"] for row in rows).items())),
|
||||
"top_blockers": dict(sorted(Counter(blocker for row in rows for blocker in row["blockers"]).items(), key=lambda item: (-item[1], item[0]))[:20]),
|
||||
}
|
||||
|
||||
result = {
|
||||
"generated_at": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
|
||||
"description": "Universal GRU v2 Wave 1 public pool deployment plan derived from the missing-pool gap report, env, deployment-status, and seed policy.",
|
||||
"sources": [
|
||||
"reports/extraction/gru-v2-wave1-public-gap-report-latest.json",
|
||||
"cross-chain-pmm-lps/config/deployment-status.json",
|
||||
"config/extraction/gru-v2-wave1-public-seed-policy.json",
|
||||
"repo env via scripts/lib/load-project-env.sh",
|
||||
"optional live RPC checks via GRU_WAVE1_PLAN_LIVE_CHECKS=1",
|
||||
],
|
||||
"summary": summary,
|
||||
"rows": sorted(rows, key=lambda item: (item["chain_id"], item["pair"])),
|
||||
}
|
||||
|
||||
output_path.write_text(json.dumps(result, indent=2) + "\n")
|
||||
print(json.dumps(summary, indent=2))
|
||||
PY
|
||||
|
||||
echo "Wrote ${OUTPUT_PATH}"
|
||||
152
scripts/deployment/run-gru-v2-full-deployment.sh
Executable file
152
scripts/deployment/run-gru-v2-full-deployment.sh
Executable file
@@ -0,0 +1,152 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Orchestrate the repo-backed portions of the GRU v2 full deployment plan.
|
||||
# This script is intentionally honest about scope:
|
||||
# - Chain 138 has real PMM desired-state sync and verification paths.
|
||||
# - ALL Mainnet 651940 full cA* mesh is not fully deployable from this repo today.
|
||||
# - Multi-protocol completion on 138/651940 is still partially inventory-only.
|
||||
#
|
||||
# Usage:
|
||||
# bash scripts/deployment/run-gru-v2-full-deployment.sh [--dry-run] [--apply-chain138]
|
||||
#
|
||||
# Exit codes:
|
||||
# 0 = all repo-backed steps passed and no known implementation blockers remain
|
||||
# 1 = one or more repo-backed steps failed
|
||||
# 2 = repo-backed steps passed but external/not-yet-implemented blockers remain
|
||||
#
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
|
||||
|
||||
DRY_RUN=1
|
||||
APPLY_CHAIN138=0
|
||||
|
||||
for arg in "$@"; do
|
||||
case "$arg" in
|
||||
--dry-run) DRY_RUN=1 ;;
|
||||
--apply-chain138) DRY_RUN=0; APPLY_CHAIN138=1 ;;
|
||||
*)
|
||||
echo "Unknown argument: $arg" >&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
run_cmd() {
|
||||
if (( DRY_RUN )); then
|
||||
echo "[DRY-RUN] $*"
|
||||
else
|
||||
"$@"
|
||||
fi
|
||||
}
|
||||
|
||||
say() {
|
||||
printf '\n== %s ==\n' "$1"
|
||||
}
|
||||
|
||||
BLOCKERS=0
|
||||
FAILURES=0
|
||||
|
||||
say "GRU v2 full deployment"
|
||||
echo "projectRoot=$PROJECT_ROOT"
|
||||
echo "dryRun=$DRY_RUN"
|
||||
echo "applyChain138=$APPLY_CHAIN138"
|
||||
|
||||
say "Validate planning artifacts"
|
||||
if ! run_cmd python3 "$PROJECT_ROOT/scripts/validation/validate-gru-v2-full-mesh-artifacts.py"; then
|
||||
echo "validation failed: GRU mesh artifacts" >&2
|
||||
FAILURES=1
|
||||
fi
|
||||
|
||||
say "Reconcile current live status"
|
||||
if ! run_cmd python3 "$PROJECT_ROOT/scripts/verify/reconcile-gru-v2-full-mesh-status.py"; then
|
||||
echo "reconcile failed: GRU mesh live status" >&2
|
||||
FAILURES=1
|
||||
fi
|
||||
|
||||
say "Chain 138 repo-backed deployment path"
|
||||
if (( APPLY_CHAIN138 )); then
|
||||
if ! run_cmd bash "$PROJECT_ROOT/scripts/deployment/run-all-next-steps-chain138.sh" --mesh-only --skip-register-gru; then
|
||||
echo "chain 138 deployment path failed" >&2
|
||||
FAILURES=1
|
||||
fi
|
||||
if ! run_cmd bash "$PROJECT_ROOT/scripts/deployment/deploy-chain138-pilot-protocol-venues.sh" --apply; then
|
||||
echo "chain 138 protocol venue deployment path failed" >&2
|
||||
FAILURES=1
|
||||
fi
|
||||
else
|
||||
run_cmd bash "$PROJECT_ROOT/scripts/deployment/run-all-next-steps-chain138.sh" --dry-run --mesh-only --skip-register-gru
|
||||
run_cmd bash "$PROJECT_ROOT/scripts/deployment/deploy-chain138-pilot-protocol-venues.sh" --dry-run
|
||||
fi
|
||||
|
||||
say "Chain 138 readiness"
|
||||
if ! run_cmd bash "$PROJECT_ROOT/scripts/verify/check-gru-v2-chain138-readiness.sh"; then
|
||||
echo "chain 138 readiness failed" >&2
|
||||
FAILURES=1
|
||||
fi
|
||||
|
||||
say "Chain 138 remaining protocol surface"
|
||||
if [[ -x "$PROJECT_ROOT/scripts/deployment/deploy-chain138-aave-v3-execution-stack.sh" ]]; then
|
||||
run_cmd bash "$PROJECT_ROOT/scripts/deployment/deploy-chain138-aave-v3-execution-stack.sh" --dry-run || BLOCKERS=1
|
||||
else
|
||||
echo "No Chain 138 Aave execution deployer was found."
|
||||
BLOCKERS=1
|
||||
fi
|
||||
|
||||
if [[ -x "$PROJECT_ROOT/scripts/deployment/deploy-chain138-aave-quote-push-receiver.sh" ]]; then
|
||||
run_cmd bash "$PROJECT_ROOT/scripts/deployment/deploy-chain138-aave-quote-push-receiver.sh" --dry-run || BLOCKERS=1
|
||||
else
|
||||
echo "No Chain 138 Aave quote-push receiver deployer was found."
|
||||
BLOCKERS=1
|
||||
fi
|
||||
|
||||
run_cmd bash "$PROJECT_ROOT/scripts/verify/check-chain138-remaining-protocol-env.sh" || BLOCKERS=1
|
||||
|
||||
say "ALL Mainnet 651940 implementation gate"
|
||||
if [[ -x "$PROJECT_ROOT/scripts/deployment/deploy-allmainnet-ca-tokens.sh" ]]; then
|
||||
run_cmd bash "$PROJECT_ROOT/scripts/deployment/deploy-allmainnet-ca-tokens.sh" --dry-run
|
||||
else
|
||||
echo "No repo-backed full cA* contract deployer was found for 651940."
|
||||
BLOCKERS=1
|
||||
fi
|
||||
|
||||
if [[ -x "$PROJECT_ROOT/scripts/deployment/sync-allmainnet-pmm-pools-from-json.sh" ]]; then
|
||||
run_cmd bash "$PROJECT_ROOT/scripts/deployment/sync-allmainnet-pmm-pools-from-json.sh"
|
||||
else
|
||||
echo "No repo-backed full 651940 PMM mesh deployer was found."
|
||||
BLOCKERS=1
|
||||
fi
|
||||
|
||||
echo "651940 still requires live cA* addresses, DODO integration/provider addresses, and routeable liquidity to finish."
|
||||
BLOCKERS=1
|
||||
|
||||
say "Protocol completion gate"
|
||||
echo "DODO on Chain 138 is script-backed."
|
||||
echo "Full Uniswap v2/v3, SushiSwap, Curve, Balancer, 1Inch, Aave, GMX, and dYdX completion on 138/651940 is not fully deployer-backed in this repo."
|
||||
echo "Marking protocol completion as externally blocked until venue-specific deploy/integration scripts exist."
|
||||
BLOCKERS=1
|
||||
|
||||
if ! run_cmd bash "$PROJECT_ROOT/scripts/verify/check-gru-v2-core-protocol-blockers.sh"; then
|
||||
BLOCKERS=1
|
||||
fi
|
||||
run_cmd bash "$PROJECT_ROOT/scripts/verify/check-allmainnet-protocol-env.sh" || BLOCKERS=1
|
||||
|
||||
say "Implementation verifier"
|
||||
if ! run_cmd python3 "$PROJECT_ROOT/scripts/verify/check-gru-v2-full-deployment-implementation.py"; then
|
||||
BLOCKERS=1
|
||||
fi
|
||||
|
||||
say "Summary"
|
||||
if (( FAILURES )); then
|
||||
echo "result=failed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if (( BLOCKERS )); then
|
||||
echo "result=partial"
|
||||
echo "note=repo-backed deployment steps completed, but external/not-yet-implemented blockers remain"
|
||||
exit 2
|
||||
fi
|
||||
|
||||
echo "result=complete"
|
||||
105
scripts/deployment/run-gru-v2-mainnet-funding.sh
Executable file
105
scripts/deployment/run-gru-v2-mainnet-funding.sh
Executable file
@@ -0,0 +1,105 @@
|
||||
#!/usr/bin/env bash
# Orchestrates GRU V2 Mainnet funding: refresh plan reports, print (and with
# --execute, apply) bridge parity, mint canonical tokens on Chain 138, bridge
# them to Mainnet cW mirrors, then fund the target pools.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# Best-effort env loading; both sources are optional.
source "${REPO_ROOT}/scripts/lib/load-project-env.sh" >/dev/null 2>&1 || true
source "${REPO_ROOT}/smom-dbis-138/scripts/load-env.sh" >/dev/null 2>&1 || true

# Target funding size in USD; overridable via env or --target-usd.
TARGET_USD="${TARGET_USD:-100000}"
EXECUTE=0

# Print CLI usage and the five stages this runner performs.
usage() {
cat <<'EOF'
Usage:
  bash scripts/deployment/run-gru-v2-mainnet-funding.sh [--target-usd=100000] [--execute]

Stages:
  1. Ensure reports are current
  2. Print / optionally apply GRU V2 Mainnet bridge parity
  3. Mint required cUSDT_V2 / cUSDC_V2 on Chain 138
  4. Bridge the required amounts to Mainnet cW mirrors
  5. Apply full-target funding to cwusdt-usdt and cwusdc-usdc
EOF
}

for arg in "$@"; do
  case "$arg" in
    --target-usd=*) TARGET_USD="${arg#*=}" ;;
    --execute) EXECUTE=1 ;;
    --help|-h) usage; exit 0 ;;
    *) echo "[fail] unknown arg: $arg" >&2; usage >&2; exit 2 ;;
  esac
done

command -v python3 >/dev/null 2>&1 || { echo "[fail] missing required command: python3" >&2; exit 1; }
command -v cast >/dev/null 2>&1 || { echo "[fail] missing required command: cast" >&2; exit 1; }

# Stage 1: regenerate the three reports this runner consumes.
bash "${REPO_ROOT}/scripts/verify/build-gru-v2-mainnet-bridge-parity.sh" >/dev/null
bash "${REPO_ROOT}/scripts/verify/build-mainnet-direct-exit-funding-plan.sh" >/dev/null
bash "${REPO_ROOT}/scripts/verify/build-gru-v2-mainnet-funding-plan.sh" >/dev/null

# Human-readable summary of the funding plan (lane policy + per-asset rows).
TARGET_USD="$TARGET_USD" REPO_ROOT="$REPO_ROOT" python3 - <<'PY'
import json, os
from pathlib import Path
root = Path(os.environ["REPO_ROOT"])
# NOTE(review): target_usd is read but never used in this summary — dead code?
target_usd = int(os.environ["TARGET_USD"])
report = json.loads((root / "reports/extraction/gru-v2-mainnet-funding-plan-latest.json").read_text())
lane = report.get("parity_state", {}).get("lane_policy", {})
print("lanePolicy.v2CutoverActive="+str(lane.get("v2_cutover_active")))
print("lanePolicy.v1DisplacedAssets="+",".join(lane.get("v1_displaced_assets", [])))
for row in report["assets"]:
    print("asset="+row["symbol"])
    print("targetUsd="+str(row["target_exit_usd"]))
    print("mintNeededRaw="+row["mint_needed_raw"])
    print("bridgeRaw="+row["bridge_amount_raw"])
    print("pair="+row["funding_pair"])
    print("quoteSideReadyNow="+str(row.get("wallet_can_fund_quote_side_now")))
    print("fundPairCommand="+row["fund_pool_command"])
    print("---")
PY

# run_stage CMD: always echo the command; actually run it only under --execute.
run_stage() {
  local cmd="$1"
  echo "$cmd"
  if (( EXECUTE == 1 )); then
    bash -lc "$cmd"
  fi
}

# Stage 2: bridge parity (the inner script gets --execute only in execute mode).
run_stage "bash ${REPO_ROOT}/scripts/deployment/configure-gru-v2-mainnet-bridge-parity.sh $([[ $EXECUTE -eq 1 ]] && echo --execute)"

# One pipe-delimited line per asset: symbol|canonical|mirrored|mint|bridge|pair.
readarray -t ASSET_LINES < <(
TARGET_USD="$TARGET_USD" REPO_ROOT="$REPO_ROOT" python3 - <<'PY'
import json, os
from pathlib import Path
root = Path(os.environ["REPO_ROOT"])
target_usd = int(os.environ["TARGET_USD"])
report = json.loads((root / "reports/extraction/gru-v2-mainnet-funding-plan-latest.json").read_text())
for row in report["assets"]:
    print("|".join([
        row["symbol"],
        row["canonical_token"],
        row["mirrored_token"],
        row["mint_needed_raw"],
        row["bridge_amount_raw"],
        row["funding_pair"],
    ]))
PY
)

# Stages 3-5 per asset: mint (if needed), bridge (if needed), fund the pool.
for entry in "${ASSET_LINES[@]}"; do
  IFS='|' read -r SYMBOL TOKEN MIRRORED MINT_NEEDED BRIDGE_RAW FUND_PAIR <<<"$entry"
  if [[ "$MINT_NEEDED" != "0" ]]; then
    # RPC/key env vars are escaped so they expand inside run_stage's subshell.
    run_stage "cast send ${TOKEN} 'mint(address,uint256)' ${CANONICAL_WALLET:-0x4A666F96fC8764181194447A7dFdb7d471b301C8} ${MINT_NEEDED} --rpc-url \"\$RPC_URL_138\" --private-key \"\$PRIVATE_KEY\" --legacy"
  fi
  if [[ "$BRIDGE_RAW" != "0" ]]; then
    run_stage "bash ${REPO_ROOT}/scripts/bridge/bridge-canonical-token-to-mainnet-cw.sh --label ${SYMBOL} --canonical-token ${TOKEN} --mirrored-token ${MIRRORED} --raw-amount ${BRIDGE_RAW} --recipient ${CANONICAL_WALLET:-0x4A666F96fC8764181194447A7dFdb7d471b301C8} --approve $([[ $EXECUTE -eq 1 ]] && echo --execute)"
  fi
  run_stage "bash ${REPO_ROOT}/scripts/deployment/apply-mainnet-direct-exit-funding.sh --pair=${FUND_PAIR} --mode=full-target --target-usd=${TARGET_USD} $([[ $EXECUTE -eq 1 ]] && echo --execute)"
done

if (( EXECUTE == 0 )); then
  echo "Dry-run only. Re-run with --execute to broadcast."
  echo "Note: if quoteSideReadyNow=false in the printed plan, the pool-funding stage is still capital-blocked even though the V2 bridge lane is ready."
fi
||||
360
scripts/deployment/run-gru-v2-wave1-public-pools.sh
Executable file
360
scripts/deployment/run-gru-v2-wave1-public-pools.sh
Executable file
@@ -0,0 +1,360 @@
|
||||
#!/usr/bin/env bash
# GRU v2 Wave 1 public pool operator: plans pool creation/seeding from the
# latest deploy plan and, under --execute, broadcasts via `cast`.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ROOT_DIR="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# Input plan and output run-record, both under reports/extraction.
PLAN_PATH="${ROOT_DIR}/reports/extraction/gru-v2-wave1-public-deploy-plan-latest.json"
OUTPUT_PATH="${ROOT_DIR}/reports/extraction/gru-v2-wave1-public-deploy-run-latest.json"

# Best-effort env loading; deployment dotenv is optional.
source "${ROOT_DIR}/scripts/lib/load-project-env.sh" >/dev/null 2>&1 || true
if [[ -f "${ROOT_DIR}/smom-dbis-138/scripts/lib/deployment/dotenv.sh" ]]; then
  # shellcheck disable=SC1090
  source "${ROOT_DIR}/smom-dbis-138/scripts/lib/deployment/dotenv.sh" >/dev/null 2>&1 || true
  load_deployment_env --repo-root "${ROOT_DIR}/smom-dbis-138" >/dev/null 2>&1 || true
  # Restore PROJECT_ROOT in case load_deployment_env repointed it.
  export PROJECT_ROOT="${ROOT_DIR}"
fi
||||
# require_cmd NAME
# Abort the script with status 1 when NAME cannot be resolved on PATH.
require_cmd() {
  if ! command -v "$1" >/dev/null 2>&1; then
    echo "[fail] missing required command: $1" >&2
    exit 1
  fi
}
|
||||
# parse_tx_hash OUTPUT
# Pull the most recent transaction hash out of `cast send` output.  A bare
# "0x<64 hex>" line wins; otherwise a "transactionHash 0x<64 hex>" key/value
# line is used.  Prints an empty line when neither form appears.
parse_tx_hash() {
  local raw="$1"
  local found
  found="$(printf '%s\n' "$raw" | grep -E '^0x[0-9a-fA-F]{64}$' | tail -n1 || true)"
  if [[ -z "$found" ]]; then
    found="$(printf '%s\n' "$raw" | grep -E '^transactionHash[[:space:]]+0x[0-9a-fA-F]{64}$' | awk '{print $2}' | tail -n1 || true)"
  fi
  printf '%s\n' "$found"
}
|
||||
require_cmd python3

# CLI filters and toggles; empty/zero means "no filter".
CHAIN_ID_FILTER=""
PAIR_FILTER=""
MAX_POOLS=0
EXECUTE=0
ONLY_READY=0
SKIP_SEED=0
ALLOW_BOOTSTRAP_PRICES=0
BASE_AMOUNT_OVERRIDE=""
QUOTE_AMOUNT_OVERRIDE=""
MINT_BASE_AMOUNT_OVERRIDE=""

for arg in "$@"; do
  case "$arg" in
    --chain-id=*) CHAIN_ID_FILTER="${arg#*=}" ;;
    --pair=*) PAIR_FILTER="${arg#*=}" ;;
    --max-pools=*) MAX_POOLS="${arg#*=}" ;;
    --execute) EXECUTE=1 ;;
    --only-ready) ONLY_READY=1 ;;
    --skip-seed) SKIP_SEED=1 ;;
    --allow-bootstrap-prices) ALLOW_BOOTSTRAP_PRICES=1 ;;
    --base-amount=*) BASE_AMOUNT_OVERRIDE="${arg#*=}" ;;
    --quote-amount=*) QUOTE_AMOUNT_OVERRIDE="${arg#*=}" ;;
    --mint-base-amount=*) MINT_BASE_AMOUNT_OVERRIDE="${arg#*=}" ;;
    *)
      echo "[fail] unknown arg: $arg" >&2
      exit 2
      ;;
  esac
done

# Live on-chain checks are forced on for execute runs (unless pre-set).
if (( EXECUTE == 1 )); then
  export GRU_WAVE1_PLAN_LIVE_CHECKS="${GRU_WAVE1_PLAN_LIVE_CHECKS:-1}"
fi

# Refresh the plan the selection below is based on.
bash "${ROOT_DIR}/scripts/deployment/plan-gru-v2-wave1-public-pools.sh" >/dev/null

mkdir -p "$(dirname "$OUTPUT_PATH")"

# Write the (dry-run) run record.  Execute-mode rows are marked
# "execute_requires_shell_runner" here; the shell loop below does the real work.
python3 - <<'PY' "$PLAN_PATH" "$OUTPUT_PATH" "$CHAIN_ID_FILTER" "$PAIR_FILTER" "$MAX_POOLS" "$EXECUTE" "$ONLY_READY" "$SKIP_SEED" "$ALLOW_BOOTSTRAP_PRICES"
import json
# NOTE(review): subprocess is imported but not used in this heredoc.
import subprocess
import sys
from datetime import datetime, timezone
from pathlib import Path

plan_path = Path(sys.argv[1])
output_path = Path(sys.argv[2])
chain_filter = sys.argv[3]
pair_filter = sys.argv[4].lower()
max_pools = int(sys.argv[5])
execute = sys.argv[6] == "1"
only_ready = sys.argv[7] == "1"
skip_seed = sys.argv[8] == "1"
allow_bootstrap_prices = sys.argv[9] == "1"

plan = json.loads(plan_path.read_text())
rows = plan["rows"]

# Apply chain/pair/readiness filters and the optional row cap.
selected = []
for row in rows:
    if chain_filter and str(row["chain_id"]) != chain_filter:
        continue
    if pair_filter and row["pair"].lower() != pair_filter:
        continue
    if only_ready and not row["ready_to_create_with_bootstrap_price"]:
        continue
    selected.append(row)
    if max_pools and len(selected) >= max_pools:
        break

result_rows = []
for row in selected:
    # One record per selected pool, defaulting to "planned".
    out = {
        "chain_id": row["chain_id"],
        "network": row["network"],
        "pair": row["pair"],
        "mode": "execute" if execute else "dry_run",
        "create_planned": row["ready_to_create_with_bootstrap_price"],
        "seed_planned": row["ready_to_seed"] and not skip_seed,
        "price_mode": row["price_mode"],
        "blockers": list(row["blockers"]),
        "base_supply_mode": row.get("base_supply_mode"),
        "mintable_base": row.get("mintable_base"),
        "wallet_base_balance_raw": row.get("wallet_base_balance_raw"),
        "wallet_quote_balance_raw": row.get("wallet_quote_balance_raw"),
        "create_tx": None,
        "seed_tx": None,
        "pool_address_before": row["existing_pool_address"],
        "pool_address_after": row["existing_pool_address"],
        "status": "planned",
    }
    # Bootstrap-priced pools are refused unless explicitly allowed.
    if row["price_mode"] == "bootstrap_reference" and not allow_bootstrap_prices:
        out["status"] = "blocked_bootstrap_price_guard"
        result_rows.append(out)
        continue
    if not execute:
        result_rows.append(out)
        continue
    out["status"] = "execute_requires_shell_runner"
    result_rows.append(out)

result = {
    "generated_at": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
    "description": "Universal GRU v2 Wave 1 pool operator run record. Execute mode is implemented in the surrounding shell runner.",
    "selected_count": len(selected),
    "rows": result_rows,
}

output_path.write_text(json.dumps(result, indent=2) + "\n")
print(json.dumps({"selected_count": len(selected)}, indent=2))
PY

# Dry-run ends here; the rest of the script is the live execute path.
if (( EXECUTE == 0 )); then
  echo "Wrote ${OUTPUT_PATH}"
  exit 0
fi
||||
|
||||
require_cmd cast

# Re-select rows (same filters as the planner) and emit one base64-encoded
# JSON payload per pool so the shell loop can eval them into variables.
# NOTE(review): if no row matches, "${RUN_ROWS[@]}" below errors under
# `set -u` on bash < 4.4 — confirm the target bash version.
mapfile -t RUN_ROWS < <(
python3 - <<'PY' "$PLAN_PATH" "$CHAIN_ID_FILTER" "$PAIR_FILTER" "$MAX_POOLS" "$ONLY_READY" "$ALLOW_BOOTSTRAP_PRICES" "$BASE_AMOUNT_OVERRIDE" "$QUOTE_AMOUNT_OVERRIDE" "$MINT_BASE_AMOUNT_OVERRIDE"
import base64, json, os, sys
from pathlib import Path

plan = json.loads(Path(sys.argv[1]).read_text())
chain_filter = sys.argv[2]
pair_filter = sys.argv[3].lower()
max_pools = int(sys.argv[4])
only_ready = sys.argv[5] == "1"
allow_bootstrap_prices = sys.argv[6] == "1"
base_override = sys.argv[7]
quote_override = sys.argv[8]
mint_override = sys.argv[9]

rows = []
for row in plan["rows"]:
    if chain_filter and str(row["chain_id"]) != chain_filter:
        continue
    if pair_filter and row["pair"].lower() != pair_filter:
        continue
    if only_ready and not row["ready_to_create_with_bootstrap_price"]:
        continue
    if row["price_mode"] == "bootstrap_reference" and not allow_bootstrap_prices:
        continue
    rows.append(row)
    if max_pools and len(rows) >= max_pools:
        break

for row in rows:
    # Base64 keeps the payload safe for shell transport and eval below.
    payload = {
        "chain_id": row["chain_id"],
        "network": row["network"],
        "pair": row["pair"],
        "integration": row["integration_address"] or "",
        "base_addr": row["base_address"] or "",
        "quote_addr": row["quote_address"] or "",
        "existing_pool": row["existing_pool_address"] or "",
        "initial_price": row["initial_price_e18"] or "",
        "fee_bps": row["fee_bps"] or "",
        "k_value": row["k"] or "",
        "open_twap": "true" if row["open_twap"] else "false",
        "base_amount": base_override or row["base_amount_raw"] or "",
        "quote_amount": quote_override or row["quote_amount_raw"] or "",
        "mint_amount": mint_override or row["mint_base_amount_raw"] or "",
        "rpc_env_key": row["rpc_env_key"] or "",
        "rpc_url": os.environ.get(row["rpc_env_key"] or "", "") if row.get("rpc_url_present") else "",
        "ready_to_seed": "true" if row.get("ready_to_seed") else "false",
        "base_supply_mode": row.get("base_supply_mode") or "",
        "mintable_base": "true" if row.get("mintable_base") else "false",
        "wallet_base_balance_raw": row.get("wallet_base_balance_raw") or "",
        "wallet_quote_balance_raw": row.get("wallet_quote_balance_raw") or "",
        "blockers_json": json.dumps(row.get("blockers", []), separators=(",", ":")),
    }
    print(base64.b64encode(json.dumps(payload).encode()).decode())
PY
)

# Broadcasting wallet derived from PRIVATE_KEY.
DEPLOYER="$(cast wallet address --private-key "${PRIVATE_KEY:-}")"

# Seed the run-record document that the loop appends rows to.
RESULT_JSON="$(
python3 - <<'PY'
import json
print(json.dumps({"generated_at": None, "description": "Universal GRU v2 Wave 1 pool operator live run record.", "rows": []}))
PY
)"

for encoded in "${RUN_ROWS[@]}"; do
  # Decode the payload into UPPERCASE shell variables (CHAIN_ID, PAIR, ...).
  # shlex.quote keeps the generated assignments safe to eval.
  eval "$(
python3 - <<'PY' "$encoded"
import base64, json, shlex, sys
row = json.loads(base64.b64decode(sys.argv[1]).decode())
for key, value in row.items():
    print(f"{key.upper()}={shlex.quote(str(value))}")
PY
)"
  # Per-row state, reset each iteration.
  STATUS="executed"
  CREATE_TX=""
  SEED_TX=""
  POOL_ADDRESS="$EXISTING_POOL"
  NOTES=()
  SEED_READY=0
  MINTABLE_BASE_READY=0
  HAS_INSUFFICIENT_BASE=0
  HAS_INSUFFICIENT_QUOTE=0

  if [[ "${READY_TO_SEED:-false}" == "true" ]]; then
    SEED_READY=1
  fi
  if [[ "${MINTABLE_BASE:-false}" == "true" ]]; then
    MINTABLE_BASE_READY=1
  fi
  # Substring match against the planner's compact blockers JSON.
  if [[ "${BLOCKERS_JSON:-[]}" == *'"insufficient_base_balance"'* ]]; then
    HAS_INSUFFICIENT_BASE=1
  fi
  if [[ "${BLOCKERS_JSON:-[]}" == *'"insufficient_quote_balance"'* ]]; then
    HAS_INSUFFICIENT_QUOTE=1
  fi

  if [[ -z "$RPC_URL" || -z "$INTEGRATION" || -z "$BASE_ADDR" || -z "$QUOTE_ADDR" ]]; then
    STATUS="blocked_missing_runtime_inputs"
  else
    # Create the pool only when no pool address is known yet.
    if [[ -z "$POOL_ADDRESS" || "$POOL_ADDRESS" == "0x0000000000000000000000000000000000000000" ]]; then
      create_output="$(
        cast send "$INTEGRATION" \
          'createPool(address,address,uint256,uint256,uint256,bool)(address)' \
          "$BASE_ADDR" "$QUOTE_ADDR" "$FEE_BPS" "$INITIAL_PRICE" "$K_VALUE" "$OPEN_TWAP" \
          --rpc-url "$RPC_URL" \
          --private-key "${PRIVATE_KEY:-}"
      )"
      CREATE_TX="$(parse_tx_hash "$create_output")"
      # Read the resulting pool address back from the integration contract.
      POOL_ADDRESS="$(cast call "$INTEGRATION" 'pools(address,address)(address)' "$BASE_ADDR" "$QUOTE_ADDR" --rpc-url "$RPC_URL" | awk '{print $1}')"
    fi

    # Seeding stage: guarded by --skip-seed, a real pool address, and amounts.
    if (( SKIP_SEED == 0 )) && [[ -n "$POOL_ADDRESS" && "$POOL_ADDRESS" != "0x0000000000000000000000000000000000000000" && -n "$BASE_AMOUNT" && -n "$QUOTE_AMOUNT" ]]; then
      if (( HAS_INSUFFICIENT_BASE == 1 )); then
        STATUS="blocked_seed_base_supply"
        NOTES+=("seed_skipped_insufficient_base_supply")
      elif (( HAS_INSUFFICIENT_QUOTE == 1 )); then
        STATUS="blocked_seed_quote_supply"
        NOTES+=("seed_skipped_insufficient_quote_supply")
      elif (( SEED_READY == 0 )); then
        STATUS="blocked_seed_readiness"
        NOTES+=("seed_skipped_not_ready")
      else
        # Use planner-reported balances when present; otherwise query chain.
        BASE_BAL="${WALLET_BASE_BALANCE_RAW:-}"
        QUOTE_BAL="${WALLET_QUOTE_BALANCE_RAW:-}"
        if [[ -z "$BASE_BAL" ]]; then
          BASE_BAL="$(cast call "$BASE_ADDR" 'balanceOf(address)(uint256)' "$DEPLOYER" --rpc-url "$RPC_URL" | awk '{print $1}')"
        fi
        if [[ -z "$QUOTE_BAL" ]]; then
          QUOTE_BAL="$(cast call "$QUOTE_ADDR" 'balanceOf(address)(uint256)' "$DEPLOYER" --rpc-url "$RPC_URL" | awk '{print $1}')"
        fi
        # NOTE(review): bash (( )) arithmetic is 64-bit signed; raw uint256
        # token amounts can overflow these comparisons and silently flip the
        # result — confirm amounts stay below 2^63, or compare via python/bc.
        if (( BASE_BAL < BASE_AMOUNT )); then
          # Top up the base side by minting, when the token allows it.
          if (( MINTABLE_BASE_READY == 1 )) && [[ -n "$MINT_AMOUNT" && "$MINT_AMOUNT" != "0" ]]; then
            cast send "$BASE_ADDR" 'mint(address,uint256)' "$DEPLOYER" "$MINT_AMOUNT" --rpc-url "$RPC_URL" --private-key "${PRIVATE_KEY:-}" >/dev/null
            NOTES+=("minted_base")
            BASE_BAL="$(cast call "$BASE_ADDR" 'balanceOf(address)(uint256)' "$DEPLOYER" --rpc-url "$RPC_URL" | awk '{print $1}')"
          fi
        fi
        # Re-check after any mint before committing to approvals.
        if (( BASE_BAL < BASE_AMOUNT )); then
          STATUS="blocked_seed_base_supply"
          NOTES+=("seed_skipped_postcheck_base_short")
        elif (( QUOTE_BAL < QUOTE_AMOUNT )); then
          STATUS="blocked_seed_quote_supply"
          NOTES+=("seed_skipped_postcheck_quote_short")
        else
          cast send "$BASE_ADDR" 'approve(address,uint256)(bool)' "$INTEGRATION" "$BASE_AMOUNT" --rpc-url "$RPC_URL" --private-key "${PRIVATE_KEY:-}" >/dev/null
          cast send "$QUOTE_ADDR" 'approve(address,uint256)(bool)' "$INTEGRATION" "$QUOTE_AMOUNT" --rpc-url "$RPC_URL" --private-key "${PRIVATE_KEY:-}" >/dev/null
          seed_output="$(
            cast send "$INTEGRATION" \
              'addLiquidity(address,uint256,uint256)(uint256,uint256,uint256)' \
              "$POOL_ADDRESS" "$BASE_AMOUNT" "$QUOTE_AMOUNT" \
              --rpc-url "$RPC_URL" \
              --private-key "${PRIVATE_KEY:-}"
          )"
          SEED_TX="$(parse_tx_hash "$seed_output")"
          if [[ -n "$CREATE_TX" ]]; then
            STATUS="executed_create_and_seed"
          else
            STATUS="executed_seed_only"
          fi
        fi
      fi
    elif [[ -n "$CREATE_TX" ]]; then
      STATUS="executed_create_only"
    fi
  fi

  # Append this row's outcome to the in-memory run record.
  # NOTE(review): "${NOTES[*]}" with an empty array errors under set -u on
  # bash < 4.4 — confirm the target bash version.
  RESULT_JSON="$(
python3 - <<'PY' "$RESULT_JSON" "$CHAIN_ID" "$NETWORK" "$PAIR" "$STATUS" "$CREATE_TX" "$SEED_TX" "$POOL_ADDRESS" "$EXISTING_POOL" "$(printf '%s\n' "${NOTES[*]}")" "${BASE_SUPPLY_MODE:-}" "${MINTABLE_BASE:-false}" "${WALLET_BASE_BALANCE_RAW:-}" "${WALLET_QUOTE_BALANCE_RAW:-}" "${BLOCKERS_JSON:-[]}"
import json, sys
doc = json.loads(sys.argv[1])
doc["rows"].append({
    "chain_id": int(sys.argv[2]),
    "network": sys.argv[3],
    "pair": sys.argv[4],
    "status": sys.argv[5],
    "create_tx": sys.argv[6] or None,
    "seed_tx": sys.argv[7] or None,
    # Deliberate index swap: argv[9] is EXISTING_POOL (before),
    # argv[8] is the possibly-updated POOL_ADDRESS (after).
    "pool_address_before": sys.argv[9] or None,
    "pool_address_after": sys.argv[8] or None,
    "notes": [x for x in sys.argv[10].split() if x],
    "base_supply_mode": sys.argv[11] or None,
    "mintable_base": sys.argv[12] == "true",
    "wallet_base_balance_raw": sys.argv[13] or None,
    "wallet_quote_balance_raw": sys.argv[14] or None,
    "blockers": json.loads(sys.argv[15] or "[]"),
})
print(json.dumps(doc))
PY
)"
done

# Stamp the record and persist it.
python3 - <<'PY' "$RESULT_JSON" "$OUTPUT_PATH"
import json, sys
from datetime import datetime, timezone
doc = json.loads(sys.argv[1])
doc["generated_at"] = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
with open(sys.argv[2], "w") as fh:
    json.dump(doc, fh, indent=2)
    fh.write("\n")
print(json.dumps({"executed_rows": len(doc["rows"])}, indent=2))
PY

echo "Wrote ${OUTPUT_PATH}"
||||
83
scripts/validation/validate-gru-v2-full-mesh-artifacts.py
Normal file
83
scripts/validation/validate-gru-v2-full-mesh-artifacts.py
Normal file
@@ -0,0 +1,83 @@
|
||||
#!/usr/bin/env python3
|
||||
import json
|
||||
import pathlib
|
||||
import re
|
||||
import sys
|
||||
|
||||
|
||||
# Repository root: two levels above scripts/validation/.
ROOT = pathlib.Path(__file__).resolve().parents[2]
# Machine-readable artifacts.
TRACKER = ROOT / "config/gru-v2-full-mesh-pool-tracker.json"
SCHEMA = ROOT / "config/gru-v2-full-mesh-pool-tracker.schema.json"
MASTER = ROOT / "config/gru-v2-full-mesh-master-matrix.json"
# Companion markdown docs that must exist and stay in sync with the JSON.
MASTER_DOC = ROOT / "docs/04-configuration/GRU_V2_FULL_MESH_MASTER_MATRIX.md"
CHECKLIST = ROOT / "docs/04-configuration/GRU_V2_FULL_MESH_EXECUTION_CHECKLIST.md"
PROTOCOL = ROOT / "docs/04-configuration/GRU_V2_PROTOCOL_COMPLETION_MATRIX.md"
|
||||
|
||||
def fail(msg: str) -> None:
    """Report a fatal validation error on stderr and terminate with status 1."""
    sys.stderr.write(f"ERROR: {msg}\n")
    sys.exit(1)
||||
|
||||
|
||||
def load_json(path: pathlib.Path):
    """Parse ``path`` as JSON; any read or parse error exits via fail()."""
    try:
        raw = path.read_text()
        return json.loads(raw)
    except Exception as exc:
        fail(f"failed to parse {path}: {exc}")
||||
|
||||
|
||||
def require(cond: bool, msg: str) -> None:
    """Assert a validation invariant; delegate to fail() when it does not hold."""
    if cond:
        return
    fail(msg)
||||
|
||||
|
||||
def validate_tracker(tracker: dict) -> None:
    """Check the pool-tracker JSON: date stamp, default fields, hub pairs, mesh chains."""
    status_date = tracker.get("statusDate", "")
    require(re.fullmatch(r"\d{4}-\d{2}-\d{2}", status_date) is not None, "tracker statusDate must be YYYY-MM-DD")

    defaults = tracker.get("defaultFields", {})
    for field in ("status", "deployed", "seeded", "validated", "live", "mevReady"):
        require(field in defaults, f"defaultFields missing {field}")
    require(defaults["status"] in {"todo", "in_progress", "blocked", "done"}, "defaultFields.status invalid")

    # Both namespaces must carry their canonical USD hub pair.
    chain138_pairs = [entry["pair"] for entry in tracker["chain138"]["entries"]]
    allmain_pairs = [entry["pair"] for entry in tracker["allMainnet651940"]["entries"]]
    require("cUSDT V2 / cUSDC V2" in chain138_pairs, "chain138 tracker missing canonical USD hub pair")
    require("cAUSDT / cAUSDC" in allmain_pairs, "allMainnet tracker missing canonical USD hub pair")

    # Every connected public chain needs the two cW stable pairs.
    mesh = tracker["publicMesh"]
    for chain in ("1", "10", "25", "56", "100", "137", "1111", "8453", "42161", "42220", "43114"):
        require(chain in mesh, f"publicMesh missing connected chain {chain}")
        listed = mesh[chain]["entries"]
        require("cWUSDT / USDC" in listed, f"publicMesh {chain} missing cWUSDT / USDC")
        require("cWUSDC / USDC" in listed, f"publicMesh {chain} missing cWUSDC / USDC")
|
||||
|
||||
def validate_master(master: dict) -> None:
    """Check the master matrix for the full P0..P7 phase set and required protocols."""
    phase_ids = {phase["id"] for phase in master.get("executionPhases", [])}
    require(phase_ids == {"P0", "P1", "P2", "P3", "P4", "P5", "P6", "P7"}, "executionPhases must contain P0..P7")

    listed = set(master.get("protocolsRequired", []))
    for protocol in ("DODO", "Uniswap v3", "Uniswap v2", "SushiSwap", "Curve", "Balancer", "1Inch", "Aave", "GMX", "dYdX"):
        require(protocol in listed, f"master matrix missing protocol {protocol}")
||||
|
||||
|
||||
def validate_docs() -> None:
    """Check that each companion markdown doc exists and mentions both networks.

    Exits non-zero (via require/fail) on the first missing doc or missing mention.
    """
    for path in [MASTER_DOC, CHECKLIST, PROTOCOL]:
        require(path.exists(), f"missing doc {path}")
        text = path.read_text()
        # The original condition `"Chain 138" in text or "138" in text` is
        # equivalent to checking "138" alone, since "Chain 138" contains "138".
        require("138" in text, f"{path.name} does not mention Chain 138")
        require("651940" in text or "ALL Mainnet" in text, f"{path.name} does not mention ALL Mainnet")
||||
|
||||
|
||||
def main() -> None:
    """Validate every GRU v2 full-mesh artifact; exit non-zero on the first failure."""
    artifacts = (TRACKER, SCHEMA, MASTER, MASTER_DOC, CHECKLIST, PROTOCOL)
    for path in artifacts:
        require(path.exists(), f"required artifact missing: {path}")

    tracker = load_json(TRACKER)
    _schema = load_json(SCHEMA)  # parsed only to prove the schema file is valid JSON
    master = load_json(MASTER)

    validate_tracker(tracker)
    validate_master(master)
    validate_docs()
    print("GRU_V2_FULL_MESH_ARTIFACTS_OK")
||||
|
||||
|
||||
# Script entry point: run all validations when executed directly.
if __name__ == "__main__":
    main()
||||
5
scripts/verify/build-gru-v2-mainnet-bridge-parity.sh
Executable file
5
scripts/verify/build-gru-v2-mainnet-bridge-parity.sh
Executable file
@@ -0,0 +1,5 @@
|
||||
#!/usr/bin/env bash
# Rebuild the GRU V2 Mainnet bridge-parity report by delegating to the shared
# liquidity-expansion helper's "parity" subcommand.
set -euo pipefail
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
# Best-effort env load; the helper must tolerate a missing env itself.
source "$ROOT/scripts/lib/load-project-env.sh" >/dev/null 2>&1 || true
python3 "$ROOT/scripts/lib/immediate_liquidity_expansion.py" parity
5
scripts/verify/build-gru-v2-mainnet-funding-plan.sh
Executable file
5
scripts/verify/build-gru-v2-mainnet-funding-plan.sh
Executable file
@@ -0,0 +1,5 @@
|
||||
#!/usr/bin/env bash
# Rebuild the GRU V2 Mainnet funding plan by delegating to the shared
# liquidity-expansion helper's "v2-funding" subcommand.
set -euo pipefail
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
# Best-effort env load; the helper must tolerate a missing env itself.
source "$ROOT/scripts/lib/load-project-env.sh" >/dev/null 2>&1 || true
python3 "$ROOT/scripts/lib/immediate_liquidity_expansion.py" v2-funding
298
scripts/verify/build-gru-v2-wave1-funding-authority-report.sh
Normal file
298
scripts/verify/build-gru-v2-wave1-funding-authority-report.sh
Normal file
@@ -0,0 +1,298 @@
|
||||
#!/usr/bin/env bash
# Builds the Wave 1 funding-authority report by joining the deploy plan, the
# gap report, and the contract verification/publication matrix.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# Keep an untouched copy so PROJECT_ROOT can be restored after env loading.
REPO_ROOT="$PROJECT_ROOT"
OUTPUT_PATH="${PROJECT_ROOT}/reports/extraction/gru-v2-wave1-funding-authority-report-latest.json"
PLAN_PATH="${PROJECT_ROOT}/reports/extraction/gru-v2-wave1-public-deploy-plan-latest.json"
GAP_REPORT_PATH="${PROJECT_ROOT}/reports/extraction/gru-v2-wave1-public-gap-report-latest.json"
VERIFY_MATRIX_PATH="${PROJECT_ROOT}/reports/status/contract_verification_publish_matrix.json"

# Best-effort env loading; the deployment dotenv is optional.
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh" >/dev/null 2>&1 || true
if [[ -f "${PROJECT_ROOT}/smom-dbis-138/scripts/lib/deployment/dotenv.sh" ]]; then
  # shellcheck disable=SC1090
  source "${PROJECT_ROOT}/smom-dbis-138/scripts/lib/deployment/dotenv.sh" >/dev/null 2>&1 || true
  load_deployment_env --repo-root "${PROJECT_ROOT}/smom-dbis-138" >/dev/null 2>&1 || true
  # Restore PROJECT_ROOT in case load_deployment_env repointed it.
  PROJECT_ROOT="$REPO_ROOT"
  export PROJECT_ROOT
fi

mkdir -p "$(dirname "$OUTPUT_PATH")"
||||
|
||||
# Decide whether the deploy plan must be regenerated: "1" when either input
# file is missing, the row count disagrees with the gap report's expected
# missing-pool count, or some rows were built without live on-chain checks.
NEEDS_PLAN_REFRESH="$(
python3 - <<'PY' "$PLAN_PATH" "$GAP_REPORT_PATH"
import json, sys
from pathlib import Path

plan_path = Path(sys.argv[1])
gap_report_path = Path(sys.argv[2])

if not plan_path.exists() or not gap_report_path.exists():
    print("1")
    raise SystemExit

plan = json.loads(plan_path.read_text())
gap = json.loads(gap_report_path.read_text())
rows = plan.get("rows", [])
expected = gap.get("summary", {}).get("first_tier_wave1_pools_missing")
live_rows = [row for row in rows if row.get("live_checks_enabled")]

if expected is None or len(rows) != expected or len(live_rows) != len(rows):
    print("1")
else:
    print("0")
PY
)"

if [[ "$NEEDS_PLAN_REFRESH" == "1" ]]; then
  # Force live checks on for the regenerated plan (unless caller pre-set it).
  export GRU_WAVE1_PLAN_LIVE_CHECKS="${GRU_WAVE1_PLAN_LIVE_CHECKS:-1}"
  bash "${PROJECT_ROOT}/scripts/deployment/plan-gru-v2-wave1-public-pools.sh" >/dev/null
fi
||||
|
||||
# Join the three inputs and emit the authority report (heredoc continues below).
python3 - <<'PY' "$PLAN_PATH" "$GAP_REPORT_PATH" "$VERIFY_MATRIX_PATH" "$OUTPUT_PATH"
import json
import sys
from collections import Counter, defaultdict
from datetime import datetime, timezone
from pathlib import Path

plan_path = Path(sys.argv[1])
gap_report_path = Path(sys.argv[2])
verify_matrix_path = Path(sys.argv[3])
output_path = Path(sys.argv[4])

plan = json.loads(plan_path.read_text())
gap_report = json.loads(gap_report_path.read_text())
# The verification matrix is optional; fall back to an empty entry list.
verify_matrix = json.loads(verify_matrix_path.read_text()) if verify_matrix_path.exists() else {"entries": []}

# Index plan rows by (chain_id, pair) for joining against the gap report.
plan_rows = {
    (int(row["chain_id"]), row["pair"]): row
    for row in plan.get("rows", [])
}

# Two lookup indexes over the verification matrix: by (chain, label) and by
# (chain, lowercased address).  Entries lacking the key part are skipped.
by_label = {}
by_address = {}
for entry in verify_matrix.get("entries", []):
    chain_id = str(entry.get("chainId", ""))
    label = entry.get("label", "")
    address = (entry.get("address") or "").lower()
    if chain_id and label:
        by_label[(chain_id, label)] = entry
    if chain_id and address:
        by_address[(chain_id, address)] = entry
|
||||
|
||||
def verification_entry(chain_id: int, label: str = "", address: str = ""):
    """Look up a verification-matrix entry by (chain, label), falling back to (chain, address).

    Returns None when neither key matches.  Relies on the module-level
    ``by_label`` / ``by_address`` indexes built above.
    """
    chain_key = str(chain_id)
    if label and (chain_key, label) in by_label:
        return by_label[(chain_key, label)]
    normalized = (address or "").lower()
    if normalized and (chain_key, normalized) in by_address:
        return by_address[(chain_key, normalized)]
    return None
||||
|
||||
|
||||
def verification_state_for(chain_id: int, label: str = "", address: str = "", exists_expected: bool = True):
    """Summarize verification/publication status for one contract.

    Returns a dict with a uniform key set regardless of branch: not yet
    deployed, untracked in the matrix, or tracked with matrix data.
    """
    def _state(tracked, verification, publication, surface="", explorer="", notes=""):
        # One shape for every branch so consumers can rely on all six keys.
        return {
            "tracked": tracked,
            "verification_status": verification,
            "publication_status": publication,
            "publish_surface": surface,
            "explorer": explorer,
            "notes": notes,
        }

    if not exists_expected:
        return _state(False, "not_deployed", "not_deployed", notes="Pool or contract not yet deployed")

    entry = verification_entry(chain_id, label=label, address=address)
    if not entry:
        return _state(False, "untracked", "untracked", notes="No matching verification/publication matrix row found")

    return _state(
        True,
        entry.get("verificationStatus", "unknown"),
        entry.get("publicationStatus", "unknown"),
        surface=entry.get("publishSurface", ""),
        explorer=entry.get("explorer", ""),
        notes=entry.get("publishNotes", ""),
    )
||||
|
||||
|
||||
def is_complete(status_block: dict) -> bool:
|
||||
return (
|
||||
status_block.get("tracked") is True
|
||||
and status_block.get("verification_status") == "complete"
|
||||
and status_block.get("publication_status") == "complete"
|
||||
)
|
||||
|
||||
|
||||
rows = []
|
||||
chain_summary = defaultdict(lambda: {
|
||||
"missing_pair_count": 0,
|
||||
"rows_missing_quote_side_stable": 0,
|
||||
"rows_missing_base_side_balance": 0,
|
||||
"rows_missing_base_side_mintability": 0,
|
||||
"rows_missing_integration": 0,
|
||||
"rows_missing_verification_publication": 0,
|
||||
})
|
||||
|
||||
for gap_row in gap_report.get("missing_first_tier_wave1_pools", []):
|
||||
key = (int(gap_row["chain_id"]), gap_row["pair"])
|
||||
plan_row = plan_rows.get(key)
|
||||
if not plan_row:
|
||||
continue
|
||||
|
||||
chain_id = int(plan_row["chain_id"])
|
||||
network = plan_row["network"]
|
||||
pair = plan_row["pair"]
|
||||
base_symbol = plan_row["base_symbol"]
|
||||
quote_symbol = plan_row["quote_symbol"]
|
||||
existing_pool = plan_row.get("existing_pool_address") or ""
|
||||
pool_exists = existing_pool not in ("", "0x0000000000000000000000000000000000000000", None)
|
||||
|
||||
quote_balance_raw = plan_row.get("wallet_quote_balance_raw")
|
||||
quote_amount_raw = plan_row.get("quote_amount_raw")
|
||||
base_balance_raw = plan_row.get("wallet_base_balance_raw")
|
||||
base_amount_raw = plan_row.get("base_amount_raw")
|
||||
|
||||
quote_has_balance = bool(quote_balance_raw) and bool(quote_amount_raw) and int(quote_balance_raw) >= int(quote_amount_raw)
|
||||
base_has_balance = bool(base_balance_raw) and bool(base_amount_raw) and int(base_balance_raw) >= int(base_amount_raw)
|
||||
base_mintable = bool(plan_row.get("mintable_base"))
|
||||
integration_present = bool(plan_row.get("integration_present"))
|
||||
|
||||
base_ver = verification_state_for(chain_id, label=base_symbol, address=plan_row.get("base_address") or "", exists_expected=bool(plan_row.get("base_address")))
|
||||
quote_ver = verification_state_for(chain_id, label=quote_symbol, address=plan_row.get("quote_address") or "", exists_expected=bool(plan_row.get("quote_address")))
|
||||
pool_ver = verification_state_for(chain_id, label=pair, address=existing_pool, exists_expected=pool_exists)
|
||||
integration_ver = verification_state_for(chain_id, address=plan_row.get("integration_address") or "", exists_expected=integration_present)
|
||||
|
||||
verification_publication_complete = all(
|
||||
is_complete(block) for block in [base_ver, quote_ver]
|
||||
) and (
|
||||
is_complete(pool_ver) if pool_exists else False
|
||||
) and (
|
||||
is_complete(integration_ver) if integration_present else False
|
||||
)
|
||||
|
||||
missing = {
|
||||
"quote_side_stable": not quote_has_balance,
|
||||
"base_side_balance": not base_has_balance,
|
||||
"base_side_mintability": not base_mintable,
|
||||
"integration": not integration_present,
|
||||
"verification_publication_status": not verification_publication_complete,
|
||||
}
|
||||
|
||||
missing_reasons = []
|
||||
if missing["quote_side_stable"]:
|
||||
missing_reasons.append("quote_side_stable")
|
||||
if missing["base_side_balance"]:
|
||||
missing_reasons.append("base_side_balance")
|
||||
if missing["base_side_mintability"]:
|
||||
missing_reasons.append("base_side_mintability")
|
||||
if missing["integration"]:
|
||||
missing_reasons.append("integration")
|
||||
if missing["verification_publication_status"]:
|
||||
missing_reasons.append("verification_publication_status")
|
||||
|
||||
verification_publication = {
|
||||
"overall_complete": verification_publication_complete,
|
||||
"base_token": base_ver,
|
||||
"quote_token": quote_ver,
|
||||
"integration_contract": integration_ver,
|
||||
"pool_contract": pool_ver,
|
||||
}
|
||||
|
||||
row = {
|
||||
"chain_id": chain_id,
|
||||
"network": network,
|
||||
"pair": pair,
|
||||
"hub_stable": gap_row.get("hub_stable"),
|
||||
"missing": missing,
|
||||
"missing_reasons": missing_reasons,
|
||||
"quote_side_stable": {
|
||||
"quote_symbol": quote_symbol,
|
||||
"quote_address": plan_row.get("quote_address"),
|
||||
"required_raw": quote_amount_raw,
|
||||
"wallet_balance_raw": quote_balance_raw,
|
||||
"sufficient": quote_has_balance,
|
||||
},
|
||||
"base_side_balance": {
|
||||
"base_symbol": base_symbol,
|
||||
"base_address": plan_row.get("base_address"),
|
||||
"required_raw": base_amount_raw,
|
||||
"wallet_balance_raw": base_balance_raw,
|
||||
"sufficient": base_has_balance,
|
||||
"base_supply_mode": plan_row.get("base_supply_mode"),
|
||||
},
|
||||
"base_side_mintability": {
|
||||
"mintable_base": base_mintable,
|
||||
"mint_base_amount_raw": plan_row.get("mint_base_amount_raw"),
|
||||
},
|
||||
"integration": {
|
||||
"env_key": plan_row.get("integration_env_key"),
|
||||
"address": plan_row.get("integration_address"),
|
||||
"present": integration_present,
|
||||
},
|
||||
"verification_publication_status": verification_publication,
|
||||
"existing_pool_address": existing_pool or None,
|
||||
"ready_to_create": plan_row.get("ready_to_create"),
|
||||
"ready_to_create_with_bootstrap_price": plan_row.get("ready_to_create_with_bootstrap_price"),
|
||||
"ready_to_seed_live": plan_row.get("ready_to_seed"),
|
||||
"blockers": plan_row.get("blockers", []),
|
||||
"next_step": plan_row.get("next_step"),
|
||||
}
|
||||
rows.append(row)
|
||||
|
||||
chain_entry = chain_summary[(chain_id, network)]
|
||||
chain_entry["missing_pair_count"] += 1
|
||||
chain_entry["rows_missing_quote_side_stable"] += int(missing["quote_side_stable"])
|
||||
chain_entry["rows_missing_base_side_balance"] += int(missing["base_side_balance"])
|
||||
chain_entry["rows_missing_base_side_mintability"] += int(missing["base_side_mintability"])
|
||||
chain_entry["rows_missing_integration"] += int(missing["integration"])
|
||||
chain_entry["rows_missing_verification_publication"] += int(missing["verification_publication_status"])
|
||||
|
||||
summary = {
|
||||
"remaining_missing_pairs": len(rows),
|
||||
"rows_missing_quote_side_stable": sum(int(row["missing"]["quote_side_stable"]) for row in rows),
|
||||
"rows_missing_base_side_balance": sum(int(row["missing"]["base_side_balance"]) for row in rows),
|
||||
"rows_missing_base_side_mintability": sum(int(row["missing"]["base_side_mintability"]) for row in rows),
|
||||
"rows_missing_integration": sum(int(row["missing"]["integration"]) for row in rows),
|
||||
"rows_missing_verification_publication": sum(int(row["missing"]["verification_publication_status"]) for row in rows),
|
||||
"top_missing_reasons": dict(
|
||||
sorted(
|
||||
Counter(reason for row in rows for reason in row["missing_reasons"]).items(),
|
||||
key=lambda item: (-item[1], item[0]),
|
||||
)
|
||||
),
|
||||
}
|
||||
|
||||
chains = []
|
||||
for (chain_id, network), item in sorted(chain_summary.items()):
|
||||
chains.append({
|
||||
"chain_id": chain_id,
|
||||
"network": network,
|
||||
**item,
|
||||
})
|
||||
|
||||
result = {
|
||||
"generated_at": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
|
||||
"description": "Chain-by-chain GRU v2 Wave 1 funding and authority report for the remaining missing public pools.",
|
||||
"sources": [
|
||||
str(plan_path.relative_to(output_path.parents[2])),
|
||||
str(gap_report_path.relative_to(output_path.parents[2])),
|
||||
str(verify_matrix_path.relative_to(output_path.parents[2])) if verify_matrix_path.exists() else "reports/status/contract_verification_publish_matrix.json (missing)",
|
||||
],
|
||||
"summary": summary,
|
||||
"chains": chains,
|
||||
"rows": sorted(rows, key=lambda item: (item["chain_id"], item["pair"])),
|
||||
}
|
||||
|
||||
output_path.write_text(json.dumps(result, indent=2) + "\n")
|
||||
print(json.dumps(summary, indent=2))
|
||||
PY
|
||||
|
||||
echo "Wrote ${OUTPUT_PATH}"
|
||||
185
scripts/verify/build-gru-v2-wave1-public-gap-report.sh
Executable file
185
scripts/verify/build-gru-v2-wave1-public-gap-report.sh
Executable file
@@ -0,0 +1,185 @@
|
||||
#!/usr/bin/env bash
|
||||
# Build a machine-readable GRU v2 public Wave 1 gap report.
|
||||
# Captures missing first-tier pools and missing token suites by network.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
|
||||
OUTPUT_PATH="${PROJECT_ROOT}/reports/extraction/gru-v2-wave1-public-gap-report-latest.json"
|
||||
|
||||
mkdir -p "$(dirname "$OUTPUT_PATH")"
|
||||
|
||||
python3 - <<'PY' "$PROJECT_ROOT" "$OUTPUT_PATH"
|
||||
import json
|
||||
from collections import Counter
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
import sys
|
||||
|
||||
project_root = Path(sys.argv[1])
|
||||
output_path = Path(sys.argv[2])
|
||||
|
||||
deployment_status = json.loads((project_root / "cross-chain-pmm-lps/config/deployment-status.json").read_text())
|
||||
queue_json = json.loads(
|
||||
__import__("subprocess").check_output(
|
||||
["bash", "scripts/verify/check-gru-v2-deployment-queue.sh", "--json"],
|
||||
cwd=project_root,
|
||||
text=True,
|
||||
)
|
||||
)
|
||||
|
||||
loaded_full_mesh_tokens = {
|
||||
"cWUSDT",
|
||||
"cWUSDC",
|
||||
"cWEURC",
|
||||
"cWEURT",
|
||||
"cWGBPC",
|
||||
"cWGBPT",
|
||||
"cWAUDC",
|
||||
"cWJPYC",
|
||||
"cWCHFC",
|
||||
"cWCADC",
|
||||
"cWXAUC",
|
||||
"cWXAUT",
|
||||
}
|
||||
|
||||
gas_native_required = {
|
||||
1: ["cWETH"],
|
||||
10: ["cWETHL2"],
|
||||
25: ["cWCRO"],
|
||||
56: ["cWBNB"],
|
||||
100: ["cWXDAI"],
|
||||
137: ["cWPOL"],
|
||||
1111: ["cWWEMIX"],
|
||||
8453: ["cWETHL2"],
|
||||
42161: ["cWETHL2"],
|
||||
42220: ["cWCELO"],
|
||||
43114: ["cWAVAX"],
|
||||
}
|
||||
|
||||
network_names = {
|
||||
1: "Ethereum Mainnet",
|
||||
10: "Optimism",
|
||||
25: "Cronos",
|
||||
56: "BSC",
|
||||
100: "Gnosis",
|
||||
137: "Polygon",
|
||||
1111: "Wemix",
|
||||
8453: "Base",
|
||||
42161: "Arbitrum One",
|
||||
42220: "Celo",
|
||||
43114: "Avalanche C-Chain",
|
||||
}
|
||||
|
||||
|
||||
def normalize_pair(pair: str) -> str:
|
||||
return " / ".join(part.strip() for part in pair.split("/"))
|
||||
|
||||
|
||||
chains = []
|
||||
missing_pool_rows = []
|
||||
missing_tokens_rows = []
|
||||
live_pool_rows = []
|
||||
|
||||
for row in queue_json["chainQueue"]:
|
||||
chain_id = row["chainId"]
|
||||
network = row["name"]
|
||||
planned = set(row["plannedWave1Pairs"])
|
||||
recorded = set(row["recordedWave1Pairs"])
|
||||
missing = sorted(planned - recorded)
|
||||
live = sorted(recorded)
|
||||
|
||||
chain_status = deployment_status["chains"].get(str(chain_id), {})
|
||||
actual_tokens = set((chain_status.get("cwTokens") or {}).keys())
|
||||
missing_tokens = sorted(token for token in loaded_full_mesh_tokens if token not in actual_tokens)
|
||||
missing_gas = [token for token in gas_native_required.get(chain_id, []) if token not in set((chain_status.get("gasMirrors") or {}).keys())]
|
||||
|
||||
chain_entry = {
|
||||
"chain_id": chain_id,
|
||||
"network": network,
|
||||
"hub_stable": row["hubStable"],
|
||||
"bridge_available": row["bridgeAvailable"],
|
||||
"cw_token_count": row["cwTokenCount"],
|
||||
"planned_wave1_pair_count": len(planned),
|
||||
"recorded_live_pair_count": len(recorded),
|
||||
"missing_wave1_pair_count": len(missing),
|
||||
"live_wave1_pairs": live,
|
||||
"missing_wave1_pairs": missing,
|
||||
"missing_wrapped_tokens": missing_tokens,
|
||||
"missing_gas_native_tokens": missing_gas,
|
||||
"next_step": row["nextStep"],
|
||||
}
|
||||
chains.append(chain_entry)
|
||||
|
||||
for pair in missing:
|
||||
missing_pool_rows.append(
|
||||
{
|
||||
"chain_id": chain_id,
|
||||
"network": network,
|
||||
"pair": pair,
|
||||
"hub_stable": row["hubStable"],
|
||||
"next_step": row["nextStep"],
|
||||
}
|
||||
)
|
||||
for pair in live:
|
||||
live_pool_rows.append(
|
||||
{
|
||||
"chain_id": chain_id,
|
||||
"network": network,
|
||||
"pair": pair,
|
||||
"hub_stable": row["hubStable"],
|
||||
}
|
||||
)
|
||||
for token in missing_tokens:
|
||||
missing_tokens_rows.append(
|
||||
{
|
||||
"chain_id": chain_id,
|
||||
"network": network,
|
||||
"token": token,
|
||||
"token_type": "wrapped_wave1",
|
||||
}
|
||||
)
|
||||
for token in missing_gas:
|
||||
missing_tokens_rows.append(
|
||||
{
|
||||
"chain_id": chain_id,
|
||||
"network": network,
|
||||
"token": token,
|
||||
"token_type": "gas_native_public_mirror",
|
||||
}
|
||||
)
|
||||
|
||||
summary = {
|
||||
"desired_public_evm_targets": queue_json["summary"]["desiredPublicEvmTargets"],
|
||||
"chains_with_loaded_cw_suites": queue_json["summary"]["chainsWithLoadedCwSuites"],
|
||||
"chains_missing_cw_suites": queue_json["summary"]["chainsMissingCwSuites"],
|
||||
"first_tier_wave1_pools_planned": queue_json["summary"]["firstTierWave1PoolsPlanned"],
|
||||
"first_tier_wave1_pools_recorded_live": queue_json["summary"]["firstTierWave1PoolsRecordedLive"],
|
||||
"first_tier_wave1_pools_missing": queue_json["summary"]["firstTierWave1PoolsPlanned"] - queue_json["summary"]["firstTierWave1PoolsRecordedLive"],
|
||||
"wave1_transport_pending_assets": queue_json["summary"]["wave1TransportPending"],
|
||||
"missing_token_rows": len(missing_tokens_rows),
|
||||
"networks_with_missing_tokens": len({row["network"] for row in missing_tokens_rows}),
|
||||
}
|
||||
|
||||
result = {
|
||||
"generated_at": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
|
||||
"description": "Canonical machine-readable GRU v2 public Wave 1 rollout gap report.",
|
||||
"sources": [
|
||||
"scripts/verify/check-gru-v2-deployment-queue.sh --json",
|
||||
"cross-chain-pmm-lps/config/deployment-status.json",
|
||||
"config/gru-v2-d3mm-network-expansion-plan.json",
|
||||
"cross-chain-pmm-lps/config/pool-matrix.json",
|
||||
],
|
||||
"summary": summary,
|
||||
"chains": sorted(chains, key=lambda item: item["chain_id"]),
|
||||
"missing_first_tier_wave1_pools": sorted(missing_pool_rows, key=lambda item: (item["chain_id"], item["pair"])),
|
||||
"live_first_tier_wave1_pools": sorted(live_pool_rows, key=lambda item: (item["chain_id"], item["pair"])),
|
||||
"missing_tokens": sorted(missing_tokens_rows, key=lambda item: (item["chain_id"], item["token_type"], item["token"])),
|
||||
}
|
||||
|
||||
output_path.write_text(json.dumps(result, indent=2) + "\n")
|
||||
print(json.dumps(summary, indent=2))
|
||||
PY
|
||||
|
||||
echo "Wrote ${OUTPUT_PATH}"
|
||||
39
scripts/verify/check-gru-v2-core-protocol-blockers.sh
Executable file
39
scripts/verify/check-gru-v2-core-protocol-blockers.sh
Executable file
@@ -0,0 +1,39 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
|
||||
|
||||
failures=0
|
||||
|
||||
check_file() {
|
||||
local label="$1" path="$2"
|
||||
if [[ -f "$path" ]]; then
|
||||
echo "OK $label -> $path"
|
||||
else
|
||||
echo "MISS $label -> $path"
|
||||
failures=1
|
||||
fi
|
||||
}
|
||||
|
||||
echo "=== GRU v2 core protocol blocker check ==="
|
||||
|
||||
check_file "138 DODO PMM mesh sync" "$PROJECT_ROOT/scripts/create-pmm-full-mesh-chain138.sh"
|
||||
check_file "138 pilot protocol venue deployer" "$PROJECT_ROOT/scripts/deployment/deploy-chain138-pilot-protocol-venues.sh"
|
||||
check_file "138 Aave execution stack deployer" "$PROJECT_ROOT/scripts/deployment/deploy-chain138-aave-v3-execution-stack.sh"
|
||||
check_file "138 Aave quote-push receiver deployer" "$PROJECT_ROOT/scripts/deployment/deploy-chain138-aave-quote-push-receiver.sh"
|
||||
check_file "138 remaining protocol surface" "$PROJECT_ROOT/config/chain138-remaining-protocol-surface.json"
|
||||
check_file "138 remaining protocol env verifier" "$PROJECT_ROOT/scripts/verify/check-chain138-remaining-protocol-env.sh"
|
||||
check_file "651940 cA* token deployer" "$PROJECT_ROOT/scripts/deployment/deploy-allmainnet-ca-tokens.sh"
|
||||
check_file "651940 PMM desired-state sync" "$PROJECT_ROOT/scripts/deployment/sync-allmainnet-pmm-pools-from-json.sh"
|
||||
check_file "651940 cA* token catalog" "$PROJECT_ROOT/config/allmainnet-ca-token-catalog.json"
|
||||
check_file "651940 PMM desired-state config" "$PROJECT_ROOT/config/allmainnet-pmm-pools.json"
|
||||
check_file "GRU deployment implementation status doc" "$PROJECT_ROOT/docs/04-configuration/GRU_V2_FULL_DEPLOYMENT_IMPLEMENTATION_STATUS.md"
|
||||
|
||||
echo "--- remaining external blockers ---"
|
||||
echo "INFO Chain 138 Aave still requires real CHAIN_138_AAVE_POOL / provider / data-provider addresses before the new deploy wrappers can be applied."
|
||||
echo "INFO Chain 138 GMX and dYdX still require canonical live addresses and native protocol stacks."
|
||||
echo "INFO Full venue completion for 651940 across Uniswap v2/v3, SushiSwap, Curve, Balancer, 1Inch, Aave, GMX, and dYdX still depends on live venue addresses, chain support, and liquidity."
|
||||
echo "INFO 651940 PMM sync also requires live DODO integration/provider addresses and final deployed cA* token addresses."
|
||||
|
||||
exit "$failures"
|
||||
52
scripts/verify/check-gru-v2-full-deployment-implementation.py
Executable file
52
scripts/verify/check-gru-v2-full-deployment-implementation.py
Executable file
@@ -0,0 +1,52 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Verify which parts of the GRU v2 full deployment plan are actually implemented in-repo."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
import sys
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
|
||||
|
||||
def exists(rel: str) -> bool:
|
||||
return (ROOT / rel).exists()
|
||||
|
||||
|
||||
def main() -> int:
|
||||
checks = {
|
||||
"chain138_mesh_sync": exists("scripts/create-pmm-full-mesh-chain138.sh"),
|
||||
"chain138_next_steps_runner": exists("scripts/deployment/run-all-next-steps-chain138.sh"),
|
||||
"chain138_readiness_check": exists("scripts/verify/check-gru-v2-chain138-readiness.sh"),
|
||||
"gru_master_matrix": exists("docs/04-configuration/GRU_V2_FULL_MESH_MASTER_MATRIX.md"),
|
||||
"gru_live_status_report": exists("docs/04-configuration/GRU_V2_FULL_MESH_LIVE_STATUS_REPORT.md"),
|
||||
"gru_protocol_matrix": exists("docs/04-configuration/GRU_V2_PROTOCOL_COMPLETION_MATRIX.md"),
|
||||
"gru_pool_tracker": exists("config/gru-v2-full-mesh-pool-tracker.json"),
|
||||
"all_mainnet_token_inventory": exists("docs/11-references/ALL_MAINNET_TOKEN_ADDRESSES.md"),
|
||||
"all_mainnet_full_cA_deployer": exists("scripts/deployment/deploy-allmainnet-ca-tokens.sh"),
|
||||
"all_mainnet_full_mesh_deployer": exists("scripts/deployment/sync-allmainnet-pmm-pools-from-json.sh"),
|
||||
"chain138_uniswap_curve_balancer_full_deployer": exists("scripts/deployment/deploy-chain138-pilot-protocol-venues.sh"),
|
||||
"chain138_aave_execution_deployer": exists("scripts/deployment/deploy-chain138-aave-v3-execution-stack.sh"),
|
||||
"chain138_aave_receiver_deployer": exists("scripts/deployment/deploy-chain138-aave-quote-push-receiver.sh"),
|
||||
"chain138_remaining_protocol_inventory": exists("config/chain138-remaining-protocol-surface.json"),
|
||||
"chain138_remaining_protocol_checker": exists("scripts/verify/check-chain138-remaining-protocol-env.sh"),
|
||||
"all_mainnet_uniswap_curve_balancer_full_deployer": False,
|
||||
}
|
||||
|
||||
missing_repo_backed = [
|
||||
"all_mainnet_uniswap_curve_balancer_full_deployer",
|
||||
]
|
||||
|
||||
report = {
|
||||
"implemented": {k: v for k, v in checks.items() if v},
|
||||
"missing_or_external_blocked": {k: checks[k] for k in missing_repo_backed if not checks[k]},
|
||||
}
|
||||
|
||||
print(json.dumps(report, indent=2, sort_keys=True))
|
||||
return 1 if any(not checks[k] for k in missing_repo_backed) else 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
30
scripts/verify/export-gru-v2-full-mesh-tracker-csv.py
Normal file
30
scripts/verify/export-gru-v2-full-mesh-tracker-csv.py
Normal file
@@ -0,0 +1,30 @@
|
||||
#!/usr/bin/env python3
|
||||
import csv
|
||||
import json
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
|
||||
ROOT = pathlib.Path("/home/intlc/projects/proxmox")
|
||||
TRACKER = ROOT / "config/gru-v2-full-mesh-pool-tracker.json"
|
||||
|
||||
|
||||
def main() -> None:
|
||||
data = json.loads(TRACKER.read_text())
|
||||
out = csv.writer(sys.stdout)
|
||||
out.writerow(["namespace", "chain", "pair", "priority", "status"])
|
||||
|
||||
for row in data["chain138"]["entries"]:
|
||||
out.writerow(["c* V2", "138", row["pair"], row.get("priority", ""), data["defaultFields"]["status"]])
|
||||
|
||||
for row in data["allMainnet651940"]["entries"]:
|
||||
out.writerow(["cA*", "651940", row["pair"], row.get("priority", ""), data["defaultFields"]["status"]])
|
||||
|
||||
for chain, bucket in data["publicMesh"].items():
|
||||
status = bucket.get("statusOverride", data["defaultFields"]["status"])
|
||||
for pair in bucket["entries"]:
|
||||
out.writerow(["cW*", chain, pair, "", status])
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -1,224 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Verify the Chain 138 native Uniswap v2 and SushiSwap deployments on Blockscout.
|
||||
#
|
||||
# Usage:
|
||||
# bash scripts/verify/verify-chain138-native-v2-blockscout.sh
|
||||
# bash scripts/verify/verify-chain138-native-v2-blockscout.sh --status-only
|
||||
# bash scripts/verify/verify-chain138-native-v2-blockscout.sh --only UniswapV2Factory,SushiSwapRouter
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
|
||||
SMOM_ROOT="${PROJECT_ROOT}/smom-dbis-138"
|
||||
|
||||
if [[ -f "${PROJECT_ROOT}/scripts/lib/load-project-env.sh" ]]; then
|
||||
# shellcheck source=/dev/null
|
||||
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh"
|
||||
fi
|
||||
|
||||
command -v forge >/dev/null 2>&1 || { echo "ERROR: forge not found"; exit 1; }
|
||||
command -v node >/dev/null 2>&1 || { echo "ERROR: node not found"; exit 1; }
|
||||
command -v cast >/dev/null 2>&1 || { echo "ERROR: cast not found"; exit 1; }
|
||||
command -v jq >/dev/null 2>&1 || { echo "ERROR: jq not found"; exit 1; }
|
||||
command -v curl >/dev/null 2>&1 || { echo "ERROR: curl not found"; exit 1; }
|
||||
|
||||
RPC_URL="${RPC_URL_138:-${CHAIN138_RPC_URL:-http://192.168.11.211:8545}}"
|
||||
BLOCKSCOUT_URL="${CHAIN138_BLOCKSCOUT_INTERNAL_URL:-http://${IP_BLOCKSCOUT:-192.168.11.140}:4000}"
|
||||
BLOCKSCOUT_API_BASE="${CHAIN138_BLOCKSCOUT_API_BASE:-${BLOCKSCOUT_URL}/api/v2}"
|
||||
BLOCKSCOUT_PUBLIC_API_BASE="${CHAIN138_BLOCKSCOUT_PUBLIC_API_BASE:-https://explorer.d-bis.org/api/v2}"
|
||||
VERIFIER_PORT="${FORGE_VERIFIER_PROXY_PORT:-3080}"
|
||||
FORGE_VERIFIER_URL="${FORGE_VERIFIER_URL:-http://127.0.0.1:${VERIFIER_PORT}/api}"
|
||||
|
||||
UNISWAP_JSON="${SMOM_ROOT}/deployments/chain138/uniswap-v2-native.json"
|
||||
SUSHI_JSON="${SMOM_ROOT}/deployments/chain138/sushiswap-native.json"
|
||||
|
||||
ONLY_LIST=""
|
||||
STATUS_ONLY=0
|
||||
PROXY_PID=""
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--only) ONLY_LIST="${2:-}"; shift 2 ;;
|
||||
--status-only) STATUS_ONLY=1; shift ;;
|
||||
*)
|
||||
echo "Unknown argument: $1" >&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
cleanup_proxy() {
|
||||
[[ -n "${PROXY_PID:-}" ]] && kill "${PROXY_PID}" 2>/dev/null || true
|
||||
}
|
||||
trap cleanup_proxy EXIT
|
||||
|
||||
should_handle() {
|
||||
local name="$1"
|
||||
[[ -n "${ONLY_LIST}" ]] && [[ ",${ONLY_LIST}," != *",${name},"* ]] && return 1
|
||||
return 0
|
||||
}
|
||||
|
||||
log() { printf '%s\n' "$*"; }
|
||||
ok() { printf '[ok] %s\n' "$*"; }
|
||||
warn() { printf '[warn] %s\n' "$*" >&2; }
|
||||
fail() { printf '[fail] %s\n' "$*" >&2; exit 1; }
|
||||
|
||||
proxy_listening() {
|
||||
if command -v nc >/dev/null 2>&1; then
|
||||
nc -z -w 2 127.0.0.1 "${VERIFIER_PORT}" 2>/dev/null
|
||||
else
|
||||
timeout 2 bash -c "echo >/dev/tcp/127.0.0.1/${VERIFIER_PORT}" 2>/dev/null
|
||||
fi
|
||||
}
|
||||
|
||||
start_proxy_if_needed() {
|
||||
if proxy_listening; then
|
||||
ok "Forge verification proxy already listening on ${VERIFIER_PORT}."
|
||||
return 0
|
||||
fi
|
||||
|
||||
log "Starting forge verification proxy on ${VERIFIER_PORT} -> ${BLOCKSCOUT_URL}"
|
||||
PORT="${VERIFIER_PORT}" BLOCKSCOUT_URL="${BLOCKSCOUT_URL}" node "${PROJECT_ROOT}/forge-verification-proxy/server.js" >/tmp/chain138-native-v2-blockscout-proxy.log 2>&1 &
|
||||
PROXY_PID=$!
|
||||
sleep 2
|
||||
proxy_listening || fail "Forge verification proxy failed to start. See /tmp/chain138-native-v2-blockscout-proxy.log"
|
||||
}
|
||||
|
||||
has_contract_bytecode() {
|
||||
local addr="$1"
|
||||
local code
|
||||
code="$(cast code "${addr}" --rpc-url "${RPC_URL}" 2>/dev/null | tr -d '\n\r \t' | tr '[:upper:]' '[:lower:]')" || true
|
||||
[[ -n "${code}" && "${code}" != "0x" && "${code}" != "0x0" ]]
|
||||
}
|
||||
|
||||
verification_status_json() {
|
||||
local addr="$1"
|
||||
local raw
|
||||
local base
|
||||
for base in "${BLOCKSCOUT_API_BASE}" "${BLOCKSCOUT_PUBLIC_API_BASE}"; do
|
||||
raw="$(curl --max-time 20 -fsS "${base}/smart-contracts/${addr}" 2>/dev/null || true)"
|
||||
if [[ -n "${raw}" ]] && jq -e 'type == "object"' >/dev/null 2>&1 <<<"${raw}"; then
|
||||
printf '%s' "${raw}"
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
return 1
|
||||
}
|
||||
|
||||
is_verified() {
|
||||
local addr="$1"
|
||||
local expected_name="$2"
|
||||
local json name compiler
|
||||
json="$(verification_status_json "${addr}")" || return 1
|
||||
name="$(jq -r '.name // empty' <<<"${json}")"
|
||||
compiler="$(jq -r '.compiler_version // empty' <<<"${json}")"
|
||||
[[ -n "${name}" && -n "${compiler}" && "${name}" == "${expected_name}" ]]
|
||||
}
|
||||
|
||||
submit_verification() {
|
||||
local label="$1"
|
||||
local addr="$2"
|
||||
local path="$3"
|
||||
local expected_name="$4"
|
||||
local constructor_sig="$5"
|
||||
local compiler_version="$6"
|
||||
local force_flag="${7:-0}"
|
||||
shift 6
|
||||
local constructor_args=("$@")
|
||||
|
||||
start_proxy_if_needed
|
||||
has_contract_bytecode "${addr}" || fail "${label} has no bytecode at ${addr}"
|
||||
|
||||
if is_verified "${addr}" "${expected_name}"; then
|
||||
ok "${label} already verified on Blockscout."
|
||||
return 0
|
||||
fi
|
||||
|
||||
local cmd=(forge verify-contract "${addr}" "${path}" --chain-id 138 --verifier blockscout --verifier-url "${FORGE_VERIFIER_URL}" --rpc-url "${RPC_URL}" --flatten)
|
||||
[[ -n "${compiler_version}" ]] && cmd+=(--compiler-version "${compiler_version}")
|
||||
if [[ "${force_flag}" == "1" ]]; then
|
||||
cmd+=(--force)
|
||||
fi
|
||||
if [[ -n "${constructor_sig}" ]]; then
|
||||
local encoded
|
||||
encoded="$(cast abi-encode "${constructor_sig}" "${constructor_args[@]:1}")"
|
||||
cmd+=(--constructor-args "${encoded}")
|
||||
fi
|
||||
|
||||
log "Submitting Blockscout verification for ${label} (${addr})"
|
||||
if (cd "${SMOM_ROOT}" && "${cmd[@]}" 2>&1); then
|
||||
ok "${label} verification submission accepted."
|
||||
else
|
||||
warn "${label} verification submission did not complete cleanly. Check Blockscout manually."
|
||||
fi
|
||||
}
|
||||
|
||||
[[ -f "${UNISWAP_JSON}" ]] || fail "Missing deployment artifact ${UNISWAP_JSON}"
|
||||
[[ -f "${SUSHI_JSON}" ]] || fail "Missing deployment artifact ${SUSHI_JSON}"
|
||||
|
||||
UNISWAP_FACTORY="$(jq -r '.factory' "${UNISWAP_JSON}")"
|
||||
UNISWAP_ROUTER="$(jq -r '.router' "${UNISWAP_JSON}")"
|
||||
UNISWAP_WETH="$(jq -r '.weth' "${UNISWAP_JSON}")"
|
||||
UNISWAP_DEPLOYER="$(jq -r '.deployer' "${UNISWAP_JSON}")"
|
||||
|
||||
SUSHI_FACTORY="$(jq -r '.factory' "${SUSHI_JSON}")"
|
||||
SUSHI_ROUTER="$(jq -r '.router' "${SUSHI_JSON}")"
|
||||
SUSHI_WETH="$(jq -r '.weth' "${SUSHI_JSON}")"
|
||||
SUSHI_FEE_TO_SETTER="$(jq -r '.feeToSetter // .deployer' "${SUSHI_JSON}")"
|
||||
|
||||
log "Chain 138 native V2 Blockscout verification"
|
||||
log "RPC: ${RPC_URL}"
|
||||
log "Explorer API: ${BLOCKSCOUT_API_BASE}"
|
||||
log
|
||||
|
||||
if (( STATUS_ONLY )); then
|
||||
should_handle "UniswapV2Factory" && { is_verified "${UNISWAP_FACTORY}" "UniswapV2Factory" && ok "UniswapV2Factory already verified on Blockscout." || warn "UniswapV2Factory not yet verified on Blockscout."; }
|
||||
should_handle "UniswapV2Router" && { is_verified "${UNISWAP_ROUTER}" "UniswapV2Router02" && ok "UniswapV2Router already verified on Blockscout." || warn "UniswapV2Router not yet verified on Blockscout."; }
|
||||
should_handle "SushiSwapFactory" && { is_verified "${SUSHI_FACTORY}" "UniswapV2Factory" && ok "SushiSwapFactory already verified on Blockscout." || warn "SushiSwapFactory not yet verified on Blockscout."; }
|
||||
should_handle "SushiSwapRouter" && { is_verified "${SUSHI_ROUTER}" "UniswapV2Router02" && ok "SushiSwapRouter already verified on Blockscout." || warn "SushiSwapRouter not yet verified on Blockscout."; }
|
||||
exit 0
|
||||
fi
|
||||
|
||||
should_handle "UniswapV2Factory" && submit_verification \
|
||||
"UniswapV2Factory" \
|
||||
"${UNISWAP_FACTORY}" \
|
||||
"contracts/vendor/uniswap-v2-core/UniswapV2Factory.sol:UniswapV2Factory" \
|
||||
"UniswapV2Factory" \
|
||||
"constructor(address)" \
|
||||
"v0.5.16+commit.9c3226ce" \
|
||||
"1" \
|
||||
"${UNISWAP_DEPLOYER}"
|
||||
|
||||
should_handle "UniswapV2Router" && submit_verification \
|
||||
"UniswapV2Router" \
|
||||
"${UNISWAP_ROUTER}" \
|
||||
"contracts/vendor/uniswap-v2-periphery/UniswapV2Router02.sol:UniswapV2Router02" \
|
||||
"UniswapV2Router02" \
|
||||
"constructor(address,address)" \
|
||||
"v0.6.6+commit.6c089d02" \
|
||||
"1" \
|
||||
"${UNISWAP_FACTORY}" "${UNISWAP_WETH}"
|
||||
|
||||
should_handle "SushiSwapFactory" && submit_verification \
|
||||
"SushiSwapFactory" \
|
||||
"${SUSHI_FACTORY}" \
|
||||
"contracts/vendor/sushiswap-v2/UniswapV2Factory.sol:UniswapV2Factory" \
|
||||
"UniswapV2Factory" \
|
||||
"constructor(address)" \
|
||||
"v0.6.12+commit.27d51765" \
|
||||
"1" \
|
||||
"${SUSHI_FEE_TO_SETTER}"
|
||||
|
||||
should_handle "SushiSwapRouter" && submit_verification \
|
||||
"SushiSwapRouter" \
|
||||
"${SUSHI_ROUTER}" \
|
||||
"contracts/vendor/sushiswap-v2/UniswapV2Router02.sol:UniswapV2Router02" \
|
||||
"UniswapV2Router02" \
|
||||
"constructor(address,address)" \
|
||||
"v0.6.12+commit.27d51765" \
|
||||
"1" \
|
||||
"${SUSHI_FACTORY}" "${SUSHI_WETH}"
|
||||
|
||||
log
|
||||
ok "Chain 138 native V2 verification flow complete."
|
||||
@@ -1,545 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Verify the deployed Chain 138 route execution stack and pilot venue contracts on Blockscout.
|
||||
#
|
||||
# Usage:
|
||||
# bash scripts/verify/verify-chain138-route-execution-stack-blockscout.sh
|
||||
# bash scripts/verify/verify-chain138-route-execution-stack-blockscout.sh --status-only
|
||||
# bash scripts/verify/verify-chain138-route-execution-stack-blockscout.sh --no-wait
|
||||
# bash scripts/verify/verify-chain138-route-execution-stack-blockscout.sh --only EnhancedSwapRouterV2,Chain138PilotCurve3Pool
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
|
||||
SMOM_ROOT="${PROJECT_ROOT}/smom-dbis-138"
|
||||
|
||||
if [[ -f "${PROJECT_ROOT}/scripts/lib/load-project-env.sh" ]]; then
|
||||
# shellcheck source=/dev/null
|
||||
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh"
|
||||
fi
|
||||
|
||||
command -v forge >/dev/null 2>&1 || { echo "ERROR: forge not found"; exit 1; }
|
||||
command -v node >/dev/null 2>&1 || { echo "ERROR: node not found"; exit 1; }
|
||||
command -v cast >/dev/null 2>&1 || { echo "ERROR: cast not found"; exit 1; }
|
||||
command -v jq >/dev/null 2>&1 || { echo "ERROR: jq not found"; exit 1; }
|
||||
command -v curl >/dev/null 2>&1 || { echo "ERROR: curl not found"; exit 1; }
|
||||
|
||||
RPC_URL="${RPC_URL_138:-${CHAIN138_RPC_URL:-http://192.168.11.211:8545}}"
|
||||
BLOCKSCOUT_URL="${CHAIN138_BLOCKSCOUT_INTERNAL_URL:-http://${IP_BLOCKSCOUT:-192.168.11.140}:4000}"
|
||||
BLOCKSCOUT_API_BASE="${CHAIN138_BLOCKSCOUT_API_BASE:-${BLOCKSCOUT_URL}/api/v2}"
|
||||
BLOCKSCOUT_PUBLIC_API_BASE="${CHAIN138_BLOCKSCOUT_PUBLIC_API_BASE:-https://explorer.d-bis.org/api/v2}"
|
||||
VERIFIER_PORT="${FORGE_VERIFIER_PROXY_PORT:-3080}"
|
||||
FORGE_VERIFIER_URL="${FORGE_VERIFIER_URL:-http://127.0.0.1:${VERIFIER_PORT}/api}"
|
||||
ROUTE_STACK_SOLC_VERSION="${ROUTE_STACK_SOLC_VERSION:-v0.8.20+commit.a1b79de6}"
|
||||
ROUTE_STACK_EVM_VERSION="${ROUTE_STACK_EVM_VERSION:-shanghai}"
|
||||
ROUTE_STACK_OPT_RUNS="${ROUTE_STACK_OPT_RUNS:-200}"
|
||||
|
||||
ONLY_LIST=""
|
||||
STATUS_ONLY=0
|
||||
NO_WAIT=0
|
||||
PROXY_PID=""
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--only) ONLY_LIST="${2:-}"; shift 2 ;;
|
||||
--status-only) STATUS_ONLY=1; shift ;;
|
||||
--no-wait) NO_WAIT=1; shift ;;
|
||||
*)
|
||||
echo "Unknown argument: $1" >&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
cleanup_proxy() {
|
||||
[[ -n "${PROXY_PID:-}" ]] && kill "${PROXY_PID}" 2>/dev/null || true
|
||||
}
|
||||
trap cleanup_proxy EXIT
|
||||
|
||||
should_handle() {
|
||||
local name="$1"
|
||||
[[ -n "${ONLY_LIST}" ]] && [[ ",${ONLY_LIST}," != *",${name},"* ]] && return 1
|
||||
return 0
|
||||
}
|
||||
|
||||
log() { printf '%s\n' "$*"; }
|
||||
ok() { printf '[ok] %s\n' "$*"; }
|
||||
warn() { printf '[warn] %s\n' "$*" >&2; }
|
||||
fail() { printf '[fail] %s\n' "$*" >&2; exit 1; }
|
||||
|
||||
proxy_listening() {
|
||||
if command -v nc >/dev/null 2>&1; then
|
||||
nc -z -w 2 127.0.0.1 "${VERIFIER_PORT}" 2>/dev/null
|
||||
else
|
||||
timeout 2 bash -c "echo >/dev/tcp/127.0.0.1/${VERIFIER_PORT}" 2>/dev/null
|
||||
fi
|
||||
}
|
||||
|
||||
start_proxy_if_needed() {
|
||||
if proxy_listening; then
|
||||
ok "Forge verification proxy already listening on ${VERIFIER_PORT}."
|
||||
return 0
|
||||
fi
|
||||
|
||||
log "Starting forge verification proxy on ${VERIFIER_PORT} -> ${BLOCKSCOUT_URL}"
|
||||
PORT="${VERIFIER_PORT}" BLOCKSCOUT_URL="${BLOCKSCOUT_URL}" node "${PROJECT_ROOT}/forge-verification-proxy/server.js" >/tmp/chain138-route-execution-blockscout-proxy.log 2>&1 &
|
||||
PROXY_PID=$!
|
||||
sleep 2
|
||||
proxy_listening || fail "Forge verification proxy failed to start. See /tmp/chain138-route-execution-blockscout-proxy.log"
|
||||
}
|
||||
|
||||
has_contract_bytecode() {
|
||||
local addr="$1"
|
||||
local code
|
||||
code="$(cast code "${addr}" --rpc-url "${RPC_URL}" 2>/dev/null | tr -d '\n\r \t' | tr '[:upper:]' '[:lower:]')" || true
|
||||
[[ -n "${code}" && "${code}" != "0x" && "${code}" != "0x0" ]]
|
||||
}
|
||||
|
||||
verification_status_json() {
|
||||
local addr="$1"
|
||||
local raw
|
||||
local base
|
||||
for base in "${BLOCKSCOUT_API_BASE}" "${BLOCKSCOUT_PUBLIC_API_BASE}"; do
|
||||
raw="$(curl --max-time 20 -fsS "${base}/smart-contracts/${addr}" 2>/dev/null || true)"
|
||||
if [[ -n "${raw}" ]] && jq -e 'type == "object"' >/dev/null 2>&1 <<<"${raw}"; then
|
||||
printf '%s' "${raw}"
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
return 1
|
||||
}
|
||||
|
||||
is_verified() {
|
||||
local addr="$1"
|
||||
local expected_name="$2"
|
||||
local json name compiler
|
||||
json="$(verification_status_json "${addr}")" || return 1
|
||||
name="$(jq -r '.name // empty' <<<"${json}")"
|
||||
compiler="$(jq -r '.compiler_version // empty' <<<"${json}")"
|
||||
[[ -n "${name}" && -n "${compiler}" && "${name}" == "${expected_name}" ]]
|
||||
}
|
||||
|
||||
submit_verification() {
|
||||
local label="$1"
|
||||
local addr="$2"
|
||||
local path="$3"
|
||||
local expected_name="$4"
|
||||
local constructor_sig="$5"
|
||||
shift 5
|
||||
local constructor_args=("$@")
|
||||
|
||||
start_proxy_if_needed
|
||||
has_contract_bytecode "${addr}" || fail "${label} has no bytecode at ${addr}"
|
||||
|
||||
if is_verified "${addr}" "${expected_name}"; then
|
||||
ok "${label} already verified on Blockscout."
|
||||
return 0
|
||||
fi
|
||||
|
||||
local cmd=(forge verify-contract "${addr}" "${path}" --chain-id 138 --verifier blockscout --verifier-url "${FORGE_VERIFIER_URL}" --rpc-url "${RPC_URL}" --flatten)
|
||||
if [[ -n "${constructor_sig}" ]]; then
|
||||
local encoded
|
||||
encoded="$(cast abi-encode "${constructor_sig}" "${constructor_args[@]}")"
|
||||
cmd+=(--constructor-args "${encoded}")
|
||||
fi
|
||||
|
||||
log "Submitting Blockscout verification for ${label} (${addr})"
|
||||
if (cd "${SMOM_ROOT}" && "${cmd[@]}" 2>&1); then
|
||||
ok "${label} verification submission accepted."
|
||||
else
|
||||
warn "${label} verification submission did not complete cleanly. Check Blockscout manually."
|
||||
fi
|
||||
}
|
||||
|
||||
artifact_dbg_path() {
|
||||
case "$1" in
|
||||
EnhancedSwapRouterV2) printf '%s' "${SMOM_ROOT}/artifacts/contracts/bridge/trustless/EnhancedSwapRouterV2.sol/EnhancedSwapRouterV2.dbg.json" ;;
|
||||
IntentBridgeCoordinatorV2) printf '%s' "${SMOM_ROOT}/artifacts/contracts/bridge/trustless/IntentBridgeCoordinatorV2.sol/IntentBridgeCoordinatorV2.dbg.json" ;;
|
||||
DodoRouteExecutorAdapter) printf '%s' "${SMOM_ROOT}/artifacts/contracts/bridge/trustless/adapters/DodoRouteExecutorAdapter.sol/DodoRouteExecutorAdapter.dbg.json" ;;
|
||||
DodoV3RouteExecutorAdapter) printf '%s' "${SMOM_ROOT}/artifacts/contracts/bridge/trustless/adapters/DodoV3RouteExecutorAdapter.sol/DodoV3RouteExecutorAdapter.dbg.json" ;;
|
||||
UniswapV3RouteExecutorAdapter) printf '%s' "${SMOM_ROOT}/artifacts/contracts/bridge/trustless/adapters/UniswapV3RouteExecutorAdapter.sol/UniswapV3RouteExecutorAdapter.dbg.json" ;;
|
||||
BalancerRouteExecutorAdapter) printf '%s' "${SMOM_ROOT}/artifacts/contracts/bridge/trustless/adapters/BalancerRouteExecutorAdapter.sol/BalancerRouteExecutorAdapter.dbg.json" ;;
|
||||
CurveRouteExecutorAdapter) printf '%s' "${SMOM_ROOT}/artifacts/contracts/bridge/trustless/adapters/CurveRouteExecutorAdapter.sol/CurveRouteExecutorAdapter.dbg.json" ;;
|
||||
OneInchRouteExecutorAdapter) printf '%s' "${SMOM_ROOT}/artifacts/contracts/bridge/trustless/adapters/OneInchRouteExecutorAdapter.sol/OneInchRouteExecutorAdapter.dbg.json" ;;
|
||||
Chain138PilotUniswapV3Router) printf '%s' "${SMOM_ROOT}/artifacts/contracts/bridge/trustless/pilot/Chain138PilotDexVenues.sol/Chain138PilotUniswapV3Router.dbg.json" ;;
|
||||
Chain138PilotBalancerVault) printf '%s' "${SMOM_ROOT}/artifacts/contracts/bridge/trustless/pilot/Chain138PilotDexVenues.sol/Chain138PilotBalancerVault.dbg.json" ;;
|
||||
Chain138PilotCurve3Pool) printf '%s' "${SMOM_ROOT}/artifacts/contracts/bridge/trustless/pilot/Chain138PilotDexVenues.sol/Chain138PilotCurve3Pool.dbg.json" ;;
|
||||
Chain138PilotOneInchAggregationRouter) printf '%s' "${SMOM_ROOT}/artifacts/contracts/bridge/trustless/pilot/Chain138PilotDexVenues.sol/Chain138PilotOneInchAggregationRouter.dbg.json" ;;
|
||||
*) return 1 ;;
|
||||
esac
|
||||
}
|
||||
|
||||
foundry_artifact_json_path() {
|
||||
case "$1" in
|
||||
EnhancedSwapRouterV2) printf '%s' "${SMOM_ROOT}/out/EnhancedSwapRouterV2.sol/EnhancedSwapRouterV2.json" ;;
|
||||
IntentBridgeCoordinatorV2) printf '%s' "${SMOM_ROOT}/out/IntentBridgeCoordinatorV2.sol/IntentBridgeCoordinatorV2.json" ;;
|
||||
DodoRouteExecutorAdapter) printf '%s' "${SMOM_ROOT}/out/DodoRouteExecutorAdapter.sol/DodoRouteExecutorAdapter.json" ;;
|
||||
DodoV3RouteExecutorAdapter) printf '%s' "${SMOM_ROOT}/out/DodoV3RouteExecutorAdapter.sol/DodoV3RouteExecutorAdapter.json" ;;
|
||||
UniswapV3RouteExecutorAdapter) printf '%s' "${SMOM_ROOT}/out/UniswapV3RouteExecutorAdapter.sol/UniswapV3RouteExecutorAdapter.json" ;;
|
||||
BalancerRouteExecutorAdapter) printf '%s' "${SMOM_ROOT}/out/BalancerRouteExecutorAdapter.sol/BalancerRouteExecutorAdapter.json" ;;
|
||||
CurveRouteExecutorAdapter) printf '%s' "${SMOM_ROOT}/out/CurveRouteExecutorAdapter.sol/CurveRouteExecutorAdapter.json" ;;
|
||||
OneInchRouteExecutorAdapter) printf '%s' "${SMOM_ROOT}/out/OneInchRouteExecutorAdapter.sol/OneInchRouteExecutorAdapter.json" ;;
|
||||
Chain138PilotUniswapV3Router|Chain138PilotBalancerVault|Chain138PilotCurve3Pool|Chain138PilotOneInchAggregationRouter)
|
||||
printf '%s' "${SMOM_ROOT}/out/Chain138PilotDexVenues.sol/${1}.json"
|
||||
;;
|
||||
*) return 1 ;;
|
||||
esac
|
||||
}
|
||||
|
||||
runtime_hash_report() {
|
||||
local label="$1"
|
||||
local addr="$2"
|
||||
local artifact_json artifact_runtime chain_runtime
|
||||
|
||||
artifact_json="$(foundry_artifact_json_path "${label}")" || return 0
|
||||
[[ -f "${artifact_json}" ]] || return 0
|
||||
|
||||
artifact_runtime="$(jq -r '.deployedBytecode.object // empty' "${artifact_json}" | tr '[:upper:]' '[:lower:]')"
|
||||
chain_runtime="$(cast code "${addr}" --rpc-url "${RPC_URL}" 2>/dev/null | tr -d '\n\r \t' | tr '[:upper:]' '[:lower:]')" || true
|
||||
|
||||
[[ -n "${artifact_runtime}" && -n "${chain_runtime}" && "${chain_runtime}" != "0x" ]] || return 0
|
||||
|
||||
if [[ "${artifact_runtime}" != "${chain_runtime}" ]]; then
|
||||
warn "${label}: Foundry artifact runtime bytecode does not match deployed bytecode."
|
||||
warn "${label}: artifact_keccak=$(cast keccak "${artifact_runtime}") chain_keccak=$(cast keccak "${chain_runtime}")"
|
||||
fi
|
||||
}
|
||||
|
||||
submit_standard_input_from_artifact() {
|
||||
local label="$1"
|
||||
local addr="$2"
|
||||
local contract_path="$3"
|
||||
local constructor_args="$4"
|
||||
local dbg build input_file compiler_version evm_version optimization_runs optimization_enabled license_type response message
|
||||
|
||||
dbg="$(artifact_dbg_path "${label}")" || fail "${label}: missing dbg path mapping"
|
||||
[[ -f "${dbg}" ]] || fail "${label}: missing dbg artifact ${dbg}"
|
||||
build="$(jq -r '.buildInfo // .build_info // empty' "${dbg}")"
|
||||
[[ -n "${build}" && "${build}" != "null" ]] || fail "${label}: missing build-info reference in ${dbg}"
|
||||
build="$(cd "$(dirname "${dbg}")" && realpath "${build}")"
|
||||
[[ -f "${build}" ]] || fail "${label}: missing build-info file ${build}"
|
||||
|
||||
input_file="$(mktemp)"
|
||||
python3 - "${dbg}" "${build}" "${input_file}" "${contract_path%%:*}" <<'PY'
|
||||
import json
|
||||
import os
|
||||
import posixpath
|
||||
import re
|
||||
import sys
|
||||
|
||||
dbg_path, build_path, out_path, fallback_source = sys.argv[1:5]
|
||||
with open(dbg_path, "r", encoding="utf-8") as fh:
|
||||
dbg = json.load(fh)
|
||||
with open(build_path, "r", encoding="utf-8") as fh:
|
||||
build = json.load(fh)
|
||||
|
||||
source_name = dbg.get("sourceName") or dbg.get("source_name") or fallback_source
|
||||
if not source_name:
|
||||
raise SystemExit(f"missing sourceName in {dbg_path}")
|
||||
|
||||
input_data = build["input"]
|
||||
sources = input_data.get("sources", {})
|
||||
if source_name not in sources:
|
||||
raise SystemExit(f"source {source_name} missing from build-info input")
|
||||
|
||||
import_re = re.compile(r'import\s+(?:[^;]*?\s+from\s+)?["\']([^"\']+)["\']\s*;')
|
||||
|
||||
closure = set()
|
||||
stack = [source_name]
|
||||
while stack:
|
||||
current = stack.pop()
|
||||
if current in closure or current not in sources:
|
||||
continue
|
||||
closure.add(current)
|
||||
content = sources[current].get("content", "")
|
||||
for entry in import_re.findall(content):
|
||||
if entry.startswith("."):
|
||||
target = posixpath.normpath(posixpath.join(posixpath.dirname(current), entry))
|
||||
else:
|
||||
target = entry
|
||||
if target in sources and target not in closure:
|
||||
stack.append(target)
|
||||
|
||||
reduced = json.loads(json.dumps(input_data))
|
||||
reduced["sources"] = {name: sources[name] for name in sorted(closure)}
|
||||
|
||||
with open(out_path, "w", encoding="utf-8") as fh:
|
||||
json.dump(reduced, fh, separators=(",", ":"))
|
||||
PY
|
||||
compiler_version="$(jq -r '.solcLongVersion | "v" + .' "${build}")"
|
||||
evm_version="$(jq -r '.input.settings.evmVersion // "default"' "${build}")"
|
||||
optimization_runs="$(jq -r '.input.settings.optimizer.runs // 200' "${build}")"
|
||||
optimization_enabled="$(jq -r '.input.settings.optimizer.enabled // true' "${build}")"
|
||||
license_type="mit"
|
||||
|
||||
response="$(
|
||||
curl --max-time 180 -fsS -X POST \
|
||||
-F "compiler_version=${compiler_version}" \
|
||||
-F "contract_name=${contract_path}" \
|
||||
-F "autodetect_constructor_args=false" \
|
||||
-F "constructor_args=${constructor_args}" \
|
||||
-F "optimization_runs=${optimization_runs}" \
|
||||
-F "is_optimization_enabled=${optimization_enabled}" \
|
||||
-F "evm_version=${evm_version}" \
|
||||
-F "license_type=${license_type}" \
|
||||
-F "files[0]=@${input_file};type=application/json" \
|
||||
"${BLOCKSCOUT_URL}/api/v2/smart-contracts/${addr}/verification/via/standard-input"
|
||||
)" || {
|
||||
rm -f "${input_file}"
|
||||
fail "${label}: Blockscout standard-input submission failed."
|
||||
}
|
||||
rm -f "${input_file}"
|
||||
|
||||
message="$(jq -r '.message // empty' <<<"${response}")"
|
||||
if [[ "${message}" == "Smart-contract verification started" ]]; then
|
||||
ok "${label} standard-input verification submission accepted."
|
||||
return 0
|
||||
fi
|
||||
|
||||
warn "${label} standard-input verification returned: ${response}"
|
||||
return 1
|
||||
}
|
||||
|
||||
submit_standard_input_from_forge() {
|
||||
local label="$1"
|
||||
local addr="$2"
|
||||
local contract_path="$3"
|
||||
local constructor_args="$4"
|
||||
local input_file response message
|
||||
local compiler_version evm_version optimization_runs via_ir_flag artifact_json
|
||||
|
||||
compiler_version="${ROUTE_STACK_SOLC_VERSION}"
|
||||
evm_version="${ROUTE_STACK_EVM_VERSION}"
|
||||
optimization_runs="${ROUTE_STACK_OPT_RUNS}"
|
||||
via_ir_flag=(--via-ir)
|
||||
|
||||
artifact_json="$(foundry_artifact_json_path "${label}" || true)"
|
||||
if [[ -n "${artifact_json}" && -f "${artifact_json}" ]]; then
|
||||
compiler_version="v$(jq -r '.metadata.compiler.version // empty' "${artifact_json}")"
|
||||
evm_version="$(jq -r '.metadata.settings.evmVersion // "default"' "${artifact_json}")"
|
||||
optimization_runs="$(jq -r '.metadata.settings.optimizer.runs // 200' "${artifact_json}")"
|
||||
if [[ "$(jq -r '.metadata.settings.viaIR // false' "${artifact_json}")" != "true" ]]; then
|
||||
via_ir_flag=()
|
||||
fi
|
||||
fi
|
||||
|
||||
runtime_hash_report "${label}" "${addr}"
|
||||
input_file="$(mktemp)"
|
||||
(
|
||||
cd "${SMOM_ROOT}"
|
||||
forge verify-contract "${addr}" "${contract_path}" \
|
||||
--chain-id 138 \
|
||||
--root . \
|
||||
--compiler-version "${compiler_version}" \
|
||||
--num-of-optimizations "${optimization_runs}" \
|
||||
"${via_ir_flag[@]}" \
|
||||
--evm-version "${evm_version}" \
|
||||
--show-standard-json-input >"${input_file}"
|
||||
) || {
|
||||
rm -f "${input_file}"
|
||||
fail "${label}: failed to render Foundry standard-input from deployment sources."
|
||||
}
|
||||
|
||||
response="$(
|
||||
curl --max-time 180 -fsS -X POST \
|
||||
-F "compiler_version=${compiler_version}" \
|
||||
-F "contract_name=${contract_path}" \
|
||||
-F "autodetect_constructor_args=false" \
|
||||
-F "constructor_args=${constructor_args}" \
|
||||
-F "optimization_runs=${optimization_runs}" \
|
||||
-F "is_optimization_enabled=true" \
|
||||
-F "evm_version=${evm_version}" \
|
||||
-F "license_type=mit" \
|
||||
-F "files[0]=@${input_file};type=application/json" \
|
||||
"${BLOCKSCOUT_URL}/api/v2/smart-contracts/${addr}/verification/via/standard-input"
|
||||
)" || {
|
||||
rm -f "${input_file}"
|
||||
fail "${label}: Blockscout Foundry standard-input submission failed."
|
||||
}
|
||||
rm -f "${input_file}"
|
||||
|
||||
message="$(jq -r '.message // empty' <<<"${response}")"
|
||||
if [[ "${message}" == "Smart-contract verification started" ]]; then
|
||||
ok "${label} Foundry standard-input verification submission accepted."
|
||||
return 0
|
||||
fi
|
||||
|
||||
warn "${label} Foundry standard-input verification returned: ${response}"
|
||||
return 1
|
||||
}
|
||||
|
||||
submit_best_verification() {
|
||||
local label="$1"
|
||||
local addr="$2"
|
||||
local path="$3"
|
||||
local expected_name="$4"
|
||||
local constructor_sig="$5"
|
||||
shift 5
|
||||
local constructor_args=("$@")
|
||||
local encoded=""
|
||||
|
||||
if [[ -n "${constructor_sig}" ]]; then
|
||||
encoded="$(cast abi-encode "${constructor_sig}" "${constructor_args[@]}")"
|
||||
fi
|
||||
|
||||
# Prefer the Foundry deployment lineage for the route stack. The earlier
|
||||
# Hardhat dbg/build-info path drifted away from the actual deployed compiler/EVM
|
||||
# settings and is kept only as a compatibility fallback.
|
||||
if submit_standard_input_from_forge "${label}" "${addr}" "${path}" "${encoded}"; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
if artifact_dbg_path "${label}" >/dev/null 2>&1; then
|
||||
warn "${label}: falling back to artifact-derived standard-input after Foundry mismatch."
|
||||
submit_standard_input_from_artifact "${label}" "${addr}" "${path}" "${encoded}" || return 1
|
||||
return 0
|
||||
fi
|
||||
|
||||
warn "${label}: falling back to legacy Forge flattened verification path."
|
||||
submit_verification "${label}" "${addr}" "${path}" "${expected_name}" "${constructor_sig}" "${constructor_args[@]}"
|
||||
}
|
||||
|
||||
WETH="0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
|
||||
USDT="0x004b63A7B5b0E06f6bB6adb4a5F9f590BF3182D1"
|
||||
USDC="0x71D6687F38b93CCad569Fa6352c876eea967201b"
|
||||
DAI_PLACEHOLDER="0x6B175474E89094C44Da98b954EedeAC495271d0F"
|
||||
|
||||
ROUTER_V2="$(jq -r '.chains["138"].contracts.EnhancedSwapRouterV2' "${PROJECT_ROOT}/config/smart-contracts-master.json")"
|
||||
COORDINATOR_V2="$(jq -r '.chains["138"].contracts.IntentBridgeCoordinatorV2' "${PROJECT_ROOT}/config/smart-contracts-master.json")"
|
||||
DODO_ADAPTER="$(jq -r '.chains["138"].contracts.DodoRouteExecutorAdapter' "${PROJECT_ROOT}/config/smart-contracts-master.json")"
|
||||
DODO_V3_ADAPTER="$(jq -r '.chains["138"].contracts.DodoV3RouteExecutorAdapter' "${PROJECT_ROOT}/config/smart-contracts-master.json")"
|
||||
UNISWAP_V3_ADAPTER="$(jq -r '.chains["138"].contracts.UniswapV3RouteExecutorAdapter' "${PROJECT_ROOT}/config/smart-contracts-master.json")"
|
||||
BALANCER_ADAPTER="$(jq -r '.chains["138"].contracts.BalancerRouteExecutorAdapter' "${PROJECT_ROOT}/config/smart-contracts-master.json")"
|
||||
CURVE_ADAPTER="$(jq -r '.chains["138"].contracts.CurveRouteExecutorAdapter' "${PROJECT_ROOT}/config/smart-contracts-master.json")"
|
||||
ONEINCH_ADAPTER="$(jq -r '.chains["138"].contracts.OneInchRouteExecutorAdapter' "${PROJECT_ROOT}/config/smart-contracts-master.json")"
|
||||
PILOT_UNISWAP="$(jq -r '.chains["138"].contracts.PilotUniswapV3Router' "${PROJECT_ROOT}/config/smart-contracts-master.json")"
|
||||
PILOT_BALANCER="$(jq -r '.chains["138"].contracts.PilotBalancerVault' "${PROJECT_ROOT}/config/smart-contracts-master.json")"
|
||||
PILOT_CURVE="$(jq -r '.chains["138"].contracts.PilotCurve3Pool' "${PROJECT_ROOT}/config/smart-contracts-master.json")"
|
||||
PILOT_ONEINCH="$(jq -r '.chains["138"].contracts.PilotOneInchRouter' "${PROJECT_ROOT}/config/smart-contracts-master.json")"
|
||||
|
||||
log "Chain 138 route execution stack Blockscout verification"
|
||||
log "RPC: ${RPC_URL}"
|
||||
log "Explorer API: ${BLOCKSCOUT_API_BASE}"
|
||||
log
|
||||
|
||||
if (( STATUS_ONLY )); then
|
||||
for pair in \
|
||||
"EnhancedSwapRouterV2:${ROUTER_V2}:EnhancedSwapRouterV2" \
|
||||
"IntentBridgeCoordinatorV2:${COORDINATOR_V2}:IntentBridgeCoordinatorV2" \
|
||||
"DodoRouteExecutorAdapter:${DODO_ADAPTER}:DodoRouteExecutorAdapter" \
|
||||
"DodoV3RouteExecutorAdapter:${DODO_V3_ADAPTER}:DodoV3RouteExecutorAdapter" \
|
||||
"UniswapV3RouteExecutorAdapter:${UNISWAP_V3_ADAPTER}:UniswapV3RouteExecutorAdapter" \
|
||||
"BalancerRouteExecutorAdapter:${BALANCER_ADAPTER}:BalancerRouteExecutorAdapter" \
|
||||
"CurveRouteExecutorAdapter:${CURVE_ADAPTER}:CurveRouteExecutorAdapter" \
|
||||
"OneInchRouteExecutorAdapter:${ONEINCH_ADAPTER}:OneInchRouteExecutorAdapter" \
|
||||
"Chain138PilotUniswapV3Router:${PILOT_UNISWAP}:Chain138PilotUniswapV3Router" \
|
||||
"Chain138PilotBalancerVault:${PILOT_BALANCER}:Chain138PilotBalancerVault" \
|
||||
"Chain138PilotCurve3Pool:${PILOT_CURVE}:Chain138PilotCurve3Pool" \
|
||||
"Chain138PilotOneInchAggregationRouter:${PILOT_ONEINCH}:Chain138PilotOneInchAggregationRouter"
|
||||
do
|
||||
IFS=":" read -r label addr expected <<<"${pair}"
|
||||
should_handle "${label}" || continue
|
||||
if is_verified "${addr}" "${expected}"; then
|
||||
ok "${label} already verified on Blockscout."
|
||||
else
|
||||
warn "${label} not yet verified on Blockscout."
|
||||
fi
|
||||
done
|
||||
exit 0
|
||||
fi
|
||||
|
||||
should_handle "EnhancedSwapRouterV2" && submit_best_verification \
|
||||
"EnhancedSwapRouterV2" \
|
||||
"${ROUTER_V2}" \
|
||||
"contracts/bridge/trustless/EnhancedSwapRouterV2.sol:EnhancedSwapRouterV2" \
|
||||
"EnhancedSwapRouterV2" \
|
||||
"constructor(address,address,address,address)" \
|
||||
"${WETH}" "${USDT}" "${USDC}" "${DAI_PLACEHOLDER}"
|
||||
|
||||
should_handle "IntentBridgeCoordinatorV2" && submit_best_verification \
|
||||
"IntentBridgeCoordinatorV2" \
|
||||
"${COORDINATOR_V2}" \
|
||||
"contracts/bridge/trustless/IntentBridgeCoordinatorV2.sol:IntentBridgeCoordinatorV2" \
|
||||
"IntentBridgeCoordinatorV2" \
|
||||
"constructor(address)" \
|
||||
"${ROUTER_V2}"
|
||||
|
||||
should_handle "DodoRouteExecutorAdapter" && submit_best_verification \
|
||||
"DodoRouteExecutorAdapter" \
|
||||
"${DODO_ADAPTER}" \
|
||||
"contracts/bridge/trustless/adapters/DodoRouteExecutorAdapter.sol:DodoRouteExecutorAdapter" \
|
||||
"DodoRouteExecutorAdapter" \
|
||||
""
|
||||
|
||||
should_handle "DodoV3RouteExecutorAdapter" && submit_best_verification \
|
||||
"DodoV3RouteExecutorAdapter" \
|
||||
"${DODO_V3_ADAPTER}" \
|
||||
"contracts/bridge/trustless/adapters/DodoV3RouteExecutorAdapter.sol:DodoV3RouteExecutorAdapter" \
|
||||
"DodoV3RouteExecutorAdapter" \
|
||||
""
|
||||
|
||||
should_handle "UniswapV3RouteExecutorAdapter" && submit_best_verification \
|
||||
"UniswapV3RouteExecutorAdapter" \
|
||||
"${UNISWAP_V3_ADAPTER}" \
|
||||
"contracts/bridge/trustless/adapters/UniswapV3RouteExecutorAdapter.sol:UniswapV3RouteExecutorAdapter" \
|
||||
"UniswapV3RouteExecutorAdapter" \
|
||||
""
|
||||
|
||||
should_handle "BalancerRouteExecutorAdapter" && submit_best_verification \
|
||||
"BalancerRouteExecutorAdapter" \
|
||||
"${BALANCER_ADAPTER}" \
|
||||
"contracts/bridge/trustless/adapters/BalancerRouteExecutorAdapter.sol:BalancerRouteExecutorAdapter" \
|
||||
"BalancerRouteExecutorAdapter" \
|
||||
""
|
||||
|
||||
should_handle "CurveRouteExecutorAdapter" && submit_best_verification \
|
||||
"CurveRouteExecutorAdapter" \
|
||||
"${CURVE_ADAPTER}" \
|
||||
"contracts/bridge/trustless/adapters/CurveRouteExecutorAdapter.sol:CurveRouteExecutorAdapter" \
|
||||
"CurveRouteExecutorAdapter" \
|
||||
""
|
||||
|
||||
should_handle "OneInchRouteExecutorAdapter" && submit_best_verification \
|
||||
"OneInchRouteExecutorAdapter" \
|
||||
"${ONEINCH_ADAPTER}" \
|
||||
"contracts/bridge/trustless/adapters/OneInchRouteExecutorAdapter.sol:OneInchRouteExecutorAdapter" \
|
||||
"OneInchRouteExecutorAdapter" \
|
||||
""
|
||||
|
||||
should_handle "Chain138PilotUniswapV3Router" && submit_best_verification \
|
||||
"Chain138PilotUniswapV3Router" \
|
||||
"${PILOT_UNISWAP}" \
|
||||
"contracts/bridge/trustless/pilot/Chain138PilotDexVenues.sol:Chain138PilotUniswapV3Router" \
|
||||
"Chain138PilotUniswapV3Router" \
|
||||
""
|
||||
|
||||
should_handle "Chain138PilotBalancerVault" && submit_best_verification \
|
||||
"Chain138PilotBalancerVault" \
|
||||
"${PILOT_BALANCER}" \
|
||||
"contracts/bridge/trustless/pilot/Chain138PilotDexVenues.sol:Chain138PilotBalancerVault" \
|
||||
"Chain138PilotBalancerVault" \
|
||||
""
|
||||
|
||||
should_handle "Chain138PilotCurve3Pool" && submit_best_verification \
|
||||
"Chain138PilotCurve3Pool" \
|
||||
"${PILOT_CURVE}" \
|
||||
"contracts/bridge/trustless/pilot/Chain138PilotDexVenues.sol:Chain138PilotCurve3Pool" \
|
||||
"Chain138PilotCurve3Pool" \
|
||||
"constructor(address,address,address,uint256)" \
|
||||
"${USDT}" "${USDC}" "0x0000000000000000000000000000000000000000" "4"
|
||||
|
||||
should_handle "Chain138PilotOneInchAggregationRouter" && submit_best_verification \
|
||||
"Chain138PilotOneInchAggregationRouter" \
|
||||
"${PILOT_ONEINCH}" \
|
||||
"contracts/bridge/trustless/pilot/Chain138PilotDexVenues.sol:Chain138PilotOneInchAggregationRouter" \
|
||||
"Chain138PilotOneInchAggregationRouter" \
|
||||
""
|
||||
|
||||
if (( NO_WAIT )); then
|
||||
log
|
||||
ok "Chain 138 route execution stack verification submissions complete."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
log
|
||||
ok "Chain 138 route execution stack verification flow complete."
|
||||
@@ -273,16 +273,9 @@ submit_v2_standard_input() {
|
||||
|
||||
compiler_version="v0.8.16+commit.07a7930e"
|
||||
license_type="busl_1_1"
|
||||
input_file="$(mktemp)"
|
||||
|
||||
jq '
|
||||
del(.settings.libraries[""].__CACHE_BREAKER__)
|
||||
| if (.settings.libraries[""]? == {}) then del(.settings.libraries[""]) else . end
|
||||
| if (.settings.libraries? == {}) then del(.settings.libraries) else . end
|
||||
' "${input}" > "${input_file}"
|
||||
|
||||
response="$(
|
||||
curl --max-time 120 -fsS -X POST \
|
||||
curl --max-time 30 -fsS -X POST \
|
||||
-F "compiler_version=${compiler_version}" \
|
||||
-F "contract_name=${path}" \
|
||||
-F "autodetect_constructor_args=false" \
|
||||
@@ -290,13 +283,9 @@ submit_v2_standard_input() {
|
||||
-F "optimization_runs=200" \
|
||||
-F "is_optimization_enabled=true" \
|
||||
-F "license_type=${license_type}" \
|
||||
-F "files[0]=@${input_file};type=application/json" \
|
||||
-F "files[0]=@${input};type=application/json" \
|
||||
"${BLOCKSCOUT_URL}/api/v2/smart-contracts/${addr}/verification/via/standard-input"
|
||||
)" || {
|
||||
rm -f "${input_file}"
|
||||
fail "${label}: Blockscout standard-input submission failed."
|
||||
}
|
||||
rm -f "${input_file}"
|
||||
)" || fail "${label}: Blockscout standard-input submission failed."
|
||||
|
||||
message="$(jq -r '.message // empty' <<<"${response}")"
|
||||
if [[ "${message}" == "Smart-contract verification started" ]]; then
|
||||
|
||||
@@ -0,0 +1,133 @@
|
||||
{
|
||||
"statusDate": "2026-04-14",
|
||||
"namespaces": {
|
||||
"chain138": "c* V2",
|
||||
"allMainnet651940": "cA*",
|
||||
"publicConnectedNetworks": "cW*"
|
||||
},
|
||||
"executionPhases": [
|
||||
{
|
||||
"id": "P0",
|
||||
"namespace": "c* V2",
|
||||
"scope": "Chain 138 canonical hub and Wave 1"
|
||||
},
|
||||
{
|
||||
"id": "P1",
|
||||
"namespace": "c* V2",
|
||||
"scope": "Chain 138 cross-links and gas-native hubs"
|
||||
},
|
||||
{
|
||||
"id": "P2",
|
||||
"namespace": "cA*",
|
||||
"scope": "ALL Mainnet canonical hub and Wave 1"
|
||||
},
|
||||
{
|
||||
"id": "P3",
|
||||
"namespace": "cA*",
|
||||
"scope": "ALL Mainnet cross-links and gas-native hubs"
|
||||
},
|
||||
{
|
||||
"id": "P4",
|
||||
"namespace": "cW*",
|
||||
"scope": "Public cW stable hub, Wave 1, and gas-native mesh"
|
||||
},
|
||||
{
|
||||
"id": "P5",
|
||||
"namespace": "all",
|
||||
"scope": "Spot venue protocol completion"
|
||||
},
|
||||
{
|
||||
"id": "P6",
|
||||
"namespace": "all",
|
||||
"scope": "Aggregator, reserve, and market protocol completion"
|
||||
},
|
||||
{
|
||||
"id": "P7",
|
||||
"namespace": "all",
|
||||
"scope": "MEV completion"
|
||||
}
|
||||
],
|
||||
"protocolsRequired": [
|
||||
"DODO",
|
||||
"Uniswap v3",
|
||||
"Uniswap v2",
|
||||
"SushiSwap",
|
||||
"Curve",
|
||||
"Balancer",
|
||||
"1Inch",
|
||||
"Aave",
|
||||
"GMX",
|
||||
"dYdX"
|
||||
],
|
||||
"chain138CanonicalPools": [
|
||||
"cUSDT V2 / cUSDC V2",
|
||||
"cUSDT V2 / USDT",
|
||||
"cUSDC V2 / USDC",
|
||||
"cEURC V2 / cUSDC V2",
|
||||
"cEURT V2 / cUSDC V2",
|
||||
"cGBPC V2 / cUSDC V2",
|
||||
"cGBPT V2 / cUSDC V2",
|
||||
"cAUDC V2 / cUSDC V2",
|
||||
"cJPYC V2 / cUSDC V2",
|
||||
"cCHFC V2 / cUSDC V2",
|
||||
"cCADC V2 / cUSDC V2",
|
||||
"cXAUC V2 / cUSDC V2",
|
||||
"cXAUT V2 / cUSDC V2",
|
||||
"cEURC V2 / cEURT V2",
|
||||
"cGBPC V2 / cGBPT V2",
|
||||
"cXAUC V2 / cXAUT V2",
|
||||
"cETH / WETH",
|
||||
"cETH / cUSDC V2",
|
||||
"cETHL2 / cUSDC V2",
|
||||
"cBNB / cUSDC V2",
|
||||
"cPOL / cUSDC V2",
|
||||
"cAVAX / cUSDC V2",
|
||||
"cCRO / cUSDC V2",
|
||||
"cXDAI / cUSDC V2",
|
||||
"cCELO / cUSDC V2",
|
||||
"cWEMIX / cUSDC V2"
|
||||
],
|
||||
"allMainnetCanonicalPools": [
|
||||
"cAUSDT / cAUSDC",
|
||||
"cAUSDT / AUSDT",
|
||||
"cAUSDC / USDC",
|
||||
"cAEURC / cAUSDC",
|
||||
"cAEURT / cAUSDC",
|
||||
"cAGBPC / cAUSDC",
|
||||
"cAGBPT / cAUSDC",
|
||||
"cAAUDC / cAUSDC",
|
||||
"cAJPYC / cAUSDC",
|
||||
"cACHFC / cAUSDC",
|
||||
"cACADC / cAUSDC",
|
||||
"cAXAUC / cAUSDC",
|
||||
"cAXAUT / cAUSDC",
|
||||
"cAEURC / cAEURT",
|
||||
"cAGBPC / cAGBPT",
|
||||
"cAXAUC / cAXAUT",
|
||||
"cAETH / WETH",
|
||||
"cAETH / cAUSDC",
|
||||
"cAWALL / WALL",
|
||||
"cAWALL / cAUSDC"
|
||||
],
|
||||
"publicMeshTemplate": {
|
||||
"stableHub": [
|
||||
"cWUSDT / USDC",
|
||||
"cWUSDC / USDC",
|
||||
"cWUSDT / USDT",
|
||||
"cWUSDC / USDT",
|
||||
"cWUSDT / cWUSDC"
|
||||
],
|
||||
"wave1VsUsdc": [
|
||||
"cWEURC / USDC",
|
||||
"cWEURT / USDC",
|
||||
"cWGBPC / USDC",
|
||||
"cWGBPT / USDC",
|
||||
"cWAUDC / USDC",
|
||||
"cWJPYC / USDC",
|
||||
"cWCHFC / USDC",
|
||||
"cWCADC / USDC",
|
||||
"cWXAUC / USDC",
|
||||
"cWXAUT / USDC"
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,169 @@
|
||||
{
|
||||
"statusDate": "2026-04-15",
|
||||
"defaultFields": {
|
||||
"status": "todo",
|
||||
"deployed": false,
|
||||
"seeded": false,
|
||||
"validated": false,
|
||||
"live": false,
|
||||
"mevReady": false
|
||||
},
|
||||
"chain138": {
|
||||
"namespace": "c* V2",
|
||||
"entries": [
|
||||
{ "pair": "cUSDT V2 / cUSDC V2", "priority": "P0" },
|
||||
{ "pair": "cUSDT V2 / USDT", "priority": "P0" },
|
||||
{ "pair": "cUSDC V2 / USDC", "priority": "P0" },
|
||||
{ "pair": "cEURC V2 / cUSDC V2", "priority": "P0" },
|
||||
{ "pair": "cEURT V2 / cUSDC V2", "priority": "P0" },
|
||||
{ "pair": "cGBPC V2 / cUSDC V2", "priority": "P0" },
|
||||
{ "pair": "cGBPT V2 / cUSDC V2", "priority": "P0" },
|
||||
{ "pair": "cAUDC V2 / cUSDC V2", "priority": "P0" },
|
||||
{ "pair": "cJPYC V2 / cUSDC V2", "priority": "P0" },
|
||||
{ "pair": "cCHFC V2 / cUSDC V2", "priority": "P0" },
|
||||
{ "pair": "cCADC V2 / cUSDC V2", "priority": "P0" },
|
||||
{ "pair": "cXAUC V2 / cUSDC V2", "priority": "P0" },
|
||||
{ "pair": "cXAUT V2 / cUSDC V2", "priority": "P0" },
|
||||
{ "pair": "cEURC V2 / cEURT V2", "priority": "P1" },
|
||||
{ "pair": "cGBPC V2 / cGBPT V2", "priority": "P1" },
|
||||
{ "pair": "cXAUC V2 / cXAUT V2", "priority": "P1" },
|
||||
{ "pair": "cETH / WETH", "priority": "P1" },
|
||||
{ "pair": "cETH / cUSDC V2", "priority": "P1" },
|
||||
{ "pair": "cETHL2 / cUSDC V2", "priority": "P2" },
|
||||
{ "pair": "cBNB / cUSDC V2", "priority": "P2" },
|
||||
{ "pair": "cPOL / cUSDC V2", "priority": "P2" },
|
||||
{ "pair": "cAVAX / cUSDC V2", "priority": "P2" },
|
||||
{ "pair": "cCRO / cUSDC V2", "priority": "P2" },
|
||||
{ "pair": "cXDAI / cUSDC V2", "priority": "P2" },
|
||||
{ "pair": "cCELO / cUSDC V2", "priority": "P2" },
|
||||
{ "pair": "cWEMIX / cUSDC V2", "priority": "P2" }
|
||||
]
|
||||
},
|
||||
"allMainnet651940": {
|
||||
"namespace": "cA*",
|
||||
"entries": [
|
||||
{ "pair": "cAUSDT / cAUSDC", "priority": "P0" },
|
||||
{ "pair": "cAUSDT / AUSDT", "priority": "P0" },
|
||||
{ "pair": "cAUSDC / USDC", "priority": "P0" },
|
||||
{ "pair": "cAEURC / cAUSDC", "priority": "P0" },
|
||||
{ "pair": "cAEURT / cAUSDC", "priority": "P0" },
|
||||
{ "pair": "cAGBPC / cAUSDC", "priority": "P0" },
|
||||
{ "pair": "cAGBPT / cAUSDC", "priority": "P0" },
|
||||
{ "pair": "cAAUDC / cAUSDC", "priority": "P0" },
|
||||
{ "pair": "cAJPYC / cAUSDC", "priority": "P0" },
|
||||
{ "pair": "cACHFC / cAUSDC", "priority": "P0" },
|
||||
{ "pair": "cACADC / cAUSDC", "priority": "P0" },
|
||||
{ "pair": "cAXAUC / cAUSDC", "priority": "P0" },
|
||||
{ "pair": "cAXAUT / cAUSDC", "priority": "P0" },
|
||||
{ "pair": "cAEURC / cAEURT", "priority": "P1" },
|
||||
{ "pair": "cAGBPC / cAGBPT", "priority": "P1" },
|
||||
{ "pair": "cAXAUC / cAXAUT", "priority": "P1" },
|
||||
{ "pair": "cAETH / WETH", "priority": "P1" },
|
||||
{ "pair": "cAETH / cAUSDC", "priority": "P1" },
|
||||
{ "pair": "cAWALL / WALL", "priority": "P1" },
|
||||
{ "pair": "cAWALL / cAUSDC", "priority": "P1" }
|
||||
]
|
||||
},
|
||||
"publicMesh": {
|
||||
"1": {
|
||||
"namespace": "cW*",
|
||||
"entries": [
|
||||
"cWUSDT / USDC","cWUSDC / USDC","cWUSDT / USDT","cWUSDC / USDT","cWUSDT / cWUSDC",
|
||||
"cWEURC / USDC","cWEURT / USDC","cWGBPC / USDC","cWGBPT / USDC","cWAUDC / USDC",
|
||||
"cWJPYC / USDC","cWCHFC / USDC","cWCADC / USDC","cWXAUC / USDC","cWXAUT / USDC",
|
||||
"cWETH / WETH","cWETH / USDC"
|
||||
]
|
||||
},
|
||||
"10": {
|
||||
"namespace": "cW*",
|
||||
"entries": [
|
||||
"cWUSDT / USDC","cWUSDC / USDC","cWUSDT / USDT","cWUSDC / USDT","cWUSDT / cWUSDC",
|
||||
"cWEURC / USDC","cWEURT / USDC","cWGBPC / USDC","cWGBPT / USDC","cWAUDC / USDC",
|
||||
"cWJPYC / USDC","cWCHFC / USDC","cWCADC / USDC","cWXAUC / USDC","cWXAUT / USDC",
|
||||
"cWETHL2 / WETH","cWETHL2 / USDC"
|
||||
]
|
||||
},
|
||||
"25": {
|
||||
"namespace": "cW*",
|
||||
"entries": [
|
||||
"cWUSDT / USDC","cWUSDC / USDC","cWUSDT / USDT","cWUSDC / USDT","cWUSDT / cWUSDC",
|
||||
"cWEURC / USDC","cWEURT / USDC","cWGBPC / USDC","cWGBPT / USDC","cWAUDC / USDC",
|
||||
"cWJPYC / USDC","cWCHFC / USDC","cWCADC / USDC","cWXAUC / USDC","cWXAUT / USDC",
|
||||
"cWCRO / WCRO","cWCRO / USDT"
|
||||
]
|
||||
},
|
||||
"56": {
|
||||
"namespace": "cW*",
|
||||
"entries": [
|
||||
"cWUSDT / USDC","cWUSDC / USDC","cWUSDT / USDT","cWUSDC / USDT","cWUSDT / cWUSDC",
|
||||
"cWEURC / USDC","cWEURT / USDC","cWGBPC / USDC","cWGBPT / USDC","cWAUDC / USDC",
|
||||
"cWJPYC / USDC","cWCHFC / USDC","cWCADC / USDC","cWXAUC / USDC","cWXAUT / USDC",
|
||||
"cWBNB / WBNB","cWBNB / USDT"
|
||||
]
|
||||
},
|
||||
"100": {
|
||||
"namespace": "cW*",
|
||||
"entries": [
|
||||
"cWUSDT / USDC","cWUSDC / USDC","cWUSDT / USDT","cWUSDC / USDT","cWUSDT / cWUSDC",
|
||||
"cWEURC / USDC","cWEURT / USDC","cWGBPC / USDC","cWGBPT / USDC","cWAUDC / USDC",
|
||||
"cWJPYC / USDC","cWCHFC / USDC","cWCADC / USDC","cWXAUC / USDC","cWXAUT / USDC",
|
||||
"cWXDAI / WXDAI","cWXDAI / USDC"
|
||||
]
|
||||
},
|
||||
"137": {
|
||||
"namespace": "cW*",
|
||||
"entries": [
|
||||
"cWUSDT / USDC","cWUSDC / USDC","cWUSDT / USDT","cWUSDC / USDT","cWUSDT / cWUSDC",
|
||||
"cWEURC / USDC","cWEURT / USDC","cWGBPC / USDC","cWGBPT / USDC","cWAUDC / USDC",
|
||||
"cWJPYC / USDC","cWCHFC / USDC","cWCADC / USDC","cWXAUC / USDC","cWXAUT / USDC",
|
||||
"cWPOL / WPOL","cWPOL / USDC"
|
||||
]
|
||||
},
|
||||
"8453": {
|
||||
"namespace": "cW*",
|
||||
"entries": [
|
||||
"cWUSDT / USDC","cWUSDC / USDC","cWUSDT / USDT","cWUSDC / USDT","cWUSDT / cWUSDC",
|
||||
"cWEURC / USDC","cWEURT / USDC","cWGBPC / USDC","cWGBPT / USDC","cWAUDC / USDC",
|
||||
"cWJPYC / USDC","cWCHFC / USDC","cWCADC / USDC","cWXAUC / USDC","cWXAUT / USDC",
|
||||
"cWETHL2 / WETH","cWETHL2 / USDC"
|
||||
]
|
||||
},
|
||||
"42161": {
|
||||
"namespace": "cW*",
|
||||
"entries": [
|
||||
"cWUSDT / USDC","cWUSDC / USDC","cWUSDT / USDT","cWUSDC / USDT","cWUSDT / cWUSDC",
|
||||
"cWEURC / USDC","cWEURT / USDC","cWGBPC / USDC","cWGBPT / USDC","cWAUDC / USDC",
|
||||
"cWJPYC / USDC","cWCHFC / USDC","cWCADC / USDC","cWXAUC / USDC","cWXAUT / USDC",
|
||||
"cWETHL2 / WETH","cWETHL2 / USDC"
|
||||
]
|
||||
},
|
||||
"42220": {
|
||||
"namespace": "cW*",
|
||||
"entries": [
|
||||
"cWUSDT / USDC","cWUSDC / USDC","cWUSDT / USDT","cWUSDC / USDT","cWUSDT / cWUSDC",
|
||||
"cWEURC / USDC","cWEURT / USDC","cWGBPC / USDC","cWGBPT / USDC","cWAUDC / USDC",
|
||||
"cWJPYC / USDC","cWCHFC / USDC","cWCADC / USDC","cWXAUC / USDC","cWXAUT / USDC",
|
||||
"cWCELO / WCELO","cWCELO / USDC"
|
||||
]
|
||||
},
|
||||
"43114": {
|
||||
"namespace": "cW*",
|
||||
"entries": [
|
||||
"cWUSDT / USDC","cWUSDC / USDC","cWUSDT / USDT","cWUSDC / USDT","cWUSDT / cWUSDC",
|
||||
"cWEURC / USDC","cWEURT / USDC","cWGBPC / USDC","cWGBPT / USDC","cWAUDC / USDC",
|
||||
"cWJPYC / USDC","cWCHFC / USDC","cWCADC / USDC","cWXAUC / USDC","cWXAUT / USDC",
|
||||
"cWAVAX / WAVAX","cWAVAX / USDC"
|
||||
]
|
||||
},
|
||||
"1111": {
|
||||
"namespace": "cW*",
|
||||
"entries": [
|
||||
"cWUSDT / USDC","cWUSDC / USDC","cWUSDT / USDT","cWUSDC / USDT","cWUSDT / cWUSDC",
|
||||
"cWEURC / USDC","cWEURT / USDC","cWGBPC / USDC","cWGBPT / USDC","cWAUDC / USDC",
|
||||
"cWJPYC / USDC","cWCHFC / USDC","cWCADC / USDC","cWXAUC / USDC","cWXAUT / USDC",
|
||||
"cWWEMIX / WWEMIX","cWWEMIX / USDC"
|
||||
],
|
||||
"statusOverride": "planned"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,82 @@
|
||||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://d-bis.org/schemas/gru-v2-full-mesh-pool-tracker.json",
|
||||
"title": "GRU v2 Full Mesh Pool Tracker",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"statusDate",
|
||||
"defaultFields",
|
||||
"chain138",
|
||||
"allMainnet651940",
|
||||
"publicMesh"
|
||||
],
|
||||
"properties": {
|
||||
"statusDate": {
|
||||
"type": "string",
|
||||
"pattern": "^\\d{4}-\\d{2}-\\d{2}$"
|
||||
},
|
||||
"defaultFields": {
|
||||
"type": "object",
|
||||
"required": ["status", "deployed", "seeded", "validated", "live", "mevReady"],
|
||||
"properties": {
|
||||
"status": {
|
||||
"type": "string",
|
||||
"enum": ["todo", "in_progress", "blocked", "done"]
|
||||
},
|
||||
"deployed": { "type": "boolean" },
|
||||
"seeded": { "type": "boolean" },
|
||||
"validated": { "type": "boolean" },
|
||||
"live": { "type": "boolean" },
|
||||
"mevReady": { "type": "boolean" }
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"chain138": { "$ref": "#/$defs/namedBucket" },
|
||||
"allMainnet651940": { "$ref": "#/$defs/namedBucket" },
|
||||
"publicMesh": {
|
||||
"type": "object",
|
||||
"minProperties": 1,
|
||||
"additionalProperties": { "$ref": "#/$defs/meshBucket" }
|
||||
}
|
||||
},
|
||||
"$defs": {
|
||||
"pairEntry": {
|
||||
"type": "object",
|
||||
"required": ["pair"],
|
||||
"properties": {
|
||||
"pair": { "type": "string", "minLength": 3 },
|
||||
"priority": { "type": "string", "minLength": 2 }
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"namedBucket": {
|
||||
"type": "object",
|
||||
"required": ["namespace", "entries"],
|
||||
"properties": {
|
||||
"namespace": { "type": "string", "minLength": 2 },
|
||||
"entries": {
|
||||
"type": "array",
|
||||
"items": { "$ref": "#/$defs/pairEntry" }
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"meshBucket": {
|
||||
"type": "object",
|
||||
"required": ["namespace", "entries"],
|
||||
"properties": {
|
||||
"namespace": { "type": "string", "minLength": 2 },
|
||||
"entries": {
|
||||
"type": "array",
|
||||
"items": { "type": "string", "minLength": 3 }
|
||||
},
|
||||
"statusOverride": {
|
||||
"type": "string",
|
||||
"enum": ["planned", "todo", "in_progress", "blocked", "done"]
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
Reference in New Issue
Block a user