Add All Mainnet 1inch route preflight
Some checks failed
Deploy to Phoenix / validate (push) Successful in 1m20s
Deploy to Phoenix / deploy (push) Successful in 46s
Deploy to Phoenix / deploy-atomic-swap-dapp (push) Has been cancelled
Deploy to Phoenix / cloudflare (push) Has been cancelled
phoenix-deploy Deployed to atomic-swap-dapp-live

This commit is contained in:
defiQUG
2026-04-30 04:11:10 -07:00
parent ce4be10171
commit ddb1f825dc
6 changed files with 249 additions and 5 deletions

View File

@@ -81,7 +81,7 @@ Optional: `MIFOS_INSECURE=1` — Allow self-signed TLS when calling the API (dev
### Explorer Monorepo
- `DB_REPLICA_PASSWORD` - If using replica database
- `SEARCH_PASSWORD` - If using Elasticsearch
- `ONEINCH_API_KEY` - If using 1inch integration
- `ONEINCH_API_KEY` - If using 1inch integration (`ONE_INCH_API_KEY` is also honored by the token aggregation dispatcher)
- `JUMIO_API_KEY/SECRET` - If using Jumio KYC
- `MOONPAY_API_KEY` - If using MoonPay
- `WALLETCONNECT_PROJECT_ID` - If using WalletConnect

View File

@@ -55,7 +55,8 @@
"all-mainnet:official-dodo-discovery": "node scripts/status/discover-official-dodo-pools.mjs",
"all-mainnet:official-dodo-migration": "node scripts/status/execute-official-dodo-migration.mjs",
"all-mainnet:remaining-routing-tasks": "node scripts/status/build-remaining-official-routing-tasks.mjs",
"all-mainnet:remaining-balances": "node scripts/status/check-remaining-deployer-balances.mjs"
"all-mainnet:remaining-balances": "node scripts/status/check-remaining-deployer-balances.mjs",
"all-mainnet:oneinch-preflight": "node scripts/status/check-oneinch-remaining-routes.mjs"
},
"keywords": [
"proxmox",

View File

@@ -11,7 +11,7 @@
|---------|--------------|------------|-------------|
| **Li.Fi** | `LIFI_API_KEY` | alltra-lifi-settlement | https://li.fi |
| **Jumper** | `JUMPER_API_KEY` | alltra-lifi-settlement, .env.example | https://jumper.exchange |
| **1inch** | `ONEINCH_API_KEY` | chain138-quote.service.ts (api.1inch.dev) | https://portal.1inch.dev |
| **1inch** | `ONEINCH_API_KEY` (`ONE_INCH_API_KEY` alias also honored) | chain138-quote.service.ts / token aggregation 1inch preflight (api.1inch.dev) | https://portal.1inch.dev |
| **LayerZero** | Config/API | Bridge integrations | https://layerzero.network |
| **Wormhole** | API key | Bridge integrations | https://wormhole.com |

View File

@@ -11,6 +11,7 @@ const repoRoot = resolve(new URL("../..", import.meta.url).pathname);
const matrixPath = resolve(repoRoot, "config/all-mainnet-pool-creation-matrix.json");
const sourcesPath = resolve(repoRoot, "config/official-protocol-integration-sources.json");
const discoveryPath = resolve(repoRoot, "reports/status/all-mainnet-official-dodo-discovery-latest.json");
const oneInchPreflightPath = resolve(repoRoot, "reports/status/all-mainnet-oneinch-route-preflight-latest.json");
const outJson = resolve(repoRoot, "reports/status/all-mainnet-remaining-official-routing-tasks-latest.json");
const outMd = resolve(repoRoot, "reports/status/all-mainnet-remaining-official-routing-tasks-latest.md");
@@ -42,6 +43,13 @@ function matchingAggregatorRow(row) {
const matrix = readJson(matrixPath);
const sources = readJson(sourcesPath);
const discovery = readJson(discoveryPath);
const oneInchPreflight = (() => {
try {
return readJson(oneInchPreflightPath);
} catch {
return { routes: [] };
}
})();
const generatedAt = new Date().toISOString();
const unsupportedDodoRows = matrix.rows.filter((row) => (
@@ -55,6 +63,10 @@ const unsupportedRoutingTasks = unsupportedDodoRows.map((row) => {
const support = oneInchSupport[String(row.chainId)];
const hasAggregatorSupport = Boolean(support);
const aggregatorRow = hasAggregatorSupport ? matchingAggregatorRow(row) : null;
const preflight = (oneInchPreflight.routes || []).find((route) => (
route.poolId === row.poolId ||
(aggregatorRow && route.aggregatorPoolId === aggregatorRow.poolId)
));
return {
poolId: row.poolId,
chainId: row.chainId,
@@ -63,6 +75,8 @@ const unsupportedRoutingTasks = unsupportedDodoRows.map((row) => {
currentReplacementPool: row.poolAddress,
matchingAggregatorPoolId: aggregatorRow?.poolId || null,
matchingAggregatorStatus: aggregatorRow?.status || null,
quotePreflightStatus: preflight?.status || "not_run",
quotePreflightBlocker: preflight?.blocker || null,
targetSupportProtocol: hasAggregatorSupport ? "oneinch_aggregator" : "official_alternate_required",
supportStatus: support?.status || "needs_official_source",
tasks: hasAggregatorSupport ? [
@@ -123,6 +137,8 @@ const report = {
oneInchSupportableRows: unsupportedRoutingTasks.filter((row) => row.targetSupportProtocol === "oneinch_aggregator").length,
oneInchRowsAlreadyInventoried: unsupportedRoutingTasks.filter((row) => row.matchingAggregatorPoolId).length,
oneInchRowsMissingInventory: unsupportedRoutingTasks.filter((row) => row.targetSupportProtocol === "oneinch_aggregator" && !row.matchingAggregatorPoolId).length,
oneInchQuotePreflightNonzero: unsupportedRoutingTasks.filter((row) => row.quotePreflightStatus === "quote_nonzero").length,
oneInchQuotePreflightBlocked: unsupportedRoutingTasks.filter((row) => row.quotePreflightStatus !== "quote_nonzero").length,
cronosRowsNeedingNativeDexProfile: unsupportedRoutingTasks.filter((row) => row.targetSupportProtocol === "cronos_native_dex_profile_required").length,
zeroOrUnusableOfficialPools: zeroPoolTasks.length,
},
@@ -140,7 +156,7 @@ const md = [
"## Unsupported DODO Rows",
"",
table(
["Pool", "Chain", "Pair", "Target Support", "Inventory Row", "Status", "First Task"],
["Pool", "Chain", "Pair", "Target Support", "Inventory Row", "Status", "Quote Preflight", "First Task"],
unsupportedRoutingTasks.map((row) => [
row.poolId,
row.chainId,
@@ -148,6 +164,7 @@ const md = [
row.targetSupportProtocol,
row.matchingAggregatorPoolId || "missing",
row.supportStatus,
row.quotePreflightStatus,
row.tasks[0],
]),
),

View File

@@ -0,0 +1,226 @@
#!/usr/bin/env node
/**
* Probe official 1inch quote support for the remaining ALL Mainnet aggregator
* routes. This is read-only: it never requests swap calldata or sends txs.
*/
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { homedir } from "node:os";
import { resolve } from "node:path";
// Repo-root-relative paths: inputs (pool-creation matrix + the routing-task
// report produced by build-remaining-official-routing-tasks.mjs) and the
// JSON/markdown preflight reports this script writes.
const repoRoot = resolve(new URL("../..", import.meta.url).pathname);
const matrixPath = resolve(repoRoot, "config/all-mainnet-pool-creation-matrix.json");
const tasksPath = resolve(repoRoot, "reports/status/all-mainnet-remaining-official-routing-tasks-latest.json");
const outJson = resolve(repoRoot, "reports/status/all-mainnet-oneinch-route-preflight-latest.json");
const outMd = resolve(repoRoot, "reports/status/all-mainnet-oneinch-route-preflight-latest.md");
// Official 1inch swap API v6.1 quote endpoint; overridable via ONEINCH_API_BASE.
const DEFAULT_API_BASE = "https://api.1inch.dev/swap/v6.1";
// Probe amount in the token's smallest unit — deliberately tiny, since the
// preflight only needs to see a nonzero quote, not a realistic trade size.
const DEFAULT_AMOUNT_RAW = "100";
/**
 * Read a UTF-8 file and parse its contents as JSON.
 * @param {string} path - Path to the JSON file.
 * @returns {any} The parsed JSON value.
 */
function readJson(path) {
  const text = readFileSync(path, "utf8");
  return JSON.parse(text);
}
/**
 * Parse a dotenv-style file into `env` without overwriting existing keys.
 *
 * Supports `export KEY=value`, single/double quoted values, inline `#`
 * comments on unquoted values only, and `${NAME}` / `${NAME:-fallback}`
 * substitution resolved against `env` first, then `process.env`, then the
 * fallback. Values that end up empty, literally "0x", or still contain an
 * unresolved `${` are treated as placeholders and skipped.
 *
 * Fix vs. previous revision: comment stripping ran after unquoting, so a
 * quoted secret such as `D="keep # hash"` was truncated at the `#`. Inline
 * comments are now only stripped from unquoted values, matching dotenv
 * convention.
 *
 * @param {string} path - Candidate env file; silently skipped if missing.
 * @param {Record<string, string>} env - Accumulator; existing keys win (`??=`).
 */
function loadEnvFile(path, env) {
  if (!existsSync(path)) return;
  for (const line of readFileSync(path, "utf8").split(/\r?\n/)) {
    const trimmed = line.trim();
    if (!trimmed || trimmed.startsWith("#") || !trimmed.includes("=")) continue;
    const index = trimmed.indexOf("=");
    const key = trimmed.slice(0, index).replace(/^export\s+/, "").trim();
    let value = trimmed.slice(index + 1).trim();
    // A value is quoted only when it has matching quotes at BOTH ends and is
    // at least two characters long (a lone `"` is not a quoted value).
    const quoted = value.length >= 2 && (
      (value.startsWith('"') && value.endsWith('"')) ||
      (value.startsWith("'") && value.endsWith("'"))
    );
    if (quoted) {
      value = value.slice(1, -1);
    } else {
      // Inline comments are only meaningful outside quotes; stripping them
      // from quoted values would corrupt secrets containing " #".
      value = value.replace(/\s+#.*$/, "");
    }
    value = value.replace(/\$\{([^}:]+)(:-([^}]*))?\}/g, (_, name, _fallback, fallbackValue) => (
      env[name] ?? process.env[name] ?? fallbackValue ?? ""
    ));
    if (!value || value === "0x" || value.includes("${")) continue;
    env[key] ??= value;
  }
}
/**
 * Build the effective environment: process.env layered with repo-local and
 * machine-local env files. Keys already present (including from earlier
 * files) are never overwritten — first definition wins.
 * @returns {Record<string, string|undefined>} Merged environment map.
 */
function loadEnv() {
  const env = { ...process.env };
  const candidates = [
    resolve(repoRoot, ".env"),
    resolve(repoRoot, "smom-dbis-138/.env"),
    resolve(homedir(), ".secure-secrets/private-keys.env"),
  ];
  for (const candidate of candidates) {
    loadEnvFile(candidate, env);
  }
  return env;
}
/**
 * Render a GitHub-flavoured markdown table.
 * Pipe characters inside cells are escaped; null/undefined cells render empty.
 * @param {string[]} headers - Column headers.
 * @param {Array<Array<unknown>>} rows - Row data; cells are stringified.
 * @returns {string} Markdown table (no trailing newline).
 */
function table(headers, rows) {
  const escapeCell = (cell) => String(cell ?? "").replace(/\|/g, "\\|");
  const formatLine = (cells) => `| ${cells.join(" | ")} |`;
  const lines = [
    formatLine(headers),
    formatLine(headers.map(() => "---")),
  ];
  for (const row of rows) {
    lines.push(formatLine(row.map(escapeCell)));
  }
  return lines.join("\n");
}
/**
 * Locate the aggregator inventory row backing a routing task.
 * Prefers an exact poolId match on `task.matchingAggregatorPoolId`; falls
 * back to matching chainId + the oneinch_aggregator protocol + the
 * "BASE/QUOTE" pair string (with "?" for missing symbols).
 * @returns {object|undefined} The matching matrix row, if any.
 */
function findAggregatorRow(matrix, task) {
  const byPoolId = matrix.rows.find((row) => row.poolId === task.matchingAggregatorPoolId);
  if (byPoolId) return byPoolId;
  return matrix.rows.find((row) => {
    if (row.chainId !== task.chainId) return false;
    if (row.protocol !== "oneinch_aggregator") return false;
    const pair = `${row.baseToken?.symbol || "?"}/${row.quoteToken?.symbol || "?"}`;
    return pair === task.pair;
  });
}
/**
 * Extract the output amount from a 1inch quote payload.
 * Checks the known response fields in priority order (dstAmount, toAmount,
 * amount, quote.dstAmount) and reports whether the first usable value parses
 * as a positive integer.
 * @param {any} data - Parsed quote response body (may be null/undefined).
 * @returns {{amountOutRaw: string|null, nonzero: boolean}}
 */
function summarizeQuote(data) {
  const fields = [
    data?.dstAmount,
    data?.toAmount,
    data?.amount,
    data?.quote?.dstAmount,
  ];
  let raw;
  for (const value of fields) {
    const kind = typeof value;
    if (kind === "string" || kind === "number" || kind === "bigint") {
      raw = value;
      break;
    }
  }
  if (raw === undefined) return { amountOutRaw: null, nonzero: false };
  try {
    const parsed = BigInt(String(raw));
    return { amountOutRaw: parsed.toString(), nonzero: parsed > 0n };
  } catch {
    // Non-integer payloads (e.g. "abc") are preserved verbatim for the report.
    return { amountOutRaw: String(raw), nonzero: false };
  }
}
/**
 * Issue a single read-only GET against the official 1inch /quote endpoint.
 * Never requests swap calldata and never sends transactions.
 * @param {object} opts
 * @param {string} opts.apiBase - Base URL (trailing slash tolerated).
 * @param {string} opts.apiKey - Bearer token for api.1inch.dev.
 * @param {number|string} opts.chainId - Target chain id path segment.
 * @param {string} opts.src - Source token address.
 * @param {string} opts.dst - Destination token address.
 * @param {string} opts.amount - Input amount in smallest units.
 * @returns {Promise<{url: string, statusCode: number, ok: boolean, data: any}>}
 */
async function probeQuote({ apiBase, apiKey, chainId, src, dst, amount }) {
  const base = apiBase.replace(/\/$/, "");
  const url = new URL(`${base}/${chainId}/quote`);
  for (const [key, value] of Object.entries({ src, dst, amount })) {
    url.searchParams.set(key, value);
  }
  const response = await fetch(url, {
    headers: {
      Accept: "application/json",
      Authorization: `Bearer ${apiKey}`,
    },
  });
  const text = await response.text();
  let data;
  if (text) {
    try {
      data = JSON.parse(text);
    } catch {
      // Keep a bounded sample of non-JSON bodies for the report.
      data = { raw: text.slice(0, 500) };
    }
  } else {
    data = null;
  }
  return { url: url.toString(), statusCode: response.status, ok: response.ok, data };
}
// ---------------------------------------------------------------------------
// Script body (top-level, runs on import): resolve credentials, probe every
// 1inch-supportable routing task, then emit JSON + markdown reports.
// ---------------------------------------------------------------------------
const env = loadEnv();
// ONEINCH_API_KEY is the primary credential name; ONE_INCH_API_KEY is an
// accepted alias. A missing key does NOT abort the run — each route is
// recorded as blocked_missing_api_key so the report stays complete.
const apiKey = env.ONEINCH_API_KEY || env.ONE_INCH_API_KEY || "";
const apiBase = env.ONEINCH_API_BASE || DEFAULT_API_BASE;
const amount = env.ONEINCH_PREFLIGHT_AMOUNT_RAW || DEFAULT_AMOUNT_RAW;
const matrix = readJson(matrixPath);
const tasks = readJson(tasksPath);
const generatedAt = new Date().toISOString();
// Only probe tasks already classified upstream as 1inch-supportable.
const taskRows = (tasks.unsupportedRoutingTasks || [])
  .filter((task) => task.targetSupportProtocol === "oneinch_aggregator");
const routes = [];
// Sequential awaits (not Promise.all) keep the request rate gentle on the
// official API and make per-route failures easy to attribute.
for (const task of taskRows) {
  const row = findAggregatorRow(matrix, task);
  const route = {
    poolId: task.poolId,
    aggregatorPoolId: row?.poolId || task.matchingAggregatorPoolId || null,
    chainId: task.chainId,
    network: task.network,
    pair: task.pair,
    src: row?.baseToken?.address || null,
    dst: row?.quoteToken?.address || null,
    amountInRaw: amount,
    status: "pending",
    quoteStatusCode: null,
    amountOutRaw: null,
    apiUrl: null,
    blocker: null,
  };
  // Guard order: missing inventory row -> missing credential -> missing token
  // addresses. Each guard records a specific status/blocker and continues so
  // every route still appears in the report.
  if (!row) {
    route.status = "missing_inventory_row";
    route.blocker = "No matching oneinch_aggregator row exists in config/all-mainnet-pool-creation-matrix.json.";
    routes.push(route);
    continue;
  }
  if (!apiKey) {
    route.status = "blocked_missing_api_key";
    route.blocker = "Set ONEINCH_API_KEY or ONE_INCH_API_KEY to run official 1inch quote preflight.";
    routes.push(route);
    continue;
  }
  if (!route.src || !route.dst) {
    route.status = "blocked_missing_token_address";
    route.blocker = "Aggregator row is missing src/dst token address.";
    routes.push(route);
    continue;
  }
  try {
    const quote = await probeQuote({ apiBase, apiKey, chainId: route.chainId, src: route.src, dst: route.dst, amount });
    const summary = summarizeQuote(quote.data);
    // Defensive scrub: the key travels in the Authorization header today, but
    // never persist it to the report even if it ever appears in the URL.
    route.apiUrl = quote.url.replace(apiKey, "<redacted>");
    route.quoteStatusCode = quote.statusCode;
    route.amountOutRaw = summary.amountOutRaw;
    // Promotable only when the HTTP call succeeded AND the quoted output > 0.
    route.status = quote.ok && summary.nonzero ? "quote_nonzero" : "quote_not_promotable";
    route.blocker = route.status === "quote_nonzero"
      ? null
      : `Official 1inch quote did not return a nonzero output (HTTP ${quote.statusCode}).`;
  } catch (error) {
    // Network/DNS/abort failures; undici errors expose shortMessage.
    route.status = "api_error";
    route.blocker = error.shortMessage || error.message;
  }
  routes.push(route);
}
// Histogram of route statuses plus an overall total.
const summary = routes.reduce((counts, route) => {
  counts.total += 1;
  counts[route.status] = (counts[route.status] || 0) + 1;
  return counts;
}, { total: 0 });
const report = {
  generatedAt,
  mode: "read_only_official_oneinch_quote_preflight",
  apiBase,
  credentialEnvChecked: ["ONEINCH_API_KEY", "ONE_INCH_API_KEY"],
  hasApiKey: Boolean(apiKey),
  amountInRaw: amount,
  summary,
  routes,
};
// Human-readable companion report: summary metrics table + per-route table.
const md = [
  "# ALL Mainnet 1inch Route Preflight",
  "",
  `- Generated: \`${generatedAt}\``,
  `- Mode: \`${report.mode}\``,
  `- API base: \`${apiBase}\``,
  `- API key present: \`${report.hasApiKey}\``,
  "",
  table(["Metric", "Count"], Object.entries(summary)),
  "",
  table(
    ["Pool", "Aggregator Row", "Chain", "Pair", "Status", "HTTP", "Amount Out Raw", "Blocker"],
    routes.map((route) => [
      route.poolId,
      route.aggregatorPoolId,
      route.chainId,
      route.pair,
      route.status,
      route.quoteStatusCode ?? "",
      route.amountOutRaw ?? "",
      route.blocker ?? "",
    ]),
  ),
  "",
].join("\n");
mkdirSync(resolve(repoRoot, "reports/status"), { recursive: true });
writeFileSync(outJson, `${JSON.stringify(report, null, 2)}\n`);
writeFileSync(outMd, md);
const promotable = routes.filter((route) => route.status === "quote_nonzero").length;
console.log(`[OK] 1inch preflight written: ${outJson}`);
console.log(`[INFO] quote_nonzero=${promotable}/${routes.length}`);
// Exit code 2 signals "not all routes promotable" to CI without throwing;
// the reports are still written either way.
if (promotable !== routes.length) {
  process.exitCode = 2;
}