Stabilize token and pool metadata
This commit is contained in:
@@ -146,6 +146,22 @@ else
|
||||
fi
|
||||
fi
|
||||
done
|
||||
# Token-list metadata conventions (semantic tag/extension rules).
# Skipped silently when the validator script is absent from the checkout.
if [[ -f "$PROJECT_ROOT/scripts/validation/validate-token-list-metadata.mjs" ]]; then
  if node "$PROJECT_ROOT/scripts/validation/validate-token-list-metadata.mjs"; then
    log_ok "Token-list metadata conventions valid"
  else
    log_err "Token-list metadata conventions invalid"
    # Accumulate instead of exiting so all checks run in one pass.
    ERRORS=$((ERRORS + 1))
  fi
fi
# ALL Mainnet pool-creation matrix: needs both the matrix JSON and the
# validator script to be present before the check is meaningful.
if [[ -f "$PROJECT_ROOT/config/all-mainnet-pool-creation-matrix.json" ]] && [[ -f "$PROJECT_ROOT/scripts/validation/validate-pool-creation-matrix.mjs" ]]; then
  if node "$PROJECT_ROOT/scripts/validation/validate-pool-creation-matrix.mjs"; then
    log_ok "ALL Mainnet pool-creation matrix valid"
  else
    log_err "ALL Mainnet pool-creation matrix invalid"
    ERRORS=$((ERRORS + 1))
  fi
fi
|
||||
# DUAL_CHAIN config (explorer deploy source)
|
||||
if [[ -f "$PROJECT_ROOT/explorer-monorepo/backend/api/rest/config/metamask/DUAL_CHAIN_TOKEN_LIST.tokenlist.json" ]] && command -v jq &>/dev/null; then
|
||||
if jq -e '(.tokens | type == "array") and (.tokens | length > 0)' "$PROJECT_ROOT/explorer-monorepo/backend/api/rest/config/metamask/DUAL_CHAIN_TOKEN_LIST.tokenlist.json" &>/dev/null; then
|
||||
|
||||
345
scripts/validation/validate-pool-creation-matrix.mjs
Executable file
345
scripts/validation/validate-pool-creation-matrix.mjs
Executable file
@@ -0,0 +1,345 @@
|
||||
#!/usr/bin/env node
/**
 * Validate the ALL Mainnet pool-creation matrix.
 *
 * This file is an operational dependency: pool rows are used to decide what can
 * be created, funded, or promoted. The checks here keep the matrix internally
 * consistent and cross-check token addresses against the repo token lists.
 */

import { existsSync, readFileSync } from "node:fs";
import { basename, resolve } from "node:path";

// Repo root derived from this script's location (scripts/validation/..).
// NOTE(review): URL.pathname breaks on Windows drive letters and
// percent-encoded paths — consider url.fileURLToPath; confirm target platforms.
const repoRoot = resolve(new URL("../..", import.meta.url).pathname);
// Matrix path used when no CLI argument is supplied.
const defaultMatrix = "config/all-mainnet-pool-creation-matrix.json";
// Token lists consulted for the address cross-check; missing files are
// tolerated at runtime (they only produce warnings).
const defaultTokenLists = [
  "token-lists/lists/all-mainnet.tokenlist.json",
  "token-lists/lists/dbis-138.tokenlist.json",
  "token-lists/lists/ethereum-mainnet.tokenlist.json",
  "token-lists/lists/arbitrum.tokenlist.json",
  "token-lists/lists/avalanche.tokenlist.json",
  "token-lists/lists/cronos.tokenlist.json",
  "metamask-integration/config/token-list.json",
  "smom-dbis-138/metamask/token-list.json",
];

// Every row must declare exactly this set of vault roles (see validateMatrix).
const requiredVaultRoles = [
  "treasury_reserve",
  "bridge_liquidity",
  "single_sided_inventory",
  "protocol_adapter",
  "emergency_withdraw",
];

// Lifecycle statuses at or past on-chain creation, where poolAddress must be set.
const statusesRequiringPoolAddress = new Set(["created", "funded", "live_read", "canary_passed", "production"]);
// EVM address: 0x followed by exactly 40 hex digits.
const addressPattern = /^0x[a-fA-F0-9]{40}$/;
|
||||
|
||||
/**
 * Resolve the matrix path from CLI arguments.
 * Prints usage and exits 0 on --help/-h; otherwise returns the first
 * positional argument, falling back to the default matrix path.
 */
function parseArgs() {
  const cliArgs = process.argv.slice(2);
  const wantsHelp = cliArgs.some((arg) => arg === "--help" || arg === "-h");
  if (wantsHelp) {
    console.log(`Usage: node scripts/validation/validate-pool-creation-matrix.mjs [matrix-path]\n\nDefaults to ${defaultMatrix}.`);
    process.exit(0);
  }
  return cliArgs.length > 0 && cliArgs[0] ? cliArgs[0] : defaultMatrix;
}
|
||||
|
||||
/**
 * Read and parse a JSON file relative to the repo root.
 * Pushes a message onto `errors` and returns null when the file is
 * missing or does not parse as JSON.
 */
function readJson(file, errors) {
  const absolutePath = resolve(repoRoot, file);
  if (!existsSync(absolutePath)) {
    errors.push(`${file}: missing`);
    return null;
  }
  let parsed = null;
  try {
    parsed = JSON.parse(readFileSync(absolutePath, "utf8"));
  } catch (error) {
    errors.push(`${file}: invalid JSON: ${error.message}`);
  }
  return parsed;
}
|
||||
|
||||
/** Canonical token lookup key: "<chainId>:<SYMBOL>" with the symbol uppercased. */
function tokenKey(chainId, symbol) {
  const normalizedSymbol = String(symbol).toUpperCase();
  return [chainId, normalizedSymbol].join(":");
}
|
||||
|
||||
/**
 * Build an index of known token addresses from the repo token lists.
 *
 * Key: tokenKey(chainId, symbol); value: Map of lowercase address ->
 * "<file>:<symbol>" provenance string. Missing, unparseable, or malformed
 * lists are skipped with a warning so the address cross-check degrades
 * gracefully instead of failing the whole run.
 */
function buildTokenIndex(warnings) {
  const index = new Map();

  for (const file of defaultTokenLists) {
    const absolutePath = resolve(repoRoot, file);
    if (!existsSync(absolutePath)) {
      warnings.push(`${file}: token list missing; address cross-check skipped for that file`);
      continue;
    }

    let list;
    try {
      list = JSON.parse(readFileSync(absolutePath, "utf8"));
    } catch (error) {
      warnings.push(`${file}: invalid JSON; address cross-check skipped for that file: ${error.message}`);
      continue;
    }

    if (!Array.isArray(list.tokens)) {
      warnings.push(`${file}: missing tokens[]; address cross-check skipped for that file`);
      continue;
    }

    for (const token of list.tokens) {
      // Only index entries with a chainId, a symbol, and a well-formed address.
      if (!token?.chainId || !token?.symbol) {
        continue;
      }
      const address = String(token.address || "");
      if (!addressPattern.test(address)) {
        continue;
      }
      const key = tokenKey(token.chainId, token.symbol);
      const byAddress = index.get(key) ?? new Map();
      byAddress.set(address.toLowerCase(), `${file}:${token.symbol}`);
      index.set(key, byAddress);
    }
  }

  return index;
}
|
||||
|
||||
/**
 * Human-readable reference for a matrix row, used as a message prefix.
 *
 * Null-safe: validateMatrix builds this reference BEFORE it has verified
 * that the row is an object, so a null/undefined row must not throw here.
 * @param {object|null|undefined} row - Matrix row (possibly malformed).
 * @param {number} index - Position of the row within rows[].
 * @returns {string} e.g. "rows[3] 1-uniswap_v3-weth-usdc".
 */
function ref(row, index) {
  // Optional chaining: `row.poolId` previously threw a TypeError for a null
  // row, masking the real "row must be an object" validation error.
  return `rows[${index}] ${row?.poolId || "<missing-poolId>"}`;
}
|
||||
|
||||
/**
 * Lowercase string form of a value for poolId construction.
 * Any falsy input (null, undefined, "", 0, false) maps to "".
 */
function slug(value) {
  const text = value || "";
  return String(text).toLowerCase();
}
|
||||
|
||||
/** Sorted copy of `values` using locale-aware comparison; input untouched. */
function sortedStrings(values) {
  const copy = Array.from(values);
  copy.sort((left, right) => left.localeCompare(right));
  return copy;
}
|
||||
|
||||
/**
 * Tally rows by the value of `key`.
 * @returns {Object<string, number>} value -> occurrence count (absent
 *   values are tallied under the string "undefined", as Object keys).
 */
function countBy(rows, key) {
  return rows.reduce((counts, row) => {
    const value = row[key];
    counts[value] = (counts[value] || 0) + 1;
    return counts;
  }, {});
}
|
||||
|
||||
/**
 * True when both count objects agree on every key, treating a missing
 * key as a count of 0. Tolerates null/undefined on either side.
 */
function sameCounts(actual, expected) {
  const keys = new Set([...Object.keys(actual || {}), ...Object.keys(expected || {})]);
  return [...keys].every((key) => (actual?.[key] || 0) === (expected?.[key] || 0));
}
|
||||
|
||||
/**
 * Append a message to `errors` when `value` is not a 0x-prefixed 40-hex
 * address (or null, when allowNull permits it). No return value.
 */
function validateAddress(value, path, errors, { allowNull = true } = {}) {
  const nullAccepted = allowNull && value === null;
  if (nullAccepted) {
    return;
  }
  const isWellFormed = typeof value === "string" && addressPattern.test(value);
  if (!isWellFormed) {
    errors.push(`${path}: expected ${allowNull ? "null or " : ""}0x-prefixed 20-byte address`);
  }
}
|
||||
|
||||
/**
 * Validate one side ("baseToken"/"quoteToken") of a pool row and
 * cross-check its address against the token-list index.
 *
 * Shape problems become errors; an address that merely differs from the
 * token lists is only a warning (lists can lag the matrix).
 * @param {object} row - Matrix row containing the token side.
 * @param {number} index - Row position, for message references.
 * @param {string} side - Property name: "baseToken" or "quoteToken".
 * @param {Map} tokenIndex - Output of buildTokenIndex().
 * @param {string[]} errors - Mutated: hard failures appended here.
 * @param {string[]} warnings - Mutated: soft mismatches appended here.
 */
function validateTokenAddress(row, index, side, tokenIndex, errors, warnings) {
  const token = row[side];
  const rowRef = ref(row, index);
  if (!token || typeof token !== "object") {
    errors.push(`${rowRef}: ${side} must be an object`);
    return;
  }
  if (typeof token.symbol !== "string" || token.symbol.length === 0) {
    errors.push(`${rowRef}: ${side}.symbol is required`);
  }
  validateAddress(token.address, `${rowRef}: ${side}.address`, errors);

  // Cross-check only when the token lists agree on exactly ONE address for
  // this chainId+symbol; unknown or ambiguous symbols are skipped entirely.
  const known = tokenIndex.get(tokenKey(row.chainId, token.symbol));
  if (!known || known.size !== 1) {
    return;
  }

  const [knownAddress, source] = [...known.entries()][0];
  if (token.address === null) {
    // A null address is tolerated only for planned rows that are not
    // publicly routable; otherwise the known address should be filled in.
    if (row.status !== "planned" || row.publicRoutingEnabled === true) {
      errors.push(`${rowRef}: ${side} ${token.symbol} address is missing but ${source} has ${knownAddress}`);
    }
    return;
  }

  // Case-insensitive comparison: knownAddress is stored lowercased.
  if (String(token.address).toLowerCase() !== knownAddress) {
    warnings.push(`${rowRef}: ${side} ${token.symbol} address ${token.address} differs from ${source} ${knownAddress}`);
  }
}
|
||||
|
||||
/**
 * Validate the whole matrix document.
 *
 * Checks top-level shape (version, generatedAt, rows, lifecycle,
 * protocolRolloutOrder), recomputes the protocol/status summary counts,
 * then validates every row: identity, token sides, addresses, vault
 * assignments, funding tiers, policy, and notes.
 *
 * @param {string} file - Path used as a prefix in messages.
 * @param {object} matrix - Parsed matrix JSON.
 * @param {Map} tokenIndex - Output of buildTokenIndex().
 * @returns {{errors: string[], warnings: string[]}}
 */
function validateMatrix(file, matrix, tokenIndex) {
  const errors = [];
  const warnings = [];

  if (!matrix || typeof matrix !== "object" || Array.isArray(matrix)) {
    errors.push(`${file}: root must be an object`);
    return { errors, warnings };
  }

  if (typeof matrix.version !== "string" || matrix.version.length === 0) {
    errors.push(`${file}: version is required`);
  }
  // Date.parse is lenient; this only rejects clearly unparseable strings.
  if (typeof matrix.generatedAt !== "string" || Number.isNaN(Date.parse(matrix.generatedAt))) {
    errors.push(`${file}: generatedAt must be an ISO-like date string`);
  }
  // rows[] is mandatory — without it the remaining checks are meaningless,
  // hence the early return.
  if (!Array.isArray(matrix.rows) || matrix.rows.length === 0) {
    errors.push(`${file}: rows[] is required`);
    return { errors, warnings };
  }
  if (!Array.isArray(matrix.lifecycle) || matrix.lifecycle.length === 0) {
    errors.push(`${file}: lifecycle[] is required`);
  }
  if (!Array.isArray(matrix.protocolRolloutOrder) || matrix.protocolRolloutOrder.length === 0) {
    errors.push(`${file}: protocolRolloutOrder[] is required`);
  }

  // The declared summary counts must match what the rows actually contain.
  const protocolCounts = countBy(matrix.rows, "protocol");
  const statusCounts = countBy(matrix.rows, "status");
  if (!sameCounts(protocolCounts, matrix.protocolCounts)) {
    errors.push(`${file}: protocolCounts does not match rows (${JSON.stringify(protocolCounts)})`);
  }
  if (!sameCounts(statusCounts, matrix.statusCounts)) {
    errors.push(`${file}: statusCounts does not match rows (${JSON.stringify(statusCounts)})`);
  }

  const lifecycle = new Set(matrix.lifecycle || []);
  const rollout = new Set(matrix.protocolRolloutOrder || []);
  const poolIds = new Set(); // for duplicate-poolId detection

  matrix.rows.forEach((row, index) => {
    // NOTE(review): ref() reads row.poolId before the object check below,
    // so a null row would throw here — confirm ref() is null-safe.
    const rowRef = ref(row, index);
    if (!row || typeof row !== "object" || Array.isArray(row)) {
      errors.push(`rows[${index}]: row must be an object`);
      return;
    }

    // poolId: required and unique across the matrix.
    if (typeof row.poolId !== "string" || row.poolId.length === 0) {
      errors.push(`${rowRef}: poolId is required`);
    } else if (poolIds.has(row.poolId)) {
      errors.push(`${rowRef}: duplicate poolId`);
    } else {
      poolIds.add(row.poolId);
    }

    if (!Number.isInteger(row.chainId)) {
      errors.push(`${rowRef}: chainId must be an integer`);
    }
    if (!rollout.has(row.protocol)) {
      errors.push(`${rowRef}: protocol ${row.protocol} is not in protocolRolloutOrder`);
    }
    if (!lifecycle.has(row.status)) {
      errors.push(`${rowRef}: status ${row.status} is not in lifecycle`);
    }

    // poolId must follow the <chainId>-<protocol>-<base>-<quote> convention.
    const expectedPoolId = `${row.chainId}-${row.protocol}-${slug(row.baseToken?.symbol)}-${slug(row.quoteToken?.symbol)}`;
    if (row.poolId && row.poolId !== expectedPoolId) {
      errors.push(`${rowRef}: poolId should be ${expectedPoolId}`);
    }

    validateTokenAddress(row, index, "baseToken", tokenIndex, errors, warnings);
    validateTokenAddress(row, index, "quoteToken", tokenIndex, errors, warnings);

    validateAddress(row.factoryAddress, `${rowRef}: factoryAddress`, errors);
    validateAddress(row.routerAddress, `${rowRef}: routerAddress`, errors);
    // poolAddress may only be null while the pool has not been created yet.
    validateAddress(row.poolAddress, `${rowRef}: poolAddress`, errors, {
      allowNull: !statusesRequiringPoolAddress.has(row.status),
    });
    validateAddress(row.vaultAddress, `${rowRef}: vaultAddress`, errors);

    if (statusesRequiringPoolAddress.has(row.status) && row.poolAddress === null) {
      errors.push(`${rowRef}: status ${row.status} requires poolAddress`);
    }

    // singleSided must be derived consistently from protocol/poolType.
    const shouldBeSingleSided = row.protocol === "single_sided_pmm" || row.poolType === "single_sided";
    if (row.singleSided !== shouldBeSingleSided) {
      errors.push(`${rowRef}: singleSided should be ${shouldBeSingleSided}`);
    }

    // Vault assignments: exactly the required role set, each entry shaped
    // correctly, and the declared missing-roles/status fields consistent
    // with what the entries actually contain.
    if (!Array.isArray(row.vaultAssignments)) {
      errors.push(`${rowRef}: vaultAssignments[] is required`);
    } else {
      const roles = row.vaultAssignments.map((assignment) => assignment?.role);
      const roleSet = new Set(roles);
      const expectedRoles = new Set(requiredVaultRoles);
      if (roleSet.size !== expectedRoles.size || requiredVaultRoles.some((role) => !roleSet.has(role))) {
        errors.push(`${rowRef}: vaultAssignments roles must be ${requiredVaultRoles.join(",")}`);
      }
      for (const assignment of row.vaultAssignments) {
        if (!assignment || typeof assignment !== "object") {
          errors.push(`${rowRef}: vaultAssignments entries must be objects`);
          continue;
        }
        validateAddress(assignment.vaultAddress, `${rowRef}: vaultAssignments.${assignment.role || "<missing-role>"}.vaultAddress`, errors);
        if (typeof assignment.requiredBeforeFunding !== "boolean") {
          errors.push(`${rowRef}: vaultAssignments.${assignment.role || "<missing-role>"}.requiredBeforeFunding must be boolean`);
        }
      }

      // Roles required before funding that still have no vault address.
      const actualMissing = sortedStrings(
        row.vaultAssignments
          .filter((assignment) => assignment?.requiredBeforeFunding === true && assignment.vaultAddress === null)
          .map((assignment) => assignment.role),
      );
      const declaredMissing = sortedStrings(row.missingRequiredVaultRoles || []);
      // Compare as joined strings since both sides are sorted.
      if (actualMissing.join("|") !== declaredMissing.join("|")) {
        errors.push(`${rowRef}: missingRequiredVaultRoles should be [${actualMissing.join(", ")}]`);
      }
      const expectedStatus = actualMissing.length > 0 ? "missing_required_vaults" : "ready";
      if (row.vaultAssignmentStatus !== expectedStatus) {
        errors.push(`${rowRef}: vaultAssignmentStatus should be ${expectedStatus}`);
      }
    }

    // Funding tiers must be non-decreasing: 0 < seed <= smoke <= productionMinimum.
    const tiers = row.fundingTiersUsd;
    if (!tiers || typeof tiers !== "object") {
      errors.push(`${rowRef}: fundingTiersUsd is required`);
    } else if (!(tiers.seed > 0 && tiers.smoke >= tiers.seed && tiers.productionMinimum >= tiers.smoke)) {
      errors.push(`${rowRef}: fundingTiersUsd must satisfy seed > 0, smoke >= seed, productionMinimum >= smoke`);
    }

    // Risk policy: non-negative numeric limits plus a boolean pause flag.
    const policy = row.policy;
    if (!policy || typeof policy !== "object") {
      errors.push(`${rowRef}: policy is required`);
    } else {
      for (const key of ["maxPriceImpactBps", "minReserveUsd", "refillTriggerBps"]) {
        if (typeof policy[key] !== "number" || policy[key] < 0) {
          errors.push(`${rowRef}: policy.${key} must be a non-negative number`);
        }
      }
      if (typeof policy.pauseOnReserveReadFailure !== "boolean") {
        errors.push(`${rowRef}: policy.pauseOnReserveReadFailure must be boolean`);
      }
    }

    if (!Array.isArray(row.notes)) {
      errors.push(`${rowRef}: notes[] is required`);
    }
  });

  return { errors, warnings };
}
|
||||
|
||||
// --- Entry point -----------------------------------------------------------
// Load the matrix, build the token index, validate, and report.
// Warnings never affect the exit code; any error exits with status 1.
const matrixPath = parseArgs();
const bootstrapErrors = [];
const bootstrapWarnings = [];
const matrix = readJson(matrixPath, bootstrapErrors);
const tokenIndex = buildTokenIndex(bootstrapWarnings);
// Skip validation entirely when the matrix failed to load/parse.
const { errors, warnings } = matrix ? validateMatrix(matrixPath, matrix, tokenIndex) : { errors: [], warnings: [] };
const allErrors = [...bootstrapErrors, ...errors];
const allWarnings = [...bootstrapWarnings, ...warnings];

for (const warning of allWarnings) {
  console.warn(`[WARN] ${warning}`);
}

if (allErrors.length > 0) {
  console.error(`[ERROR] Pool-creation matrix validation failed with ${allErrors.length} issue(s):`);
  for (const error of allErrors) {
    console.error(`  - ${error}`);
  }
  process.exit(1);
}

// Reached only when the matrix parsed and no errors were found, so
// dereferencing matrix.rows here is safe.
console.log(`[OK] ${basename(matrixPath)} valid: ${matrix.rows.length} row(s), ${Object.keys(matrix.protocolCounts || {}).length} protocol(s).`);
|
||||
240
scripts/validation/validate-token-list-metadata.mjs
Executable file
240
scripts/validation/validate-token-list-metadata.mjs
Executable file
@@ -0,0 +1,240 @@
|
||||
#!/usr/bin/env node
/**
 * Validate DBIS token-list metadata conventions.
 *
 * This complements the Uniswap token-list schema validator. The schema checks
 * shape; this script checks the meaning of the compact tags/extensions we use
 * for fiat, cash-like, GRU, commodity, and wrapped-token presentation.
 */

import { existsSync, readFileSync } from "node:fs";
import { resolve } from "node:path";

// Repo root derived from this script's location (scripts/validation/..).
// NOTE(review): URL.pathname breaks on Windows drive letters and
// percent-encoded paths — consider url.fileURLToPath; confirm target platforms.
const repoRoot = resolve(new URL("../..", import.meta.url).pathname);

// Canonical lists validated when no CLI paths are given; entries that do not
// exist on disk are skipped with a warning at runtime.
const defaultTokenLists = [
  "token-lists/lists/all-mainnet.tokenlist.json",
  "token-lists/lists/arbitrum.tokenlist.json",
  "token-lists/lists/avalanche.tokenlist.json",
  "token-lists/lists/cronos.tokenlist.json",
  "token-lists/lists/dbis-138.tokenlist.json",
  "token-lists/lists/ethereum-mainnet.tokenlist.json",
  "metamask-integration/config/token-list.json",
  "metamask-integration/provider/config/DUAL_CHAIN_TOKEN_LIST.tokenlist.json",
  "metamask-integration/docs/METAMASK_TOKEN_LIST.json",
  "smom-dbis-138/metamask/token-list.json",
];

// Tags with convention rules enforced below; any use of one of these on a
// token requires a matching definition in the list's top-level `tags`.
const conventionTags = new Set(["fiat", "cash", "gru", "commodity"]);
// Protocol-native tokens that must never carry fiat/cash/gru semantics.
const protocolSymbols = new Set(["AUDA", "HYDX", "HYBX", "CHT"]);
// Stablecoins backed by crypto collateral (not fiat cash).
const cryptoCollateralStablecoins = new Set(["DAI"]);
// ISO currency codes accepted in extensions.currency for fiat-tagged tokens.
const fiatCurrencies = new Set(["USD", "EUR", "GBP", "AUD", "JPY", "CHF", "CAD"]);
// Allowed values for extensions.category.
const allowedCategories = new Set([
  "tokenized-fiat",
  "stablecoin",
  "wrapped-native",
  "defi-token",
  "dex-token",
  "utility-token",
  "commodity-token",
]);
|
||||
|
||||
/**
 * Resolve which token-list files to validate from CLI arguments.
 * --help/-h prints usage and exits 0; with no positional arguments the
 * canonical default lists are used.
 */
function parseArgs() {
  const cliArgs = process.argv.slice(2);
  const wantsHelp = cliArgs.some((arg) => arg === "--help" || arg === "-h");
  if (wantsHelp) {
    console.log(`Usage: node scripts/validation/validate-token-list-metadata.mjs [token-list ...]\n\nIf no token-list paths are supplied, validates the repo's canonical token-list files that exist.`);
    process.exit(0);
  }
  return cliArgs.length === 0 ? defaultTokenLists : cliArgs;
}
|
||||
|
||||
/** True for null or primitive string/number/boolean values. */
function isScalar(value) {
  if (value === null) {
    return true;
  }
  const type = typeof value;
  return type === "string" || type === "number" || type === "boolean";
}
|
||||
|
||||
/**
 * Human-readable reference for a token entry, used as a message prefix.
 *
 * Null-safe: validateList builds this reference BEFORE any shape check on
 * the entry, so a null/non-object token must not throw here. chainId uses
 * `??` instead of `||` so a present-but-zero chainId renders as "0"
 * rather than the "<missing-chain>" placeholder.
 * @returns {string} e.g. "lists/x.json tokens[4] USDC 1 0xA0b8...".
 */
function tokenRef(file, index, token) {
  const symbol = token?.symbol || "<missing-symbol>";
  const chainId = token?.chainId ?? "<missing-chain>";
  const address = token?.address || "<missing-address>";
  return `${file} tokens[${index}] ${symbol} ${chainId} ${address}`;
}
|
||||
|
||||
/** True when the token carries `tag` in its tags[] array. */
function hasTag(token, tag) {
  const tags = token.tags;
  return Array.isArray(tags) ? tags.includes(tag) : false;
}
|
||||
|
||||
/** Top-level tag definitions of a list, or {} when absent or malformed. */
function tagDefs(list) {
  const { tags } = list;
  const isPlainObject = Boolean(tags) && typeof tags === "object" && !Array.isArray(tags);
  return isPlainObject ? tags : {};
}
|
||||
|
||||
/**
 * Validate metadata conventions for one parsed token list.
 *
 * Enforces: convention tags declared at list level when used; compact tag
 * strings (<= 10 chars); flat scalar extensions (<= 10 keys, string values
 * <= 42 chars); category whitelist; and the semantic rules for the cash,
 * fiat, gru, and commodity tags, protocol tokens, and crypto-collateral
 * stablecoins.
 *
 * @param {string} file - Path used as a prefix in messages.
 * @param {object} list - Parsed token-list JSON.
 * @returns {{errors: string[], warnings: string[]}} warnings is currently
 *   always empty; kept for interface symmetry with the matrix validator.
 */
function validateList(file, list) {
  const errors = [];
  const warnings = [];
  const tags = tagDefs(list);

  if (!Array.isArray(list.tokens)) {
    errors.push(`${file}: missing tokens[]`);
    return { errors, warnings };
  }

  // Every convention tag used by any token must be defined at list level.
  for (const conventionTag of conventionTags) {
    const used = list.tokens.some((token) => hasTag(token, conventionTag));
    if (used && !tags[conventionTag]) {
      errors.push(`${file}: tag "${conventionTag}" is used but missing from top-level tags`);
    }
  }

  list.tokens.forEach((token, index) => {
    // NOTE(review): tokenRef reads token.symbol before any shape check,
    // so a null entry would throw here — confirm tokenRef is null-safe.
    const ref = tokenRef(file, index, token);
    const tokenTags = Array.isArray(token.tags) ? token.tags : [];
    const extensions = token.extensions ?? {};

    // Tags must be short strings (compact-tag convention: max 10 chars).
    for (const tag of tokenTags) {
      if (typeof tag !== "string") {
        errors.push(`${ref}: tag values must be strings`);
      } else if (tag.length > 10) {
        errors.push(`${ref}: tag "${tag}" is longer than 10 characters`);
      }
    }

    // Extensions, when present: a flat object, max 10 keys, scalar values,
    // strings no longer than 42 chars (the length of a 0x-prefixed address).
    if (token.extensions !== undefined) {
      if (!extensions || typeof extensions !== "object" || Array.isArray(extensions)) {
        errors.push(`${ref}: extensions must be an object when present`);
      } else {
        const keys = Object.keys(extensions);
        if (keys.length > 10) {
          errors.push(`${ref}: extensions has ${keys.length} keys; max is 10`);
        }
        for (const [key, value] of Object.entries(extensions)) {
          if (!isScalar(value)) {
            errors.push(`${ref}: extensions.${key} must be scalar/null, not ${Array.isArray(value) ? "array" : typeof value}`);
          }
          if (typeof value === "string" && value.length > 42) {
            errors.push(`${ref}: extensions.${key} is longer than 42 characters`);
          }
        }
      }
    }

    // category, when set and truthy, must come from the whitelist.
    if (extensions.category && !allowedCategories.has(extensions.category)) {
      errors.push(`${ref}: extensions.category "${extensions.category}" is not in the allowed metadata category set`);
    }

    // "cash" implies fiat-tagged, fiat-settled, cash-like tokenized fiat.
    if (hasTag(token, "cash")) {
      if (!hasTag(token, "fiat")) {
        errors.push(`${ref}: cash tag requires fiat tag`);
      }
      if (extensions.category !== "tokenized-fiat") {
        errors.push(`${ref}: cash tag requires extensions.category=tokenized-fiat`);
      }
      if (extensions.cashLike !== true) {
        errors.push(`${ref}: cash tag requires extensions.cashLike=true`);
      }
      if (extensions.settlement !== "fiat") {
        errors.push(`${ref}: cash tag requires extensions.settlement=fiat`);
      }
      if (typeof extensions.backing !== "string" || !extensions.backing.includes("cash")) {
        errors.push(`${ref}: cash tag requires extensions.backing to include cash`);
      }
    }

    // "fiat" requires a recognized currency code and fiat settlement.
    if (hasTag(token, "fiat")) {
      if (extensions.category !== "tokenized-fiat") {
        errors.push(`${ref}: fiat tag requires extensions.category=tokenized-fiat`);
      }
      if (!fiatCurrencies.has(extensions.currency)) {
        errors.push(`${ref}: fiat tag requires extensions.currency to be one of ${[...fiatCurrencies].join(",")}`);
      }
      if (extensions.settlement !== "fiat") {
        errors.push(`${ref}: fiat tag requires extensions.settlement=fiat`);
      }
    }

    // "gru" requires a version like v1/v2 and a non-empty family name.
    if (hasTag(token, "gru")) {
      if (typeof extensions.gruVersion !== "string" || !/^v\d+$/.test(extensions.gruVersion)) {
        errors.push(`${ref}: gru tag requires extensions.gruVersion like v1 or v2`);
      }
      if (typeof extensions.gruFamily !== "string" || extensions.gruFamily.length === 0) {
        errors.push(`${ref}: gru tag requires extensions.gruFamily`);
      }
    }

    // "commodity" is mutually exclusive with cash/fiat and must declare
    // commodity backing explicitly.
    if (hasTag(token, "commodity")) {
      if (hasTag(token, "cash") || hasTag(token, "fiat")) {
        errors.push(`${ref}: commodity token must not be tagged cash or fiat`);
      }
      if (extensions.category !== "commodity-token") {
        errors.push(`${ref}: commodity tag requires extensions.category=commodity-token`);
      }
      if (extensions.cashLike !== false) {
        errors.push(`${ref}: commodity tag requires extensions.cashLike=false`);
      }
      if (extensions.backing !== "commodity-reserves") {
        errors.push(`${ref}: commodity tag requires extensions.backing=commodity-reserves`);
      }
    }

    // Protocol-native tokens must never present as fiat/cash/gru instruments.
    if (protocolSymbols.has(token.symbol)) {
      if (hasTag(token, "cash") || hasTag(token, "fiat") || hasTag(token, "gru")) {
        errors.push(`${ref}: protocol token ${token.symbol} must not be tagged cash, fiat, or gru`);
      }
      if (extensions.category === "tokenized-fiat") {
        errors.push(`${ref}: protocol token ${token.symbol} must not use category tokenized-fiat`);
      }
      if (extensions.cashLike === true) {
        errors.push(`${ref}: protocol token ${token.symbol} must not be cashLike`);
      }
    }

    // Crypto-collateralized stablecoins (e.g. DAI) are explicitly not
    // cash/fiat and must carry the matching instrument marker.
    if (cryptoCollateralStablecoins.has(token.symbol)) {
      if (hasTag(token, "cash") || hasTag(token, "fiat")) {
        errors.push(`${ref}: ${token.symbol} must not be tagged cash or fiat`);
      }
      if (extensions.instrument !== "crypto-collateralized-stablecoin") {
        errors.push(`${ref}: ${token.symbol} requires extensions.instrument=crypto-collateralized-stablecoin`);
      }
      if (extensions.cashLike !== false) {
        errors.push(`${ref}: ${token.symbol} requires extensions.cashLike=false`);
      }
    }
  });

  return { errors, warnings };
}
|
||||
|
||||
// --- Entry point -----------------------------------------------------------
// Validate every requested list that exists on disk. Missing files are only
// warnings so the default list may include not-yet-created paths; any
// error (including unparseable JSON) exits with status 1.
const files = parseArgs();
const allErrors = [];
const allWarnings = [];
let validated = 0; // number of files actually parsed and checked

for (const file of files) {
  const abs = resolve(repoRoot, file);
  if (!existsSync(abs)) {
    allWarnings.push(`${file}: missing; skipped`);
    continue;
  }
  let list;
  try {
    list = JSON.parse(readFileSync(abs, "utf8"));
  } catch (error) {
    allErrors.push(`${file}: invalid JSON: ${error.message}`);
    continue;
  }
  const { errors, warnings } = validateList(file, list);
  allErrors.push(...errors);
  allWarnings.push(...warnings);
  validated += 1;
}

for (const warning of allWarnings) {
  console.warn(`[WARN] ${warning}`);
}

if (allErrors.length > 0) {
  console.error(`[ERROR] Token-list metadata validation failed with ${allErrors.length} issue(s):`);
  for (const error of allErrors) {
    console.error(`  - ${error}`);
  }
  process.exit(1);
}

console.log(`[OK] Token-list metadata conventions valid for ${validated} file(s).`);
|
||||
Reference in New Issue
Block a user