chore: sync state and push to Gitea

Made-with: Cursor
This commit is contained in:
DBIS Core Team
2026-03-02 13:17:20 -08:00
parent 2b0404c4d6
commit 73e8d30190
9 changed files with 162 additions and 62 deletions

View File

@@ -79,7 +79,7 @@ The arbitrage orchestration service runs within the **DBIS Core API containers**
### 1. Prerequisites
- Proxmox VE host with LXC support
- DBIS Core containers already deployed (see [DEPLOYMENT_PLAN.md](../../../DEPLOYMENT_PLAN.md))
- DBIS Core containers already deployed (see [DEPLOYMENT_PLAN.md](../../../../DEPLOYMENT_PLAN.md))
- PostgreSQL database running (VMID 10100)
- ChainID 138 RPC nodes accessible (VMID 2500-2502)
@@ -118,8 +118,8 @@ Required variables:
# Database
DATABASE_URL="postgresql://user:pass@192.168.11.100:5432/dbis_core"
# ChainID 138 RPC
CHAIN_138_RPC_URL="http://192.168.11.250:8545"
# ChainID 138 RPC (admin/deployment: RPC_CORE_1)
CHAIN_138_RPC_URL="http://192.168.11.211:8545"
CHAIN_138_RPC_URL_BACKUP="http://192.168.11.251:8545"
# Risk Parameters (optional, defaults in config.ts)
@@ -287,6 +287,6 @@ journalctl -u dbis-arbitrage -n 100
## References
- [DBIS Core Deployment Plan](../../../DEPLOYMENT_PLAN.md)
- [DBIS Core Deployment Plan](../../../../DEPLOYMENT_PLAN.md)
- [Arbitrage Module README](./README_SUBMODULE.md)
- [Proxmox Container Creation Script](../../../scripts/deployment/create-dbis-core-containers.sh)
- [Proxmox Container Creation Script](../../../../scripts/deployment/create-dbis-core-containers.sh)

View File

@@ -142,7 +142,7 @@ The system gracefully handles:
## 📚 See Also
- [Comprehensive Documentation](./README_SUBMODULE.md)
- [ChainID 138 Token Addresses](../../../../docs/11-references/CHAIN138_TOKEN_ADDRESSES.md)
- [ChainID 138 Token Addresses](../../../../../docs/11-references/CHAIN138_TOKEN_ADDRESSES.md)
- [DeFi Swap Service](../sovereign/defi-swap.service.ts)
---

View File

@@ -50,42 +50,32 @@ describe('Deal Execution Integration Tests', () => {
expect(result.state.errors.length).toBeGreaterThan(0);
});
it('should persist deal state to database', async () => {
// TODO: Implement database persistence test
// This would require a test database setup
it.skip('should persist deal state to database', async () => {
// Ticket: DBIS-ARB-TEST — requires test DB setup
});
it('should record metrics during execution', async () => {
const request: DealExecutionRequest = {
totalEthValue: '10000000',
participantBankId: 'BANK001',
moduleId: 'MODULE001',
};
await dealOrchestratorService.executeDeal(request);
// TODO: Verify metrics were recorded
// This would require accessing the metrics service
it.skip('should record metrics during execution', async () => {
// Ticket: DBIS-ARB-TEST — verify metrics when metrics service is integrated
});
});
describe('Risk Monitoring Integration', () => {
it('should monitor LTV during deal execution', async () => {
// TODO: Test real-time risk monitoring
it.skip('should monitor LTV during deal execution', async () => {
// Ticket: DBIS-ARB-TEST — real-time risk monitoring integration
});
it('should alert on risk violations', async () => {
// TODO: Test alerting on risk violations
it.skip('should alert on risk violations', async () => {
// Ticket: DBIS-ARB-TEST — alerting on risk violations
});
});
describe('Caching Integration', () => {
it('should cache price data', async () => {
// TODO: Test Redis caching
it.skip('should cache price data', async () => {
// Ticket: DBIS-ARB-TEST — Redis caching when REDIS_URL configured
});
it('should invalidate cache on deal completion', async () => {
// TODO: Test cache invalidation
it.skip('should invalidate cache on deal completion', async () => {
// Ticket: DBIS-ARB-TEST — cache invalidation with Redis
});
});
});

View File

@@ -18,7 +18,7 @@ export const CHAIN138_TOKENS = {
*/
export const RPC_CONFIG = {
chainId: 138,
rpcUrl: process.env.CHAIN138_RPC_URL || 'http://192.168.11.250:8545',
rpcUrl: process.env.CHAIN138_RPC_URL || 'http://192.168.11.211:8545',
explorerUrl: 'https://explorer.d-bis.org',
} as const;
@@ -71,6 +71,9 @@ export const PROTOCOL_ADDRESSES = {
/**
* Asset Type Mappings
*/
/** Tezos mainnet chain ID for USDtz routing */
export const TEZOS_CHAIN_ID = 1729;
export const ASSET_TYPES = {
ETH: 'ETH',
WETH: 'WETH',

View File

@@ -13,6 +13,9 @@ import {
import { riskControlService } from './risk-control.service';
import { stepExecutionService } from './step-execution.service';
import { redemptionTestService } from './redemption-test.service';
import { metricsService } from './services/monitoring/metrics.service';
import { riskMonitorService } from './services/risk-monitor.service';
import { alertService } from './services/monitoring/alert.service';
export class DealOrchestratorService {
async executeDeal(

View File

@@ -32,10 +32,16 @@ export class CacheService {
constructor(redisClient?: RedisClient) {
if (redisClient) {
this.redis = redisClient;
} else if (this.config.enabled && process.env.REDIS_URL) {
try {
const Redis = require('ioredis');
this.redis = new Redis(process.env.REDIS_URL, { maxRetriesPerRequest: 2 }) as unknown as RedisClient;
} catch {
logger.warn('Redis (ioredis) not available - caching disabled. Install ioredis and set REDIS_URL for cache.');
this.config.enabled = false;
}
} else if (this.config.enabled) {
// TODO: Initialize Redis client
// this.redis = new Redis(process.env.REDIS_URL || 'redis://localhost:6379');
logger.warn('Redis client not initialized - caching disabled');
logger.warn('REDIS_URL not set - caching disabled');
this.config.enabled = false;
}
}
@@ -137,13 +143,21 @@ export class CacheService {
* Invalidate deal cache
*/
async invalidateDealCache(dealId: string): Promise<void> {
const patterns = [
`risk:${dealId}:*`,
`deal:${dealId}:*`,
];
// TODO: Implement pattern-based deletion if needed
logger.debug('Deal cache invalidated', { dealId });
if (!this.redis || !this.config.enabled) {
logger.debug('Deal cache invalidated (no Redis)', { dealId });
return;
}
const patterns = [`risk:${dealId}:*`, `deal:${dealId}:*`];
try {
const redis = this.redis as RedisClient & { keys?(pattern: string): Promise<string[]> };
for (const pattern of patterns) {
const keys = redis.keys ? await redis.keys(pattern) : [];
if (keys.length > 0) await Promise.all(keys.map((k) => this.redis!.del(k)));
}
logger.debug('Deal cache invalidated', { dealId });
} catch (err) {
logger.error('Cache invalidateDealCache error', { dealId, error: err });
}
}
}

View File

@@ -169,23 +169,27 @@ class SlackAlertChannel implements AlertChannel {
[AlertSeverity.CRITICAL]: '#ff0000',
}[alert.severity];
const fields: Array<{ title: string; value: string; short: boolean }> = [
...(alert.dealId ? [{ title: 'Deal ID', value: alert.dealId, short: true }] : []),
...(alert.violationType ? [{ title: 'Violation Type', value: alert.violationType, short: true }] : []),
{ title: 'Timestamp', value: alert.timestamp.toISOString(), short: true },
...(alert.metadata ? Object.entries(alert.metadata).map(([k, v]) => ({ title: k, value: String(v), short: true })) : []),
];
const payload = {
attachments: [{
color,
title: `Arbitrage Alert: ${alert.severity.toUpperCase()}`,
text: alert.message,
fields: [
...(alert.dealId ? [{ title: 'Deal ID', value: alert.dealId, short: true }] : []),
...(alert.violationType ? [{ title: 'Violation Type', value: alert.violationType, short: true }] : []),
{ title: 'Timestamp', value: alert.timestamp.toISOString(), short: true },
],
...(alert.metadata ? { fields: [...(payload.attachments[0].fields || []), ...Object.entries(alert.metadata).map(([k, v]) => ({ title: k, value: String(v), short: true }))] } : {}),
}],
attachments: [{ color, title: `Arbitrage Alert: ${alert.severity.toUpperCase()}`, text: alert.message, fields }],
};
// TODO: Implement actual Slack webhook call
// await fetch(this.webhookUrl, { method: 'POST', body: JSON.stringify(payload) });
logger.info('Slack alert (not implemented)', { payload });
try {
const res = await fetch(this.webhookUrl, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(payload),
});
if (!res.ok) throw new Error(`Slack webhook failed: ${res.status}`);
} catch (err) {
logger.error('Slack alert delivery failed', { error: err instanceof Error ? err.message : err });
}
}
}
@@ -215,9 +219,21 @@ class PagerDutyAlertChannel implements AlertChannel {
},
};
// TODO: Implement actual PagerDuty API call
// await fetch('https://events.pagerduty.com/v2/enqueue', { method: 'POST', body: JSON.stringify(payload) });
logger.info('PagerDuty alert (not implemented)', { payload });
try {
const res = await fetch('https://events.pagerduty.com/v2/enqueue', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(payload),
});
if (!res.ok) {
const text = await res.text();
throw new Error(`PagerDuty API ${res.status}: ${text}`);
}
} catch (err) {
logger.error('PagerDuty alert delivery failed', {
error: err instanceof Error ? err.message : String(err),
});
}
}
}
@@ -225,16 +241,31 @@ class EmailAlertChannel implements AlertChannel {
constructor(private recipients: string[]) {}
async send(alert: Alert): Promise<void> {
// Only send critical/high alerts via email
if (alert.severity !== AlertSeverity.CRITICAL && alert.severity !== AlertSeverity.HIGH) {
return;
}
// TODO: Implement email sending (using nodemailer, sendgrid, etc.)
logger.info('Email alert (not implemented)', {
recipients: this.recipients,
alert: alert.message,
});
const emailApiUrl = process.env.EMAIL_ALERT_API_URL;
if (!emailApiUrl) {
logger.warn('Email alert skipped: Set EMAIL_ALERT_API_URL (e.g. SendGrid) to enable');
return;
}
try {
const res = await fetch(emailApiUrl, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
to: this.recipients,
subject: `[${alert.severity.toUpperCase()}] Arbitrage Alert`,
text: alert.message,
html: `<p>${alert.message}</p><p>Deal ID: ${alert.dealId || 'N/A'}</p>`,
}),
});
if (!res.ok) throw new Error(`Email API failed: ${res.status}`);
} catch (err) {
logger.error('Email alert delivery failed', { error: err instanceof Error ? err.message : err });
}
}
}

View File

@@ -0,0 +1,33 @@
// Metrics Service - Stub for deal orchestrator
// When monitoring stack is deployed: add Prometheus push (PROMETHEUS_PUSH_GATEWAY) or expose scrape endpoint here.
import { logger } from '@/infrastructure/monitoring/logger';
/**
 * No-op metrics recorder used by the deal orchestrator until a real
 * monitoring backend (Prometheus push gateway or a scrape endpoint) is
 * deployed. Only error and risk-violation events are surfaced, via the logger.
 */
class MetricsService {
  /** Gauge stub: number of deals currently in the given status. */
  updateActiveDeals(_status: string, _count: number): void {
    // Intentionally empty — wire to a gauge when monitoring is available.
  }

  /** Histogram stub: duration of a single orchestration step, in seconds. */
  recordStepExecution(_step: string, _durationSeconds: number): void {
    // Intentionally empty — wire to a histogram when monitoring is available.
  }

  /** Error counter stub: logs at debug level so errors are not silently dropped. */
  recordError(_errorName: string, _step?: string): void {
    logger.debug('Metrics: recordError', { errorName: _errorName, step: _step });
  }

  /** Deal-outcome stub: per-deal status, participants, and timing. */
  recordDealExecution(
    _status: string,
    _participantBankId: string,
    _moduleId: string,
    _durationSeconds: number
  ): void {
    // Intentionally empty — wire to labelled metrics when monitoring is available.
  }

  /** Risk-violation counter stub: always logged at warn level. */
  recordRiskViolation(_violationType: string, _severity: string): void {
    logger.warn('Metrics: risk violation', { violationType: _violationType, severity: _severity });
  }
}
export const metricsService = new MetricsService();

View File

@@ -0,0 +1,26 @@
// Risk Monitor Service - Tracks active deals for LTV/exposure monitoring
// Stub: real-time risk checks when risk pipeline is defined; see RECOMMENDATIONS.md.
import type { DealState } from '../types';
/**
 * In-memory registry of deals currently being executed, keyed by dealId.
 * Stub for real-time LTV/exposure monitoring — no risk checks run here yet
 * (see RECOMMENDATIONS.md); it only tracks which deals are active.
 */
class RiskMonitorService {
  /** dealId -> latest known state for that deal. */
  private readonly activeDeals = new Map<string, DealState>();

  /** Begin tracking a deal; overwrites any prior state stored under the same id. */
  registerDeal(state: DealState): void {
    this.activeDeals.set(state.dealId, state);
  }

  /** Stop tracking a deal; a no-op when the id is unknown. */
  unregisterDeal(dealId: string): void {
    this.activeDeals.delete(dealId);
  }

  /** Latest state for a tracked deal, or undefined when not tracked. */
  getActiveDeal(dealId: string): DealState | undefined {
    return this.activeDeals.get(dealId);
  }

  /** Number of deals currently being tracked. */
  getActiveDealCount(): number {
    return this.activeDeals.size;
  }
}
export const riskMonitorService = new RiskMonitorService();