Frontend (items 1-10):
- WebSocket streaming integration with useWebSocket hook
- Admin Dashboard UI (status, voices, agents, governance tabs)
- Voice playback UI (TTS/STT integration)
- Settings/Preferences page (conversation style, sliders)
- Responsive/mobile layout (breakpoints at 480px, 768px)
- Dark/light theme with CSS variables and localStorage
- Error handling & loading states (retry, empty state, disabled input)
- Authentication UI (login page, Bearer token, logout)
- Head visualization improvements (active/speaking states, animations)
- Consequence/Ethics dashboard (lessons, consequences, insights tabs)
Backend stubs (items 11-18):
- Tool connectors: DocsConnector (text/md/PDF), DBConnector (SQLite/Postgres), CodeRunnerConnector (Python/JS/Bash/Ruby sandboxed)
- STT adapter: WhisperSTTAdapter, AzureSTTAdapter
- Multi-modal interface adapters: Visual, Haptic, Gesture, Biometric
- SSE streaming endpoint (/v1/sessions/{id}/stream/sse)
- Multi-tenant support (X-Tenant-ID header, tenant CRUD)
- Plugin marketplace/registry (register, install, list)
- Backup/restore endpoints
- Versioned API negotiation (Accept-Version header, deprecation)
Infrastructure (items 19-23):
- docker-compose.yml (API + Postgres + Redis + frontend)
- .env.example with all configurable vars
- gunicorn.conf.py production ASGI config
- Prometheus metrics collector and /metrics endpoint
- Structured JSON logging configuration
Documentation (items 24-25):
- Architecture docs with module layout and subsystem descriptions
- Quickstart guide with setup, API tour, and test instructions
Tests (items 26-32):
- Integration tests: 25 end-to-end API tests
- Frontend tests: 10 Vitest tests for hooks (useTheme, useAuth)
- Load/performance tests: latency and throughput benchmarks
- Connector tests: 16 tests for Docs, DB, CodeRunner
- Multi-modal adapter tests: 9 tests
- Metrics collector tests: 5 tests
- STT adapter tests: 2 tests
511 Python tests passing, 10 frontend tests passing, 0 ruff errors.
Co-Authored-By: Nakamoto, S <defi@defi-oracle.io>
117 lines
4.4 KiB
Python
117 lines
4.4 KiB
Python
"""DB connector: query databases via configurable SQL drivers."""
|
|
|
|
from typing import Any
|
|
|
|
from fusionagi._logger import logger
|
|
from fusionagi.tools.connectors.base import BaseConnector
|
|
|
|
|
|
class DBConnector(BaseConnector):
    """Database connector supporting SQLite (built-in) and Postgres (via psycopg).

    Provides read-only query access by default. Write operations require
    explicit ``allow_write=True`` at init.

    The underlying DB-API connection is opened lazily on first use and can be
    released with :meth:`close`.
    """

    name = "db"

    def __init__(
        self,
        connection_string: str = ":memory:",
        driver: str = "sqlite",
        allow_write: bool = False,
    ) -> None:
        """Configure the connector without opening a connection.

        Args:
            connection_string: SQLite path (or ``":memory:"``) / Postgres DSN.
            driver: ``"sqlite"`` or ``"postgres"``.
            allow_write: Enable the ``execute`` action for write statements.
        """
        self._conn_str = connection_string
        self._driver = driver
        self._allow_write = allow_write
        # Opened lazily by _get_connection(); None until first use.
        self._conn: Any = None

    def _get_connection(self) -> Any:
        """Return the cached DB-API connection, opening it on first call.

        Raises:
            ImportError: Postgres driver requested but psycopg is not installed.
            ValueError: ``driver`` is neither ``"sqlite"`` nor ``"postgres"``.
        """
        if self._conn is not None:
            return self._conn

        if self._driver == "sqlite":
            import sqlite3

            self._conn = sqlite3.connect(self._conn_str)
            # Row factory gives name-addressable rows so _query can build dicts.
            self._conn.row_factory = sqlite3.Row
        elif self._driver == "postgres":
            try:
                import psycopg

                self._conn = psycopg.connect(self._conn_str)
            except ImportError as e:
                raise ImportError("Install psycopg: pip install psycopg[binary]") from e
        else:
            raise ValueError(f"Unsupported driver: {self._driver}")

        return self._conn

    def close(self) -> None:
        """Release the underlying connection, if one was opened.

        Idempotent: safe to call multiple times. The next action re-opens
        a fresh connection lazily.
        """
        if self._conn is not None:
            try:
                self._conn.close()
            finally:
                self._conn = None

    def invoke(self, action: str, params: dict[str, Any]) -> Any:
        """Dispatch a connector action.

        Supported actions: ``query``, ``tables``, ``schema``, and (only when
        ``allow_write=True``) ``execute``. Unknown or disallowed actions are
        reported via the returned dict rather than raised.
        """
        if action == "query":
            return self._query(params.get("query", ""), params.get("params"))
        if action == "execute" and self._allow_write:
            return self._execute(params.get("query", ""), params.get("params"))
        if action == "tables":
            return self._list_tables()
        if action == "schema":
            return self._table_schema(params.get("table", ""))
        return {"error": f"Unknown or disallowed action: {action}"}

    def _query(self, sql: str, bind_params: Any = None) -> dict[str, Any]:
        """Run a SQL query and return rows as dicts.

        Returns a dict with ``rows`` (capped at 1000), ``columns``, ``count``
        (the uncapped row count), and ``error``. Failures are caught and
        reported in ``error`` instead of raised.
        """
        if not sql.strip():
            return {"rows": [], "error": "Empty query"}
        try:
            conn = self._get_connection()
            cur = conn.cursor()
            cur.execute(sql, bind_params or ())
            raw = cur.fetchall()
            if self._driver == "sqlite":
                # DB-API description tuples: index 0 is the column name.
                cols = [d[0] for d in (cur.description or [])]
            else:
                # psycopg 3 exposes Column objects with a .name attribute.
                cols = [d.name for d in (cur.description or [])]
            rows = [dict(zip(cols, r)) for r in raw]
            cur.close()
            # Cap the payload at 1000 rows; count still reflects the full result.
            return {"rows": rows[:1000], "columns": cols, "count": len(rows), "error": None}
        except Exception as e:
            logger.warning("DBConnector query failed", extra={"error": str(e)})
            return {"rows": [], "error": str(e)}

    def _execute(self, sql: str, bind_params: Any = None) -> dict[str, Any]:
        """Run a write statement and commit.

        Only reachable via ``invoke`` when ``allow_write=True``. Returns
        ``affected_rows`` and ``error``; failures are reported, not raised.
        """
        try:
            conn = self._get_connection()
            cur = conn.cursor()
            cur.execute(sql, bind_params or ())
            conn.commit()
            affected = cur.rowcount
            cur.close()
            return {"affected_rows": affected, "error": None}
        except Exception as e:
            logger.warning("DBConnector execute failed", extra={"error": str(e)})
            return {"affected_rows": 0, "error": str(e)}

    def _list_tables(self) -> dict[str, Any]:
        """List user tables for the configured driver (public schema on Postgres)."""
        if self._driver == "sqlite":
            return self._query("SELECT name FROM sqlite_master WHERE type='table' ORDER BY name")
        return self._query("SELECT tablename AS name FROM pg_tables WHERE schemaname='public' ORDER BY tablename")

    def _table_schema(self, table: str) -> dict[str, Any]:
        """Describe the columns of ``table``.

        SQLite table names are restricted to plain identifiers because PRAGMA
        cannot take bound parameters; Postgres uses a parameterized query.
        """
        if not table:
            return {"columns": [], "error": "Table name required"}
        if self._driver == "sqlite":
            # PRAGMA does not support bound parameters, so the name must be
            # interpolated. Reject anything that is not a plain identifier to
            # prevent SQL injection via a crafted table name.
            if not table.isidentifier():
                return {"columns": [], "error": "Invalid table name"}
            return self._query(f"PRAGMA table_info('{table}')")
        return self._query(
            "SELECT column_name, data_type, is_nullable FROM information_schema.columns "
            "WHERE table_name = %s ORDER BY ordinal_position",
            (table,),
        )

    def schema(self) -> dict[str, Any]:
        """Advertise this connector's actions and parameter names.

        ``execute`` is only listed when writes were enabled at init.
        """
        actions = ["query", "tables", "schema"]
        if self._allow_write:
            actions.append("execute")
        return {
            "name": self.name,
            "actions": actions,
            "parameters": {"query": "string", "params": "list", "table": "string"},
        }
|