Update README.md to provide a comprehensive overview of The Order monorepo, including repository structure, quickstart guide, development workflow, and contribution guidelines.

This commit is contained in:
defiQUG
2025-11-07 22:34:54 -08:00
parent e020318829
commit 4af7580f7a
128 changed files with 4558 additions and 2 deletions

31
.editorconfig Normal file
View File

@@ -0,0 +1,31 @@
# EditorConfig is awesome: https://EditorConfig.org
# top-most EditorConfig file
root = true
# Unix-style newlines with a newline ending every file
[*]
end_of_line = lf
insert_final_newline = true
charset = utf-8
trim_trailing_whitespace = true
# Matches multiple files with brace expansion notation
[*.{js,jsx,ts,tsx,json,yml,yaml}]
indent_style = space
indent_size = 2
[*.md]
trim_trailing_whitespace = false
[*.{py,go}]
indent_style = space
indent_size = 4
[Makefile]
indent_style = tab
[*.sh]
indent_style = space
indent_size = 2

19
.eslintrc.js Normal file
View File

@@ -0,0 +1,19 @@
module.exports = {
root: true,
extends: ['eslint:recommended'],
parser: '@typescript-eslint/parser',
plugins: ['@typescript-eslint'],
env: {
node: true,
es2022: true,
},
parserOptions: {
ecmaVersion: 2022,
sourceType: 'module',
},
rules: {
// Add custom rules here
},
ignorePatterns: ['node_modules', 'dist', 'build', '.next', 'coverage'],
};

29
.gitattributes vendored Normal file
View File

@@ -0,0 +1,29 @@
# Auto detect text files and perform LF normalization
* text=auto
# Force LF line endings for specific file types
*.sh text eol=lf
*.yaml text eol=lf
*.yml text eol=lf
*.json text eol=lf
*.md text eol=lf
*.ts text eol=lf
*.tsx text eol=lf
*.js text eol=lf
*.jsx text eol=lf
*.py text eol=lf
*.go text eol=lf
# Denote all files that are truly binary
*.png binary
*.jpg binary
*.jpeg binary
*.gif binary
*.ico binary
*.svg binary
*.woff binary
*.woff2 binary
*.ttf binary
*.eot binary
*.pdf binary

48
.github/CODEOWNERS vendored Normal file
View File

@@ -0,0 +1,48 @@
# Global owners
* @the-order/core-team
# Apps
/apps/portal-public/ @the-order/frontend-team
/apps/portal-internal/ @the-order/frontend-team @the-order/admin-team
/apps/mcp-members/ @the-order/mcp-team
/apps/mcp-legal/ @the-order/mcp-team @the-order/legal-team
# Services
/services/intake/ @the-order/backend-team
/services/identity/ @the-order/identity-team @the-order/security-team
/services/finance/ @the-order/finance-team @the-order/backend-team
/services/dataroom/ @the-order/backend-team @the-order/security-team
/services/omnis-brand/ @the-order/omnis-team
/services/arromis-brand/ @the-order/arromis-team
# Packages
/packages/ui/ @the-order/frontend-team
/packages/schemas/ @the-order/backend-team @the-order/frontend-team
/packages/auth/ @the-order/identity-team @the-order/security-team
/packages/storage/ @the-order/backend-team
/packages/crypto/ @the-order/security-team @the-order/identity-team
/packages/workflows/ @the-order/backend-team
/packages/test-utils/ @the-order/backend-team
# Infrastructure
/infra/ @the-order/devops-team @the-order/security-team
/infra/terraform/ @the-order/devops-team
/infra/k8s/ @the-order/devops-team
/infra/gateways/ @the-order/devops-team @the-order/security-team
# Documentation
/docs/ @the-order/core-team
/docs/governance/ @the-order/core-team @the-order/security-team
/docs/architecture/ @the-order/architecture-team
/docs/legal/ @the-order/legal-team
# CI/CD
/.github/workflows/ @the-order/devops-team
/.github/CODEOWNERS @the-order/core-team
# Root config files
/package.json @the-order/core-team
/pnpm-workspace.yaml @the-order/core-team
/turbo.json @the-order/core-team
/tsconfig*.json @the-order/core-team

38
.github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
View File

@@ -0,0 +1,38 @@
---
name: Bug Report
about: Create a report to help us improve
title: '[BUG] '
labels: bug
assignees: ''
---
## Description
A clear and concise description of what the bug is.
## Steps to Reproduce
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
## Expected Behavior
A clear and concise description of what you expected to happen.
## Actual Behavior
A clear and concise description of what actually happened.
## Environment
- OS: [e.g. Ubuntu 22.04]
- Node.js version: [e.g. 18.17.0]
- pnpm version: [e.g. 8.15.0]
- Browser (if applicable): [e.g. Chrome 120]
## Screenshots
If applicable, add screenshots to help explain your problem.
## Additional Context
Add any other context about the problem here.
## Possible Solution
If you have suggestions on how to fix the bug, please describe them here.

View File

@@ -0,0 +1,29 @@
---
name: Feature Request
about: Suggest an idea for this project
title: '[FEATURE] '
labels: enhancement
assignees: ''
---
## Description
A clear and concise description of what the feature is.
## Problem Statement
What problem does this feature solve? What is the use case?
## Proposed Solution
A clear and concise description of what you want to happen.
## Alternatives Considered
A clear and concise description of any alternative solutions or features you've considered.
## Additional Context
Add any other context, mockups, or screenshots about the feature request here.
## Implementation Notes
If you have ideas on how this could be implemented, please share them here.
## Related Issues
Link to any related issues or discussions.

38
.github/PULL_REQUEST_TEMPLATE.md vendored Normal file
View File

@@ -0,0 +1,38 @@
## Description
Brief description of changes
## Type of Change
- [ ] Bug fix
- [ ] New feature
- [ ] Breaking change
- [ ] Documentation update
- [ ] Performance improvement
- [ ] Refactoring
## Related Issues
Closes #(issue number)
## Testing
How was this tested?
- [ ] Unit tests added/updated
- [ ] Integration tests added/updated
- [ ] Manual testing performed
## Checklist
- [ ] Code follows style guidelines
- [ ] Self-review completed
- [ ] Comments added for complex code
- [ ] Documentation updated
- [ ] Tests added/updated
- [ ] All tests pass
- [ ] No new warnings
- [ ] Type checking passes
- [ ] Linting passes
## Screenshots (if applicable)
<!-- Add screenshots here -->
## Additional Notes
<!-- Any additional information -->

235
.github/workflows/ci.yml vendored Normal file
View File

@@ -0,0 +1,235 @@
# CI: lint/type-check, test, build, security scan, SBOM generation, and
# (on pushes to main) Docker image builds signed with cosign.
name: CI

on:
  push:
    branches: [main, develop]
  pull_request:
    branches: [main, develop]

# Remote-cache credentials for Turborepo.
env:
  TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
  TURBO_TEAM: ${{ secrets.TURBO_TEAM }}

jobs:
  lint-and-typecheck:
    name: Lint and Type Check
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: Setup pnpm
        uses: pnpm/action-setup@v2
        with:
          version: 8
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '18'
          cache: 'pnpm'
      - name: Install dependencies
        run: pnpm install --frozen-lockfile
      - name: Lint
        run: pnpm lint
      - name: Type check
        run: pnpm type-check

  test:
    name: Test
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: Setup pnpm
        uses: pnpm/action-setup@v2
        with:
          version: 8
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '18'
          cache: 'pnpm'
      - name: Install dependencies
        run: pnpm install --frozen-lockfile
      - name: Run tests
        run: pnpm test
      - name: Upload coverage
        # codecov-action v3 is deprecated; v4 requires the token explicitly.
        uses: codecov/codecov-action@v4
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: ./coverage/lcov.info

  build:
    name: Build
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: Setup pnpm
        uses: pnpm/action-setup@v2
        with:
          version: 8
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '18'
          cache: 'pnpm'
      - name: Install dependencies
        run: pnpm install --frozen-lockfile
      - name: Build
        run: pnpm build

  security-scan:
    name: Security Scan
    runs-on: ubuntu-latest
    permissions:
      contents: read
      # Required to upload SARIF results to the GitHub Security tab.
      security-events: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: Run Trivy vulnerability scanner
        # Pinned to a released version instead of the mutable `master` ref.
        uses: aquasecurity/trivy-action@0.28.0
        with:
          scan-type: 'fs'
          scan-ref: '.'
          format: 'sarif'
          output: 'trivy-results.sarif'
      - name: Upload Trivy results to GitHub Security
        # codeql-action v2 was deprecated in January 2025; v3 is current.
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: 'trivy-results.sarif'

  sbom:
    name: Generate SBOM
    runs-on: ubuntu-latest
    permissions:
      contents: read
      # Required for the Grype SARIF upload step.
      security-events: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: Setup pnpm
        uses: pnpm/action-setup@v2
        with:
          version: 8
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '18'
          cache: 'pnpm'
      - name: Install dependencies
        run: pnpm install --frozen-lockfile
      - name: Install Syft
        uses: anchore/sbom-action/download-syft@v0
        with:
          syft-version: latest
      - name: Generate SBOM
        run: |
          syft packages dir:. -o spdx-json > sbom.spdx.json
          syft packages dir:. -o cyclonedx-json > sbom.cyclonedx.json
      - name: Upload SBOM artifacts
        # upload-artifact v3 was shut off by GitHub (Nov 2024); v4 is required.
        uses: actions/upload-artifact@v4
        with:
          name: sbom
          path: |
            sbom.spdx.json
            sbom.cyclonedx.json
      - name: Run Grype scan
        uses: anchore/scan-action@v3
        id: grype
        with:
          path: "."
          fail-build: false
          severity-cutoff: high
      - name: Upload Grype results
        uses: github/codeql-action/upload-sarif@v3
        if: always()
        with:
          sarif_file: ${{ steps.grype.outputs.sarif }}

  docker-build:
    name: Build Docker Images
    runs-on: ubuntu-latest
    if: github.event_name == 'push' && github.ref == 'refs/heads/main'
    needs: [build]
    permissions:
      contents: read
      # Push images to GHCR with the workflow GITHUB_TOKEN.
      packages: write
      # Keyless cosign signing uses the GitHub OIDC identity token.
      id-token: write
    strategy:
      matrix:
        service:
          - intake
          - identity
          - finance
          - dataroom
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Log in to Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Extract metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ghcr.io/${{ github.repository }}/${{ matrix.service }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=sha
      - name: Build and push
        id: build
        uses: docker/build-push-action@v5
        with:
          context: ./services/${{ matrix.service }}
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
      - name: Install Cosign
        # Job-level `if` already restricts this job to main; no step guard needed.
        uses: sigstore/cosign-installer@v3
      - name: Sign container image
        # Sign the immutable image digest: the previous form passed the
        # multi-line tag list from metadata-action into a single command,
        # which breaks, and tags are mutable anyway.
        run: |
          cosign sign --yes ghcr.io/${{ github.repository }}/${{ matrix.service }}@${{ steps.build.outputs.digest }}

71
.github/workflows/release.yml vendored Normal file
View File

@@ -0,0 +1,71 @@
# Release pipeline: build and test on version tags, publish a GitHub release,
# deploy to staging, then gate production behind a manual approval.
name: Release

on:
  push:
    tags:
      - 'v*.*.*'

env:
  TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
  TURBO_TEAM: ${{ secrets.TURBO_TEAM }}

jobs:
  release:
    name: Release
    runs-on: ubuntu-latest
    permissions:
      # Needed to create the GitHub release.
      contents: write
      # trstringer/manual-approval opens an issue to collect approvals.
      issues: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          submodules: recursive
          fetch-depth: 0
      - name: Setup pnpm
        uses: pnpm/action-setup@v2
        with:
          version: 8
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '18'
          cache: 'pnpm'
          registry-url: 'https://registry.npmjs.org'
      - name: Install dependencies
        run: pnpm install --frozen-lockfile
      - name: Build
        run: pnpm build
      - name: Run tests
        run: pnpm test
      - name: Create GitHub Release
        # actions/create-release@v1 is archived/unmaintained; action-gh-release
        # is the maintained replacement. `github.ref_name` gives "v1.2.3"
        # whereas `github.ref` gave "refs/tags/v1.2.3" in the release name.
        uses: softprops/action-gh-release@v2
        with:
          tag_name: ${{ github.ref_name }}
          name: Release ${{ github.ref_name }}
          draft: false
          prerelease: false
      - name: Deploy to Staging
        run: |
          echo "Deploy to staging environment"
          # Add deployment steps here
      - name: Wait for approval
        uses: trstringer/manual-approval@v1
        with:
          secret: ${{ github.TOKEN }}
          approvers: the-order/release-team
          minimum-approvals: 1
      - name: Deploy to Production
        if: success()
        run: |
          echo "Deploy to production environment"
          # Add deployment steps here

79
.gitignore vendored Normal file
View File

@@ -0,0 +1,79 @@
# Dependencies
node_modules/
.pnp
.pnp.js
# Testing
coverage/
*.lcov
.nyc_output
# Production
build/
dist/
.next/
out/
*.tsbuildinfo
# Environment variables
.env
.env.local
.env*.local
.env.enc
*.env.enc
!.env.example
# Logs
logs/
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
# OS
.DS_Store
*.swp
*.swo
*~
.idea/
.vscode/
*.sublime-project
*.sublime-workspace
# Turbo
.turbo/
# TypeScript
*.tsbuildinfo
# Terraform
*.tfstate
*.tfstate.*
.terraform/
.terraform.lock.hcl
# Kubernetes
*.kubeconfig
# Secrets
secrets/
*.key
*.pem
*.crt
!*.example.key
!*.example.pem
!*.example.crt
# SOPS
.sops.yaml
# Temporary files
tmp/
temp/
*.tmp
# Submodules (optional - uncomment if you want to track submodule changes)
# .git/modules/

43
.gitmodules vendored Normal file
View File

@@ -0,0 +1,43 @@
# Git submodules configuration for The Order monorepo
# Uncomment and update URLs when ready to add submodules
#
# [submodule "apps/portal-public"]
# path = apps/portal-public
# url = https://github.com/the-order/portal-public.git
#
# [submodule "apps/portal-internal"]
# path = apps/portal-internal
# url = https://github.com/the-order/portal-internal.git
#
# [submodule "apps/mcp-members"]
# path = apps/mcp-members
# url = https://github.com/the-order/mcp-members.git
#
# [submodule "apps/mcp-legal"]
# path = apps/mcp-legal
# url = https://github.com/the-order/mcp-legal.git
#
# [submodule "services/intake"]
# path = services/intake
# url = https://github.com/the-order/intake.git
#
# [submodule "services/identity"]
# path = services/identity
# url = https://github.com/the-order/identity.git
#
# [submodule "services/finance"]
# path = services/finance
# url = https://github.com/the-order/finance.git
#
# [submodule "services/dataroom"]
# path = services/dataroom
# url = https://github.com/the-order/dataroom.git
#
# [submodule "services/omnis-brand"]
# path = services/omnis-brand
# url = https://github.com/the-order/omnis-brand.git
#
# [submodule "services/arromis-brand"]
# path = services/arromis-brand
# url = https://github.com/the-order/arromis-brand.git

12
.prettierignore Normal file
View File

@@ -0,0 +1,12 @@
node_modules
dist
build
.next
coverage
*.min.js
*.min.css
package-lock.json
pnpm-lock.yaml
yarn.lock
.turbo

10
.prettierrc Normal file
View File

@@ -0,0 +1,10 @@
{
"semi": true,
"trailingComma": "es5",
"singleQuote": true,
"printWidth": 100,
"tabWidth": 2,
"useTabs": false,
"arrowParens": "avoid"
}

22
LICENSE Normal file
View File

@@ -0,0 +1,22 @@
MIT License
Copyright (c) 2024 The Order
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

35
NOTICE Normal file
View File

@@ -0,0 +1,35 @@
The Order
Copyright (c) 2024 The Order
This product includes software developed by The Order and its contributors.
Third-Party Components:
This product may include the following third-party components:
1. Node.js - https://nodejs.org/
Copyright Node.js contributors. All rights reserved.
2. TypeScript - https://www.typescriptlang.org/
Copyright Microsoft Corporation. All rights reserved.
3. React - https://react.dev/
Copyright Meta Platforms, Inc. and its affiliates.
4. Next.js - https://nextjs.org/
Copyright Vercel, Inc.
5. Turborepo - https://turbo.build/
Copyright Vercel, Inc.
6. pnpm - https://pnpm.io/
Copyright Zoltan Kochan and contributors.
For a complete list of third-party components and their licenses, please see the
package.json files in each workspace or run:
pnpm licenses list
Additional third-party notices and attributions may be found in individual
package directories.

155
QUICKSTART.md Normal file
View File

@@ -0,0 +1,155 @@
# Quickstart Guide
## Getting Started
### Prerequisites
- Node.js >= 18.0.0
- pnpm >= 8.0.0
- Docker (for local development services)
- Git
### Initial Setup
1. **Clone the repository**
```bash
git clone <repository-url>
cd the-order
```
2. **Install dependencies**
```bash
pnpm install
```
3. **Start development services** (PostgreSQL, Redis, OpenSearch)
```bash
docker-compose up -d
```
4. **Build all packages**
```bash
pnpm build
```
5. **Start development servers**
```bash
pnpm dev
```
### Development Workflow
1. **Work on a specific package**
```bash
cd packages/ui
pnpm dev
```
2. **Work on an app**
```bash
cd apps/portal-public
pnpm dev
```
3. **Work on a service**
```bash
cd services/intake
pnpm dev
```
### Running Tests
```bash
# Run all tests
pnpm test
# Run tests for a specific package
pnpm --filter @the-order/ui test
# Run tests in watch mode
pnpm --filter @the-order/ui test:watch
```
### Adding Git Submodules
To add external repositories as submodules:
```bash
./scripts/add-submodules.sh
```
Or manually:
```bash
git submodule add <repository-url> services/omnis-brand
git submodule update --init --recursive
```
### Environment Variables
1. Copy `.env.example` to `.env.local` in each workspace
2. Configure required environment variables
3. For secrets, use SOPS (see `docs/governance/SECURITY.md`)
### Building for Production
```bash
# Build all packages and apps
pnpm build
# Build specific workspace
pnpm --filter @the-order/portal-public build
```
### Deployment
See `infra/README.md` for infrastructure and deployment documentation.
## Next Steps
1. Review the [README.md](README.md) for detailed documentation
2. Read [CONTRIBUTING.md](docs/governance/CONTRIBUTING.md) for contribution guidelines
3. Check [SECURITY.md](docs/governance/SECURITY.md) for security policies
4. Explore the architecture in [docs/architecture/](docs/architecture/)
## Troubleshooting
### Issues with dependencies
```bash
# Clean and reinstall
pnpm clean
pnpm install
```
### Issues with Docker services
```bash
# Restart services
docker-compose restart
# View logs
docker-compose logs -f
# Reset services
docker-compose down -v
docker-compose up -d
```
### TypeScript errors
```bash
# Run type checking
pnpm type-check
# Clean build artifacts
pnpm clean
pnpm build
```
## Getting Help
- Check the [documentation](docs/)
- Open an [issue](.github/ISSUE_TEMPLATE/)
- Review [architecture decisions](docs/architecture/adrs/)

183
README.md
View File

@@ -1,2 +1,181 @@
# the-order-monorepo
# the-order-monorepo
# The Order
Monorepo for The Order - A comprehensive platform for legal, financial, and governance systems.
## Overview
The Order is a mono-repo containing all applications, services, packages, infrastructure, and documentation for managing legal documents, financial systems, identity management, datarooms, and member portals.
## Repository Structure
```
the-order/
├─ apps/ # End-user applications (web + portals)
│ ├─ portal-public/ # Public web presence
│ ├─ portal-internal/ # Internal web (admin/ops)
│ ├─ mcp-members/ # MCP for Members of The Order
│ └─ mcp-legal/ # MCP for Legal Purpose (matters, filings, attestations)
├─ services/ # Domain services (APIs, workers)
│ ├─ intake/ # "Drop content" tool: ingestion → OCR → classify → route
│ ├─ identity/ # Digital Systems (eIDAS/DID, verifiable credentials)
│ ├─ finance/ # Requisite financial systems (payments, ledgers, rates)
│ ├─ dataroom/ # Dataroom(s) for all deals (secure VDR, deal room APIs)
│ ├─ omnis-brand/ # Branded version of OMNIS (as a service/app shell)
│ └─ arromis-brand/ # Branded version of ARROMIS (as a service/app shell)
├─ packages/ # Shared libraries (versioned in the mono-repo)
│ ├─ ui/ # Design system (React/Tailwind components)
│ ├─ schemas/ # Zod/OpenAPI/JSON Schema contracts
│ ├─ auth/ # AuthZ/AuthN helpers (OIDC, wallet, eIDAS adapters)
│ ├─ storage/ # S3/GCS abstraction, object lifecycle, WORM mode
│ ├─ crypto/ # KMS/HSM client, key mgmt, signatures (eIDAS/DID)
│ ├─ workflows/ # Temporal/Step Functions definitions
│ └─ test-utils/ # Testing utilities and helpers
├─ infra/ # Everything-as-code
│ ├─ terraform/ # Cloud infra, secrets backends, KMS/HSM, PKI
│ ├─ k8s/ # Helm charts/overlays (dev/stage/prod)
│ ├─ gateways/ # API gateway / proxy / WAF configs
│ └─ cicd/ # Reusable CI templates, SBOM, signing
├─ docs/ # Living documentation
│ ├─ legal/ # Generated legal/treaty artifacts, policies
│ ├─ governance/ # Contribution, security, incident runbooks
│ ├─ architecture/ # ADRs, data flows, threat models
│ └─ product/ # Roadmaps, PRDs
└─ scripts/ # Utility scripts
```
## Quickstart
### Prerequisites
- Node.js >= 18.0.0
- pnpm >= 8.0.0
- Git
### Installation
```bash
# Clone the repository
git clone https://github.com/the-order/the-order.git
cd the-order
# Initialize submodules (if any)
git submodule update --init --recursive
# Install dependencies
pnpm install
# Build all packages
pnpm build
```
### Development
```bash
# Start all apps and services in development mode
pnpm dev
# Run specific workspace
pnpm --filter portal-public dev
# Run linting
pnpm lint
# Run type checking
pnpm type-check
# Run tests
pnpm test
```
### Environment Variables
Environment variables are managed using SOPS (Secrets Operations). See `docs/governance/SECURITY.md` for details.
1. Copy `.env.example` files to `.env.local` in each workspace
2. For encrypted secrets, use SOPS with age keys
3. Never commit plaintext secrets to the repository
Example:
```bash
# Decrypt and load secrets
sops -d secrets/dev.env.enc > .env.local
```
## Environments
- **dev**: Local development environment
- **stage**: Staging environment for testing
- **prod**: Production environment
Environment-specific configurations are in `infra/k8s/overlays/`.
## Git Submodules
Some services and apps are managed as Git submodules. To add submodules:
```bash
# See scripts/add-submodules.sh for examples
./scripts/add-submodules.sh
```
To update submodules:
```bash
git submodule update --remote
```
## Contribution
Please read [CONTRIBUTING.md](docs/governance/CONTRIBUTING.md) for details on our code of conduct and the process for submitting pull requests.
### Development Workflow
1. Create a feature branch from `main`
2. Make your changes
3. Run tests and linting: `pnpm test && pnpm lint`
4. Commit using [Conventional Commits](https://www.conventionalcommits.org/)
5. Push and create a Pull Request
### Code Standards
- TypeScript strict mode enabled
- ESLint + Prettier for code formatting
- All tests must pass
- Type checking must pass
- Follow the architecture decisions in `docs/architecture/adrs/`
## Security
See [SECURITY.md](docs/governance/SECURITY.md) for security policies and reporting procedures.
## Architecture
Architecture Decision Records (ADRs) are located in `docs/architecture/adrs/`. See [docs/architecture/README.md](docs/architecture/README.md) for more information.
## Release Process
Releases are managed via semantic versioning and automated through CI/CD:
1. Merge to `main` triggers automated tests and builds
2. Tagged releases trigger deployment to staging
3. Manual approval required for production deployment
4. See `.github/workflows/release.yml` for details
## License
See [LICENSE](LICENSE) for license information.
## Support
For questions and support, please open an issue in this repository.
## Related Documentation
- [Security Policy](docs/governance/SECURITY.md)
- [Contributing Guide](docs/governance/CONTRIBUTING.md)
- [Architecture Documentation](docs/architecture/README.md)
- [Product Roadmap](docs/product/README.md)

31
apps/mcp-legal/README.md Normal file
View File

@@ -0,0 +1,31 @@
# MCP Legal
MCP server for Legal Purpose (matters, filings, attestations).
## Features
- Matter management
- Signature orchestration
- Legal-grade record retention
- Treaty registry
## Development
```bash
# Install dependencies
pnpm install
# Run development server
pnpm dev
# Build
pnpm build
# Start production server
pnpm start
```
## Usage
Configure this MCP server in your MCP client to access legal management features.

View File

@@ -0,0 +1,24 @@
{
"name": "@the-order/mcp-legal",
"version": "0.1.0",
"private": true,
"description": "MCP server for Legal Purpose (matters, filings, attestations)",
"main": "./src/index.ts",
"scripts": {
"dev": "tsx watch src/index.ts",
"build": "tsc",
"start": "node dist/index.js",
"lint": "eslint src --ext .ts",
"type-check": "tsc --noEmit"
},
"dependencies": {
"@modelcontextprotocol/sdk": "^0.4.0"
},
"devDependencies": {
"@types/node": "^20.10.6",
"typescript": "^5.3.3",
"tsx": "^4.7.0",
"eslint": "^8.56.0"
}
}

View File

@@ -0,0 +1,32 @@
#!/usr/bin/env node
/**
 * MCP server for Legal Purpose (matters, filings, attestations)
 */
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';

// Server identity and advertised capabilities (tools are registered later).
const serverInfo = {
  name: 'mcp-legal',
  version: '0.1.0',
};
const serverOptions = {
  capabilities: {
    tools: {},
  },
};

const mcpServer = new Server(serverInfo, serverOptions);

// Wire the server to a stdio transport and begin serving requests.
// Logging goes to stderr because stdout carries the MCP protocol stream.
async function main(): Promise<void> {
  const transport = new StdioServerTransport();
  await mcpServer.connect(transport);
  console.error('MCP Legal server running on stdio');
}

main().catch((error) => {
  console.error('Error starting server:', error);
  process.exit(1);
});

View File

@@ -0,0 +1,10 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts"]
}

View File

@@ -0,0 +1,31 @@
# MCP Members
MCP server for Members of The Order.
## Features
- Membership management
- Credential wallet
- Role entitlements
- Dataroom access
## Development
```bash
# Install dependencies
pnpm install
# Run development server
pnpm dev
# Build
pnpm build
# Start production server
pnpm start
```
## Usage
Configure this MCP server in your MCP client to access member management features.

View File

@@ -0,0 +1,24 @@
{
"name": "@the-order/mcp-members",
"version": "0.1.0",
"private": true,
"description": "MCP server for Members of The Order",
"main": "./src/index.ts",
"scripts": {
"dev": "tsx watch src/index.ts",
"build": "tsc",
"start": "node dist/index.js",
"lint": "eslint src --ext .ts",
"type-check": "tsc --noEmit"
},
"dependencies": {
"@modelcontextprotocol/sdk": "^0.4.0"
},
"devDependencies": {
"@types/node": "^20.10.6",
"typescript": "^5.3.3",
"tsx": "^4.7.0",
"eslint": "^8.56.0"
}
}

View File

@@ -0,0 +1,32 @@
#!/usr/bin/env node
/**
 * MCP server for Members of The Order
 */
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';

// Server identity and advertised capabilities (tools are registered later).
const serverInfo = {
  name: 'mcp-members',
  version: '0.1.0',
};
const serverOptions = {
  capabilities: {
    tools: {},
  },
};

const mcpServer = new Server(serverInfo, serverOptions);

// Wire the server to a stdio transport and begin serving requests.
// Logging goes to stderr because stdout carries the MCP protocol stream.
async function main(): Promise<void> {
  const transport = new StdioServerTransport();
  await mcpServer.connect(transport);
  console.error('MCP Members server running on stdio');
}

main().catch((error) => {
  console.error('Error starting server:', error);
  process.exit(1);
});

View File

@@ -0,0 +1,10 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts"]
}

View File

@@ -0,0 +1,28 @@
# Portal Internal
Internal web portal for The Order (admin/ops).
## Development
```bash
# Install dependencies
pnpm install
# Run development server (port 3001)
pnpm dev
# Build for production
pnpm build
# Start production server
pnpm start
```
## Environment Variables
See `.env.example` for required environment variables.
## Authentication
This portal requires authentication. Configure OIDC/DID settings in environment variables.

View File

@@ -0,0 +1,8 @@
/** @type {import('next').NextConfig} */
const nextConfig = {
reactStrictMode: true,
transpilePackages: ['@the-order/ui', '@the-order/schemas', '@the-order/auth'],
};
module.exports = nextConfig;

View File

@@ -0,0 +1,29 @@
{
"name": "@the-order/portal-internal",
"version": "0.1.0",
"private": true,
"scripts": {
"dev": "next dev -p 3001",
"build": "next build",
"start": "next start -p 3001",
"lint": "next lint",
"type-check": "tsc --noEmit"
},
"dependencies": {
"next": "^14.0.4",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"@the-order/ui": "workspace:*",
"@the-order/schemas": "workspace:*",
"@the-order/auth": "workspace:*"
},
"devDependencies": {
"@types/node": "^20.10.6",
"@types/react": "^18.2.45",
"@types/react-dom": "^18.2.18",
"typescript": "^5.3.3",
"eslint": "^8.56.0",
"eslint-config-next": "^14.0.4"
}
}

View File

@@ -0,0 +1,19 @@
import type { Metadata } from 'next';

// Document-level metadata for the internal portal.
export const metadata: Metadata = {
  title: 'The Order - Internal Portal',
  description: 'Internal portal for The Order (admin/ops)',
};

// Root layout: wraps every page in the minimal html/body scaffold.
export default function RootLayout(props: { children: React.ReactNode }) {
  const { children } = props;
  return (
    <html lang="en">
      <body>{children}</body>
    </html>
  );
}

View File

@@ -0,0 +1,9 @@
// Landing page for the internal (admin/ops) portal.
export default function Home() {
  const heading = 'The Order - Internal Portal';
  return (
    <main>
      <h1>{heading}</h1>
      <p>Welcome to The Order internal portal (admin/ops).</p>
    </main>
  );
}

View File

@@ -0,0 +1,32 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"target": "ES2020",
"lib": ["dom", "dom.iterable", "esnext"],
"allowJs": true,
"skipLibCheck": true,
"strict": true,
"noEmit": true,
"esModuleInterop": true,
"module": "esnext",
"moduleResolution": "bundler",
"resolveJsonModule": true,
"isolatedModules": true,
"jsx": "preserve",
"incremental": true,
"plugins": [
{
"name": "next"
}
],
"paths": {
"@/*": ["./src/*"],
"@the-order/ui": ["../../packages/ui/src"],
"@the-order/schemas": ["../../packages/schemas/src"],
"@the-order/auth": ["../../packages/auth/src"]
}
},
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
"exclude": ["node_modules"]
}

View File

@@ -0,0 +1,24 @@
# Portal Public
Public web presence for The Order.
## Development
```bash
# Install dependencies
pnpm install
# Run development server
pnpm dev
# Build for production
pnpm build
# Start production server
pnpm start
```
## Environment Variables
See `.env.example` for required environment variables.

View File

@@ -0,0 +1,8 @@
/** @type {import('next').NextConfig} */
const nextConfig = {
reactStrictMode: true,
transpilePackages: ['@the-order/ui', '@the-order/schemas'],
};
module.exports = nextConfig;

View File

@@ -0,0 +1,28 @@
{
"name": "@the-order/portal-public",
"version": "0.1.0",
"private": true,
"scripts": {
"dev": "next dev",
"build": "next build",
"start": "next start",
"lint": "next lint",
"type-check": "tsc --noEmit"
},
"dependencies": {
"next": "^14.0.4",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"@the-order/ui": "workspace:*",
"@the-order/schemas": "workspace:*"
},
"devDependencies": {
"@types/node": "^20.10.6",
"@types/react": "^18.2.45",
"@types/react-dom": "^18.2.18",
"typescript": "^5.3.3",
"eslint": "^8.56.0",
"eslint-config-next": "^14.0.4"
}
}

View File

@@ -0,0 +1,19 @@
import type { Metadata } from 'next';

// Document-level metadata for the public portal.
export const metadata: Metadata = {
  title: 'The Order - Public Portal',
  description: 'Public portal for The Order',
};

// Root layout: wraps every page in the minimal html/body scaffold.
export default function RootLayout(props: { children: React.ReactNode }) {
  const { children } = props;
  return (
    <html lang="en">
      <body>{children}</body>
    </html>
  );
}

View File

@@ -0,0 +1,9 @@
// Landing page for the public portal.
export default function Home() {
  const heading = 'The Order - Public Portal';
  return (
    <main>
      <h1>{heading}</h1>
      <p>Welcome to The Order public portal.</p>
    </main>
  );
}

View File

@@ -0,0 +1,31 @@
{
  // App-local TypeScript config; shared options come from the repo root.
  "extends": "../../tsconfig.base.json",
  "compilerOptions": {
    "target": "ES2020",
    "lib": ["dom", "dom.iterable", "esnext"],
    "allowJs": true,
    "skipLibCheck": true,
    "strict": true,
    // Next.js owns emit; tsc is used for type checking only.
    "noEmit": true,
    "esModuleInterop": true,
    "module": "esnext",
    "moduleResolution": "bundler",
    "resolveJsonModule": true,
    "isolatedModules": true,
    // Leave JSX untouched for the Next.js compiler.
    "jsx": "preserve",
    "incremental": true,
    "plugins": [
      {
        "name": "next"
      }
    ],
    // Editor-time path aliases pointing at workspace package sources.
    "paths": {
      "@/*": ["./src/*"],
      "@the-order/ui": ["../../packages/ui/src"],
      "@the-order/schemas": ["../../packages/schemas/src"]
    }
  },
  "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
  "exclude": ["node_modules"]
}

64
docker-compose.yml Normal file
View File

@@ -0,0 +1,64 @@
# Local development stack for The Order.
# NOTE: the top-level `version` key is obsolete in the Compose specification
# and is ignored (with a warning) by Compose v2+, so it has been removed.
services:
  postgres:
    image: postgres:15-alpine
    environment:
      # Dev-only credentials; never reuse these outside local development.
      POSTGRES_USER: theorder
      POSTGRES_PASSWORD: theorder_dev
      POSTGRES_DB: theorder_dev
    ports:
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U theorder"]
      interval: 10s
      timeout: 5s
      retries: 5

  redis:
    image: redis:7-alpine
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5

  opensearch:
    image: opensearchproject/opensearch:2.11.0
    environment:
      - discovery.type=single-node
      # Keep the JVM heap small for local development.
      - "OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m"
      # Security plugin disabled for local dev only.
      - "DISABLE_SECURITY_PLUGIN=true"
    ports:
      - "9200:9200"
      - "9600:9600"
    volumes:
      - opensearch_data:/usr/share/opensearch/data
    healthcheck:
      test: ["CMD-SHELL", "curl -f http://localhost:9200/_cluster/health || exit 1"]
      interval: 30s
      timeout: 10s
      retries: 5

  opensearch-dashboards:
    image: opensearchproject/opensearch-dashboards:2.11.0
    ports:
      - "5601:5601"
    environment:
      - 'OPENSEARCH_HOSTS=["http://opensearch:9200"]'
      - "DISABLE_SECURITY_DASHBOARDS_PLUGIN=true"
    depends_on:
      opensearch:
        condition: service_healthy

volumes:
  postgres_data:
  redis_data:
  opensearch_data:

236
docs/architecture/README.md Normal file
View File

@@ -0,0 +1,236 @@
# Architecture Documentation
This directory contains architecture documentation for The Order, including Architecture Decision Records (ADRs), data flow diagrams, and threat models.
## Architecture Decision Records (ADRs)
Architecture Decision Records document important architectural decisions made in the project. They capture the context, decision, and consequences of key choices.
### ADR Template
When creating a new ADR, use the template in `adrs/README.md`.
### Current ADRs
- See `adrs/` directory for all ADRs
- ADRs are numbered sequentially: `adr-001-*.md`, `adr-002-*.md`, etc.
### ADR Process
1. Propose an architectural decision
2. Create ADR using template
3. Discuss with team
4. Record decision in ADR
5. Update as needed if decision changes
## System Architecture
### High-Level Overview
```
┌─────────────┐ ┌─────────────┐ ┌─────────────┐
│ Portal │────▶│ Services │────▶│ Databases │
│ Apps │ │ (APIs) │ │ & Storage │
└─────────────┘ └─────────────┘ └─────────────┘
│ │ │
└───────────────────┴────────────────────┘
┌──────┴──────┐
│ Identity │
│ & Auth │
└─────────────┘
```
### Core Services
1. **Intake Service**: Document ingestion, OCR, classification
2. **Identity Service**: eIDAS/DID, verifiable credentials
3. **Finance Service**: Payments, ledgers, rate management
4. **Dataroom Service**: Secure VDR, deal rooms
5. **MCP Services**: Member and legal management portals
### Data Flow
#### Content Intake Flow
```
Document Upload → Intake Service → OCR → Classification →
Storage (WORM) → Indexing → Workflow Trigger
```
#### Identity Flow
```
User Request → Identity Service → eIDAS/DID Verification →
VC Issuance → Wallet Storage → Access Grant
```
#### Dataroom Flow
```
Deal Creation → Dataroom Service → Document Upload →
Access Control (OPA) → Watermarking → Presigned URLs
```
## Technology Stack
### Frontend
- **Framework**: Next.js 14+
- **UI Library**: React 18+
- **Styling**: Tailwind CSS
- **Components**: shadcn/ui
- **State Management**: Zustand / React Query
### Backend
- **Runtime**: Node.js 18+ (TypeScript)
- **API Framework**: NestJS / Fastify
- **Workflow Engine**: Temporal / AWS Step Functions
- **Message Queue**: Redis / Kafka
### Infrastructure
- **Container Orchestration**: Kubernetes
- **Infrastructure as Code**: Terraform
- **CI/CD**: GitHub Actions
- **Monitoring**: OpenTelemetry + Grafana
- **Logging**: Structured logging (JSON)
### Data Stores
- **Primary Database**: PostgreSQL
- **Cache**: Redis
- **Search**: OpenSearch
- **Object Storage**: S3 / GCS (WORM mode)
- **Key Management**: KMS / HSM
### Security
- **Secrets Management**: SOPS + age / External Secrets
- **Identity**: OIDC + DID (did:key, did:web)
- **Signing**: eIDAS qualified signatures
- **Policy Engine**: OPA (Open Policy Agent)
- **SBOM**: Syft
- **Vulnerability Scanning**: Grype
- **Image Signing**: Cosign
## Design Principles
1. **Security First**: All systems designed with security in mind
2. **Immutable Infrastructure**: Infrastructure as code, version controlled
3. **Observability**: Comprehensive logging, metrics, and tracing
4. **Scalability**: Horizontal scaling, stateless services
5. **Resilience**: Graceful degradation, circuit breakers
6. **Compliance**: eIDAS, data retention, audit trails
## Threat Models
Threat models for each service are located in `threat-models/`. They use STRIDE methodology:
- **S**poofing
- **T**ampering
- **R**epudiation
- **I**nformation Disclosure
- **D**enial of Service
- **E**levation of Privilege
## Data Models
### Core Entities
- **User**: Member of The Order
- **Document**: Legal document, treaty, etc.
- **Deal**: Business transaction with dataroom
- **Matter**: Legal matter with associated documents
- **Identity**: Digital identity (eIDAS/DID)
- **Credential**: Verifiable credential
### Relationships
See entity relationship diagrams in `data-models/`.
## API Design
### REST APIs
- Follow RESTful principles
- Use OpenAPI/Swagger for documentation
- Version APIs: `/v1/`, `/v2/`, etc.
- Use proper HTTP status codes
- Include request/response examples
### GraphQL (if applicable)
- Use GraphQL for complex queries
- Implement proper authorization
- Use DataLoader for N+1 queries
## Deployment Architecture
### Environments
- **Development**: Local development
- **Staging**: Pre-production testing
- **Production**: Live environment
### Deployment Strategy
- **Blue-Green Deployment**: For zero-downtime updates
- **Canary Releases**: For gradual rollouts
- **Feature Flags**: For controlled feature releases
### Infrastructure Regions
- Primary region: EU (for eIDAS compliance)
- Secondary region: Backup/DR
- CDN: Global distribution for static assets
## Monitoring & Observability
### Metrics
- Application metrics (Prometheus)
- Infrastructure metrics (cloud provider)
- Business metrics (custom dashboards)
### Logging
- Structured logging (JSON)
- Centralized log aggregation
- Log retention policies
### Tracing
- Distributed tracing (OpenTelemetry)
- Request flow visualization
- Performance analysis
## Disaster Recovery
### Backup Strategy
- Database backups: Daily full, hourly incremental
- Object storage: Cross-region replication
- Configuration: Version controlled
### Recovery Procedures
- RTO (Recovery Time Objective): 4 hours
- RPO (Recovery Point Objective): 1 hour
- Runbooks in `docs/governance/runbooks/`
## Future Considerations
- Multi-cloud deployment
- Edge computing for low latency
- Machine learning for document classification
- Blockchain integration for notarization
## References
- [ADR Template](adrs/README.md)
- [Threat Models](threat-models/)
- [Data Models](data-models/)
- [API Documentation](../api/)

View File

@@ -0,0 +1,65 @@
# Architecture Decision Records
This directory contains Architecture Decision Records (ADRs) for The Order project.
## What is an ADR?
An Architecture Decision Record is a document that captures an important architectural decision made along with its context and consequences.
## ADR Format
Each ADR should follow this structure:
```markdown
# ADR-XXX: [Title]
## Status
[Proposed | Accepted | Deprecated | Superseded]
## Context
Describe the issue that is motivating this decision or change.
## Decision
State the architectural decision that is being made.
## Consequences
Describe the consequences, both positive and negative, of this decision.
## Alternatives Considered
List alternatives that were considered and why they were rejected.
## References
Links to related ADRs, issues, or documentation.
```
## ADR Numbering
ADRs are numbered sequentially:
- `adr-001-*.md`
- `adr-002-*.md`
- etc.
## Creating a New ADR
1. Create a new file: `adr-XXX-short-title.md`
2. Use the template above
3. Set status to "Proposed"
4. Create a PR for discussion
5. Update status to "Accepted" after approval
## ADR Lifecycle
1. **Proposed**: Initial proposal, under discussion
2. **Accepted**: Decision has been made and approved
3. **Deprecated**: Decision is no longer followed
4. **Superseded**: Replaced by a newer ADR
## Current ADRs
*No ADRs yet. Create the first one using the template above.*
## References
- [ADR Template](https://github.com/joelparkerhenderson/architecture-decision-record)
- [Documenting Architecture Decisions](https://cognitect.com/blog/2011/11/15/documenting-architecture-decisions)

View File

@@ -0,0 +1,235 @@
# Contributing to The Order
Thank you for your interest in contributing to The Order! This document provides guidelines and instructions for contributing.
## Code of Conduct
By participating in this project, you agree to maintain a respectful and inclusive environment for all contributors.
## Getting Started
1. Fork the repository
2. Clone your fork: `git clone https://github.com/your-username/the-order.git`
3. Create a branch: `git checkout -b feature/your-feature-name`
4. Install dependencies: `pnpm install`
5. Make your changes
6. Test your changes: `pnpm test && pnpm lint`
7. Commit your changes (see [Commit Guidelines](#commit-guidelines))
8. Push to your fork: `git push origin feature/your-feature-name`
9. Open a Pull Request
## Development Setup
### Prerequisites
- Node.js >= 18.0.0
- pnpm >= 8.0.0
- Docker (for local services)
- Git
### Local Development
```bash
# Install dependencies
pnpm install
# Start development servers
pnpm dev
# Run tests
pnpm test
# Run linting
pnpm lint
# Type check
pnpm type-check
```
### Environment Setup
1. Copy `.env.example` to `.env.local` in the workspace you're working on
2. Configure required environment variables
3. For secrets, use SOPS (see [Security Policy](SECURITY.md))
## Commit Guidelines
We use [Conventional Commits](https://www.conventionalcommits.org/) for commit messages:
```
<type>(<scope>): <subject>
<body>
<footer>
```
### Types
- `feat`: New feature
- `fix`: Bug fix
- `docs`: Documentation changes
- `style`: Code style changes (formatting, etc.)
- `refactor`: Code refactoring
- `test`: Test additions or changes
- `chore`: Build process or auxiliary tool changes
- `perf`: Performance improvements
- `ci`: CI/CD changes
- `revert`: Revert a previous commit
### Examples
```
feat(auth): add OIDC provider support
Implement OAuth2/OIDC flow with support for multiple providers.
Adds configuration for eIDAS integration.
Closes #123
```
```
fix(intake): resolve OCR parsing issue with PDFs
Fixes character encoding problems when processing multi-page PDFs.
Adds proper error handling for corrupted documents.
Fixes #456
```
## Pull Request Process
1. **Update Documentation**: Update relevant documentation for your changes
2. **Add Tests**: Include tests for new features or bug fixes
3. **Update Changelog**: Add entry to CHANGELOG.md (if applicable)
4. **Ensure Tests Pass**: All CI checks must pass
5. **Request Review**: Request review from relevant code owners (see CODEOWNERS)
### PR Title Format
Use the same format as commit messages:
```
feat(scope): brief description
```
### PR Description Template
```markdown
## Description
Brief description of changes
## Type of Change
- [ ] Bug fix
- [ ] New feature
- [ ] Breaking change
- [ ] Documentation update
## Testing
How was this tested?
## Checklist
- [ ] Code follows style guidelines
- [ ] Self-review completed
- [ ] Comments added for complex code
- [ ] Documentation updated
- [ ] Tests added/updated
- [ ] All tests pass
- [ ] No new warnings
```
## Code Style
### TypeScript
- Use strict TypeScript configuration
- Prefer type over interface for unions/intersections
- Use explicit return types for public functions
- Avoid `any` type
### Formatting
- Use Prettier for code formatting
- Run `pnpm format` before committing
- ESLint rules must pass
### Naming Conventions
- Variables: `camelCase`
- Functions: `camelCase`
- Classes: `PascalCase`
- Constants: `UPPER_SNAKE_CASE`
- Files: `kebab-case` for utilities, `PascalCase` for components
## Testing
### Unit Tests
- Write unit tests for all new features
- Aim for >80% code coverage
- Use descriptive test names
- Follow AAA pattern (Arrange, Act, Assert)
### Integration Tests
- Write integration tests for API endpoints
- Test error cases and edge cases
- Use test fixtures from `packages/test-utils`
### E2E Tests
- E2E tests for critical user flows
- Use Playwright or similar framework
- Run in CI/CD pipeline
## Documentation
### Code Documentation
- Document all public APIs
- Use JSDoc for functions and classes
- Include examples for complex usage
### Architecture Documentation
- Update ADRs for significant architectural decisions
- Document new services in `docs/architecture/`
- Update data flow diagrams if applicable
## Code Review
### For Authors
- Keep PRs focused and small
- Respond to feedback promptly
- Be open to suggestions
- Update PR based on feedback
### For Reviewers
- Be constructive and respectful
- Focus on code, not the person
- Suggest improvements, don't just point out issues
- Approve when satisfied
## Release Process
1. Create release branch from `main`
2. Update version numbers
3. Update CHANGELOG.md
4. Create release PR
5. After approval, tag release
6. CI/CD automatically deploys
## Questions?
- Open an issue for questions
- Check existing documentation
- Ask in discussions
## Additional Resources
- [Architecture Documentation](docs/architecture/README.md)
- [Security Policy](SECURITY.md)
- [Code of Conduct](CODE_OF_CONDUCT.md)

105
docs/governance/SECURITY.md Normal file
View File

@@ -0,0 +1,105 @@
# Security Policy
## Supported Versions
We currently support the following versions with security updates:
| Version | Supported |
| ------- | ------------------ |
| 1.0.x | :white_check_mark: |
| < 1.0 | :x: |
## Reporting a Vulnerability
We take security vulnerabilities seriously. If you discover a security vulnerability, please follow these steps:
1. **Do NOT** open a public GitHub issue
2. Email security details to: security@the-order.org (or your security contact)
3. Include:
- Description of the vulnerability
- Steps to reproduce
- Potential impact
- Suggested fix (if any)
### Response Timeline
- **Initial Response**: Within 48 hours
- **Status Update**: Within 7 days
- **Fix Timeline**: Depends on severity (see below)
### Severity Levels
- **Critical**: Remote code execution, authentication bypass, data breach
- Fix timeline: 24-48 hours
- **High**: Privilege escalation, sensitive data exposure
- Fix timeline: 7 days
- **Medium**: Information disclosure, denial of service
- Fix timeline: 30 days
- **Low**: Best practices, defense in depth
- Fix timeline: Next release cycle
## Security Practices
### Secrets Management
- All secrets must be encrypted using SOPS
- Never commit plaintext secrets
- Use environment variables for configuration
- Rotate secrets regularly
- Use short-lived tokens via OIDC
### Code Security
- All code must pass security linting (ESLint security plugins)
- Dependencies are scanned for vulnerabilities (Grype)
- Container images are signed (Cosign)
- SBOM generation for all artifacts (Syft)
### Infrastructure Security
- Immutable infrastructure via Terraform
- Secrets stored in KMS/HSM
- Network policies enforced via Kubernetes
- API gateway with WAF rules
- Regular security audits and penetration testing
### Access Control
- Least privilege principle
- Quarterly access reviews
- MFA required for all production access
- Audit logs for all sensitive operations
### Compliance
- eIDAS compliance for identity services
- Data retention policies per jurisdiction
- WORM storage for legal documents
- Audit trails for all financial transactions
## Security Updates
Security updates are released as:
- **Hotfixes**: For critical vulnerabilities
- **Patch releases**: For high/medium severity issues
- **Regular releases**: For low severity and general improvements
## Disclosure Policy
- Vulnerabilities are disclosed after a fix is available
- Coordinated disclosure with responsible parties
- CVE assignment for eligible vulnerabilities
- Security advisories published in `docs/governance/security-advisories/`
## Contact
- **Security Email**: security@the-order.org
- **PGP Key**: [Link to public key]
- **Security Team**: @the-order/security-team
## Additional Resources
- [Threat Models](docs/architecture/threat-models/)
- [Incident Response Runbook](docs/governance/runbooks/incident-response.md)
- [Security Checklist](docs/governance/security-checklist.md)

15
docs/legal/README.md Normal file
View File

@@ -0,0 +1,15 @@
# Legal Documentation
Generated legal/treaty artifacts, policies, and legal documentation.
## Contents
- **Treaties** - Treaty documents and artifacts
- **Policies** - Legal policies and procedures
- **Compliance** - Compliance documentation
- **Attestations** - Legal attestations and certifications
## Note
This directory contains legal documents and should be treated with appropriate security and access controls.

27
docs/product/README.md Normal file
View File

@@ -0,0 +1,27 @@
# Product Documentation
Product documentation, roadmaps, and PRDs for The Order.
## Contents
- **Roadmaps** - Product roadmaps and feature planning
- **PRDs** - Product Requirements Documents
- **User Guides** - End-user documentation
- **API Documentation** - API reference and guides
## Roadmaps
See `roadmaps/` directory for product roadmaps.
## PRDs
See `prds/` directory for Product Requirements Documents.
## User Guides
See `user-guides/` directory for end-user documentation.
## API Documentation
See `api/` directory for API documentation and reference.

58
infra/README.md Normal file
View File

@@ -0,0 +1,58 @@
# Infrastructure
Infrastructure as code for The Order.
## Directory Structure
- `terraform/` - Terraform configurations
- `k8s/` - Kubernetes manifests and Helm charts
- `gateways/` - API gateway and proxy configurations
- `cicd/` - CI/CD templates and scripts
## Getting Started
1. **Terraform**: Set up cloud infrastructure
```bash
cd terraform
terraform init
terraform plan
terraform apply
```
2. **Kubernetes**: Deploy applications
```bash
cd k8s
kubectl apply -k overlays/dev
```
3. **Gateway**: Configure API gateway
```bash
cd gateways
# Apply gateway configuration
```
## Environments
- **Development**: Local development environment
- **Staging**: Pre-production environment
- **Production**: Live production environment
## Security
- Secrets management via SOPS and KMS
- Network policies and security groups
- WAF rules for API protection
- TLS/SSL certificates
- Regular security audits
## Monitoring
- Prometheus for metrics
- Grafana for dashboards
- OpenTelemetry for tracing
- ELK/OpenSearch for logging
## Documentation
See individual README files in each subdirectory for more details.

42
infra/cicd/README.md Normal file
View File

@@ -0,0 +1,42 @@
# CI/CD Templates and Configuration
Reusable CI/CD templates and configuration for The Order.
## Structure
- `templates/` - Reusable CI/CD templates
- `scripts/` - CI/CD helper scripts
- `config/` - CI/CD configuration files
## Templates
- `ci-template.yml` - Base CI template
- `deploy-template.yml` - Deployment template
- `release-template.yml` - Release template
## Features
- Automated testing
- Security scanning (Trivy, Grype)
- SBOM generation (Syft)
- Image signing (Cosign)
- Deployment automation
- Release automation
## Usage
Copy templates to `.github/workflows/` and customize for your needs.
## Security
- All images are signed with Cosign
- SBOMs are generated for all artifacts
- Vulnerability scanning on every build
- Secrets are managed via GitHub Secrets or External Secrets
## Deployment
- Development: Automatic deployment on push to `develop`
- Staging: Automatic deployment on push to `main`
- Production: Manual approval required for deployment

View File

@@ -0,0 +1,87 @@
# CI/CD Template
# This is a reusable template for CI/CD pipelines
name: CI Template

on:
  push:
    branches: [main, develop]
  pull_request:
    branches: [main, develop]

# Least-privilege default; the security-scan job widens this for SARIF upload.
permissions:
  contents: read

jobs:
  build-and-test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Setup pnpm
        uses: pnpm/action-setup@v2
        with:
          version: 8
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '18'
          cache: 'pnpm'
      - name: Install dependencies
        run: pnpm install --frozen-lockfile
      - name: Lint
        run: pnpm lint
      - name: Type check
        run: pnpm type-check
      - name: Test
        run: pnpm test
      - name: Build
        run: pnpm build

  security-scan:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      # Required for uploading SARIF results to GitHub code scanning.
      security-events: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Run Trivy vulnerability scanner
        # Pin to a release tag instead of the mutable `master` branch.
        uses: aquasecurity/trivy-action@0.24.0
        with:
          scan-type: 'fs'
          scan-ref: '.'
          format: 'sarif'
          output: 'trivy-results.sarif'
      - name: Upload Trivy results to GitHub Security
        # codeql-action v2 is deprecated; v3 is the supported major version.
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: 'trivy-results.sarif'

  sbom:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Install Syft
        uses: anchore/sbom-action/download-syft@v0
        with:
          syft-version: latest
      - name: Generate SBOM
        run: |
          syft packages dir:. -o spdx-json > sbom.spdx.json
          syft packages dir:. -o cyclonedx-json > sbom.cyclonedx.json
      - name: Upload SBOM artifacts
        # upload-artifact v3 was deprecated and disabled by GitHub; use v4.
        uses: actions/upload-artifact@v4
        with:
          name: sbom
          path: |
            sbom.spdx.json
            sbom.cyclonedx.json

45
infra/gateways/README.md Normal file
View File

@@ -0,0 +1,45 @@
# API Gateway Configuration
Configuration for API gateway, proxy, and WAF.
## Components
- **NGINX** - Reverse proxy and load balancer
- **API Gateway** - Cloud provider API gateway (AWS API Gateway, GCP API Gateway, etc.)
- **WAF** - Web Application Firewall rules
## Configuration Files
- `nginx.conf` - NGINX configuration
- `api-gateway.yaml` - API Gateway configuration (cloud-specific)
- `waf-rules.yaml` - WAF rules configuration
## Features
- Rate limiting
- Request routing
- SSL/TLS termination
- Authentication/Authorization
- Request/Response transformation
- Logging and monitoring
## Policies
- OPA (Open Policy Agent) policies in `policies/` directory
- Rate limiting policies
- Access control policies
- Data validation policies
## Usage
```bash
# Deploy NGINX configuration
kubectl apply -f nginx-configmap.yaml
# Update API Gateway
# (Cloud provider specific commands)
# Apply WAF rules
# (Cloud provider specific commands)
```

74
infra/gateways/nginx.conf Normal file
View File

@@ -0,0 +1,74 @@
# NGINX configuration for API Gateway
# This is a template - customize for your needs
# NOTE: intended to be included from the `http` context.

# Shared rate-limit zone keyed by client address.
# `limit_req_zone` is only valid in the `http` context, so it must be
# declared OUTSIDE the `server` block (the original placement inside
# `server` fails `nginx -t`).
limit_req_zone $binary_remote_addr zone=api_limit:10m rate=10r/s;

upstream intake {
    server intake-service:4001;
}

upstream identity {
    server identity-service:4002;
}

upstream finance {
    server finance-service:4003;
}

upstream dataroom {
    server dataroom-service:4004;
}

server {
    listen 80;
    server_name api.the-order.local;

    # Intake service
    location /api/intake/ {
        limit_req zone=api_limit burst=20 nodelay;
        proxy_pass http://intake/;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }

    # Identity service
    location /api/identity/ {
        limit_req zone=api_limit burst=20 nodelay;
        proxy_pass http://identity/;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }

    # Finance service
    location /api/finance/ {
        limit_req zone=api_limit burst=20 nodelay;
        proxy_pass http://finance/;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }

    # Dataroom service
    location /api/dataroom/ {
        limit_req zone=api_limit burst=20 nodelay;
        proxy_pass http://dataroom/;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }

    # Health check
    location /health {
        access_log off;
        return 200 "healthy\n";
        add_header Content-Type text/plain;
    }
}

67
infra/k8s/README.md Normal file
View File

@@ -0,0 +1,67 @@
# Kubernetes Configuration
Kubernetes manifests and Helm charts for The Order.
## Structure
- `base/` - Base Kubernetes manifests
- `overlays/` - Environment-specific overlays (dev, stage, prod)
- `charts/` - Helm charts (if using Helm)
## Usage
### Using Kustomize
```bash
# Build dev environment
kubectl kustomize overlays/dev
# Apply dev environment
kubectl apply -k overlays/dev
# Build prod environment
kubectl kustomize overlays/prod
# Apply prod environment
kubectl apply -k overlays/prod
```
### Using Helm
```bash
# Install chart
helm install the-order charts/the-order -f charts/the-order/values-dev.yaml
# Upgrade chart
helm upgrade the-order charts/the-order -f charts/the-order/values-prod.yaml
```
## Namespaces
- `the-order-dev` - Development environment
- `the-order-stage` - Staging environment
- `the-order-prod` - Production environment
## Services
Each service has its own deployment, service, and ingress configuration:
- Intake service
- Identity service
- Finance service
- Dataroom service
- Portal applications
## Secrets Management
Secrets are managed using:
- External Secrets Operator
- SOPS-encrypted secrets
- Cloud provider secrets managers
## Monitoring
- Prometheus for metrics
- Grafana for dashboards
- OpenTelemetry for tracing
- ELK/OpenSearch for logging

View File

@@ -0,0 +1,10 @@
# Base (environment-agnostic) configuration for The Order.
# Overlays override these values (see the dev/prod kustomizations).
apiVersion: v1
kind: ConfigMap
metadata:
  name: the-order-config
  namespace: the-order
data:
  # Add configuration data here
  LOG_LEVEL: "info"
  ENVIRONMENT: "base"

View File

@@ -0,0 +1,16 @@
# Base kustomization: shared resources composed by the environment overlays.
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization

namespace: the-order

resources:
  - namespace.yaml
  - configmap.yaml
  - secrets.yaml
  # Add service-specific resources
  # - intake/
  # - identity/
  # - finance/
  # - dataroom/
View File

@@ -0,0 +1,8 @@
# Base namespace for The Order resources.
apiVersion: v1
kind: Namespace
metadata:
  name: the-order
  labels:
    name: the-order
    environment: base

View File

@@ -0,0 +1,12 @@
# Placeholder Secret manifest; real values are injected out-of-band.
apiVersion: v1
kind: Secret
metadata:
  name: the-order-secrets
  namespace: the-order
type: Opaque
stringData:
  # Secrets should be managed via External Secrets Operator or SOPS
  # This is a template - do not commit actual secrets
  # DATABASE_URL: "postgresql://..."
  # API_KEY: "..."
  # NOTE(review): with every entry commented out, `stringData` parses as
  # null; some tooling rejects that — confirm, or use `stringData: {}`.

View File

@@ -0,0 +1,21 @@
# Development overlay: deploys the base into the-order-dev with debug logging.
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization

namespace: the-order-dev

resources:
  - ../../base

patches:
  # NOTE(review): configmap-patch.yaml is referenced but not present in this
  # commit — confirm the file exists or drop this patch entry.
  - path: configmap-patch.yaml
    target:
      kind: ConfigMap
      name: the-order-config

configMapGenerator:
  # NOTE(review): `behavior: merge` requires the base ConfigMap to also be
  # produced by a generator; the base declares it as a plain resource —
  # verify this overlay builds with `kubectl kustomize`.
  - name: the-order-config
    behavior: merge
    literals:
      - ENVIRONMENT=dev
      - LOG_LEVEL=debug

View File

@@ -0,0 +1,31 @@
# Production overlay: info-level logging and scaled-out service replicas.
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization

namespace: the-order-prod

resources:
  - ../../base

patches:
  # NOTE(review): configmap-patch.yaml is referenced but not present in this
  # commit — confirm the file exists or drop this patch entry.
  - path: configmap-patch.yaml
    target:
      kind: ConfigMap
      name: the-order-config

configMapGenerator:
  # NOTE(review): `behavior: merge` requires the base ConfigMap to also be
  # produced by a generator; the base declares it as a plain resource —
  # verify this overlay builds with `kubectl kustomize`.
  - name: the-order-config
    behavior: merge
    literals:
      - ENVIRONMENT=prod
      - LOG_LEVEL=info

# Replica counts applied on top of the base deployments.
replicas:
  - name: intake
    count: 3
  - name: identity
    count: 3
  - name: finance
    count: 2
  - name: dataroom
    count: 2

49
infra/terraform/README.md Normal file
View File

@@ -0,0 +1,49 @@
# Terraform Infrastructure
Terraform configuration for The Order infrastructure.
## Structure
- `main.tf` - Main Terraform configuration
- `variables.tf` - Variable definitions
- `outputs.tf` - Output definitions
- `modules/` - Reusable Terraform modules
## Usage
```bash
# Initialize Terraform
terraform init
# Plan changes
terraform plan
# Apply changes
terraform apply
# Destroy infrastructure
terraform destroy
```
## Environments
- `dev/` - Development environment
- `stage/` - Staging environment
- `prod/` - Production environment
## Resources
- Kubernetes cluster
- Database (PostgreSQL)
- Object storage (S3/GCS)
- KMS/HSM for key management
- Load balancers
- Network configuration
## Secrets Management
Secrets are managed using:
- SOPS for encrypted secrets
- Cloud KMS for key management
- External Secrets Operator for Kubernetes

46
infra/terraform/main.tf Normal file
View File

@@ -0,0 +1,46 @@
# Terraform configuration for The Order infrastructure
# This is a template - customize for your cloud provider
#
# Input variables are declared in variables.tf and outputs in outputs.tf.
# The `environment` / `aws_region` variable blocks and the `environment`
# output previously duplicated here have been removed: Terraform rejects
# duplicate variable/output declarations within a single module.

terraform {
  required_version = ">= 1.5.0"

  required_providers {
    # Add your cloud provider(s) here
    # Example for AWS:
    # aws = {
    #   source  = "hashicorp/aws"
    #   version = "~> 5.0"
    # }
  }

  # Configure backend for state management
  # backend "s3" {
  #   bucket = "the-order-terraform-state"
  #   key    = "terraform.tfstate"
  #   region = "us-east-1"
  # }
}

# Provider configuration
# provider "aws" {
#   region = var.aws_region
# }

View File

@@ -0,0 +1,24 @@
# Terraform outputs for The Order infrastructure
# Canonical output declarations for this module.

# Echoes the selected environment (dev/stage/prod).
output "environment" {
  description = "Environment name"
  value       = var.environment
}

# Echoes the project name used for resource naming.
output "project_name" {
  description = "Project name"
  value       = var.project_name
}

# Add more outputs as needed
# Example:
# output "kubernetes_cluster_endpoint" {
#   description = "Kubernetes cluster endpoint"
#   value       = module.kubernetes.cluster_endpoint
# }
# output "database_endpoint" {
#   description = "Database endpoint"
#   value       = module.database.endpoint
# }

View File

@@ -0,0 +1,41 @@
# Terraform variables for The Order infrastructure
# Canonical input-variable declarations for this module.

# Required: no default, so callers must pick one of the validated values.
variable "environment" {
  description = "Environment name (dev, stage, prod)"
  type        = string

  validation {
    condition     = contains(["dev", "stage", "prod"], var.environment)
    error_message = "Environment must be dev, stage, or prod."
  }
}

variable "aws_region" {
  description = "AWS region"
  type        = string
  default     = "us-east-1"
}

# Used as a prefix/tag for resource naming.
variable "project_name" {
  description = "Project name"
  type        = string
  default     = "the-order"
}

# Empty default means "no custom domain configured".
variable "domain_name" {
  description = "Domain name for the application"
  type        = string
  default     = ""
}

variable "enable_monitoring" {
  description = "Enable monitoring and observability"
  type        = bool
  default     = true
}

variable "enable_logging" {
  description = "Enable centralized logging"
  type        = bool
  default     = true
}

35
package.json Normal file
View File

@@ -0,0 +1,35 @@
{
"name": "the-order-monorepo",
"version": "0.1.0",
"private": true,
"description": "The Order - Monorepo for all applications, services, and packages",
"scripts": {
"build": "turbo run build",
"dev": "turbo run dev",
"lint": "turbo run lint",
"type-check": "turbo run type-check",
"test": "turbo run test",
"clean": "turbo run clean && rm -rf node_modules",
"format": "prettier --write \"**/*.{ts,tsx,js,jsx,json,md,yaml,yml}\"",
"format:check": "prettier --check \"**/*.{ts,tsx,js,jsx,json,md,yaml,yml}\"",
"prepare": "husky install || true"
},
"devDependencies": {
"@turbo/gen": "^1.11.0",
"prettier": "^3.1.1",
"turbo": "^1.11.0",
"typescript": "^5.3.3",
"husky": "^8.0.3"
},
"engines": {
"node": ">=18.0.0",
"pnpm": ">=8.0.0"
},
"packageManager": "pnpm@8.15.0",
"workspaces": [
"apps/*",
"services/*",
"packages/*"
]
}

20
packages/auth/README.md Normal file
View File

@@ -0,0 +1,20 @@
# @the-order/auth
Authentication and authorization helpers for The Order.
## Features
- OIDC/OAuth2 support
- DID (Decentralized Identifier) support
- eIDAS integration
## Usage
```typescript
import { OIDCProvider, DIDResolver, EIDASProvider } from '@the-order/auth';
const oidc = new OIDCProvider(config);
const didResolver = new DIDResolver();
const eidas = new EIDASProvider(config);
```

View File

@@ -0,0 +1,23 @@
{
"name": "@the-order/auth",
"version": "0.1.0",
"private": true,
"description": "Authentication and authorization helpers for The Order",
"main": "./src/index.ts",
"types": "./src/index.ts",
"scripts": {
"build": "tsc",
"dev": "tsc --watch",
"lint": "eslint src --ext .ts",
"type-check": "tsc --noEmit"
},
"dependencies": {
"jsonwebtoken": "^9.0.2"
},
"devDependencies": {
"@types/jsonwebtoken": "^9.0.5",
"@types/node": "^20.10.6",
"typescript": "^5.3.3"
}
}

34
packages/auth/src/did.ts Normal file
View File

@@ -0,0 +1,34 @@
/**
* DID (Decentralized Identifier) helpers
*/
/** Resolved representation of a DID (W3C DID Document shape used here). */
export interface DIDDocument {
id: string;
'@context': string[];
verificationMethod: VerificationMethod[];
authentication: string[];
}
/** One verification-method entry referenced by a DID Document. */
export interface VerificationMethod {
id: string;
type: string;
controller: string;
publicKeyMultibase?: string;
}
/**
* Resolves DIDs to DID Documents and verifies signatures against them.
* Both methods are stubs: they currently reject with "Not implemented".
*/
export class DIDResolver {
/** Resolve `did` to its DID Document. Stub — always rejects. */
async resolve(did: string): Promise<DIDDocument> {
// Implementation for DID resolution
throw new Error('Not implemented');
}
/** Verify `signature` over `message` for the given `did`. Stub — always rejects. */
async verifySignature(
did: string,
message: string,
signature: string
): Promise<boolean> {
// Implementation for signature verification
throw new Error('Not implemented');
}
}

View File

@@ -0,0 +1,29 @@
/**
* eIDAS (electronic IDentification, Authentication and trust Services) helpers
*/
/** Connection settings for an external eIDAS trust-service provider. */
export interface EIDASConfig {
providerUrl: string;
apiKey: string;
}
/** A signature returned by the provider, with its certificate and timestamp. */
export interface EIDASSignature {
signature: string;
certificate: string;
timestamp: Date;
}
/**
* Client for requesting and verifying eIDAS signatures.
* Both methods are stubs: they currently reject with "Not implemented".
*/
export class EIDASProvider {
constructor(private config: EIDASConfig) {}
/** Request a signature over `document` from the provider. Stub — always rejects. */
async requestSignature(document: string): Promise<EIDASSignature> {
// Implementation for eIDAS signature request
throw new Error('Not implemented');
}
/** Verify a previously issued signature. Stub — always rejects. */
async verifySignature(signature: EIDASSignature): Promise<boolean> {
// Implementation for eIDAS signature verification
throw new Error('Not implemented');
}
}

View File

@@ -0,0 +1,8 @@
/**
* The Order Auth Package
*/
export * from './oidc';
export * from './did';
export * from './eidas';

31
packages/auth/src/oidc.ts Normal file
View File

@@ -0,0 +1,31 @@
/**
* OIDC/OAuth2 helpers
*/
/** Client registration values for an OIDC/OAuth2 authorization-code flow. */
export interface OIDCConfig {
issuer: string;
clientId: string;
clientSecret: string;
redirectUri: string;
}
/**
 * Minimal OIDC client: builds the authorization URL for the
 * authorization-code flow and (eventually) exchanges codes for tokens.
 */
export class OIDCProvider {
  constructor(private config: OIDCConfig) {}

  /**
   * Build the `/authorize` URL with the standard code-flow parameters
   * (client_id, redirect_uri, response_type=code, openid scopes, state).
   */
  async getAuthorizationUrl(state: string): Promise<string> {
    const { issuer, clientId, redirectUri } = this.config;
    const query = new URLSearchParams();
    query.set('client_id', clientId);
    query.set('redirect_uri', redirectUri);
    query.set('response_type', 'code');
    query.set('scope', 'openid profile email');
    query.set('state', state);
    return `${issuer}/authorize?${query.toString()}`;
  }

  /** Exchange an authorization code for a token. Stub — always rejects. */
  async exchangeCodeForToken(code: string): Promise<string> {
    // Implementation for token exchange
    throw new Error('Not implemented');
  }
}

View File

@@ -0,0 +1,10 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts"]
}

22
packages/crypto/README.md Normal file
View File

@@ -0,0 +1,22 @@
# @the-order/crypto
KMS/HSM client, key management, and signatures for eIDAS/DID.
## Usage
```typescript
import { KMSClient, SignatureService } from '@the-order/crypto';
const kms = new KMSClient(config);
const signatureService = new SignatureService(kms);
const signature = await signatureService.sign(data, options);
```
## Features
- KMS/HSM integration
- Key management
- Digital signatures
- eIDAS/DID support

View File

@@ -0,0 +1,22 @@
{
"name": "@the-order/crypto",
"version": "0.1.0",
"private": true,
"description": "KMS/HSM client, key management, and signatures for eIDAS/DID",
"main": "./src/index.ts",
"types": "./src/index.ts",
"scripts": {
"build": "tsc",
"dev": "tsc --watch",
"lint": "eslint src --ext .ts",
"type-check": "tsc --noEmit"
},
"dependencies": {
"@aws-sdk/client-kms": "^3.490.0"
},
"devDependencies": {
"@types/node": "^20.10.6",
"typescript": "^5.3.3"
}
}

View File

@@ -0,0 +1,7 @@
/**
* The Order Crypto Package
*/
export * from './kms';
export * from './signature';

View File

@@ -0,0 +1,34 @@
/**
* KMS/HSM client for key management
*/
/** Selects the backing key-management provider and the key to use. */
export interface KMSConfig {
provider: 'aws' | 'gcp' | 'azure' | 'hsm';
keyId: string;
region?: string;
}
/**
* Uniform client over KMS/HSM backends for encrypt/decrypt/sign/verify.
* All four methods are stubs: they currently reject with "Not implemented".
*/
export class KMSClient {
constructor(private config: KMSConfig) {}
/** Encrypt `plaintext` with the configured key. Stub — always rejects. */
async encrypt(plaintext: Buffer): Promise<Buffer> {
// Implementation for encryption
throw new Error('Not implemented');
}
/** Decrypt `ciphertext` with the configured key. Stub — always rejects. */
async decrypt(ciphertext: Buffer): Promise<Buffer> {
// Implementation for decryption
throw new Error('Not implemented');
}
/** Produce a signature over `data`. Stub — always rejects. */
async sign(data: Buffer): Promise<Buffer> {
// Implementation for signing
throw new Error('Not implemented');
}
/** Verify `signature` over `data`. Stub — always rejects. */
async verify(data: Buffer, signature: Buffer): Promise<boolean> {
// Implementation for signature verification
throw new Error('Not implemented');
}
}

View File

@@ -0,0 +1,33 @@
/**
* Signature utilities for eIDAS/DID
*/
import { KMSClient } from './kms';
/** Requested signing algorithm and key; see review note on usage below. */
export interface SignatureOptions {
algorithm: 'RS256' | 'ES256' | 'EdDSA';
keyId: string;
}
/**
* Thin signing/verification facade over a KMSClient.
* NOTE(review): `options` is currently ignored by sign/verify — both calls
* delegate to the KMS client without passing algorithm or keyId. Confirm
* whether KMSClient is meant to grow per-call key/algorithm selection.
*/
export class SignatureService {
constructor(private kms: KMSClient) {}
/** Sign raw bytes via the KMS (options currently unused — see class note). */
async sign(data: Buffer, options: SignatureOptions): Promise<Buffer> {
return this.kms.sign(data);
}
/** Verify a signature via the KMS (options currently unused — see class note). */
async verify(
data: Buffer,
signature: Buffer,
options: SignatureOptions
): Promise<boolean> {
return this.kms.verify(data, signature);
}
/** JSON.stringify `data`, sign the UTF-8 bytes, return base64 signature. */
async signJSON(data: unknown, options: SignatureOptions): Promise<string> {
const jsonString = JSON.stringify(data);
const signature = await this.sign(Buffer.from(jsonString), options);
return signature.toString('base64');
}
}

View File

@@ -0,0 +1,10 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts"]
}

View File

@@ -0,0 +1,25 @@
# @the-order/schemas
Shared schemas and contracts for The Order using Zod.
## Usage
```typescript
import { UserSchema, DocumentSchema } from '@the-order/schemas';
const user = UserSchema.parse(userData);
const document = DocumentSchema.parse(documentData);
```
## Available Schemas
- `UserSchema` - User entity schema
- `DocumentSchema` - Document entity schema
- `DealSchema` - Deal entity schema
## OpenAPI Generation
```bash
pnpm generate:openapi
```

View File

@@ -0,0 +1,24 @@
{
"name": "@the-order/schemas",
"version": "0.1.0",
"private": true,
"description": "Shared schemas and contracts for The Order",
"main": "./src/index.ts",
"types": "./src/index.ts",
"scripts": {
"build": "tsc",
"dev": "tsc --watch",
"lint": "eslint src --ext .ts",
"type-check": "tsc --noEmit",
"generate:openapi": "ts-node scripts/generate-openapi.ts"
},
"dependencies": {
"zod": "^3.22.4"
},
"devDependencies": {
"@types/node": "^20.10.6",
"typescript": "^5.3.3",
"zod-to-openapi": "^0.2.1"
}
}

View File

@@ -0,0 +1,23 @@
import { z } from 'zod';
// Lifecycle states a deal may be in.
export const DealStatusSchema = z.enum(['draft', 'active', 'closed', 'archived']);
// Full Deal entity; timestamps accept either Date objects or ISO-8601 strings.
export const DealSchema = z.object({
id: z.string().uuid(),
name: z.string().min(1),
status: DealStatusSchema,
dataroomId: z.string().uuid().optional(),
createdAt: z.date().or(z.string().datetime()),
updatedAt: z.date().or(z.string().datetime()),
});
export type Deal = z.infer<typeof DealSchema>;
// Creation payload: server assigns id and timestamps, so they are omitted.
export const CreateDealSchema = DealSchema.omit({
id: true,
createdAt: true,
updatedAt: true,
});
export type CreateDeal = z.infer<typeof CreateDealSchema>;

View File

@@ -0,0 +1,24 @@
import { z } from 'zod';
// Document categories recognized by the system.
export const DocumentTypeSchema = z.enum(['legal', 'treaty', 'finance', 'history']);
// Full Document entity; inline `content` and external `fileUrl` are both optional.
// Timestamps accept either Date objects or ISO-8601 strings.
export const DocumentSchema = z.object({
id: z.string().uuid(),
title: z.string().min(1),
type: DocumentTypeSchema,
content: z.string().optional(),
fileUrl: z.string().url().optional(),
createdAt: z.date().or(z.string().datetime()),
updatedAt: z.date().or(z.string().datetime()),
});
export type Document = z.infer<typeof DocumentSchema>;
// Creation payload: server assigns id and timestamps, so they are omitted.
export const CreateDocumentSchema = DocumentSchema.omit({
id: true,
createdAt: true,
updatedAt: true,
});
export type CreateDocument = z.infer<typeof CreateDocumentSchema>;

View File

@@ -0,0 +1,8 @@
/**
* The Order Schemas
*/
export * from './user';
export * from './document';
export * from './deal';

View File

@@ -0,0 +1,20 @@
import { z } from 'zod';
// Full User entity; timestamps accept either Date objects or ISO-8601 strings.
export const UserSchema = z.object({
id: z.string().uuid(),
email: z.string().email(),
name: z.string().min(1),
createdAt: z.date().or(z.string().datetime()),
updatedAt: z.date().or(z.string().datetime()),
});
export type User = z.infer<typeof UserSchema>;
// Creation payload: server assigns id and timestamps, so they are omitted.
export const CreateUserSchema = UserSchema.omit({
id: true,
createdAt: true,
updatedAt: true,
});
export type CreateUser = z.infer<typeof CreateUserSchema>;

View File

@@ -0,0 +1,10 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts"]
}

View File

@@ -0,0 +1,22 @@
# @the-order/storage
Storage abstraction for S3/GCS with WORM mode support.
## Usage
```typescript
import { StorageClient, WORMStorage } from '@the-order/storage';
const storage = new StorageClient(config);
const wormStorage = new WORMStorage(config);
await storage.upload({ key: 'file.txt', content: 'Hello' });
```
## Features
- S3 and GCS support
- WORM (Write Once Read Many) mode
- Presigned URL generation
- Object lifecycle management

View File

@@ -0,0 +1,22 @@
{
"name": "@the-order/storage",
"version": "0.1.0",
"private": true,
"description": "Storage abstraction for S3/GCS with WORM mode support",
"main": "./src/index.ts",
"types": "./src/index.ts",
"scripts": {
"build": "tsc",
"dev": "tsc --watch",
"lint": "eslint src --ext .ts",
"type-check": "tsc --noEmit"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.490.0"
},
"devDependencies": {
"@types/node": "^20.10.6",
"typescript": "^5.3.3"
}
}

View File

@@ -0,0 +1,7 @@
/**
* The Order Storage Package
*/
export * from './storage';
export * from './worm';

View File

@@ -0,0 +1,43 @@
/**
* Storage abstraction for S3/GCS
*/
/** Backend selection and credentials for object storage. */
export interface StorageConfig {
provider: 's3' | 'gcs';
bucket: string;
region?: string;
accessKeyId?: string;
secretAccessKey?: string;
}
/** One object to store: key, payload, and optional content-type/metadata. */
export interface StorageObject {
key: string;
content: Buffer | string;
contentType?: string;
metadata?: Record<string, string>;
}
/**
* Provider-agnostic object-storage client (S3/GCS).
* All methods are stubs: they currently reject with "Not implemented".
*/
export class StorageClient {
constructor(private config: StorageConfig) {}
/** Upload `object` to the configured bucket. Stub — always rejects. */
async upload(object: StorageObject): Promise<string> {
// Implementation for file upload
throw new Error('Not implemented');
}
/** Download the object stored at `key`. Stub — always rejects. */
async download(key: string): Promise<Buffer> {
// Implementation for file download
throw new Error('Not implemented');
}
/** Delete the object stored at `key`. Stub — always rejects. */
async delete(key: string): Promise<void> {
// Implementation for file deletion
throw new Error('Not implemented');
}
/** Create a time-limited presigned URL for `key`. Stub — always rejects. */
async getPresignedUrl(key: string, expiresIn: number): Promise<string> {
// Implementation for presigned URL generation
throw new Error('Not implemented');
}
}

View File

@@ -0,0 +1,26 @@
/**
* WORM (Write Once Read Many) mode storage
*/
import { StorageClient, StorageObject } from './storage';
/**
 * Write-Once-Read-Many storage: uploads are rejected when the key already
 * exists, and deletion is forbidden outright.
 */
export class WORMStorage extends StorageClient {
  /** Upload only if `object.key` is not already present (no overwrites). */
  async upload(object: StorageObject): Promise<string> {
    if (await this.objectExists(object.key)) {
      throw new Error(`Object ${object.key} already exists in WORM storage`);
    }
    return super.upload(object);
  }

  /** Always rejects: WORM storage is append-only. */
  async delete(key: string): Promise<void> {
    throw new Error('Deletion not allowed in WORM mode');
  }

  /** Existence check against the backing store. Stub — always rejects. */
  private async objectExists(key: string): Promise<boolean> {
    // Implementation to check if object exists
    throw new Error('Not implemented');
  }
}

View File

@@ -0,0 +1,10 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts"]
}

View File

@@ -0,0 +1,21 @@
# @the-order/test-utils
Shared testing utilities for The Order monorepo.
## Usage
```typescript
import { createTestUser, createTestDocument, sleep } from '@the-order/test-utils';
const user = createTestUser({ email: 'custom@example.com' });
const doc = createTestDocument({ title: 'My Document' });
await sleep(1000);
```
## Available Utilities
- `createTestUser()` - Create a test user object
- `createTestDocument()` - Create a test document object
- `sleep()` - Wait for a specified number of milliseconds
- `createMockResponse()` - Create a mock fetch response

View File

@@ -0,0 +1,22 @@
{
"name": "@the-order/test-utils",
"version": "0.1.0",
"private": true,
"description": "Shared testing utilities for The Order",
"main": "./src/index.ts",
"types": "./src/index.ts",
"scripts": {
"test": "vitest run",
"test:watch": "vitest",
"type-check": "tsc --noEmit"
},
"dependencies": {
"@vitest/ui": "^1.1.0",
"vitest": "^1.1.0"
},
"devDependencies": {
"@types/node": "^20.10.6",
"typescript": "^5.3.3"
}
}

View File

@@ -0,0 +1,62 @@
/**
* Test utilities for The Order
*/
/**
 * Build a TestUser fixture with sensible defaults; any field can be
 * overridden via `overrides`.
 */
export function createTestUser(overrides?: Partial<TestUser>): TestUser {
  const defaults: TestUser = {
    id: 'test-user-id',
    email: 'test@example.com',
    name: 'Test User',
  };
  return Object.assign({}, defaults, overrides);
}
/**
 * Build a TestDocument fixture with sensible defaults; any field can be
 * overridden via `overrides`.
 */
export function createTestDocument(overrides?: Partial<TestDocument>): TestDocument {
  const defaults: TestDocument = {
    id: 'test-doc-id',
    title: 'Test Document',
    type: 'legal',
    content: 'Test content',
  };
  return Object.assign({}, defaults, overrides);
}
/** Resolve after approximately `ms` milliseconds (setTimeout-based). */
export function sleep(ms: number): Promise<void> {
  return new Promise<void>((resolve) => {
    setTimeout(resolve, ms);
  });
}
/**
 * Build a JSON fetch Response for tests: body is `data` serialized,
 * status defaults to 200, Content-Type is application/json.
 */
export function createMockResponse(data: unknown, status = 200): Response {
  const body = JSON.stringify(data);
  const init: ResponseInit = {
    status,
    headers: { 'Content-Type': 'application/json' },
  };
  return new Response(body, init);
}
// Types
/** Shape returned by createTestUser. */
export interface TestUser {
id: string;
email: string;
name: string;
}
/** Shape returned by createTestDocument. */
export interface TestDocument {
id: string;
title: string;
type: string;
content: string;
}

View File

@@ -0,0 +1,10 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts"]
}

28
packages/ui/README.md Normal file
View File

@@ -0,0 +1,28 @@
# @the-order/ui
Design system and UI components for The Order.
## Usage
```tsx
import { Button } from '@the-order/ui';
function MyComponent() {
return <Button variant="primary">Click me</Button>;
}
```
## Components
- `Button` - Button component with variants
## Development
```bash
# Build
pnpm build
# Watch mode
pnpm dev
```

28
packages/ui/package.json Normal file
View File

@@ -0,0 +1,28 @@
{
"name": "@the-order/ui",
"version": "0.1.0",
"private": true,
"description": "Design system and UI components for The Order",
"main": "./src/index.ts",
"types": "./src/index.ts",
"scripts": {
"build": "tsc",
"dev": "tsc --watch",
"lint": "eslint src --ext .ts,.tsx",
"type-check": "tsc --noEmit"
},
"dependencies": {
"react": "^18.2.0",
"react-dom": "^18.2.0"
},
"devDependencies": {
"@types/react": "^18.2.45",
"@types/react-dom": "^18.2.18",
"typescript": "^5.3.3"
},
"peerDependencies": {
"react": "^18.2.0",
"react-dom": "^18.2.0"
}
}

View File

@@ -0,0 +1,36 @@
import React from 'react';
export interface ButtonProps extends React.ButtonHTMLAttributes<HTMLButtonElement> {
variant?: 'primary' | 'secondary' | 'outline';
size?: 'sm' | 'md' | 'lg';
}
export const Button: React.FC<ButtonProps> = ({
variant = 'primary',
size = 'md',
children,
className = '',
...props
}) => {
const baseClasses = 'font-medium rounded-lg transition-colors';
const variantClasses = {
primary: 'bg-blue-600 text-white hover:bg-blue-700',
secondary: 'bg-gray-600 text-white hover:bg-gray-700',
outline: 'border border-gray-300 text-gray-700 hover:bg-gray-50',
};
const sizeClasses = {
sm: 'px-3 py-1.5 text-sm',
md: 'px-4 py-2 text-base',
lg: 'px-6 py-3 text-lg',
};
return (
<button
className={`${baseClasses} ${variantClasses[variant]} ${sizeClasses[size]} ${className}`}
{...props}
>
{children}
</button>
);
};

View File

@@ -0,0 +1,7 @@
/**
* UI Components
*/
// Export components here as they are created
export { Button } from './Button';

6
packages/ui/src/index.ts Normal file
View File

@@ -0,0 +1,6 @@
/**
* The Order UI Component Library
*/
export * from './components';

11
packages/ui/tsconfig.json Normal file
View File

@@ -0,0 +1,11 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src",
"jsx": "react-jsx"
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.test.tsx", "**/*.spec.ts", "**/*.spec.tsx"]
}

View File

@@ -0,0 +1,21 @@
# @the-order/workflows
Temporal/Step Functions workflow definitions for The Order.
## Usage
```typescript
import { intakeWorkflow, reviewWorkflow } from '@the-order/workflows';
const result = await intakeWorkflow({
documentId: 'doc-123',
fileUrl: 'https://...',
userId: 'user-123',
});
```
## Workflows
- `intakeWorkflow` - Document intake and processing
- `reviewWorkflow` - Document review and approval

View File

@@ -0,0 +1,22 @@
{
"name": "@the-order/workflows",
"version": "0.1.0",
"private": true,
"description": "Temporal/Step Functions workflow definitions",
"main": "./src/index.ts",
"types": "./src/index.ts",
"scripts": {
"build": "tsc",
"dev": "tsc --watch",
"lint": "eslint src --ext .ts",
"type-check": "tsc --noEmit"
},
"dependencies": {
"@temporalio/workflow": "^1.8.0"
},
"devDependencies": {
"@types/node": "^20.10.6",
"typescript": "^5.3.3"
}
}

View File

@@ -0,0 +1,7 @@
/**
* The Order Workflows Package
*/
export * from './intake';
export * from './review';

Some files were not shown because too many files have changed in this diff Show More